Compare commits


657 Commits

Author SHA1 Message Date
Anthony Dresser
f46e1471cb add GITHUBTOKEN env for product installs (#8717) 2020-01-30 13:53:36 -08:00
Anthony Dresser
6db7f727ee Add missing deps for linux image (#8958)
* add missing deps for linux image

* use docker versioning

* add comment to linux docker container
2020-01-30 13:20:10 -08:00
Karl Burtram
596d65b2e2 Bump to 1.14.2 for potential hotfix 2020-01-30 11:47:22 -08:00
Anthony Dresser
f9b968c1ae use URI to equal (#8770) 2019-12-23 18:05:34 -08:00
Karl Burtram
e3f6feb135 Revert "Bump SQL Tools to 2.0.0-release.41"
This reverts commit 7b0be2e773.
2019-12-23 18:01:32 -08:00
Karl Burtram
7b0be2e773 Bump SQL Tools to 2.0.0-release.41 2019-12-19 13:30:11 -08:00
Karl Burtram
9fec7edba7 Bump to 1.14.1 for Dec hotfix 1 2019-12-19 13:28:46 -08:00
swjain23
27468f75a5 Bug fix to return authType as undefined if displayName is not set (#8731)
We noticed that if displayName is undefined, this method would return the first auth type it found, since getAuthTypeDisplayName() would return undefined.
If the displayName is undefined, we would not have a matchingType, and the result should be undefined (see the sketch after this entry).
2019-12-18 11:45:57 -08:00
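A minimal TypeScript sketch of the guard described in this entry (the interface and function names are hypothetical; this is not the actual azuredatastudio code):

```typescript
interface AuthTypeOption {
    name: string;
    displayName?: string;
}

// Hypothetical helper: resolve an auth type from its display name.
// When displayName is not set, return undefined instead of falling back to
// the first option (whose own display name may also be undefined).
function getAuthTypeByDisplayName(
    displayName: string | undefined,
    options: AuthTypeOption[]
): AuthTypeOption | undefined {
    if (!displayName) {
        return undefined;
    }
    return options.find(option => option.displayName === displayName);
}
```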
Karl Burtram
79ad848216 Bump SQL Tools to 2.0.0-release.40 (#8719)
To pick up DacFx vbump
2019-12-17 15:44:38 -08:00
Anthony Dresser
1a2c6f1578 remove unnecessary info from problem svgs (#8712) 2019-12-17 10:24:44 -08:00
Aditya Bist
17e4d3bfa3 fix provider dropdown option for cms dialog (#8711) 2019-12-16 19:09:39 -08:00
Alan Ren
2599fb1252 download 2019 developer edition (#8704) 2019-12-16 14:56:14 -08:00
Charles Gagnon
26d59b528e Bump extension versions (#8674) (#8697)
* Bump agent version

* Bump more extensions

* Undo version change for BDC - is builtin

(cherry picked from commit 9587edd867)
2019-12-16 14:40:18 -08:00
Alan Ren
093f44a1e7 change order of tasks (#8703) 2019-12-16 14:14:44 -08:00
Chris LaFreniere
290f43dbd7 Fix passing in connection profile (#8689) 2019-12-16 14:14:13 -08:00
Chris LaFreniere
fb2486a54b Fix Notebook Scrolling by Setting alwaysConsumeMouseWheel to False (#8686)
* flip alwaysConsumeMouseWheel to false for nb

* remove semicolon
2019-12-16 14:14:05 -08:00
Charles Gagnon
7e02c16fd7 Revert iKey (#8667) 2019-12-13 09:54:12 -08:00
BranislavGrbicMDCS
6638db1f35 Brgrbic/code cleanup issue8158 (#8655)
* Removing engine edition specific condition from displaying error message

* Removing unused import
2019-12-12 13:24:31 -08:00
Anthony Dresser
f0dde491be fix edit data close (#8657) 2019-12-12 13:18:33 -08:00
Anthony Dresser
994a2382ad fix build stream for rebuild extensions (#8650) 2019-12-12 12:07:49 -08:00
Charles Gagnon
856fec4243 Remove preview tags (#8653) 2019-12-12 11:24:05 -08:00
Chris LaFreniere
d1c594cfd0 Change deploy string from connections viewlet/welcome page (#8646)
* Change deploy string from connections viewlet

* Also change welcome page
2019-12-12 10:25:22 -08:00
Anthony Dresser
86da9852ca distro (#8651) 2019-12-11 22:48:46 -08:00
Anthony Dresser
4ba6a979ba Merge from vscode a4177f50c475fc0fa278a78235e3bee9ffdec781 (#8649)
* Merge from vscode a4177f50c475fc0fa278a78235e3bee9ffdec781

* distro

* fix tests
2019-12-11 22:42:23 -08:00
Anthony Dresser
82974a2135 remove some unused code (#8641) 2019-12-11 20:06:40 -08:00
Anthony Dresser
585c18ef4d distro update (#8647) 2019-12-11 18:11:55 -08:00
Alex Ma
7ec516d851 Change typo for monaco-pane-view (#8644)
* changed monaco-panel to pane (typo in classname)

* removed headervisible
2019-12-11 16:56:37 -08:00
Charles Gagnon
7e970d04ca Fix tree refresh (#8625)
* first part

* remove another refresh collapse expand

* Fix scriptable nodes

* Fix test
2019-12-11 16:34:03 -08:00
Charles Gagnon
808ce4366d Fix error deleting mount (#8637) 2019-12-11 16:31:44 -08:00
Arvind Ranasaria
8271226487 Improve performance of tools table painting for bdc deployment tools (#8628)
* get tool status at tool construction

* review feedback
2019-12-11 13:07:16 -08:00
Arvind Ranasaria
bbb7a67bd2 Update description to fit in Description column (#8633) 2019-12-11 10:41:00 -08:00
Chris LaFreniere
698b4fce41 Add generic select box padding (#8635) 2019-12-10 22:14:01 -08:00
Chris LaFreniere
302e8305ef add titles to collapse expand button (#8607) 2019-12-10 21:08:28 -08:00
Anthony Dresser
659f392196 add readonly method to editors (#8630) 2019-12-10 18:16:16 -08:00
Karl Burtram
775a25d944 Bump SQL Tools to 2.0.0-release.39 (#8631) 2019-12-10 16:59:42 -08:00
Chris LaFreniere
bceb766d28 Notebooks: Fix Cancel for Connection Dialog to Actually Close Window (#8626)
* Fix connection dialog not going away on cancel

* Remove unnecessary change
2019-12-10 16:59:15 -08:00
Arvind Ranasaria
36fd618ed4 Add validation for cluster name (#8617)
* adding input validation for cluster name

* Adding type to the inputValidtor

* move out const to global space
2019-12-10 11:50:46 -08:00
Charles Gagnon
99c473cdf6 Fix shift + enter to go to previous find entry in profiler (#8615) 2019-12-10 07:33:49 -08:00
Benjin Dubishar
88b55d0e06 Fixing extension exclusion name (#8616) 2019-12-09 18:52:11 -08:00
Cory Rivera
184d4bbe27 Add units tests for Notebook Actions (#8595) 2019-12-09 15:37:40 -08:00
Maddy
4fc6f4a13e unescape the encoded chars in tablename uri (#8446)
* unesacpe the encoded chars in tablename uri

* unescape special characters
2019-12-09 14:40:21 -08:00
Anthony Dresser
adad11c725 Add scripts for creating artifacts (#8602)
* add remote build to the pipeline

* add a separate compile step

* fix darwin build

* add linux container and fix docker creation

* fix distro

* remove system install and add xvfb start

* distro

* add logic to only run tests on hosted machine

* fix yml

* fix yml

* add linux docker container

* fix docker file

* fix docker

* fix darwin

* fix linux build

* add cache salt to npm cache

* intentionally ignore kerberos binaries

* disable docker for now

* remove vsix from win32

* fix linux and win32

* fix linux and win32

* fix linux and win32

* fix linux

* maybe fix win32

* fix linux

* fix linux image; disable server package for now

* fix minimatch for win32 test

* fix linux build

* add back in docker

* fix test

* use tmp directory instead of workspace

* change name of docker image

* try a different folder

* fix download

* add a git clean step

* bump cache

* fix issues with building

* readd windows build, revert signing changes

* simplify win32 server package

* some more optimizations

* use decompress task

* add back in install for windows test

* fix linux build

* add integration test to bat file

* make platform the same

* add copy extension to windows test

* revert tests back

* fix vsix drop acquisition

* initial changes

* fix download

* fix dependent on for release

* just download everything, which makes it easier

* setup pipeline artifacts

* more clean up

* fix linux

* add logic to install extensions for integration tests

* fix extension install

* fix build failures

* fix some issues

* fix darwin drop

* change linux build copy to js

* fix darwin archive

* fix copy artifacts and use it for windows

* use for darwin

* fix darwin

* add dep on linux

* fix win32

* fix darwin

* fix copy artifacts

* mkdir p darwin

* fix copy

* add error handler

* add more binaries

* add more binaries

* fix archive path on linux

* add more options to integration extension install

* add more binaries

* add verbose to installer copy

* fix ip rate issues

* fix bat file for including extensions

* move echo

* add windows test condition

* use powershell cmdlet rather than cp

* remove verbose

* remove compiling of extensions

* fix pipelines

* update docker location

* fix copy item

* fix signing on win32

* fix locations

* move back to using cp

* ensure the docker folder exists

* test a createdrop script on darwin

* wip

* fix copy

* add drop for linux

* fix builds

* fix drop

* fix docker on linux

* fix darwin

* lets try this again

* fix linux drop

* i guess try the copy files task

* add create drop for win32

* ensure windows drop location exists

* fix extension install

* just use mkdir

* add better logic for installing extensions

* ignore errors?

* try force

* testing

* ok this should work

* use production cli

* fix liveshare vscodeignore

* fix integration test script

* revert changes to integration tests to fix them

* try newitem

* remove exec

* explicitly clear last exit code

* fix test build

* revert publish scripts

* add version json

* fix tests

* add back sources creation

* this is stupid

* fix clean positioning

* add version information to drop

* fix locations of artifacts in publish scripts
2019-12-09 14:34:46 -08:00
Anthony Dresser
a1b5af0445 remove secrets from ci (#8609) 2019-12-09 14:26:21 -08:00
Zbyněk Sailer
db4f512991 LOC CHECKIN | Microsoft/azuredatastudio master | 20191205 (#8571) 2019-12-09 10:57:01 -08:00
Charles Gagnon
42ff30515c Don't instrument any test code for coverage (#8598) 2019-12-09 09:01:10 -08:00
Chris LaFreniere
62565e0577 Notebooks: Only have One Connection in Attach To Dropdown (#8582)
* Have only one connection in attach to dropdown

* LGTM fixes

* Test fixes not unnecessarily changing context 2x

* PR Feedback
2019-12-08 11:03:51 -08:00
Anthony Dresser
de177c0335 distro (#8601) 2019-12-07 17:22:36 -08:00
Anthony Dresser
d614116b63 Merge from vscode a416c77e56ef0314ae00633faa04878151610de8 (#8600)
* Merge from vscode a416c77e56ef0314ae00633faa04878151610de8

* distro

* fix tests

* fix tests
2019-12-07 17:19:16 -08:00
Anthony Dresser
a7ff238653 Optimize pipelines (#8520)
Too Many commits
2019-12-07 14:27:38 -08:00
Arvind Ranasaria
6fb120f5dd Add tool path column to the deployment tools table (#8597)
* remove redundant console.warn messages

* add tool path, trim existing columns in tools table

* Update dockerTool.ts
2019-12-06 15:53:40 -08:00
Charles Gagnon
6e8cc3aaca Fix accessibility issues with Manage Access dialog (#8586)
* Fix accessibility issues with Manage Access dialog

* implement more property interfaces

* Fix lgtm errors

* Simplify condition
2019-12-06 15:09:42 -08:00
Arvind Ranasaria
18ab73cc1d remove redundant console.warn messages (#8593) 2019-12-06 14:16:38 -08:00
Charles Gagnon
d1c7370f1c Make links more descriptive (#8591) 2019-12-06 13:06:49 -08:00
Sakshi Sharma
794f7a14c0 Update schema compare extension to support SQL Login (#8567)
* Update schema compare extension to support SQL Login

* Update schema compare extension to support SQL Login

* Fixed the scenario where the scmp file doesn't open as expected: cleaning up previous connection information and setting appropriate button states if either source or target info is missing. Also updated function getEndpointName to return an empty string if serverName and databaseName aren't present, instead of partial information (see the sketch after this entry).

* Addressed comments. Fixed var names and moved repetitive code into a helper function.
2019-12-06 11:52:48 -08:00
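A hypothetical sketch of the getEndpointName behavior described in this entry (the signature and the "server.database" format are assumptions; the real function lives in the schema compare extension):

```typescript
// Assumed behavior: return '' unless both parts are present, rather than
// partial information such as "server." or ".database".
function getEndpointName(serverName?: string, databaseName?: string): string {
    if (!serverName || !databaseName) {
        return '';
    }
    return `${serverName}.${databaseName}`;
}
```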
Alan Ren
7cd2a6d6aa fix radio card group rendering error (#8587)
* fix rendering error

* remove extra $ sign
2019-12-06 10:20:37 -08:00
Amir Omidi
febf6b9e70 Handle tenant failures (#8578)
* Handle tenant failures

* Change the comparison

* Fix issue
2019-12-05 17:47:05 -08:00
Arvind Ranasaria
7201025a15 Changes to remove Pip3 based installations for azdata (#8577)
* saving intermediate work - to merge master

* remove inadvertent change

* Remove Pip3 installation from all tools

* working version

* making some small fixes

* add back accidentally removed file

* Update resourceTypePickerDialog.ts

* Removing redundant trailing ","

* Remove commented dead code

* fix casing of nls string

* remove inadvertent change

* change installation locations from fields to properties so that they are constructed at run time
2019-12-05 17:15:27 -08:00
Anthony Dresser
4787d7ba5c fix notebookinput matches function to work with replacement (#8585) 2019-12-05 16:53:36 -08:00
Alan Ren
3de95af25c check setting directly to avoid value reset (#8583) 2019-12-05 15:57:13 -08:00
swjain23
0bf4790a64 Add changes for flavor selection (#8419)
* Add changes for flavor selection

* Use getDefaultProviderId method

* Update default engine user setting description

* Add back check for codeEditor

* Add test for multiple providers

* Removing extra merge line

* Add an attribute to ConnectionProviderProperties for language flavor

Adding a boolean property to ConnectionProviderProperties for providers that are language flavors. When it is set to true, the provider will be part of drop down for changing SQL language flavor.

* Update variable name

* Put logic for removing CMS at one place and remove flag for flavor provider

* Using keys instead of entries

Using Object.keys instead of entries, as doing [0] can be error-prone if no provider matches (see the sketch after this entry).

* Adding logic to check from params

* Updating variable names

* Rename dedup map

* Fix action
2019-12-05 15:28:35 -08:00
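A minimal TypeScript sketch of the dedup/guard pattern referenced in this entry (the interface and function names are hypothetical, and isLanguageFlavor is an assumed name for the new boolean attribute, not the actual ConnectionProviderProperties definition):

```typescript
interface ConnectionProviderEntry {
    providerId: string;
    isLanguageFlavor?: boolean; // assumed name for the new boolean attribute
}

// Collect the provider ids that should appear in the SQL language flavor
// dropdown. Object.keys on the deduped map returns [] when nothing matches,
// which avoids indexing [0] into a possibly empty Object.entries() result.
function getFlavorProviderIds(providers: { [id: string]: ConnectionProviderEntry }): string[] {
    const deduped: { [id: string]: ConnectionProviderEntry } = {};
    for (const id of Object.keys(providers)) {
        const provider = providers[id];
        if (provider.isLanguageFlavor) {
            deduped[provider.providerId] = provider;
        }
    }
    return Object.keys(deduped);
}
```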
Leila Lali
0d9353d99e Manage Package Dialog Refactor (#8473)
* Refactoring the Manage Packages dialog so that other extensions can contribute to it by registering package manage providers for different locations and package types
2019-12-05 10:26:50 -08:00
Maddy
a898c46e74 version bump (#8563)
* version bump

* vbump the release version as well
2019-12-05 10:13:56 -08:00
Anthony Dresser
493e7087cf update distro (#8570) 2019-12-04 19:55:52 -08:00
Anthony Dresser
f5ce7fb2a5 Merge from vscode a5cf1da01d5db3d2557132be8d30f89c38019f6c (#8525)
* Merge from vscode a5cf1da01d5db3d2557132be8d30f89c38019f6c

* remove files we don't want

* fix hygiene

* update distro

* update distro

* fix hygiene

* fix strict nulls

* distro

* distro

* fix tests

* fix tests

* add another edit

* fix viewlet icon

* fix azure dialog

* fix some padding

* fix more padding issues
2019-12-04 19:28:22 -08:00
Benjin Dubishar
a8818ab0df Initial commit of Database Projects extension (#8540)
* Initial commit of Database Projects extension

* Removing unused references, correcting license, tabs -> spaces in package.json

* cleaning up linter errors
2019-12-04 15:02:55 -08:00
Cory Rivera
9691fab917 Add code coverage tests for cell.ts (#8564) 2019-12-04 14:45:48 -08:00
Benjin Dubishar
a7f5741608 Correcting license link on schema-compare (#8560) 2019-12-04 14:44:04 -08:00
Alan Ren
c34869c243 improve the install sql on windows scenario (#8559) 2019-12-04 10:59:30 -08:00
Alan Ren
ab94b9785e deprecate the card component (#8552)
* deprecate the card component

* add deprecated tag
2019-12-04 10:48:08 -08:00
Arvind Ranasaria
ee94524ab1 Pull GITHUB_TOKEN from akv store (#8538)
* Pull GITHUB_TOKEN from akv store

* fixing subscription for ado-secrets

* undo changes to publish-types.yml

* fix subscription again
2019-12-04 09:37:58 -08:00
Charles Gagnon
6cce532ca4 Fix wizard not displaying error messages (#8545) 2019-12-03 17:40:19 -08:00
Charles Gagnon
7f16a4d857 Fix refresh action (#8519)
* Fix refresh action

* Refactor to async

* Fix missing bracket

* Remove unused error level

* Add back in error message

* Fix missing service
2019-12-03 17:40:02 -08:00
Cory Rivera
255ea1945b Only conda install pykerberos on non-windows platforms. (#8544) 2019-12-03 16:40:11 -08:00
Alan Ren
b38b53b658 accessible radio card (#8514)
* accessible radio card group

* set radio card group width

* address comments

* address comments 2

* fix the profile card not being focused issue
2019-12-03 13:26:00 -08:00
Arvind Ranasaria
82c60a23c0 Pull integration test connection string variables from secret store (#8529)
* Pull test secrets from Secret store

* removing redundant steps: from copy/paste

* fix indentation

* fix env: sections for test secrets
2019-12-03 10:18:09 -08:00
Anthony Dresser
c93ea20b75 fix uri string for converting inputs (#8530) 2019-12-03 10:09:20 -08:00
Zbyněk Sailer
6a4e4fc07b LOC CHECKIN | Microsoft/azuredatastudio master | 20191203 (#8533) 2019-12-03 10:07:16 -08:00
Charles Gagnon
d358cdac1e Add tests for declarative table component (#8499) 2019-12-03 09:04:33 -08:00
Charles Gagnon
4f8ced1f6b More dangling promise cleanup (#8518)
* More dangling promise cleanup

* return void

* Function to async

* Fix a couple missed promises
2019-12-02 15:54:33 -08:00
Cory Rivera
8cc60fde90 Add code coverage tests for notebookUtils (#8524) 2019-12-02 14:19:37 -08:00
Charles Gagnon
b8bc629970 Add aria labels and consolidate loc strings (#8494) 2019-12-02 13:33:24 -08:00
Arvind Ranasaria
9a83dfc022 Update sql-product-build.yml for Azure Pipelines (#8508) 2019-12-02 12:07:33 -08:00
Anthony Dresser
d5e26527a6 Clean windows agent on pipeline (#8503)
* add step to clean repo since agent doesn't start clean

* move clean

* disable caching for windows agent
2019-12-02 11:06:52 -08:00
Anthony Dresser
c9226a07c5 fix publish scripts for windows (#8501) 2019-11-27 16:08:56 -08:00
Charles Gagnon
d451528b36 Fix loading component not disappearing (#8495) 2019-11-27 14:42:28 -08:00
Anthony Dresser
48b2cbb0bf Rework windows pipeline to sign less (and min builds) (#8472)
* only sign windows once

* more scoping and add copy steps

* remove reh

* wip

* use min everywhere

* fix zip file name

* fix location of build file

* fix version.json production

* remove unneeded vars

* fix archive drop

* give mac more time

* fix location of windows archive

* fix system location and add comments

* fix installer signing

* remove unnecessary build step

* reduce the sign count

* fix dlls

* remove missing dlls
2019-11-27 13:01:55 -08:00
Anthony Dresser
39e6b9933d add publish scripts (#8490) 2019-11-27 12:50:47 -08:00
Anthony Dresser
d08fb1aee2 fix upgrade path for new serialization model (#8489) 2019-11-27 12:48:54 -08:00
Amir Omidi
5235a1d029 New azure authentication experience (#8483)
* Changes

* Work in progress

* Authenticate with azure

* enbable national clouds and initialization

* Add support for tenants

* Finish up account work

* Finish up azure auth

* Don't allow prompt if we're not initialized

* Shut down server

* Remove trailing comma

* encode uri component

* ignore errors

* Address comments on github

* Fix issues and disable feature without env var

* Don't encode the nonce

* Only use env variables to disable the new sign in

* Show more user friendly messages to users
2019-11-27 12:33:08 -08:00
Charles Gagnon
3135b8525b Add styling and component column type to declarative table (#8476)
* Initial wip

* wip

* Working implementation

* Make widths a bit nicer and remove sqlops addition

* Add sqlops back in

* Fix timing issue with tables

* Undo change to sql.bat and remove loading component when done
2019-11-27 08:06:41 -08:00
Charles Gagnon
0e9797c394 Fix more floating promises (#8460) 2019-11-27 08:04:51 -08:00
Anthony Dresser
4145ecfb32 default to using extension of the file for editor replacement (#8482) 2019-11-26 14:28:27 -08:00
Alan Ren
20e9b329b1 only handle the keyboard events on header (#8488) 2019-11-26 13:42:04 -08:00
Karl Burtram
d1ccbf028f sp_whoisactive and ServerReports updates for build breaks (#8431) 2019-11-26 13:02:34 -08:00
Karl Burtram
a1fc621e1b Bump SQL Tools to 2.0.0-release.37 for logging path fix (#8486) 2019-11-26 12:51:00 -08:00
Anthony Dresser
9ef6bec960 add back in listen for run query (#8481) 2019-11-26 10:40:22 -08:00
Rich Smith
b631530753 Modify connection management to support Active Directory authType for non-SQL DBs (#8434)
* First attempt to add Azure MFA login for PostgreSQL

* Finish merge with master

* Fix auth type default selection

* Add AzureMFAAndUser auth type for Orcas

* Fix formatting

* Update change log

* Incorporate some review comments

* Missed an occurrence of AzureResource

* Try to move all changes out of azdata.d.ts and sqlops.d.ts

* Concrete implementation of ConnectionProfile in azdata no longer has azureAccount

* Use enum names instead of numbers in config files
2019-11-26 10:32:59 -08:00
Anthony Dresser
d9997cebfc Only package external extensions on Linux in the pipelines (#8464)
* don't package every time, only on linux

* fix build

* fix build

* fix floating promises

* add comment

* fix order of steps

* clean up packaging of the extensions

* minor cleanup

* make code easier to read and add more logging

* do some rename

* maybe?

* re order the code

* fix output path not existing
2019-11-25 21:08:58 -08:00
Alan Ren
ffee69a765 set focus to default cards when loaded (#8480)
* set focus to default cards when loaded

* add return type
2019-11-25 20:31:39 -08:00
Anthony Dresser
7a38943412 remove request from azure externals (#8462) 2019-11-25 15:07:53 -08:00
Alan Ren
c970173fc0 fix azdata eula link (#8475) 2019-11-25 14:41:11 -08:00
Alan Ren
0979ce8de6 set dropdown aria-label (#8456) 2019-11-25 13:05:00 -08:00
Alan Ren
878bcc0d92 add service column name (#8457) 2019-11-25 13:04:36 -08:00
Anthony Dresser
68b2f1a8e4 Remove unnecessary lock (#8466)
* remove unnecessary lock

* remove another one
2019-11-25 12:42:31 -08:00
Karl Burtram
8cd06f74b9 Update ENU XLF files (#8469) 2019-11-25 10:55:14 -08:00
Anthony Dresser
43387f0d0b Rework how we handle custom editors (#5696)
* update how we handle editors

* small edit

* handle changing languages

* implement generic language association

* implement notebook serializers

* fix tests

* formatting

* update how we handle editors

* small edit

* handle changing languages

* implement generic language association

* implement notebook serializers

* fix tests

* formatting

* fix broken

* fix compile

* fix tests

* add back in removed note book contributions

* fix layering

* fix compile errors

* fix workbench

* fix hanging promises

* idk why these changed

* fix change

* add comments to language change code

* fix a few bugs

* add query plan association
2019-11-24 19:22:11 -08:00
Anthony Dresser
f3a6fc6f88 remove myself as dri (#8459) 2019-11-24 14:51:50 -08:00
Alan Ren
f2bc367e78 set aria label for options dropdowns (#8453) 2019-11-24 12:23:59 -08:00
Alan Ren
46a8410fc5 make group container accessible (#8452) 2019-11-24 12:23:16 -08:00
Alan Ren
be7c26ede5 update the card role and add aria-checked state (#8451) 2019-11-24 12:03:10 -08:00
Anthony Dresser
fb3b7be9e5 Add release logic to ymls (#8450)
* adding releasing logic via tagging

* add dep for releasing

* fix name

* minor fixes
2019-11-22 19:26:31 -08:00
Alan Ren
78731e0c8c better loading message (#8448) 2019-11-22 18:52:59 -08:00
Karl Burtram
6b67f27cac Bump SQL Tools to 2.0.0-release.35 for SMO update (#8449) 2019-11-22 18:18:33 -08:00
Alan Ren
56182a53d1 add role to card component (#8447) 2019-11-22 17:31:22 -08:00
Maddy
66e1c01793 regex to encode & correctly (#8441) 2019-11-22 17:20:41 -08:00
Anthony Dresser
f0039a64a7 Fix build ymls (#8445)
* add publish mac args

* don't use minified

* continue on error

* no fancy error handling

* don't need continue on error specifier
2019-11-22 17:05:56 -08:00
Anthony Dresser
22501a09a1 Add governance to yml builds (#8444)
* add governance to yml

* remove wrong addition

* remove unnecessary values
2019-11-22 16:56:23 -08:00
Alan Ren
c03cce7f60 set aria label for table (#8443) 2019-11-22 16:05:57 -08:00
Alan Ren
98abf4a758 add aria label for agreement checkbox (#8439) 2019-11-22 15:50:09 -08:00
Anthony Dresser
c19bc54877 add mixin steps to product yml (#8440) 2019-11-22 13:55:21 -08:00
sakshisharma-DS
e17d4e96ae Update exclude/include message in SchemaCompare (#8350)
* Update exclude/include message in SchemaCompare

* Segregated the messages as discussed

* Updated message with a comma and merged two If statements in one
2019-11-22 13:20:38 -08:00
Alan Ren
b333788c3c space key as click (#8437) 2019-11-22 11:24:28 -08:00
Cory Rivera
632ca0685e Add code coverage tests for notebookContexts.ts (#8430) 2019-11-22 11:23:34 -08:00
Charles Gagnon
52de2b4751 Fix clickable being announced for all modelview text (#8384)
* Fix clickable being announced for all modelview text

* Remove unused method

* Move API changes into proposed
2019-11-22 11:01:46 -08:00
Anthony Dresser
fc0c05c755 Remove steps that delete code sign.md (#8397)
* remove steps that delete code sign.md

* update sign tool

* fix config

* wip

* add continue on error for md delete

* try this

* copy exactly what was there

* remove final del
2019-11-21 23:34:18 -08:00
Arvind Ranasaria
22b8ebd281 Adds minimum version requirement for kubectl (#8415)
* saving untested changes

* Fix for issue #8341

* Fix tab navigation within modal dialog (#8326)

* Fix tab navigation within modal dialog

* Add import

* Fix spelling

* Change to just add/remove items from DOM as necessary

* Fix a couple a11y issues with manage access (#8386)

* Fix a couple a11y issues with manage access

* Fix strict null check

* Fix another strict null check

* Update js file with monaco -> ads workbench (#8398)

* Set encoding to true (Bug fix for flavor) (#8395)

For offline scripts it shows the flavor as “Choose SQL Language”, this is because in flavorStatus.ts line 150 when we compare (uri === currentUri), the value of current Uri is "file:///d%3A/GitHub/PGExtension/TestDatabase/1ae730a9.sql" whereas the value for uri is "file:///d:/GitHub/PGExtension/TestDatabase/1ae730a9.sql" which ends up returning label ‘Choose SQL Language’.

In queryInput.ts we set public get uri(): string { return this.getResource().toString(true); }
This is the variable that we use in doChangeLanguageFlavor. And as we compare this with getEditorUri() later in flavorStatus.ts, it does not match. So enabling encoding here as well to get the encoded string in both the cases.

* bump handlebars in extensions (#8411)

* remove active css class on mouse leave (#8410)

* Bump SqlToolsService (#8404)

* Remove open notebook entry points (#8393)

* Add aria role and selected properties (#8405)

* Add aria role and selected properties

* Add img role fix

* Add title to text

* Notebooks cleanup: Remove old out of proc markdown option (#8394)

* Remove old out of proc markdown option

* Revert change to stats.js

* remove debug console.logs

* PR feedback
2019-11-21 15:29:20 -08:00
Anthony Dresser
ec91d3eda0 add filter for git hook on hygiene (#8396) 2019-11-21 13:00:20 -08:00
Anthony Dresser
7b31ee27d8 remove governance checks on ci (#8418) 2019-11-21 12:46:24 -08:00
Anthony Dresser
927120fa3b Remove labels from issue templates (#8392) 2019-11-21 12:11:50 -08:00
Charles Gagnon
f1ca2a35ef Add thank you to changelog (#8424)
@aspnerd Thanks again for the fix! Let me know if you'd prefer not to be added to this list.
2019-11-21 11:53:03 -08:00
Maddy
1760af13d1 Fix/open relative on dev (#8359)
* escape characters only on hyperlinks

* removed extra line

* added tests and changes to accomodate tests

* updates to test

* added comments

* use path.join

* format doc build error

* added comments
2019-11-21 11:24:10 -08:00
Karl Burtram
183cb84fbc Bump Server Reports and whoisactive extensions (#8412) 2019-11-21 10:18:32 -08:00
Charles Gagnon
019d5088ec Add engine edition to connection telemetry (#8425) 2019-11-21 10:05:44 -08:00
Anthony Dresser
15913e5e48 fix mixin (#8420) 2019-11-20 17:08:30 -08:00
Charles Gagnon
e6ffb97a7b Add info message for copy (#8416)
* Couple of a11y fixes for BDC Dashboard

* Moving ariaRole into separate PR

* Remove missed ariaRole tag

* Switch to just using notification dialog

* add back in blank line
2019-11-20 15:49:21 -08:00
Chris LaFreniere
8655044dfb Notebooks cleanup: Remove old out of proc markdown option (#8394)
* Remove old out of proc markdown option

* Revert change to stats.js
2019-11-20 14:00:06 -08:00
Charles Gagnon
f26c790736 Add aria role and selected properties (#8405)
* Add aria role and selected properties

* Add img role fix

* Add title to text
2019-11-20 13:56:59 -08:00
Chris LaFreniere
7e553031ce Remove open notebook entry points (#8393) 2019-11-20 13:46:03 -08:00
Charles Gagnon
164ec41fb1 Bump SqlToolsService (#8404) 2019-11-20 13:32:41 -08:00
Alan Ren
8cd9097526 remove active css class on mouse leave (#8410) 2019-11-20 13:28:06 -08:00
Anthony Dresser
134b0b32c6 bump handlebars in extensions (#8411) 2019-11-20 13:08:59 -08:00
swjain23
de4b7af1ad Set encoding to true (Bug fix for flavor) (#8395)
For offline scripts it shows the flavor as “Choose SQL Language”, this is because in flavorStatus.ts line 150 when we compare (uri === currentUri), the value of current Uri is "file:///d%3A/GitHub/PGExtension/TestDatabase/1ae730a9.sql" whereas the value for uri is "file:///d:/GitHub/PGExtension/TestDatabase/1ae730a9.sql" which ends up returning label ‘Choose SQL Language’.

In queryInput.ts we set public get uri(): string { return this.getResource().toString(true); }
This is the variable that we use in doChangeLanguageFlavor. And as we compare this with getEditorUri() later in flavorStatus.ts, it does not match. So we enable encoding here as well to get the encoded string in both cases (see the sketch after this entry).
2019-11-20 12:36:20 -08:00
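A small sketch of the encoding mismatch described in this entry, using the standalone vscode-uri package (which mirrors the in-repo URI class):

```typescript
import { URI } from 'vscode-uri';

// The same file rendered with and without percent-encoding. toString()
// encodes the drive-letter colon as %3A; toString(true) skips encoding.
const file = URI.file('d:/GitHub/PGExtension/TestDatabase/1ae730a9.sql');

const encoded = file.toString();    // file:///d%3A/GitHub/PGExtension/TestDatabase/1ae730a9.sql
const plain = file.toString(true);  // file:///d:/GitHub/PGExtension/TestDatabase/1ae730a9.sql

console.log(encoded === plain);             // false -> flavor lookup misses
console.log(file.toString(true) === plain); // true  -> compare both sides the same way
```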
Charles Gagnon
f976fc9418 Update js file with monaco -> ads workbench (#8398) 2019-11-19 17:35:07 -08:00
Charles Gagnon
55059907a3 Fix a couple a11y issues with manage access (#8386)
* Fix a couple a11y issues with manage access

* Fix strict null check

* Fix another strict null check
2019-11-19 13:42:53 -08:00
Charles Gagnon
8ca0082ec4 Fix tab navigation within modal dialog (#8326)
* Fix tab navigation within modal dialog

* Add import

* Fix spelling

* Change to just add/remove items from DOM as necessary
2019-11-19 12:43:55 -08:00
Charles Gagnon
5b50696a1b Fix more floating promises (#8374)
* Fix more floating promises

* Fix a few more

* Test fixes

* Fix spellings

* More promise fixes

* couple more

* Few more fixes

* One more missed one
2019-11-18 17:11:25 -08:00
Anthony Dresser
840683e3f0 Build yamls (#8360)
* add darwin build

* add global product build

* add linux build

* fix build

* fix linux; add win32

* formatting

* formatting

* formatting

* formatting

* fix win32

* fix windows

* fix python

* fix linux display

* fix linux

* fix windows naming

* fix windows timeout

* add tagging

* add env for building

* add schedule
2019-11-18 13:50:02 -08:00
Charles Gagnon
ee5dbdffb9 Add charle-gagnon to default assignee (#8382) 2019-11-18 13:32:19 -08:00
Anthony Dresser
43f6a5576d assign chris to notebooks (#8381) 2019-11-18 11:12:33 -08:00
Charles Gagnon
8a44de27e7 monacoworkbench -> adsworkbench (#8380)
* monacoworkbench -> adsworkbench

* Fix typo
2019-11-18 10:57:39 -08:00
Anthony Dresser
fa9bbd4e1e remove commented our code that is removed elsewhere (#8370) 2019-11-18 10:55:31 -08:00
Charles Gagnon
66048f1d63 Default checkbox aria label to label (#8367) 2019-11-18 10:24:27 -08:00
Arvind Ranasaria
bafd9fd437 Native installers for azdata in auto deployment (#8285)
* code complete

* minor fixes from self-review

* installation searchaPaths and display logs fixes

* revert inadvertent change

* fixing installation root for debian and mac

* Changing from getos to linux-release-info with sync api usage for figuring out os distribution

* adding file missed in previous commit

* fixing inadvertent compile error that crept in

* fix default install root for azli
2019-11-17 21:39:57 -08:00
Anthony Dresser
dae71c3bf4 assign myself (#8371) 2019-11-17 20:19:54 -08:00
Charles Gagnon
ae8304fc33 Add focus function for modelview components (#8348)
* Add focus method for modelview components

* Remove focus properties from table and radiobutton

* Fix break
2019-11-15 17:36:55 -08:00
Amir Omidi
d6ef42c8b0 Change how dropdowns get their lengths - take 2 (#8356)
* Add an element for width control

* Change the method that retrieves length

* Change to a readonly element
2019-11-15 15:17:46 -08:00
Anthony Dresser
131b0b93bf distro bump (#8362) 2019-11-15 14:59:28 -08:00
Charles Gagnon
82185f75d7 Fix tab focus for select box (#8349) 2019-11-15 14:31:40 -08:00
Anthony Dresser
3769b5066f bump packages (#8355) 2019-11-15 13:45:23 -08:00
Charles Gagnon
ba8c331356 Add alert role to error message element (#8354) 2019-11-15 11:51:08 -08:00
Alan Ren
80248846bb update the extensions.js (#8352) 2019-11-15 11:46:21 -08:00
Charles Gagnon
cf5297958a Update iKey (#8353) 2019-11-15 11:26:12 -08:00
Karl Burtram
22996cbce7 Update CHANGELOG for 1.13.1 hotfix release (#8342) 2019-11-15 09:27:25 -08:00
Charles Gagnon
7563416754 Fix a couple a11y issues (#8334)
* Fix a couple a11y issues

* Add other dialog
2019-11-15 07:41:09 -08:00
Anthony Dresser
02b1673c71 fix packaging (#8343) 2019-11-14 22:02:47 -08:00
Charles Gagnon
6ef87d7067 Insiders icons (#8313)
* Add separate icons for non-stable builds (#8078)

* Fixes

* Fix linux icon path

* WIP

* WIP

* WIP

* Undo all

* Try removing insiders icons completely

* Revert "Try removing insiders icons completely"

This reverts commit 7cdb04fcc3ba6ed92132e5d9113cc331e24b3c26.

* Revert "Undo all"

This reverts commit efac800b63483fbbcacdac45d885275c53350941.

* Revert "WIP"

This reverts commit b20517f374b85f6b02c665b86e73d9ae1b45258b.

* Revert "WIP"

This reverts commit 01383896ccc4939b000d0f08a1b370f6e66b0f3c.

* Revert "WIP"

This reverts commit da7da0a43401f1c97f1bfa66f255666cffe22948.

* Fixes

* Remove test

* Switch logic check

* Use insiders icons only

* Add newline
2019-11-14 17:21:57 -08:00
Anthony Dresser
e3921c6d14 Update configs for distro (#8337)
* update configs for distro

* add comments

* revert product.json

* update distro
2019-11-14 15:31:40 -08:00
Leila Lali
f9ef9d85f4 SQL extension building packages comments (#8296)
* Fixing the comment for building sql extension
2019-11-14 13:57:02 -08:00
Anthony Dresser
7a2c30e159 move code from parts to contrib (#8319) 2019-11-14 12:23:11 -08:00
Anthony Dresser
6438967202 enable stale pr (#8338) 2019-11-14 12:21:24 -08:00
Anthony Dresser
63bf82ad84 Add config for auto labeling (#8316)
* add auto assign label

* formatting
2019-11-14 12:08:16 -08:00
Charles Gagnon
18ab2ae799 Remove TelemetryUtils and use IAdsTelemetryService directly (#8289)
* Remove TelemetryUtils and use IAdsTelemetryService directly

* Fix event names and cleanup

* Fix tests

* Fix strict null check
2019-11-13 13:54:55 -08:00
Charles Gagnon
86d6295bf0 Fix welcome page icon (#8318)
* Fix welcome page icon

* Remove title metadata

* Add comment
2019-11-13 07:31:04 -08:00
Charles Gagnon
3f306d2396 Fix spacing for form layout (#8321) 2019-11-13 07:30:09 -08:00
Chris LaFreniere
2f1f5b2376 Prevent Markdown Cells from Collapsing (#8317) 2019-11-12 18:40:06 -08:00
Anthony Dresser
62d7c71093 Add stale pr config (#8315)
* add stale config

* formatting

* don't perform initially
2019-11-12 16:53:57 -08:00
Charles Gagnon
4f69ed5745 Fix missing icon and add test (#8307)
* Fix missing icon and add test

* Remove debug statement
2019-11-12 16:31:39 -08:00
Charles Gagnon
ddddf3beb4 Fix null ref error in query history (#8304)
* Fix null ref error in query history

* Add null check
2019-11-12 13:38:58 -08:00
Charles Gagnon
0ae525cbd5 Fix a few more floating promises (#8290)
* Fix a few more floating promises

* More explicit calls
2019-11-11 13:42:03 -08:00
Chris LaFreniere
0520870754 notebooks lgtm cleanup part 1 (#8280) 2019-11-11 13:40:48 -05:00
Charles Gagnon
8b17b77010 Undo codicon change to fix modelview images (#8201) 2019-11-11 08:28:48 -08:00
Alan Ren
e2ef1f8a89 remove the extension name link (#8271)
* remove the extension name link

* pr comments

* pr comments
2019-11-09 13:37:09 -08:00
Anthony Dresser
7f7052ad42 Clean up some of the extensions (#8267)
* 💄

* prune unused code

* more cleanup

* remove a bunch of unused code
2019-11-08 11:44:43 -08:00
Arvind Ranasaria
738ca479e4 fixes for #8165, #8167, & #8260 (#8250)
* saving untested work

* fixes for #8165 and #8167

* minor fixes

* fix for #8260

* minor quoting fixes

* fix for #8264

* minor fixes

* minor fixes.

* move tools constants to their own files

* remove execution cell results from notebooks.

* remove extraneous changes

* move ensuring of  StoragePath to platformservice

* remove fix for #8264 pending pm input
2019-11-08 09:11:21 -08:00
Cory Rivera
34a274a7d1 Update bundled python packages to pick up new powershell kernel updates. (#8273) 2019-11-07 16:46:05 -08:00
Charles Gagnon
b1496aa12f Don't show dialog cancelled errors for mount (#8272) 2019-11-07 16:00:23 -08:00
Charles Gagnon
30acba7921 Add preview title to BDC/HDFS components (#8268)
* Add preview title to BDC/HDFS components

* Additional dialogs

* More dialogs
2019-11-07 15:34:38 -08:00
Alan Ren
b5c0c37a23 add preview tag to the UI (#8266) 2019-11-07 14:31:17 -08:00
Anthony Dresser
ef0a92d83f Add compile options to a few extensions (#8252)
* add compile options to a few extensions

* move dep to dev dep

* fix return types
2019-11-07 11:41:31 -08:00
Alan Ren
b364e32beb improve the loading component (#8251)
* improve the loading component

* fix unused method warning

* revert the change
2019-11-07 11:28:38 -08:00
Alan Ren
7f51921176 fix the scroll issue of modal dialog and wizard (#8239)
* fix the scrolling issue of dialog and wizard

* revert unwanted changes
2019-11-07 10:50:00 -08:00
Charles Gagnon
efebd681b6 Add level to dialog titles and form component titles (#8233)
* Add level to dialog titles and form component titles

* Switch to h1

* h2 instead of role and aria-level
2019-11-07 09:32:02 -08:00
Charles Gagnon
d635390b33 Fix missing dashboard icons (#8249) 2019-11-07 08:05:06 -08:00
Anthony Dresser
61f0d614ce update distro (#8255) 2019-11-06 18:55:22 -08:00
Anthony Dresser
27b80804f5 add vm to distro (#8254) 2019-11-06 18:49:21 -08:00
Anthony Dresser
df0c505452 No unused locals (#8231)
* add no unused local

* fix strict null

* fix compile errors

* update vscode comments
2019-11-06 17:22:05 -08:00
Anthony Dresser
564f78b6f6 Revert "Revert "Remove typings and replace missing methods with vscodes (#8217)"" (#8242)
* Revert "Revert "Remove typings and replace missing methods with vscodes (#8217)" (#8240)"

This reverts commit e801a04bcf.

* fix runtime error

* add tests for chartview
2019-11-06 15:00:34 -08:00
Anthony Dresser
df6b6ded33 Distro build (#8235)
* update distro to work

* fix distro build

* fix distro build

* fix distro build
2019-11-06 13:16:39 -08:00
Elliot Boschwitz
e801a04bcf Revert "Remove typings and replace missing methods with vscodes (#8217)" (#8240)
This reverts commit 22a427f934.
2019-11-06 11:33:55 -08:00
Karl Burtram
3b1eaca58e Update Lang Pack extension strings from previous refresh (#8229)
* Update Lang Pack extension strings from previous refresh

* Bump lang pack versions
2019-11-05 17:53:27 -08:00
Anthony Dresser
22a427f934 Remove typings and replace missing methods with vscodes (#8217)
* remove typings and replace missing methods with vscodes

* fix strict-null-checks

* fix tests
2019-11-05 13:03:20 -08:00
Karl Burtram
4645a8ba6b Update ENU XLF (#8227) 2019-11-05 12:26:28 -08:00
Amir Omidi
1b88c10197 Some promise fixes (#8216)
* Some promise fixes

* changes to how we're logging errors

* Fix the tests

* Fix a few other issues
2019-11-05 11:50:39 -08:00
Karl Burtram
5a392dfd58 Revert "Add separate icons for non-stable builds (#8078)" (#8222)
This reverts commit 9eb438bb24.
2019-11-05 11:22:36 -08:00
Charles Gagnon
e49ff93122 Fix link for manual install of VSIX's (#8221) 2019-11-05 10:44:26 -08:00
Charles Gagnon
399788ccc1 Fix row/column count and table aria label (#8219) 2019-11-05 09:52:59 -08:00
Charles Gagnon
08fde8719d Make declarative table read blank for empty text cells (#8215) 2019-11-05 08:02:37 -08:00
Charles Gagnon
f7bef3f87b Update to new iKey (#8206) 2019-11-05 07:55:26 -08:00
Amir Omidi
c70e7794eb Use shared tsconfig (#8214)
* Use shared tsconfig

* Fix errors

* Remove sourcemap option
2019-11-04 17:25:01 -08:00
Charles Gagnon
9eb438bb24 Add separate icons for non-stable builds (#8078) 2019-11-04 17:05:25 -08:00
Charles Gagnon
38decaea90 Fix test script overwriting grep option (#8209)
* Fix test script overwriting grep option

* Better logic and fix unstable scripts too
2019-11-04 11:35:52 -08:00
Anthony Dresser
ade68b184d Move code around for more linting (#8190)
* testing

* moving around all the code

* fix strict nulls
2019-11-04 10:41:28 -08:00
Cory Rivera
3c702c15e2 Initialize cell source to empty array if notebook file's source is undefined. (#8189) 2019-11-04 10:38:14 -08:00
Anthony Dresser
76e8805a6b Remove should (#8196)
* remove should

* fix a few tests
2019-11-04 09:59:18 -08:00
Charles Gagnon
5dc7049f8c Fix floating promises in connection widget (#8198)
* Fix floating promises in connection widget

* PR feedback

* More feedback changes

* Couple missed promises
2019-11-04 07:24:38 -08:00
Karl Burtram
97f852c3d6 Update readme for Nov release (#8193) 2019-11-04 02:40:12 -08:00
Charles Gagnon
a8eed6114b Fix race conditions with BDC dashboard init (#8135)
* Fix race conditions with initialized

* Check for undefined status

* Disable no-floating-promises check

* Add catch instead of disabling tslint disable
2019-11-02 14:58:56 -07:00
Anthony Dresser
6864d39f85 add triggers for ci (#8197) 2019-11-02 13:22:55 -07:00
Udeesha Gautam
6f47c1fcda update dacpac and schema compare readme to remove preview text (#8191)
update dacpac and schema compare readme to remove preview text (#8191)
2019-11-01 18:49:16 -07:00
Maddy
c2c64293f5 missed insiders-only check removal (#8184) 2019-11-01 17:06:38 -07:00
Charles Gagnon
5bbc17be5c Fix ssl config issue (#8187)
* Fix ssl config issue

(cherry picked from commit cacd481bff)

* Better fix

* Actually return...
2019-11-01 16:25:44 -07:00
Charles Gagnon
abbb1e54da Add config for ignoring SSL errors on BDC queries (#8169)
* Add config for ignoring SSL errors on BDC queries

* Fix error handling in write stream

* Disable tslint check

* Handle promise appropriately

* PR comments

* Change defaults to true
2019-11-01 15:20:47 -07:00
Anthony Dresser
08d81927b4 remove js files that are being ignored (#8182) 2019-11-01 14:29:21 -07:00
Anthony Dresser
3d718068d1 update rollup code (#8181) 2019-11-01 13:36:14 -07:00
Anthony Dresser
1d31a6ef98 Bump some packages (#8096)
* update packages

* remove fs

* fix more deps

* test adding governance to ci

* ignore samples for cacheing

* fix save cache too

* bump more packages

* attempt to fix slickgrid

* add more deps

* use slickgrid version

* bump https-proxy

* bump more packages.

* fix service-downloader

* remove typings

* fix compile

* update more packages

* add vscode back to the extensions that need it for testing

* add fail on critical

* regrab proxy agent

* lock more deps

* replace final high warning

* revert service downloader changes

* remove the deps on jquery ui and event drag
2019-11-01 12:55:58 -07:00
Anthony Dresser
9c8f36e463 remove externals for some azure deps (#8168) 2019-11-01 12:19:29 -07:00
Anthony Dresser
bd988f62a2 Add WS as a dep of notebooks (#8154)
* add ws as an optional dep of notebooks

* fix method of adding ws

* fix parsing for yarnrc

* remove optional deps
2019-11-01 12:19:06 -07:00
Zbyněk Sailer
29a46b9f8b LOC CHECKIN | Microsoft/azuredatastudio master | 20191029 (#8081) 2019-11-01 12:16:30 -07:00
Chris LaFreniere
55acb36e33 Update powershell kernel version to 0.1.2 (#8171) 2019-11-01 10:47:32 -07:00
Kevin Cunnane
a8b442a274 recommend datavirtualization instead of sql-vnext (#8172) 2019-10-31 20:25:27 -07:00
Maddy
330f690628 version bump books package (#8174) 2019-10-31 18:03:50 -07:00
Maddy
7cc430d199 added try catch around the json parse and bypassing the err by logging… (#8166)
* added try catch around the json parse and bypassing the err by logging to console

* updated error message with package info

* updates to address PR comments

* added package info and refactored the err logging

* backslash update

* refactored error method and added try for the entire method
2019-10-31 17:57:26 -07:00
Kevin Cunnane
2558d6bff6 Fix #8162 Spark timeout in BDC cluster is higher than client-side, causing successful sessions to show as failed (#8164)
* Fix #8162 Spark timeout in BDC cluster is higher than client-side, causing successful sessions to show as failed
2019-10-31 17:00:36 -07:00
Chris LaFreniere
5aff7ef4c7 Ensure cwd matches notebook path (#8137)
* Ensure cwd matches notebook path

* add error checking

* silent, don't store history
2019-10-31 15:10:49 -07:00
Charles Gagnon
7569f7fa32 Fix node update in bdc dashboard on reconnect (#8138)
* Fix node update in bdc dashboard on reconnect

* Fix no floating promises

* Fix opening from dashboard to always save controller node
2019-10-31 14:42:15 -07:00
Kevin Cunnane
333f634e94 Fix #8159 Mount HDFS broken against latest cluster (#8160)
connectWithRetry needed to pass the arguments as ... (spread) so each arg was set correctly,
instead of having the 1st param be an array of the input arguments (see the sketch after this entry)
2019-10-31 14:39:45 -07:00
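A hypothetical sketch of the argument-passing fix described in this entry (ConnectFn and the helper shape are assumptions; the real code is in the HDFS mount logic):

```typescript
type ConnectFn = (host: string, port: number, useSsl: boolean) => Promise<void>;

async function connectWithRetry(connect: ConnectFn, ...args: Parameters<ConnectFn>): Promise<void> {
    for (let attempt = 0; attempt < 3; attempt++) {
        try {
            // Broken shape: connect(args as any) forwards the whole array as
            // the 1st parameter. Spreading sets each positional arg correctly.
            await connect(...args);
            return;
        } catch (err) {
            if (attempt === 2) {
                throw err;
            }
        }
    }
}
```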
Amir Omidi
b62c0cf2ab More promise cleanup (#8100)
* Some promise cleanup

* Handle more promise issues

* Remove changes that aren't needed anymore

* Use log service

* another one

* Be more explicit

* Some more promises cleaned up

* Handle promises here too

* Strings for errors

* Some more cleanup

* Remove unused imports
2019-10-31 14:30:08 -07:00
BranislavGrbicMDCS
572a83dac7 Hiding error message for sql on demand on server dashboard (#8146)
* Hiding error message for sql on demand

* Using constant instead of hardcoded value
2019-10-31 21:25:39 +01:00
Charles Gagnon
dbf834c00f Fix metrics log links (#8140) 2019-10-31 13:24:56 -07:00
Udeesha Gautam
24a6897836 Updating SqlToolsService To Revert SqlCmd explicit handling (#8133)
Updating SqlToolsService To Revert SqlCmd explicit handling (#8133)
2019-10-31 10:34:17 -07:00
Alan Ren
23da6155dd fix the missing icon issue of expand/collapse btn (#8132) 2019-10-30 20:10:34 -07:00
Charles Gagnon
ef9321ef2c Add error messages to BDC dashboard page (#8103)
* Add error messages to BDC dashboard page

* Remove testing code

* PR fixes
2019-10-30 15:23:34 -07:00
Alan Ren
836bf1d28a use addtionalEnvironmentVariables (#8123) 2019-10-30 15:02:30 -07:00
Alex Ma
9eb319f392 ImageInsight Image Error Notification (#8086)
* image visualization printout (needs localization)

* Add error image and log error

* "added alert error for invalid image"

* removed unnecessary spaces

* removed spaces on 61 and 66

* Added image and used notification service

* Removed unused import

* Changed wording for error
2019-10-30 14:48:12 -07:00
Alan Ren
79b6a14d64 Revert "accept azdata eula (#8114)" (#8121)
This reverts commit 0dec2ff9b5.
2019-10-30 13:57:41 -07:00
Alan Ren
0dec2ff9b5 accept azdata eula (#8114) 2019-10-30 13:31:05 -07:00
Chris LaFreniere
dd270a78fc Notebooks: Improvements to Connection Profile Passing (#8105)
* Fix relative images on notebook markdown pc

* PR feedback around replace ordering

* update databaseName when it's set to master

* using query as example
2019-10-30 13:22:17 -07:00
Anthony Dresser
827e6162c7 fix import (#8116) 2019-10-30 13:11:29 -07:00
Alan Ren
9f54fbc8cc profiler icon issue (#8115) 2019-10-30 12:32:37 -07:00
Shafiq Ur Rahman
f8858a3511 Tab trap for modal (closes #5930) (#8043)
* Tab trap for modal (closes #5930)

* Addressing PR comments

* Fixed formatting.
2019-10-30 11:38:11 -07:00
Charles Gagnon
82e5221024 Fix modal event propagation (#8050)
* Fix event propagation

* Remove unneeded onkeyup method

* Move event handling code into SQL classes
2019-10-30 11:29:01 -07:00
Anthony Dresser
004297aea6 fix linux cache (#8111) 2019-10-30 11:02:53 -07:00
Anthony Dresser
f7b8a019cd Remove references to noderequire (#8101)
* remove references to nodequire

* change promise handling
2019-10-30 10:04:52 -07:00
BranislavGrbicMDCS
a89788a020 Adjusting OE and hiding nodes for sql on demand (#8087) 2019-10-30 18:02:12 +01:00
Chris LaFreniere
62af81e88c Notebooks: Fix relative images not rendering on PC for Markdown Cells (#8091)
* Fix relative images on notebook markdown pc

* PR feedback around replace ordering
2019-10-29 19:48:57 -07:00
Amir Omidi
ab736466cd Some promise cleanup (#8092)
* Some promise cleanup

* Handle more promise issues

* Remove changes that aren't needed anymore

* Use log service

* another one

* Be more explicit
2019-10-29 17:04:46 -07:00
Amir Omidi
6a6f30523c Code -> Azure Data Studio (#8099)
* Change string

* sql carbon edit
2019-10-29 16:22:46 -07:00
Kim Santiago
8e3fa0a26d Bump dacpac and schema compare extension versions and remove preview (#8095)
* bump dacpac and schema compare extension versions and remove preview

* bump azdata dependency version
2019-10-29 16:04:10 -07:00
Amir Omidi
72c088e137 Allow toggle developer tools shortcut all the time (#8003)
* Allow toggle developer tools shortcut all the time

* Remove the import
2019-10-29 11:57:52 -07:00
Kim Santiago
ce5eb00177 Fix schema compare include/exclude behavior (#8042)
* don't uncheck difference if unsuccessful

* changes after rebasing to get schema compare fix

* First cut of column checkbox checking reactive to include operation

* handle blocking dependencies and affected dependencies

* Changing the checked property of table to be list

* Addressing comments

* add map to keep row number of diff entries

* remove findDifferenceRow() since it isn't needed anymore

* fix scrolling to the top when checking/unchecking and add info message

* change checked to updateCells

* improve warning cannot include/exclude message
2019-10-29 11:55:31 -07:00
Alan Ren
5629356c66 notebook background execution (#8079)
* notebook background execution

* code review comments

* comments 2

* more logging
2019-10-29 11:53:50 -07:00
Maddy
6e7311ca87 added openLocalizedook action that opens up the link to localized books. (#8025)
* initial commit

* addressed the tslint floating promise errors

* updated the command name with PM's feedback
2019-10-29 11:51:05 -07:00
Charles Gagnon
d315ccff68 Fix BDC remember password and reprompting connection (#7957)
* Fix remember password and reprompting connection

* comment

* Fix to remember password for session

* Fix floating promises
2019-10-29 07:27:31 -07:00
Aditya Bist
789ee4b133 add repository field for extensions (#8073) 2019-10-28 17:41:23 -07:00
Amir Omidi
428745e929 Cleanup the copy output (#8072)
* Cleanup the copy output

* Use helper functions and cleanup promise handling
2019-10-28 16:13:34 -07:00
Charles Gagnon
cea8d62051 Add SQL Metrics links to BDC dashboard (#8056)
* Add SQL metrics link to dashboard pages

* Only show SQL metrics column for SQL service

* Add param doc
2019-10-28 15:07:32 -07:00
Charles Gagnon
fa79e5b016 Fix for propagating aria-label title of form components to loading component (#8051) 2019-10-28 14:48:47 -07:00
Aditya Bist
15fd37e049 add link to twitter on badge (#8037) 2019-10-28 14:22:13 -07:00
Karl Burtram
1dd4ea19a3 Update agent and import versions for Nov (#8054) 2019-10-28 11:53:12 -07:00
Kevin Cunnane
067af76904 Azure: add PostgreSQL support and refactor to use resource graph (#8046)
* Azure: add PostgreSQL support and refactor to use resource graph
- Refactored to use @azure/arm-resourcegraph for all queries
- Refactored database lookup to do just 2 queries
(all servers, all DBs) instead of waiting serially on 1 query per RG (see the sketch after this entry)
- Added Azure Database for PostgreSQL Servers support in the tree
- Removed use of older azure APIs in preference to ones compatible with resource graph
- Note: Had to use v1.0 of new subscriptions package because resourcegraph is 2 months out of date vs all other packages
2019-10-28 09:47:38 -07:00
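A sketch of the two-query pattern described in this entry, using current @azure/arm-resourcegraph client conventions (the credential handling and query text are illustrative, not the extension's actual code):

```typescript
import { ResourceGraphClient } from '@azure/arm-resourcegraph';
import { DefaultAzureCredential } from '@azure/identity';

// Two Resource Graph queries across all subscriptions (all servers, then all
// databases) instead of one ARM request per resource group.
async function listSqlResources(subscriptions: string[]) {
    const client = new ResourceGraphClient(new DefaultAzureCredential());

    const servers = await client.resources({
        subscriptions,
        query: "Resources | where type == 'microsoft.sql/servers'"
    });
    const databases = await client.resources({
        subscriptions,
        query: "Resources | where type == 'microsoft.sql/servers/databases'"
    });
    return { servers: servers.data, databases: databases.data };
}
```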
Arvind Ranasaria
a7f597c943 dependencies messages, no curl on win32, fixes to min Version (#8039)
* checking temp work to move to another branch

* removing freeTds

* dependencies Messages and No curl on Win32

* ellipsis instead of ...

* add min version check post install and pr fixes

* removing unnecessary comment

* removing old TODO comment

* fix text messages

* add github token to electron download (#8047)

* remove hardcode of kubectl version for download
2019-10-27 17:13:12 -07:00
Charles Gagnon
833adf3515 Fix hygiene errors (#8019) 2019-10-27 14:38:26 -07:00
Charles Gagnon
bd15a96b83 Make form components use title as aria-label (#8040) 2019-10-27 14:37:34 -07:00
Karl Burtram
024bd00d93 Bump ADS to 1.14.0 in master (#8049) 2019-10-27 14:03:19 -07:00
Karl Burtram
eb1aafa639 Bump SQL Tools Service to pickup schema compare changes (#8048) 2019-10-27 13:58:41 -07:00
Anthony Dresser
bf6b68c614 add github token to electron download (#8047) 2019-10-26 15:59:17 -07:00
Alan Ren
a895f53029 min version check (#8038)
* min version check

* comments
2019-10-25 17:11:32 -07:00
Karl Burtram
78d3b9d555 Fix typo in icon path (#8033) 2019-10-25 16:00:44 -07:00
Aditya Bist
42e1b28130 add twitter account badge to readme (#8034) 2019-10-25 15:58:15 -07:00
Karl Burtram
5590d60c5a Revert icon update 8aa8dc29a1 (#8030)
* Revert icon update 8aa8dc29a1

* Add build electron.js file
2019-10-25 14:37:15 -07:00
Maddy
d79423c728 Task/checkfile length (#7959)
* add hygiene task to test file length

* check for the filename length instead of the path

* formatted the error message

* added check for entire path including directories

* error messaged fixed

* check relative length for 150 as agreed upon

* error message to include 150

* added file length filter

* check the file length separately

* ffsdfsdf

* remove the test file

* move it to last

* restore the filtered files for further checks

* removed comment

* test

* test

* remove the test file

* test commit

* remove the test file

* restore fileLengthFilter

* test

* remove the testfile

* revert

* xfgdgdfg

* huh

* test file

* revert.

* add all to the filter
2019-10-25 14:30:43 -07:00
Alan Ren
d1a0ae43c8 update the message (#8031) 2019-10-25 14:11:25 -07:00
Alan Ren
e4b0371b2a fix 2 issues found during testing (#8029) 2019-10-25 13:54:19 -07:00
Anthony Dresser
e191556d3b Update pipelines (#7993)
* update pipelines

* update

* fix scripts

* testing something

* testing something

* testing something

* add github token

* testing something

* delete fiels
2019-10-25 13:04:41 -07:00
Karl Burtram
5a269fc49a Package LiveShare outside core ADS (#8024) 2019-10-25 12:59:10 -07:00
Amir Omidi
613cd58aa3 Don't pack the high level node_modules directory (#7998) 2019-10-25 12:09:55 -07:00
Arvind Ranasaria
af9984f73b pass install paths to notebooks (#8008)
* pass install paths to notebooks

* onComplete

* discover and publish actual installation Path

* pass the path to notebook

* minor fixes needed post merge of code from remote

* fix some errors

* remove unused variable
2019-10-25 12:06:55 -07:00
Chris LaFreniere
3b1c9e910d serverManager to decide if server start needed (#8017) 2019-10-25 11:11:54 -07:00
Chris LaFreniere
5b29aef5f3 fix stdin cancel breaks notebook (#8012) 2019-10-25 10:52:18 -07:00
Anthony Dresser
b65a7795df Testing out ci with workflows (#8005)
* add workflow for ci

* add another step

* remove more steps
2019-10-25 10:30:53 -07:00
BranislavGrbicMDCS
c6a78456b8 Adjusting context menu for SqlOnDemand (#8018) 2019-10-25 17:33:17 +02:00
Chris LaFreniere
f8067ffada Fix for Markdown File in Jupyter Books Viewlet not Opening After 2x (#8009)
* fix for markdown not opening after opened twice

* PR comment to add return type
2019-10-24 22:14:24 -07:00
Chris LaFreniere
f7059a2365 Fix Issue when Saving Wrong Kernel (#7974)
* Only fire kernelChangeEmitter after lang set

* Fix unused import found by lgtm

* Fix comment
2019-10-24 22:13:18 -07:00
Alan Ren
d013b594b1 suppress the alert (#8007) 2019-10-24 20:13:32 -07:00
Charles Gagnon
c5d427ebb1 Fix column text overflow on BDC status pages (#7928)
* Fix column text overflow on status page

* Fix typo

* Fix another typo
2019-10-24 18:31:57 -07:00
Udeesha Gautam
240b90610f Work with single ext loc file (#7894)
* try loc with single file model

* adding filter and languages

* add links in langpack json

* changing variable name and limiting the list to only tested extensions
2019-10-24 18:12:04 -07:00
Chris LaFreniere
5cfad825fc search book gone from command palette (#8002) 2019-10-24 17:45:28 -07:00
Alex Ma
8918d1593c reclassified autoinstallrequired message to error (#7986)
* reclassified autoinstallrequired message to error

* Changed level to warning
2019-10-24 17:00:07 -07:00
Chris LaFreniere
b1e0b7c1e3 Always send \n instead of \r\n to Jupyter kernel (#7995)
* Always send \n instead of \r\n to kernel

* Use replace instead of split/join
2019-10-24 16:43:52 -07:00
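A one-function sketch of the normalization described in this entry: a global regex replace instead of source.split('\r\n').join('\n'):

```typescript
// Normalize Windows line endings before sending cell source to the kernel.
function normalizeLineEndings(source: string): string {
    return source.replace(/\r\n/g, '\n');
}
```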
Amir Omidi
3a5e4cbeac delete workflow (#8000) 2019-10-24 16:29:54 -07:00
Alan Ren
7bfa6e611e Remove RC from strings (#8001) 2019-10-24 16:03:22 -07:00
Maddy
004c177f7b builtinExtension-insiders to builtinExtension (#7973)
* builtinExtension-insiders to builtinExtension

* maintain both insiders and stable.

* builtinExtension.js is for build time and skipping the check for dev.

* check quality and pick insider vs stable json
2019-10-24 15:34:12 -07:00
Amir Omidi
86cc3f77ee Fixes some code coverage issues (#7994)
* Fix code coverage issues

* Fix some code coverage stuff
2019-10-24 15:32:43 -07:00
Chris LaFreniere
a33820ecdd Fix starting and separators in a row (#7861) 2019-10-24 15:20:58 -07:00
Kevin Cunnane
7babd6f3d0 Add SQL Managed Instance support and sorting (#7996)
- Add SQL Instances folder and support using existing SQLClient API
- Sort subscriptions in tree and quickpick for easier search
- Sort all resources (Databases, Servers, Instances) alphabetically too

Not in this PR:
- Will experiment with Graph API for faster perf & easier addition of other Azure resources such as PostgreSQL
2019-10-24 15:18:49 -07:00
aspnerd
e28ecf44cb Use selected DB for import wizard schema list (#7878)
* Update fileConfigPage.ts

* Update fileConfigPage.ts

* Update fileConfigPage.ts

* Update fileConfigPage.ts

* Update fileConfigPage.ts

* Update fileConfigPage.ts

* Update fileConfigPage.ts

* Update fileConfigPage.ts

* Update fileConfigPage.ts
2019-10-24 15:08:10 -07:00
Alan Ren
93685d3a09 make the azdata install url configurable (#7989)
* make the azdata install url configurable

* use settings without reloading

* comments
2019-10-24 14:57:17 -07:00
Maddy
d660405e73 hide save book action from command pallet (#7981) 2019-10-24 14:44:35 -07:00
Alan Ren
684fb2566b revert the notebook background execution (#7984) 2019-10-24 13:46:40 -07:00
Charles Gagnon
696f6841cb Change BDC view errors to use modal dialog instead of error toast (#7985)
* Change BDC view errors to use modal dialog instead of error toast

* Move to common control
2019-10-24 13:05:07 -07:00
Charles Gagnon
cef60f3ae5 Fix troubleshoot URL lookup (#7978) 2019-10-24 10:14:39 -07:00
Alan Ren
34fe2b44cc remove insider build check (#7926)
* remove insider build check

* fix welcome page not load issue
2019-10-23 21:53:42 -07:00
Aditya Bist
a16bfbfedd Fix node infinitely loading when a firewall dialog is cancelled (#7970)
* add back icons for azure actions

* fix firewall infinite loop

* formatting

* change message to firewall canceled

* fix tests
2019-10-23 21:37:21 -07:00
Chris LaFreniere
fb4fccf2d5 Notebooks: ensure python path dirs added to path on session start (#7968)
* ensure python path dirs add to path session start

* Change logic slightly

* PR feedback from Charles
2019-10-23 19:22:24 -07:00
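The PATH handling described in #7968 above could look roughly like the following sketch; the function name and how the directories are discovered are assumptions, and only the idea of prepending Python directories using the platform delimiter comes from the commit:

    import * as path from 'path';

    // Prepend the Python directories to PATH for the kernel process, using the
    // platform-specific delimiter (';' on Windows, ':' elsewhere).
    function withPythonOnPath(env: NodeJS.ProcessEnv, pythonDirs: string[]): NodeJS.ProcessEnv {
        const existing = env.PATH || '';
        return { ...env, PATH: [...pythonDirs, existing].join(path.delimiter) };
    }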
Chris LaFreniere
b53cad78bd Update recommended package versions for ps (#7972) 2019-10-23 18:57:05 -07:00
Chris LaFreniere
a431ca7ef2 check for ownerDocument focus for focus (#7964) 2019-10-23 18:56:43 -07:00
Chris LaFreniere
c2022cac57 Fix for relative links not being resolved in notebook outputs (#7966)
* Fix for relative links not being resolved

* Add comment
2019-10-23 18:49:52 -07:00
Kevin Cunnane
806d807eae Refactor Azure Core extension for easier resource addition (#7958)
Consolidated most logic into a base class and common resource request pattern.
Reduces cost to add new providers, which will help for SQL Managed Instance support
2019-10-23 16:29:51 -07:00
Aditya Bist
24e3b1c5e6 add back icons for azure actions (#7963) 2019-10-23 15:09:23 -07:00
Amir Omidi
bbe3605317 Lets try out this gh-action (#7875)
* Update blank.yml

* Update and rename blank.yml to tslint.yml

* Update tslint.yml

* Update tslint.yml

* Update tslint.yml

* Update tslint.yml
2019-10-23 13:05:36 -07:00
Cory Rivera
06d67f5ad2 Append stdErr log to error message when a streamed console command fails. (#7868) 2019-10-23 12:38:24 -07:00
Amir Omidi
41f9f22e38 Update lgtm.yml (#7932)
Add vs stuff as a classifier so we don't see their issues
2019-10-22 23:16:11 -07:00
Anthony Dresser
a94cbb528e Merge from vscode f5d3ffa6a0d655c87e1eb0e1e90773df58f7ff25 (#7929)
* Merge from vscode f5d3ffa6a0d655c87e1eb0e1e90773df58f7ff25

* fix launch script

* add missing files
2019-10-22 21:49:55 -07:00
Charles Gagnon
4a68ab4659 Hygiene linting for extensions + new rule (#7843)
* linting for extensions + new rule

* Remove unneeded array

* Fix spelling mistake

* Fix bad merge
2019-10-22 18:56:31 -07:00
Chris LaFreniere
4c24043cc8 Add Option to Halt Notebook when Error Occurs in SQL Kernel (#7884)
* Add Option to Halt Nb when Error Occurs

* PR comments
2019-10-22 18:19:36 -07:00
Maddy
2ca5d18855 Update/sqlserverbook extension to stable (#7914)
* move package info from builtin-insiders to builtin extensions

* vbump for the filename fiasco change

* merged master

* revert sql.bat update
2019-10-22 17:13:48 -07:00
Kim Santiago
37426b0794 don't allow tabbing into table (#7769) 2019-10-22 16:25:37 -07:00
Charles Gagnon
a70ebeed1c BDC Dashboard context sensitive troubleshoot links (#7895)
* Add context awareness to troubleshoot button on dashboard

* Remove tests for now

* Undo yarn.lock changes

* Correct yarn.lock version

* Lower case service check
2019-10-22 14:24:26 -07:00
Charles Gagnon
8f2113e6b5 Fix HDFS paths to be encoded properly (#7896) 2019-10-22 14:05:23 -07:00
Charles Gagnon
03cb0565d4 Clean up bdc devDependencies (#7872)
* Clean up bdc devDependencies

* Add extra files to .vscodeignore
2019-10-22 11:07:35 -07:00
Charles Gagnon
dd14f9b93d Fix telemetry opt out message (#7891) 2019-10-22 11:00:57 -07:00
Kevin Cunnane
4dd15fb479 Add help text to mount HDFS dialog (#7865) 2019-10-22 10:40:28 -07:00
BranislavGrbicMDCS
397f6afaf1 ADS changes for new engine edition (#7695)
* Sql on demand changes

* Formatting files

* Removing features for new Engine Edition

* Fixing Restore & Backup issue. Adding support for multiple conditions per flavor

* tabifying

* Formatting documents

* Work in progress

* Resolving comments

* Resolving comments.

* Fixing typo
2019-10-22 12:50:15 +02:00
Anthony Dresser
65fb77ef5c Update linting file (#7886)
* update linting file

* update pr template
2019-10-22 00:26:03 -07:00
Anthony Dresser
1e22f47304 Merge from vscode c58aaab8a1cc22a7139b761166a0d4f37d41e998 (#7880)
* Merge from vscode c58aaab8a1cc22a7139b761166a0d4f37d41e998

* fix pipelines

* fix strict-null-checks

* add missing files
2019-10-21 22:12:22 -07:00
Charles Gagnon
7c9be74970 Disable failing test (#7882) 2019-10-21 20:31:16 -07:00
Amir Omidi
5efb2cf918 Add a new unstable test (#7874) 2019-10-21 19:19:03 -07:00
Alan Ren
98505110a4 fix a typo (#7871) 2019-10-21 18:31:24 -07:00
Anthony Dresser
1a864584b6 move code to mean linting (#7873) 2019-10-21 17:56:58 -07:00
Charles Gagnon
8aa8dc29a1 Separate icons for non-stable builds (#7857)
* Use separate icons for non-stable builds

* Fix file name

* Switch icon

* Updated icons

* Switch to copy of code.ico until issue is fixed

* Fix icon

* remove

* add

* Fix file renaming

* Fix a couple naming issues

* Move iss changes to build file
2019-10-21 16:11:59 -07:00
Anthony Dresser
06e86e57e7 Strict null on some query and connection (#7300)
* wip

* make connection work with strict-nulls

* change comments

* fix tests; remove unneeded type forcing

* address feedback

* adjust the logic of query editor

* clean up typing
2019-10-21 15:50:12 -07:00
Alan Ren
6a375fdd8c fix the wizard opened twice issue #7866 (#7869) 2019-10-21 15:32:13 -07:00
Maddy
b8ad7e3072 Updates to package.json to show books on stable (#7836)
* Updates to package.json to show books on stable

* remove notebookQuality context key

* renamed untitled to unsaved
2019-10-21 14:26:59 -07:00
Chris LaFreniere
597a0cad6b Add (dummy) IPyWidgets Renderer (#7849) 2019-10-21 14:22:44 -07:00
Amir Omidi
a646af2ad2 Move vscode from dependencies to devDependencies (#7864)
* Update package.json

Move vscode out of dependencies

* Change extensions.js
2019-10-21 14:20:46 -07:00
Amir Omidi
143b70c6a8 Revert "Fix BDC and resource-deploy extensions to not be packaged separately (#7858)" (#7860)
This reverts commit 53a081262d.
2019-10-21 12:06:00 -07:00
Kevin Cunnane
c1e95a2246 Fix errors due to icon to codicon rename in VSCode (#7837)
Found numerous errors where icons weren't rendering correctly.
- Anything that's an action must have CSS using "codicon" not "icon"
since VSCode sets the "codicon" class automatically.
This affected Agent view in particular, but also:
  - accounts view
  - new tab view in dashboard
  - many more
- Anything referencing the common-icons.css icons needed updating.
This hid help tooltip text in FormContainer UI for example.
- Finally I tried to convert all references from icon -> codicon,
even when the CSS was technically correct. This was done
for maintainability reasons - from now on always add codicon.

Fixes #7827
2019-10-21 12:02:30 -07:00
Charles Gagnon
53a081262d Fix BDC and resource-deploy extensions to not be packaged separately (#7858)
* Fix BDC and resource-deploy extensions to not be packaged separately

* Update js too
2019-10-21 11:06:22 -07:00
Charles Gagnon
8b46143d48 Fix Manage Access dialog to not allow entering blank user (#7844) 2019-10-21 10:07:28 -07:00
Alan Ren
a05edc619c use required attribute (#7850)
* use required attribute

* readable sql port
2019-10-21 09:53:12 -07:00
Alan Yu
6c5aa6b367 Update ThirdPartyNotices.txt (#7479)
* Update ThirdPartyNotices.txt

* tab -> spaces
2019-10-20 21:56:29 -07:00
Charles Gagnon
0246c3f895 Fix ModelView tree A11y issue (#7835)
* Fix tree A11y issue

* Toggle expanded state on enter

* Move onkeydown method to base class
2019-10-20 21:42:55 -07:00
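A minimal sketch of the keyboard handling described in #7835 above, assuming a base-class onkeydown handler that toggles the expanded state on Enter (the names here are hypothetical):

    // Toggle a tree node's expanded state when Enter is pressed, so keyboard
    // users can expand/collapse nodes without a mouse.
    function onKeyDown(event: KeyboardEvent, node: { expanded: boolean }): void {
        if (event.key === 'Enter') {
            node.expanded = !node.expanded;
            event.preventDefault();
            event.stopPropagation();
        }
    }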
Chris LaFreniere
e3ae5263c6 Starting Fewer Jupyter Servers for Notebooks (#7744)
* Start fewer Jupyter servers

* Windows fix for drive casing

* PR Feedback

* Quick fix

* Fixing bug

* Ensure environment variables set for session startup

* test fix

* Dummy commit to update comment
2019-10-20 21:38:58 -07:00
Alan Ren
0bfb1aab7e set storage settings table name (#7841) 2019-10-20 19:27:50 -07:00
Chris LaFreniere
d120102805 Bump up widths for pick, remove schedule buttons (#7517) 2019-10-20 13:22:49 -07:00
Charles Gagnon
7e5b864299 BDC Dashboard connection retry (#7784)
* Open cluster dashboard

* Remove old translated strings and update var name

* Add exported auth type

* Add newline

* Add connection retry for dashboard

* Change getMainSectionComponent to return multiple (no undefined)

* Move try/catch to withConnectRetry

* Add connection retry for dashboard

* Change getMainSectionComponent to return multiple (no undefined)

* Move try/catch to withConnectRetry
2019-10-19 15:42:49 -07:00
Arvind Ranasaria
4dd6db57ee Feat/tool install master merge back to master (#7819)
* add install tools button (#7454)

* add install tools button

* address comments

* remove description for install tools hint message

* First working version of AutoDeployment of tools (#7647)

First working version of AutoDeployment of tools.

This pull request adds a feature to install the tools needed for doing BDC/TINA deployments.

This has been tested so far only on win32 and testing on other platforms is in progress.

* removing TODO and redundant code

* Not localizing azuredatastudio product name

* convert methods returning Promises to async-await

* changing from null to undefined

* Localize all the command labels

* using existing sudo-prompt typings

* progress/error status in ModalDialogue && PR fixes

* review feedback to change warning to information

* revert settings.json changes

* fix resource-Deployment Extension Unit Test

* ensuring platform service's working directory

* incorporate review feedback

* review feedback

* addressing PR feedback

* PR fixes

* PR Feedback

* remove debug logs

* disable UI deployment containers when installing

* adding data type to stdout/stderr messaging

* remove commented code

* revert accidental change

* addressing review feedback

* fix failed install with zero exit code

* fixing bug due to typo

* fixes for linux

* Misc fixes during mac testing

* PR fixes
2019-10-18 23:17:21 -07:00
Udeesha Gautam
a2f105a913 Fix Local keys for spark so that dup key error doesnt occur (#7634) 2019-10-18 18:06:37 -07:00
Charles Gagnon
ab31a7b964 Open cluster dashboard from SQL Dashboard (#7783)
* Open cluster dashboard

* Remove old translated strings and update var name

* Add exported auth type

* Add newline

* PR feedback
2019-10-18 17:35:47 -07:00
Amir Omidi
203ff3872f Test run list proposal (#7617)
* Test run list proposal

* fixed ts errors

* added js file

* excluding testSetup.js file from hygiene

* moved ignore line to indentationFilter
2019-10-18 17:21:40 -07:00
Chris LaFreniere
2ee3840650 First improvements for SQL Cell Looping (#7733)
* First improvements

* clear results

* cleanup

* error handling
2019-10-18 17:12:10 -07:00
Cory Rivera
fa80dbfb27 Update download links for our notebook Python packages. (#7839) 2019-10-18 16:53:29 -07:00
Amir Omidi
2237d286b6 XML output is truncated (#7748)
* QueryExecutionOptions change

* add config points

* Change api type used

* Revert "QueryExecutionOptions change"

This reverts commit 7adc3b032b5e4bc92234d337f0a145f0963c2d34.

Let's not change the azdata file.

* Change the signature of this api

* Change the event

* Change SqlCMD to use the API properly

* Add type

* Change type to interface - handle existing files

* Remove unused import

* Delete useless code

* Remove unnecessary code

* Fix the interface

* remove whitespace
2019-10-18 16:35:45 -07:00
Anthony Dresser
4124f6b1ad Readd labeling (#7834)
* readd labeling

* fix yml structure

* fix more yml structure
2019-10-18 15:07:31 -07:00
Karl Burtram
23361d3d56 Update ENU loc resource files (#7831) 2019-10-18 14:35:15 -07:00
Anthony Dresser
4c8f3ddfd3 Add scripting contributions for explorer widget (#7830)
* add scripting contributions for explorer widget

* utilize or
2019-10-18 14:26:43 -07:00
Amir Omidi
f631a8aa9a Delete unused code (#7800)
* Update cmsUtils.ts

Fix small issue

* Delete unused code
2019-10-18 13:29:34 -07:00
Amir Omidi
bbc6460d3f Update groupContainer.component.ts (#7803)
Fix small issue
2019-10-18 13:27:25 -07:00
Amir Omidi
09d78544cf Update sqlExtHostTypes.ts (#7801)
Fix small mistake
2019-10-18 13:26:53 -07:00
Amir Omidi
a791aff0a2 Update pythonPathLookup.ts (#7807)
Fix regex problem
2019-10-18 13:26:38 -07:00
Amir Omidi
ce318f123f remove unusued deps (#7810) 2019-10-18 13:22:50 -07:00
Amir Omidi
9374056e61 move dependencies around (#7808) 2019-10-18 13:22:13 -07:00
Charles Gagnon
5ef19affd0 Update SqlToolsService (#7826) 2019-10-18 12:12:16 -07:00
Charles Gagnon
d40abf4add Add husky back for precommit hooks (#7825) 2019-10-18 08:47:58 -07:00
Alan Ren
86f8b3f9ec change field type to Password (#7791) 2019-10-17 22:39:31 -07:00
Chris LaFreniere
385d7f2803 Clear Output -> Clear Result for consistency (#7776) 2019-10-17 19:54:12 -07:00
Chris LaFreniere
2ee04e0cf0 null checks for onBeforeDetached (#7782) 2019-10-17 18:24:30 -07:00
Cory Rivera
9bdaba3b65 Remove sqlmlutils from default packages due to installation failures on OSX. (#7780) 2019-10-17 17:21:53 -07:00
Chris LaFreniere
80d46fb8a4 Add search book icon (#7777) 2019-10-17 17:20:07 -07:00
Maddy
268f9ef725 revert the version bump. (#7772) 2019-10-17 15:10:53 -07:00
Alan Ren
3813d9385b update the extensions.json (#7771) 2019-10-17 14:26:47 -07:00
Charles Gagnon
48bf72bfc4 Fix TelemetryOptOut error and add debug targets for running extension integration tests (#7765) 2019-10-17 13:39:11 -07:00
Amir Omidi
fa1d5cc49d Revert "Move Notebook tests back to stable" (#7755)
* Revert "Move Notebook tests back to stable (#7749)"

This reverts commit 5d4da455bd.

* Update notebook.test.ts

* Update notebook.test.ts
2019-10-17 11:30:58 -07:00
Amir Omidi
de5fd11155 Add some more asserts to integration tests (#7759) 2019-10-17 10:55:16 -07:00
Alan Ren
cd30a8cbc0 enable bdc features for stable (#7757) 2019-10-17 09:59:29 -07:00
Alex Ma
ec1e54db9a Handling for parsing of iconpaths (#7738)
* Check if light and dark are URI

* Added a map in the extensionsRegistry to check

* Moved resolve check to connectionProviderExtension

* removed resolvedUsers from extensionsRegistry.ts

* removed unused imports and spaces

* Formatting for import

* Simplified resolver check.

* ToString is sufficient to avoid any complications

* Small formatting fix

* removed extra comment
2019-10-17 09:51:33 -07:00
Maddy
82963ad075 vbump for the updates (#7752) 2019-10-16 23:10:24 -07:00
Elliot Boschwitz
b24671bbf6 disabled failing tests in vs code
Add skip statements for tests found to be failing.
2019-10-16 21:25:03 -07:00
Alan Ren
2ab7a47353 deploy BDC wizard improvement for CU1 (#7756)
* unified admin user account (#7485)

* azdata changes

* spaces

* error message

* comments

* support AD authentication for bdc deployment (#7518)

* enable ad authentication

* remove export for internal interface

* add comments

* more changes after testing

* update notebooks

* escape slash

* more comments

* Update deploy-bdc-aks.ipynb

* Update deploy-bdc-existing-aks.ipynb

* Update deploy-bdc-existing-kubeadm.ipynb

* AD changes and review feedback (#7618)

* enable ad authentication

* remove export for internal interface

* add comments

* more changes after testing

* update notebooks

* escape slash

* more comments

* Update deploy-bdc-aks.ipynb

* Update deploy-bdc-existing-aks.ipynb

* Update deploy-bdc-existing-kubeadm.ipynb

* address comments from scenario review (#7546)

* support AD authentication for bdc deployment (#7518)

* enable ad authentication

* remove export for internal interface

* add comments

* more changes after testing

* update notebooks

* escape slash

* more comments

* Update deploy-bdc-aks.ipynb

* Update deploy-bdc-existing-aks.ipynb

* Update deploy-bdc-existing-kubeadm.ipynb

* scenario review feedbacks

* more fixes

* adjust the display order of resource types

* different way to implement left side buttons

* revert unwanted changes

* rename variable

* more fixes for the scenario review feedback (#7589)

* fix more issues

* add help links

* model view readonly text with links

* fix size string

* address comments

* update notebooks

* text update

* address the feedback of 2nd round of deploy BDC wizard review (#7646)

* 2nd review meeting comments

* fix the unit test failure

* recent changes in azdata

* notebook background execution with azdata (#7741)

* notebook background execution with azdata

* prompt to open notebook in case of failure

* fix path quote issue

* better temp file handling

* expose docker settings (#7751)

* add docker settings

* new icon for container image
2019-10-16 20:41:15 -07:00
Charles Gagnon
5d4da455bd Move Notebook tests back to stable (#7749)
* SqlClient fix is in so these should be stable again

* removed comments
2019-10-16 16:44:36 -07:00
Cory Rivera
0b039830ea Add sqlmlutils to list of required notebook packages. (#7740) 2019-10-16 14:16:09 -07:00
Charles Gagnon
77e1ca59ed Roll back tree icon merge changes to fix loading icon issue (#7722) 2019-10-15 22:44:37 -07:00
Charles Gagnon
e8e8ee5941 BDC Dashboard fixes (#7732)
* BDC Dashboard fixes

* Make refresh indicate when refresh is happening

* Fix refresh button to properly reset even if error occurs. Refactor onclick into own method.

* Undo refresh button rotation per design feedback
2019-10-15 21:50:02 -07:00
Cory Rivera
23861bd369 Wait for python installs to complete before starting a python notebook. (#7729) 2019-10-15 17:06:19 -07:00
Chris LaFreniere
4c946b21a9 Jupyter Book Search First Steps (#7704)
* Crawling for book search. needs icon

* Remove context until decision on entry point

* Add undefined check
2019-10-15 17:04:24 -07:00
Alex Ma
e74538b40d Added notification for unsaved file in scheduling (#7705)
* Added notification for unsaved file in scheduling

* Removed logging message in line 216

* Changed regex for all 3 platforms (forbid '/')

* Untitled file prompts to save in mainController

* removed spaces and changed save message
2019-10-15 14:29:34 -07:00
Cory Rivera
b6ef5469de Correctly handle exceptions in package upgrade promise. (#7734) 2019-10-15 13:50:30 -07:00
Amir Omidi
ee2850f2e2 Add name and change severity (#7727)
Add a name and change severity to error.
2019-10-15 10:37:26 -07:00
Amir Omidi
3387a762c4 Update lgtm.yml (#7723)
Get rid of path classifiers
2019-10-15 01:42:05 -07:00
Amir Omidi
75388cc3af Add the promises ql file (#7720) 2019-10-14 18:55:26 -07:00
Kim Santiago
087f7fc43d remove role=document from dialog so screen reader doesn't read it (#7685) 2019-10-14 16:57:09 -07:00
Amir Omidi
5da0e16e44 Check for error (#7711)
* Custom error
2019-10-14 16:26:51 -07:00
Cory Rivera
eb465fde1a Skip prompting for package upgrade if a python install is already in progress. (#7717) 2019-10-14 15:31:53 -07:00
Charles Gagnon
26ece1ee86 Fixes to apply recursive (#7714)
* Fix apply recursive Promise.all to correctly await promises and fix apply to not apply defaults to child files.

* PR comments
2019-10-14 15:04:14 -07:00
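The first bullet of #7714 above describes the classic fix of awaiting the promises produced by a mapped async callback instead of discarding them; a hypothetical sketch (the real code applies HDFS permission defaults, which is omitted here):

    interface FileNode { children: FileNode[]; }

    // Await every child's async apply (and recurse) rather than
    // fire-and-forgetting the mapped promises.
    async function applyRecursively(node: FileNode, apply: (n: FileNode) => Promise<void>): Promise<void> {
        await Promise.all(node.children.map(async child => {
            await apply(child);
            await applyRecursively(child, apply);
        }));
    }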
Kevin Cunnane
f18b65a690 Mount delete, refresh and default to new folder for mount create (#7702)
* Mount delete, refresh and default to new folder for mount create
- Delete mount action added
- Refresh mount action added
- Added "mymount" to the end of the existing path so that we don't use an already-existing HDFS folder; the call fails if the folder already exists
2019-10-14 13:26:14 -07:00
Charles Gagnon
6851b2091f Add return to getControllerEndpoint (#7699) 2019-10-14 09:05:09 -07:00
Amir Omidi
74396c1558 Rename icon to codicon #7709 (#7710) 2019-10-13 22:24:52 -07:00
Charles Gagnon
d02c680dab More HDFS Manage Access dialog updates (#7692)
* Add support for default permissions on directories

(cherry picked from commit 4e81cceba142c6763c3447b4d2965cd75764f8f9)

* Remove unneeded import

(cherry picked from commit ffe5f357357e75e9290966e89768c699df2e1311)

* Add recursive apply and clean up webhdfs

(cherry picked from commit ae76df14f99e599df1cdfcc74ee22d3822f11a59)

* Final set of changes

* Undo changes to azdata/sqlops and few minor fixes

* Remove cast to fix build error

* Hide defaults checkbox for files and switch checkbox order
2019-10-11 15:18:17 -07:00
Cory Rivera
888327f5bc Hide collapse cell button if cell only has one line. (#7701) 2019-10-11 15:12:42 -07:00
Charles Gagnon
2623e7da88 Update Component API properties (#7694) 2019-10-11 13:33:35 -07:00
Amir Omidi
0d2a3bc2d7 Update lgtm.yml (#7698)
Add a trailing slash
2019-10-11 12:54:03 -07:00
Chris LaFreniere
5eeaa5710c Adding in separators (#7651) 2019-10-11 11:06:57 -07:00
Kevin Cunnane
92e1f83046 Mount HDFS Dialog: basic support (#7580)
Implemented in this PR

- New base dialog for anything needing to work with the controller. This is important since going from SQL -> Controller we "should" have the right permissions, but that isn't guaranteed
- Support for Mount HDFS via a dialog. Includes basic polling for success/failure, but have to give up after 2.5min as mounting could take hours. By then it's assumed to be successful since server-side has 2min timeout built in.


Not implemented in this PR

- Script as Notebook button. This should convert the inputs to a set of cells in a notebook so users can run things themselves
- Updates based on PM / UX reviews. I think we'll need a round of feedback before completing this work.
2019-10-11 11:06:40 -07:00
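The polling behavior described in #7580 above follows a poll-with-cutoff pattern; a hedged sketch (the status type, check callback, and polling interval are assumptions — only the roughly 2.5 minute cutoff comes from the commit message):

    type MountStatus = 'pending' | 'succeeded' | 'failed';

    // Poll mount status until success/failure is reported; past the cutoff,
    // assume the server-side mount will complete on its own.
    async function pollMountStatus(check: () => Promise<MountStatus>): Promise<MountStatus> {
        const timeoutMs = 2.5 * 60 * 1000;
        const intervalMs = 5 * 1000; // illustrative polling interval
        const start = Date.now();
        while (Date.now() - start < timeoutMs) {
            const status = await check();
            if (status !== 'pending') {
                return status;
            }
            await new Promise(resolve => setTimeout(resolve, intervalMs));
        }
        return 'succeeded';
    }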
Cory Rivera
9a3f72591e Show a background task when upgrading python packages. (#7649) 2019-10-11 10:56:19 -07:00
Alex Ma
12d824d791 Fix for chart names in dropdown select box. (#7642)
* isolated problem involving user friendly names

* Fix to handle horizontalBar

* Working rough version, need to implement data structure to store alternative names later

* consolidated checks into its own static method, data structure still highly recommended to implement

* Version with hashMap implemented

* Moved check for alternative names into chartView

* removed parseOption

* removed space in selectbox

* removed unused import

* removed unused import and spaces

* Fixed formatting

* Added new comment and modified changeToAltNames

* Localization has been added to the Hash

* fixed small formatting issue

* fixed double quotes for nls.localize
2019-10-11 09:50:23 -07:00
Charles Gagnon
613fef5e73 Fix checkbox component layout (#7678) 2019-10-11 09:45:53 -07:00
Karl Burtram
1d4babefba Bump changelog for 1.12.2 (#7653) 2019-10-10 22:55:54 -07:00
Karl Burtram
6e9e81e3a1 Bump Azure Data Studio to 1.13.0 for Nov (#7654) 2019-10-10 20:54:16 -07:00
Karl Burtram
c292561eb1 Update XLF files and fix errors with export (#7586)
* Update XLF files and fix errors with export

* Update file name

* Patch
2019-10-10 20:47:28 -07:00
Amir Omidi
248464191d add lgtm file (#7650) 2019-10-10 18:10:48 -07:00
Charles Gagnon
f1cdfb768d Make health status header wider so text isn't multiline (#7637) 2019-10-10 14:10:28 -07:00
Cory Rivera
777c188a3f Add collapsed argument to insertCell extension method. (#7635) 2019-10-10 14:06:31 -07:00
Kim Santiago
bf00a6b695 bump dacpac and schema compare extension versions (#7629) 2019-10-10 13:20:23 -07:00
Amir Omidi
b58927fea1 Fix the CI (#7633) 2019-10-10 11:52:02 -07:00
Charles Gagnon
2aa7a145d4 Fix connection group input not correctly selecting nested groups (#7625) 2019-10-10 11:21:13 -07:00
Alex Ma
4d618c5ef1 Improved chart names for chart type dropdown. (#7631)
* isolated problem involving user friendly names

* Fix to handle horizontalBar

* Working rough version, need to implement data structure to store alternative names later

* consolidated checks into its own static method, data structure still highly recommended to implement

* Version with hashMap implemented
2019-10-10 11:08:33 -07:00
Charles Gagnon
543e3e2c09 More updates to HDFS Manage Access dialog (#7611)
* Add display property to ModelView components

* Update DisplayType property in sqlops as well

* More updates to HDFS Manage Access dialog

* More updates to HDFS Manage Access dialog
2019-10-10 10:57:38 -07:00
Kim Santiago
93c9426f25 fix aria labels for database text boxes (#7628) 2019-10-10 10:30:43 -07:00
Chris LaFreniere
d4feb903b0 Supporting Different Kernel JSONs for Various Platforms (#7552)
* First go at it

* undo unnecessary change to copy api

* Revert "undo unnecessary change to copy api"

This reverts commit f95f7f7a42277586f4d4d0eb7b0733c11853b413.

* Revert "First go at it"

This reverts commit 410c0d3c61b7c7cae3a2469a2672f67fe3b745b6.

* Actual super simple fix
2019-10-09 19:19:05 -07:00
Chris LaFreniere
5a1183a457 Fix async problem with jupyter book viewlet loading (#7591)
* Fix async problem with jupyter book viewlet

* PR Feedback
2019-10-09 16:06:32 -07:00
Alan Ren
22774f28c0 notebook linebreak update (#7614) 2019-10-09 14:46:52 -07:00
Amir Omidi
1936e0dbbd Enable prod mode if the service is built (#7575) 2019-10-09 11:24:51 -07:00
Rahul Ajmera
df6e86554c Microsoft logo in SVG format (#7572)
* Adding SVG for microsoft logo. Will allow for scalable use of image

* Rename Microsoft-logo_cmyk_c-gray.svg to microsoft_logo_gray.svg
2019-10-09 11:02:00 -07:00
Charles Gagnon
33218bb0e5 Add display property to ModelView components (#7579)
* Add display property to ModelView components

* Update DisplayType property in sqlops as well
2019-10-09 10:28:18 -07:00
Amir Omidi
f475c04ce3 Fix mocha issues (#7588)
* mocha invert boolean

* Run integration test setup either way
2019-10-08 22:14:06 -07:00
Amir Omidi
0788796f1a add --nogpu to our integration tests (#7495) 2019-10-08 17:43:57 -07:00
Cory Rivera
3a01f960a7 Add prompt for user to upgrade python packages when starting a notebook (#7574) 2019-10-08 17:42:10 -07:00
Cory Rivera
d37105ada2 Don't expand notebook cells after being run, and don't hide outputs when cell is collapsed. (#7585) 2019-10-08 17:39:21 -07:00
Cory Rivera
66fda57513 Block Enter key from closing Manage Packages dialog. (#7584) 2019-10-08 16:58:23 -07:00
Charles Gagnon
2fe82e4b2f Styling updates for HDFS Manage Access Dialog (#7551) 2019-10-08 16:02:55 -07:00
Amir Omidi
eee7e52bd4 Release tests (#7577) 2019-10-08 15:06:21 -07:00
Elliot Boschwitz
ecd76eb870 removed hidden overflow for schema compare splitview (#7104)
Fixes issue where scrollbars wouldn't appear with vertical overflow in schema compare
2019-10-08 13:44:12 -07:00
Amir Omidi
bcaa09e910 Disable unstable tests (#7571) 2019-10-08 13:01:49 -07:00
Charles Gagnon
32df727ff9 Revert ""Retry resubmit" - allows zone.js to cleanup error stacktraces (#7502)" (#7568)
This reverts commit d91488da62.
2019-10-08 10:04:58 -07:00
Karl Burtram
e1bfe6cdda Update changelog for 1.12.1 (#7548) 2019-10-08 09:05:10 -07:00
Charles Gagnon
e4e71af597 Fix all await-promise tslint errors (#7530)
* Fix all await-promise tslint errors

* Remove unnecessary await
2019-10-07 17:52:01 -07:00
Kim Santiago
749ddc30c7 fixing error and splitview not showing for schema compare (#7549) 2019-10-07 17:51:46 -07:00
Charles Gagnon
c4965c7fe9 Fix parseAclPermissionFromOctal to include optional sticky bit (#7542)
* Fix parseAclPermissionFromOctal to include optional sticky bit

* Fix doc
2019-10-07 17:21:08 -07:00
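A hedged illustration of the parsing fix in #7542 above (this is not the real parseAclPermissionFromOctal, just a sketch of accepting an optional leading digit that carries the sticky bit, e.g. '755' vs '1755'):

    function parsePermissionFromOctal(octal: string): { sticky: boolean; owner: number; group: number; other: number } {
        const match = /^([0-7])?([0-7])([0-7])([0-7])$/.exec(octal);
        if (!match) {
            throw new Error(`Invalid octal permission string: ${octal}`);
        }
        const special = match[1] ? parseInt(match[1], 8) : 0;
        return {
            sticky: (special & 0o1) !== 0, // sticky bit is the 1-bit of the optional leading digit
            owner: parseInt(match[2], 8),
            group: parseInt(match[3], 8),
            other: parseInt(match[4], 8)
        };
    }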
Amir Omidi
de72ab176c Add chmod +x to all the scripts (#7550) 2019-10-07 17:12:06 -07:00
Amir Omidi
b453c3a48e Flat file import version bump (#7525) 2019-10-07 16:09:26 -07:00
Amir Omidi
857c658888 cleanup promises and async (#7169) 2019-10-07 15:30:38 -07:00
Amir Omidi
d91488da62 "Retry resubmit" - allows zone.js to cleanup error stacktraces (#7502)
* Revert "Revert "Revert "Revert "allows zone.js to cleanup error stacktraces (#7203)" (#7499)" (#7500)" (#7501)"

This reverts commit 080d9bbaa6.

* Don't include the setup file here

* Change setup.js
2019-10-07 15:30:15 -07:00
Amir Omidi
16fbd4abfd Return values (#7521) 2019-10-07 15:29:51 -07:00
Amir Omidi
285f8bc28c Move around scripting utils (#7523)
* Move around scripting utils

* Don't set a file path

* Version bump
2019-10-07 15:09:49 -07:00
Charles Gagnon
3fb4877859 Change script file names back to original names (#7544) 2019-10-07 14:03:34 -07:00
Charles Gagnon
e25cbdf4b9 Fix BDC dashboard to update status icons on refresh (#7520) 2019-10-07 13:40:58 -07:00
Charles Gagnon
b1db9a8cf1 Update Query History readme with latest feature additions (#7529) 2019-10-07 13:38:28 -07:00
Charles Gagnon
f418104b7a big data cluster -> Big Data Cluster (#7536) 2019-10-07 13:13:40 -07:00
Charles Gagnon
5454917569 Add ability to handle enter key propagation for input model components (#7524)
* Add ability to stop enter key propagation for input model components

* Fix spacing

* onInputEntered -> onEnterKeyPressed
2019-10-07 12:05:43 -07:00
Charles Gagnon
effa50a9bd Add unstable test params for core and extension tests (#7513)
* Add unstable test params for core and extension tests

* unset invert options for unstable script runs

* Fix copypasta

* Add nogpu to dacpac tests
2019-10-07 10:18:02 -07:00
Euler
ac87346507 Prevent connections from moving on click (#7528) 2019-10-07 08:49:05 -07:00
Amir Omidi
cacbcb5415 Add more functionality to the grid (#7516) 2019-10-05 12:08:18 -07:00
Chris LaFreniere
0b2a2ad0ed Allow git to open notebook files (#7403)
* Allow git to open notebook files

* pr feedback
2019-10-04 15:34:19 -07:00
Aasim Khan
44bc7a89df Register Notebook Editor Model during instantiation (#6891) 2019-10-04 15:33:37 -07:00
Charles Gagnon
bcb5384639 Capitalize action text for query history (#7512) 2019-10-04 14:14:05 -07:00
Kevin Cunnane
b23e577ccc Support mounted file icon (#7496) 2019-10-04 14:10:50 -07:00
Charles Gagnon
96a28f2c4d Add query-history package metadata (#7511)
* Add query-history package metadata

* Add newline
2019-10-04 13:58:12 -07:00
Cory Rivera
826c4115a7 Add powershell package to notebook python setup. (#7503)
Also added force-reinstall flag to installs when running Reinstall Dependencies.
2019-10-04 11:08:07 -07:00
Amir Omidi
c95ea16a44 Amir/tests after build (#7507)
* test

* remove disable extensions

* fix the file...
2019-10-04 02:16:07 -07:00
Chris LaFreniere
8e1a2248e4 Notebooks: Ensure quotes and backslashes are escaped properly in text editor model (#7497)
* Ensure quotes and backslashes are escaped properly

* PR comment

* PR comments

* Reliably fix
2019-10-03 19:21:01 -07:00
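The escaping fix in #7497 above comes down to escaping backslashes before quotes when serializing cell source into the text editor model; a hedged sketch (the real model code may differ):

    // Escape backslashes first, otherwise the backslash added for each quote
    // would itself get doubled. E.g. the cell text  a\b"c  becomes  a\\b\"c
    function escapeForTextModel(source: string): string {
        return source.replace(/\\/g, '\\\\').replace(/"/g, '\\"');
    }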
Cory Rivera
6b29fd05bd Add collapse/expand functionality to notebook code cells. (#7481) 2019-10-03 16:50:47 -07:00
Karl Burtram
080d9bbaa6 Revert "Revert "Revert "allows zone.js to cleanup error stacktraces (#7203)" (#7499)" (#7500)" (#7501)
This reverts commit 263d342a79.
2019-10-03 16:02:20 -07:00
Amir Omidi
263d342a79 Revert "Revert "allows zone.js to cleanup error stacktraces (#7203)" (#7499)" (#7500)
This reverts commit 0d2dcb3d25.
2019-10-03 16:01:16 -07:00
Amir Omidi
0d2dcb3d25 Revert "allows zone.js to cleanup error stacktraces (#7203)" (#7499)
This reverts commit a0f1d68cfb.
2019-10-03 15:57:50 -07:00
Alan Ren
9dd35c8c0d fix the bdc casing on welcome page (#7498) 2019-10-03 15:50:14 -07:00
Kevin Cunnane
e85f93abec Support HDFS Tiering (#7484)
This is the 1st step to supporting HDFS Tiering
Changes:

Add new mounted folder icon. Will have separate commit for file icon
Disable delete/mkdir/upload for mounted files and folders
Disable delete for root HDFS folder (this was added in error)
2019-10-03 14:48:19 -07:00
Charles Gagnon
18c12dac9a Fix to open ADS release notes instead of VS Code (#7418)
* Open latest ADS release notes in browser

* Only show release notes for stable builds

* Update config description text

* Have release notes notification prompt user before opening

* Remove unneeded import
2019-10-03 13:28:04 -07:00
Amir Omidi
a0f1d68cfb allows zone.js to cleanup error stacktraces (#7203)
* allows zone.js to cleanup error stacktraces

* zone.js changes

* Change ordering
2019-10-03 13:23:38 -07:00
Amir Omidi
37f651fe08 Mark test as unstable (#7493) 2019-10-03 12:56:42 -07:00
Alan Ren
d2e4e94aec fix issue 7489 and disposable handling (#7491) 2019-10-03 12:20:54 -07:00
Amir Omidi
cf1a09aeaf Revert "Disable schema compare tests (#7459)" (#7460)
This reverts commit 704c5174f9.
2019-10-03 11:57:11 -07:00
Amir Omidi
32897d3e07 Mock extension context (#7492) 2019-10-03 11:56:44 -07:00
AlexFsmn
3f2a728ed0 Fixed issue where task icons got hidden if text was too long (#6699)
* Fixed issue where task icons got hidden if text was too long
#6116

* Changed padding to min width
#6116

* Update tasksPanel.css
2019-10-03 10:10:56 -07:00
Charles Gagnon
af24a9d002 Initial work for Manage Access dialog (#7483)
* Add width and height properties to checkbox component

* Initial work on manage access dialog

* Add missed change

* Add comments and clean up

* Initial work on manage access dialog

* Add missed change

* Add comments and clean up

* Add return type

* Address comments and use apiWrapper

* Fix compile error
2019-10-03 08:58:06 -07:00
Amir Omidi
6582debd73 Unstable tests (#7487)
Setup unstable tests
2019-10-02 22:01:17 -07:00
Amir Omidi
575d1c8543 Disable one test and add a new utility function (#7486)
Skip problematic tests
2019-10-02 21:59:40 -07:00
Charles Gagnon
08b78c3ca5 Add width and height properties to checkbox component (#7482) 2019-10-02 16:21:08 -07:00
Amir Omidi
e0a867a184 Disable hw accl (#7457)
Allow headless running of tests
2019-10-02 14:08:22 -07:00
Amir Omidi
2b8508574d Create the azure folder differently. (#7470)
Create the folder with a different mechanism.
2019-10-02 12:57:29 -07:00
Chris LaFreniere
b8976785fd Stop grabbing focus when nb editor isn't in view (#7466)
* Stop grabbing focus when nb editor isn't in view

* Add comments
2019-10-02 11:56:57 -07:00
Charles Gagnon
79e2c56ec8 Add types for ModelView CSS properties (#7465) 2019-10-02 11:21:18 -07:00
Charles Gagnon
1ea09c7add Update SqlToolsService for rows affected fix (#7469) 2019-10-02 11:20:40 -07:00
Udeesha Gautam
7489a65bbe Changing Sql cmd icons to more clear ones (#7451)
Changing Sql cmd icons to more clear ones
2019-10-02 11:04:22 -07:00
Karl Burtram
faabdb8d88 Update readme for October release (#7449) 2019-10-02 10:03:32 -07:00
Alan Ren
57c5d98bdc make it easy to indicate all platforms (#7467) 2019-10-02 09:33:27 -07:00
Charles Gagnon
b5c249c25d Update settings/descriptions to use ADS product name (#7425)
* Update settings to use ADS product name

* Fix JSON comment break
2019-10-02 09:04:09 -07:00
Alan Yu
60a244888d Update README.md (#7463)
* Update README.md

* Fix number and links
2019-10-02 08:24:58 -07:00
Anthony Dresser
c4dfc5cf70 Fix issue with startup (#7461)
* address startup error

* fix naming
2019-10-01 17:31:49 -07:00
Chris LaFreniere
4c2ffdfc68 Add powershell kernel.json (#7456) 2019-10-01 16:30:09 -07:00
Amir Omidi
704c5174f9 Disable schema compare tests (#7459) 2019-10-01 16:11:50 -07:00
Amir Omidi
b708b4a42b Add dependencies required for tests (#7450)
* Add the dependencies

* options -> testOptions

* tabs vs spaces

* dacpac
2019-10-01 15:48:36 -07:00
Kevin Cunnane
ee98ce5c18 Fix/controller ad (#7445)
Reapply my previous commit
add kerberos to excludes in the webpack for the extension
Fixes #7443
Verified on Windows and MacOS
2019-10-01 14:52:09 -07:00
Alan Ren
7162272f1e update timeout (#7446) 2019-10-01 12:51:42 -07:00
Udeesha Gautam
faee6b45e0 Extensions Localization (#7426)
* Correct Schema compare single file path in xlfs

* converting SC and Dacpac from Xlf to Json

* Adding all exts other than mssql (needs fixing)
2019-10-01 11:53:46 -07:00
Alan Ren
aef69ab12a add readme (#7447) 2019-10-01 11:21:07 -07:00
Karl Burtram
a712426185 Bump Server Reports to 0.2.0 (#7431)
* Bump Server Reports to 0.2.0

* Add force reload flag

* vbump in package-lock.json
2019-10-01 10:42:11 -07:00
Karl Burtram
19be0d0ff3 Revert "Support AD in cluster connection dialog (#7367)" (#7444)
This reverts commit 9f065b2b5a.
2019-10-01 10:30:38 -07:00
Charles Gagnon
3202e46930 Fix typo in snippet (#7441) 2019-10-01 09:56:27 -07:00
Alan Ren
5b95d6777f new icons (#7433) 2019-10-01 09:50:37 -07:00
Anthony Dresser
084524cd2d Merge from vscode 313ede61cbad8f9dc748907b3384e059ddddb79a (#7436)
* Merge from vscode 313ede61cbad8f9dc748907b3384e059ddddb79a

* fix strict null checks
2019-09-30 23:35:45 -07:00
Charles Gagnon
6ab03053a0 Switch HDFS node to use normal Folder icon (#7430) 2019-09-30 16:00:08 -07:00
Kevin Cunnane
9f065b2b5a Support AD in cluster connection dialog (#7367)
- Use token API to get auth token before using other endpoints. Note this needs server updates before it'll work as expected (will only checkin after verifying this)
- Add auth option in controller UI and plumb through connection save, load, and uses of the controller API
Because the swagger spec is split in two, a new file was created for the 2nd swagger spec that includes the token auth endpoints. These come from a running cluster, and the instructions were updated to reflect this.

New UI Changes:
- Added authentication type field with "Basic" and "Windows Authentication" as the options
- Moved error notifications to the dialog instead of separate notification window. That's the recommended pattern
- Username / password aren't required for Windows Authentication. I couldn't find a way to change the required status in the form container when switching from Windows => Basic Auth, so the error is shown on clicking OK.
- Controller URL should use ":" not "," for IP:Port
2019-09-30 15:59:04 -07:00
Chris LaFreniere
5327ed84c1 Add ps to standard kernels in package.json (#7428) 2019-09-30 14:04:25 -07:00
Charles Gagnon
4018a29a16 Add toggle query history capture command/action (#7427)
* Add toggle query history capture command/action

* Add extension updates
2019-09-30 11:59:49 -07:00
Charles Gagnon
7cbc268c52 Add Clear All Query History command/action (#7408)
* Add clear all query history action/command

* Fix display issue when clearing

* Change localize ID and fix registration
2019-09-30 08:20:27 -07:00
Anthony Dresser
bca7c8e6bd Merge from vscode f2d41726ba5a0e8abfe61b2c743022b1b6372010 (#7415)
* Merge from vscode f2d41726ba5a0e8abfe61b2c743022b1b6372010

* add missing files
2019-09-27 23:30:36 -07:00
Alan Ren
d0fb6de390 sample of how to contribute a deployment type (#7414) 2019-09-27 16:03:30 -07:00
Charles Gagnon
63f3d9862f Add getaclstatus/setacl calls to WebHDFS API (#7378)
* Add getaclstatus/setacl calls to WebHDFS API

* Fix hygiene check
2019-09-27 13:45:45 -07:00
Anthony Dresser
00f8dcb23e clean up workbench files (#7392) 2019-09-27 12:17:07 -07:00
Alan Ren
bc4165037c fall back to the old text component behavior... (#7405)
* fall back to the old text component behavior...

if no required indicator and description is needed, fall back to the old text component behavior.

* Update text.component.ts

indention
2019-09-27 11:54:11 -07:00
Anthony Dresser
07109617b5 Merge from vscode e0762af258c0b20320ed03f3871a41967acc4421 (#7404)
* Merge from vscode e0762af258c0b20320ed03f3871a41967acc4421

* readd svgs
2019-09-27 11:13:19 -07:00
Alan Ren
6385443a4c deployment extensibility (#7394)
* rename button and update dialog button width

* make deployment resource type contributable

* conflicts

* fix card width hight issue

* comments
2019-09-27 10:13:38 -07:00
Chris LaFreniere
6ef415d0e6 Notebook Tokenization Fixes (#7375)
* Fix don't like; unclear if grammar necessary too

* Cleanup and sanity check

* Cleanup and sanity check

* Add test

* Call onBeforeAttached for 3 types of editor models
2019-09-27 10:04:29 -07:00
Chris LaFreniere
ba8ba9f68d echo extension test names (#7391) 2019-09-27 10:04:10 -07:00
Charles Gagnon
b30252021b Sort endpoints (#7402) 2019-09-27 07:45:37 -07:00
Anthony Dresser
db57171ece fix dashboard icons (#7388) 2019-09-26 12:45:24 -07:00
Alan Ren
3688e9981d fixes #7383 and #7380 (#7384) 2019-09-26 12:35:44 -07:00
Anthony Dresser
b4de26a801 Disable tasks (#7329)
* disable tasks

* more disable

* disable more tasks
2019-09-26 12:15:48 -07:00
Anthony Dresser
2a15768a25 Remove diff (#7368)
* remove diff

* gulpfile changes (#7370)

* gulpfile changes

* Remove examples

* move require statements to the top of the file

* add await
2019-09-26 11:55:42 -07:00
Anthony Dresser
f971417746 fix workspace actions (#7345) 2019-09-26 11:47:44 -07:00
Charles Gagnon
33854d42e4 Fix endpoint links to not overflow text (#7312)
* Fix endpoint links to not overflow

* Add titles to links

* Re-add title to Text and fix errors
2019-09-26 11:47:22 -07:00
Alan Ren
2d9f6dcd86 rename button and update dialog button width (#7369)
* rename button and update dialog button width

* update padding to match portal
2019-09-25 20:21:41 -07:00
Kim Santiago
e3c347e148 Fixing folder path in dacpac and schema compare extensions (#7352)
* fixing folder path in dacpac and schema compare extensions

* created method

* import os
2019-09-25 15:00:56 -07:00
Amir Omidi
f7c468d6f0 Promise cleanup (#7210) 2019-09-25 14:46:14 -07:00
Kim Santiago
e6cac8cc14 Fix missing schema compare icons (#7355)
* move schema compare icons

* use extensionContext.extensionPath
2019-09-25 12:59:18 -07:00
Cory Rivera
79d0239362 Use python3 as the default python version in Spark sessions. (#7353) 2019-09-25 11:23:39 -07:00
Alan Ren
a0e31fc723 wizard for deploying bdc (#7183)
* wip

* wip2

* wip eod 820

* wip 822

* text component improvements and misc changes

* aria-label

* targetClusterPage wip

* target cluster page

* target cluster page

* wip 827

* wip deployment profile page

* profile page

* service settings page

* wip 0903

* 0909 wip

* 0910

* 0911

* sql instance and working directory

* notebooks

* docker version on windows

* EULA env var

* 917 updates

* address comments

* use async file access

* fix the summary page display issue for ad auth

* add save json file buttons

* use promise for private methds

* review feedbacks

* refactor

* pass json to notebooks

* fix no tool scenario

* bypass tool check if installed

* update hint text

* update notebooks

* workaround azdata first time use

* comments

* accept eula and some text update

* fix the error in package.json

* promise instead of thenable

* comments

* fix typo
2019-09-25 10:04:13 -07:00
Anthony Dresser
6a6048d40f Merge from vscode 817eb6b0c720a4ecbc13c020afbbebfed667aa09 (#7356) 2019-09-24 21:36:17 -07:00
Charles Gagnon
a29ae4d3b9 Add enable logs setting for Flat File Import (#7342)
* Add config for enabling Flat File Import logging

* Move logs to default log location for extensions

* Add localized strings
2019-09-24 18:07:46 -07:00
Charles Gagnon
82b19614e1 Update more CSS url use (#7341)
* Update more CSS urls

* URI with resources (#7348)

* URI with resources

* Remove logs
2019-09-24 18:07:23 -07:00
Amir Omidi
49851daf0d Don't check if directory exists (#7349) 2019-09-24 15:45:48 -07:00
Chris LaFreniere
d815ae0e83 Skip failing tests due to sqlclient issues (#7346) 2019-09-24 15:04:35 -07:00
Anthony Dresser
cb50fae12d move query plan to browser (#7343) 2019-09-24 12:14:24 -07:00
Charles Gagnon
26072af82f Use test.skip instead of commenting out disabled test (#7338)
* Use test.skip instead of commenting out disabled test

* Also disable OE test
2019-09-24 11:13:25 -07:00
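A minimal sketch of the pattern in #7338 above, assuming Mocha's tdd interface as used by the ADS test suites: a skipped test still shows up as pending in the report, while a commented-out test silently disappears.

    suite('Object Explorer', () => {
        // Disabled rather than deleted or commented out, so it stays visible.
        test.skip('flaky connection test', async () => {
            // test body unchanged
        });
    });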
Amir Omidi
89c1c4897a proper icon rendering (#7337)
* proper icon rendering

* address comments
2019-09-24 01:30:39 -07:00
Udeesha Gautam
5e5563f974 Disabling test for insiders build to pass (#7336)
Disabling dacpac import test for insiders build to pass
2019-09-23 23:29:09 -07:00
Amir Omidi
5df68e5942 Use URIs properly (#7334) 2019-09-23 21:09:54 -07:00
Udeesha Gautam
d895de0bc1 Trying to remove intermittent ext test failure (#7330)
* Trying to remove intermittent ext test failure

* taking PR comments
2019-09-23 19:52:05 -07:00
Maddy
2ec4a0c8a8 renamed .icon to .codicon as per vscode updates that are merged. (#7331) 2019-09-23 17:17:24 -07:00
Anthony Dresser
5e3ec6ea39 moves notebooks code to browser (#7313) 2019-09-23 13:32:29 -07:00
Amir Omidi
6f06ab440a Move css around (#7311)
* Move css around

* typings

* Keep it in both
2019-09-20 17:21:16 -07:00
Shafiq Ur Rahman
c3bb7a66e0 Show more of the url (closes #6348) (#7299) 2019-09-20 16:00:36 -07:00
Amir Omidi
aadc871124 Serializer (#7309) 2019-09-20 16:00:24 -07:00
Charles Gagnon
cb2cea4ebd Remove Data Services folder (#7147)
* Remove Data Services folder

* Remove copy path from HDFS node

* Add comment
2019-09-20 13:57:28 -07:00
Chris LaFreniere
6125e68c1f Ensure we call layout() as few times as possible for notebook cells (#7253)
* Call layout fewer times from queryTextEditor

* remove unused method
2019-09-20 12:06:37 -07:00
Arvind Ranasaria
71b80e0817 Set Mocha options based on environment settings (#7229)
* Set Mocha options based on environment settings

* fixing copy paste bugs in console logs

* incorporating review feedback
2019-09-20 12:04:22 -07:00
Maddy
ac6a4e590d Books/viewlet refactor (#7208)
* add saved/untitled views to the books viewlet and provide save option to untitled

* addressed comments

* fixes after merge

* await on async methods

* reverted back

* await on promise

* added localize for books view names

* initial commit

* missed a file change

* changes to make the merges work part1

* fixes after merges 2

* getChildren to get all books

* chnages to address comments

* fsPromises instead of fs.readSync

* merged master

* replaced deprecated fs.exists async call with pathExists

* renamed method
2019-09-20 10:47:33 -07:00
Karl Burtram
1f61a2581c Pickup latest SQL Tools for SMO\Driver bump (#7301)
* Pickup latest SQL Tools for SMO\Driver bump

* Bump to 2.0.0.15 for intellisense fix
2019-09-20 09:52:44 -07:00
Amir Omidi
bf23a52ba4 optional serialization service (#7303) 2019-09-19 22:14:10 -07:00
Anthony Dresser
db498db0a8 Merge from vscode 1eb87b0e9ce9886afeaecec22b31abd0d9b7939f (#7282)
* Merge from vscode 1eb87b0e9ce9886afeaecec22b31abd0d9b7939f

* fix various icon issues

* fix preview features
2019-09-19 21:50:52 -07:00
Charles Gagnon
9d3d64eef3 BDC dashboard 💄 (#7292)
* cleanup

* Undo bad merge

* extra undefined check
2019-09-19 15:06:44 -07:00
Alan Ren
e694e0273b Fix for issue 7233 (#7293) 2019-09-19 14:13:03 -07:00
Charles Gagnon
ced882a2e5 Update iKey for extensions (#7278) 2019-09-19 13:46:01 -07:00
Charles Gagnon
754c643b1b Don't show links for BDC logs if none available (#7286)
* Don't show links for BDC logs if none available

* Remove extra space

* Add comment

* Consolidate localize calls
2019-09-19 13:43:21 -07:00
Charles Gagnon
6a136854b0 Update whoIsActive extension to use azdata (#7287)
* Update whoIsActive extension to use azdata

* Change path

* Update package-lock
2019-09-19 11:38:18 -07:00
Chris LaFreniere
a584aca969 Stop clearing out connecting and connected state when cancelling out of connection dialog (#7254)
* Stop clearing out connecting and connected state

* tweaks

* Handling cancel while connecting

* fix typo

* PR comments
2019-09-18 17:21:32 -07:00
Anthony Dresser
b3fbb29bf2 add sql folding (#7270) 2019-09-18 15:02:22 -07:00
Anthony Dresser
b7299e5eec check for undefined (#7277) 2019-09-18 14:53:13 -07:00
Amir Omidi
510c45b9b7 use isNaN instead of == NaN (#7276) 2019-09-18 14:35:46 -07:00
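A quick illustration of why #7276 matters: NaN is the only JavaScript value that is not equal to itself, so an equality check can never detect it.

    const value = Number('not a number'); // NaN
    console.log(value === NaN);           // false, always
    console.log(isNaN(value));            // true
    console.log(Number.isNaN(value));     // true, and does not coerce its argument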
Anthony Dresser
aad9c0f965 Add more areas to strict null (#7243)
* add more areas to strict null

* fix compile errors

* fix tests

* fix checks

* address PR comments
2019-09-18 12:27:19 -07:00
Charles Gagnon
373828d76f Have MSSQL extension wait for SqlToolsService ready before finishing activation (#7087)
* Have mssql activate wait for service to be ready before returning

* Change to using deferred promise

* Await on server ready

* Finish removing old code

* More cleanup

* Go back to exposed promise API

* Make property readonly

* Add missing file

* Add back in deleted stuff

* Go back to awaiting
2019-09-18 11:41:37 -07:00
Amir Omidi
86a9a2c069 NaN checks aren't possible with an equality check (#7268) 2019-09-18 10:59:58 -07:00
Karl Burtram
6e7e6ee434 Use the default connect timeout in OE tests (#7249) 2019-09-18 08:57:14 -07:00
Anthony Dresser
6af544afde Final fix for exploration merge (#7250)
* fix exploration merge

* use pool

* fix exploration

* add login
2019-09-17 16:25:17 -07:00
Amir Omidi
dca21bd3be Null/undefined check editors (#7084)
* Null check editors

* Add console.errors to the null checks

* Change promises

* Fix typo

* Add output type & change returns
2019-09-17 16:19:47 -07:00
Chris LaFreniere
9b82b101cd Fix for notebook line height (#7248) 2019-09-17 15:41:51 -07:00
Anthony Dresser
47a14bbbff fix exploration merge (#7247) 2019-09-17 15:16:03 -07:00
Cory Rivera
094d6f2339 Update python packages to use sparkmagic 0.12.9 (#7240) 2019-09-17 15:05:59 -07:00
Anthony Dresser
290dd9531f add explore merge build (#7241) 2019-09-17 14:40:56 -07:00
Chris LaFreniere
141226332c Add notebook grid actions (#7181)
* Add notebook grid actions

* pr comments
2019-09-17 14:16:35 -07:00
Udeesha Gautam
7e0a5205b2 Fix Failng dacpac and SC integration tests (#7245) 2019-09-17 14:11:23 -07:00
Aditya Bist
c6c863cd84 commiter work (#7244)
* fix incorrect css and removed redundant css warnings

* add gulp step to check all files

* added files file

* revert build to hygiene job

* keep css changes only
2019-09-17 14:10:38 -07:00
Anthony Dresser
28d453fced Implement a no sync rule (#7216)
* implement a no sync rule

* fix linting disable

* fix unused imports

* exclude more testing

* clean up fs usage

* clean up more fs usage

* remove duplicate of code

* fix compile errors
2019-09-17 13:32:42 -07:00
Chris LaFreniere
4d62983680 Add Default File Type when Saving Chart (#7235)
* add file filter

* Code cleanup
2019-09-17 10:38:39 -07:00
Anthony Dresser
d3ea9c3168 Implement use strict linting (#7223)
* implement use strict linting

* commit changes

* add additional check for strict
2019-09-16 17:36:31 -07:00
Charles Gagnon
603a79d094 Fix moved config (#7231) 2019-09-16 16:24:28 -07:00
Amir Omidi
16481927e8 EoL chars (#7225) 2019-09-16 15:53:59 -07:00
Cory Rivera
6f06c18014 Lighten colors used for inline notebook buttons in dark themes. (#7230) 2019-09-16 15:27:56 -07:00
Karl Burtram
7868afb4fd Activate XML features when SQL loads (#7228) 2019-09-16 14:34:58 -07:00
Anthony Dresser
66d4d5c73f revert changes to build files (#7227) 2019-09-16 14:12:35 -07:00
Anthony Dresser
075479274d Readd sql linting (#7217)
* readd sql linting

* add skip linting for pipeline hygiene
2019-09-16 10:59:28 -07:00
Charles Gagnon
bae797f975 Fix config path (#7220) 2019-09-16 09:37:02 -07:00
Anthony Dresser
ea0f9e6ce9 Merge from vscode 64980ea1f3f532c82bb6c28d27bba9ef2c5b4463 (#7206)
* Merge from vscode 64980ea1f3f532c82bb6c28d27bba9ef2c5b4463

* fix config changes

* fix strictnull checks
2019-09-15 22:38:26 -07:00
Karl Burtram
fa6c52699e Remove sqlops referece from server reports extension (#7212) 2019-09-13 20:43:59 -07:00
jamesrod817
209d7e48d8 Tempdb (#7022)
* Server changes by James

* tempdb
2019-09-13 16:19:48 -07:00
Charles Gagnon
0bd3e1b0e1 Fix query-history README links (#7205)
* Fix README URLs

* different fix
2019-09-13 15:24:14 -07:00
Anthony Dresser
9229b26b9e Rename some build steps (#7202)
* rename some build steps

* unify naming
2019-09-13 13:52:01 -07:00
Maddy
29dbce079b Fix/replace wrong imports (#7158)
* replaced fs with IFileService

* work around for URI with relative paths

* updates to comments

* renamed existsSync to exists

* await on promise inside non-async method

* .then change

* await async calls

* removed the old code

* include everything in .then

* remove file exists check

* added _ for consistency

* use path.abosult method

* fixed the cleanIrl calls back and path.sep change

* reverted path.sep for now
2019-09-13 13:11:18 -07:00
Amir Omidi
86df538db9 Specialized clipboard service (#7195)
* Specialized clipboard service
2019-09-13 13:07:00 -07:00
Anthony Dresser
d9c5b7ea9e Clean up some more disposable usage (#7190)
* clean up some more disposable usage

* fix a bug

* add more to register
2019-09-13 12:28:33 -07:00
Anthony Dresser
c9128d56c0 Handle some promises better in cms (#7166)
* handle some promises better in cms

* 💄
2019-09-13 12:28:16 -07:00
Charles Gagnon
888755e842 Add ability to open to a specific item within a Jupyter book (#7155)
* Add ability to open to a specific item within a Jupyter book

* Move helper method into BookTreeItem class

* Fix default URL path

* Add typing to Jupyter book code

* Update comment and typings

* Fix compile error and cleanup
2019-09-13 11:51:15 -07:00
Charles Gagnon
3ac096b3b1 Fix ModelView tests (#7193) 2019-09-12 17:17:58 -07:00
Charles Gagnon
7ebd1eb053 Revert accidently change to sql.bat (#7194) 2019-09-12 17:03:57 -07:00
Chris LaFreniere
2128851bdf Add title to table (#7182) 2019-09-12 14:57:39 -07:00
Anthony Dresser
823d136a00 No browser from common (#7178)
* no browser from common

* clean up some imports
2019-09-12 14:52:42 -07:00
Charles Gagnon
a67e62b2d0 Add margin to BDC Dashboard pages/nav (#7129) 2019-09-12 13:25:26 -07:00
Benjin Dubishar
d262ea21e3 Addresses accessibility bug by converting editable dropdowns to normal when in accessible mode (#7159)
* Automatically disabling editability on extension dropdowns when in accessible mode

* correcting variable name type
2019-09-12 11:08:05 -07:00
Elliot Boschwitz
aaf115a5c8 Dacpac extension announces invalid text inputs in screen reader mode (#7133)
Text input boxes will have an updated aria-label with a given error message if the input is invalid.
2019-09-12 11:05:56 -07:00
Charles Gagnon
206c5146e1 BDC Dashboard Style Updates (#7140)
* Style updates

* Add highlight line under resource group tabs

* Fixes

* Update font weight to semi-bold (600)
2019-09-12 09:15:25 -07:00
Amir Omidi
abe917f3c1 Better cell selection (#6914)
* Better cell selection

* Explicit return type and undefined assignment

* More complex copy/paste

* Get TS to be less mad at me

* Remove EoL

* Fail safe if statement

* strict null check
2019-09-11 18:23:49 -07:00
Charles Gagnon
0793e11b04 Update to the latest merged VS Code version (#7154) 2019-09-11 15:35:38 -07:00
Karl Burtram
9df66deb81 Initial LiveShare extension scaffolding (#7170)
* LiveShare initial shared connection

* Various cleanups

* Fix type

* Fix hygiene
2019-09-11 15:24:08 -07:00
Charles Gagnon
9765b0ed8e Bump extension versions (#7168) 2019-09-11 15:22:35 -07:00
Amir Omidi
83c9c3f618 New rule to stop floating promises (#7165)
* Rules surrounding promises
2019-09-11 14:25:21 -07:00
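An illustration of the "floating promise" pattern that such a lint rule flags (a generic example, not the rule's actual configuration):

    async function saveSettings(): Promise<void> { /* ... */ }

    async function onClick(): Promise<void> {
        saveSettings();        // floating: rejections are silently dropped
        await saveSettings();  // fixed: awaited
        void saveSettings();   // or explicitly marked as intentionally unawaited
    }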
Charles Gagnon
dd5dd12ee6 Minor notebook cleanup (#7156)
* Minor notebook cleanup

* Undo fix to getBooks
2019-09-11 14:12:32 -07:00
Charles Gagnon
b68fd91a02 Update query history README (#7164)
* Update query history README

* Fix typos
2019-09-11 13:57:55 -07:00
Charles Gagnon
4270547147 Remove custom tree indent guide (#7119) 2019-09-11 09:58:55 -07:00
Charles Gagnon
c4b90360a5 Query History feature (#6579)
* Initial commit

* Fix up QueryEventType

* Making query history visible in view and open query command (#6479)

* Add QueryInfo to query event events

* Pull actual query text/connection info for displaying

* cons and expand (#6489)

* Making query history visible in view and open query command

* expand and icons

* Failure icon enabled (#6491)

* Making query history visible in view and open query command

* expand and icons

* failure icon enabled

* Minor cleanup

* Open query with connection and add run query (#6496)

* Add initial query-history extension

* Fix issues caused by master merge, cleanup and add query-history extension (#6567)

* Open query with connection and add run query

* Fix issues caused by latest master merges, cleanup and add query-history extension

* Remove child nodes (#6568)

* Open query with connection and add run query

* Fix issues caused by latest master merges, cleanup and add query-history extension

* Remove child node expansion

* Layering movement and add delete action (#6574)

* Open query with connection and add run query

* Fix issues caused by latest master merges, cleanup and add query-history extension

* Remove child node expansion

* Some layering movement and add delete action

* Move query tracking into service (#6578)

* Open query with connection and add run query

* Fix issues caused by latest master merges, cleanup and add query-history extension

* Remove child node expansion

* Some layering movement and add delete action

* Move query history tracking into service

* Add comment

* Fix actions

* Remove unnecessary type

* cleanup

* Remove unused section of README

* Fix merge issues and address PR comments

* Fix compile and tslint errors

* Change startup function name
2019-09-11 08:23:59 -07:00
Kim Santiago
7d49e75e46 make wizard page names get announced by screen reader (#7040) 2019-09-10 14:59:16 -07:00
Charles Gagnon
8db5bd438e Update iKey (#7150) 2019-09-10 13:43:44 -07:00
Karl Burtram
f6f18b68b5 Update readme for September release (#7096) 2019-09-10 10:19:39 -07:00
Maddy
ab8a9509b8 Books/fix relative links (#7083)
* fix to make relative links work on untitled notebooks

* changes to make the prev/next links work

* show filename with extension
2019-09-10 09:43:00 -07:00
Karl Burtram
4dda5ee549 Bump ADS to 1.12.0 for October (#7139) 2019-09-09 15:43:01 -07:00
Udeesha Gautam
5ae8017233 Bug/accessibility 5 (#7008)
* fixing 6946 and 6796 (second part)

* fix for https://github.com/microsoft/azuredatastudio/issues/6726

* comments cleanup

* taking PR comments

* adding strong border for HC focus

* convert to string template
2019-09-09 15:37:12 -07:00
Benjin Dubishar
66cdbbb335 Automatically focus the execute button in the DacFx wizard summary screens (#6984)
* Focus piping for extension buttons

* Focus the done button when entering the summary page for DacFx wizard

* Adding internal value resets for extension-side button model.

* Correcting remark string
2019-09-09 14:57:28 -07:00
Kim Santiago
3e9b694e6f Update dacpac and schema compare extensions to not use deprecated workspace.rootPath (#7125) 2019-09-09 13:21:35 -07:00
Alan Ren
b1eef13bb0 Update apiWrapper.ts (#7132)
remove the usage of workspace.rootPath; it is not being used.
2019-09-09 11:30:03 -07:00
Kim Santiago
82f93f7da5 remove non-descriptive tooltips (#7099) 2019-09-09 11:03:14 -07:00
Charles Gagnon
119008d05d Add metadata to Agent package.json (#7116) 2019-09-09 10:26:41 -07:00
Charles Gagnon
aeaac4bc17 Add ModelView ImageComponent (#7106)
* Add ModelView ImageComponent

* Remove duplicate property declarations

* Fix sqlops too
2019-09-08 18:29:30 -07:00
Charles Gagnon
789e26ae60 Add attach compound and align names (#7107) 2019-09-08 17:34:36 -07:00
Aasim Khan
b813ace79c Added opening latest notebook run to context menu from notebooks pane (#7066)
* added agent notebooks, notebook history view and view materialized notebook button

* Got a basic UI running for viewing notebook history

* made some changes to make UI look good

* Added new notebook dialog

* Added new notebook Dialog

* Added create notebook dialog

* Added edit and delete notebook job

* Added some notebook history features

* Added new notebook job icons, fixed a minor bug
in the openMaterializedNotebook API, and fixed the
schedule picker API.

* Fixed Bugs in Notebook Grid expansion

* Fixed notebook table highlighting;
grid generation is now done in code.

* fixed some UI bugs

* Added changes to reflect the sqltoolsservice API

* Fixed some localize keys

* Made changes in the PR and added
ability to open Template Notebooks from
notebook history view.

* Added pin and renaming to notebook history

* made some library calls async

* fixed an import bug caused by merging from master

* Validation in NotebookJobDialog

* Added entry points for scheduling notebooks
on file explorer and notebook editor

* Handled no active connections and
a small bug in collapsing grid

* fix a bug in scheduling notebook from explorer
and toolbar

* setting up agent providers from connection now

* changed modals

* Reupload edited template

* Add dialog info, solved an edit bug and localized
UI strings.

* Bug fixes in UI, notebook renaming, and
editing templates on the fly.

* fixed a bug that prevented editing notebook jobs from the notebook jobs table

* Fixed a cyclic dependency, made strings const and
some other changes in the PR

* Made some cyclic dependency and some fixes from PR

* made some changes mentioned in the PR

* Changed storage database health text

* Changed the sqltoolsservice version to point to the latest build.

* Added 'Open latest notebook run' to the notebooks view context menu

* Fixed a small compilation error

* fixed a spelling mistake in function name

* made changes mentioned in the PR and added open-notebook functionality to charts

* Changed some context menu strings and their order

* made some changes from the PR and fixed an API call

* made some changes mentioned in the PR

* Changed sqltoolsservice version to point to the latest build
2019-09-06 17:27:41 -07:00
Anthony Dresser
02f497712d clean up some promise use in cms (#7004) 2019-09-06 15:58:47 -07:00
Kim Santiago
fda4ba81c3 bump sqltoolsservice version to get DacFx fix for DW (#7102) 2019-09-06 10:18:03 -07:00
Hannah Qin
e7a9d34ecd Fix command palette keyboard shortcut in README.md (#6601)
Correcting keyboard shortcut for the command palette in the French lang pack README: changing from "Ctrl+Alt+P" to "Ctrl+Shift+P"
2019-09-05 16:11:29 -07:00
Aditya Bist
485cb43a34 fix issue where sometimes ownerUri was null (#7094) 2019-09-05 15:36:51 -07:00
Cory Rivera
3281d28de7 Disable 'should not be dirty after saving notebook' test in notebook integration tests. (#7091) 2019-09-05 15:34:43 -07:00
Aditya Bist
856833dbc4 fix disconnect option for extension nodes (#7085) 2019-09-05 13:48:47 -07:00
Charles Gagnon
15f1945f31 Localize some missed status messages (#7076)
* Localize some missed status messages

* More service text updates
2019-09-05 11:49:39 -07:00
Charles Gagnon
b1f29a8c92 Don't add unnecessary separator to OE action menu (#7071)
* Don't add unnecessary separator to OE action menu

* Fix another check
2019-09-05 09:22:53 -07:00
Maddy
cbbd4ffbb6 make books viewlet only available in insiders (#7055)
* make books viewlet only available in insiders

* insiders only: Books widget on dashboard
2019-09-05 09:20:03 -07:00
Karl Burtram
e2ea397fb9 Fix agent extension version back to 0.42.0 (#7068) 2019-09-04 17:18:38 -07:00
Aditya Bist
6a4c9b4108 fix cms server deletion from memento (#7062) 2019-09-04 15:45:06 -07:00
Amir Omidi
a61c86bff5 Remove the eol character at the end of string (#7056) 2019-09-04 15:30:46 -07:00
Aditya Bist
35b09542e2 removed separator from context menu (#7059)
* removed separator from context menu

* remove unused import
2019-09-04 15:29:27 -07:00
Udeesha Gautam
a57536be4b Trimming the text for SQLCMD button (#7050) 2019-09-04 15:25:41 -07:00
Aasim Khan
fbb2accacb Agent Notebooks Scheduler (#6786)
* added agent notebooks, notebook history view and view materialized notebook button

* Got a basic UI running for viewing notebook history

* made some changes to make UI look good

* Added new notebook dialog

* Added new notebook Dialog

* Added create notebook dialog

* Added edit and delete notebook job

* Added some notebook history features

* Added new notebook job icons, fixed a minor bug
in the openMaterializedNotebook API, and fixed the
schedule picker API.

* Fixed Bugs in Notebook Grid expansion

* Fixed notebook table highlighting;
grid generation is now done in code.

* fixed some UI bugs

* Added changes to reflect the sqltoolsservice API

* Fixed some localize keys

* Made changes in the PR and added
ability to open Template Notebooks from
notebook history view.

* Added pin and renaming to notebook history

* made some library calls async

* fixed an import bug caused by merging from master

* Validation in NotebookJobDialog

* Added entry points for scheduling notebooks
on file explorer and notebook editor

* Handled no active connections and
a small bug in collapsing grid

* fix a bug in scheduling notebook from explorer
and toolbar

* setting up agent providers from connection now

* changed modals

* Reupload edited template

* Add dialog info, solved an edit bug and localized
UI strings.

* Bug fixes in UI, notebook renaming, and
editing templates on the fly.

* fixed a bug that prevented editing notebook jobs from the notebook jobs table

* Fixed a cyclic dependency, made strings const and
some other changes in the PR

* Made some cyclic dependency and some fixes from PR

* made some changes mentioned in the PR

* Changed storage database health text

* Changed the sqltoolsservice version to point to the latest build.
2019-09-04 15:12:35 -07:00
Charles Gagnon
0a393400b2 Register loadCompletionExtension command (#6985) 2019-09-04 15:02:47 -07:00
Charles Gagnon
8b5ce753e4 More BDC updates (#6990) 2019-09-04 13:49:08 -07:00
Karl Burtram
f05b9396e8 Update extension versions for September (#7053) 2019-09-04 13:17:11 -07:00
DrewSK
6670fe8c1c fix(snippets): adds parenthesis to sqlcreateindex snippet (#7020) 2019-09-04 10:26:54 -07:00
Anthony Dresser
b8518f5795 Add pipeline args for testing (#7028)
* add pipeline args for testing

* fix test publish step

* simplify some testing code

* change coverage reporters

* fix coverage reports
2019-09-03 17:59:19 -07:00
Cory Rivera
c94291af52 Update CSS for inline notebook cell buttons (#7033) 2019-09-03 17:18:32 -07:00
Anthony Dresser
1d6f48806e fix index for panel push (#7035) 2019-09-03 17:17:23 -07:00
Chris LaFreniere
4626f37671 Notebooks: Fix double-click to edit with source as array (#7027)
* Fix double-click to edit with source as array

* Fix equality check
2019-09-03 10:37:19 -07:00
Amir Omidi
33a7fe38e1 Revert "Revert "OE clicking and awaiting the expansion calls"" (#6923)
* Revert "Revert "OE clicking and awaiting the expansion calls" (#6921)"

This reverts commit b9e3a468ae.

* Handle edge cases better

* Polling for OE load time
2019-09-03 10:31:29 -07:00
4298 changed files with 225570 additions and 189093 deletions

View File

@@ -2,7 +2,7 @@
name: Bug report
about: Create a report to help us improve
title: ''
labels: Bug
labels: ''
assignees: ''
---

1
.github/ISSUE_TEMPLATE/config.yml vendored Normal file
View File

@@ -0,0 +1 @@
blank_issues_enabled: false

View File

@@ -2,7 +2,7 @@
name: Feature request
about: Suggest an idea for this project
title: ''
labels: Enhancement
labels: ''
assignees: ''
---

View File

@@ -1,49 +1,36 @@
{
perform: false,
perform: true,
alwaysRequireAssignee: false,
labelsRequiringAssignee: [],
defaultLabel: 'Triage: Needed',
defaultAssignee: '',
autoAssignees: {
accessibility: [],
acquisition: [],
agent: [],
azure: [],
backup: [],
bcdr: [],
'chart viewer': [],
connection: [],
dacfx: [],
dashboard: [],
'data explorer': [],
documentation: [],
'edit data': [],
export: [],
extensibility: [],
extensionManager: [],
globalization: [],
grid: [],
import: [],
insights: [],
intellisense: [],
localization: [],
'managed instance': [],
notebooks: [],
'object explorer': [],
performance: [],
profiler: [],
'query editor': [],
'query execution': [],
reliability: [],
restore: [],
scripting: [],
'server group': [],
settings: [],
setup: [],
shell: [],
showplan: [],
snippet: [],
sql2019Preview: [],
sqldw: [],
supportability: [],
ux: []
Area - Acquisition: [],
Area - Azure: [],
Area - Backup\Restore: [],
Area - Charting\Insights: [],
Area - Connection: [ charles-gagnon ],
Area - DacFX: [],
Area - Dashboard: [],
Area - Data Explorer: [],
Area - Edit Data: [],
Area - Extensibility: [],
Area - External Table: [],
Area - Fundamentals: [],
Area - Language Service: [ charles-gagnon ],
Area - Localization: [],
Area - Notebooks: [ chlafreniere ],
Area - Performance: [],
Area - Query Editor: [ anthonydresser ],
Area - Query Plan: [],
Area - Reliability: [],
Area - Resource Deployment: [],
Area - Schema Compare: [],
Area - Shell: [],
Area - SQL Agent: [],
Area - SQL Import: [],
Area - SQL Profiler: [],
Area - SQL 2019: [],
Area - SSMS Integration: []
}
}

9
.github/pull_request_template.md vendored Normal file
View File

@@ -0,0 +1,9 @@
<!-- Thank you for submitting a Pull Request. Please:
* Read our Pull Request guidelines:
https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests.
* Associate an issue with the Pull Request.
* Ensure that the code is up-to-date with the `master` branch.
* Include a description of the proposed changes and how to test them.
-->
This PR fixes #

6
.github/stale.yml vendored Normal file
View File

@@ -0,0 +1,6 @@
{
perform: true,
label: 'Stale PR',
daysSinceLastUpdate: 7,
ignoredLabels: ['Do Not Merge']
}

118
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,118 @@
name: CI
on:
push:
branches:
- master
- release/*
pull_request:
branches:
- master
- release/*
jobs:
linux:
runs-on: ubuntu-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
- run: |
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
name: Setup Build Environment
- uses: actions/setup-node@v1
with:
node-version: 10
# TODO: cache node modules
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests
windows:
runs-on: windows-2016
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- uses: actions/setup-python@v1
with:
python-version: '2.x'
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: .\scripts\test.bat --tfs "Unit Tests"
name: Run Unit Tests
# - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests
darwin:
runs-on: macos-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
# - run: ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests

13
.github/workflows/tslint.yml vendored Normal file
View File

@@ -0,0 +1,13 @@
name: TSLint Enforcement
on: [pull_request]
jobs:
job:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@v1
- name: TSLint
uses: aaomidi/gh-action-tslint@master
with:
token: ${{ secrets.GITHUB_TOKEN }}
tslint_config: 'tslint-sql.json'

1
.gitignore vendored
View File

@@ -30,3 +30,4 @@ coverage/
test_data/
test-results/
yarn-error.log
*.vsix

View File

@@ -0,0 +1,33 @@
/**
* @name No floating promises
* @kind problem
* @problem.severity error
* @id js/experimental/floating-promise
*/
import javascript
private predicate isEscapingPromise(PromiseDefinition promise) {
exists (DataFlow::Node escape | promise.flowsTo(escape) |
escape = any(DataFlow::InvokeNode invk).getAnArgument()
or
escape = any(DataFlow::FunctionNode fun).getAReturn()
or
escape = any(ThrowStmt t).getExpr().flow()
or
escape = any(GlobalVariable v).getAnAssignedExpr().flow()
or
escape = any(DataFlow::PropWrite write).getRhs()
or
exists(WithStmt with, Assignment assign |
with.mayAffect(assign.getLhs()) and
assign.getRhs().flow() = escape
)
)
}
from PromiseDefinition promise
where
not exists(promise.getAMethodCall(any(string m | m = "then" or m = "catch" or m = "finally"))) and
not exists (AwaitExpr e | promise.flowsTo(e.getOperand().flow())) and
not isEscapingPromise(promise)
select promise, "This promise appears to be a floating promise"
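For context, a short, hypothetical TypeScript illustration (not code from this repository) of the floating-promise pattern a rule like the query above targets, alongside two variants that handle the promise:

// saveSettings is a placeholder async function used only for illustration.
async function saveSettings(): Promise<void> {
	// ... persist something asynchronously ...
}

function onClick(): void {
	saveSettings();                       // floating: result dropped, rejections are silently lost
	saveSettings().catch(console.error);  // handled via .catch, so not a floating promise
}

async function onSubmit(): Promise<void> {
	await saveSettings();                 // awaited, so not a floating promise
}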

6
.prettierrc.json Normal file
View File

@@ -0,0 +1,6 @@
{
"useTabs": true,
"printWidth": 120,
"semi": true,
"singleQuote": true
}

88
.vscode/launch.json vendored
View File

@@ -16,6 +16,7 @@
"request": "attach",
"name": "Attach to Extension Host",
"port": 5870,
"timeout": 30000,
"restart": true,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -66,17 +67,16 @@
"request": "launch",
"name": "Launch azuredatastudio",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
"timeout": 20000
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 20000
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 20000
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
},
"port": 9222,
"timeout": 20000,
"env": {
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
},
@@ -127,6 +127,33 @@
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
{
"type": "chrome",
"request": "launch",
"name": "Launch ADS (Web) (TBD)",
"runtimeExecutable": "yarn",
"runtimeArgs": [
"web"
],
},
{
"type": "chrome",
"request": "launch",
"name": "Launch ADS (Web, Chrome) (TBD)",
"url": "http://localhost:8080",
"preLaunchTask": "Run web"
},
{
"type": "node",
"request": "launch",
"name": "Git Unit Tests",
"program": "${workspaceFolder}/extensions/git/node_modules/mocha/bin/_mocha",
"stopOnEntry": false,
"cwd": "${workspaceFolder}/extensions/git",
"outFiles": [
"${workspaceFolder}/extensions/git/out/**/*.js"
]
},
{
"name": "Launch Built-in Extension",
"type": "extensionHost",
@@ -165,7 +192,10 @@
"cwd": "${workspaceFolder}",
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
],
"env": {
"MOCHA_COLORS": "true"
}
},
{
"type": "chrome",
@@ -183,6 +213,22 @@
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
{
"type": "chrome",
"request": "launch",
"name": "Run Extension Integration Tests",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
},
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
],
"compounds": [
{
@@ -199,6 +245,13 @@
"Run Extension Unit Tests"
]
},
{
"name": "Debug Extension Integration Tests",
"configurations": [
"Attach to Extension Host",
"Run Extension Integration Tests"
]
},
{
"name": "Debug azuredatastudio Main and Renderer",
"configurations": [
@@ -207,18 +260,33 @@
]
},
{
"name": "Search and Renderer processes",
"name": "Debug azuredatastudio Main, Renderer & Extension Host",
"configurations": [
"Launch azuredatastudio",
"Attach to Main Process",
"Attach to Extension Host"
]
},
{
"name": "Debug Renderer and search processes",
"configurations": [
"Launch azuredatastudio",
"Attach to Search Process"
]
},
{
"name": "Renderer and Extension Host processes",
"name": "Debug Renderer and Extension Host processes",
"configurations": [
"Launch azuredatastudio",
"Attach to Extension Host"
]
},
{
"name": "Attach Renderer and Extension Host",
"configurations": [
"Attach to azuredatastudio",
"Attach to Extension Host"
]
}
]
}
}

View File

@@ -39,6 +39,7 @@
],
"typescript.tsdk": "node_modules/typescript/lib",
"npm.exclude": "**/extensions/**",
"npm.packageManager": "yarn",
"emmet.excludeLanguages": [],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.quoteStyle": "single",
@@ -60,5 +61,6 @@
"remote.extensionKind": {
"msjsdiag.debugger-for-chrome": "workspace"
},
"gulp.autoDetect": "off",
"files.insertFinalNewline": true
}
}

37
.vscode/tasks.json vendored
View File

@@ -5,7 +5,10 @@
"type": "npm",
"script": "watch",
"label": "Build VS Code",
"group": "build",
"group": {
"kind": "build",
"isDefault": true
},
"isBackground": true,
"presentation": {
"reveal": "never"
@@ -30,22 +33,22 @@
},
{
"type": "npm",
"script": "strict-initialization-watch",
"label": "TS - Strict Initialization",
"script": "strict-function-types-watch",
"label": "TS - Strict Function Types",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-strict-initialization",
"owner": "typescript-function-types",
"applyTo": "allDocuments"
}
},
{
"type": "npm",
"script": "strict-null-check-watch",
"label": "TS - Strict Null Cheks",
"label": "TS - Strict Null Checks",
"isBackground": true,
"presentation": {
"reveal": "never"
@@ -87,8 +90,8 @@
"problemMatcher": []
},
{
"type": "gulp",
"task": "electron",
"type": "npm",
"script": "electron",
"label": "Download electron"
},
{
@@ -96,6 +99,24 @@
"task": "hygiene",
"problemMatcher": []
},
{
"type": "shell",
"command": "yarn web -- --no-launch",
"label": "Run web",
"isBackground": true,
// This section to make error go away when launching the debug config
"problemMatcher": {
"pattern": {
"regexp": ""
},
"background": {
"beginsPattern": ".*node .*",
"endsPattern": "Web UI available at .*"
}
},
"presentation": {
"reveal": "never"
}
},
]
}

View File

@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "4.2.9"
target "6.1.5"
runtime "electron"

View File

@@ -1,5 +1,48 @@
# Change Log
## Version 1.13.1
* Release date: November 15, 2019
* Release status: General Availability
* Resolved [#8210 Copy/Paste results are out of order](https://github.com/microsoft/azuredatastudio/issues/8210).
## Version 1.13.0
* Release date: November 4, 2019
* Release status: General Availability
* General Availability release for Schema Compare and DACPAC extensions
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/43?closed=1).
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* aspnerd for `Use selected DB for import wizard schema list` [#7878](https://github.com/microsoft/azuredatastudio/pull/7878)
## Version 1.12.2
* Release date: October 11, 2019
* Release status: General Availability
* Hotfix release (1.12.2): `Disable automatically starting the EH in inspect mode` https://github.com/microsoft/azuredatastudio/commit/c9bef82ace6c67190d0e83820011a2bbd1f793c1
## Version 1.12.1
* Release date: October 7, 2019
* Release status: General Availability
* Hotfix release: `Notebooks: Ensure quotes and backslashes are escaped properly in text editor model` https://github.com/microsoft/azuredatastudio/pull/7540
## Version 1.12.0
* Release date: October 2, 2019
* Release status: General Availability
## What's new in this version
* Announcing the Query History panel
* Improved Query Results Grid copy selection support
* TempDB page added to Server Reports extension
* PowerShell extension update
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/42?closed=1).
## Version 1.11.0
* Release date: September 10, 2019
* Release status: General Availability
## What's new in this version
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/41?closed=1).
## Version 1.10.0
* Release date: August 14, 2019
* Release status: General Availability
@@ -197,7 +240,7 @@ We would like to thank all our users who raised issues, and in particular the fo
## What's new in this version
* Announcing the SQL Server 2019 Preview extension.
* Support for SQL Server 2019 preview features including big data cluster support.
* Support for SQL Server 2019 preview features including Big Data Cluster support.
* Azure Data Studio Notebooks
* The Azure Resource Explorer viewlet lets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported.
* SQL Server Polybase Create External Table Wizard

View File

@@ -2,6 +2,7 @@
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status](https://dev.azure.com/azuredatastudio/azuredatastudio/_apis/build/status/Azure%20Data%20Studio%20CI?branchName=master)](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=master)
[![Twitter Follow](https://img.shields.io/twitter/follow/azuredatastudio?style=social)](https://twitter.com/azuredatastudio)
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
@@ -9,13 +10,13 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
Platform | Link
-- | --
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2100710
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2100711
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2100712
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2100809
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2100714
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2100810
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2100672
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2109256
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2109085
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2109255
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2109180
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2109179
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2109178
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2109254
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
@@ -68,6 +69,12 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
## Contributions and "Thank You"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* eulercamposbarros for `Prevent connections from moving on click (#7528)`
* AlexFsmn for `Fixed issue where task icons got hidden if text was too long`
* jamesrod817 for `Tempdb (#7022)`
* dzsquared for `fix(snippets): ads parenthesis to sqlcreateindex snippet #7020`
* devmattrick for `Update row count as updates are received #6642`
* mottykohn for `In Message panel onclick scroll to line #6417`
* Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480`
* yamatoya for `fix the format #4899`
* GeoffYoung for `Fix sqlDropColumn description #4422`

View File

@@ -36,6 +36,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist
@@ -1175,7 +1176,35 @@ That's all there is to it!
=========================================
END OF jschardet NOTICES AND INFORMATION
%% jupyter-powershell NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2016 Sergei Vorobev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF jupyter-powershell NOTICES AND INFORMATION
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2015 Project Jupyter Contributors
All rights reserved.

View File

@@ -1,84 +0,0 @@
steps:
- script: |
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
sudo apt-get install -y libkrb5-dev
# sh -e /etc/init.d/xvfb start
# sleep 3
displayName: "Linux preinstall"
condition: eq(variables['Agent.OS'], 'Linux')
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "$(build-cache)"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.10.1"
- script: |
yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "$(build-cache)"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
displayName: Download Electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: "Run TSLint"
- script: |
yarn strict-null-check
displayName: "Run Strict Null Check"
- script: |
yarn compile
displayName: "Compile"
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
displayName: "Tests"
condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
displayName: "Tests"
condition: and(succeeded(), ne(variables['Agent.OS'], 'Linux'))
- task: PublishTestResults@2
inputs:
testResultsFiles: "**/test-results.xml"
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: "cobertura"
summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
condition: ne(variables['Agent.OS'], 'Linux')

View File

@@ -1,65 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "$(build-cache)"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.10.1"
- script: |
yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "$(build-cache)"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
displayName: "Electron"
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: "Run TSLint"
- script: |
yarn strict-null-check
displayName: "Run Strict Null Check"
- script: |
yarn compile
displayName: "Compile"
- script: |
.\scripts\test.bat --reporter mocha-junit-reporter --coverage
displayName: "Test"
- task: PublishTestResults@2
inputs:
testResultsFiles: "test-results.xml"
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: "cobertura"
summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report

View File

@@ -3,20 +3,20 @@ trigger:
- release/*
jobs:
- job: Windows
pool:
vmImage: VS2017-Win2016
steps:
- template: azure-pipelines-windows.yml
- job: Windows
pool:
vmImage: VS2017-Win2016
steps:
- template: build/azure-pipelines/win32/continuous-build-win32.yml
- job: Linux
pool:
vmImage: "Ubuntu-16.04"
steps:
- template: azure-pipelines-linux-mac.yml
- job: Linux
pool:
vmImage: 'Ubuntu-16.04'
steps:
- template: build/azure-pipelines/linux/continuous-build-linux.yml
- job: macOS
pool:
vmImage: macOS 10.13
steps:
- template: azure-pipelines-linux-mac.yml
- job: macOS
pool:
vmImage: macOS 10.13
steps:
- template: build/azure-pipelines/darwin/continuous-build-darwin.yml

View File

@@ -1 +1 @@
2019-07-11T05:47:05.444Z
2019-12-01T02:20:58.491Z

View File

@@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as vfs from 'vinyl-fs';
const files = [
'.build/extensions/**/*.vsix', // external extensions
'.build/win32-x64/**/*.{exe,zip}', // windows binaries
'.build/linux/sha256hashes.txt', // linux hashes
'.build/linux/deb/amd64/deb/*', // linux debs
'.build/linux/rpm/x86_64/*', // linux rpms
'.build/linux/server/*', // linux server
'.build/linux/archive/*', // linux archive
'.build/docker/**', // docker images
'.build/darwin/**', // darwin binaries
'.build/version.json' // version information
];
async function main() {
return new Promise((resolve, reject) => {
const stream = vfs.src(files, { base: '.build', allowEmpty: true })
.pipe(vfs.dest(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!));
stream.on('end', () => resolve());
stream.on('error', e => reject(e));
});
}
main().catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -0,0 +1,132 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import { Readable } from 'stream';
import * as crypto from 'crypto';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';
interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl?: string;
hash: string;
sha256hash: string;
size: number;
supportsFastUpdate?: boolean;
}
if (process.argv.length !== 6) {
console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
process.exit(-1);
}
function hashStream(hashName: string, stream: Readable): Promise<string> {
return new Promise<string>((c, e) => {
const shasum = crypto.createHash(hashName);
stream
.on('data', shasum.update.bind(shasum))
.on('error', e)
.on('close', () => c(shasum.digest('hex')));
});
}
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
}
function getEnv(name: string): string {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
async function main(): Promise<void> {
const [, , platform, type, name, file] = process.argv;
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
console.log('Creating asset...');
const stat = await new Promise<fs.Stats>((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
const size = stat.size;
console.log('Size:', size);
const stream = fs.createReadStream(file);
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
console.log('SHA1:', sha1hash);
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + name;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const blobExists = await doesAssetExist(blobService, quality, blobName);
if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');
await uploadBlob(blobService, quality, blobName, file);
console.log('Blobs successfully uploaded.');
const asset: Asset = {
platform,
type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
};
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);
}
main().then(() => {
console.log('Asset successfully created');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});

View File

@@ -0,0 +1,60 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { CosmosClient } from '@azure/cosmos';
if (process.argv.length !== 3) {
console.error('Usage: node createBuild.js VERSION');
process.exit(-1);
}
function getEnv(name: string): string {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
async function main(): Promise<void> {
const [, , _version] = process.argv;
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
const queuedBy = getEnv('BUILD_QUEUEDBY');
const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
const version = _version + (quality === 'stable' ? '' : `-${quality}`);
console.log('Creating build...');
console.log('Quality:', quality);
console.log('Version:', version);
console.log('Commit:', commit);
const build = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: false,
sourceBranch,
queuedBy,
assets: [],
updates: {}
};
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]);
}
main().then(() => {
console.log('Build successfully created');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});

View File

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"

View File

@@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit: string, files: readonly string[]): Promise<void> {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();

View File

@@ -0,0 +1,70 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { CosmosClient } from '@azure/cosmos';
function getEnv(name: string): string {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
interface Config {
id: string;
frozen: boolean;
}
function createDefaultConfig(quality: string): Config {
return {
id: quality,
frozen: false
};
}
async function getConfig(client: CosmosClient, quality: string): Promise<Config> {
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;
const res = await client.database('builds').container('config').items.query(query).fetchAll();
if (res.resources.length === 0) {
return createDefaultConfig(quality);
}
return res.resources[0] as Config;
}
async function main(): Promise<void> {
const commit = getEnv('BUILD_SOURCEVERSION');
const quality = getEnv('VSCODE_QUALITY');
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const config = await getConfig(client, quality);
console.log('Quality config:', config);
if (config.frozen) {
console.log(`Skipping release because quality ${quality} is frozen.`);
return;
}
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
await scripts.storedProcedure('releaseBuild').execute('', [commit]);
}
main().then(() => {
console.log('Build successfully released');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});

View File

@@ -8,7 +8,7 @@
import * as url from 'url';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { DocumentClient, RetrievedDocument } from 'documentdb';
import { CosmosClient } from '@azure/cosmos';
function log(...args: any[]) {
console.log(...[`[${new Date().toISOString()}]`, ...args]);
@@ -23,7 +23,7 @@ if (process.argv.length < 3) {
process.exit(-1);
}
interface Build extends RetrievedDocument {
interface Build {
assets: Asset[];
}
@@ -38,62 +38,20 @@ interface Asset {
supportsFastUpdate?: boolean;
}
function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const updateQuery = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
function _update(): Promise<void> {
updateTries++;
return new Promise<void>((c, e) => {
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
const release = results[0];
release.assets = [
...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
asset
];
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
if (err) { return e(err); }
log('Build successfully updated.');
c();
});
});
});
}
return _update();
}
async function sync(commit: string, quality: string): Promise<void> {
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = `dbs/builds/colls/${quality}`;
const query = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const container = client.database('builds').container(quality);
const build = await new Promise<Build>((c, e) => {
cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
c(results[0] as Build);
});
});
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
const res = await container.items.query<Build>(query, {}).fetchAll();
if (res.resources.length !== 1) {
throw new Error(`No builds found for ${commit}`);
}
const build = res.resources[0];
log(`Found build for ${commit}, with ${build.assets.length} assets`);
@@ -140,8 +98,9 @@ async function sync(commit: string, quality: string): Promise<void> {
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
log(` Updating build in DB...`);
asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
await updateBuild(commit, quality, asset.platform, asset.type, asset);
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
await container.scripts.storedProcedure('setAssetMooncakeUrl')
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]);
log(` Done ✔️`);
} catch (err) {

View File

@@ -1,46 +1,55 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- script: |
yarn --frozen-lockfile
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
yarn electron x64
displayName: Download Electron
- script: |
yarn gulp hygiene
yarn gulp hygiene --skip-tslint
displayName: Run Hygiene Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
yarn gulp tslint
displayName: Run TSLint Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-null-check
displayName: Run Strict Null Check.
- script: | # {{SQL CARBON EDIT}} add step
yarn tslint
displayName: Run TSLint (gci)
# - script: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- script: |
./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
# - script: | {{SQL CARBON EDIT}} remove step
# ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
# ensure drop directories exist
mkdir -p $REPO/.build/darwin/{archive,server}
# remove pkg from archive
zip -d $REPO/.build/darwin/archive/azuredatastudio-darwin.zip "*.pkg"
# package Remote Extension Host
pushd .. && mv azuredatastudio-reh-darwin azuredatastudio-server-darwin && zip -Xry $REPO/.build/darwin/server/azuredatastudio-server-darwin.zip azuredatastudio-server-darwin && popd
node build/azure-pipelines/common/copyArtifacts.js

View File

@@ -21,11 +21,11 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -102,20 +102,28 @@ steps:
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
cd test/smoke
yarn compile
cd -
yarn smoketest --web --headless
continueOnError: true
displayName: Run web smoke tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# Web Smoke Tests disabled due to https://github.com/microsoft/vscode/issues/80308
# - script: |
# set -e
# cd test/smoke
# yarn compile
# cd -
# yarn smoketest --web --headless
# continueOnError: true
# displayName: Run web smoke tests
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e

View File

@@ -5,28 +5,20 @@ set -e
zip -d ../VSCode-darwin.zip "*.pkg"
# publish the build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
node build/azure-pipelines/common/publish.js \
"$VSCODE_QUALITY" \
node build/azure-pipelines/common/createAsset.js \
darwin \
archive \
"VSCode-darwin-$VSCODE_QUALITY.zip" \
$VERSION \
true \
../VSCode-darwin.zip
# package Remote Extension Host
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
# publish Remote Extension Host
node build/azure-pipelines/common/publish.js \
"$VSCODE_QUALITY" \
node build/azure-pipelines/common/createAsset.js \
server-darwin \
archive-unsigned \
"vscode-server-darwin.zip" \
$VERSION \
true \
../vscode-server-darwin.zip
# publish hockeyapp symbols

View File

@@ -0,0 +1,171 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: '10.15.3'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: '1.x'
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login azuredatastudio
password $(github-distro-mixin-password)
EOF
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
yarn gulp install-sqltoolsservice
displayName: Install sqltoolsservice
- script: |
set -e
yarn gulp package-rebuild-extensions
yarn gulp vscode-darwin-min-ci
yarn gulp vscode-reh-darwin-min-ci
yarn gulp vscode-reh-web-darwin-min-ci
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
- script: |
set -e
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter
displayName: Run unit tests
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
mkdir -p .build/darwin/archive
pushd ../azuredatastudio-darwin && zip -r -X -y $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip * && popd
displayName: 'Archive'
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'ESRP CodeSigning'
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(Build.SourcesDirectory)/.build/darwin/archive'
Pattern: 'azuredatastudio-darwin.zip'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 20
- script: |
set -e
./build/azure-pipelines/darwin/createDrop.sh
displayName: Create Drop
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: 'test-results.xml'
searchFolder: '$(Build.SourcesDirectory)'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishTestResults@2
displayName: 'Publish Integration and Smoke Test Results'
inputs:
testResultsFiles: 'darwin-integration-tests-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishCodeCoverageResults@1
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true

View File
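
The 'Check RestoreCache' step near the top of the template above is a guard rather than a build step: it only runs when the compilation cache produced by the Compile job failed to restore, and it exits non-zero so the platform build fails fast instead of silently rebuilding everything. In shell terms the gate amounts to the following sketch (CACHE_RESTORED_COMPILATION stands in for the CacheRestored-Compilation pipeline variable):

    if [ "${CACHE_RESTORED_COMPILATION:-false}" != "true" ]; then
      echo "Compilation cache was not restored; failing fast" >&2
      exit 1
    fi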

@@ -0,0 +1,19 @@
Param(
[string]$sourcesDir,
[string]$artifactsDir,
[string]$storageKey,
[string]$documentDbKey
)
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit
$ZipName = "azuredatastudio-darwin.zip"
$Zip = "$artifactsDir\darwin\archive\$ZipName"
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive $ZipName $Version true $Zip $CommitId

View File
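
The publish script above recovers the version, quality and commit of the build from version.json in the artifact drop rather than from the sources. Going by how the Linux createDrop.sh later in this diff writes that file, its contents look roughly like this (values are purely illustrative):

    cat .build/version.json
    # { "version": "1.14.0", "quality": "stable", "commit": "<full git sha>" }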

@@ -1,3 +1,6 @@
pool:
vmImage: 'Ubuntu-16.04'
trigger:
branches:
include: ['master', 'release/*']
@@ -8,27 +11,27 @@ pr:
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
azureSubscription: 'azuredatastudio-adointegration'
KeyVaultName: ado-secrets
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
login azuredatastudio
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
# Push master branch into oss/master

View File

@@ -0,0 +1,16 @@
# Download base image ubuntu 16.04
FROM ubuntu:16.04
# Update Software repository
RUN apt-get update
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0
ADD ./ /opt/ads-server
RUN chmod +x /opt/ads-server/server.sh && chmod +x /opt/ads-server/node
CMD ["/opt/ads-server/server.sh"]
EXPOSE 8000:8000
EXPOSE 8001:8001

View File
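
The Dockerfile above copies a packaged server build into /opt/ads-server and launches it through server.sh, exposing ports 8000 and 8001. Building and running it locally could look like the following sketch (the -f path and build context mirror the docker build call in the Linux createDrop.sh later in this diff; the local server directory name is an assumption):

    docker build -t azuredatastudio-server -f build/azure-pipelines/docker/Dockerfile ../azuredatastudio-server-linux-x64
    docker run --rm -p 8000:8000 -p 8001:8001 azuredatastudio-server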

@@ -1,10 +1,13 @@
pool:
vmImage: 'Ubuntu-16.04'
trigger: none
pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -31,13 +34,3 @@ steps:
git push origin HEAD:electron-6.0.x
displayName: Sync & Merge Exploration
trigger: none
pr: none
schedules:
- cron: "0 5 * * Mon-Fri"
displayName: Mon-Fri at 7:00
branches:
include:
- master

View File

@@ -0,0 +1,39 @@
trigger:
branches:
include: ['master']
pr: none
jobs:
- job: ExplorationMerge
pool:
vmImage: Ubuntu-16.04
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- script: |
set -e
cat << EOF > ~/.netrc
machine mssqltools.visualstudio.com
login azuredatastudio
password $(DEVOPS_PASSWORD)
EOF
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
git remote add explore "$ADS_EXPLORE_REPO"
git fetch explore
git checkout -b merge-branch explore/master
git merge origin/master
git push explore HEAD:master
displayName: Sync & Merge Explore
env:
ADS_EXPLORE_REPO: $(ADS_EXPLORE_REPO)
DEVOPS_PASSWORD: $(DEVOPS_PASSWORD)

View File

@@ -0,0 +1,22 @@
# Download base image ubuntu 16.04
FROM ubuntu:16.04
# Update Software repository
RUN apt-get update --fix-missing
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.8
#docker
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN apt-key fingerprint 0EBFCD88
RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
RUN apt-get update
RUN apt-get -y install docker-ce docker-ce-cli containerd.io
# This image needs to be built on a linux host; the xvfb service won't start
# if the image is built on a windows host.
ADD ./xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults

View File
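
The second Dockerfile above defines the Linux build agent itself: native build dependencies, the Docker CLI and an xvfb init script, with the closing comment warning that it has to be built on a Linux host. The compile and build jobs later in this diff pull it as sqltoolscontainers.azurecr.io/linux-build-agent:1, so publishing a new revision would look roughly like this sketch (the Dockerfile path is an assumption, and pushing requires credentials for that registry):

    # run on a Linux host, per the comment in the Dockerfile
    docker build -t sqltoolscontainers.azurecr.io/linux-build-agent:1 -f build/azure-pipelines/docker/linux-agent.Dockerfile .
    az acr login --name sqltoolscontainers    # assumes Azure CLI access to the registry
    docker push sqltoolscontainers.azurecr.io/linux-build-agent:1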

@@ -2,53 +2,62 @@ steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- script: |
yarn --frozen-lockfile
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
yarn electron x64
displayName: Download Electron
- script: |
yarn gulp hygiene
yarn gulp hygiene --skip-tslint
displayName: Run Hygiene Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
yarn gulp tslint
displayName: Run TSLint Checks
- script: | # {{SQL CARBON EDIT}} add gci checks
yarn tslint
displayName: Run TSLint (gci)
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-null-check
displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- script: |
DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
# - script: | {{SQL CARBON EDIT}} remove step
# DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
# Publish tarball
mkdir -p $REPO/.build/linux/{archive,server}
PLATFORM_LINUX="linux-x64"
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
# create version
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
COMMIT_ID=$(git rev-parse HEAD)
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$REPO/.build/version.json"
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
# create docker
mkdir -p $REPO/.build/docker
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME
docker save azuredatastudio-server | gzip > $REPO/.build/docker/azuredatastudio-server-docker.tar.gz
node build/azure-pipelines/common/copyArtifacts.js

View File
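
Besides the tarballs, createDrop.sh above saves the azuredatastudio-server image as a gzipped docker archive inside the drop. Smoke-testing that artifact on another machine is just a load-and-run (a sketch; the published ports follow the EXPOSE lines in the server Dockerfile earlier in this diff):

    gunzip -c .build/docker/azuredatastudio-server-docker.tar.gz | docker load
    docker run --rm -p 8000:8000 -p 8001:8001 azuredatastudio-server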

@@ -1,40 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const documentdb_1 = require("documentdb");
function createDefaultConfig(quality) {
return {
id: quality,
frozen: false
};
}
function getConfig(quality) {
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) {
return e(err);
}
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
});
});
}
getConfig(process.argv[2])
.then(config => {
console.log(config.frozen);
process.exit(0);
})
.catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -21,11 +21,11 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

View File

@@ -21,11 +21,11 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -105,12 +105,45 @@ steps:
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64"
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn gulp "vscode-linux-x64-build-deb"
yarn gulp "vscode-linux-x64-build-rpm"
yarn gulp "vscode-linux-x64-prepare-snap"
displayName: Build packages
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '.build/linux/rpm/x86_64'
Pattern: '*.rpm'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-450779-Pgp",
"operationSetCode": "LinuxSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: Codesign rpm
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \

View File

@@ -10,13 +10,11 @@ BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
node build/azure-pipelines/common/createAsset.js "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$TARBALL_PATH"
# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
@@ -27,32 +25,28 @@ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
# Publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"
# Publish DEB
yarn gulp "vscode-linux-x64-build-deb"
PLATFORM_DEB="linux-deb-x64"
DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"
# Publish RPM
yarn gulp "vscode-linux-x64-build-rpm"
PLATFORM_RPM="linux-rpm-x64"
RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"
# Publish Snap
yarn gulp "vscode-linux-x64-prepare-snap"
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"

View File

@@ -1,11 +1,11 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -43,12 +43,10 @@ steps:
# Create snap package
BUILD_VERSION="$(date +%s)"
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
node build/azure-pipelines/common/createAsset.js "linux-snap-x64" package "$SNAP_FILENAME" "$SNAP_PATH"

View File

@@ -0,0 +1,172 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: '10.15.1'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login azuredatastudio
password $(github-distro-mixin-password)
EOF
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
yarn gulp install-sqltoolsservice
yarn gulp install-ssmsmin
displayName: Install extension binaries
- script: |
set -e
yarn gulp vscode-linux-x64-min-ci
yarn gulp vscode-reh-linux-x64-min-ci
yarn gulp vscode-reh-web-linux-x64-min-ci
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
- script: |
set -e
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn gulp package-rebuild-extensions
yarn gulp compile-extensions
yarn gulp package-external-extensions
displayName: Package External extensions
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
DISPLAY=:10 ./scripts/test-extensions-unit.sh
displayName: 'Run Stable Extension Unit Tests'
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
displayName: 'Run Unstable Extension Unit Tests'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
- script: |
set -e
yarn gulp vscode-linux-x64-build-deb
displayName: Build Deb
- script: |
set -e
yarn gulp vscode-linux-x64-build-rpm
displayName: Build Rpm
- script: |
set -e
./build/azure-pipelines/linux/createDrop.sh
displayName: Create Drop
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: 'test-results.xml'
searchFolder: '$(Build.SourcesDirectory)'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishCodeCoverageResults@1
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
continueOnError: true
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true

View File
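
The Linux product build above starts the xvfb service and then drives the stable and unstable extension unit tests against the packaged application on DISPLAY=:10. Outside the container, the same tests can be pointed at a local build under a throwaway X server (a sketch; xvfb-run availability and the ../azuredatastudio-linux-x64 location are assumptions):

    set -e
    APP_ROOT="$(pwd)/../azuredatastudio-linux-x64"    # assumed local build output
    APP_NAME="$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")"
    INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
      xvfb-run -a ./scripts/test-extensions-unit.sh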

@@ -0,0 +1,36 @@
Param(
[string]$sourcesDir,
[string]$artifactsDir,
[string]$storageKey,
[string]$documentDbKey
)
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit
$Arch = "x64"
# Publish tarball
$PlatformLinux = "linux-$Arch"
$TarballFilename = "azuredatastudio-linux-$Arch.tar.gz"
$TarballPath = "$artifactsDir\linux\archive\$TarballFilename"
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned $TarballFilename $Version true $TarballPath $CommitId
# Publish DEB
$PlatformDeb = "linux-deb-$Arch"
$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
$DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package $DebFilename $Version true $DebPath $CommitId
# Publish RPM
$PlatformRpm = "linux-rpm-$Arch"
$RpmFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\rpm\x86_64\*.rpm)"
$RpmPath = "$artifactsDir\linux\rpm\x86_64\$RpmFilename"
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package $RpmFilename $Version true $RpmPath $CommitId

View File

@@ -21,7 +21,7 @@ function main() {
return;
}
const productJsonFilter = filter('product.json', { restore: true });
const productJsonFilter = filter('**/product.json', { restore: true });
fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
return vfs
@@ -29,7 +29,7 @@ function main() {
.pipe(filter(f => !f.isDirectory()))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json(o => Object.assign({}, require('../product.json'), o)))
.pipe(json(o => Object.assign({}, require('../../product.json'), o)))
.pipe(productJsonFilter.restore)
.pipe(es.mapSync(function (f) {
fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
@@ -38,4 +38,4 @@ function main() {
.pipe(vfs.dest('.'));
}
main();
main();

View File

@@ -56,7 +56,7 @@ jobs:
- template: linux/snap-build-linux.yml
- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
@@ -67,7 +67,7 @@ jobs:
- template: linux/product-build-linux-multiarch.yml
- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
@@ -78,7 +78,7 @@ jobs:
- template: linux/product-build-linux-multiarch.yml
- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
@@ -118,6 +118,7 @@ jobs:
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxArm64
- LinuxAlpine
- macOS
steps:
@@ -133,6 +134,7 @@ jobs:
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxArm64
- LinuxAlpine
- LinuxWeb
- macOS

View File

@@ -12,23 +12,24 @@ steps:
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
dryRun: true
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
versionSpec: "12.13.0"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
@@ -41,7 +42,7 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
@@ -49,33 +50,33 @@ steps:
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
@@ -83,20 +84,28 @@ steps:
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
yarn gulp hygiene
yarn gulp hygiene --skip-tslint
yarn gulp tslint
yarn monaco-compile-check
displayName: Run hygiene checks
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
displayName: Run hygiene, tslint and monaco compile checks
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
@@ -106,14 +115,22 @@ steps:
yarn gulp minify-vscode-reh
yarn gulp minify-vscode-reh-web
displayName: Compile
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
VERSION=`node -p "require(\"./package.json\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/createBuild.js $VERSION
displayName: Create build
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
@@ -122,4 +139,4 @@ steps:
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

View File

@@ -1,36 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cp = require("child_process");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
if (!isValidTag(tag)) {
throw Error(`Invalid tag ${tag}`);
}
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function isValidTag(t) {
if (t.split('.').length !== 3) {
return false;
}
const [major, minor, bug] = t.split('.');
// Only release for tags like 1.34.0
if (bug !== '0') {
return false;
}
if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
return false;
}
return true;
}

View File

@@ -35,9 +35,9 @@ function isValidTag(t: string) {
return false;
}
if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
return false;
}
return true;
}
}

View File

@@ -9,11 +9,27 @@ pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
if [ "$TAG_VERSION" == "1.999.0" ]; then
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
exit 1
fi
displayName: Check 1.999.0 tag
- bash: |
# Install build dependencies

View File

@@ -1,62 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const cp = require("child_process");
const path = require("path");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
updateDTSFile(outPath, tag);
console.log(`Done updating vscode.d.ts at ${outPath}`);
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function updateDTSFile(outPath, tag) {
const oldContent = fs.readFileSync(outPath, 'utf-8');
const newContent = getNewFileContent(oldContent, tag);
fs.writeFileSync(outPath, newContent);
}
function getNewFileContent(content, tag) {
const oldheader = [
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
` *--------------------------------------------------------------------------------------------*/`
].join('\n');
return getNewFileHeader(tag) + content.slice(oldheader.length);
}
function getNewFileHeader(tag) {
const [major, minor] = tag.split('.');
const shorttag = `${major}.${minor}`;
const header = [
`// Type definitions for Visual Studio Code ${shorttag}`,
`// Project: https://github.com/microsoft/vscode`,
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
` * See https://code.visualstudio.com/api for more information`,
` */`
].join('\n');
return header;
}

View File

@@ -19,4 +19,4 @@ steps:
(cd build ; yarn)
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/release.js
node build/azure-pipelines/common/releaseBuild.js

View File

@@ -0,0 +1,73 @@
resources:
containers:
- container: linux-x64
image: sqltoolscontainers.azurecr.io/linux-build-agent:1
endpoint: ContainerRegistry
jobs:
- job: Compile
pool:
vmImage: 'Ubuntu-16.04'
container: linux-x64
steps:
- template: sql-product-compile.yml
- job: macOS
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
pool:
vmImage: macOS 10.13
dependsOn:
- Compile
steps:
- template: darwin/sql-product-build-darwin.yml
- job: Linux
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
container: linux-x64
dependsOn:
- Compile
steps:
- template: linux/sql-product-build-linux.yml
- job: Windows
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
pool:
vmImage: VS2017-Win2016
dependsOn:
- Compile
steps:
- template: win32/sql-product-build-win32.yml
- job: Windows_Test
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
pool:
name: mssqltools
dependsOn:
- Linux
- Windows
steps:
- template: win32/sql-product-test-win32.yml
- job: Release
condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: 'Ubuntu-16.04'
dependsOn:
- macOS
- Linux
- Windows
- Windows_Test
steps:
- template: sql-release.yml
trigger: none
pr: none
schedules:
- cron: "0 5 * * Mon-Fri"
displayName: Mon-Fri at 7:00
branches:
include:
- master

View File

@@ -0,0 +1,112 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login azuredatastudio
password $(github-distro-mixin-password)
EOF
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
yarn gulp hygiene --skip-tslint
yarn gulp tslint
displayName: Run hygiene, tslint
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp minify-vscode-reh
yarn gulp minify-vscode-reh-web
displayName: Compile
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

View File

@@ -0,0 +1,5 @@
steps:
- script: |
set -e
echo "##vso[build.addbuildtag]Release"
displayName: Set For Release

View File

@@ -1,11 +1,11 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

View File

@@ -21,11 +21,11 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

View File

@@ -7,12 +7,9 @@ ROOT="$REPO/.."
WEB_BUILD_NAME="vscode-web"
WEB_TARBALL_FILENAME="vscode-web.tar.gz"
WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
BUILD="$ROOT/$WEB_BUILD_NAME"
PACKAGEJSON="$BUILD/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/vscode-web.tar.*
(cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "web-standalone" archive-unsigned "$WEB_TARBALL_FILENAME" "$VERSION" true "$WEB_TARBALL_PATH"
node build/azure-pipelines/common/createAsset.js web-standalone archive-unsigned "$WEB_TARBALL_FILENAME" "$WEB_TARBALL_PATH"

View File

@@ -1,50 +1,60 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- powershell: |
yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
yarn gulp electron
displayName: Download Electron
- powershell: |
yarn gulp hygiene
yarn electron
- script: |
yarn gulp hygiene --skip-tslint
displayName: Run Hygiene Checks
- powershell: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn tslint
displayName: Run TSLint (gci)
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-null-check
displayName: Run Strict Null Check
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- powershell: |
yarn compile
displayName: Compile Sources
- powershell: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests
- powershell: |
.\scripts\test-integration.bat --tfs "Integration Tests"
displayName: Run Integration Tests
# - powershell: | {{SQL CARBON EDIT}} remove step
# .\scripts\test-integration.bat --tfs "Integration Tests"
# displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:

View File

@@ -0,0 +1,20 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$Arch = "x64"
$Repo = "$(pwd)"
$Root = "$Repo\.."
$LegacyServer = "$Root\azuredatastudio-reh-win32-$Arch"
$ServerName = "azuredatastudio-server-win32-$Arch"
$Server = "$Root\$ServerName"
$ServerZipLocation = "$Repo\.build\win32-$Arch\server"
$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"
# Create server archive
New-Item $ServerZipLocation -ItemType Directory # New-Item can report an error here even when it succeeds, so don't wrap it in exec
$global:LASTEXITCODE = 0
exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"
exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"

View File

@@ -21,11 +21,11 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
@@ -107,16 +107,21 @@ steps:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { yarn electron $(VSCODE_ARCH) }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" }
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

View File

@@ -23,14 +23,13 @@ exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Version true $Zip }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $Version true $SystemExe }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $Version true $UserExe }
exec { node build/azure-pipelines/common/publish.js $Quality "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $Version true $ServerZip }
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Zip }
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $SystemExe }
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $UserExe }
exec { node build/azure-pipelines/common/createAsset.js "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $ServerZip }
# publish hockeyapp symbols
$hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }

View File

@@ -0,0 +1,280 @@
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin azuredatastudio`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
exec { git config user.email "andresse@microsoft.com" }
exec { git config user.name "AzureDataStudio" }
displayName: Prepare tooling
- powershell: |
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:CHILD_CONCURRENCY="1"
exec { yarn --frozen-lockfile }
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/mixin }
displayName: Mix in quality
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "install-sqltoolsservice" }
displayName: Install sqltoolsservice
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "package-rebuild-extensions" }
exec { yarn gulp "vscode-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { .\scripts\test-unstable.bat --build --coverage --reporter mocha-junit-reporter }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
displayName: Run unstable tests
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'Sign out code'
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(agent.builddirectory)/azuredatastudio-win32-x64'
Pattern: '*.exe,*.node,resources/app/node_modules.asar.unpacked/*.dll,swiftshader/*.dll,d3dcompiler_47.dll,libGLESv2.dll,ffmpeg.dll,libEGL.dll,Microsoft.SqlTools.Hosting.dll,Microsoft.SqlTools.ResourceProvider.Core.dll,Microsoft.SqlTools.ResourceProvider.DefaultImpl.dll,MicrosoftSqlToolsCredentials.dll,MicrosoftSqlToolsServiceLayer.dll,Newtonsoft.Json.dll,SqlSerializationService.dll,SqlToolsResourceProviderService.dll,Microsoft.SqlServer.*.dll,Microsoft.Data.Tools.Sql.BatchParser.dll'
signConfigType: inlineSignParams
inlineOperation: |
[
  {
    "keyCode": "CP-230012",
    "operationSetCode": "SigntoolSign",
    "parameters": [
    {
      "parameterName": "OpusName",
      "parameterValue": "Azure Data Studio"
    },
    {
      "parameterName": "OpusInfo",
      "parameterValue": "https://github.com/microsoft/azuredatastudio"
    },
    {
      "parameterName": "PageHash",
      "parameterValue": "/NPH"
    },
    {
      "parameterName": "FileDigest",
      "parameterValue": "/fd sha256"
    },
    {
      "parameterName": "TimeStamp",
      "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
    }
    ],
    "toolName": "signtool.exe",
    "toolVersion": "6.2.9304.0"
  },
  {
    "keyCode": "CP-230012",
    "operationSetCode": "SigntoolVerify",
    "parameters": [
    {
      "parameterName": "VerifyAll",
      "parameterValue": "/all"
    }
],
    "toolName": "signtool.exe",
    "toolVersion": "6.2.9304.0"
  }
]
SessionTimeout: 600
MaxConcurrency: 5
MaxRetryAttempts: 20
condition: and(succeeded(), eq(variables['signed'], true))
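# Note that both ESRP signing steps in this job only run when the 'signed' pipeline variable is set to true.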
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-win32-x64-user-setup" }
exec { yarn gulp "vscode-win32-x64-system-setup" }
exec { yarn gulp "vscode-win32-x64-archive" }
displayName: Archive & User & System setup
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'Sign installers'
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '.build'
Pattern: '*.exe'
signConfigType: inlineSignParams
inlineOperation: |
[
  {
    "keyCode": "CP-230012",
    "operationSetCode": "SigntoolSign",
    "parameters": [
    {
      "parameterName": "OpusName",
      "parameterValue": "Azure Data Studio"
    },
    {
      "parameterName": "OpusInfo",
      "parameterValue": "https://github.com/microsoft/azuredatastudio"
    },
    {
      "parameterName": "PageHash",
      "parameterValue": "/NPH"
    },
    {
      "parameterName": "FileDigest",
      "parameterValue": "/fd sha256"
    },
    {
      "parameterName": "TimeStamp",
      "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
    }
    ],
    "toolName": "signtool.exe",
    "toolVersion": "6.2.9304.0"
  },
  {
    "keyCode": "CP-230012",
    "operationSetCode": "SigntoolVerify",
    "parameters": [
    {
      "parameterName": "VerifyAll",
      "parameterValue": "/all"
    }
],
    "toolName": "signtool.exe",
    "toolVersion": "6.2.9304.0"
  }
]
SessionTimeout: 600
MaxConcurrency: 5
MaxRetryAttempts: 20
condition: and(succeeded(), eq(variables['signed'], true))
- task: ArchiveFiles@2
displayName: 'Archive build scripts source'
inputs:
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
archiveType: tar
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: build scripts source'
inputs:
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
ArtifactName: source
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
.\build\azure-pipelines\win32\createDrop.ps1
displayName: Create Drop
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: 'test-results.xml'
searchFolder: '$(Build.SourcesDirectory)'
failTaskOnFailedTests: true
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishTestResults@2
displayName: 'Publish Integration and Smoke Test Results'
inputs:
testResultsFiles: '*.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
mergeTestResults: true
failTaskOnFailedTests: true
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true

View File

@@ -0,0 +1,106 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:CHILD_CONCURRENCY="1"
exec { git clean -fxd }
displayName: Clean repo
- task: DownloadPipelineArtifact@2
inputs:
buildType: 'current'
targetPath: '$(Build.SourcesDirectory)\.build'
artifactName: drop
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:CHILD_CONCURRENCY="1"
exec { yarn --frozen-lockfile }
displayName: Install dependencies
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { .\node_modules\7zip\7zip-lite\7z.exe x $(Build.SourcesDirectory)\.build\win32-x64/archive/azuredatastudio-win32-x64.zip -o$(Agent.TempDirectory)\azuredatastudio-win32-x64 }
displayName: Unzip artifact
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: SqlToolsSecretStore'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: SqlToolsSecretStore
SecretsFilter: 'ads-integration-test-azure-server,ads-integration-test-azure-server-password,ads-integration-test-azure-server-username,ads-integration-test-bdc-server,ads-integration-test-bdc-server-password,ads-integration-test-bdc-server-username,ads-integration-test-standalone-server,ads-integration-test-standalone-server-password,ads-integration-test-standalone-server-username'
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
displayName: Run stable tests
env:
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
STANDALONE_SQL: $(ads-integration-test-standalone-server)
AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
AZURE_SQL: $(ads-integration-test-azure-server)
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
displayName: Run release tests
env:
ADS_TEST_GREP: (.*@REL@|integration test setup)
ADS_TEST_INVERT_GREP: 0
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
STANDALONE_SQL: $(ads-integration-test-standalone-server)
AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
AZURE_SQL: $(ads-integration-test-azure-server)
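# ADS_TEST_GREP above appears to restrict this step to tests tagged @REL@ (plus the integration test setup), with ADS_TEST_INVERT_GREP=0 keeping the match non-inverted -- inferred from the variable names, not from documented behavior.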
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; .\scripts\sql-test-integration-unstable.bat }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
displayName: Run unstable integration tests
env:
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
STANDALONE_SQL: $(ads-integration-test-standalone-server)
AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
AZURE_SQL: $(ads-integration-test-azure-server)

View File

@@ -0,0 +1,29 @@
Param(
[string]$sourcesDir,
[string]$artifactsDir,
[string]$storageKey,
[string]$documentDbKey
)
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
$ExeName = "AzureDataStudioSetup.exe"
$SystemExe = "$artifactsDir\win32-x64\system-setup\$ExeName"
$UserExe = "$artifactsDir\win32-x64\user-setup\$ExeName"
$UserExeName = "AzureDataStudioUserSetup.exe"
$ZipName = "azuredatastudio-win32-x64.zip"
$Zip = "$artifactsDir\win32-x64\archive\$ZipName"
$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit
$assetPlatform = "win32-x64"
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-archive" archive $ZipName $Version true $Zip $CommitId
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform" setup $ExeName $Version true $SystemExe $CommitId
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-user" setup $UserExeName $Version true $UserExe $CommitId

View File

@@ -1,7 +1,7 @@
[
{
"name": "Microsoft.sqlservernotebook",
"version": "0.2.1",
"version": "0.3.3",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
]

View File

@@ -1,2 +1,7 @@
[
{
"name": "Microsoft.sqlservernotebook",
"version": "0.3.3",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
]

View File

@@ -41,12 +41,7 @@ var editorEntryPoints = [
];
var editorResources = [
'out-build/vs/{base,editor}/**/*.{svg,png}',
'!out-build/vs/base/browser/ui/splitview/**/*',
'!out-build/vs/base/browser/ui/toolbar/**/*',
'!out-build/vs/base/browser/ui/octiconLabel/**/*',
'!out-build/vs/workbench/**',
'!**/test/**'
'out-editor-build/vs/base/browser/ui/codiconLabel/**/*.ttf'
];
var BUNDLED_FILE_HEADER = [
@@ -62,7 +57,6 @@ var BUNDLED_FILE_HEADER = [
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
const extractEditorSrcTask = task.define('extract-editor-src', () => {
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
const apiusages = monacoapi.execute().usageContent;
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
standalone.extractEditor({
@@ -76,25 +70,15 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
apiusages,
extrausages
],
typings: [
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require-monaco.d.ts',
"typings/lib.es2018.promise.d.ts",
'vs/monaco.d.ts'
],
libs: [
`lib.es5.d.ts`,
`lib.dom.d.ts`,
`lib.webworker.importscripts.d.ts`
],
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
},
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
destRoot: path.join(root, 'out-editor-src')
destRoot: path.join(root, 'out-editor-src'),
redirects: []
});
});
@@ -145,18 +129,70 @@ const createESMSourcesAndResourcesTask = task.define('extract-editor-esm', () =>
});
const compileEditorESMTask = task.define('compile-editor-esm', () => {
console.log(`Launching the TS compiler at ${path.join(__dirname, '../out-editor-esm')}...`);
let result;
if (process.platform === 'win32') {
const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
console.log(result.stderr.toString());
} else {
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
console.log(result.stderr.toString());
}
console.log(result.stdout.toString());
console.log(result.stderr.toString());
if (result.status !== 0) {
console.log(`The TS Compilation failed, preparing analysis folder...`);
const destPath = path.join(__dirname, '../../vscode-monaco-editor-esm-analysis');
return util.rimraf(destPath)().then(() => {
fs.mkdirSync(destPath);
// initialize a new repository
cp.spawnSync(`git`, [`init`], {
cwd: destPath
});
// build a list of files to copy
const files = util.rreddir(path.join(__dirname, '../out-editor-esm'));
// copy files from src
for (const file of files) {
const srcFilePath = path.join(__dirname, '../src', file);
const dstFilePath = path.join(destPath, file);
if (fs.existsSync(srcFilePath)) {
util.ensureDir(path.dirname(dstFilePath));
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
fs.writeFileSync(dstFilePath, contents);
}
}
// create an initial commit to diff against
cp.spawnSync(`git`, [`add`, `.`], {
cwd: destPath
});
// create the commit
cp.spawnSync(`git`, [`commit`, `-m`, `"original sources"`, `--no-gpg-sign`], {
cwd: destPath
});
// copy files from esm
for (const file of files) {
const srcFilePath = path.join(__dirname, '../out-editor-esm', file);
const dstFilePath = path.join(destPath, file);
if (fs.existsSync(srcFilePath)) {
util.ensureDir(path.dirname(dstFilePath));
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
fs.writeFileSync(dstFilePath, contents);
}
}
console.log(`Open in VS Code the folder at '${destPath}' and you can analyze the compilation error`);
throw new Error('Standalone Editor compilation failed. If this is the build machine, simply launch `yarn run gulp editor-distro` on your machine to further analyze the compilation problem.');
});
}
});

View File

@@ -21,10 +21,15 @@ const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
const _ = require('underscore');
const ext = require('./lib/extensions');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
// {{SQL CARBON EDIT}}
const sqlLocalizedExtensions = [
'dacpac',
'schema-compare'
];
// {{SQL CARBON EDIT}}
const compilations = glob.sync('**/tsconfig.json', {
cwd: extensionsPath,
@@ -37,38 +42,38 @@ const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile);
const relativeDirname = path.dirname(tsconfigFile);
const tsconfig = require(absolutePath);
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
tsOptions.verbose = false;
tsOptions.sourceMap = true;
const overrideOptions = {};
overrideOptions.sourceMap = true;
const name = relativeDirname.replace(/\//g, '-');
const root = path.join('extensions', relativeDirname);
const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**');
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
const out = path.join(root, 'out');
const baseUrl = getBaseUrl(out);
let headerId, headerOut;
let index = relativeDirname.indexOf('/');
if (index < 0) {
headerId = 'vscode.' + relativeDirname;
headerId = 'microsoft.' + relativeDirname; // {{SQL CARBON EDIT}}
headerOut = 'out';
} else {
headerId = 'vscode.' + relativeDirname.substr(0, index);
headerId = 'microsoft.' + relativeDirname.substr(0, index); // {{SQL CARBON EDIT}}
headerOut = relativeDirname.substr(index + 1) + '/out';
}
function createPipeline(build, emitError) {
const reporter = createReporter();
tsOptions.inlineSources = !!build;
tsOptions.base = path.dirname(absolutePath);
overrideOptions.inlineSources = Boolean(build);
overrideOptions.base = path.dirname(absolutePath);
const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
const compilation = tsb.create(absolutePath, overrideOptions, false, err => reporter(err.toString()));
return function () {
const pipeline = function () {
const input = es.through();
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
const output = input
@@ -98,15 +103,20 @@ const tasks = compilations.map(function (tsconfigFile) {
return es.duplex(input, output);
};
}
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
// add src-stream for project files
pipeline.tsProjectSrc = () => {
return compilation.src(srcOpts);
};
return pipeline;
}
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(false, true);
const input = gulp.src(src, srcOpts);
const pipeline = createPipeline(sqlLocalizedExtensions.includes(name), true); // {{SQL CARBON EDIT}}
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
const input = es.merge(nonts, pipeline.tsProjectSrc());
return input
.pipe(pipeline())
@@ -115,8 +125,9 @@ const tasks = compilations.map(function (tsconfigFile) {
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(false);
const input = gulp.src(src, srcOpts);
const watchInput = watcher(src, srcOpts);
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
const input = es.merge(nonts, pipeline.tsProjectSrc());
const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } });
return watchInput
.pipe(util.incremental(pipeline, input))
@@ -125,7 +136,8 @@ const tasks = compilations.map(function (tsconfigFile) {
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(true, true);
const input = gulp.src(src, srcOpts);
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
const input = es.merge(nonts, pipeline.tsProjectSrc());
return input
.pipe(pipeline())
@@ -156,8 +168,8 @@ const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimr
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
cleanExtensionsBuildTask,
task.define('bundle-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))),
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build'))),
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build')))
));
gulp.task(compileExtensionsBuildTask);
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;

View File

@@ -17,6 +17,7 @@ const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
const task = require('./lib/task');
/**
* Hygiene works by creating cascading subsets of all our files and
@@ -55,8 +56,10 @@ const indentationFilter = [
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/assert.js',
'!build/testSetup.js',
// except specific folders
'!test/automation/out/**',
'!test/smoke/out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
@@ -70,7 +73,7 @@ const indentationFilter = [
'!**/yarn-error.log',
// except multiple specific folders
'!**/octicons/**',
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
@@ -100,7 +103,8 @@ const indentationFilter = [
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts'
];
const copyrightFilter = [
@@ -121,6 +125,7 @@ const copyrightFilter = [
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!**/promise-polyfill/polyfill.js',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
@@ -131,29 +136,30 @@ const copyrightFilter = [
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/objectExplorerNodeProvider/webhdfs.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/parts/notebook/common/models/url.ts',
'!src/sql/workbench/parts/notebook/browser/models/renderMimeInterfaces.ts',
'!src/sql/workbench/parts/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/parts/notebook/browser/models/mimemodel.ts',
'!src/sql/workbench/parts/notebook/browser/cellViews/media/*.css',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/contrib/notebook/browser/models/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/contrib/notebook/browser/models/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/parts/notebook/common/models/nbformat.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/contrib/notebook/common/models/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/parts/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
@@ -188,25 +194,44 @@ const tslintBaseFilter = [
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
// {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}},
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}},
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts' // {{SQL CARBON EDIT}} skip this because we have no plans to touch this and it's not ours
];
// {{SQL CARBON EDIT}}
const useStrictFilter = [
'src/**'
];
const sqlFilter = [
'src/sql/**'
'src/sql/**',
'extensions/**',
// Ignore VS Code extensions
'!extensions/bat/**',
'!extensions/configuration-editing/**',
'!extensions/docker/**',
'!extensions/extension-editing/**',
'!extensions/git/**',
'!extensions/git-ui/**',
'!extensions/image-preview/**',
'!extensions/insights-default/**',
'!extensions/json/**',
'!extensions/json-language-features/**',
'!extensions/markdown-basics/**',
'!extensions/markdown-language-features/**',
'!extensions/merge-conflict/**',
'!extensions/powershell/**',
'!extensions/python/**',
'!extensions/r/**',
'!extensions/theme-*/**',
'!extensions/vscode-*/**',
'!extensions/xml/**',
'!extensions/xml-language-features/**',
'!extensions/yarml/**',
];
// {{SQL CARBON EDIT}}
const tslintCoreFilter = [
'src/**/*.ts',
'test/**/*.ts',
'!extensions/**/*.ts',
'!test/automation/**',
'!test/smoke/**',
...tslintBaseFilter
];
@@ -215,6 +240,7 @@ const tslintExtensionsFilter = [
'extensions/**/*.ts',
'!src/**/*.ts',
'!test/**/*.ts',
'test/automation/**/*.ts',
...tslintBaseFilter
];
@@ -222,9 +248,16 @@ const tslintHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!src/vs/workbench/contrib/extensions/browser/extensionTipsService.ts', // {{SQL CARBON EDIT}} known formatting issue due to commenting out code
...tslintBaseFilter
];
const fileLengthFilter = filter([
'**',
'!extensions/import/*.docx',
'!extensions/admin-tool-ext-win/license/**'
], {restore: true});
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -257,6 +290,33 @@ gulp.task('tslint', () => {
]).pipe(es.through());
});
function checkPackageJSON(actualPath) {
const actual = require(path.join(__dirname, '..', actualPath));
const rootPackageJSON = require('../package.json');
for (let depName in actual.dependencies) {
const depVersion = actual.dependencies[depName];
const rootDepVersion = rootPackageJSON.dependencies[depName];
if (!rootDepVersion) {
// missing in root is allowed
continue;
}
if (depVersion !== rootDepVersion) {
this.emit('error', `The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`);
}
}
}
const checkPackageJSONTask = task.define('check-package-json', () => {
return gulp.src('package.json')
.pipe(es.through(function() {
checkPackageJSON.call(this, 'remote/package.json');
checkPackageJSON.call(this, 'remote/web/package.json');
}));
});
gulp.task(checkPackageJSONTask);
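// checkPackageJSONTask is wired into the 'hygiene' task further down so that the dependency versions in remote/package.json and remote/web/package.json stay aligned with the root package.json.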
function hygiene(some) {
let errorCount = 0;
@@ -306,23 +366,6 @@ function hygiene(some) {
this.emit('data', file);
});
// {{SQL CARBON EDIT}}
// Check for unnecessary 'use strict' lines. These are automatically added by the alwaysStrict compiler option so don't need to be added manually
const useStrict = es.through(function (file) {
const lines = file.__lines;
// Only take the first 10 lines to reduce false positives- the compiler will throw an error if it's not the first non-comment line in a file
// (10 is used to account for copyright and extraneous newlines)
lines.slice(0, 10).forEach((line, i) => {
if (/\s*'use\s*strict\s*'/.test(line)) {
console.error(file.relative + '(' + (i + 1) + ',1): Unnecessary \'use strict\' - this is already added by the compiler');
errorCount++;
}
});
this.emit('data', file);
});
// {{SQL CARBON EDIT}} END
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: false,
@@ -353,6 +396,23 @@ function hygiene(some) {
});
});
const filelength = es.through(function (file) {
const fileName = path.basename(file.relative);
const fileDir = path.dirname(file.relative);
// Check that the filename is < 50 characters (basename gets the filename with extension).
if (fileName.length > 50) {
console.error(`File name '${fileName}' under ${fileDir} is too long. Rename file to have less than 50 characters.`);
errorCount++;
}
if (file.relative.length > 150) {
console.error(`File path ${file.relative} exceeds the acceptable path length. Rename the path to have less than 150 characters.`);
errorCount++;
}
this.emit('data', file);
});
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
const tslintOptions = { fix: false, formatter: 'json' };
const tsLinter = new tslint.Linter(tslintOptions);
@@ -366,25 +426,32 @@ function hygiene(some) {
let input;
if (Array.isArray(some) || typeof some === 'string' || !some) {
input = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true });
const options = { base: '.', follow: true, allowEmpty: true };
if (some) {
input = vfs.src(some, options).pipe(filter(all)); // split this up to not unnecessarily filter all a second time
} else {
input = vfs.src(all, options);
}
} else {
input = some;
}
// {{SQL CARBON EDIT}} Linting for SQL
const tslintSqlConfiguration = tslint.Configuration.findConfiguration('tslint-sql.json', '.');
const tslintSqlOptions = { fix: false, formatter: 'json' };
const sqlTsLinter = new tslint.Linter(tslintSqlOptions);
const sqlTsl = es.through(function (file) {
const sqlTsl = es.through(function (file) { //TODO restore
const contents = file.contents.toString('utf8');
sqlTsLinter.lint(file.relative, contents, tslintSqlConfiguration.results);
this.emit('data', file);
});
const productJsonFilter = filter('product.json', { restore: true });
const result = input
.pipe(fileLengthFilter)
.pipe(filelength)
.pipe(fileLengthFilter.restore)
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(productJsonFilter)
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
@@ -394,15 +461,16 @@ function hygiene(some) {
.pipe(filter(copyrightFilter))
.pipe(copyrights);
const typescript = result
let typescript = result
.pipe(filter(tslintHygieneFilter))
.pipe(formatting)
.pipe(tsl)
// {{SQL CARBON EDIT}}
.pipe(filter(useStrictFilter))
.pipe(useStrict)
.pipe(filter(sqlFilter))
.pipe(sqlTsl);
.pipe(formatting);
if (!process.argv.some(arg => arg === '--skip-tslint')) {
typescript = typescript.pipe(tsl);
typescript = typescript
.pipe(filter(sqlFilter)) // {{SQL CARBON EDIT}}
.pipe(sqlTsl);
}
const javascript = result
.pipe(filter(eslintFilter))
@@ -488,7 +556,7 @@ function createGitIndexVinyls(paths) {
.then(r => r.filter(p => !!p));
}
gulp.task('hygiene', () => hygiene());
gulp.task('hygiene', task.series(checkPackageJSONTask, () => hygiene()));
// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {

View File

@@ -1,79 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
// {{SQL CARBON EDIT}}
const jeditor = require('gulp-json-editor');
const product = require('../product.json');
gulp.task('mixin', function () {
// {{SQL CARBON EDIT}}
const updateUrl = process.env['SQLOPS_UPDATEURL'];
if (!updateUrl) {
console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
return;
}
const quality = process.env['VSCODE_QUALITY'];
if (!quality) {
console.log('Missing VSCODE_QUALITY, skipping mixin');
return;
}
// {{SQL CARBON EDIT}} - apply ADS insiders values if needed
let newValues = {
"nameShort": product.nameShort,
"nameLong": product.nameLong,
"applicationName": product.applicationName,
"dataFolderName": product.dataFolderName,
"win32MutexName": product.win32MutexName,
"win32DirName": product.win32DirName,
"win32NameVersion": product.win32NameVersion,
"win32RegValueName": product.win32RegValueName,
"win32AppId": product.win32AppId,
"win32x64AppId": product.win32x64AppId,
"win32UserAppId": product.win32UserAppId,
"win32x64UserAppId": product.win32x64UserAppId,
"win32AppUserModelId": product.win32AppUserModelId,
"win32ShellNameShort": product.win32ShellNameShort,
"darwinBundleIdentifier": product.darwinBundleIdentifier,
"updateUrl": updateUrl,
"quality": quality,
"extensionsGallery": {
"serviceUrl": 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json'
}
};
if (quality === 'insider') {
let dashSuffix = '-insiders';
let dotSuffix = '.insiders';
let displaySuffix = ' - Insiders';
newValues.extensionsGallery.serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
newValues.nameShort += dashSuffix;
newValues.nameLong += displaySuffix;
newValues.applicationName += dashSuffix;
newValues.dataFolderName += dashSuffix;
newValues.win32MutexName += dashSuffix;
newValues.win32DirName += displaySuffix;
newValues.win32NameVersion += displaySuffix;
newValues.win32RegValueName += dashSuffix;
newValues.win32AppId = "{{9F0801B2-DEE3-4272-A2C6-FBDF25BAAF0F}";
newValues.win32x64AppId = "{{6748A5FD-29EB-4BA6-B3C6-E7B981B8D6B0}";
newValues.win32UserAppId = "{{0F8CD1ED-483C-40EB-8AD2-8ED784651AA1}";
newValues.win32x64UserAppId += dashSuffix;
newValues.win32AppUserModelId += dotSuffix;
newValues.win32ShellNameShort += displaySuffix;
newValues.darwinBundleIdentifier += dotSuffix;
}
return gulp.src('./product.json')
.pipe(jeditor(newValues))
.pipe(gulp.dest('.'));
});

View File

@@ -4,7 +4,6 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const util = require('./lib/util');
const tsfmt = require('typescript-formatter');
@@ -12,8 +11,13 @@ const es = require('event-stream');
const filter = require('gulp-filter');
const del = require('del');
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
const platform = require('service-downloader/out/platform').PlatformInformation;
const path = require('path');
const ext = require('./lib/extensions');
const task = require('./lib/task');
const glob = require('glob');
const vsce = require('vsce');
const mkdirp = require('mkdirp');
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
@@ -22,73 +26,79 @@ gulp.task('fmt', () => formatStagedFiles());
const formatFiles = (some) => {
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
replace: true,
tsfmt: true,
tslint: true,
tsconfig: true
// verbose: true
}).then(result => {
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
if (result.error) {
console.error(result.message);
}
cb(null, file);
tsfmt.processString(file.path, file.contents.toString('utf8'), {
replace: true,
tsfmt: true,
tslint: true,
tsconfig: true
// verbose: true
}).then(result => {
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
if (result.error) {
console.error(result.message);
}
cb(null, file);
}, err => {
cb(err);
});
}, err => {
cb(err);
});
return gulp.src(some, { base: '.' })
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(formatting);
});
return gulp.src(some, {
base: '.'
})
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(formatting);
};
const formatStagedFiles = () => {
const cp = require('child_process');
cp.exec('git diff --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
cp.exec('git diff --name-only', {
maxBuffer: 2000 * 1024
}, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
cp.exec('git diff --cached --name-only', {
maxBuffer: 2000 * 1024
}, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
};
function installService() {
let config = require('../extensions/mssql/src/config.json');
return platformInfo.getCurrent().then(p => {
let config = require('../extensions/mssql/config.json');
return platform.getCurrent().then(p => {
let runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
@@ -108,25 +118,50 @@ gulp.task('install-sqltoolsservice', () => {
return installService();
});
function installSsmsMin() {
const config = require('../extensions/admin-tool-ext-win/src/config.json');
return platformInfo.getCurrent().then(p => {
const runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
var installer = new serviceDownloader(config);
const serviceInstallFolder = installer.getInstallDirectory(runtime);
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
return del(serviceCleanupFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});
}
gulp.task('install-ssmsmin', () => {
return installSsmsMin();
const config = require('../extensions/admin-tool-ext-win/config.json');
const runtime = 'Windows_64'; // admin-tool-ext-win is a Windows-only extension and we only ship a 64-bit version, so the runtime is locked accordingly
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
var installer = new serviceDownloader(config);
const serviceInstallFolder = installer.getInstallDirectory(runtime);
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
return del(serviceCleanupFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('Failed to delete the install folder. Error: ' + delError);
});
});
const root = path.dirname(__dirname);
gulp.task('package-external-extensions', task.series(
task.define('bundle-external-extensions-build', () => ext.packageExternalExtensionsStream().pipe(gulp.dest('.build/external'))),
task.define('create-external-extension-vsix-build', () => {
const vsixes = glob.sync('.build/external/extensions/*/package.json').map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
}).map(element => {
const pkgJson = require(path.join(element.path, 'package.json'));
const vsixDirectory = path.join(root, '.build', 'extensions');
mkdirp.sync(vsixDirectory);
const packagePath = path.join(vsixDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
return vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
return Promise.all(vsixes);
})
));
gulp.task('package-rebuild-extensions', task.series(
task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
task.define('rebuild-extensions-build', () => ext.packageRebuildExtensionsStream().pipe(gulp.dest('.build'))),
));

View File

@@ -29,9 +29,8 @@ const packageJson = require('../package.json');
const product = require('../product.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
const ext = require('./lib/extensions'); // {{SQL CARBON EDIT}}
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const { config } = require('./lib/electron');
const createAsar = require('./lib/asar').createAsar;
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
@@ -60,8 +59,7 @@ const nodeModules = [
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
buildfile.base,
buildfile.serviceWorker,
buildfile.workbench,
buildfile.workbenchDesktop,
buildfile.code
]);
@@ -79,7 +77,7 @@ const vscodeResources = [
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/base/browser/ui/codiconLabel/codicon/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
@@ -87,15 +85,13 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
'out-build/vs/platform/files/**/*.exe',
'out-build/vs/platform/files/**/*.md',
'out-build/vs/code/electron-browser/workbench/**',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js',
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
// {{SQL CARBON EDIT}}
'out-build/sql/workbench/electron-browser/splashscreen/*',
'out-build/sql/workbench/electron-browser/splashscreen/*', // {{SQL CARBON EDIT}} Start
'out-build/sql/**/*.{svg,png,cur,html}',
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',
@@ -113,7 +109,8 @@ const vscodeResources = [
'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg',
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
'out-build/sql/setup.js',
'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
'out-build/vs/platform/auth/common/auth.css',
'!**/test/**'
];
@@ -125,6 +122,7 @@ const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
out: 'out-vscode',
inlineAmdImages: true,
bundleInfo: undefined
})
));
@@ -144,73 +142,6 @@ const minifyVSCodeTask = task.define('minify-vscode', task.series(
));
gulp.task(minifyVSCodeTask);
// Package
// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));
/**
* Compute checksums for some files.
*
@@ -265,10 +196,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
.pipe(util.setExecutableBit(['**/*.sh']));
// {{SQL CARBON EDIT}}
ext.packageBuiltInExtensions();
const extensions = gulp.src('.build/extensions/**', { base: '.build', dot: true });
const extensions = gulp.src(['.build/extensions/**', '!.build/extensions/node_modules/**'], { base: '.build', dot: true }); // {{SQL CARBON EDIT}} - don't package the node_modules directory
const sources = es.merge(src, extensions)
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));
@@ -468,7 +396,7 @@ gulp.task(task.define(
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToExtensions = '.build/extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
@@ -489,7 +417,7 @@ gulp.task(task.define(
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToExtensions = '.build/extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(

View File

@@ -43,7 +43,8 @@ function prepareDebPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
@@ -136,6 +137,7 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
@@ -206,21 +208,25 @@ function prepareSnapPackage(arch) {
const destination = getSnapBuildPath(arch);
return function () {
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
.pipe(rename(`snap/gui/${product.applicationName}.desktop`));
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
.pipe(rename(`snap/gui/${product.applicationName}-url-handler.desktop`));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@EXEC@@', `${product.applicationName} --force-user-env`))
.pipe(replace('@@ICON@@', `\${SNAP}/meta/gui/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
// An icon that is placed in snap/gui will be placed into meta/gui verbatim.
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
.pipe(rename(`snap/gui/${product.linuxIconName}.png`));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
@@ -241,7 +247,8 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
// Default target for snapcraft runs: pull, build, stage and prime, and finally assembles the snap.
return shell.task(`cd ${snapBuildPath} && snapcraft`);
}
const BUILD_TARGETS = [

View File

@@ -6,150 +6,11 @@
'use strict';
const gulp = require('gulp');
const path = require('path');
const es = require('event-stream');
const util = require('./lib/util');
const task = require('./lib/task');
const common = require('./lib/optimize');
const product = require('../product.json');
const rename = require('gulp-rename');
const filter = require('gulp-filter');
const json = require('gulp-json-editor');
const _ = require('underscore');
const deps = require('./dependencies');
const vfs = require('vinyl-fs');
const packageJson = require('../package.json');
const { compileBuildTask } = require('./gulpfile.compile');
const REPO_ROOT = path.dirname(__dirname);
const commit = util.getVersion(REPO_ROOT);
const BUILD_ROOT = path.dirname(REPO_ROOT);
const WEB_FOLDER = path.join(REPO_ROOT, 'remote', 'web');
const noop = () => { return Promise.resolve(); };
const productionDependencies = deps.getProductionDependencies(WEB_FOLDER);
const nodeModules = Object.keys(product.dependencies || {})
.concat(_.uniq(productionDependencies.map(d => d.name)));
const vscodeWebResources = [
// Workbench
'out-build/vs/{base,platform,editor,workbench}/**/*.{svg,png,html}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/**/markdown.css',
// Webview
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
// Extension Worker
'out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js',
// Excludes
'!out-build/vs/**/{node,electron-browser,electron-main}/**',
'!out-build/vs/editor/standalone/**',
'!out-build/vs/workbench/**/*-tb.png',
'!**/test/**'
];
const buildfile = require('../src/buildfile');
const vscodeWebEntryPoints = [
buildfile.workbenchWeb,
buildfile.serviceWorker,
buildfile.workerExtensionHost,
buildfile.keyboardMaps,
buildfile.base
];
const optimizeVSCodeWebTask = task.define('optimize-vscode-web', task.series(
util.rimraf('out-vscode-web'),
common.optimizeTask({
src: 'out-build',
entryPoints: _.flatten(vscodeWebEntryPoints),
otherSources: [],
resources: vscodeWebResources,
loaderConfig: common.loaderConfig(nodeModules),
out: 'out-vscode-web',
bundleInfo: undefined
})
));
const minifyVSCodeWebTask = task.define('minify-vscode-web', task.series(
optimizeVSCodeWebTask,
util.rimraf('out-vscode-web-min'),
common.minifyTask('out-vscode-web', `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`)
));
gulp.task(minifyVSCodeWebTask);
function packageTask(sourceFolderName, destinationFolderName) {
const destination = path.join(BUILD_ROOT, destinationFolderName);
return () => {
const src = gulp.src(sourceFolderName + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + sourceFolderName), 'out'); }))
.pipe(filter(['**', '!**/*.js.map']));
const sources = es.merge(src);
let version = packageJson.version;
const quality = product.quality;
if (quality && quality !== 'stable') {
version += '-' + quality;
}
const name = product.nameShort;
const packageJsonStream = gulp.src(['remote/web/package.json'], { base: 'remote/web' })
.pipe(json({ name, version }));
const date = new Date().toISOString();
const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json({ commit, date }));
const license = gulp.src(['remote/LICENSE'], { base: 'remote' });
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(REPO_ROOT, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`, `!${d}/.bin/**`]));
const deps = gulp.src(dependenciesSrc, { base: 'remote/web', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')));
const favicon = gulp.src('resources/server/favicon.ico', { base: 'resources/server' });
let all = es.merge(
packageJsonStream,
productJsonStream,
license,
sources,
deps,
favicon
);
let result = all
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions());
return result.pipe(vfs.dest(destination));
};
}
const dashed = (str) => (str ? `-${str}` : ``);
['', 'min'].forEach(minified => {
const sourceFolderName = `out-vscode-web${dashed(minified)}`;
const destinationFolderName = `vscode-web`;
const vscodeWebTaskCI = task.define(`vscode-web${dashed(minified)}-ci`, task.series(
minified ? minifyVSCodeWebTask : optimizeVSCodeWebTask,
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
packageTask(sourceFolderName, destinationFolderName)
));
gulp.task(vscodeWebTaskCI);
const vscodeWebTask = task.define(`vscode-web${dashed(minified)}`, task.series(
compileBuildTask,
vscodeWebTaskCI
));
gulp.task(vscodeWebTask);
});
gulp.task('minify-vscode-web', noop);
gulp.task('vscode-web', noop);
gulp.task('vscode-web-min', noop);
gulp.task('vscode-web-ci', noop);
gulp.task('vscode-web-min-ci', noop);
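As a minimal standalone sketch (not part of the diff), this is how the dashed helper in the task definitions above expands into the four gulp task names, assuming nothing beyond plain TypeScript:

// Illustration only: reproduces the naming convention from the block above.
const dashed = (str: string) => (str ? `-${str}` : ``);

const taskNames: string[] = [];
for (const minified of ['', 'min']) {
    taskNames.push(`vscode-web${dashed(minified)}-ci`); // CI variant, compile step assumed to have run already
    taskNames.push(`vscode-web${dashed(minified)}`);    // full variant, prefixed with compileBuildTask
}
console.log(taskNames); // [ 'vscode-web-ci', 'vscode-web', 'vscode-web-min-ci', 'vscode-web-min' ]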

View File

@@ -23,7 +23,7 @@ const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}}
const buildPath = arch => path.join(path.dirname(repoPath), `azuredatastudio-win32-${arch}`);
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const zipPath = arch => path.join(zipDir(arch), `azuredatastudio-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');

View File

@@ -19,9 +19,17 @@ const ansiColors = require('ansi-colors');
const root = path.dirname(path.dirname(__dirname));
// {{SQL CARBON EDIT}}
const builtInExtensions = require('../builtInExtensions-insiders.json');
const quality = process.env['VSCODE_QUALITY'];
const builtInExtensions = quality && quality === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - END
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
function log() {
if (ENABLE_LOGGING) {
fancyLog.apply(this, arguments);
}
}
function getExtensionPath(extension) {
return path.join(root, '.build', 'builtInExtensions', extension.name);
@@ -46,7 +54,7 @@ function isUpToDate(extension) {
function syncMarketplaceExtension(extension) {
if (isUpToDate(extension)) {
fancyLog(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
return es.readArray([]);
}
@@ -55,13 +63,13 @@ function syncMarketplaceExtension(extension) {
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => fancyLog(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
.on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
}
function syncExtension(extension, controlState) {
switch (controlState) {
case 'disabled':
fancyLog(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
return es.readArray([]);
case 'marketplace':
@@ -69,15 +77,15 @@ function syncExtension(extension, controlState) {
default:
if (!fs.existsSync(controlState)) {
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
return es.readArray([]);
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
return es.readArray([]);
}
fancyLog(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
return es.readArray([]);
}
}
@@ -96,8 +104,8 @@ function writeControlFile(control) {
}
function main() {
fancyLog('Syncronizing built-in extensions...');
fancyLog(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
log('Syncronizing built-in extensions...');
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
const control = readControlFile();
const streams = [];
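The hunks above route every fancy-log call through a single gate so the sync output can be silenced with an environment variable. A minimal self-contained sketch of that pattern, using console.log as a stand-in for fancy-log:

// Logging is suppressed when VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE is set, as in the diff.
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];

function log(...args: unknown[]): void {
    if (ENABLE_LOGGING) {
        console.log(...args); // the build script calls fancyLog here
    }
}

log('Synchronizing built-in extensions...'); // printed only when logging is enabled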

View File

@@ -11,7 +11,6 @@ const bom = require("gulp-bom");
const sourcemaps = require("gulp-sourcemaps");
const tsb = require("gulp-tsb");
const path = require("path");
const _ = require("underscore");
const monacodts = require("../monaco/api");
const nls = require("./nls");
const reporter_1 = require("./reporter");
@@ -22,14 +21,7 @@ const watch = require('./watch');
const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options;
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
let options = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -38,49 +30,46 @@ function getTypeScriptCompilerOptions(src) {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
return options;
}
function createCompile(src, build, emitError) {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
return function (token) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
function pipeline(token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
const input = es.through();
const output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(bom()) // this is required to preserve BOM in test files that loose it otherwise
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
.pipe(compilation(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: overrideOptions.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@@ -95,8 +84,8 @@ exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
const compile = createCompile('src', build);
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
const watchSrc = watch('src/**', { base: 'src' });
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
generator.execute();
return watchSrc
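Two details in the hunks above are easy to miss: the options object is now typed as ts.CompilerOptions, so newLine switches from the 'CRLF'/'LF' strings to the numeric ts.NewLineKind values (0 and 1), and tsb is created from the tsconfig path plus override options. A small sketch of the newLine mapping, assuming the typescript package is available:

import * as ts from 'typescript';
import * as fs from 'fs';

// 0 === ts.NewLineKind.CarriageReturnLineFeed, 1 === ts.NewLineKind.LineFeed
const options: ts.CompilerOptions = {};
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8'))
    ? ts.NewLineKind.CarriageReturnLineFeed // 0
    : ts.NewLineKind.LineFeed;              // 1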

View File

@@ -12,27 +12,21 @@ import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as _ from 'underscore';
import * as monacodts from '../monaco/api';
import * as nls from './nls';
import { createReporter } from './reporter';
import * as util from './util';
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
import ts = require('typescript');
const watch = require('./watch');
const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string) {
function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options: { [key: string]: any };
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
} else {
options = tsconfig.compilerOptions;
}
let options: ts.CompilerOptions = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -41,18 +35,17 @@ function getTypeScriptCompilerOptions(src: string) {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
return options;
}
function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
function createCompile(src: string, build: boolean, emitError?: boolean) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
return function (token?: util.ICancellationToken) {
function pipeline(token?: util.ICancellationToken) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
@@ -61,43 +54,35 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
const input = es.through();
const output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(bom()) // this is required to preserve BOM in test files that loose it otherwise
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
.pipe(compilation(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: overrideOptions.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }),
gulp.src(typesDts),
);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@@ -115,11 +100,8 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
return function () {
const compile = createCompile('src', build);
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts),
);
const watchSrc = watch('src/**', { base: 'src' });
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
generator.execute();

View File

@@ -2,29 +2,88 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const fs = require('fs');
const path = require('path');
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const json = require("gulp-json-editor");
const _ = require("underscore");
const util = require("./util");
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
exports.getElectronVersion = getElectronVersion;
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
exports.config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, exports.config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch) {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}
module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) {
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
process.exit(isUpToDate ? 0 : 1);
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}
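getElectronVersion reads the pinned Electron version out of the repository's .yarnrc with a one-line regular expression. A minimal sketch with hypothetical file contents inlined so it runs on its own:

// Hypothetical .yarnrc contents for illustration; the real file sits at the repository root.
const yarnrc = 'disturl "https://atom.io/download/electron"\ntarget "6.1.5"\nruntime "electron"\n';

const match = /^target "(.*)"$/m.exec(yarnrc);
const electronVersion = match ? match[1] : undefined;
console.log(electronVersion); // '6.1.5'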

build/lib/electron.ts Normal file (100 lines)
View File

@@ -0,0 +1,100 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as filter from 'gulp-filter';
import * as json from 'gulp-json-editor';
import * as _ from 'underscore';
import * as util from './util';
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
export function getElectronVersion(): string {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)![1];
return target;
}
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions: string[], icon: string) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
export const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch: string): () => NodeJS.ReadWriteStream {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch): Promise<void> {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}
if (require.main === module) {
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}

View File

@@ -101,7 +101,7 @@ function fromLocalWebpack(extensionPath) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
const webpackConfig = Object.assign(Object.assign({}, require(webpackConfigPath)), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
@@ -189,9 +189,11 @@ const excludedExtensions = [
'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
const externalExtensions = [
// This is the list of SQL extensions which the source code is included in this repository, but
// they get packaged separately. Adding extension name here, will make the build to create
// a separate vsix package for the extension and the extension will be excluded from the main package.
// Any extension not included here will be installed by default.
'admin-tool-ext-win',
'agent',
'import',
@@ -200,13 +202,15 @@ const sqlBuiltInExtensions = [
'dacpac',
'schema-compare',
'cms',
'query-history'
'query-history',
'liveshare',
'sql-database-projects'
];
// extensions that require a rebuild since they have native parts
const rebuildExtensions = [
'big-data-cluster',
'mssql'
];
// make resource deployment and BDC extension only available in insiders
if (process.env['VSCODE_QUALITY'] === 'stable') {
sqlBuiltInExtensions.push('resource-deployment');
sqlBuiltInExtensions.push('big-data-cluster');
}
const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - End
function packageLocalExtensionsStream() {
@@ -218,7 +222,7 @@ function packageLocalExtensionsStream() {
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add aditional filter
.filter(({ name }) => externalExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} Remove external Extensions with separate package
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
const localExtensions = localExtensionDescriptions.map(extension => {
return fromLocal(extension.path)
@@ -237,66 +241,40 @@ function packageMarketplaceExtensionsStream() {
.pipe(util2.setExecutableBit(['**/*.sh']));
}
exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;
const vfs = require("vinyl-fs");
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
function packageExternalExtensionsStream() {
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
const visxDirectory = path.join(path.dirname(root), 'vsix');
try {
if (!fs.existsSync(visxDirectory)) {
fs.mkdirSync(visxDirectory);
}
}
catch (err) {
// don't fail the build if the output directory already exists
console.warn(err);
}
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
.filter(({ name }) => externalExtensions.indexOf(name) >= 0);
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(builtExtensions);
}
exports.packageBuiltInExtensions = packageBuiltInExtensions;
function packageExtensionTask(extensionName, platform, arch) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
}
else {
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
}
platform = platform || process.platform;
return () => {
const root = path.resolve(path.join(__dirname, '../..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => extensionName === name);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util2.skipDirectories())
.pipe(util2.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
}
exports.packageExtensionTask = packageExtensionTask;
exports.packageExternalExtensionsStream = packageExternalExtensionsStream;
// {{SQL CARBON EDIT}} - End
function cleanRebuildExtensions(root) {
return Promise.all(rebuildExtensions.map(async (e) => {
await util2.rimraf(path.join(root, e))();
})).then();
}
exports.cleanRebuildExtensions = cleanRebuildExtensions;
function packageRebuildExtensionsStream() {
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(builtExtensions);
}
exports.packageRebuildExtensionsStream = packageRebuildExtensionsStream;
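A hedged sketch of how a gulp task could consume the new packageExternalExtensionsStream, which now emits the built extension files as a vinyl stream instead of creating .vsix packages itself (the task name and destination folder below are hypothetical, and the import path assumes the compiled build/lib/extensions module):

import * as gulp from 'gulp';
import * as vfs from 'vinyl-fs';
import { packageExternalExtensionsStream } from './lib/extensions'; // assumption: compiled module path

// Hypothetical wiring, for illustration only.
gulp.task('package-external-extensions', () => {
    return packageExternalExtensionsStream()
        .pipe(vfs.dest('.build/external-extensions')); // hypothetical output folder
});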

View File

@@ -225,9 +225,11 @@ const excludedExtensions = [
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
const externalExtensions = [
// This is the list of SQL extensions which the source code is included in this repository, but
// they get packaged separately. Adding extension name here, will make the build to create
// a separate vsix package for the extension and the extension will be excluded from the main package.
// Any extension not included here will be installed by default.
'admin-tool-ext-win',
'agent',
'import',
@@ -236,15 +238,16 @@ const sqlBuiltInExtensions = [
'dacpac',
'schema-compare',
'cms',
'query-history'
'query-history',
'liveshare',
'sql-database-projects'
];
// make resource deployment and BDC extension only available in insiders
if (process.env['VSCODE_QUALITY'] === 'stable') {
sqlBuiltInExtensions.push('resource-deployment');
sqlBuiltInExtensions.push('big-data-cluster');
}
// extensions that require a rebuild since they have native parts
const rebuildExtensions = [
'big-data-cluster',
'mssql'
];
interface IBuiltInExtension {
name: string;
@@ -267,7 +270,7 @@ export function packageLocalExtensionsStream(): NodeJS.ReadWriteStream {
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add aditional filter
.filter(({ name }) => externalExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} Remove external Extensions with separate package
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
const localExtensions = localExtensionDescriptions.map(extension => {
@@ -289,71 +292,43 @@ export function packageMarketplaceExtensionsStream(): NodeJS.ReadWriteStream {
.pipe(util2.setExecutableBit(['**/*.sh']));
}
// {{SQL CARBON EDIT}}
import * as _ from 'underscore';
import * as vfs from 'vinyl-fs';
export function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
const visxDirectory = path.join(path.dirname(root), 'vsix');
try {
if (!fs.existsSync(visxDirectory)) {
fs.mkdirSync(visxDirectory);
}
} catch (err) {
// don't fail the build if the output directory already exists
console.warn(err);
}
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
.filter(({ name }) => externalExtensions.indexOf(name) >= 0);
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(builtExtensions);
}
// {{SQL CARBON EDIT}} - End
export function cleanRebuildExtensions(root: string): Promise<void> {
return Promise.all(rebuildExtensions.map(async e => {
await util2.rimraf(path.join(root, e))();
})).then();
}
export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
} else {
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
}
export function packageRebuildExtensionsStream(): NodeJS.ReadWriteStream {
const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
platform = platform || process.platform;
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return () => {
const root = path.resolve(path.join(__dirname, '../..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => extensionName === name);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util2.skipDirectories())
.pipe(util2.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
return es.merge(builtExtensions);
}
// {{SQL CARBON EDIT}} - End

View File

@@ -176,6 +176,7 @@ class XLF {
this.buffer.push(line.toString());
}
}
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
@@ -248,7 +249,6 @@ XLF.parse = function (xlfString) {
});
});
};
exports.XLF = XLF;
class Limiter {
constructor(maxDegreeOfParalellism) {
this.maxDegreeOfParalellism = maxDegreeOfParalellism;
@@ -586,7 +586,7 @@ function createXlfFilesForExtensions() {
}
return _xlf;
}
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
if (file.isBuffer()) {
const buffer = file.contents;
const basename = path.basename(file.path);
@@ -609,7 +609,7 @@ function createXlfFilesForExtensions() {
}
else if (basename === 'nls.metadata.json') {
const json = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@@ -912,8 +912,8 @@ function pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language
_coreAndExtensionResources.push(...json.workbench);
// extensions
let extensionsToLocalize = Object.create(null);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
});
@@ -1086,7 +1086,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];
if (externalExtensionId) {
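The localization hunks above only repoint the globs from the source tree (./extensions) to the build output (.build/extensions). As a small aside, this is how the relative path recorded in the XLF entries comes out under the new base (the extension name and file path below are hypothetical):

import * as path from 'path';

const extensionName = 'agent'; // hypothetical
const filePath = `.build/extensions/${extensionName}/out/nls.metadata.json`; // hypothetical
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(filePath));
console.log(relPath); // 'out' — the segment that ends up in extensions/<name>/<relPath>/<file>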

View File

@@ -42,6 +42,10 @@
"name": "vs/workbench/contrib/callHierarchy",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/codeActions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/comments",
"project": "vscode-workbench"
@@ -106,6 +110,10 @@
"name": "vs/workbench/contrib/quickopen",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/userData",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/remote",
"project": "vscode-workbench"
@@ -131,7 +139,7 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/stats",
"name": "vs/workbench/contrib/tags",
"project": "vscode-workbench"
},
{
@@ -170,6 +178,10 @@
"name": "vs/workbench/contrib/webview",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/customEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/welcome",
"project": "vscode-workbench"
@@ -178,10 +190,18 @@
"name": "vs/workbench/contrib/outline",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/userDataSync",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/authToken",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/bulkEdit",
"project": "vscode-workbench"
@@ -230,6 +250,10 @@
"name": "vs/workbench/services/keybinding",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/lifecycle",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/mode",
"project": "vscode-workbench"
@@ -255,7 +279,7 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/workspace",
"name": "vs/workbench/services/workspaces",
"project": "vscode-workbench"
},
{
@@ -273,6 +297,10 @@
{
"name": "vs/workbench/services/notification",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/userData",
"project": "vscode-workbench"
}
]
}

View File

@@ -709,7 +709,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
}
return _xlf;
}
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
if (file.isBuffer()) {
const buffer: Buffer = file.contents as Buffer;
const basename = path.basename(file.path);
@@ -729,7 +729,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
} else if (basename === 'nls.metadata.json') {
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@@ -1053,8 +1053,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
// extensions
let extensionsToLocalize = Object.create(null);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@@ -1253,7 +1253,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];

View File

@@ -5,6 +5,7 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");
const concat = require("gulp-concat");
const minifyCSS = require("gulp-cssnano");
@@ -17,7 +18,7 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const path = require("path");
const pump = require("pump");
const uglifyes = require("uglify-es");
const terser = require("terser");
const VinylFile = require("vinyl");
const bundle = require("./bundle");
const i18n_1 = require("./i18n");
@@ -134,6 +135,14 @@ function optimizeTask(opts) {
if (err || !result) {
return bundlesStream.emit('error', JSON.stringify(err));
}
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
}
catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
const filteredResources = resources.slice();
@@ -169,6 +178,39 @@ function optimizeTask(opts) {
};
}
exports.optimizeTask = optimizeTask;
function inlineAmdImages(src, result) {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
@@ -199,7 +241,7 @@ function uglifyWithCopyrights() {
return false;
};
};
const minify = composer(uglifyes);
const minify = composer(terser);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {
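The new inlineAmdImages pass rewrites registerAndGetAmdImageURL(...) call sites into base64 data URIs at bundle time, and the minifier switches from uglify-es to terser. A minimal sketch of the data-URI conversion for a single image, with the image bytes inlined so the snippet is self-contained (path and contents are hypothetical):

// Stand-in for fs.readFileSync(absoluteLocation) in the real pass.
const fileContents = Buffer.from('<svg xmlns="http://www.w3.org/2000/svg"/>');
const imagePath = 'vs/workbench/contrib/example/media/icon.svg'; // hypothetical

const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// The pass substitutes this parenthesized string literal for the original call expression.
const replacement = `("data:${mime};base64,${fileContents.toString('base64')}")`;
console.log(replacement);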

View File

@@ -6,6 +6,7 @@
'use strict';
import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp';
import * as concat from 'gulp-concat';
import * as minifyCSS from 'gulp-cssnano';
@@ -19,7 +20,7 @@ import * as ansiColors from 'ansi-colors';
import * as path from 'path';
import * as pump from 'pump';
import * as sm from 'source-map';
import * as uglifyes from 'uglify-es';
import * as terser from 'terser';
import * as VinylFile from 'vinyl';
import * as bundle from './bundle';
import { Language, processNlsFiles } from './i18n';
@@ -161,6 +162,10 @@ export interface IOptimizeTaskOpts {
* (emit bundleInfo.json file)
*/
bundleInfo: boolean;
/**
* replace calls to `registerAndGetAmdImageURL` with data uris
*/
inlineAmdImages: boolean;
/**
* (out folder name)
*/
@@ -194,6 +199,14 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); }
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
} catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
@@ -238,6 +251,42 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
};
}
function inlineAmdImages(src: string, result: bundle.IBundleResult): bundle.IBundleResult {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
declare class FileWithCopyright extends VinylFile {
public __hasOurCopyright: boolean;
}
@@ -275,7 +324,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
};
};
const minify = (composer as any)(uglifyes);
const minify = (composer as any)(terser);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {

build/lib/rollup.js Normal file (103 lines)
View File

@@ -0,0 +1,103 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const rollup = require("rollup");
const path = require("path");
// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
async function rollupModule(options) {
const moduleName = options.moduleName;
try {
const inputFile = options.inputFile;
const outputDirectory = options.outputDirectory;
await fs.promises.mkdir(outputDirectory, {
recursive: true
});
const outputFileName = options.outputFileName;
const outputMapName = `${outputFileName}.map`;
const external = options.external || [];
const outputFilePath = path.resolve(outputDirectory, outputFileName);
const outputMapPath = path.resolve(outputDirectory, outputMapName);
const bundle = await rollup.rollup({
input: inputFile,
plugins: [
nodeResolve(),
commonjs(),
],
external,
});
const generatedBundle = await bundle.generate({
name: moduleName,
format: 'umd',
sourcemap: true
});
const result = generatedBundle.output[0];
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
await fs.promises.writeFile(outputFilePath, result.code);
await fs.promises.writeFile(outputMapPath, result.map);
return {
name: moduleName,
result: true
};
}
catch (ex) {
return {
name: moduleName,
result: false,
exception: ex
};
}
}
function rollupAngularSlickgrid(root) {
return new Promise(async (resolve, reject) => {
const result = await rollupModule({
moduleName: 'angular2-slickgrid',
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
outputFileName: 'angular2-slickgrid.umd.js'
});
if (!result.result) {
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
}
resolve();
});
}
exports.rollupAngularSlickgrid = rollupAngularSlickgrid;
function rollupAngular(root) {
return new Promise(async (resolve, reject) => {
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
const tasks = modules.map((module) => {
return rollupModule({
moduleName: `ng.${module}`,
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
outputFileName: `${module}.umd.js`,
external: modules.map(mn => `@angular/${mn}`)
});
});
// array of booleans
const x = await Promise.all(tasks);
const result = x.reduce((prev, current) => {
if (!current.result) {
prev.fails.push(current.name);
prev.exceptions.push(current.exception);
prev.result = false;
}
return prev;
}, {
fails: [],
exceptions: [],
result: true,
});
if (!result.result) {
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
}
resolve();
});
}
exports.rollupAngular = rollupAngular;
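The new rollup helpers are plain async Promise producers, so callers just await them. A hedged sketch of a hypothetical caller (the import path assumes the compiled build/lib/rollup module, and the repo-root computation is an assumption about where the script lives):

import * as path from 'path';
import { rollupAngular, rollupAngularSlickgrid } from './lib/rollup'; // assumption: compiled module path

const repoRoot = path.dirname(__dirname); // assumption: one directory below the repository root

async function bundleAngularDeps(): Promise<void> {
    await rollupAngular(repoRoot);          // UMD-bundles the @angular/* ES5 builds in node_modules
    await rollupAngularSlickgrid(repoRoot); // UMD-bundles angular2-slickgrid
}

bundleAngularDeps().catch(err => {
    console.error(err);
    process.exit(1);
});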

build/lib/rollup.ts Normal file (125 lines)
View File

@@ -0,0 +1,125 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as fs from 'fs';
import * as rollup from 'rollup';
import * as path from 'path';
// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
export interface IRollupOptions {
moduleName: string;
inputFile: string;
outputDirectory: string;
outputFileName: string;
external?: string[];
}
async function rollupModule(options: IRollupOptions) {
const moduleName = options.moduleName;
try {
const inputFile = options.inputFile;
const outputDirectory = options.outputDirectory;
await fs.promises.mkdir(outputDirectory, {
recursive: true
});
const outputFileName = options.outputFileName;
const outputMapName = `${outputFileName}.map`;
const external = options.external || [];
const outputFilePath = path.resolve(outputDirectory, outputFileName);
const outputMapPath = path.resolve(outputDirectory, outputMapName);
const bundle = await rollup.rollup({
input: inputFile,
plugins: [
nodeResolve(),
commonjs(),
],
external,
});
const generatedBundle = await bundle.generate({
name: moduleName,
format: 'umd',
sourcemap: true
});
const result = generatedBundle.output[0];
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
await fs.promises.writeFile(outputFilePath, result.code);
await fs.promises.writeFile(outputMapPath, result.map);
return {
name: moduleName,
result: true
};
} catch (ex) {
return {
name: moduleName,
result: false,
exception: ex
};
}
}
export function rollupAngularSlickgrid(root: string): Promise<void> {
return new Promise(async (resolve, reject) => {
const result = await rollupModule({
moduleName: 'angular2-slickgrid',
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
outputFileName: 'angular2-slickgrid.umd.js'
});
if (!result.result) {
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
}
resolve();
});
}
export function rollupAngular(root: string): Promise<void> {
return new Promise(async (resolve, reject) => {
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
const tasks = modules.map((module) => {
return rollupModule({
moduleName: `ng.${module}`,
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
outputFileName: `${module}.umd.js`,
external: modules.map(mn => `@angular/${mn}`)
});
});
// array of booleans
const x = await Promise.all(tasks);
const result = x.reduce<{ fails: string[]; exceptions: string[]; result: boolean }>((prev, current) => {
if (!current.result) {
prev.fails.push(current.name);
prev.exceptions.push(current.exception);
prev.result = false;
}
return prev;
}, {
fails: [],
exceptions: [],
result: true,
});
if (!result.result) {
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
}
resolve();
});
}

View File

@@ -43,7 +43,9 @@ function extractEditor(options) {
compilerOptions.declaration = false;
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
options.compilerOptions = compilerOptions;
console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
// Take the extra included .d.ts files from `tsconfig.monaco.json`
options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile));
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
@@ -130,7 +132,7 @@ function createESMSourcesAndResources2(options) {
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
}
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
@@ -250,35 +252,37 @@ function transportCSS(module, enqueue, write) {
     const filename = path.join(SRC_DIR, module);
     const fileContents = fs.readFileSync(filename).toString();
     const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
-    const inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
-    const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
+    const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
     write(module, newContents);
     return true;
-    function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
+    function _rewriteOrInlineUrls(contents, forceBase64) {
         return _replaceURL(contents, (url) => {
-            let imagePath = path.join(path.dirname(module), url);
-            let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
-            if (fileContents.length < inlineByteLimit) {
-                const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
-                let DATA = ';base64,' + fileContents.toString('base64');
-                if (!forceBase64 && /\.svg$/.test(url)) {
-                    // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
-                    let newText = fileContents.toString()
-                        .replace(/"/g, '\'')
-                        .replace(/</g, '%3C')
-                        .replace(/>/g, '%3E')
-                        .replace(/&/g, '%26')
-                        .replace(/#/g, '%23')
-                        .replace(/\s+/g, ' ');
-                    let encodedData = ',' + newText;
-                    if (encodedData.length < DATA.length) {
-                        DATA = encodedData;
-                    }
-                }
-                return '"data:' + MIME + DATA + '"';
+            const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
+            if (fontMatch) {
+                const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
+                const fontPath = path.join(path.dirname(module), relativeFontPath);
+                enqueue(fontPath);
+                return relativeFontPath;
             }
-            enqueue(imagePath);
-            return url;
+            const imagePath = path.join(path.dirname(module), url);
+            const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
+            const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
+            let DATA = ';base64,' + fileContents.toString('base64');
+            if (!forceBase64 && /\.svg$/.test(url)) {
+                // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
+                let newText = fileContents.toString()
+                    .replace(/"/g, '\'')
+                    .replace(/</g, '%3C')
+                    .replace(/>/g, '%3E')
+                    .replace(/&/g, '%26')
+                    .replace(/#/g, '%23')
+                    .replace(/\s+/g, ' ');
+                let encodedData = ',' + newText;
+                if (encodedData.length < DATA.length) {
+                    DATA = encodedData;
+                }
+            }
+            return '"data:' + MIME + DATA + '"';
         });
     }
     function _replaceURL(contents, replacer) {
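
The reworked callback above changes two things: a url ending in .ttf?<query> is no longer inlined at all (the query string is trimmed and the font file is enqueued for copying next to the CSS), and images are now always inlined as data URIs because the old inlineResourcesLimit size guard is gone. A small TypeScript sketch of just the URL-classification step; the helper name and sample URLs are invented for illustration, while the regex and the query trimming come from the diff:

// Sketch: classifyUrl is a hypothetical helper; only the regex and the
// "trim the query parameter" step are taken from the change above.
function classifyUrl(url: string): { kind: 'font' | 'image'; path: string } {
    const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
    if (fontMatch) {
        // Fonts are copied rather than inlined; drop the cache-busting query.
        return { kind: 'font', path: `${fontMatch[1]}.ttf` };
    }
    // Everything else is treated as an image and inlined as a data URI.
    return { kind: 'image', path: url };
}

console.log(classifyUrl('codicon/codicon.ttf?abcdef1234'));
// { kind: 'font', path: 'codicon/codicon.ttf' }
console.log(classifyUrl('images/icon.png'));
// { kind: 'image', path: 'images/icon.png' }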

View File

@@ -50,7 +50,10 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
     options.compilerOptions = compilerOptions;
-    console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
+    console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
+    // Take the extra included .d.ts files from `tsconfig.monaco.json`
+    options.typings = (<string[]>tsConfig.include).filter(includedFile => /\.d\.ts$/.test(includedFile));
     let result = tss.shake(options);
     for (let fileName in result) {
@@ -154,7 +157,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
             continue;
         }
-        if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
+        if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
             // Transport the files directly
             write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
             continue;
@@ -290,40 +293,41 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
     const filename = path.join(SRC_DIR, module);
     const fileContents = fs.readFileSync(filename).toString();
     const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
-    const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
-    const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
+    const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
     write(module, newContents);
     return true;
-    function _rewriteOrInlineUrls(contents: string, forceBase64: boolean, inlineByteLimit: number): string {
+    function _rewriteOrInlineUrls(contents: string, forceBase64: boolean): string {
         return _replaceURL(contents, (url) => {
-            let imagePath = path.join(path.dirname(module), url);
-            let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
-            if (fileContents.length < inlineByteLimit) {
-                const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
-                let DATA = ';base64,' + fileContents.toString('base64');
-                if (!forceBase64 && /\.svg$/.test(url)) {
-                    // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
-                    let newText = fileContents.toString()
-                        .replace(/"/g, '\'')
-                        .replace(/</g, '%3C')
-                        .replace(/>/g, '%3E')
-                        .replace(/&/g, '%26')
-                        .replace(/#/g, '%23')
-                        .replace(/\s+/g, ' ');
-                    let encodedData = ',' + newText;
-                    if (encodedData.length < DATA.length) {
-                        DATA = encodedData;
-                    }
-                }
-                return '"data:' + MIME + DATA + '"';
+            const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
+            if (fontMatch) {
+                const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
+                const fontPath = path.join(path.dirname(module), relativeFontPath);
+                enqueue(fontPath);
+                return relativeFontPath;
             }
-            enqueue(imagePath);
-            return url;
+            const imagePath = path.join(path.dirname(module), url);
+            const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
+            const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
+            let DATA = ';base64,' + fileContents.toString('base64');
+            if (!forceBase64 && /\.svg$/.test(url)) {
+                // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
+                let newText = fileContents.toString()
+                    .replace(/"/g, '\'')
+                    .replace(/</g, '%3C')
+                    .replace(/>/g, '%3E')
+                    .replace(/&/g, '%26')
+                    .replace(/#/g, '%23')
+                    .replace(/\s+/g, ' ');
+                let encodedData = ',' + newText;
+                if (encodedData.length < DATA.length) {
+                    DATA = encodedData;
+                }
+            }
+            return '"data:' + MIME + DATA + '"';
         });
     }
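
Both the compiled and the TypeScript copy of _rewriteOrInlineUrls keep the same SVG optimization: an SVG can ride in a data URI either base64-encoded or URL-encoded, and whichever payload is shorter wins. A standalone TypeScript sketch of that comparison, using a toy SVG string as input (Node's Buffer stands in for the file read):

// Sketch: the SVG below is made up; the two encodings and the length comparison
// mirror the logic in the diff above.
const svgSource = '<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16"><circle cx="8" cy="8" r="7" fill="#000"/></svg>';

// Option 1: plain base64, always valid.
const base64Data = ';base64,' + Buffer.from(svgSource).toString('base64');

// Option 2: lightweight URL encoding (quotes, angle brackets, &, #, whitespace),
// as described at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
const urlEncodedData = ',' + svgSource
    .replace(/"/g, '\'')
    .replace(/</g, '%3C')
    .replace(/>/g, '%3E')
    .replace(/&/g, '%26')
    .replace(/#/g, '%23')
    .replace(/\s+/g, ' ');

// Pick whichever form is shorter, exactly as the build script does.
const data = urlEncodedData.length < base64Data.length ? urlEncodedData : base64Data;
console.log('"data:image/svg+xml' + data + '"');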

View File

@@ -116,7 +116,7 @@ function submitAllStats(productJson, commit) {
     }
     */
     appInsights.defaultClient.trackEvent({
-        name: 'monacoworkbench/packagemetrics',
+        name: `${productJson.quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`,
         properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
     });
     appInsights.defaultClient.flush({

View File

@@ -126,7 +126,7 @@ export function submitAllStats(productJson: any, commit: string): Promise<boolea
     }
     */
     appInsights.defaultClient.trackEvent({
-        name: 'monacoworkbench/packagemetrics',
+        name: `${productJson.quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`, // {{SQL CARBON EDIT}}
         properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
     });
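
The {{SQL CARBON EDIT}} above routes package-metrics telemetry to a different event name whenever the build quality is not stable. A short TypeScript sketch of how the name resolves; the quality values are examples, the template expression is the one from the diff:

// Sketch: the quality values are illustrative; the expression mirrors the change above.
function packageMetricsEventName(quality: string | undefined): string {
    return `${quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`;
}

console.log(packageMetricsEventName('stable'));  // monacoworkbench/packagemetrics
console.log(packageMetricsEventName('insider')); // adsworkbench/packagemetrics
console.log(packageMetricsEventName(undefined)); // adsworkbench/packagemetrics (a missing quality counts as non-stable)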

View File

@@ -25,17 +25,17 @@ function toStringShakeLevel(shakeLevel) {
     }
 }
 exports.toStringShakeLevel = toStringShakeLevel;
-function printDiagnostics(diagnostics) {
+function printDiagnostics(options, diagnostics) {
     for (const diag of diagnostics) {
         let result = '';
         if (diag.file) {
-            result += `${diag.file.fileName}: `;
+            result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
         }
         if (diag.file && diag.start) {
             let location = diag.file.getLineAndCharacterOfPosition(diag.start);
-            result += `- ${location.line + 1},${location.character} - `;
+            result += `:${location.line + 1}:${location.character}`;
         }
-        result += JSON.stringify(diag.messageText);
+        result += ` - ` + JSON.stringify(diag.messageText);
         console.log(result);
     }
 }
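
printDiagnostics now takes the tree-shaking options so each message can be prefixed with a path rooted at options.sourcesRoot plus a :line:character suffix, which most terminals and editors turn into a clickable location. A TypeScript sketch of the string that gets built; the file name, position, and message are invented for illustration:

import * as path from 'path';

// Sketch: `diag` is a hand-rolled stand-in for a TypeScript diagnostic;
// only the formatting mirrors the diff above.
const options = { sourcesRoot: '/work/vscode/src' };
const diag = {
    fileName: 'vs/editor/common/model.ts',
    line: 41,      // zero-based, as the compiler reports it
    character: 7,
    messageText: 'Cannot find name \'Foo\'.'
};

let result = '';
result += `${path.join(options.sourcesRoot, diag.fileName)}`;
result += `:${diag.line + 1}:${diag.character}`;
result += ` - ` + JSON.stringify(diag.messageText);
console.log(result);
// /work/vscode/src/vs/editor/common/model.ts:42:7 - "Cannot find name 'Foo'."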
@@ -44,17 +44,17 @@ function shake(options) {
     const program = languageService.getProgram();
     const globalDiagnostics = program.getGlobalDiagnostics();
     if (globalDiagnostics.length > 0) {
-        printDiagnostics(globalDiagnostics);
+        printDiagnostics(options, globalDiagnostics);
         throw new Error(`Compilation Errors encountered.`);
     }
     const syntacticDiagnostics = program.getSyntacticDiagnostics();
     if (syntacticDiagnostics.length > 0) {
-        printDiagnostics(syntacticDiagnostics);
+        printDiagnostics(options, syntacticDiagnostics);
         throw new Error(`Compilation Errors encountered.`);
     }
     const semanticDiagnostics = program.getSemanticDiagnostics();
     if (semanticDiagnostics.length > 0) {
-        printDiagnostics(semanticDiagnostics);
+        printDiagnostics(options, semanticDiagnostics);
         throw new Error(`Compilation Errors encountered.`);
     }
     markNodes(languageService, options);
@@ -358,7 +358,7 @@ function markNodes(languageService, options) {
         ++step;
         let node;
         if (step % 100 === 0) {
-            console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
+            console.log(`Treeshaking - ${Math.floor(100 * step / (step + black_queue.length + gray_queue.length))}% - ${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
         }
         if (black_queue.length === 0) {
             for (let i = 0; i < gray_queue.length; i++) {
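
The new log line above adds a rough completion percentage: step counts the nodes already visited, the two queues hold what is still pending, so step / (step + pending) approximates overall progress. A quick TypeScript sketch of the arithmetic with made-up queue sizes:

// Sketch: the counts are invented; the formula and message format mirror the diff above.
const step = 4200;                // nodes already visited
const blackQueueLength = 350;     // entries still waiting in the black queue
const grayQueueLength = 1050;     // entries still waiting in the gray queue

const total = step + blackQueueLength + grayQueueLength;
const percent = Math.floor(100 * step / total);

console.log(`Treeshaking - ${percent}% - ${step}/${total} (${blackQueueLength}, ${grayQueueLength})`);
// Treeshaking - 75% - 4200/5600 (350, 1050)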

Some files were not shown because too many files have changed in this diff.