Compare commits

..

4 Commits

Author | SHA1 | Message | Date
-- | -- | -- | --
Karl Burtram | 49bf916346 | Merge branch 'master' into release/0.26 | 2018-02-14 14:00:37 -08:00
Karl Burtram | ebd620bf7b | Add auto-update links in release branch | 2018-02-14 10:30:51 -08:00
Karl Burtram | 79980d95e4 | Merge branch 'master' into release/0.26 | 2018-02-14 10:24:14 -08:00
Karl Burtram | 250d84923c | Update changelog and readme for February | 2018-02-13 13:29:18 -08:00
12742 changed files with 169175 additions and 313974 deletions

.gitattributes (vendored): 9 changed lines

@@ -1,9 +0,0 @@
* text=auto
LICENSE.txt eol=crlf
ThirdPartyNotices.txt eol=crlf
*.bat eol=crlf
*.cmd eol=crlf
*.ps1 eol=lf
*.sh eol=lf

.gitignore (vendored): 1 changed line

@@ -6,7 +6,6 @@ node_modules/
out/
out-build/
out-editor/
out-editor-esm/
out-editor-min/
out-monaco-editor-core/
out-vscode/

@@ -32,6 +32,8 @@ addons:
before_install:
- git submodule update --init --recursive
- git clone --depth 1 https://github.com/creationix/nvm.git ./.nvm
- source ./.nvm/nvm.sh
- nvm install 8.9.1
- nvm use 8.9.1
- npm i -g yarn

@@ -1,9 +1,9 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=827846
// See http://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
"eg2.tslint",
"dbaeumer.vscode-eslint",
"msjsdiag.debugger-for-chrome"
]
}
}

.vscode/launch.json (vendored): 9 changed lines

@@ -1,6 +1,7 @@
{
"version": "0.1.0",
"configurations": [
{
"type": "node",
"request": "launch",
@@ -8,7 +9,7 @@
"program": "${workspaceFolder}/node_modules/gulp/bin/gulp.js",
"stopOnEntry": true,
"args": [
"hygiene"
"watch-extension:json-client"
],
"cwd": "${workspaceFolder}"
},
@@ -86,11 +87,7 @@
"runtimeArgs": [
"--inspect=5875"
],
"skipFiles": [
"**/winjs*.js"
],
"webRoot": "${workspaceFolder}",
"timeout": 15000
"webRoot": "${workspaceFolder}"
},
{
"type": "node",

@@ -10,9 +10,6 @@
"when": "$(basename).ts"
}
},
"files.associations": {
"OSSREADME.json": "jsonc"
},
"search.exclude": {
"**/node_modules": true,
"**/bower_components": true,
@@ -40,4 +37,4 @@
],
"typescript.tsdk": "node_modules/typescript/lib",
"git.ignoreLimitWarning": true
}
}

@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "1.7.12"
target "1.7.11"
runtime "electron"

@@ -1,92 +1,6 @@
# Change Log
## Version 0.31.4
* Release date: July 19, 2018
* Release status: Public Preview
## What's new in this version
* SQL Server Agent for SQL Operations Studio extension improvements
* Added view of Alerts, Operators, and Proxies and icons on left pane
* Added dialogs for New Job, New Job Step, New Alert, and New Operator
* Added Delete Job, Delete Alert, and Delete Operator (right-click)
* Added Previous Runs visualization
* Added Filters for each column name
* SQL Server Profiler for SQL Operations Studio extension improvements
* Added Hotkeys to quickly launch and start/stop Profiler
* Added 5 Default Templates to view Extended Events
* Added Server/Database connection name
* Added support for Azure SQL Database instances
* Added suggestion to exit Profiler when its tab is closed while Profiler is still running
* Release of Combine Scripts Extension
* Wizard and Dialog Extensibility
* Fix GitHub Issues
## Version 0.30.6
* Release date: June 20, 2018
* Release status: Public Preview
## What's new in this version
* **SQL Server Profiler for SQL Operations Studio *Preview*** extension initial release
* The new **SQL Data Warehouse** extension includes rich customizable dashboard widgets surfacing insights to your data warehouse. This unlocks key scenarios around managing and tuning your data warehouse to ensure it is optimized for consistent performance.
* **Edit Data "Filtering and Sorting"** support
* **SQL Server Agent for SQL Operations Studio *Preview*** extension enhancements for Jobs and Job History views
* Improved **Wizard & Dialog UI Builder Framework** extensibility APIs
* Update VS Code Platform source code integrating [March 2018 (1.22)](https://code.visualstudio.com/updates/v1_22) and [April 2018 (1.23)](https://code.visualstudio.com/updates/v1_23) releases
* Fix GitHub Issues
## Version 0.29.3
* Release date: May 7, 2018
* Release status: Public Preview
## What's new in this version
The May release is focused on stabilization and bug fixes leading up to the Build conference. This build contains the following highlights.
* Announcing **Redgate SQL Search** extension available in Extension Manager
* Community Localization available for 10 languages: **German, Spanish, French, Italian, Japanese, Korean, Portuguese, Russian, Simplified Chinese and Traditional Chinese!**
* Reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement)
* Extension Manager has improved Marketplace experience to easily discover community extensions
* SQL Agent extension Jobs and Job History view improvement
* Updates for **whoisactive** and **Server Reports** extensions
* Continue to fix GitHub issues
## Version 0.28.6
* Release date: April 25, 2018
* Release status: Public Preview
## What's new in this version
The April Public Preview release includes the following highlights.
* Improvements to SQL Agent *Preview* extension
* Accessibility improvements for keyboard navigation, screen reader support and high-contrast mode.
* Improved large and protected file support for saving Admin protected and >256M files within SQL Ops Studio
* Integrated Terminal splitting to work with multiple open terminals at once
* Reduced installation on-disk file count footprint for faster installs and startup times
* Improvements to Server Reports extension
* Continue to fix GitHub issues
## Version 0.27.3
* Release date: March 28, 2018
* Release status: Public Preview
## What's new in this version
The March Public Preview release enables some key aspects of the SQL Operations
Studio extensibility story. Here are some highlights in this release.
* Enhance the Manage Dashboard extensibility model to support tabbed Insights and Configuration panes
* Dashboard Insights extensions for `sp_whoisactive` from [whoisactive.com](http://whoisactive.com)
* Extension Manager enables simple acquisition of 1st-party and 3rd-party extensions
* Add additional Extensibility APIs for `connection` and `objectexplorer` management
* Community Localization open for 10 languages
* Continue to fix important customer impacting GitHub issues
## Version 0.26.7
* Release date: February 16, 2018
* Release status: Public Preview Hotfix 1
## What's new in this version
* Bug fix for `#717 Selecting partial query and hitting Cmd or Ctrl+C opens terminal with Error message`
## Version 0.26.6
## Version 0.26.4
* Release date: February 15, 2018
* Release status: Public Preview
@@ -97,15 +11,16 @@ Here's some of the highlights in the February release.
* Support Auto-Update installation on Windows and macOS
* Publish RPM and DEB packages to official Microsoft repos
* Fix `#6 Keep connection and selected database when opening new query tabs`
* Fix `#22 'Server Name' and 'Database Name' - Can these be drop downs instead of text boxes?`
* Fix #6 Keep connection and selected database when opening new query tabs
* Fix #22 'Server Name' and 'Database Name' - Can these be drop downs instead of text boxes?
* Fix #549 Silent/Very Silent Install results in application opening after installation
* Fix #481 Add "Check for updates" option.
* SQL Editor colorization and auto-completion fixes
* `#584 Keyword "FULL" not highlighted by IntelliSense`
* `#345 Colorize SQL functions within the editor`
* `#300 [#tempData] latest "]" will display green color`
* `#225 Keyword color mismatch`
* `#60 invalid sql syntax color highlighting when using temporary table in from clause`
* #584 Keyword "FULL" not highlighted by IntelliSense
* #345 Colorize SQL functions within the editor
* #300 [#tempData] latest "]" will display green color
* #225 Keyword color mismatch
* #60 invalid sql syntax color highlighting when using temporary table in from clause
* Introduce Connection extensibility API
* VS Code Editor 1.19 integration
* Update JustinPealing/html-query-plan component to pick-up several Query Plan viewer improvements

@@ -4,11 +4,11 @@ MICROSOFT SQL OPERATIONS STUDIO
Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us
("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or
otherwise. Unless applicable law gives you more rights, Microsoft reserves all
other rights not expressly granted herein, whether by implication, estoppel or
("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or
otherwise. Unless applicable law gives you more rights, Microsoft reserves all
other rights not expressly granted herein, whether by implication, estoppel or
otherwise.
THE SOFTWARE CODE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY

@@ -808,389 +808,5 @@
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"SOFTWARE"
]
},
{
"name": "mdn-data",
"version": "1.1.12",
"repositoryURL": "https://github.com/mdn/data",
"license": "MPL",
"licenseDetail": [
"Mozilla Public License Version 2.0",
"",
"Copyright (c) 2018 Mozilla Corporation",
"",
"==================================",
"",
"1. Definitions",
"--------------",
"",
"1.1. \"Contributor\"",
" means each individual or legal entity that creates, contributes to",
" the creation of, or owns Covered Software.",
"",
"1.2. \"Contributor Version\"",
" means the combination of the Contributions of others (if any) used",
" by a Contributor and that particular Contributor's Contribution.",
"",
"1.3. \"Contribution\"",
" means Covered Software of a particular Contributor.",
"",
"1.4. \"Covered Software\"",
" means Source Code Form to which the initial Contributor has attached",
" the notice in Exhibit A, the Executable Form of such Source Code",
" Form, and Modifications of such Source Code Form, in each case",
" including portions thereof.",
"",
"1.5. \"Incompatible With Secondary Licenses\"",
" means",
"",
" (a) that the initial Contributor has attached the notice described",
" in Exhibit B to the Covered Software; or",
"",
" (b) that the Covered Software was made available under the terms of",
" version 1.1 or earlier of the License, but not also under the",
" terms of a Secondary License.",
"",
"1.6. \"Executable Form\"",
" means any form of the work other than Source Code Form.",
"",
"1.7. \"Larger Work\"",
" means a work that combines Covered Software with other material, in",
" a separate file or files, that is not Covered Software.",
"",
"1.8. \"License\"",
" means this document.",
"",
"1.9. \"Licensable\"",
" means having the right to grant, to the maximum extent possible,",
" whether at the time of the initial grant or subsequently, any and",
" all of the rights conveyed by this License.",
"",
"1.10. \"Modifications\"",
" means any of the following:",
"",
" (a) any file in Source Code Form that results from an addition to,",
" deletion from, or modification of the contents of Covered",
" Software; or",
"",
" (b) any new file in Source Code Form that contains any Covered",
" Software.",
"",
"1.11. \"Patent Claims\" of a Contributor",
" means any patent claim(s), including without limitation, method,",
" process, and apparatus claims, in any patent Licensable by such",
" Contributor that would be infringed, but for the grant of the",
" License, by the making, using, selling, offering for sale, having",
" made, import, or transfer of either its Contributions or its",
" Contributor Version.",
"",
"1.12. \"Secondary License\"",
" means either the GNU General Public License, Version 2.0, the GNU",
" Lesser General Public License, Version 2.1, the GNU Affero General",
" Public License, Version 3.0, or any later versions of those",
" licenses.",
"",
"1.13. \"Source Code Form\"",
" means the form of the work preferred for making modifications.",
"",
"1.14. \"You\" (or \"Your\")",
" means an individual or a legal entity exercising rights under this",
" License. For legal entities, \"You\" includes any entity that",
" controls, is controlled by, or is under common control with You. For",
" purposes of this definition, \"control\" means (a) the power, direct",
" or indirect, to cause the direction or management of such entity,",
" whether by contract or otherwise, or (b) ownership of more than",
" fifty percent (50%) of the outstanding shares or beneficial",
" ownership of such entity.",
"",
"2. License Grants and Conditions",
"--------------------------------",
"",
"2.1. Grants",
"",
"Each Contributor hereby grants You a world-wide, royalty-free,",
"non-exclusive license:",
"",
"(a) under intellectual property rights (other than patent or trademark)",
" Licensable by such Contributor to use, reproduce, make available,",
" modify, display, perform, distribute, and otherwise exploit its",
" Contributions, either on an unmodified basis, with Modifications, or",
" as part of a Larger Work; and",
"",
"(b) under Patent Claims of such Contributor to make, use, sell, offer",
" for sale, have made, import, and otherwise transfer either its",
" Contributions or its Contributor Version.",
"",
"2.2. Effective Date",
"",
"The licenses granted in Section 2.1 with respect to any Contribution",
"become effective for each Contribution on the date the Contributor first",
"distributes such Contribution.",
"",
"2.3. Limitations on Grant Scope",
"",
"The licenses granted in this Section 2 are the only rights granted under",
"this License. No additional rights or licenses will be implied from the",
"distribution or licensing of Covered Software under this License.",
"Notwithstanding Section 2.1(b) above, no patent license is granted by a",
"Contributor:",
"",
"(a) for any code that a Contributor has removed from Covered Software;",
" or",
"",
"(b) for infringements caused by: (i) Your and any other third party's",
" modifications of Covered Software, or (ii) the combination of its",
" Contributions with other software (except as part of its Contributor",
" Version); or",
"",
"(c) under Patent Claims infringed by Covered Software in the absence of",
" its Contributions.",
"",
"This License does not grant any rights in the trademarks, service marks,",
"or logos of any Contributor (except as may be necessary to comply with",
"the notice requirements in Section 3.4).",
"",
"2.4. Subsequent Licenses",
"",
"No Contributor makes additional grants as a result of Your choice to",
"distribute the Covered Software under a subsequent version of this",
"License (see Section 10.2) or under the terms of a Secondary License (if",
"permitted under the terms of Section 3.3).",
"",
"2.5. Representation",
"",
"Each Contributor represents that the Contributor believes its",
"Contributions are its original creation(s) or it has sufficient rights",
"to grant the rights to its Contributions conveyed by this License.",
"",
"2.6. Fair Use",
"",
"This License is not intended to limit any rights You have under",
"applicable copyright doctrines of fair use, fair dealing, or other",
"equivalents.",
"",
"2.7. Conditions",
"",
"Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted",
"in Section 2.1.",
"",
"3. Responsibilities",
"-------------------",
"",
"3.1. Distribution of Source Form",
"",
"All distribution of Covered Software in Source Code Form, including any",
"Modifications that You create or to which You contribute, must be under",
"the terms of this License. You must inform recipients that the Source",
"Code Form of the Covered Software is governed by the terms of this",
"License, and how they can obtain a copy of this License. You may not",
"attempt to alter or restrict the recipients' rights in the Source Code",
"Form.",
"",
"3.2. Distribution of Executable Form",
"",
"If You distribute Covered Software in Executable Form then:",
"",
"(a) such Covered Software must also be made available in Source Code",
" Form, as described in Section 3.1, and You must inform recipients of",
" the Executable Form how they can obtain a copy of such Source Code",
" Form by reasonable means in a timely manner, at a charge no more",
" than the cost of distribution to the recipient; and",
"",
"(b) You may distribute such Executable Form under the terms of this",
" License, or sublicense it under different terms, provided that the",
" license for the Executable Form does not attempt to limit or alter",
" the recipients' rights in the Source Code Form under this License.",
"",
"3.3. Distribution of a Larger Work",
"",
"You may create and distribute a Larger Work under terms of Your choice,",
"provided that You also comply with the requirements of this License for",
"the Covered Software. If the Larger Work is a combination of Covered",
"Software with a work governed by one or more Secondary Licenses, and the",
"Covered Software is not Incompatible With Secondary Licenses, this",
"License permits You to additionally distribute such Covered Software",
"under the terms of such Secondary License(s), so that the recipient of",
"the Larger Work may, at their option, further distribute the Covered",
"Software under the terms of either this License or such Secondary",
"License(s).",
"",
"3.4. Notices",
"",
"You may not remove or alter the substance of any license notices",
"(including copyright notices, patent notices, disclaimers of warranty,",
"or limitations of liability) contained within the Source Code Form of",
"the Covered Software, except that You may alter any license notices to",
"the extent required to remedy known factual inaccuracies.",
"",
"3.5. Application of Additional Terms",
"",
"You may choose to offer, and to charge a fee for, warranty, support,",
"indemnity or liability obligations to one or more recipients of Covered",
"Software. However, You may do so only on Your own behalf, and not on",
"behalf of any Contributor. You must make it absolutely clear that any",
"such warranty, support, indemnity, or liability obligation is offered by",
"You alone, and You hereby agree to indemnify every Contributor for any",
"liability incurred by such Contributor as a result of warranty, support,",
"indemnity or liability terms You offer. You may include additional",
"disclaimers of warranty and limitations of liability specific to any",
"jurisdiction.",
"",
"4. Inability to Comply Due to Statute or Regulation",
"---------------------------------------------------",
"",
"If it is impossible for You to comply with any of the terms of this",
"License with respect to some or all of the Covered Software due to",
"statute, judicial order, or regulation then You must: (a) comply with",
"the terms of this License to the maximum extent possible; and (b)",
"describe the limitations and the code they affect. Such description must",
"be placed in a text file included with all distributions of the Covered",
"Software under this License. Except to the extent prohibited by statute",
"or regulation, such description must be sufficiently detailed for a",
"recipient of ordinary skill to be able to understand it.",
"",
"5. Termination",
"--------------",
"",
"5.1. The rights granted under this License will terminate automatically",
"if You fail to comply with any of its terms. However, if You become",
"compliant, then the rights granted under this License from a particular",
"Contributor are reinstated (a) provisionally, unless and until such",
"Contributor explicitly and finally terminates Your grants, and (b) on an",
"ongoing basis, if such Contributor fails to notify You of the",
"non-compliance by some reasonable means prior to 60 days after You have",
"come back into compliance. Moreover, Your grants from a particular",
"Contributor are reinstated on an ongoing basis if such Contributor",
"notifies You of the non-compliance by some reasonable means, this is the",
"first time You have received notice of non-compliance with this License",
"from such Contributor, and You become compliant prior to 30 days after",
"Your receipt of the notice.",
"",
"5.2. If You initiate litigation against any entity by asserting a patent",
"infringement claim (excluding declaratory judgment actions,",
"counter-claims, and cross-claims) alleging that a Contributor Version",
"directly or indirectly infringes any patent, then the rights granted to",
"You by any and all Contributors for the Covered Software under Section",
"2.1 of this License shall terminate.",
"",
"5.3. In the event of termination under Sections 5.1 or 5.2 above, all",
"end user license agreements (excluding distributors and resellers) which",
"have been validly granted by You or Your distributors under this License",
"prior to termination shall survive termination.",
"",
"************************************************************************",
"* *",
"* 6. Disclaimer of Warranty *",
"* ------------------------- *",
"* *",
"* Covered Software is provided under this License on an \"as is\" *",
"* basis, without warranty of any kind, either expressed, implied, or *",
"* statutory, including, without limitation, warranties that the *",
"* Covered Software is free of defects, merchantable, fit for a *",
"* particular purpose or non-infringing. The entire risk as to the *",
"* quality and performance of the Covered Software is with You. *",
"* Should any Covered Software prove defective in any respect, You *",
"* (not any Contributor) assume the cost of any necessary servicing, *",
"* repair, or correction. This disclaimer of warranty constitutes an *",
"* essential part of this License. No use of any Covered Software is *",
"* authorized under this License except under this disclaimer. *",
"* *",
"************************************************************************",
"",
"************************************************************************",
"* *",
"* 7. Limitation of Liability *",
"* -------------------------- *",
"* *",
"* Under no circumstances and under no legal theory, whether tort *",
"* (including negligence), contract, or otherwise, shall any *",
"* Contributor, or anyone who distributes Covered Software as *",
"* permitted above, be liable to You for any direct, indirect, *",
"* special, incidental, or consequential damages of any character *",
"* including, without limitation, damages for lost profits, loss of *",
"* goodwill, work stoppage, computer failure or malfunction, or any *",
"* and all other commercial damages or losses, even if such party *",
"* shall have been informed of the possibility of such damages. This *",
"* limitation of liability shall not apply to liability for death or *",
"* personal injury resulting from such party's negligence to the *",
"* extent applicable law prohibits such limitation. Some *",
"* jurisdictions do not allow the exclusion or limitation of *",
"* incidental or consequential damages, so this exclusion and *",
"* limitation may not apply to You. *",
"* *",
"************************************************************************",
"",
"8. Litigation",
"-------------",
"",
"Any litigation relating to this License may be brought only in the",
"courts of a jurisdiction where the defendant maintains its principal",
"place of business and such litigation shall be governed by laws of that",
"jurisdiction, without reference to its conflict-of-law provisions.",
"Nothing in this Section shall prevent a party's ability to bring",
"cross-claims or counter-claims.",
"",
"9. Miscellaneous",
"----------------",
"",
"This License represents the complete agreement concerning the subject",
"matter hereof. If any provision of this License is held to be",
"unenforceable, such provision shall be reformed only to the extent",
"necessary to make it enforceable. Any law or regulation which provides",
"that the language of a contract shall be construed against the drafter",
"shall not be used to construe this License against a Contributor.",
"",
"10. Versions of the License",
"---------------------------",
"",
"10.1. New Versions",
"",
"Mozilla Foundation is the license steward. Except as provided in Section",
"10.3, no one other than the license steward has the right to modify or",
"publish new versions of this License. Each version will be given a",
"distinguishing version number.",
"",
"10.2. Effect of New Versions",
"",
"You may distribute the Covered Software under the terms of the version",
"of the License under which You originally received the Covered Software,",
"or under the terms of any subsequent version published by the license",
"steward.",
"",
"10.3. Modified Versions",
"",
"If you create software not governed by this License, and you want to",
"create a new license for such software, you may create and use a",
"modified version of this License if you rename the license and remove",
"any references to the name of the license steward (except to note that",
"such modified license differs from this License).",
"",
"10.4. Distributing Source Code Form that is Incompatible With Secondary",
"Licenses",
"",
"If You choose to distribute Source Code Form that is Incompatible With",
"Secondary Licenses under the terms of this version of the License, the",
"notice described in Exhibit B of this License must be attached.",
"",
"Exhibit A - Source Code Form License Notice",
"-------------------------------------------",
"",
" This Source Code Form is subject to the terms of the Mozilla Public",
" License, v. 2.0. If a copy of the MPL was not distributed with this",
" file, You can obtain one at http://mozilla.org/MPL/2.0/.",
"",
"If it is not possible or desirable to put the notice in a particular",
"file, then You may include the notice in a location (such as a LICENSE",
"file in a relevant directory) where a recipient would be likely to look",
"for such a notice.",
"",
"You may add additional accurate notices of copyright ownership.",
"",
"Exhibit B - \"Incompatible With Secondary Licenses\" Notice",
"---------------------------------------------------------",
"",
" This Source Code Form is \"Incompatible With Secondary Licenses\", as",
" defined by the Mozilla Public License, v. 2.0."
]
}
]

@@ -1,19 +1,17 @@
# SQL Operations Studio
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
SQL Operations Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
**Download SQL Operations Studio June Public Preview**
**Download SQL Operations Studio February Public Preview**
Platform | Link
-- | --
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=2005949
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2005950
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2005959
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2005960
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2006083
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2006084
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=866480
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=866479
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=866481
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=866482
Linux DEB | https://go.microsoft.com/fwlink/?linkid=866484
Linux RPM | https://go.microsoft.com/fwlink/?linkid=866483
Go to our [download page](https://aka.ms/sqlopsstudio) for more specific instructions.
@@ -50,45 +48,23 @@ please see the document [How to Contribute](https://github.com/Microsoft/sqlopss
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## Localization
SQL Operations Studio localization is now open for community contributions. You can contribute to localization for both software and docs. https://aka.ms/SQLOpsStudioLoc
Localization is now open for 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). Help us make SQL Operations Studio available in your language!
## Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* lanceklinger `Fix for double clicking column handle in results table #1504`
* westerncj for `Removed duplicate contribution from README.md (#753)`
* ntovas for `Fix for duplicate extensions shown in "Save File" dialog. (#779)`
* SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)`
* alextercete for `Fix "No extension gallery service configured" error (#427)`
* SebastianPfliegel for `Add cursor snippet (#475)`
* mwiedemeyer for `Fix #58: Default sort order for DB size widget (#111)`
* AlexTroshkin for `Show disconnect in context menu only when connectionProfile connected (#150)`
* AlexTroshkin for `Fix #138: Invalid syntax color highlighting (identity not highlighting) (#140))`
* stebet for `Fix #153: Fixing sql snippets that failed on a DB with case-sensitive collation. (#152)`
* SebastianPfliegel `Remove sqlExtensionHelp (#312)`
* olljanat for `Implemented npm version check (#314)`
* Adam Mechanic for helping with the `whoisactive` extension
* All community localization contributors
* French: Adrien Clerbois, ANAS BELABBES, Antoine Griffard, Arian Papillon, Eric Macarez, Eric Van Thorre, Jérémy LANDON, Matthias GROSPERRIN, Maxime COQUEREL, Olivier Guinart, thierry DEMAN-BARCELÒ, Thomas Potier
* Italian: Aldo Donetti, Alessandro Alpi, Andrea Dottor, Bruni Luca, Gianluca Hotz, Luca Nardi, Luigi Bruno, Marco Dal Pino, Mirco Vanini, Pasquale Ceglie, Riccardo Cappello, Sergio Govoni, Stefano Demiliani
* German: Anna Henke-Gunvaldson, Ben Weissman, David Ullmer, J.M. ., Kai Modo, Konstantin Staschill, Kostja Klein, Lennart Trunk, Markus Ehrenmüller-Jensen, Mascha Kroenlein, Matthias Knoll, Mourad Louha, Thomas Hütter, Wolfgang Straßer
* Spanish: Alberto Poblacion, Andy Gonzalez, Carlos Mendible, Christian Araujo, Daniel D, Eickhel Mendoza, Ernesto Cardenas, Ivan Toledo Ivanovic, Fran Diaz, JESUS GIL, Jorge Serrano Pérez, José Saturnino Pimentel Juárez, Mauricio Hidalgo, Pablo Iglesias, Rikhardo Estrada Rdez, Thierry DEMAN, YOLANDA CUESTA ALTIERI
* Japanese: Fujio Kojima, Kazushi KAMEGAWA, Masayoshi Yamada, Masayuki Ozawa , Seiji Momoto, Takashi Kanai, Takayoshi Tanaka, Yoshihisa Ozaki, 庄垣内治
* Chinese (simplified): DAN YE, Joel Yang, Lynne Dong, RyanYu Zhang, Sheng Jiang, Wei Zhang, Zhiliang Xu
* Chinese (Traditional): Bruce Chen, Chiayi Yen, Kevin Yang, Winnie Lin, 保哥 Will, 謝政廷
* Korean: Do-Kyun Kim, Evelyn Kim, Helen Jung, Hong Jmee, jeongwoo choi, Jun Hyoung Lee, Jungsun Kim정선, Justin Yoo, Kavrith mucha, Kiwoong Youm, MinGyu Ju, MVP_JUNO BEA, Sejun Kim, SOONMAN KWON, sung man ko, Yeongrak Choi, younggun kim, Youngjae Kim, 소영 이
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
And of course we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/sqlopsstudio/master/ThirdPartyNotices.txt)
## License

@@ -1,12 +0,0 @@
[
{
"name": "ms-vscode.node-debug",
"version": "1.23.3",
"repo": "https://github.com/Microsoft/vscode-node-debug"
},
{
"name": "ms-vscode.node-debug2",
"version": "1.23.5",
"repo": "https://github.com/Microsoft/vscode-node-debug2"
}
]

@@ -1,20 +0,0 @@
{
"env": {
"node": true,
"es6": true,
"browser": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}

@@ -1,126 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const fs = require('fs');
const path = require('path');
const os = require('os');
// @ts-ignore review
const { remote } = require('electron');
const dialog = remote.dialog;
const builtInExtensionsPath = path.join(__dirname, '..', 'builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
function readJson(filePath) {
return JSON.parse(fs.readFileSync(filePath, { encoding: 'utf8' }));
}
function writeJson(filePath, obj) {
fs.writeFileSync(filePath, JSON.stringify(obj, null, 2));
}
function renderOption(form, id, title, value, checked) {
const input = document.createElement('input');
input.type = 'radio';
input.id = id;
input.name = 'choice';
input.value = value;
input.checked = !!checked;
form.appendChild(input);
const label = document.createElement('label');
label.setAttribute('for', id);
label.textContent = title;
form.appendChild(label);
return input;
}
function render(el, state) {
function setState(state) {
try {
writeJson(controlFilePath, state.control);
} catch (err) {
console.error(err);
}
el.innerHTML = '';
render(el, state);
}
const ul = document.createElement('ul');
const { builtin, control } = state;
for (const ext of builtin) {
const controlState = control[ext.name] || 'marketplace';
const li = document.createElement('li');
ul.appendChild(li);
const name = document.createElement('code');
name.textContent = ext.name;
li.appendChild(name);
const form = document.createElement('form');
li.appendChild(form);
const marketplaceInput = renderOption(form, `marketplace-${ext.name}`, 'Marketplace', 'marketplace', controlState === 'marketplace');
marketplaceInput.onchange = function () {
control[ext.name] = 'marketplace';
setState({ builtin, control });
};
const disabledInput = renderOption(form, `disabled-${ext.name}`, 'Disabled', 'disabled', controlState === 'disabled');
disabledInput.onchange = function () {
control[ext.name] = 'disabled';
setState({ builtin, control });
};
let local = undefined;
if (controlState !== 'marketplace' && controlState !== 'disabled') {
local = controlState;
}
const localInput = renderOption(form, `local-${ext.name}`, 'Local', 'local', !!local);
localInput.onchange = function () {
const result = dialog.showOpenDialog(remote.getCurrentWindow(), {
title: 'Choose Folder',
properties: ['openDirectory']
});
if (result && result.length >= 1) {
control[ext.name] = result[0];
}
setState({ builtin, control });
};
if (local) {
const localSpan = document.createElement('code');
localSpan.className = 'local';
localSpan.textContent = local;
form.appendChild(localSpan);
}
}
el.appendChild(ul);
}
function main() {
const el = document.getElementById('extensions');
const builtin = readJson(builtInExtensionsPath);
let control;
try {
control = readJson(controlFilePath);
} catch (err) {
control = {};
}
render(el, { builtin, control });
}
window.onload = main;
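
The deleted page script above reads the list of built-in extensions and persists the user's choice to a control.json under ~/.vscode-oss-dev/extensions/, where each entry maps an extension name to 'marketplace', 'disabled', or a local folder path. A minimal sketch of editing that same file directly, assuming the extension names that appear in another file earlier in this diff and a hypothetical local checkout path:

```js
// Sketch only: update control.json the same way the page's setState() persists it.
// The extension names appear elsewhere in this diff; the local path is hypothetical.
const fs = require('fs');
const os = require('os');
const path = require('path');

const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');

let control;
try {
	control = JSON.parse(fs.readFileSync(controlFilePath, { encoding: 'utf8' }));
} catch (err) {
	control = {}; // no control file yet: start from an empty mapping
}

// A value can be 'marketplace', 'disabled', or a local folder path.
control['ms-vscode.node-debug'] = '/home/me/src/vscode-node-debug'; // hypothetical local checkout
control['ms-vscode.node-debug2'] = 'disabled';

fs.mkdirSync(path.dirname(controlFilePath), { recursive: true }); // requires Node >= 10
fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
```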

@@ -1,46 +0,0 @@
<!-- Copyright (C) Microsoft Corporation. All rights reserved. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>Manage Built-in Extensions</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<script src="browser-main.js"></script>
<style>
body {
font-family: 'Trebuchet MS', 'Lucida Sans Unicode', 'Lucida Grande', 'Lucida Sans', Arial, sans-serif;
font-size: 10pt;
}
code {
font-family: 'Menlo', 'Courier New', 'Courier', monospace;
}
ul {
padding-left: 1em;
}
li {
list-style: none;
padding: 0.3em 0;
}
label {
margin-right: 1em;
}
form {
padding: 0.3em 0 0.3em 0.3em;
}
</style>
</head>
<body>
<h1>Built-in Extensions</h1>
<div id="extensions"></div>
</body>
</html>

@@ -1,20 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const { app, BrowserWindow } = require('electron');
const url = require('url');
const path = require('path');
let window = null;
app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600 });
window.setMenuBarVisibility(false);
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
// window.webContents.openDevTools();
window.once('closed', () => window = null);
});
app.on('window-all-closed', () => app.quit());

@@ -1,5 +0,0 @@
{
"name": "builtin",
"version": "0.1.0",
"main": "main.js"
}

@@ -3,19 +3,15 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const gulp = require('gulp');
const path = require('path');
const util = require('./lib/util');
const common = require('./lib/optimize');
const es = require('event-stream');
const File = require('vinyl');
const i18n = require('./lib/i18n');
const standalone = require('./lib/standalone');
const cp = require('child_process');
var gulp = require('gulp');
var path = require('path');
var util = require('./lib/util');
var common = require('./lib/optimize');
var es = require('event-stream');
var File = require('vinyl');
var root = path.dirname(__dirname);
var sha1 = util.getVersion(root);
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
var semver = require('./monaco/package.json').version;
var headerVersion = semver + '(' + sha1 + ')';
@@ -25,14 +21,14 @@ var editorEntryPoints = [
{
name: 'vs/editor/editor.main',
include: [],
exclude: ['vs/css', 'vs/nls'],
prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'],
exclude: [ 'vs/css', 'vs/nls' ],
prepend: [ 'out-build/vs/css.js', 'out-build/vs/nls.js' ],
},
{
name: 'vs/base/common/worker/simpleWorker',
include: ['vs/editor/common/services/editorSimpleWorker'],
prepend: ['vs/loader.js'],
append: ['vs/base/worker/workerMain'],
include: [ 'vs/editor/common/services/editorSimpleWorker' ],
prepend: [ 'vs/loader.js' ],
append: [ 'vs/base/worker/workerMain' ],
dest: 'vs/base/worker/workerMain.js'
}
];
@@ -74,8 +70,6 @@ function editorLoaderConfig() {
return result;
}
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
gulp.task('clean-optimized-editor', util.rimraf('out-editor'));
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'], common.optimizeTask({
entryPoints: editorEntryPoints,
@@ -85,37 +79,14 @@ gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'],
bundleLoader: false,
header: BUNDLED_FILE_HEADER,
bundleInfo: true,
out: 'out-editor',
languages: languages
out: 'out-editor'
}));
gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));
gulp.task('clean-editor-esm', util.rimraf('out-editor-esm'));
gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro'], function () {
standalone.createESMSourcesAndResources({
entryPoints: [
'vs/editor/editor.main',
'vs/editor/editor.worker'
],
outFolder: './out-editor-esm/src',
outResourcesFolder: './out-monaco-editor-core/esm',
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
'vs/nls': 'vs/nls.mock',
}
});
});
gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () {
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
});
gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify-editor', 'optimize-editor'], function () {
gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-editor'], function() {
return es.merge(
// other assets
es.merge(
@@ -124,30 +95,19 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
gulp.src('src/vs/monaco.d.ts')
).pipe(gulp.dest('out-monaco-editor-core')),
// place the .d.ts in the esm folder
gulp.src('src/vs/monaco.d.ts')
.pipe(es.through(function (data) {
this.emit('data', new File({
path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
base: data.base,
contents: data.contents
}));
}))
.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
// package.json
gulp.src('build/monaco/package.json')
.pipe(es.through(function (data) {
.pipe(es.through(function(data) {
var json = JSON.parse(data.contents.toString());
json.private = false;
data.contents = Buffer.from(JSON.stringify(json, null, ' '));
data.contents = new Buffer(JSON.stringify(json, null, ' '));
this.emit('data', data);
}))
.pipe(gulp.dest('out-monaco-editor-core')),
// README.md
gulp.src('build/monaco/README-npm.md')
.pipe(es.through(function (data) {
.pipe(es.through(function(data) {
this.emit('data', new File({
path: data.path.replace(/README-npm\.md/, 'README.md'),
base: data.base,
@@ -164,10 +124,10 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
// min folder
es.merge(
gulp.src('out-editor-min/**/*')
).pipe(filterStream(function (path) {
).pipe(filterStream(function(path) {
// no map files
return !/(\.js\.map$)|(nls\.metadata\.json$)|(bundleInfo\.json$)/.test(path);
})).pipe(es.through(function (data) {
})).pipe(es.through(function(data) {
// tweak the sourceMappingURL
if (!/\.js$/.test(data.path)) {
this.emit('data', data);
@@ -180,50 +140,49 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr);
data.contents = Buffer.from(strContents);
data.contents = new Buffer(strContents);
this.emit('data', data);
})).pipe(gulp.dest('out-monaco-editor-core/min')),
// min-maps folder
es.merge(
gulp.src('out-editor-min/**/*')
).pipe(filterStream(function (path) {
).pipe(filterStream(function(path) {
// no map files
return /\.js\.map$/.test(path);
})).pipe(gulp.dest('out-monaco-editor-core/min-maps'))
);
});
gulp.task('analyze-editor-distro', function () {
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
gulp.task('analyze-editor-distro', function() {
var bundleInfo = require('../out-editor/bundleInfo.json');
var graph = bundleInfo.graph;
var bundles = bundleInfo.bundles;
var inverseGraph = {};
Object.keys(graph).forEach(function (module) {
Object.keys(graph).forEach(function(module) {
var dependencies = graph[module];
dependencies.forEach(function (dep) {
dependencies.forEach(function(dep) {
inverseGraph[dep] = inverseGraph[dep] || [];
inverseGraph[dep].push(module);
});
});
var detailed = {};
Object.keys(bundles).forEach(function (entryPoint) {
Object.keys(bundles).forEach(function(entryPoint) {
var included = bundles[entryPoint];
var includedMap = {};
included.forEach(function (included) {
included.forEach(function(included) {
includedMap[included] = true;
});
var explanation = [];
included.map(function (included) {
included.map(function(included) {
if (included.indexOf('!') >= 0) {
return;
}
var reason = (inverseGraph[included] || []).filter(function (mod) {
var reason = (inverseGraph[included]||[]).filter(function(mod) {
return !!includedMap[mod];
});
explanation.push({
@@ -239,67 +198,10 @@ gulp.task('analyze-editor-distro', function () {
});
function filterStream(testFunc) {
return es.through(function (data) {
return es.through(function(data) {
if (!testFunc(data.relative)) {
return;
}
this.emit('data', data);
});
}
//#region monaco type checking
function createTscCompileTask(watch) {
return () => {
const createReporter = require('./lib/reporter').createReporter;
return new Promise((resolve, reject) => {
const args = ['./node_modules/.bin/tsc', '-p', './src/tsconfig.monaco.json', '--noEmit'];
if (watch) {
args.push('-w');
}
const child = cp.spawn(`node`, args, {
cwd: path.join(__dirname, '..'),
// stdio: [null, 'pipe', 'inherit']
});
let errors = [];
let reporter = createReporter();
let report;
let magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings
child.stdout.on('data', data => {
let str = String(data);
str = str.replace(magic, '').trim();
if (str.indexOf('Starting compilation') >= 0 || str.indexOf('File change detected') >= 0) {
errors.length = 0;
report = reporter.end(false);
} else if (str.indexOf('Compilation complete') >= 0) {
report.end();
} else if (str) {
let match = /(.*\(\d+,\d+\): )(.*: )(.*)/.exec(str);
if (match) {
// trying to massage the message so that it matches the gulp-tsb error messages
// e.g. src/vs/base/common/strings.ts(663,5): error TS2322: Type '1234' is not assignable to type 'string'.
let fullpath = path.join(root, match[1]);
let message = match[3];
// @ts-ignore
reporter(fullpath + message);
} else {
// @ts-ignore
reporter(str);
}
}
});
child.on('exit', resolve);
child.on('error', reject);
});
};
}
gulp.task('monaco-typecheck-watch', createTscCompileTask(true));
gulp.task('monaco-typecheck', createTscCompileTask(false));
//#endregion
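
Two recurring one-line differences in the gulpfile hunks above are formatting (`function (data)` vs. `function(data)`) and `Buffer.from(...)` vs. the legacy `new Buffer(...)` constructor. For string input the two Buffer forms produce identical contents; `new Buffer(str)` is simply the deprecated API. A minimal sketch, not taken from this repository, illustrating the equivalence:

```js
// Illustrative only: the two Buffer-construction styles seen in the hunks above.
const json = JSON.stringify({ private: false }, null, '  ');

const legacy = new Buffer(json);   // deprecated constructor form (emits DEP0005 on newer Node)
const modern = Buffer.from(json);  // non-deprecated factory form

console.log(legacy.equals(modern)); // true: same bytes either way
```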

@@ -20,8 +20,6 @@ const sourcemaps = require('gulp-sourcemaps');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const i18n = require('./lib/i18n');
const plumber = require('gulp-plumber');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@@ -31,8 +29,7 @@ const compilations = glob.sync('**/tsconfig.json', {
});
const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`;
const languages = i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
const languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile);
@@ -58,19 +55,9 @@ const tasks = compilations.map(function (tsconfigFile) {
const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**');
const out = path.join(root, 'out');
const i18nPath = path.join(__dirname, '..', 'i18n');
const i18n = path.join(__dirname, '..', 'i18n');
const baseUrl = getBaseUrl(out);
let headerId, headerOut;
let index = relativeDirname.indexOf('/');
if (index < 0) {
headerId = 'vscode.' + relativeDirname;
headerOut = 'out';
} else {
headerId = 'vscode.' + relativeDirname.substr(0, index);
headerOut = relativeDirname.substr(index + 1) + '/out';
}
function createPipeline(build, emitError) {
const reporter = createReporter();
@@ -83,13 +70,6 @@ const tasks = compilations.map(function (tsconfigFile) {
const input = es.through();
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
const output = input
.pipe(plumber({
errorHandler: function (err) {
if (err && !err.__reporter__) {
reporter(err);
}
}
}))
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(compilation())
@@ -102,9 +82,7 @@ const tasks = compilations.map(function (tsconfigFile) {
sourceRoot: '../src'
}))
.pipe(tsFilter.restore)
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18nPath, out) : es.through())
.pipe(build ? nlsDev.bundleMetaDataFiles(headerId, headerOut) : es.through())
.pipe(build ? nlsDev.bundleLanguageFiles() : es.through())
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18n, out) : es.through())
.pipe(reporter.end(emitError));
return es.duplex(input, output);
@@ -151,7 +129,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const watchInput = watcher(src, srcOpts);
return watchInput
.pipe(util.incremental(() => pipeline(), input))
.pipe(util.incremental(() => pipeline(true), input))
.pipe(gulp.dest(out));
});

@@ -12,11 +12,7 @@ const gulptslint = require('gulp-tslint');
const gulpeslint = require('gulp-eslint');
const tsfmt = require('typescript-formatter');
const tslint = require('tslint');
const VinylFile = require('vinyl');
const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
/**
* Hygiene works by creating cascading subsets of all our files and
@@ -33,56 +29,55 @@ const all = [
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*',
'!**/node_modules/**'
'test/**/*'
];
const eolFilter = [
'**',
'!ThirdPartyNotices.txt',
'!LICENSE.txt',
'!extensions/**/out/**',
'!test/smoke/out/**',
'!**/node_modules/**',
'!**/fixtures/**',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot}',
'!build/{lib,tslintRules}/**/*.js',
'!build/monaco/**',
'!build/win32/**',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!**/Dockerfile'
];
const indentationFilter = [
'**',
// except specific files
'!ThirdPartyNotices.txt',
'!LICENSE.txt',
'!src/vs/nls.js',
'!src/vs/css.js',
'!src/vs/loader.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/winjs.base.js',
'!src/vs/base/node/terminateProcess.sh',
'!test/assert.js',
// except specific folders
'!test/smoke/out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/*.md',
'!**/*.ps1',
'!**/*.template',
'!**/*.yaml',
'!**/*.yml',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/octicons/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
'!extensions/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!src/vs/*/**/*.d.ts',
'!**/*.d.ts.recipe',
'!test/assert.js',
'!**/package.json',
'!**/octicons/**',
'!**/vs/base/common/marked/raw.marked.js',
'!**/vs/base/common/winjs.base.raw.js',
'!**/vs/base/node/terminateProcess.sh',
'!**/vs/base/node/ps-win.ps1',
'!**/vs/nls.js',
'!**/vs/css.js',
'!**/vs/loader.js',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
'!build/{lib,tslintRules}/**/*.js',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!**/Dockerfile',
'!extensions/markdown/media/*.js'
'!extensions/vscode-api-tests/testWorkspace/**'
];
const copyrightFilter = [
@@ -100,13 +95,11 @@ const copyrightFilter = [
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
'!extensions/markdown-language-features/media/tomorrow.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*'
'!extensions/markdown/media/tomorrow.css',
'!extensions/html/server/src/modes/typescript/*'
];
const eslintFilter = [
@@ -117,8 +110,8 @@ const eslintFilter = [
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/winjs.base.js',
'!src/**/marked.js',
'!src/**/winjs.base.raw.js',
'!src/**/raw.marked.js',
'!**/test/**'
];
@@ -131,17 +124,16 @@ const tslintFilter = [
'!**/node_modules/**',
'!extensions/typescript/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts'
'!extensions/html/server/lib/jquery.d.ts'
];
const copyrightHeaderLines = [
const copyrightHeader = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Licensed under the Source EULA. See License.txt in the project root for license information.',
' *--------------------------------------------------------------------------------------------*/'
];
].join('\n');
gulp.task('eslint', () => {
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
@@ -152,23 +144,31 @@ gulp.task('eslint', () => {
});
gulp.task('tslint', () => {
// {{SQL CARBON EDIT}}
const options = { emitError: false };
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default.report(options));
.pipe(gulptslint({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.report(options));
});
function hygiene(some) {
const hygiene = exports.hygiene = (some, options) => {
options = options || {};
let errorCount = 0;
const indentation = es.through(function (file) {
const lines = file.contents.toString('utf8').split(/\r\n|\r|\n/);
file.__lines = lines;
const eol = es.through(function (file) {
if (/\r\n?/g.test(file.contents.toString('utf8'))) {
console.error(file.relative + ': Bad EOL found');
errorCount++;
}
lines
this.emit('data', file);
});
const indentation = es.through(function (file) {
file.contents
.toString('utf8')
.split(/\r\n|\r|\n/)
.forEach((line, i) => {
if (/^\s*$/.test(line)) {
// empty or whitespace lines are OK
@@ -186,14 +186,9 @@ function hygiene(some) {
});
const copyrights = es.through(function (file) {
const lines = file.__lines;
for (let i = 0; i < copyrightHeaderLines.length; i++) {
if (lines[i] !== copyrightHeaderLines[i]) {
console.error(file.relative + ': Missing or bad copyright statement');
errorCount++;
break;
}
if (file.contents.toString('utf8').indexOf(copyrightHeader) !== 0) {
console.error(file.relative + ': Missing or bad copyright statement');
errorCount++;
}
this.emit('data', file);
@@ -201,25 +196,12 @@ function hygiene(some) {
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: false,
verify: true,
tsfmt: true,
// verbose: true,
// keep checkJS happy
editorconfig: undefined,
replace: undefined,
tsconfig: undefined,
tsconfigFile: undefined,
tslint: undefined,
tslintFile: undefined,
tsfmtFile: undefined,
vscode: undefined,
vscodeFile: undefined
// verbose: true
}).then(result => {
let original = result.src.replace(/\r\n/gm, '\n');
let formatted = result.dest.replace(/\r\n/gm, '\n');
if (original !== formatted) {
console.error('File not formatted:', file.relative);
if (result.error) {
console.error(result.message);
errorCount++;
}
cb(null, file);
@@ -229,31 +211,43 @@ function hygiene(some) {
});
});
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
const tslintOptions = { fix: false, formatter: 'json' };
const tsLinter = new tslint.Linter(tslintOptions);
function reportFailures(failures) {
failures.forEach(failure => {
const name = failure.name || failure.fileName;
const position = failure.startPosition;
const line = position.lineAndCharacter ? position.lineAndCharacter.line : position.line;
const character = position.lineAndCharacter ? position.lineAndCharacter.character : position.character;
// console.error(`${name}:${line + 1}:${character + 1}:${failure.failure}`);
});
}
const tsl = es.through(function (file) {
const configuration = tslint.Configuration.findConfiguration(null, '.');
const options = { formatter: 'json', rulesDirectory: 'build/lib/tslint' };
const contents = file.contents.toString('utf8');
tsLinter.lint(file.relative, contents, tslintConfiguration.results);
const linter = new tslint.Linter(options);
linter.lint(file.relative, contents, configuration.results);
const result = linter.getResult();
if (result.failures.length > 0) {
reportFailures(result.failures);
errorCount += result.failures.length;
}
this.emit('data', file);
});
let input;
if (Array.isArray(some) || typeof some === 'string' || !some) {
input = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true });
} else {
input = some;
}
const result = input
const result = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(filter(eolFilter))
// {{SQL CARBON EDIT}}
//.pipe(options.skipEOL ? es.through() : eol)
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter));
.pipe(filter(copyrightFilter))
// {{SQL CARBON EDIT}}
// .pipe(copyrights);
//.pipe(copyrights);
const typescript = result
.pipe(filter(tslintFilter))
@@ -263,52 +257,23 @@ function hygiene(some) {
const javascript = result
.pipe(filter(eslintFilter))
.pipe(gulpeslint('src/.eslintrc'))
.pipe(gulpeslint.formatEach('compact'));
// {{SQL CARBON EDIT}}
.pipe(gulpeslint.formatEach('compact'))
// {{SQL CARBON EDIT}}
// .pipe(gulpeslint.failAfterError());
let count = 0;
return es.merge(typescript, javascript)
.pipe(es.through(function (data) {
// {{SQL CARBON EDIT}}
this.emit('end');
}));
}
return es.merge(typescript, javascript)
.pipe(es.through(null, function () {
// {{SQL CARBON EDIT}}
// if (errorCount > 0) {
// this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
// } else {
// this.emit('end');
// }
this.emit('end');
}));
};
function createGitIndexVinyls(paths) {
const cp = require('child_process');
const repositoryPath = process.cwd();
const fns = paths.map(relativePath => () => new Promise((c, e) => {
const fullPath = path.join(repositoryPath, relativePath);
fs.stat(fullPath, (err, stat) => {
if (err && err.code === 'ENOENT') { // ignore deletions
return c(null);
} else if (err) {
return e(err);
}
cp.exec(`git show :${relativePath}`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
if (err) {
return e(err);
}
c(new VinylFile({
path: fullPath,
base: repositoryPath,
contents: out,
stat
}));
});
});
}));
return pall(fns, { concurrency: 4 })
.then(r => r.filter(p => !!p));
}
gulp.task('hygiene', () => hygiene());
gulp.task('hygiene', () => hygiene(''));
// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {
@@ -319,19 +284,22 @@ if (require.main === module) {
process.exit(1);
});
if (process.argv.length > 2) {
hygiene(process.argv.slice(2)).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
} else {
cp.exec('git config core.autocrlf', (err, out) => {
const skipEOL = out.trim() === 'true';
if (process.argv.length > 2) {
return hygiene(process.argv.slice(2), { skipEOL: skipEOL }).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
}
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
return;
}
const some = out
@@ -339,18 +307,12 @@ if (require.main === module) {
.filter(l => !!l);
if (some.length > 0) {
console.log('Reading git index versions...');
createGitIndexVinyls(some)
.then(vinyls => new Promise((c, e) => hygiene(es.readArray(vinyls))
.on('end', () => c())
.on('error', e)))
.catch(err => {
console.error();
console.error(err);
process.exit(1);
});
hygiene(some, { skipEOL: skipEOL }).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
}
});
}
});
}
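The hunks above interleave the old stream-based tslint report with the new programmatic Linter call used by the `tsl` through-stream. As a standalone reference, here is a minimal sketch of that programmatic flow, using the same tslint 5.x calls that appear in the hunk but run against a single made-up file:

    const fs = require('fs');
    const tslint = require('tslint');

    const configuration = tslint.Configuration.findConfiguration(null, '.');
    const options = { fix: false, formatter: 'json', rulesDirectory: 'build/lib/tslint' };

    const fileName = 'src/example.ts';                  // made-up path, for illustration only
    const contents = fs.readFileSync(fileName, 'utf8');

    const linter = new tslint.Linter(options);
    linter.lint(fileName, contents, configuration.results);
    const result = linter.getResult();
    if (result.failures.length > 0) {
        console.error(fileName + ': ' + result.failures.length + ' tslint failure(s)');
    }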

View File

@@ -6,21 +6,9 @@
'use strict';
const gulp = require('gulp');
const json = require('gulp-json-editor');
const buffer = require('gulp-buffer');
const filter = require('gulp-filter');
const es = require('event-stream');
const util = require('./lib/util');
const remote = require('gulp-remote-src');
const zip = require('gulp-vinyl-zip');
const assign = require('object-assign');
// {{SQL CARBON EDIT}}
const jeditor = require('gulp-json-editor');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json');
gulp.task('mixin', function () {
// {{SQL CARBON EDIT}}
const updateUrl = process.env['SQLOPS_UPDATEURL'];
@@ -36,17 +24,10 @@ gulp.task('mixin', function () {
return;
}
// {{SQL CARBON EDIT}}
let serviceUrl = 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json';
if (quality === 'insider') {
serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
}
// {{SQL CARBON EDIT}}
let newValues = {
"updateUrl": updateUrl,
"quality": quality,
"extensionsGallery": {
"serviceUrl": serviceUrl
}
"quality": quality
};
return gulp.src('./product.json')
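The hunk is cut off after the `gulp.src('./product.json')` call, so the write-back is not shown. A plausible continuation, assuming gulp-json-editor's object-merge form (`json` is the gulp-json-editor import at the top of this file) and a made-up destination, would be:

    return gulp.src('./product.json')
        .pipe(json(newValues))   // gulp-json-editor deep-merges newValues into product.json
        .pipe(gulp.dest('.'));   // destination is an assumption; it is not visible in this hunk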

View File

@@ -13,6 +13,14 @@ const filter = require('gulp-filter');
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
gulp.task('clean-extensions-modules', util.rimraf('extensions-modules/node_modules'));
gulp.task('clean-protocol', ['clean-extensions-modules', 'clean-mssql-extension', 'clean-credentials-extension', 'clean-client', 'clean-jsonrpc', 'clean-server', 'clean-types']);
// Tasks to clean extensions modules
gulp.task('clean-mssql-ext-mod', util.rimraf('extensions/mssql/node_modules/extensions-modules'));
gulp.task('clean-credentials-ext-mod', util.rimraf('extensions/credentials/node_modules/extensions-modules'));
gulp.task('clean-build-ext-mod', util.rimraf('build/node_modules/extensions-modules'));
gulp.task('clean-ext-mod', ['clean-mssql-ext-mod', 'clean-credentials-ext-mod', 'clean-build-ext-mod', 'clean-extensions-modules']);
gulp.task('fmt', () => formatStagedFiles());
const formatFiles = (some) => {

View File

@@ -27,22 +27,17 @@ const common = require('./lib/optimize');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const packageJson = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
// {{SQL CARBON EDIT}}
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
const serviceInstaller = require('../extensions-modules/lib/languageservice/serviceInstallerUtil');
const glob = require('glob');
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar;
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// @ts-ignore
// {{SQL CARBON EDIT}}
var del = require('del');
const extensionsRoot = path.join(root, 'extensions');
@@ -60,26 +55,16 @@ const nodeModules = [
.concat(_.uniq(productionDependencies.map(d => d.name)))
.concat(baseModules);
// Build
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('./builtInExtensions.json');
const builtInExtensions = [
{ name: 'ms-vscode.node-debug', version: '1.19.8' },
{ name: 'ms-vscode.node-debug2', version: '1.19.4' }
];
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
];
// {{SQL CARBON EDIT}}
const vsce = require('vsce');
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
'agent',
'import',
'profiler'
'vscode-colorize-tests'
];
const vscodeEntryPoints = _.flatten([
@@ -92,28 +77,25 @@ const vscodeEntryPoints = _.flatten([
const vscodeResources = [
'out-build/main.js',
'out-build/cli.js',
'out-build/driver.js',
'out-build/bootstrap.js',
'out-build/bootstrap-amd.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,cur,html}',
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,ps-win.ps1}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/electron-browser/bootstrap/**',
'out-build/vs/workbench/parts/debug/**/*.json',
'out-build/vs/workbench/parts/execution/**/*.scpt',
'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js',
'out-build/vs/workbench/parts/html/browser/webview-pre.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/parts/tasks/**/*.json',
'out-build/vs/workbench/parts/terminal/electron-browser/terminalProcess.js',
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/services/files/**/*.exe',
'out-build/vs/workbench/services/files/**/*.md',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js',
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
'out-build/vs/code/electron-browser/sharedProcess.js',
// {{SQL CARBON EDIT}}
'out-build/sql/workbench/electron-browser/splashscreen/*',
'out-build/sql/**/*.{svg,png,cur,html}',
@@ -131,7 +113,6 @@ const vscodeResources = [
'out-build/sql/parts/grid/views/**/*.html',
'out-build/sql/parts/tasks/**/*.html',
'out-build/sql/parts/taskHistory/viewlet/media/**',
'out-build/sql/parts/jobManagement/common/media/*.svg',
'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg',
'!**/test/**'
@@ -143,7 +124,10 @@ const BUNDLED_FILE_HEADER = [
' *--------------------------------------------------------*/'
].join('\n');
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
var languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
if (process.env.VSCODE_QUALITY !== 'stable') {
languages = languages.concat(['ptb', 'hun', 'trk']); // Add languages requested by the community to non-stable builds
}
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
@@ -153,8 +137,7 @@ gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compil
loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER,
out: 'out-vscode',
languages: languages,
bundleInfo: undefined
languages: languages
}));
@@ -176,7 +159,7 @@ const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2018 Microsoft. All rights reserved',
copyright: 'Copyright (C) 2017 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
@@ -195,7 +178,7 @@ const config = {
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
darwinCredits: darwinCreditsTemplate ? new Buffer(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
@@ -259,27 +242,6 @@ function computeChecksum(filename) {
return hash;
}
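Only the tail of computeChecksum survives in this hunk. Purely as a hedged sketch (the hash algorithm, digest encoding, and synchronous read are all assumptions, not taken from the collapsed body), a function of this shape could look like:

    // Sketch only - algorithm and encoding are assumed; the real body is collapsed above.
    function computeChecksumSketch(filename) {
        const contents = fs.readFileSync(filename);
        const hash = crypto.createHash('md5').update(contents).digest('base64');
        return hash;
    }

crypto is required near the top of this gulpfile (shown earlier in the hunk); fs is assumed to be in scope.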
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
}
function packageTask(platform, arch, opts) {
opts = opts || {};
@@ -309,10 +271,7 @@ function packageTask(platform, arch, opts) {
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
packageBuiltInExtensions();
.filter(({ name }) => builtInExtensions.every(b => b.name !== name));
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
const nlsFilter = filter('**/*.nls.json', { restore: true });
@@ -334,12 +293,16 @@ function packageTask(platform, arch, opts) {
const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util.cleanNodeModule('dataprotocol-client', ['node_modules/**', 'src/*.js'], undefined))
.pipe(util.cleanNodeModule('extensions-modules', ['node_modules/**', 'src/*.js'], undefined))
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
const sources = es.merge(src, localExtensions, localExtensionDependencies)
.pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
.pipe(filter(['**',
'!**/*.js.map',
'!extensions/**/node_modules/**/{test, tests}/**',
'!extensions/**/node_modules/**/test.js']));
let version = packageJson.version;
const quality = product.quality;
@@ -352,7 +315,7 @@ function packageTask(platform, arch, opts) {
const packageJsonStream = gulp.src(['package.json'], { base: '.' })
.pipe(json({ name, version }));
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
const settingsSearchBuildId = getBuildNumber();
const date = new Date().toISOString();
const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json({ commit, date, checksums, settingsSearchBuildId }));
@@ -363,7 +326,7 @@ function packageTask(platform, arch, opts) {
// TODO the API should be copied to `out` during compile, not here
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
// {{SQL CARBON EDIT}}
// {{SQL CARBON EDIT}}
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
const depsSrc = [
@@ -377,7 +340,6 @@ function packageTask(platform, arch, opts) {
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
@@ -385,28 +347,18 @@ function packageTask(platform, arch, opts) {
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
// {{SQL CARBON EDIT}}
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.node', 'build/Release/*.dll', 'build/Release/*.exe']))
.pipe(util.cleanNodeModule('chart.js', ['node_modules/**'], undefined))
.pipe(util.cleanNodeModule('emmet', ['node_modules/**'], undefined))
.pipe(util.cleanNodeModule('pty.js', ['build/**'], ['build/Release/**']))
.pipe(util.cleanNodeModule('jquery-ui', ['external/**', 'demos/**'], undefined))
.pipe(util.cleanNodeModule('core-js', ['**/**'], undefined))
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
// {{SQL CARBON EDIT}}
let copiedModules = gulp.src([
'node_modules/jquery/**/*.*',
'node_modules/reflect-metadata/**/*.*',
'node_modules/slickgrid/**/*.*',
'node_modules/underscore/**/*.*',
'node_modules/zone.js/**/*.*',
'node_modules/chart.js/**/*.*'
], { base: '.', dot: true });
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']));
let all = es.merge(
packageJsonStream,
@@ -414,8 +366,7 @@ function packageTask(platform, arch, opts) {
license,
watermark,
api,
// {{SQL CARBON EDIT}}
copiedModules,
// {{SQL CARBON EDIT}}
dataApi,
sources,
deps
@@ -486,21 +437,25 @@ gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], p
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
// Transifex Localizations
const innoSetupConfig = {
'zh-cn': { codePage: 'CP936', defaultInfo: { name: 'Simplified Chinese', id: '$0804', } },
'zh-tw': { codePage: 'CP950', defaultInfo: { name: 'Traditional Chinese', id: '$0404' } },
'ko': { codePage: 'CP949', defaultInfo: { name: 'Korean', id: '$0412' } },
'ja': { codePage: 'CP932' },
'de': { codePage: 'CP1252' },
'fr': { codePage: 'CP1252' },
'es': { codePage: 'CP1252' },
'ru': { codePage: 'CP1251' },
'it': { codePage: 'CP1252' },
'pt-br': { codePage: 'CP1252' },
'hu': { codePage: 'CP1250' },
'tr': { codePage: 'CP1254' }
};
const vscodeLanguages = [
'zh-hans',
'zh-hant',
'ja',
'ko',
'de',
'fr',
'es',
'ru',
'it',
'pt-br',
'hu',
'tr'
];
const setupDefaultLanguages = [
'zh-hans',
'zh-hant',
'ko'
];
const apiHostname = process.env.TRANSIFEX_API_URL;
const apiName = process.env.TRANSIFEX_API_NAME;
@@ -508,50 +463,27 @@ const apiToken = process.env.TRANSIFEX_API_TOKEN;
gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToExtensions = './extensions/**/*.nls.json';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
gulp.src(pathToMetadata).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToSetup).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToExtensions).pipe(i18n.prepareXlfFiles('vscode-extensions'))
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
});
gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(vfs.dest('../vscode-transifex-input'));
});
gulp.task('vscode-translations-pull', function () {
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`));
let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`));
});
return es.merge(
i18n.pullXlfFiles('vscode-editor', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-workbench', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-extensions', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-setup', apiHostname, apiName, apiToken, setupDefaultLanguages)
).pipe(vfs.dest('../vscode-localization'));
});
gulp.task('vscode-translations-import', function () {
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
.pipe(i18n.prepareI18nFiles())
.pipe(vfs.dest(`./i18n/${language.folderName}`));
// {{SQL CARBON EDIT}}
// gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
// .pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
// .pipe(vfs.dest(`./build/win32/i18n`));
});
return gulp.src('../vscode-localization/**/*.xlf').pipe(i18n.prepareJsonFiles()).pipe(vfs.dest('./i18n'));
});
// Sourcemaps
@@ -577,19 +509,20 @@ gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => {
const branch = process.env.BUILD_SOURCEBRANCH;
if (!/\/master$/.test(branch) && branch.indexOf('/release/') < 0) {
if (!branch.endsWith('/master') && branch.indexOf('/release/') < 0) {
console.log(`Only runs on master and release branches, not ${branch}`);
return;
}
if (!fs.existsSync(allConfigDetailsPath)) {
throw new Error(`configuration file at ${allConfigDetailsPath} does not exist`);
console.error(`configuration file at ${allConfigDetailsPath} does not exist`);
return;
}
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
const settingsSearchBuildId = getBuildNumber();
if (!settingsSearchBuildId) {
throw new Error('Failed to compute build number');
console.error('Failed to compute build number');
return;
}
return gulp.src(allConfigDetailsPath)
@@ -601,19 +534,76 @@ gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () =
}));
});
function getSettingsSearchBuildId(packageJson) {
const previous = util.getPreviousVersion(packageJson.version);
function getBuildNumber() {
const previous = getPreviousVersion(packageJson.version);
if (!previous) {
return 0;
}
try {
const out = cp.execSync(`git rev-list ${previous}..HEAD --count`);
const count = parseInt(out.toString());
return util.versionStringToNumber(packageJson.version) * 1e4 + count;
return versionStringToNumber(packageJson.version) * 1e4 + count;
} catch (e) {
throw new Error('Could not determine build number: ' + e.toString());
console.error('Could not determine build number: ' + e.toString());
return 0;
}
}
// This task is only run for the MacOS build
/**
* Given 1.17.2, return 1.17.1
* 1.18.0 => 1.17.2.
* 2.0.0 => 1.18.0 (or the highest 1.x)
*/
function getPreviousVersion(versionStr) {
function tagExists(tagName) {
try {
cp.execSync(`git rev-parse ${tagName}`, { stdio: 'ignore' });
return true;
} catch (e) {
return false;
}
}
function getLastTagFromBase(semverArr, componentToTest) {
const baseVersion = semverArr.join('.');
if (!tagExists(baseVersion)) {
console.error('Failed to find tag for base version, ' + baseVersion);
return null;
}
let goodTag;
do {
goodTag = semverArr.join('.');
semverArr[componentToTest]++;
} while (tagExists(semverArr.join('.')));
return goodTag;
}
const semverArr = versionStr.split('.');
if (semverArr[2] > 0) {
semverArr[2]--;
return semverArr.join('.');
} else if (semverArr[1] > 0) {
semverArr[1]--;
return getLastTagFromBase(semverArr, 2);
} else {
semverArr[0]--;
return getLastTagFromBase(semverArr, 1);
}
}
function versionStringToNumber(versionStr) {
const semverRegex = /(\d+)\.(\d+)\.(\d+)/;
const match = versionStr.match(semverRegex);
if (!match) {
return 0;
}
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
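A worked example of the arithmetic in getBuildNumber and versionStringToNumber above (the commit count of 250 is made up):

    versionStringToNumber('1.17.2');   // 1 * 1e4 + 17 * 1e2 + 2  = 11702
    // getBuildNumber() with 250 commits since the previous tag:
    // 11702 * 1e4 + 250              = 117020250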
gulp.task('generate-vscode-configuration', () => {
return new Promise((resolve, reject) => {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
@@ -623,8 +613,7 @@ gulp.task('generate-vscode-configuration', () => {
const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
const appPath = path.join(buildDir, `VSCode-darwin/${appName}/Contents/Resources/app/bin/code`);
const appPath = path.join(buildDir, 'VSCode-darwin/Visual\\ Studio\\ Code\\ -\\ Insiders.app/Contents/Resources/app/bin/code');
const codeProc = cp.exec(`${appPath} --export-default-configuration='${allConfigDetailsPath}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`);
const timer = setTimeout(() => {
@@ -644,31 +633,36 @@ gulp.task('generate-vscode-configuration', () => {
clearTimeout(timer);
reject(err);
});
}).catch(e => {
// Don't fail the build
console.error(e.toString());
});
});
// {{SQL CARBON EDIT}}
// Install service locally before building carbon
function installService() {
let config = require('../extensions/mssql/src/config.json');
return platformInfo.getCurrent().then(p => {
let runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
var installer = new serviceDownloader(config);
let serviceInstallFolder = installer.getInstallDirectory(runtime);
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
return del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
function installService(extObj, path) {
var installer = new serviceInstaller.ServiceInstaller(extObj, path);
installer.getServiceInstallDirectoryRoot().then(serviceInstallFolder => {
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
installer.installService();
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
}, getFolderPathError => {
console.log('failed to call getServiceInstallDirectoryRoot error: ' + getFolderPathError);
});
}
gulp.task('install-sqltoolsservice', () => {
return installService();
var mssqlExt = require('../extensions/mssql/client/out/models/constants');
var extObj = new mssqlExt.Constants();
var path = '../extensions/mssql/client/out/config.json';
return installService(extObj, path);
});

View File

@@ -12,15 +12,35 @@ const shell = require('gulp-shell');
const es = require('event-stream');
const vfs = require('vinyl-fs');
const util = require('./lib/util');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const packageJson = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
const rpmDependencies = require('../resources/linux/rpm/dependencies');
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
const flatpakManifest = {
appId: product.darwinBundleIdentifier, // We need a reverse-url style identifier.
sdk: 'org.freedesktop.Sdk',
runtime: 'org.freedesktop.Sdk',
runtimeVersion: '1.4',
base: 'io.atom.electron.BaseApp',
baseFlatpakref: 'https://s3-us-west-2.amazonaws.com/electron-flatpak.endlessm.com/electron-base-app-master.flatpakref',
command: product.applicationName,
symlinks: [
['/share/' + product.applicationName + '/bin/' + product.applicationName, '/bin/' + product.applicationName],
],
finishArgs: [
'--share=ipc', '--socket=x11', // Allow showing X11 windows.
'--share=network', // Network access (e.g. for installing extension).
'--filesystem=host', // Allow access to the whole file system.
'--device=dri', // Allow OpenGL rendering.
'--filesystem=/tmp', // Needed for Chromium's single instance check.
'--socket=pulseaudio', // Some extensions may want to play sounds...
'--talk-name=org.freedesktop.Notifications', // ...or pop up notifications.
],
};
function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
}
@@ -91,7 +111,8 @@ function buildDebPackage(arch) {
return shell.task([
'chmod 755 ' + product.applicationName + '-' + debArch + '/DEBIAN/postinst ' + product.applicationName + '-' + debArch + '/DEBIAN/prerm ' + product.applicationName + '-' + debArch + '/DEBIAN/postrm',
'mkdir -p deb',
'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb'
'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb',
'dpkg-scanpackages deb /dev/null > Packages'
], { cwd: '.build/linux/deb/' + debArch });
}
@@ -199,10 +220,10 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);
const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
return shell.task([
`chmod +x ${snapBuildPath}/electron-launch`,
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}`
`cd ${snapBuildPath} && snapcraft snap`
]);
}
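For illustration, with made-up values plugged into the template above:

    // Assuming product.applicationName = 'sqlops', packageJson.version = '0.26.1',
    // arch = 'x64' and linuxPackageRevision = 1518652800 (epoch seconds at build time,
    // per the definition near the top of this file), snapFilename expands to:
    //   sqlops-0.26.1-1518652800-x64.snap
    // which `snapcraft snap --output ../<snapFilename>` writes one level above the snap build path.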
@@ -313,3 +334,10 @@ gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prep
gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32'));
gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64'));
gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm'));
gulp.task('vscode-linux-ia32-prepare-flatpak', ['clean-vscode-linux-ia32-flatpak'], prepareFlatpak('ia32'));
gulp.task('vscode-linux-x64-prepare-flatpak', ['clean-vscode-linux-x64-flatpak'], prepareFlatpak('x64'));
gulp.task('vscode-linux-arm-prepare-flatpak', ['clean-vscode-linux-arm-flatpak'], prepareFlatpak('arm'));
gulp.task('vscode-linux-ia32-flatpak', ['vscode-linux-ia32-prepare-flatpak'], buildFlatpak('ia32'));
gulp.task('vscode-linux-x64-flatpak', ['vscode-linux-x64-prepare-flatpak'], buildFlatpak('x64'));
gulp.task('vscode-linux-arm-flatpak', ['vscode-linux-arm-prepare-flatpak'], buildFlatpak('arm'));

View File

@@ -11,11 +11,8 @@ const assert = require('assert');
const cp = require('child_process');
const _7z = require('7zip')['7z'];
const util = require('./lib/util');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json');
const vfs = require('vinyl-fs');
const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}}
@@ -94,13 +91,3 @@ gulp.task('vscode-win32-ia32-archive', ['clean-vscode-win32-ia32-archive'], arch
gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64')));
gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64'));
function copyInnoUpdater(arch) {
return () => {
return gulp.src('build/win32/{inno_updater.exe,vcruntime140.dll}', { base: 'build/win32' })
.pipe(vfs.dest(path.join(buildPath(arch), 'tools')));
};
}
gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));

View File

@@ -1,118 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var path = require("path");
var es = require("event-stream");
var pickle = require("chromium-pickle-js");
var Filesystem = require("asar/lib/filesystem");
var VinylFile = require("vinyl");
var minimatch = require("minimatch");
function createAsar(folderPath, unpackGlobs, destFilename) {
var shouldUnpackFile = function (file) {
for (var i = 0; i < unpackGlobs.length; i++) {
if (minimatch(file.relative, unpackGlobs[i])) {
return true;
}
}
return false;
};
var filesystem = new Filesystem(folderPath);
var out = [];
// Keep track of pending inserts
var pendingInserts = 0;
var onFileInserted = function () { pendingInserts--; };
// Do not insert twice the same directory
var seenDir = {};
var insertDirectoryRecursive = function (dir) {
if (seenDir[dir]) {
return;
}
var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (lastSlash !== -1) {
insertDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
filesystem.insertDirectory(dir);
};
var insertDirectoryForFile = function (file) {
var lastSlash = file.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = file.lastIndexOf('\\');
}
if (lastSlash !== -1) {
insertDirectoryRecursive(file.substring(0, lastSlash));
}
};
var insertFile = function (relativePath, stat, shouldUnpack) {
insertDirectoryForFile(relativePath);
pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
};
return es.through(function (file) {
if (file.stat.isDirectory()) {
return;
}
if (!file.stat.isFile()) {
throw new Error("unknown item in stream!");
}
var shouldUnpack = shouldUnpackFile(file);
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
if (shouldUnpack) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
var relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: path.join(destFilename + '.unpacked', relative),
stat: file.stat,
contents: file.contents
}));
}
else {
// The file goes inside of xx.asar
out.push(file.contents);
}
}, function () {
var _this = this;
var finish = function () {
{
var headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));
var headerBuf = headerPickle.toBuffer();
var sizePickle = pickle.createEmpty();
sizePickle.writeUInt32(headerBuf.length);
var sizeBuf = sizePickle.toBuffer();
out.unshift(headerBuf);
out.unshift(sizeBuf);
}
var contents = Buffer.concat(out);
out.length = 0;
_this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: destFilename,
contents: contents
}));
_this.queue(null);
};
// Call finish() only when all file inserts have finished...
if (pendingInserts === 0) {
finish();
}
else {
onFileInserted = function () {
pendingInserts--;
if (pendingInserts === 0) {
finish();
}
};
}
});
}
exports.createAsar = createAsar;
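The createAsar helper defined in this file (build/lib/asar) is the one piped into the node_modules stream in the gulpfile.vscode.js hunk earlier in this compare. A minimal usage sketch follows; the source glob, task name, and output folder are simplified stand-ins, while the createAsar arguments are the ones shown in that hunk:

    const gulp = require('gulp');
    const path = require('path');
    const createAsar = require('./lib/asar').createAsar;

    gulp.task('asar-sketch', () => {
        return gulp.src('node_modules/**', { base: '.', dot: true })
            .pipe(createAsar(path.join(process.cwd(), 'node_modules'),
                ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'],
                'app/node_modules.asar'))
            .pipe(gulp.dest('out-asar-sketch')); // made-up output folder
    });

Files matching an unpack glob (native *.node binaries, the ripgrep and node-pty executables) are re-emitted under app/node_modules.asar.unpacked; everything else is concatenated into the single app/node_modules.asar archive.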

View File

@@ -1,131 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as es from 'event-stream';
import * as pickle from 'chromium-pickle-js';
import * as Filesystem from 'asar/lib/filesystem';
import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch';
export function createAsar(folderPath: string, unpackGlobs: string[], destFilename: string): NodeJS.ReadWriteStream {
const shouldUnpackFile = (file: VinylFile): boolean => {
for (let i = 0; i < unpackGlobs.length; i++) {
if (minimatch(file.relative, unpackGlobs[i])) {
return true;
}
}
return false;
};
const filesystem = new Filesystem(folderPath);
const out: Buffer[] = [];
// Keep track of pending inserts
let pendingInserts = 0;
let onFileInserted = () => { pendingInserts--; };
// Do not insert twice the same directory
const seenDir: { [key: string]: boolean; } = {};
const insertDirectoryRecursive = (dir: string) => {
if (seenDir[dir]) {
return;
}
let lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (lastSlash !== -1) {
insertDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
filesystem.insertDirectory(dir);
};
const insertDirectoryForFile = (file: string) => {
let lastSlash = file.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = file.lastIndexOf('\\');
}
if (lastSlash !== -1) {
insertDirectoryRecursive(file.substring(0, lastSlash));
}
};
const insertFile = (relativePath: string, stat: { size: number; mode: number; }, shouldUnpack: boolean) => {
insertDirectoryForFile(relativePath);
pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
};
return es.through(function (file) {
if (file.stat.isDirectory()) {
return;
}
if (!file.stat.isFile()) {
throw new Error(`unknown item in stream!`);
}
const shouldUnpack = shouldUnpackFile(file);
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
if (shouldUnpack) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: path.join(destFilename + '.unpacked', relative),
stat: file.stat,
contents: file.contents
}));
} else {
// The file goes inside of xx.asar
out.push(file.contents);
}
}, function () {
let finish = () => {
{
const headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));
const headerBuf = headerPickle.toBuffer();
const sizePickle = pickle.createEmpty();
sizePickle.writeUInt32(headerBuf.length);
const sizeBuf = sizePickle.toBuffer();
out.unshift(headerBuf);
out.unshift(sizeBuf);
}
const contents = Buffer.concat(out);
out.length = 0;
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: destFilename,
contents: contents
}));
this.queue(null);
};
// Call finish() only when all file inserts have finished...
if (pendingInserts === 0) {
finish();
} else {
onFileInserted = () => {
pendingInserts--;
if (pendingInserts === 0) {
finish();
}
};
}
});
}

View File

@@ -1,122 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const fs = require('fs');
const path = require('path');
const os = require('os');
const mkdirp = require('mkdirp');
const rimraf = require('rimraf');
const es = require('event-stream');
const rename = require('gulp-rename');
const vfs = require('vinyl-fs');
const ext = require('./extensions');
const util = require('gulp-util');
const root = path.dirname(path.dirname(__dirname));
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('../builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
function getExtensionPath(extension) {
return path.join(root, '.build', 'builtInExtensions', extension.name);
}
function isUpToDate(extension) {
const packagePath = path.join(getExtensionPath(extension), 'package.json');
if (!fs.existsSync(packagePath)) {
return false;
}
const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' });
try {
const diskVersion = JSON.parse(packageContents).version;
return (diskVersion === extension.version);
} catch (err) {
return false;
}
}
function syncMarketplaceExtension(extension) {
if (isUpToDate(extension)) {
util.log(util.colors.blue('[marketplace]'), `${extension.name}@${extension.version}`, util.colors.green('✔︎'));
return es.readArray([]);
}
rimraf.sync(getExtensionPath(extension));
return ext.fromMarketplace(extension.name, extension.version)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));
}
function syncExtension(extension, controlState) {
switch (controlState) {
case 'disabled':
util.log(util.colors.blue('[disabled]'), util.colors.gray(extension.name));
return es.readArray([]);
case 'marketplace':
return syncMarketplaceExtension(extension);
default:
if (!fs.existsSync(controlState)) {
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
return es.readArray([]);
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
return es.readArray([]);
}
util.log(util.colors.blue('[local]'), `${extension.name}: ${util.colors.cyan(controlState)}`, util.colors.green('✔︎'));
return es.readArray([]);
}
}
function readControlFile() {
try {
return JSON.parse(fs.readFileSync(controlFilePath, 'utf8'));
} catch (err) {
return {};
}
}
function writeControlFile(control) {
mkdirp.sync(path.dirname(controlFilePath));
fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
}
function main() {
util.log('Syncronizing built-in extensions...');
util.log(`You can manage built-in extensions with the ${util.colors.cyan('--builtin')} flag`);
const control = readControlFile();
const streams = [];
for (const extension of builtInExtensions) {
let controlState = control[extension.name] || 'marketplace';
control[extension.name] = controlState;
streams.push(syncExtension(extension, controlState));
}
writeControlFile(control);
es.merge(streams)
.on('error', err => {
console.error(err);
process.exit(1);
})
.on('end', () => {
process.exit(0);
});
}
main();
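The control file read above lives at ~/.vscode-oss-dev/extensions/control.json and maps each built-in extension to 'marketplace', 'disabled', or an absolute path to a local checkout. Example contents (the local path is made up; the extension names are taken from the builtInExtensions list in the gulpfile.vscode.js hunk earlier in this compare):

    {
        "ms-vscode.node-debug": "marketplace",
        "ms-vscode.node-debug2": "/home/me/src/vscode-node-debug2"
    }

Extensions missing from the file default to 'marketplace' and are written back by writeControlFile, so the file maintains itself after the first run.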

View File

@@ -217,7 +217,6 @@ function removeDuplicateTSBoilerplate(destFiles) {
{ start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ },
];
destFiles.forEach(function (destFile) {
var SEEN_BOILERPLATE = [];

View File

@@ -44,11 +44,11 @@ interface ILoaderPluginReqFunc {
export interface IEntryPoint {
name: string;
include?: string[];
exclude?: string[];
include: string[];
exclude: string[];
prepend: string[];
append?: string[];
dest?: string;
append: string[];
dest: string;
}
interface IEntryPointMap {
@@ -339,7 +339,6 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
{ start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ },
];
destFiles.forEach((destFile) => {

View File

@@ -22,12 +22,8 @@ var rootDir = path.join(__dirname, '../../src');
var options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
function createCompile(build, emitError) {
var opts = _.clone(options);
opts.inlineSources = !!build;
@@ -62,13 +58,9 @@ function compileTask(out, build) {
return function () {
var compile = createCompile(build, true);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return src
.pipe(compile())
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, false));
};
}
@@ -78,19 +70,54 @@ function watchTask(out, build) {
var compile = createCompile(build);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
var watchSrc = watch('src/**', { base: 'src' });
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return watchSrc
.pipe(util.incremental(compile, src, true))
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
};
}
exports.watchTask = watchTask;
function reloadTypeScriptNodeModule() {
var util = require('gulp-util');
function log(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log.apply(util, [util.colors.cyan('[memory watch dog]'), message].concat(rest));
}
function heapUsed() {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out, isWatch) {
var basePath = path.resolve(process.cwd(), out);
var neededFiles = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
@@ -133,7 +160,7 @@ function monacodtsTask(out, isWatch) {
}));
}
resultStream = es.through(function (data) {
var filePath = path.normalize(path.resolve(basePath, data.relative));
var filePath = path.normalize(data.path);
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}

View File

@@ -25,12 +25,8 @@ const rootDir = path.join(__dirname, '../../src');
const options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
function createCompile(build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(options);
@@ -53,6 +49,7 @@ function createCompile(build: boolean, emitError?: boolean): (token?: util.ICanc
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
// .pipe(build ? reloadTypeScriptNodeModule() : es.through())
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
@@ -78,14 +75,9 @@ export function compileTask(out: string, build: boolean): () => NodeJS.ReadWrite
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return src
.pipe(compile())
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, false));
};
}
@@ -101,21 +93,61 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
);
const watchSrc = watch('src/**', { base: 'src' });
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return watchSrc
.pipe(util.incremental(compile, src, true))
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
};
}
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
function reloadTypeScriptNodeModule(): NodeJS.ReadWriteStream {
var util = require('gulp-util');
function log(message: any, ...rest: any[]): void {
util.log(util.colors.cyan('[memory watch dog]'), message, ...rest);
}
const basePath = path.resolve(process.cwd(), out);
function heapUsed(): string {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
const neededFiles: { [file: string]: boolean; } = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
@@ -164,7 +196,7 @@ function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
}
resultStream = es.through(function (data) {
const filePath = path.normalize(path.resolve(basePath, data.relative));
const filePath = path.normalize(data.path);
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}

File diff suppressed because it is too large

View File

@@ -46,6 +46,10 @@
"name": "vs/workbench/parts/execution",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/explorers",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/extensions",
"project": "vscode-workbench"
@@ -67,11 +71,7 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/localizations",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/logs",
"name": "vs/workbench/parts/nps",
"project": "vscode-workbench"
},
{
@@ -130,42 +130,22 @@
"name": "vs/workbench/parts/update",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/url",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/watermark",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/webview",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/welcome",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/configuration",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/configurationResolver",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/crashReporter",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/dialogs",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/editor",
"project": "vscode-workbench"
@@ -174,10 +154,6 @@
"name": "vs/workbench/services/extensions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/jsonschemas",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/files",
"project": "vscode-workbench"
@@ -186,6 +162,10 @@
"name": "vs/workbench/services/keybinding",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/message",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/mode",
"project": "vscode-workbench"
@@ -215,8 +195,8 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/preferences",
"project": "vscode-preferences"
"name": "setup_messages",
"project": "vscode-workbench"
}
]
}
}

File diff suppressed because it is too large

View File

@@ -79,7 +79,7 @@ function isImportNode(node) {
function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; }
return new File({
contents: Buffer.from(contents),
contents: new Buffer(contents),
base: file.base,
cwd: file.cwd,
path: path
@@ -150,16 +150,13 @@ function isImportNode(node) {
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
.map(function (d) { return d.importClause.namedBindings.name; })
.concat(importEqualsDeclarations.map(function (d) { return d.name; }))
// find read-only references to `nls`
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; })
// find the deepest call expressions AST nodes that contain those references
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; })
// only `localize` calls
.filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
// `localize` named imports
var allLocalizeImportDeclarations = importDeclarations

View File

@@ -131,7 +131,7 @@ module nls {
export function fileFrom(file: File, contents: string, path: string = file.path) {
return new File({
contents: Buffer.from(contents),
contents: new Buffer(contents),
base: file.base,
cwd: file.cwd,
path: path

View File

@@ -59,7 +59,7 @@ function loader(bundledFileHeader, bundleLoader) {
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: Buffer.from(bundledFileHeader)
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
}
@@ -98,7 +98,7 @@ function toConcatStream(bundledFileHeader, sources, dest) {
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
contents: Buffer.from(source.contents)
contents: new Buffer(source.contents)
});
});
return es.readArray(treatedSources)
@@ -141,7 +141,7 @@ function optimizeTask(opts) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
base: '.',
contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
}));
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
@@ -174,6 +174,7 @@ function optimizeTask(opts) {
};
}
exports.optimizeTask = optimizeTask;
;
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
@@ -236,3 +237,4 @@ function minifyTask(src, sourceMapBaseUrl) {
};
}
exports.minifyTask = minifyTask;
;

View File

@@ -31,7 +31,7 @@ function log(prefix: string, message: string): void {
}
// {{SQL CARBON EDIT}}
export function loaderConfig(emptyPaths?: string[]) {
export function loaderConfig(emptyPaths: string[]) {
const result = {
paths: {
'vs': 'out-build/vs',
@@ -73,7 +73,7 @@ function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWr
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: Buffer.from(bundledFileHeader)
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
} else {
@@ -117,7 +117,7 @@ function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
contents: Buffer.from(source.contents)
contents: new Buffer(source.contents)
});
});
@@ -165,7 +165,7 @@ export interface IOptimizeTaskOpts {
/**
* (languages to process)
*/
languages: i18n.Language[];
languages: string[];
}
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
const entryPoints = opts.entryPoints;
@@ -201,7 +201,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
base: '.',
contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
}));
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
@@ -241,7 +241,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
}))
.pipe(gulp.dest(out));
};
}
};
declare class FileWithCopyright extends VinylFile {
public __hasOurCopyright: boolean;
@@ -295,7 +295,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
return es.duplex(input, output);
}
export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
export function minifyTask(src: string, sourceMapBaseUrl: string): (cb: any) => void {
const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);
return cb => {
@@ -326,4 +326,4 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
cb(err);
});
};
}
};
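For readers less familiar with the gulp plumbing in optimize, the loader shown here prepends the copyright header by pushing a synthetic Vinyl file into the stream ahead of the first real file. A rough standalone sketch of that pattern (the header text and function name are made up; this is not the shipped implementation):

import * as es from 'event-stream';
import * as VinylFile from 'vinyl';

const header = '/* hypothetical copyright header */\n';

// Emit one fake file carrying the header, then pass real files through.
function prependHeader(): NodeJS.ReadWriteStream {
	let emitted = false;
	return es.through(function (data: VinylFile) {
		if (!emitted) {
			emitted = true;
			this.emit('data', new VinylFile({
				path: 'fake',
				base: '',
				contents: Buffer.from(header)
			}));
		}
		this.emit('data', data);
	});
}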

View File

@@ -34,13 +34,7 @@ catch (err) {
}
function log() {
var errors = _.flatten(allErrors);
var seen = new Set();
errors.map(function (err) {
if (!seen.has(err)) {
seen.add(err);
util.log(util.colors.red('Error') + ": " + err);
}
});
errors.map(function (err) { return util.log(util.colors.red('Error') + ": " + err); });
var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
var messages = errors
.map(function (err) { return regex.exec(err); })
@@ -73,13 +67,8 @@ function createReporter() {
return es.through(null, function () {
onEnd();
if (emitError && errors.length > 0) {
errors.__logged__ = true;
if (!errors.__logged__) {
log();
}
var err = new Error("Found " + errors.length + " errors");
err.__reporter__ = true;
this.emit('error', err);
log();
this.emit('error');
}
else {
this.emit('end');
@@ -91,3 +80,4 @@ function createReporter() {
return ReportFunc;
}
exports.createReporter = createReporter;
;

View File

@@ -11,7 +11,7 @@ import * as util from 'gulp-util';
import * as fs from 'fs';
import * as path from 'path';
const allErrors: string[][] = [];
const allErrors: Error[][] = [];
let startTime: number = null;
let count = 0;
@@ -42,14 +42,7 @@ try {
function log(): void {
const errors = _.flatten(allErrors);
const seen = new Set<string>();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
util.log(`${util.colors.red('Error')}: ${err}`);
}
});
errors.map(err => util.log(`${util.colors.red('Error')}: ${err}`));
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors
@@ -68,17 +61,17 @@ function log(): void {
}
export interface IReporter {
(err: string): void;
(err: Error): void;
hasErrors(): boolean;
end(emitError: boolean): NodeJS.ReadWriteStream;
}
export function createReporter(): IReporter {
const errors: string[] = [];
const errors: Error[] = [];
allErrors.push(errors);
class ReportFunc {
constructor(err: string) {
constructor(err: Error) {
errors.push(err);
}
@@ -94,15 +87,8 @@ export function createReporter(): IReporter {
onEnd();
if (emitError && errors.length > 0) {
(errors as any).__logged__ = true;
if (!(errors as any).__logged__) {
log();
}
const err = new Error(`Found ${errors.length} errors`);
(err as any).__reporter__ = true;
this.emit('error', err);
log();
this.emit('error');
} else {
this.emit('end');
}
@@ -111,4 +97,4 @@ export function createReporter(): IReporter {
}
return <IReporter><any>ReportFunc;
}
};
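One side of these reporter hunks deduplicates repeated compiler errors before printing them and attaches the error count to the Error it emits. The deduplication pattern on its own, as a small sketch (the function name is made up):

// Log each distinct message once, preserving first-seen order.
function logUnique(messages: string[], log: (msg: string) => void): void {
	const seen = new Set<string>();
	for (const msg of messages) {
		if (!seen.has(msg)) {
			seen.add(msg);
			log(msg);
		}
	}
}

// logUnique(['error A', 'error A', 'error B'], console.log);
// prints: error A, then error B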

View File

@@ -1,265 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var fs = require("fs");
var path = require("path");
var REPO_ROOT = path.join(__dirname, '../../');
var SRC_DIR = path.join(REPO_ROOT, 'src');
var OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
function createESMSourcesAndResources(options) {
var OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
var OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
var in_queue = Object.create(null);
var queue = [];
var enqueue = function (module) {
if (in_queue[module]) {
return;
}
in_queue[module] = true;
queue.push(module);
};
var seenDir = {};
var createDirectoryRecursive = function (dir) {
if (seenDir[dir]) {
return;
}
var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (lastSlash !== -1) {
createDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
try {
fs.mkdirSync(dir);
}
catch (err) { }
};
seenDir[REPO_ROOT] = true;
var toggleComments = function (fileContents) {
var lines = fileContents.split(/\r\n|\r|\n/);
var mode = 0;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
return lines.join('\n');
};
var write = function (filePath, contents) {
var absoluteFilePath;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
}
else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString());
}
fs.writeFileSync(absoluteFilePath, contents);
};
options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
while (queue.length > 0) {
var module_1 = queue.shift();
if (transportCSS(options, module_1, enqueue, write)) {
continue;
}
if (transportResource(options, module_1, enqueue, write)) {
continue;
}
if (transportDTS(options, module_1, enqueue, write)) {
continue;
}
var filename = void 0;
if (options.redirects[module_1]) {
filename = path.join(SRC_DIR, options.redirects[module_1] + '.ts');
}
else {
filename = path.join(SRC_DIR, module_1 + '.ts');
}
var fileContents = fs.readFileSync(filename).toString();
var info = ts.preProcessFile(fileContents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFilename = info.importedFiles[i].fileName;
var pos = info.importedFiles[i].pos;
var end = info.importedFiles[i].end;
var importedFilepath = void 0;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
}
else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module_1), importedFilepath);
}
enqueue(importedFilepath);
var relativePath = void 0;
if (importedFilepath === path.dirname(module_1)) {
relativePath = '../' + path.basename(path.dirname(module_1));
}
else if (importedFilepath === path.dirname(path.dirname(module_1))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module_1)));
}
else {
relativePath = path.relative(path.dirname(module_1), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1));
}
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return "import * as " + m1 + " from " + m2 + ";";
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module_1 + '.ts', fileContents);
}
var esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": []
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
var monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
}
exports.createESMSourcesAndResources = createESMSourcesAndResources;
function transportCSS(options, module, enqueue, write) {
if (!/\.css/.test(module)) {
return false;
}
var filename = path.join(SRC_DIR, module);
var fileContents = fs.readFileSync(filename).toString();
var inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
var inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
var newContents = _rewriteOrInlineUrls(filename, fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents);
return true;
function _rewriteOrInlineUrls(originalFileFSPath, contents, forceBase64, inlineByteLimit) {
return _replaceURL(contents, function (url) {
var imagePath = path.join(path.dirname(module), url);
var fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) {
var MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
var DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
var newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
var encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
}
enqueue(imagePath);
return url;
});
}
function _replaceURL(contents, replacer) {
// Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, function (_) {
var matches = [];
for (var _i = 1; _i < arguments.length; _i++) {
matches[_i - 1] = arguments[_i];
}
var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1);
}
// The ending whitespace is captured
while (url.length > 0 && (url.charAt(url.length - 1) === ' ' || url.charAt(url.length - 1) === '\t')) {
url = url.substring(0, url.length - 1);
}
// Eliminate ending quotes
if (url.charAt(url.length - 1) === '"' || url.charAt(url.length - 1) === '\'') {
url = url.substring(0, url.length - 1);
}
if (!_startsWith(url, 'data:') && !_startsWith(url, 'http://') && !_startsWith(url, 'https://')) {
url = replacer(url);
}
return 'url(' + url + ')';
});
}
function _startsWith(haystack, needle) {
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
}
}
function transportResource(options, module, enqueue, write) {
if (!/\.svg/.test(module)) {
return false;
}
write(module, fs.readFileSync(path.join(SRC_DIR, module)));
return true;
}
function transportDTS(options, module, enqueue, write) {
if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
return false;
}
if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
return false;
}
write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
var filename;
if (options.redirects[module]) {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
}
else {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
}
return true;
}
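The standalone.js file above revolves around the // ESM-comment-begin / // ESM-uncomment-begin marker convention: when the ESM sources are generated, AMD-only regions are commented out and ESM-only regions are uncommented. A condensed sketch of that transformation (not the shipped implementation, and it ignores mismatched markers):

// Comment out lines between ESM-comment markers; strip the leading "// "
// from lines between ESM-uncomment markers.
function toggleEsmComments(source: string): string {
	let mode: 'normal' | 'comment' | 'uncomment' = 'normal';
	return source.split(/\r\n|\r|\n/).map(line => {
		if (/\/\/ ESM-comment-begin/.test(line)) { mode = 'comment'; return line; }
		if (/\/\/ ESM-comment-end/.test(line)) { mode = 'normal'; return line; }
		if (/\/\/ ESM-uncomment-begin/.test(line)) { mode = 'uncomment'; return line; }
		if (/\/\/ ESM-uncomment-end/.test(line)) { mode = 'normal'; return line; }
		if (mode === 'comment') { return '// ' + line; }
		if (mode === 'uncomment') { return line.replace(/^(\s*)\/\/ ?/, '$1'); }
		return line;
	}).join('\n');
}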

View File

@@ -1,305 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as fs from 'fs';
import * as path from 'path';
const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
const OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
export interface IOptions {
entryPoints: string[];
outFolder: string;
outResourcesFolder: string;
redirects: { [module: string]: string; };
}
export function createESMSourcesAndResources(options: IOptions): void {
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
let in_queue: { [module: string]: boolean; } = Object.create(null);
let queue: string[] = [];
const enqueue = (module: string) => {
if (in_queue[module]) {
return;
}
in_queue[module] = true;
queue.push(module);
};
const seenDir: { [key: string]: boolean; } = {};
const createDirectoryRecursive = (dir: string) => {
if (seenDir[dir]) {
return;
}
let lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (lastSlash !== -1) {
createDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
try { fs.mkdirSync(dir); } catch (err) { }
};
seenDir[REPO_ROOT] = true;
const toggleComments = (fileContents: string) => {
let lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
return lines.join('\n');
};
const write = (filePath: string, contents: string | Buffer) => {
let absoluteFilePath: string;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
} else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString());
}
fs.writeFileSync(absoluteFilePath, contents);
};
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
while (queue.length > 0) {
const module = queue.shift();
if (transportCSS(options, module, enqueue, write)) {
continue;
}
if (transportResource(options, module, enqueue, write)) {
continue;
}
if (transportDTS(options, module, enqueue, write)) {
continue;
}
let filename: string;
if (options.redirects[module]) {
filename = path.join(SRC_DIR, options.redirects[module] + '.ts');
} else {
filename = path.join(SRC_DIR, module + '.ts');
}
let fileContents = fs.readFileSync(filename).toString();
const info = ts.preProcessFile(fileContents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFilename = info.importedFiles[i].fileName;
const pos = info.importedFiles[i].pos;
const end = info.importedFiles[i].end;
let importedFilepath: string;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
} else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module), importedFilepath);
}
enqueue(importedFilepath);
let relativePath: string;
if (importedFilepath === path.dirname(module)) {
relativePath = '../' + path.basename(path.dirname(module));
} else if (importedFilepath === path.dirname(path.dirname(module))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module)));
} else {
relativePath = path.relative(path.dirname(module), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (
fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1)
);
}
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return `import * as ${m1} from ${m2};`;
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module + '.ts', fileContents);
}
const esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": [
]
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
const monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
}
function transportCSS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
if (!/\.css/.test(module)) {
return false;
}
const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(filename, fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents);
return true;
function _rewriteOrInlineUrls(originalFileFSPath: string, contents: string, forceBase64: boolean, inlineByteLimit: number): string {
return _replaceURL(contents, (url) => {
let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
}
enqueue(imagePath);
return url;
});
}
function _replaceURL(contents: string, replacer: (url: string) => string): string {
// Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_: string, ...matches: string[]) => {
var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1);
}
// The ending whitespace is captured
while (url.length > 0 && (url.charAt(url.length - 1) === ' ' || url.charAt(url.length - 1) === '\t')) {
url = url.substring(0, url.length - 1);
}
// Eliminate ending quotes
if (url.charAt(url.length - 1) === '"' || url.charAt(url.length - 1) === '\'') {
url = url.substring(0, url.length - 1);
}
if (!_startsWith(url, 'data:') && !_startsWith(url, 'http://') && !_startsWith(url, 'https://')) {
url = replacer(url);
}
return 'url(' + url + ')';
});
}
function _startsWith(haystack: string, needle: string): boolean {
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
}
}
function transportResource(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
if (!/\.svg/.test(module)) {
return false;
}
write(module, fs.readFileSync(path.join(SRC_DIR, module)));
return true;
}
function transportDTS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
return false;
}
if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
return false;
}
write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
let filename: string;
if (options.redirects[module]) {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
} else {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
}
return true;
}

View File

@@ -1,56 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var assert = require("assert");
var util = require("../util");
function getMockTagExists(tags) {
return function (tag) { return tags.indexOf(tag) >= 0; };
}
suite('util tests', function () {
test('getPreviousVersion - patch', function () {
assert.equal(util.getPreviousVersion('1.2.3', getMockTagExists(['1.2.2', '1.2.1', '1.2.0', '1.1.0'])), '1.2.2');
});
test('getPreviousVersion - patch invalid', function () {
try {
util.getPreviousVersion('1.2.2', getMockTagExists(['1.2.0', '1.1.0']));
}
catch (e) {
// expected
return;
}
throw new Error('Expected an exception');
});
test('getPreviousVersion - minor', function () {
assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.2', '1.1.3'])), '1.1.3');
assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.0.0'])), '1.1.0');
});
test('getPreviousVersion - minor gap', function () {
assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.3'])), '1.1.1');
});
test('getPreviousVersion - minor invalid', function () {
try {
util.getPreviousVersion('1.2.0', getMockTagExists(['1.0.0']));
}
catch (e) {
// expected
return;
}
throw new Error('Expected an exception');
});
test('getPreviousVersion - major', function () {
assert.equal(util.getPreviousVersion('2.0.0', getMockTagExists(['1.0.0', '1.1.0', '1.2.0', '1.2.1', '1.2.2'])), '1.2.2');
});
test('getPreviousVersion - major invalid', function () {
try {
util.getPreviousVersion('3.0.0', getMockTagExists(['1.0.0']));
}
catch (e) {
// expected
return;
}
throw new Error('Expected an exception');
});
});

View File

@@ -1,79 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import assert = require('assert');
import util = require('../util');
function getMockTagExists(tags: string[]) {
return (tag: string) => tags.indexOf(tag) >= 0;
}
suite('util tests', () => {
test('getPreviousVersion - patch', () => {
assert.equal(
util.getPreviousVersion('1.2.3', getMockTagExists(['1.2.2', '1.2.1', '1.2.0', '1.1.0'])),
'1.2.2'
);
});
test('getPreviousVersion - patch invalid', () => {
try {
util.getPreviousVersion('1.2.2', getMockTagExists(['1.2.0', '1.1.0']));
} catch (e) {
// expected
return;
}
throw new Error('Expected an exception');
});
test('getPreviousVersion - minor', () => {
assert.equal(
util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.2', '1.1.3'])),
'1.1.3'
);
assert.equal(
util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.0.0'])),
'1.1.0'
);
});
test('getPreviousVersion - minor gap', () => {
assert.equal(
util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.3'])),
'1.1.1'
);
});
test('getPreviousVersion - minor invalid', () => {
try {
util.getPreviousVersion('1.2.0', getMockTagExists(['1.0.0']));
} catch (e) {
// expected
return;
}
throw new Error('Expected an exception');
});
test('getPreviousVersion - major', () => {
assert.equal(
util.getPreviousVersion('2.0.0', getMockTagExists(['1.0.0', '1.1.0', '1.2.0', '1.2.1', '1.2.2'])),
'1.2.2'
);
});
test('getPreviousVersion - major invalid', () => {
try {
util.getPreviousVersion('3.0.0', getMockTagExists(['1.0.0']));
} catch (e) {
// expected
return;
}
throw new Error('Expected an exception');
});
});

View File

@@ -71,7 +71,7 @@ var TranslationRemindRuleWalker = /** @class */ (function (_super) {
}
});
if (!resourceDefined) {
this.addFailureAtNode(node, "Please add '" + resource + "' to ./build/lib/i18n.resources.json file to use translations here.");
this.addFailureAtNode(node, "Please add '" + resource + "' to ./builds/lib/i18n.resources.json file to use translations here.");
}
};
TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls';

View File

@@ -67,7 +67,7 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
});
if (!resourceDefined) {
this.addFailureAtNode(node, `Please add '${resource}' to ./build/lib/i18n.resources.json file to use translations here.`);
this.addFailureAtNode(node, `Please add '${resource}' to ./builds/lib/i18n.resources.json file to use translations here.`);
}
}
}

View File

@@ -1,14 +1,7 @@
declare module "event-stream" {
import { Stream } from 'stream';
import { ThroughStream as _ThroughStream} from 'through';
import { ThroughStream } from 'through';
import { MapStream } from 'map-stream';
import * as File from 'vinyl';
export interface ThroughStream extends _ThroughStream {
queue(data: File | null);
push(data: File | null);
paused: boolean;
}
function merge(streams: Stream[]): ThroughStream;
function merge(...streams: Stream[]): ThroughStream;

View File

@@ -14,7 +14,6 @@ var fs = require("fs");
var _rimraf = require("rimraf");
var git = require("./git");
var VinylFile = require("vinyl");
var cp = require("child_process");
var NoCancellationToken = { isCancellationRequested: function () { return false; } };
function incremental(streamProvider, initial, supportsCancellation) {
var input = es.through();
@@ -144,7 +143,7 @@ function loadSourcemaps() {
cb(null, f);
return;
}
f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', function (err, contents) {
if (err) {
return cb(err);
@@ -161,7 +160,7 @@ function stripSourceMappingURL() {
var output = input
.pipe(es.mapSync(function (f) {
var contents = f.contents.toString('utf8');
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
}));
return es.duplex(input, output);
@@ -174,6 +173,7 @@ function rimraf(dir) {
if (!err) {
return cb();
}
;
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(function () { return retry(cb); }, 10);
}
@@ -211,68 +211,3 @@ function filter(fn) {
return result;
}
exports.filter = filter;
function tagExists(tagName) {
try {
cp.execSync("git rev-parse " + tagName, { stdio: 'ignore' });
return true;
}
catch (e) {
return false;
}
}
/**
* Returns the version previous to the given version. Throws if a git tag for that version doesn't exist.
* Given 1.17.2, return 1.17.1
* 1.18.0 => 1.17.2. (or the highest 1.17.x)
* 2.0.0 => 1.18.0 (or the highest 1.x)
*/
function getPreviousVersion(versionStr, _tagExists) {
if (_tagExists === void 0) { _tagExists = tagExists; }
function getLatestTagFromBase(semverArr, componentToTest) {
var baseVersion = semverArr.join('.');
if (!_tagExists(baseVersion)) {
throw new Error('Failed to find git tag for base version, ' + baseVersion);
}
var goodTag;
do {
goodTag = semverArr.join('.');
semverArr[componentToTest]++;
} while (_tagExists(semverArr.join('.')));
return goodTag;
}
var semverArr = versionStringToNumberArray(versionStr);
if (semverArr[2] > 0) {
semverArr[2]--;
var previous = semverArr.join('.');
if (!_tagExists(previous)) {
throw new Error('Failed to find git tag for previous version, ' + previous);
}
return previous;
}
else if (semverArr[1] > 0) {
semverArr[1]--;
return getLatestTagFromBase(semverArr, 2);
}
else {
semverArr[0]--;
// Find 1.x.0 for latest x
var latestMinorVersion = getLatestTagFromBase(semverArr, 1);
// Find 1.x.y for latest y
return getLatestTagFromBase(versionStringToNumberArray(latestMinorVersion), 2);
}
}
exports.getPreviousVersion = getPreviousVersion;
function versionStringToNumberArray(versionStr) {
return versionStr
.split('.')
.map(function (s) { return parseInt(s); });
}
function versionStringToNumber(versionStr) {
var semverRegex = /(\d+)\.(\d+)\.(\d+)/;
var match = versionStr.match(semverRegex);
if (!match) {
throw new Error('Version string is not properly formatted: ' + versionStr);
}
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
exports.versionStringToNumber = versionStringToNumber;
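Aside from the Buffer and version-helper differences, the rimraf wrapper in this file retries deletion a few times when it hits a transient ENOTEMPTY error, which can happen while files in the directory are still being released. A minimal sketch of that retry shape, with a hypothetical directory and function name:

import * as _rimraf from 'rimraf';

// Retry rimraf up to five times on ENOTEMPTY, then surface the error.
function rimrafWithRetry(dir: string): (cb: (err?: any) => void) => void {
	let retries = 0;
	const retry = (cb: (err?: any) => void) => {
		_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
			if (!err) { return cb(); }
			if (err.code === 'ENOTEMPTY' && ++retries < 5) {
				return setTimeout(() => retry(cb), 10);
			}
			return cb(err);
		});
	};
	return cb => retry(cb);
}

// rimrafWithRetry('out-build')(err => { if (err) { throw err; } });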

View File

@@ -17,7 +17,6 @@ import * as git from './git';
import * as VinylFile from 'vinyl';
import { ThroughStream } from 'through';
import * as sm from 'source-map';
import * as cp from 'child_process';
export interface ICancellationToken {
isCancellationRequested(): boolean;
@@ -29,7 +28,7 @@ export interface IStreamProvider {
(cancellationToken?: ICancellationToken): NodeJS.ReadWriteStream;
}
export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation?: boolean): NodeJS.ReadWriteStream {
export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation: boolean): NodeJS.ReadWriteStream {
const input = es.through();
const output = es.through();
let state = 'idle';
@@ -130,7 +129,7 @@ export function skipDirectories(): NodeJS.ReadWriteStream {
});
}
export function cleanNodeModule(name: string, excludes: string[], includes?: string[]): NodeJS.ReadWriteStream {
export function cleanNodeModule(name: string, excludes: string[], includes: string[]): NodeJS.ReadWriteStream {
const toGlob = (path: string) => '**/node_modules/' + name + (path ? '/' + path : '');
const negate = (str: string) => '!' + str;
@@ -191,7 +190,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream {
return;
}
f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => {
if (err) { return cb(err); }
@@ -210,7 +209,7 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
const output = input
.pipe(es.mapSync<VinylFile, VinylFile>(f => {
const contents = (<Buffer>f.contents).toString('utf8');
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
}));
@@ -224,7 +223,7 @@ export function rimraf(dir: string): (cb: any) => void {
_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
if (!err) {
return cb();
}
};
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(cb), 10);
@@ -269,74 +268,4 @@ export function filter(fn: (data: any) => boolean): FilterStream {
result.restore = es.through();
return result;
}
function tagExists(tagName: string): boolean {
try {
cp.execSync(`git rev-parse ${tagName}`, { stdio: 'ignore' });
return true;
} catch (e) {
return false;
}
}
/**
* Returns the version previous to the given version. Throws if a git tag for that version doesn't exist.
* Given 1.17.2, return 1.17.1
* 1.18.0 => 1.17.2. (or the highest 1.17.x)
* 2.0.0 => 1.18.0 (or the highest 1.x)
*/
export function getPreviousVersion(versionStr: string, _tagExists = tagExists) {
function getLatestTagFromBase(semverArr: number[], componentToTest: number): string {
const baseVersion = semverArr.join('.');
if (!_tagExists(baseVersion)) {
throw new Error('Failed to find git tag for base version, ' + baseVersion);
}
let goodTag;
do {
goodTag = semverArr.join('.');
semverArr[componentToTest]++;
} while (_tagExists(semverArr.join('.')));
return goodTag;
}
const semverArr = versionStringToNumberArray(versionStr);
if (semverArr[2] > 0) {
semverArr[2]--;
const previous = semverArr.join('.');
if (!_tagExists(previous)) {
throw new Error('Failed to find git tag for previous version, ' + previous);
}
return previous;
} else if (semverArr[1] > 0) {
semverArr[1]--;
return getLatestTagFromBase(semverArr, 2);
} else {
semverArr[0]--;
// Find 1.x.0 for latest x
const latestMinorVersion = getLatestTagFromBase(semverArr, 1);
// Find 1.x.y for latest y
return getLatestTagFromBase(versionStringToNumberArray(latestMinorVersion), 2);
}
}
function versionStringToNumberArray(versionStr: string): number[] {
return versionStr
.split('.')
.map(s => parseInt(s));
}
export function versionStringToNumber(versionStr: string) {
const semverRegex = /(\d+)\.(\d+)\.(\d+)/;
const match = versionStr.match(semverRegex);
if (!match) {
throw new Error('Version string is not properly formatted: ' + versionStr);
}
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
}

View File

@@ -9,7 +9,7 @@ const es = require('event-stream');
function handleDeletions() {
return es.mapSync(f => {
if (/\.ts$/.test(f.relative) && !f.contents) {
f.contents = Buffer.from('');
f.contents = new Buffer('');
f.stat = { mtime: new Date() };
}

View File

@@ -30,12 +30,12 @@ function watch(root) {
path: path,
base: root
});
//@ts-ignore
file.event = type;
result.emit('data', file);
}
nsfw(root, function (events) {
nsfw(root, function(events) {
for (var i = 0; i < events.length; i++) {
var e = events[i];
var changeType = e.action;
@@ -47,16 +47,16 @@ function watch(root) {
handleEvent(path.join(e.directory, e.file), toChangeType(changeType));
}
}
}).then(function (watcher) {
}).then(function(watcher) {
watcher.start();
});
});
return result;
return result;
}
var cache = Object.create(null);
module.exports = function (pattern, options) {
module.exports = function(pattern, options) {
options = options || {};
var cwd = path.normalize(options.cwd || process.cwd());
@@ -66,7 +66,7 @@ module.exports = function (pattern, options) {
watcher = cache[cwd] = watch(cwd);
}
var rebase = !options.base ? es.through() : es.mapSync(function (f) {
var rebase = !options.base ? es.through() : es.mapSync(function(f) {
f.base = options.base;
return f;
});
@@ -74,13 +74,13 @@ module.exports = function (pattern, options) {
return watcher
.pipe(filter(['**', '!.git{,/**}'])) // ignore all things git
.pipe(filter(pattern))
.pipe(es.map(function (file, cb) {
fs.stat(file.path, function (err, stat) {
.pipe(es.map(function(file, cb) {
fs.stat(file.path, function(err, stat) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }
if (!stat.isFile()) { return cb(); }
fs.readFile(file.path, function (err, contents) {
fs.readFile(file.path, function(err, contents) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }

View File

@@ -24,8 +24,7 @@ function watch(root) {
var result = es.through();
var child = cp.spawn(watcherPath, [root]);
child.stdout.on('data', function (data) {
// @ts-ignore
child.stdout.on('data', function(data) {
var lines = data.toString('utf8').split('\n');
for (var i = 0; i < lines.length; i++) {
var line = lines[i].trim();
@@ -47,17 +46,17 @@ function watch(root) {
path: changePathFull,
base: root
});
//@ts-ignore
file.event = toChangeType(changeType);
result.emit('data', file);
}
});
child.stderr.on('data', function (data) {
child.stderr.on('data', function(data) {
result.emit('error', data);
});
child.on('exit', function (code) {
child.on('exit', function(code) {
result.emit('error', 'Watcher died with code ' + code);
child = null;
});
@@ -71,7 +70,7 @@ function watch(root) {
var cache = Object.create(null);
module.exports = function (pattern, options) {
module.exports = function(pattern, options) {
options = options || {};
var cwd = path.normalize(options.cwd || process.cwd());

View File

@@ -1,21 +1 @@
The Source EULA
Copyright (c) 2016 - present Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
See project root directory

View File

@@ -171,7 +171,9 @@ function format(text) {
function getRuleProvider(options) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
return ts.formatting.getFormatContext(options);
var ruleProvider = new ts.formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
}
function applyEdits(text, edits) {
// Apply edits in reverse on the existing text
@@ -208,8 +210,7 @@ function createReplacer(data) {
};
}
function generateDeclarationFile(out, inputFiles, recipe) {
var endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
var lines = recipe.split(endl);
var lines = recipe.split(/\r\n|\n|\r/);
var result = [];
lines.forEach(function (line) {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
@@ -277,11 +278,12 @@ function generateDeclarationFile(out, inputFiles, recipe) {
}
result.push(line);
});
var resultTxt = result.join(endl);
var resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt);
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt;
}
function getFilesToWatch(out) {
@@ -312,13 +314,10 @@ function run(out, inputFiles) {
var result = generateDeclarationFile(out, inputFiles, recipe);
var currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation');
var one = currentContent.replace(/\r\n/gm, '\n');
var other = result.replace(/\r\n/gm, '\n');
var isTheSame = one === other;
return {
content: result,
filePath: DECLARATION_PATH,
isTheSame: isTheSame
isTheSame: currentContent === result
};
}
exports.run = run;
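One behavioural difference in these monaco-api hunks is line-ending handling: one side detects the recipe's own EOL style, reuses it when joining the generated declaration lines, and compares against the existing monaco.d.ts with CRLF normalised away; the other always joins with '\n' and strips CRLF afterwards. The EOL-preserving idea as a small sketch (function names are made up):

// Join generated lines using the same EOL style as the input recipe.
function joinWithRecipeEol(recipe: string, lines: string[]): string {
	const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
	return lines.join(endl);
}

// Compare two declaration files while ignoring CRLF vs LF differences.
function isSameIgnoringEol(a: string, b: string): boolean {
	return a.replace(/\r\n/g, '\n') === b.replace(/\r\n/g, '\n');
}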

View File

@@ -24,15 +24,15 @@ function logErr(message: any, ...rest: any[]): void {
util.log(util.colors.red('[monaco.d.ts]'), message, ...rest);
}
function moduleIdToPath(out: string, moduleId: string): string {
function moduleIdToPath(out:string, moduleId:string): string {
if (/\.d\.ts/.test(moduleId)) {
return path.join(SRC, moduleId);
}
return path.join(OUT_ROOT, out, moduleId) + '.d.ts';
}
let SOURCE_FILE_MAP: { [moduleId: string]: ts.SourceFile; } = {};
function getSourceFile(out: string, inputFiles: { [file: string]: string; }, moduleId: string): ts.SourceFile {
let SOURCE_FILE_MAP: {[moduleId:string]:ts.SourceFile;} = {};
function getSourceFile(out:string, inputFiles: { [file: string]: string; }, moduleId:string): ts.SourceFile {
if (!SOURCE_FILE_MAP[moduleId]) {
let filePath = path.normalize(moduleIdToPath(out, moduleId));
@@ -53,7 +53,7 @@ function getSourceFile(out: string, inputFiles: { [file: string]: string; }, mod
type TSTopLevelDeclaration = ts.InterfaceDeclaration | ts.EnumDeclaration | ts.ClassDeclaration | ts.TypeAliasDeclaration | ts.FunctionDeclaration | ts.ModuleDeclaration;
type TSTopLevelDeclare = TSTopLevelDeclaration | ts.VariableStatement;
function isDeclaration(a: TSTopLevelDeclare): a is TSTopLevelDeclaration {
function isDeclaration(a:TSTopLevelDeclare): a is TSTopLevelDeclaration {
return (
a.kind === ts.SyntaxKind.InterfaceDeclaration
|| a.kind === ts.SyntaxKind.EnumDeclaration
@@ -64,7 +64,7 @@ function isDeclaration(a: TSTopLevelDeclare): a is TSTopLevelDeclaration {
);
}
function visitTopLevelDeclarations(sourceFile: ts.SourceFile, visitor: (node: TSTopLevelDeclare) => boolean): void {
function visitTopLevelDeclarations(sourceFile:ts.SourceFile, visitor:(node:TSTopLevelDeclare)=>boolean): void {
let stop = false;
let visit = (node: ts.Node): void => {
@@ -100,8 +100,8 @@ function visitTopLevelDeclarations(sourceFile: ts.SourceFile, visitor: (node: TS
}
function getAllTopLevelDeclarations(sourceFile: ts.SourceFile): TSTopLevelDeclare[] {
let all: TSTopLevelDeclare[] = [];
function getAllTopLevelDeclarations(sourceFile:ts.SourceFile): TSTopLevelDeclare[] {
let all:TSTopLevelDeclare[] = [];
visitTopLevelDeclarations(sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration>node;
@@ -128,8 +128,8 @@ function getAllTopLevelDeclarations(sourceFile: ts.SourceFile): TSTopLevelDeclar
}
function getTopLevelDeclaration(sourceFile: ts.SourceFile, typeName: string): TSTopLevelDeclare {
let result: TSTopLevelDeclare = null;
function getTopLevelDeclaration(sourceFile:ts.SourceFile, typeName:string): TSTopLevelDeclare {
let result:TSTopLevelDeclare = null;
visitTopLevelDeclarations(sourceFile, (node) => {
if (isDeclaration(node)) {
if (node.name.text === typeName) {
@@ -149,12 +149,12 @@ function getTopLevelDeclaration(sourceFile: ts.SourceFile, typeName: string): TS
}
function getNodeText(sourceFile: ts.SourceFile, node: { pos: number; end: number; }): string {
function getNodeText(sourceFile:ts.SourceFile, node:{pos:number; end:number;}): string {
return sourceFile.getFullText().substring(node.pos, node.end);
}
function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare): string {
function getMassagedTopLevelDeclarationText(sourceFile:ts.SourceFile, declaration: TSTopLevelDeclare): string {
let result = getNodeText(sourceFile, declaration);
// if (result.indexOf('MonacoWorker') >= 0) {
// console.log('here!');
@@ -163,7 +163,7 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
let members: ts.NodeArray<ts.Node> = interfaceDeclaration.members;
let members:ts.NodeArray<ts.Node> = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
@@ -182,7 +182,7 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
return result;
}
function format(text: string): string {
function format(text:string): string {
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
@@ -196,7 +196,9 @@ function format(text: string): string {
function getRuleProvider(options: ts.FormatCodeSettings) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
return (ts as any).formatting.getFormatContext(options);
let ruleProvider = new (<any>ts).formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
}
function applyEdits(text: string, edits: ts.TextChange[]): string {
@@ -212,10 +214,10 @@ function format(text: string): string {
}
}
function createReplacer(data: string): (str: string) => string {
function createReplacer(data:string): (str:string)=>string {
data = data || '';
let rawDirectives = data.split(';');
let directives: [RegExp, string][] = [];
let directives: [RegExp,string][] = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
@@ -229,7 +231,7 @@ function createReplacer(data: string): (str: string) => string {
directives.push([new RegExp(findStr, 'g'), replaceStr]);
});
return (str: string) => {
return (str:string)=> {
for (let i = 0; i < directives.length; i++) {
str = str.replace(directives[i][0], directives[i][1]);
}
@@ -237,12 +239,11 @@ function createReplacer(data: string): (str: string) => string {
};
}
function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe: string): string {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
let lines = recipe.split(endl);
function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe:string): string {
let lines = recipe.split(/\r\n|\n|\r/);
let result = [];
lines.forEach(line => {
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
@@ -284,7 +285,7 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
let replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap: { [typeName: string]: boolean; } = {};
let typesToExcludeMap: {[typeName:string]:boolean;} = {};
let typesToExcludeArr: string[] = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
@@ -317,17 +318,18 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
result.push(line);
});
let resultTxt = result.join(endl);
let resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt);
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt;
}
export function getFilesToWatch(out: string): string[] {
export function getFilesToWatch(out:string): string[] {
let recipe = fs.readFileSync(RECIPE_PATH).toString();
let lines = recipe.split(/\r\n|\n|\r/);
let result = [];
@@ -368,14 +370,10 @@ export function run(out: string, inputFiles: { [file: string]: string; }): IMona
let currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation');
const one = currentContent.replace(/\r\n/gm, '\n');
const other = result.replace(/\r\n/gm, '\n');
const isTheSame = one === other;
return {
content: result,
filePath: DECLARATION_PATH,
isTheSame
isTheSame: currentContent === result
};
}

View File

@@ -3,9 +3,9 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
declare namespace monaco {
declare module monaco {
export type Thenable<T> = PromiseLike<T>;
type Thenable<T> = PromiseLike<T>;
export interface IDisposable {
dispose(): void;
@@ -32,14 +32,6 @@ declare namespace monaco {
Error = 3,
}
export enum MarkerSeverity {
Hint = 1,
Info = 2,
Warning = 4,
Error = 8,
}
#include(vs/base/common/winjs.base.d.ts): TValueCallback, ProgressCallback, Promise
#include(vs/base/common/cancellation): CancellationTokenSource, CancellationToken
#include(vs/base/common/uri): URI, UriComponents
@@ -54,7 +46,7 @@ declare namespace monaco {
#include(vs/editor/common/core/token): Token
}
declare namespace monaco.editor {
declare module monaco.editor {
#includeAll(vs/editor/standalone/browser/standaloneEditor;modes.=>languages.;editorCommon.=>):
#include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors
@@ -66,34 +58,29 @@ export interface ICommandHandler {
}
#include(vs/platform/contextkey/common/contextkey): IContextKey
#include(vs/editor/standalone/browser/standaloneServices): IEditorOverrideServices
#include(vs/platform/markers/common/markers): IMarker, IMarkerData, IRelatedInformation
#include(vs/platform/markers/common/markers): IMarker, IMarkerData
#include(vs/editor/standalone/browser/colorizer): IColorizerOptions, IColorizerElementOptions
#include(vs/base/common/scrollable): ScrollbarVisibility
#include(vs/platform/theme/common/themeService): ThemeColor
#includeAll(vs/editor/common/model;LanguageIdentifier=>languages.LanguageIdentifier): IScrollEvent
#includeAll(vs/editor/common/editorCommon;editorOptions.=>): IScrollEvent
#includeAll(vs/editor/common/editorCommon;IMode=>languages.IMode;LanguageIdentifier=>languages.LanguageIdentifier;editorOptions.=>): ISelection, IScrollEvent
#includeAll(vs/editor/common/model/textModelEvents):
#includeAll(vs/editor/common/controller/cursorEvents):
#includeAll(vs/editor/common/config/editorOptions):
#includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>):
#include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo
//compatibility:
export type IReadOnlyModel = ITextModel;
export type IModel = ITextModel;
}
declare namespace monaco.languages {
declare module monaco.languages {
#includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;model.=>editor.;IMarkerData=>editor.IMarkerData):
#includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData):
#includeAll(vs/editor/common/modes/languageConfiguration):
#includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData;model.=>editor.):
#includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData):
#include(vs/editor/common/services/modeService): ILanguageExtensionPoint
#includeAll(vs/editor/standalone/common/monarch/monarchTypes):
}
declare namespace monaco.worker {
declare module monaco.worker {
#includeAll(vs/editor/common/services/editorSimpleWorker;):

View File

@@ -1,17 +1,15 @@
{
"name": "monaco-editor-core",
"private": true,
"version": "0.12.0",
"version": "0.9.0",
"description": "A browser based code editor",
"author": "Microsoft Corporation",
"license": "MIT",
"typings": "./esm/vs/editor/editor.api.d.ts",
"module": "./esm/vs/editor/editor.main.js",
"repository": {
"type": "git",
"url": "https://github.com/Microsoft/vscode"
},
"bugs": {
"bugs": {
"url": "https://github.com/Microsoft/vscode/issues"
}
}

View File

@@ -21,24 +21,28 @@ function yarnInstall(location, opts) {
}
// {{SQL CARBON EDIT}}
yarnInstall('dataprotocol-client');
yarnInstall('extensions-modules');
yarnInstall('extensions'); // node modules shared by all extensions
const allExtensionFolders = fs.readdirSync('extensions');
const extensions = allExtensionFolders.filter(e => {
try {
let packageJSON = JSON.parse(fs.readFileSync(path.join('extensions', e, 'package.json')).toString());
return packageJSON && (packageJSON.dependencies || packageJSON.devDependencies);
} catch (e) {
return false;
}
});
const extensions = [
'vscode-colorize-tests',
'json',
'mssql',
'configuration-editing',
'extension-editing',
'markdown',
'git',
'merge-conflict',
'insights-default',
'account-provider-azure'
];
extensions.forEach(extension => yarnInstall(`extensions/${extension}`));
function yarnInstallBuildDependencies() {
// make sure we install the deps of build/lib/watch for the system installed
// node, since that is the driver of gulp
//@ts-ignore
const env = Object.assign({}, process.env);
const watchPath = path.join(path.dirname(__dirname), 'lib', 'watch');
const yarnrcPath = path.join(watchPath, '.yarnrc');
@@ -56,5 +60,4 @@ runtime "${runtime}"`;
}
yarnInstall(`build`); // node modules required for build
yarnInstall('test/smoke'); // node modules required for smoketest
yarnInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron
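One side of this postinstall.js hunk replaces the hard-coded extension list with a scan of the extensions/ folder, installing dependencies only for extensions whose package.json declares any. The same idea as a standalone sketch (the function name is made up):

import * as fs from 'fs';
import * as path from 'path';

// Return the extension folders that actually need a yarn install.
function extensionsNeedingInstall(root: string): string[] {
	return fs.readdirSync(root).filter(name => {
		try {
			const pkg = JSON.parse(fs.readFileSync(path.join(root, name, 'package.json')).toString());
			return !!(pkg && (pkg.dependencies || pkg.devDependencies));
		} catch (e) {
			return false;
		}
	});
}

// extensionsNeedingInstall('extensions').forEach(ext => yarnInstall('extensions/' + ext));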

View File

@@ -4,11 +4,9 @@
*--------------------------------------------------------------------------------------------*/
const cp = require('child_process');
const fs = require('fs');
const path = require('path');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function updateGrammar(location) {
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
const result = cp.spawnSync(npm, ['run', 'update-grammar'], {
cwd: location,
stdio: 'inherit'
@@ -19,17 +17,50 @@ function updateGrammar(location) {
}
}
const allExtensionFolders = fs.readdirSync('extensions');
const extensions = allExtensionFolders.filter(e => {
try {
let packageJSON = JSON.parse(fs.readFileSync(path.join('extensions', e, 'package.json')).toString());
return packageJSON && packageJSON.scripts && packageJSON.scripts['update-grammar'];
} catch (e) {
return false;
}
});
console.log(`Updating ${extensions.length} grammars...`);
const extensions = [
'bat',
'clojure',
'coffeescript',
'cpp',
'csharp',
'css',
'diff',
'docker',
'fsharp',
'gitsyntax',
'go',
'groovy',
'handlebars',
'hlsl',
'html',
'ini',
'java',
// 'javascript', updated through JavaScript
'json',
'less',
'lua',
'make',
'markdown',
'objective-c',
'perl',
'php',
// 'powershell', grammar not ready yet, @daviwil will ping when ready
'pug',
'python',
'r',
'razor',
'ruby',
'rust',
'scss',
'shaderlab',
'shellscript',
'sql',
'swift',
'typescript',
'vb',
'xml',
'yaml'
];
extensions.forEach(extension => updateGrammar(`extensions/${extension}`));
@@ -39,5 +70,4 @@ if (process.platform === 'win32') {
cp.spawn('.\scripts\test-integration.bat', [], { env: process.env, stdio: 'inherit' });
} else {
cp.spawn('/bin/bash', ['./scripts/test-integration.sh'], { env: process.env, stdio: 'inherit' });
}
}

View File

@@ -14,19 +14,14 @@ var url = require('url');
function getOptions(urlString) {
var _url = url.parse(urlString);
var headers = {
'User-Agent': 'VSCode'
};
var token = process.env['GITHUB_TOKEN'];
if (token) {
headers['Authorization'] = 'token ' + token
}
return {
protocol: _url.protocol,
host: _url.host,
port: _url.port,
path: _url.path,
headers: headers
headers: {
'User-Agent': 'NodeJS'
}
};
}
@@ -37,16 +32,12 @@ function download(url, redirectCount) {
response.on('data', function (data) {
content += data.toString();
}).on('end', function () {
if (response.statusCode === 403 && response.headers['x-ratelimit-remaining'] === '0') {
e('GitHub API rate exceeded. Set GITHUB_TOKEN environment variable to increase rate limit.');
return;
}
let count = redirectCount || 0;
if (count < 5 && response.statusCode >= 300 && response.statusCode <= 303 || response.statusCode === 307) {
let location = response.headers['location'];
if (location) {
console.log("Redirected " + url + " to " + location);
download(location, count + 1).then(c, e);
download(location, count+1).then(c, e);
return;
}
}
@@ -68,13 +59,17 @@ function getCommitSha(repoId, repoPath) {
commitDate: lastCommit.commit.author.date
});
} catch (e) {
return Promise.reject(new Error("Failed extracting the SHA: " + content));
console.error("Failed extracting the SHA: " + content);
return Promise.resolve(null);
}
}, function () {
console.error('Failed loading ' + commitInfo);
return Promise.resolve(null);
});
}
exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'master') {
var contentPath = 'https://raw.githubusercontent.com/' + repoId + `/${version}/` + repoPath;
exports.update = function (repoId, repoPath, dest, modifyGrammar) {
var contentPath = 'https://raw.githubusercontent.com/' + repoId + '/master/' + repoPath;
console.log('Reading from ' + contentPath);
return download(contentPath).then(function (content) {
var ext = path.extname(repoPath);
@@ -86,7 +81,8 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
} else if (ext === '.json') {
grammar = JSON.parse(content);
} else {
return Promise.reject(new Error('Unknown file extension: ' + ext));
console.error('Unknown file extension: ' + ext);
return;
}
if (modifyGrammar) {
modifyGrammar(grammar);
@@ -103,10 +99,8 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
if (info) {
result.version = 'https://github.com/' + repoId + '/commit/' + info.commitSha;
}
let keys = ['name', 'scopeName', 'comment', 'injections', 'patterns', 'repository'];
for (let key of keys) {
if (grammar.hasOwnProperty(key)) {
for (let key in grammar) {
if (!result.hasOwnProperty(key)) {
result[key] = grammar[key];
}
}
@@ -119,14 +113,11 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
console.log('Updated ' + path.basename(dest));
}
} catch (e) {
return Promise.reject(e);
console.error(e);
}
});
}, console.error).catch(e => {
console.error(e);
process.exit(1);
});
}, console.error);
};
if (path.basename(process.argv[1]) === 'update-grammar.js') {
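
One variant of exports.update shown above takes an optional fifth version argument (defaulting to 'master') and, when the GITHUB_TOKEN environment variable is set, sends an Authorization header to raise the GitHub API rate limit. A hedged usage sketch of that variant (module path, repository, file names, and tag are illustrative, not taken from this diff):

```ts
// Hedged sketch only; the require path and all names below are illustrative.
const updateGrammar = require('./update-grammar');

// Optional: setting GITHUB_TOKEN raises the rate limit used when resolving the commit SHA.
// process.env['GITHUB_TOKEN'] = '<token>';

updateGrammar.update(
	'some-org/some-grammar',               // repoId
	'grammars/example.tmLanguage.json',    // repoPath inside that repo
	'./syntaxes/example.tmLanguage.json',  // dest
	undefined,                             // modifyGrammar callback (optional)
	'v1.2.3'                               // version: branch, tag or commit (defaults to 'master')
);
```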

View File

@@ -1,69 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
let i18n = require("../lib/i18n");
let fs = require("fs");
let path = require("path");
let vfs = require("vinyl-fs");
let rimraf = require('rimraf');
function update(idOrPath) {
if (!idOrPath) {
throw new Error('Argument must be the location of the localization extension.');
}
let locExtFolder = idOrPath;
if (/^\w{2}(-\w+)?$/.test(idOrPath)) {
locExtFolder = '../vscode-language-pack-' + idOrPath;
}
let locExtStat = fs.statSync(locExtFolder);
if (!locExtStat || !locExtStat.isDirectory()) {
throw new Error('No directory found at ' + idOrPath);
}
let packageJSON = JSON.parse(fs.readFileSync(path.join(locExtFolder, 'package.json')).toString());
let contributes = packageJSON['contributes'];
if (!contributes) {
throw new Error('The extension must define a "localizations" contribution in the "package.json"');
}
let localizations = contributes['localizations'];
if (!localizations) {
throw new Error('The extension must define a "localizations" contribution of type array in the "package.json"');
}
localizations.forEach(function (localization) {
if (!localization.languageId || !localization.languageName || !localization.localizedLanguageName) {
throw new Error('Each localization contribution must define "languageId", "languageName" and "localizedLanguageName" properties.');
}
let server = localization.server || 'www.transifex.com';
let userName = localization.userName || 'api';
let apiToken = process.env.TRANSIFEX_API_TOKEN;
let languageId = localization.transifexId || localization.languageId;
let translationDataFolder = path.join(locExtFolder, 'translations');
if (fs.existsSync(translationDataFolder) && fs.existsSync(path.join(translationDataFolder, 'main.i18n.json'))) {
console.log('Clearing \'' + translationDataFolder + '\'...');
rimraf.sync(translationDataFolder);
}
console.log('Downloading translations for \'' + languageId + '\' to \'' + translationDataFolder + '\'...');
const translationPaths = [];
i18n.pullI18nPackFiles(server, userName, apiToken, { id: languageId }, translationPaths)
.pipe(vfs.dest(translationDataFolder)).on('end', function () {
localization.translations = [];
for (let tp of translationPaths) {
localization.translations.push({ id: tp.id, path: `./translations/${tp.resourceName}`});
}
fs.writeFileSync(path.join(locExtFolder, 'package.json'), JSON.stringify(packageJSON, null, '\t'));
});
});
}
if (path.basename(process.argv[1]) === 'update-localization-extension.js') {
update(process.argv[2]);
}
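
For context on the file removed above: it reads a language pack extension's package.json, expects a contributes.localizations array whose entries define languageId, languageName and localizedLanguageName, pulls the i18n pack files from Transifex, and writes the resulting translations entries back into the manifest. A hedged sketch of that manifest shape (language id and values are illustrative; only the field names come from the script):

```ts
// Hedged sketch of the manifest the removed script operates on; values are illustrative.
const languagePackManifest = {
	contributes: {
		localizations: [
			{
				languageId: 'xx',
				languageName: 'Example',
				localizedLanguageName: 'Example (localized)',
				// Populated by the script after downloading the i18n pack files:
				translations: [
					{ id: 'vscode', path: './translations/main.i18n.json' }
				]
			}
		]
	}
};
```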

View File

@@ -9,24 +9,20 @@
"@types/mime": "0.0.29",
"@types/node": "8.0.33",
"@types/xml2js": "0.0.33",
"@types/request": "^2.47.0",
"azure-storage": "^2.1.0",
"decompress": "^4.2.0",
"documentdb": "1.13.0",
"service-downloader": "github:anthonydresser/service-downloader#0.1.2",
"extensions-modules": "file:../extensions-modules",
"fs-extra-promise": "^1.0.1",
"mime": "^1.3.4",
"minimist": "^1.2.0",
"typescript": "2.8.1",
"vscode": "^1.0.1",
"xml2js": "^0.4.17",
"github-releases": "^0.4.1",
"request": "^2.85.0"
"typescript": "2.6.1",
"vscode": "^1.0.1",
"xml2js": "^0.4.17"
},
"scripts": {
"compile": "tsc -p tsconfig.build.json",
"watch": "tsc -p tsconfig.build.json --watch",
"postinstall": "npm run compile",
"npmCheckJs": "tsc --noEmit"
"compile": "tsc",
"watch": "tsc --watch",
"postinstall": "npm run compile"
}
}

View File

@@ -4,9 +4,9 @@ set -e
# setup nvm
if [[ "$OSTYPE" == "darwin"* ]]; then
export NVM_DIR=~/.nvm
source $(brew --prefix nvm)/nvm.sh --no-use
source $(brew --prefix nvm)/nvm.sh
else
source $NVM_DIR/nvm.sh --no-use
source $NVM_DIR/nvm.sh
fi
# install node

View File

@@ -70,7 +70,6 @@ interface Asset {
hash: string;
sha256hash: string;
size: number;
supportsFastUpdate?: boolean;
}
function createOrUpdate(commit: string, quality: string, platform: string, type: string, release: NewDocument, asset: Asset, isUpdate: boolean): Promise<void> {
@@ -204,30 +203,17 @@ async function publish(commit: string, quality: string, platform: string, type:
// mooncake is fussy and far away, this is needed!
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
await Promise.all([
assertContainer(blobService, quality),
assertContainer(mooncakeBlobService, quality)
]);
await assertContainer(mooncakeBlobService, quality);
const [blobExists, moooncakeBlobExists] = await Promise.all([
doesAssetExist(blobService, quality, blobName),
doesAssetExist(mooncakeBlobService, quality, blobName)
]);
const mooncakeBlobExists = await doesAssetExist(mooncakeBlobService, quality, blobName);
const promises = [];
if (!blobExists) {
promises.push(uploadBlob(blobService, quality, blobName, file));
}
if (!moooncakeBlobExists) {
if (!mooncakeBlobExists) {
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
}
} else {
console.log('Skipping Mooncake publishing.');
}
if (promises.length === 0) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
@@ -254,13 +240,6 @@ async function publish(commit: string, quality: string, platform: string, type:
size
};
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, ' '));
const release = {
id: commit,
timestamp: (new Date()).getTime(),
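
One side of the publish.ts diff above carries an optional supportsFastUpdate flag on the Asset record, set only for Windows builds via a /win32/ platform test, alongside parallel existence checks against both the primary and mooncake blob stores. A hedged sketch of just that platform test in isolation (platform strings are illustrative):

```ts
// Hedged sketch mirroring the /win32/ check shown above; not the build script itself.
function assetSupportsFastUpdate(platform: string): boolean {
	// Only Windows platforms advertise fast-update support.
	return /win32/.test(platform);
}

// assetSupportsFastUpdate('win32-x64') === true
// assetSupportsFastUpdate('darwin')    === false
```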

View File

@@ -1,219 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as request from 'request';
import { createReadStream, createWriteStream, unlink, mkdir } from 'fs';
import * as github from 'github-releases';
import { join } from 'path';
import { tmpdir } from 'os';
import { promisify } from 'util';
const BASE_URL = 'https://rink.hockeyapp.net/api/2/';
const HOCKEY_APP_TOKEN_HEADER = 'X-HockeyAppToken';
export interface IVersions {
app_versions: IVersion[];
}
export interface IVersion {
id: number;
version: string;
}
export interface IApplicationAccessor {
accessToken: string;
appId: string;
}
export interface IVersionAccessor extends IApplicationAccessor {
id: string;
}
enum Platform {
WIN_32 = 'win32-ia32',
WIN_64 = 'win32-x64',
LINUX_32 = 'linux-ia32',
LINUX_64 = 'linux-x64',
MAC_OS = 'darwin-x64'
}
function symbolsZipName(platform: Platform, electronVersion: string, insiders: boolean): string {
return `${insiders ? 'insiders' : 'stable'}-symbols-v${electronVersion}-${platform}.zip`;
}
const SEED = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
async function tmpFile(name: string): Promise<string> {
let res = '';
for (let i = 0; i < 8; i++) {
res += SEED.charAt(Math.floor(Math.random() * SEED.length));
}
const tmpParent = join(tmpdir(), res);
await promisify(mkdir)(tmpParent);
return join(tmpParent, name);
}
function getVersions(accessor: IApplicationAccessor): Promise<IVersions> {
return asyncRequest<IVersions>({
url: `${BASE_URL}/apps/${accessor.appId}/app_versions`,
method: 'GET',
headers: {
[HOCKEY_APP_TOKEN_HEADER]: accessor.accessToken
}
});
}
function createVersion(accessor: IApplicationAccessor, version: string): Promise<IVersion> {
return asyncRequest<IVersion>({
url: `${BASE_URL}/apps/${accessor.appId}/app_versions/new`,
method: 'POST',
headers: {
[HOCKEY_APP_TOKEN_HEADER]: accessor.accessToken
},
formData: {
bundle_version: version
}
});
}
function updateVersion(accessor: IVersionAccessor, symbolsPath: string) {
return asyncRequest<IVersions>({
url: `${BASE_URL}/apps/${accessor.appId}/app_versions/${accessor.id}`,
method: 'PUT',
headers: {
[HOCKEY_APP_TOKEN_HEADER]: accessor.accessToken
},
formData: {
dsym: createReadStream(symbolsPath)
}
});
}
function asyncRequest<T>(options: request.UrlOptions & request.CoreOptions): Promise<T> {
return new Promise<T>((resolve, reject) => {
request(options, (error, response, body) => {
if (error) {
reject(error);
} else {
resolve(JSON.parse(body));
}
});
});
}
function downloadAsset(repository, assetName: string, targetPath: string, electronVersion: string) {
return new Promise((resolve, reject) => {
repository.getReleases({ tag_name: `v${electronVersion}` }, (err, releases) => {
if (err) {
reject(err);
} else {
const asset = releases[0].assets.filter(asset => asset.name === assetName)[0];
if (!asset) {
reject(new Error(`Asset with name ${assetName} not found`));
} else {
repository.downloadAsset(asset, (err, reader) => {
if (err) {
reject(err);
} else {
const writer = createWriteStream(targetPath);
writer.on('error', reject);
writer.on('close', resolve);
reader.on('error', reject);
reader.pipe(writer);
}
});
}
}
});
});
}
interface IOptions {
repository: string;
platform: Platform;
versions: { code: string; insiders: boolean; electron: string; };
access: { hockeyAppToken: string; hockeyAppId: string; githubToken: string };
}
async function ensureVersionAndSymbols(options: IOptions) {
// Check version does not exist
console.log(`HockeyApp: checking for existing version ${options.versions.code} (${options.platform})`);
const versions = await getVersions({ accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId });
if (versions.app_versions.some(v => v.version === options.versions.code)) {
console.log(`HockeyApp: Returning without uploading symbols because version ${options.versions.code} (${options.platform}) was already found`);
return;
}
// Download symbols for platform and electron version
const symbolsName = symbolsZipName(options.platform, options.versions.electron, options.versions.insiders);
const symbolsPath = await tmpFile('symbols.zip');
console.log(`HockeyApp: downloading symbols ${symbolsName} for electron ${options.versions.electron} (${options.platform}) into ${symbolsPath}`);
await downloadAsset(new github({ repo: options.repository, token: options.access.githubToken }), symbolsName, symbolsPath, options.versions.electron);
// Create version
console.log(`HockeyApp: creating new version ${options.versions.code} (${options.platform})`);
const version = await createVersion({ accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId }, options.versions.code);
// Upload symbols
console.log(`HockeyApp: uploading symbols for version ${options.versions.code} (${options.platform})`);
await updateVersion({ id: String(version.id), accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId }, symbolsPath);
// Cleanup
await promisify(unlink)(symbolsPath);
}
// Environment
const pakage = require('../../../package.json');
const product = require('../../../product.json');
const repository = product.electronRepository;
const electronVersion = require('../../lib/electron').getElectronVersion();
const insiders = product.quality !== 'stable';
let codeVersion = pakage.version;
if (insiders) {
codeVersion = `${codeVersion}-insider`;
}
const githubToken = process.argv[2];
const hockeyAppToken = process.argv[3];
const is64 = process.argv[4] === 'x64';
const hockeyAppId = process.argv[5];
let platform: Platform;
if (process.platform === 'darwin') {
platform = Platform.MAC_OS;
} else if (process.platform === 'win32') {
platform = is64 ? Platform.WIN_64 : Platform.WIN_32;
} else {
platform = is64 ? Platform.LINUX_64 : Platform.LINUX_32;
}
// Create version and upload symbols in HockeyApp
if (repository && codeVersion && electronVersion && (product.quality === 'stable' || product.quality === 'insider')) {
ensureVersionAndSymbols({
repository,
platform,
versions: {
code: codeVersion,
insiders,
electron: electronVersion
},
access: {
githubToken,
hockeyAppToken,
hockeyAppId
}
}).then(() => {
console.log('HockeyApp: done');
}).catch(error => {
console.error(`HockeyApp: error (${error})`);
});
} else {
console.log(`HockeyApp: skipping due to unexpected context (repository: ${repository}, codeVersion: ${codeVersion}, electronVersion: ${electronVersion}, quality: ${product.quality})`);
}

View File

@@ -1,100 +0,0 @@
phases:
- phase: Windows
queue: Hosted VS2017
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- powershell: |
$ErrorActionPreference = "Stop"
yarn
.\node_modules\.bin\gulp electron
npm run gulp -- hygiene
.\node_modules\.bin\tsc -p .\src\tsconfig.monaco.json --noEmit
npm run compile
node build/lib/builtInExtensions.js
name: build
- powershell: |
$ErrorActionPreference = "Stop"
.\scripts\test.bat --tfs
.\scripts\test-integration.bat
yarn smoketest --screenshots "$(Build.ArtifactStagingDirectory)\artifacts" --log "$(Build.ArtifactStagingDirectory)\artifacts\smoketest.log"
name: test
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: build-artifacts-win32
publishLocation: Container
condition: succeededOrFailed()
- phase: Linux
queue: Hosted Linux Preview
steps:
- script: |
set -e
apt-get update
apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 libgconf-2-4 dbus xvfb libgtk-3-0
cp build/tfs/linux/x64/xvfb.init /etc/init.d/xvfb
chmod +x /etc/init.d/xvfb
update-rc.d xvfb defaults
ln -sf /bin/dbus-daemon /usr/bin/dbus-daemon
service xvfb start
service dbus start
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
yarn
npm run gulp -- electron-x64
- script: |
set -e
npm run gulp -- hygiene
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
npm run compile
node build/lib/builtInExtensions.js
name: build
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --tfs
# DISPLAY=:10 ./scripts/test-integration.sh
name: test
- phase: macOS
queue: Hosted macOS Preview
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
yarn
npm run gulp -- electron-x64
- script: |
set -e
npm run gulp -- hygiene
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
npm run compile
node build/lib/builtInExtensions.js
name: build
- script: |
set -e
./scripts/test.sh --tfs
./scripts/test-integration.sh
yarn smoketest --screenshots "$(Build.ArtifactStagingDirectory)/artifacts" --log "$(Build.ArtifactStagingDirectory)/artifacts/smoketest.log"
name: test
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: build-artifacts-darwin
publishLocation: Container
condition: succeededOrFailed()

View File

@@ -19,9 +19,6 @@ step "Install dependencies" \
step "Hygiene" \
npm run gulp -- hygiene
step "Monaco Editor Check" \
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
step "Mix in repository from vscode-distro" \
npm run gulp -- mixin

View File

@@ -1,51 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin electron
node build/tfs/common/installDistro.js
- script: |
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-darwin-min
name: build
- script: |
./scripts/test.sh --build --tfs
name: test
- script: |
# archive the unsigned build
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin-unsigned.zip * && popd
# publish the unsigned build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/tfs/common/publish.js \
"$(VSCODE_QUALITY)" \
darwin \
archive-unsigned \
"VSCode-darwin-$(VSCODE_QUALITY)-unsigned.zip" \
$VERSION \
false \
../VSCode-darwin-unsigned.zip
# enqueue the unsigned build
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
node build/tfs/common/enqueue.js "$(VSCODE_QUALITY)"
npm run gulp -- upload-vscode-configuration

View File

@@ -22,9 +22,6 @@ step "Install dependencies" \
step "Hygiene" \
npm run gulp -- hygiene
step "Monaco Editor Check" \
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
step "Mix in repository from vscode-distro" \
npm run gulp -- mixin

View File

@@ -1,103 +0,0 @@
steps:
- script: |
# dependencies
dpkg --add-architecture i386
apt-get update
DEPS=" \
gcc-multilib g++-multilib \
pkg-config \
dbus \
xvfb \
fakeroot \
bc \
bsdmainutils \
rpm \
"
if [[ "$(VSCODE_ARCH)" == "x64" ]]; then
DEPS="$DEPS \
dpkg-dev \
libgconf-2-4 \
libnss3 \
libasound2 \
libxtst6 \
libx11-dev \
libxkbfile-dev \
libxss1 \
libx11-xcb-dev \
libsecret-1-dev \
"
else
DEPS="$DEPS \
dpkg-dev:i386 \
libgconf-2-4:i386 \
libnss3:i386 \
libasound2:i386 \
libxtst6:i386 \
libnotify4:i386 \
libx11-dev:i386 \
libxkbfile-dev:i386 \
libxss1:i386 \
libx11-xcb-dev:i386 \
libgl1-mesa-glx:i386 libgl1-mesa-dri:i386 \
libgirepository-1.0-1:i386 \
gir1.2-glib-2.0:i386 \
gir1.2-secret-1:i386 \
libsecret-1-dev:i386 \
libgtk2.0-0:i386 \
"
fi
apt-get install -y $DEPS
# setup xvfb
cp build/tfs/linux/$(VSCODE_ARCH)/xvfb.init /etc/init.d/xvfb
chmod +x /etc/init.d/xvfb
update-rc.d xvfb defaults
service xvfb start
# setup dbus
ln -sf /bin/dbus-daemon /usr/bin/dbus-daemon
service dbus start
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
- script: |
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
name: build
- script: |
npm run gulp -- "electron-$(VSCODE_ARCH)"
DISPLAY=:10 ./scripts/test.sh --build --tfs
name: test
- script: |
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
#npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-snap"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
./build/tfs/linux/release2.sh "$(VSCODE_ARCH)" "$(LINUX_REPO_PASSWORD)"

View File

@@ -1,67 +0,0 @@
#!/bin/bash
set -e
# Arguments
ARCH="$1"
LINUX_REPO_PASSWORD="$2"
# Variables
PLATFORM_LINUX="linux-$ARCH"
PLATFORM_DEB="linux-deb-$ARCH"
PLATFORM_RPM="linux-rpm-$ARCH"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
REPO="`pwd`"
ROOT="$REPO/.."
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\.deb$//g')"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_LINUX archive-unsigned $TARBALL_FILENAME $VERSION true $TARBALL_PATH
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_DEB package $DEB_FILENAME $VERSION true $DEB_PATH
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_RPM package $RPM_FILENAME $VERSION true $RPM_PATH
# SNAP_FILENAME="$(ls $REPO/.build/linux/snap/$ARCH/ | grep .snap)"
# SNAP_PATH="$REPO/.build/linux/snap/$ARCH/$SNAP_FILENAME"
IS_FROZEN="$(node build/tfs/linux/frozen-check.js $VSCODE_QUALITY)"
if [ -z "$VSCODE_QUALITY" ]; then
echo "VSCODE_QUALITY is not set, skipping repo package publish"
elif [ "$IS_FROZEN" = "true" ]; then
echo "$VSCODE_QUALITY is frozen, skipping repo package publish"
else
if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ]; then
if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then
# Write config files needed by API, use eval to force environment variable expansion
pushd build/tfs/linux
# Submit to apt repo
if [ "$DEB_ARCH" = "amd64" ]; then
eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"vscode\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > apt-config.json
./repoapi_client.sh -config apt-config.json -addfile $DEB_PATH
fi
# Submit to yum repo (disabled as it's manual until signing is automated)
# eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"vscode\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > yum-config.json
# ./repoapi_client.sh -config yum-config.json -addfile $RPM_PATH
popd
echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>"
fi
fi
fi

View File

@@ -1,413 +0,0 @@
phases:
- phase: Windows
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
queue:
name: Hosted VS2017
parallel: 2
matrix:
x64:
VSCODE_ARCH: x64
ia32:
VSCODE_ARCH: ia32
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- powershell: |
$ErrorActionPreference = "Stop"
"machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js
- powershell: |
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-copy-inno-updater"
name: build
- powershell: |
$ErrorActionPreference = "Stop"
npm run gulp -- "electron-$(VSCODE_ARCH)"
.\scripts\test.bat --build --tfs
# yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
name: test
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-229803",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- powershell: |
$ErrorActionPreference = "Stop"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-setup"
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: VSCodeSetup.exe
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-229803",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- powershell: |
$ErrorActionPreference = "Stop"
$Repo = "$(pwd)"
$Root = "$Repo\.."
$Exe = "$Repo\.build\win32-$(VSCODE_ARCH)\setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
$Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
$env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"
$assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }
node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip
node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $Exe
# publish hockeyapp symbols
$hockeyAppId = if ("$(VSCODE_ARCH)" -eq "ia32") { "$(VSCODE_HOCKEYAPP_ID_WIN32)" } else { "$(VSCODE_HOCKEYAPP_ID_WIN64)" }
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" $hockeyAppId
- phase: Linux
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
queue: linux-x64
variables:
VSCODE_ARCH: x64
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
name: build
- script: |
set -e
npm run gulp -- "electron-$(VSCODE_ARCH)"
DISPLAY=:10 ./scripts/test.sh --build --tfs
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
name: test
- script: |
set -e
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
#npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-snap"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
./build/tfs/linux/release2.sh "$(VSCODE_ARCH)" "$(LINUX_REPO_PASSWORD)"
# publish hockeyapp symbols
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX64)"
- phase: Linux32
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
queue: linux-ia32
variables:
VSCODE_ARCH: ia32
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
name: build
- script: |
set -e
npm run gulp -- "electron-$(VSCODE_ARCH)"
DISPLAY=:10 ./scripts/test.sh --build --tfs
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
name: test
- script: |
set -e
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
#npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-snap"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
./build/tfs/linux/release2.sh "$(VSCODE_ARCH)" "$(LINUX_REPO_PASSWORD)"
# publish hockeyapp symbols
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX32)"
- phase: macOS
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
queue: Hosted macOS Preview
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps
name: build
- script: |
set -e
./scripts/test.sh --build --tfs
APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
name: test
- script: |
set -e
# archive the unsigned build
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin-unsigned.zip * && popd
# publish the unsigned build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/tfs/common/publish.js \
"$(VSCODE_QUALITY)" \
darwin \
archive-unsigned \
"VSCode-darwin-$(VSCODE_QUALITY)-unsigned.zip" \
$VERSION \
false \
../VSCode-darwin-unsigned.zip
# publish hockeyapp symbols
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_MACOS)"
# enqueue the unsigned build
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
node build/tfs/common/enqueue.js "$(VSCODE_QUALITY)"
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
npm run gulp -- upload-vscode-configuration

View File

@@ -24,10 +24,6 @@ step "Hygiene" {
exec { & npm run gulp -- hygiene }
}
step "Monaco Editor Check" {
exec { & .\node_modules\.bin\tsc -p .\src\tsconfig.monaco.json --noEmit }
}
$env:VSCODE_MIXIN_PASSWORD = $mixinPassword
step "Mix in repository from vscode-distro" {
exec { & npm run gulp -- mixin }
@@ -45,10 +41,6 @@ step "Build minified" {
exec { & npm run gulp -- "vscode-win32-$global:arch-min" }
}
step "Copy Inno updater" {
exec { & npm run gulp -- "vscode-win32-$global:arch-copy-inno-updater" }
}
# step "Create loader snapshot" {
# exec { & node build\lib\snapshotLoader.js --arch=$global:arch }
# }

View File

@@ -1,212 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- powershell: |
"machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
npm run gulp -- mixin
node build/tfs/common/installDistro.js
- powershell: |
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-copy-inno-updater"
name: build
- powershell: |
npm run gulp -- "electron-$(VSCODE_ARCH)"
.\scripts\test.bat --build --tfs
name: test
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-229803",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- powershell: |
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-setup"
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: VSCodeSetup.exe
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-229803",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- powershell: |
$Repo = "$(pwd)"
$Root = "$Repo\.."
$Exe = "$Repo\.build\win32-$(VSCODE_ARCH)\setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
$Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
$env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"
$assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }
node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip
node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $Exe

View File

@@ -1,7 +1,7 @@
# install node
$env:Path = $env:NVM_HOME + ";" + $env:NVM_SYMLINK + ";" + $env:Path
$NodeVersion = "8.9.1"
# nvm install $NodeVersion
# nvm use $NodeVersion
# npm install -g yarn
nvm install $NodeVersion
nvm use $NodeVersion
npm install -g yarn
$env:Path = $env:NVM_HOME + "\v" + $NodeVersion + ";" + $env:Path

View File

@@ -1,7 +0,0 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"allowJs": false,
"checkJs": false
}
}

View File

@@ -7,11 +7,7 @@
"preserveConstEnums": true,
"sourceMap": false,
"experimentalDecorators": true,
// enable JavaScript type checking for the language service
// use the tsconfig.build.json for compiling, which disables JavaScript
// type checking so that JavaScript files are not transpiled
"allowJs": true,
"checkJs": true
"newLine": "LF"
},
"exclude": [
"node_modules/**"

View File

@@ -9,6 +9,5 @@
"always"
],
"triple-equals": true
},
"defaultSeverity": "warning"
}
}

File diff suppressed because it is too large

View File

@@ -19,7 +19,7 @@ OutputDir={#OutputDir}
OutputBaseFilename=SqlOpsStudioSetup
Compression=lzma
SolidCompression=yes
AppMutex={code:GetAppMutex}
AppMutex={#AppMutex}
SetupMutex={#AppMutex}setup
WizardImageFile={#RepoDir}\resources\win32\inno-big.bmp
WizardSmallImageFile={#RepoDir}\resources\win32\inno-small.bmp
@@ -52,17 +52,10 @@ Type: filesandordirs; Name: "{app}\resources\app\out"; Check: IsNotUpdate
Type: filesandordirs; Name: "{app}\resources\app\plugins"; Check: IsNotUpdate
Type: filesandordirs; Name: "{app}\resources\app\extensions"; Check: IsNotUpdate
Type: filesandordirs; Name: "{app}\resources\app\node_modules"; Check: IsNotUpdate
Type: filesandordirs; Name: "{app}\resources\app\node_modules.asar.unpacked"; Check: IsNotUpdate
Type: files; Name: "{app}\resources\app\node_modules.asar"; Check: IsNotUpdate
Type: files; Name: "{app}\resources\app\Credits_45.0.2454.85.html"; Check: IsNotUpdate
[UninstallDelete]
Type: filesandordirs; Name: "{app}\_"
[Tasks]
Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
Name: "quicklaunchicon"; Description: "{cm:CreateQuickLaunchIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked; OnlyBelowVersion: 0,6.1
Name: "associatewithfiles"; Description: "{cm:AssociateWithFiles,{#NameShort}}"; GroupDescription: "{cm:Other}"; Flags: unchecked
Name: "addtopath"; Description: "{cm:AddToPath}"; GroupDescription: "{cm:Other}"
Name: "runcode"; Description: "{cm:RunAfter,{#NameShort}}"; GroupDescription: "{cm:Other}"; Check: WizardSilent
@@ -84,13 +77,6 @@ Root: HKCR; Subkey: "{#RegValueName}SourceFile\DefaultIcon"; ValueType: string;
Root: HKCR; Subkey: "{#RegValueName}SourceFile\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""
Root: HKCU; Subkey: "Environment"; ValueType: expandsz; ValueName: "Path"; ValueData: "{olddata};{app}\bin"; Tasks: addtopath; Check: NeedsAddPath(ExpandConstant('{app}\bin'))
Root: HKCU; Subkey: "Software\Classes\.sql\OpenWithProgids"; ValueType: none; ValueName: "{#RegValueName}"; Flags: deletevalue uninsdeletevalue; Tasks: associatewithfiles
Root: HKCU; Subkey: "Software\Classes\.sql\OpenWithProgids"; ValueType: string; ValueName: "{#RegValueName}.sql"; ValueData: ""; Flags: uninsdeletevalue; Tasks: associatewithfiles
Root: HKCU; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; ValueName: ""; ValueData: "{cm:SourceFile,SQL}"; Flags: uninsdeletekey; Tasks: associatewithfiles
Root: HKCU; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; ValueName: "AppUserModelID"; ValueData: "{#AppUserId}"; Flags: uninsdeletekey; Tasks: associatewithfiles
Root: HKCU; Subkey: "Software\Classes\{#RegValueName}.sql\DefaultIcon"; ValueType: string; ValueName: ""; ValueData: "{app}\resources\app\resources\win32\code_file.ico"; Tasks: associatewithfiles
Root: HKCU; Subkey: "Software\Classes\{#RegValueName}.sql\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""; Tasks: associatewithfiles
[Code]
// Don't allow installing conflicting architectures
function InitializeSetup(): Boolean;
@@ -152,48 +138,6 @@ begin
Result := True;
end;
function GetAppMutex(Value: string): string;
begin
if IsBackgroundUpdate() then
Result := ''
else
Result := '{#AppMutex}';
end;
function GetDestDir(Value: string): string;
begin
if IsBackgroundUpdate() then
Result := ExpandConstant('{app}\_')
else
Result := ExpandConstant('{app}');
end;
function BoolToStr(Value: Boolean): String;
begin
if Value then
Result := 'true'
else
Result := 'false';
end;
procedure CurStepChanged(CurStep: TSetupStep);
var
UpdateResultCode: Integer;
begin
if IsBackgroundUpdate() and (CurStep = ssPostInstall) then
begin
CreateMutex('{#AppMutex}-ready');
while (CheckForMutexes('{#AppMutex}')) do
begin
Log('Application is still running, waiting');
Sleep(1000);
end;
Exec(ExpandConstant('{app}\tools\inno_updater.exe'), ExpandConstant('"{app}\{#ExeBasename}.exe" ' + BoolToStr(LockFileExists())), '', SW_SHOW, ewWaitUntilTerminated, UpdateResultCode);
end;
end;
// http://stackoverflow.com/a/23838239/261019
procedure Explode(var Dest: TArrayOfString; Text: String; Separator: String);
var

Binary file not shown.

Binary file not shown.

File diff suppressed because it is too large

3
dataprotocol-client/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
lib
node_modules
npm-debug.log

View File

@@ -0,0 +1,3 @@
src
.gitignore
tsfmt.json

View File

@@ -0,0 +1,25 @@
{
"name": "dataprotocol-client",
"version": "1.0.0",
"description": "SQL Operations Studio studio implementation of vscode-language-client",
"main": "lib/main.js",
"typings": "./lib/main",
"scripts": {
"prepare": "node ./node_modules/vscode/bin/install && tsc -p ./src",
"compile": "tsc -p ./src",
"watch": "tsc -w -p ./src",
"update-vscode": "node ./node_modules/vscode/bin/install"
},
"author": "Microsoft",
"license": "ISC",
"dependencies": {
"typescript": "2.6.2",
"vscode-languageclient": "3.5.0"
},
"devDependencies": {
"vscode": "1.1.5"
},
"engines": {
"vscode": "^1.15"
}
}

View File

@@ -0,0 +1,68 @@
import * as data from 'data';
import * as proto from './protocol';
import * as types from './types';
export interface Ic2p {
asConnectionParams(connectionUri: string, connectionInfo: data.ConnectionInfo): proto.ConnectParams;
asExecutionPlanOptions(planOptions: data.ExecutionPlanOptions): types.ExecutionPlanOptions;
asScriptingParams(connectionUri: string, operation: data.ScriptOperation, metadata: data.ObjectMetadata, paramDetails: data.ScriptingParamDetails): types.ScriptingParams;
}
function asConnectionParams(ownerUri: string, connInfo: data.ConnectionInfo): proto.ConnectParams {
return {
ownerUri,
connection: {
options: connInfo.options
}
};
}
function asExecutionPlanOptions(planOptions: data.ExecutionPlanOptions): types.ExecutionPlanOptions {
return {
includeEstimatedExecutionPlanXml: planOptions ? planOptions.displayEstimatedQueryPlan : undefined,
includeActualExecutionPlanXml: planOptions ? planOptions.displayActualQueryPlan : undefined
};
}
function asScriptingParams(ownerURI: string, operation: data.ScriptOperation, metadata: data.ObjectMetadata, paramDetails: data.ScriptingParamDetails): types.ScriptingParams {
let scriptingObject: types.ScriptingObject = {
type: metadata.metadataTypeName,
schema: metadata.schema,
name: metadata.name
};
let targetDatabaseEngineEdition = paramDetails.targetDatabaseEngineEdition;
let targetDatabaseEngineType = paramDetails.targetDatabaseEngineType;
let scriptCompatibilityOption = paramDetails.scriptCompatibilityOption;
let scriptOptions: types.ScriptOptions = {
scriptCreateDrop: (operation === types.ScriptOperation.Delete) ? 'ScriptDrop' :
(operation === types.ScriptOperation.Select) ? 'ScriptSelect' : 'ScriptCreate',
typeOfDataToScript: 'SchemaOnly',
scriptStatistics: 'ScriptStatsNone',
targetDatabaseEngineEdition: targetDatabaseEngineEdition ? targetDatabaseEngineEdition : 'SqlServerEnterpriseEdition',
targetDatabaseEngineType: targetDatabaseEngineType ? targetDatabaseEngineType : 'SingleInstance',
scriptCompatibilityOption: scriptCompatibilityOption ? scriptCompatibilityOption : 'Script140Compat'
};
return {
connectionString: null,
filePath: paramDetails.filePath,
scriptingObjects: [scriptingObject],
scriptDestination: 'ToEditor',
includeObjectCriteria: null,
excludeObjectCriteria: null,
includeSchemas: null,
excludeSchemas: null,
includeTypes: null,
excludeTypes: null,
scriptOptions,
connectionDetails: null,
selectScript: null,
ownerURI,
operation
};
}
export const c2p: Ic2p = {
asConnectionParams,
asExecutionPlanOptions,
asScriptingParams
};
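
A hedged usage sketch of the converter added above (the import path, URI, and connection option keys are illustrative assumptions; only the ownerUri/connection.options shape comes from the code itself):

```ts
// Hedged sketch; module path and option keys are assumptions for illustration.
import { c2p } from './c2p';

const connectParams = c2p.asConnectionParams('untitled:query1', {
	options: {
		server: 'localhost',
		database: 'master',
		authenticationType: 'Integrated'
	}
});
// connectParams -> { ownerUri: 'untitled:query1', connection: { options: { ... } } }
// This is the payload sent with the 'connection/connect' request declared in the
// protocol definitions that follow.
```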

File diff suppressed because it is too large

View File

@@ -0,0 +1,633 @@
import { ClientCapabilities as VSClientCapabilities, RequestType, NotificationType } from 'vscode-languageclient';
import * as types from './types';
import * as data from 'data';
export interface ConnectionClientCapabilities {
connection?: {
/**
* Whether the connection supports dynamic registration
*/
dynamicRegistration?: boolean;
};
backup?: {
/**
* Whether the backup supports dynamic registration
*/
dynamicRegistration?: boolean;
};
restore?: {
/**
* Whether the restore supports dynamic registration
*/
dynamicRegistration?: boolean;
};
query?: {
/**
* Whether the query supports dynamic registration
*/
dynamicRegistration?: boolean;
};
objectExplorer?: {
/**
* Whether the object explorer supports dynamic registration
*/
dynamicRegistration?: boolean;
};
scripting?: {
/**
* Whether the scripting supports dynamic registration
*/
dynamicRegistration?: boolean;
};
taskServices?: {
/**
* Whether the task services support dynamic registration
*/
dynamicRegistration?: boolean;
};
fileBrowser?: {
/**
* Whether the file browser supports dynamic registration
*/
dynamicRegistration?: boolean;
};
profiler?: {
/**
* Whether the profiler supports dynamic registration
*/
dynamicRegistration?: boolean;
};
capabilities?: {
/**
* Whether the capabilities service supports dynamic registration
*/
dynamicRegistration?: boolean;
};
metadata?: {
/**
* Whether the metadata service supports dynamic registration
*/
dynamicRegistration?: boolean;
};
adminServices?: {
/**
* Whether the admin services support dynamic registration
*/
dynamicRegistration?: boolean;
};
}
export interface ClientCapabilities extends VSClientCapabilities {
connection?: ConnectionClientCapabilities;
}
//---- Refresh IntelliSense ----------------------------------------
/**
* Notification sent when an IntelliSense cache invalidation is requested
*/
export namespace RebuildIntelliSenseNotification {
export const type = new NotificationType<RebuildIntelliSenseParams, void>('textDocument/rebuildIntelliSense');
}
/**
* Rebuild IntelliSense notification parameters
*/
export class RebuildIntelliSenseParams {
/**
* URI identifying the text document
*/
public ownerUri: string;
}
// ------------------------------- < Connect Request > ----------------------------------------------
/**
* Connection request message format
*/
export interface ConnectParams {
/**
* URI identifying the owner of the connection
*/
ownerUri: string;
/**
* Details for creating the connection
*/
connection: types.ConnectionDetails;
}
// Connection request message callback declaration
export namespace ConnectionRequest {
export const type = new RequestType<ConnectParams, boolean, void, void>('connection/connect');
}
// ------------------------------- < Connection Complete Event > ------------------------------------
export namespace ConnectionCompleteNotification {
export const type = new NotificationType<types.ConnectionCompleteParams, void>('connection/complete');
}
// ------------------------------- < Connection Changed Event > -------------------------------------
/**
* Parameters for the ConnectionChanged notification.
*/
export class ConnectionChangedParams {
/**
* Owner URI of the connection that changed.
*/
public ownerUri: string;
/**
* Summary of details containing any connection changes.
*/
public connection: types.ConnectionSummary;
}
/**
* Connection changed event callback declaration.
*/
export namespace ConnectionChangedNotification {
export const type = new NotificationType<ConnectionChangedParams, void>('connection/connectionchanged');
}
// ------------------------------- < Disconnect Request > -------------------------------------------
// Disconnect request message format
export class DisconnectParams {
// URI identifying the owner of the connection
public ownerUri: string;
}
// Disconnect response format
export type DisconnectResult = boolean;
// Disconnect request message callback declaration
export namespace DisconnectRequest {
export const type = new RequestType<DisconnectParams, DisconnectResult, void, void>('connection/disconnect');
}
// ------------------------------- < Cancel Connect Request > ---------------------------------------
// Cancel connect request message format
export class CancelConnectParams {
/**
* URI identifying the owner of the connection
*/
public ownerUri: string;
}
// Cancel connect response format.
export type CancelConnectResult = boolean;
// Cancel connect request message callback declaration
export namespace CancelConnectRequest {
export const type = new RequestType<CancelConnectParams, CancelConnectResult, void, void>('connection/cancelconnect');
}
// ------------------------------- < Change Database Request > -------------------------------------
export class ChangeDatabaseParams {
public ownerUri: string;
public newDatabase: string;
}
export namespace ChangeDatabaseRequest {
export const type = new RequestType<ChangeDatabaseParams, boolean, void, void>('connection/changedatabase');
}
// ------------------------------- < List Databases Request > ---------------------------------------
// List databases request format
export class ListDatabasesParams {
// Connection information to use for querying master
public ownerUri: string;
}
// List databases request callback declaration
export namespace ListDatabasesRequest {
export const type = new RequestType<ListDatabasesParams, data.ListDatabasesResult, void, void>('connection/listdatabases');
}
// Language Flavor Changed ================================================================================
/**
* Parameters to provide when sending a language flavor changed notification
*/
export interface DidChangeLanguageFlavorParams {
uri: string;
language: string;
flavor: string;
}
// ------------------------------- < Language Flavor Changed Notification > ---------------------------------------
export namespace LanguageFlavorChangedNotification {
export const type = new NotificationType<DidChangeLanguageFlavorParams, void>('connection/languageflavorchanged');
}
// ------------------------------- < Table Metadata Request > ---------------------------------------
// Table metadata request format
export class TableMetadataParams {
// Connection information to use for querying master
public ownerUri: string;
public schema: string;
public objectName: string;
}
// Table metadata response format
export class TableMetadataResult {
public columns: data.ColumnMetadata[];
}
// Table metadata request callback declaration
export namespace TableMetadataRequest {
export const type = new RequestType<TableMetadataParams, TableMetadataResult, void, void>('metadata/table');
}
// ------------------------------- < View Metadata Request > ---------------------------------------
// Table metadata request callback declaration
export namespace ViewMetadataRequest {
export const type = new RequestType<TableMetadataParams, TableMetadataResult, void, void>('metadata/view');
}
/**
* Event sent when the language service is finished updating after a connection
*/
export namespace IntelliSenseReadyNotification {
export const type = new NotificationType<types.IntelliSenseReadyParams, void>('textDocument/intelliSenseReady');
}
// ------------------------------- < Capabilties Discovery Event > ------------------------------------
export class CapabiltiesDiscoveryParams {
public hostName: string;
public hostVersion: string;
}
export namespace CapabiltiesDiscoveryRequest {
export const type = new RequestType<CapabiltiesDiscoveryParams, types.CapabiltiesDiscoveryResult, void, void>('capabilities/list');
}
// Query Execution ================================================================================
// ------------------------------- < Query Cancellation Request > ------------------------------------
export namespace QueryCancelRequest {
export const type = new RequestType<QueryCancelParams, data.QueryCancelResult, void, void>('query/cancel');
}
export interface QueryCancelParams {
ownerUri: string;
}
// ------------------------------- < Query Dispose Request > ------------------------------------
export namespace QueryDisposeRequest {
export const type = new RequestType<QueryDisposeParams, QueryDisposeResult, void, void>('query/dispose');
}
/**
* Parameters to provide when disposing of a query
*/
export interface QueryDisposeParams {
ownerUri: string;
}
/**
* Result received upon successful disposal of a query
*/
export interface QueryDisposeResult {
}
// ------------------------------- < Query Execution Complete Notification > ------------------------------------
export namespace QueryExecuteCompleteNotification {
export const type = new NotificationType<data.QueryExecuteCompleteNotificationResult, void>('query/complete');
}
// ------------------------------- < Query Batch Start Notification > ------------------------------------
export namespace QueryExecuteBatchStartNotification {
export const type = new NotificationType<data.QueryExecuteBatchNotificationParams, void>('query/batchStart');
}
// ------------------------------- < Query Batch Complete Notification > ------------------------------------
export namespace QueryExecuteBatchCompleteNotification {
export const type = new NotificationType<data.QueryExecuteBatchNotificationParams, void>('query/batchComplete');
}
// ------------------------------- < Query ResultSet Complete Notification > ------------------------------------
export namespace QueryExecuteResultSetCompleteNotification {
export const type = new NotificationType<data.QueryExecuteResultSetCompleteNotificationParams, void>('query/resultSetComplete');
}
// ------------------------------- < Query Message Notification > ------------------------------------
export namespace QueryExecuteMessageNotification {
export const type = new NotificationType<data.QueryExecuteMessageParams, void>('query/message');
}
// ------------------------------- < Query Execution Request > ------------------------------------
export namespace QueryExecuteRequest {
export const type = new RequestType<types.QueryExecuteParams, QueryExecuteResult, void, void>('query/executeDocumentSelection');
}
export interface QueryExecuteResult { }
// ------------------------------- < Query Results Request > ------------------------------------
export namespace QueryExecuteSubsetRequest {
export const type = new RequestType<data.QueryExecuteSubsetParams, data.QueryExecuteSubsetResult, void, void>('query/subset');
}
export interface ResultSetSubset {
rowCount: number;
rows: data.DbCellValue[][];
}
// ------------------------------- < Execute Statement > ------------------------------------
export interface QueryExecuteStatementParams {
ownerUri: string;
line: number;
column: number;
}
export namespace QueryExecuteStatementRequest {
export const type = new RequestType<QueryExecuteStatementParams, QueryExecuteResult, void, void>('query/executedocumentstatement');
}
// --------------------------------- < Save Results as CSV Request > ------------------------------------------
// save results in csv format
export namespace SaveResultsAsCsvRequest {
export const type = new RequestType<data.SaveResultsRequestParams, data.SaveResultRequestResult, void, void>('query/saveCsv');
}
// --------------------------------- </ Save Results as CSV Request > ------------------------------------------
// --------------------------------- < Save Results as JSON Request > ------------------------------------------
// save results in json format
export namespace SaveResultsAsJsonRequest {
export const type = new RequestType<data.SaveResultsRequestParams, data.SaveResultRequestResult, void, void>('query/saveJson');
}
// --------------------------------- </ Save Results as JSON Request > ------------------------------------------
// --------------------------------- < Save Results as Excel Request > ------------------------------------------
// save results in Excel format
export namespace SaveResultsAsExcelRequest {
export const type = new RequestType<data.SaveResultsRequestParams, data.SaveResultRequestResult, void, void>('query/saveExcel');
}
// --------------------------------- </ Save Results as Excel Request > ------------------------------------------
// ------------------------------- < Execute and Return > -----------------------------------
export namespace SimpleExecuteRequest {
export const type = new RequestType<data.SimpleExecuteParams, data.SimpleExecuteResult, void, void>('query/simpleexecute');
}
// ------------------------------- < Execute String > ------------------------------------
export interface QueryExecuteStringParams {
query: string;
ownerUri: string;
}
export namespace QueryExecuteStringRequest {
export const type = new RequestType<QueryExecuteStringParams, QueryExecuteResult, void, void>('query/executeString');
}
// ------------------------------- < Metadata Events > ------------------------------------
export namespace MetadataQueryRequest {
export const type = new RequestType<types.MetadataQueryParams, types.MetadataQueryResult, void, void>('metadata/list');
}
// ------------------------------- < Scripting Events > ------------------------------------
export namespace ScriptingRequest {
export const type = new RequestType<types.ScriptingParams, data.ScriptingResult, void, void>('scripting/script');
}
// ------------------------------- < Scripting Complete Event > ------------------------------------
export namespace ScriptingCompleteNotification {
export const type = new NotificationType<types.ScriptingCompleteParams, void>('scripting/scriptComplete');
}
// Edit Data ======================================================================================
// Shared Interfaces --------------------------------------------------------------------------
export interface EditSessionOperationParams {
ownerUri: string;
}
export interface EditRowOperationParams extends EditSessionOperationParams {
rowId: number;
}
export interface EditCellResult {
cell: data.EditCell;
isRowDirty: boolean;
}
// edit/commit --------------------------------------------------------------------------------
export namespace EditCommitRequest {
export const type = new RequestType<data.EditCommitParams, EditCommitResult, void, void>('edit/commit');
}
export interface EditCommitResult { }
// edit/createRow -----------------------------------------------------------------------------
export namespace EditCreateRowRequest {
export const type = new RequestType<data.EditCreateRowParams, data.EditCreateRowResult, void, void>('edit/createRow');
}
// edit/deleteRow -----------------------------------------------------------------------------
export namespace EditDeleteRowRequest {
export const type = new RequestType<data.EditDeleteRowParams, EditDeleteRowResult, void, void>('edit/deleteRow');
}
export interface EditDeleteRowResult { }
// edit/dispose -------------------------------------------------------------------------------
export namespace EditDisposeRequest {
export const type = new RequestType<data.EditDisposeParams, EditDisposeResult, void, void>('edit/dispose');
}
export interface EditDisposeResult { }
// edit/initialize ----------------------------------------------------------------------------
export namespace EditInitializeRequest {
export const type = new RequestType<data.EditInitializeParams, EditInitializeResult, void, void>('edit/initialize');
}
export interface EditInitializeResult { }
// edit/revertCell --------------------------------------------------------------------------------
export namespace EditRevertCellRequest {
export const type = new RequestType<data.EditRevertCellParams, data.EditRevertCellResult, void, void>('edit/revertCell');
}
// edit/revertRow -----------------------------------------------------------------------------
export namespace EditRevertRowRequest {
export const type = new RequestType<data.EditRevertRowParams, EditRevertRowResult, void, void>('edit/revertRow');
}
export interface EditRevertRowResult { }
// edit/sessionReady Event --------------------------------------------------------------------
export namespace EditSessionReadyNotification {
export const type = new NotificationType<data.EditSessionReadyParams, void>('edit/sessionReady');
}
// edit/updateCell ----------------------------------------------------------------------------
export namespace EditUpdateCellRequest {
export const type = new RequestType<data.EditUpdateCellParams, data.EditUpdateCellResult, void, void>('edit/updateCell');
}
// edit/subset ------------------------------------------------------------------------------------
export namespace EditSubsetRequest {
export const type = new RequestType<data.EditSubsetParams, data.EditSubsetResult, void, void>('edit/subset');
}
// ------------------------------- < Object Explorer Events > ------------------------------------
export namespace ObjectExplorerCreateSessionRequest {
export const type = new RequestType<types.ConnectionDetails, types.CreateSessionResponse, void, void>('objectexplorer/createsession');
}
export namespace ObjectExplorerExpandRequest {
export const type = new RequestType<types.ExpandParams, boolean, void, void>('objectexplorer/expand');
}
export namespace ObjectExplorerRefreshRequest {
export const type = new RequestType<types.ExpandParams, boolean, void, void>('objectexplorer/refresh');
}
export namespace ObjectExplorerCloseSessionRequest {
export const type = new RequestType<types.CloseSessionParams, types.CloseSessionResponse, void, void>('objectexplorer/closesession');
}
// ------------------------------- < Object Explorer Events > ------------------------------------
export namespace ObjectExplorerCreateSessionCompleteNotification {
export const type = new NotificationType<types.SessionCreatedParameters, void>('objectexplorer/sessioncreated');
}
export namespace ObjectExplorerExpandCompleteNotification {
export const type = new NotificationType<types.ExpandResponse, void>('objectexplorer/expandCompleted');
}
// ------------------------------- < Task Service Events > ------------------------------------
export namespace ListTasksRequest {
export const type = new RequestType<data.ListTasksParams, data.ListTasksResponse, void, void>('tasks/listtasks');
}
export namespace CancelTaskRequest {
export const type = new RequestType<data.CancelTaskParams, boolean, void, void>('tasks/canceltask');
}
// ------------------------------- < Task Service Events > ------------------------------------
export namespace TaskStatusChangedNotification {
export const type = new NotificationType<data.TaskProgressInfo, void>('tasks/statuschanged');
}
export namespace TaskCreatedNotification {
export const type = new NotificationType<data.TaskInfo, void>('tasks/newtaskcreated');
}
// ------------------------------- < Admin Service Events > ------------------------------------
export namespace CreateDatabaseRequest {
export const type = new RequestType<types.CreateDatabaseParams, data.CreateDatabaseResponse, void, void>('admin/createdatabase');
}
export namespace DefaultDatabaseInfoRequest {
export const type = new RequestType<types.DefaultDatabaseInfoParams, types.DefaultDatabaseInfoResponse, void, void>('admin/defaultdatabaseinfo');
}
export namespace CreateLoginRequest {
export const type = new RequestType<types.CreateLoginParams, data.CreateLoginResponse, void, void>('admin/createlogin');
}
export namespace GetDatabaseInfoRequest {
export const type = new RequestType<types.GetDatabaseInfoParams, types.GetDatabaseInfoResponse, void, void>('admin/getdatabaseinfo');
}
// ------------------------------- < Disaster Recovery Events > ------------------------------------
export namespace BackupRequest {
export const type = new RequestType<types.BackupParams, data.BackupResponse, void, void>('backup/backup');
}
export namespace BackupConfigInfoRequest {
export const type = new RequestType<types.DefaultDatabaseInfoParams, types.BackupConfigInfoResponse, void, void>('backup/backupconfiginfo');
}
export namespace RestoreRequest {
export const type = new RequestType<types.RestoreParams, data.RestoreResponse, void, void>('restore/restore');
}
export namespace RestorePlanRequest {
export const type = new RequestType<types.RestoreParams, data.RestorePlanResponse, void, void>('restore/restoreplan');
}
export namespace CancelRestorePlanRequest {
export const type = new RequestType<types.RestoreParams, boolean, void, void>('restore/cancelrestoreplan');
}
export namespace RestoreConfigInfoRequest {
export const type = new RequestType<types.RestoreConfigInfoRequestParams, types.RestoreConfigInfoResponse, void, void>('restore/restoreconfiginfo');
}
// ------------------------------- < File Browser Events > ------------------------------------
export namespace FileBrowserOpenRequest {
export const type = new RequestType<types.FileBrowserOpenParams, boolean, void, void>('filebrowser/open');
}
export namespace FileBrowserOpenedNotification {
export const type = new NotificationType<data.FileBrowserOpenedParams, void>('filebrowser/opencomplete');
}
export namespace FileBrowserExpandRequest {
export const type = new RequestType<types.FileBrowserExpandParams, boolean, void, void>('filebrowser/expand');
}
export namespace FileBrowserExpandedNotification {
export const type = new NotificationType<data.FileBrowserExpandedParams, void>('filebrowser/expandcomplete');
}
export namespace FileBrowserValidateRequest {
export const type = new RequestType<types.FileBrowserValidateParams, boolean, void, void>('filebrowser/validate');
}
export namespace FileBrowserValidatedNotification {
export const type = new NotificationType<data.FileBrowserValidatedParams, void>('filebrowser/validatecomplete');
}
export namespace FileBrowserCloseRequest {
export const type = new RequestType<types.FileBrowserCloseParams, data.FileBrowserCloseResponse, void, void>('filebrowser/close');
}
// ------------------------------- < Profiler Events > ------------------------------------
export namespace StartProfilingRequest {
export const type = new RequestType<types.StartProfilingParams, types.StartProfilingResponse, void, void>('profiler/start');
}
export namespace StopProfilingRequest {
export const type = new RequestType<types.StopProfilingParams, types.StopProfilingResponse, void, void>('profiler/stop');
}
export namespace ProfilerEventsAvailableNotification {
export const type = new NotificationType<types.ProfilerEventsAvailableParams, void>('profiler/eventsavailable');
}
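A rough usage sketch, not part of the file above: these typed RequestType/NotificationType objects are what a client built on vscode-languageclient passes to sendRequest and onNotification. The `client`, `ownerUri`, and logging below are assumptions for illustration only.

import { LanguageClient } from 'vscode-languageclient';

// Assume an already-started LanguageClient connected to the SQL tools service.
declare const client: LanguageClient;

async function listDatabases(ownerUri: string) {
    // The request type carries the params/result typing, so this call is checked
    // against ListDatabasesParams and data.ListDatabasesResult at compile time.
    return client.sendRequest(ListDatabasesRequest.type, { ownerUri });
}

// Notifications are subscribed to the same way, using the declared type object.
client.onNotification(IntelliSenseReadyNotification.type, params => {
    console.log(`IntelliSense ready for ${params.ownerUri}`);
});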

View File

@@ -0,0 +1,198 @@
import * as data from 'data';
import * as types from './types';
export interface Ip2c {
asProviderMetadata(params: types.MetadataQueryResult): data.ProviderMetadata;
asServerCapabilities(result: types.CapabiltiesDiscoveryResult): data.DataProtocolServerCapabilities;
}
function asProviderMetadata(params: types.MetadataQueryResult): data.ProviderMetadata {
let objectMetadata: data.ObjectMetadata[] = [];
if (!params.metadata || !params.metadata.length) {
return {
objectMetadata
};
}
for (let i = 0; i < params.metadata.length; ++i) {
let metadata: data.ObjectMetadata = params.metadata[i];
let metadataTypeName: string;
if (metadata.metadataTypeName) {
// Read from the provider since it's defined
metadataTypeName = metadata.metadataTypeName;
} else if (metadata.metadataType === types.MetadataType.View) {
metadataTypeName = 'View';
} else if (metadata.metadataType === types.MetadataType.SProc) {
metadataTypeName = 'StoredProcedure';
} else if (metadata.metadataType === types.MetadataType.Function) {
metadataTypeName = 'Function';
} else {
metadataTypeName = 'Table';
}
objectMetadata.push({
metadataTypeName,
metadataType: metadata.metadataType,
name: metadata.name,
schema: metadata.schema,
urn: metadata.urn
});
}
return <data.ProviderMetadata>{
objectMetadata
};
}
function asServiceOptionType(val: string): data.ServiceOptionType {
if (val === 'string') {
return data.ServiceOptionType.string;
} else if (val === 'multistring') {
return data.ServiceOptionType.multistring;
} else if (val === 'password') {
return data.ServiceOptionType.password;
} else if (val === 'number') {
return data.ServiceOptionType.number;
} else if (val === 'boolean') {
return data.ServiceOptionType.boolean;
} else if (val === 'category') {
return data.ServiceOptionType.category;
} else if (val === 'object') {
return data.ServiceOptionType.object;
}
// assume string for unknown value types
return data.ServiceOptionType.string;
}
function buildServiceOption(srcOption: types.ServiceOption): data.ServiceOption {
return {
name: srcOption.name,
displayName: srcOption.displayName ? srcOption.displayName : srcOption.name,
description: srcOption.description,
groupName: srcOption.groupName,
defaultValue: srcOption.defaultValue,
categoryValues: srcOption.categoryValues,
isRequired: srcOption.isRequired,
isArray: srcOption.isArray,
objectType: srcOption.objectType,
valueType: asServiceOptionType(srcOption.valueType),
};
}
function asServerCapabilities(result: types.CapabiltiesDiscoveryResult): data.DataProtocolServerCapabilities {
let capabilities: data.DataProtocolServerCapabilities = {
protocolVersion: result.capabilities.protocolVersion,
providerName: result.capabilities.providerName,
providerDisplayName: result.capabilities.providerDisplayName,
connectionProvider: undefined,
adminServicesProvider: undefined,
features: []
};
if (result.capabilities.adminServicesProvider) {
capabilities.adminServicesProvider = <data.AdminServicesOptions>{
databaseInfoOptions: new Array<data.ServiceOption>(),
databaseFileInfoOptions: new Array<data.ServiceOption>(),
fileGroupInfoOptions: new Array<data.ServiceOption>()
};
if (result.capabilities.adminServicesProvider.databaseInfoOptions
&& result.capabilities.adminServicesProvider.databaseInfoOptions.length > 0) {
for (let i = 0; i < result.capabilities.adminServicesProvider.databaseInfoOptions.length; ++i) {
let srcOption: any = result.capabilities.adminServicesProvider.databaseInfoOptions[i];
let descOption: data.ServiceOption = buildServiceOption(srcOption);
capabilities.adminServicesProvider.databaseInfoOptions.push(descOption);
}
}
if (result.capabilities.adminServicesProvider.databaseFileInfoOptions
&& result.capabilities.adminServicesProvider.databaseFileInfoOptions.length > 0) {
for (let i = 0; i < result.capabilities.adminServicesProvider.databaseFileInfoOptions.length; ++i) {
//let srcOption: types.ServiceOption = result.capabilities.adminServicesProvider.databaseFileInfoOptions[i];
let srcOption: any = result.capabilities.adminServicesProvider.databaseFileInfoOptions[i];
let descOption: data.ServiceOption = buildServiceOption(srcOption);
capabilities.adminServicesProvider.databaseFileInfoOptions.push(descOption);
}
}
if (result.capabilities.adminServicesProvider.fileGroupInfoOptions
&& result.capabilities.adminServicesProvider.fileGroupInfoOptions.length > 0) {
for (let i = 0; i < result.capabilities.adminServicesProvider.fileGroupInfoOptions.length; ++i) {
//let srcOption: types.ServiceOption = result.capabilities.adminServicesProvider.fileGroupInfoOptions[i];
let srcOption: any = result.capabilities.adminServicesProvider.fileGroupInfoOptions[i];
let descOption: data.ServiceOption = buildServiceOption(srcOption);
capabilities.adminServicesProvider.fileGroupInfoOptions.push(descOption);
}
}
}
if (result.capabilities.connectionProvider
&& result.capabilities.connectionProvider.options
&& result.capabilities.connectionProvider.options.length > 0) {
capabilities.connectionProvider = <data.ConnectionProviderOptions>{
options: new Array<data.ConnectionOption>()
};
for (let i = 0; i < result.capabilities.connectionProvider.options.length; ++i) {
let srcOption: any = result.capabilities.connectionProvider.options[i];
let descOption: data.ConnectionOption = {
name: srcOption.name,
displayName: srcOption.displayName ? srcOption.displayName : srcOption.name,
description: srcOption.description,
groupName: srcOption.groupName,
defaultValue: srcOption.defaultValue,
categoryValues: srcOption.categoryValues,
isIdentity: srcOption.isIdentity,
isRequired: srcOption.isRequired,
valueType: asServiceOptionType(srcOption.valueType),
specialValueType: undefined
};
if (srcOption.specialValueType === 'serverName') {
descOption.specialValueType = data.ConnectionOptionSpecialType.serverName;
} else if (srcOption.specialValueType === 'databaseName') {
descOption.specialValueType = data.ConnectionOptionSpecialType.databaseName;
} else if (srcOption.specialValueType === 'authType') {
descOption.specialValueType = data.ConnectionOptionSpecialType.authType;
} else if (srcOption.specialValueType === 'userName') {
descOption.specialValueType = data.ConnectionOptionSpecialType.userName;
} else if (srcOption.specialValueType === 'password') {
descOption.specialValueType = data.ConnectionOptionSpecialType.password;
} else if (srcOption.specialValueType === 'appName') {
descOption.specialValueType = data.ConnectionOptionSpecialType.appName;
}
capabilities.connectionProvider.options.push(descOption);
}
}
if (result.capabilities.features
&& result.capabilities.features.length > 0) {
result.capabilities.features.forEach(feature => {
let descFeature: data.FeatureMetadataProvider = {
enabled: feature.enabled,
featureName: feature.featureName,
optionsMetadata: []
};
capabilities.features.push(descFeature);
if (feature.optionsMetadata) {
feature.optionsMetadata.forEach(srcOption => {
descFeature.optionsMetadata.push(buildServiceOption(<any>srcOption));
});
}
});
}
return capabilities;
}
export const p2c: Ip2c = {
asProviderMetadata,
asServerCapabilities
};
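A minimal sketch of how these converters might be wired up; the `client` variable, import path, and host values are assumptions, and CapabiltiesDiscoveryRequest is the request type declared earlier in this diff, not part of this file.

import { LanguageClient } from 'vscode-languageclient';
import * as data from 'data';
import { CapabiltiesDiscoveryRequest } from './main'; // path assumed

async function discoverCapabilities(client: LanguageClient): Promise<data.DataProtocolServerCapabilities> {
    // The raw result comes back in the wire format (string-valued option types, etc.)...
    const raw = await client.sendRequest(CapabiltiesDiscoveryRequest.type, {
        hostName: 'sqlops', hostVersion: '0.26.0'
    });
    // ...and is normalized into data.DataProtocolServerCapabilities here.
    return p2c.asServerCapabilities(raw);
}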

View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "es6",
"module": "commonjs",
"moduleResolution": "node",
"sourceMap": false,
"inlineSources": false,
"declaration": true,
"stripInternal": true,
"outDir": "../lib"
}
}

View File

@@ -0,0 +1,939 @@
import * as data from 'data';
export interface CreateSessionResponse {
sessionId: string;
}
export interface SessionCreatedParameters {
success: boolean;
sessionId: string;
rootNode: NodeInfo;
errorMessage: string;
}
export interface ExpandResponse {
nodePath: string;
sessionId: string;
nodes: NodeInfo[];
errorMessage: string;
}
export interface NodeInfo {
nodePath: string;
nodeType: string;
nodeSubType: string;
nodeStatus: string;
label: string;
isLeaf: boolean;
metadata: data.ObjectMetadata;
errorMessage: string;
}
export interface ExpandParams {
sessionId: string;
nodePath: string;
}
export interface CloseSessionParams {
sessionId: string;
}
export interface CloseSessionResponse {
success: boolean;
sessionId: string;
}
export interface CategoryValue {
displayName: string;
name: string;
}
export interface ServiceOption {
name: string;
displayName: string;
description: string;
groupName: string;
valueType: string;
defaultValue: string;
objectType: string;
categoryValues: CategoryValue[];
isRequired: boolean;
isArray: boolean;
}
export interface ConnectionOption {
name: string;
displayName: string;
description: string;
groupName: string;
valueType: string;
defaultValue: string;
objectType: string;
categoryValues: CategoryValue[];
specialValueType: string;
isIdentity: boolean;
isRequired: boolean;
isArray: boolean;
}
export interface ConnectionProviderOptions {
options: ConnectionOption[];
}
export interface AdminServicesProviderOptions {
databaseInfoOptions: ServiceOption[];
databaseFileInfoOptions: ServiceOption[];
fileGroupInfoOptions: ServiceOption[];
}
export interface FeatureMetadataProvider {
enabled: boolean;
featureName: string;
optionsMetadata: ServiceOption[];
}
/**
* Parameters to initialize a connection to a database
*/
export interface ConnectionDetails {
/**
* connection options
*/
options: {};
}
/**
* Summary that identifies a unique database connection.
*/
export class ConnectionSummary {
/**
* server name
*/
public serverName: string;
/**
* database name
*/
public databaseName: string;
/**
* user name
*/
public userName: string;
}
/**
* Connection response format.
*/
export class ConnectionCompleteParams {
/**
* URI identifying the owner of the connection
*/
public ownerUri: string;
/**
* connection id returned from service host.
*/
public connectionId: string;
/**
* any diagnostic messages returned from the service host.
*/
public messages: string;
/**
* Error message returned from the engine, if any.
*/
public errorMessage: string;
/**
* Error number returned from the engine, if any.
*/
public errorNumber: number;
/**
* Information about the connected server.
*/
public serverInfo: ServerInfo;
/**
* information about the actual connection established
*/
public connectionSummary: ConnectionSummary;
}
/**
* Parameters for the IntelliSense ready notification
*/
export class IntelliSenseReadyParams {
/**
* URI identifying the text document
*/
public ownerUri: string;
}
/**
* Information about a SQL Server instance.
*/
export class ServerInfo {
/**
* The major version of the SQL Server instance.
*/
public serverMajorVersion: number;
/**
* The minor version of the SQL Server instance.
*/
public serverMinorVersion: number;
/**
* The build of the SQL Server instance.
*/
public serverReleaseVersion: number;
/**
* The ID of the engine edition of the SQL Server instance.
*/
public engineEditionId: number;
/**
* String containing the full server version text.
*/
public serverVersion: string;
/**
* String describing the product level of the server.
*/
public serverLevel: string;
/**
* The edition of the SQL Server instance.
*/
public serverEdition: string;
/**
* Whether the SQL Server instance is running in the cloud (Azure) or not.
*/
public isCloud: boolean;
/**
* The version of Azure that the SQL Server instance is running on, if applicable.
*/
public azureVersion: number;
/**
* The Operating System version string of the machine running the SQL Server instance.
*/
public osVersion: string;
}
export class CapabiltiesDiscoveryResult {
public capabilities: data.DataProtocolServerCapabilities;
}
// Task Services types
export enum TaskStatus {
notStarted = 0,
inProgress = 1,
succeeded = 2,
succeededWithWarning = 3,
failed = 4,
canceled = 5
}
// Admin Services types
export interface CreateDatabaseParams {
ownerUri: string;
databaseInfo: data.DatabaseInfo;
}
export interface DefaultDatabaseInfoParams {
ownerUri: string;
}
export interface DefaultDatabaseInfoResponse {
defaultDatabaseInfo: data.DatabaseInfo;
}
export interface GetDatabaseInfoResponse {
databaseInfo: data.DatabaseInfo;
}
export interface GetDatabaseInfoParams {
ownerUri: string;
}
export interface BackupConfigInfoResponse {
backupConfigInfo: data.BackupConfigInfo;
}
export interface CreateLoginParams {
ownerUri: string;
loginInfo: data.LoginInfo;
}
// Disaster Recovery types
export interface BackupInfo {
ownerUri: string;
databaseName: string;
backupType: number;
backupComponent: number;
backupDeviceType: number;
selectedFiles: string;
backupsetName: string;
selectedFileGroup: { [path: string]: string };
// List of {key: backup path, value: device type}
backupPathDevices: { [path: string]: number };
backupPathList: [string];
isCopyOnly: boolean;
formatMedia: boolean;
initialize: boolean;
skipTapeHeader: boolean;
mediaName: string;
mediaDescription: string;
checksum: boolean;
continueAfterError: boolean;
logTruncation: boolean;
tailLogBackup: boolean;
retainDays: number;
compressionOption: number;
verifyBackupRequired: boolean;
encryptionAlgorithm: number;
encryptorType: number;
encryptorName: string;
}
export interface BackupParams {
ownerUri: string;
backupInfo: BackupInfo;
taskExecutionMode: data.TaskExecutionMode;
}
export interface RestoreParams {
ownerUri: string;
options: {};
taskExecutionMode: data.TaskExecutionMode;
}
export interface RestoreConfigInfoRequestParams {
ownerUri: string;
}
export interface RestoreConfigInfoResponse {
configInfo: { [key: string]: any };
}
export interface RestoreDatabaseFileInfo {
fileType: string;
logicalFileName: string;
originalFileName: string;
restoreAsFileName: string;
}
export interface FileBrowserOpenParams {
ownerUri: string;
expandPath: string;
fileFilters: string[];
changeFilter: boolean;
}
export interface FileTreeNode {
children: FileTreeNode[];
isExpanded: boolean;
isFile: boolean;
name: string;
fullPath: string;
}
export interface FileTree {
rootNode: FileTreeNode;
selectedNode: FileTreeNode;
}
export interface FileBrowserExpandParams {
ownerUri: string;
expandPath: string;
}
export interface FileBrowserValidateParams {
ownerUri: string;
serviceType: string;
selectedFiles: string[];
}
export interface FileBrowserCloseParams {
ownerUri: string;
}
export interface DatabaseFileInfo {
properties: LocalizedPropertyInfo[];
id: string;
isSelected: boolean;
}
export interface LocalizedPropertyInfo {
propertyName: string;
propertyValue: string;
propertyDisplayName: string;
propertyValueDisplayName: string;
}
export interface RestorePlanDetailInfo {
name: string;
currentValue: any;
isReadOnly: boolean;
isVisible: boolean;
defaultValue: any;
}
// Query Execution types
export interface ResultSetSummary {
id: number;
batchId: number;
rowCount: number;
columnInfo: IDbColumn[];
}
export interface BatchSummary {
hasError: boolean;
id: number;
selection: data.ISelectionData;
resultSetSummaries: ResultSetSummary[];
executionElapsed: string;
executionEnd: string;
executionStart: string;
}
export interface IDbColumn {
allowDBNull?: boolean;
baseCatalogName: string;
baseColumnName: string;
baseSchemaName: string;
baseServerName: string;
baseTableName: string;
columnName: string;
columnOrdinal?: number;
columnSize?: number;
isAliased?: boolean;
isAutoIncrement?: boolean;
isExpression?: boolean;
isHidden?: boolean;
isIdentity?: boolean;
isKey?: boolean;
isBytes?: boolean;
isChars?: boolean;
isSqlVariant?: boolean;
isUdt?: boolean;
dataType: string;
isXml?: boolean;
isJson?: boolean;
isLong?: boolean;
isReadOnly?: boolean;
isUnique?: boolean;
numericPrecision?: number;
numericScale?: number;
udtAssemblyQualifiedName: string;
dataTypeName: string;
}
export interface IGridResultSet {
columns: IDbColumn[];
rowsUri: string;
numberOfRows: number;
}
export interface IResultMessage {
batchId?: number;
isError: boolean;
time: string;
message: string;
}
export interface ExecutionPlanOptions {
includeEstimatedExecutionPlanXml?: boolean;
includeActualExecutionPlanXml?: boolean;
}
export interface QueryExecuteParams {
ownerUri: string;
querySelection: data.ISelectionData;
executionPlanOptions?: ExecutionPlanOptions;
}
export enum EditRowState {
clean = 0,
dirtyInsert = 1,
dirtyDelete = 2,
dirtyUpdate = 3
}
export interface EditRow {
cells: data.DbCellValue[];
id: number;
isDirty: boolean;
state: EditRowState;
}
export class MetadataQueryParams {
/**
* Owner URI of the connection to query metadata for.
*/
public ownerUri: string;
}
/**
* Value-level version of data.MetadataType; the two enums must be kept in sync.
*/
export enum MetadataType {
Table = 0,
View = 1,
SProc = 2,
Function = 3
}
export class MetadataQueryResult {
public metadata: data.ObjectMetadata[];
}
export interface ScriptOptions {
/**
* Generate ANSI padding statements
*/
scriptANSIPadding?: boolean;
/**
* Append the generated script to a file
*/
appendToFile?: boolean;
/**
* Continue to script if an error occurs. Otherwise, stop.
*/
continueScriptingOnError?: boolean;
/**
* Convert user-defined data types to base types.
*/
convertUDDTToBaseType?: boolean;
/**
* Generate script for dependent objects for each object scripted.
*/
generateScriptForDependentObjects?: boolean;
/**
* Include descriptive headers for each object generated.
*/
includeDescriptiveHeaders?: boolean;
/**
* Check that an object with the given name exists before dropping or altering or that an object with the given name does not exist before creating.
*/
includeIfNotExists?: boolean;
/**
* Script options to set vardecimal storage format.
*/
includeVarDecimal?: boolean;
/**
* Include system generated constraint names to enforce declarative referential integrity.
*/
scriptDRIIncludeSystemNames?: boolean;
/**
* Include statements in the script that are not supported on the specified SQL Server database engine type.
*/
includeUnsupportedStatements?: boolean;
/**
* Prefix object names with the object schema.
*/
schemaQualify?: boolean;
/**
* Script options to set bindings option.
*/
bindings?: boolean;
/**
* Script the objects that use collation.
*/
collation?: boolean;
/**
* Script the default values.
*/
default?: boolean;
/**
* Script Object CREATE/DROP statements.
*/
scriptCreateDrop: string;
/**
* Script the Extended Properties for each object scripted.
*/
scriptExtendedProperties?: boolean;
/**
* Script only features compatible with the specified version of SQL Server.
*/
scriptCompatibilityOption: string;
/**
* Script only features compatible with the specified SQL Server database engine type.
*/
targetDatabaseEngineType: string;
/**
* Script only features compatible with the specified SQL Server database engine edition.
*/
targetDatabaseEngineEdition: string;
/**
* Script all logins available on the server. Passwords will not be scripted.
*/
scriptLogins?: boolean;
/**
* Generate object-level permissions.
*/
scriptObjectLevelPermissions?: boolean;
/**
* Script owner for the objects.
*/
scriptOwner?: boolean;
/**
* Script statistics, and optionally include histograms, for each selected table or view.
*/
scriptStatistics: string;
/**
* Generate USE DATABASE statement.
*/
scripUseDatabase?: boolean;
/**
* Generate script that contains schema only or schema and data.
*/
typeOfDataToScript: string;
/**
* Scripts the change tracking information.
*/
scriptChangeTracking?: boolean;
/**
* Script the check constraints for each table or view scripted.
*/
scriptCheckConstraints?: boolean;
/**
* Scripts the data compression information.
*/
scriptDataCompressionOptions?: boolean;
/**
* Script the foreign keys for each table scripted.
*/
scriptForeignKeys?: boolean;
/**
* Script the full-text indexes for each table or indexed view scripted.
*/
scriptFullTextIndexes?: boolean;
/**
* Script the indexes (including XML and clustered indexes) for each table or indexed view scripted.
*/
scriptIndexes?: boolean;
/**
* Script the primary keys for each table or view scripted.
*/
scriptPrimaryKeys?: boolean;
/**
* Script the triggers for each table or view scripted.
*/
scriptTriggers?: boolean;
/**
* Script the unique keys for each table or view scripted.
*/
uniqueKeys?: boolean;
}
export interface ScriptingObject {
/**
* The database object type
*/
type: string;
/**
* The schema of the database object
*/
schema: string;
/**
* The database object name
*/
name: string;
}
export interface ScriptingParams {
/**
* File path used when writing out the script.
*/
filePath: string;
/**
* Whether scripting to a single file or file per object.
*/
scriptDestination: string;
/**
* Connection string of the target database the scripting operation will run against.
*/
connectionString: string;
/**
* A list of scripting objects to script
*/
scriptingObjects: ScriptingObject[];
/**
* A list of scripting objects which specify the include criteria of objects to script.
*/
includeObjectCriteria: ScriptingObject[];
/**
* A list of scripting objects which specify the exclude criteria of objects not to script.
*/
excludeObjectCriteria: ScriptingObject[];
/**
* A list of schema names of objects to script.
*/
includeSchemas: string[];
/**
* A list of schema names of objects not to script.
*/
excludeSchemas: string[];
/**
* A list of type names of objects to script.
*/
includeTypes: string[];
/**
* A list of type names of objects not to script.
*/
excludeTypes: string[];
/**
* Scripting options for the ScriptingParams
*/
scriptOptions: ScriptOptions;
/**
* Connection details for the ScriptingParams
*/
connectionDetails: ConnectionDetails;
/**
* Owner URI of the connection
*/
ownerURI: string;
/**
* Whether the scripting operation is for
* select script statements
*/
selectScript: boolean;
/**
* Operation associated with the script request
*/
operation: data.ScriptOperation;
}
export interface ScriptingCompleteParams {
/**
* The error details for an error that occurred during the scripting operation.
*/
errorDetails: string;
/**
* The error message for an error that occurred during the scripting operation.
*/
errorMessage: string;
/**
* A value to indicate an error occurred during the scripting operation.
*/
hasError: boolean;
/**
* A value to indicate the scripting operation was canceled.
*/
canceled: boolean;
/**
* A value to indicate the scripting operation successfully completed.
*/
success: boolean;
}
export class TableMetadata {
columns: data.ColumnMetadata[];
}
/**
* Parameters to start a profiler session
*/
export interface StartProfilingParams {
/**
* Session Owner URI
*/
ownerUri: string;
/**
* Session options
*/
options: {};
}
export interface StartProfilingResponse {
succeeded: string;
errorMessage: string;
}
/**
* Parameters to stop a profiler session
*/
export interface StopProfilingParams {
/**
* Session Owner URI
*/
ownerUri: string;
}
export interface StopProfilingResponse {
succeeded: string;
errorMessage: string;
}
/**
* Profiler Event
*/
export interface ProfilerEvent {
/**
* Event class name
*/
name: string;
/**
* Event timestamp
*/
timestamp: string;
/**
* Event values
*/
values: {};
}
/**
* Profiler events available notification parameters
*/
export interface ProfilerEventsAvailableParams {
/**
* Session owner URI
*/
ownerUri: string;
/**
* New profiler events available
*/
events: ProfilerEvent[];
}
/**
* Value-level version of data.ScriptOperation; the two enums must be kept in sync.
*/
export enum ScriptOperation {
Select = 0,
Create = 1,
Insert = 2,
Update = 3,
Delete = 4,
Execute = 5,
Alter = 6
}
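As an illustration of how a few of these types fit together (all values made up): a scripting request for a single table pairs a ScriptingObject with a ScriptOperation, and both end up inside the ScriptingParams sent with the ScriptingRequest declared earlier in this diff.

// Hypothetical values; only the shapes come from the interfaces above.
const target: ScriptingObject = { type: 'Table', schema: 'dbo', name: 'Customers' };
const operation = ScriptOperation.Create;
// target and operation would be folded into a ScriptingParams object along with
// filePath, connectionDetails, scriptOptions, and the include/exclude criteria lists.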

View File

@@ -0,0 +1,2 @@
/// <reference path='../../../src/sql/data.d.ts'/>

View File

@@ -0,0 +1,17 @@
{
"tabSize": 4,
"indentSize": 4,
"newLineCharacter": "\n",
"convertTabsToSpaces": false,
"insertSpaceAfterCommaDelimiter": true,
"insertSpaceAfterSemicolonInForStatements": true,
"insertSpaceBeforeAndAfterBinaryOperators": true,
"insertSpaceAfterKeywordsInControlFlowStatements": true,
"insertSpaceAfterFunctionKeywordForAnonymousFunctions": true,
"insertSpaceAfterOpeningAndBeforeClosingNonemptyParenthesis": false,
"insertSpaceAfterOpeningAndBeforeClosingNonemptyBrackets": false,
"insertSpaceAfterOpeningAndBeforeClosingTemplateStringBraces": false,
"insertSpaceBeforeFunctionParenthesis": false,
"placeOpenBraceOnNewLineForFunctions": false,
"placeOpenBraceOnNewLineForControlBlocks": false
}

File diff suppressed because it is too large.

3
extensions-modules/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
lib/
node_modules
*.log

View File

@@ -0,0 +1,9 @@
.vscode/
lib/test/
lib/**/*.map
src/
test/
.eslintrc
.gitignore
gulpfile.js
tsd.json

View File

@@ -0,0 +1,28 @@
{
"name": "extensions-modules",
"version": "0.1.0",
"description": "Shared modules for Carbon extensions",
"dependencies": {
"dataprotocol-client": "file:../dataprotocol-client",
"decompress": "^4.2.0",
"fs-extra-promise": "^1.0.1",
"http-proxy-agent": "^2.0.0",
"https-proxy-agent": "^2.1.0",
"opener": "^1.4.3",
"tmp": "0.0.33",
"vscode-extension-telemetry": "0.0.8",
"vscode-languageclient": "^3.5.0"
},
"devDependencies": {
"@types/node": "^6.0.61",
"vscode": "1.0.1"
},
"scripts": {
"prepare": "tsc -p ./src",
"compile": "tsc -p ./src",
"watch": "tsc -w -p ./src",
"update-vscode": "node ./node_modules/vscode/bin/install"
},
"main": "./lib/main.js",
"typings": "./lib/main"
}

View File

@@ -0,0 +1,89 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const fs = require('fs');
import * as path from 'path';
import { IConfig } from '../languageservice/interfaces';
import { Constants } from '../models/constants';
/*
* Config class handles getting values from config.json.
*/
export default class Config implements IConfig {
private _configJsonContent: any = undefined;
private _extensionConfigSectionName: string = undefined;
private _fromBuild: boolean = undefined;
constructor(extensionConfigSectionName: string, private path: string, fromBuild?: boolean) {
this._extensionConfigSectionName = extensionConfigSectionName;
this._fromBuild = fromBuild;
}
public get configJsonContent(): any {
if (this._configJsonContent === undefined) {
this._configJsonContent = this.loadConfig();
}
return this._configJsonContent;
}
public getDownloadUrl(): string {
return this.getConfigValue(Constants.downloadUrlConfigKey);
}
public getInstallDirectory(): string {
return this.getConfigValue(Constants.installDirConfigKey);
}
public getExecutableFiles(): string[] {
return this.getConfigValue(Constants.executableFilesConfigKey);
}
public getPackageVersion(): string {
return this.getConfigValue(Constants.versionConfigKey);
}
public getConfigValue(configKey: string): any {
let json = this.configJsonContent;
let toolsConfig = json[Constants.serviceConfigKey];
let configValue: string = undefined;
if (toolsConfig !== undefined) {
configValue = toolsConfig[configKey];
}
return configValue;
}
public getExtensionConfig(key: string, defaultValue?: any): any {
let json = this.configJsonContent;
let extensionConfig = json[this._extensionConfigSectionName];
let configValue = extensionConfig[key];
if (!configValue) {
configValue = defaultValue;
}
return configValue;
}
public getWorkspaceConfig(key: string, defaultValue?: any): any {
let json = this.configJsonContent;
let configValue = json[key];
if (!configValue) {
configValue = defaultValue;
}
return configValue;
}
public loadConfig(): any {
let configContent = undefined;
if (this._fromBuild) {
let remainingPath = '../../../extensions/' + this._extensionConfigSectionName + '/client/out/config.json';
configContent = fs.readFileSync(path.join(__dirname, remainingPath));
}
else {
configContent = fs.readFileSync(this.path);
}
return JSON.parse(configContent);
}
}
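A minimal usage sketch, assuming the Config class above is in scope; the section name, path, and setting key are hypothetical, not defined by this file.

// Point Config at an extension's config.json and read the tools-service settings.
const config = new Config('mssql', '/path/to/extension/config.json');
const downloadUrl = config.getDownloadUrl();           // from the service config section
const installDir = config.getInstallDirectory();
const logDebug = config.getExtensionConfig('logDebugInfo', false); // key is hypothetical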

Some files were not shown because too many files have changed in this diff.