Compare commits

..

4 Commits

Author SHA1 Message Date
Karl Burtram
8095643ed4 Merge branch 'master' into release/0.25 2018-01-12 15:43:19 -08:00
Karl Burtram
96a3ded120 Merge branch 'master' into release/0.25 2018-01-11 17:33:31 -08:00
Karl Burtram
bd3aa9c3cf Merge branch 'master' into release/0.25 2018-01-10 22:08:15 -08:00
Cory Rivera
b765e5aa90 Add updater service url to product.json. (#467) 2018-01-10 16:51:56 -08:00
11308 changed files with 187089 additions and 375244 deletions

View File

@@ -11,6 +11,6 @@ trim_trailing_whitespace = true
# The indent size used in the `package.json` file cannot be changed # The indent size used in the `package.json` file cannot be changed
# https://github.com/npm/npm/pull/3180#issuecomment-16336516 # https://github.com/npm/npm/pull/3180#issuecomment-16336516
[{*.yml,*.yaml,package.json}] [{*.yml,*.yaml,npm-shrinkwrap.json,package.json}]
indent_style = space indent_style = space
indent_size = 2 indent_size = 2

View File

@@ -10,10 +10,5 @@
"no-extra-semi": "warn", "no-extra-semi": "warn",
"semi": "warn" "semi": "warn"
}, },
"extends": "eslint:recommended", "extends": "eslint:recommended"
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
} }

9
.gitattributes vendored
View File

@@ -1,9 +0,0 @@
* text=auto
LICENSE.txt eol=crlf
ThirdPartyNotices.txt eol=crlf
*.bat eol=crlf
*.cmd eol=crlf
*.ps1 eol=lf
*.sh eol=lf

18
.gitignore vendored
View File

@@ -1,17 +1,29 @@
.DS_Store .DS_Store
npm-debug.log npm-debug.log
Thumbs.db Thumbs.db
.DS_Store
*.dat
*.db
*.exe
*.log
*.nupkg
*.orig
*.vsix
*BROWSE.VC*
sqltoolsservice
coverage
test-reports
.vscode-test
node_modules/ node_modules/
.build/ .build/
.vs/
out/ out/
out-build/ out-build/
out-editor/ out-editor/
out-editor-esm/
out-editor-min/ out-editor-min/
out-monaco-editor-core/ out-monaco-editor-core/
out-vscode/ out-vscode/
out-vscode-min/ out-vscode-min/
build/node_modules build/node_modules
coverage/ coverage/
test_data/ _site
yarn-error.log

1
.nvmrc
View File

@@ -1 +0,0 @@
8.9.2

View File

@@ -7,13 +7,10 @@ os:
cache: cache:
directories: directories:
- $HOME/.cache/yarn - $HOME/.npm
notifications: notifications:
email: false email: false
webhooks:
- http://vscode-probot.westus.cloudapp.azure.com:3450/travis/notifications
- http://vscode-test-probot.westus.cloudapp.azure.com:3450/travis/notifications
addons: addons:
apt: apt:
@@ -32,26 +29,25 @@ addons:
before_install: before_install:
- git submodule update --init --recursive - git submodule update --init --recursive
- nvm install 8.9.1 - git clone --depth 1 https://github.com/creationix/nvm.git ./.nvm
- nvm use 8.9.1 - source ./.nvm/nvm.sh
- npm i -g yarn - nvm install 7.9.0
# - npm config set python `which python` - nvm use 7.9.0
- npm config set python `which python`
- npm install -g gulp
- if [ $TRAVIS_OS_NAME == "linux" ]; then - if [ $TRAVIS_OS_NAME == "linux" ]; then
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0; export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0;
sh -e /etc/init.d/xvfb start; sh -e /etc/init.d/xvfb start;
sleep 3; sleep 3;
fi fi
# Make npm logs less verbose
# - npm config set depth 0
# - npm config set loglevel warn
install: install:
- yarn - ./scripts/npm.sh install
script: script:
- node_modules/.bin/gulp electron --silent - gulp electron --silent
- node_modules/.bin/gulp compile --silent --max_old_space_size=4096 - gulp compile --silent --max_old_space_size=4096
- node_modules/.bin/gulp optimize-vscode --silent --max_old_space_size=4096 - gulp optimize-vscode --silent --max_old_space_size=4096
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi
after_success: after_success:

View File

@@ -1,9 +1,9 @@
{ {
// See https://go.microsoft.com/fwlink/?LinkId=827846 // See http://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format // for the documentation about the extensions.json format
"recommendations": [ "recommendations": [
"eg2.tslint", "eg2.tslint",
"dbaeumer.vscode-eslint", "dbaeumer.vscode-eslint",
"msjsdiag.debugger-for-chrome" "msjsdiag.debugger-for-chrome"
] ]
} }

10
.vscode/launch.json vendored
View File

@@ -1,6 +1,7 @@
{ {
"version": "0.1.0", "version": "0.1.0",
"configurations": [ "configurations": [
{ {
"type": "node", "type": "node",
"request": "launch", "request": "launch",
@@ -8,7 +9,7 @@
"program": "${workspaceFolder}/node_modules/gulp/bin/gulp.js", "program": "${workspaceFolder}/node_modules/gulp/bin/gulp.js",
"stopOnEntry": true, "stopOnEntry": true,
"args": [ "args": [
"hygiene" "watch-extension:json-client"
], ],
"cwd": "${workspaceFolder}" "cwd": "${workspaceFolder}"
}, },
@@ -86,11 +87,7 @@
"runtimeArgs": [ "runtimeArgs": [
"--inspect=5875" "--inspect=5875"
], ],
"skipFiles": [ "webRoot": "${workspaceFolder}"
"**/winjs*.js"
],
"webRoot": "${workspaceFolder}",
"timeout": 15000
}, },
{ {
"type": "node", "type": "node",
@@ -107,7 +104,6 @@
}, },
"stopOnEntry": false, "stopOnEntry": false,
"args": [ "args": [
"--delay",
"--timeout", "--timeout",
"2000" "2000"
], ],

View File

@@ -10,9 +10,6 @@
"when": "$(basename).ts" "when": "$(basename).ts"
} }
}, },
"files.associations": {
"OSSREADME.json": "jsonc"
},
"search.exclude": { "search.exclude": {
"**/node_modules": true, "**/node_modules": true,
"**/bower_components": true, "**/bower_components": true,
@@ -37,7 +34,5 @@
"command": "${workspaceFolder}\\scripts\\test.bat --coverage --run ${file}" "command": "${workspaceFolder}\\scripts\\test.bat --coverage --run ${file}"
} }
} }
], ]
"typescript.tsdk": "node_modules/typescript/lib", }
"git.ignoreLimitWarning": true
}

6
.vscode/tasks.json vendored
View File

@@ -33,11 +33,11 @@
"task": "tslint", "task": "tslint",
"label": "Run tslint", "label": "Run tslint",
"problemMatcher": [ "problemMatcher": [
"$tslint5" "$tslint4"
] ]
}, },
{ {
"label": "Run tests", "taskName": "Run tests",
"type": "shell", "type": "shell",
"command": "./scripts/test.sh", "command": "./scripts/test.sh",
"windows": { "windows": {
@@ -50,7 +50,7 @@
} }
}, },
{ {
"label": "Run Dev", "taskName": "Run Dev",
"type": "shell", "type": "shell",
"command": "./scripts/code.sh", "command": "./scripts/code.sh",
"windows": { "windows": {

View File

@@ -1,3 +0,0 @@
disturl "https://atom.io/download/electron"
target "1.7.12"
runtime "electron"

View File

@@ -1,123 +1,6 @@
# Change Log # Change Log
## Version 0.31.4 ## Version 0.25.2
* Release date: July 19, 2018
* Release status: Public Preview
## What's new in this version
* SQL Server Agent for SQL Operations Studio extension improvements
* Added view of Alerts, Operators, and Proxies and icons on left pane
* Added dialogs for New Job, New Job Step, New Alert, and New Operator
* Added Delete Job, Delete Alert, and Delete Operator (right-click)
* Added Previous Runs visualization
* Added Filters for each column name
* SQL Server Profiler for SQL Operations Studio extension improvements
* Added Hotkeys to quickly launch and start/stop Profiler
* Added 5 Default Templates to view Extended Events
* Added Server/Database connection name
* Added support for Azure SQL Database instances
* Added suggestion to exit Profiler when tab is closed when Profiler is still running
* Release of Combine Scripts Extension
* Wizard and Dialog Extensibility
* Fix GitHub Issues
## Version 0.30.6
* Release date: June 20, 2018
* Release status: Public Preview
## What's new in this version
* **SQL Server Profiler for SQL Operations Studio *Preview*** extension initial release
* The new **SQL Data Warehouse** extension includes rich customizable dashboard widgets surfacing insights to your data warehouse. This unlocks key scenarios around managing and tuning your data warehouse to ensure it is optimized for consistent performance.
* **Edit Data "Filtering and Sorting"** support
* **SQL Server Agent for SQL Operations Studio *Preview*** extension enhancements for Jobs and Job History views
* Improved **Wizard & Dialog UI Builder Framework** extensibility APIs
* Update VS Code Platform source code integrating [March 2018 (1.22)](https://code.visualstudio.com/updates/v1_22) and [April 2018 (1.23)](https://code.visualstudio.com/updates/v1_23) releases
* Fix GitHub Issues
## Version 0.29.3
* Release date: May 7, 2018
* Release status: Public Preview
## What's new in this version
The May release is focused on stabilization and bug fixes leading up to the Build conference. This build contains the following highlights.
* Announcing **Redgate SQL Search** extension available in Extension Manager
* Community Localization available for 10 languages: **German, Spanish, French, Italian, Japanese, Korean, Portuguese, Russian, Simplified Chinese and Traditional Chinese!**
* Reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement)
* Extension Manager has improved Marketplace experience to easily discover community extensions
* SQL Agent extension Jobs and Job History view improvement
* Updates for **whoisactive** and **Server Reports** extensions
* Continue to fix GitHub issues
## Version 0.28.6
* Release date: April 25, 2018
* Release status: Public Preview
## What's new in this version
The April Public Preview release contains some of the following highlights.
* Improvements to SQL Agent *Preview* extension
* Accessibility improvements for keyboard navigation, screen reader support and high-contrast mode.
* Improved large and protected file support for saving Admin protected and >256M files within SQL Ops Studio
* Integrated Terminal splitting to work with multiple open terminals at once
* Reduced installation on-disk file count foot print for faster installs and startup times
* Improvements to Server Reports extension
* Continue to fix GitHub issues
## Version 0.27.3
* Release date: March 28, 2017
* Release status: Public Preview
## What's new in this version
The March Public Preview release enables some key aspects of the SQL Operations
Studio extensibility story. Here are some highlights in this release.
* Enhance the Manage Dashboard extensibility model to support tabbed Insights and Configuration panes
* Dashboard Insights extensions for `sp_whoisactive` from [whoisactive.com](http://whoisactive.com)
* Extension Manager enables simple acquisition of 1st-party and 3rd-party extensions
* Add additional Extensibility APIs for `connection` and `objectexplorer` management
* Community Localization open for 10 languages
* Continue to fix important customer impacting GitHub issues
## Version 0.26.7
* Release date: February 16, 2017
* Release status: Public Preview Hotfix 1
## What's new in this version
* Bug fix for `#717 Selecting partial query and hitting Cmd or Ctrl+C opens terminal with Error message`
## Version 0.26.6
* Release date: February 15, 2017
* Release status: Public Preview
## What's new in this version
The February release fixes several important customer reported issues, as well as various feature improvements. We've also introduced auto-update support in February which will simplify keeping updated with the lastest changes.
Here's some of the highlights in the February release.
* Support Auto-Update installation on Windows and macOS
* Publish RPM and DEB packages to offical Microsoft repos
* Fix `#6 Keep connection and selected database when opening new query tabs`
* Fix `#22 'Server Name' and 'Database Name' - Can these be drop downs instead of text` boxes?
* Fix #481 Add "Check for updates" option.
* SQL Editor colorization and auto-completion fixes
* `#584 Keyword "FULL" not highlighted by IntelliSense`
* `#345 Colorize SQL functions within the editor`
* `#300 [#tempData] latest "]" will display green color`
* `#225 Keyword color mismatch`
* `#60 invalid sql syntax color highlighting when using temporary table in from clause`
* Introduce Connection extensibility API
* VS Code Editor 1.19 integration
* Update JustinPealing/html-query-plan component to pick-up several Query Plan viewer improvements
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)`
## Version 0.25.4
* Release date: January 17, 2017 * Release date: January 17, 2017
* Release status: Public Preview * Release status: Public Preview
@@ -140,7 +23,6 @@ Here's some of the highlights in the January release.
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes: We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* alextercete for `Fix "No extension gallery service configured" error (#427)` * alextercete for `Fix "No extension gallery service configured" error (#427)`
* SebastianPfliegel for `Add cursor snippet (#475)`
## Version 0.24.1 ## Version 0.24.1
* Release date: December 19, 2017 * Release date: December 19, 2017

View File

@@ -4,11 +4,11 @@ MICROSOFT SQL OPERATIONS STUDIO
Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual, Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us royalty-free right to use, copy, and modify the software code provided by us
("Software Code"). You may not sublicense the Software Code or any use of it ("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf) (except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or through distribution, network access, service agreement, lease, rental, or
otherwise. Unless applicable law gives you more rights, Microsoft reserves all otherwise. Unless applicable law gives you more rights, Microsoft reserves all
other rights not expressly granted herein, whether by implication, estoppel or other rights not expressly granted herein, whether by implication, estoppel or
otherwise. otherwise.
THE SOFTWARE CODE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY THE SOFTWARE CODE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY

View File

@@ -771,426 +771,5 @@
"\"\"\"" "\"\"\""
], ],
"isProd": true "isProd": true
},
{
"name": "spdlog original",
"version": "0.14.0",
"repositoryURL": "https://github.com/gabime/spdlog",
"license": "MIT",
"isProd": true
},
{
"isLicense": true,
"name": "spdlog",
"version": "0.14.0",
"repositoryURL": "https://github.com/gabime/spdlog",
"license": "MIT",
"licenseDetail": [
"MIT License",
"",
"Copyright (c) Microsoft Corporation. All rights reserved.",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to deal",
"in the Software without restriction, including without limitation the rights",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all",
"copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"SOFTWARE"
]
},
{
"name": "mdn-data",
"version": "1.1.12",
"repositoryURL": "https://github.com/mdn/data",
"license": "MPL",
"licenseDetail": [
"Mozilla Public License Version 2.0",
"",
"Copyright (c) 2018 Mozilla Corporation",
"",
"==================================",
"",
"1. Definitions",
"--------------",
"",
"1.1. \"Contributor\"",
" means each individual or legal entity that creates, contributes to",
" the creation of, or owns Covered Software.",
"",
"1.2. \"Contributor Version\"",
" means the combination of the Contributions of others (if any) used",
" by a Contributor and that particular Contributor's Contribution.",
"",
"1.3. \"Contribution\"",
" means Covered Software of a particular Contributor.",
"",
"1.4. \"Covered Software\"",
" means Source Code Form to which the initial Contributor has attached",
" the notice in Exhibit A, the Executable Form of such Source Code",
" Form, and Modifications of such Source Code Form, in each case",
" including portions thereof.",
"",
"1.5. \"Incompatible With Secondary Licenses\"",
" means",
"",
" (a) that the initial Contributor has attached the notice described",
" in Exhibit B to the Covered Software; or",
"",
" (b) that the Covered Software was made available under the terms of",
" version 1.1 or earlier of the License, but not also under the",
" terms of a Secondary License.",
"",
"1.6. \"Executable Form\"",
" means any form of the work other than Source Code Form.",
"",
"1.7. \"Larger Work\"",
" means a work that combines Covered Software with other material, in",
" a separate file or files, that is not Covered Software.",
"",
"1.8. \"License\"",
" means this document.",
"",
"1.9. \"Licensable\"",
" means having the right to grant, to the maximum extent possible,",
" whether at the time of the initial grant or subsequently, any and",
" all of the rights conveyed by this License.",
"",
"1.10. \"Modifications\"",
" means any of the following:",
"",
" (a) any file in Source Code Form that results from an addition to,",
" deletion from, or modification of the contents of Covered",
" Software; or",
"",
" (b) any new file in Source Code Form that contains any Covered",
" Software.",
"",
"1.11. \"Patent Claims\" of a Contributor",
" means any patent claim(s), including without limitation, method,",
" process, and apparatus claims, in any patent Licensable by such",
" Contributor that would be infringed, but for the grant of the",
" License, by the making, using, selling, offering for sale, having",
" made, import, or transfer of either its Contributions or its",
" Contributor Version.",
"",
"1.12. \"Secondary License\"",
" means either the GNU General Public License, Version 2.0, the GNU",
" Lesser General Public License, Version 2.1, the GNU Affero General",
" Public License, Version 3.0, or any later versions of those",
" licenses.",
"",
"1.13. \"Source Code Form\"",
" means the form of the work preferred for making modifications.",
"",
"1.14. \"You\" (or \"Your\")",
" means an individual or a legal entity exercising rights under this",
" License. For legal entities, \"You\" includes any entity that",
" controls, is controlled by, or is under common control with You. For",
" purposes of this definition, \"control\" means (a) the power, direct",
" or indirect, to cause the direction or management of such entity,",
" whether by contract or otherwise, or (b) ownership of more than",
" fifty percent (50%) of the outstanding shares or beneficial",
" ownership of such entity.",
"",
"2. License Grants and Conditions",
"--------------------------------",
"",
"2.1. Grants",
"",
"Each Contributor hereby grants You a world-wide, royalty-free,",
"non-exclusive license:",
"",
"(a) under intellectual property rights (other than patent or trademark)",
" Licensable by such Contributor to use, reproduce, make available,",
" modify, display, perform, distribute, and otherwise exploit its",
" Contributions, either on an unmodified basis, with Modifications, or",
" as part of a Larger Work; and",
"",
"(b) under Patent Claims of such Contributor to make, use, sell, offer",
" for sale, have made, import, and otherwise transfer either its",
" Contributions or its Contributor Version.",
"",
"2.2. Effective Date",
"",
"The licenses granted in Section 2.1 with respect to any Contribution",
"become effective for each Contribution on the date the Contributor first",
"distributes such Contribution.",
"",
"2.3. Limitations on Grant Scope",
"",
"The licenses granted in this Section 2 are the only rights granted under",
"this License. No additional rights or licenses will be implied from the",
"distribution or licensing of Covered Software under this License.",
"Notwithstanding Section 2.1(b) above, no patent license is granted by a",
"Contributor:",
"",
"(a) for any code that a Contributor has removed from Covered Software;",
" or",
"",
"(b) for infringements caused by: (i) Your and any other third party's",
" modifications of Covered Software, or (ii) the combination of its",
" Contributions with other software (except as part of its Contributor",
" Version); or",
"",
"(c) under Patent Claims infringed by Covered Software in the absence of",
" its Contributions.",
"",
"This License does not grant any rights in the trademarks, service marks,",
"or logos of any Contributor (except as may be necessary to comply with",
"the notice requirements in Section 3.4).",
"",
"2.4. Subsequent Licenses",
"",
"No Contributor makes additional grants as a result of Your choice to",
"distribute the Covered Software under a subsequent version of this",
"License (see Section 10.2) or under the terms of a Secondary License (if",
"permitted under the terms of Section 3.3).",
"",
"2.5. Representation",
"",
"Each Contributor represents that the Contributor believes its",
"Contributions are its original creation(s) or it has sufficient rights",
"to grant the rights to its Contributions conveyed by this License.",
"",
"2.6. Fair Use",
"",
"This License is not intended to limit any rights You have under",
"applicable copyright doctrines of fair use, fair dealing, or other",
"equivalents.",
"",
"2.7. Conditions",
"",
"Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted",
"in Section 2.1.",
"",
"3. Responsibilities",
"-------------------",
"",
"3.1. Distribution of Source Form",
"",
"All distribution of Covered Software in Source Code Form, including any",
"Modifications that You create or to which You contribute, must be under",
"the terms of this License. You must inform recipients that the Source",
"Code Form of the Covered Software is governed by the terms of this",
"License, and how they can obtain a copy of this License. You may not",
"attempt to alter or restrict the recipients' rights in the Source Code",
"Form.",
"",
"3.2. Distribution of Executable Form",
"",
"If You distribute Covered Software in Executable Form then:",
"",
"(a) such Covered Software must also be made available in Source Code",
" Form, as described in Section 3.1, and You must inform recipients of",
" the Executable Form how they can obtain a copy of such Source Code",
" Form by reasonable means in a timely manner, at a charge no more",
" than the cost of distribution to the recipient; and",
"",
"(b) You may distribute such Executable Form under the terms of this",
" License, or sublicense it under different terms, provided that the",
" license for the Executable Form does not attempt to limit or alter",
" the recipients' rights in the Source Code Form under this License.",
"",
"3.3. Distribution of a Larger Work",
"",
"You may create and distribute a Larger Work under terms of Your choice,",
"provided that You also comply with the requirements of this License for",
"the Covered Software. If the Larger Work is a combination of Covered",
"Software with a work governed by one or more Secondary Licenses, and the",
"Covered Software is not Incompatible With Secondary Licenses, this",
"License permits You to additionally distribute such Covered Software",
"under the terms of such Secondary License(s), so that the recipient of",
"the Larger Work may, at their option, further distribute the Covered",
"Software under the terms of either this License or such Secondary",
"License(s).",
"",
"3.4. Notices",
"",
"You may not remove or alter the substance of any license notices",
"(including copyright notices, patent notices, disclaimers of warranty,",
"or limitations of liability) contained within the Source Code Form of",
"the Covered Software, except that You may alter any license notices to",
"the extent required to remedy known factual inaccuracies.",
"",
"3.5. Application of Additional Terms",
"",
"You may choose to offer, and to charge a fee for, warranty, support,",
"indemnity or liability obligations to one or more recipients of Covered",
"Software. However, You may do so only on Your own behalf, and not on",
"behalf of any Contributor. You must make it absolutely clear that any",
"such warranty, support, indemnity, or liability obligation is offered by",
"You alone, and You hereby agree to indemnify every Contributor for any",
"liability incurred by such Contributor as a result of warranty, support,",
"indemnity or liability terms You offer. You may include additional",
"disclaimers of warranty and limitations of liability specific to any",
"jurisdiction.",
"",
"4. Inability to Comply Due to Statute or Regulation",
"---------------------------------------------------",
"",
"If it is impossible for You to comply with any of the terms of this",
"License with respect to some or all of the Covered Software due to",
"statute, judicial order, or regulation then You must: (a) comply with",
"the terms of this License to the maximum extent possible; and (b)",
"describe the limitations and the code they affect. Such description must",
"be placed in a text file included with all distributions of the Covered",
"Software under this License. Except to the extent prohibited by statute",
"or regulation, such description must be sufficiently detailed for a",
"recipient of ordinary skill to be able to understand it.",
"",
"5. Termination",
"--------------",
"",
"5.1. The rights granted under this License will terminate automatically",
"if You fail to comply with any of its terms. However, if You become",
"compliant, then the rights granted under this License from a particular",
"Contributor are reinstated (a) provisionally, unless and until such",
"Contributor explicitly and finally terminates Your grants, and (b) on an",
"ongoing basis, if such Contributor fails to notify You of the",
"non-compliance by some reasonable means prior to 60 days after You have",
"come back into compliance. Moreover, Your grants from a particular",
"Contributor are reinstated on an ongoing basis if such Contributor",
"notifies You of the non-compliance by some reasonable means, this is the",
"first time You have received notice of non-compliance with this License",
"from such Contributor, and You become compliant prior to 30 days after",
"Your receipt of the notice.",
"",
"5.2. If You initiate litigation against any entity by asserting a patent",
"infringement claim (excluding declaratory judgment actions,",
"counter-claims, and cross-claims) alleging that a Contributor Version",
"directly or indirectly infringes any patent, then the rights granted to",
"You by any and all Contributors for the Covered Software under Section",
"2.1 of this License shall terminate.",
"",
"5.3. In the event of termination under Sections 5.1 or 5.2 above, all",
"end user license agreements (excluding distributors and resellers) which",
"have been validly granted by You or Your distributors under this License",
"prior to termination shall survive termination.",
"",
"************************************************************************",
"* *",
"* 6. Disclaimer of Warranty *",
"* ------------------------- *",
"* *",
"* Covered Software is provided under this License on an \"as is\" *",
"* basis, without warranty of any kind, either expressed, implied, or *",
"* statutory, including, without limitation, warranties that the *",
"* Covered Software is free of defects, merchantable, fit for a *",
"* particular purpose or non-infringing. The entire risk as to the *",
"* quality and performance of the Covered Software is with You. *",
"* Should any Covered Software prove defective in any respect, You *",
"* (not any Contributor) assume the cost of any necessary servicing, *",
"* repair, or correction. This disclaimer of warranty constitutes an *",
"* essential part of this License. No use of any Covered Software is *",
"* authorized under this License except under this disclaimer. *",
"* *",
"************************************************************************",
"",
"************************************************************************",
"* *",
"* 7. Limitation of Liability *",
"* -------------------------- *",
"* *",
"* Under no circumstances and under no legal theory, whether tort *",
"* (including negligence), contract, or otherwise, shall any *",
"* Contributor, or anyone who distributes Covered Software as *",
"* permitted above, be liable to You for any direct, indirect, *",
"* special, incidental, or consequential damages of any character *",
"* including, without limitation, damages for lost profits, loss of *",
"* goodwill, work stoppage, computer failure or malfunction, or any *",
"* and all other commercial damages or losses, even if such party *",
"* shall have been informed of the possibility of such damages. This *",
"* limitation of liability shall not apply to liability for death or *",
"* personal injury resulting from such party's negligence to the *",
"* extent applicable law prohibits such limitation. Some *",
"* jurisdictions do not allow the exclusion or limitation of *",
"* incidental or consequential damages, so this exclusion and *",
"* limitation may not apply to You. *",
"* *",
"************************************************************************",
"",
"8. Litigation",
"-------------",
"",
"Any litigation relating to this License may be brought only in the",
"courts of a jurisdiction where the defendant maintains its principal",
"place of business and such litigation shall be governed by laws of that",
"jurisdiction, without reference to its conflict-of-law provisions.",
"Nothing in this Section shall prevent a party's ability to bring",
"cross-claims or counter-claims.",
"",
"9. Miscellaneous",
"----------------",
"",
"This License represents the complete agreement concerning the subject",
"matter hereof. If any provision of this License is held to be",
"unenforceable, such provision shall be reformed only to the extent",
"necessary to make it enforceable. Any law or regulation which provides",
"that the language of a contract shall be construed against the drafter",
"shall not be used to construe this License against a Contributor.",
"",
"10. Versions of the License",
"---------------------------",
"",
"10.1. New Versions",
"",
"Mozilla Foundation is the license steward. Except as provided in Section",
"10.3, no one other than the license steward has the right to modify or",
"publish new versions of this License. Each version will be given a",
"distinguishing version number.",
"",
"10.2. Effect of New Versions",
"",
"You may distribute the Covered Software under the terms of the version",
"of the License under which You originally received the Covered Software,",
"or under the terms of any subsequent version published by the license",
"steward.",
"",
"10.3. Modified Versions",
"",
"If you create software not governed by this License, and you want to",
"create a new license for such software, you may create and use a",
"modified version of this License if you rename the license and remove",
"any references to the name of the license steward (except to note that",
"such modified license differs from this License).",
"",
"10.4. Distributing Source Code Form that is Incompatible With Secondary",
"Licenses",
"",
"If You choose to distribute Source Code Form that is Incompatible With",
"Secondary Licenses under the terms of this version of the License, the",
"notice described in Exhibit B of this License must be attached.",
"",
"Exhibit A - Source Code Form License Notice",
"-------------------------------------------",
"",
" This Source Code Form is subject to the terms of the Mozilla Public",
" License, v. 2.0. If a copy of the MPL was not distributed with this",
" file, You can obtain one at http://mozilla.org/MPL/2.0/.",
"",
"If it is not possible or desirable to put the notice in a particular",
"file, then You may include the notice in a location (such as a LICENSE",
"file in a relevant directory) where a recipient would be likely to look",
"for such a notice.",
"",
"You may add additional accurate notices of copyright ownership.",
"",
"Exhibit B - \"Incompatible With Secondary Licenses\" Notice",
"---------------------------------------------------------",
"",
" This Source Code Form is \"Incompatible With Secondary Licenses\", as",
" defined by the Mozilla Public License, v. 2.0."
]
} }
] ]

View File

@@ -1,26 +1,22 @@
# SQL Operations Studio # SQL Operations Studio
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
SQL Operations Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux. SQL Operations Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
**Download SQL Operations Studio June Public Preview** **Download SQL Operations Studio December Public Preview**
Platform | Link Platform | Link
-- | -- -- | --
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=2005949 Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=865305
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2005950 Windows ZIP | https://go.microsoft.com/fwlink/?linkid=865304
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2005959 macOS ZIP | https://go.microsoft.com/fwlink/?linkid=865306
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2005960 Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=865307
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2006083 Linux DEB | https://go.microsoft.com/fwlink/?linkid=865308
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2006084 Linux RPM | https://go.microsoft.com/fwlink/?linkid=865309
Go to our [download page](https://aka.ms/sqlopsstudio) for more specific instructions. Go to our [download page](https://aka.ms/sqlopsstudio) for more specific instructions.
Try out the latest insiders build from `master` at https://github.com/Microsoft/sqlopsstudio/releases. Try out the latest insiders build from `master` at https://github.com/Microsoft/sqlopsstudio/releases.
See the [change log](https://github.com/Microsoft/sqlopsstudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
**Feature Highlights** **Feature Highlights**
- Cross-Platform DB management for Windows, macOS and Linux with simple XCopy deployment - Cross-Platform DB management for Windows, macOS and Linux with simple XCopy deployment
@@ -50,46 +46,9 @@ please see the document [How to Contribute](https://github.com/Microsoft/sqlopss
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## Localization
SQL Operations Studio localization is now open for community contributions. You can contribute to localization for both software and docs. https://aka.ms/SQLOpsStudioLoc
Localization is now opened for 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). Help us make SQL Operations Studio available in your language!
## Privacy Statement ## Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software. The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* lanceklinger `Fix for double clicking column handle in results table #1504`
* westerncj for `Removed duplicate contribution from README.md (#753)`
* ntovas for `Fix for duplicate extensions shown in "Save File" dialog. (#779)`
* SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)`
* alextercete for `Fix "No extension gallery service configured" error (#427)`
* mwiedemeyer for `Fix #58: Default sort order for DB size widget (#111)`
* AlexTroshkin for `Show disconnect in context menu only when connectionProfile connected (#150)`
* AlexTroshkin for `Fix #138: Invalid syntax color highlighting (identity not highlighting) (#140))`
* stebet for `Fix #153: Fixing sql snippets that failed on a DB with case-sensitive collation. (#152)`
* SebastianPfliegel `Remove sqlExtensionHelp (#312)`
* olljanat for `Implemented npm version check (#314)`
* Adam Mechanic for helping with the `whoisactive` extension
* All community localization contributors
* French: Adrien Clerbois, ANAS BELABBES, Antoine Griffard, Arian Papillon, Eric Macarez, Eric Van Thorre, Jérémy LANDON, Matthias GROSPERRIN, Maxime COQUEREL, Olivier Guinart, thierry DEMAN-BARCELÒ, Thomas Potier
* Italian: Aldo Donetti, Alessandro Alpi, Andrea Dottor, Bruni Luca, Gianluca Hotz, Luca Nardi, Luigi Bruno, Marco Dal Pino, Mirco Vanini, Pasquale Ceglie, Riccardo Cappello, Sergio Govoni, Stefano Demiliani
* German: Anna Henke-Gunvaldson, Ben Weissman, David Ullmer, J.M. ., Kai Modo, Konstantin Staschill, Kostja Klein, Lennart Trunk, Markus Ehrenmüller-Jensen, Mascha Kroenlein, Matthias Knoll, Mourad Louha, Thomas Hütter, Wolfgang Straßer
* Spanish: Alberto Poblacion, Andy Gonzalez, Carlos Mendible, Christian Araujo, Daniel D, Eickhel Mendoza, Ernesto Cardenas, Ivan Toledo Ivanovic, Fran Diaz, JESUS GIL, Jorge Serrano Pérez, José Saturnino Pimentel Juárez, Mauricio Hidalgo, Pablo Iglesias, Rikhardo Estrada Rdez, Thierry DEMAN, YOLANDA CUESTA ALTIERI
* Japanese: Fujio Kojima, Kazushi KAMEGAWA, Masayoshi Yamada, Masayuki Ozawa , Seiji Momoto, Takashi Kanai, Takayoshi Tanaka, Yoshihisa Ozaki, 庄垣内治
* Chinese (simplified): DAN YE, Joel Yang, Lynne Dong, RyanYu Zhang, Sheng Jiang, Wei Zhang, Zhiliang Xu
* Chinese (Traditional): Bruce Chen, Chiayi Yen, Kevin Yang, Winnie Lin, 保哥 Will, 謝政廷
* Korean: Do-Kyun Kim, Evelyn Kim, Helen Jung, Hong Jmee, jeongwoo choi, Jun Hyoung Lee, Jungsun Kim정선, Justin Yoo, Kavrith mucha, Kiwoong Youm, MinGyu Ju, MVP_JUNO BEA, Sejun Kim, SOONMAN KWON, sung man ko, Yeongrak Choi, younggun kim, Youngjae Kim, 소영 이
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
And of course we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/sqlopsstudio/master/ThirdPartyNotices.txt)
## License ## License
Copyright (c) Microsoft Corporation. All rights reserved. Copyright (c) Microsoft Corporation. All rights reserved.

View File

@@ -3,17 +3,19 @@ environment:
VSCODE_BUILD_VERBOSE: true VSCODE_BUILD_VERBOSE: true
cache: cache:
- '%LOCALAPPDATA%\Yarn\cache' - '%APPDATA%\npm-cache'
install: install:
- ps: Install-Product node 8.9.1 x64 - ps: Install-Product node 7.9.0 x64
- npm install -g npm@4 --silent
build_script: build_script:
- yarn - .\scripts\npm.bat install
- .\node_modules\.bin\gulp electron - .\node_modules\.bin\gulp electron
- npm run compile - npm run compile
test_script: test_script:
- node --version - node --version
- npm --version
- .\scripts\test.bat - .\scripts\test.bat
- .\scripts\test-integration.bat - .\scripts\test-integration.bat

View File

@@ -1,12 +0,0 @@
[
{
"name": "ms-vscode.node-debug",
"version": "1.23.3",
"repo": "https://github.com/Microsoft/vscode-node-debug"
},
{
"name": "ms-vscode.node-debug2",
"version": "1.23.5",
"repo": "https://github.com/Microsoft/vscode-node-debug2"
}
]

View File

@@ -1,20 +0,0 @@
{
"env": {
"node": true,
"es6": true,
"browser": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}

View File

@@ -1,126 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const fs = require('fs');
const path = require('path');
const os = require('os');
// @ts-ignore review
const { remote } = require('electron');
const dialog = remote.dialog;
const builtInExtensionsPath = path.join(__dirname, '..', 'builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
function readJson(filePath) {
return JSON.parse(fs.readFileSync(filePath, { encoding: 'utf8' }));
}
function writeJson(filePath, obj) {
fs.writeFileSync(filePath, JSON.stringify(obj, null, 2));
}
function renderOption(form, id, title, value, checked) {
const input = document.createElement('input');
input.type = 'radio';
input.id = id;
input.name = 'choice';
input.value = value;
input.checked = !!checked;
form.appendChild(input);
const label = document.createElement('label');
label.setAttribute('for', id);
label.textContent = title;
form.appendChild(label);
return input;
}
function render(el, state) {
function setState(state) {
try {
writeJson(controlFilePath, state.control);
} catch (err) {
console.error(err);
}
el.innerHTML = '';
render(el, state);
}
const ul = document.createElement('ul');
const { builtin, control } = state;
for (const ext of builtin) {
const controlState = control[ext.name] || 'marketplace';
const li = document.createElement('li');
ul.appendChild(li);
const name = document.createElement('code');
name.textContent = ext.name;
li.appendChild(name);
const form = document.createElement('form');
li.appendChild(form);
const marketplaceInput = renderOption(form, `marketplace-${ext.name}`, 'Marketplace', 'marketplace', controlState === 'marketplace');
marketplaceInput.onchange = function () {
control[ext.name] = 'marketplace';
setState({ builtin, control });
};
const disabledInput = renderOption(form, `disabled-${ext.name}`, 'Disabled', 'disabled', controlState === 'disabled');
disabledInput.onchange = function () {
control[ext.name] = 'disabled';
setState({ builtin, control });
};
let local = undefined;
if (controlState !== 'marketplace' && controlState !== 'disabled') {
local = controlState;
}
const localInput = renderOption(form, `local-${ext.name}`, 'Local', 'local', !!local);
localInput.onchange = function () {
const result = dialog.showOpenDialog(remote.getCurrentWindow(), {
title: 'Choose Folder',
properties: ['openDirectory']
});
if (result && result.length >= 1) {
control[ext.name] = result[0];
}
setState({ builtin, control });
};
if (local) {
const localSpan = document.createElement('code');
localSpan.className = 'local';
localSpan.textContent = local;
form.appendChild(localSpan);
}
}
el.appendChild(ul);
}
function main() {
const el = document.getElementById('extensions');
const builtin = readJson(builtInExtensionsPath);
let control;
try {
control = readJson(controlFilePath);
} catch (err) {
control = {};
}
render(el, { builtin, control });
}
window.onload = main;

View File

@@ -1,46 +0,0 @@
<!-- Copyright (C) Microsoft Corporation. All rights reserved. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>Manage Built-in Extensions</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<script src="browser-main.js"></script>
<style>
body {
font-family: 'Trebuchet MS', 'Lucida Sans Unicode', 'Lucida Grande', 'Lucida Sans', Arial, sans-serif;
font-size: 10pt;
}
code {
font-family: 'Menlo', 'Courier New', 'Courier', monospace;
}
ul {
padding-left: 1em;
}
li {
list-style: none;
padding: 0.3em 0;
}
label {
margin-right: 1em;
}
form {
padding: 0.3em 0 0.3em 0.3em;
}
</style>
</head>
<body>
<h1>Built-in Extensions</h1>
<div id="extensions"></div>
</body>
</html>

View File

@@ -1,20 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const { app, BrowserWindow } = require('electron');
const url = require('url');
const path = require('path');
let window = null;
app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600 });
window.setMenuBarVisibility(false);
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
// window.webContents.openDevTools();
window.once('closed', () => window = null);
});
app.on('window-all-closed', () => app.quit());

View File

@@ -1,5 +0,0 @@
{
"name": "builtin",
"version": "0.1.0",
"main": "main.js"
}

View File

@@ -1,74 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const path = require('path');
const parseSemver = require('parse-semver');
const cp = require('child_process');
const _ = require('underscore');
function asYarnDependency(prefix, tree) {
let parseResult;
try {
parseResult = parseSemver(tree.name);
} catch (err) {
err.message += `: ${tree.name}`;
console.warn(`Could not parse semver: ${tree.name}`);
return null;
}
// not an actual dependency in disk
if (parseResult.version !== parseResult.range) {
return null;
}
const name = parseResult.name;
const version = parseResult.version;
const dependencyPath = path.join(prefix, name);
const children = [];
for (const child of (tree.children || [])) {
const dep = asYarnDependency(path.join(prefix, name, 'node_modules'), child);
if (dep) {
children.push(dep);
}
}
return { name, version, path: dependencyPath, children };
}
function getYarnProductionDependencies(cwd) {
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'ignore'] });
const match = /^{"type":"tree".*$/m.exec(raw);
if (!match || match.length !== 1) {
throw new Error('Could not parse result of `yarn list --json`');
}
const trees = JSON.parse(match[0]).data.trees;
return trees
.map(tree => asYarnDependency(path.join(cwd, 'node_modules'), tree))
.filter(dep => !!dep);
}
function getProductionDependencies(cwd) {
const result = [];
const deps = getYarnProductionDependencies(cwd);
const flatten = dep => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
deps.forEach(flatten);
return _.uniq(result);
}
module.exports.getProductionDependencies = getProductionDependencies;
if (require.main === module) {
const root = path.dirname(__dirname);
console.log(JSON.stringify(getProductionDependencies(root), null, ' '));
}

View File

@@ -3,19 +3,15 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
const gulp = require('gulp'); var gulp = require('gulp');
const path = require('path'); var path = require('path');
const util = require('./lib/util'); var util = require('./lib/util');
const common = require('./lib/optimize'); var common = require('./lib/optimize');
const es = require('event-stream'); var es = require('event-stream');
const File = require('vinyl'); var File = require('vinyl');
const i18n = require('./lib/i18n');
const standalone = require('./lib/standalone');
const cp = require('child_process');
var root = path.dirname(__dirname); var root = path.dirname(__dirname);
var sha1 = util.getVersion(root); var sha1 = util.getVersion(root);
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
var semver = require('./monaco/package.json').version; var semver = require('./monaco/package.json').version;
var headerVersion = semver + '(' + sha1 + ')'; var headerVersion = semver + '(' + sha1 + ')';
@@ -25,14 +21,14 @@ var editorEntryPoints = [
{ {
name: 'vs/editor/editor.main', name: 'vs/editor/editor.main',
include: [], include: [],
exclude: ['vs/css', 'vs/nls'], exclude: [ 'vs/css', 'vs/nls' ],
prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'], prepend: [ 'out-build/vs/css.js', 'out-build/vs/nls.js' ],
}, },
{ {
name: 'vs/base/common/worker/simpleWorker', name: 'vs/base/common/worker/simpleWorker',
include: ['vs/editor/common/services/editorSimpleWorker'], include: [ 'vs/editor/common/services/editorSimpleWorker' ],
prepend: ['vs/loader.js'], prepend: [ 'vs/loader.js' ],
append: ['vs/base/worker/workerMain'], append: [ 'vs/base/worker/workerMain' ],
dest: 'vs/base/worker/workerMain.js' dest: 'vs/base/worker/workerMain.js'
} }
]; ];
@@ -74,8 +70,6 @@ function editorLoaderConfig() {
return result; return result;
} }
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
gulp.task('clean-optimized-editor', util.rimraf('out-editor')); gulp.task('clean-optimized-editor', util.rimraf('out-editor'));
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'], common.optimizeTask({ gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'], common.optimizeTask({
entryPoints: editorEntryPoints, entryPoints: editorEntryPoints,
@@ -85,37 +79,14 @@ gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'],
bundleLoader: false, bundleLoader: false,
header: BUNDLED_FILE_HEADER, header: BUNDLED_FILE_HEADER,
bundleInfo: true, bundleInfo: true,
out: 'out-editor', out: 'out-editor'
languages: languages
})); }));
gulp.task('clean-minified-editor', util.rimraf('out-editor-min')); gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor')); gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));
gulp.task('clean-editor-esm', util.rimraf('out-editor-esm'));
gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro'], function () {
standalone.createESMSourcesAndResources({
entryPoints: [
'vs/editor/editor.main',
'vs/editor/editor.worker'
],
outFolder: './out-editor-esm/src',
outResourcesFolder: './out-monaco-editor-core/esm',
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
'vs/nls': 'vs/nls.mock',
}
});
});
gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () {
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
});
gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core')); gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify-editor', 'optimize-editor'], function () { gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-editor'], function() {
return es.merge( return es.merge(
// other assets // other assets
es.merge( es.merge(
@@ -124,30 +95,19 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
gulp.src('src/vs/monaco.d.ts') gulp.src('src/vs/monaco.d.ts')
).pipe(gulp.dest('out-monaco-editor-core')), ).pipe(gulp.dest('out-monaco-editor-core')),
// place the .d.ts in the esm folder
gulp.src('src/vs/monaco.d.ts')
.pipe(es.through(function (data) {
this.emit('data', new File({
path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
base: data.base,
contents: data.contents
}));
}))
.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
// package.json // package.json
gulp.src('build/monaco/package.json') gulp.src('build/monaco/package.json')
.pipe(es.through(function (data) { .pipe(es.through(function(data) {
var json = JSON.parse(data.contents.toString()); var json = JSON.parse(data.contents.toString());
json.private = false; json.private = false;
data.contents = Buffer.from(JSON.stringify(json, null, ' ')); data.contents = new Buffer(JSON.stringify(json, null, ' '));
this.emit('data', data); this.emit('data', data);
})) }))
.pipe(gulp.dest('out-monaco-editor-core')), .pipe(gulp.dest('out-monaco-editor-core')),
// README.md // README.md
gulp.src('build/monaco/README-npm.md') gulp.src('build/monaco/README-npm.md')
.pipe(es.through(function (data) { .pipe(es.through(function(data) {
this.emit('data', new File({ this.emit('data', new File({
path: data.path.replace(/README-npm\.md/, 'README.md'), path: data.path.replace(/README-npm\.md/, 'README.md'),
base: data.base, base: data.base,
@@ -164,10 +124,10 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
// min folder // min folder
es.merge( es.merge(
gulp.src('out-editor-min/**/*') gulp.src('out-editor-min/**/*')
).pipe(filterStream(function (path) { ).pipe(filterStream(function(path) {
// no map files // no map files
return !/(\.js\.map$)|(nls\.metadata\.json$)|(bundleInfo\.json$)/.test(path); return !/(\.js\.map$)|(nls\.metadata\.json$)|(bundleInfo\.json$)/.test(path);
})).pipe(es.through(function (data) { })).pipe(es.through(function(data) {
// tweak the sourceMappingURL // tweak the sourceMappingURL
if (!/\.js$/.test(data.path)) { if (!/\.js$/.test(data.path)) {
this.emit('data', data); this.emit('data', data);
@@ -180,50 +140,49 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/'); var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr); strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr);
data.contents = Buffer.from(strContents); data.contents = new Buffer(strContents);
this.emit('data', data); this.emit('data', data);
})).pipe(gulp.dest('out-monaco-editor-core/min')), })).pipe(gulp.dest('out-monaco-editor-core/min')),
// min-maps folder // min-maps folder
es.merge( es.merge(
gulp.src('out-editor-min/**/*') gulp.src('out-editor-min/**/*')
).pipe(filterStream(function (path) { ).pipe(filterStream(function(path) {
// no map files // no map files
return /\.js\.map$/.test(path); return /\.js\.map$/.test(path);
})).pipe(gulp.dest('out-monaco-editor-core/min-maps')) })).pipe(gulp.dest('out-monaco-editor-core/min-maps'))
); );
}); });
gulp.task('analyze-editor-distro', function () { gulp.task('analyze-editor-distro', function() {
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
var bundleInfo = require('../out-editor/bundleInfo.json'); var bundleInfo = require('../out-editor/bundleInfo.json');
var graph = bundleInfo.graph; var graph = bundleInfo.graph;
var bundles = bundleInfo.bundles; var bundles = bundleInfo.bundles;
var inverseGraph = {}; var inverseGraph = {};
Object.keys(graph).forEach(function (module) { Object.keys(graph).forEach(function(module) {
var dependencies = graph[module]; var dependencies = graph[module];
dependencies.forEach(function (dep) { dependencies.forEach(function(dep) {
inverseGraph[dep] = inverseGraph[dep] || []; inverseGraph[dep] = inverseGraph[dep] || [];
inverseGraph[dep].push(module); inverseGraph[dep].push(module);
}); });
}); });
var detailed = {}; var detailed = {};
Object.keys(bundles).forEach(function (entryPoint) { Object.keys(bundles).forEach(function(entryPoint) {
var included = bundles[entryPoint]; var included = bundles[entryPoint];
var includedMap = {}; var includedMap = {};
included.forEach(function (included) { included.forEach(function(included) {
includedMap[included] = true; includedMap[included] = true;
}); });
var explanation = []; var explanation = [];
included.map(function (included) { included.map(function(included) {
if (included.indexOf('!') >= 0) { if (included.indexOf('!') >= 0) {
return; return;
} }
var reason = (inverseGraph[included] || []).filter(function (mod) { var reason = (inverseGraph[included]||[]).filter(function(mod) {
return !!includedMap[mod]; return !!includedMap[mod];
}); });
explanation.push({ explanation.push({
@@ -239,67 +198,10 @@ gulp.task('analyze-editor-distro', function () {
}); });
function filterStream(testFunc) { function filterStream(testFunc) {
return es.through(function (data) { return es.through(function(data) {
if (!testFunc(data.relative)) { if (!testFunc(data.relative)) {
return; return;
} }
this.emit('data', data); this.emit('data', data);
}); });
} }
//#region monaco type checking
function createTscCompileTask(watch) {
return () => {
const createReporter = require('./lib/reporter').createReporter;
return new Promise((resolve, reject) => {
const args = ['./node_modules/.bin/tsc', '-p', './src/tsconfig.monaco.json', '--noEmit'];
if (watch) {
args.push('-w');
}
const child = cp.spawn(`node`, args, {
cwd: path.join(__dirname, '..'),
// stdio: [null, 'pipe', 'inherit']
});
let errors = [];
let reporter = createReporter();
let report;
let magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings
child.stdout.on('data', data => {
let str = String(data);
str = str.replace(magic, '').trim();
if (str.indexOf('Starting compilation') >= 0 || str.indexOf('File change detected') >= 0) {
errors.length = 0;
report = reporter.end(false);
} else if (str.indexOf('Compilation complete') >= 0) {
report.end();
} else if (str) {
let match = /(.*\(\d+,\d+\): )(.*: )(.*)/.exec(str);
if (match) {
// trying to massage the message so that it matches the gulp-tsb error messages
// e.g. src/vs/base/common/strings.ts(663,5): error TS2322: Type '1234' is not assignable to type 'string'.
let fullpath = path.join(root, match[1]);
let message = match[3];
// @ts-ignore
reporter(fullpath + message);
} else {
// @ts-ignore
reporter(str);
}
}
});
child.on('exit', resolve);
child.on('error', reject);
});
};
}
gulp.task('monaco-typecheck-watch', createTscCompileTask(true));
gulp.task('monaco-typecheck', createTscCompileTask(false));
//#endregion

View File

@@ -20,8 +20,6 @@ const sourcemaps = require('gulp-sourcemaps');
const nlsDev = require('vscode-nls-dev'); const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname); const root = path.dirname(__dirname);
const commit = util.getVersion(root); const commit = util.getVersion(root);
const i18n = require('./lib/i18n');
const plumber = require('gulp-plumber');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions'); const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@@ -31,8 +29,7 @@ const compilations = glob.sync('**/tsconfig.json', {
}); });
const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`; const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`;
const languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
const languages = i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
const tasks = compilations.map(function (tsconfigFile) { const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile); const absolutePath = path.join(extensionsPath, tsconfigFile);
@@ -58,38 +55,19 @@ const tasks = compilations.map(function (tsconfigFile) {
const srcBase = path.join(root, 'src'); const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**'); const src = path.join(srcBase, '**');
const out = path.join(root, 'out'); const out = path.join(root, 'out');
const i18nPath = path.join(__dirname, '..', 'i18n'); const i18n = path.join(__dirname, '..', 'i18n');
const baseUrl = getBaseUrl(out); const baseUrl = getBaseUrl(out);
let headerId, headerOut;
let index = relativeDirname.indexOf('/');
if (index < 0) {
headerId = 'vscode.' + relativeDirname;
headerOut = 'out';
} else {
headerId = 'vscode.' + relativeDirname.substr(0, index);
headerOut = relativeDirname.substr(index + 1) + '/out';
}
function createPipeline(build, emitError) { function createPipeline(build, emitError) {
const reporter = createReporter(); const reporter = createReporter();
tsOptions.inlineSources = !!build; tsOptions.inlineSources = !!build;
tsOptions.base = path.dirname(absolutePath);
const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString())); const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
return function () { return function () {
const input = es.through(); const input = es.through();
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true }); const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
const output = input const output = input
.pipe(plumber({
errorHandler: function (err) {
if (err && !err.__reporter__) {
reporter(err);
}
}
}))
.pipe(tsFilter) .pipe(tsFilter)
.pipe(util.loadSourcemaps()) .pipe(util.loadSourcemaps())
.pipe(compilation()) .pipe(compilation())
@@ -102,9 +80,7 @@ const tasks = compilations.map(function (tsconfigFile) {
sourceRoot: '../src' sourceRoot: '../src'
})) }))
.pipe(tsFilter.restore) .pipe(tsFilter.restore)
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18nPath, out) : es.through()) .pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18n, out) : es.through())
.pipe(build ? nlsDev.bundleMetaDataFiles(headerId, headerOut) : es.through())
.pipe(build ? nlsDev.bundleLanguageFiles() : es.through())
.pipe(reporter.end(emitError)); .pipe(reporter.end(emitError));
return es.duplex(input, output); return es.duplex(input, output);
@@ -151,7 +127,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const watchInput = watcher(src, srcOpts); const watchInput = watcher(src, srcOpts);
return watchInput return watchInput
.pipe(util.incremental(() => pipeline(), input)) .pipe(util.incremental(() => pipeline(true), input))
.pipe(gulp.dest(out)); .pipe(gulp.dest(out));
}); });

View File

@@ -12,11 +12,7 @@ const gulptslint = require('gulp-tslint');
const gulpeslint = require('gulp-eslint'); const gulpeslint = require('gulp-eslint');
const tsfmt = require('typescript-formatter'); const tsfmt = require('typescript-formatter');
const tslint = require('tslint'); const tslint = require('tslint');
const VinylFile = require('vinyl');
const vfs = require('vinyl-fs'); const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
/** /**
* Hygiene works by creating cascading subsets of all our files and * Hygiene works by creating cascading subsets of all our files and
@@ -33,56 +29,53 @@ const all = [
'extensions/**/*', 'extensions/**/*',
'scripts/**/*', 'scripts/**/*',
'src/**/*', 'src/**/*',
'test/**/*', 'test/**/*'
'!**/node_modules/**' ];
const eolFilter = [
'**',
'!ThirdPartyNotices.txt',
'!LICENSE.txt',
'!extensions/**/out/**',
'!**/node_modules/**',
'!**/fixtures/**',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot}',
'!build/{lib,tslintRules}/**/*.js',
'!build/monaco/**',
'!build/win32/**',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!**/Dockerfile'
]; ];
const indentationFilter = [ const indentationFilter = [
'**', '**',
// except specific files
'!ThirdPartyNotices.txt', '!ThirdPartyNotices.txt',
'!LICENSE.txt', '!**/*.md',
'!src/vs/nls.js', '!**/*.ps1',
'!src/vs/css.js', '!**/*.template',
'!src/vs/loader.js', '!**/*.yaml',
'!src/vs/base/common/marked/marked.js', '!**/*.yml',
'!src/vs/base/common/winjs.base.js',
'!src/vs/base/node/terminateProcess.sh',
'!test/assert.js',
// except specific folders
'!test/smoke/out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/octicons/**',
'!**/fixtures/**',
'!**/lib/**', '!**/lib/**',
'!extensions/**/out/**', '!extensions/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!src/vs/*/**/*.d.ts',
'!**/*.d.ts.recipe',
'!test/assert.js',
'!**/package.json',
'!**/npm-shrinkwrap.json',
'!**/octicons/**',
'!**/vs/base/common/marked/raw.marked.js',
'!**/vs/base/common/winjs.base.raw.js',
'!**/vs/base/node/terminateProcess.sh',
'!**/vs/nls.js',
'!**/vs/css.js',
'!**/vs/loader.js',
'!extensions/**/snippets/**', '!extensions/**/snippets/**',
'!extensions/**/syntaxes/**', '!extensions/**/syntaxes/**',
'!extensions/**/themes/**', '!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**', '!extensions/**/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**'
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
'!build/{lib,tslintRules}/**/*.js',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!**/Dockerfile',
'!extensions/markdown/media/*.js'
]; ];
const copyrightFilter = [ const copyrightFilter = [
@@ -100,13 +93,11 @@ const copyrightFilter = [
'!**/*.xpm', '!**/*.xpm',
'!**/*.opts', '!**/*.opts',
'!**/*.disabled', '!**/*.disabled',
'!**/*.code-workspace',
'!build/**/*.init', '!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml', '!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js', '!resources/win32/bin/code.js',
'!extensions/markdown-language-features/media/tomorrow.css', '!extensions/markdown/media/tomorrow.css',
'!extensions/html-language-features/server/src/modes/typescript/*', '!extensions/html/server/src/modes/typescript/*'
'!extensions/*/server/bin/*'
]; ];
const eslintFilter = [ const eslintFilter = [
@@ -117,8 +108,8 @@ const eslintFilter = [
'!src/vs/nls.js', '!src/vs/nls.js',
'!src/vs/css.build.js', '!src/vs/css.build.js',
'!src/vs/nls.build.js', '!src/vs/nls.build.js',
'!src/**/winjs.base.js', '!src/**/winjs.base.raw.js',
'!src/**/marked.js', '!src/**/raw.marked.js',
'!**/test/**' '!**/test/**'
]; ];
@@ -131,17 +122,26 @@ const tslintFilter = [
'!**/node_modules/**', '!**/node_modules/**',
'!extensions/typescript/test/colorize-fixtures/**', '!extensions/typescript/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**', '!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**', '!extensions/**/*.test.ts'
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts'
]; ];
const copyrightHeaderLines = [ const copyrightHeader = [
'/*---------------------------------------------------------------------------------------------', '/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.', ' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Licensed under the Source EULA. See License.txt in the project root for license information.', ' * Licensed under the Source EULA. See License.txt in the project root for license information.',
' *--------------------------------------------------------------------------------------------*/' ' *--------------------------------------------------------------------------------------------*/'
]; ].join('\n');
function reportFailures(failures) {
failures.forEach(failure => {
const name = failure.name || failure.fileName;
const position = failure.startPosition;
const line = position.lineAndCharacter ? position.lineAndCharacter.line : position.line;
const character = position.lineAndCharacter ? position.lineAndCharacter.character : position.character;
console.error(`${name}:${line + 1}:${character + 1}:${failure.failure}`);
});
}
gulp.task('eslint', () => { gulp.task('eslint', () => {
return vfs.src(all, { base: '.', follow: true, allowEmpty: true }) return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
@@ -152,23 +152,31 @@ gulp.task('eslint', () => {
}); });
gulp.task('tslint', () => { gulp.task('tslint', () => {
// {{SQL CARBON EDIT}} const options = { summarizeFailureOutput: true };
const options = { emitError: false };
return vfs.src(all, { base: '.', follow: true, allowEmpty: true }) return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintFilter)) .pipe(filter(tslintFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' })) .pipe(gulptslint({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default.report(options)); .pipe(gulptslint.report(reportFailures, options));
}); });
function hygiene(some) { const hygiene = exports.hygiene = (some, options) => {
options = options || {};
let errorCount = 0; let errorCount = 0;
const indentation = es.through(function (file) { const eol = es.through(function (file) {
const lines = file.contents.toString('utf8').split(/\r\n|\r|\n/); if (/\r\n?/g.test(file.contents.toString('utf8'))) {
file.__lines = lines; console.error(file.relative + ': Bad EOL found');
errorCount++;
}
lines this.emit('data', file);
});
const indentation = es.through(function (file) {
file.contents
.toString('utf8')
.split(/\r\n|\r|\n/)
.forEach((line, i) => { .forEach((line, i) => {
if (/^\s*$/.test(line)) { if (/^\s*$/.test(line)) {
// empty or whitespace lines are OK // empty or whitespace lines are OK
@@ -186,14 +194,9 @@ function hygiene(some) {
}); });
const copyrights = es.through(function (file) { const copyrights = es.through(function (file) {
const lines = file.__lines; if (file.contents.toString('utf8').indexOf(copyrightHeader) !== 0) {
console.error(file.relative + ': Missing or bad copyright statement');
for (let i = 0; i < copyrightHeaderLines.length; i++) { errorCount++;
if (lines[i] !== copyrightHeaderLines[i]) {
console.error(file.relative + ': Missing or bad copyright statement');
errorCount++;
break;
}
} }
this.emit('data', file); this.emit('data', file);
@@ -201,25 +204,12 @@ function hygiene(some) {
const formatting = es.map(function (file, cb) { const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), { tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: false, verify: true,
tsfmt: true, tsfmt: true,
// verbose: true, // verbose: true
// keep checkJS happy
editorconfig: undefined,
replace: undefined,
tsconfig: undefined,
tsconfigFile: undefined,
tslint: undefined,
tslintFile: undefined,
tsfmtFile: undefined,
vscode: undefined,
vscodeFile: undefined
}).then(result => { }).then(result => {
let original = result.src.replace(/\r\n/gm, '\n'); if (result.error) {
let formatted = result.dest.replace(/\r\n/gm, '\n'); console.error(result.message);
if (original !== formatted) {
console.error('File not formatted:', file.relative);
errorCount++; errorCount++;
} }
cb(null, file); cb(null, file);
@@ -229,31 +219,32 @@ function hygiene(some) {
}); });
}); });
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
const tslintOptions = { fix: false, formatter: 'json' };
const tsLinter = new tslint.Linter(tslintOptions);
const tsl = es.through(function (file) { const tsl = es.through(function (file) {
const configuration = tslint.Configuration.findConfiguration(null, '.');
const options = { formatter: 'json', rulesDirectory: 'build/lib/tslint' };
const contents = file.contents.toString('utf8'); const contents = file.contents.toString('utf8');
tsLinter.lint(file.relative, contents, tslintConfiguration.results); const linter = new tslint.Linter(options);
linter.lint(file.relative, contents, configuration.results);
const result = linter.getResult();
if (result.failureCount > 0) {
reportFailures(result.failures);
errorCount += result.failureCount;
}
this.emit('data', file); this.emit('data', file);
}); });
let input; const result = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true })
if (Array.isArray(some) || typeof some === 'string' || !some) {
input = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true });
} else {
input = some;
}
const result = input
.pipe(filter(f => !f.stat.isDirectory())) .pipe(filter(f => !f.stat.isDirectory()))
.pipe(filter(eolFilter))
// {{SQL CARBON EDIT}}
//.pipe(options.skipEOL ? es.through() : eol)
.pipe(filter(indentationFilter)) .pipe(filter(indentationFilter))
.pipe(indentation) .pipe(indentation)
.pipe(filter(copyrightFilter)); .pipe(filter(copyrightFilter))
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
// .pipe(copyrights); //.pipe(copyrights);
const typescript = result const typescript = result
.pipe(filter(tslintFilter)) .pipe(filter(tslintFilter))
@@ -264,51 +255,22 @@ function hygiene(some) {
.pipe(filter(eslintFilter)) .pipe(filter(eslintFilter))
.pipe(gulpeslint('src/.eslintrc')) .pipe(gulpeslint('src/.eslintrc'))
.pipe(gulpeslint.formatEach('compact')); .pipe(gulpeslint.formatEach('compact'));
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
// .pipe(gulpeslint.failAfterError()); // .pipe(gulpeslint.failAfterError());
let count = 0;
return es.merge(typescript, javascript) return es.merge(typescript, javascript)
.pipe(es.through(function (data) { .pipe(es.through(null, function () {
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
this.emit('end'); // if (errorCount > 0) {
// this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
// } else {
// this.emit('end');
// }
this.emit('end');
})); }));
} };
function createGitIndexVinyls(paths) { gulp.task('hygiene', () => hygiene(''));
const cp = require('child_process');
const repositoryPath = process.cwd();
const fns = paths.map(relativePath => () => new Promise((c, e) => {
const fullPath = path.join(repositoryPath, relativePath);
fs.stat(fullPath, (err, stat) => {
if (err && err.code === 'ENOENT') { // ignore deletions
return c(null);
} else if (err) {
return e(err);
}
cp.exec(`git show :${relativePath}`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
if (err) {
return e(err);
}
c(new VinylFile({
path: fullPath,
base: repositoryPath,
contents: out,
stat
}));
});
});
}));
return pall(fns, { concurrency: 4 })
.then(r => r.filter(p => !!p));
}
gulp.task('hygiene', () => hygiene());
// this allows us to run hygiene as a git pre-commit hook // this allows us to run hygiene as a git pre-commit hook
if (require.main === module) { if (require.main === module) {
@@ -319,38 +281,33 @@ if (require.main === module) {
process.exit(1); process.exit(1);
}); });
if (process.argv.length > 2) { cp.exec('git config core.autocrlf', (err, out) => {
hygiene(process.argv.slice(2)).on('error', err => { const skipEOL = out.trim() === 'true';
console.error();
console.error(err); if (process.argv.length > 2) {
process.exit(1); return hygiene(process.argv.slice(2), { skipEOL: skipEOL }).on('error', err => {
}); console.error();
} else { console.error(err);
process.exit(1);
});
}
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => { cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) { if (err) {
console.error(); console.error();
console.error(err); console.error(err);
process.exit(1); process.exit(1);
return;
} }
const some = out const some = out
.split(/\r?\n/) .split(/\r?\n/)
.filter(l => !!l); .filter(l => !!l);
if (some.length > 0) { hygiene(some, { skipEOL: skipEOL }).on('error', err => {
console.log('Reading git index versions...'); console.error();
console.error(err);
createGitIndexVinyls(some) process.exit(1);
.then(vinyls => new Promise((c, e) => hygiene(es.readArray(vinyls)) });
.on('end', () => c())
.on('error', e)))
.catch(err => {
console.error();
console.error(err);
process.exit(1);
});
}
}); });
} });
} }

View File

@@ -14,18 +14,13 @@ const util = require('./lib/util');
const remote = require('gulp-remote-src'); const remote = require('gulp-remote-src');
const zip = require('gulp-vinyl-zip'); const zip = require('gulp-vinyl-zip');
const assign = require('object-assign'); const assign = require('object-assign');
// {{SQL CARBON EDIT}}
const jeditor = require('gulp-json-editor');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json'); const pkg = require('../package.json');
gulp.task('mixin', function () { gulp.task('mixin', function () {
// {{SQL CARBON EDIT}} const repo = process.env['VSCODE_MIXIN_REPO'];
const updateUrl = process.env['SQLOPS_UPDATEURL'];
if (!updateUrl) { if (!repo) {
console.log('Missing SQLOPS_UPDATEURL, skipping mixin'); console.log('Missing VSCODE_MIXIN_REPO, skipping mixin');
return; return;
} }
@@ -36,20 +31,39 @@ gulp.task('mixin', function () {
return; return;
} }
// {{SQL CARBON EDIT}} const url = `https://github.com/${repo}/archive/${pkg.distro}.zip`;
let serviceUrl = 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json'; const opts = { base: url };
if (quality === 'insider') { const username = process.env['VSCODE_MIXIN_USERNAME'];
serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`; const password = process.env['VSCODE_MIXIN_PASSWORD'];
}
let newValues = {
"updateUrl": updateUrl,
"quality": quality,
"extensionsGallery": {
"serviceUrl": serviceUrl
}
};
return gulp.src('./product.json') if (username || password) {
.pipe(jeditor(newValues)) opts.auth = { user: username || '', pass: password || '' };
}
console.log('Mixing in sources from \'' + url + '\':');
let all = remote('', opts)
.pipe(zip.src())
.pipe(filter(function (f) { return !f.isDirectory(); }))
.pipe(util.rebase(1));
if (quality) {
const productJsonFilter = filter('product.json', { restore: true });
const mixin = all
.pipe(filter(['quality/' + quality + '/**']))
.pipe(util.rebase(2))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json(o => assign({}, require('../product.json'), o)))
.pipe(productJsonFilter.restore);
all = es.merge(mixin);
}
return all
.pipe(es.mapSync(function (f) {
console.log(f.relative);
return f;
}))
.pipe(gulp.dest('.')); .pipe(gulp.dest('.'));
}); });

View File

@@ -13,6 +13,18 @@ const filter = require('gulp-filter');
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules')); gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules')); gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
gulp.task('clean-client', util.rimraf('dataprotocol-node/client/node_modules'));
gulp.task('clean-jsonrpc', util.rimraf('dataprotocol-node/jsonrpc/node_modules'));
gulp.task('clean-server', util.rimraf('dataprotocol-node/server/node_modules'));
gulp.task('clean-types', util.rimraf('dataprotocol-node/types/node_modules'));
gulp.task('clean-extensions-modules', util.rimraf('extensions-modules/node_modules'));
gulp.task('clean-protocol', ['clean-extensions-modules', 'clean-mssql-extension', 'clean-credentials-extension', 'clean-client', 'clean-jsonrpc', 'clean-server', 'clean-types']);
// Tasks to clean extensions modules
gulp.task('clean-mssql-ext-mod', util.rimraf('extensions/mssql/node_modules/extensions-modules'));
gulp.task('clean-credentials-ext-mod', util.rimraf('extensions/credentials/node_modules/extensions-modules'));
gulp.task('clean-build-ext-mod', util.rimraf('build/node_modules/extensions-modules'));
gulp.task('clean-ext-mod', ['clean-mssql-ext-mod', 'clean-credentials-ext-mod', 'clean-build-ext-mod', 'clean-extensions-modules']);
gulp.task('fmt', () => formatStagedFiles()); gulp.task('fmt', () => formatStagedFiles());
const formatFiles = (some) => { const formatFiles = (some) => {

View File

@@ -27,26 +27,20 @@ const common = require('./lib/optimize');
const nlsDev = require('vscode-nls-dev'); const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname); const root = path.dirname(__dirname);
const commit = util.getVersion(root); const commit = util.getVersion(root);
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const packageJson = require('../package.json'); const packageJson = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json'); const product = require('../product.json');
const shrinkwrap = require('../npm-shrinkwrap.json');
const crypto = require('crypto'); const crypto = require('crypto');
const i18n = require('./lib/i18n'); const i18n = require('./lib/i18n');
// {{SQL CARBON EDIT}}
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
const glob = require('glob');
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar;
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// @ts-ignore
// {{SQL CARBON EDIT}}
var del = require('del'); var del = require('del');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot); // {{SQL CARBON EDIT}}
const serviceInstaller = require('extensions-modules/lib/languageservice/serviceInstallerUtil');
const glob = require('glob');
const productDependencies = Object.keys(product.dependencies || {});
const dependencies = Object.keys(shrinkwrap.dependencies)
.concat(productDependencies); // additional dependencies from our product configuration
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n)); const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const nodeModules = [ const nodeModules = [
@@ -56,29 +50,19 @@ const nodeModules = [
'rxjs/Subject', 'rxjs/Subject',
'rxjs/Observer', 'rxjs/Observer',
'ng2-charts/ng2-charts'] 'ng2-charts/ng2-charts']
.concat(Object.keys(product.dependencies || {})) .concat(dependencies)
.concat(_.uniq(productionDependencies.map(d => d.name)))
.concat(baseModules); .concat(baseModules);
// Build // Build
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('./builtInExtensions.json'); const builtInExtensions = [
{ name: 'ms-vscode.node-debug', version: '1.18.3' },
{ name: 'ms-vscode.node-debug2', version: '1.18.5' }
];
const excludedExtensions = [ const excludedExtensions = [
'vscode-api-tests', 'vscode-api-tests',
'vscode-colorize-tests', 'vscode-colorize-tests'
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
];
// {{SQL CARBON EDIT}}
const vsce = require('vsce');
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
'agent',
'profiler'
]; ];
const vscodeEntryPoints = _.flatten([ const vscodeEntryPoints = _.flatten([
@@ -91,29 +75,26 @@ const vscodeEntryPoints = _.flatten([
const vscodeResources = [ const vscodeResources = [
'out-build/main.js', 'out-build/main.js',
'out-build/cli.js', 'out-build/cli.js',
'out-build/driver.js',
'out-build/bootstrap.js', 'out-build/bootstrap.js',
'out-build/bootstrap-amd.js', 'out-build/bootstrap-amd.js',
'out-build/paths.js', 'out-build/paths.js',
'out-build/vs/**/*.{svg,png,cur,html}', 'out-build/vs/**/*.{svg,png,cur,html}',
'out-build/vs/base/common/performance.js', 'out-build/vs/base/node/startupTimers.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}', 'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**', 'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css', 'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/electron-browser/bootstrap/**', 'out-build/vs/workbench/electron-browser/bootstrap/**',
'out-build/vs/workbench/parts/debug/**/*.json', 'out-build/vs/workbench/parts/debug/**/*.json',
'out-build/vs/workbench/parts/execution/**/*.scpt', 'out-build/vs/workbench/parts/execution/**/*.scpt',
'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js', 'out-build/vs/workbench/parts/html/browser/webview-pre.js',
'out-build/vs/**/markdown.css', 'out-build/vs/**/markdown.css',
'out-build/vs/workbench/parts/tasks/**/*.json', 'out-build/vs/workbench/parts/tasks/**/*.json',
'out-build/vs/workbench/parts/terminal/electron-browser/terminalProcess.js', 'out-build/vs/workbench/parts/terminal/electron-browser/terminalProcess.js',
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md', 'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/services/files/**/*.exe', 'out-build/vs/workbench/services/files/**/*.exe',
'out-build/vs/workbench/services/files/**/*.md', 'out-build/vs/workbench/services/files/**/*.md',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js', 'out-build/vs/code/electron-browser/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js', // {{SQL CARBON EDIT}}
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
// {{SQL CARBON EDIT}}
'out-build/sql/workbench/electron-browser/splashscreen/*', 'out-build/sql/workbench/electron-browser/splashscreen/*',
'out-build/sql/**/*.{svg,png,cur,html}', 'out-build/sql/**/*.{svg,png,cur,html}',
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}', 'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
@@ -130,7 +111,6 @@ const vscodeResources = [
'out-build/sql/parts/grid/views/**/*.html', 'out-build/sql/parts/grid/views/**/*.html',
'out-build/sql/parts/tasks/**/*.html', 'out-build/sql/parts/tasks/**/*.html',
'out-build/sql/parts/taskHistory/viewlet/media/**', 'out-build/sql/parts/taskHistory/viewlet/media/**',
'out-build/sql/parts/jobManagement/common/media/*.svg',
'out-build/sql/media/objectTypes/*.svg', 'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg', 'out-build/sql/media/icons/*.svg',
'!**/test/**' '!**/test/**'
@@ -142,7 +122,10 @@ const BUNDLED_FILE_HEADER = [
' *--------------------------------------------------------*/' ' *--------------------------------------------------------*/'
].join('\n'); ].join('\n');
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []); var languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
if (process.env.VSCODE_QUALITY !== 'stable') {
languages = languages.concat(['ptb', 'hun', 'trk']); // Add languages requested by the community to non-stable builds
}
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode')); gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({ gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
@@ -152,8 +135,7 @@ gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compil
loaderConfig: common.loaderConfig(nodeModules), loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER, header: BUNDLED_FILE_HEADER,
out: 'out-vscode', out: 'out-vscode',
languages: languages, languages: languages
bundleInfo: undefined
})); }));
@@ -172,7 +154,7 @@ gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], commo
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8')); const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
const config = { const config = {
version: getElectronVersion(), version: packageJson.electronVersion,
productAppName: product.nameLong, productAppName: product.nameLong,
companyName: 'Microsoft Corporation', companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2018 Microsoft. All rights reserved', copyright: 'Copyright (C) 2018 Microsoft. All rights reserved',
@@ -194,7 +176,7 @@ const config = {
name: product.nameLong, name: product.nameLong,
urlSchemes: [product.urlProtocol] urlSchemes: [product.urlProtocol]
}], }],
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0, darwinCredits: darwinCreditsTemplate ? new Buffer(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
linuxExecutableName: product.applicationName, linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico', winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0, token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
@@ -258,30 +240,10 @@ function computeChecksum(filename) {
return hash; return hash;
} }
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath
});
});
}
function packageTask(platform, arch, opts) { function packageTask(platform, arch, opts) {
opts = opts || {}; opts = opts || {};
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const destination = path.join(path.dirname(root), 'sqlops') + (platform ? '-' + platform : '') + (arch ? '-' + arch : ''); const destination = path.join(path.dirname(root), 'sqlops') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
platform = platform || process.platform; platform = platform || process.platform;
@@ -307,10 +269,7 @@ function packageTask(platform, arch, opts) {
return { name: extensionName, path: extensionPath }; return { name: extensionName, path: extensionPath };
}) })
.filter(({ name }) => excludedExtensions.indexOf(name) === -1) .filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name)) .filter(({ name }) => builtInExtensions.every(b => b.name !== name));
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
packageBuiltInExtensions();
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => { const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
const nlsFilter = filter('**/*.nls.json', { restore: true }); const nlsFilter = filter('**/*.nls.json', { restore: true });
@@ -324,20 +283,15 @@ function packageTask(platform, arch, opts) {
.pipe(nlsFilter.restore); .pipe(nlsFilter.restore);
})); }));
const localExtensionDependencies = gulp.src('extensions/node_modules/**', { base: '.' });
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
const sources = es.merge(src, localExtensions, localExtensionDependencies) const sources = es.merge(src, localExtensions, localExtensionDependencies)
.pipe(util.setExecutableBit(['**/*.sh'])) .pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map'])); .pipe(filter(['**',
'!**/*.js.map',
'!extensions/**/node_modules/**/{test, tests}/**',
'!extensions/**/node_modules/**/test.js']));
let version = packageJson.version; let version = packageJson.version;
const quality = product.quality; const quality = product.quality;
@@ -350,7 +304,7 @@ function packageTask(platform, arch, opts) {
const packageJsonStream = gulp.src(['package.json'], { base: '.' }) const packageJsonStream = gulp.src(['package.json'], { base: '.' })
.pipe(json({ name, version })); .pipe(json({ name, version }));
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson); const settingsSearchBuildId = getBuildNumber();
const date = new Date().toISOString(); const date = new Date().toISOString();
const productJsonStream = gulp.src(['product.json'], { base: '.' }) const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json({ commit, date, checksums, settingsSearchBuildId })); .pipe(json({ commit, date, checksums, settingsSearchBuildId }));
@@ -361,13 +315,11 @@ function packageTask(platform, arch, opts) {
// TODO the API should be copied to `out` during compile, not here // TODO the API should be copied to `out` during compile, not here
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts')); const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts')); const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
const depsSrc = [ const depsSrc = _.flatten(dependencies
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])), .map(function (d) { return ['node_modules/' + d + '/**', '!node_modules/' + d + '/**/{test,tests}/**']; }));
..._.flatten(Object.keys(product.dependencies || {}).map(d => [`node_modules/${d}/**`, `!node_modules/${d}/**/{test,tests}/**`]))
];
const deps = gulp.src(depsSrc, { base: '.', dot: true }) const deps = gulp.src(depsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json'])) .pipe(filter(['**', '!**/package-lock.json']))
@@ -375,36 +327,16 @@ function packageTask(platform, arch, opts) {
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js'])) .pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('jschardet', ['dist/**'])) .pipe(util.cleanNodeModule('jschardet', ['dist/**']))
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js'])) .pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('v8-profiler', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node'])) .pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/**']))
// {{SQL CARBON EDIT}}
.pipe(util.cleanNodeModule('chart.js', ['node_modules/**'], undefined))
.pipe(util.cleanNodeModule('emmet', ['node_modules/**'], undefined))
.pipe(util.cleanNodeModule('pty.js', ['build/**'], ['build/Release/**']))
.pipe(util.cleanNodeModule('jquery-ui', ['external/**', 'demos/**'], undefined))
.pipe(util.cleanNodeModule('core-js', ['**/**'], undefined))
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a'])) .pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a'])) .pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']));
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
// {{SQL CARBON EDIT}}
let copiedModules = gulp.src([
'node_modules/jquery/**/*.*',
'node_modules/reflect-metadata/**/*.*',
'node_modules/slickgrid/**/*.*',
'node_modules/underscore/**/*.*',
'node_modules/zone.js/**/*.*',
'node_modules/chart.js/**/*.*'
], { base: '.', dot: true });
let all = es.merge( let all = es.merge(
packageJsonStream, packageJsonStream,
@@ -412,8 +344,7 @@ function packageTask(platform, arch, opts) {
license, license,
watermark, watermark,
api, api,
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
copiedModules,
dataApi, dataApi,
sources, sources,
deps deps
@@ -484,21 +415,25 @@ gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], p
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true })); gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
// Transifex Localizations // Transifex Localizations
const vscodeLanguages = [
const innoSetupConfig = { 'zh-hans',
'zh-cn': { codePage: 'CP936', defaultInfo: { name: 'Simplified Chinese', id: '$0804', } }, 'zh-hant',
'zh-tw': { codePage: 'CP950', defaultInfo: { name: 'Traditional Chinese', id: '$0404' } }, 'ja',
'ko': { codePage: 'CP949', defaultInfo: { name: 'Korean', id: '$0412' } }, 'ko',
'ja': { codePage: 'CP932' }, 'de',
'de': { codePage: 'CP1252' }, 'fr',
'fr': { codePage: 'CP1252' }, 'es',
'es': { codePage: 'CP1252' }, 'ru',
'ru': { codePage: 'CP1251' }, 'it',
'it': { codePage: 'CP1252' }, 'pt-br',
'pt-br': { codePage: 'CP1252' }, 'hu',
'hu': { codePage: 'CP1250' }, 'tr'
'tr': { codePage: 'CP1254' } ];
}; const setupDefaultLanguages = [
'zh-hans',
'zh-hant',
'ko'
];
const apiHostname = process.env.TRANSIFEX_API_URL; const apiHostname = process.env.TRANSIFEX_API_URL;
const apiName = process.env.TRANSIFEX_API_NAME; const apiName = process.env.TRANSIFEX_API_NAME;
@@ -506,50 +441,27 @@ const apiToken = process.env.TRANSIFEX_API_TOKEN;
gulp.task('vscode-translations-push', ['optimize-vscode'], function () { gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json'; const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*'; const pathToExtensions = './extensions/**/*.nls.json';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}'; const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge( return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()), gulp.src(pathToMetadata).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()), gulp.src(pathToSetup).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions()) gulp.src(pathToExtensions).pipe(i18n.prepareXlfFiles('vscode-extensions'))
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken)); ).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
}); });
gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(vfs.dest('../vscode-transifex-input'));
});
gulp.task('vscode-translations-pull', function () { gulp.task('vscode-translations-pull', function () {
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => { return es.merge(
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`)); i18n.pullXlfFiles('vscode-editor', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-workbench', apiHostname, apiName, apiToken, vscodeLanguages),
let includeDefault = !!innoSetupConfig[language.id].defaultInfo; i18n.pullXlfFiles('vscode-extensions', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`)); i18n.pullXlfFiles('vscode-setup', apiHostname, apiName, apiToken, setupDefaultLanguages)
}); ).pipe(vfs.dest('../vscode-localization'));
}); });
gulp.task('vscode-translations-import', function () { gulp.task('vscode-translations-import', function () {
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => { return gulp.src('../vscode-localization/**/*.xlf').pipe(i18n.prepareJsonFiles()).pipe(vfs.dest('./i18n'));
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
.pipe(i18n.prepareI18nFiles())
.pipe(vfs.dest(`./i18n/${language.folderName}`));
// {{SQL CARBON EDIT}}
// gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
// .pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
// .pipe(vfs.dest(`./build/win32/i18n`));
});
}); });
// Sourcemaps // Sourcemaps
@@ -575,19 +487,20 @@ gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json'); const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => { gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => {
const branch = process.env.BUILD_SOURCEBRANCH; const branch = process.env.BUILD_SOURCEBRANCH;
if (!branch.endsWith('/master') && !branch.indexOf('/release/') >= 0) {
if (!/\/master$/.test(branch) && branch.indexOf('/release/') < 0) {
console.log(`Only runs on master and release branches, not ${branch}`); console.log(`Only runs on master and release branches, not ${branch}`);
return; return;
} }
if (!fs.existsSync(allConfigDetailsPath)) { if (!fs.existsSync(allConfigDetailsPath)) {
throw new Error(`configuration file at ${allConfigDetailsPath} does not exist`); console.error(`configuration file at ${allConfigDetailsPath} does not exist`);
return;
} }
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson); const settingsSearchBuildId = getBuildNumber();
if (!settingsSearchBuildId) { if (!settingsSearchBuildId) {
throw new Error('Failed to compute build number'); console.error('Failed to compute build number');
return;
} }
return gulp.src(allConfigDetailsPath) return gulp.src(allConfigDetailsPath)
@@ -599,19 +512,76 @@ gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () =
})); }));
}); });
function getSettingsSearchBuildId(packageJson) { function getBuildNumber() {
const previous = util.getPreviousVersion(packageJson.version); const previous = getPreviousVersion(packageJson.version);
if (!previous) {
return 0;
}
try { try {
const out = cp.execSync(`git rev-list ${previous}..HEAD --count`); const out = cp.execSync(`git rev-list ${previous}..HEAD --count`);
const count = parseInt(out.toString()); const count = parseInt(out.toString());
return util.versionStringToNumber(packageJson.version) * 1e4 + count; return versionStringToNumber(packageJson.version) * 1e4 + count;
} catch (e) { } catch (e) {
throw new Error('Could not determine build number: ' + e.toString()); console.error('Could not determine build number: ' + e.toString());
return 0;
} }
} }
// This task is only run for the MacOS build /**
* Given 1.17.2, return 1.17.1
* 1.18.0 => 1.17.2.
* 2.0.0 => 1.18.0 (or the highest 1.x)
*/
function getPreviousVersion(versionStr) {
function tagExists(tagName) {
try {
cp.execSync(`git rev-parse ${tagName}`, { stdio: 'ignore' });
return true;
} catch (e) {
return false;
}
}
function getLastTagFromBase(semverArr, componentToTest) {
const baseVersion = semverArr.join('.');
if (!tagExists(baseVersion)) {
console.error('Failed to find tag for base version, ' + baseVersion);
return null;
}
let goodTag;
do {
goodTag = semverArr.join('.');
semverArr[componentToTest]++;
} while (tagExists(semverArr.join('.')));
return goodTag;
}
const semverArr = versionStr.split('.');
if (semverArr[2] > 0) {
semverArr[2]--;
return semverArr.join('.');
} else if (semverArr[1] > 0) {
semverArr[1]--;
return getLastTagFromBase(semverArr, 2);
} else {
semverArr[0]--;
return getLastTagFromBase(semverArr, 1);
}
}
function versionStringToNumber(versionStr) {
const semverRegex = /(\d+)\.(\d+)\.(\d+)/;
const match = versionStr.match(semverRegex);
if (!match) {
return 0;
}
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
gulp.task('generate-vscode-configuration', () => { gulp.task('generate-vscode-configuration', () => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const buildDir = process.env['AGENT_BUILDDIRECTORY']; const buildDir = process.env['AGENT_BUILDDIRECTORY'];
@@ -621,8 +591,7 @@ gulp.task('generate-vscode-configuration', () => {
const userDataDir = path.join(os.tmpdir(), 'tmpuserdata'); const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
const extensionsDir = path.join(os.tmpdir(), 'tmpextdir'); const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app'; const appPath = path.join(buildDir, 'VSCode-darwin/Visual\\ Studio\\ Code\\ -\\ Insiders.app/Contents/Resources/app/bin/code');
const appPath = path.join(buildDir, `VSCode-darwin/${appName}/Contents/Resources/app/bin/code`);
const codeProc = cp.exec(`${appPath} --export-default-configuration='${allConfigDetailsPath}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`); const codeProc = cp.exec(`${appPath} --export-default-configuration='${allConfigDetailsPath}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`);
const timer = setTimeout(() => { const timer = setTimeout(() => {
@@ -642,31 +611,36 @@ gulp.task('generate-vscode-configuration', () => {
clearTimeout(timer); clearTimeout(timer);
reject(err); reject(err);
}); });
}).catch(e => {
// Don't fail the build
console.error(e.toString());
}); });
}); });
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
// Install service locally before building carbon // Install service locally before building carbon
function installService() { function installService(extObj, path) {
let config = require('../extensions/mssql/src/config.json'); var installer = new serviceInstaller.ServiceInstaller(extObj, path);
return platformInfo.getCurrent().then(p => { installer.getServiceInstallDirectoryRoot().then(serviceInstallFolder => {
let runtime = p.runtimeId; console.log('Cleaning up the install folder: ' + serviceInstallFolder);
// fix path since it won't be correct del(serviceInstallFolder + '/*').then(() => {
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory); console.log('Installing the service. Install folder: ' + serviceInstallFolder);
var installer = new serviceDownloader(config); installer.installService();
let serviceInstallFolder = installer.getInstallDirectory(runtime); }, delError => {
console.log('Cleaning up the install folder: ' + serviceInstallFolder); console.log('failed to delete the install folder error: ' + delError);
return del(serviceInstallFolder + '/*').then(() => { });
console.log('Installing the service. Install folder: ' + serviceInstallFolder); }, getFolderPathError => {
return installer.installService(runtime); console.log('failed to call getServiceInstallDirectoryRoot error: ' + getFolderPathError);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
}); });
} }
gulp.task('install-sqltoolsservice', () => { gulp.task('install-sqltoolsservice', () => {
return installService(); var mssqlExt = require('../extensions/mssql/client/out/models/constants');
var extObj = new mssqlExt.Constants();
var path = '../extensions/mssql/client/out/config.json';
return installService(extObj, path);
}); });

View File

@@ -12,15 +12,35 @@ const shell = require('gulp-shell');
const es = require('event-stream'); const es = require('event-stream');
const vfs = require('vinyl-fs'); const vfs = require('vinyl-fs');
const util = require('./lib/util'); const util = require('./lib/util');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const packageJson = require('../package.json'); const packageJson = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json'); const product = require('../product.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file const rpmDependencies = require('../resources/linux/rpm/dependencies');
const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000); const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
const flatpakManifest = {
appId: product.darwinBundleIdentifier, // We need a reverse-url style identifier.
sdk: 'org.freedesktop.Sdk',
runtime: 'org.freedesktop.Sdk',
runtimeVersion: '1.4',
base: 'io.atom.electron.BaseApp',
baseFlatpakref: 'https://s3-us-west-2.amazonaws.com/electron-flatpak.endlessm.com/electron-base-app-master.flatpakref',
command: product.applicationName,
symlinks: [
['/share/' + product.applicationName + '/bin/' + product.applicationName, '/bin/' + product.applicationName],
],
finishArgs: [
'--share=ipc', '--socket=x11', // Allow showing X11 windows.
'--share=network', // Network access (e.g. for installing extension).
'--filesystem=host', // Allow access to the whole file system.
'--device=dri', // Allow OpenGL rendering.
'--filesystem=/tmp', // Needed for Chromium's single instance check.
'--socket=pulseaudio', // Some extensions may want to play sounds...
'--talk-name=org.freedesktop.Notifications', // ...or pop up notifications.
],
};
function getDebPackageArch(arch) { function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch]; return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
} }
@@ -91,7 +111,8 @@ function buildDebPackage(arch) {
return shell.task([ return shell.task([
'chmod 755 ' + product.applicationName + '-' + debArch + '/DEBIAN/postinst ' + product.applicationName + '-' + debArch + '/DEBIAN/prerm ' + product.applicationName + '-' + debArch + '/DEBIAN/postrm', 'chmod 755 ' + product.applicationName + '-' + debArch + '/DEBIAN/postinst ' + product.applicationName + '-' + debArch + '/DEBIAN/prerm ' + product.applicationName + '-' + debArch + '/DEBIAN/postrm',
'mkdir -p deb', 'mkdir -p deb',
'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb' 'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb',
'dpkg-scanpackages deb /dev/null > Packages'
], { cwd: '.build/linux/deb/' + debArch }); ], { cwd: '.build/linux/deb/' + debArch });
} }
@@ -199,10 +220,10 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) { function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch); const snapBuildPath = getSnapBuildPath(arch);
const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
return shell.task([ return shell.task([
`chmod +x ${snapBuildPath}/electron-launch`, `chmod +x ${snapBuildPath}/electron-launch`,
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}` `cd ${snapBuildPath} && snapcraft snap`
]); ]);
} }
@@ -313,3 +334,10 @@ gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prep
gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32')); gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32'));
gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64')); gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64'));
gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm')); gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm'));
gulp.task('vscode-linux-ia32-prepare-flatpak', ['clean-vscode-linux-ia32-flatpak'], prepareFlatpak('ia32'));
gulp.task('vscode-linux-x64-prepare-flatpak', ['clean-vscode-linux-x64-flatpak'], prepareFlatpak('x64'));
gulp.task('vscode-linux-arm-prepare-flatpak', ['clean-vscode-linux-arm-flatpak'], prepareFlatpak('arm'));
gulp.task('vscode-linux-ia32-flatpak', ['vscode-linux-ia32-prepare-flatpak'], buildFlatpak('ia32'));
gulp.task('vscode-linux-x64-flatpak', ['vscode-linux-x64-prepare-flatpak'], buildFlatpak('x64'));
gulp.task('vscode-linux-arm-flatpak', ['vscode-linux-arm-prepare-flatpak'], buildFlatpak('arm'));

View File

@@ -11,11 +11,8 @@ const assert = require('assert');
const cp = require('child_process'); const cp = require('child_process');
const _7z = require('7zip')['7z']; const _7z = require('7zip')['7z'];
const util = require('./lib/util'); const util = require('./lib/util');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json'); const pkg = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json'); const product = require('../product.json');
const vfs = require('vinyl-fs');
const repoPath = path.dirname(__dirname); const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
@@ -94,13 +91,3 @@ gulp.task('vscode-win32-ia32-archive', ['clean-vscode-win32-ia32-archive'], arch
gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64'))); gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64')));
gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64')); gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64'));
function copyInnoUpdater(arch) {
return () => {
return gulp.src('build/win32/{inno_updater.exe,vcruntime140.dll}', { base: 'build/win32' })
.pipe(vfs.dest(path.join(buildPath(arch), 'tools')));
};
}
gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));

View File

@@ -1,118 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var path = require("path");
var es = require("event-stream");
var pickle = require("chromium-pickle-js");
var Filesystem = require("asar/lib/filesystem");
var VinylFile = require("vinyl");
var minimatch = require("minimatch");
/**
 * Builds an Electron ASAR archive from a stream of Vinyl files.
 *
 * Returns a through-stream: every incoming file is registered in an in-memory
 * asar Filesystem rooted at `folderPath`. Files whose relative path matches
 * one of `unpackGlobs` are re-emitted under `<destFilename>.unpacked/` instead
 * of being embedded; all other file contents are buffered and, once every
 * pending filesystem insert has completed, flushed as a single Vinyl file
 * named `destFilename` (pickled header size + header + concatenated contents).
 */
function createAsar(folderPath, unpackGlobs, destFilename) {
    // A file is "unpacked" (kept outside the archive) when its relative path
    // matches any of the provided globs.
    var shouldUnpackFile = function (file) {
        for (var i = 0; i < unpackGlobs.length; i++) {
            if (minimatch(file.relative, unpackGlobs[i])) {
                return true;
            }
        }
        return false;
    };
    var filesystem = new Filesystem(folderPath);
    // Buffers of embedded file contents, concatenated at flush time.
    var out = [];
    // Keep track of pending inserts; filesystem.insertFile completes
    // asynchronously via the onFileInserted callback.
    var pendingInserts = 0;
    var onFileInserted = function () { pendingInserts--; };
    // Do not insert twice the same directory
    var seenDir = {};
    // Registers `dir` and all of its ancestors in the asar filesystem,
    // handling both '/' and '\' path separators.
    var insertDirectoryRecursive = function (dir) {
        if (seenDir[dir]) {
            return;
        }
        var lastSlash = dir.lastIndexOf('/');
        if (lastSlash === -1) {
            lastSlash = dir.lastIndexOf('\\');
        }
        if (lastSlash !== -1) {
            insertDirectoryRecursive(dir.substring(0, lastSlash));
        }
        seenDir[dir] = true;
        filesystem.insertDirectory(dir);
    };
    // Ensures the parent directory chain of `file` exists in the archive.
    var insertDirectoryForFile = function (file) {
        var lastSlash = file.lastIndexOf('/');
        if (lastSlash === -1) {
            lastSlash = file.lastIndexOf('\\');
        }
        if (lastSlash !== -1) {
            insertDirectoryRecursive(file.substring(0, lastSlash));
        }
    };
    var insertFile = function (relativePath, stat, shouldUnpack) {
        insertDirectoryForFile(relativePath);
        pendingInserts++;
        filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
    };
    return es.through(function (file) {
        // Directories are implied by their children; skip them.
        if (file.stat.isDirectory()) {
            return;
        }
        if (!file.stat.isFile()) {
            throw new Error("unknown item in stream!");
        }
        var shouldUnpack = shouldUnpackFile(file);
        insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
        if (shouldUnpack) {
            // The file goes outside of xx.asar, in a folder xx.asar.unpacked
            var relative = path.relative(folderPath, file.path);
            this.queue(new VinylFile({
                cwd: folderPath,
                base: folderPath,
                path: path.join(destFilename + '.unpacked', relative),
                stat: file.stat,
                contents: file.contents
            }));
        }
        else {
            // The file goes inside of xx.asar
            out.push(file.contents);
        }
    }, function () {
        var _this = this;
        // Emits the finished archive: a pickled UInt32 header size, the pickled
        // JSON header, then the embedded contents, as one Vinyl file.
        var finish = function () {
            {
                var headerPickle = pickle.createEmpty();
                headerPickle.writeString(JSON.stringify(filesystem.header));
                var headerBuf = headerPickle.toBuffer();
                var sizePickle = pickle.createEmpty();
                sizePickle.writeUInt32(headerBuf.length);
                var sizeBuf = sizePickle.toBuffer();
                out.unshift(headerBuf);
                out.unshift(sizeBuf);
            }
            var contents = Buffer.concat(out);
            out.length = 0;
            _this.queue(new VinylFile({
                cwd: folderPath,
                base: folderPath,
                path: destFilename,
                contents: contents
            }));
            _this.queue(null);
        };
        // Call finish() only when all file inserts have finished...
        if (pendingInserts === 0) {
            finish();
        }
        else {
            // ...otherwise rebind the insert callback so the last completed
            // insert triggers the flush.
            onFileInserted = function () {
                pendingInserts--;
                if (pendingInserts === 0) {
                    finish();
                }
            };
        }
    });
}
exports.createAsar = createAsar;

View File

@@ -1,131 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as es from 'event-stream';
import * as pickle from 'chromium-pickle-js';
import * as Filesystem from 'asar/lib/filesystem';
import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch';
/**
 * Builds an Electron ASAR archive from a stream of Vinyl files.
 *
 * Returns a through-stream: every incoming file is registered in an in-memory
 * asar Filesystem rooted at `folderPath`. Files whose relative path matches
 * one of `unpackGlobs` are re-emitted under `<destFilename>.unpacked/` instead
 * of being embedded; all other file contents are buffered and, once every
 * pending filesystem insert has completed, flushed as a single Vinyl file
 * named `destFilename` (pickled header size + header + concatenated contents).
 */
export function createAsar(folderPath: string, unpackGlobs: string[], destFilename: string): NodeJS.ReadWriteStream {

	// A file is "unpacked" (kept outside the archive) when its relative path
	// matches any of the provided globs.
	const shouldUnpackFile = (file: VinylFile): boolean => {
		for (let i = 0; i < unpackGlobs.length; i++) {
			if (minimatch(file.relative, unpackGlobs[i])) {
				return true;
			}
		}
		return false;
	};

	const filesystem = new Filesystem(folderPath);
	// Buffers of embedded file contents, concatenated at flush time.
	const out: Buffer[] = [];

	// Keep track of pending inserts; filesystem.insertFile completes
	// asynchronously via the onFileInserted callback.
	let pendingInserts = 0;
	let onFileInserted = () => { pendingInserts--; };

	// Do not insert twice the same directory
	const seenDir: { [key: string]: boolean; } = {};
	// Registers `dir` and all of its ancestors in the asar filesystem,
	// handling both '/' and '\' path separators.
	const insertDirectoryRecursive = (dir: string) => {
		if (seenDir[dir]) {
			return;
		}
		let lastSlash = dir.lastIndexOf('/');
		if (lastSlash === -1) {
			lastSlash = dir.lastIndexOf('\\');
		}
		if (lastSlash !== -1) {
			insertDirectoryRecursive(dir.substring(0, lastSlash));
		}
		seenDir[dir] = true;
		filesystem.insertDirectory(dir);
	};

	// Ensures the parent directory chain of `file` exists in the archive.
	const insertDirectoryForFile = (file: string) => {
		let lastSlash = file.lastIndexOf('/');
		if (lastSlash === -1) {
			lastSlash = file.lastIndexOf('\\');
		}
		if (lastSlash !== -1) {
			insertDirectoryRecursive(file.substring(0, lastSlash));
		}
	};

	const insertFile = (relativePath: string, stat: { size: number; mode: number; }, shouldUnpack: boolean) => {
		insertDirectoryForFile(relativePath);
		pendingInserts++;
		filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
	};

	return es.through(function (file) {
		// Directories are implied by their children; skip them.
		if (file.stat.isDirectory()) {
			return;
		}
		if (!file.stat.isFile()) {
			throw new Error(`unknown item in stream!`);
		}
		const shouldUnpack = shouldUnpackFile(file);
		insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);

		if (shouldUnpack) {
			// The file goes outside of xx.asar, in a folder xx.asar.unpacked
			const relative = path.relative(folderPath, file.path);
			this.queue(new VinylFile({
				cwd: folderPath,
				base: folderPath,
				path: path.join(destFilename + '.unpacked', relative),
				stat: file.stat,
				contents: file.contents
			}));
		} else {
			// The file goes inside of xx.asar
			out.push(file.contents);
		}
	}, function () {
		// Emits the finished archive: a pickled UInt32 header size, the pickled
		// JSON header, then the embedded contents, as one Vinyl file.
		let finish = () => {
			{
				const headerPickle = pickle.createEmpty();
				headerPickle.writeString(JSON.stringify(filesystem.header));
				const headerBuf = headerPickle.toBuffer();

				const sizePickle = pickle.createEmpty();
				sizePickle.writeUInt32(headerBuf.length);
				const sizeBuf = sizePickle.toBuffer();

				out.unshift(headerBuf);
				out.unshift(sizeBuf);
			}

			const contents = Buffer.concat(out);
			out.length = 0;

			this.queue(new VinylFile({
				cwd: folderPath,
				base: folderPath,
				path: destFilename,
				contents: contents
			}));
			this.queue(null);
		};

		// Call finish() only when all file inserts have finished...
		if (pendingInserts === 0) {
			finish();
		} else {
			// ...otherwise rebind the insert callback so the last completed
			// insert triggers the flush.
			onFileInserted = () => {
				pendingInserts--;
				if (pendingInserts === 0) {
					finish();
				}
			};
		}
	});
}

View File

@@ -1,122 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const fs = require('fs');
const path = require('path');
const os = require('os');
const mkdirp = require('mkdirp');
const rimraf = require('rimraf');
const es = require('event-stream');
const rename = require('gulp-rename');
const vfs = require('vinyl-fs');
const ext = require('./extensions');
const util = require('gulp-util');
const root = path.dirname(path.dirname(__dirname));
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('../builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
/**
 * Resolves the on-disk staging folder for a built-in extension,
 * i.e. `<repo root>/.build/builtInExtensions/<name>`.
 */
function getExtensionPath(extension) {
    const stagingRoot = path.join(root, '.build', 'builtInExtensions');
    return path.join(stagingRoot, extension.name);
}
/**
 * Checks whether the locally staged copy of a built-in extension already
 * matches the requested version, by comparing the `version` field of its
 * staged package.json. Returns false when the manifest is missing or
 * unparseable.
 */
function isUpToDate(extension) {
    const manifestPath = path.join(getExtensionPath(extension), 'package.json');
    if (!fs.existsSync(manifestPath)) {
        return false;
    }
    const manifestContents = fs.readFileSync(manifestPath, { encoding: 'utf8' });
    try {
        return JSON.parse(manifestContents).version === extension.version;
    } catch (err) {
        // Corrupt manifest: treat as out of date so it gets re-downloaded.
        return false;
    }
}
/**
 * Ensures the marketplace copy of a built-in extension is staged locally.
 * When the staged copy is already at the requested version this only logs;
 * otherwise the stale folder is removed and a fresh copy is downloaded into
 * `.build/builtInExtensions`. Returns a stream for merging with the other
 * extension syncs.
 */
function syncMarketplaceExtension(extension) {
    if (!isUpToDate(extension)) {
        // Stale or missing: wipe and re-download from the marketplace.
        rimraf.sync(getExtensionPath(extension));
        const download = ext.fromMarketplace(extension.name, extension.version)
            .pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
            .pipe(vfs.dest('.build/builtInExtensions'));
        return download.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));
    }
    util.log(util.colors.blue('[marketplace]'), `${extension.name}@${extension.version}`, util.colors.green('✔︎'));
    return es.readArray([]);
}
/**
 * Dispatches the sync strategy for one built-in extension based on its
 * control state: 'disabled' skips it, 'marketplace' downloads it, and any
 * other value is treated as a local path to run the extension from (which is
 * validated but not copied). Returns a stream for merging.
 */
function syncExtension(extension, controlState) {
    if (controlState === 'disabled') {
        util.log(util.colors.blue('[disabled]'), util.colors.gray(extension.name));
        return es.readArray([]);
    }
    if (controlState === 'marketplace') {
        return syncMarketplaceExtension(extension);
    }
    // Any other control state is a local directory to run from; validate it.
    if (!fs.existsSync(controlState)) {
        util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
    } else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
        util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
    } else {
        util.log(util.colors.blue('[local]'), `${extension.name}: ${util.colors.cyan(controlState)}`, util.colors.green('✔︎'));
    }
    return es.readArray([]);
}
function readControlFile() {
    // A missing or malformed control file is treated as an empty configuration.
    try {
        const raw = fs.readFileSync(controlFilePath, 'utf8');
        return JSON.parse(raw);
    } catch (err) {
        return {};
    }
}
function writeControlFile(control) {
    // Make sure the parent directory chain exists before persisting the
    // control object as pretty-printed JSON.
    const controlDir = path.dirname(controlFilePath);
    mkdirp.sync(controlDir);
    const serialized = JSON.stringify(control, null, 2);
    fs.writeFileSync(controlFilePath, serialized);
}
function main() {
    // Synchronize every built-in extension according to the user's control
    // file, defaulting each extension to 'marketplace' and persisting that
    // default back so the file lists every known extension.
    // NOTE: fixed typo in the log message ("Syncronizing" -> "Synchronizing").
    util.log('Synchronizing built-in extensions...');
    util.log(`You can manage built-in extensions with the ${util.colors.cyan('--builtin')} flag`);
    const control = readControlFile();
    const streams = [];
    for (const extension of builtInExtensions) {
        const controlState = control[extension.name] || 'marketplace';
        control[extension.name] = controlState;
        streams.push(syncExtension(extension, controlState));
    }
    writeControlFile(control);
    // Exit non-zero as soon as any sync stream errors; exit cleanly once all
    // streams have drained.
    es.merge(streams)
        .on('error', err => {
            console.error(err);
            process.exit(1);
        })
        .on('end', () => {
            process.exit(0);
        });
}
main();

View File

@@ -217,7 +217,6 @@ function removeDuplicateTSBoilerplate(destFiles) {
{ start: /^var __metadata/, end: /^};$/ }, { start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ }, { start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ }, { start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ },
]; ];
destFiles.forEach(function (destFile) { destFiles.forEach(function (destFile) {
var SEEN_BOILERPLATE = []; var SEEN_BOILERPLATE = [];

View File

@@ -44,11 +44,11 @@ interface ILoaderPluginReqFunc {
export interface IEntryPoint { export interface IEntryPoint {
name: string; name: string;
include?: string[]; include: string[];
exclude?: string[]; exclude: string[];
prepend: string[]; prepend: string[];
append?: string[]; append: string[];
dest?: string; dest: string;
} }
interface IEntryPointMap { interface IEntryPointMap {
@@ -339,7 +339,6 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
{ start: /^var __metadata/, end: /^};$/ }, { start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ }, { start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ }, { start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ },
]; ];
destFiles.forEach((destFile) => { destFiles.forEach((destFile) => {

View File

@@ -22,12 +22,8 @@ var rootDir = path.join(__dirname, '../../src');
var options = require('../../src/tsconfig.json').compilerOptions; var options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false; options.verbose = false;
options.sourceMap = true; options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir; options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir); options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
function createCompile(build, emitError) { function createCompile(build, emitError) {
var opts = _.clone(options); var opts = _.clone(options);
opts.inlineSources = !!build; opts.inlineSources = !!build;
@@ -62,13 +58,9 @@ function compileTask(out, build) {
return function () { return function () {
var compile = createCompile(build, true); var compile = createCompile(build, true);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts')); var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return src return src
.pipe(compile()) .pipe(compile())
.pipe(dtsFilter)
.pipe(gulp.dest(out)) .pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, false)); .pipe(monacodtsTask(out, false));
}; };
} }
@@ -78,19 +70,54 @@ function watchTask(out, build) {
var compile = createCompile(build); var compile = createCompile(build);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts')); var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
var watchSrc = watch('src/**', { base: 'src' }); var watchSrc = watch('src/**', { base: 'src' });
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return watchSrc return watchSrc
.pipe(util.incremental(compile, src, true)) .pipe(util.incremental(compile, src, true))
.pipe(dtsFilter)
.pipe(gulp.dest(out)) .pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true)); .pipe(monacodtsTask(out, true));
}; };
} }
exports.watchTask = watchTask; exports.watchTask = watchTask;
function reloadTypeScriptNodeModule() {
var util = require('gulp-util');
function log(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log.apply(util, [util.colors.cyan('[memory watch dog]'), message].concat(rest));
}
function heapUsed() {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out, isWatch) { function monacodtsTask(out, isWatch) {
var basePath = path.resolve(process.cwd(), out);
var neededFiles = {}; var neededFiles = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) { monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath); filePath = path.normalize(filePath);
@@ -133,7 +160,7 @@ function monacodtsTask(out, isWatch) {
})); }));
} }
resultStream = es.through(function (data) { resultStream = es.through(function (data) {
var filePath = path.normalize(path.resolve(basePath, data.relative)); var filePath = path.normalize(data.path);
if (neededFiles[filePath]) { if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString()); setInputFile(filePath, data.contents.toString());
} }

View File

@@ -25,12 +25,8 @@ const rootDir = path.join(__dirname, '../../src');
const options = require('../../src/tsconfig.json').compilerOptions; const options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false; options.verbose = false;
options.sourceMap = true; options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir; options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir); options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
function createCompile(build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream { function createCompile(build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(options); const opts = _.clone(options);
@@ -53,6 +49,7 @@ function createCompile(build: boolean, emitError?: boolean): (token?: util.ICanc
.pipe(tsFilter) .pipe(tsFilter)
.pipe(util.loadSourcemaps()) .pipe(util.loadSourcemaps())
.pipe(ts(token)) .pipe(ts(token))
// .pipe(build ? reloadTypeScriptNodeModule() : es.through())
.pipe(noDeclarationsFilter) .pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through()) .pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore) .pipe(noDeclarationsFilter.restore)
@@ -78,14 +75,9 @@ export function compileTask(out: string, build: boolean): () => NodeJS.ReadWrite
gulp.src('node_modules/typescript/lib/lib.d.ts'), gulp.src('node_modules/typescript/lib/lib.d.ts'),
); );
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return src return src
.pipe(compile()) .pipe(compile())
.pipe(dtsFilter)
.pipe(gulp.dest(out)) .pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, false)); .pipe(monacodtsTask(out, false));
}; };
} }
@@ -101,21 +93,61 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
); );
const watchSrc = watch('src/**', { base: 'src' }); const watchSrc = watch('src/**', { base: 'src' });
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return watchSrc return watchSrc
.pipe(util.incremental(compile, src, true)) .pipe(util.incremental(compile, src, true))
.pipe(dtsFilter)
.pipe(gulp.dest(out)) .pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true)); .pipe(monacodtsTask(out, true));
}; };
} }
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream { function reloadTypeScriptNodeModule(): NodeJS.ReadWriteStream {
var util = require('gulp-util');
function log(message: any, ...rest: any[]): void {
util.log(util.colors.cyan('[memory watch dog]'), message, ...rest);
}
const basePath = path.resolve(process.cwd(), out); function heapUsed(): string {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
const neededFiles: { [file: string]: boolean; } = {}; const neededFiles: { [file: string]: boolean; } = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) { monacodts.getFilesToWatch(out).forEach(function (filePath) {
@@ -164,7 +196,7 @@ function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
} }
resultStream = es.through(function (data) { resultStream = es.through(function (data) {
const filePath = path.normalize(path.resolve(basePath, data.relative)); const filePath = path.normalize(data.path);
if (neededFiles[filePath]) { if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString()); setInputFile(filePath, data.contents.toString());
} }

View File

@@ -1,28 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const fs = require('fs');
const path = require('path');
const root = path.dirname(path.dirname(__dirname));
function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron
if (require.main === module) {
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `v${version}`;
process.exit(isUpToDate ? 0 : 1);
}

View File

@@ -20,7 +20,7 @@ var vsce = require("vsce");
var File = require("vinyl"); var File = require("vinyl");
function fromLocal(extensionPath) { function fromLocal(extensionPath) {
var result = es.through(); var result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn }) vsce.listFiles({ cwd: extensionPath })
.then(function (fileNames) { .then(function (fileNames) {
var files = fileNames var files = fileNames
.map(function (fileName) { return path.join(extensionPath, fileName); }) .map(function (fileName) { return path.join(extensionPath, fileName); })

View File

@@ -22,7 +22,7 @@ import * as File from 'vinyl';
export function fromLocal(extensionPath: string): Stream { export function fromLocal(extensionPath: string): Stream {
const result = es.through(); const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn }) vsce.listFiles({ cwd: extensionPath })
.then(fileNames => { .then(fileNames => {
const files = fileNames const files = fileNames
.map(fileName => path.join(extensionPath, fileName)) .map(fileName => path.join(extensionPath, fileName))

File diff suppressed because it is too large Load Diff

View File

@@ -46,6 +46,10 @@
"name": "vs/workbench/parts/execution", "name": "vs/workbench/parts/execution",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/parts/explorers",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/parts/extensions", "name": "vs/workbench/parts/extensions",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -67,11 +71,7 @@
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{ {
"name": "vs/workbench/parts/localizations", "name": "vs/workbench/parts/nps",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/logs",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{ {
@@ -130,42 +130,22 @@
"name": "vs/workbench/parts/update", "name": "vs/workbench/parts/update",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/parts/url",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/parts/watermark", "name": "vs/workbench/parts/watermark",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/parts/webview",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/parts/welcome", "name": "vs/workbench/parts/welcome",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/configuration", "name": "vs/workbench/services/configuration",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/configurationResolver",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/crashReporter", "name": "vs/workbench/services/crashReporter",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/dialogs",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/editor", "name": "vs/workbench/services/editor",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -174,10 +154,6 @@
"name": "vs/workbench/services/extensions", "name": "vs/workbench/services/extensions",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/jsonschemas",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/files", "name": "vs/workbench/services/files",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -186,6 +162,10 @@
"name": "vs/workbench/services/keybinding", "name": "vs/workbench/services/keybinding",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/message",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/mode", "name": "vs/workbench/services/mode",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -215,8 +195,8 @@
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{ {
"name": "vs/workbench/services/preferences", "name": "setup_messages",
"project": "vscode-preferences" "project": "vscode-workbench"
} }
] ]
} }

File diff suppressed because it is too large Load Diff

View File

@@ -79,7 +79,7 @@ function isImportNode(node) {
function fileFrom(file, contents, path) { function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; } if (path === void 0) { path = file.path; }
return new File({ return new File({
contents: Buffer.from(contents), contents: new Buffer(contents),
base: file.base, base: file.base,
cwd: file.cwd, cwd: file.cwd,
path: path path: path
@@ -150,16 +150,13 @@ function isImportNode(node) {
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; }) .filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
.map(function (d) { return d.importClause.namedBindings.name; }) .map(function (d) { return d.importClause.namedBindings.name; })
.concat(importEqualsDeclarations.map(function (d) { return d.name; })) .concat(importEqualsDeclarations.map(function (d) { return d.name; }))
// find read-only references to `nls`
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); }) .map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten() .flatten()
.filter(function (r) { return !r.isWriteAccess; }) .filter(function (r) { return !r.isWriteAccess; })
// find the deepest call expressions AST nodes that contain those references
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); }) .map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); }) .map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; }) .filter(function (n) { return !!n; })
.map(function (n) { return n; }) .map(function (n) { return n; })
// only `localize` calls
.filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; }); .filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
// `localize` named imports // `localize` named imports
var allLocalizeImportDeclarations = importDeclarations var allLocalizeImportDeclarations = importDeclarations

View File

@@ -131,7 +131,7 @@ module nls {
export function fileFrom(file: File, contents: string, path: string = file.path) { export function fileFrom(file: File, contents: string, path: string = file.path) {
return new File({ return new File({
contents: Buffer.from(contents), contents: new Buffer(contents),
base: file.base, base: file.base,
cwd: file.cwd, cwd: file.cwd,
path: path path: path

View File

@@ -59,7 +59,7 @@ function loader(bundledFileHeader, bundleLoader) {
this.emit('data', new VinylFile({ this.emit('data', new VinylFile({
path: 'fake', path: 'fake',
base: '', base: '',
contents: Buffer.from(bundledFileHeader) contents: new Buffer(bundledFileHeader)
})); }));
this.emit('data', data); this.emit('data', data);
} }
@@ -98,7 +98,7 @@ function toConcatStream(bundledFileHeader, sources, dest) {
return new VinylFile({ return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake', path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base, base: base,
contents: Buffer.from(source.contents) contents: new Buffer(source.contents)
}); });
}); });
return es.readArray(treatedSources) return es.readArray(treatedSources)
@@ -141,7 +141,7 @@ function optimizeTask(opts) {
bundleInfoArray.push(new VinylFile({ bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json', path: 'bundleInfo.json',
base: '.', base: '.',
contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t')) contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
})); }));
} }
es.readArray(bundleInfoArray).pipe(bundleInfoStream); es.readArray(bundleInfoArray).pipe(bundleInfoStream);
@@ -174,6 +174,7 @@ function optimizeTask(opts) {
}; };
} }
exports.optimizeTask = optimizeTask; exports.optimizeTask = optimizeTask;
;
/** /**
* Wrap around uglify and allow the preserveComments function * Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file. * to have a file "context" to include our copyright only once per file.
@@ -211,7 +212,8 @@ function uglifyWithCopyrights() {
return stream.pipe(minify({ return stream.pipe(minify({
output: { output: {
comments: preserveComments(f), comments: preserveComments(f),
max_line_len: 1024 // linux tfs build agent is crashing, does this help?§
max_line_len: 3200000
} }
})); }));
})); }));
@@ -236,3 +238,4 @@ function minifyTask(src, sourceMapBaseUrl) {
}; };
} }
exports.minifyTask = minifyTask; exports.minifyTask = minifyTask;
;

View File

@@ -31,7 +31,7 @@ function log(prefix: string, message: string): void {
} }
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
export function loaderConfig(emptyPaths?: string[]) { export function loaderConfig(emptyPaths: string[]) {
const result = { const result = {
paths: { paths: {
'vs': 'out-build/vs', 'vs': 'out-build/vs',
@@ -73,7 +73,7 @@ function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWr
this.emit('data', new VinylFile({ this.emit('data', new VinylFile({
path: 'fake', path: 'fake',
base: '', base: '',
contents: Buffer.from(bundledFileHeader) contents: new Buffer(bundledFileHeader)
})); }));
this.emit('data', data); this.emit('data', data);
} else { } else {
@@ -117,7 +117,7 @@ function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest
return new VinylFile({ return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake', path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base, base: base,
contents: Buffer.from(source.contents) contents: new Buffer(source.contents)
}); });
}); });
@@ -165,7 +165,7 @@ export interface IOptimizeTaskOpts {
/** /**
* (languages to process) * (languages to process)
*/ */
languages: i18n.Language[]; languages: string[];
} }
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream { export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
const entryPoints = opts.entryPoints; const entryPoints = opts.entryPoints;
@@ -201,7 +201,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundleInfoArray.push(new VinylFile({ bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json', path: 'bundleInfo.json',
base: '.', base: '.',
contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t')) contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
})); }));
} }
es.readArray(bundleInfoArray).pipe(bundleInfoStream); es.readArray(bundleInfoArray).pipe(bundleInfoStream);
@@ -241,7 +241,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
})) }))
.pipe(gulp.dest(out)); .pipe(gulp.dest(out));
}; };
} };
declare class FileWithCopyright extends VinylFile { declare class FileWithCopyright extends VinylFile {
public __hasOurCopyright: boolean; public __hasOurCopyright: boolean;
@@ -287,7 +287,8 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
return stream.pipe(minify({ return stream.pipe(minify({
output: { output: {
comments: preserveComments(<FileWithCopyright>f), comments: preserveComments(<FileWithCopyright>f),
max_line_len: 1024 // linux tfs build agent is crashing, does this help?§
max_line_len: 3200000
} }
})); }));
})); }));
@@ -295,7 +296,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
return es.duplex(input, output); return es.duplex(input, output);
} }
export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void { export function minifyTask(src: string, sourceMapBaseUrl: string): (cb: any) => void {
const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`); const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);
return cb => { return cb => {
@@ -326,4 +327,4 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
cb(err); cb(err);
}); });
}; };
} };

View File

@@ -34,13 +34,7 @@ catch (err) {
} }
function log() { function log() {
var errors = _.flatten(allErrors); var errors = _.flatten(allErrors);
var seen = new Set(); errors.map(function (err) { return util.log(util.colors.red('Error') + ": " + err); });
errors.map(function (err) {
if (!seen.has(err)) {
seen.add(err);
util.log(util.colors.red('Error') + ": " + err);
}
});
var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/; var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
var messages = errors var messages = errors
.map(function (err) { return regex.exec(err); }) .map(function (err) { return regex.exec(err); })
@@ -73,13 +67,8 @@ function createReporter() {
return es.through(null, function () { return es.through(null, function () {
onEnd(); onEnd();
if (emitError && errors.length > 0) { if (emitError && errors.length > 0) {
errors.__logged__ = true; log();
if (!errors.__logged__) { this.emit('error');
log();
}
var err = new Error("Found " + errors.length + " errors");
err.__reporter__ = true;
this.emit('error', err);
} }
else { else {
this.emit('end'); this.emit('end');
@@ -91,3 +80,4 @@ function createReporter() {
return ReportFunc; return ReportFunc;
} }
exports.createReporter = createReporter; exports.createReporter = createReporter;
;

View File

@@ -11,7 +11,7 @@ import * as util from 'gulp-util';
import * as fs from 'fs'; import * as fs from 'fs';
import * as path from 'path'; import * as path from 'path';
const allErrors: string[][] = []; const allErrors: Error[][] = [];
let startTime: number = null; let startTime: number = null;
let count = 0; let count = 0;
@@ -42,14 +42,7 @@ try {
function log(): void { function log(): void {
const errors = _.flatten(allErrors); const errors = _.flatten(allErrors);
const seen = new Set<string>(); errors.map(err => util.log(`${util.colors.red('Error')}: ${err}`));
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
util.log(`${util.colors.red('Error')}: ${err}`);
}
});
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/; const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors const messages = errors
@@ -68,17 +61,17 @@ function log(): void {
} }
export interface IReporter { export interface IReporter {
(err: string): void; (err: Error): void;
hasErrors(): boolean; hasErrors(): boolean;
end(emitError: boolean): NodeJS.ReadWriteStream; end(emitError: boolean): NodeJS.ReadWriteStream;
} }
export function createReporter(): IReporter { export function createReporter(): IReporter {
const errors: string[] = []; const errors: Error[] = [];
allErrors.push(errors); allErrors.push(errors);
class ReportFunc { class ReportFunc {
constructor(err: string) { constructor(err: Error) {
errors.push(err); errors.push(err);
} }
@@ -94,15 +87,8 @@ export function createReporter(): IReporter {
onEnd(); onEnd();
if (emitError && errors.length > 0) { if (emitError && errors.length > 0) {
(errors as any).__logged__ = true; log();
this.emit('error');
if (!(errors as any).__logged__) {
log();
}
const err = new Error(`Found ${errors.length} errors`);
(err as any).__reporter__ = true;
this.emit('error', err);
} else { } else {
this.emit('end'); this.emit('end');
} }
@@ -111,4 +97,4 @@ export function createReporter(): IReporter {
} }
return <IReporter><any>ReportFunc; return <IReporter><any>ReportFunc;
} };

View File

@@ -1,265 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var fs = require("fs");
var path = require("path");
var REPO_ROOT = path.join(__dirname, '../../');
var SRC_DIR = path.join(REPO_ROOT, 'src');
var OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
function createESMSourcesAndResources(options) {
var OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
var OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
var in_queue = Object.create(null);
var queue = [];
var enqueue = function (module) {
if (in_queue[module]) {
return;
}
in_queue[module] = true;
queue.push(module);
};
var seenDir = {};
var createDirectoryRecursive = function (dir) {
if (seenDir[dir]) {
return;
}
var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (lastSlash !== -1) {
createDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
try {
fs.mkdirSync(dir);
}
catch (err) { }
};
seenDir[REPO_ROOT] = true;
var toggleComments = function (fileContents) {
var lines = fileContents.split(/\r\n|\r|\n/);
var mode = 0;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
return lines.join('\n');
};
var write = function (filePath, contents) {
var absoluteFilePath;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
}
else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString());
}
fs.writeFileSync(absoluteFilePath, contents);
};
options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
while (queue.length > 0) {
var module_1 = queue.shift();
if (transportCSS(options, module_1, enqueue, write)) {
continue;
}
if (transportResource(options, module_1, enqueue, write)) {
continue;
}
if (transportDTS(options, module_1, enqueue, write)) {
continue;
}
var filename = void 0;
if (options.redirects[module_1]) {
filename = path.join(SRC_DIR, options.redirects[module_1] + '.ts');
}
else {
filename = path.join(SRC_DIR, module_1 + '.ts');
}
var fileContents = fs.readFileSync(filename).toString();
var info = ts.preProcessFile(fileContents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFilename = info.importedFiles[i].fileName;
var pos = info.importedFiles[i].pos;
var end = info.importedFiles[i].end;
var importedFilepath = void 0;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
}
else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module_1), importedFilepath);
}
enqueue(importedFilepath);
var relativePath = void 0;
if (importedFilepath === path.dirname(module_1)) {
relativePath = '../' + path.basename(path.dirname(module_1));
}
else if (importedFilepath === path.dirname(path.dirname(module_1))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module_1)));
}
else {
relativePath = path.relative(path.dirname(module_1), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1));
}
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return "import * as " + m1 + " from " + m2 + ";";
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module_1 + '.ts', fileContents);
}
var esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": []
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
var monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
}
exports.createESMSourcesAndResources = createESMSourcesAndResources;
/**
 * Transports a css module: reads it from SRC_DIR, inlines referenced images
 * below the size limit as data URIs, and writes the rewritten file out.
 * Images above the limit are enqueued so they are copied as plain resources.
 * Returns false when the module is not a css file.
 */
function transportCSS(options, module, enqueue, write) {
    if (!/\.css/.test(module)) {
        return false;
    }
    var filename = path.join(SRC_DIR, module);
    var fileContents = fs.readFileSync(filename).toString();
    var inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
    var inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
    var newContents = _rewriteOrInlineUrls(filename, fileContents, inlineResources === 'base64', inlineResourcesLimit);
    write(module, newContents);
    return true;
    // NOTE(review): originalFileFSPath is unused; image paths are resolved from `module` instead.
    function _rewriteOrInlineUrls(originalFileFSPath, contents, forceBase64, inlineByteLimit) {
        return _replaceURL(contents, function (url) {
            var imagePath = path.join(path.dirname(module), url);
            var fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
            if (fileContents.length < inlineByteLimit) {
                // Small enough to inline; pick the MIME type from the extension.
                var MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
                var DATA = ';base64,' + fileContents.toString('base64');
                if (!forceBase64 && /\.svg$/.test(url)) {
                    // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
                    var newText = fileContents.toString()
                        .replace(/"/g, '\'')
                        .replace(/</g, '%3C')
                        .replace(/>/g, '%3E')
                        .replace(/&/g, '%26')
                        .replace(/#/g, '%23')
                        .replace(/\s+/g, ' ');
                    var encodedData = ',' + newText;
                    if (encodedData.length < DATA.length) {
                        // URL-encoding beat base64; keep the smaller form.
                        DATA = encodedData;
                    }
                }
                return '"data:' + MIME + DATA + '"';
            }
            // Too large to inline: transport the image as a standalone resource.
            enqueue(imagePath);
            return url;
        });
    }
    // Runs `replacer` over the argument of every url(...) occurrence, after
    // stripping surrounding quotes/whitespace; data:/http(s): URLs are skipped.
    function _replaceURL(contents, replacer) {
        // Use ")" as the terminator as quotes are oftentimes not used at all
        return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, function (_) {
            var matches = [];
            for (var _i = 1; _i < arguments.length; _i++) {
                matches[_i - 1] = arguments[_i];
            }
            var url = matches[0];
            // Eliminate starting quotes (the initial whitespace is not captured)
            if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
                url = url.substring(1);
            }
            // The ending whitespace is captured
            while (url.length > 0 && (url.charAt(url.length - 1) === ' ' || url.charAt(url.length - 1) === '\t')) {
                url = url.substring(0, url.length - 1);
            }
            // Eliminate ending quotes
            if (url.charAt(url.length - 1) === '"' || url.charAt(url.length - 1) === '\'') {
                url = url.substring(0, url.length - 1);
            }
            if (!_startsWith(url, 'data:') && !_startsWith(url, 'http://') && !_startsWith(url, 'https://')) {
                url = replacer(url);
            }
            return 'url(' + url + ')';
        });
    }
    // ES5-safe String#startsWith.
    function _startsWith(haystack, needle) {
        return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
    }
}
/**
 * Copies an .svg resource verbatim from the source tree to the output.
 * Returns false when the module is not an svg resource.
 */
function transportResource(options, module, enqueue, write) {
    var isSvg = module.indexOf('.svg') !== -1;
    if (!isSvg) {
        return false;
    }
    var sourcePath = path.join(SRC_DIR, module);
    write(module, fs.readFileSync(sourcePath));
    return true;
}
/**
 * Transports a hand-written declaration module: copies <module>.d.ts plus its
 * companion <module>.js implementation straight to the output.
 *
 * Returns false when the module should be compiled from a .ts source instead:
 * either no <module>.d.ts exists, or the module is redirected to a real .ts file.
 *
 * Fix: removed the dead `var filename;` declaration (never assigned or read).
 */
function transportDTS(options, module, enqueue, write) {
    if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
        // The redirect points at a real TypeScript source; let the .ts pipeline handle it.
        return false;
    }
    if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
        return false;
    }
    write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
    // The runtime .js comes from the redirect target when one is configured.
    if (options.redirects[module]) {
        write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
    }
    else {
        write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
    }
    return true;
}

View File

@@ -1,305 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as fs from 'fs';
import * as path from 'path';
const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
const OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
/**
 * Options for createESMSourcesAndResources.
 */
export interface IOptions {
	// Repo-relative, extensionless module ids used as roots of the transitive walk.
	entryPoints: string[];
	// Repo-relative folder that receives the rewritten .ts sources.
	outFolder: string;
	// Repo-relative folder that receives non-.ts outputs (css, svg, js, d.ts).
	outResourcesFolder: string;
	// Maps a module id to an alternative module whose source is used instead.
	redirects: { [module: string]: string; };
}
/**
 * Walks the module graph starting at `options.entryPoints` and produces an
 * ESM-friendly copy of the sources: .ts files get their imports rewritten to
 * relative paths, css/svg/d.ts modules are transported as resources, and a
 * tsconfig.json plus vs/monaco.d.ts are emitted for the result.
 */
export function createESMSourcesAndResources(options: IOptions): void {
	const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
	const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
	let in_queue: { [module: string]: boolean; } = Object.create(null);
	let queue: string[] = [];
	// Adds a module to the work queue at most once.
	const enqueue = (module: string) => {
		if (in_queue[module]) {
			return;
		}
		in_queue[module] = true;
		queue.push(module);
	};
	const seenDir: { [key: string]: boolean; } = {};
	// mkdir -p, memoized via seenDir; handles both '/' and '\' separators.
	const createDirectoryRecursive = (dir: string) => {
		if (seenDir[dir]) {
			return;
		}
		let lastSlash = dir.lastIndexOf('/');
		if (lastSlash === -1) {
			lastSlash = dir.lastIndexOf('\\');
		}
		if (lastSlash !== -1) {
			createDirectoryRecursive(dir.substring(0, lastSlash));
		}
		seenDir[dir] = true;
		try { fs.mkdirSync(dir); } catch (err) { }
	};
	seenDir[REPO_ROOT] = true;
	// Flips ESM marker blocks: comment-begin/end lines get commented out,
	// uncomment-begin/end lines get uncommented; marker lines stay unchanged.
	const toggleComments = (fileContents: string) => {
		let lines = fileContents.split(/\r\n|\r|\n/);
		// mode: 0 = outside any block, 1 = commenting, 2 = uncommenting.
		let mode = 0;
		for (let i = 0; i < lines.length; i++) {
			const line = lines[i];
			if (mode === 0) {
				if (/\/\/ ESM-comment-begin/.test(line)) {
					mode = 1;
					continue;
				}
				if (/\/\/ ESM-uncomment-begin/.test(line)) {
					mode = 2;
					continue;
				}
				continue;
			}
			if (mode === 1) {
				if (/\/\/ ESM-comment-end/.test(line)) {
					mode = 0;
					continue;
				}
				lines[i] = '// ' + line;
				continue;
			}
			if (mode === 2) {
				if (/\/\/ ESM-uncomment-end/.test(line)) {
					mode = 0;
					continue;
				}
				lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
					return indent;
				});
			}
		}
		return lines.join('\n');
	};
	// Writes one transported file; .ts goes to OUT_FOLDER, everything else to
	// OUT_RESOURCES_FOLDER. .ts/.js contents have their marker blocks toggled.
	const write = (filePath: string, contents: string | Buffer) => {
		let absoluteFilePath: string;
		if (/\.ts$/.test(filePath)) {
			absoluteFilePath = path.join(OUT_FOLDER, filePath);
		} else {
			absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
		}
		createDirectoryRecursive(path.dirname(absoluteFilePath));
		if (/(\.ts$)|(\.js$)/.test(filePath)) {
			contents = toggleComments(contents.toString());
		}
		fs.writeFileSync(absoluteFilePath, contents);
	};
	options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
	while (queue.length > 0) {
		const module = queue.shift();
		// css / svg / d.ts modules are transported as resources, not compiled.
		if (transportCSS(options, module, enqueue, write)) {
			continue;
		}
		if (transportResource(options, module, enqueue, write)) {
			continue;
		}
		if (transportDTS(options, module, enqueue, write)) {
			continue;
		}
		// Resolve the .ts source, honoring configured redirects.
		let filename: string;
		if (options.redirects[module]) {
			filename = path.join(SRC_DIR, options.redirects[module] + '.ts');
		} else {
			filename = path.join(SRC_DIR, module + '.ts');
		}
		let fileContents = fs.readFileSync(filename).toString();
		const info = ts.preProcessFile(fileContents);
		// Rewrite imports back-to-front so earlier string offsets stay valid.
		for (let i = info.importedFiles.length - 1; i >= 0; i--) {
			const importedFilename = info.importedFiles[i].fileName;
			const pos = info.importedFiles[i].pos;
			const end = info.importedFiles[i].end;
			let importedFilepath: string;
			// "vs/css!foo" AMD plugin imports become plain "foo.css" paths.
			if (/^vs\/css!/.test(importedFilename)) {
				importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
			} else {
				importedFilepath = importedFilename;
			}
			if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
				importedFilepath = path.join(path.dirname(module), importedFilepath);
			}
			// Every discovered dependency is transported too.
			enqueue(importedFilepath);
			let relativePath: string;
			if (importedFilepath === path.dirname(module)) {
				relativePath = '../' + path.basename(path.dirname(module));
			} else if (importedFilepath === path.dirname(path.dirname(module))) {
				relativePath = '../../' + path.basename(path.dirname(path.dirname(module)));
			} else {
				relativePath = path.relative(path.dirname(module), importedFilepath);
			}
			if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
				relativePath = './' + relativePath;
			}
			// Splice between pos+1 and end+1, preserving the characters at
			// pos/end themselves (presumably the surrounding quotes).
			fileContents = (
				fileContents.substring(0, pos + 1)
				+ relativePath
				+ fileContents.substring(end + 1)
			);
		}
		// Convert legacy "import x = require('y');" to ESM namespace imports.
		fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
			return `import * as ${m1} from ${m2};`;
		});
		fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
		write(module + '.ts', fileContents);
	}
	// Emit a tsconfig.json next to OUT_FOLDER for compiling the transported sources.
	const esm_opts = {
		"compilerOptions": {
			"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
			"rootDir": "src",
			"module": "es6",
			"target": "es5",
			"experimentalDecorators": true,
			"lib": [
				"dom",
				"es5",
				"es2015.collection",
				"es2015.promise"
			],
			"types": [
			]
		}
	};
	fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
	// Copy the public API declaration file verbatim.
	const monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
	fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
}
/**
 * Transports a css module: reads it from SRC_DIR, inlines referenced images
 * below the size limit as data URIs, and writes the rewritten file out.
 * Images above the limit are enqueued so they are copied as plain resources.
 * Returns false when the module is not a css file.
 */
function transportCSS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
	if (!/\.css/.test(module)) {
		return false;
	}
	const filename = path.join(SRC_DIR, module);
	const fileContents = fs.readFileSync(filename).toString();
	const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
	const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
	const newContents = _rewriteOrInlineUrls(filename, fileContents, inlineResources === 'base64', inlineResourcesLimit);
	write(module, newContents);
	return true;
	// NOTE(review): originalFileFSPath is unused; image paths are resolved from `module` instead.
	function _rewriteOrInlineUrls(originalFileFSPath: string, contents: string, forceBase64: boolean, inlineByteLimit: number): string {
		return _replaceURL(contents, (url) => {
			let imagePath = path.join(path.dirname(module), url);
			let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
			if (fileContents.length < inlineByteLimit) {
				// Small enough to inline; pick the MIME type from the extension.
				const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
				let DATA = ';base64,' + fileContents.toString('base64');
				if (!forceBase64 && /\.svg$/.test(url)) {
					// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
					let newText = fileContents.toString()
						.replace(/"/g, '\'')
						.replace(/</g, '%3C')
						.replace(/>/g, '%3E')
						.replace(/&/g, '%26')
						.replace(/#/g, '%23')
						.replace(/\s+/g, ' ');
					let encodedData = ',' + newText;
					if (encodedData.length < DATA.length) {
						// URL-encoding beat base64; keep the smaller form.
						DATA = encodedData;
					}
				}
				return '"data:' + MIME + DATA + '"';
			}
			// Too large to inline: transport the image as a standalone resource.
			enqueue(imagePath);
			return url;
		});
	}
	// Runs `replacer` over the argument of every url(...) occurrence, after
	// stripping surrounding quotes/whitespace; data:/http(s): URLs are skipped.
	function _replaceURL(contents: string, replacer: (url: string) => string): string {
		// Use ")" as the terminator as quotes are oftentimes not used at all
		return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_: string, ...matches: string[]) => {
			var url = matches[0];
			// Eliminate starting quotes (the initial whitespace is not captured)
			if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
				url = url.substring(1);
			}
			// The ending whitespace is captured
			while (url.length > 0 && (url.charAt(url.length - 1) === ' ' || url.charAt(url.length - 1) === '\t')) {
				url = url.substring(0, url.length - 1);
			}
			// Eliminate ending quotes
			if (url.charAt(url.length - 1) === '"' || url.charAt(url.length - 1) === '\'') {
				url = url.substring(0, url.length - 1);
			}
			if (!_startsWith(url, 'data:') && !_startsWith(url, 'http://') && !_startsWith(url, 'https://')) {
				url = replacer(url);
			}
			return 'url(' + url + ')';
		});
	}
	// ES5-safe String#startsWith.
	function _startsWith(haystack: string, needle: string): boolean {
		return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
	}
}
/**
 * Copies an .svg resource verbatim from the source tree to the output.
 * Returns false when the module is not an svg resource.
 */
function transportResource(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
	const isSvg = module.indexOf('.svg') !== -1;
	if (!isSvg) {
		return false;
	}
	const sourcePath = path.join(SRC_DIR, module);
	write(module, fs.readFileSync(sourcePath));
	return true;
}
/**
 * Transports a hand-written declaration module: copies <module>.d.ts plus its
 * companion <module>.js implementation straight to the output.
 *
 * Returns false when the module should be compiled from a .ts source instead:
 * either no <module>.d.ts exists, or the module is redirected to a real .ts file.
 *
 * Fix: removed the dead `let filename: string;` declaration (never assigned or read).
 */
function transportDTS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
	if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
		// The redirect points at a real TypeScript source; let the .ts pipeline handle it.
		return false;
	}
	if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
		return false;
	}
	write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
	// The runtime .js comes from the redirect target when one is configured.
	if (options.redirects[module]) {
		write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
	} else {
		write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
	}
	return true;
}

View File

@@ -1,56 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var assert = require("assert");
var util = require("../util");
// Builds a stub tag-existence predicate backed by a fixed list of tag names.
function getMockTagExists(tags) {
    return function (candidate) {
        return tags.indexOf(candidate) !== -1;
    };
}
// Tests for util.getPreviousVersion: given a version string and a tag-existence
// predicate, it returns the previous released version, or throws when the
// expected git tag does not exist.
suite('util tests', function () {
    // Patch release: previous version is simply the next-lower patch tag.
    test('getPreviousVersion - patch', function () {
        assert.equal(util.getPreviousVersion('1.2.3', getMockTagExists(['1.2.2', '1.2.1', '1.2.0', '1.1.0'])), '1.2.2');
    });
    // Missing tag for the previous patch must raise.
    test('getPreviousVersion - patch invalid', function () {
        try {
            util.getPreviousVersion('1.2.2', getMockTagExists(['1.2.0', '1.1.0']));
        }
        catch (e) {
            // expected
            return;
        }
        throw new Error('Expected an exception');
    });
    // Minor release: previous version is the highest patch of the prior minor.
    test('getPreviousVersion - minor', function () {
        assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.2', '1.1.3'])), '1.1.3');
        assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.0.0'])), '1.1.0');
    });
    // A gap in patch tags stops the scan at the last contiguous tag.
    test('getPreviousVersion - minor gap', function () {
        assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.3'])), '1.1.1');
    });
    // Missing base tag for the prior minor must raise.
    test('getPreviousVersion - minor invalid', function () {
        try {
            util.getPreviousVersion('1.2.0', getMockTagExists(['1.0.0']));
        }
        catch (e) {
            // expected
            return;
        }
        throw new Error('Expected an exception');
    });
    // Major release: previous version is the highest minor.patch of the prior major.
    test('getPreviousVersion - major', function () {
        assert.equal(util.getPreviousVersion('2.0.0', getMockTagExists(['1.0.0', '1.1.0', '1.2.0', '1.2.1', '1.2.2'])), '1.2.2');
    });
    // Missing base tag for the prior major must raise.
    test('getPreviousVersion - major invalid', function () {
        try {
            util.getPreviousVersion('3.0.0', getMockTagExists(['1.0.0']));
        }
        catch (e) {
            // expected
            return;
        }
        throw new Error('Expected an exception');
    });
});

View File

@@ -1,79 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import assert = require('assert');
import util = require('../util');
// Builds a stub tag-existence predicate backed by a fixed list of tag names.
function getMockTagExists(tags: string[]) {
	return (candidate: string) => tags.indexOf(candidate) !== -1;
}
// Tests for util.getPreviousVersion: given a version string and a tag-existence
// predicate, it returns the previous released version, or throws when the
// expected git tag does not exist.
suite('util tests', () => {
	// Patch release: previous version is simply the next-lower patch tag.
	test('getPreviousVersion - patch', () => {
		assert.equal(
			util.getPreviousVersion('1.2.3', getMockTagExists(['1.2.2', '1.2.1', '1.2.0', '1.1.0'])),
			'1.2.2'
		);
	});
	// Missing tag for the previous patch must raise.
	test('getPreviousVersion - patch invalid', () => {
		try {
			util.getPreviousVersion('1.2.2', getMockTagExists(['1.2.0', '1.1.0']));
		} catch (e) {
			// expected
			return;
		}
		throw new Error('Expected an exception');
	});
	// Minor release: previous version is the highest patch of the prior minor.
	test('getPreviousVersion - minor', () => {
		assert.equal(
			util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.2', '1.1.3'])),
			'1.1.3'
		);
		assert.equal(
			util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.0.0'])),
			'1.1.0'
		);
	});
	// A gap in patch tags stops the scan at the last contiguous tag.
	test('getPreviousVersion - minor gap', () => {
		assert.equal(
			util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.3'])),
			'1.1.1'
		);
	});
	// Missing base tag for the prior minor must raise.
	test('getPreviousVersion - minor invalid', () => {
		try {
			util.getPreviousVersion('1.2.0', getMockTagExists(['1.0.0']));
		} catch (e) {
			// expected
			return;
		}
		throw new Error('Expected an exception');
	});
	// Major release: previous version is the highest minor.patch of the prior major.
	test('getPreviousVersion - major', () => {
		assert.equal(
			util.getPreviousVersion('2.0.0', getMockTagExists(['1.0.0', '1.1.0', '1.2.0', '1.2.1', '1.2.2'])),
			'1.2.2'
		);
	});
	// Missing base tag for the prior major must raise.
	test('getPreviousVersion - major invalid', () => {
		try {
			util.getPreviousVersion('3.0.0', getMockTagExists(['1.0.0']));
		} catch (e) {
			// expected
			return;
		}
		throw new Error('Expected an exception');
	});
});

View File

@@ -88,11 +88,10 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
var info = this.findDescribingParent(node); var info = this.findDescribingParent(node);
// Ignore strings in import and export nodes. // Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) { if (info && info.isImport && doubleQuoted) {
var fix = [ this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''), this.createReplacement(node.getStart(), 1, '\''),
Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''), this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
]; ]));
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, fix);
return; return;
} }
var callInfo = info ? info.callInfo : null; var callInfo = info ? info.callInfo : null;
@@ -102,9 +101,8 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
} }
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) { if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
var s = node.getText(); var s = node.getText();
var fix = [ var replacement = new Lint.Replacement(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")");
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")"), var fix = new Lint.Fix('Unexternalitzed string', [replacement]);
];
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix));
return; return;
} }
@@ -136,24 +134,16 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
} }
} }
} }
var messageArg = callInfo.callExpression.arguments[this.messageIndex]; var messageArg = callInfo.argIndex === this.messageIndex
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) { ? callInfo.callExpression.arguments[this.messageIndex]
: null;
if (messageArg && messageArg !== node) {
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal.")); this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal."));
return; return;
} }
}; };
NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) { NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) {
var text = keyNode.getText(); var text = keyNode.getText();
// We have an empty key
if (text.match(/(['"]) *\1/)) {
if (messageNode) {
this.addFailureAtNode(keyNode, "Key is empty for message: " + messageNode.getText());
}
else {
this.addFailureAtNode(keyNode, "Key is empty.");
}
return;
}
var occurrences = this.usedKeys[text]; var occurrences = this.usedKeys[text];
if (!occurrences) { if (!occurrences) {
occurrences = []; occurrences = [];
@@ -186,7 +176,7 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
node = parent; node = parent;
} }
}; };
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.'; NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double qoutes for imports.';
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"'; NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
return NoUnexternalizedStringsRuleWalker; return NoUnexternalizedStringsRuleWalker;
}(Lint.RuleWalker)); }(Lint.RuleWalker));

View File

@@ -45,7 +45,7 @@ interface KeyMessagePair {
class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker { class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
private static ImportFailureMessage = 'Do not use double quotes for imports.'; private static ImportFailureMessage = 'Do not use double qoutes for imports.';
private static DOUBLE_QUOTE: string = '"'; private static DOUBLE_QUOTE: string = '"';
@@ -104,14 +104,13 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
let info = this.findDescribingParent(node); let info = this.findDescribingParent(node);
// Ignore strings in import and export nodes. // Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) { if (info && info.isImport && doubleQuoted) {
const fix = [
Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''),
Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''),
];
this.addFailureAtNode( this.addFailureAtNode(
node, node,
NoUnexternalizedStringsRuleWalker.ImportFailureMessage, NoUnexternalizedStringsRuleWalker.ImportFailureMessage,
fix new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
this.createReplacement(node.getStart(), 1, '\''),
this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
])
); );
return; return;
} }
@@ -123,9 +122,8 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) { if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
const s = node.getText(); const s = node.getText();
const fix = [ const replacement = new Lint.Replacement(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`);
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`), const fix = new Lint.Fix('Unexternalitzed string', [replacement]);
];
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Unexternalized string found: ${node.getText()}`, fix)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Unexternalized string found: ${node.getText()}`, fix));
return; return;
} }
@@ -156,10 +154,10 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
} }
} }
} }
let messageArg: ts.Expression = callInfo.argIndex === this.messageIndex
const messageArg = callInfo.callExpression.arguments[this.messageIndex]; ? callInfo.callExpression.arguments[this.messageIndex]
: null;
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) { if (messageArg && messageArg !== node) {
this.addFailure(this.createFailure( this.addFailure(this.createFailure(
messageArg.getStart(), messageArg.getWidth(), messageArg.getStart(), messageArg.getWidth(),
`Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`)); `Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`));
@@ -169,15 +167,6 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node) { private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node) {
let text = keyNode.getText(); let text = keyNode.getText();
// We have an empty key
if (text.match(/(['"]) *\1/)) {
if (messageNode) {
this.addFailureAtNode(keyNode, `Key is empty for message: ${messageNode.getText()}`);
} else {
this.addFailureAtNode(keyNode, `Key is empty.`);
}
return;
}
let occurrences: KeyMessagePair[] = this.usedKeys[text]; let occurrences: KeyMessagePair[] = this.usedKeys[text];
if (!occurrences) { if (!occurrences) {
occurrences = []; occurrences = [];

View File

@@ -71,7 +71,7 @@ var TranslationRemindRuleWalker = /** @class */ (function (_super) {
} }
}); });
if (!resourceDefined) { if (!resourceDefined) {
this.addFailureAtNode(node, "Please add '" + resource + "' to ./build/lib/i18n.resources.json file to use translations here."); this.addFailureAtNode(node, "Please add '" + resource + "' to ./builds/lib/i18n.resources.json file to use translations here.");
} }
}; };
TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls'; TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls';

View File

@@ -67,7 +67,7 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
}); });
if (!resourceDefined) { if (!resourceDefined) {
this.addFailureAtNode(node, `Please add '${resource}' to ./build/lib/i18n.resources.json file to use translations here.`); this.addFailureAtNode(node, `Please add '${resource}' to ./builds/lib/i18n.resources.json file to use translations here.`);
} }
} }
} }

View File

@@ -1,14 +1,7 @@
declare module "event-stream" { declare module "event-stream" {
import { Stream } from 'stream'; import { Stream } from 'stream';
import { ThroughStream as _ThroughStream} from 'through'; import { ThroughStream } from 'through';
import { MapStream } from 'map-stream'; import { MapStream } from 'map-stream';
import * as File from 'vinyl';
export interface ThroughStream extends _ThroughStream {
queue(data: File | null);
push(data: File | null);
paused: boolean;
}
function merge(streams: Stream[]): ThroughStream; function merge(streams: Stream[]): ThroughStream;
function merge(...streams: Stream[]): ThroughStream; function merge(...streams: Stream[]): ThroughStream;

View File

@@ -14,7 +14,6 @@ var fs = require("fs");
var _rimraf = require("rimraf"); var _rimraf = require("rimraf");
var git = require("./git"); var git = require("./git");
var VinylFile = require("vinyl"); var VinylFile = require("vinyl");
var cp = require("child_process");
var NoCancellationToken = { isCancellationRequested: function () { return false; } }; var NoCancellationToken = { isCancellationRequested: function () { return false; } };
function incremental(streamProvider, initial, supportsCancellation) { function incremental(streamProvider, initial, supportsCancellation) {
var input = es.through(); var input = es.through();
@@ -144,7 +143,7 @@ function loadSourcemaps() {
cb(null, f); cb(null, f);
return; return;
} }
f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8'); f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', function (err, contents) { fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', function (err, contents) {
if (err) { if (err) {
return cb(err); return cb(err);
@@ -161,7 +160,7 @@ function stripSourceMappingURL() {
var output = input var output = input
.pipe(es.mapSync(function (f) { .pipe(es.mapSync(function (f) {
var contents = f.contents.toString('utf8'); var contents = f.contents.toString('utf8');
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8'); f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f; return f;
})); }));
return es.duplex(input, output); return es.duplex(input, output);
@@ -174,6 +173,7 @@ function rimraf(dir) {
if (!err) { if (!err) {
return cb(); return cb();
} }
;
if (err.code === 'ENOTEMPTY' && ++retries < 5) { if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(function () { return retry(cb); }, 10); return setTimeout(function () { return retry(cb); }, 10);
} }
@@ -211,68 +211,3 @@ function filter(fn) {
return result; return result;
} }
exports.filter = filter; exports.filter = filter;
function tagExists(tagName) {
try {
cp.execSync("git rev-parse " + tagName, { stdio: 'ignore' });
return true;
}
catch (e) {
return false;
}
}
/**
* Returns the version previous to the given version. Throws if a git tag for that version doesn't exist.
* Given 1.17.2, return 1.17.1
* 1.18.0 => 1.17.2. (or the highest 1.17.x)
* 2.0.0 => 1.18.0 (or the highest 1.x)
*/
function getPreviousVersion(versionStr, _tagExists) {
if (_tagExists === void 0) { _tagExists = tagExists; }
function getLatestTagFromBase(semverArr, componentToTest) {
var baseVersion = semverArr.join('.');
if (!_tagExists(baseVersion)) {
throw new Error('Failed to find git tag for base version, ' + baseVersion);
}
var goodTag;
do {
goodTag = semverArr.join('.');
semverArr[componentToTest]++;
} while (_tagExists(semverArr.join('.')));
return goodTag;
}
var semverArr = versionStringToNumberArray(versionStr);
if (semverArr[2] > 0) {
semverArr[2]--;
var previous = semverArr.join('.');
if (!_tagExists(previous)) {
throw new Error('Failed to find git tag for previous version, ' + previous);
}
return previous;
}
else if (semverArr[1] > 0) {
semverArr[1]--;
return getLatestTagFromBase(semverArr, 2);
}
else {
semverArr[0]--;
// Find 1.x.0 for latest x
var latestMinorVersion = getLatestTagFromBase(semverArr, 1);
// Find 1.x.y for latest y
return getLatestTagFromBase(versionStringToNumberArray(latestMinorVersion), 2);
}
}
exports.getPreviousVersion = getPreviousVersion;
function versionStringToNumberArray(versionStr) {
return versionStr
.split('.')
.map(function (s) { return parseInt(s); });
}
function versionStringToNumber(versionStr) {
var semverRegex = /(\d+)\.(\d+)\.(\d+)/;
var match = versionStr.match(semverRegex);
if (!match) {
throw new Error('Version string is not properly formatted: ' + versionStr);
}
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
exports.versionStringToNumber = versionStringToNumber;

View File

@@ -17,7 +17,6 @@ import * as git from './git';
import * as VinylFile from 'vinyl'; import * as VinylFile from 'vinyl';
import { ThroughStream } from 'through'; import { ThroughStream } from 'through';
import * as sm from 'source-map'; import * as sm from 'source-map';
import * as cp from 'child_process';
export interface ICancellationToken { export interface ICancellationToken {
isCancellationRequested(): boolean; isCancellationRequested(): boolean;
@@ -29,7 +28,7 @@ export interface IStreamProvider {
(cancellationToken?: ICancellationToken): NodeJS.ReadWriteStream; (cancellationToken?: ICancellationToken): NodeJS.ReadWriteStream;
} }
export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation?: boolean): NodeJS.ReadWriteStream { export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation: boolean): NodeJS.ReadWriteStream {
const input = es.through(); const input = es.through();
const output = es.through(); const output = es.through();
let state = 'idle'; let state = 'idle';
@@ -130,7 +129,7 @@ export function skipDirectories(): NodeJS.ReadWriteStream {
}); });
} }
export function cleanNodeModule(name: string, excludes: string[], includes?: string[]): NodeJS.ReadWriteStream { export function cleanNodeModule(name: string, excludes: string[], includes: string[]): NodeJS.ReadWriteStream {
const toGlob = (path: string) => '**/node_modules/' + name + (path ? '/' + path : ''); const toGlob = (path: string) => '**/node_modules/' + name + (path ? '/' + path : '');
const negate = (str: string) => '!' + str; const negate = (str: string) => '!' + str;
@@ -191,7 +190,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream {
return; return;
} }
f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8'); f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => { fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => {
if (err) { return cb(err); } if (err) { return cb(err); }
@@ -210,7 +209,7 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
const output = input const output = input
.pipe(es.mapSync<VinylFile, VinylFile>(f => { .pipe(es.mapSync<VinylFile, VinylFile>(f => {
const contents = (<Buffer>f.contents).toString('utf8'); const contents = (<Buffer>f.contents).toString('utf8');
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8'); f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f; return f;
})); }));
@@ -224,7 +223,7 @@ export function rimraf(dir: string): (cb: any) => void {
_rimraf(dir, { maxBusyTries: 1 }, (err: any) => { _rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
if (!err) { if (!err) {
return cb(); return cb();
} };
if (err.code === 'ENOTEMPTY' && ++retries < 5) { if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(cb), 10); return setTimeout(() => retry(cb), 10);
@@ -269,74 +268,4 @@ export function filter(fn: (data: any) => boolean): FilterStream {
result.restore = es.through(); result.restore = es.through();
return result; return result;
} }
function tagExists(tagName: string): boolean {
try {
cp.execSync(`git rev-parse ${tagName}`, { stdio: 'ignore' });
return true;
} catch (e) {
return false;
}
}
/**
* Returns the version previous to the given version. Throws if a git tag for that version doesn't exist.
* Given 1.17.2, return 1.17.1
* 1.18.0 => 1.17.2. (or the highest 1.17.x)
* 2.0.0 => 1.18.0 (or the highest 1.x)
*/
export function getPreviousVersion(versionStr: string, _tagExists = tagExists) {
function getLatestTagFromBase(semverArr: number[], componentToTest: number): string {
const baseVersion = semverArr.join('.');
if (!_tagExists(baseVersion)) {
throw new Error('Failed to find git tag for base version, ' + baseVersion);
}
let goodTag;
do {
goodTag = semverArr.join('.');
semverArr[componentToTest]++;
} while (_tagExists(semverArr.join('.')));
return goodTag;
}
const semverArr = versionStringToNumberArray(versionStr);
if (semverArr[2] > 0) {
semverArr[2]--;
const previous = semverArr.join('.');
if (!_tagExists(previous)) {
throw new Error('Failed to find git tag for previous version, ' + previous);
}
return previous;
} else if (semverArr[1] > 0) {
semverArr[1]--;
return getLatestTagFromBase(semverArr, 2);
} else {
semverArr[0]--;
// Find 1.x.0 for latest x
const latestMinorVersion = getLatestTagFromBase(semverArr, 1);
// Find 1.x.y for latest y
return getLatestTagFromBase(versionStringToNumberArray(latestMinorVersion), 2);
}
}
function versionStringToNumberArray(versionStr: string): number[] {
return versionStr
.split('.')
.map(s => parseInt(s));
}
export function versionStringToNumber(versionStr: string) {
const semverRegex = /(\d+)\.(\d+)\.(\d+)/;
const match = versionStr.match(semverRegex);
if (!match) {
throw new Error('Version string is not properly formatted: ' + versionStr);
}
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}

View File

@@ -1 +0,0 @@
.yarnrc

View File

@@ -9,7 +9,7 @@ const es = require('event-stream');
function handleDeletions() { function handleDeletions() {
return es.mapSync(f => { return es.mapSync(f => {
if (/\.ts$/.test(f.relative) && !f.contents) { if (/\.ts$/.test(f.relative) && !f.contents) {
f.contents = Buffer.from(''); f.contents = new Buffer('');
f.stat = { mtime: new Date() }; f.stat = { mtime: new Date() };
} }

View File

@@ -30,12 +30,12 @@ function watch(root) {
path: path, path: path,
base: root base: root
}); });
//@ts-ignore
file.event = type; file.event = type;
result.emit('data', file); result.emit('data', file);
} }
nsfw(root, function (events) { nsfw(root, function(events) {
for (var i = 0; i < events.length; i++) { for (var i = 0; i < events.length; i++) {
var e = events[i]; var e = events[i];
var changeType = e.action; var changeType = e.action;
@@ -47,16 +47,16 @@ function watch(root) {
handleEvent(path.join(e.directory, e.file), toChangeType(changeType)); handleEvent(path.join(e.directory, e.file), toChangeType(changeType));
} }
} }
}).then(function (watcher) { }).then(function(watcher) {
watcher.start(); watcher.start();
}); });
return result; return result;
} }
var cache = Object.create(null); var cache = Object.create(null);
module.exports = function (pattern, options) { module.exports = function(pattern, options) {
options = options || {}; options = options || {};
var cwd = path.normalize(options.cwd || process.cwd()); var cwd = path.normalize(options.cwd || process.cwd());
@@ -66,7 +66,7 @@ module.exports = function (pattern, options) {
watcher = cache[cwd] = watch(cwd); watcher = cache[cwd] = watch(cwd);
} }
var rebase = !options.base ? es.through() : es.mapSync(function (f) { var rebase = !options.base ? es.through() : es.mapSync(function(f) {
f.base = options.base; f.base = options.base;
return f; return f;
}); });
@@ -74,13 +74,13 @@ module.exports = function (pattern, options) {
return watcher return watcher
.pipe(filter(['**', '!.git{,/**}'])) // ignore all things git .pipe(filter(['**', '!.git{,/**}'])) // ignore all things git
.pipe(filter(pattern)) .pipe(filter(pattern))
.pipe(es.map(function (file, cb) { .pipe(es.map(function(file, cb) {
fs.stat(file.path, function (err, stat) { fs.stat(file.path, function(err, stat) {
if (err && err.code === 'ENOENT') { return cb(null, file); } if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); } if (err) { return cb(); }
if (!stat.isFile()) { return cb(); } if (!stat.isFile()) { return cb(); }
fs.readFile(file.path, function (err, contents) { fs.readFile(file.path, function(err, contents) {
if (err && err.code === 'ENOENT') { return cb(null, file); } if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); } if (err) { return cb(); }

View File

@@ -24,8 +24,7 @@ function watch(root) {
var result = es.through(); var result = es.through();
var child = cp.spawn(watcherPath, [root]); var child = cp.spawn(watcherPath, [root]);
child.stdout.on('data', function (data) { child.stdout.on('data', function(data) {
// @ts-ignore
var lines = data.toString('utf8').split('\n'); var lines = data.toString('utf8').split('\n');
for (var i = 0; i < lines.length; i++) { for (var i = 0; i < lines.length; i++) {
var line = lines[i].trim(); var line = lines[i].trim();
@@ -47,17 +46,17 @@ function watch(root) {
path: changePathFull, path: changePathFull,
base: root base: root
}); });
//@ts-ignore
file.event = toChangeType(changeType); file.event = toChangeType(changeType);
result.emit('data', file); result.emit('data', file);
} }
}); });
child.stderr.on('data', function (data) { child.stderr.on('data', function(data) {
result.emit('error', data); result.emit('error', data);
}); });
child.on('exit', function (code) { child.on('exit', function(code) {
result.emit('error', 'Watcher died with code ' + code); result.emit('error', 'Watcher died with code ' + code);
child = null; child = null;
}); });
@@ -71,7 +70,7 @@ function watch(root) {
var cache = Object.create(null); var cache = Object.create(null);
module.exports = function (pattern, options) { module.exports = function(pattern, options) {
options = options || {}; options = options || {};
var cwd = path.normalize(options.cwd || process.cwd()); var cwd = path.normalize(options.cwd || process.cwd());

File diff suppressed because it is too large Load Diff

View File

@@ -1,21 +0,0 @@
The Source EULA
Copyright (c) 2016 - present Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -171,7 +171,9 @@ function format(text) {
function getRuleProvider(options) { function getRuleProvider(options) {
// Share this between multiple formatters using the same options. // Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses. // This represents the bulk of the space the formatter uses.
return ts.formatting.getFormatContext(options); var ruleProvider = new ts.formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
} }
function applyEdits(text, edits) { function applyEdits(text, edits) {
// Apply edits in reverse on the existing text // Apply edits in reverse on the existing text
@@ -208,8 +210,7 @@ function createReplacer(data) {
}; };
} }
function generateDeclarationFile(out, inputFiles, recipe) { function generateDeclarationFile(out, inputFiles, recipe) {
var endl = /\r\n/.test(recipe) ? '\r\n' : '\n'; var lines = recipe.split(/\r\n|\n|\r/);
var lines = recipe.split(endl);
var result = []; var result = [];
lines.forEach(function (line) { lines.forEach(function (line) {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/); var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
@@ -277,11 +278,12 @@ function generateDeclarationFile(out, inputFiles, recipe) {
} }
result.push(line); result.push(line);
}); });
var resultTxt = result.join(endl); var resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri'); resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<'); resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<'); resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt); resultTxt = format(resultTxt);
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt; return resultTxt;
} }
function getFilesToWatch(out) { function getFilesToWatch(out) {
@@ -312,13 +314,10 @@ function run(out, inputFiles) {
var result = generateDeclarationFile(out, inputFiles, recipe); var result = generateDeclarationFile(out, inputFiles, recipe);
var currentContent = fs.readFileSync(DECLARATION_PATH).toString(); var currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation'); log('Finished monaco.d.ts generation');
var one = currentContent.replace(/\r\n/gm, '\n');
var other = result.replace(/\r\n/gm, '\n');
var isTheSame = one === other;
return { return {
content: result, content: result,
filePath: DECLARATION_PATH, filePath: DECLARATION_PATH,
isTheSame: isTheSame isTheSame: currentContent === result
}; };
} }
exports.run = run; exports.run = run;

View File

@@ -24,15 +24,15 @@ function logErr(message: any, ...rest: any[]): void {
util.log(util.colors.red('[monaco.d.ts]'), message, ...rest); util.log(util.colors.red('[monaco.d.ts]'), message, ...rest);
} }
function moduleIdToPath(out: string, moduleId: string): string { function moduleIdToPath(out:string, moduleId:string): string {
if (/\.d\.ts/.test(moduleId)) { if (/\.d\.ts/.test(moduleId)) {
return path.join(SRC, moduleId); return path.join(SRC, moduleId);
} }
return path.join(OUT_ROOT, out, moduleId) + '.d.ts'; return path.join(OUT_ROOT, out, moduleId) + '.d.ts';
} }
let SOURCE_FILE_MAP: { [moduleId: string]: ts.SourceFile; } = {}; let SOURCE_FILE_MAP: {[moduleId:string]:ts.SourceFile;} = {};
function getSourceFile(out: string, inputFiles: { [file: string]: string; }, moduleId: string): ts.SourceFile { function getSourceFile(out:string, inputFiles: { [file: string]: string; }, moduleId:string): ts.SourceFile {
if (!SOURCE_FILE_MAP[moduleId]) { if (!SOURCE_FILE_MAP[moduleId]) {
let filePath = path.normalize(moduleIdToPath(out, moduleId)); let filePath = path.normalize(moduleIdToPath(out, moduleId));
@@ -53,7 +53,7 @@ function getSourceFile(out: string, inputFiles: { [file: string]: string; }, mod
type TSTopLevelDeclaration = ts.InterfaceDeclaration | ts.EnumDeclaration | ts.ClassDeclaration | ts.TypeAliasDeclaration | ts.FunctionDeclaration | ts.ModuleDeclaration; type TSTopLevelDeclaration = ts.InterfaceDeclaration | ts.EnumDeclaration | ts.ClassDeclaration | ts.TypeAliasDeclaration | ts.FunctionDeclaration | ts.ModuleDeclaration;
type TSTopLevelDeclare = TSTopLevelDeclaration | ts.VariableStatement; type TSTopLevelDeclare = TSTopLevelDeclaration | ts.VariableStatement;
function isDeclaration(a: TSTopLevelDeclare): a is TSTopLevelDeclaration { function isDeclaration(a:TSTopLevelDeclare): a is TSTopLevelDeclaration {
return ( return (
a.kind === ts.SyntaxKind.InterfaceDeclaration a.kind === ts.SyntaxKind.InterfaceDeclaration
|| a.kind === ts.SyntaxKind.EnumDeclaration || a.kind === ts.SyntaxKind.EnumDeclaration
@@ -64,7 +64,7 @@ function isDeclaration(a: TSTopLevelDeclare): a is TSTopLevelDeclaration {
); );
} }
function visitTopLevelDeclarations(sourceFile: ts.SourceFile, visitor: (node: TSTopLevelDeclare) => boolean): void { function visitTopLevelDeclarations(sourceFile:ts.SourceFile, visitor:(node:TSTopLevelDeclare)=>boolean): void {
let stop = false; let stop = false;
let visit = (node: ts.Node): void => { let visit = (node: ts.Node): void => {
@@ -100,8 +100,8 @@ function visitTopLevelDeclarations(sourceFile: ts.SourceFile, visitor: (node: TS
} }
function getAllTopLevelDeclarations(sourceFile: ts.SourceFile): TSTopLevelDeclare[] { function getAllTopLevelDeclarations(sourceFile:ts.SourceFile): TSTopLevelDeclare[] {
let all: TSTopLevelDeclare[] = []; let all:TSTopLevelDeclare[] = [];
visitTopLevelDeclarations(sourceFile, (node) => { visitTopLevelDeclarations(sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) { if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration>node; let interfaceDeclaration = <ts.InterfaceDeclaration>node;
@@ -128,8 +128,8 @@ function getAllTopLevelDeclarations(sourceFile: ts.SourceFile): TSTopLevelDeclar
} }
function getTopLevelDeclaration(sourceFile: ts.SourceFile, typeName: string): TSTopLevelDeclare { function getTopLevelDeclaration(sourceFile:ts.SourceFile, typeName:string): TSTopLevelDeclare {
let result: TSTopLevelDeclare = null; let result:TSTopLevelDeclare = null;
visitTopLevelDeclarations(sourceFile, (node) => { visitTopLevelDeclarations(sourceFile, (node) => {
if (isDeclaration(node)) { if (isDeclaration(node)) {
if (node.name.text === typeName) { if (node.name.text === typeName) {
@@ -149,12 +149,12 @@ function getTopLevelDeclaration(sourceFile: ts.SourceFile, typeName: string): TS
} }
function getNodeText(sourceFile: ts.SourceFile, node: { pos: number; end: number; }): string { function getNodeText(sourceFile:ts.SourceFile, node:{pos:number; end:number;}): string {
return sourceFile.getFullText().substring(node.pos, node.end); return sourceFile.getFullText().substring(node.pos, node.end);
} }
function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare): string { function getMassagedTopLevelDeclarationText(sourceFile:ts.SourceFile, declaration: TSTopLevelDeclare): string {
let result = getNodeText(sourceFile, declaration); let result = getNodeText(sourceFile, declaration);
// if (result.indexOf('MonacoWorker') >= 0) { // if (result.indexOf('MonacoWorker') >= 0) {
// console.log('here!'); // console.log('here!');
@@ -163,7 +163,7 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) { if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration; let interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
let members: ts.NodeArray<ts.Node> = interfaceDeclaration.members; let members:ts.NodeArray<ts.Node> = interfaceDeclaration.members;
members.forEach((member) => { members.forEach((member) => {
try { try {
let memberText = getNodeText(sourceFile, member); let memberText = getNodeText(sourceFile, member);
@@ -182,7 +182,7 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
return result; return result;
} }
function format(text: string): string { function format(text:string): string {
// Parse the source text // Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true); let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
@@ -196,7 +196,9 @@ function format(text: string): string {
function getRuleProvider(options: ts.FormatCodeSettings) { function getRuleProvider(options: ts.FormatCodeSettings) {
// Share this between multiple formatters using the same options. // Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses. // This represents the bulk of the space the formatter uses.
return (ts as any).formatting.getFormatContext(options); let ruleProvider = new (<any>ts).formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
} }
function applyEdits(text: string, edits: ts.TextChange[]): string { function applyEdits(text: string, edits: ts.TextChange[]): string {
@@ -212,10 +214,10 @@ function format(text: string): string {
} }
} }
function createReplacer(data: string): (str: string) => string { function createReplacer(data:string): (str:string)=>string {
data = data || ''; data = data || '';
let rawDirectives = data.split(';'); let rawDirectives = data.split(';');
let directives: [RegExp, string][] = []; let directives: [RegExp,string][] = [];
rawDirectives.forEach((rawDirective) => { rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) { if (rawDirective.length === 0) {
return; return;
@@ -229,7 +231,7 @@ function createReplacer(data: string): (str: string) => string {
directives.push([new RegExp(findStr, 'g'), replaceStr]); directives.push([new RegExp(findStr, 'g'), replaceStr]);
}); });
return (str: string) => { return (str:string)=> {
for (let i = 0; i < directives.length; i++) { for (let i = 0; i < directives.length; i++) {
str = str.replace(directives[i][0], directives[i][1]); str = str.replace(directives[i][0], directives[i][1]);
} }
@@ -237,12 +239,11 @@ function createReplacer(data: string): (str: string) => string {
}; };
} }
function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe: string): string { function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe:string): string {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n'; let lines = recipe.split(/\r\n|\n|\r/);
let lines = recipe.split(endl);
let result = []; let result = [];
lines.forEach(line => { lines.forEach(line => {
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/); let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
@@ -284,7 +285,7 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
let replacer = createReplacer(m2[2]); let replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/); let typeNames = m2[3].split(/,/);
let typesToExcludeMap: { [typeName: string]: boolean; } = {}; let typesToExcludeMap: {[typeName:string]:boolean;} = {};
let typesToExcludeArr: string[] = []; let typesToExcludeArr: string[] = [];
typeNames.forEach((typeName) => { typeNames.forEach((typeName) => {
typeName = typeName.trim(); typeName = typeName.trim();
@@ -317,17 +318,18 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
result.push(line); result.push(line);
}); });
let resultTxt = result.join(endl); let resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri'); resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<'); resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<'); resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt); resultTxt = format(resultTxt);
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt; return resultTxt;
} }
export function getFilesToWatch(out: string): string[] { export function getFilesToWatch(out:string): string[] {
let recipe = fs.readFileSync(RECIPE_PATH).toString(); let recipe = fs.readFileSync(RECIPE_PATH).toString();
let lines = recipe.split(/\r\n|\n|\r/); let lines = recipe.split(/\r\n|\n|\r/);
let result = []; let result = [];
@@ -368,14 +370,10 @@ export function run(out: string, inputFiles: { [file: string]: string; }): IMona
let currentContent = fs.readFileSync(DECLARATION_PATH).toString(); let currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation'); log('Finished monaco.d.ts generation');
const one = currentContent.replace(/\r\n/gm, '\n');
const other = result.replace(/\r\n/gm, '\n');
const isTheSame = one === other;
return { return {
content: result, content: result,
filePath: DECLARATION_PATH, filePath: DECLARATION_PATH,
isTheSame isTheSame: currentContent === result
}; };
} }

View File

@@ -3,9 +3,9 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
declare namespace monaco { declare module monaco {
export type Thenable<T> = PromiseLike<T>; type Thenable<T> = PromiseLike<T>;
export interface IDisposable { export interface IDisposable {
dispose(): void; dispose(): void;
@@ -32,14 +32,6 @@ declare namespace monaco {
Error = 3, Error = 3,
} }
export enum MarkerSeverity {
Hint = 1,
Info = 2,
Warning = 4,
Error = 8,
}
#include(vs/base/common/winjs.base.d.ts): TValueCallback, ProgressCallback, Promise #include(vs/base/common/winjs.base.d.ts): TValueCallback, ProgressCallback, Promise
#include(vs/base/common/cancellation): CancellationTokenSource, CancellationToken #include(vs/base/common/cancellation): CancellationTokenSource, CancellationToken
#include(vs/base/common/uri): URI, UriComponents #include(vs/base/common/uri): URI, UriComponents
@@ -54,46 +46,41 @@ declare namespace monaco {
#include(vs/editor/common/core/token): Token #include(vs/editor/common/core/token): Token
} }
declare namespace monaco.editor { declare module monaco.editor {
#includeAll(vs/editor/standalone/browser/standaloneEditor;modes.=>languages.;editorCommon.=>): #includeAll(vs/editor/standalone/browser/standaloneEditor;modes.=>languages.;editorCommon.=>):
#include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors #include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors
#include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule #include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule
#include(vs/editor/common/services/webWorker): MonacoWebWorker, IWebWorkerOptions #include(vs/editor/common/services/webWorker): MonacoWebWorker, IWebWorkerOptions
#include(vs/editor/standalone/browser/standaloneCodeEditor): IActionDescriptor, IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor #include(vs/editor/standalone/browser/standaloneCodeEditor): IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
export interface ICommandHandler { export interface ICommandHandler {
(...args:any[]): void; (...args:any[]): void;
} }
#include(vs/platform/contextkey/common/contextkey): IContextKey #include(vs/platform/contextkey/common/contextkey): IContextKey
#include(vs/editor/standalone/browser/standaloneServices): IEditorOverrideServices #include(vs/editor/standalone/browser/standaloneServices): IEditorOverrideServices
#include(vs/platform/markers/common/markers): IMarker, IMarkerData, IRelatedInformation #include(vs/platform/markers/common/markers): IMarker, IMarkerData
#include(vs/editor/standalone/browser/colorizer): IColorizerOptions, IColorizerElementOptions #include(vs/editor/standalone/browser/colorizer): IColorizerOptions, IColorizerElementOptions
#include(vs/base/common/scrollable): ScrollbarVisibility #include(vs/base/common/scrollable): ScrollbarVisibility
#include(vs/platform/theme/common/themeService): ThemeColor #include(vs/platform/theme/common/themeService): ThemeColor
#includeAll(vs/editor/common/model;LanguageIdentifier=>languages.LanguageIdentifier): IScrollEvent #includeAll(vs/editor/common/editorCommon;IMode=>languages.IMode;LanguageIdentifier=>languages.LanguageIdentifier;editorOptions.=>): ISelection, IScrollEvent
#includeAll(vs/editor/common/editorCommon;editorOptions.=>): IScrollEvent
#includeAll(vs/editor/common/model/textModelEvents): #includeAll(vs/editor/common/model/textModelEvents):
#includeAll(vs/editor/common/controller/cursorEvents): #includeAll(vs/editor/common/controller/cursorEvents):
#includeAll(vs/editor/common/config/editorOptions): #includeAll(vs/editor/common/config/editorOptions):
#includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>): #includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>):
#include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo #include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo
//compatibility:
export type IReadOnlyModel = ITextModel;
export type IModel = ITextModel;
} }
declare namespace monaco.languages { declare module monaco.languages {
#includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;model.=>editor.;IMarkerData=>editor.IMarkerData): #includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData):
#includeAll(vs/editor/common/modes/languageConfiguration): #includeAll(vs/editor/common/modes/languageConfiguration):
#includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData;model.=>editor.): #includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.):
#include(vs/editor/common/services/modeService): ILanguageExtensionPoint #include(vs/editor/common/services/modeService): ILanguageExtensionPoint
#includeAll(vs/editor/standalone/common/monarch/monarchTypes): #includeAll(vs/editor/standalone/common/monarch/monarchTypes):
} }
declare namespace monaco.worker { declare module monaco.worker {
#includeAll(vs/editor/common/services/editorSimpleWorker;): #includeAll(vs/editor/common/services/editorSimpleWorker;):

View File

@@ -1,17 +1,57 @@
{ {
"name": "monaco-editor-core", "name": "monaco-editor-core",
"private": true, "private": true,
"version": "0.12.0", "version": "0.9.0",
"description": "A browser based code editor", "description": "A browser based code editor",
"author": "Microsoft Corporation", "author": "Microsoft Corporation",
"license": "MIT", "license": "MIT",
"typings": "./esm/vs/editor/editor.api.d.ts",
"module": "./esm/vs/editor/editor.main.js",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://github.com/Microsoft/vscode" "url": "https://github.com/Microsoft/vscode"
}, },
"bugs": { "bugs": {
"url": "https://github.com/Microsoft/vscode/issues" "url": "https://github.com/Microsoft/vscode/issues"
},
"devDependencies": {
"@types/minimist": "1.2.0",
"@types/mocha": "2.2.39",
"@types/semver": "5.3.30",
"@types/sinon": "1.16.34",
"debounce": "^1.0.0",
"eslint": "^3.4.0",
"event-stream": "^3.1.7",
"ghooks": "1.0.3",
"glob": "^5.0.13",
"gulp": "^3.8.9",
"gulp-bom": "^1.0.0",
"gulp-concat": "^2.6.0",
"gulp-cssnano": "^2.1.0",
"gulp-filter": "^3.0.0",
"gulp-flatmap": "^1.0.0",
"gulp-rename": "^1.2.0",
"gulp-sourcemaps": "^1.11.0",
"gulp-tsb": "^2.0.3",
"gulp-tslint": "^7.0.1",
"gulp-uglify": "^2.0.0",
"gulp-util": "^3.0.6",
"gulp-watch": "^4.3.9",
"is": "^3.1.0",
"istanbul": "^0.3.17",
"jsdom-no-contextify": "^3.1.0",
"lazy.js": "^0.4.2",
"minimatch": "^2.0.10",
"mocha": "^2.2.5",
"object-assign": "^4.0.1",
"pump": "^1.0.1",
"remap-istanbul": "^0.6.4",
"rimraf": "^2.2.8",
"sinon": "^1.17.2",
"source-map": "^0.4.4",
"tslint": "^4.3.1",
"typescript": "2.5.2",
"typescript-formatter": "4.0.1",
"underscore": "^1.8.2",
"vinyl": "^0.4.5",
"vscode-nls-dev": "^2.0.1"
} }
} }

File diff suppressed because it is too large Load Diff

View File

@@ -5,15 +5,14 @@
const cp = require('child_process'); const cp = require('child_process');
const path = require('path'); const path = require('path');
const fs = require('fs'); const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
function yarnInstall(location, opts) { function npmInstall(location, opts) {
opts = opts || {}; opts = opts || {};
opts.cwd = location; opts.cwd = location;
opts.stdio = 'inherit'; opts.stdio = 'inherit';
const result = cp.spawnSync(yarn, ['install'], opts); const result = cp.spawnSync(npm, ['install'], opts);
if (result.error || result.status !== 0) { if (result.error || result.status !== 0) {
process.exit(1); process.exit(1);
@@ -21,40 +20,44 @@ function yarnInstall(location, opts) {
} }
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
yarnInstall('extensions'); // node modules shared by all extensions const protocol = [
'jsonrpc',
'types',
'client'
];
const allExtensionFolders = fs.readdirSync('extensions'); protocol.forEach(item => npmInstall(`dataprotocol-node/${item}`));
const extensions = allExtensionFolders.filter(e => {
try {
let packageJSON = JSON.parse(fs.readFileSync(path.join('extensions', e, 'package.json')).toString());
return packageJSON && (packageJSON.dependencies || packageJSON.devDependencies);
} catch (e) {
return false;
}
});
extensions.forEach(extension => yarnInstall(`extensions/${extension}`)); // {{SQL CARBON EDIT}}
npmInstall('extensions-modules');
npmInstall('extensions'); // node modules shared by all extensions
function yarnInstallBuildDependencies() { const extensions = [
// make sure we install the deps of build/lib/watch for the system installed 'vscode-colorize-tests',
'git',
'json',
'mssql',
'configuration-editing',
'extension-editing',
'markdown',
'merge-conflict',
'insights-default',
'account-provider-azure'
];
extensions.forEach(extension => npmInstall(`extensions/${extension}`));
function npmInstallBuildDependencies() {
// make sure we install gulp watch for the system installed
// node, since that is the driver of gulp // node, since that is the driver of gulp
//@ts-ignore
const env = Object.assign({}, process.env); const env = Object.assign({}, process.env);
const watchPath = path.join(path.dirname(__dirname), 'lib', 'watch');
const yarnrcPath = path.join(watchPath, '.yarnrc');
const disturl = 'https://nodejs.org/download/release'; delete env['npm_config_disturl'];
const target = process.versions.node; delete env['npm_config_target'];
const runtime = 'node'; delete env['npm_config_runtime'];
const yarnrc = `disturl "${disturl}" npmInstall(path.join(path.dirname(__dirname), 'lib', 'watch'), { env });
target "${target}"
runtime "${runtime}"`;
fs.writeFileSync(yarnrcPath, yarnrc, 'utf8');
yarnInstall(watchPath, { env });
} }
yarnInstall(`build`); // node modules required for build npmInstall(`build`); // node modules required for build
yarnInstall('test/smoke'); // node modules required for smoketest npmInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron
yarnInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron

View File

@@ -3,21 +3,13 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
let err = false; if (process.env['npm_config_disturl'] !== 'https://atom.io/download/electron') {
console.error("You can't use plain npm to install Code's dependencies.");
console.error(
/^win/.test(process.platform)
? "Please run '.\\scripts\\npm.bat install' instead."
: "Please run './scripts/npm.sh install' instead."
);
const major = parseInt(/^(\d+)\./.exec(process.versions.node)[1]);
if (major < 8) {
console.error('\033[1;31m*** Please use node>=8.\033[0;0m');
err = true;
}
if (!/yarn\.js$|yarnpkg$/.test(process.env['npm_execpath'])) {
console.error('\033[1;31m*** Please use yarn to install dependencies.\033[0;0m');
err = true;
}
if (err) {
console.error('');
process.exit(1); process.exit(1);
} }

View File

@@ -4,11 +4,9 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
const cp = require('child_process'); const cp = require('child_process');
const fs = require('fs'); const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
const path = require('path');
function updateGrammar(location) { function updateGrammar(location) {
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
const result = cp.spawnSync(npm, ['run', 'update-grammar'], { const result = cp.spawnSync(npm, ['run', 'update-grammar'], {
cwd: location, cwd: location,
stdio: 'inherit' stdio: 'inherit'
@@ -19,17 +17,50 @@ function updateGrammar(location) {
} }
} }
const allExtensionFolders = fs.readdirSync('extensions'); const extensions = [
const extensions = allExtensionFolders.filter(e => { // 'bat' Grammar no longer available
try { 'clojure',
let packageJSON = JSON.parse(fs.readFileSync(path.join('extensions', e, 'package.json')).toString()); 'coffeescript',
return packageJSON && packageJSON.scripts && packageJSON.scripts['update-grammar']; 'cpp',
} catch (e) { 'csharp',
return false; 'css',
} 'diff',
}); 'docker',
'fsharp',
console.log(`Updating ${extensions.length} grammars...`); 'gitsyntax',
'go',
'groovy',
'handlebars',
'hlsl',
'html',
'ini',
'java',
// 'javascript', updated through JavaScript
'json',
'less',
'lua',
'make',
'markdown',
'objective-c',
'perl',
'php',
// 'powershell', grammar not ready yet, @daviwil will ping when ready
'pug',
'python',
'r',
'razor',
'ruby',
'rust',
'scss',
'shaderlab',
'shellscript',
'sql',
'swift',
'typescript',
'vb',
'xml',
'yaml'
];
extensions.forEach(extension => updateGrammar(`extensions/${extension}`)); extensions.forEach(extension => updateGrammar(`extensions/${extension}`));
@@ -39,5 +70,4 @@ if (process.platform === 'win32') {
cp.spawn('.\scripts\test-integration.bat', [], { env: process.env, stdio: 'inherit' }); cp.spawn('.\scripts\test-integration.bat', [], { env: process.env, stdio: 'inherit' });
} else { } else {
cp.spawn('/bin/bash', ['./scripts/test-integration.sh'], { env: process.env, stdio: 'inherit' }); cp.spawn('/bin/bash', ['./scripts/test-integration.sh'], { env: process.env, stdio: 'inherit' });
} }

View File

@@ -14,19 +14,14 @@ var url = require('url');
function getOptions(urlString) { function getOptions(urlString) {
var _url = url.parse(urlString); var _url = url.parse(urlString);
var headers = {
'User-Agent': 'VSCode'
};
var token = process.env['GITHUB_TOKEN'];
if (token) {
headers['Authorization'] = 'token ' + token
}
return { return {
protocol: _url.protocol, protocol: _url.protocol,
host: _url.host, host: _url.host,
port: _url.port, port: _url.port,
path: _url.path, path: _url.path,
headers: headers headers: {
'User-Agent': 'NodeJS'
}
}; };
} }
@@ -37,16 +32,12 @@ function download(url, redirectCount) {
response.on('data', function (data) { response.on('data', function (data) {
content += data.toString(); content += data.toString();
}).on('end', function () { }).on('end', function () {
if (response.statusCode === 403 && response.headers['x-ratelimit-remaining'] === '0') {
e('GitHub API rate exceeded. Set GITHUB_TOKEN environment variable to increase rate limit.');
return;
}
let count = redirectCount || 0; let count = redirectCount || 0;
if (count < 5 && response.statusCode >= 300 && response.statusCode <= 303 || response.statusCode === 307) { if (count < 5 && response.statusCode >= 300 && response.statusCode <= 303 || response.statusCode === 307) {
let location = response.headers['location']; let location = response.headers['location'];
if (location) { if (location) {
console.log("Redirected " + url + " to " + location); console.log("Redirected " + url + " to " + location);
download(location, count + 1).then(c, e); download(location, count+1).then(c, e);
return; return;
} }
} }
@@ -68,13 +59,17 @@ function getCommitSha(repoId, repoPath) {
commitDate: lastCommit.commit.author.date commitDate: lastCommit.commit.author.date
}); });
} catch (e) { } catch (e) {
return Promise.reject(new Error("Failed extracting the SHA: " + content)); console.error("Failed extracting the SHA: " + content);
return Promise.resolve(null);
} }
}, function () {
console.error('Failed loading ' + commitInfo);
return Promise.resolve(null);
}); });
} }
exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'master') { exports.update = function (repoId, repoPath, dest, modifyGrammar) {
var contentPath = 'https://raw.githubusercontent.com/' + repoId + `/${version}/` + repoPath; var contentPath = 'https://raw.githubusercontent.com/' + repoId + '/master/' + repoPath;
console.log('Reading from ' + contentPath); console.log('Reading from ' + contentPath);
return download(contentPath).then(function (content) { return download(contentPath).then(function (content) {
var ext = path.extname(repoPath); var ext = path.extname(repoPath);
@@ -86,7 +81,8 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
} else if (ext === '.json') { } else if (ext === '.json') {
grammar = JSON.parse(content); grammar = JSON.parse(content);
} else { } else {
return Promise.reject(new Error('Unknown file extension: ' + ext)); console.error('Unknown file extension: ' + ext);
return;
} }
if (modifyGrammar) { if (modifyGrammar) {
modifyGrammar(grammar); modifyGrammar(grammar);
@@ -103,10 +99,8 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
if (info) { if (info) {
result.version = 'https://github.com/' + repoId + '/commit/' + info.commitSha; result.version = 'https://github.com/' + repoId + '/commit/' + info.commitSha;
} }
for (let key in grammar) {
let keys = ['name', 'scopeName', 'comment', 'injections', 'patterns', 'repository']; if (!result.hasOwnProperty(key)) {
for (let key of keys) {
if (grammar.hasOwnProperty(key)) {
result[key] = grammar[key]; result[key] = grammar[key];
} }
} }
@@ -119,14 +113,11 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
console.log('Updated ' + path.basename(dest)); console.log('Updated ' + path.basename(dest));
} }
} catch (e) { } catch (e) {
return Promise.reject(e); console.error(e);
} }
}); });
}, console.error).catch(e => { }, console.error);
console.error(e);
process.exit(1);
});
}; };
if (path.basename(process.argv[1]) === 'update-grammar.js') { if (path.basename(process.argv[1]) === 'update-grammar.js') {

View File

@@ -1,69 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
let i18n = require("../lib/i18n");
let fs = require("fs");
let path = require("path");
let vfs = require("vinyl-fs");
let rimraf = require('rimraf');
function update(idOrPath) {
if (!idOrPath) {
throw new Error('Argument must be the location of the localization extension.');
}
let locExtFolder = idOrPath;
if (/^\w{2}(-\w+)?$/.test(idOrPath)) {
locExtFolder = '../vscode-language-pack-' + idOrPath;
}
let locExtStat = fs.statSync(locExtFolder);
if (!locExtStat || !locExtStat.isDirectory) {
throw new Error('No directory found at ' + idOrPath);
}
let packageJSON = JSON.parse(fs.readFileSync(path.join(locExtFolder, 'package.json')).toString());
let contributes = packageJSON['contributes'];
if (!contributes) {
throw new Error('The extension must define a "localizations" contribution in the "package.json"');
}
let localizations = contributes['localizations'];
if (!localizations) {
throw new Error('The extension must define a "localizations" contribution of type array in the "package.json"');
}
localizations.forEach(function (localization) {
if (!localization.languageId || !localization.languageName || !localization.localizedLanguageName) {
throw new Error('Each localization contribution must define "languageId", "languageName" and "localizedLanguageName" properties.');
}
let server = localization.server || 'www.transifex.com';
let userName = localization.userName || 'api';
let apiToken = process.env.TRANSIFEX_API_TOKEN;
let languageId = localization.transifexId || localization.languageId;
let translationDataFolder = path.join(locExtFolder, 'translations');
if (fs.existsSync(translationDataFolder) && fs.existsSync(path.join(translationDataFolder, 'main.i18n.json'))) {
console.log('Clearing \'' + translationDataFolder + '\'...');
rimraf.sync(translationDataFolder);
}
console.log('Downloading translations for \'' + languageId + '\' to \'' + translationDataFolder + '\'...');
const translationPaths = [];
i18n.pullI18nPackFiles(server, userName, apiToken, { id: languageId }, translationPaths)
.pipe(vfs.dest(translationDataFolder)).on('end', function () {
localization.translations = [];
for (let tp of translationPaths) {
localization.translations.push({ id: tp.id, path: `./translations/${tp.resourceName}`});
}
fs.writeFileSync(path.join(locExtFolder, 'package.json'), JSON.stringify(packageJSON, null, '\t'));
});
});
}
if (path.basename(process.argv[1]) === 'update-localization-extension.js') {
update(process.argv[2]);
}

View File

@@ -9,24 +9,20 @@
"@types/mime": "0.0.29", "@types/mime": "0.0.29",
"@types/node": "8.0.33", "@types/node": "8.0.33",
"@types/xml2js": "0.0.33", "@types/xml2js": "0.0.33",
"@types/request": "^2.47.0",
"azure-storage": "^2.1.0", "azure-storage": "^2.1.0",
"decompress": "^4.2.0", "decompress": "^4.2.0",
"documentdb": "1.13.0", "documentdb": "^1.11.0",
"service-downloader": "github:anthonydresser/service-downloader#0.1.2", "extensions-modules": "file:../extensions-modules",
"fs-extra-promise": "^1.0.1", "fs-extra-promise": "^1.0.1",
"mime": "^1.3.4", "mime": "^1.3.4",
"minimist": "^1.2.0", "minimist": "^1.2.0",
"typescript": "2.8.1", "typescript": "2.5.2",
"vscode": "^1.0.1", "vscode": "^1.0.1",
"xml2js": "^0.4.17", "xml2js": "^0.4.17"
"github-releases": "^0.4.1",
"request": "^2.85.0"
}, },
"scripts": { "scripts": {
"compile": "tsc -p tsconfig.build.json", "compile": "tsc",
"watch": "tsc -p tsconfig.build.json --watch", "watch": "tsc --watch",
"postinstall": "npm run compile", "postinstall": "npm run compile"
"npmCheckJs": "tsc --noEmit"
} }
} }

View File

@@ -6,9 +6,6 @@ if [ -n "$AGENT_WORKFOLDER" ]
then then
export npm_config_cache="$AGENT_WORKFOLDER/npm-cache" export npm_config_cache="$AGENT_WORKFOLDER/npm-cache"
echo "Using npm cache: $npm_config_cache" echo "Using npm cache: $npm_config_cache"
export YARN_CACHE_FOLDER="$AGENT_WORKFOLDER/yarn-cache"
echo "Using yarn cache: $YARN_CACHE_FOLDER"
fi fi
SUMMARY="Task;Duration"$'\n' SUMMARY="Task;Duration"$'\n'

View File

@@ -4,15 +4,23 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
const cp = require('child_process'); const cp = require('child_process');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function yarnInstall(package: string): void { function npmInstall(package: string, args: string[]): void {
cp.execSync(`yarn add --no-lockfile ${package}`); const result = cp.spawnSync(npm, ['install', package, ...args], {
stdio: 'inherit'
});
if (result.error || result.status !== 0) {
process.exit(1);
}
} }
const product = require('../../../product.json'); const product = require('../../../product.json');
const dependencies = product.dependencies || {} as { [name: string]: string; }; const dependencies = product.dependencies || {} as { [name: string]: string; };
const [, , ...args] = process.argv;
Object.keys(dependencies).forEach(name => { Object.keys(dependencies).forEach(name => {
const url = dependencies[name]; const url = dependencies[name];
yarnInstall(url); npmInstall(url, args);
}); });

View File

@@ -4,15 +4,12 @@ set -e
# setup nvm # setup nvm
if [[ "$OSTYPE" == "darwin"* ]]; then if [[ "$OSTYPE" == "darwin"* ]]; then
export NVM_DIR=~/.nvm export NVM_DIR=~/.nvm
source $(brew --prefix nvm)/nvm.sh --no-use source $(brew --prefix nvm)/nvm.sh
else else
source $NVM_DIR/nvm.sh --no-use source $NVM_DIR/nvm.sh
fi fi
# install node # install node
NODE_VERSION=8.9.1 NODE_VERSION=7.10.0
nvm install $NODE_VERSION nvm install $NODE_VERSION
nvm use $NODE_VERSION nvm use $NODE_VERSION
# install yarn
npm i -g yarn

View File

@@ -14,9 +14,8 @@ import * as mime from 'mime';
import * as minimist from 'minimist'; import * as minimist from 'minimist';
import { DocumentClient, NewDocument } from 'documentdb'; import { DocumentClient, NewDocument } from 'documentdb';
// {{SQL CARBON EDIT}} if (process.argv.length < 6) {
if (process.argv.length < 9) { console.error('Usage: node publish.js <product> <platform> <type> <name> <version> <commit> <is_update> <file>');
console.error('Usage: node publish.js <product_quality> <platform> <file_type> <file_name> <version> <is_update> <file> [commit_id]');
process.exit(-1); process.exit(-1);
} }
@@ -70,7 +69,6 @@ interface Asset {
hash: string; hash: string;
sha256hash: string; sha256hash: string;
size: number; size: number;
supportsFastUpdate?: boolean;
} }
function createOrUpdate(commit: string, quality: string, platform: string, type: string, release: NewDocument, asset: Asset, isUpdate: boolean): Promise<void> { function createOrUpdate(commit: string, quality: string, platform: string, type: string, release: NewDocument, asset: Asset, isUpdate: boolean): Promise<void> {
@@ -185,10 +183,21 @@ async function publish(commit: string, quality: string, platform: string, type:
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']) const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20)); .withFilter(new azure.ExponentialRetryPolicyFilter(20));
// {{SQL CARBON EDIT}} const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
await assertContainer(blobService, quality); .withFilter(new azure.ExponentialRetryPolicyFilter(20));
const blobExists = await doesAssetExist(blobService, quality, blobName); // mooncake is fussy and far away, this is needed!
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
await Promise.all([
assertContainer(blobService, quality),
assertContainer(mooncakeBlobService, quality)
]);
const [blobExists, moooncakeBlobExists] = await Promise.all([
doesAssetExist(blobService, quality, blobName),
doesAssetExist(mooncakeBlobService, quality, blobName)
]);
const promises = []; const promises = [];
@@ -196,38 +205,10 @@ async function publish(commit: string, quality: string, platform: string, type:
promises.push(uploadBlob(blobService, quality, blobName, file)); promises.push(uploadBlob(blobService, quality, blobName, file));
} }
// {{SQL CARBON EDIT}} if (!moooncakeBlobExists) {
if (process.env['MOONCAKE_STORAGE_ACCESS_KEY']) { promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// mooncake is fussy and far away, this is needed!
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
await Promise.all([
assertContainer(blobService, quality),
assertContainer(mooncakeBlobService, quality)
]);
const [blobExists, moooncakeBlobExists] = await Promise.all([
doesAssetExist(blobService, quality, blobName),
doesAssetExist(mooncakeBlobService, quality, blobName)
]);
const promises = [];
if (!blobExists) {
promises.push(uploadBlob(blobService, quality, blobName, file));
}
if (!moooncakeBlobExists) {
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
}
} else {
console.log('Skipping Mooncake publishing.');
} }
if (promises.length === 0) { if (promises.length === 0) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`); console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return; return;
@@ -247,20 +228,12 @@ async function publish(commit: string, quality: string, platform: string, type:
platform: platform, platform: platform,
type: type, type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`, url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
// {{SQL CARBON EDIT}} mooncakeUrl: `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}`,
mooncakeUrl: process.env['MOONCAKE_CDN_URL'] ? `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}` : undefined,
hash: sha1hash, hash: sha1hash,
sha256hash, sha256hash,
size size
}; };
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, ' '));
const release = { const release = {
id: commit, id: commit,
timestamp: (new Date()).getTime(), timestamp: (new Date()).getTime(),
@@ -288,11 +261,8 @@ function main(): void {
boolean: ['upload-only'] boolean: ['upload-only']
}); });
// {{SQL CARBON EDIT}} const [quality, platform, type, name, version, _isUpdate, file] = opts._;
let [quality, platform, type, name, version, _isUpdate, file, commit] = opts._; const commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
if (!commit) {
commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
}
publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => { publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => {
console.error(err); console.error(err);

View File

@@ -1,219 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as request from 'request';
import { createReadStream, createWriteStream, unlink, mkdir } from 'fs';
import * as github from 'github-releases';
import { join } from 'path';
import { tmpdir } from 'os';
import { promisify } from 'util';
const BASE_URL = 'https://rink.hockeyapp.net/api/2/';
const HOCKEY_APP_TOKEN_HEADER = 'X-HockeyAppToken';
export interface IVersions {
app_versions: IVersion[];
}
export interface IVersion {
id: number;
version: string;
}
export interface IApplicationAccessor {
accessToken: string;
appId: string;
}
export interface IVersionAccessor extends IApplicationAccessor {
id: string;
}
enum Platform {
WIN_32 = 'win32-ia32',
WIN_64 = 'win32-x64',
LINUX_32 = 'linux-ia32',
LINUX_64 = 'linux-x64',
MAC_OS = 'darwin-x64'
}
function symbolsZipName(platform: Platform, electronVersion: string, insiders: boolean): string {
return `${insiders ? 'insiders' : 'stable'}-symbols-v${electronVersion}-${platform}.zip`;
}
const SEED = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
async function tmpFile(name: string): Promise<string> {
let res = '';
for (let i = 0; i < 8; i++) {
res += SEED.charAt(Math.floor(Math.random() * SEED.length));
}
const tmpParent = join(tmpdir(), res);
await promisify(mkdir)(tmpParent);
return join(tmpParent, name);
}
function getVersions(accessor: IApplicationAccessor): Promise<IVersions> {
return asyncRequest<IVersions>({
url: `${BASE_URL}/apps/${accessor.appId}/app_versions`,
method: 'GET',
headers: {
[HOCKEY_APP_TOKEN_HEADER]: accessor.accessToken
}
});
}
function createVersion(accessor: IApplicationAccessor, version: string): Promise<IVersion> {
return asyncRequest<IVersion>({
url: `${BASE_URL}/apps/${accessor.appId}/app_versions/new`,
method: 'POST',
headers: {
[HOCKEY_APP_TOKEN_HEADER]: accessor.accessToken
},
formData: {
bundle_version: version
}
});
}
function updateVersion(accessor: IVersionAccessor, symbolsPath: string) {
return asyncRequest<IVersions>({
url: `${BASE_URL}/apps/${accessor.appId}/app_versions/${accessor.id}`,
method: 'PUT',
headers: {
[HOCKEY_APP_TOKEN_HEADER]: accessor.accessToken
},
formData: {
dsym: createReadStream(symbolsPath)
}
});
}
function asyncRequest<T>(options: request.UrlOptions & request.CoreOptions): Promise<T> {
return new Promise<T>((resolve, reject) => {
request(options, (error, response, body) => {
if (error) {
reject(error);
} else {
resolve(JSON.parse(body));
}
});
});
}
function downloadAsset(repository, assetName: string, targetPath: string, electronVersion: string) {
return new Promise((resolve, reject) => {
repository.getReleases({ tag_name: `v${electronVersion}` }, (err, releases) => {
if (err) {
reject(err);
} else {
const asset = releases[0].assets.filter(asset => asset.name === assetName)[0];
if (!asset) {
reject(new Error(`Asset with name ${assetName} not found`));
} else {
repository.downloadAsset(asset, (err, reader) => {
if (err) {
reject(err);
} else {
const writer = createWriteStream(targetPath);
writer.on('error', reject);
writer.on('close', resolve);
reader.on('error', reject);
reader.pipe(writer);
}
});
}
}
});
});
}
interface IOptions {
repository: string;
platform: Platform;
versions: { code: string; insiders: boolean; electron: string; };
access: { hockeyAppToken: string; hockeyAppId: string; githubToken: string };
}
async function ensureVersionAndSymbols(options: IOptions) {
// Check version does not exist
console.log(`HockeyApp: checking for existing version ${options.versions.code} (${options.platform})`);
const versions = await getVersions({ accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId });
if (versions.app_versions.some(v => v.version === options.versions.code)) {
console.log(`HockeyApp: Returning without uploading symbols because version ${options.versions.code} (${options.platform}) was already found`);
return;
}
// Download symbols for platform and electron version
const symbolsName = symbolsZipName(options.platform, options.versions.electron, options.versions.insiders);
const symbolsPath = await tmpFile('symbols.zip');
console.log(`HockeyApp: downloading symbols ${symbolsName} for electron ${options.versions.electron} (${options.platform}) into ${symbolsPath}`);
await downloadAsset(new github({ repo: options.repository, token: options.access.githubToken }), symbolsName, symbolsPath, options.versions.electron);
// Create version
console.log(`HockeyApp: creating new version ${options.versions.code} (${options.platform})`);
const version = await createVersion({ accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId }, options.versions.code);
// Upload symbols
console.log(`HockeyApp: uploading symbols for version ${options.versions.code} (${options.platform})`);
await updateVersion({ id: String(version.id), accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId }, symbolsPath);
// Cleanup
await promisify(unlink)(symbolsPath);
}
// Environment
const pakage = require('../../../package.json');
const product = require('../../../product.json');
const repository = product.electronRepository;
const electronVersion = require('../../lib/electron').getElectronVersion();
const insiders = product.quality !== 'stable';
let codeVersion = pakage.version;
if (insiders) {
codeVersion = `${codeVersion}-insider`;
}
const githubToken = process.argv[2];
const hockeyAppToken = process.argv[3];
const is64 = process.argv[4] === 'x64';
const hockeyAppId = process.argv[5];
let platform: Platform;
if (process.platform === 'darwin') {
platform = Platform.MAC_OS;
} else if (process.platform === 'win32') {
platform = is64 ? Platform.WIN_64 : Platform.WIN_32;
} else {
platform = is64 ? Platform.LINUX_64 : Platform.LINUX_32;
}
// Create version and upload symbols in HockeyApp
if (repository && codeVersion && electronVersion && (product.quality === 'stable' || product.quality === 'insider')) {
ensureVersionAndSymbols({
repository,
platform,
versions: {
code: codeVersion,
insiders,
electron: electronVersion
},
access: {
githubToken,
hockeyAppToken,
hockeyAppId
}
}).then(() => {
console.log('HockeyApp: done');
}).catch(error => {
console.error(`HockeyApp: error (${error})`);
});
} else {
console.log(`HockeyApp: skipping due to unexpected context (repository: ${repository}, codeVersion: ${codeVersion}, electronVersion: ${electronVersion}, quality: ${product.quality})`);
}

View File

@@ -1,100 +0,0 @@
phases:
- phase: Windows
queue: Hosted VS2017
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- powershell: |
$ErrorActionPreference = "Stop"
yarn
.\node_modules\.bin\gulp electron
npm run gulp -- hygiene
.\node_modules\.bin\tsc -p .\src\tsconfig.monaco.json --noEmit
npm run compile
node build/lib/builtInExtensions.js
name: build
- powershell: |
$ErrorActionPreference = "Stop"
.\scripts\test.bat --tfs
.\scripts\test-integration.bat
yarn smoketest --screenshots "$(Build.ArtifactStagingDirectory)\artifacts" --log "$(Build.ArtifactStagingDirectory)\artifacts\smoketest.log"
name: test
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: build-artifacts-win32
publishLocation: Container
condition: succeededOrFailed()
- phase: Linux
queue: Hosted Linux Preview
steps:
- script: |
set -e
apt-get update
apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 libgconf-2-4 dbus xvfb libgtk-3-0
cp build/tfs/linux/x64/xvfb.init /etc/init.d/xvfb
chmod +x /etc/init.d/xvfb
update-rc.d xvfb defaults
ln -sf /bin/dbus-daemon /usr/bin/dbus-daemon
service xvfb start
service dbus start
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
yarn
npm run gulp -- electron-x64
- script: |
set -e
npm run gulp -- hygiene
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
npm run compile
node build/lib/builtInExtensions.js
name: build
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --tfs
# DISPLAY=:10 ./scripts/test-integration.sh
name: test
- phase: macOS
queue: Hosted macOS Preview
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
yarn
npm run gulp -- electron-x64
- script: |
set -e
npm run gulp -- hygiene
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
npm run compile
node build/lib/builtInExtensions.js
name: build
- script: |
set -e
./scripts/test.sh --tfs
./scripts/test-integration.sh
yarn smoketest --screenshots "$(Build.ArtifactStagingDirectory)/artifacts" --log "$(Build.ArtifactStagingDirectory)/artifacts/smoketest.log"
name: test
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: build-artifacts-darwin
publishLocation: Container
condition: succeededOrFailed()

View File

@@ -14,14 +14,11 @@ VSO_PAT="$6"
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
step "Install dependencies" \ step "Install dependencies" \
yarn npm install
step "Hygiene" \ step "Hygiene" \
npm run gulp -- hygiene npm run gulp -- hygiene
step "Monaco Editor Check" \
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
step "Mix in repository from vscode-distro" \ step "Mix in repository from vscode-distro" \
npm run gulp -- mixin npm run gulp -- mixin

View File

@@ -1,51 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin electron
node build/tfs/common/installDistro.js
- script: |
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-darwin-min
name: build
- script: |
./scripts/test.sh --build --tfs
name: test
- script: |
# archive the unsigned build
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin-unsigned.zip * && popd
# publish the unsigned build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/tfs/common/publish.js \
"$(VSCODE_QUALITY)" \
darwin \
archive-unsigned \
"VSCode-darwin-$(VSCODE_QUALITY)-unsigned.zip" \
$VERSION \
false \
../VSCode-darwin-unsigned.zip
# enqueue the unsigned build
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
node build/tfs/common/enqueue.js "$(VSCODE_QUALITY)"
npm run gulp -- upload-vscode-configuration

View File

@@ -3,6 +3,10 @@
. ./scripts/env.sh . ./scripts/env.sh
. ./build/tfs/common/common.sh . ./build/tfs/common/common.sh
(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
step "Install build dependencies" \
npm i)
REPO=`pwd` REPO=`pwd`
ZIP=$REPO/../VSCode-darwin-selfsigned.zip ZIP=$REPO/../VSCode-darwin-selfsigned.zip
UNSIGNEDZIP=$REPO/../VSCode-darwin-unsigned.zip UNSIGNEDZIP=$REPO/../VSCode-darwin-unsigned.zip

View File

@@ -1,2 +1 @@
pat pat
*.js

View File

@@ -5,7 +5,6 @@
. ./build/tfs/common/common.sh . ./build/tfs/common/common.sh
export ARCH="$1" export ARCH="$1"
export npm_config_arch="$ARCH"
export VSCODE_MIXIN_PASSWORD="$2" export VSCODE_MIXIN_PASSWORD="$2"
export AZURE_STORAGE_ACCESS_KEY="$3" export AZURE_STORAGE_ACCESS_KEY="$3"
export AZURE_STORAGE_ACCESS_KEY_2="$4" export AZURE_STORAGE_ACCESS_KEY_2="$4"
@@ -17,14 +16,11 @@ VSO_PAT="$8"
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
step "Install dependencies" \ step "Install dependencies" \
yarn npm install --arch=$ARCH --unsafe-perm
step "Hygiene" \ step "Hygiene" \
npm run gulp -- hygiene npm run gulp -- hygiene
step "Monaco Editor Check" \
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
step "Mix in repository from vscode-distro" \ step "Mix in repository from vscode-distro" \
npm run gulp -- mixin npm run gulp -- mixin
@@ -32,7 +28,7 @@ step "Get Electron" \
npm run gulp -- "electron-$ARCH" npm run gulp -- "electron-$ARCH"
step "Install distro dependencies" \ step "Install distro dependencies" \
node build/tfs/common/installDistro.js node build/tfs/common/installDistro.js --arch=$ARCH
step "Build minified" \ step "Build minified" \
npm run gulp -- "vscode-linux-$ARCH-min" npm run gulp -- "vscode-linux-$ARCH-min"

View File

@@ -1,103 +0,0 @@
steps:
- script: |
# dependencies
dpkg --add-architecture i386
apt-get update
DEPS=" \
gcc-multilib g++-multilib \
pkg-config \
dbus \
xvfb \
fakeroot \
bc \
bsdmainutils \
rpm \
"
if [[ "$(VSCODE_ARCH)" == "x64" ]]; then
DEPS="$DEPS \
dpkg-dev \
libgconf-2-4 \
libnss3 \
libasound2 \
libxtst6 \
libx11-dev \
libxkbfile-dev \
libxss1 \
libx11-xcb-dev \
libsecret-1-dev \
"
else
DEPS="$DEPS \
dpkg-dev:i386 \
libgconf-2-4:i386 \
libnss3:i386 \
libasound2:i386 \
libxtst6:i386 \
libnotify4:i386 \
libx11-dev:i386 \
libxkbfile-dev:i386 \
libxss1:i386 \
libx11-xcb-dev:i386 \
libgl1-mesa-glx:i386 libgl1-mesa-dri:i386 \
libgirepository-1.0-1:i386 \
gir1.2-glib-2.0:i386 \
gir1.2-secret-1:i386 \
libsecret-1-dev:i386 \
libgtk2.0-0:i386 \
"
fi
apt-get install -y $DEPS
# setup xvfb
cp build/tfs/linux/$(VSCODE_ARCH)/xvfb.init /etc/init.d/xvfb
chmod +x /etc/init.d/xvfb
update-rc.d xvfb defaults
service xvfb start
# setup dbus
ln -sf /bin/dbus-daemon /usr/bin/dbus-daemon
service dbus start
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
- script: |
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
name: build
- script: |
npm run gulp -- "electron-$(VSCODE_ARCH)"
DISPLAY=:10 ./scripts/test.sh --build --tfs
name: test
- script: |
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
#npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-snap"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
./build/tfs/linux/release2.sh "$(VSCODE_ARCH)" "$(LINUX_REPO_PASSWORD)"

View File

@@ -1,42 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { DocumentClient } from 'documentdb';
interface Config {
id: string;
frozen: boolean;
}
function createDefaultConfig(quality: string): Config {
return {
id: quality,
frozen: false
};
}
function getConfig(quality: string): Promise<Config> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise<Config>((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) { return e(err); }
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
});
});
}
getConfig(process.argv[2])
.then(c => console.log(c.frozen), e => console.error(e));

View File

@@ -1,6 +0,0 @@
{
"name": "PACKAGENAME",
"version": "PACKAGEVERSION",
"repositoryId": "REPOSITORYID",
"sourceUrl": "PACKAGEURL"
}

View File

@@ -12,6 +12,10 @@ step "Build RPM package" \
# step "Build snap package" \ # step "Build snap package" \
# npm run gulp -- "vscode-linux-$ARCH-build-snap" # npm run gulp -- "vscode-linux-$ARCH-build-snap"
(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
step "Install build dependencies" \
npm install --unsafe-perm)
# Variables # Variables
PLATFORM_LINUX="linux-$ARCH" PLATFORM_LINUX="linux-$ARCH"
PLATFORM_DEB="linux-deb-$ARCH" PLATFORM_DEB="linux-deb-$ARCH"
@@ -51,29 +55,36 @@ step "Publish RPM package" \
# SNAP_FILENAME="$(ls $REPO/.build/linux/snap/$ARCH/ | grep .snap)" # SNAP_FILENAME="$(ls $REPO/.build/linux/snap/$ARCH/ | grep .snap)"
# SNAP_PATH="$REPO/.build/linux/snap/$ARCH/$SNAP_FILENAME" # SNAP_PATH="$REPO/.build/linux/snap/$ARCH/$SNAP_FILENAME"
IS_FROZEN="$(node build/tfs/linux/frozen-check.js $VSCODE_QUALITY)"
if [ -z "$VSCODE_QUALITY" ]; then if [ -z "$VSCODE_QUALITY" ]; then
echo "VSCODE_QUALITY is not set, skipping repo package publish" echo "VSCODE_QUALITY is not set, skipping repo package publish"
elif [ "$IS_FROZEN" = "true" ]; then
echo "$VSCODE_QUALITY is frozen, skipping repo package publish"
else else
if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ]; then if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ]; then
if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then
# Get necessary information
pushd $REPO && COMMIT_HASH=$(git rev-parse HEAD) && popd
PACKAGE_NAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/_.*//g')"
DEB_URL="https://az764295.vo.msecnd.net/$VSCODE_QUALITY/$COMMIT_HASH/$DEB_FILENAME"
RPM_URL="https://az764295.vo.msecnd.net/$VSCODE_QUALITY/$COMMIT_HASH/$RPM_FILENAME"
PACKAGE_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\_.*$//g')"
# Write config files needed by API, use eval to force environment variable expansion # Write config files needed by API, use eval to force environment variable expansion
DIRNAME=$(dirname $(readlink -f $0)) DIRNAME=$(dirname $(readlink -f $0))
pushd $DIRNAME pushd $DIRNAME
# Submit to apt repo # Submit to apt repo
if [ "$DEB_ARCH" = "amd64" ]; then if [ "$DEB_ARCH" = "amd64" ]; then
eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > apt-config.json eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > apt-config.json
eval echo '{ \"name\": \"$PACKAGE_NAME\", \"version\": \"$PACKAGE_VERSION\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"sourceUrl\": \"$DEB_URL\" }' > apt-addpkg.json
echo "Submitting apt-addpkg.json:"
cat apt-addpkg.json
step "Publish to repositories" \ step "Publish to repositories" \
./repoapi_client.sh -config apt-config.json -addfile $DEB_PATH ./repoapi_client.sh -config apt-config.json -addpkg apt-addpkg.json
fi fi
# Submit to yum repo (disabled as it's manual until signing is automated) # Submit to yum repo (disabled as it's manual until signing is automated)
# eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > yum-config.json # eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > yum-config.json
# eval echo '{ \"name\": \"$PACKAGE_NAME\", \"version\": \"$PACKAGE_VERSION\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"sourceUrl\": \"$RPM_URL\" }' > yum-addpkg.json
# ./repoapi_client.sh -config yum-config.json -addfile $RPM_PATH # echo "Submitting yum-addpkg.json:"
# cat yum-addpkg.json
# ./repoapi_client.sh -config yum-config.json -addpkg yum-addpkg.json
popd popd
echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>" echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>"
fi fi

View File

@@ -1,67 +0,0 @@
#!/bin/bash
set -e
# Arguments
ARCH="$1"
LINUX_REPO_PASSWORD="$2"
# Variables
PLATFORM_LINUX="linux-$ARCH"
PLATFORM_DEB="linux-deb-$ARCH"
PLATFORM_RPM="linux-rpm-$ARCH"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
REPO="`pwd`"
ROOT="$REPO/.."
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\.deb$//g')"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_LINUX archive-unsigned $TARBALL_FILENAME $VERSION true $TARBALL_PATH
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_DEB package $DEB_FILENAME $VERSION true $DEB_PATH
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_RPM package $RPM_FILENAME $VERSION true $RPM_PATH
# SNAP_FILENAME="$(ls $REPO/.build/linux/snap/$ARCH/ | grep .snap)"
# SNAP_PATH="$REPO/.build/linux/snap/$ARCH/$SNAP_FILENAME"
IS_FROZEN="$(node build/tfs/linux/frozen-check.js $VSCODE_QUALITY)"
if [ -z "$VSCODE_QUALITY" ]; then
echo "VSCODE_QUALITY is not set, skipping repo package publish"
elif [ "$IS_FROZEN" = "true" ]; then
echo "$VSCODE_QUALITY is frozen, skipping repo package publish"
else
if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ]; then
if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then
# Write config files needed by API, use eval to force environment variable expansion
pushd build/tfs/linux
# Submit to apt repo
if [ "$DEB_ARCH" = "amd64" ]; then
eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"vscode\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > apt-config.json
./repoapi_client.sh -config apt-config.json -addfile $DEB_PATH
fi
# Submit to yum repo (disabled as it's manual until signing is automated)
# eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"vscode\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > yum-config.json
# ./repoapi_client.sh -config yum-config.json -addfile $RPM_PATH
popd
echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>"
fi
fi
fi

View File

@@ -2,9 +2,9 @@
# This is a VERY basic script for Create/Delete operations on repos and packages # This is a VERY basic script for Create/Delete operations on repos and packages
# #
cmd=$1 cmd=$1
docDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # chrmarti: Changed to script's directory. urls=urls.txt
packageJsonTemplate=$docDir/new_package.json.template defaultPackageFile=new_package.json
repoJsonTemplate=$docDir/new_repo.json.template defaultRepoFile=new_repo.json
function Bail function Bail
{ {
@@ -24,21 +24,14 @@ function Usage {
echo "$0 -config FILENAME -listrepos | -listpkgs | -addrepo FILENAME | -addpkg FILENAME |" echo "$0 -config FILENAME -listrepos | -listpkgs | -addrepo FILENAME | -addpkg FILENAME |"
echo "-addpkgs FILENAME | -check ID | -delrepo REPOID | -delpkg PKGID" echo "-addpkgs FILENAME | -check ID | -delrepo REPOID | -delpkg PKGID"
echo -e "\t-config FILENAME : JSON file containing API server name and creds" echo -e "\t-config FILENAME : JSON file containing API server name and creds"
echo -e "Package Operations:" echo -e "\t-listrepos : List repositories"
echo -e "\t-listpkgs [REGEX] : List packages, optionally filter by REGEX" echo -e "\t-listpkgs [REGEX] : List packages, optionally filter by REGEX"
echo -e "\t-addrepo FILENAME : Create a new repo using the specified JSON file"
echo -e "\t-addpkg FILENAME : Add package to repo using the specified JSON file" echo -e "\t-addpkg FILENAME : Add package to repo using the specified JSON file"
echo -e "\t-addpkgs FILENAME : Add packages to repo using urls contained in FILENAME" echo -e "\t-addpkgs FILENAME : Add packages to repo using urls contained in FILENAME"
echo -e "\t-check ID : Check upload operation by ID" echo -e "\t-check ID : Check upload operation by ID"
echo -e "\t-delpkg PKGID : Delete the specified package by ID"
echo -e "File Operations:"
echo -e "\t-uploadfile FILENAME: Upload FILENAME (does not publish) "
echo -e "\t-addfile FILENAME : Upload FILENAME AND publish to the repo"
echo -e "\t-listfiles : List uploaded files"
echo -e "\t-delfile FILEID : Delete uploaded file by ID"
echo -e "Repository Operations:"
echo -e "\t-listrepos : List repositories"
echo -e "\t-addrepo FILENAME : Create a new repo using the specified JSON file"
echo -e "\t-delrepo REPOID : Delete the specified repo by ID" echo -e "\t-delrepo REPOID : Delete the specified repo by ID"
echo -e "\t-delpkg PKGID : Delete the specified package by ID"
exit 1 exit 1
} }
@@ -91,136 +84,33 @@ function AddRepo
{ {
repoFile=$1 repoFile=$1
if [ -z $repoFile ]; then if [ -z $repoFile ]; then
Bail "Error: Must specify a JSON-formatted file. Reference $repoJsonTemplate" Bail "Error: Must specify a JSON-formatted file. Reference $defaultRepoFile.template"
fi fi
if [ ! -f $repoFile ]; then if [ ! -f $repoFile ]; then
Bail "Error: Cannot create repo - $repoFile does not exist" Bail "Error: Cannot create repo - $repoFile does not exist"
fi fi
packageUrl=$(grep "url" $repoFile | head -n 1 | awk '{print $2}' | tr -d ',') packageUrl=$(grep "url" $repoFile | head -n 1 | awk '{print $2}' | tr -d ',')
echo "Creating new repo on $server [$packageUrl]" echo "Creating new repo on $server [$packageUrl]"
curl -i -k "$baseurl/v1/repositories" --data @$repoFile -H "Content-Type: application/json" curl -i -k "$baseurl/v1/repositories" --data @./$repoFile -H "Content-Type: application/json"
echo "" echo ""
} }
# Upload AND publish the file
function AddFile
{
packageFile=$1
# Validity checks are performed by UploadFile
echo "Uploading package to $server [$packageFile]"
response=$(UploadFile $packageFile "true")
id=$(echo $response | jq -r ".id")
# Parse package metadata first to confirm it's a valid deb/rpm
# Needs to be performed in this function so we can use it to publish the package
jsonFile=$(WritePackageInfoToFile $packageFile)
sed -i "s/REPOSITORYID/$repositoryId/g" $jsonFile
# Replace the url field with fileId
sed -i "s/PACKAGEURL/$id/g" $jsonFile
sed -i "s/sourceUrl/fileId/g" $jsonFile
AddPackage $jsonFile
rm -f $jsonFile
echo ""
}
# Upload a file
function UploadFile
{
packageFile=$1
quick=$2
if [ -z $packageFile ]; then
Bail "Error: Must specify the path to a file to upload "
fi
if [ ! -f $packageFile ]; then
Bail "Error: Cannot upload - $packageFile does not exist"
fi
# Additional validation and output if quick mode isn't enabled
# Basically, if this is part of a publish operation, these steps are handled elsewhere
if [ "$quick" != "true" ]; then
# Parse package metadata first to confirm it's a valid deb/rpm
jsonFile=$(WritePackageInfoToFile $packageFile)
rm -f $jsonFile
echo "Uploading package to $server [$packageFile]"
fi
curl -s -k -X POST -F file=@$packageFile "$baseurl/v1/files"
echo ""
}
function ListFiles
{
curl -s -k "$baseurl/v1/files" | jq
}
function DeleteFile
{
fileId=$1
if [ -z "$fileId" ]; then
Bail "Error: Must specify an ID to delete"
fi
curl -s -X DELETE "$baseurl/v1/files/$fileId"
}
# Upload a single package using the specified JSON file # Upload a single package using the specified JSON file
function AddPackage function AddPackage
{ {
packageFile=$1 packageFile=$1
if [ -z $packageFile ]; then if [ -z $packageFile ]; then
Bail "Error: Must specify a JSON-formatted file. Reference $packageJsonTemplate" Bail "Error: Must specify a JSON-formatted file. Reference $defaultPackageFile.template"
fi fi
if [ ! -f $packageFile ]; then if [ ! -f $packageFile ]; then
Bail "Error: Cannot add package - $packageFile does not exist" Bail "Error: Cannot add package - $packageFile does not exist"
fi fi
packageUrl=$(grep "sourceUrl" $packageFile | head -n 1 | awk '{print $2}') packageUrl=$(grep "sourceUrl" $packageFile | head -n 1 | awk '{print $2}')
echo "Adding package to $server [$packageUrl]" echo "Adding package to $server [$packageUrl]"
curl -i -k "$baseurl/v1/packages" --data @$packageFile -H "Content-Type: application/json" curl -i -k "$baseurl/v1/packages" --data @./$packageFile -H "Content-Type: application/json"
echo "" echo ""
} }
# Gets the package name and version and writes it to a file
function WritePackageInfoToFile
{
packageFile=$1
tmpOut=$(mktemp)
if [ -z "$packageFile" ]; then
Bail "Error: Must specify path to a deb/rpm package"
elif [ ! -f "$packageFile" ]; then
Bail "Error: Specified file $packageFile does not exist"
fi
if dpkg -I $packageFile > $tmpOut 2> /dev/null; then
>&2 echo "File is deb format"
pkgName=$(grep "^\s*Package:" $tmpOut | awk '{print $2}')
pkgVer=$(grep "^\s*Version:" $tmpOut | awk '{print $2}')
elif rpm -qpi $packageFile > $tmpOut 2> /dev/null; then
>&2 echo "File is rpm format"
pkgName=$(egrep "^Name" $tmpOut | tr -d ':' | awk '{print $2}')
pkgVer=$(egrep "^Version" $tmpOut | tr -d ':' | awk '{print $2}')
else
rm -f $tmpOut
Bail "File is not a valid deb/rpm package $url"
fi
rm -f $tmpOut
if [ -z "$pkgName" ]; then
Bail "Unable to parse package name for $url"
elif [ -z "$pkgVer" ]; then
Bail "Unable to parse package version number for $url"
fi
# Create Package .json file
outJson=$(mktemp)
escapedUrl=$(echo "$url" | sed 's/\//\\\//g' | sed 's/\&/\\\&/g')
cp $packageJsonTemplate $outJson
sed -i "s/PACKAGENAME/$pkgName/g" $outJson
sed -i "s/PACKAGEVERSION/$pkgVer/g" $outJson
# Return path to json file
echo $outJson
}
# Upload a single package by dynamically creating a JSON file using a provided URL # Upload a single package by dynamically creating a JSON file using a provided URL
function AddPackageByUrl function AddPackageByUrl
{ {
@@ -229,20 +119,41 @@ function AddPackageByUrl
Bail "Unable to publish package because no URL was specified" Bail "Unable to publish package because no URL was specified"
fi fi
tmpFile=$(mktemp) tmpFile=$(mktemp)
tmpOut=$(mktemp)
if ! wget -q "$url" -O $tmpFile; then if ! wget -q "$url" -O $tmpFile; then
rm -f $tmpFile rm -f $tmpFile $tmpFile
Bail "Unable to download URL $url" Bail "Unable to download URL $url"
elif dpkg -I $tmpFile > $tmpOut 2> /dev/null; then
echo "File is deb format"
pkgName=$(grep "^\s*Package:" $tmpOut | awk '{print $2}')
pkgVer=$(grep "^\s*Version:" $tmpOut | awk '{print $2}')
elif rpm -qpi $tmpFile > $tmpOut 2> /dev/null; then
echo "File is rpm format"
pkgName=$(egrep "^Name" $tmpOut | tr -d ':' | awk '{print $2}')
pkgVer=$(egrep "^Version" $tmpOut | tr -d ':' | awk '{print $2}')
else
rm -f $tmpFile $tmpOut
Bail "File is not a valid deb/rpm package $url"
fi
rm -f $tmpFile $tmpOut
if [ -z "$pkgName" ]; then
Bail "Unable to parse package name for $url"
elif [ -z "$pkgVer" ]; then
Bail "Unable to parse package version number for $url"
fi fi
jsonFile=$(WritePackageInfoToFile $tmpFile)
# Create Package .json file # Create Package .json file
escapedUrl=$(echo "$url" | sed 's/\//\\\//g' | sed 's/\&/\\\&/g') escapedUrl=$(echo "$url" | sed 's/\//\\\//g' | sed 's/\&/\\\&/g')
sed -i "s/PACKAGEURL/$escapedUrl/g" $jsonFile cp $defaultPackageFile.template $defaultPackageFile
sed -i "s/REPOSITORYID/$repositoryId/g" $jsonFile sed -i "s/PACKAGENAME/$pkgName/g" $defaultPackageFile
sed -i "s/PACKAGEVERSION/$pkgVer/g" $defaultPackageFile
sed -i "s/PACKAGEURL/$escapedUrl/g" $defaultPackageFile
sed -i "s/REPOSITORYID/$repositoryId/g" $defaultPackageFile
# Perform Upload # Perform Upload
AddPackage $jsonFile AddPackage $defaultPackageFile
# Cleanup # Cleanup
rm -f $jsonFile rm -f $defaultPackageFile
} }
# Upload multiple packages by reading urls line-by-line from the specified file # Upload multiple packages by reading urls line-by-line from the specified file
@@ -269,7 +180,7 @@ function CheckUpload {
if [ -z "$id" ]; then if [ -z "$id" ]; then
Bail "Must specify an ID" Bail "Must specify an ID"
fi fi
curl -s -k $baseurl/v1/packages/queue/$id | jq curl -k $baseurl/v1/packages/queue/$id
echo "" echo ""
} }
@@ -321,20 +232,6 @@ while (( "$#" )); do
operation=AddPackages operation=AddPackages
shift shift
operand="$1" operand="$1"
elif [[ "$1" == "-addfile" ]]; then
operation=AddFile
shift
operand="$1"
elif [[ "$1" == "-uploadfile" ]]; then
operation=UploadFile
shift
operand="$1"
elif [[ "$1" == "-listfiles" ]]; then
operation=ListFiles
elif [[ "$1" == "-delfile" ]]; then
operation=DeleteFile
shift
operand="$1"
elif [[ "$1" == "-check" ]]; then elif [[ "$1" == "-check" ]]; then
operation=CheckUpload operation=CheckUpload
shift shift

View File

@@ -1,413 +0,0 @@
phases:
- phase: Windows
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
queue:
name: Hosted VS2017
parallel: 2
matrix:
x64:
VSCODE_ARCH: x64
ia32:
VSCODE_ARCH: ia32
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- powershell: |
$ErrorActionPreference = "Stop"
"machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js
- powershell: |
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-copy-inno-updater"
name: build
- powershell: |
$ErrorActionPreference = "Stop"
npm run gulp -- "electron-$(VSCODE_ARCH)"
.\scripts\test.bat --build --tfs
# yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
name: test
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-229803",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- powershell: |
$ErrorActionPreference = "Stop"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-setup"
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: VSCodeSetup.exe
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-229803",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- powershell: |
$ErrorActionPreference = "Stop"
$Repo = "$(pwd)"
$Root = "$Repo\.."
$Exe = "$Repo\.build\win32-$(VSCODE_ARCH)\setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
$Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
$env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"
$assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }
node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip
node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $Exe
# publish hockeyapp symbols
$hockeyAppId = if ("$(VSCODE_ARCH)" -eq "ia32") { "$(VSCODE_HOCKEYAPP_ID_WIN32)" } else { "$(VSCODE_HOCKEYAPP_ID_WIN64)" }
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" $hockeyAppId
- phase: Linux
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
queue: linux-x64
variables:
VSCODE_ARCH: x64
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
name: build
- script: |
set -e
npm run gulp -- "electron-$(VSCODE_ARCH)"
DISPLAY=:10 ./scripts/test.sh --build --tfs
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
name: test
- script: |
set -e
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
#npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-snap"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
./build/tfs/linux/release2.sh "$(VSCODE_ARCH)" "$(LINUX_REPO_PASSWORD)"
# publish hockeyapp symbols
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX64)"
- phase: Linux32
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
queue: linux-ia32
variables:
VSCODE_ARCH: ia32
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
name: build
- script: |
set -e
npm run gulp -- "electron-$(VSCODE_ARCH)"
DISPLAY=:10 ./scripts/test.sh --build --tfs
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
name: test
- script: |
set -e
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
#npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-snap"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
./build/tfs/linux/release2.sh "$(VSCODE_ARCH)" "$(LINUX_REPO_PASSWORD)"
# publish hockeyapp symbols
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX32)"
- phase: macOS
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
queue: Hosted macOS Preview
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps
name: build
- script: |
set -e
./scripts/test.sh --build --tfs
APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
name: test
- script: |
set -e
# archive the unsigned build
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin-unsigned.zip * && popd
# publish the unsigned build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/tfs/common/publish.js \
"$(VSCODE_QUALITY)" \
darwin \
archive-unsigned \
"VSCode-darwin-$(VSCODE_QUALITY)-unsigned.zip" \
$VERSION \
false \
../VSCode-darwin-unsigned.zip
# publish hockeyapp symbols
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_MACOS)"
# enqueue the unsigned build
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
node build/tfs/common/enqueue.js "$(VSCODE_QUALITY)"
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
npm run gulp -- upload-vscode-configuration

View File

@@ -14,20 +14,15 @@ Param(
# Set the right architecture # Set the right architecture
$env:npm_config_arch="$arch" $env:npm_config_arch="$arch"
$env:CHILD_CONCURRENCY="1"
step "Install dependencies" { step "Install dependencies" {
exec { & yarn } exec { & npm install }
} }
step "Hygiene" { step "Hygiene" {
exec { & npm run gulp -- hygiene } exec { & npm run gulp -- hygiene }
} }
step "Monaco Editor Check" {
exec { & .\node_modules\.bin\tsc -p .\src\tsconfig.monaco.json --noEmit }
}
$env:VSCODE_MIXIN_PASSWORD = $mixinPassword $env:VSCODE_MIXIN_PASSWORD = $mixinPassword
step "Mix in repository from vscode-distro" { step "Mix in repository from vscode-distro" {
exec { & npm run gulp -- mixin } exec { & npm run gulp -- mixin }
@@ -45,10 +40,6 @@ step "Build minified" {
exec { & npm run gulp -- "vscode-win32-$global:arch-min" } exec { & npm run gulp -- "vscode-win32-$global:arch-min" }
} }
step "Copy Inno updater" {
exec { & npm run gulp -- "vscode-win32-$global:arch-copy-inno-updater" }
}
# step "Create loader snapshot" { # step "Create loader snapshot" {
# exec { & node build\lib\snapshotLoader.js --arch=$global:arch } # exec { & node build\lib\snapshotLoader.js --arch=$global:arch }
# } # }

View File

@@ -1,212 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- powershell: |
"machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
npm run gulp -- mixin
node build/tfs/common/installDistro.js
- powershell: |
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min"
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-copy-inno-updater"
name: build
- powershell: |
npm run gulp -- "electron-$(VSCODE_ARCH)"
.\scripts\test.bat --build --tfs
name: test
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-229803",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- powershell: |
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-setup"
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: VSCodeSetup.exe
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-229803",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- powershell: |
$Repo = "$(pwd)"
$Root = "$Repo\.."
$Exe = "$Repo\.build\win32-$(VSCODE_ARCH)\setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
$Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
$env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"
$assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }
node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip
node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $Exe

View File

@@ -17,10 +17,9 @@ Param(
# Set the right architecture # Set the right architecture
$env:npm_config_arch="$arch" $env:npm_config_arch="$arch"
$env:CHILD_CONCURRENCY="1"
step "Install dependencies" { step "Install dependencies" {
exec { & yarn } exec { & npm install }
} }
step "Hygiene" { step "Hygiene" {

View File

@@ -6,7 +6,6 @@ $env:HOME=$env:USERPROFILE
if (Test-Path env:AGENT_WORKFOLDER) { if (Test-Path env:AGENT_WORKFOLDER) {
$env:HOME="${env:AGENT_WORKFOLDER}\home" $env:HOME="${env:AGENT_WORKFOLDER}\home"
$env:npm_config_cache="${env:HOME}\npm-cache" $env:npm_config_cache="${env:HOME}\npm-cache"
$env:YARN_CACHE_FOLDER="${env:HOME}\yarn-cache"
$env:npm_config_devdir="${env:HOME}\npm-devdir" $env:npm_config_devdir="${env:HOME}\npm-devdir"
New-Item -Path "$env:HOME" -Type directory -Force | out-null New-Item -Path "$env:HOME" -Type directory -Force | out-null
New-Item -Path "$env:npm_config_cache" -Type directory -Force | out-null New-Item -Path "$env:npm_config_cache" -Type directory -Force | out-null

Some files were not shown because too many files have changed in this diff Show More