Compare commits

..

4 Commits

Author        SHA1        Message                                            Date
Karl Burtram  8095643ed4  Merge branch 'master' into release/0.25            2018-01-12 15:43:19 -08:00
Karl Burtram  96a3ded120  Merge branch 'master' into release/0.25            2018-01-11 17:33:31 -08:00
Karl Burtram  bd3aa9c3cf  Merge branch 'master' into release/0.25            2018-01-10 22:08:15 -08:00
Cory Rivera   b765e5aa90  Add updater service url to product.json. (#467)   2018-01-10 16:51:56 -08:00
12319 changed files with 236278 additions and 533056 deletions


@@ -11,6 +11,6 @@ trim_trailing_whitespace = true
# The indent size used in the `package.json` file cannot be changed
# https://github.com/npm/npm/pull/3180#issuecomment-16336516
[{*.yml,*.yaml,package.json}]
[{*.yml,*.yaml,npm-shrinkwrap.json,package.json}]
indent_style = space
indent_size = 2


@@ -10,10 +10,5 @@
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
"extends": "eslint:recommended"
}

.gitattributes

@@ -1,9 +0,0 @@
* text=auto
LICENSE.txt eol=crlf
ThirdPartyNotices.txt eol=crlf
*.bat eol=crlf
*.cmd eol=crlf
*.ps1 eol=lf
*.sh eol=lf


@@ -1,14 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
---
<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Also please test using the latest insiders build to make sure your issue has not already been fixed. -->
<!-- Use Help > Report Issue to prefill these. -->
- Azure Data Studio Version:
Steps to Reproduce:
1.

.gitignore

@@ -1,20 +1,29 @@
.DS_Store
npm-debug.log
Thumbs.db
.DS_Store
*.dat
*.db
*.exe
*.log
*.nupkg
*.orig
*.vsix
*BROWSE.VC*
sqltoolsservice
coverage
test-reports
.vscode-test
node_modules/
.build/
.vs/
out/
out-build/
out-editor/
out-editor-src/
out-editor-build/
out-editor-esm/
out-editor-min/
out-monaco-editor-core/
out-vscode/
out-vscode-min/
build/node_modules
coverage/
test_data/
test-results/
yarn-error.log
_site

.nvmrc

@@ -1 +0,0 @@
8.9.2

.travis.yml

@@ -0,0 +1,54 @@
sudo: false
language: cpp
os:
- linux
- osx
cache:
directories:
- $HOME/.npm
notifications:
email: false
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-4.9
- g++-4.9
- gcc-4.9-multilib
- g++-4.9-multilib
- zip
- libgtk2.0-0
- libx11-dev
- libxkbfile-dev
- libsecret-1-dev
before_install:
- git submodule update --init --recursive
- git clone --depth 1 https://github.com/creationix/nvm.git ./.nvm
- source ./.nvm/nvm.sh
- nvm install 7.9.0
- nvm use 7.9.0
- npm config set python `which python`
- npm install -g gulp
- if [ $TRAVIS_OS_NAME == "linux" ]; then
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0;
sh -e /etc/init.d/xvfb start;
sleep 3;
fi
install:
- ./scripts/npm.sh install
script:
- gulp electron --silent
- gulp compile --silent --max_old_space_size=4096
- gulp optimize-vscode --silent --max_old_space_size=4096
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi
after_success:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then node_modules/.bin/coveralls < .build/coverage/lcov.info; fi


@@ -1,9 +1,9 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=827846
// See http://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
"eg2.tslint",
"dbaeumer.vscode-eslint",
"msjsdiag.debugger-for-chrome"
]
}
}

.vscode/launch.json

@@ -1,6 +1,7 @@
{
"version": "0.1.0",
"configurations": [
{
"type": "node",
"request": "launch",
@@ -8,7 +9,7 @@
"program": "${workspaceFolder}/node_modules/gulp/bin/gulp.js",
"stopOnEntry": true,
"args": [
"hygiene"
"watch-extension:json-client"
],
"cwd": "${workspaceFolder}"
},
@@ -66,13 +67,13 @@
{
"type": "chrome",
"request": "attach",
"name": "Attach to azuredatastudio",
"name": "Attach to sqlops",
"port": 9222
},
{
"type": "chrome",
"request": "launch",
"name": "Launch azuredatastudio",
"name": "Launch sqlops",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
},
@@ -86,11 +87,7 @@
"runtimeArgs": [
"--inspect=5875"
],
"skipFiles": [
"**/winjs*.js"
],
"webRoot": "${workspaceFolder}",
"timeout": 45000
"webRoot": "${workspaceFolder}"
},
{
"type": "node",
@@ -98,17 +95,15 @@
"name": "Unit Tests",
"protocol": "inspector",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
"runtimeExecutable": "${workspaceFolder}/.build/electron/SQL Operations Studio.app/Contents/MacOS/Electron",
"windows": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
"runtimeExecutable": "${workspaceFolder}/.build/electron/sqlops.exe"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
"runtimeExecutable": "${workspaceFolder}/.build/electron/sqlops"
},
"stopOnEntry": false,
"outputCapture": "std",
"args": [
"--delay",
"--timeout",
"2000"
],
@@ -119,38 +114,22 @@
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"name": "Launch Built-in Extension",
"type": "extensionHost",
"request": "launch",
"runtimeExecutable": "${execPath}",
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
]
}
],
"compounds": [
{
"name": "Debug azuredatastudio Main and Renderer",
"name": "Debug sqlops Main and Renderer",
"configurations": [
"Launch azuredatastudio",
"Launch sqlops",
"Attach to Main Process"
]
},
{
"name": "Search and Renderer processes",
"configurations": [
"Launch azuredatastudio",
"Launch sqlops",
"Attach to Search Process"
]
},
{
"name": "Renderer and Extension Host processes",
"configurations": [
"Launch azuredatastudio",
"Attach to Extension Host"
]
}
]
}
}

.vscode/settings.json

@@ -10,9 +10,6 @@
"when": "$(basename).ts"
}
},
"files.associations": {
"OSSREADME.json": "jsonc"
},
"search.exclude": {
"**/node_modules": true,
"**/bower_components": true,
@@ -37,12 +34,5 @@
"command": "${workspaceFolder}\\scripts\\test.bat --coverage --run ${file}"
}
}
],
"typescript.tsdk": "node_modules/typescript/lib",
"npm.exclude": "**/extensions/**",
"git.ignoreLimitWarning": true,
"emmet.excludeLanguages": [],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.quoteStyle": "single"
}
]
}

.vscode/tasks.json

@@ -33,11 +33,11 @@
"task": "tslint",
"label": "Run tslint",
"problemMatcher": [
"$tslint5"
"$tslint4"
]
},
{
"label": "Run tests",
"taskName": "Run tests",
"type": "shell",
"command": "./scripts/test.sh",
"windows": {
@@ -50,7 +50,7 @@
}
},
{
"label": "Run Dev",
"taskName": "Run Dev",
"type": "shell",
"command": "./scripts/code.sh",
"windows": {


@@ -1,3 +0,0 @@
disturl "https://atom.io/download/electron"
target "2.0.9"
runtime "electron"


@@ -1,210 +1,6 @@
# Change Log
## Version 1.2.4
* Release date: November 6, 2018
* Release status: General Availability
## What's new in this version
* Update to the SQL Server 2019 Preview extension
* Introducing Paste the Plan extension
* Introducing High Color queries extension, including SSMS editor theme
* Fixes in SQL Server Agent, Profiler, and Import extensions
* Fix .Net Core Socket KeepAlive issue causing dropped inactive connections on macOS
* Upgrade SQL Tools Service to .Net Core 2.2 Preview 3 (for eventual AAD support)
* Fix customer reported GitHub issues
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* rdaniels6813 for `Add query plan theme support #3031`
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
## Version 1.1.3
* Release date: October 18, 2018
* Release status: General Availability
## What's new in this version
* Introducing the Azure Resource Explorer to browse Azure SQL Databases
* Improve Object Explorer and Query Editor connectivity robustness
* SQL Server 2019 and SQL Agent extension improvements
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* philoushka for `center the icon #2760`
* anthonypants for `Typo #2775`
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
* kstolte for `Fixing a reference to SQL Ops Studio #2788`
## Version 1.0.0
* Release date: September 24, 2018
* Release status: General Availability
## What's new in this version
* Announcing the SQL Server 2019 Preview extension.
* Support for SQL Server 2019 preview features including big data cluster support.
* Azure Data Studio Notebooks
* The Azure Resource Explorer viewlet lets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported.
* SQL Server Polybase Create External Table Wizard
* Query Results Grid performance and UX improvements for large number of result sets.
* Visual Studio Code source code refresh from 1.23 to 1.26.1 with Grid Layout and Improved Settings Editor (preview).
* Accessibility improvements for screen reader, keyboard navigation and high-contrast.
* Added Connection name option to provide an alternative display name in the Servers viewlet.
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* AlexFsmn `Feature: Ability to add connection name #2332`
* AlexFsmn `Disabled connection name input when connecting to a server. #2566`
## Version 0.33.7
* Release date: August 30, 2018
* Release status: Public Preview
## What's new in this version
* Announcing the SQL Server Import Extension
* SQL Server Profiler Session management
* SQL Server Agent improvements
* New community extension: First Responder Kit
* Quality of Life improvements: Connection strings
* Fix many customer reported GitHub issues
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* SebastianPfliegel `Added more saveAsCsv options #2099`
* ianychoi `Fixes a typo: Mimunum -> Minimum #1994`
* AlexFsmn `Fixed bug where proper file extension wasn't appended to filename. #2151`
* AlexFsmn `Added functionality for adding any file to import wizard #2329`
* AlexFsmn `Fixed background issue when copying a chart to clipboard #2215`
* AlexFsmn `Fixed problem where vertical charts didn't display labels correctly. #2263`
* AlexFsmn `Fixed Initial values for charts to match visuals #2266`
* AlexFsmn `Renamed chart option labels #2264`
* AlexFsmn `Added feature for opening file after exporting to CSV/XLS/JSON & query files #2216`
* AlexFsmm `Get Connection String should copy to clipboard #2175`
## Version 0.31.4
* Release date: July 19, 2018
* Release status: Public Preview
## What's new in this version
* SQL Server Agent for Azure Data Studio extension improvements
* Added view of Alerts, Operators, and Proxies and icons on left pane
* Added dialogs for New Job, New Job Step, New Alert, and New Operator
* Added Delete Job, Delete Alert, and Delete Operator (right-click)
* Added Previous Runs visualization
* Added Filters for each column name
* SQL Server Profiler for Azure Data Studio extension improvements
* Added Hotkeys to quickly launch and start/stop Profiler
* Added 5 Default Templates to view Extended Events
* Added Server/Database connection name
* Added support for Azure SQL Database instances
* Added suggestion to exit Profiler when tab is closed when Profiler is still running
* Release of Combine Scripts Extension
* Wizard and Dialog Extensibility
* Fix GitHub Issues
## Version 0.30.6
* Release date: June 20, 2018
* Release status: Public Preview
## What's new in this version
* **SQL Server Profiler for Azure Data Studio *Preview*** extension initial release
* The new **SQL Data Warehouse** extension includes rich customizable dashboard widgets surfacing insights to your data warehouse. This unlocks key scenarios around managing and tuning your data warehouse to ensure it is optimized for consistent performance.
* **Edit Data "Filtering and Sorting"** support
* **SQL Server Agent for Azure Data Studio *Preview*** extension enhancements for Jobs and Job History views
* Improved **Wizard & Dialog UI Builder Framework** extensibility APIs
* Update VS Code Platform source code integrating [March 2018 (1.22)](https://code.visualstudio.com/updates/v1_22) and [April 2018 (1.23)](https://code.visualstudio.com/updates/v1_23) releases
* Fix GitHub Issues
## Version 0.29.3
* Release date: May 7, 2018
* Release status: Public Preview
## What's new in this version
The May release is focused on stabilization and bug fixes leading up to the Build conference. This build contains the following highlights.
* Announcing **Redgate SQL Search** extension available in Extension Manager
* Community Localization available for 10 languages: **German, Spanish, French, Italian, Japanese, Korean, Portuguese, Russian, Simplified Chinese and Traditional Chinese!**
* Reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement)
* Extension Manager has improved Marketplace experience to easily discover community extensions
* SQL Agent extension Jobs and Job History view improvement
* Updates for **whoisactive** and **Server Reports** extensions
* Continue to fix GitHub issues
## Version 0.28.6
* Release date: April 25, 2018
* Release status: Public Preview
## What's new in this version
The April Public Preview release contains some of the following highlights.
* Improvements to SQL Agent *Preview* extension
* Accessibility improvements for keyboard navigation, screen reader support and high-contrast mode.
* Improved large and protected file support for saving Admin protected and >256M files within SQL Ops Studio
* Integrated Terminal splitting to work with multiple open terminals at once
* Reduced installation on-disk file count footprint for faster installs and startup times
* Improvements to Server Reports extension
* Continue to fix GitHub issues
## Version 0.27.3
* Release date: March 28, 2018
* Release status: Public Preview
## What's new in this version
The March Public Preview release enables some key aspects of the Azure Data Studio
extensibility story. Here are some highlights in this release.
* Enhance the Manage Dashboard extensibility model to support tabbed Insights and Configuration panes
* Dashboard Insights extensions for `sp_whoisactive` from [whoisactive.com](http://whoisactive.com)
* Extension Manager enables simple acquisition of 1st-party and 3rd-party extensions
* Add additional Extensibility APIs for `connection` and `objectexplorer` management
* Community Localization open for 10 languages
* Continue to fix important customer impacting GitHub issues
## Version 0.26.7
* Release date: February 16, 2018
* Release status: Public Preview Hotfix 1
## What's new in this version
* Bug fix for `#717 Selecting partial query and hitting Cmd or Ctrl+C opens terminal with Error message`
## Version 0.26.6
* Release date: February 15, 2018
* Release status: Public Preview
## What's new in this version
The February release fixes several important customer reported issues, as well as various feature improvements. We've also introduced auto-update support in February which will simplify keeping updated with the latest changes.
Here's some of the highlights in the February release.
* Support Auto-Update installation on Windows and macOS
* Publish RPM and DEB packages to official Microsoft repos
* Fix `#6 Keep connection and selected database when opening new query tabs`
* Fix `#22 'Server Name' and 'Database Name' - Can these be drop downs instead of text boxes?`
* Fix #481 Add "Check for updates" option.
* SQL Editor colorization and auto-completion fixes
* `#584 Keyword "FULL" not highlighted by IntelliSense`
* `#345 Colorize SQL functions within the editor`
* `#300 [#tempData] latest "]" will display green color`
* `#225 Keyword color mismatch`
* `#60 invalid sql syntax color highlighting when using temporary table in from clause`
* Introduce Connection extensibility API
* VS Code Editor 1.19 integration
* Update JustinPealing/html-query-plan component to pick-up several Query Plan viewer improvements
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)`
## Version 0.25.4
## Version 0.25.2
* Release date: January 17, 2018
* Release status: Public Preview
@@ -227,7 +23,6 @@ Here's some of the highlights in the January release.
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* alextercete for `Fix "No extension gallery service configured" error (#427)`
* SebastianPfliegel for `Add cursor snippet (#475)`
## Version 0.24.1
* Release date: December 19, 2017


@@ -1,13 +1,13 @@
## Contributing Issues
### Before Submitting an Issue
First, please do a search in [open issues](https://github.com/Microsoft/azuredatastudio/issues) to see if the issue or feature request has already been filed. Use this [query](https://github.com/Microsoft/azuredatastudio/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc) to search for the most popular feature requests.
First, please do a search in [open issues](https://github.com/Microsoft/sqlopsstudio/issues) to see if the issue or feature request has already been filed. Use this [query](https://github.com/Microsoft/sqlopsstudio/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc) to search for the most popular feature requests.
If you find your issue already exists, make relevant comments and add your [reaction](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments). Use a reaction in place of a "+1" comment.
:+1: - upvote
👍 - upvote
:-1: - downvote
👎 - downvote
If you cannot find an existing issue that describes your bug or feature, submit an issue using the guidelines below.
@@ -18,29 +18,29 @@ File a single issue per problem and feature request.
* Do not enumerate multiple bugs or feature requests in the same issue.
* Do not add your issue as a comment to an existing issue unless it's for the identical input. Many issues look similar, but have different causes.
The more information you can provide, the more likely someone will be successful reproducing the issue and finding a fix.
The more information you can provide, the more likely someone will be successful reproducing the issue and finding a fix.
Please include the following with each issue.
Please include the following with each issue.
* Version of Azure Data Studio (formerly SQL Operations Studio).
* Version of SQL Ops Studio
> **Tip:** You can easily create an issue using `Report Issues` from Azure Data Studio Help menu.
> **Tip:** You can easily create an issue using `Report Issues` from SQL Operations Studio Help menu.
* Reproducible steps (1... 2... 3...) and what you expected versus what you actually saw.
* Images, animations, or a link to a video.
* A code snippet that demonstrates the issue or a link to a code repository we can easily pull down onto our machine to recreate the issue.
* Reproducible steps (1... 2... 3...) and what you expected versus what you actually saw.
* Images, animations, or a link to a video.
* A code snippet that demonstrates the issue or a link to a code repository we can easily pull down onto our machine to recreate the issue.
> **Note:** Because we need to copy and paste the code snippet, including a code snippet as a media file (i.e. .gif) is not sufficient.
> **Note:** Because we need to copy and paste the code snippet, including a code snippet as a media file (i.e. .gif) is not sufficient.
* Errors in the Dev Tools Console (Help | Toggle Developer Tools)
Please remember to do the following:
* Search the issue repository to see if there exists a duplicate.
* Simplify your scripts around the issue so we can better isolate the problem.
* Search the issue repository to see if there exists a duplicate.
* Simplify your scripts around the issue so we can better isolate the problem.
Don't feel bad if we can't reproduce the issue and ask for more information!
## Contributing Fixes
If you are interested in fixing issues and contributing directly to the code base,
please see the document [How to Contribute](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute).
please see the document [How to Contribute](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute).


@@ -1,14 +1,14 @@
MICROSOFT SOFTWARE LICENSE TERMS
MICROSOFT AZURE DATA STUDIO
MICROSOFT SQL OPERATIONS STUDIO
Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us
("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or
otherwise. Unless applicable law gives you more rights, Microsoft reserves all
other rights not expressly granted herein, whether by implication, estoppel or
("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or
otherwise. Unless applicable law gives you more rights, Microsoft reserves all
other rights not expressly granted herein, whether by implication, estoppel or
otherwise.
THE SOFTWARE CODE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY

File diff suppressed because it is too large.


@@ -1,26 +1,21 @@
# Azure Data Studio
# SQL Operations Studio
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status](https://dev.azure.com/ms/azuredatastudio/_apis/build/status/Microsoft.azuredatastudio)](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=4)
SQL Operations Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
**Download the latest Azure Data Studio release**
**Download SQL Operations Studio December Public Preview**
Platform | Link
-- | --
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=2038320
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2038323
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2038327
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2038332
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2038401
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2038405
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=865305
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=865304
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=865306
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=865307
Linux DEB | https://go.microsoft.com/fwlink/?linkid=865308
Linux RPM | https://go.microsoft.com/fwlink/?linkid=865309
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
Go to our [download page](https://aka.ms/sqlopsstudio) for more specific instructions.
Try out the latest insiders build from `master` at https://github.com/Microsoft/azuredatastudio/releases.
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
Try out the latest insiders build from `master` at https://github.com/Microsoft/sqlopsstudio/releases.
**Feature Highlights**
@@ -35,83 +30,25 @@ See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CH
- Task History window to view current task execution status, completion results with error messages and task T-SQL scripting
- Scripting support to generate CREATE, SELECT, ALTER and DROP statements for database objects
- Workspaces with full Git integration and Find In Files support to managing T-SQL script libraries
- Modern light-weight shell with theming, user settings, full-screen support, integrated terminal and numerous other features
- Modern light-weight shell with theming, user settings, full screen support, integrated terminal and numerous other features
Here are some of these features in action.
Here's some of these features in action.
<img src='https://github.com/Microsoft/azuredatastudio/blob/master/docs/overview_screen.jpg' width='800px'>
<img src='https://github.com/Microsoft/sqlopsstudio/blob/master/docs/overview_screen.jpg' width='800px'>
## Contributing
If you are interested in fixing issues and contributing directly to the code base,
please see the document [How to Contribute](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute), which covers the following:
please see the document [How to Contribute](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute), which covers the following:
* [How to build and run from source](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#Build-and-Run-From-Source)
* [The development workflow, including debugging and running tests](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#development-workflow)
* [Submitting pull requests](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests)
* [How to build and run from source](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute#Build-and-Run-From-Source)
* [The development workflow, including debugging and running tests](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute#development-workflow)
* [Submitting pull requests](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute#pull-requests)
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## Localization
Azure Data Studio localization is now open for community contributions. You can contribute to localization for both software and docs. https://aka.ms/SQLOpsStudioLoc
Localization is now opened for 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). Help us make Azure Data Studio available in your language!
## Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
## Contributions and "Thank You"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* rdaniels6813 for `Add query plan theme support #3031`
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
* philoushka for `center the icon #2760`
* anthonypants for `Typo #2775`
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
* kstolte for `Fixing a reference to SQL Ops Studio #2788`
* AlexFsmn `Feature: Ability to add connection name #2332`
* AlexFsmn `Disabled connection name input when connecting to a server. #2566`
* SebastianPfliegel `Added more saveAsCsv options #2099`
* ianychoi `Fixes a typo: Mimunum -> Minimum #1994`
* AlexFsmn `Fixed bug where proper file extension wasn't appended to the filename. #2151`
* AlexFsmn `Added functionality for adding any file to import wizard #2329`
* AlexFsmn `Fixed background issue when copying a chart to clipboard #2215`
* AlexFsmn `Fixed problem where vertical charts didn't display labels correctly. #2263`
* AlexFsmn `Fixed Initial values for charts to match visuals #2266`
* AlexFsmn `Renamed chart option labels #2264`
* AlexFsmn `Added feature for the opening file after exporting to CSV/XLS/JSON & query files #2216`
* AlexFsmm `Get Connection String should copy to clipboard #2175`
* lanceklinger `Fix for double-clicking column handle in results table #1504`
* westerncj for `Removed duplicate contribution from README.md (#753)`
* ntovas for `Fix for duplicate extensions shown in "Save File" dialog. (#779)`
* SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for the fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)`
* alextercete for `Fix "No extension gallery service configured" error (#427)`
* mwiedemeyer for `Fix #58: Default sort order for DB size widget (#111)`
* AlexTroshkin for `Show disconnect in context menu only when connectionProfile connected (#150)`
* AlexTroshkin for `Fix #138: Invalid syntax color highlighting (identity not highlighting) (#140))`
* stebet for `Fix #153: Fixing sql snippets that failed on a DB with a case-sensitive collation. (#152)`
* SebastianPfliegel `Remove sqlExtensionHelp (#312)`
* olljanat for `Implemented npm version check (#314)`
* Adam Machanic for helping with the `whoisactive` extension
* All community localization contributors:
* French: Adrien Clerbois, ANAS BELABBES, Antoine Griffard, Arian Papillon, Eric Macarez, Eric Van Thorre, Jérémy LANDON, Matthias GROSPERRIN, Maxime COQUEREL, Olivier Guinart, thierry DEMAN-BARCELÒ, Thomas Potier
* Italian: Aldo Donetti, Alessandro Alpi, Andrea Dottor, Bruni Luca, Gianluca Hotz, Luca Nardi, Luigi Bruno, Marco Dal Pino, Mirco Vanini, Pasquale Ceglie, Riccardo Cappello, Sergio Govoni, Stefano Demiliani
* German: Anna Henke-Gunvaldson, Ben Weissman, David Ullmer, J.M. ., Kai Modo, Konstantin Staschill, Kostja Klein, Lennart Trunk, Markus Ehrenmüller-Jensen, Mascha Kroenlein, Matthias Knoll, Mourad Louha, Thomas Hütter, Wolfgang Straßer
* Spanish: Alberto Poblacion, Andy Gonzalez, Carlos Mendible, Christian Araujo, Daniel D, Eickhel Mendoza, Ernesto Cardenas, Ivan Toledo Ivanovic, Fran Diaz, JESUS GIL, Jorge Serrano Pérez, José Saturnino Pimentel Juárez, Mauricio Hidalgo, Pablo Iglesias, Rikhardo Estrada Rdez, Thierry DEMAN, YOLANDA CUESTA ALTIERI
* Japanese: Fujio Kojima, Kazushi KAMEGAWA, Masayoshi Yamada, Masayuki Ozawa, Seiji Momoto, Takashi Kanai, Takayoshi Tanaka, Yoshihisa Ozaki, 庄垣内治
* Chinese (simplified): DAN YE, Joel Yang, Lynne Dong, RyanYu Zhang, Sheng Jiang, Wei Zhang, Zhiliang Xu
* Chinese (Traditional): Bruce Chen, Chiayi Yen, Kevin Yang, Winnie Lin, 保哥 Will, 謝政廷
* Korean: Do-Kyun Kim, Evelyn Kim, Helen Jung, Hong Jmee, jeongwoo choi, Jun Hyoung Lee, Jungsun Kim정선, Justin Yoo, Kavrith mucha, Kiwoong Youm, MinGyu Ju, MVP_JUNO BEA, Sejun Kim, SOONMAN KWON, sung man ko, Yeongrak Choi, younggun kim, Youngjae Kim, 소영 이
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
And of course, we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/ThirdPartyNotices.txt)
## License
Copyright (c) Microsoft Corporation. All rights reserved.


@@ -1,4 +1,4 @@
MICROSOFT Azure Data Studio
MICROSOFT SQL OPERATIONS STUDIO
THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
Do Not Translate or Localize
@@ -21,7 +21,6 @@ expressly granted herein, whether by implication, estoppel or otherwise.
error-ex: https://github.com/Qix-/node-error-ex
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
fast-plist: https://github.com/Microsoft/node-fast-plist
find-remove: https://www.npmjs.com/package/find-remove
fs-extra: https://github.com/jprichardson/node-fs-extra
gc-signals: https://github.com/Microsoft/node-gc-signals
getmac: https://github.com/bevry/getmac
@@ -34,7 +33,6 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
JupyterLab: https://github.com/jupyterlab/jupyterlab
make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment
@@ -1167,43 +1165,6 @@ That's all there is to it!
=========================================
END OF jschardet NOTICES AND INFORMATION
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
Copyright (c) 2015 Project Jupyter Contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Semver File License
===================
The semver.py file is from https://github.com/podhmo/python-semver
which is licensed under the "MIT" license. See the semver.py file for details.
END OF JupyterLab NOTICES AND INFORMATION
%% make-error NOTICES AND INFORMATION BEGIN HERE
=========================================
ISC © Julien Fontanet
@@ -2078,4 +2039,4 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
=========================================
END OF zone.js NOTICES AND INFORMATION
END OF zone.js NOTICES AND INFORMATION

appveyor.yml

@@ -0,0 +1,21 @@
environment:
ELECTRON_RUN_AS_NODE: 1
VSCODE_BUILD_VERBOSE: true
cache:
- '%APPDATA%\npm-cache'
install:
- ps: Install-Product node 7.9.0 x64
- npm install -g npm@4 --silent
build_script:
- .\scripts\npm.bat install
- .\node_modules\.bin\gulp electron
- npm run compile
test_script:
- node --version
- npm --version
- .\scripts\test.bat
- .\scripts\test-integration.bat


@@ -1,38 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '8.x'
displayName: 'Install Node.js'
- script: |
git submodule update --init --recursive
nvm install 8.9.1
nvm use 8.9.1
npm i -g yarn
displayName: 'preinstall'
- script: |
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0
sh -e /etc/init.d/xvfb start
sleep 3
displayName: 'Linux preinstall'
condition: eq(variables['Agent.OS'], 'Linux')
- script: |
yarn
displayName: 'Install'
- script: |
node_modules/.bin/gulp electron --silent
node_modules/.bin/gulp compile --silent --max_old_space_size=4096
node_modules/.bin/gulp optimize-vscode --silent --max_old_space_size=4096
displayName: 'Scripts'
- script: |
./scripts/test.sh --reporter mocha-junit-reporter
displayName: 'Tests'
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
condition: succeededOrFailed()


@@ -1,26 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '8.9'
displayName: 'Install Node.js'
- script: |
yarn
displayName: 'Yarn Install'
- script: |
.\node_modules\.bin\gulp electron
displayName: 'Electron'
- script: |
npm run compile
displayName: 'Compile'
- script: |
.\scripts\test.bat --reporter mocha-junit-reporter
displayName: 'Test'
- task: PublishTestResults@2
inputs:
testResultsFiles: 'test-results.xml'
condition: succeededOrFailed()


@@ -1,29 +0,0 @@
trigger:
- master
- releases/*
jobs:
# All tasks on Windows
- job: build_all_windows
displayName: Build all tasks (Windows)
pool:
vmImage: vs2017-win2016
steps:
- template: azure-pipelines-windows.yml
# All tasks on Linux
- job: build_all_linux
displayName: Build all tasks (Linux)
pool:
vmImage: 'Ubuntu 16.04'
steps:
- template: azure-pipelines-linux-mac.yml
# All tasks on macOS
- job: build_all_darwin
displayName: Build all tasks (macOS)
pool:
vmImage: macos-10.13
steps:
- template: azure-pipelines-linux-mac.yml


@@ -1,12 +0,0 @@
[
{
"name": "ms-vscode.node-debug",
"version": "1.26.7",
"repo": "https://github.com/Microsoft/vscode-node-debug"
},
{
"name": "ms-vscode.node-debug2",
"version": "1.26.8",
"repo": "https://github.com/Microsoft/vscode-node-debug2"
}
]


@@ -1,20 +0,0 @@
{
"env": {
"node": true,
"es6": true,
"browser": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}


@@ -1,126 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const fs = require('fs');
const path = require('path');
const os = require('os');
// @ts-ignore review
const { remote } = require('electron');
const dialog = remote.dialog;
const builtInExtensionsPath = path.join(__dirname, '..', 'builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
function readJson(filePath) {
return JSON.parse(fs.readFileSync(filePath, { encoding: 'utf8' }));
}
function writeJson(filePath, obj) {
fs.writeFileSync(filePath, JSON.stringify(obj, null, 2));
}
function renderOption(form, id, title, value, checked) {
const input = document.createElement('input');
input.type = 'radio';
input.id = id;
input.name = 'choice';
input.value = value;
input.checked = !!checked;
form.appendChild(input);
const label = document.createElement('label');
label.setAttribute('for', id);
label.textContent = title;
form.appendChild(label);
return input;
}
function render(el, state) {
function setState(state) {
try {
writeJson(controlFilePath, state.control);
} catch (err) {
console.error(err);
}
el.innerHTML = '';
render(el, state);
}
const ul = document.createElement('ul');
const { builtin, control } = state;
for (const ext of builtin) {
const controlState = control[ext.name] || 'marketplace';
const li = document.createElement('li');
ul.appendChild(li);
const name = document.createElement('code');
name.textContent = ext.name;
li.appendChild(name);
const form = document.createElement('form');
li.appendChild(form);
const marketplaceInput = renderOption(form, `marketplace-${ext.name}`, 'Marketplace', 'marketplace', controlState === 'marketplace');
marketplaceInput.onchange = function () {
control[ext.name] = 'marketplace';
setState({ builtin, control });
};
const disabledInput = renderOption(form, `disabled-${ext.name}`, 'Disabled', 'disabled', controlState === 'disabled');
disabledInput.onchange = function () {
control[ext.name] = 'disabled';
setState({ builtin, control });
};
let local = undefined;
if (controlState !== 'marketplace' && controlState !== 'disabled') {
local = controlState;
}
const localInput = renderOption(form, `local-${ext.name}`, 'Local', 'local', !!local);
localInput.onchange = function () {
const result = dialog.showOpenDialog(remote.getCurrentWindow(), {
title: 'Choose Folder',
properties: ['openDirectory']
});
if (result && result.length >= 1) {
control[ext.name] = result[0];
}
setState({ builtin, control });
};
if (local) {
const localSpan = document.createElement('code');
localSpan.className = 'local';
localSpan.textContent = local;
form.appendChild(localSpan);
}
}
el.appendChild(ul);
}
function main() {
const el = document.getElementById('extensions');
const builtin = readJson(builtInExtensionsPath);
let control;
try {
control = readJson(controlFilePath);
} catch (err) {
control = {};
}
render(el, { builtin, control });
}
window.onload = main;


@@ -1,46 +0,0 @@
<!-- Copyright (C) Microsoft Corporation. All rights reserved. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>Manage Built-in Extensions</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<script src="browser-main.js"></script>
<style>
body {
font-family: 'Trebuchet MS', 'Lucida Sans Unicode', 'Lucida Grande', 'Lucida Sans', Arial, sans-serif;
font-size: 10pt;
}
code {
font-family: 'Menlo', 'Courier New', 'Courier', monospace;
}
ul {
padding-left: 1em;
}
li {
list-style: none;
padding: 0.3em 0;
}
label {
margin-right: 1em;
}
form {
padding: 0.3em 0 0.3em 0.3em;
}
</style>
</head>
<body>
<h1>Built-in Extensions</h1>
<div id="extensions"></div>
</body>
</html>


@@ -1,20 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const { app, BrowserWindow } = require('electron');
const url = require('url');
const path = require('path');
let window = null;
app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600 });
window.setMenuBarVisibility(false);
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
// window.webContents.openDevTools();
window.once('closed', () => window = null);
});
app.on('window-all-closed', () => app.quit());


@@ -1,5 +0,0 @@
{
"name": "builtin",
"version": "0.1.0",
"main": "main.js"
}


@@ -1,74 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const path = require('path');
const parseSemver = require('parse-semver');
const cp = require('child_process');
const _ = require('underscore');
function asYarnDependency(prefix, tree) {
let parseResult;
try {
parseResult = parseSemver(tree.name);
} catch (err) {
err.message += `: ${tree.name}`;
console.warn(`Could not parse semver: ${tree.name}`);
return null;
}
// not an actual dependency in disk
if (parseResult.version !== parseResult.range) {
return null;
}
const name = parseResult.name;
const version = parseResult.version;
const dependencyPath = path.join(prefix, name);
const children = [];
for (const child of (tree.children || [])) {
const dep = asYarnDependency(path.join(prefix, name, 'node_modules'), child);
if (dep) {
children.push(dep);
}
}
return { name, version, path: dependencyPath, children };
}
function getYarnProductionDependencies(cwd) {
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'ignore'] });
const match = /^{"type":"tree".*$/m.exec(raw);
if (!match || match.length !== 1) {
throw new Error('Could not parse result of `yarn list --json`');
}
const trees = JSON.parse(match[0]).data.trees;
return trees
.map(tree => asYarnDependency(path.join(cwd, 'node_modules'), tree))
.filter(dep => !!dep);
}
function getProductionDependencies(cwd) {
const result = [];
const deps = getYarnProductionDependencies(cwd);
const flatten = dep => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
deps.forEach(flatten);
return _.uniq(result);
}
module.exports.getProductionDependencies = getProductionDependencies;
if (require.main === module) {
const root = path.dirname(__dirname);
console.log(JSON.stringify(getProductionDependencies(root), null, ' '));
}


@@ -3,18 +3,12 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const gulp = require('gulp');
const path = require('path');
const util = require('./lib/util');
const common = require('./lib/optimize');
const es = require('event-stream');
const File = require('vinyl');
const i18n = require('./lib/i18n');
const standalone = require('./lib/standalone');
const cp = require('child_process');
const compilation = require('./lib/compilation');
const monacoapi = require('./monaco/api');
const fs = require('fs');
var gulp = require('gulp');
var path = require('path');
var util = require('./lib/util');
var common = require('./lib/optimize');
var es = require('event-stream');
var File = require('vinyl');
var root = path.dirname(__dirname);
var sha1 = util.getVersion(root);
@@ -27,14 +21,14 @@ var editorEntryPoints = [
{
name: 'vs/editor/editor.main',
include: [],
exclude: ['vs/css', 'vs/nls'],
prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'],
exclude: [ 'vs/css', 'vs/nls' ],
prepend: [ 'out-build/vs/css.js', 'out-build/vs/nls.js' ],
},
{
name: 'vs/base/common/worker/simpleWorker',
include: ['vs/editor/common/services/editorSimpleWorker'],
prepend: ['vs/loader.js'],
append: ['vs/base/worker/workerMain'],
include: [ 'vs/editor/common/services/editorSimpleWorker' ],
prepend: [ 'vs/loader.js' ],
append: [ 'vs/base/worker/workerMain' ],
dest: 'vs/base/worker/workerMain.js'
}
];
@@ -61,125 +55,38 @@ var BUNDLED_FILE_HEADER = [
''
].join('\n');
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
function editorLoaderConfig() {
var result = common.loaderConfig();
gulp.task('clean-editor-src', util.rimraf('out-editor-src'));
gulp.task('extract-editor-src', ['clean-editor-src'], function () {
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
const apiusages = monacoapi.execute().usageContent;
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
standalone.extractEditor({
sourcesRoot: path.join(root, 'src'),
entryPoints: [
'vs/editor/editor.main',
'vs/editor/editor.worker',
'vs/base/worker/workerMain',
],
inlineEntryPoints: [
apiusages,
extrausages
],
libs: [
`lib.d.ts`,
`lib.es2015.collection.d.ts`
],
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
},
compilerOptions: {
module: 2, // ModuleKind.AMD
},
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /^vs\/css!/,
destRoot: path.join(root, 'out-editor-src')
});
});
// never ship octicons in editor
result.paths['vs/base/browser/ui/octiconLabel/octiconLabel'] = 'out-build/vs/base/browser/ui/octiconLabel/octiconLabel.mock';
// Full compile, including nls and inline sources in sourcemaps, for build
gulp.task('clean-editor-build', util.rimraf('out-editor-build'));
gulp.task('compile-editor-build', ['clean-editor-build', 'extract-editor-src'], compilation.compileTask('out-editor-src', 'out-editor-build', true));
// force css inlining to use base64 -- see https://github.com/Microsoft/monaco-editor/issues/148
result['vs/css'] = {
inlineResources: 'base64',
inlineResourcesLimit: 3000 // see https://github.com/Microsoft/monaco-editor/issues/336
};
return result;
}
gulp.task('clean-optimized-editor', util.rimraf('out-editor'));
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-editor-build'], common.optimizeTask({
src: 'out-editor-build',
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'], common.optimizeTask({
entryPoints: editorEntryPoints,
otherSources: editorOtherSources,
resources: editorResources,
loaderConfig: {
paths: {
'vs': 'out-editor-build/vs',
'vscode': 'empty:'
}
},
loaderConfig: editorLoaderConfig(),
bundleLoader: false,
header: BUNDLED_FILE_HEADER,
bundleInfo: true,
out: 'out-editor',
languages: languages
out: 'out-editor'
}));
gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));
gulp.task('clean-editor-esm', util.rimraf('out-editor-esm'));
gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro'], function () {
standalone.createESMSourcesAndResources({
entryPoints: [
'vs/editor/editor.main',
'vs/editor/editor.worker'
],
outFolder: './out-editor-esm/src',
outResourcesFolder: './out-monaco-editor-core/esm',
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
'vs/nls': 'vs/nls.mock',
}
});
});
gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () {
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
});
function toExternalDTS(contents) {
let lines = contents.split('\n');
let killNextCloseCurlyBrace = false;
for (let i = 0; i < lines.length; i++) {
let line = lines[i];
if (killNextCloseCurlyBrace) {
if ('}' === line) {
lines[i] = '';
killNextCloseCurlyBrace = false;
continue;
}
if (line.indexOf(' ') === 0) {
lines[i] = line.substr(4);
} else if (line.charAt(0) === '\t') {
lines[i] = line.substr(1);
}
continue;
}
if ('declare namespace monaco {' === line) {
lines[i] = '';
killNextCloseCurlyBrace = true;
continue;
}
if (line.indexOf('declare namespace monaco.') === 0) {
lines[i] = line.replace('declare namespace monaco.', 'export namespace ');
}
}
return lines.join('\n');
}
gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify-editor', 'optimize-editor'], function () {
gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-editor'], function() {
return es.merge(
// other assets
es.merge(
@@ -188,30 +95,19 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
gulp.src('src/vs/monaco.d.ts')
).pipe(gulp.dest('out-monaco-editor-core')),
// place the .d.ts in the esm folder
gulp.src('src/vs/monaco.d.ts')
.pipe(es.through(function (data) {
this.emit('data', new File({
path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
base: data.base,
contents: new Buffer(toExternalDTS(data.contents.toString()))
}));
}))
.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
// package.json
gulp.src('build/monaco/package.json')
.pipe(es.through(function (data) {
.pipe(es.through(function(data) {
var json = JSON.parse(data.contents.toString());
json.private = false;
data.contents = Buffer.from(JSON.stringify(json, null, ' '));
data.contents = new Buffer(JSON.stringify(json, null, ' '));
this.emit('data', data);
}))
.pipe(gulp.dest('out-monaco-editor-core')),
// README.md
gulp.src('build/monaco/README-npm.md')
.pipe(es.through(function (data) {
.pipe(es.through(function(data) {
this.emit('data', new File({
path: data.path.replace(/README-npm\.md/, 'README.md'),
base: data.base,
@@ -228,10 +124,10 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
// min folder
es.merge(
gulp.src('out-editor-min/**/*')
).pipe(filterStream(function (path) {
).pipe(filterStream(function(path) {
// no map files
return !/(\.js\.map$)|(nls\.metadata\.json$)|(bundleInfo\.json$)/.test(path);
})).pipe(es.through(function (data) {
})).pipe(es.through(function(data) {
// tweak the sourceMappingURL
if (!/\.js$/.test(data.path)) {
this.emit('data', data);
@@ -244,50 +140,49 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr);
data.contents = Buffer.from(strContents);
data.contents = new Buffer(strContents);
this.emit('data', data);
})).pipe(gulp.dest('out-monaco-editor-core/min')),
// min-maps folder
es.merge(
gulp.src('out-editor-min/**/*')
).pipe(filterStream(function (path) {
).pipe(filterStream(function(path) {
// no map files
return /\.js\.map$/.test(path);
})).pipe(gulp.dest('out-monaco-editor-core/min-maps'))
);
});
gulp.task('analyze-editor-distro', function () {
// @ts-ignore
gulp.task('analyze-editor-distro', function() {
var bundleInfo = require('../out-editor/bundleInfo.json');
var graph = bundleInfo.graph;
var bundles = bundleInfo.bundles;
var inverseGraph = {};
Object.keys(graph).forEach(function (module) {
Object.keys(graph).forEach(function(module) {
var dependencies = graph[module];
dependencies.forEach(function (dep) {
dependencies.forEach(function(dep) {
inverseGraph[dep] = inverseGraph[dep] || [];
inverseGraph[dep].push(module);
});
});
var detailed = {};
Object.keys(bundles).forEach(function (entryPoint) {
Object.keys(bundles).forEach(function(entryPoint) {
var included = bundles[entryPoint];
var includedMap = {};
included.forEach(function (included) {
included.forEach(function(included) {
includedMap[included] = true;
});
var explanation = [];
included.map(function (included) {
included.map(function(included) {
if (included.indexOf('!') >= 0) {
return;
}
var reason = (inverseGraph[included] || []).filter(function (mod) {
var reason = (inverseGraph[included]||[]).filter(function(mod) {
return !!includedMap[mod];
});
explanation.push({
@@ -303,67 +198,10 @@ gulp.task('analyze-editor-distro', function () {
});
function filterStream(testFunc) {
return es.through(function (data) {
return es.through(function(data) {
if (!testFunc(data.relative)) {
return;
}
this.emit('data', data);
});
}
//#region monaco type checking
function createTscCompileTask(watch) {
return () => {
const createReporter = require('./lib/reporter').createReporter;
return new Promise((resolve, reject) => {
const args = ['./node_modules/.bin/tsc', '-p', './src/tsconfig.monaco.json', '--noEmit'];
if (watch) {
args.push('-w');
}
const child = cp.spawn(`node`, args, {
cwd: path.join(__dirname, '..'),
// stdio: [null, 'pipe', 'inherit']
});
let errors = [];
let reporter = createReporter();
let report;
let magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings
child.stdout.on('data', data => {
let str = String(data);
str = str.replace(magic, '').trim();
if (str.indexOf('Starting compilation') >= 0 || str.indexOf('File change detected') >= 0) {
errors.length = 0;
report = reporter.end(false);
} else if (str.indexOf('Compilation complete') >= 0) {
report.end();
} else if (str) {
let match = /(.*\(\d+,\d+\): )(.*: )(.*)/.exec(str);
if (match) {
// trying to massage the message so that it matches the gulp-tsb error messages
// e.g. src/vs/base/common/strings.ts(663,5): error TS2322: Type '1234' is not assignable to type 'string'.
let fullpath = path.join(root, match[1]);
let message = match[3];
// @ts-ignore
reporter(fullpath + message);
} else {
// @ts-ignore
reporter(str);
}
}
});
child.on('exit', resolve);
child.on('error', reject);
});
};
}
gulp.task('monaco-typecheck-watch', createTscCompileTask(true));
gulp.task('monaco-typecheck', createTscCompileTask(false));
//#endregion
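A side note on the type-check watch task above: the `magic` regular expression strips ANSI colour/style sequences from tsc's output before the error patterns are matched. A minimal illustration, not part of the changeset (the sample string is fabricated):
// Illustrative only — same regex as in the watch task, applied to a fabricated tsc line.
const magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g;
const raw = '\u001b[31msrc/vs/base/common/strings.ts(663,5): error TS2322: \u001b[0mType \'1234\' is not assignable to type \'string\'.';
console.log(raw.replace(magic, '').trim());
// -> src/vs/base/common/strings.ts(663,5): error TS2322: Type '1234' is not assignable to type 'string'.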

View File

@@ -20,7 +20,6 @@ const sourcemaps = require('gulp-sourcemaps');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@@ -30,6 +29,7 @@ const compilations = glob.sync('**/tsconfig.json', {
});
const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`;
const languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile);
@@ -55,37 +55,19 @@ const tasks = compilations.map(function (tsconfigFile) {
const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**');
const out = path.join(root, 'out');
const i18n = path.join(__dirname, '..', 'i18n');
const baseUrl = getBaseUrl(out);
let headerId, headerOut;
let index = relativeDirname.indexOf('/');
if (index < 0) {
headerId = 'vscode.' + relativeDirname;
headerOut = 'out';
} else {
headerId = 'vscode.' + relativeDirname.substr(0, index);
headerOut = relativeDirname.substr(index + 1) + '/out';
}
function createPipeline(build, emitError) {
const reporter = createReporter();
tsOptions.inlineSources = !!build;
tsOptions.base = path.dirname(absolutePath);
const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
return function () {
const input = es.through();
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
const output = input
.pipe(plumber({
errorHandler: function (err) {
if (err && !err.__reporter__) {
reporter(err);
}
}
}))
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(compilation())
@@ -98,9 +80,7 @@ const tasks = compilations.map(function (tsconfigFile) {
sourceRoot: '../src'
}))
.pipe(tsFilter.restore)
.pipe(build ? nlsDev.bundleMetaDataFiles(headerId, headerOut) : es.through())
// Filter out *.nls.json file. We needed them only to bundle meta data file.
.pipe(filter(['**', '!**/*.nls.json']))
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18n, out) : es.through())
.pipe(reporter.end(emitError));
return es.duplex(input, output);
@@ -147,7 +127,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const watchInput = watcher(src, srcOpts);
return watchInput
.pipe(util.incremental(() => pipeline(), input))
.pipe(util.incremental(() => pipeline(true), input))
.pipe(gulp.dest(out));
});

View File

@@ -12,11 +12,7 @@ const gulptslint = require('gulp-tslint');
const gulpeslint = require('gulp-eslint');
const tsfmt = require('typescript-formatter');
const tslint = require('tslint');
const VinylFile = require('vinyl');
const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
/**
* Hygiene works by creating cascading subsets of all our files and
@@ -33,58 +29,53 @@ const all = [
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*',
'!**/node_modules/**'
'test/**/*'
];
const eolFilter = [
'**',
'!ThirdPartyNotices.txt',
'!LICENSE.txt',
'!extensions/**/out/**',
'!**/node_modules/**',
'!**/fixtures/**',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot}',
'!build/{lib,tslintRules}/**/*.js',
'!build/monaco/**',
'!build/win32/**',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!**/Dockerfile'
];
const indentationFilter = [
'**',
// except specific files
'!ThirdPartyNotices.txt',
'!LICENSE.txt',
'!src/vs/nls.js',
'!src/vs/css.js',
'!src/vs/loader.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/winjs.base.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/assert.js',
// except specific folders
'!test/smoke/out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/octicons/**',
'!**/fixtures/**',
'!**/*.md',
'!**/*.ps1',
'!**/*.template',
'!**/*.yaml',
'!**/*.yml',
'!**/lib/**',
'!extensions/**/out/**',
'!extensions/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!src/vs/*/**/*.d.ts',
'!**/*.d.ts.recipe',
'!test/assert.js',
'!**/package.json',
'!**/npm-shrinkwrap.json',
'!**/octicons/**',
'!**/vs/base/common/marked/raw.marked.js',
'!**/vs/base/common/winjs.base.raw.js',
'!**/vs/base/node/terminateProcess.sh',
'!**/vs/nls.js',
'!**/vs/css.js',
'!**/vs/loader.js',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
'!build/{lib,tslintRules}/**/*.js',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!build/tfs/**/*.config',
'!**/Dockerfile',
'!extensions/markdown-language-features/media/*.js'
'!extensions/vscode-api-tests/testWorkspace/**'
];
const copyrightFilter = [
@@ -102,14 +93,11 @@ const copyrightFilter = [
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/linux/snap/electron-launch',
'!resources/win32/bin/code.js',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*'
'!extensions/markdown/media/tomorrow.css',
'!extensions/html/server/src/modes/typescript/*'
];
const eslintFilter = [
@@ -120,8 +108,8 @@ const eslintFilter = [
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/winjs.base.js',
'!src/**/marked.js',
'!src/**/winjs.base.raw.js',
'!src/**/raw.marked.js',
'!**/test/**'
];
@@ -134,18 +122,26 @@ const tslintFilter = [
'!**/node_modules/**',
'!extensions/typescript/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts'
'!extensions/**/*.test.ts'
];
// {{SQL CARBON EDIT}}
const copyrightHeaderLines = [
const copyrightHeader = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Licensed under the Source EULA. See License.txt in the project root for license information.',
' *--------------------------------------------------------------------------------------------*/'
];
].join('\n');
function reportFailures(failures) {
failures.forEach(failure => {
const name = failure.name || failure.fileName;
const position = failure.startPosition;
const line = position.lineAndCharacter ? position.lineAndCharacter.line : position.line;
const character = position.lineAndCharacter ? position.lineAndCharacter.character : position.character;
console.error(`${name}:${line + 1}:${character + 1}:${failure.failure}`);
});
}
gulp.task('eslint', () => {
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
@@ -156,23 +152,31 @@ gulp.task('eslint', () => {
});
gulp.task('tslint', () => {
// {{SQL CARBON EDIT}}
const options = { emitError: false };
const options = { summarizeFailureOutput: true };
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default.report(options));
.pipe(gulptslint({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.report(reportFailures, options));
});
function hygiene(some) {
const hygiene = exports.hygiene = (some, options) => {
options = options || {};
let errorCount = 0;
const indentation = es.through(function (file) {
const lines = file.contents.toString('utf8').split(/\r\n|\r|\n/);
file.__lines = lines;
const eol = es.through(function (file) {
if (/\r\n?/g.test(file.contents.toString('utf8'))) {
console.error(file.relative + ': Bad EOL found');
errorCount++;
}
lines
this.emit('data', file);
});
const indentation = es.through(function (file) {
file.contents
.toString('utf8')
.split(/\r\n|\r|\n/)
.forEach((line, i) => {
if (/^\s*$/.test(line)) {
// empty or whitespace lines are OK
@@ -190,14 +194,9 @@ function hygiene(some) {
});
const copyrights = es.through(function (file) {
const lines = file.__lines;
for (let i = 0; i < copyrightHeaderLines.length; i++) {
if (lines[i] !== copyrightHeaderLines[i]) {
console.error(file.relative + ': Missing or bad copyright statement');
errorCount++;
break;
}
if (file.contents.toString('utf8').indexOf(copyrightHeader) !== 0) {
console.error(file.relative + ': Missing or bad copyright statement');
errorCount++;
}
this.emit('data', file);
@@ -205,25 +204,12 @@ function hygiene(some) {
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: false,
verify: true,
tsfmt: true,
// verbose: true,
// keep checkJS happy
editorconfig: undefined,
replace: undefined,
tsconfig: undefined,
tsconfigFile: undefined,
tslint: undefined,
tslintFile: undefined,
tsfmtFile: undefined,
vscode: undefined,
vscodeFile: undefined
// verbose: true
}).then(result => {
let original = result.src.replace(/\r\n/gm, '\n');
let formatted = result.dest.replace(/\r\n/gm, '\n');
if (original !== formatted) {
console.error('File not formatted:', file.relative);
if (result.error) {
console.error(result.message);
errorCount++;
}
cb(null, file);
@@ -233,31 +219,32 @@ function hygiene(some) {
});
});
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
const tslintOptions = { fix: false, formatter: 'json' };
const tsLinter = new tslint.Linter(tslintOptions);
const tsl = es.through(function (file) {
const configuration = tslint.Configuration.findConfiguration(null, '.');
const options = { formatter: 'json', rulesDirectory: 'build/lib/tslint' };
const contents = file.contents.toString('utf8');
tsLinter.lint(file.relative, contents, tslintConfiguration.results);
const linter = new tslint.Linter(options);
linter.lint(file.relative, contents, configuration.results);
const result = linter.getResult();
if (result.failureCount > 0) {
reportFailures(result.failures);
errorCount += result.failureCount;
}
this.emit('data', file);
});
let input;
if (Array.isArray(some) || typeof some === 'string' || !some) {
input = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true });
} else {
input = some;
}
const result = input
const result = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(filter(eolFilter))
// {{SQL CARBON EDIT}}
//.pipe(options.skipEOL ? es.through() : eol)
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter));
.pipe(filter(copyrightFilter))
// {{SQL CARBON EDIT}}
// .pipe(copyrights);
//.pipe(copyrights);
const typescript = result
.pipe(filter(tslintFilter))
@@ -268,51 +255,22 @@ function hygiene(some) {
.pipe(filter(eslintFilter))
.pipe(gulpeslint('src/.eslintrc'))
.pipe(gulpeslint.formatEach('compact'));
// {{SQL CARBON EDIT}}
// .pipe(gulpeslint.failAfterError());
let count = 0;
return es.merge(typescript, javascript)
.pipe(es.through(function (data) {
// {{SQL CARBON EDIT}}
this.emit('end');
.pipe(es.through(null, function () {
// {{SQL CARBON EDIT}}
// if (errorCount > 0) {
// this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
// } else {
// this.emit('end');
// }
this.emit('end');
}));
}
};
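Each check inside hygiene() follows the same through-stream shape: inspect `file.contents`, log and count a problem, and always re-emit the file so later checks still run. A stripped-down sketch of that pattern, mirroring the EOL check removed above (illustrative only, not part of the build):
// Illustrative only — the shape of a single hygiene check.
const es = require('event-stream');
let errorCount = 0;
const eolCheck = es.through(function (file) {
    if (/\r\n?/g.test(file.contents.toString('utf8'))) {
        console.error(file.relative + ': Bad EOL found');
        errorCount++;
    }
    this.emit('data', file); // keep the file flowing to the next check
});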
function createGitIndexVinyls(paths) {
const cp = require('child_process');
const repositoryPath = process.cwd();
const fns = paths.map(relativePath => () => new Promise((c, e) => {
const fullPath = path.join(repositoryPath, relativePath);
fs.stat(fullPath, (err, stat) => {
if (err && err.code === 'ENOENT') { // ignore deletions
return c(null);
} else if (err) {
return e(err);
}
cp.exec(`git show :${relativePath}`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
if (err) {
return e(err);
}
c(new VinylFile({
path: fullPath,
base: repositoryPath,
contents: out,
stat
}));
});
});
}));
return pall(fns, { concurrency: 4 })
.then(r => r.filter(p => !!p));
}
gulp.task('hygiene', () => hygiene());
gulp.task('hygiene', () => hygiene(''));
// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {
@@ -323,38 +281,33 @@ if (require.main === module) {
process.exit(1);
});
if (process.argv.length > 2) {
hygiene(process.argv.slice(2)).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
} else {
cp.exec('git config core.autocrlf', (err, out) => {
const skipEOL = out.trim() === 'true';
if (process.argv.length > 2) {
return hygiene(process.argv.slice(2), { skipEOL: skipEOL }).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
}
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
return;
}
const some = out
.split(/\r?\n/)
.filter(l => !!l);
if (some.length > 0) {
console.log('Reading git index versions...');
createGitIndexVinyls(some)
.then(vinyls => new Promise((c, e) => hygiene(es.readArray(vinyls))
.on('end', () => c())
.on('error', e)))
.catch(err => {
console.error();
console.error(err);
process.exit(1);
});
}
hygiene(some, { skipEOL: skipEOL }).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
}
});
}

View File

@@ -14,17 +14,13 @@ const util = require('./lib/util');
const remote = require('gulp-remote-src');
const zip = require('gulp-vinyl-zip');
const assign = require('object-assign');
// {{SQL CARBON EDIT}}
const jeditor = require('gulp-json-editor');
const pkg = require('../package.json');
gulp.task('mixin', function () {
// {{SQL CARBON EDIT}}
const updateUrl = process.env['SQLOPS_UPDATEURL'];
if (!updateUrl) {
console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
const repo = process.env['VSCODE_MIXIN_REPO'];
if (!repo) {
console.log('Missing VSCODE_MIXIN_REPO, skipping mixin');
return;
}
@@ -35,20 +31,39 @@ gulp.task('mixin', function () {
return;
}
// {{SQL CARBON EDIT}}
let serviceUrl = 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json';
if (quality === 'insider') {
serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
}
let newValues = {
"updateUrl": updateUrl,
"quality": quality,
"extensionsGallery": {
"serviceUrl": serviceUrl
}
};
const url = `https://github.com/${repo}/archive/${pkg.distro}.zip`;
const opts = { base: url };
const username = process.env['VSCODE_MIXIN_USERNAME'];
const password = process.env['VSCODE_MIXIN_PASSWORD'];
return gulp.src('./product.json')
.pipe(jeditor(newValues))
if (username || password) {
opts.auth = { user: username || '', pass: password || '' };
}
console.log('Mixing in sources from \'' + url + '\':');
let all = remote('', opts)
.pipe(zip.src())
.pipe(filter(function (f) { return !f.isDirectory(); }))
.pipe(util.rebase(1));
if (quality) {
const productJsonFilter = filter('product.json', { restore: true });
const mixin = all
.pipe(filter(['quality/' + quality + '/**']))
.pipe(util.rebase(2))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json(o => assign({}, require('../product.json'), o)))
.pipe(productJsonFilter.restore);
all = es.merge(mixin);
}
return all
.pipe(es.mapSync(function (f) {
console.log(f.relative);
return f;
}))
.pipe(gulp.dest('.'));
});
});
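In effect, the SQL Carbon replacement above rewrites a handful of fields in product.json via gulp-json-editor instead of downloading a quality folder from a private repo. A rough sketch of the resulting object (the updateUrl value is made up, the gallery URL is the one hard-coded above, and Object.assign only approximates jeditor's merge for these top-level keys):
// Illustrative only — approximate result of .pipe(jeditor(newValues)) on product.json.
const product = require('../product.json');
const mixedIn = Object.assign({}, product, {
    updateUrl: 'https://example.invalid/update',   // stand-in for SQLOPS_UPDATEURL
    quality: 'insider',
    extensionsGallery: {
        serviceUrl: 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-insider.json'
    }
});
console.log(mixedIn.quality, mixedIn.extensionsGallery.serviceUrl);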

View File

@@ -13,6 +13,18 @@ const filter = require('gulp-filter');
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
gulp.task('clean-client', util.rimraf('dataprotocol-node/client/node_modules'));
gulp.task('clean-jsonrpc', util.rimraf('dataprotocol-node/jsonrpc/node_modules'));
gulp.task('clean-server', util.rimraf('dataprotocol-node/server/node_modules'));
gulp.task('clean-types', util.rimraf('dataprotocol-node/types/node_modules'));
gulp.task('clean-extensions-modules', util.rimraf('extensions-modules/node_modules'));
gulp.task('clean-protocol', ['clean-extensions-modules', 'clean-mssql-extension', 'clean-credentials-extension', 'clean-client', 'clean-jsonrpc', 'clean-server', 'clean-types']);
// Tasks to clean extensions modules
gulp.task('clean-mssql-ext-mod', util.rimraf('extensions/mssql/node_modules/extensions-modules'));
gulp.task('clean-credentials-ext-mod', util.rimraf('extensions/credentials/node_modules/extensions-modules'));
gulp.task('clean-build-ext-mod', util.rimraf('build/node_modules/extensions-modules'));
gulp.task('clean-ext-mod', ['clean-mssql-ext-mod', 'clean-credentials-ext-mod', 'clean-build-ext-mod', 'clean-extensions-modules']);
gulp.task('fmt', () => formatStagedFiles());
const formatFiles = (some) => {

View File

@@ -17,32 +17,30 @@ const vfs = require('vinyl-fs');
const rename = require('gulp-rename');
const replace = require('gulp-replace');
const filter = require('gulp-filter');
const buffer = require('gulp-buffer');
const json = require('gulp-json-editor');
const _ = require('underscore');
const util = require('./lib/util');
const ext = require('./lib/extensions');
const buildfile = require('../src/buildfile');
const common = require('./lib/optimize');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const packageJson = require('../package.json');
const product = require('../product.json');
const shrinkwrap = require('../npm-shrinkwrap.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
// {{SQL CARBON EDIT}}
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
const glob = require('glob');
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar;
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// @ts-ignore
// {{SQL CARBON EDIT}}
var del = require('del');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
// {{SQL CARBON EDIT}}
const serviceInstaller = require('extensions-modules/lib/languageservice/serviceInstallerUtil');
const glob = require('glob');
const productDependencies = Object.keys(product.dependencies || {});
const dependencies = Object.keys(shrinkwrap.dependencies)
.concat(productDependencies); // additional dependencies from our product configuration
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
// {{SQL CARBON EDIT}}
const nodeModules = [
@@ -52,32 +50,21 @@ const nodeModules = [
'rxjs/Subject',
'rxjs/Observer',
'ng2-charts/ng2-charts']
.concat(Object.keys(product.dependencies || {}))
.concat(_.uniq(productionDependencies.map(d => d.name)))
.concat(dependencies)
.concat(baseModules);
// Build
const builtInExtensions = require('./builtInExtensions.json');
const builtInExtensions = [
{ name: 'ms-vscode.node-debug', version: '1.18.3' },
{ name: 'ms-vscode.node-debug2', version: '1.18.5' }
];
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
'vscode-colorize-tests'
];
// {{SQL CARBON EDIT}}
const vsce = require('vsce');
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
'agent',
'import',
'profiler'
];
var azureExtensions = [ 'azurecore'];
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.main'),
buildfile.base,
@@ -88,28 +75,26 @@ const vscodeEntryPoints = _.flatten([
const vscodeResources = [
'out-build/main.js',
'out-build/cli.js',
'out-build/driver.js',
'out-build/bootstrap.js',
'out-build/bootstrap-amd.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,cur,html}',
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh}',
'out-build/vs/base/node/startupTimers.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/electron-browser/bootstrap/**',
'out-build/vs/workbench/parts/debug/**/*.json',
'out-build/vs/workbench/parts/execution/**/*.scpt',
'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js',
'out-build/vs/workbench/parts/html/browser/webview-pre.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/parts/tasks/**/*.json',
'out-build/vs/workbench/parts/terminal/electron-browser/terminalProcess.js',
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/services/files/**/*.exe',
'out-build/vs/workbench/services/files/**/*.md',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js',
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
// {{SQL CARBON EDIT}}
'out-build/vs/code/electron-browser/sharedProcess.js',
// {{SQL CARBON EDIT}}
'out-build/sql/workbench/electron-browser/splashscreen/*',
'out-build/sql/**/*.{svg,png,cur,html}',
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
@@ -126,10 +111,8 @@ const vscodeResources = [
'out-build/sql/parts/grid/views/**/*.html',
'out-build/sql/parts/tasks/**/*.html',
'out-build/sql/parts/taskHistory/viewlet/media/**',
'out-build/sql/parts/jobManagement/common/media/*.svg',
'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg',
'out-build/sql/parts/notebook/media/**/*.svg',
'!**/test/**'
];
@@ -139,16 +122,20 @@ const BUNDLED_FILE_HEADER = [
' *--------------------------------------------------------*/'
].join('\n');
var languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
if (process.env.VSCODE_QUALITY !== 'stable') {
languages = languages.concat(['ptb', 'hun', 'trk']); // Add languages requested by the community to non-stable builds
}
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
src: 'out-build',
entryPoints: vscodeEntryPoints,
otherSources: [],
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER,
out: 'out-vscode',
bundleInfo: undefined
languages: languages
}));
@@ -164,12 +151,10 @@ gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min'));
gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', baseUrl));
// Package
// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
const config = {
version: getElectronVersion(),
version: packageJson.electronVersion,
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2018 Microsoft. All rights reserved',
@@ -191,12 +176,10 @@ const config = {
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
darwinCredits: darwinCreditsTemplate ? new Buffer(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || void 0
};
@@ -210,7 +193,7 @@ function getElectron(arch) {
});
return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
@@ -257,66 +240,11 @@ function computeChecksum(filename) {
return hash;
}
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
}
// {{SQL CARBON EDIT}}
function packageAzureCoreTask(platform, arch) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', 'azurecore');
} else {
destination = path.join(destination, 'resources', 'app', 'extensions', 'azurecore');
}
platform = platform || process.platform;
return () => {
const root = path.resolve(path.join(__dirname, '..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => azureExtensions.indexOf(name) > -1);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return ext.fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
}
function packageTask(platform, arch, opts) {
opts = opts || {};
// {{SQL CARBON EDIT}}
const destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
// {{SQL CARBON EDIT}}
const destination = path.join(path.dirname(root), 'sqlops') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
platform = platform || process.platform;
return () => {
@@ -341,54 +269,45 @@ function packageTask(platform, arch, opts) {
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
.filter(({ name }) => azureExtensions.indexOf(name) === -1);
packageBuiltInExtensions();
.filter(({ name }) => builtInExtensions.every(b => b.name !== name));
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
const nlsFilter = filter('**/*.nls.json', { restore: true });
return ext.fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`))
// // TODO@Dirk: this filter / buffer is here to make sure the nls.json files are buffered
.pipe(nlsFilter)
.pipe(buffer())
.pipe(nlsDev.createAdditionalLanguageFiles(languages, path.join(__dirname, '..', 'i18n')))
.pipe(nlsFilter.restore);
}));
const localExtensionDependencies = gulp.src('extensions/node_modules/**', { base: '.' });
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
const sources = es.merge(src, localExtensions, localExtensionDependencies)
.pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
.pipe(filter(['**',
'!**/*.js.map',
'!extensions/**/node_modules/**/{test, tests}/**',
'!extensions/**/node_modules/**/test.js']));
let version = packageJson.version;
// @ts-ignore JSON checking: quality is optional
const quality = product.quality;
if (quality && quality !== 'stable') {
version += '-' + quality;
}
// {{SQL CARBON EDIT}}
const name = (platform === 'darwin') ? 'Azure Data Studio' : product.nameShort;
const name = product.nameShort;
const packageJsonStream = gulp.src(['package.json'], { base: '.' })
.pipe(json({ name, version }));
const settingsSearchBuildId = getBuildNumber();
const date = new Date().toISOString();
const productJsonUpdate = { commit, date, checksums };
if (shouldSetupSettingsSearch()) {
productJsonUpdate.settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
}
const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json(productJsonUpdate));
.pipe(json({ commit, date, checksums, settingsSearchBuildId }));
const license = gulp.src(['LICENSES.chromium.html', 'LICENSE.txt', 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.' });
@@ -396,14 +315,11 @@ function packageTask(platform, arch, opts) {
// TODO the API should be copied to `out` during compile, not here
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
// {{SQL CARBON EDIT}}
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
const depsSrc = [
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
// @ts-ignore JSON checking: dependencies is optional
..._.flatten(Object.keys(product.dependencies || {}).map(d => [`node_modules/${d}/**`, `!node_modules/${d}/**/{test,tests}/**`]))
];
const depsSrc = _.flatten(dependencies
.map(function (d) { return ['node_modules/' + d + '/**', '!node_modules/' + d + '/**/{test,tests}/**']; }));
const deps = gulp.src(depsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
@@ -411,36 +327,16 @@ function packageTask(platform, arch, opts) {
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('v8-profiler', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
// {{SQL CARBON EDIT}}
.pipe(util.cleanNodeModule('chart.js', ['node_modules/**'], undefined))
.pipe(util.cleanNodeModule('emmet', ['node_modules/**'], undefined))
.pipe(util.cleanNodeModule('pty.js', ['build/**'], ['build/Release/**']))
.pipe(util.cleanNodeModule('jquery-ui', ['external/**', 'demos/**'], undefined))
.pipe(util.cleanNodeModule('core-js', ['**/**'], undefined))
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/**']))
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
// {{SQL CARBON EDIT}}
let copiedModules = gulp.src([
'node_modules/jquery/**/*.*',
'node_modules/reflect-metadata/**/*.*',
'node_modules/slickgrid/**/*.*',
'node_modules/underscore/**/*.*',
'node_modules/zone.js/**/*.*',
'node_modules/chart.js/**/*.*'
], { base: '.', dot: true });
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']));
let all = es.merge(
packageJsonStream,
@@ -448,8 +344,7 @@ function packageTask(platform, arch, opts) {
license,
watermark,
api,
// {{SQL CARBON EDIT}}
copiedModules,
// {{SQL CARBON EDIT}}
dataApi,
sources,
deps
@@ -498,22 +393,18 @@ function packageTask(platform, arch, opts) {
const buildRoot = path.dirname(root);
// {{SQL CARBON EDIT}}
gulp.task('vscode-win32-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('win32', 'x64'));
gulp.task('vscode-darwin-azurecore', ['optimize-vscode'], packageAzureCoreTask('darwin'));
gulp.task('vscode-linux-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('linux', 'x64'));
gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-ia32')));
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-x64')));
gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'azuredatastudio-darwin')));
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-ia32')));
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-x64')));
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm')));
gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'sqlops-win32-ia32')));
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'sqlops-win32-x64')));
gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'sqlops-darwin')));
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'sqlops-linux-ia32')));
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'sqlops-linux-x64')));
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'sqlops-linux-arm')));
gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32'));
gulp.task('vscode-win32-x64', ['vscode-win32-x64-azurecore', 'optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['vscode-darwin-azurecore', 'optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
gulp.task('vscode-win32-x64', ['optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32'));
gulp.task('vscode-linux-x64', ['vscode-linux-x64-azurecore', 'optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-x64', ['optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm'));
gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true }));
@@ -524,21 +415,25 @@ gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], p
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
// Transifex Localizations
const innoSetupConfig = {
'zh-cn': { codePage: 'CP936', defaultInfo: { name: 'Simplified Chinese', id: '$0804', } },
'zh-tw': { codePage: 'CP950', defaultInfo: { name: 'Traditional Chinese', id: '$0404' } },
'ko': { codePage: 'CP949', defaultInfo: { name: 'Korean', id: '$0412' } },
'ja': { codePage: 'CP932' },
'de': { codePage: 'CP1252' },
'fr': { codePage: 'CP1252' },
'es': { codePage: 'CP1252' },
'ru': { codePage: 'CP1251' },
'it': { codePage: 'CP1252' },
'pt-br': { codePage: 'CP1252' },
'hu': { codePage: 'CP1250' },
'tr': { codePage: 'CP1254' }
};
const vscodeLanguages = [
'zh-hans',
'zh-hant',
'ja',
'ko',
'de',
'fr',
'es',
'ru',
'it',
'pt-br',
'hu',
'tr'
];
const setupDefaultLanguages = [
'zh-hans',
'zh-hant',
'ko'
];
const apiHostname = process.env.TRANSIFEX_API_URL;
const apiName = process.env.TRANSIFEX_API_NAME;
@@ -546,52 +441,27 @@ const apiToken = process.env.TRANSIFEX_API_TOKEN;
gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToExtensions = './extensions/**/*.nls.json';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
gulp.src(pathToMetadata).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToSetup).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToExtensions).pipe(i18n.prepareXlfFiles('vscode-extensions'))
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
});
gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
// {{SQL CARBON EDIT}}
// disable since function makes calls to VS Code Transifex API
// ).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(vfs.dest('../vscode-transifex-input'));
});
gulp.task('vscode-translations-pull', function () {
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`));
let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`));
});
return es.merge(
i18n.pullXlfFiles('vscode-editor', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-workbench', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-extensions', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-setup', apiHostname, apiName, apiToken, setupDefaultLanguages)
).pipe(vfs.dest('../vscode-localization'));
});
gulp.task('vscode-translations-import', function () {
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
.pipe(i18n.prepareI18nFiles())
.pipe(vfs.dest(`./i18n/${language.folderName}`));
// {{SQL CARBON EDIT}}
// gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
// .pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
// .pipe(vfs.dest(`./build/win32/i18n`));
});
return gulp.src('../vscode-localization/**/*.xlf').pipe(i18n.prepareJsonFiles()).pipe(vfs.dest('./i18n'));
});
// Sourcemaps
@@ -616,19 +486,21 @@ gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => {
if (!shouldSetupSettingsSearch()) {
const branch = process.env.BUILD_SOURCEBRANCH;
if (!branch.endsWith('/master') && !branch.indexOf('/release/') >= 0) {
console.log(`Only runs on master and release branches, not ${branch}`);
return;
}
if (!fs.existsSync(allConfigDetailsPath)) {
throw new Error(`configuration file at ${allConfigDetailsPath} does not exist`);
console.error(`configuration file at ${allConfigDetailsPath} does not exist`);
return;
}
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
const settingsSearchBuildId = getBuildNumber();
if (!settingsSearchBuildId) {
throw new Error('Failed to compute build number');
console.error('Failed to compute build number');
return;
}
return gulp.src(allConfigDetailsPath)
@@ -640,30 +512,76 @@ gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () =
}));
});
function shouldSetupSettingsSearch() {
const branch = process.env.BUILD_SOURCEBRANCH;
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
}
function getBuildNumber() {
const previous = getPreviousVersion(packageJson.version);
if (!previous) {
return 0;
}
function getSettingsSearchBuildId(packageJson) {
try {
const branch = process.env.BUILD_SOURCEBRANCH;
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
/\/master$/.test(branch) ? 1 :
2; // Some unexpected branch
const out = cp.execSync(`git rev-list HEAD --count`);
const out = cp.execSync(`git rev-list ${previous}..HEAD --count`);
const count = parseInt(out.toString());
// <version number><commit count><branchId (avoid unlikely conflicts)>
// 1.25.1, 1,234,567 commits, master = 1250112345671
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
return versionStringToNumber(packageJson.version) * 1e4 + count;
} catch (e) {
throw new Error('Could not determine build number: ' + e.toString());
console.error('Could not determine build number: ' + e.toString());
return 0;
}
}
// This task is only run for the MacOS build
/**
* Given 1.17.2, return 1.17.1
* 1.18.0 => 1.17.2.
* 2.0.0 => 1.18.0 (or the highest 1.x)
*/
function getPreviousVersion(versionStr) {
function tagExists(tagName) {
try {
cp.execSync(`git rev-parse ${tagName}`, { stdio: 'ignore' });
return true;
} catch (e) {
return false;
}
}
function getLastTagFromBase(semverArr, componentToTest) {
const baseVersion = semverArr.join('.');
if (!tagExists(baseVersion)) {
console.error('Failed to find tag for base version, ' + baseVersion);
return null;
}
let goodTag;
do {
goodTag = semverArr.join('.');
semverArr[componentToTest]++;
} while (tagExists(semverArr.join('.')));
return goodTag;
}
const semverArr = versionStr.split('.');
if (semverArr[2] > 0) {
semverArr[2]--;
return semverArr.join('.');
} else if (semverArr[1] > 0) {
semverArr[1]--;
return getLastTagFromBase(semverArr, 2);
} else {
semverArr[0]--;
return getLastTagFromBase(semverArr, 1);
}
}
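A quick walk-through of the tag search above, assuming hypothetical tags 1.17.0, 1.17.1 and 1.17.2 exist in the repo:
// Illustrative only — expected results under that assumption:
//   getPreviousVersion('1.17.2') -> '1.17.1'   (patch > 0, just decrement)
//   getPreviousVersion('1.18.0') -> '1.17.2'   (walk 1.17.0 -> 1.17.1 -> 1.17.2 until a tag is missing)
//   getPreviousVersion('2.0.0')  -> highest existing 1.x tag, found the same way by walking the minor component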
function versionStringToNumber(versionStr) {
const semverRegex = /(\d+)\.(\d+)\.(\d+)/;
const match = versionStr.match(semverRegex);
if (!match) {
return 0;
}
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
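And to make the two build-number schemes concrete (the 1,234,567 commit count is the one quoted in the getSettingsSearchBuildId comment; the 321 below is made up):
// Illustrative only — the old and new schemes side by side.
// New scheme (getSettingsSearchBuildId): <version><commit count><branchId>
//   util.versionStringToNumber('1.25.1') === 1 * 1e4 + 25 * 1e2 + 1 === 12501
//   12501 * 1e8 + 1234567 * 10 + 1 === 1250112345671   // master branch
// Old scheme (getBuildNumber): <version> * 1e4 + commits since the previous release tag
//   12501 * 1e4 + 321 === 125010321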
gulp.task('generate-vscode-configuration', () => {
return new Promise((resolve, reject) => {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
@@ -671,14 +589,9 @@ gulp.task('generate-vscode-configuration', () => {
return reject(new Error('$AGENT_BUILDDIRECTORY not set'));
}
if (process.env.VSCODE_QUALITY !== 'insider' && process.env.VSCODE_QUALITY !== 'stable') {
return resolve();
}
const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
const appPath = path.join(buildDir, `VSCode-darwin/${appName}/Contents/Resources/app/bin/code`);
const appPath = path.join(buildDir, 'VSCode-darwin/Visual\\ Studio\\ Code\\ -\\ Insiders.app/Contents/Resources/app/bin/code');
const codeProc = cp.exec(`${appPath} --export-default-configuration='${allConfigDetailsPath}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`);
const timer = setTimeout(() => {
@@ -698,31 +611,36 @@ gulp.task('generate-vscode-configuration', () => {
clearTimeout(timer);
reject(err);
});
}).catch(e => {
// Don't fail the build
console.error(e.toString());
});
});
// {{SQL CARBON EDIT}}
// Install service locally before building carbon
function installService() {
let config = require('../extensions/mssql/src/config.json');
return platformInfo.getCurrent().then(p => {
let runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
var installer = new serviceDownloader(config);
let serviceInstallFolder = installer.getInstallDirectory(runtime);
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
return del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
function installService(extObj, path) {
var installer = new serviceInstaller.ServiceInstaller(extObj, path);
installer.getServiceInstallDirectoryRoot().then(serviceInstallFolder => {
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
installer.installService();
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
}, getFolderPathError => {
console.log('failed to call getServiceInstallDirectoryRoot error: ' + getFolderPathError);
});
}
gulp.task('install-sqltoolsservice', () => {
return installService();
var mssqlExt = require('../extensions/mssql/client/out/models/constants');
var extObj = new mssqlExt.Constants();
var path = '../extensions/mssql/client/out/config.json';
return installService(extObj, path);
});

View File

@@ -14,17 +14,40 @@ const vfs = require('vinyl-fs');
const util = require('./lib/util');
const packageJson = require('../package.json');
const product = require('../product.json');
const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
const rpmDependencies = require('../resources/linux/rpm/dependencies');
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
const flatpakManifest = {
appId: product.darwinBundleIdentifier, // We need a reverse-url style identifier.
sdk: 'org.freedesktop.Sdk',
runtime: 'org.freedesktop.Sdk',
runtimeVersion: '1.4',
base: 'io.atom.electron.BaseApp',
baseFlatpakref: 'https://s3-us-west-2.amazonaws.com/electron-flatpak.endlessm.com/electron-base-app-master.flatpakref',
command: product.applicationName,
symlinks: [
['/share/' + product.applicationName + '/bin/' + product.applicationName, '/bin/' + product.applicationName],
],
finishArgs: [
'--share=ipc', '--socket=x11', // Allow showing X11 windows.
'--share=network', // Network access (e.g. for installing extension).
'--filesystem=host', // Allow access to the whole file system.
'--device=dri', // Allow OpenGL rendering.
'--filesystem=/tmp', // Needed for Chromium's single instance check.
'--socket=pulseaudio', // Some extensions may want to play sounds...
'--talk-name=org.freedesktop.Notifications', // ...or pop up notifications.
],
};
function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
}
function prepareDebPackage(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../azuredatastudio-linux-' + arch;
const binaryDir = '../sqlops-linux-' + arch;
const debArch = getDebPackageArch(arch);
const destination = '.build/linux/deb/' + debArch + '/' + product.applicationName + '-' + debArch;
@@ -73,9 +96,7 @@ function prepareDebPackage(arch) {
const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ARCHITECTURE@@', debArch))
// @ts-ignore JSON checking: quality is optional
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
// @ts-ignore JSON checking: updateUrl is optional
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst'));
@@ -90,7 +111,8 @@ function buildDebPackage(arch) {
return shell.task([
'chmod 755 ' + product.applicationName + '-' + debArch + '/DEBIAN/postinst ' + product.applicationName + '-' + debArch + '/DEBIAN/prerm ' + product.applicationName + '-' + debArch + '/DEBIAN/postrm',
'mkdir -p deb',
'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb'
'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb',
'dpkg-scanpackages deb /dev/null > Packages'
], { cwd: '.build/linux/deb/' + debArch });
}
@@ -104,7 +126,7 @@ function getRpmPackageArch(arch) {
function prepareRpmPackage(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../azuredatastudio-linux-' + arch;
const binaryDir = '../sqlops-linux-' + arch;
const rpmArch = getRpmPackageArch(arch);
return function () {
@@ -134,9 +156,7 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@RELEASE@@', linuxPackageRevision))
.pipe(replace('@@ARCHITECTURE@@', rpmArch))
.pipe(replace('@@LICENSE@@', product.licenseName))
// @ts-ignore JSON checking: quality is optional
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
// @ts-ignore JSON checking: updateUrl is optional
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(replace('@@DEPENDENCIES@@', rpmDependencies[rpmArch].join(', ')))
.pipe(rename('SPECS/' + product.applicationName + '.spec'));
@@ -200,10 +220,10 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);
const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
return shell.task([
`chmod +x ${snapBuildPath}/electron-launch`,
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}`
`cd ${snapBuildPath} && snapcraft snap`
]);
}
@@ -213,7 +233,7 @@ function getFlatpakArch(arch) {
function prepareFlatpak(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../azuredatastudio-linux-' + arch;
const binaryDir = '../sqlops-linux-' + arch;
const flatpakArch = getFlatpakArch(arch);
const destination = '.build/linux/flatpak/' + flatpakArch;
@@ -314,3 +334,10 @@ gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prep
gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32'));
gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64'));
gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm'));
gulp.task('vscode-linux-ia32-prepare-flatpak', ['clean-vscode-linux-ia32-flatpak'], prepareFlatpak('ia32'));
gulp.task('vscode-linux-x64-prepare-flatpak', ['clean-vscode-linux-x64-flatpak'], prepareFlatpak('x64'));
gulp.task('vscode-linux-arm-prepare-flatpak', ['clean-vscode-linux-arm-flatpak'], prepareFlatpak('arm'));
gulp.task('vscode-linux-ia32-flatpak', ['vscode-linux-ia32-prepare-flatpak'], buildFlatpak('ia32'));
gulp.task('vscode-linux-x64-flatpak', ['vscode-linux-x64-prepare-flatpak'], buildFlatpak('x64'));
gulp.task('vscode-linux-arm-flatpak', ['vscode-linux-arm-prepare-flatpak'], buildFlatpak('arm'));

View File

@@ -7,71 +7,42 @@
const gulp = require('gulp');
const path = require('path');
const fs = require('fs');
const assert = require('assert');
const cp = require('child_process');
const _7z = require('7zip')['7z'];
const util = require('./lib/util');
const pkg = require('../package.json');
const product = require('../product.json');
const vfs = require('vinyl-fs');
const mkdirp = require('mkdirp');
const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}}
const buildPath = arch => path.join(path.dirname(repoPath), `azuredatastudio-win32-${arch}`);
const buildPath = arch => path.join(path.dirname(repoPath), `sqlops-win32-${arch}`);
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const setupDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'setup');
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
const signPS1 = path.join(repoPath, 'build', 'tfs', 'win32', 'sign.ps1');
function packageInnoSetup(iss, options, cb) {
options = options || {};
const definitions = options.definitions || {};
const debug = process.argv.some(arg => arg === '--debug-inno');
if (debug) {
definitions['Debug'] = 'true';
}
const keys = Object.keys(definitions);
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
const defs = keys.map(key => `/d${key}=${definitions[key]}`);
const args = [
iss,
...defs
//,
//`/sesrp=powershell.exe -ExecutionPolicy bypass ${signPS1} $f`
];
const args = [iss].concat(defs);
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] })
cp.spawn(innoSetupPath, args, { stdio: 'inherit' })
.on('error', cb)
.on('exit', () => cb(null));
}
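For reference, packageInnoSetup turns the `definitions` map into one `/d<Key>=<value>` preprocessor switch per key for ISCC.exe. A tiny sketch with sample values (illustrative only):
// Illustrative only — how the definitions object becomes ISCC command-line arguments.
const definitions = { NameLong: 'Azure Data Studio', Arch: 'x64', Debug: 'true' };   // sample values
const defs = Object.keys(definitions).map(key => `/d${key}=${definitions[key]}`);
console.log(defs);
// -> [ '/dNameLong=Azure Data Studio', '/dArch=x64', '/dDebug=true' ]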
function buildWin32Setup(arch, target) {
if (target !== 'system' && target !== 'user') {
throw new Error('Invalid setup target');
}
function buildWin32Setup(arch) {
return cb => {
const ia32AppId = target === 'system' ? product.win32AppId : product.win32UserAppId;
const x64AppId = target === 'system' ? product.win32x64AppId : product.win32x64UserAppId;
const sourcePath = buildPath(arch);
const outputPath = setupDir(arch, target);
mkdirp.sync(outputPath);
const originalProductJsonPath = path.join(sourcePath, 'resources/app/product.json');
const productJsonPath = path.join(outputPath, 'product.json');
const productJson = JSON.parse(fs.readFileSync(originalProductJsonPath, 'utf8'));
productJson['target'] = target;
fs.writeFileSync(productJsonPath, JSON.stringify(productJson, undefined, '\t'));
const ia32AppId = product.win32AppId;
const x64AppId = product.win32x64AppId;
const definitions = {
NameLong: product.nameLong,
@@ -79,42 +50,35 @@ function buildWin32Setup(arch, target) {
DirName: product.win32DirName,
Version: pkg.version,
RawVersion: pkg.version.replace(/-\w+$/, ''),
NameVersion: product.win32NameVersion + (target === 'user' ? ' (User)' : ''),
NameVersion: product.win32NameVersion,
ExeBasename: product.nameShort,
RegValueName: product.win32RegValueName,
ShellNameShort: product.win32ShellNameShort,
AppMutex: product.win32MutexName,
Arch: arch,
AppId: arch === 'ia32' ? ia32AppId : x64AppId,
IncompatibleTargetAppId: arch === 'ia32' ? product.win32AppId : product.win32x64AppId,
IncompatibleArchAppId: arch === 'ia32' ? x64AppId : ia32AppId,
IncompatibleAppId: arch === 'ia32' ? x64AppId : ia32AppId,
AppUserId: product.win32AppUserModelId,
ArchitecturesAllowed: arch === 'ia32' ? '' : 'x64',
ArchitecturesInstallIn64BitMode: arch === 'ia32' ? '' : 'x64',
SourceDir: sourcePath,
SourceDir: buildPath(arch),
RepoDir: repoPath,
OutputDir: outputPath,
InstallTarget: target,
ProductJsonPath: productJsonPath
OutputDir: setupDir(arch)
};
packageInnoSetup(issPath, { definitions }, cb);
};
}
function defineWin32SetupTasks(arch, target) {
gulp.task(`clean-vscode-win32-${arch}-${target}-setup`, util.rimraf(setupDir(arch, target)));
gulp.task(`vscode-win32-${arch}-${target}-setup`, [`clean-vscode-win32-${arch}-${target}-setup`], buildWin32Setup(arch, target));
}
gulp.task('clean-vscode-win32-ia32-setup', util.rimraf(setupDir('ia32')));
gulp.task('vscode-win32-ia32-setup', ['clean-vscode-win32-ia32-setup'], buildWin32Setup('ia32'));
defineWin32SetupTasks('ia32', 'system');
defineWin32SetupTasks('x64', 'system');
defineWin32SetupTasks('ia32', 'user');
defineWin32SetupTasks('x64', 'user');
gulp.task('clean-vscode-win32-x64-setup', util.rimraf(setupDir('x64')));
gulp.task('vscode-win32-x64-setup', ['clean-vscode-win32-x64-setup'], buildWin32Setup('x64'));
function archiveWin32Setup(arch) {
return cb => {
const args = ['a', '-tzip', zipPath(arch), '-x!CodeSignSummary*.md', '.', '-r'];
const args = ['a', '-tzip', zipPath(arch), '.', '-r'];
cp.spawn(_7z, args, { stdio: 'inherit', cwd: buildPath(arch) })
.on('error', cb)
@@ -127,13 +91,3 @@ gulp.task('vscode-win32-ia32-archive', ['clean-vscode-win32-ia32-archive'], arch
gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64')));
gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64'));
function copyInnoUpdater(arch) {
return () => {
return gulp.src('build/win32/{inno_updater.exe,vcruntime140.dll}', { base: 'build/win32' })
.pipe(vfs.dest(path.join(buildPath(arch), 'tools')));
};
}
gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));
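For reference, a minimal, self-contained sketch (not part of the diff; paths and definition values are placeholders) of how packageInnoSetup above turns the definitions map into /d switches for the Inno Setup compiler:

const cp = require('child_process');
const issPath = 'build/win32/code.iss';                 // placeholder path
const innoSetupPath = 'C:/tools/InnoSetup/ISCC.exe';    // placeholder path
const definitions = { NameLong: 'Azure Data Studio', Arch: 'x64', InstallTarget: 'user' };
const args = [issPath, ...Object.keys(definitions).map(key => `/d${key}=${definitions[key]}`)];
// Roughly: ISCC.exe build/win32/code.iss /dNameLong=Azure Data Studio /dArch=x64 /dInstallTarget=user
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] })
	.on('error', err => console.error(err));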

View File

@@ -1,118 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var path = require("path");
var es = require("event-stream");
var pickle = require("chromium-pickle-js");
var Filesystem = require("asar/lib/filesystem");
var VinylFile = require("vinyl");
var minimatch = require("minimatch");
function createAsar(folderPath, unpackGlobs, destFilename) {
var shouldUnpackFile = function (file) {
for (var i = 0; i < unpackGlobs.length; i++) {
if (minimatch(file.relative, unpackGlobs[i])) {
return true;
}
}
return false;
};
var filesystem = new Filesystem(folderPath);
var out = [];
// Keep track of pending inserts
var pendingInserts = 0;
var onFileInserted = function () { pendingInserts--; };
// Do not insert twice the same directory
var seenDir = {};
var insertDirectoryRecursive = function (dir) {
if (seenDir[dir]) {
return;
}
var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (lastSlash !== -1) {
insertDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
filesystem.insertDirectory(dir);
};
var insertDirectoryForFile = function (file) {
var lastSlash = file.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = file.lastIndexOf('\\');
}
if (lastSlash !== -1) {
insertDirectoryRecursive(file.substring(0, lastSlash));
}
};
var insertFile = function (relativePath, stat, shouldUnpack) {
insertDirectoryForFile(relativePath);
pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
};
return es.through(function (file) {
if (file.stat.isDirectory()) {
return;
}
if (!file.stat.isFile()) {
throw new Error("unknown item in stream!");
}
var shouldUnpack = shouldUnpackFile(file);
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
if (shouldUnpack) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
var relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: path.join(destFilename + '.unpacked', relative),
stat: file.stat,
contents: file.contents
}));
}
else {
// The file goes inside of xx.asar
out.push(file.contents);
}
}, function () {
var _this = this;
var finish = function () {
{
var headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));
var headerBuf = headerPickle.toBuffer();
var sizePickle = pickle.createEmpty();
sizePickle.writeUInt32(headerBuf.length);
var sizeBuf = sizePickle.toBuffer();
out.unshift(headerBuf);
out.unshift(sizeBuf);
}
var contents = Buffer.concat(out);
out.length = 0;
_this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: destFilename,
contents: contents
}));
_this.queue(null);
};
// Call finish() only when all file inserts have finished...
if (pendingInserts === 0) {
finish();
}
else {
onFileInserted = function () {
pendingInserts--;
if (pendingInserts === 0) {
finish();
}
};
}
});
}
exports.createAsar = createAsar;
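A hedged usage sketch of createAsar in a vinyl pipeline (folder names and the unpack glob are illustrative, not taken from the diff): files matching the unpack glob land next to the archive in app.asar.unpacked, everything else is packed into app.asar.

const path = require('path');
const vfs = require('vinyl-fs');
const { createAsar } = require('./lib/asar');

const folder = path.resolve('out-vscode');              // placeholder source folder
vfs.src('**/*', { cwd: folder, base: folder, dot: true })
	.pipe(createAsar(folder, ['**/*.node'], 'app.asar'))  // keep native modules unpacked
	.pipe(vfs.dest('.build'));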

View File

@@ -1,131 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as es from 'event-stream';
import * as pickle from 'chromium-pickle-js';
import * as Filesystem from 'asar/lib/filesystem';
import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch';
export function createAsar(folderPath: string, unpackGlobs: string[], destFilename: string): NodeJS.ReadWriteStream {
const shouldUnpackFile = (file: VinylFile): boolean => {
for (let i = 0; i < unpackGlobs.length; i++) {
if (minimatch(file.relative, unpackGlobs[i])) {
return true;
}
}
return false;
};
const filesystem = new Filesystem(folderPath);
const out: Buffer[] = [];
// Keep track of pending inserts
let pendingInserts = 0;
let onFileInserted = () => { pendingInserts--; };
// Do not insert twice the same directory
const seenDir: { [key: string]: boolean; } = {};
const insertDirectoryRecursive = (dir: string) => {
if (seenDir[dir]) {
return;
}
let lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (lastSlash !== -1) {
insertDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
filesystem.insertDirectory(dir);
};
const insertDirectoryForFile = (file: string) => {
let lastSlash = file.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = file.lastIndexOf('\\');
}
if (lastSlash !== -1) {
insertDirectoryRecursive(file.substring(0, lastSlash));
}
};
const insertFile = (relativePath: string, stat: { size: number; mode: number; }, shouldUnpack: boolean) => {
insertDirectoryForFile(relativePath);
pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
};
return es.through(function (file) {
if (file.stat.isDirectory()) {
return;
}
if (!file.stat.isFile()) {
throw new Error(`unknown item in stream!`);
}
const shouldUnpack = shouldUnpackFile(file);
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
if (shouldUnpack) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: path.join(destFilename + '.unpacked', relative),
stat: file.stat,
contents: file.contents
}));
} else {
// The file goes inside of xx.asar
out.push(file.contents);
}
}, function () {
let finish = () => {
{
const headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));
const headerBuf = headerPickle.toBuffer();
const sizePickle = pickle.createEmpty();
sizePickle.writeUInt32(headerBuf.length);
const sizeBuf = sizePickle.toBuffer();
out.unshift(headerBuf);
out.unshift(sizeBuf);
}
const contents = Buffer.concat(out);
out.length = 0;
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: destFilename,
contents: contents
}));
this.queue(null);
};
// Call finish() only when all file inserts have finished...
if (pendingInserts === 0) {
finish();
} else {
onFileInserted = () => {
pendingInserts--;
if (pendingInserts === 0) {
finish();
}
};
}
});
}

View File

@@ -1,121 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const fs = require('fs');
const path = require('path');
const os = require('os');
const mkdirp = require('mkdirp');
const rimraf = require('rimraf');
const es = require('event-stream');
const rename = require('gulp-rename');
const vfs = require('vinyl-fs');
const ext = require('./extensions');
const util = require('gulp-util');
const root = path.dirname(path.dirname(__dirname));
const builtInExtensions = require('../builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
function getExtensionPath(extension) {
return path.join(root, '.build', 'builtInExtensions', extension.name);
}
function isUpToDate(extension) {
const packagePath = path.join(getExtensionPath(extension), 'package.json');
if (!fs.existsSync(packagePath)) {
return false;
}
const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' });
try {
const diskVersion = JSON.parse(packageContents).version;
return (diskVersion === extension.version);
} catch (err) {
return false;
}
}
function syncMarketplaceExtension(extension) {
if (isUpToDate(extension)) {
util.log(util.colors.blue('[marketplace]'), `${extension.name}@${extension.version}`, util.colors.green('✔︎'));
return es.readArray([]);
}
rimraf.sync(getExtensionPath(extension));
return ext.fromMarketplace(extension.name, extension.version)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));
}
function syncExtension(extension, controlState) {
switch (controlState) {
case 'disabled':
util.log(util.colors.blue('[disabled]'), util.colors.gray(extension.name));
return es.readArray([]);
case 'marketplace':
return syncMarketplaceExtension(extension);
default:
if (!fs.existsSync(controlState)) {
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
return es.readArray([]);
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
return es.readArray([]);
}
util.log(util.colors.blue('[local]'), `${extension.name}: ${util.colors.cyan(controlState)}`, util.colors.green('✔︎'));
return es.readArray([]);
}
}
function readControlFile() {
try {
return JSON.parse(fs.readFileSync(controlFilePath, 'utf8'));
} catch (err) {
return {};
}
}
function writeControlFile(control) {
mkdirp.sync(path.dirname(controlFilePath));
fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
}
function main() {
util.log('Synchronizing built-in extensions...');
util.log(`You can manage built-in extensions with the ${util.colors.cyan('--builtin')} flag`);
const control = readControlFile();
const streams = [];
for (const extension of builtInExtensions) {
let controlState = control[extension.name] || 'marketplace';
control[extension.name] = controlState;
streams.push(syncExtension(extension, controlState));
}
writeControlFile(control);
es.merge(streams)
.on('error', err => {
console.error(err);
process.exit(1);
})
.on('end', () => {
process.exit(0);
});
}
main();
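The control file read and written above is a plain JSON map from built-in extension name to one of the three states handled by syncExtension. A hedged example of its shape (extension names and the local path are placeholders):

// Possible contents of ~/.vscode-oss-dev/extensions/control.json once written:
const exampleControl = {
	'msft.example-builtin': 'marketplace',          // keep syncing this version from the marketplace
	'msft.other-builtin': 'disabled',               // skip this built-in extension entirely
	'msft.local-builtin': '/home/me/src/local-ext'  // run from a local checkout (must contain package.json)
};
require('fs').writeFileSync('control.example.json', JSON.stringify(exampleControl, null, 2));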

View File

@@ -217,7 +217,6 @@ function removeDuplicateTSBoilerplate(destFiles) {
{ start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ },
];
destFiles.forEach(function (destFile) {
var SEEN_BOILERPLATE = [];
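Each start/end pair above brackets one helper that the TypeScript compiler emits at the top of every output file; removeDuplicateTSBoilerplate keeps only the first occurrence per bundle. For illustration, tsc's __param helper looks roughly like this, so the /^var __param/ and /^};$/ patterns fence exactly these three lines:

var __param = (this && this.__param) || function (paramIndex, decorator) {
    return function (target, key) { decorator(target, key, paramIndex); }
};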

View File

@@ -44,11 +44,11 @@ interface ILoaderPluginReqFunc {
export interface IEntryPoint {
name: string;
include?: string[];
exclude?: string[];
include: string[];
exclude: string[];
prepend: string[];
append?: string[];
dest?: string;
append: string[];
dest: string;
}
interface IEntryPointMap {
@@ -339,7 +339,6 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
{ start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ },
];
destFiles.forEach((destFile) => {
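To make the IEntryPoint change above concrete, a hedged example of a fully populated entry point description as the bundler consumes it (module names and dest are illustrative, not taken from the diff):

const workbenchEntryPoint: IEntryPoint = {   // IEntryPoint as declared above
	name: 'vs/workbench/workbench.main',
	include: [],
	exclude: ['vs/css', 'vs/nls'],
	prepend: ['vs/css.js', 'vs/nls.js'],
	append: [],
	dest: 'vs/workbench/workbench.main.js'
};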

View File

@@ -18,21 +18,14 @@ var _ = require("underscore");
var monacodts = require("../monaco/api");
var fs = require("fs");
var reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
var rootDir = path.join(__dirname, "../../" + src);
var options = require("../../" + src + "/tsconfig.json").compilerOptions;
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
}
function createCompile(src, build, emitError) {
var opts = _.clone(getTypeScriptCompilerOptions(src));
var rootDir = path.join(__dirname, '../../src');
var options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
function createCompile(build, emitError) {
var opts = _.clone(options);
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
@@ -54,46 +47,77 @@ function createCompile(src, build, emitError) {
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: options.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(emitError));
return es.duplex(input, output);
};
}
function compileTask(src, out, build) {
function compileTask(out, build) {
return function () {
var compile = createCompile(src, build, true);
var srcPipe = es.merge(gulp.src(src + "/**", { base: "" + src }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return srcPipe
var compile = createCompile(build, true);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
return src
.pipe(compile())
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
.pipe(monacodtsTask(out, false));
};
}
exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
var compile = createCompile('src', build);
var compile = createCompile(build);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
var watchSrc = watch('src/**', { base: 'src' });
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return watchSrc
.pipe(util.incremental(compile, src, true))
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
};
}
exports.watchTask = watchTask;
function reloadTypeScriptNodeModule() {
var util = require('gulp-util');
function log(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log.apply(util, [util.colors.cyan('[memory watch dog]'), message].concat(rest));
}
function heapUsed() {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out, isWatch) {
var basePath = path.resolve(process.cwd(), out);
var neededFiles = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
@@ -125,7 +149,6 @@ function monacodtsTask(out, isWatch) {
fs.writeFileSync(result.filePath, result.content);
}
else {
fs.writeFileSync(result.filePath, result.content);
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
@@ -137,7 +160,7 @@ function monacodtsTask(out, isWatch) {
}));
}
resultStream = es.through(function (data) {
var filePath = path.normalize(path.resolve(basePath, data.relative));
var filePath = path.normalize(data.path);
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
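A minimal sketch (task names are assumptions) of how the two factories above are wired into gulp tasks, using the three-argument compileTask variant:

const gulp = require('gulp');
const { compileTask, watchTask } = require('./lib/compilation');

// One-shot compile of src/ into out/, plus an incremental watcher for development.
gulp.task('compile-client', compileTask('src', 'out', false));
gulp.task('watch-client', watchTask('out', false));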

View File

@@ -21,22 +21,15 @@ import * as fs from 'fs';
const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string) {
const rootDir = path.join(__dirname, `../../${src}`);
const options = require(`../../${src}/tsconfig.json`).compilerOptions;
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
}
const rootDir = path.join(__dirname, '../../src');
const options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(getTypeScriptCompilerOptions(src));
function createCompile(build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(options);
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
@@ -56,13 +49,14 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
// .pipe(build ? reloadTypeScriptNodeModule() : es.through())
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: options.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(emitError));
@@ -71,32 +65,27 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
};
}
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
export function compileTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile(src, build, true);
const compile = createCompile(build, true);
const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }),
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return srcPipe
return src
.pipe(compile())
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
.pipe(monacodtsTask(out, false));
};
}
export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile('src', build);
const compile = createCompile(build);
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
@@ -104,21 +93,61 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
);
const watchSrc = watch('src/**', { base: 'src' });
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return watchSrc
.pipe(util.incremental(compile, src, true))
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
};
}
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
function reloadTypeScriptNodeModule(): NodeJS.ReadWriteStream {
var util = require('gulp-util');
function log(message: any, ...rest: any[]): void {
util.log(util.colors.cyan('[memory watch dog]'), message, ...rest);
}
const basePath = path.resolve(process.cwd(), out);
function heapUsed(): string {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
const neededFiles: { [file: string]: boolean; } = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
@@ -153,7 +182,6 @@ function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
} else {
fs.writeFileSync(result.filePath, result.content);
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
@@ -168,7 +196,7 @@ function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
}
resultStream = es.through(function (data) {
const filePath = path.normalize(path.resolve(basePath, data.relative));
const filePath = path.normalize(data.path);
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}

View File

@@ -1,28 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const fs = require('fs');
const path = require('path');
const root = path.dirname(path.dirname(__dirname));
function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron
if (require.main === module) {
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `v${version}`;
process.exit(isUpToDate ? 0 : 1);
}
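Because the script above exits non-zero when .build/electron is missing or stale, a launcher can use it as a cheap guard before triggering a full download. A hedged sketch (the .yarnrc value and the fallback command are illustrative):

const cp = require('child_process');

// .yarnrc in the repo root carries a line such as:  target "1.7.9"
try {
	cp.execFileSync(process.execPath, ['build/lib/electron.js']);
	console.log('Electron in .build/electron is up to date.');
} catch (err) {
	cp.execSync('gulp electron', { stdio: 'inherit' });  // re-fetch the pinned version
}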

View File

@@ -20,7 +20,7 @@ var vsce = require("vsce");
var File = require("vinyl");
function fromLocal(extensionPath) {
var result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
vsce.listFiles({ cwd: extensionPath })
.then(function (fileNames) {
var files = fileNames
.map(function (fileName) { return path.join(extensionPath, fileName); })
@@ -44,7 +44,6 @@ function error(err) {
var baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
function fromMarketplace(extensionName, version) {
var filterType = 7;

View File

@@ -22,7 +22,7 @@ import * as File from 'vinyl';
export function fromLocal(extensionPath: string): Stream {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
vsce.listFiles({ cwd: extensionPath })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
@@ -49,7 +49,6 @@ function error(err: any): Stream {
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
export function fromMarketplace(extensionName: string, version: string): Stream {

File diff suppressed because it is too large

View File

@@ -34,10 +34,6 @@
"name": "vs/workbench/parts/codeEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/comments",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/debug",
"project": "vscode-workbench"
@@ -50,6 +46,10 @@
"name": "vs/workbench/parts/execution",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/explorers",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/extensions",
"project": "vscode-workbench"
@@ -71,15 +71,7 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/localizations",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/logs",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/navigation",
"name": "vs/workbench/parts/nps",
"project": "vscode-workbench"
},
{
@@ -138,50 +130,22 @@
"name": "vs/workbench/parts/update",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/url",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/watermark",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/webview",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/welcome",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/outline",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/bulkEdit",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/configuration",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/configurationResolver",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/crashReporter",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/dialogs",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/editor",
"project": "vscode-workbench"
@@ -190,10 +154,6 @@
"name": "vs/workbench/services/extensions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/jsonschemas",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/files",
"project": "vscode-workbench"
@@ -202,6 +162,10 @@
"name": "vs/workbench/services/keybinding",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/message",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/mode",
"project": "vscode-workbench"
@@ -231,8 +195,8 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/preferences",
"project": "vscode-preferences"
"name": "setup_messages",
"project": "vscode-workbench"
}
]
}

File diff suppressed because it is too large

View File

@@ -79,7 +79,7 @@ function isImportNode(node) {
function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; }
return new File({
contents: Buffer.from(contents),
contents: new Buffer(contents),
base: file.base,
cwd: file.cwd,
path: path
@@ -150,16 +150,13 @@ function isImportNode(node) {
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
.map(function (d) { return d.importClause.namedBindings.name; })
.concat(importEqualsDeclarations.map(function (d) { return d.name; }))
// find read-only references to `nls`
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; })
// find the deepest call expressions AST nodes that contain those references
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; })
// only `localize` calls
.filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
// `localize` named imports
var allLocalizeImportDeclarations = importDeclarations

View File

@@ -131,7 +131,7 @@ module nls {
export function fileFrom(file: File, contents: string, path: string = file.path) {
return new File({
contents: Buffer.from(contents),
contents: new Buffer(contents),
base: file.base,
cwd: file.cwd,
path: path

View File

@@ -17,6 +17,7 @@ var concat = require("gulp-concat");
var VinylFile = require("vinyl");
var bundle = require("./bundle");
var util = require("./util");
var i18n = require("./i18n");
var gulpUtil = require("gulp-util");
var flatmap = require("gulp-flatmap");
var pump = require("pump");
@@ -39,26 +40,26 @@ function loaderConfig(emptyPaths) {
}
exports.loaderConfig = loaderConfig;
var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(src, bundledFileHeader, bundleLoader) {
function loader(bundledFileHeader, bundleLoader) {
var sources = [
src + "/vs/loader.js"
'out-build/vs/loader.js'
];
if (bundleLoader) {
sources = sources.concat([
src + "/vs/css.js",
src + "/vs/nls.js"
'out-build/vs/css.js',
'out-build/vs/nls.js'
]);
}
var isFirst = true;
return (gulp
.src(sources, { base: "" + src })
.src(sources, { base: 'out-build' })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: Buffer.from(bundledFileHeader)
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
}
@@ -73,7 +74,7 @@ function loader(src, bundledFileHeader, bundleLoader) {
return f;
})));
}
function toConcatStream(src, bundledFileHeader, sources, dest) {
function toConcatStream(bundledFileHeader, sources, dest) {
var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then
// insert a fake source at the beginning of each bundle with our copyright
@@ -93,24 +94,23 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
}
var treatedSources = sources.map(function (source) {
var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
var base = source.path ? root + ("/" + src) : '';
var base = source.path ? root + '/out-build' : '';
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
contents: Buffer.from(source.contents)
contents: new Buffer(source.contents)
});
});
return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest));
}
function toBundleStream(src, bundledFileHeader, bundles) {
function toBundleStream(bundledFileHeader, bundles) {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
}));
}
function optimizeTask(opts) {
var src = opts.src;
var entryPoints = opts.entryPoints;
var otherSources = opts.otherSources;
var resources = opts.resources;
@@ -126,7 +126,7 @@ function optimizeTask(opts) {
if (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
var filteredResources = resources.slice();
result.cssInlinedResources.forEach(function (resource) {
@@ -135,22 +135,22 @@ function optimizeTask(opts) {
}
filteredResources.push('!' + resource);
});
gulp.src(filteredResources, { base: "" + src }).pipe(resourcesStream);
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
var bundleInfoArray = [];
if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
base: '.',
contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
}));
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
});
var otherSourcesStream = es.through();
var otherSourcesStreamArr = [];
gulp.src(otherSources, { base: "" + src })
gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative));
otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
}, function () {
if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
@@ -159,17 +159,22 @@ function optimizeTask(opts) {
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
}
}));
var result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
var result = es.merge(loader(bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
return result
.pipe(sourcemaps.write('./', {
sourceRoot: null,
addComment: true,
includeContent: true
}))
.pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}))
.pipe(gulp.dest(out));
};
}
exports.optimizeTask = optimizeTask;
;
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
@@ -207,7 +212,8 @@ function uglifyWithCopyrights() {
return stream.pipe(minify({
output: {
comments: preserveComments(f),
max_line_len: 1024
// linux tfs build agent is crashing, does this help?
max_line_len: 3200000
}
}));
}));
@@ -232,3 +238,4 @@ function minifyTask(src, sourceMapBaseUrl) {
};
}
exports.minifyTask = minifyTask;
;

View File

@@ -18,11 +18,11 @@ import * as concat from 'gulp-concat';
import * as VinylFile from 'vinyl';
import * as bundle from './bundle';
import * as util from './util';
import * as i18n from './i18n';
import * as gulpUtil from 'gulp-util';
import * as flatmap from 'gulp-flatmap';
import * as pump from 'pump';
import * as sm from 'source-map';
import { Language } from './i18n';
const REPO_ROOT_PATH = path.join(__dirname, '../..');
@@ -31,7 +31,7 @@ function log(prefix: string, message: string): void {
}
// {{SQL CARBON EDIT}}
export function loaderConfig(emptyPaths?: string[]) {
export function loaderConfig(emptyPaths: string[]) {
const result = {
paths: {
'vs': 'out-build/vs',
@@ -52,28 +52,28 @@ declare class FileSourceMap extends VinylFile {
public sourceMap: sm.RawSourceMap;
}
function loader(src: string, bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream {
function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream {
let sources = [
`${src}/vs/loader.js`
'out-build/vs/loader.js'
];
if (bundleLoader) {
sources = sources.concat([
`${src}/vs/css.js`,
`${src}/vs/nls.js`
'out-build/vs/css.js',
'out-build/vs/nls.js'
]);
}
let isFirst = true;
return (
gulp
.src(sources, { base: `${src}` })
.src(sources, { base: 'out-build' })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: Buffer.from(bundledFileHeader)
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
} else {
@@ -89,7 +89,7 @@ function loader(src: string, bundledFileHeader: string, bundleLoader: boolean):
);
}
function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream {
function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream {
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then
@@ -112,12 +112,12 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : '';
const base = source.path ? root + '/out-build' : '';
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
contents: Buffer.from(source.contents)
contents: new Buffer(source.contents)
});
});
@@ -126,17 +126,13 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
.pipe(concat(dest));
}
function toBundleStream(src:string, bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
function toBundleStream(bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
}));
}
export interface IOptimizeTaskOpts {
/**
* The folder to read files from.
*/
src: string;
/**
* (for AMD files, will get bundled and get Copyright treatment)
*/
@@ -167,13 +163,11 @@ export interface IOptimizeTaskOpts {
*/
out: string;
/**
* (out folder name)
* (languages to process)
*/
languages?: Language[];
languages: string[];
}
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
const src = opts.src;
const entryPoints = opts.entryPoints;
const otherSources = opts.otherSources;
const resources = opts.resources;
@@ -190,7 +184,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err) { return bundlesStream.emit('error', JSON.stringify(err)); }
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
const filteredResources = resources.slice();
@@ -200,14 +194,14 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
}
filteredResources.push('!' + resource);
});
gulp.src(filteredResources, { base: `${src}` }).pipe(resourcesStream);
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
const bundleInfoArray: VinylFile[] = [];
if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
base: '.',
contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
}));
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
@@ -216,9 +210,9 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
const otherSourcesStream = es.through();
const otherSourcesStreamArr: NodeJS.ReadWriteStream[] = [];
gulp.src(otherSources, { base: `${src}` })
gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative));
otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
}, function () {
if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
@@ -228,7 +222,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
}));
const result = es.merge(
loader(src, bundledFileHeader, bundleLoader),
loader(bundledFileHeader, bundleLoader),
bundlesStream,
otherSourcesStream,
resourcesStream,
@@ -241,9 +235,13 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
addComment: true,
includeContent: true
}))
.pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}))
.pipe(gulp.dest(out));
};
}
};
declare class FileWithCopyright extends VinylFile {
public __hasOurCopyright: boolean;
@@ -289,7 +287,8 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
return stream.pipe(minify({
output: {
comments: preserveComments(<FileWithCopyright>f),
max_line_len: 1024
// linux tfs build agent is crashing, does this help?
max_line_len: 3200000
}
}));
}));
@@ -297,7 +296,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
return es.duplex(input, output);
}
export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
export function minifyTask(src: string, sourceMapBaseUrl: string): (cb: any) => void {
const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);
return cb => {
@@ -328,4 +327,4 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
cb(err);
});
};
}
};
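A hedged sketch of how optimizeTask is typically invoked from the main gulpfile, using the IOptimizeTaskOpts variant above that carries a src folder (the exact field set beyond the excerpted interface is an assumption; entry points, globs, and the header string are placeholders):

const gulp = require('gulp');
const optimize = require('./lib/optimize');

gulp.task('optimize-vscode', ['compile-build'], optimize.optimizeTask({
	src: 'out-build',
	entryPoints: [{ name: 'vs/workbench/workbench.main' }],  // placeholder entry point
	otherSources: [],
	resources: ['out-build/**/*.{svg,png,css}'],             // placeholder glob
	loaderConfig: optimize.loaderConfig(),
	header: '/*! Copyright (c) Microsoft Corporation. */',
	bundleInfo: false,
	out: 'out-vscode'
}));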

View File

@@ -34,13 +34,7 @@ catch (err) {
}
function log() {
var errors = _.flatten(allErrors);
var seen = new Set();
errors.map(function (err) {
if (!seen.has(err)) {
seen.add(err);
util.log(util.colors.red('Error') + ": " + err);
}
});
errors.map(function (err) { return util.log(util.colors.red('Error') + ": " + err); });
var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
var messages = errors
.map(function (err) { return regex.exec(err); })
@@ -73,13 +67,8 @@ function createReporter() {
return es.through(null, function () {
onEnd();
if (emitError && errors.length > 0) {
errors.__logged__ = true;
if (!errors.__logged__) {
log();
}
var err = new Error("Found " + errors.length + " errors");
err.__reporter__ = true;
this.emit('error', err);
log();
this.emit('error');
}
else {
this.emit('end');
@@ -91,3 +80,4 @@ function createReporter() {
return ReportFunc;
}
exports.createReporter = createReporter;
;

View File

@@ -11,7 +11,7 @@ import * as util from 'gulp-util';
import * as fs from 'fs';
import * as path from 'path';
const allErrors: string[][] = [];
const allErrors: Error[][] = [];
let startTime: number = null;
let count = 0;
@@ -42,14 +42,7 @@ try {
function log(): void {
const errors = _.flatten(allErrors);
const seen = new Set<string>();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
util.log(`${util.colors.red('Error')}: ${err}`);
}
});
errors.map(err => util.log(`${util.colors.red('Error')}: ${err}`));
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors
@@ -68,17 +61,17 @@ function log(): void {
}
export interface IReporter {
(err: string): void;
(err: Error): void;
hasErrors(): boolean;
end(emitError: boolean): NodeJS.ReadWriteStream;
}
export function createReporter(): IReporter {
const errors: string[] = [];
const errors: Error[] = [];
allErrors.push(errors);
class ReportFunc {
constructor(err: string) {
constructor(err: Error) {
errors.push(err);
}
@@ -94,15 +87,8 @@ export function createReporter(): IReporter {
onEnd();
if (emitError && errors.length > 0) {
(errors as any).__logged__ = true;
if (!(errors as any).__logged__) {
log();
}
const err = new Error(`Found ${errors.length} errors`);
(err as any).__reporter__ = true;
this.emit('error', err);
log();
this.emit('error');
} else {
this.emit('end');
}
@@ -111,4 +97,4 @@ export function createReporter(): IReporter {
}
return <IReporter><any>ReportFunc;
}
};

View File

@@ -1,349 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var fs = require("fs");
var path = require("path");
var tss = require("./treeshaking");
var REPO_ROOT = path.join(__dirname, '../../');
var SRC_DIR = path.join(REPO_ROOT, 'src');
var OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
var dirCache = {};
function writeFile(filePath, contents) {
function ensureDirs(dirPath) {
if (dirCache[dirPath]) {
return;
}
dirCache[dirPath] = true;
ensureDirs(path.dirname(dirPath));
if (fs.existsSync(dirPath)) {
return;
}
fs.mkdirSync(dirPath);
}
ensureDirs(path.dirname(filePath));
fs.writeFileSync(filePath, contents);
}
function extractEditor(options) {
var result = tss.shake(options);
for (var fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
}
}
var copied = {};
var copyFile = function (fileName) {
if (copied[fileName]) {
return;
}
copied[fileName] = true;
var srcPath = path.join(options.sourcesRoot, fileName);
var dstPath = path.join(options.destRoot, fileName);
writeFile(dstPath, fs.readFileSync(srcPath));
};
var writeOutputFile = function (fileName, contents) {
writeFile(path.join(options.destRoot, fileName), contents);
};
for (var fileName in result) {
if (result.hasOwnProperty(fileName)) {
var fileContents = result[fileName];
var info = ts.preProcessFile(fileContents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFileName = info.importedFiles[i].fileName;
var importedFilePath = void 0;
if (/^vs\/css!/.test(importedFileName)) {
importedFilePath = importedFileName.substr('vs/css!'.length) + '.css';
}
else {
importedFilePath = importedFileName;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilePath)) {
importedFilePath = path.join(path.dirname(fileName), importedFilePath);
}
if (/\.css$/.test(importedFilePath)) {
transportCSS(importedFilePath, copyFile, writeOutputFile);
}
else {
if (fs.existsSync(path.join(options.sourcesRoot, importedFilePath + '.js'))) {
copyFile(importedFilePath + '.js');
}
}
}
}
}
var tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
tsConfig.compilerOptions.noUnusedLocals = false;
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
[
'vs/css.build.js',
'vs/css.d.ts',
'vs/css.js',
'vs/loader.js',
'vs/monaco.d.ts',
'vs/nls.build.js',
'vs/nls.d.ts',
'vs/nls.js',
'vs/nls.mock.ts',
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require.d.ts',
].forEach(copyFile);
}
exports.extractEditor = extractEditor;
function createESMSourcesAndResources(options) {
var OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
var OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
var in_queue = Object.create(null);
var queue = [];
var enqueue = function (module) {
if (in_queue[module]) {
return;
}
in_queue[module] = true;
queue.push(module);
};
var seenDir = {};
var createDirectoryRecursive = function (dir) {
if (seenDir[dir]) {
return;
}
var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (lastSlash !== -1) {
createDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
try {
fs.mkdirSync(dir);
}
catch (err) { }
};
seenDir[REPO_ROOT] = true;
var toggleComments = function (fileContents) {
var lines = fileContents.split(/\r\n|\r|\n/);
var mode = 0;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
return lines.join('\n');
};
var write = function (filePath, contents) {
var absoluteFilePath;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
}
else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString());
}
fs.writeFileSync(absoluteFilePath, contents);
};
options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
while (queue.length > 0) {
var module_1 = queue.shift();
if (transportCSS(module_1, enqueue, write)) {
continue;
}
if (transportResource(options, module_1, enqueue, write)) {
continue;
}
if (transportDTS(options, module_1, enqueue, write)) {
continue;
}
var filename = void 0;
if (options.redirects[module_1]) {
filename = path.join(SRC_DIR, options.redirects[module_1] + '.ts');
}
else {
filename = path.join(SRC_DIR, module_1 + '.ts');
}
var fileContents = fs.readFileSync(filename).toString();
var info = ts.preProcessFile(fileContents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFilename = info.importedFiles[i].fileName;
var pos = info.importedFiles[i].pos;
var end = info.importedFiles[i].end;
var importedFilepath = void 0;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
}
else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module_1), importedFilepath);
}
enqueue(importedFilepath);
var relativePath = void 0;
if (importedFilepath === path.dirname(module_1)) {
relativePath = '../' + path.basename(path.dirname(module_1));
}
else if (importedFilepath === path.dirname(path.dirname(module_1))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module_1)));
}
else {
relativePath = path.relative(path.dirname(module_1), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1));
}
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return "import * as " + m1 + " from " + m2 + ";";
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module_1 + '.ts', fileContents);
}
var esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": []
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
var monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
}
exports.createESMSourcesAndResources = createESMSourcesAndResources;
function transportCSS(module, enqueue, write) {
if (!/\.css/.test(module)) {
return false;
}
var filename = path.join(SRC_DIR, module);
var fileContents = fs.readFileSync(filename).toString();
var inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
var inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
var newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents);
return true;
function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
return _replaceURL(contents, function (url) {
var imagePath = path.join(path.dirname(module), url);
var fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) {
var MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
var DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
var newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
var encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
}
enqueue(imagePath);
return url;
});
}
function _replaceURL(contents, replacer) {
// Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, function (_) {
var matches = [];
for (var _i = 1; _i < arguments.length; _i++) {
matches[_i - 1] = arguments[_i];
}
var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1);
}
// The ending whitespace is captured
while (url.length > 0 && (url.charAt(url.length - 1) === ' ' || url.charAt(url.length - 1) === '\t')) {
url = url.substring(0, url.length - 1);
}
// Eliminate ending quotes
if (url.charAt(url.length - 1) === '"' || url.charAt(url.length - 1) === '\'') {
url = url.substring(0, url.length - 1);
}
if (!_startsWith(url, 'data:') && !_startsWith(url, 'http://') && !_startsWith(url, 'https://')) {
url = replacer(url);
}
return 'url(' + url + ')';
});
}
function _startsWith(haystack, needle) {
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
}
}
function transportResource(options, module, enqueue, write) {
if (!/\.svg/.test(module)) {
return false;
}
write(module, fs.readFileSync(path.join(SRC_DIR, module)));
return true;
}
function transportDTS(options, module, enqueue, write) {
if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
return false;
}
if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
return false;
}
write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
var filename;
if (options.redirects[module]) {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
}
else {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
}
return true;
}
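The toggleComments helper inside createESMSourcesAndResources above flips specially marked regions while copying sources, so one file can carry both an AMD/node variant and an ESM variant. A hedged illustration of the markers it recognizes (the statements between them are placeholders):

// ESM-comment-begin
import * as amd from 'vs/base/common/amd';    // kept in the AMD build, commented out for the ESM copy
// ESM-comment-end
// ESM-uncomment-begin
// export const isESMBuild = true;            // inactive here, uncommented in the ESM copy
// ESM-uncomment-end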

View File

@@ -1,395 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as fs from 'fs';
import * as path from 'path';
import * as tss from './treeshaking';
const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
const OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
let dirCache: { [dir: string]: boolean; } = {};
function writeFile(filePath: string, contents: Buffer | string): void {
function ensureDirs(dirPath: string): void {
if (dirCache[dirPath]) {
return;
}
dirCache[dirPath] = true;
ensureDirs(path.dirname(dirPath));
if (fs.existsSync(dirPath)) {
return;
}
fs.mkdirSync(dirPath);
}
ensureDirs(path.dirname(filePath));
fs.writeFileSync(filePath, contents);
}
export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: string }): void {
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
}
}
let copied: { [fileName:string]: boolean; } = {};
const copyFile = (fileName: string) => {
if (copied[fileName]) {
return;
}
copied[fileName] = true;
const srcPath = path.join(options.sourcesRoot, fileName);
const dstPath = path.join(options.destRoot, fileName);
writeFile(dstPath, fs.readFileSync(srcPath));
};
const writeOutputFile = (fileName: string, contents: string) => {
writeFile(path.join(options.destRoot, fileName), contents);
};
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
const fileContents = result[fileName];
const info = ts.preProcessFile(fileContents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFileName = info.importedFiles[i].fileName;
let importedFilePath: string;
if (/^vs\/css!/.test(importedFileName)) {
importedFilePath = importedFileName.substr('vs/css!'.length) + '.css';
} else {
importedFilePath = importedFileName;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilePath)) {
importedFilePath = path.join(path.dirname(fileName), importedFilePath);
}
if (/\.css$/.test(importedFilePath)) {
transportCSS(importedFilePath, copyFile, writeOutputFile);
} else {
if (fs.existsSync(path.join(options.sourcesRoot, importedFilePath + '.js'))) {
copyFile(importedFilePath + '.js');
}
}
}
}
}
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
tsConfig.compilerOptions.noUnusedLocals = false;
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
[
'vs/css.build.js',
'vs/css.d.ts',
'vs/css.js',
'vs/loader.js',
'vs/monaco.d.ts',
'vs/nls.build.js',
'vs/nls.d.ts',
'vs/nls.js',
'vs/nls.mock.ts',
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require.d.ts',
].forEach(copyFile);
}
export interface IOptions {
entryPoints: string[];
outFolder: string;
outResourcesFolder: string;
redirects: { [module: string]: string; };
}
export function createESMSourcesAndResources(options: IOptions): void {
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
let in_queue: { [module: string]: boolean; } = Object.create(null);
let queue: string[] = [];
const enqueue = (module: string) => {
if (in_queue[module]) {
return;
}
in_queue[module] = true;
queue.push(module);
};
const seenDir: { [key: string]: boolean; } = {};
const createDirectoryRecursive = (dir: string) => {
if (seenDir[dir]) {
return;
}
let lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (lastSlash !== -1) {
createDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
try { fs.mkdirSync(dir); } catch (err) { }
};
seenDir[REPO_ROOT] = true;
const toggleComments = (fileContents: string) => {
let lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
return lines.join('\n');
};
const write = (filePath: string, contents: string | Buffer) => {
let absoluteFilePath: string;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
} else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString());
}
fs.writeFileSync(absoluteFilePath, contents);
};
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
while (queue.length > 0) {
const module = queue.shift();
if (transportCSS(module, enqueue, write)) {
continue;
}
if (transportResource(options, module, enqueue, write)) {
continue;
}
if (transportDTS(options, module, enqueue, write)) {
continue;
}
let filename: string;
if (options.redirects[module]) {
filename = path.join(SRC_DIR, options.redirects[module] + '.ts');
} else {
filename = path.join(SRC_DIR, module + '.ts');
}
let fileContents = fs.readFileSync(filename).toString();
const info = ts.preProcessFile(fileContents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFilename = info.importedFiles[i].fileName;
const pos = info.importedFiles[i].pos;
const end = info.importedFiles[i].end;
let importedFilepath: string;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
} else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module), importedFilepath);
}
enqueue(importedFilepath);
let relativePath: string;
if (importedFilepath === path.dirname(module)) {
relativePath = '../' + path.basename(path.dirname(module));
} else if (importedFilepath === path.dirname(path.dirname(module))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module)));
} else {
relativePath = path.relative(path.dirname(module), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (
fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1)
);
}
        fileContents = fileContents.replace(/import ([a-zA-Z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return `import * as ${m1} from ${m2};`;
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module + '.ts', fileContents);
}
const esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": [
]
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
const monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
}
function transportCSS(module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
if (!/\.css/.test(module)) {
return false;
}
const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents);
return true;
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean, inlineByteLimit: number): string {
return _replaceURL(contents, (url) => {
let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
}
enqueue(imagePath);
return url;
});
}
function _replaceURL(contents: string, replacer: (url: string) => string): string {
// Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_: string, ...matches: string[]) => {
var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1);
}
// The ending whitespace is captured
while (url.length > 0 && (url.charAt(url.length - 1) === ' ' || url.charAt(url.length - 1) === '\t')) {
url = url.substring(0, url.length - 1);
}
// Eliminate ending quotes
if (url.charAt(url.length - 1) === '"' || url.charAt(url.length - 1) === '\'') {
url = url.substring(0, url.length - 1);
}
if (!_startsWith(url, 'data:') && !_startsWith(url, 'http://') && !_startsWith(url, 'https://')) {
url = replacer(url);
}
return 'url(' + url + ')';
});
}
function _startsWith(haystack: string, needle: string): boolean {
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
}
}
function transportResource(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
if (!/\.svg/.test(module)) {
return false;
}
write(module, fs.readFileSync(path.join(SRC_DIR, module)));
return true;
}
function transportDTS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
return false;
}
if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
return false;
}
write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
let filename: string;
if (options.redirects[module]) {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
} else {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
}
return true;
}
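
createESMSourcesAndResources above turns the AMD-flavoured sources into ES modules, and one of its steps is the regex rewrite of "import X = require('id')" imports. Below is a small sketch of that step in isolation, under the same regex-based assumption; rewriteRequireImports is a hypothetical name used only for this example.

// Hypothetical helper mirroring the import rewrite performed above.
const rewriteRequireImports = (source: string): string =>
    source.replace(/import ([a-zA-Z0-9]+) = require\(('[^']+')\);/g,
        (_, name: string, moduleId: string) => `import * as ${name} from ${moduleId};`);

// Example (illustrative input only):
console.log(rewriteRequireImports(`import assert = require('assert');`));
// -> import * as assert from 'assert';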

View File

@@ -1,682 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var fs = require("fs");
var path = require("path");
var ts = require("typescript");
var TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
var ShakeLevel;
(function (ShakeLevel) {
ShakeLevel[ShakeLevel["Files"] = 0] = "Files";
ShakeLevel[ShakeLevel["InnerFile"] = 1] = "InnerFile";
ShakeLevel[ShakeLevel["ClassMembers"] = 2] = "ClassMembers";
})(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {}));
function shake(options) {
var languageService = createTypeScriptLanguageService(options);
markNodes(languageService, options);
return generateResult(languageService, options.shakeLevel);
}
exports.shake = shake;
//#region Discovery, LanguageService & Setup
function createTypeScriptLanguageService(options) {
// Discover referenced files
var FILES = discoverAndReadFiles(options);
// Add fake usage files
options.inlineEntryPoints.forEach(function (inlineEntryPoint, index) {
FILES["inlineEntryPoint:" + index + ".ts"] = inlineEntryPoint;
});
// Resolve libs
var RESOLVED_LIBS = {};
options.libs.forEach(function (filename) {
var filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
RESOLVED_LIBS["defaultLib:" + filename] = fs.readFileSync(filepath).toString();
});
var host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, options.compilerOptions);
return ts.createLanguageService(host);
}
/**
* Read imports and follow them until all files have been handled
*/
function discoverAndReadFiles(options) {
var FILES = {};
var in_queue = Object.create(null);
var queue = [];
var enqueue = function (moduleId) {
if (in_queue[moduleId]) {
return;
}
in_queue[moduleId] = true;
queue.push(moduleId);
};
options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
while (queue.length > 0) {
var moduleId = queue.shift();
var dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
if (fs.existsSync(dts_filename)) {
var dts_filecontents = fs.readFileSync(dts_filename).toString();
FILES[moduleId + '.d.ts'] = dts_filecontents;
continue;
}
var ts_filename = void 0;
if (options.redirects[moduleId]) {
ts_filename = path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts');
}
else {
ts_filename = path.join(options.sourcesRoot, moduleId + '.ts');
}
var ts_filecontents = fs.readFileSync(ts_filename).toString();
var info = ts.preProcessFile(ts_filecontents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFileName = info.importedFiles[i].fileName;
if (options.importIgnorePattern.test(importedFileName)) {
// Ignore vs/css! imports
continue;
}
var importedModuleId = importedFileName;
if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) {
importedModuleId = path.join(path.dirname(moduleId), importedModuleId);
}
enqueue(importedModuleId);
}
FILES[moduleId + '.ts'] = ts_filecontents;
}
return FILES;
}
/**
* A TypeScript language service host
*/
var TypeScriptLanguageServiceHost = /** @class */ (function () {
function TypeScriptLanguageServiceHost(libs, files, compilerOptions) {
this._libs = libs;
this._files = files;
this._compilerOptions = compilerOptions;
}
// --- language service host ---------------
TypeScriptLanguageServiceHost.prototype.getCompilationSettings = function () {
return this._compilerOptions;
};
TypeScriptLanguageServiceHost.prototype.getScriptFileNames = function () {
return ([]
.concat(Object.keys(this._libs))
.concat(Object.keys(this._files)));
};
TypeScriptLanguageServiceHost.prototype.getScriptVersion = function (fileName) {
return '1';
};
TypeScriptLanguageServiceHost.prototype.getProjectVersion = function () {
return '1';
};
TypeScriptLanguageServiceHost.prototype.getScriptSnapshot = function (fileName) {
if (this._files.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._files[fileName]);
}
else if (this._libs.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
}
else {
return ts.ScriptSnapshot.fromString('');
}
};
TypeScriptLanguageServiceHost.prototype.getScriptKind = function (fileName) {
return ts.ScriptKind.TS;
};
TypeScriptLanguageServiceHost.prototype.getCurrentDirectory = function () {
return '';
};
TypeScriptLanguageServiceHost.prototype.getDefaultLibFileName = function (options) {
return 'defaultLib:lib.d.ts';
};
TypeScriptLanguageServiceHost.prototype.isDefaultLibFileName = function (fileName) {
return fileName === this.getDefaultLibFileName(this._compilerOptions);
};
return TypeScriptLanguageServiceHost;
}());
//#endregion
//#region Tree Shaking
var NodeColor;
(function (NodeColor) {
NodeColor[NodeColor["White"] = 0] = "White";
NodeColor[NodeColor["Gray"] = 1] = "Gray";
NodeColor[NodeColor["Black"] = 2] = "Black";
})(NodeColor || (NodeColor = {}));
function getColor(node) {
return node.$$$color || 0 /* White */;
}
function setColor(node, color) {
node.$$$color = color;
}
function nodeOrParentIsBlack(node) {
while (node) {
var color = getColor(node);
if (color === 2 /* Black */) {
return true;
}
node = node.parent;
}
return false;
}
function nodeOrChildIsBlack(node) {
if (getColor(node) === 2 /* Black */) {
return true;
}
for (var _i = 0, _a = node.getChildren(); _i < _a.length; _i++) {
var child = _a[_i];
if (nodeOrChildIsBlack(child)) {
return true;
}
}
return false;
}
function markNodes(languageService, options) {
var program = languageService.getProgram();
if (options.shakeLevel === 0 /* Files */) {
// Mark all source files Black
program.getSourceFiles().forEach(function (sourceFile) {
setColor(sourceFile, 2 /* Black */);
});
return;
}
var black_queue = [];
var gray_queue = [];
var sourceFilesLoaded = {};
function enqueueTopLevelModuleStatements(sourceFile) {
sourceFile.forEachChild(function (node) {
if (ts.isImportDeclaration(node)) {
if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, 2 /* Black */);
enqueueImport(node, node.moduleSpecifier.text);
}
return;
}
if (ts.isExportDeclaration(node)) {
if (ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, 2 /* Black */);
enqueueImport(node, node.moduleSpecifier.text);
}
return;
}
if (ts.isExpressionStatement(node)
|| ts.isIfStatement(node)
|| ts.isIterationStatement(node, true)
|| ts.isExportAssignment(node)) {
enqueue_black(node);
}
if (ts.isImportEqualsDeclaration(node)) {
if (/export/.test(node.getFullText(sourceFile))) {
// e.g. "export import Severity = BaseSeverity;"
enqueue_black(node);
}
}
});
}
function enqueue_gray(node) {
if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* Gray */) {
return;
}
setColor(node, 1 /* Gray */);
gray_queue.push(node);
}
function enqueue_black(node) {
var previousColor = getColor(node);
if (previousColor === 2 /* Black */) {
return;
}
if (previousColor === 1 /* Gray */) {
// remove from gray queue
gray_queue.splice(gray_queue.indexOf(node), 1);
setColor(node, 0 /* White */);
// add to black queue
enqueue_black(node);
// // move from one queue to the other
// black_queue.push(node);
// setColor(node, NodeColor.Black);
return;
}
if (nodeOrParentIsBlack(node)) {
return;
}
var fileName = node.getSourceFile().fileName;
if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
setColor(node, 2 /* Black */);
return;
}
var sourceFile = node.getSourceFile();
if (!sourceFilesLoaded[sourceFile.fileName]) {
sourceFilesLoaded[sourceFile.fileName] = true;
enqueueTopLevelModuleStatements(sourceFile);
}
if (ts.isSourceFile(node)) {
return;
}
setColor(node, 2 /* Black */);
black_queue.push(node);
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
var references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
if (references) {
for (var i = 0, len = references.length; i < len; i++) {
var reference = references[i];
var referenceSourceFile = program.getSourceFile(reference.fileName);
var referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
if (ts.isMethodDeclaration(referenceNode.parent)
|| ts.isPropertyDeclaration(referenceNode.parent)
|| ts.isGetAccessor(referenceNode.parent)
|| ts.isSetAccessor(referenceNode.parent)) {
enqueue_gray(referenceNode.parent);
}
}
}
}
}
function enqueueFile(filename) {
var sourceFile = program.getSourceFile(filename);
if (!sourceFile) {
console.warn("Cannot find source file " + filename);
return;
}
enqueue_black(sourceFile);
}
function enqueueImport(node, importText) {
if (options.importIgnorePattern.test(importText)) {
// this import should be ignored
return;
}
var nodeSourceFile = node.getSourceFile();
var fullPath;
if (/(^\.\/)|(^\.\.\/)/.test(importText)) {
fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts';
}
else {
fullPath = importText + '.ts';
}
enqueueFile(fullPath);
}
options.entryPoints.forEach(function (moduleId) { return enqueueFile(moduleId + '.ts'); });
// Add fake usage files
options.inlineEntryPoints.forEach(function (_, index) { return enqueueFile("inlineEntryPoint:" + index + ".ts"); });
var step = 0;
var checker = program.getTypeChecker();
var _loop_1 = function () {
++step;
var node = void 0;
if (step % 100 === 0) {
console.log(step + "/" + (step + black_queue.length + gray_queue.length) + " (" + black_queue.length + ", " + gray_queue.length + ")");
}
if (black_queue.length === 0) {
for (var i = 0; i < gray_queue.length; i++) {
var node_1 = gray_queue[i];
var nodeParent = node_1.parent;
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
gray_queue.splice(i, 1);
black_queue.push(node_1);
setColor(node_1, 2 /* Black */);
i--;
}
}
}
if (black_queue.length > 0) {
node = black_queue.shift();
}
else {
return "break";
}
var nodeSourceFile = node.getSourceFile();
var loop = function (node) {
var _a = getRealNodeSymbol(checker, node), symbol = _a[0], symbolImportNode = _a[1];
if (symbolImportNode) {
setColor(symbolImportNode, 2 /* Black */);
}
if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
for (var i = 0, len = symbol.declarations.length; i < len; i++) {
var declaration = symbol.declarations[i];
if (ts.isSourceFile(declaration)) {
// Do not enqueue full source files
// (they can be the declaration of a module import)
continue;
}
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
enqueue_black(declaration.name);
for (var j = 0; j < declaration.members.length; j++) {
var member = declaration.members[j];
var memberName = member.name ? member.name.getText() : null;
if (ts.isConstructorDeclaration(member)
|| ts.isConstructSignatureDeclaration(member)
|| ts.isIndexSignatureDeclaration(member)
|| ts.isCallSignatureDeclaration(member)
|| memberName === 'toJSON'
|| memberName === 'toString'
|| memberName === 'dispose' // TODO: keeping all `dispose` methods
) {
enqueue_black(member);
}
}
// queue the heritage clauses
if (declaration.heritageClauses) {
for (var _i = 0, _b = declaration.heritageClauses; _i < _b.length; _i++) {
var heritageClause = _b[_i];
enqueue_black(heritageClause);
}
}
}
else {
enqueue_black(declaration);
}
}
}
node.forEachChild(loop);
};
node.forEachChild(loop);
};
while (black_queue.length > 0 || gray_queue.length > 0) {
var state_1 = _loop_1();
if (state_1 === "break")
break;
}
}
function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) {
for (var i = 0, len = symbol.declarations.length; i < len; i++) {
var declaration = symbol.declarations[i];
var declarationSourceFile = declaration.getSourceFile();
if (nodeSourceFile === declarationSourceFile) {
if (declaration.pos <= node.pos && node.end <= declaration.end) {
return true;
}
}
}
return false;
}
function generateResult(languageService, shakeLevel) {
var program = languageService.getProgram();
var result = {};
var writeFile = function (filePath, contents) {
result[filePath] = contents;
};
program.getSourceFiles().forEach(function (sourceFile) {
var fileName = sourceFile.fileName;
if (/^defaultLib:/.test(fileName)) {
return;
}
var destination = fileName;
if (/\.d\.ts$/.test(fileName)) {
if (nodeOrChildIsBlack(sourceFile)) {
writeFile(destination, sourceFile.text);
}
return;
}
var text = sourceFile.text;
var result = '';
function keep(node) {
result += text.substring(node.pos, node.end);
}
function write(data) {
result += data;
}
function writeMarkedNodes(node) {
if (getColor(node) === 2 /* Black */) {
return keep(node);
}
// Always keep certain top-level statements
if (ts.isSourceFile(node.parent)) {
if (ts.isExpressionStatement(node) && ts.isStringLiteral(node.expression) && node.expression.text === 'use strict') {
return keep(node);
}
if (ts.isVariableStatement(node) && nodeOrChildIsBlack(node)) {
return keep(node);
}
}
// Keep the entire import in import * as X cases
if (ts.isImportDeclaration(node)) {
if (node.importClause && node.importClause.namedBindings) {
if (ts.isNamespaceImport(node.importClause.namedBindings)) {
if (getColor(node.importClause.namedBindings) === 2 /* Black */) {
return keep(node);
}
}
else {
var survivingImports = [];
for (var i = 0; i < node.importClause.namedBindings.elements.length; i++) {
var importNode = node.importClause.namedBindings.elements[i];
if (getColor(importNode) === 2 /* Black */) {
survivingImports.push(importNode.getFullText(sourceFile));
}
}
var leadingTriviaWidth = node.getLeadingTriviaWidth();
var leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
if (survivingImports.length > 0) {
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
return write(leadingTrivia + "import " + node.importClause.name.text + ", {" + survivingImports.join(',') + " } from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
}
return write(leadingTrivia + "import {" + survivingImports.join(',') + " } from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
}
else {
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
return write(leadingTrivia + "import " + node.importClause.name.text + " from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
}
}
}
}
else {
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
return keep(node);
}
}
}
if (shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
var toWrite = node.getFullText();
for (var i = node.members.length - 1; i >= 0; i--) {
var member = node.members[i];
if (getColor(member) === 2 /* Black */) {
// keep method
continue;
}
if (/^_(.*)Brand$/.test(member.name.getText())) {
// TODO: keep all members ending with `Brand`...
continue;
}
var pos = member.pos - node.pos;
var end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
}
return write(toWrite);
}
if (ts.isFunctionDeclaration(node)) {
// Do not go inside functions if they haven't been marked
return;
}
node.forEachChild(writeMarkedNodes);
}
if (getColor(sourceFile) !== 2 /* Black */) {
if (!nodeOrChildIsBlack(sourceFile)) {
// none of the elements are reachable => don't write this file at all!
return;
}
sourceFile.forEachChild(writeMarkedNodes);
result += sourceFile.endOfFileToken.getFullText(sourceFile);
}
else {
result = text;
}
writeFile(destination, result);
});
return result;
}
//#endregion
//#region Utils
/**
* Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
*/
function getRealNodeSymbol(checker, node) {
/**
* Returns the containing object literal property declaration given a possible name node, e.g. "a" in x = { "a": 1 }
*/
/* @internal */
function getContainingObjectLiteralElement(node) {
switch (node.kind) {
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
if (node.parent.kind === ts.SyntaxKind.ComputedPropertyName) {
return ts.isObjectLiteralElement(node.parent.parent) ? node.parent.parent : undefined;
}
// falls through
case ts.SyntaxKind.Identifier:
return ts.isObjectLiteralElement(node.parent) &&
(node.parent.parent.kind === ts.SyntaxKind.ObjectLiteralExpression || node.parent.parent.kind === ts.SyntaxKind.JsxAttributes) &&
node.parent.name === node ? node.parent : undefined;
}
return undefined;
}
function getPropertySymbolsFromType(type, propName) {
function getTextOfPropertyName(name) {
function isStringOrNumericLiteral(node) {
var kind = node.kind;
return kind === ts.SyntaxKind.StringLiteral
|| kind === ts.SyntaxKind.NumericLiteral;
}
switch (name.kind) {
case ts.SyntaxKind.Identifier:
return name.text;
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
return name.text;
case ts.SyntaxKind.ComputedPropertyName:
return isStringOrNumericLiteral(name.expression) ? name.expression.text : undefined;
}
}
var name = getTextOfPropertyName(propName);
if (name && type) {
var result = [];
var symbol_1 = type.getProperty(name);
if (type.flags & ts.TypeFlags.Union) {
for (var _i = 0, _a = type.types; _i < _a.length; _i++) {
var t = _a[_i];
var symbol_2 = t.getProperty(name);
if (symbol_2) {
result.push(symbol_2);
}
}
return result;
}
if (symbol_1) {
result.push(symbol_1);
return result;
}
}
return undefined;
}
function getPropertySymbolsFromContextualType(typeChecker, node) {
var objectLiteral = node.parent;
var contextualType = typeChecker.getContextualType(objectLiteral);
return getPropertySymbolsFromType(contextualType, node.name);
}
// Go to the original declaration for cases:
//
// (1) when the aliased symbol was declared in the location(parent).
// (2) when the aliased symbol is originating from an import.
//
function shouldSkipAlias(node, declaration) {
if (node.kind !== ts.SyntaxKind.Identifier) {
return false;
}
if (node.parent === declaration) {
return true;
}
switch (declaration.kind) {
case ts.SyntaxKind.ImportClause:
case ts.SyntaxKind.ImportEqualsDeclaration:
return true;
case ts.SyntaxKind.ImportSpecifier:
return declaration.parent.kind === ts.SyntaxKind.NamedImports;
default:
return false;
}
}
if (!ts.isShorthandPropertyAssignment(node)) {
if (node.getChildCount() !== 0) {
return [null, null];
}
}
var symbol = checker.getSymbolAtLocation(node);
var importNode = null;
if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
var aliased = checker.getAliasedSymbol(symbol);
if (aliased.declarations) {
// We should mark the import as visited
importNode = symbol.declarations[0];
symbol = aliased;
}
}
if (symbol) {
        // Because a name in a short-hand property assignment has two different meanings (property name and property value),
        // go-to-definition at such a position should go to the variable declaration of the property value rather than
        // to the declaration of the property name (i.e. stay at the same position). However, if go-to-definition
        // is performed at the location of a property access, we would like to go to the definition of the property in the
        // short-hand assignment. This case and others are handled by the following code.
if (node.parent.kind === ts.SyntaxKind.ShorthandPropertyAssignment) {
symbol = checker.getShorthandAssignmentValueSymbol(symbol.valueDeclaration);
}
// If the node is the name of a BindingElement within an ObjectBindingPattern instead of just returning the
// declaration the symbol (which is itself), we should try to get to the original type of the ObjectBindingPattern
// and return the property declaration for the referenced property.
// For example:
        // import('./foo').then(({ b/*goto*/ar }) => undefined); => should take us to the declaration in file "./foo"
//
// function bar<T>(onfulfilled: (value: T) => void) { //....}
// interface Test {
// pr/*destination*/op1: number
// }
// bar<Test>(({pr/*goto*/op1})=>{});
if (ts.isPropertyName(node) && ts.isBindingElement(node.parent) && ts.isObjectBindingPattern(node.parent.parent) &&
(node === (node.parent.propertyName || node.parent.name))) {
var type = checker.getTypeAtLocation(node.parent.parent);
if (type) {
var propSymbols = getPropertySymbolsFromType(type, node);
if (propSymbols) {
symbol = propSymbols[0];
}
}
}
// If the current location we want to find its definition is in an object literal, try to get the contextual type for the
// object literal, lookup the property symbol in the contextual type, and use this for goto-definition.
// For example
// interface Props{
// /*first*/prop1: number
// prop2: boolean
// }
// function Foo(arg: Props) {}
// Foo( { pr/*1*/op1: 10, prop2: false })
var element = getContainingObjectLiteralElement(node);
if (element && checker.getContextualType(element.parent)) {
var propertySymbols = getPropertySymbolsFromContextualType(checker, element);
if (propertySymbols) {
symbol = propertySymbols[0];
}
}
}
if (symbol && symbol.declarations) {
return [symbol, importNode];
}
return [null, null];
}
/** Get the token whose text contains the position */
function getTokenAtPosition(sourceFile, position, allowPositionInLeadingTrivia, includeEndPosition) {
var current = sourceFile;
outer: while (true) {
// find the child that contains 'position'
for (var _i = 0, _a = current.getChildren(); _i < _a.length; _i++) {
var child = _a[_i];
var start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
if (start > position) {
// If this child begins after position, then all subsequent children will as well.
break;
}
var end = child.getEnd();
if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) {
current = child;
continue outer;
}
}
return current;
}
}
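
The marking phase in markNodes above is a worklist algorithm over three node colours (White = unvisited, Gray = possibly needed, Black = kept). The sketch below shows the same pattern on a plain module graph instead of ts.Node objects; the Gray queue used for class members is intentionally omitted, and markReachable is a hypothetical name.

// Stripped-down worklist sketch of the colour marking used above.
enum Color { White = 0, Gray = 1, Black = 2 }

function markReachable(entryPoints: string[], edges: Map<string, string[]>): Set<string> {
    const color = new Map<string, Color>();
    const blackQueue: string[] = [];
    const enqueueBlack = (node: string) => {
        if (color.get(node) === Color.Black) {
            return;
        }
        color.set(node, Color.Black); // reachable => keep
        blackQueue.push(node);
    };
    entryPoints.forEach(enqueueBlack);
    while (blackQueue.length > 0) {
        const node = blackQueue.shift()!;
        for (const dep of edges.get(node) || []) {
            enqueueBlack(dep);
        }
    }
    return new Set(Array.from(color.keys()));
}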

View File

@@ -1,817 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as ts from 'typescript';
const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
export const enum ShakeLevel {
Files = 0,
InnerFile = 1,
ClassMembers = 2
}
export interface ITreeShakingOptions {
/**
* The full path to the root where sources are.
*/
sourcesRoot: string;
/**
* Module ids.
* e.g. `vs/editor/editor.main` or `index`
*/
entryPoints: string[];
/**
* Inline usages.
*/
inlineEntryPoints: string[];
/**
* TypeScript libs.
* e.g. `lib.d.ts`, `lib.es2015.collection.d.ts`
*/
libs: string[];
/**
* TypeScript compiler options.
*/
compilerOptions: ts.CompilerOptions;
/**
* The shake level to perform.
*/
shakeLevel: ShakeLevel;
/**
* regex pattern to ignore certain imports e.g. `vs/css!` imports
*/
importIgnorePattern: RegExp;
redirects: { [module: string]: string; };
}
export interface ITreeShakingResult {
[file: string]: string;
}
export function shake(options: ITreeShakingOptions): ITreeShakingResult {
const languageService = createTypeScriptLanguageService(options);
markNodes(languageService, options);
return generateResult(languageService, options.shakeLevel);
}
//#region Discovery, LanguageService & Setup
function createTypeScriptLanguageService(options: ITreeShakingOptions): ts.LanguageService {
// Discover referenced files
const FILES = discoverAndReadFiles(options);
// Add fake usage files
options.inlineEntryPoints.forEach((inlineEntryPoint, index) => {
FILES[`inlineEntryPoint:${index}.ts`] = inlineEntryPoint;
});
// Resolve libs
const RESOLVED_LIBS: ILibMap = {};
options.libs.forEach((filename) => {
const filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
RESOLVED_LIBS[`defaultLib:${filename}`] = fs.readFileSync(filepath).toString();
});
const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, options.compilerOptions);
return ts.createLanguageService(host);
}
/**
* Read imports and follow them until all files have been handled
*/
function discoverAndReadFiles(options: ITreeShakingOptions): IFileMap {
const FILES: IFileMap = {};
const in_queue: { [module: string]: boolean; } = Object.create(null);
const queue: string[] = [];
const enqueue = (moduleId: string) => {
if (in_queue[moduleId]) {
return;
}
in_queue[moduleId] = true;
queue.push(moduleId);
};
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
while (queue.length > 0) {
const moduleId = queue.shift();
const dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
if (fs.existsSync(dts_filename)) {
const dts_filecontents = fs.readFileSync(dts_filename).toString();
FILES[moduleId + '.d.ts'] = dts_filecontents;
continue;
}
let ts_filename: string;
if (options.redirects[moduleId]) {
ts_filename = path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts');
} else {
ts_filename = path.join(options.sourcesRoot, moduleId + '.ts');
}
const ts_filecontents = fs.readFileSync(ts_filename).toString();
const info = ts.preProcessFile(ts_filecontents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFileName = info.importedFiles[i].fileName;
if (options.importIgnorePattern.test(importedFileName)) {
// Ignore vs/css! imports
continue;
}
let importedModuleId = importedFileName;
if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) {
importedModuleId = path.join(path.dirname(moduleId), importedModuleId);
}
enqueue(importedModuleId);
}
FILES[moduleId + '.ts'] = ts_filecontents;
}
return FILES;
}
interface ILibMap { [libName: string]: string; }
interface IFileMap { [fileName: string]: string; }
/**
* A TypeScript language service host
*/
class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
private readonly _libs: ILibMap;
private readonly _files: IFileMap;
private readonly _compilerOptions: ts.CompilerOptions;
constructor(libs: ILibMap, files: IFileMap, compilerOptions: ts.CompilerOptions) {
this._libs = libs;
this._files = files;
this._compilerOptions = compilerOptions;
}
// --- language service host ---------------
getCompilationSettings(): ts.CompilerOptions {
return this._compilerOptions;
}
getScriptFileNames(): string[] {
return (
[]
.concat(Object.keys(this._libs))
.concat(Object.keys(this._files))
);
}
getScriptVersion(fileName: string): string {
return '1';
}
getProjectVersion(): string {
return '1';
}
getScriptSnapshot(fileName: string): ts.IScriptSnapshot {
if (this._files.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._files[fileName]);
} else if (this._libs.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
} else {
return ts.ScriptSnapshot.fromString('');
}
}
getScriptKind(fileName: string): ts.ScriptKind {
return ts.ScriptKind.TS;
}
getCurrentDirectory(): string {
return '';
}
getDefaultLibFileName(options: ts.CompilerOptions): string {
return 'defaultLib:lib.d.ts';
}
isDefaultLibFileName(fileName: string): boolean {
return fileName === this.getDefaultLibFileName(this._compilerOptions);
}
}
//#endregion
//#region Tree Shaking
const enum NodeColor {
White = 0,
Gray = 1,
Black = 2
}
function getColor(node: ts.Node): NodeColor {
return (<any>node).$$$color || NodeColor.White;
}
function setColor(node: ts.Node, color: NodeColor): void {
(<any>node).$$$color = color;
}
function nodeOrParentIsBlack(node: ts.Node): boolean {
while (node) {
const color = getColor(node);
if (color === NodeColor.Black) {
return true;
}
node = node.parent;
}
return false;
}
function nodeOrChildIsBlack(node: ts.Node): boolean {
if (getColor(node) === NodeColor.Black) {
return true;
}
for (const child of node.getChildren()) {
if (nodeOrChildIsBlack(child)) {
return true;
}
}
return false;
}
function markNodes(languageService: ts.LanguageService, options: ITreeShakingOptions) {
const program = languageService.getProgram();
if (options.shakeLevel === ShakeLevel.Files) {
// Mark all source files Black
program.getSourceFiles().forEach((sourceFile) => {
setColor(sourceFile, NodeColor.Black);
});
return;
}
const black_queue: ts.Node[] = [];
const gray_queue: ts.Node[] = [];
const sourceFilesLoaded: { [fileName: string]: boolean } = {};
function enqueueTopLevelModuleStatements(sourceFile: ts.SourceFile): void {
sourceFile.forEachChild((node: ts.Node) => {
if (ts.isImportDeclaration(node)) {
if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, NodeColor.Black);
enqueueImport(node, node.moduleSpecifier.text);
}
return;
}
if (ts.isExportDeclaration(node)) {
if (ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, NodeColor.Black);
enqueueImport(node, node.moduleSpecifier.text);
}
return;
}
if (
ts.isExpressionStatement(node)
|| ts.isIfStatement(node)
|| ts.isIterationStatement(node, true)
|| ts.isExportAssignment(node)
) {
enqueue_black(node);
}
if (ts.isImportEqualsDeclaration(node)) {
if (/export/.test(node.getFullText(sourceFile))) {
// e.g. "export import Severity = BaseSeverity;"
enqueue_black(node);
}
}
});
}
function enqueue_gray(node: ts.Node): void {
if (nodeOrParentIsBlack(node) || getColor(node) === NodeColor.Gray) {
return;
}
setColor(node, NodeColor.Gray);
gray_queue.push(node);
}
function enqueue_black(node: ts.Node): void {
const previousColor = getColor(node);
if (previousColor === NodeColor.Black) {
return;
}
if (previousColor === NodeColor.Gray) {
// remove from gray queue
gray_queue.splice(gray_queue.indexOf(node), 1);
setColor(node, NodeColor.White);
// add to black queue
enqueue_black(node);
// // move from one queue to the other
// black_queue.push(node);
// setColor(node, NodeColor.Black);
return;
}
if (nodeOrParentIsBlack(node)) {
return;
}
const fileName = node.getSourceFile().fileName;
if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
setColor(node, NodeColor.Black);
return;
}
const sourceFile = node.getSourceFile();
if (!sourceFilesLoaded[sourceFile.fileName]) {
sourceFilesLoaded[sourceFile.fileName] = true;
enqueueTopLevelModuleStatements(sourceFile);
}
if (ts.isSourceFile(node)) {
return;
}
setColor(node, NodeColor.Black);
black_queue.push(node);
if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
if (references) {
for (let i = 0, len = references.length; i < len; i++) {
const reference = references[i];
const referenceSourceFile = program.getSourceFile(reference.fileName);
const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
if (
ts.isMethodDeclaration(referenceNode.parent)
|| ts.isPropertyDeclaration(referenceNode.parent)
|| ts.isGetAccessor(referenceNode.parent)
|| ts.isSetAccessor(referenceNode.parent)
) {
enqueue_gray(referenceNode.parent);
}
}
}
}
}
function enqueueFile(filename: string): void {
const sourceFile = program.getSourceFile(filename);
if (!sourceFile) {
console.warn(`Cannot find source file ${filename}`);
return;
}
enqueue_black(sourceFile);
}
function enqueueImport(node: ts.Node, importText: string): void {
if (options.importIgnorePattern.test(importText)) {
// this import should be ignored
return;
}
const nodeSourceFile = node.getSourceFile();
let fullPath: string;
if (/(^\.\/)|(^\.\.\/)/.test(importText)) {
fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts';
} else {
fullPath = importText + '.ts';
}
enqueueFile(fullPath);
}
options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts'));
// Add fake usage files
options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint:${index}.ts`));
let step = 0;
const checker = program.getTypeChecker();
while (black_queue.length > 0 || gray_queue.length > 0) {
++step;
let node: ts.Node;
if (step % 100 === 0) {
console.log(`${step}/${step+black_queue.length+gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
}
if (black_queue.length === 0) {
for (let i = 0; i < gray_queue.length; i++) {
const node = gray_queue[i];
const nodeParent = node.parent;
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
gray_queue.splice(i, 1);
black_queue.push(node);
setColor(node, NodeColor.Black);
i--;
}
}
}
if (black_queue.length > 0) {
node = black_queue.shift();
} else {
// only gray nodes remaining...
break;
}
const nodeSourceFile = node.getSourceFile();
const loop = (node: ts.Node) => {
const [symbol, symbolImportNode] = getRealNodeSymbol(checker, node);
if (symbolImportNode) {
setColor(symbolImportNode, NodeColor.Black);
}
if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
for (let i = 0, len = symbol.declarations.length; i < len; i++) {
const declaration = symbol.declarations[i];
if (ts.isSourceFile(declaration)) {
// Do not enqueue full source files
// (they can be the declaration of a module import)
continue;
}
if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
enqueue_black(declaration.name);
for (let j = 0; j < declaration.members.length; j++) {
const member = declaration.members[j];
const memberName = member.name ? member.name.getText() : null;
if (
ts.isConstructorDeclaration(member)
|| ts.isConstructSignatureDeclaration(member)
|| ts.isIndexSignatureDeclaration(member)
|| ts.isCallSignatureDeclaration(member)
|| memberName === 'toJSON'
|| memberName === 'toString'
                            || memberName === 'dispose' // TODO: keeping all `dispose` methods
) {
enqueue_black(member);
}
}
// queue the heritage clauses
if (declaration.heritageClauses) {
for (let heritageClause of declaration.heritageClauses) {
enqueue_black(heritageClause);
}
}
} else {
enqueue_black(declaration);
}
}
}
node.forEachChild(loop);
};
node.forEachChild(loop);
}
}
function nodeIsInItsOwnDeclaration(nodeSourceFile: ts.SourceFile, node: ts.Node, symbol: ts.Symbol): boolean {
for (let i = 0, len = symbol.declarations.length; i < len; i++) {
const declaration = symbol.declarations[i];
const declarationSourceFile = declaration.getSourceFile();
if (nodeSourceFile === declarationSourceFile) {
if (declaration.pos <= node.pos && node.end <= declaration.end) {
return true;
}
}
}
return false;
}
function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLevel): ITreeShakingResult {
const program = languageService.getProgram();
let result: ITreeShakingResult = {};
const writeFile = (filePath: string, contents: string): void => {
result[filePath] = contents;
};
program.getSourceFiles().forEach((sourceFile) => {
const fileName = sourceFile.fileName;
if (/^defaultLib:/.test(fileName)) {
return;
}
const destination = fileName;
if (/\.d\.ts$/.test(fileName)) {
if (nodeOrChildIsBlack(sourceFile)) {
writeFile(destination, sourceFile.text);
}
return;
}
let text = sourceFile.text;
let result = '';
function keep(node: ts.Node): void {
result += text.substring(node.pos, node.end);
}
function write(data: string): void {
result += data;
}
function writeMarkedNodes(node: ts.Node): void {
if (getColor(node) === NodeColor.Black) {
return keep(node);
}
// Always keep certain top-level statements
if (ts.isSourceFile(node.parent)) {
if (ts.isExpressionStatement(node) && ts.isStringLiteral(node.expression) && node.expression.text === 'use strict') {
return keep(node);
}
if (ts.isVariableStatement(node) && nodeOrChildIsBlack(node)) {
return keep(node);
}
}
// Keep the entire import in import * as X cases
if (ts.isImportDeclaration(node)) {
if (node.importClause && node.importClause.namedBindings) {
if (ts.isNamespaceImport(node.importClause.namedBindings)) {
if (getColor(node.importClause.namedBindings) === NodeColor.Black) {
return keep(node);
}
} else {
let survivingImports: string[] = [];
for (let i = 0; i < node.importClause.namedBindings.elements.length; i++) {
const importNode = node.importClause.namedBindings.elements[i];
if (getColor(importNode) === NodeColor.Black) {
survivingImports.push(importNode.getFullText(sourceFile));
}
}
const leadingTriviaWidth = node.getLeadingTriviaWidth();
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
if (survivingImports.length > 0) {
if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
} else {
if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
}
}
} else {
if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
return keep(node);
}
}
}
if (shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
let toWrite = node.getFullText();
for (let i = node.members.length - 1; i >= 0; i--) {
const member = node.members[i];
if (getColor(member) === NodeColor.Black) {
// keep method
continue;
}
if (/^_(.*)Brand$/.test(member.name.getText())) {
// TODO: keep all members ending with `Brand`...
continue;
}
let pos = member.pos - node.pos;
let end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
}
return write(toWrite);
}
if (ts.isFunctionDeclaration(node)) {
// Do not go inside functions if they haven't been marked
return;
}
node.forEachChild(writeMarkedNodes);
}
if (getColor(sourceFile) !== NodeColor.Black) {
if (!nodeOrChildIsBlack(sourceFile)) {
// none of the elements are reachable => don't write this file at all!
return;
}
sourceFile.forEachChild(writeMarkedNodes);
result += sourceFile.endOfFileToken.getFullText(sourceFile);
} else {
result = text;
}
writeFile(destination, result);
});
return result;
}
//#endregion
//#region Utils
/**
* Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
*/
function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol, ts.Declaration] {
/**
* Returns the containing object literal property declaration given a possible name node, e.g. "a" in x = { "a": 1 }
*/
/* @internal */
function getContainingObjectLiteralElement(node: ts.Node): ts.ObjectLiteralElement | undefined {
switch (node.kind) {
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
if (node.parent.kind === ts.SyntaxKind.ComputedPropertyName) {
return ts.isObjectLiteralElement(node.parent.parent) ? node.parent.parent : undefined;
}
// falls through
case ts.SyntaxKind.Identifier:
return ts.isObjectLiteralElement(node.parent) &&
(node.parent.parent.kind === ts.SyntaxKind.ObjectLiteralExpression || node.parent.parent.kind === ts.SyntaxKind.JsxAttributes) &&
node.parent.name === node ? node.parent : undefined;
}
return undefined;
}
function getPropertySymbolsFromType(type: ts.Type, propName: ts.PropertyName) {
function getTextOfPropertyName(name: ts.PropertyName): string {
function isStringOrNumericLiteral(node: ts.Node): node is ts.StringLiteral | ts.NumericLiteral {
const kind = node.kind;
return kind === ts.SyntaxKind.StringLiteral
|| kind === ts.SyntaxKind.NumericLiteral;
}
switch (name.kind) {
case ts.SyntaxKind.Identifier:
return name.text;
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
return name.text;
case ts.SyntaxKind.ComputedPropertyName:
return isStringOrNumericLiteral(name.expression) ? name.expression.text : undefined!;
}
}
const name = getTextOfPropertyName(propName);
if (name && type) {
const result: ts.Symbol[] = [];
const symbol = type.getProperty(name);
if (type.flags & ts.TypeFlags.Union) {
for (const t of (<ts.UnionType>type).types) {
const symbol = t.getProperty(name);
if (symbol) {
result.push(symbol);
}
}
return result;
}
if (symbol) {
result.push(symbol);
return result;
}
}
return undefined;
}
function getPropertySymbolsFromContextualType(typeChecker: ts.TypeChecker, node: ts.ObjectLiteralElement): ts.Symbol[] {
const objectLiteral = <ts.ObjectLiteralExpression | ts.JsxAttributes>node.parent;
const contextualType = typeChecker.getContextualType(objectLiteral)!;
return getPropertySymbolsFromType(contextualType, node.name!)!;
}
// Go to the original declaration for cases:
//
// (1) when the aliased symbol was declared in the location(parent).
// (2) when the aliased symbol is originating from an import.
//
function shouldSkipAlias(node: ts.Node, declaration: ts.Node): boolean {
if (node.kind !== ts.SyntaxKind.Identifier) {
return false;
}
if (node.parent === declaration) {
return true;
}
switch (declaration.kind) {
case ts.SyntaxKind.ImportClause:
case ts.SyntaxKind.ImportEqualsDeclaration:
return true;
case ts.SyntaxKind.ImportSpecifier:
return declaration.parent.kind === ts.SyntaxKind.NamedImports;
default:
return false;
}
}
if (!ts.isShorthandPropertyAssignment(node)) {
if (node.getChildCount() !== 0) {
return [null, null];
}
}
let symbol = checker.getSymbolAtLocation(node);
let importNode: ts.Declaration = null;
if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
const aliased = checker.getAliasedSymbol(symbol);
if (aliased.declarations) {
// We should mark the import as visited
importNode = symbol.declarations[0];
symbol = aliased;
}
}
if (symbol) {
        // Because a name in a short-hand property assignment has two different meanings (property name and property value),
        // go-to-definition at such a position should go to the variable declaration of the property value rather than
        // to the declaration of the property name (i.e. stay at the same position). However, if go-to-definition
        // is performed at the location of a property access, we would like to go to the definition of the property in the
        // short-hand assignment. This case and others are handled by the following code.
if (node.parent.kind === ts.SyntaxKind.ShorthandPropertyAssignment) {
symbol = checker.getShorthandAssignmentValueSymbol(symbol.valueDeclaration);
}
// If the node is the name of a BindingElement within an ObjectBindingPattern instead of just returning the
// declaration the symbol (which is itself), we should try to get to the original type of the ObjectBindingPattern
// and return the property declaration for the referenced property.
// For example:
        // import('./foo').then(({ b/*goto*/ar }) => undefined); => should take us to the declaration in file "./foo"
//
// function bar<T>(onfulfilled: (value: T) => void) { //....}
// interface Test {
// pr/*destination*/op1: number
// }
// bar<Test>(({pr/*goto*/op1})=>{});
if (ts.isPropertyName(node) && ts.isBindingElement(node.parent) && ts.isObjectBindingPattern(node.parent.parent) &&
(node === (node.parent.propertyName || node.parent.name))) {
const type = checker.getTypeAtLocation(node.parent.parent);
if (type) {
const propSymbols = getPropertySymbolsFromType(type, node);
if (propSymbols) {
symbol = propSymbols[0];
}
}
}
// If the current location we want to find its definition is in an object literal, try to get the contextual type for the
// object literal, lookup the property symbol in the contextual type, and use this for goto-definition.
// For example
// interface Props{
// /*first*/prop1: number
// prop2: boolean
// }
// function Foo(arg: Props) {}
// Foo( { pr/*1*/op1: 10, prop2: false })
const element = getContainingObjectLiteralElement(node);
if (element && checker.getContextualType(element.parent as ts.Expression)) {
const propertySymbols = getPropertySymbolsFromContextualType(checker, element);
if (propertySymbols) {
symbol = propertySymbols[0];
}
}
}
if (symbol && symbol.declarations) {
return [symbol, importNode];
}
return [null, null];
}
/** Get the token whose text contains the position */
function getTokenAtPosition(sourceFile: ts.SourceFile, position: number, allowPositionInLeadingTrivia: boolean, includeEndPosition: boolean): ts.Node {
let current: ts.Node = sourceFile;
outer: while (true) {
// find the child that contains 'position'
for (const child of current.getChildren()) {
const start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
if (start > position) {
// If this child begins after position, then all subsequent children will as well.
break;
}
const end = child.getEnd();
if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) {
current = child;
continue outer;
}
}
return current;
}
}
//#endregion
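
For orientation, the exported shake() entry point is driven roughly as follows. The option values below are assumptions chosen for illustration; the real values live in the editor build scripts, which are not part of this diff.

// Hypothetical invocation of shake(); all option values are illustrative.
import * as ts from 'typescript';
import { shake, ShakeLevel, ITreeShakingResult } from './treeshaking';

const result: ITreeShakingResult = shake({
    sourcesRoot: '/path/to/src',                      // assumed source root
    entryPoints: ['vs/editor/editor.main'],
    inlineEntryPoints: [],
    libs: ['lib.d.ts', 'lib.es2015.collection.d.ts'],
    compilerOptions: { module: ts.ModuleKind.AMD, target: ts.ScriptTarget.ES5 },
    shakeLevel: ShakeLevel.ClassMembers,
    importIgnorePattern: /^vs\/css!/,
    redirects: {}
});

for (const fileName of Object.keys(result)) {
    console.log(`${fileName}: ${result[fileName].length} characters kept`);
}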

View File

@@ -1,73 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var Lint = require("tslint");
var path_1 = require("path");
var Rule = /** @class */ (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
if (/vs(\/|\\)editor/.test(sourceFile.fileName)) {
// the vs/editor folder is allowed to use the standalone editor
return [];
}
return this.applyWithWalker(new NoStandaloneEditorRuleWalker(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var NoStandaloneEditorRuleWalker = /** @class */ (function (_super) {
__extends(NoStandaloneEditorRuleWalker, _super);
function NoStandaloneEditorRuleWalker(file, opts) {
return _super.call(this, file, opts) || this;
}
NoStandaloneEditorRuleWalker.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
};
NoStandaloneEditorRuleWalker.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node);
};
NoStandaloneEditorRuleWalker.prototype.visitCallExpression = function (node) {
_super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
var path = node.arguments[0];
this._validateImport(path.getText(), node);
}
};
NoStandaloneEditorRuleWalker.prototype._validateImport = function (path, node) {
// remove quotes
path = path.slice(1, -1);
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(this.getSourceFile().fileName, path);
}
if (/vs(\/|\\)editor(\/|\\)standalone/.test(path)
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(path)) {
// {{SQL CARBON EDIT}}
//this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Not allowed to import standalone editor modules. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
}
};
return NoStandaloneEditorRuleWalker;
}(Lint.RuleWalker));

View File

@@ -1,66 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import { join } from 'path';
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
if (/vs(\/|\\)editor/.test(sourceFile.fileName)) {
// the vs/editor folder is allowed to use the standalone editor
return [];
}
return this.applyWithWalker(new NoStandaloneEditorRuleWalker(sourceFile, this.getOptions()));
}
}
class NoStandaloneEditorRuleWalker extends Lint.RuleWalker {
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
}
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
this._validateImport(node.moduleSpecifier.getText(), node);
}
protected visitCallExpression(node: ts.CallExpression): void {
super.visitCallExpression(node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
this._validateImport(path.getText(), node);
}
}
private _validateImport(path: string, node: ts.Node): void {
// remove quotes
path = path.slice(1, -1);
// resolve relative paths
if (path[0] === '.') {
path = join(this.getSourceFile().fileName, path);
}
if (
/vs(\/|\\)editor(\/|\\)standalone/.test(path)
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(path)
) {
// {{SQL CARBON EDIT}}
//this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Not allowed to import standalone editor modules. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
}
}
}
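
For context, a hedged illustration of what this rule's path checks catch when they run outside vs/editor; the import specifiers below are taken from the regexes and from the recipe elsewhere in this diff, and note that the {{SQL CARBON EDIT}} comment-out means matches are detected but never reported as failures in this fork:

// Matched by the standalone-editor patterns above:
import * as standaloneEditor from 'vs/editor/standalone/browser/standaloneEditor';
import { editor } from 'vs/editor/editor.api';
// Not matched: modules outside the standalone entry points, e.g.
import { URI } from 'vs/base/common/uri';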

View File

@@ -88,11 +88,10 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
var info = this.findDescribingParent(node);
// Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) {
var fix = [
Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''),
Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''),
];
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, fix);
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
this.createReplacement(node.getStart(), 1, '\''),
this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
]));
return;
}
var callInfo = info ? info.callInfo : null;
@@ -102,9 +101,8 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
}
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
var s = node.getText();
var fix = [
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")"),
];
var replacement = new Lint.Replacement(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")");
var fix = new Lint.Fix('Unexternalitzed string', [replacement]);
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix));
return;
}
@@ -136,24 +134,16 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
}
}
}
var messageArg = callInfo.callExpression.arguments[this.messageIndex];
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) {
var messageArg = callInfo.argIndex === this.messageIndex
? callInfo.callExpression.arguments[this.messageIndex]
: null;
if (messageArg && messageArg !== node) {
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal."));
return;
}
};
NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) {
var text = keyNode.getText();
// We have an empty key
if (text.match(/(['"]) *\1/)) {
if (messageNode) {
this.addFailureAtNode(keyNode, "Key is empty for message: " + messageNode.getText());
}
else {
this.addFailureAtNode(keyNode, "Key is empty.");
}
return;
}
var occurrences = this.usedKeys[text];
if (!occurrences) {
occurrences = [];
@@ -186,7 +176,7 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
node = parent;
}
};
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.';
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double qoutes for imports.';
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
return NoUnexternalizedStringsRuleWalker;
}(Lint.RuleWalker));

View File

@@ -45,7 +45,7 @@ interface KeyMessagePair {
class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
private static ImportFailureMessage = 'Do not use double quotes for imports.';
private static ImportFailureMessage = 'Do not use double qoutes for imports.';
private static DOUBLE_QUOTE: string = '"';
@@ -104,14 +104,13 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
let info = this.findDescribingParent(node);
// Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) {
const fix = [
Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''),
Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''),
];
this.addFailureAtNode(
node,
NoUnexternalizedStringsRuleWalker.ImportFailureMessage,
fix
new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
this.createReplacement(node.getStart(), 1, '\''),
this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
])
);
return;
}
@@ -123,9 +122,8 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
const s = node.getText();
const fix = [
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`),
];
const replacement = new Lint.Replacement(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`);
const fix = new Lint.Fix('Unexternalitzed string', [replacement]);
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Unexternalized string found: ${node.getText()}`, fix));
return;
}
@@ -156,10 +154,10 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
}
}
}
const messageArg = callInfo.callExpression.arguments[this.messageIndex];
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) {
let messageArg: ts.Expression = callInfo.argIndex === this.messageIndex
? callInfo.callExpression.arguments[this.messageIndex]
: null;
if (messageArg && messageArg !== node) {
this.addFailure(this.createFailure(
messageArg.getStart(), messageArg.getWidth(),
`Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`));
@@ -169,15 +167,6 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node) {
let text = keyNode.getText();
// We have an empty key
if (text.match(/(['"]) *\1/)) {
if (messageNode) {
this.addFailureAtNode(keyNode, `Key is empty for message: ${messageNode.getText()}`);
} else {
this.addFailureAtNode(keyNode, `Key is empty.`);
}
return;
}
let occurrences: KeyMessagePair[] = this.usedKeys[text];
if (!occurrences) {
occurrences = [];
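
To make the rule's intent concrete, here is a hedged before/after of the fix both sides of this hunk generate; the string "Open File" is an invented example, and the KEY- prefix comes straight from the replacement template above:

import * as nls from 'vs/nls';

// Before: a user-facing double-quoted string literal, which the rule reports
const label = "Open File";
// After applying the generated fix
const localizedLabel = nls.localize('KEY-Open File', "Open File");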

View File

@@ -71,7 +71,7 @@ var TranslationRemindRuleWalker = /** @class */ (function (_super) {
}
});
if (!resourceDefined) {
this.addFailureAtNode(node, "Please add '" + resource + "' to ./build/lib/i18n.resources.json file to use translations here.");
this.addFailureAtNode(node, "Please add '" + resource + "' to ./builds/lib/i18n.resources.json file to use translations here.");
}
};
TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls';

View File

@@ -67,7 +67,7 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
});
if (!resourceDefined) {
this.addFailureAtNode(node, `Please add '${resource}' to ./build/lib/i18n.resources.json file to use translations here.`);
this.addFailureAtNode(node, `Please add '${resource}' to ./builds/lib/i18n.resources.json file to use translations here.`);
}
}
}

View File

@@ -1,14 +1,7 @@
declare module "event-stream" {
import { Stream } from 'stream';
import { ThroughStream as _ThroughStream} from 'through';
import { ThroughStream } from 'through';
import { MapStream } from 'map-stream';
import * as File from 'vinyl';
export interface ThroughStream extends _ThroughStream {
queue(data: File | null);
push(data: File | null);
paused: boolean;
}
function merge(streams: Stream[]): ThroughStream;
function merge(...streams: Stream[]): ThroughStream;

View File

@@ -143,7 +143,7 @@ function loadSourcemaps() {
cb(null, f);
return;
}
f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', function (err, contents) {
if (err) {
return cb(err);
@@ -160,7 +160,7 @@ function stripSourceMappingURL() {
var output = input
.pipe(es.mapSync(function (f) {
var contents = f.contents.toString('utf8');
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
}));
return es.duplex(input, output);
@@ -173,6 +173,7 @@ function rimraf(dir) {
if (!err) {
return cb();
}
;
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(function () { return retry(cb); }, 10);
}
@@ -210,12 +211,3 @@ function filter(fn) {
return result;
}
exports.filter = filter;
function versionStringToNumber(versionStr) {
var semverRegex = /(\d+)\.(\d+)\.(\d+)/;
var match = versionStr.match(semverRegex);
if (!match) {
throw new Error('Version string is not properly formatted: ' + versionStr);
}
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
exports.versionStringToNumber = versionStringToNumber;
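
The Buffer hunks above differ only in how a string becomes a Buffer: one side uses the deprecated new Buffer(string, encoding) constructor, the other the Buffer.from factory, which behaves the same for string input. A small hedged illustration with an invented input string:

const contents = '//# sourceMappingURL=foo.js.map'; // hypothetical file contents
// deprecated form: new Buffer(contents, 'utf8')
// preferred form:
const stripped = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
console.log(stripped.toString('utf8')); // '' (the sourceMappingURL comment is removed)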

View File

@@ -28,7 +28,7 @@ export interface IStreamProvider {
(cancellationToken?: ICancellationToken): NodeJS.ReadWriteStream;
}
export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation?: boolean): NodeJS.ReadWriteStream {
export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation: boolean): NodeJS.ReadWriteStream {
const input = es.through();
const output = es.through();
let state = 'idle';
@@ -129,7 +129,7 @@ export function skipDirectories(): NodeJS.ReadWriteStream {
});
}
export function cleanNodeModule(name: string, excludes: string[], includes?: string[]): NodeJS.ReadWriteStream {
export function cleanNodeModule(name: string, excludes: string[], includes: string[]): NodeJS.ReadWriteStream {
const toGlob = (path: string) => '**/node_modules/' + name + (path ? '/' + path : '');
const negate = (str: string) => '!' + str;
@@ -190,7 +190,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream {
return;
}
f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => {
if (err) { return cb(err); }
@@ -209,7 +209,7 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
const output = input
.pipe(es.mapSync<VinylFile, VinylFile>(f => {
const contents = (<Buffer>f.contents).toString('utf8');
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
}));
@@ -223,7 +223,7 @@ export function rimraf(dir: string): (cb: any) => void {
_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
if (!err) {
return cb();
}
};
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(cb), 10);
@@ -268,14 +268,4 @@ export function filter(fn: (data: any) => boolean): FilterStream {
result.restore = es.through();
return result;
}
export function versionStringToNumber(versionStr: string) {
const semverRegex = /(\d+)\.(\d+)\.(\d+)/;
const match = versionStr.match(semverRegex);
if (!match) {
throw new Error('Version string is not properly formatted: ' + versionStr);
}
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
}
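
versionStringToNumber, present on one side of this diff, packs a semver string into a single comparable integer. A worked example of the arithmetic with illustrative version numbers:

// 1.25.3 becomes 1*10000 + 25*100 + 3:
const packed = 1 * 1e4 + 25 * 1e2 + 3; // 12503
// so plain numeric comparison orders releases: 1.25.3 (12503) < 1.26.0 (12600) < 2.0.0 (20000)
console.log(packed < 1 * 1e4 + 26 * 1e2 + 0); // true

Note the packing assumes minor and patch components stay below 100; a hypothetical 1.100.0 would collide with 2.0.0.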

View File

@@ -1 +0,0 @@
.yarnrc

View File

@@ -9,7 +9,7 @@ const es = require('event-stream');
function handleDeletions() {
return es.mapSync(f => {
if (/\.ts$/.test(f.relative) && !f.contents) {
f.contents = Buffer.from('');
f.contents = new Buffer('');
f.stat = { mtime: new Date() };
}

View File

@@ -30,12 +30,12 @@ function watch(root) {
path: path,
base: root
});
//@ts-ignore
file.event = type;
result.emit('data', file);
}
nsfw(root, function (events) {
nsfw(root, function(events) {
for (var i = 0; i < events.length; i++) {
var e = events[i];
var changeType = e.action;
@@ -47,16 +47,16 @@ function watch(root) {
handleEvent(path.join(e.directory, e.file), toChangeType(changeType));
}
}
}).then(function (watcher) {
}).then(function(watcher) {
watcher.start();
});
});
return result;
return result;
}
var cache = Object.create(null);
module.exports = function (pattern, options) {
module.exports = function(pattern, options) {
options = options || {};
var cwd = path.normalize(options.cwd || process.cwd());
@@ -66,7 +66,7 @@ module.exports = function (pattern, options) {
watcher = cache[cwd] = watch(cwd);
}
var rebase = !options.base ? es.through() : es.mapSync(function (f) {
var rebase = !options.base ? es.through() : es.mapSync(function(f) {
f.base = options.base;
return f;
});
@@ -74,13 +74,13 @@ module.exports = function (pattern, options) {
return watcher
.pipe(filter(['**', '!.git{,/**}'])) // ignore all things git
.pipe(filter(pattern))
.pipe(es.map(function (file, cb) {
fs.stat(file.path, function (err, stat) {
.pipe(es.map(function(file, cb) {
fs.stat(file.path, function(err, stat) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }
if (!stat.isFile()) { return cb(); }
fs.readFile(file.path, function (err, contents) {
fs.readFile(file.path, function(err, contents) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }
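
A hedged sketch of how a watcher module like this is consumed from a gulp task; the require path and glob pattern are assumptions, and the only behavior relied on is what the code above guarantees, namely a vinyl stream whose files carry an event property:

import * as es from 'event-stream';
const watch = require('./build/lib/watch'); // path assumed for illustration

watch('src/**/*.ts', { base: 'src' })
	.pipe(es.mapSync((file: any) => {
		console.log(file.event, file.relative); // e.g. "change vs/editor/foo.ts"
		return file;
	}));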

View File

@@ -24,8 +24,7 @@ function watch(root) {
var result = es.through();
var child = cp.spawn(watcherPath, [root]);
child.stdout.on('data', function (data) {
// @ts-ignore
child.stdout.on('data', function(data) {
var lines = data.toString('utf8').split('\n');
for (var i = 0; i < lines.length; i++) {
var line = lines[i].trim();
@@ -47,17 +46,17 @@ function watch(root) {
path: changePathFull,
base: root
});
//@ts-ignore
file.event = toChangeType(changeType);
result.emit('data', file);
}
});
child.stderr.on('data', function (data) {
child.stderr.on('data', function(data) {
result.emit('error', data);
});
child.on('exit', function (code) {
child.on('exit', function(code) {
result.emit('error', 'Watcher died with code ' + code);
child = null;
});
@@ -71,7 +70,7 @@ function watch(root) {
var cache = Object.create(null);
module.exports = function (pattern, options) {
module.exports = function(pattern, options) {
options = options || {};
var cwd = path.normalize(options.cwd || process.cwd());

File diff suppressed because it is too large

View File

@@ -1,21 +0,0 @@
The Source EULA
Copyright (c) 2016 - present Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -134,25 +134,7 @@ function getTopLevelDeclaration(sourceFile, typeName) {
function getNodeText(sourceFile, node) {
return sourceFile.getFullText().substring(node.pos, node.end);
}
function hasModifier(modifiers, kind) {
if (modifiers) {
for (var i = 0; i < modifiers.length; i++) {
var mod = modifiers[i];
if (mod.kind === kind) {
return true;
}
}
}
return false;
}
function isStatic(member) {
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
}
function isDefaultExport(declaration) {
return (hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword));
}
function getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage) {
function getMassagedTopLevelDeclarationText(sourceFile, declaration) {
var result = getNodeText(sourceFile, declaration);
// if (result.indexOf('MonacoWorker') >= 0) {
// console.log('here!');
@@ -160,18 +142,6 @@ function getMassagedTopLevelDeclarationText(sourceFile, declaration, importName,
// }
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
var interfaceDeclaration = declaration;
var staticTypeName_1 = (isDefaultExport(interfaceDeclaration)
? importName + ".default"
: importName + "." + declaration.name.text);
var instanceTypeName_1 = staticTypeName_1;
var typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
if (typeParametersCnt > 0) {
var arr = [];
for (var i = 0; i < typeParametersCnt; i++) {
arr.push('any');
}
instanceTypeName_1 = instanceTypeName_1 + "<" + arr.join(',') + ">";
}
var members = interfaceDeclaration.members;
members.forEach(function (member) {
try {
@@ -181,15 +151,6 @@ function getMassagedTopLevelDeclarationText(sourceFile, declaration, importName,
result = result.replace(memberText, '');
// console.log('AFTER: ', result);
}
else {
var memberName = member.name.text;
if (isStatic(member)) {
usage.push("a = " + staticTypeName_1 + "." + memberName + ";");
}
else {
usage.push("a = (<" + instanceTypeName_1 + ">b)." + memberName + ";");
}
}
}
catch (err) {
// life..
@@ -210,7 +171,9 @@ function format(text) {
function getRuleProvider(options) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
return ts.formatting.getFormatContext(options);
var ruleProvider = new ts.formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
}
function applyEdits(text, edits) {
// Apply edits in reverse on the existing text
@@ -247,19 +210,8 @@ function createReplacer(data) {
};
}
function generateDeclarationFile(out, inputFiles, recipe) {
var endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
var lines = recipe.split(endl);
var lines = recipe.split(/\r\n|\n|\r/);
var result = [];
var usageCounter = 0;
var usageImports = [];
var usage = [];
usage.push("var a;");
usage.push("var b;");
var generateUsageImport = function (moduleId) {
var importName = 'm' + (++usageCounter);
usageImports.push("import * as " + importName + " from '" + moduleId.replace(/\.d\.ts$/, '') + "';");
return importName;
};
lines.forEach(function (line) {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
@@ -269,7 +221,6 @@ function generateDeclarationFile(out, inputFiles, recipe) {
if (!sourceFile_1) {
return;
}
var importName_1 = generateUsageImport(moduleId);
var replacer_1 = createReplacer(m1[2]);
var typeNames = m1[3].split(/,/);
typeNames.forEach(function (typeName) {
@@ -282,7 +233,7 @@ function generateDeclarationFile(out, inputFiles, recipe) {
logErr('Cannot find type ' + typeName);
return;
}
result.push(replacer_1(getMassagedTopLevelDeclarationText(sourceFile_1, declaration, importName_1, usage)));
result.push(replacer_1(getMassagedTopLevelDeclarationText(sourceFile_1, declaration)));
});
return;
}
@@ -294,7 +245,6 @@ function generateDeclarationFile(out, inputFiles, recipe) {
if (!sourceFile_2) {
return;
}
var importName_2 = generateUsageImport(moduleId);
var replacer_2 = createReplacer(m2[2]);
var typeNames = m2[3].split(/,/);
var typesToExcludeMap_1 = {};
@@ -322,23 +272,21 @@ function generateDeclarationFile(out, inputFiles, recipe) {
}
}
}
result.push(replacer_2(getMassagedTopLevelDeclarationText(sourceFile_2, declaration, importName_2, usage)));
result.push(replacer_2(getMassagedTopLevelDeclarationText(sourceFile_2, declaration)));
});
return;
}
result.push(line);
});
var resultTxt = result.join(endl);
var resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt);
return [
resultTxt,
usageImports.join('\n') + "\n\n" + usage.join('\n')
];
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt;
}
function getIncludesInRecipe() {
function getFilesToWatch(out) {
var recipe = fs.readFileSync(RECIPE_PATH).toString();
var lines = recipe.split(/\r\n|\n|\r/);
var result = [];
@@ -346,37 +294,30 @@ function getIncludesInRecipe() {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
var moduleId = m1[1];
result.push(moduleId);
result.push(moduleIdToPath(out, moduleId));
return;
}
var m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
var moduleId = m2[1];
result.push(moduleId);
result.push(moduleIdToPath(out, moduleId));
return;
}
});
return result;
}
function getFilesToWatch(out) {
return getIncludesInRecipe().map(function (moduleId) { return moduleIdToPath(out, moduleId); });
}
exports.getFilesToWatch = getFilesToWatch;
function run(out, inputFiles) {
log('Starting monaco.d.ts generation');
SOURCE_FILE_MAP = {};
var recipe = fs.readFileSync(RECIPE_PATH).toString();
var _a = generateDeclarationFile(out, inputFiles, recipe), result = _a[0], usageContent = _a[1];
var result = generateDeclarationFile(out, inputFiles, recipe);
var currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation');
var one = currentContent.replace(/\r\n/gm, '\n');
var other = result.replace(/\r\n/gm, '\n');
var isTheSame = one === other;
return {
content: result,
usageContent: usageContent,
filePath: DECLARATION_PATH,
isTheSame: isTheSame
isTheSame: currentContent === result
};
}
exports.run = run;
@@ -384,78 +325,3 @@ function complainErrors() {
logErr('Not running monaco.d.ts generation due to compile errors');
}
exports.complainErrors = complainErrors;
var TypeScriptLanguageServiceHost = /** @class */ (function () {
function TypeScriptLanguageServiceHost(libs, files, compilerOptions) {
this._libs = libs;
this._files = files;
this._compilerOptions = compilerOptions;
}
// --- language service host ---------------
TypeScriptLanguageServiceHost.prototype.getCompilationSettings = function () {
return this._compilerOptions;
};
TypeScriptLanguageServiceHost.prototype.getScriptFileNames = function () {
return ([]
.concat(Object.keys(this._libs))
.concat(Object.keys(this._files)));
};
TypeScriptLanguageServiceHost.prototype.getScriptVersion = function (fileName) {
return '1';
};
TypeScriptLanguageServiceHost.prototype.getProjectVersion = function () {
return '1';
};
TypeScriptLanguageServiceHost.prototype.getScriptSnapshot = function (fileName) {
if (this._files.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._files[fileName]);
}
else if (this._libs.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
}
else {
return ts.ScriptSnapshot.fromString('');
}
};
TypeScriptLanguageServiceHost.prototype.getScriptKind = function (fileName) {
return ts.ScriptKind.TS;
};
TypeScriptLanguageServiceHost.prototype.getCurrentDirectory = function () {
return '';
};
TypeScriptLanguageServiceHost.prototype.getDefaultLibFileName = function (options) {
return 'defaultLib:es5';
};
TypeScriptLanguageServiceHost.prototype.isDefaultLibFileName = function (fileName) {
return fileName === this.getDefaultLibFileName(this._compilerOptions);
};
return TypeScriptLanguageServiceHost;
}());
function execute() {
var OUTPUT_FILES = {};
var SRC_FILES = {};
var SRC_FILE_TO_EXPECTED_NAME = {};
getIncludesInRecipe().forEach(function (moduleId) {
if (/\.d\.ts$/.test(moduleId)) {
var fileName_1 = path.join(SRC, moduleId);
OUTPUT_FILES[moduleIdToPath('src', moduleId)] = fs.readFileSync(fileName_1).toString();
return;
}
var fileName = path.join(SRC, moduleId) + '.ts';
SRC_FILES[fileName] = fs.readFileSync(fileName).toString();
SRC_FILE_TO_EXPECTED_NAME[fileName] = moduleIdToPath('src', moduleId);
});
var languageService = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, SRC_FILES, {}));
var t1 = Date.now();
Object.keys(SRC_FILES).forEach(function (fileName) {
var t = Date.now();
var emitOutput = languageService.getEmitOutput(fileName, true);
OUTPUT_FILES[SRC_FILE_TO_EXPECTED_NAME[fileName]] = emitOutput.outputFiles[0].text;
// console.log(`Generating .d.ts for ${fileName} took ${Date.now() - t} ms`);
});
console.log("Generating .d.ts took " + (Date.now() - t1) + " ms");
// console.log(result.filePath);
// fs.writeFileSync(result.filePath, result.content.replace(/\r\n/gm, '\n'));
// fs.writeFileSync(path.join(SRC, 'user.ts'), result.usageContent.replace(/\r\n/gm, '\n'));
return run('src', OUTPUT_FILES);
}
exports.execute = execute;
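
Both sides of this file parse the recipe with the same regular expressions, so a hedged worked example of the capture groups may help; the sample line is taken from the monaco.d.ts recipe shown further down in this diff:

const line = '#includeAll(vs/editor/standalone/browser/standaloneEditor;modes.=>languages.;editorCommon.=>):';
const m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
	console.log(m2[1]); // 'vs/editor/standalone/browser/standaloneEditor'  -> module to pull declarations from
	console.log(m2[2]); // ';modes.=>languages.;editorCommon.=>'            -> replacer directives
	console.log(m2[3]); // ''                                               -> optional comma-separated type filter
}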

View File

@@ -24,15 +24,15 @@ function logErr(message: any, ...rest: any[]): void {
util.log(util.colors.red('[monaco.d.ts]'), message, ...rest);
}
function moduleIdToPath(out: string, moduleId: string): string {
function moduleIdToPath(out:string, moduleId:string): string {
if (/\.d\.ts/.test(moduleId)) {
return path.join(SRC, moduleId);
}
return path.join(OUT_ROOT, out, moduleId) + '.d.ts';
}
let SOURCE_FILE_MAP: { [moduleId: string]: ts.SourceFile; } = {};
function getSourceFile(out: string, inputFiles: { [file: string]: string; }, moduleId: string): ts.SourceFile {
let SOURCE_FILE_MAP: {[moduleId:string]:ts.SourceFile;} = {};
function getSourceFile(out:string, inputFiles: { [file: string]: string; }, moduleId:string): ts.SourceFile {
if (!SOURCE_FILE_MAP[moduleId]) {
let filePath = path.normalize(moduleIdToPath(out, moduleId));
@@ -53,7 +53,7 @@ function getSourceFile(out: string, inputFiles: { [file: string]: string; }, mod
type TSTopLevelDeclaration = ts.InterfaceDeclaration | ts.EnumDeclaration | ts.ClassDeclaration | ts.TypeAliasDeclaration | ts.FunctionDeclaration | ts.ModuleDeclaration;
type TSTopLevelDeclare = TSTopLevelDeclaration | ts.VariableStatement;
function isDeclaration(a: TSTopLevelDeclare): a is TSTopLevelDeclaration {
function isDeclaration(a:TSTopLevelDeclare): a is TSTopLevelDeclaration {
return (
a.kind === ts.SyntaxKind.InterfaceDeclaration
|| a.kind === ts.SyntaxKind.EnumDeclaration
@@ -64,7 +64,7 @@ function isDeclaration(a: TSTopLevelDeclare): a is TSTopLevelDeclaration {
);
}
function visitTopLevelDeclarations(sourceFile: ts.SourceFile, visitor: (node: TSTopLevelDeclare) => boolean): void {
function visitTopLevelDeclarations(sourceFile:ts.SourceFile, visitor:(node:TSTopLevelDeclare)=>boolean): void {
let stop = false;
let visit = (node: ts.Node): void => {
@@ -100,8 +100,8 @@ function visitTopLevelDeclarations(sourceFile: ts.SourceFile, visitor: (node: TS
}
function getAllTopLevelDeclarations(sourceFile: ts.SourceFile): TSTopLevelDeclare[] {
let all: TSTopLevelDeclare[] = [];
function getAllTopLevelDeclarations(sourceFile:ts.SourceFile): TSTopLevelDeclare[] {
let all:TSTopLevelDeclare[] = [];
visitTopLevelDeclarations(sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration>node;
@@ -128,8 +128,8 @@ function getAllTopLevelDeclarations(sourceFile: ts.SourceFile): TSTopLevelDeclar
}
function getTopLevelDeclaration(sourceFile: ts.SourceFile, typeName: string): TSTopLevelDeclare {
let result: TSTopLevelDeclare = null;
function getTopLevelDeclaration(sourceFile:ts.SourceFile, typeName:string): TSTopLevelDeclare {
let result:TSTopLevelDeclare = null;
visitTopLevelDeclarations(sourceFile, (node) => {
if (isDeclaration(node)) {
if (node.name.text === typeName) {
@@ -149,34 +149,12 @@ function getTopLevelDeclaration(sourceFile: ts.SourceFile, typeName: string): TS
}
function getNodeText(sourceFile: ts.SourceFile, node: { pos: number; end: number; }): string {
function getNodeText(sourceFile:ts.SourceFile, node:{pos:number; end:number;}): string {
return sourceFile.getFullText().substring(node.pos, node.end);
}
function hasModifier(modifiers: ts.NodeArray<ts.Modifier>, kind: ts.SyntaxKind): boolean {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
let mod = modifiers[i];
if (mod.kind === kind) {
return true;
}
}
}
return false;
}
function isStatic(member: ts.ClassElement | ts.TypeElement): boolean {
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
}
function isDefaultExport(declaration: ts.InterfaceDeclaration | ts.ClassDeclaration): boolean {
return (
hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword)
);
}
function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare, importName: string, usage: string[]): string {
function getMassagedTopLevelDeclarationText(sourceFile:ts.SourceFile, declaration: TSTopLevelDeclare): string {
let result = getNodeText(sourceFile, declaration);
// if (result.indexOf('MonacoWorker') >= 0) {
// console.log('here!');
@@ -185,23 +163,7 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
const staticTypeName = (
isDefaultExport(interfaceDeclaration)
? `${importName}.default`
: `${importName}.${declaration.name.text}`
);
let instanceTypeName = staticTypeName;
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
if (typeParametersCnt > 0) {
let arr: string[] = [];
for (let i = 0; i < typeParametersCnt; i++) {
arr.push('any');
}
instanceTypeName = `${instanceTypeName}<${arr.join(',')}>`;
}
const members: ts.NodeArray<ts.ClassElement | ts.TypeElement> = interfaceDeclaration.members;
let members:ts.NodeArray<ts.Node> = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
@@ -209,13 +171,6 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
// console.log('BEFORE: ', result);
result = result.replace(memberText, '');
// console.log('AFTER: ', result);
} else {
const memberName = (<ts.Identifier | ts.StringLiteral>member.name).text;
if (isStatic(member)) {
usage.push(`a = ${staticTypeName}.${memberName};`);
} else {
usage.push(`a = (<${instanceTypeName}>b).${memberName};`);
}
}
} catch (err) {
// life..
@@ -227,7 +182,7 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
return result;
}
function format(text: string): string {
function format(text:string): string {
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
@@ -241,7 +196,9 @@ function format(text: string): string {
function getRuleProvider(options: ts.FormatCodeSettings) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
return (ts as any).formatting.getFormatContext(options);
let ruleProvider = new (<any>ts).formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
}
function applyEdits(text: string, edits: ts.TextChange[]): string {
@@ -257,10 +214,10 @@ function format(text: string): string {
}
}
function createReplacer(data: string): (str: string) => string {
function createReplacer(data:string): (str:string)=>string {
data = data || '';
let rawDirectives = data.split(';');
let directives: [RegExp, string][] = [];
let directives: [RegExp,string][] = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
@@ -274,7 +231,7 @@ function createReplacer(data: string): (str: string) => string {
directives.push([new RegExp(findStr, 'g'), replaceStr]);
});
return (str: string) => {
return (str:string)=> {
for (let i = 0; i < directives.length; i++) {
str = str.replace(directives[i][0], directives[i][1]);
}
@@ -282,24 +239,10 @@ function createReplacer(data: string): (str: string) => string {
};
}
function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe: string): [string, string] {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe:string): string {
let lines = recipe.split(/\r\n|\n|\r/);
let result = [];
let lines = recipe.split(endl);
let result: string[] = [];
let usageCounter = 0;
let usageImports: string[] = [];
let usage: string[] = [];
usage.push(`var a;`);
usage.push(`var b;`);
const generateUsageImport = (moduleId: string) => {
let importName = 'm' + (++usageCounter);
usageImports.push(`import * as ${importName} from '${moduleId.replace(/\.d\.ts$/, '')}';`);
return importName;
};
lines.forEach(line => {
@@ -312,8 +255,6 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m1[2]);
let typeNames = m1[3].split(/,/);
@@ -327,7 +268,7 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
logErr('Cannot find type ' + typeName);
return;
}
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage)));
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration)));
});
return;
}
@@ -341,12 +282,10 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap: { [typeName: string]: boolean; } = {};
let typesToExcludeMap: {[typeName:string]:boolean;} = {};
let typesToExcludeArr: string[] = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
@@ -371,7 +310,7 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
}
}
}
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage)));
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration)));
});
return;
}
@@ -379,20 +318,18 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
result.push(line);
});
let resultTxt = result.join(endl);
let resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt);
return [
resultTxt,
`${usageImports.join('\n')}\n\n${usage.join('\n')}`
];
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt;
}
function getIncludesInRecipe(): string[] {
export function getFilesToWatch(out:string): string[] {
let recipe = fs.readFileSync(RECIPE_PATH).toString();
let lines = recipe.split(/\r\n|\n|\r/);
let result = [];
@@ -402,14 +339,14 @@ function getIncludesInRecipe(): string[] {
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
let moduleId = m1[1];
result.push(moduleId);
result.push(moduleIdToPath(out, moduleId));
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
let moduleId = m2[1];
result.push(moduleId);
result.push(moduleIdToPath(out, moduleId));
return;
}
});
@@ -417,13 +354,8 @@ function getIncludesInRecipe(): string[] {
return result;
}
export function getFilesToWatch(out: string): string[] {
return getIncludesInRecipe().map((moduleId) => moduleIdToPath(out, moduleId));
}
export interface IMonacoDeclarationResult {
content: string;
usageContent: string;
filePath: string;
isTheSame: boolean;
}
@@ -433,116 +365,18 @@ export function run(out: string, inputFiles: { [file: string]: string; }): IMona
SOURCE_FILE_MAP = {};
let recipe = fs.readFileSync(RECIPE_PATH).toString();
let [result, usageContent] = generateDeclarationFile(out, inputFiles, recipe);
let result = generateDeclarationFile(out, inputFiles, recipe);
let currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation');
const one = currentContent.replace(/\r\n/gm, '\n');
const other = result.replace(/\r\n/gm, '\n');
const isTheSame = one === other;
return {
content: result,
usageContent: usageContent,
filePath: DECLARATION_PATH,
isTheSame
isTheSame: currentContent === result
};
}
export function complainErrors() {
logErr('Not running monaco.d.ts generation due to compile errors');
}
interface ILibMap { [libName: string]: string; }
interface IFileMap { [fileName: string]: string; }
class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
private readonly _libs: ILibMap;
private readonly _files: IFileMap;
private readonly _compilerOptions: ts.CompilerOptions;
constructor(libs: ILibMap, files: IFileMap, compilerOptions: ts.CompilerOptions) {
this._libs = libs;
this._files = files;
this._compilerOptions = compilerOptions;
}
// --- language service host ---------------
getCompilationSettings(): ts.CompilerOptions {
return this._compilerOptions;
}
getScriptFileNames(): string[] {
return (
[]
.concat(Object.keys(this._libs))
.concat(Object.keys(this._files))
);
}
getScriptVersion(fileName: string): string {
return '1';
}
getProjectVersion(): string {
return '1';
}
getScriptSnapshot(fileName: string): ts.IScriptSnapshot {
if (this._files.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._files[fileName]);
} else if (this._libs.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
} else {
return ts.ScriptSnapshot.fromString('');
}
}
getScriptKind(fileName: string): ts.ScriptKind {
return ts.ScriptKind.TS;
}
getCurrentDirectory(): string {
return '';
}
getDefaultLibFileName(options: ts.CompilerOptions): string {
return 'defaultLib:es5';
}
isDefaultLibFileName(fileName: string): boolean {
return fileName === this.getDefaultLibFileName(this._compilerOptions);
}
}
export function execute(): IMonacoDeclarationResult {
const OUTPUT_FILES: { [file: string]: string; } = {};
const SRC_FILES: IFileMap = {};
const SRC_FILE_TO_EXPECTED_NAME: { [filename: string]: string; } = {};
getIncludesInRecipe().forEach((moduleId) => {
if (/\.d\.ts$/.test(moduleId)) {
let fileName = path.join(SRC, moduleId);
OUTPUT_FILES[moduleIdToPath('src', moduleId)] = fs.readFileSync(fileName).toString();
return;
}
let fileName = path.join(SRC, moduleId) + '.ts';
SRC_FILES[fileName] = fs.readFileSync(fileName).toString();
SRC_FILE_TO_EXPECTED_NAME[fileName] = moduleIdToPath('src', moduleId);
});
const languageService = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, SRC_FILES, {}));
var t1 = Date.now();
Object.keys(SRC_FILES).forEach((fileName) => {
var t = Date.now();
const emitOutput = languageService.getEmitOutput(fileName, true);
OUTPUT_FILES[SRC_FILE_TO_EXPECTED_NAME[fileName]] = emitOutput.outputFiles[0].text;
// console.log(`Generating .d.ts for ${fileName} took ${Date.now() - t} ms`);
});
console.log(`Generating .d.ts took ${Date.now() - t1} ms`);
// console.log(result.filePath);
// fs.writeFileSync(result.filePath, result.content.replace(/\r\n/gm, '\n'));
// fs.writeFileSync(path.join(SRC, 'user.ts'), result.usageContent.replace(/\r\n/gm, '\n'));
return run('src', OUTPUT_FILES);
}
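
The replacer directives carried in m1[2]/m2[2] are find=>replace pairs separated by semicolons, compiled into global regexes with dots escaped. The following is a standalone approximation of that behavior, not the exact implementation above, exercised on a directive string that appears verbatim in the recipe:

function makeReplacer(data: string): (str: string) => string {
	const directives = data
		.split(';')
		.filter(d => d.length > 0)
		.map(d => {
			const [find, replace] = d.split('=>');
			return [new RegExp(find.replace(/\./g, '\\.'), 'g'), replace] as [RegExp, string];
		});
	return (str: string) => directives.reduce((s, [re, r]) => s.replace(re, r), str);
}

const replacer = makeReplacer(';modes.=>languages.;editorCommon.=>');
console.log(replacer('modes.CompletionItemKind')); // 'languages.CompletionItemKind'
console.log(replacer('editorCommon.IScrollEvent')); // 'IScrollEvent'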

View File

@@ -3,9 +3,9 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
declare namespace monaco {
declare module monaco {
export type Thenable<T> = PromiseLike<T>;
type Thenable<T> = PromiseLike<T>;
export interface IDisposable {
dispose(): void;
@@ -32,18 +32,6 @@ declare namespace monaco {
Error = 3,
}
export enum MarkerTag {
Unnecessary = 1,
}
export enum MarkerSeverity {
Hint = 1,
Info = 2,
Warning = 4,
Error = 8,
}
#include(vs/base/common/winjs.base.d.ts): TValueCallback, ProgressCallback, Promise
#include(vs/base/common/cancellation): CancellationTokenSource, CancellationToken
#include(vs/base/common/uri): URI, UriComponents
@@ -58,46 +46,41 @@ declare namespace monaco {
#include(vs/editor/common/core/token): Token
}
declare namespace monaco.editor {
declare module monaco.editor {
#includeAll(vs/editor/standalone/browser/standaloneEditor;modes.=>languages.;editorCommon.=>):
#include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors
#include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule
#include(vs/editor/common/services/webWorker): MonacoWebWorker, IWebWorkerOptions
#include(vs/editor/standalone/browser/standaloneCodeEditor): IActionDescriptor, IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
#include(vs/editor/standalone/browser/standaloneCodeEditor): IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
export interface ICommandHandler {
(...args:any[]): void;
}
#include(vs/platform/contextkey/common/contextkey): IContextKey
#include(vs/editor/standalone/browser/standaloneServices): IEditorOverrideServices
#include(vs/platform/markers/common/markers): IMarker, IMarkerData, IRelatedInformation
#include(vs/platform/markers/common/markers): IMarker, IMarkerData
#include(vs/editor/standalone/browser/colorizer): IColorizerOptions, IColorizerElementOptions
#include(vs/base/common/scrollable): ScrollbarVisibility
#include(vs/platform/theme/common/themeService): ThemeColor
#includeAll(vs/editor/common/model;LanguageIdentifier=>languages.LanguageIdentifier): IScrollEvent
#includeAll(vs/editor/common/editorCommon;editorOptions.=>): IScrollEvent
#includeAll(vs/editor/common/editorCommon;IMode=>languages.IMode;LanguageIdentifier=>languages.LanguageIdentifier;editorOptions.=>): ISelection, IScrollEvent
#includeAll(vs/editor/common/model/textModelEvents):
#includeAll(vs/editor/common/controller/cursorEvents):
#includeAll(vs/editor/common/config/editorOptions):
#includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>):
#include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo
//compatibility:
export type IReadOnlyModel = ITextModel;
export type IModel = ITextModel;
}
declare namespace monaco.languages {
declare module monaco.languages {
#includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;model.=>editor.;IMarkerData=>editor.IMarkerData):
#includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData):
#includeAll(vs/editor/common/modes/languageConfiguration):
#includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData;model.=>editor.):
#includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.):
#include(vs/editor/common/services/modeService): ILanguageExtensionPoint
#includeAll(vs/editor/standalone/common/monarch/monarchTypes):
}
declare namespace monaco.worker {
declare module monaco.worker {
#includeAll(vs/editor/common/services/editorSimpleWorker;):

View File

@@ -1,82 +0,0 @@
// This file is adding references to various symbols which should not be removed via tree shaking
import { ServiceIdentifier } from 'vs/platform/instantiation/common/instantiation';
import { IContextViewService } from 'vs/platform/contextview/browser/contextView';
import { IHighlight } from 'vs/base/parts/quickopen/browser/quickOpenModel';
import { IWorkspaceContextService } from 'vs/platform/workspace/common/workspace';
import { IEnvironmentService } from 'vs/platform/environment/common/environment';
import { CountBadge } from 'vs/base/browser/ui/countBadge/countBadge';
import { SimpleWorkerClient, create as create1 } from 'vs/base/common/worker/simpleWorker';
import { create as create2 } from 'vs/editor/common/services/editorSimpleWorker';
import { QuickOpenWidget } from 'vs/base/parts/quickopen/browser/quickOpenWidget';
import { SyncDescriptor0, SyncDescriptor1, SyncDescriptor2, SyncDescriptor3, SyncDescriptor4, SyncDescriptor5, SyncDescriptor6, SyncDescriptor7, SyncDescriptor8 } from 'vs/platform/instantiation/common/descriptors';
import { PolyfillPromise } from 'vs/base/common/winjs.polyfill.promise';
import { DiffNavigator } from 'vs/editor/browser/widget/diffNavigator';
import * as editorAPI from 'vs/editor/editor.api';
(function () {
var a: any;
var b: any;
a = (<IContextViewService>b).layout; // IContextViewProvider
a = (<IWorkspaceContextService>b).getWorkspaceFolder; // IWorkspaceFolderProvider
a = (<IWorkspaceContextService>b).getWorkspace; // IWorkspaceFolderProvider
a = (<CountBadge>b).style; // IThemable
a = (<QuickOpenWidget>b).style; // IThemable
a = (<IEnvironmentService>b).userHome; // IUserHomeProvider
a = (<DiffNavigator>b).previous; // IDiffNavigator
a = (<ServiceIdentifier<any>>b).type;
a = (<IHighlight>b).start;
a = (<IHighlight>b).end;
a = (<SimpleWorkerClient<any>>b).getProxyObject; // IWorkerClient
a = create1;
a = create2;
// promise polyfill
a = PolyfillPromise.all;
a = PolyfillPromise.race;
a = PolyfillPromise.resolve;
a = PolyfillPromise.reject;
a = (<PolyfillPromise>b).then;
a = (<PolyfillPromise>b).catch;
// injection madness
a = (<SyncDescriptor0<any>>b).ctor;
a = (<SyncDescriptor0<any>>b).bind;
a = (<SyncDescriptor1<any, any>>b).ctor;
a = (<SyncDescriptor1<any, any>>b).bind;
a = (<SyncDescriptor1<any, any>>b).ctor;
a = (<SyncDescriptor1<any, any>>b).bind;
a = (<SyncDescriptor2<any, any, any>>b).ctor;
a = (<SyncDescriptor2<any, any, any>>b).bind;
a = (<SyncDescriptor3<any, any, any, any>>b).ctor;
a = (<SyncDescriptor3<any, any, any, any>>b).bind;
a = (<SyncDescriptor4<any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor4<any, any, any, any, any>>b).bind;
a = (<SyncDescriptor5<any, any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor5<any, any, any, any, any, any>>b).bind;
a = (<SyncDescriptor6<any, any, any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor6<any, any, any, any, any, any, any>>b).bind;
a = (<SyncDescriptor7<any, any, any, any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor7<any, any, any, any, any, any, any, any>>b).bind;
a = (<SyncDescriptor8<any, any, any, any, any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor8<any, any, any, any, any, any, any, any, any>>b).bind;
// exported API
a = editorAPI.CancellationTokenSource;
a = editorAPI.Emitter;
a = editorAPI.KeyCode;
a = editorAPI.KeyMod;
a = editorAPI.Position;
a = editorAPI.Range;
a = editorAPI.Selection;
a = editorAPI.SelectionDirection;
a = editorAPI.Severity;
a = editorAPI.MarkerSeverity;
a = editorAPI.MarkerTag;
a = editorAPI.Promise;
a = editorAPI.Uri;
a = editorAPI.Token;
a = editorAPI.editor;
a = editorAPI.languages;
})();

View File

@@ -1,17 +1,57 @@
{
"name": "monaco-editor-core",
"private": true,
"version": "0.12.0",
"version": "0.9.0",
"description": "A browser based code editor",
"author": "Microsoft Corporation",
"license": "MIT",
"typings": "./esm/vs/editor/editor.api.d.ts",
"module": "./esm/vs/editor/editor.main.js",
"repository": {
"type": "git",
"url": "https://github.com/Microsoft/vscode"
},
"bugs": {
"bugs": {
"url": "https://github.com/Microsoft/vscode/issues"
},
"devDependencies": {
"@types/minimist": "1.2.0",
"@types/mocha": "2.2.39",
"@types/semver": "5.3.30",
"@types/sinon": "1.16.34",
"debounce": "^1.0.0",
"eslint": "^3.4.0",
"event-stream": "^3.1.7",
"ghooks": "1.0.3",
"glob": "^5.0.13",
"gulp": "^3.8.9",
"gulp-bom": "^1.0.0",
"gulp-concat": "^2.6.0",
"gulp-cssnano": "^2.1.0",
"gulp-filter": "^3.0.0",
"gulp-flatmap": "^1.0.0",
"gulp-rename": "^1.2.0",
"gulp-sourcemaps": "^1.11.0",
"gulp-tsb": "^2.0.3",
"gulp-tslint": "^7.0.1",
"gulp-uglify": "^2.0.0",
"gulp-util": "^3.0.6",
"gulp-watch": "^4.3.9",
"is": "^3.1.0",
"istanbul": "^0.3.17",
"jsdom-no-contextify": "^3.1.0",
"lazy.js": "^0.4.2",
"minimatch": "^2.0.10",
"mocha": "^2.2.5",
"object-assign": "^4.0.1",
"pump": "^1.0.1",
"remap-istanbul": "^0.6.4",
"rimraf": "^2.2.8",
"sinon": "^1.17.2",
"source-map": "^0.4.4",
"tslint": "^4.3.1",
"typescript": "2.5.2",
"typescript-formatter": "4.0.1",
"underscore": "^1.8.2",
"vinyl": "^0.4.5",
"vscode-nls-dev": "^2.0.1"
}
}

File diff suppressed because it is too large

View File

@@ -5,55 +5,59 @@
const cp = require('child_process');
const path = require('path');
const fs = require('fs');
const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function yarnInstall(location, opts) {
function npmInstall(location, opts) {
opts = opts || {};
opts.cwd = location;
opts.stdio = 'inherit';
const result = cp.spawnSync(yarn, ['install'], opts);
const result = cp.spawnSync(npm, ['install'], opts);
if (result.error || result.status !== 0) {
process.exit(1);
}
}
yarnInstall('extensions'); // node modules shared by all extensions
// {{SQL CARBON EDIT}}
const protocol = [
'jsonrpc',
'types',
'client'
];
const allExtensionFolders = fs.readdirSync('extensions');
const extensions = allExtensionFolders.filter(e => {
try {
let packageJSON = JSON.parse(fs.readFileSync(path.join('extensions', e, 'package.json')).toString());
return packageJSON && (packageJSON.dependencies || packageJSON.devDependencies);
} catch (e) {
return false;
}
});
protocol.forEach(item => npmInstall(`dataprotocol-node/${item}`));
extensions.forEach(extension => yarnInstall(`extensions/${extension}`));
// {{SQL CARBON EDIT}}
npmInstall('extensions-modules');
npmInstall('extensions'); // node modules shared by all extensions
function yarnInstallBuildDependencies() {
// make sure we install the deps of build/lib/watch for the system installed
const extensions = [
'vscode-colorize-tests',
'git',
'json',
'mssql',
'configuration-editing',
'extension-editing',
'markdown',
'merge-conflict',
'insights-default',
'account-provider-azure'
];
extensions.forEach(extension => npmInstall(`extensions/${extension}`));
function npmInstallBuildDependencies() {
// make sure we install gulp watch for the system installed
// node, since that is the driver of gulp
//@ts-ignore
const env = Object.assign({}, process.env);
const watchPath = path.join(path.dirname(__dirname), 'lib', 'watch');
const yarnrcPath = path.join(watchPath, '.yarnrc');
const disturl = 'https://nodejs.org/download/release';
const target = process.versions.node;
const runtime = 'node';
delete env['npm_config_disturl'];
delete env['npm_config_target'];
delete env['npm_config_runtime'];
const yarnrc = `disturl "${disturl}"
target "${target}"
runtime "${runtime}"`;
fs.writeFileSync(yarnrcPath, yarnrc, 'utf8');
yarnInstall(watchPath, { env });
npmInstall(path.join(path.dirname(__dirname), 'lib', 'watch'), { env });
}
yarnInstall(`build`); // node modules required for build
yarnInstall('test/smoke'); // node modules required for smoketest
yarnInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron
npmInstall(`build`); // node modules required for build
npmInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron

View File

@@ -3,21 +3,13 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
let err = false;
if (process.env['npm_config_disturl'] !== 'https://atom.io/download/electron') {
console.error("You can't use plain npm to install Code's dependencies.");
console.error(
/^win/.test(process.platform)
? "Please run '.\\scripts\\npm.bat install' instead."
: "Please run './scripts/npm.sh install' instead."
);
const major = parseInt(/^(\d+)\./.exec(process.versions.node)[1]);
if (major < 8) {
console.error('\033[1;31m*** Please use node>=8.\033[0;0m');
err = true;
}
if (!/yarn\.js$|yarnpkg$/.test(process.env['npm_execpath'])) {
console.error('\033[1;31m*** Please use yarn to install dependencies.\033[0;0m');
err = true;
}
if (err) {
console.error('');
process.exit(1);
}

View File

@@ -4,11 +4,9 @@
*--------------------------------------------------------------------------------------------*/
const cp = require('child_process');
const fs = require('fs');
const path = require('path');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function updateGrammar(location) {
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
const result = cp.spawnSync(npm, ['run', 'update-grammar'], {
cwd: location,
stdio: 'inherit'
@@ -19,17 +17,50 @@ function updateGrammar(location) {
}
}
const allExtensionFolders = fs.readdirSync('extensions');
const extensions = allExtensionFolders.filter(e => {
try {
let packageJSON = JSON.parse(fs.readFileSync(path.join('extensions', e, 'package.json')).toString());
return packageJSON && packageJSON.scripts && packageJSON.scripts['update-grammar'];
} catch (e) {
return false;
}
});
console.log(`Updating ${extensions.length} grammars...`);
const extensions = [
// 'bat' Grammar no longer available
'clojure',
'coffeescript',
'cpp',
'csharp',
'css',
'diff',
'docker',
'fsharp',
'gitsyntax',
'go',
'groovy',
'handlebars',
'hlsl',
'html',
'ini',
'java',
// 'javascript', updated through JavaScript
'json',
'less',
'lua',
'make',
'markdown',
'objective-c',
'perl',
'php',
// 'powershell', grammar not ready yet, @daviwil will ping when ready
'pug',
'python',
'r',
'razor',
'ruby',
'rust',
'scss',
'shaderlab',
'shellscript',
'sql',
'swift',
'typescript',
'vb',
'xml',
'yaml'
];
extensions.forEach(extension => updateGrammar(`extensions/${extension}`));
@@ -39,5 +70,4 @@ if (process.platform === 'win32') {
cp.spawn('.\\scripts\\test-integration.bat', [], { env: process.env, stdio: 'inherit' });
} else {
cp.spawn('/bin/bash', ['./scripts/test-integration.sh'], { env: process.env, stdio: 'inherit' });
}
}

View File

@@ -14,19 +14,14 @@ var url = require('url');
function getOptions(urlString) {
var _url = url.parse(urlString);
var headers = {
'User-Agent': 'VSCode'
};
var token = process.env['GITHUB_TOKEN'];
if (token) {
headers['Authorization'] = 'token ' + token
}
return {
protocol: _url.protocol,
host: _url.host,
port: _url.port,
path: _url.path,
headers: headers
headers: {
'User-Agent': 'NodeJS'
}
};
}
@@ -37,16 +32,12 @@ function download(url, redirectCount) {
response.on('data', function (data) {
content += data.toString();
}).on('end', function () {
if (response.statusCode === 403 && response.headers['x-ratelimit-remaining'] === '0') {
e('GitHub API rate exceeded. Set GITHUB_TOKEN environment variable to increase rate limit.');
return;
}
let count = redirectCount || 0;
if (count < 5 && response.statusCode >= 300 && response.statusCode <= 303 || response.statusCode === 307) {
let location = response.headers['location'];
if (location) {
console.log("Redirected " + url + " to " + location);
download(location, count + 1).then(c, e);
download(location, count+1).then(c, e);
return;
}
}
@@ -68,13 +59,17 @@ function getCommitSha(repoId, repoPath) {
commitDate: lastCommit.commit.author.date
});
} catch (e) {
return Promise.reject(new Error("Failed extracting the SHA: " + content));
console.error("Failed extracting the SHA: " + content);
return Promise.resolve(null);
}
}, function () {
console.error('Failed loading ' + commitInfo);
return Promise.resolve(null);
});
}
exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'master') {
var contentPath = 'https://raw.githubusercontent.com/' + repoId + `/${version}/` + repoPath;
exports.update = function (repoId, repoPath, dest, modifyGrammar) {
var contentPath = 'https://raw.githubusercontent.com/' + repoId + '/master/' + repoPath;
console.log('Reading from ' + contentPath);
return download(contentPath).then(function (content) {
var ext = path.extname(repoPath);
@@ -83,10 +78,11 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
grammar = plist.parse(content);
} else if (ext === '.cson') {
grammar = cson.parse(content);
} else if (ext === '.json' || ext === '.JSON-tmLanguage') {
} else if (ext === '.json') {
grammar = JSON.parse(content);
} else {
return Promise.reject(new Error('Unknown file extension: ' + ext));
console.error('Unknown file extension: ' + ext);
return;
}
if (modifyGrammar) {
modifyGrammar(grammar);
@@ -103,10 +99,8 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
if (info) {
result.version = 'https://github.com/' + repoId + '/commit/' + info.commitSha;
}
let keys = ['name', 'scopeName', 'comment', 'injections', 'patterns', 'repository'];
for (let key of keys) {
if (grammar.hasOwnProperty(key)) {
for (let key in grammar) {
if (!result.hasOwnProperty(key)) {
result[key] = grammar[key];
}
}
@@ -119,14 +113,11 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
console.log('Updated ' + path.basename(dest));
}
} catch (e) {
return Promise.reject(e);
console.error(e);
}
});
}, console.error).catch(e => {
console.error(e);
process.exit(1);
});
}, console.error);
};
if (path.basename(process.argv[1]) === 'update-grammar.js') {
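A hedged usage sketch of the exported helper, based only on the update(repoId, repoPath, dest, modifyGrammar[, version]) signature visible in this hunk; the repository and file paths below are illustrative, not taken from this diff:

const updateGrammar = require('./update-grammar');

// Fetch a grammar file from GitHub, convert it and write the result into the extension.
updateGrammar.update(
	'atom/language-clojure',                 // repoId
	'grammars/clojure.cson',                 // repoPath inside that repository
	'./syntaxes/clojure.tmLanguage.json',    // dest file in the calling extension
	grammar => { grammar.name = 'Clojure'; } // optional modifyGrammar callback
);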

View File

@@ -1,74 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
let i18n = require("../lib/i18n");
let fs = require("fs");
let path = require("path");
let vfs = require("vinyl-fs");
let rimraf = require('rimraf');
function update(idOrPath) {
if (!idOrPath) {
throw new Error('Argument must be the location of the localization extension.');
}
let locExtFolder = idOrPath;
if (/^\w{2}(-\w+)?$/.test(idOrPath)) {
locExtFolder = '../vscode-language-pack-' + idOrPath;
}
let locExtStat = fs.statSync(locExtFolder);
if (!locExtStat || !locExtStat.isDirectory) {
throw new Error('No directory found at ' + idOrPath);
}
let packageJSON = JSON.parse(fs.readFileSync(path.join(locExtFolder, 'package.json')).toString());
let contributes = packageJSON['contributes'];
if (!contributes) {
throw new Error('The extension must define a "localizations" contribution in the "package.json"');
}
let localizations = contributes['localizations'];
if (!localizations) {
throw new Error('The extension must define a "localizations" contribution of type array in the "package.json"');
}
localizations.forEach(function (localization) {
if (!localization.languageId || !localization.languageName || !localization.localizedLanguageName) {
throw new Error('Each localization contribution must define "languageId", "languageName" and "localizedLanguageName" properties.');
}
let server = localization.server || 'www.transifex.com';
let userName = localization.userName || 'api';
let apiToken = process.env.TRANSIFEX_API_TOKEN;
let languageId = localization.transifexId || localization.languageId;
let translationDataFolder = path.join(locExtFolder, 'translations');
if (languageId === "zh-cn") {
languageId = "zh-hans";
}
if (languageId === "zh-tw") {
languageId = "zh-hant";
}
if (fs.existsSync(translationDataFolder) && fs.existsSync(path.join(translationDataFolder, 'main.i18n.json'))) {
console.log('Clearing \'' + translationDataFolder + '\'...');
rimraf.sync(translationDataFolder);
}
console.log('Downloading translations for \'' + languageId + '\' to \'' + translationDataFolder + '\'...');
const translationPaths = [];
i18n.pullI18nPackFiles(server, userName, apiToken, { id: languageId }, translationPaths)
.pipe(vfs.dest(translationDataFolder)).on('end', function () {
localization.translations = [];
for (let tp of translationPaths) {
localization.translations.push({ id: tp.id, path: `./translations/${tp.resourceName}`});
}
fs.writeFileSync(path.join(locExtFolder, 'package.json'), JSON.stringify(packageJSON, null, '\t'));
});
});
}
if (path.basename(process.argv[1]) === 'update-localization-extension.js') {
update(process.argv[2]);
}
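For reference, the deleted script validated the following package.json shape before pulling translations. This is a sketch inferred from the checks above, with hypothetical values:

const packageJSON = {
	contributes: {
		localizations: [{
			languageId: 'de',                 // required
			languageName: 'German',           // required
			localizedLanguageName: 'Deutsch', // required
			transifexId: 'de',                // optional override for the Transifex language id
			server: 'www.transifex.com',      // optional, defaults to this value
			userName: 'api',                  // optional, defaults to this value
			translations: []                  // rewritten by the script after the pull
		}]
	}
};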

View File

@@ -1,5 +1,5 @@
{
"name": "azuredatastudio-oss-dev-build",
"name": "sqlops-oss-dev-build",
"version": "1.0.0",
"devDependencies": {
"@types/azure": "0.9.19",
@@ -7,28 +7,22 @@
"@types/es6-collections": "0.5.31",
"@types/es6-promise": "0.0.33",
"@types/mime": "0.0.29",
"@types/minimatch": "^3.0.3",
"@types/node": "8.0.33",
"@types/request": "^2.47.0",
"@types/xml2js": "0.0.33",
"azure-storage": "^2.1.0",
"decompress": "^4.2.0",
"del": "^3.0.0",
"documentdb": "1.13.0",
"documentdb": "^1.11.0",
"extensions-modules": "file:../extensions-modules",
"fs-extra-promise": "^1.0.1",
"github-releases": "^0.4.1",
"mime": "^1.3.4",
"minimist": "^1.2.0",
"request": "^2.85.0",
"service-downloader": "github:anthonydresser/service-downloader#0.1.5",
"typescript": "2.9.2",
"typescript": "2.5.2",
"vscode": "^1.0.1",
"xml2js": "^0.4.17"
},
"scripts": {
"compile": "tsc -p tsconfig.build.json",
"watch": "tsc -p tsconfig.build.json --watch",
"postinstall": "npm run compile",
"npmCheckJs": "tsc --noEmit"
"compile": "tsc",
"watch": "tsc --watch",
"postinstall": "npm run compile"
}
}

View File

@@ -0,0 +1,40 @@
#!/bin/bash
set -e
# set agent specific npm cache
if [ -n "$AGENT_WORKFOLDER" ]
then
export npm_config_cache="$AGENT_WORKFOLDER/npm-cache"
echo "Using npm cache: $npm_config_cache"
fi
SUMMARY="Task;Duration"$'\n'
step() {
START=$SECONDS
TASK=$1; shift
echo ""
echo "*****************************************************************************"
echo "Start: $TASK"
echo "*****************************************************************************"
"$@"
# Calculate total duration
TOTAL=$(echo "$SECONDS - $START" | bc)
M=$(echo "$TOTAL / 60" | bc)
S=$(echo "$TOTAL % 60" | bc)
DURATION="$(printf "%02d" $M):$(printf "%02d" $S)"
echo "*****************************************************************************"
echo "End: $TASK, Total: $DURATION"
echo "*****************************************************************************"
SUMMARY="$SUMMARY$TASK;$DURATION"$'\n'
}
done_steps() {
echo ""
echo "Build Summary"
echo "============="
echo "${SUMMARY}" | column -t -s';'
}
trap done_steps EXIT

View File

@@ -4,15 +4,23 @@
*--------------------------------------------------------------------------------------------*/
const cp = require('child_process');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function yarnInstall(package: string): void {
cp.execSync(`yarn add --no-lockfile ${package}`);
function npmInstall(package: string, args: string[]): void {
const result = cp.spawnSync(npm, ['install', package, ...args], {
stdio: 'inherit'
});
if (result.error || result.status !== 0) {
process.exit(1);
}
}
const product = require('../../../product.json');
const dependencies = product.dependencies || {} as { [name: string]: string; };
const [, , ...args] = process.argv;
Object.keys(dependencies).forEach(name => {
const url = dependencies[name];
yarnInstall(url);
npmInstall(url, args);
});
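This script walks the dependencies map in product.json and installs each value. A sketch of the field it consumes, with a hypothetical entry (the key is a package name, the value a URL or spec that npm/yarn can install):

const product = {
	dependencies: {
		'some-distro-extension': 'https://example.com/some-distro-extension-1.0.0.tgz'
	}
};
// Each value above ends up in `npm install <url> <extra args>` (or `yarn add <url>` in the removed variant).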

15
build/tfs/common/node.sh Normal file
View File

@@ -0,0 +1,15 @@
#!/bin/bash
set -e
# setup nvm
if [[ "$OSTYPE" == "darwin"* ]]; then
export NVM_DIR=~/.nvm
source $(brew --prefix nvm)/nvm.sh
else
source $NVM_DIR/nvm.sh
fi
# install node
NODE_VERSION=7.10.0
nvm install $NODE_VERSION
nvm use $NODE_VERSION

View File

@@ -14,9 +14,8 @@ import * as mime from 'mime';
import * as minimist from 'minimist';
import { DocumentClient, NewDocument } from 'documentdb';
// {{SQL CARBON EDIT}}
if (process.argv.length < 9) {
console.error('Usage: node publish.js <product_quality> <platform> <file_type> <file_name> <version> <is_update> <file> [commit_id]');
if (process.argv.length < 6) {
console.error('Usage: node publish.js <product> <platform> <type> <name> <version> <commit> <is_update> <file>');
process.exit(-1);
}
@@ -70,7 +69,6 @@ interface Asset {
hash: string;
sha256hash: string;
size: number;
supportsFastUpdate?: boolean;
}
function createOrUpdate(commit: string, quality: string, platform: string, type: string, release: NewDocument, asset: Asset, isUpdate: boolean): Promise<void> {
@@ -185,10 +183,21 @@ async function publish(commit: string, quality: string, platform: string, type:
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// {{SQL CARBON EDIT}}
await assertContainer(blobService, quality);
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const blobExists = await doesAssetExist(blobService, quality, blobName);
// mooncake is fussy and far away, this is needed!
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
await Promise.all([
assertContainer(blobService, quality),
assertContainer(mooncakeBlobService, quality)
]);
const [blobExists, moooncakeBlobExists] = await Promise.all([
doesAssetExist(blobService, quality, blobName),
doesAssetExist(mooncakeBlobService, quality, blobName)
]);
const promises = [];
@@ -196,38 +205,10 @@ async function publish(commit: string, quality: string, platform: string, type:
promises.push(uploadBlob(blobService, quality, blobName, file));
}
// {{SQL CARBON EDIT}}
if (process.env['MOONCAKE_STORAGE_ACCESS_KEY']) {
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// mooncake is fussy and far away, this is needed!
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
await Promise.all([
assertContainer(blobService, quality),
assertContainer(mooncakeBlobService, quality)
]);
const [blobExists, moooncakeBlobExists] = await Promise.all([
doesAssetExist(blobService, quality, blobName),
doesAssetExist(mooncakeBlobService, quality, blobName)
]);
const promises = [];
if (!blobExists) {
promises.push(uploadBlob(blobService, quality, blobName, file));
}
if (!moooncakeBlobExists) {
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
}
} else {
console.log('Skipping Mooncake publishing.');
if (!moooncakeBlobExists) {
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
}
if (promises.length === 0) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
@@ -247,20 +228,12 @@ async function publish(commit: string, quality: string, platform: string, type:
platform: platform,
type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
// {{SQL CARBON EDIT}}
mooncakeUrl: process.env['MOONCAKE_CDN_URL'] ? `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}` : undefined,
mooncakeUrl: `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
};
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, ' '));
const release = {
id: commit,
timestamp: (new Date()).getTime(),
@@ -288,11 +261,8 @@ function main(): void {
boolean: ['upload-only']
});
// {{SQL CARBON EDIT}}
let [quality, platform, type, name, version, _isUpdate, file, commit] = opts._;
if (!commit) {
commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
}
const [quality, platform, type, name, version, _isUpdate, file] = opts._;
const commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => {
console.error(err);

View File

@@ -1,219 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as request from 'request';
import { createReadStream, createWriteStream, unlink, mkdir } from 'fs';
import * as github from 'github-releases';
import { join } from 'path';
import { tmpdir } from 'os';
import { promisify } from 'util';
const BASE_URL = 'https://rink.hockeyapp.net/api/2/';
const HOCKEY_APP_TOKEN_HEADER = 'X-HockeyAppToken';
export interface IVersions {
app_versions: IVersion[];
}
export interface IVersion {
id: number;
version: string;
}
export interface IApplicationAccessor {
accessToken: string;
appId: string;
}
export interface IVersionAccessor extends IApplicationAccessor {
id: string;
}
enum Platform {
WIN_32 = 'win32-ia32',
WIN_64 = 'win32-x64',
LINUX_32 = 'linux-ia32',
LINUX_64 = 'linux-x64',
MAC_OS = 'darwin-x64'
}
function symbolsZipName(platform: Platform, electronVersion: string, insiders: boolean): string {
return `${insiders ? 'insiders' : 'stable'}-symbols-v${electronVersion}-${platform}.zip`;
}
const SEED = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
async function tmpFile(name: string): Promise<string> {
let res = '';
for (let i = 0; i < 8; i++) {
res += SEED.charAt(Math.floor(Math.random() * SEED.length));
}
const tmpParent = join(tmpdir(), res);
await promisify(mkdir)(tmpParent);
return join(tmpParent, name);
}
function getVersions(accessor: IApplicationAccessor): Promise<IVersions> {
return asyncRequest<IVersions>({
url: `${BASE_URL}/apps/${accessor.appId}/app_versions`,
method: 'GET',
headers: {
[HOCKEY_APP_TOKEN_HEADER]: accessor.accessToken
}
});
}
function createVersion(accessor: IApplicationAccessor, version: string): Promise<IVersion> {
return asyncRequest<IVersion>({
url: `${BASE_URL}/apps/${accessor.appId}/app_versions/new`,
method: 'POST',
headers: {
[HOCKEY_APP_TOKEN_HEADER]: accessor.accessToken
},
formData: {
bundle_version: version
}
});
}
function updateVersion(accessor: IVersionAccessor, symbolsPath: string) {
return asyncRequest<IVersions>({
url: `${BASE_URL}/apps/${accessor.appId}/app_versions/${accessor.id}`,
method: 'PUT',
headers: {
[HOCKEY_APP_TOKEN_HEADER]: accessor.accessToken
},
formData: {
dsym: createReadStream(symbolsPath)
}
});
}
function asyncRequest<T>(options: request.UrlOptions & request.CoreOptions): Promise<T> {
return new Promise<T>((resolve, reject) => {
request(options, (error, response, body) => {
if (error) {
reject(error);
} else {
resolve(JSON.parse(body));
}
});
});
}
function downloadAsset(repository, assetName: string, targetPath: string, electronVersion: string) {
return new Promise((resolve, reject) => {
repository.getReleases({ tag_name: `v${electronVersion}` }, (err, releases) => {
if (err) {
reject(err);
} else {
const asset = releases[0].assets.filter(asset => asset.name === assetName)[0];
if (!asset) {
reject(new Error(`Asset with name ${assetName} not found`));
} else {
repository.downloadAsset(asset, (err, reader) => {
if (err) {
reject(err);
} else {
const writer = createWriteStream(targetPath);
writer.on('error', reject);
writer.on('close', resolve);
reader.on('error', reject);
reader.pipe(writer);
}
});
}
}
});
});
}
interface IOptions {
repository: string;
platform: Platform;
versions: { code: string; insiders: boolean; electron: string; };
access: { hockeyAppToken: string; hockeyAppId: string; githubToken: string };
}
async function ensureVersionAndSymbols(options: IOptions) {
// Check version does not exist
console.log(`HockeyApp: checking for existing version ${options.versions.code} (${options.platform})`);
const versions = await getVersions({ accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId });
if (versions.app_versions.some(v => v.version === options.versions.code)) {
console.log(`HockeyApp: Returning without uploading symbols because version ${options.versions.code} (${options.platform}) was already found`);
return;
}
// Download symbols for platform and electron version
const symbolsName = symbolsZipName(options.platform, options.versions.electron, options.versions.insiders);
const symbolsPath = await tmpFile('symbols.zip');
console.log(`HockeyApp: downloading symbols ${symbolsName} for electron ${options.versions.electron} (${options.platform}) into ${symbolsPath}`);
await downloadAsset(new github({ repo: options.repository, token: options.access.githubToken }), symbolsName, symbolsPath, options.versions.electron);
// Create version
console.log(`HockeyApp: creating new version ${options.versions.code} (${options.platform})`);
const version = await createVersion({ accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId }, options.versions.code);
// Upload symbols
console.log(`HockeyApp: uploading symbols for version ${options.versions.code} (${options.platform})`);
await updateVersion({ id: String(version.id), accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId }, symbolsPath);
// Cleanup
await promisify(unlink)(symbolsPath);
}
// Environment
const pakage = require('../../../package.json');
const product = require('../../../product.json');
const repository = product.electronRepository;
const electronVersion = require('../../lib/electron').getElectronVersion();
const insiders = product.quality !== 'stable';
let codeVersion = pakage.version;
if (insiders) {
codeVersion = `${codeVersion}-insider`;
}
const githubToken = process.argv[2];
const hockeyAppToken = process.argv[3];
const is64 = process.argv[4] === 'x64';
const hockeyAppId = process.argv[5];
let platform: Platform;
if (process.platform === 'darwin') {
platform = Platform.MAC_OS;
} else if (process.platform === 'win32') {
platform = is64 ? Platform.WIN_64 : Platform.WIN_32;
} else {
platform = is64 ? Platform.LINUX_64 : Platform.LINUX_32;
}
// Create version and upload symbols in HockeyApp
if (repository && codeVersion && electronVersion && (product.quality === 'stable' || product.quality === 'insider')) {
ensureVersionAndSymbols({
repository,
platform,
versions: {
code: codeVersion,
insiders,
electron: electronVersion
},
access: {
githubToken,
hockeyAppToken,
hockeyAppId
}
}).then(() => {
console.log('HockeyApp: done');
}).catch(error => {
console.error(`HockeyApp: error (${error})`);
});
} else {
console.log(`HockeyApp: skipping due to unexpected context (repository: ${repository}, codeVersion: ${codeVersion}, electronVersion: ${electronVersion}, quality: ${product.quality})`);
}

View File

@@ -1,15 +0,0 @@
phases:
- phase: Windows
queue: Hosted VS2017
steps:
- template: win32/continuous-build-win32.yml
- phase: Linux
queue: Hosted Linux Preview
steps:
- template: linux/continuous-build-linux.yml
- phase: macOS
queue: Hosted macOS Preview
steps:
- template: darwin/continuous-build-darwin.yml

55
build/tfs/darwin/build.sh Normal file
View File

@@ -0,0 +1,55 @@
#!/bin/sh
. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
export VSCODE_MIXIN_PASSWORD="$1"
export AZURE_STORAGE_ACCESS_KEY="$2"
export AZURE_STORAGE_ACCESS_KEY_2="$3"
export MOONCAKE_STORAGE_ACCESS_KEY="$4"
export AZURE_DOCUMENTDB_MASTERKEY="$5"
VSO_PAT="$6"
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
step "Install dependencies" \
npm install
step "Hygiene" \
npm run gulp -- hygiene
step "Mix in repository from vscode-distro" \
npm run gulp -- mixin
step "Install distro dependencies" \
node build/tfs/common/installDistro.js
step "Build minified & upload source maps" \
npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps
# step "Create loader snapshot"
# node build/lib/snapshotLoader.js
step "Run unit tests" \
./scripts/test.sh --build --reporter dot
step "Run integration tests" \
./scripts/test-integration.sh
# function smoketest {
# ARTIFACTS="$AGENT_BUILDDIRECTORY/smoketest-artifacts"
# rm -rf $ARTIFACTS
# [[ "$VSCODE_QUALITY" == "insider" ]] && VSCODE_APPNAME="Visual Studio Code - Insiders" || VSCODE_APPNAME="Visual Studio Code"
# npm run smoketest -- --build "$AGENT_BUILDDIRECTORY/VSCode-darwin/$VSCODE_APPNAME.app" --log $ARTIFACTS
# }
# step "Run smoke test" \
# smoketest
step "Publish release" \
./build/tfs/darwin/release.sh
step "Generate and upload configuration.json" \
npm run gulp -- upload-vscode-configuration

View File

@@ -1,48 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
yarn
displayName: Install Dependencies
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn check-monaco-editor-compilation
displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- script: |
./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
- script: |
yarn smoketest --screenshots "$(Build.ArtifactStagingDirectory)/artifacts" --log "$(Build.ArtifactStagingDirectory)/artifacts/smoketest.log"
displayName: Run Smoke Tests
continueOnError: true
- task: PublishBuildArtifacts@1
displayName: Publish Smoketest Artifacts
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: build-artifacts-darwin
publishLocation: Container
condition: eq(variables['System.PullRequest.IsFork'], 'False')
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,106 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
var child_process_1 = require("child_process");
var documentdb_1 = require("documentdb");
var azure = require("azure-storage");
function queueSigningRequest(quality, commit) {
var retryOperations = new azure.ExponentialRetryPolicyFilter();
var queueSvc = azure
.createQueueService(process.env['AZURE_STORAGE_ACCOUNT_2'], process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(retryOperations);
queueSvc.messageEncoder = new azure.QueueMessageEncoder.TextBase64QueueMessageEncoder();
var message = quality + "/" + commit;
return new Promise(function (c, e) { return queueSvc.createMessage('sign-darwin', message, function (err) { return err ? e(err) : c(); }); });
}
function isBuildSigned(quality, commit) {
var client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
var collection = 'dbs/builds/colls/' + quality;
var updateQuery = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
return new Promise(function (c, e) {
client.queryDocuments(collection, updateQuery).toArray(function (err, results) {
if (err) {
return e(err);
}
if (results.length !== 1) {
return c(false);
}
var release = results[0];
var assets = release.assets;
var isSigned = assets.some(function (a) { return a.platform === 'darwin' && a.type === 'archive'; });
c(isSigned);
});
});
}
// async function waitForSignedBuild(quality: string, commit: string): Promise<void> {
// let retries = 0;
// while (retries < 180) {
// if (await isBuildSigned(quality, commit)) {
// return;
// }
// await new Promise<void>(c => setTimeout(c, 10000));
// retries++;
// }
// throw new Error('Timed out waiting for signed build');
// }
function main(quality) {
return __awaiter(this, void 0, void 0, function () {
var commit;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
commit = child_process_1.execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
console.log("Queueing signing request for '" + quality + "/" + commit + "'...");
return [4 /*yield*/, queueSigningRequest(quality, commit)];
case 1:
_a.sent();
return [2 /*return*/];
}
});
});
}
main(process.argv[2]).catch(function (err) {
console.error(err);
process.exit(1);
});

View File

@@ -1,63 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps
name: build
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
name: test
- script: |
set -e
# archive the unsigned build
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin-unsigned.zip * && popd
# publish the unsigned build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/tfs/common/publish.js \
"$(VSCODE_QUALITY)" \
darwin \
archive-unsigned \
"VSCode-darwin-$(VSCODE_QUALITY)-unsigned.zip" \
$VERSION \
false \
../VSCode-darwin-unsigned.zip
# publish hockeyapp symbols
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_MACOS)"
# enqueue the unsigned build
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
node build/tfs/darwin/enqueue.js "$(VSCODE_QUALITY)"
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
npm run gulp -- upload-vscode-configuration

View File

@@ -0,0 +1,26 @@
#!/bin/sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
step "Install build dependencies" \
npm i)
REPO=`pwd`
ZIP=$REPO/../VSCode-darwin-selfsigned.zip
UNSIGNEDZIP=$REPO/../VSCode-darwin-unsigned.zip
BUILD=$REPO/../VSCode-darwin
PACKAGEJSON=`ls $BUILD/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
rm -rf $UNSIGNEDZIP
(cd $BUILD && \
step "Create unsigned archive" \
zip -r -X -y $UNSIGNEDZIP *)
step "Upload unsigned archive" \
node build/tfs/common/publish.js $VSCODE_QUALITY darwin archive-unsigned VSCode-darwin-$VSCODE_QUALITY-unsigned.zip $VERSION false $UNSIGNEDZIP
step "Sign build" \
node build/tfs/common/enqueue.js $VSCODE_QUALITY

View File

@@ -1,2 +1 @@
pat
*.js
pat

Some files were not shown because too many files have changed in this diff