Compare commits

..

3 Commits

Author        SHA1        Message                                                        Date
Karl Burtram  088cac030f  Merge branch 'master' into release/0.30                        2018-06-18 13:07:21 -07:00
Karl Burtram  bd3c293f94  Merge branch 'master' into release/0.30                        2018-06-18 10:28:15 -07:00
Karl Burtram  17db0b7d09  Revert SlickGrid to 2.3.16 to workaround Edit Data TAB issue  2018-06-12 16:37:42 -07:00
14311 changed files with 484552 additions and 624200 deletions

@@ -1,4 +1,4 @@
# EditorConfig is awesome: https://EditorConfig.org
# EditorConfig is awesome: http://EditorConfig.org
# top-most EditorConfig file
root = true
@@ -6,6 +6,7 @@ root = true
# Tab indentation
[*]
indent_style = tab
indent_size = 4
trim_trailing_whitespace = true
# The indent size used in the `package.json` file cannot be changed

19
.eslintrc Normal file
@@ -0,0 +1,19 @@
{
"env": {
"node": true,
"es6": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}

@@ -1,20 +0,0 @@
{
"root": true,
"env": {
"node": true,
"es6": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}

3
.gitattributes vendored
@@ -6,5 +6,4 @@ ThirdPartyNotices.txt eol=crlf
*.bat eol=crlf
*.cmd eol=crlf
*.ps1 eol=lf
*.sh eol=lf
*.rtf -text
*.sh eol=lf

@@ -1,18 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: Bug
assignees: ''
---
<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Also please test using the latest insiders build to make sure your issue has not already been fixed. -->
<!-- Use Help > Report Issue to prefill these. -->
- Azure Data Studio Version:
Steps to Reproduce:
1.

@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: Enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution or feature you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

@@ -1,49 +0,0 @@
{
perform: false,
alwaysRequireAssignee: false,
labelsRequiringAssignee: [],
autoAssignees: {
accessibility: [],
acquisition: [],
agent: [],
azure: [],
backup: [],
bcdr: [],
'chart viewer': [],
connection: [],
dacfx: [],
dashboard: [],
'data explorer': [],
documentation: [],
'edit data': [],
export: [],
extensibility: [],
extensionManager: [],
globalization: [],
grid: [],
import: [],
insights: [],
intellisense: [],
localization: [],
'managed instance': [],
notebooks: [],
'object explorer': [],
performance: [],
profiler: [],
'query editor': [],
'query execution': [],
reliability: [],
restore: [],
scripting: [],
'server group': [],
settings: [],
setup: [],
shell: [],
showplan: [],
snippet: [],
sql2019Preview: [],
sqldw: [],
supportability: [],
ux: []
}
}

12
.github/commands.yml vendored
@@ -1,12 +0,0 @@
{
perform: false,
commands: [
{
type: 'label',
name: 'duplicate',
allowTriggerByBot: true,
action: 'close',
comment: "Thanks for creating this issue! We figured it's covering the same as another one we already have. Thus, we closed this one as a duplicate. You can search for existing issues [here](https://aka.ms/vscodeissuesearch). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
}
]
}

5
.github/copycat.yml vendored
@@ -1,5 +0,0 @@
{
perform: true,
target_owner: 'anthonydresser',
target_repo: 'testissues'
}

6
.github/locker.yml vendored
@@ -1,6 +0,0 @@
{
daysAfterClose: 45,
daysSinceLastUpdate: 3,
ignoredLabels: [],
perform: true
}

@@ -1,6 +0,0 @@
{
daysUntilClose: 7,
needsMoreInfoLabel: 'needs more info',
perform: true,
closeComment: "This issue has been closed automatically because it needs more information and has not had recent activity in the last 7 days. If you have more info to help resolve the issue, leave a comment"
}

@@ -1,6 +0,0 @@
{
newReleaseLabel: 'new-release',
newReleaseColor: '006b75',
daysAfterRelease: 5,
perform: true
}

@@ -1,5 +0,0 @@
{
perform: true,
whenCreatedByTeam: true,
comment: "Thanks for submitting this issue. Please also check if it is already covered by an existing one, like:\n${potentialDuplicates}"
}

12
.gitignore vendored
@@ -1,25 +1,17 @@
.DS_Store
.cache
npm-debug.log
Thumbs.db
node_modules/
.build/
extensions/**/dist/
out/
out-build/
out-editor/
out-editor-src/
out-editor-build/
out-editor-esm/
out-editor-min/
out-monaco-editor-core/
out-vscode/
out-vscode-min/
out-vscode-reh/
out-vscode-reh-min/
out-vscode-reh-pkg/
**/node_modules
build/node_modules
coverage/
test_data/
test-results/
yarn-error.log
yarn-error.log

2
.nvmrc
@@ -1 +1 @@
10
8.9.2

58
.travis.yml Normal file
@@ -0,0 +1,58 @@
sudo: false
language: cpp
os:
- linux
- osx
cache:
directories:
- $HOME/.cache/yarn
notifications:
email: false
webhooks:
- http://vscode-probot.westus.cloudapp.azure.com:3450/travis/notifications
- http://vscode-test-probot.westus.cloudapp.azure.com:3450/travis/notifications
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-4.9
- g++-4.9
- gcc-4.9-multilib
- g++-4.9-multilib
- zip
- libgtk2.0-0
- libx11-dev
- libxkbfile-dev
- libsecret-1-dev
before_install:
- git submodule update --init --recursive
- nvm install 8.9.1
- nvm use 8.9.1
- npm i -g yarn
# - npm config set python `which python`
- if [ $TRAVIS_OS_NAME == "linux" ]; then
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0;
sh -e /etc/init.d/xvfb start;
sleep 3;
fi
# Make npm logs less verbose
# - npm config set depth 0
# - npm config set loglevel warn
install:
- yarn
script:
- node_modules/.bin/gulp electron --silent
- node_modules/.bin/gulp compile --silent --max_old_space_size=4096
- node_modules/.bin/gulp optimize-vscode --silent --max_old_space_size=4096
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi
after_success:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then node_modules/.bin/coveralls < .build/coverage/lcov.info; fi

@@ -1,23 +0,0 @@
{
"type": "array",
"items": {
"type": "object",
"required": [
"name",
"licenseDetail"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
"licenseDetail": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
}
}
}
}

@@ -1,142 +0,0 @@
{
"type": "object",
"properties": {
"registrations": {
"type": "array",
"items": {
"type": "object",
"properties": {
"component": {
"oneOf": [
{
"type": "object",
"required": [
"type",
"git"
],
"properties": {
"type": {
"type": "string",
"enum": [
"git"
]
},
"git": {
"type": "object",
"required": [
"name",
"repositoryUrl",
"commitHash"
],
"properties": {
"name": {
"type": "string"
},
"repositoryUrl": {
"type": "string"
},
"commitHash": {
"type": "string"
}
}
}
}
},
{
"type": "object",
"required": [
"type",
"npm"
],
"properties": {
"type": {
"type": "string",
"enum": [
"npm"
]
},
"npm": {
"type": "object",
"required": [
"name",
"version"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
}
}
}
}
},
{
"type": "object",
"required": [
"type",
"other"
],
"properties": {
"type": {
"type": "string",
"enum": [
"other"
]
},
"other": {
"type": "object",
"required": [
"name",
"downloadUrl",
"version"
],
"properties": {
"name": {
"type": "string"
},
"downloadUrl": {
"type": "string"
},
"version": {
"type": "string"
}
}
}
}
}
]
},
"repositoryUrl": {
"type": "string",
"description": "The git url of the component"
},
"version": {
"type": "string",
"description": "The version of the component"
},
"license": {
"type": "string",
"description": "The name of the license"
},
"developmentDependency": {
"type": "boolean",
"description": "This component is inlined in the vscode repo and **is not shipped**."
},
"isOnlyProductionDependency": {
"type": "boolean",
"description": "This component is shipped and **is not inlined in the vscode repo**."
},
"licenseDetail": {
"type": "array",
"items": {
"type": "string"
},
"description": "The license text"
}
}
}
}
}
}

@@ -2,7 +2,7 @@
// See https://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
"ms-vscode.vscode-typescript-tslint-plugin",
"eg2.tslint",
"dbaeumer.vscode-eslint",
"msjsdiag.debugger-for-chrome"
]

150
.vscode/launch.json vendored
@@ -9,12 +9,14 @@
"stopOnEntry": true,
"args": [
"hygiene"
]
],
"cwd": "${workspaceFolder}"
},
{
"type": "node",
"request": "attach",
"name": "Attach to Extension Host",
"protocol": "inspector",
"port": 5870,
"restart": true,
"outFiles": [
@@ -22,15 +24,19 @@
]
},
{
"type": "chrome",
"type": "node",
"request": "attach",
"name": "Attach to Shared Process",
"port": 9222,
"urlFilter": "*"
"protocol": "inspector",
"port": 5871,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"type": "node",
"request": "attach",
"protocol": "inspector",
"name": "Attach to Search Process",
"port": 5876,
"outFiles": [
@@ -41,6 +47,7 @@
"type": "node",
"request": "attach",
"name": "Attach to CLI Process",
"protocol": "inspector",
"port": 5874,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -50,6 +57,7 @@
"type": "node",
"request": "attach",
"name": "Attach to Main Process",
"protocol": "inspector",
"port": 5875,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -58,52 +66,13 @@
{
"type": "chrome",
"request": "attach",
"name": "Attach to azuredatastudio",
"name": "Attach to sqlops",
"port": 9222
},
{
"type": "chrome",
"request": "launch",
"name": "Launch azuredatastudio",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
"timeout": 20000
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 20000
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 20000
},
"env": {
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
},
"breakOnLoad": false,
"urlFilter": "*workbench.html*",
"runtimeArgs": [
"--inspect=5875",
"--no-cached-data"
],
"webRoot": "${workspaceFolder}"
},
{
"type": "node",
"request": "launch",
"name": "Launch ADS (Main Process)",
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"runtimeArgs": [
"--no-cached-data"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"type": "chrome",
"request": "launch",
"name": "Launch azuredatastudio with new notebook command",
"name": "Launch sqlops",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
},
@@ -115,107 +84,56 @@
},
"urlFilter": "*index.html*",
"runtimeArgs": [
"--inspect=5875",
"--command=notebook.command.new"
"--inspect=5875"
],
"skipFiles": [
"**/winjs*.js"
],
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
{
"name": "Launch Built-in Extension",
"type": "extensionHost",
"request": "launch",
"runtimeExecutable": "${execPath}",
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
]
"timeout": 15000
},
{
"type": "node",
"request": "launch",
"name": "Launch Smoke Test",
"program": "${workspaceFolder}/test/smoke/test/index.js",
"cwd": "${workspaceFolder}/test/smoke",
"env": {
"BUILD_ARTIFACTSTAGINGDIRECTORY": "${workspaceFolder}"
}
},
{
"type": "node",
"request": "launch",
"name": "Run Unit Tests",
"program": "${workspaceFolder}/test/electron/index.js",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
"name": "Unit Tests",
"protocol": "inspector",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"runtimeExecutable": "${workspaceFolder}/.build/electron/SQL Operations Studio.app/Contents/MacOS/Electron",
"windows": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
"runtimeExecutable": "${workspaceFolder}/.build/electron/sqlops.exe"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
"runtimeExecutable": "${workspaceFolder}/.build/electron/sqlops"
},
"outputCapture": "std",
"stopOnEntry": false,
"args": [
"--remote-debugging-port=9222"
"--delay",
"--timeout",
"2000"
],
"cwd": "${workspaceFolder}",
"env": {
"ELECTRON_RUN_AS_NODE": "true"
},
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"type": "chrome",
"request": "launch",
"name": "Run Extension Unit Tests",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
},
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
}
],
"compounds": [
{
"name": "Debug Unit Tests",
"name": "Debug sqlops Main and Renderer",
"configurations": [
"Attach to azuredatastudio",
"Run Unit Tests"
]
},
{
"name": "Debug Extension Unit Tests",
"configurations": [
"Attach to Extension Host",
"Run Extension Unit Tests"
]
},
{
"name": "Debug azuredatastudio Main and Renderer",
"configurations": [
"Launch azuredatastudio",
"Launch sqlops",
"Attach to Main Process"
]
},
{
"name": "Search and Renderer processes",
"configurations": [
"Launch azuredatastudio",
"Launch sqlops",
"Attach to Search Process"
]
},
{
"name": "Renderer and Extension Host processes",
"configurations": [
"Launch azuredatastudio",
"Attach to Extension Host"
]
}
]
}
}

30
.vscode/settings.json vendored
@@ -11,7 +11,7 @@
}
},
"files.associations": {
"cglicenses.json": "jsonc"
"OSSREADME.json": "jsonc"
},
"search.exclude": {
"**/node_modules": true,
@@ -22,9 +22,9 @@
"out-vscode/**": true,
"i18n/**": true,
"extensions/**/out/**": true,
"test/smoke/out/**": true,
"src/vs/base/test/node/uri.test.data.txt": true
"test/smoke/out/**": true
},
"tslint.enable": true,
"lcov.path": [
"./.build/coverage/lcov.info",
"./.build/coverage-single/lcov.info"
@@ -38,25 +38,5 @@
}
}
],
"typescript.tsdk": "node_modules/typescript/lib",
"npm.exclude": "**/extensions/**",
"git.ignoreLimitWarning": true,
"emmet.excludeLanguages": [],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.quoteStyle": "single",
"json.schemas": [
{
"fileMatch": [
"cgmanifest.json"
],
"url": "./.vscode/cgmanifest.schema.json"
},
{
"fileMatch": [
"cglicenses.json"
],
"url": "./.vscode/cglicenses.schema.json"
}
],
"git.ignoreLimitWarning": true
}
"typescript.tsdk": "node_modules/typescript/lib"
}

@@ -1,40 +0,0 @@
{
// Each snippet is defined under a snippet name and has a scope, prefix, body and
// description. The scope defines in which languages the snippet is applicable. The prefix is what is
// used to trigger the snippet and the body will be expanded and inserted. Possible variables are:
// $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
// Placeholders with the same ids are connected.
// Example:
"MSFT Copyright Header": {
"scope": "javascript,typescript,css",
"prefix": [
"header",
"stub",
"copyright"
],
"body": [
"/*---------------------------------------------------------------------------------------------",
" * Copyright (c) Microsoft Corporation. All rights reserved.",
" * Licensed under the Source EULA. See License.txt in the project root for license information.",
" *--------------------------------------------------------------------------------------------*/",
"",
"$0"
],
"description": "Insert Copyright Statement"
},
"TS -> Inject Service": {
"scope": "typescript",
"description": "Constructor Injection Pattern",
"prefix": "@inject",
"body": "@$1 private readonly _$2: ${1},$0"
},
"TS -> Event & Emitter": {
"scope": "typescript",
"prefix": "emitter",
"description": "Add emitter and event properties",
"body": [
"private readonly _onDid$1 = new Emitter<$2>();",
"readonly onDid$1: Event<$2> = this._onDid$1.event;"
],
}
}

16
.vscode/tasks.json vendored
@@ -28,20 +28,6 @@
}
}
},
{
"type": "npm",
"script": "strict-initialization-watch",
"label": "TS - Strict Initialization",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-strict-initialization",
"applyTo": "allDocuments"
}
},
{
"type": "gulp",
"task": "tslint",
@@ -83,4 +69,4 @@
"problemMatcher": []
}
]
}
}

@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "3.1.8"
target "1.7.12"
runtime "electron"

@@ -1,214 +1,5 @@
# Change Log
## Version 1.7.0
* Release date: May 8, 2019
* Release status: General Availability
## What's new in this version
* Announcing Schema Compare *Preview* extension
* Tasks Panel UX improvement
* Announcing new Welcome page
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/31?closed=1).
## Contributions and "thank you"
We would like to thank all our users who raised issues.
## Version 1.6.0
* Release date: April 18, 2019
* Release status: General Availability
## What's new in this version
* Align with latest VS Code editor platform (currently 1.33.1)
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/26?closed=1).
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* yamatoya for `fix the format (#4899)`
## Version 1.5.1
* Release date: March 18, 2019
* Release status: General Availability
## What's new in this version
* Announcing T-SQL Notebooks
* Announcing PostgreSQL extension
* Announcing SQL Server Dacpac extension
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/25?closed=1).
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* GeoffYoung for `Fix sqlDropColumn description #4422`
## Version 1.4.5
* Release date: February 13, 2019
* Release status: General Availability
## What's new in this version
* Added **Admin pack for SQL Server** extension pack to make it easier to install SQL Server admin-related extensions. This includes:
* [SQL Server Agent](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-agent-extension?view=sql-server-2017)
* [SQL Server Profiler](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-profiler-extension?view=sql-server-2017)
* [SQL Server Import](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-import-extension?view=sql-server-2017)
* Added filtering extended event support in Profiler extension
* Added Save as XML feature that can save T-SQL results as XML
* Added Data-Tier Application Wizard improvements
* Added Generate script button
* Added view to give warnings of possible data loss during deployment
* Updates to the [SQL Server 2019 Preview extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
* Results streaming enabled by default for long running queries
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/23?closed=1).
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
* sadedil for `Missing feature request: Save as XML #3729`
* gbritton1 for `Removed reference to object explorer #3463`
## Version 1.3.8
* Release date: January 9, 2019
* Release status: General Availability
## What's new in this version
* #13 Feature Request: Azure Active Directory Authentication
* #1040 Stream initial query results as they become available
* #3298 Сan't add an azure account.
* #2387 Support Per-User Installer
* SQL Server Import updates for DACPAC\BACPAC
* SQL Server Profiler UI and UX improvements
* Updates to [SQL Server 2019 extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
* **sp_executesql to SQL** and **New Database** extensions
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
* oltruong for `typo fix #3025'`
* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
* Thomas-S-B for `Simplified code #750`
## Version 1.2.4
* Release date: November 6, 2018
* Release status: General Availability
## What's new in this version
* Update to the SQL Server 2019 Preview extension
* Introducing Paste the Plan extension
* Introducing High Color queries extension, including SSMS editor theme
* Fixes in SQL Server Agent, Profiler, and Import extensions
* Fix .Net Core Socket KeepAlive issue causing dropped inactive connections on macOS
* Upgrade SQL Tools Service to .Net Core 2.2 Preview 3 (for eventual AAD support)
* Fix customer reported GitHub issues
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* rdaniels6813 for `Add query plan theme support #3031`
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of � #3009`
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
## Version 1.1.3
* Release date: October 18, 2018
* Release status: General Availability
## What's new in this version
* Introducing the Azure Resource Explorer to browse Azure SQL Databases
* Improve Object Explorer and Query Editor connectivity robustness
* SQL Server 2019 and SQL Agent extension improvements
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* philoushka for `center the icon #2760`
* anthonypants for `Typo #2775`
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
* kstolte for `Fixing a reference to SQL Ops Studio #2788`
## Version 1.0.0
* Release date: September 24, 2018
* Release status: General Availability
## What's new in this version
* Announcing the SQL Server 2019 Preview extension.
* Support for SQL Server 2019 preview features including big data cluster support.
* Azure Data Studio Notebooks
* The Azure Resource Explorer viewlets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported.
* SQL Server Polybase Create External Table Wizard
* Query Results Grid performance and UX improvements for large number of result sets.
* Visual Studio Code source code refresh from 1.23 to 1.26.1 with Grid Layout and Improved Settings Editor (preview).
* Accessibility improvements for screen reader, keyboard navigation and high-contrast.
* Added Connection name option to provide an alternative display name in the Servers viewlet.
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* AlexFsmn `Feature: Ability to add connection name #2332`
* AlexFsmn `Disabled connection name input when connecting to a server. #2566`
## Version 0.33.7
* Release date: August 30, 2018
* Release status: Public Preview
## What's new in this version
* Announcing the SQL Server Import Extension
* SQL Server Profiler Session management
* SQL Server Agent improvements
* New community extension: First Responder Kit
* Quality of Life improvements: Connection strings
* Fix many customer reported GitHub issues
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* SebastianPfliegel `Added more saveAsCsv options #2099`
* ianychoi `Fixes a typo: Mimunum -> Minimum #1994`
* AlexFsmn `Fixed bug where proper file extension wasn't appended to filename. #2151`
* AlexFsmn `Added functionality for adding any file to import wizard #2329`
* AlexFsmn `Fixed background issue when copying a chart to clipboard #2215`
* AlexFsmn `Fixed problem where vertical charts didn't display labels correctly. #2263`
* AlexFsmn `Fixed Initial values for charts to match visuals #2266`
* AlexFsmn `Renamed chart option labels #2264`
* AlexFsmn `Added feature for opening file after exporting to CSV/XLS/JSON & query files #2216`
* AlexFsmm `Get Connection String should copy to clipboard #2175`
## Version 0.31.4
* Release date: July 19, 2018
* Release status: Public Preview
## What's new in this version
* SQL Server Agent for Azure Data Studio extension improvements
* Added view of Alerts, Operators, and Proxies and icons on left pane
* Added dialogs for New Job, New Job Step, New Alert, and New Operator
* Added Delete Job, Delete Alert, and Delete Operator (right-click)
* Added Previous Runs visualization
* Added Filters for each column name
* SQL Server Profiler for Azure Data Studio extension improvements
* Added Hotkeys to quickly launch and start/stop Profiler
* Added 5 Default Templates to view Extended Events
* Added Server/Database connection name
* Added support for Azure SQL Database instances
* Added suggestion to exit Profiler when tab is closed when Profiler is still running
* Release of Combine Scripts Extension
* Wizard and Dialog Extensibility
* Fix GitHub Issues
## Version 0.30.6
* Release date: June 20, 2018
* Release status: Public Preview
## What's new in this version
* **SQL Server Profiler for Azure Data Studio *Preview*** extension initial release
* The new **SQL Data Warehouse** extension includes rich customizable dashboard widgets surfacing insights to your data warehouse. This unlocks key scenarios around managing and tuning your data warehouse to ensure it is optimized for consistent performance.
* **Edit Data "Filtering and Sorting"** support
* **SQL Server Agent for Azure Data Studio *Preview*** extension enhancements for Jobs and Job History views
* Improved **Wizard & Dialog UI Builder Framework** extensibility APIs
* Update VS Code Platform source code integrating [March 2018 (1.22)](https://code.visualstudio.com/updates/v1_22) and [April 2018 (1.23)](https://code.visualstudio.com/updates/v1_23) releases
* Fix GitHub Issues
## Version 0.29.3
* Release date: May 7, 2018
* Release status: Public Preview
@@ -218,7 +9,7 @@ The May release is focused on stabilization and bug fixes leading up to the Buil
* Announcing **Redgate SQL Search** extension available in Extension Manager
* Community Localization available for 10 languages: **German, Spanish, French, Italian, Japanese, Korean, Portuguese, Russian, Simplified Chinese and Traditional Chinese!**
* Reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement)
* **GDPR-compliant** build has reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement)
* Extension Manager has improved Marketplace experience to easily discover community extensions
* SQL Agent extension Jobs and Job History view improvement
* Updates for **whoisactive** and **Server Reports** extensions
@@ -244,8 +35,8 @@ The April Public Preview release contains some of the following highlights.
* Release status: Public Preview
## What's new in this version
The March Public Preview release enables some key aspects of the Azure Data Studio
extensibility story. Here are some highlights in this release.
The March Public Preview release enables some key aspects of the SQL Operations
Studio extensibility story. Here are some highlights in this release.
* Enhance the Manage Dashboard extensibility model to support tabbed Insights and Configuration panes
* Dashboard Insights extensions for `sp_whoisactive` from [whoisactive.com](http://whoisactive.com)

@@ -1,13 +1,13 @@
## Contributing Issues
### Before Submitting an Issue
First, please do a search in [open issues](https://github.com/Microsoft/azuredatastudio/issues) to see if the issue or feature request has already been filed. Use this [query](https://github.com/Microsoft/azuredatastudio/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc) to search for the most popular feature requests.
First, please do a search in [open issues](https://github.com/Microsoft/sqlopsstudio/issues) to see if the issue or feature request has already been filed. Use this [query](https://github.com/Microsoft/sqlopsstudio/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc) to search for the most popular feature requests.
If you find your issue already exists, make relevant comments and add your [reaction](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments). Use a reaction in place of a "+1" comment.
:+1: - upvote
👍 - upvote
:-1: - downvote
👎 - downvote
If you cannot find an existing issue that describes your bug or feature, submit an issue using the guidelines below.
@@ -18,33 +18,29 @@ File a single issue per problem and feature request.
* Do not enumerate multiple bugs or feature requests in the same issue.
* Do not add your issue as a comment to an existing issue unless it's for the identical input. Many issues look similar, but have different causes.
The more information you can provide, the more likely someone will be successful at reproducing the issue and finding a fix.
The more information you can provide, the more likely someone will be successful reproducing the issue and finding a fix.
The built-in tool for reporting an issue, which you can access by using `Report Issue` in Azure Data Studio's Help menu, can help streamline this process by automatically providing the version of Azure Data Studio, all your installed extensions, and your system info.
Please include the following with each issue.
Please include the following with each issue.
* Version of SQL Ops Studio
* Version of Azure Data Studio (formerly SQL Operations Studio)
> **Tip:** You can easily create an issue using `Report Issues` from SQL Operations Studio Help menu.
* Your operating system
* Reproducible steps (1... 2... 3...) and what you expected versus what you actually saw.
* Images, animations, or a link to a video.
* A code snippet that demonstrates the issue or a link to a code repository we can easily pull down onto our machine to recreate the issue.
> **Tip:** You can easily create an issue using `Report Issues` from Azure Data Studio Help menu.
* Reproducible steps (1... 2... 3...) and what you expected versus what you actually saw.
* Images, animations, or a link to a video.
* A code snippet that demonstrates the issue or a link to a code repository we can easily pull down onto our machine to recreate the issue.
> **Note:** Because we need to copy and paste the code snippet, including a code snippet as a media file (i.e. .gif) is not sufficient.
> **Note:** Because we need to copy and paste the code snippet, including a code snippet as a media file (i.e. .gif) is not sufficient.
* Errors in the Dev Tools Console (Help | Toggle Developer Tools)
Please remember to do the following:
* Search the issue repository to see if there exists a duplicate.
* Simplify your scripts around the issue so we can better isolate the problem.
* Search the issue repository to see if there exists a duplicate.
* Simplify your scripts around the issue so we can better isolate the problem.
Don't feel bad if we can't reproduce the issue and ask for more information!
## Contributing Fixes
If you are interested in fixing issues and contributing directly to the code base,
please see the document [How to Contribute](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute).
please see the document [How to Contribute](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute).

@@ -1,6 +1,6 @@
MICROSOFT SOFTWARE LICENSE TERMS
MICROSOFT AZURE DATA STUDIO
MICROSOFT SQL OPERATIONS STUDIO
Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us

1196
OSSREADME.json Normal file

File diff suppressed because it is too large

@@ -1,32 +1,25 @@
# Azure Data Studio
# SQL Operations Studio
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status](https://dev.azure.com/ms/azuredatastudio/_apis/build/status/Microsoft.azuredatastudio)](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=4)
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
SQL Operations Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
**Download the latest Azure Data Studio release**
**Download SQL Operations Studio May Public Preview**
Platform | Link
-- | --
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2091882
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2091491
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2091490
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2091489
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2091488
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2091487
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2092022
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=873386
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=873387
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=873388
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=873389
Linux RPM | https://go.microsoft.com/fwlink/?linkid=873390
Linux DEB | https://go.microsoft.com/fwlink/?linkid=873391
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
Go to our [download page](https://aka.ms/sqlopsstudio) for more specific instructions.
Try out the latest insiders build from `master`:
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
- [Windows System Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider)
- [Windows ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider)
- [macOS ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider)
- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)
Try out the latest insiders build from `master` at https://github.com/Microsoft/sqlopsstudio/releases.
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
See the [change log](https://github.com/Microsoft/sqlopsstudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
**Feature Highlights**
@@ -41,91 +34,60 @@ See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CH
- Task History window to view current task execution status, completion results with error messages and task T-SQL scripting
- Scripting support to generate CREATE, SELECT, ALTER and DROP statements for database objects
- Workspaces with full Git integration and Find In Files support to managing T-SQL script libraries
- Modern light-weight shell with theming, user settings, full-screen support, integrated terminal and numerous other features
- Modern light-weight shell with theming, user settings, full screen support, integrated terminal and numerous other features
Here are some of these features in action.
Here's some of these features in action.
<img src='https://github.com/Microsoft/azuredatastudio/blob/master/docs/overview_screen.jpg' width='800px'>
<img src='https://github.com/Microsoft/sqlopsstudio/blob/master/docs/overview_screen.jpg' width='800px'>
## Contributing
If you are interested in fixing issues and contributing directly to the code base,
please see the document [How to Contribute](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute), which covers the following:
please see the document [How to Contribute](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute), which covers the following:
* [How to build and run from source](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#Build-and-Run-From-Source)
* [The development workflow, including debugging and running tests](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#development-workflow)
* [Submitting pull requests](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests)
* [How to build and run from source](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute#Build-and-Run-From-Source)
* [The development workflow, including debugging and running tests](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute#development-workflow)
* [Submitting pull requests](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute#pull-requests)
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## Localization
Azure Data Studio localization is now open for community contributions. You can contribute to localization for both software and docs. https://aka.ms/SQLOpsStudioLoc
SQL Operations Studio localization is now open for community contributions. You can contribute to localization for both software and docs. https://aka.ms/SQLOpsStudioLoc
Localization is now opened for 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). Help us make Azure Data Studio available in your language!
Localization is now opened for 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). Help us make SQL Operations Studio available in your language!
## Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
## Contributions and "Thank You"
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* yamatoya for `fix the format (#4899)`
* GeoffYoung for `Fix sqlDropColumn description #4422`
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
* sadedil for `Missing feature request: Save as XML #3729`
* gbritton1 for `Removed reference to object explorer #3463`
* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
* oltruong for `typo fix #3025'`
* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
* Thomas-S-B for `Simplified code #750`
* rdaniels6813 for `Add query plan theme support #3031`
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of � #3009`
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
* philoushka for `center the icon #2760`
* anthonypants for `Typo #2775`
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
* kstolte for `Fixing a reference to SQL Ops Studio #2788`
* AlexFsmn `Feature: Ability to add connection name #2332`
* AlexFsmn `Disabled connection name input when connecting to a server. #2566`
* SebastianPfliegel `Added more saveAsCsv options #2099`
* ianychoi `Fixes a typo: Mimunum -> Minimum #1994`
* AlexFsmn `Fixed bug where proper file extension wasn't appended to the filename. #2151`
* AlexFsmn `Added functionality for adding any file to import wizard #2329`
* AlexFsmn `Fixed background issue when copying a chart to clipboard #2215`
* AlexFsmn `Fixed problem where vertical charts didn't display labels correctly. #2263`
* AlexFsmn `Fixed Initial values for charts to match visuals #2266`
* AlexFsmn `Renamed chart option labels #2264`
* AlexFsmn `Added feature for the opening file after exporting to CSV/XLS/JSON & query files #2216`
* AlexFsmm `Get Connection String should copy to clipboard #2175`
* lanceklinger `Fix for double-clicking column handle in results table #1504`
* westerncj for `Removed duplicate contribution from README.md (#753)`
* ntovas for `Fix for duplicate extensions shown in "Save File" dialog. (#779)`
* SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for the fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* mikaoelitiana for fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)`
* alextercete for `Fix "No extension gallery service configured" error (#427)`
* mwiedemeyer for `Fix #58: Default sort order for DB size widget (#111)`
* AlexTroshkin for `Show disconnect in context menu only when connectionProfile connected (#150)`
* AlexTroshkin for `Fix #138: Invalid syntax color highlighting (identity not highlighting) (#140))`
* stebet for `Fix #153: Fixing sql snippets that failed on a DB with a case-sensitive collation. (#152)`
* stebet for `Fix #153: Fixing sql snippets that failed on a DB with case-sensitive collation. (#152)`
* SebastianPfliegel `Remove sqlExtensionHelp (#312)`
* olljanat for `Implemented npm version check (#314)`
* Adam Machanic for helping with the `whoisactive` extension
* All community localization contributors:
* Adam Mechanic for helping with the `whoisactive` extension
* All community localization contributors
* French: Adrien Clerbois, ANAS BELABBES, Antoine Griffard, Arian Papillon, Eric Macarez, Eric Van Thorre, Jérémy LANDON, Matthias GROSPERRIN, Maxime COQUEREL, Olivier Guinart, thierry DEMAN-BARCELÒ, Thomas Potier
* Italian: Aldo Donetti, Alessandro Alpi, Andrea Dottor, Bruni Luca, Gianluca Hotz, Luca Nardi, Luigi Bruno, Marco Dal Pino, Mirco Vanini, Pasquale Ceglie, Riccardo Cappello, Sergio Govoni, Stefano Demiliani
* German: Anna Henke-Gunvaldson, Ben Weissman, David Ullmer, J.M. ., Kai Modo, Konstantin Staschill, Kostja Klein, Lennart Trunk, Markus Ehrenmüller-Jensen, Mascha Kroenlein, Matthias Knoll, Mourad Louha, Thomas Hütter, Wolfgang Straßer
* Spanish: Alberto Poblacion, Andy Gonzalez, Carlos Mendible, Christian Araujo, Daniel D, Eickhel Mendoza, Ernesto Cardenas, Ivan Toledo Ivanovic, Fran Diaz, JESUS GIL, Jorge Serrano Pérez, José Saturnino Pimentel Juárez, Mauricio Hidalgo, Pablo Iglesias, Rikhardo Estrada Rdez, Thierry DEMAN, YOLANDA CUESTA ALTIERI
* Japanese: Fujio Kojima, Kazushi KAMEGAWA, Masayoshi Yamada, Masayuki Ozawa, Seiji Momoto, Takashi Kanai, Takayoshi Tanaka, Yoshihisa Ozaki, 庄垣内治
* Japanese: Fujio Kojima, Kazushi KAMEGAWA, Masayoshi Yamada, Masayuki Ozawa , Seiji Momoto, Takashi Kanai, Takayoshi Tanaka, Yoshihisa Ozaki, 庄垣内治
* Chinese (simplified): DAN YE, Joel Yang, Lynne Dong, RyanYu Zhang, Sheng Jiang, Wei Zhang, Zhiliang Xu
* Chinese (Traditional): Bruce Chen, Chiayi Yen, Kevin Yang, Winnie Lin, 保哥 Will, 謝政廷
* Korean: Do-Kyun Kim, Evelyn Kim, Helen Jung, Hong Jmee, jeongwoo choi, Jun Hyoung Lee, Jungsun Kim정선, Justin Yoo, Kavrith mucha, Kiwoong Youm, MinGyu Ju, MVP_JUNO BEA, Sejun Kim, SOONMAN KWON, sung man ko, Yeongrak Choi, younggun kim, Youngjae Kim, 소영 이
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
And of course, we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/ThirdPartyNotices.txt)
And of course we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/sqlopsstudio/master/ThirdPartyNotices.txt)
## License

@@ -1,4 +1,4 @@
MICROSOFT Azure Data Studio
MICROSOFT SQL OPERATIONS STUDIO
THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
Do Not Translate or Localize
@@ -17,13 +17,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
chokidar: https://github.com/paulmillr/chokidar
comment-json: https://github.com/kaelzhang/node-comment-json
core-js: https://github.com/zloirock/core-js
decompress: https://github.com/kevva/decompress
emmet: https://github.com/emmetio/emmet
error-ex: https://github.com/Qix-/node-error-ex
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
fast-plist: https://github.com/Microsoft/node-fast-plist
figures: https://github.com/sindresorhus/figures
find-remove: https://www.npmjs.com/package/find-remove
fs-extra: https://github.com/jprichardson/node-fs-extra
gc-signals: https://github.com/Microsoft/node-gc-signals
getmac: https://github.com/bevry/getmac
@@ -36,35 +33,28 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
JupyterLab: https://github.com/jupyterlab/jupyterlab
make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment
native-keymap: https://github.com/Microsoft/node-native-keymap
native-watchdog: https://github.com/Microsoft/node-native-watchdog
ng2-charts: https://github.com/valor-software/ng2-charts
node-fetch: https://github.com/bitinn/node-fetch
node-pty: https://github.com/Tyriar/node-pty
nsfw: https://github.com/Axosoft/nsfw
pretty-data: https://github.com/vkiryukhin/pretty-data
primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js
reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS
semver: https://github.com/npm/node-semver
slickgrid: https://github.com/6pac/SlickGrid
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
svg.js: https://github.com/svgdotjs/svg.js
systemjs: https://github.com/systemjs/systemjs
temp-write: https://github.com/sindresorhus/temp-write
underscore: https://github.com/jashkenas/underscore
v8-profiler: https://github.com/node-inspector/v8-profiler
vscode: https://github.com/microsoft/vscode
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
vscode-nls: https://github.com/Microsoft/vscode-nls
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
vscode-textmate: https://github.com/Microsoft/vscode-textmate
winreg: https://github.com/fresc81/node-winreg
@@ -72,9 +62,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
yauzl: https://github.com/thejoshwolfe/yauzl
zone.js: https://www.npmjs.com/package/zone
Microsoft PROSE SDK: https://microsoft.github.io/prose
%% angular NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License
Copyright (c) 2014-2017 Google, Inc. http://angular.io
@@ -300,20 +291,6 @@ THE SOFTWARE.
=========================================
END OF core-js NOTICES AND INFORMATION
%% decompress NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) Kevin Mårtensson <kevinmartensson@gmail.com> (github.com/kevva)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF decompress NOTICES AND INFORMATION
%% emmet NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
@@ -343,6 +320,32 @@ END OF emmet NOTICES AND INFORMATION
=========================================
The MIT License (MIT)
Copyright (c) 2015 JD Ballard
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
=========================================
END OF error-ex NOTICES AND INFORMATION
%% escape-string-regexp NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
@@ -389,20 +392,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
=========================================
END OF fast-plist NOTICES AND INFORMATION
%% figures NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF figures NOTICES AND INFORMATION
%% fs-extra NOTICES AND INFORMATION BEGIN HERE
=========================================
(The MIT License)
@@ -1176,43 +1165,6 @@ That's all there is to it!
=========================================
END OF jschardet NOTICES AND INFORMATION
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
Copyright (c) 2015 Project Jupyter Contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Semver File License
===================
The semver.py file is from https://github.com/podhmo/python-semver
which is licensed under the "MIT" license. See the semver.py file for details.
END OF JupyterLab NOTICES AND INFORMATION
%% make-error NOTICES AND INFORMATION BEGIN HERE
=========================================
ISC © Julien Fontanet
@@ -1344,32 +1296,6 @@ SOFTWARE.
=========================================
END OF ng2-charts NOTICES AND INFORMATION
%% node-fetch NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2016 David Frank
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF node-fetch NOTICES AND INFORMATION
%% node-pty NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
@@ -1444,30 +1370,6 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
=========================================
END OF primeng NOTICES AND INFORMATION
%% process-nextick-args NOTICES AND INFORMATION BEGIN HERE
=========================================
# Copyright (c) 2015 Calvin Metcalf
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.**
=========================================
END OF process-nextick-args NOTICES AND INFORMATION
%% pty.js NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
@@ -1552,66 +1454,6 @@ END OF TERMS AND CONDITIONS
=========================================
END OF reflect-metadata NOTICES AND INFORMATION
%% request NOTICES AND INFORMATION BEGIN HERE
=========================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
You must give any other recipients of the Work or Derivative Works a copy of this License; and
You must cause any modified files to carry prominent notices stating that You changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
=========================================
END OF request NOTICES AND INFORMATION
%% rxjs NOTICES AND INFORMATION BEGIN HERE
=========================================
Apache License
@@ -1937,20 +1779,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
=========================================
END OF systemjs NOTICES AND INFORMATION
%% temp-write NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF temp-write NOTICES AND INFORMATION
%% underscore NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
@@ -2053,50 +1881,6 @@ OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWA
=========================================
END OF vscode-debugprotocol NOTICES AND INFORMATION
%% vscode-languageclient NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF vscode-languageclient NOTICES AND INFORMATION
%% vscode-nls NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) Microsoft Corporation
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF vscode-nls NOTICES AND INFORMATION
%% vscode-ripgrep NOTICES AND INFORMATION BEGIN HERE
=========================================
vscode-ripgrep
@@ -2255,188 +2039,4 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
=========================================
END OF zone.js NOTICES AND INFORMATION
%% Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION BEGIN HERE
=========================================
NOTICES AND INFORMATION
Do Not Translate or Localize
This software incorporates material from third parties. Microsoft makes certain
open source code available at http://3rdpartysource.microsoft.com, or you may
send a check or money order for US $5.00, including the product name, the open
source component name, and version number, to:
Source Code Compliance Team
Microsoft Corporation
One Microsoft Way
Redmond, WA 98052
USA
Notwithstanding any other terms, you may reverse engineer this software to the
extent required to debug changes to any libraries licensed under the GNU Lesser
General Public License.
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
===================================CoreFx (BEGIN)
The MIT License (MIT)
Copyright (c) .NET Foundation and Contributors
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===================================CoreFx (END)
===================================CoreFxLab (BEGIN)
The MIT License (MIT)
Copyright (c) Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===================================CoreFxLab (END)
===================================Reactive Extensions (BEGIN)
Copyright (c) .NET Foundation and Contributors
All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License"); you
may not use this file except in compliance with the License. You may
obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing permissions
and limitations under the License.
List of contributors to the Rx libraries
Rx and Ix.NET:
Wes Dyer
Jeffrey van Gogh
Matthew Podwysocki
Bart De Smet
Danny van Velzen
Erik Meijer
Brian Beckman
Aaron Lahman
Georgi Chkodrov
Arthur Watson
Gert Drapers
Mark Shields
Eric Rozell
Rx.js and Ix.js:
Matthew Podwysocki
Jeffrey van Gogh
Bart De Smet
Brian Beckman
Wes Dyer
Erik Meijer
Tx:
Georgi Chkodrov
Bart De Smet
Aaron Lahman
Erik Meijer
Brian Grunkemeyer
Beysim Sezgin
Tiho Tarnavski
Collin Meek
Sajay Anthony
Karen Albrecht
John Allen
Zach Kramer
Rx++ and Ix++:
Aaron Lahman
===================================Reactive Extensions (END)
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
=========================================
END OF Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION
%% Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION BEGIN HERE
=========================================
NOTICES AND INFORMATION
Do Not Translate or Localize
This software incorporates material from third parties. Microsoft makes certain
open source code available at http://3rdpartysource.microsoft.com, or you may
send a check or money order for US $5.00, including the product name, the open
source component name, and version number, to:
Source Code Compliance Team
Microsoft Corporation
One Microsoft Way
Redmond, WA 98052
USA
Notwithstanding any other terms, you may reverse engineer this software to the
extent required to debug changes to any libraries licensed under the GNU Lesser
General Public License.
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
The MIT License (MIT)
Copyright (c) 2014 ExcelDataReader
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===================================ExcelDataReader (END)
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
=========================================
END OF Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION

19
appveyor.yml Normal file
View File

@@ -0,0 +1,19 @@
environment:
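# ELECTRON_RUN_AS_NODE runs the Electron binary as plain Node for build scripts; VSCODE_BUILD_VERBOSE enables verbose build output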
ELECTRON_RUN_AS_NODE: 1
VSCODE_BUILD_VERBOSE: true
cache:
- '%LOCALAPPDATA%\Yarn\cache'
install:
- ps: Install-Product node 8.9.1 x64
build_script:
- yarn
- .\node_modules\.bin\gulp electron
- npm run compile
test_script:
- node --version
- .\scripts\test.bat
- .\scripts\test-integration.bat

View File

@@ -1,66 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '10.15.1'
displayName: 'Install Node.js'
- script: |
npm i -g yarn
displayName: 'preinstall'
- script: |
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
# sh -e /etc/init.d/xvfb start
# sleep 3
displayName: 'Linux preinstall'
condition: eq(variables['Agent.OS'], 'Linux')
- script: |
yarn
displayName: 'Install'
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: 'Run TSLint'
- script: |
yarn strict-null-check
displayName: 'Run Strict Null Check'
- script: |
yarn compile
displayName: 'Compile'
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
displayName: 'Tests'
condition: eq(variables['Agent.OS'], 'Linux')
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
displayName: 'Tests'
condition: ne(variables['Agent.OS'], 'Linux')
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: 'cobertura'
summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
condition: ne(variables['Agent.OS'], 'Linux')

View File

@@ -1,44 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '10.15.1'
displayName: 'Install Node.js'
- script: |
yarn
displayName: 'Yarn Install'
- script: |
yarn gulp electron-x64
displayName: 'Electron'
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: 'Run TSLint'
- script: |
yarn strict-null-check
displayName: 'Run Strict Null Check'
- script: |
yarn compile
displayName: 'Compile'
- script: |
.\scripts\test.bat --reporter mocha-junit-reporter --coverage
displayName: 'Test'
- task: PublishTestResults@2
inputs:
testResultsFiles: 'test-results.xml'
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: 'cobertura'
summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report

View File

@@ -1,29 +0,0 @@
trigger:
- master
- releases/*
jobs:
# All tasks on Windows
- job: build_all_windows
displayName: Build all tasks (Windows)
pool:
vmImage: vs2017-win2016
steps:
- template: azure-pipelines-windows.yml
# All tasks on Linux
- job: build_all_linux
displayName: Build all tasks (Linux)
pool:
vmImage: 'Ubuntu 16.04'
steps:
- template: azure-pipelines-linux-mac.yml
# All tasks on macOS
- job: build_all_darwin
displayName: Build all tasks (macOS)
pool:
vmImage: macos-10.13
steps:
- template: azure-pipelines-linux-mac.yml

View File

@@ -1,126 +0,0 @@
# cleanup rules for native node modules, .gitignore style
fsevents/binding.gyp
fsevents/fsevents.cc
fsevents/build/**
fsevents/src/**
fsevents/test/**
!fsevents/**/*.node
vscode-sqlite3/binding.gyp
vscode-sqlite3/benchmark/**
vscode-sqlite3/cloudformation/**
vscode-sqlite3/deps/**
vscode-sqlite3/test/**
vscode-sqlite3/build/**
vscode-sqlite3/src/**
!vscode-sqlite3/build/Release/*.node
oniguruma/binding.gyp
oniguruma/build/**
oniguruma/src/**
oniguruma/deps/**
!oniguruma/build/Release/*.node
!oniguruma/src/*.js
windows-mutex/binding.gyp
windows-mutex/build/**
windows-mutex/src/**
!windows-mutex/**/*.node
native-keymap/binding.gyp
native-keymap/build/**
native-keymap/src/**
native-keymap/deps/**
!native-keymap/build/Release/*.node
native-is-elevated/binding.gyp
native-is-elevated/build/**
native-is-elevated/src/**
native-is-elevated/deps/**
!native-is-elevated/build/Release/*.node
native-watchdog/binding.gyp
native-watchdog/build/**
native-watchdog/src/**
!native-watchdog/build/Release/*.node
spdlog/binding.gyp
spdlog/build/**
spdlog/deps/**
spdlog/src/**
spdlog/test/**
!spdlog/build/Release/*.node
jschardet/dist/**
windows-foreground-love/binding.gyp
windows-foreground-love/build/**
windows-foreground-love/src/**
!windows-foreground-love/**/*.node
windows-process-tree/binding.gyp
windows-process-tree/build/**
windows-process-tree/src/**
!windows-process-tree/**/*.node
gc-signals/binding.gyp
gc-signals/build/**
gc-signals/src/**
gc-signals/deps/**
!gc-signals/build/Release/*.node
!gc-signals/src/index.js
keytar/binding.gyp
keytar/build/**
keytar/src/**
keytar/script/**
keytar/node_modules/**
!keytar/**/*.node
node-pty/binding.gyp
node-pty/build/**
node-pty/src/**
node-pty/tools/**
!node-pty/build/Release/*.exe
!node-pty/build/Release/*.dll
!node-pty/build/Release/*.node
chart.js/node_modules/**
emmet/node_modules/**
pty.js/build/**
!pty.js/build/Release/**
jquery-ui/external/**
jquery-ui/demos/**
core-js/**/**
slickgrid/node_modules/**
slickgrid/examples/**
vscode-nsfw/binding.gyp
vscode-nsfw/build/**
vscode-nsfw/src/**
vscode-nsfw/openpa/**
vscode-nsfw/includes/**
!vscode-nsfw/build/Release/*.node
!vscode-nsfw/**/*.a
vsda/binding.gyp
vsda/README.md
vsda/build/**
vsda/*.bat
vsda/*.sh
vsda/*.cpp
vsda/*.h
!vsda/build/Release/vsda.node
vscode-windows-ca-certs/**/*
!vscode-windows-ca-certs/package.json
!vscode-windows-ca-certs/**/*.node
node-addon-api/**/*

View File

@@ -1,20 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as cp from 'child_process';
import * as path from 'path';
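// Install the distro dependency twice: once at the repo root and once under ./remote, without writing lockfiles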
function yarnInstall(packageName: string): void {
cp.execSync(`yarn add --no-lockfile ${packageName}`);
cp.execSync(`yarn add --no-lockfile ${packageName}`, { cwd: path.join(process.cwd(), 'remote') });
}
const product = require('../../../product.json');
const dependencies = product.dependencies || {} as { [name: string]: string; };
Object.keys(dependencies).forEach(name => {
const url = dependencies[name];
yarnInstall(url);
});

View File

@@ -1,176 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as url from 'url';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { DocumentClient, RetrievedDocument } from 'documentdb';
function log(...args: any[]) {
console.log(...[`[${new Date().toISOString()}]`, ...args]);
}
function error(...args: any[]) {
console.error(...[`[${new Date().toISOString()}]`, ...args]);
}
if (process.argv.length < 3) {
error('Usage: node sync-mooncake.js <quality>');
process.exit(-1);
}
interface Build extends RetrievedDocument {
assets: Asset[];
}
interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl: string;
hash: string;
sha256hash: string;
size: number;
supportsFastUpdate?: boolean;
}
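// Swap the (platform, type) asset on the build document in Cosmos DB, retrying a few times on 409 write conflicts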
function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const updateQuery = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
function _update(): Promise<void> {
updateTries++;
return new Promise<void>((c, e) => {
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
const release = results[0];
release.assets = [
...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
asset
];
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
if (err) { return e(err); }
log('Build successfully updated.');
c();
});
});
});
}
return _update();
}
async function sync(commit: string, quality: string): Promise<void> {
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = `dbs/builds/colls/${quality}`;
const query = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
const build = await new Promise<Build>((c, e) => {
cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
c(results[0] as Build);
});
});
log(`Found build for ${commit}, with ${build.assets.length} assets`);
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// mooncake is fussy and far away, this is needed!
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
for (const asset of build.assets) {
try {
const blobPath = url.parse(asset.url).path;
if (!blobPath) {
throw new Error(`Failed to parse URL: ${asset.url}`);
}
const blobName = blobPath.replace(/^\/\w+\//, '');
log(`Found ${blobName}`);
if (asset.mooncakeUrl) {
log(` Already in Mooncake ✔️`);
continue;
}
const readStream = blobService.createReadStream(quality, blobName, undefined!);
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(blobPath),
cacheControl: 'max-age=31536000, public'
}
};
const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
log(` Uploading to Mooncake...`);
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
log(` Updating build in DB...`);
asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
await updateBuild(commit, quality, asset.platform, asset.type, asset);
log(` Done ✔️`);
} catch (err) {
error(err);
}
}
log(`All done ✔️`);
}
function main(): void {
if (process.env['VSCODE_BUILD_SKIP_PUBLISH']) {
error('Skipping publish due to VSCODE_BUILD_SKIP_PUBLISH');
return;
}
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
error('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const quality = process.argv[2];
sync(commit, quality).catch(err => {
error(err);
process.exit(1);
});
}
main();

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env bash
set -e
yarn gulp vscode-darwin-min
yarn gulp vscode-reh-darwin-min
yarn gulp upload-vscode-sourcemaps

View File

@@ -1,50 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
- script: |
yarn
displayName: Install Dependencies
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- script: |
./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,96 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
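# write credentials for Azure DevOps and the GitHub distro repo to ~/.netrc so the fetch/merge below can authenticate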
cat << EOF > ~/.netrc
machine monacotools.visualstudio.com
password $(devops-pat)
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
yarn
yarn gulp mixin
yarn gulp hygiene
yarn monaco-compile-check
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
displayName: Prepare build
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
./build/azure-pipelines/darwin/build.sh
displayName: Build
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
# APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
displayName: Run unit tests
- script: |
set -e
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests
- script: |
set -e
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd
displayName: Archive build
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: 'VSCode-darwin.zip'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: Codesign
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
./build/azure-pipelines/darwin/publish.sh
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View File

@@ -1,36 +0,0 @@
#!/usr/bin/env bash
set -e
# remove pkg from archive
zip -d ../VSCode-darwin.zip "*.pkg"
# publish the build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
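# publish.js positional args appear to be: quality, platform, packaging type, asset name, version, a boolean flag, and the file path (names inferred from the call sites)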
node build/azure-pipelines/common/publish.js \
"$VSCODE_QUALITY" \
darwin \
archive \
"VSCode-darwin-$VSCODE_QUALITY.zip" \
$VERSION \
true \
../VSCode-darwin.zip
# package Remote Extension Host
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
# publish Remote Extension Host
node build/azure-pipelines/common/publish.js \
"$VSCODE_QUALITY" \
server-darwin \
archive-unsigned \
"vscode-server-darwin.zip" \
$VERSION \
true \
../vscode-server-darwin.zip
# publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_MACOS"
# upload configuration
yarn gulp upload-vscode-configuration

View File

@@ -1,36 +0,0 @@
trigger:
branches:
include: ['master', 'release/*']
pr:
branches:
include: ['master', 'release/*']
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
git fetch distro
git push distro origin/master:refs/heads/master
git merge $(node -p "require('./package.json').distro")
displayName: Sync & Merge Distro

View File

@@ -1 +0,0 @@
pat

View File

@@ -1,7 +0,0 @@
#!/usr/bin/env bash
set -e
yarn gulp "vscode-linux-$VSCODE_ARCH-min"
if [[ "$VSCODE_ARCH" != "ia32" ]]; then
yarn gulp vscode-reh-linux-$VSCODE_ARCH-min
fi

View File

@@ -1,55 +0,0 @@
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
- script: |
yarn
displayName: Install Dependencies
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,40 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const documentdb_1 = require("documentdb");
function createDefaultConfig(quality) {
return {
id: quality,
frozen: false
};
}
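// Query Cosmos DB for this quality's config document; fall back to an unfrozen default when none exists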
function getConfig(quality) {
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) {
return e(err);
}
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
});
});
}
getConfig(process.argv[2])
.then(config => {
console.log(config.frozen);
process.exit(0);
})
.catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -1,79 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
cat << EOF > ~/.netrc
machine monacotools.visualstudio.com
password $(devops-pat)
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
CHILD_CONCURRENCY=1 yarn
yarn gulp mixin
yarn gulp hygiene
yarn monaco-compile-check
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
displayName: Prepare build
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/linux/build.sh
displayName: Build
- script: |
set -e
yarn gulp "electron-$(VSCODE_ARCH)"
# xvfb seems to be crashing often, let's make sure it's always up
service xvfb start
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
displayName: Run unit tests
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
./build/azure-pipelines/linux/publish.sh
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball

View File

@@ -1,64 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
# Publish tarball
PLATFORM_LINUX="linux-$VSCODE_ARCH"
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
# Publish Remote Extension Host
if [[ "$VSCODE_ARCH" != "ia32" ]]; then
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
fi
# Publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_LINUX64"
# Publish DEB
yarn gulp "vscode-linux-$VSCODE_ARCH-build-deb"
PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
# Publish RPM
yarn gulp "vscode-linux-$VSCODE_ARCH-build-rpm"
PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
# Publish Snap
yarn gulp "vscode-linux-$VSCODE_ARCH-prepare-snap"
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)

View File

@@ -1,55 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball
- script: |
set -e
# Get snapcraft version
snapcraft --version
# Make sure we get latest packages
sudo apt-get update
sudo apt-get upgrade -y
# Define variables
REPO="$(pwd)"
ARCH="$(VSCODE_ARCH)"
SNAP_ROOT="$REPO/.build/linux/snap/$ARCH"
# Install build dependencies
(cd build && yarn)
# Unpack snap tarball artifact, in order to preserve file perms
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$ARCH.tar.gz"
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)
# Create snap package
BUILD_VERSION="$(date +%s)"
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
(cd $SNAP_ROOT/code-* && sudo snapcraft snap --output "$SNAP_PATH")
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-$ARCH" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"

View File

@@ -1,81 +0,0 @@
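# Build matrix: Windows (x64, ia32), Linux (x64, ia32) in custom containers, a snap packaging job, macOS, and a final Mooncake sync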
resources:
containers:
- container: vscode-x64
endpoint: VSCodeHub
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
- container: vscode-ia32
endpoint: VSCodeHub
image: vscodehub.azurecr.io/vscode-linux-build-agent:ia32
- container: snapcraft
image: snapcore/snapcraft
jobs:
- job: Windows
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: x64
steps:
- template: win32/product-build-win32.yml
- job: Windows32
condition: eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: ia32
steps:
- template: win32/product-build-win32.yml
- job: Linux
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: vscode-x64
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnap
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: snapcraft
dependsOn: Linux
steps:
- template: linux/snap-build-linux.yml
- job: Linux32
condition: eq(variables['VSCODE_BUILD_LINUX_32BIT'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: ia32
container: vscode-ia32
steps:
- template: linux/product-build-linux.yml
- job: macOS
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
pool:
vmImage: macOS 10.13
steps:
- template: darwin/product-build-darwin.yml
- job: Mooncake
pool:
vmImage: 'Ubuntu-16.04'
condition: true
dependsOn:
- Windows
- Windows32
- Linux
- LinuxSnap
- Linux32
- macOS
steps:
- template: sync-mooncake.yml

View File

@@ -1,2 +0,0 @@
node_modules/
*.js

View File

@@ -1,36 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cp = require("child_process");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
if (!isValidTag(tag)) {
throw Error(`Invalid tag ${tag}`);
}
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function isValidTag(t) {
if (t.split('.').length !== 3) {
return false;
}
const [major, minor, bug] = t.split('.');
// Only release for tags like 1.34.0
if (bug !== '0') {
return false;
}
// NaN never compares equal to anything, so validate the parsed values with isNaN
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
return false;
}
return true;
}

View File

@@ -1,43 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as cp from 'child_process';
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
if (!isValidTag(tag)) {
throw Error(`Invalid tag ${tag}`);
}
} catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function isValidTag(t: string) {
if (t.split('.').length !== 3) {
return false;
}
const [major, minor, bug] = t.split('.');
// Only release for tags like 1.34.0
if (bug !== '0') {
return false;
}
// NaN never compares equal to anything, so validate the parsed values with isNaN
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
return false;
}
return true;
}

View File

@@ -1,67 +0,0 @@
# Publish @types/vscode for each release
trigger:
branches:
include: ['refs/tags/*']
pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- bash: |
# Install build dependencies
(cd build && yarn)
node build/azure-pipelines/publish-types/check-version.js
displayName: Check version
- bash: |
git config --global user.email "vscode@microsoft.com"
git config --global user.name "VSCode"
git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1
node build/azure-pipelines/publish-types/update-types.js
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
cd DefinitelyTyped
git diff --color | cat
git add -A
git status
git checkout -b "vscode-types-$TAG_VERSION"
git commit -m "VS Code $TAG_VERSION Extension API"
git push origin "vscode-types-$TAG_VERSION"
displayName: Push update to DefinitelyTyped
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame master, please open this link, examine changes and create a PR:"
LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
MESSAGE2="[@octref, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$LINK"'"}' \
https://slack.com/api/chat.postMessage
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE2"'"}' \
https://slack.com/api/chat.postMessage
displayName: Send message on Slack
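
The notification step above is three raw curl calls against chat.postMessage; a minimal Node sketch of the same call (assuming only the SLACK_TOKEN environment variable and the channel ID used above) would be:

// Minimal sketch of the Slack notification in Node (hypothetical helper).
const https = require('https');

function postToSlack(channel, text) {
    const body = JSON.stringify({ channel, link_names: true, text });
    const req = https.request({
        host: 'slack.com',
        path: '/api/chat.postMessage',
        method: 'POST',
        headers: {
            'Authorization': `Bearer ${process.env.SLACK_TOKEN}`,
            'Content-type': 'application/json; charset=utf-8',
            'Content-Length': Buffer.byteLength(body)
        }
    }, res => res.resume()); // drain the response; fire-and-forget
    req.end(body);
}

postToSlack('G1C14HJ2F', 'DefinitelyTyped PR branch pushed.');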

View File

@@ -1,62 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const cp = require("child_process");
const path = require("path");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
updateDTSFile(outPath, tag);
console.log(`Done updating vscode.d.ts at ${outPath}`);
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function updateDTSFile(outPath, tag) {
const oldContent = fs.readFileSync(outPath, 'utf-8');
const newContent = getNewFileContent(oldContent, tag);
fs.writeFileSync(outPath, newContent);
}
function getNewFileContent(content, tag) {
const oldheader = [
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
` *--------------------------------------------------------------------------------------------*/`
].join('\n');
return getNewFileHeader(tag) + content.slice(oldheader.length);
}
function getNewFileHeader(tag) {
const [major, minor] = tag.split('.');
const shorttag = `${major}.${minor}`;
const header = [
`// Type definitions for Visual Studio Code ${shorttag}`,
`// Project: https://github.com/microsoft/vscode`,
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
` * See https://code.visualstudio.com/api for more information`,
` */`
].join('\n');
return header;
}

View File

@@ -1,73 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as cp from 'child_process';
import * as path from 'path';
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
updateDTSFile(outPath, tag);
console.log(`Done updating vscode.d.ts at ${outPath}`);
} catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function updateDTSFile(outPath: string, tag: string) {
const oldContent = fs.readFileSync(outPath, 'utf-8');
const newContent = getNewFileContent(oldContent, tag);
fs.writeFileSync(outPath, newContent);
}
function getNewFileContent(content: string, tag: string) {
const oldheader = [
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
` *--------------------------------------------------------------------------------------------*/`
].join('\n');
return getNewFileHeader(tag) + content.slice(oldheader.length);
}
function getNewFileHeader(tag: string) {
const [major, minor] = tag.split('.');
const shorttag = `${major}.${minor}`;
const header = [
`// Type definitions for Visual Studio Code ${shorttag}`,
`// Project: https://github.com/microsoft/vscode`,
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
` * See https://code.visualstudio.com/api for more information`,
` */`
].join('\n');
return header;
}
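
To make the splice concrete: for a tag like 1.34.0, getNewFileHeader yields the DefinitelyTyped banner, and getNewFileContent drops the original copyright block before prepending it (illustration only):

// Illustration: the banner produced for tag '1.34.0'.
const banner = getNewFileHeader('1.34.0');
console.log(banner.split('\n')[0]); // "// Type definitions for Visual Studio Code 1.34"
// getNewFileContent then slices off the first oldheader.length characters
// of the downloaded d.ts and prepends this banner in their place.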

View File

@@ -1,24 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
(cd build ; yarn)
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(vscode-mooncake-storage-key)" \
node build/azure-pipelines/common/sync-mooncake.js "$VSCODE_QUALITY"

View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
</configuration>

View File

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="EsrpClient" version="1.0.27" />
</packages>

View File

@@ -1,5 +0,0 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-min" }
exec { yarn gulp "vscode-reh-win32-$env:VSCODE_ARCH-min" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-inno-updater" }

View File

@@ -1,54 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
- powershell: |
yarn
displayName: Install Dependencies
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
- powershell: |
yarn gulp electron
displayName: Download Electron
- powershell: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- powershell: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- powershell: |
yarn compile
displayName: Compile Sources
- powershell: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests
- powershell: |
.\scripts\test-integration.bat --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,24 +0,0 @@
# Taken from psake https://github.com/psake/psake
<#
.SYNOPSIS
This is a helper function that runs a scriptblock and checks the PS variable $lastexitcode
to see if an error occurred. If an error is detected, an exception is thrown.
This function allows you to run command-line programs without having to
explicitly check the $lastexitcode variable.
.EXAMPLE
exec { svn info $repository_trunk } "Error executing SVN. Please verify SVN command-line client is installed"
#>
function Exec
{
[CmdletBinding()]
param(
[Parameter(Position=0,Mandatory=1)][scriptblock]$cmd,
[Parameter(Position=1,Mandatory=0)][string]$errorMessage = ($msgs.error_bad_command -f $cmd)
)
& $cmd
if ($lastexitcode -ne 0) {
throw ("Exec: " + $errorMessage)
}
}

View File

@@ -1,14 +0,0 @@
Param(
[string]$AuthCertificateBase64,
[string]$AuthCertificateKey
)
# Import auth certificate
$AuthCertificateFileName = [System.IO.Path]::GetTempFileName()
$AuthCertificateBytes = [Convert]::FromBase64String($AuthCertificateBase64)
[IO.File]::WriteAllBytes($AuthCertificateFileName, $AuthCertificateBytes)
$AuthCertificate = Import-PfxCertificate -FilePath $AuthCertificateFileName -CertStoreLocation Cert:\LocalMachine\My -Password (ConvertTo-SecureString $AuthCertificateKey -AsPlainText -Force)
rm $AuthCertificateFileName
$ESRPAuthCertificateSubjectName = $AuthCertificate.Subject
Write-Output ("##vso[task.setvariable variable=ESRPAuthCertificateSubjectName;]$ESRPAuthCertificateSubjectName")

View File

@@ -1,149 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine monacotools.visualstudio.com`npassword $(devops-pat)`nmachine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
exec { git fetch distro }
exec { git merge $(node -p "require('./package.json').distro") }
exec { yarn }
exec { yarn gulp mixin }
exec { yarn gulp hygiene }
exec { yarn monaco-compile-check }
exec { node build/azure-pipelines/common/installDistro.js }
exec { node build/lib/builtInExtensions.js }
displayName: Prepare build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
.\build\azure-pipelines\win32\build.ps1
displayName: Build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
# yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
displayName: Run unit tests
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH),$(agent.builddirectory)/vscode-reh-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
inputs:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
restoreDirectory: packages
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: 'ESRP CodeSign'
- powershell: |
$ErrorActionPreference = "Stop"
.\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(esrp-auth-certificate) -AuthCertificateKey $(esrp-auth-certificate-key)
displayName: Import ESRP Auth Certificate
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
$env:VSCODE_HOCKEYAPP_TOKEN = "$(vscode-hockeyapp-token)"
.\build\azure-pipelines\win32\publish.ps1
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View File

@@ -1,37 +0,0 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$Arch = "$env:VSCODE_ARCH"
exec { yarn gulp "vscode-win32-$Arch-archive" "vscode-win32-$Arch-system-setup" "vscode-win32-$Arch-user-setup" --sign }
$Repo = "$(pwd)"
$Root = "$Repo\.."
$SystemExe = "$Repo\.build\win32-$Arch\system-setup\VSCodeSetup.exe"
$UserExe = "$Repo\.build\win32-$Arch\user-setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$Arch\archive\VSCode-win32-$Arch.zip"
$LegacyServer = "$Root\vscode-reh-win32-$Arch"
$ServerName = "vscode-server-win32-$Arch"
$Server = "$Root\$ServerName"
$ServerZip = "$Repo\.build\vscode-server-win32-$Arch.zip"
$Build = "$Root\VSCode-win32-$Arch"
# Create server archive
exec { Rename-Item -Path $LegacyServer -NewName $ServerName }
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Version true $Zip }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $Version true $SystemExe }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $Version true $UserExe }
exec { node build/azure-pipelines/common/publish.js $Quality "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $Version true $ServerZip }
# publish hockeyapp symbols
$hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
exec { node build/azure-pipelines/common/symbols.js "$env:VSCODE_MIXIN_PASSWORD" "$env:VSCODE_HOCKEYAPP_TOKEN" "$Arch" $hockeyAppId }

View File

@@ -1,70 +0,0 @@
function Create-TmpJson($Obj) {
$FileName = [System.IO.Path]::GetTempFileName()
ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
return $FileName
}
$Auth = Create-TmpJson @{
Version = "1.0.0"
AuthenticationType = "AAD_CERT"
ClientId = $env:ESRPClientId
AuthCert = @{
SubjectName = $env:ESRPAuthCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
}
RequestSigningCert = @{
SubjectName = $env:ESRPCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
}
}
$Policy = Create-TmpJson @{
Version = "1.0.0"
}
$Input = Create-TmpJson @{
Version = "1.0.0"
SignBatches = @(
@{
SourceLocationType = "UNC"
SignRequestFiles = @(
@{
SourceLocation = $args[0]
}
)
SigningInfo = @{
Operations = @(
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolSign"
Parameters = @{
OpusName = "VS Code"
OpusInfo = "https://code.visualstudio.com/"
Append = "/as"
FileDigest = "/fd `"SHA256`""
PageHash = "/NPH"
TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
}
ToolName = "sign"
ToolVersion = "1.0"
},
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolVerify"
Parameters = @{
VerifyAll = "/all"
}
ToolName = "sign"
ToolVersion = "1.0"
}
)
}
}
)
}
$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\EsrpClient.1.0.27\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output

View File

@@ -1,2 +1,12 @@
[
{
"name": "ms-vscode.node-debug",
"version": "1.23.3",
"repo": "https://github.com/Microsoft/vscode-node-debug"
},
{
"name": "ms-vscode.node-debug2",
"version": "1.23.5",
"repo": "https://github.com/Microsoft/vscode-node-debug2"
}
]

View File

@@ -43,7 +43,7 @@ function asYarnDependency(prefix, tree) {
}
function getYarnProductionDependencies(cwd) {
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'ignore'] });
const match = /^{"type":"tree".*$/m.exec(raw);
if (!match || match.length !== 1) {
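
The change above only touches the stderr slot of the stdio triple; stdout still defaults to a pipe so execSync can return the JSON tree. A minimal sketch of the difference:

// stdio is [stdin, stdout, stderr]; null slots default to 'pipe'.
const cp = require('child_process');
// Old behaviour: stderr inherited, so yarn warnings leak into the build log.
cp.execSync('yarn list --json', { encoding: 'utf8', stdio: [null, null, 'inherit'] });
// New behaviour: stderr discarded; stdout is still piped back as the return value.
cp.execSync('yarn list --json', { encoding: 'utf8', stdio: [null, null, 'ignore'] });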

View File

@@ -1,91 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const https = require("https");
const fs = require("fs");
const path = require("path");
const cp = require("child_process");
function ensureDir(filepath) {
if (!fs.existsSync(filepath)) {
ensureDir(path.dirname(filepath));
fs.mkdirSync(filepath);
}
}
function download(options, destination) {
ensureDir(path.dirname(destination));
return new Promise((c, e) => {
const fd = fs.openSync(destination, 'w');
const req = https.get(options, (res) => {
res.on('data', (chunk) => {
fs.writeSync(fd, chunk);
});
res.on('end', () => {
fs.closeSync(fd);
c();
});
});
req.on('error', (reqErr) => {
console.error(`request to ${options.host}${options.path} failed.`);
console.error(reqErr);
e(reqErr);
});
});
}
const MARKER_ARGUMENT = `_download_fork_`;
function base64encode(str) {
return Buffer.from(str, 'utf8').toString('base64');
}
function base64decode(str) {
return Buffer.from(str, 'base64').toString('utf8');
}
function downloadInExternalProcess(options) {
const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
console.log(`Downloading ${url}...`);
return new Promise((c, e) => {
const child = cp.fork(__filename, [MARKER_ARGUMENT, base64encode(JSON.stringify(options))], {
stdio: ['pipe', 'pipe', 'pipe', 'ipc']
});
let stderr = [];
child.stderr.on('data', (chunk) => {
stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
});
child.on('exit', (code) => {
if (code === 0) {
// normal termination
console.log(`Finished downloading ${url}.`);
c();
}
else {
// abnormal termination
console.error(Buffer.concat(stderr).toString());
e(new Error(`Download of ${url} failed.`));
}
});
});
}
exports.downloadInExternalProcess = downloadInExternalProcess;
function _downloadInExternalProcess() {
let options;
try {
options = JSON.parse(base64decode(process.argv[3]));
}
catch (err) {
console.error(`Cannot read arguments`);
console.error(err);
process.exit(-1);
return;
}
download(options.requestOptions, options.destinationPath).then(() => {
process.exit(0);
}, (err) => {
console.error(err);
process.exit(-2);
});
}
if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
// running as forked download script
_downloadInExternalProcess();
}

View File

@@ -1,111 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as https from 'https';
import * as fs from 'fs';
import * as path from 'path';
import * as cp from 'child_process';
function ensureDir(filepath: string) {
if (!fs.existsSync(filepath)) {
ensureDir(path.dirname(filepath));
fs.mkdirSync(filepath);
}
}
function download(options: https.RequestOptions, destination: string): Promise<void> {
ensureDir(path.dirname(destination));
return new Promise<void>((c, e) => {
const fd = fs.openSync(destination, 'w');
const req = https.get(options, (res) => {
res.on('data', (chunk) => {
fs.writeSync(fd, chunk);
});
res.on('end', () => {
fs.closeSync(fd);
c();
});
});
req.on('error', (reqErr) => {
console.error(`request to ${options.host}${options.path} failed.`);
console.error(reqErr);
e(reqErr);
});
});
}
const MARKER_ARGUMENT = `_download_fork_`;
function base64encode(str: string): string {
return Buffer.from(str, 'utf8').toString('base64');
}
function base64decode(str: string): string {
return Buffer.from(str, 'base64').toString('utf8');
}
export interface IDownloadRequestOptions {
host: string;
path: string;
}
export interface IDownloadOptions {
requestOptions: IDownloadRequestOptions;
destinationPath: string;
}
export function downloadInExternalProcess(options: IDownloadOptions): Promise<void> {
const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
console.log(`Downloading ${url}...`);
return new Promise<void>((c, e) => {
const child = cp.fork(
__filename,
[MARKER_ARGUMENT, base64encode(JSON.stringify(options))],
{
stdio: ['pipe', 'pipe', 'pipe', 'ipc']
}
);
let stderr: Buffer[] = [];
child.stderr.on('data', (chunk) => {
stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
});
child.on('exit', (code) => {
if (code === 0) {
// normal termination
console.log(`Finished downloading ${url}.`);
c();
} else {
// abnormal termination
console.error(Buffer.concat(stderr).toString());
e(new Error(`Download of ${url} failed.`));
}
});
});
}
function _downloadInExternalProcess() {
let options: IDownloadOptions;
try {
options = JSON.parse(base64decode(process.argv[3]));
} catch (err) {
console.error(`Cannot read arguments`);
console.error(err);
process.exit(-1);
return;
}
download(options.requestOptions, options.destinationPath).then(() => {
process.exit(0);
}, (err) => {
console.error(err);
process.exit(-2);
});
}
if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
// running as forked download script
_downloadInExternalProcess();
}
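
For reference, a caller of the exported helper above would look something like this (hypothetical host, paths, and module location):

// Hypothetical caller; the helper forks this same file and streams the body to disk.
const { downloadInExternalProcess } = require('./download');

downloadInExternalProcess({
    requestOptions: { host: 'example.com', path: '/artifacts/build.zip' },
    destinationPath: '/tmp/build.zip'
}).then(
    () => console.log('saved'),
    err => { console.error(err); process.exit(1); }
);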

View File

@@ -1,18 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const util = require('./lib/util');
const task = require('./lib/task');
const compilation = require('./lib/compilation');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
// Full compile, including nls and inline sources in sourcemaps, for build
const compileClientBuildTask = task.define('compile-client-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
// All Build
const compileBuildTask = task.define('compile-build', task.parallel(compileClientBuildTask, compileExtensionsBuildTask));
exports.compileBuildTask = compileBuildTask;

View File

@@ -6,19 +6,16 @@
const gulp = require('gulp');
const path = require('path');
const util = require('./lib/util');
const task = require('./lib/task');
const common = require('./lib/optimize');
const es = require('event-stream');
const File = require('vinyl');
const i18n = require('./lib/i18n');
const standalone = require('./lib/standalone');
const cp = require('child_process');
const compilation = require('./lib/compilation');
const monacoapi = require('./monaco/api');
const fs = require('fs');
var root = path.dirname(__dirname);
var sha1 = util.getVersion(root);
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
var semver = require('./monaco/package.json').version;
var headerVersion = semver + '(' + sha1 + ')';
@@ -29,7 +26,7 @@ var editorEntryPoints = [
name: 'vs/editor/editor.main',
include: [],
exclude: ['vs/css', 'vs/nls'],
prepend: ['out-editor-build/vs/css.js', 'out-editor-build/vs/nls.js'],
prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'],
},
{
name: 'vs/base/common/worker/simpleWorker',
@@ -49,6 +46,9 @@ var editorResources = [
'!**/test/**'
];
var editorOtherSources = [
];
var BUNDLED_FILE_HEADER = [
'/*!-----------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -59,59 +59,29 @@ var BUNDLED_FILE_HEADER = [
''
].join('\n');
function editorLoaderConfig() {
var result = common.loaderConfig();
// never ship octicons in editor
result.paths['vs/base/browser/ui/octiconLabel/octiconLabel'] = 'out-build/vs/base/browser/ui/octiconLabel/octiconLabel.mock';
// force css inlining to use base64 -- see https://github.com/Microsoft/monaco-editor/issues/148
result['vs/css'] = {
inlineResources: 'base64',
inlineResourcesLimit: 3000 // see https://github.com/Microsoft/monaco-editor/issues/336
};
return result;
}
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
const extractEditorSrcTask = task.define('extract-editor-src', () => {
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
const apiusages = monacoapi.execute().usageContent;
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
standalone.extractEditor({
sourcesRoot: path.join(root, 'src'),
entryPoints: [
'vs/editor/editor.main',
'vs/editor/editor.worker',
'vs/base/worker/workerMain',
],
inlineEntryPoints: [
apiusages,
extrausages
],
typings: [
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require-monaco.d.ts',
"typings/lib.es2018.promise.d.ts",
'vs/monaco.d.ts'
],
libs: [
`lib.es5.d.ts`,
`lib.dom.d.ts`,
`lib.webworker.importscripts.d.ts`
],
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
},
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
destRoot: path.join(root, 'out-editor-src')
});
});
const compileEditorAMDTask = task.define('compile-editor-amd', compilation.compileTask('out-editor-src', 'out-editor-build', true));
const optimizeEditorAMDTask = task.define('optimize-editor-amd', common.optimizeTask({
src: 'out-editor-build',
gulp.task('clean-optimized-editor', util.rimraf('out-editor'));
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'], common.optimizeTask({
entryPoints: editorEntryPoints,
otherSources: editorOtherSources,
resources: editorResources,
loaderConfig: {
paths: {
'vs': 'out-editor-build/vs',
'vs/css': 'out-editor-build/vs/css.build',
'vs/nls': 'out-editor-build/vs/nls.build',
'vscode': 'empty:'
}
},
loaderConfig: editorLoaderConfig(),
bundleLoader: false,
header: BUNDLED_FILE_HEADER,
bundleInfo: true,
@@ -119,92 +89,33 @@ const optimizeEditorAMDTask = task.define('optimize-editor-amd', common.optimize
languages: languages
}));
const minifyEditorAMDTask = task.define('minify-editor-amd', common.minifyTask('out-editor'));
gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));
const createESMSourcesAndResourcesTask = task.define('extract-editor-esm', () => {
standalone.createESMSourcesAndResources2({
srcFolder: './out-editor-src',
outFolder: './out-editor-esm',
outResourcesFolder: './out-monaco-editor-core/esm',
ignores: [
'inlineEntryPoint:0.ts',
'inlineEntryPoint:1.ts',
'vs/loader.js',
'vs/nls.ts',
'vs/nls.build.js',
'vs/nls.d.ts',
'vs/css.js',
'vs/css.build.js',
'vs/css.d.ts',
'vs/base/worker/workerMain.ts',
gulp.task('clean-editor-esm', util.rimraf('out-editor-esm'));
gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro'], function () {
standalone.createESMSourcesAndResources({
entryPoints: [
'vs/editor/editor.main',
'vs/editor/editor.worker'
],
renames: {
'vs/nls.mock.ts': 'vs/nls.ts'
outFolder: './out-editor-esm/src',
outResourcesFolder: './out-monaco-editor-core/esm',
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
'vs/nls': 'vs/nls.mock',
}
});
});
const compileEditorESMTask = task.define('compile-editor-esm', () => {
if (process.platform === 'win32') {
const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
console.log(result.stderr.toString());
} else {
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
console.log(result.stderr.toString());
}
gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () {
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
});
function toExternalDTS(contents) {
let lines = contents.split('\n');
let killNextCloseCurlyBrace = false;
for (let i = 0; i < lines.length; i++) {
let line = lines[i];
if (killNextCloseCurlyBrace) {
if ('}' === line) {
lines[i] = '';
killNextCloseCurlyBrace = false;
continue;
}
if (line.indexOf(' ') === 0) {
lines[i] = line.substr(4);
} else if (line.charAt(0) === '\t') {
lines[i] = line.substr(1);
}
continue;
}
if ('declare namespace monaco {' === line) {
lines[i] = '';
killNextCloseCurlyBrace = true;
continue;
}
if (line.indexOf('declare namespace monaco.') === 0) {
lines[i] = line.replace('declare namespace monaco.', 'export namespace ');
}
}
return lines.join('\n');
}
function filterStream(testFunc) {
return es.through(function (data) {
if (!testFunc(data.relative)) {
return;
}
this.emit('data', data);
});
}
const finalEditorResourcesTask = task.define('final-editor-resources', () => {
gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify-editor', 'optimize-editor'], function () {
return es.merge(
// other assets
es.merge(
@@ -219,7 +130,7 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
this.emit('data', new File({
path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
base: data.base,
contents: Buffer.from(toExternalDTS(data.contents.toString()))
contents: data.contents
}));
}))
.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
@@ -234,14 +145,6 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
}))
.pipe(gulp.dest('out-monaco-editor-core')),
// version.txt
gulp.src('build/monaco/version.txt')
.pipe(es.through(function (data) {
data.contents = Buffer.from(`monaco-editor-core: https://github.com/Microsoft/vscode/tree/${sha1}`);
this.emit('data', data);
}))
.pipe(gulp.dest('out-monaco-editor-core')),
// README.md
gulp.src('build/monaco/README-npm.md')
.pipe(es.through(function (data) {
@@ -275,7 +178,7 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
var strContents = data.contents.toString();
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
strContents = strContents.replace(/\/\/# sourceMappingURL=[^ ]+$/, newStr);
strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr);
data.contents = Buffer.from(strContents);
this.emit('data', data);
@@ -291,31 +194,59 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
);
});
gulp.task('editor-distro',
task.series(
task.parallel(
util.rimraf('out-editor-src'),
util.rimraf('out-editor-build'),
util.rimraf('out-editor-esm'),
util.rimraf('out-monaco-editor-core'),
util.rimraf('out-editor'),
util.rimraf('out-editor-min')
),
extractEditorSrcTask,
task.parallel(
task.series(
compileEditorAMDTask,
optimizeEditorAMDTask,
minifyEditorAMDTask
),
task.series(
createESMSourcesAndResourcesTask,
compileEditorESMTask
)
),
finalEditorResourcesTask
)
);
gulp.task('analyze-editor-distro', function () {
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
var bundleInfo = require('../out-editor/bundleInfo.json');
var graph = bundleInfo.graph;
var bundles = bundleInfo.bundles;
var inverseGraph = {};
Object.keys(graph).forEach(function (module) {
var dependencies = graph[module];
dependencies.forEach(function (dep) {
inverseGraph[dep] = inverseGraph[dep] || [];
inverseGraph[dep].push(module);
});
});
var detailed = {};
Object.keys(bundles).forEach(function (entryPoint) {
var included = bundles[entryPoint];
var includedMap = {};
included.forEach(function (included) {
includedMap[included] = true;
});
var explanation = [];
included.map(function (included) {
if (included.indexOf('!') >= 0) {
return;
}
var reason = (inverseGraph[included] || []).filter(function (mod) {
return !!includedMap[mod];
});
explanation.push({
module: included,
reason: reason
});
});
detailed[entryPoint] = explanation;
});
console.log(JSON.stringify(detailed, null, '\t'));
});
function filterStream(testFunc) {
return es.through(function (data) {
if (!testFunc(data.relative)) {
return;
}
this.emit('data', data);
});
}
//#region monaco type checking
@@ -335,7 +266,6 @@ function createTscCompileTask(watch) {
let errors = [];
let reporter = createReporter();
let report;
// eslint-disable-next-line no-control-regex
let magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings
child.stdout.on('data', data => {
@@ -369,10 +299,7 @@ function createTscCompileTask(watch) {
};
}
const monacoTypecheckWatchTask = task.define('monaco-typecheck-watch', createTscCompileTask(true));
exports.monacoTypecheckWatchTask = monacoTypecheckWatchTask;
const monacoTypecheckTask = task.define('monaco-typecheck', createTscCompileTask(false));
exports.monacoTypecheckTask = monacoTypecheckTask;
gulp.task('monaco-typecheck-watch', createTscCompileTask(true));
gulp.task('monaco-typecheck', createTscCompileTask(false));
//#endregion
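
The toExternalDTS transform above is easiest to read on a sample: the outer monaco namespace is dropped, its body dedented, and dotted namespaces become exports (illustration only):

// Illustration of toExternalDTS on a two-namespace input.
const input = [
    'declare namespace monaco {',                    // removed; the matching bare '}' removed too
    '    export type Thenable<T> = PromiseLike<T>;', // dedented by four spaces (substr(4))
    '}',
    'declare namespace monaco.editor {',             // becomes 'export namespace editor {'
    '}'
].join('\n');
console.log(toExternalDTS(input));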

View File

@@ -11,8 +11,8 @@ const path = require('path');
const tsb = require('gulp-tsb');
const es = require('event-stream');
const filter = require('gulp-filter');
const rimraf = require('rimraf');
const util = require('./lib/util');
const task = require('./lib/task');
const watcher = require('./lib/watch');
const createReporter = require('./lib/reporter').createReporter;
const glob = require('glob');
@@ -20,8 +20,8 @@ const sourcemaps = require('gulp-sourcemaps');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const i18n = require('./lib/i18n');
const plumber = require('gulp-plumber');
const _ = require('underscore');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@@ -32,21 +32,33 @@ const compilations = glob.sync('**/tsconfig.json', {
const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`;
const languages = i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile);
const relativeDirname = path.dirname(tsconfigFile);
const tsconfig = require(absolutePath);
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
const tsOptions = require(absolutePath).compilerOptions;
tsOptions.verbose = false;
tsOptions.sourceMap = true;
const name = relativeDirname.replace(/\//g, '-');
// Tasks
const clean = 'clean-extension:' + name;
const compile = 'compile-extension:' + name;
const watch = 'watch-extension:' + name;
// Build Tasks
const cleanBuild = 'clean-extension-build:' + name;
const compileBuild = 'compile-extension-build:' + name;
const watchBuild = 'watch-extension-build:' + name;
const root = path.join('extensions', relativeDirname);
const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**');
const out = path.join(root, 'out');
const i18nPath = path.join(__dirname, '..', 'i18n');
const baseUrl = getBaseUrl(out);
let headerId, headerOut;
@@ -90,9 +102,9 @@ const tasks = compilations.map(function (tsconfigFile) {
sourceRoot: '../src'
}))
.pipe(tsFilter.restore)
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18nPath, out) : es.through())
.pipe(build ? nlsDev.bundleMetaDataFiles(headerId, headerOut) : es.through())
	// Filter out *.nls.json files. We needed them only to bundle the metadata files.
.pipe(filter(['**', '!**/*.nls.json']))
.pipe(build ? nlsDev.bundleLanguageFiles() : es.through())
.pipe(reporter.end(emitError));
return es.duplex(input, output);
@@ -101,18 +113,18 @@ const tasks = compilations.map(function (tsconfigFile) {
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
gulp.task(clean, cb => rimraf(out, cb));
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
gulp.task(compile, [clean], () => {
const pipeline = createPipeline(false, true);
const input = gulp.src(src, srcOpts);
return input
.pipe(pipeline())
.pipe(gulp.dest(out));
}));
});
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
gulp.task(watch, [clean], () => {
const pipeline = createPipeline(false);
const input = gulp.src(src, srcOpts);
const watchInput = watcher(src, srcOpts);
@@ -120,35 +132,43 @@ const tasks = compilations.map(function (tsconfigFile) {
return watchInput
.pipe(util.incremental(pipeline, input))
.pipe(gulp.dest(out));
}));
});
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
gulp.task(cleanBuild, cb => rimraf(out, cb));
gulp.task(compileBuild, [clean], () => {
const pipeline = createPipeline(true, true);
const input = gulp.src(src, srcOpts);
return input
.pipe(pipeline())
.pipe(gulp.dest(out));
}));
});
// Tasks
gulp.task(compileTask);
gulp.task(watchTask);
gulp.task(watchBuild, [clean], () => {
const pipeline = createPipeline(true);
const input = gulp.src(src, srcOpts);
const watchInput = watcher(src, srcOpts);
return watchInput
.pipe(util.incremental(() => pipeline(), input))
.pipe(gulp.dest(out));
});
return {
compileTask: compileTask,
watchTask: watchTask,
compileBuildTask: compileBuildTask
clean: clean,
compile: compile,
watch: watch,
cleanBuild: cleanBuild,
compileBuild: compileBuild,
watchBuild: watchBuild
};
});
const compileExtensionsTask = task.define('compile-extensions', task.parallel(...tasks.map(t => t.compileTask)));
gulp.task(compileExtensionsTask);
exports.compileExtensionsTask = compileExtensionsTask;
gulp.task('clean-extensions', tasks.map(t => t.clean));
gulp.task('compile-extensions', tasks.map(t => t.compile));
gulp.task('watch-extensions', tasks.map(t => t.watch));
const watchExtensionsTask = task.define('watch-extensions', task.parallel(...tasks.map(t => t.watchTask)));
gulp.task(watchExtensionsTask);
exports.watchExtensionsTask = watchExtensionsTask;
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.parallel(...tasks.map(t => t.compileBuildTask)));
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
gulp.task('clean-extensions-build', tasks.map(t => t.cleanBuild));
gulp.task('compile-extensions-build', tasks.map(t => t.compileBuild));
gulp.task('watch-extensions-build', tasks.map(t => t.watchBuild));
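
The pattern running through this whole file: gulp 3 dependency arrays give way to tasks as first-class values composed with series/parallel. Schematically (a sketch, not a new task):

// Before (gulp 3): ordering expressed through dependency names.
gulp.task('compile-extension:foo', ['clean-extension:foo'], () => { /* build */ });

// After: tasks are values, composed explicitly and registered once.
const cleanFoo = task.define('clean-extension-foo', util.rimraf('out'));
const compileFoo = task.define('compile-extension:foo', task.series(cleanFoo, () => { /* build */ }));
gulp.task(compileFoo);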

View File

@@ -42,17 +42,13 @@ const indentationFilter = [
// except specific files
'!ThirdPartyNotices.txt',
'!LICENSE.{txt,rtf}',
'!LICENSES.chromium.html',
'!**/LICENSE',
'!LICENSE.txt',
'!src/vs/nls.js',
'!src/vs/nls.build.js',
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/winjs.base.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/assert.js',
// except specific folders
@@ -81,22 +77,12 @@ const indentationFilter = [
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns}',
'!build/{lib,tslintRules,download}/**/*.js',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
'!build/{lib,tslintRules}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
'!build/tfs/**/*.js',
'!**/Dockerfile',
'!**/Dockerfile.*',
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
// {{SQL CARBON EDIT}}
'!**/*.{xlf,docx,sql,vsix}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**'
'!extensions/markdown/media/*.js'
];
const copyrightFilter = [
@@ -108,8 +94,6 @@ const copyrightFilter = [
'!**/*.md',
'!**/*.bat',
'!**/*.cmd',
'!**/*.ico',
'!**/*.icns',
'!**/*.xml',
'!**/*.sh',
'!**/*.txt',
@@ -117,46 +101,12 @@ const copyrightFilter = [
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/promise-polyfill/polyfill.js',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/linux/snap/electron-launch',
'!resources/win32/bin/code.js',
'!resources/completions/**',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/objectExplorerNodeProvider/webhdfs.ts',
'!src/sql/workbench/parts/notebook/outputs/tableRenderers.ts',
'!src/sql/workbench/parts/notebook/outputs/common/url.ts',
'!src/sql/workbench/parts/notebook/outputs/common/renderMimeInterfaces.ts',
'!src/sql/workbench/parts/notebook/outputs/common/outputProcessor.ts',
'!src/sql/workbench/parts/notebook/outputs/common/mimemodel.ts',
'!src/sql/workbench/parts/notebook/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/parts/notebook/outputs/sanitizer.ts',
'!src/sql/workbench/parts/notebook/outputs/renderers.ts',
'!src/sql/workbench/parts/notebook/outputs/registry.ts',
'!src/sql/workbench/parts/notebook/outputs/factories.ts',
'!src/sql/workbench/parts/notebook/models/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/electron-browser/modelComponents/media/highlight.css',
'!src/sql/parts/modelComponents/highlight.css',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
'!extensions/mssql/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac'
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*'
];
const eslintFilter = [
@@ -167,6 +117,7 @@ const eslintFilter = [
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/winjs.base.js',
'!src/**/marked.js',
'!**/test/**'
];
@@ -185,12 +136,6 @@ const tslintFilter = [
'!extensions/html-language-features/server/lib/jquery.d.ts'
];
// {{SQL CARBON EDIT}}
const useStrictFilter = [
'src/**'
];
// {{SQL CARBON EDIT}}
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -207,7 +152,8 @@ gulp.task('eslint', () => {
});
gulp.task('tslint', () => {
const options = { emitError: true };
// {{SQL CARBON EDIT}}
const options = { emitError: false };
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintFilter))
@@ -240,8 +186,8 @@ function hygiene(some) {
});
const copyrights = es.through(function (file) {
const lines = file.__lines;
for (let i = 0; i < copyrightHeaderLines.length; i++) {
if (lines[i] !== copyrightHeaderLines[i]) {
console.error(file.relative + ': Missing or bad copyright statement');
@@ -253,23 +199,6 @@ function hygiene(some) {
this.emit('data', file);
});
// {{SQL CARBON EDIT}}
// Check for unnecessary 'use strict' lines. These are automatically added by the alwaysStrict compiler option, so they don't need to be added manually
const useStrict = es.through(function (file) {
const lines = file.__lines;
// Only take the first 10 lines to reduce false positives - the compiler will throw an error if it's not the first non-comment line in a file
// (10 is used to account for copyright and extraneous newlines)
lines.slice(0, 10).forEach((line, i) => {
if (/\s*'use\s*strict\s*'/.test(line)) {
console.error(file.relative + '(' + (i + 1) + ',1): Unnecessary \'use strict\' - this is already added by the compiler');
errorCount++;
}
});
this.emit('data', file);
});
// {{SQL CARBON EDIT}} END
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: false,
@@ -290,7 +219,7 @@ function hygiene(some) {
let formatted = result.dest.replace(/\r\n/gm, '\n');
if (original !== formatted) {
console.error("File not formatted. Run the 'Format Document' command to fix it:", file.relative);
console.error('File not formatted:', file.relative);
errorCount++;
}
cb(null, file);
@@ -322,52 +251,27 @@ function hygiene(some) {
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter))
.pipe(copyrights);
.pipe(filter(copyrightFilter));
// {{SQL CARBON EDIT}}
// .pipe(copyrights);
const typescript = result
.pipe(filter(tslintFilter))
.pipe(formatting)
.pipe(tsl)
// {{SQL CARBON EDIT}}
.pipe(filter(useStrictFilter))
.pipe(useStrict);
.pipe(tsl);
const javascript = result
.pipe(filter(eslintFilter))
.pipe(gulpeslint('src/.eslintrc'))
.pipe(gulpeslint.formatEach('compact'))
.pipe(gulpeslint.failAfterError());
.pipe(gulpeslint.formatEach('compact'));
// {{SQL CARBON EDIT}}
// .pipe(gulpeslint.failAfterError());
let count = 0;
return es.merge(typescript, javascript)
.pipe(es.through(function (data) {
count++;
if (process.env['TRAVIS'] && count % 10 === 0) {
process.stdout.write('.');
}
this.emit('data', data);
}, function () {
process.stdout.write('\n');
const tslintResult = tsLinter.getResult();
if (tslintResult.failures.length > 0) {
for (const failure of tslintResult.failures) {
const name = failure.getFileName();
const position = failure.getStartPosition();
const line = position.getLineAndCharacter().line;
const character = position.getLineAndCharacter().character;
console.error(`${name}:${line + 1}:${character + 1}:${failure.getFailure()}`);
}
errorCount += tslintResult.failures.length;
}
if (errorCount > 0) {
this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
} else {
this.emit('end');
}
// {{SQL CARBON EDIT}}
this.emit('end');
}));
}
@@ -385,7 +289,7 @@ function createGitIndexVinyls(paths) {
return e(err);
}
cp.exec(`git show ":${relativePath}"`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
cp.exec(`git show :${relativePath}`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
if (err) {
return e(err);
}
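
The SQL CARBON useStrict check above boils down to a regex over each file's first ten lines; a quick illustration of what it matches:

// The detection is a regex test per line (first ten lines only).
const pattern = /\s*'use\s*strict\s*'/;
console.log(pattern.test("'use strict';")); // true  -> counted as a hygiene error
console.log(pattern.test('let x = 1;'));    // false -> passes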

View File

@@ -6,13 +6,23 @@
'use strict';
const gulp = require('gulp');
const json = require('gulp-json-editor');
const buffer = require('gulp-buffer');
const filter = require('gulp-filter');
const es = require('event-stream');
const util = require('./lib/util');
const remote = require('gulp-remote-src');
const zip = require('gulp-vinyl-zip');
const assign = require('object-assign');
// {{SQL CARBON EDIT}}
const jeditor = require('gulp-json-editor');
const product = require('../product.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json');
gulp.task('mixin', function () {
// {{SQL CARBON EDIT}}
// {{SQL CARBON EDIT}}
const updateUrl = process.env['SQLOPS_UPDATEURL'];
if (!updateUrl) {
console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
@@ -26,54 +36,20 @@ gulp.task('mixin', function () {
return;
}
// {{SQL CARBON EDIT}} - apply ADS insiders values if needed
// {{SQL CARBON EDIT}}
let serviceUrl = 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json';
if (quality === 'insider') {
serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
}
let newValues = {
"nameShort": product.nameShort,
"nameLong": product.nameLong,
"applicationName": product.applicationName,
"dataFolderName": product.dataFolderName,
"win32MutexName": product.win32MutexName,
"win32DirName": product.win32DirName,
"win32NameVersion": product.win32NameVersion,
"win32RegValueName": product.win32RegValueName,
"win32AppId": product.win32AppId,
"win32x64AppId": product.win32x64AppId,
"win32UserAppId": product.win32UserAppId,
"win32x64UserAppId": product.win32x64UserAppId,
"win32AppUserModelId": product.win32AppUserModelId,
"win32ShellNameShort": product.win32ShellNameShort,
"darwinBundleIdentifier": product.darwinBundleIdentifier,
"updateUrl": updateUrl,
"quality": quality,
"extensionsGallery": {
"serviceUrl": 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json'
"serviceUrl": serviceUrl
}
};
if (quality === 'insider') {
let dashSuffix = '-insiders';
let dotSuffix = '.insiders';
let displaySuffix = ' - Insiders';
newValues.extensionsGallery.serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
newValues.nameShort += dashSuffix;
newValues.nameLong += displaySuffix;
newValues.applicationName += dashSuffix;
newValues.dataFolderName += dashSuffix;
newValues.win32MutexName += dashSuffix;
newValues.win32DirName += displaySuffix;
newValues.win32NameVersion += displaySuffix;
newValues.win32RegValueName += dashSuffix;
newValues.win32AppId = "{{9F0801B2-DEE3-4272-A2C6-FBDF25BAAF0F}";
newValues.win32x64AppId = "{{6748A5FD-29EB-4BA6-B3C6-E7B981B8D6B0}";
newValues.win32UserAppId = "{{0F8CD1ED-483C-40EB-8AD2-8ED784651AA1}";
newValues.win32x64UserAppId += dashSuffix;
newValues.win32AppUserModelId += dotSuffix;
newValues.win32ShellNameShort += displaySuffix;
newValues.darwinBundleIdentifier += dotSuffix;
}
return gulp.src('./product.json')
.pipe(jeditor(newValues))
.pipe(gulp.dest('.'));
});
});

View File

@@ -1,16 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const noop = () => { return Promise.resolve(); };
gulp.task('vscode-reh-win32-ia32-min', noop);
gulp.task('vscode-reh-win32-x64-min', noop);
gulp.task('vscode-reh-darwin-min', noop);
gulp.task('vscode-reh-linux-x64-min', noop);
gulp.task('vscode-reh-linux-arm-min', noop);

View File

@@ -28,6 +28,7 @@ const formatFiles = (some) => {
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
if (result.error) {
console.error(result.message);
errorCount++;
}
cb(null, file);
@@ -39,7 +40,7 @@ const formatFiles = (some) => {
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(formatting);
};
}
const formatStagedFiles = () => {
const cp = require('child_process');
@@ -80,4 +81,4 @@ const formatStagedFiles = () => {
process.exit(1);
});
});
};
}

15
build/gulpfile.test.js Normal file
View File

@@ -0,0 +1,15 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const mocha = require('gulp-mocha');
gulp.task('test', function () {
return gulp.src('test/all.js')
.pipe(mocha({ ui: 'tdd', delay: true }))
.once('end', function () { process.exit(); });
});

View File

@@ -17,33 +17,36 @@ const vfs = require('vinyl-fs');
const rename = require('gulp-rename');
const replace = require('gulp-replace');
const filter = require('gulp-filter');
const buffer = require('gulp-buffer');
const json = require('gulp-json-editor');
const _ = require('underscore');
const util = require('./lib/util');
const task = require('./lib/task');
const ext = require('./lib/extensions');
const buildfile = require('../src/buildfile');
const common = require('./lib/optimize');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const packageJson = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
// {{SQL CARBON EDIT}}
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
// {{SQL CARBON EDIT}} - End
const glob = require('glob');
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar;
const { compileBuildTask } = require('./gulpfile.compile');
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// @ts-ignore
// {{SQL CARBON EDIT}}
var del = require('del');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
// {{SQL CARBON EDIT}}
const nodeModules = [
@@ -52,12 +55,32 @@ const nodeModules = [
'rxjs/Observable',
'rxjs/Subject',
'rxjs/Observer',
'ng2-charts']
'ng2-charts/ng2-charts']
.concat(Object.keys(product.dependencies || {}))
.concat(_.uniq(productionDependencies.map(d => d.name)))
.concat(baseModules);
// Build
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('./builtInExtensions.json');
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
];
// {{SQL CARBON EDIT}}
const vsce = require('vsce');
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// these extensions are excluded from the SQLOps package and ship as separate vsix packages
'agent',
'profiler'
];
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.main'),
buildfile.base,
@@ -70,27 +93,23 @@ const vscodeResources = [
'out-build/cli.js',
'out-build/driver.js',
'out-build/bootstrap.js',
'out-build/bootstrap-fork.js',
'out-build/bootstrap-amd.js',
'out-build/bootstrap-window.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,cur,html}',
'!out-build/vs/code/browser/**/*.html',
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/workbench/electron-browser/bootstrap/**',
'out-build/vs/workbench/parts/debug/**/*.json',
'out-build/vs/workbench/parts/execution/**/*.scpt',
'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/parts/tasks/**/*.json',
'out-build/vs/workbench/parts/terminal/electron-browser/terminalProcess.js',
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/services/files/**/*.exe',
'out-build/vs/workbench/services/files/**/*.md',
'out-build/vs/code/electron-browser/workbench/**',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js',
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
@@ -102,17 +121,18 @@ const vscodeResources = [
'out-build/sql/parts/admin/**/*.html',
'out-build/sql/parts/connection/connectionDialog/media/*.{gif,png,svg}',
'out-build/sql/parts/common/dblist/**/*.html',
'out-build/sql/workbench/parts/dashboard/**/*.html',
'out-build/sql/parts/dashboard/**/*.html',
'out-build/sql/parts/disasterRecovery/**/*.html',
'out-build/sql/parts/common/modal/media/**',
'out-build/sql/workbench/parts/grid/media/**',
'out-build/sql/workbench/parts/grid/views/**/*.html',
'out-build/sql/parts/grid/load/lib/**',
'out-build/sql/parts/grid/load/loadJquery.js',
'out-build/sql/parts/grid/media/**',
'out-build/sql/parts/grid/views/**/*.html',
'out-build/sql/parts/tasks/**/*.html',
'out-build/sql/parts/taskHistory/viewlet/media/**',
'out-build/sql/parts/jobManagement/common/media/*.svg',
'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg',
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
'!**/test/**'
];
@@ -122,84 +142,63 @@ const BUNDLED_FILE_HEADER = [
' *--------------------------------------------------------*/'
].join('\n');
const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
task.parallel(
util.rimraf('out-vscode'),
compileBuildTask
),
common.optimizeTask({
src: 'out-build',
entryPoints: vscodeEntryPoints,
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER,
out: 'out-vscode',
bundleInfo: undefined
})
));
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
entryPoints: vscodeEntryPoints,
otherSources: [],
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER,
out: 'out-vscode',
languages: languages,
bundleInfo: undefined
}));
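This hunk shows the gulp 3 → gulp 4 migration pattern that recurs through the rest of the file: the array-of-task-names dependency form is dropped in favour of explicit composition via the local task helper's series/parallel. In plain gulp 4 terms (a generic sketch, not this repo's helper):
// gulp 3: dependencies run before the body, concurrently, referenced by name
gulp.task('optimize', ['clean', 'compile'], () => { /* body */ });
// gulp 4: composition is explicit and first-class
const optimize = gulp.series(gulp.parallel(clean, compile), body); // clean/compile/body are plain functions
gulp.task('optimize', optimize);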
const optimizeIndexJSTask = task.define('optimize-index-js', task.series(
optimizeVSCodeTask,
() => {
const fullpath = path.join(process.cwd(), 'out-vscode/bootstrap-window.js');
const contents = fs.readFileSync(fullpath).toString();
const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
fs.writeFileSync(fullpath, newContents);
}
));
gulp.task('optimize-index-js', ['optimize-vscode'], () => {
const fullpath = path.join(process.cwd(), 'out-vscode/vs/workbench/electron-browser/bootstrap/index.js');
const contents = fs.readFileSync(fullpath).toString();
const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
fs.writeFileSync(fullpath, newContents);
});
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
const minifyVSCodeTask = task.define('minify-vscode', task.series(
task.parallel(
util.rimraf('out-vscode-min'),
optimizeIndexJSTask
),
common.minifyTask('out-vscode', `${sourceMappingURLBase}/core`)
));
const baseUrl = `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`;
gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min'));
gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', baseUrl));
// Package
// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
copyright: 'Copyright (C) 2018 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleDocumentTypes: [{
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
// {{SQL CARBON EDIT}}
extensions: ["csv", "json", "sqlplan", "sql", "xml"],
iconFile: 'resources/darwin/code_file.icns'
}],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || undefined
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
repo: product.electronRepository || void 0
};
function getElectron(arch) {
@@ -219,11 +218,11 @@ function getElectron(arch) {
};
}
gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));
gulp.task('clean-electron', util.rimraf('.build/electron'));
gulp.task('electron', ['clean-electron'], getElectron(process.arch));
gulp.task('electron-ia32', ['clean-electron'], getElectron('ia32'));
gulp.task('electron-x64', ['clean-electron'], getElectron('x64'));
/**
* Compute checksums for some files.
@@ -259,83 +258,142 @@ function computeChecksum(filename) {
return hash;
}
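The body of computeChecksum is elided in this hunk, but given the crypto import earlier in the file it is presumably a read-and-digest helper. A sketch under that assumption (the digest algorithm and encoding are guesses; only `return hash;` is visible here):
const crypto = require('crypto');
const fs = require('fs');
function computeChecksum(filename) {
	const contents = fs.readFileSync(filename);
	// md5/base64 assumed for illustration; the hunk does not show the real choice
	return crypto.createHash('md5').update(contents).digest('base64');
}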
function packageTask(platform, arch, sourceFolderName, destinationFolderName, opts) {
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result: ' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath
});
});
}
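Note that vsce.createVSIX returns a Promise which this loop neither awaits nor error-checks, so VSIX packaging races with the rest of the build and failures pass silently. A hedged sketch of a stricter variant:
function packageBuiltInExtensions() {
	// same extension discovery as above, elided
	return Promise.all(sqlBuiltInLocalExtensionDescriptions.map(element =>
		vsce.createVSIX({
			cwd: element.path,
			packagePath: path.join(path.dirname(root), element.name + '.vsix')
		})
	)); // surfaces packaging failures to the caller
}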
function packageTask(platform, arch, opts) {
opts = opts || {};
const destination = path.join(path.dirname(root), destinationFolderName);
// {{SQL CARBON EDIT}}
const destination = path.join(path.dirname(root), 'sqlops') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
platform = platform || process.platform;
return () => {
const out = sourceFolderName;
const out = opts.minified ? 'out-vscode-min' : 'out-vscode';
const checksums = computeChecksums(out, [
'vs/workbench/workbench.main.js',
'vs/workbench/workbench.main.css',
'vs/code/electron-browser/workbench/workbench.html',
'vs/code/electron-browser/workbench/workbench.js'
'vs/workbench/electron-browser/bootstrap/index.html',
'vs/workbench/electron-browser/bootstrap/index.js',
'vs/workbench/electron-browser/bootstrap/preload.js'
]);
const src = gulp.src(out + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }));
const root = path.resolve(path.join(__dirname, '..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
packageBuiltInExtensions();
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
const nlsFilter = filter('**/*.nls.json', { restore: true });
return ext.fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`))
// TODO@Dirk: this filter / buffer is here to make sure the nls.json files are buffered
.pipe(nlsFilter)
.pipe(buffer())
.pipe(nlsDev.createAdditionalLanguageFiles(languages, path.join(__dirname, '..', 'i18n')))
.pipe(nlsFilter.restore);
}));
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
const sources = es.merge(src, localExtensions, localExtensionDependencies)
.pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
const root = path.resolve(path.join(__dirname, '..'));
// {{SQL CARBON EDIT}}
ext.packageBuiltInExtensions();
const sources = es.merge(src, ext.packageExtensionsStream({
sourceMappingURLBase: sourceMappingURLBase
}));
let version = packageJson.version;
// @ts-ignore JSON checking: quality is optional
const quality = product.quality;
if (quality && quality !== 'stable') {
version += '-' + quality;
}
// {{SQL CARBON EDIT}}
const name = (platform === 'darwin') ? 'Azure Data Studio' : product.nameShort;
const packageJsonUpdates = { name, version };
// for linux url handling
if (platform === 'linux') {
packageJsonUpdates.desktopName = `${product.applicationName}-url-handler.desktop`;
}
const name = product.nameShort;
const packageJsonStream = gulp.src(['package.json'], { base: '.' })
.pipe(json(packageJsonUpdates));
.pipe(json({ name, version }));
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
const date = new Date().toISOString();
const productJsonUpdate = { commit, date, checksums };
if (shouldSetupSettingsSearch()) {
productJsonUpdate.settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
}
const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json(productJsonUpdate));
.pipe(json({ commit, date, checksums, settingsSearchBuildId }));
const license = gulp.src(['LICENSES.chromium.html', product.licenseFileName, 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.', allowEmpty: true });
const license = gulp.src(['LICENSES.chromium.html', 'LICENSE.txt', 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.' });
const watermark = gulp.src(['resources/letterpress.svg', 'resources/letterpress-dark.svg', 'resources/letterpress-hc.svg'], { base: '.' });
// TODO the API should be copied to `out` during compile, not here
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
// {{SQL CARBON EDIT}}
const dataApi = gulp.src('src/sql/azdata.d.ts').pipe(rename('out/sql/azdata.d.ts'));
const sqlopsAPI = gulp.src('src/sql/sqlops.d.ts').pipe(rename('out/sql/sqlops.d.ts'));
// {{SQL CARBON EDIT}}
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
const depsSrc = [
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
// @ts-ignore JSON checking: dependencies is optional
..._.flatten(Object.keys(product.dependencies || {}).map(d => [`node_modules/${d}/**`, `!node_modules/${d}/**/{test,tests}/**`]))
];
const deps = gulp.src(depsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')))
.pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
// {{SQL CARBON EDIT}}
.pipe(util.cleanNodeModule('chart.js', ['node_modules/**'], undefined))
.pipe(util.cleanNodeModule('emmet', ['node_modules/**'], undefined))
.pipe(util.cleanNodeModule('pty.js', ['build/**'], ['build/Release/**']))
.pipe(util.cleanNodeModule('jquery-ui', ['external/**', 'demos/**'], undefined))
.pipe(util.cleanNodeModule('core-js', ['**/**'], undefined))
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
// {{SQL CARBON EDIT}}
@@ -345,33 +403,24 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
'node_modules/slickgrid/**/*.*',
'node_modules/underscore/**/*.*',
'node_modules/zone.js/**/*.*',
'node_modules/chart.js/**/*.*',
'node_modules/chartjs-color/**/*.*',
'node_modules/chartjs-color-string/**/*.*',
'node_modules/color-convert/**/*.*',
'node_modules/color-name/**/*.*',
'node_modules/moment/**/*.*'
'node_modules/chart.js/**/*.*'
], { base: '.', dot: true });
let all = es.merge(
packageJsonStream,
productJsonStream,
license,
watermark,
api,
// {{SQL CARBON EDIT}}
copiedModules,
dataApi,
sqlopsAPI,
sources,
deps
);
if (platform === 'win32') {
all = es.merge(all, gulp.src([
// {{SQL CARBON EDIT}} remove unused icons
'resources/win32/code_70x70.png',
'resources/win32/code_150x150.png'
], { base: '.' }));
all = es.merge(all, gulp.src(['resources/win32/code_file.ico', 'resources/win32/code_70x70.png', 'resources/win32/code_150x150.png'], { base: '.' }));
} else if (platform === 'linux') {
all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' }));
} else if (platform === 'darwin') {
@@ -387,10 +436,8 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
// result = es.merge(result, gulp.src('resources/completions/**', { base: '.' }));
if (platform === 'win32') {
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32', allowEmpty: true }));
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32' }));
result = es.merge(result, gulp.src('resources/win32/bin/code.cmd', { base: 'resources/win32' })
.pipe(replace('@@NAME@@', product.nameShort))
@@ -398,66 +445,43 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' })
.pipe(replace('@@NAME@@', product.nameShort))
.pipe(replace('@@PRODNAME@@', product.nameLong))
.pipe(replace('@@VERSION@@', version))
.pipe(replace('@@COMMIT@@', commit))
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(replace('@@QUALITY@@', quality))
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));
result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' })
.pipe(rename(product.nameShort + '.VisualElementsManifest.xml')));
} else if (platform === 'linux') {
result = es.merge(result, gulp.src('resources/linux/bin/code.sh', { base: '.' })
.pipe(replace('@@PRODNAME@@', product.nameLong))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('bin/' + product.applicationName)));
}
// submit all stats that have been collected
// during the build phase
if (opts.stats) {
result.on('end', () => {
const { submitAllStats } = require('./lib/stats');
submitAllStats(product, commit).then(() => console.log('Submitted bundle stats!'));
});
}
return result.pipe(vfs.dest(destination));
};
}
const buildRoot = path.dirname(root);
const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32' },
{ platform: 'win32', arch: 'x64' },
{ platform: 'darwin', arch: null, opts: { stats: true } },
{ platform: 'linux', arch: 'ia32' },
{ platform: 'linux', arch: 'x64' },
{ platform: 'linux', arch: 'arm' },
{ platform: 'linux', arch: 'arm64' },
];
BUILD_TARGETS.forEach(buildTarget => {
const dashed = (str) => (str ? `-${str}` : ``);
const platform = buildTarget.platform;
const arch = buildTarget.arch;
const opts = buildTarget.opts;
// {{SQL CARBON EDIT}}
gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'sqlops-win32-ia32')));
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'sqlops-win32-x64')));
gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'sqlops-darwin')));
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'sqlops-linux-ia32')));
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'sqlops-linux-x64')));
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'sqlops-linux-arm')));
['', 'min'].forEach(minified => {
const sourceFolderName = `out-vscode${dashed(minified)}`;
const destinationFolderName = `azuredatastudio${dashed(platform)}${dashed(arch)}`;
gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32'));
gulp.task('vscode-win32-x64', ['optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32'));
gulp.task('vscode-linux-x64', ['optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm'));
const vscodeTask = task.define(`vscode${dashed(platform)}${dashed(arch)}${dashed(minified)}`, task.series(
task.parallel(
minified ? minifyVSCodeTask : optimizeVSCodeTask,
util.rimraf(path.join(buildRoot, destinationFolderName))
),
packageTask(platform, arch, sourceFolderName, destinationFolderName, opts)
));
gulp.task(vscodeTask);
});
});
gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true }));
gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true }));
gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true }));
gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true }));
gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true }));
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
// Transifex Localizations
@@ -480,78 +504,66 @@ const apiHostname = process.env.TRANSIFEX_API_URL;
const apiName = process.env.TRANSIFEX_API_NAME;
const apiToken = process.env.TRANSIFEX_API_TOKEN;
gulp.task(task.define(
'vscode-translations-push',
task.series(
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
}
)
));
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
});
gulp.task(task.define(
'vscode-translations-export',
task.series(
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(vfs.dest('../vscode-translations-export'));
}
)
));
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(vfs.dest('../vscode-transifex-input'));
});
gulp.task('vscode-translations-pull', function () {
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`));
let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
return i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-translations-import/${language.id}/setup`));
}));
i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`));
});
});
gulp.task('vscode-translations-import', function () {
// {{SQL CARBON EDIT}} - Replace function body with our own
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
.pipe(i18n.prepareI18nFiles())
.pipe(vfs.dest(`./i18n/${language.folderName}`));
// {{SQL CARBON EDIT}}
// gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
// .pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
// .pipe(vfs.dest(`./build/win32/i18n`));
});
// {{SQL CARBON EDIT}} - End
});
// Sourcemaps
gulp.task('upload-vscode-sourcemaps', () => {
gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
.pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
const extensionsOut = gulp.src('extensions/**/out/**/*.map', { base: '.' });
const extensionsDist = gulp.src('extensions/**/dist/**/*.map', { base: '.' });
const extensions = gulp.src('extensions/**/out/**/*.map', { base: '.' });
return es.merge(vs, extensionsOut, extensionsDist)
.pipe(es.through(function (data) {
// debug
console.log('Uploading Sourcemap', data.relative);
this.emit('data', data);
}))
return es.merge(vs, extensions)
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
@@ -560,18 +572,53 @@ gulp.task('upload-vscode-sourcemaps', () => {
}));
});
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => {
const branch = process.env.BUILD_SOURCEBRANCH;
if (!/\/master$/.test(branch) && branch.indexOf('/release/') < 0) {
console.log(`Only runs on master and release branches, not ${branch}`);
return;
}
if (!fs.existsSync(allConfigDetailsPath)) {
throw new Error(`configuration file at ${allConfigDetailsPath} does not exist`);
}
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
if (!settingsSearchBuildId) {
throw new Error('Failed to compute build number');
}
return gulp.src(allConfigDetailsPath)
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'configuration',
prefix: `${settingsSearchBuildId}/${commit}/`
}));
});
function getSettingsSearchBuildId(packageJson) {
const previous = util.getPreviousVersion(packageJson.version);
try {
const out = cp.execSync(`git rev-list ${previous}..HEAD --count`);
const count = parseInt(out.toString());
return util.versionStringToNumber(packageJson.version) * 1e4 + count;
} catch (e) {
throw new Error('Could not determine build number: ' + e.toString());
}
}
// This task is only run for the macOS build
const generateVSCodeConfigurationTask = task.define('generate-vscode-configuration', () => {
gulp.task('generate-vscode-configuration', () => {
return new Promise((resolve, reject) => {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
if (!buildDir) {
return reject(new Error('$AGENT_BUILDDIRECTORY not set'));
}
if (process.env.VSCODE_QUALITY !== 'insider' && process.env.VSCODE_QUALITY !== 'stable') {
return resolve();
}
const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
@@ -598,61 +645,6 @@ const generateVSCodeConfigurationTask = task.define('generate-vscode-configurati
});
});
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
gulp.task(task.define(
'upload-vscode-configuration',
task.series(
generateVSCodeConfigurationTask,
() => {
if (!shouldSetupSettingsSearch()) {
const branch = process.env.BUILD_SOURCEBRANCH;
console.log(`Only runs on master and release branches, not ${branch}`);
return;
}
if (!fs.existsSync(allConfigDetailsPath)) {
throw new Error(`configuration file at ${allConfigDetailsPath} does not exist`);
}
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
if (!settingsSearchBuildId) {
throw new Error('Failed to compute build number');
}
return gulp.src(allConfigDetailsPath)
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'configuration',
prefix: `${settingsSearchBuildId}/${commit}/`
}));
}
)
));
function shouldSetupSettingsSearch() {
const branch = process.env.BUILD_SOURCEBRANCH;
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
}
function getSettingsSearchBuildId(packageJson) {
try {
const branch = process.env.BUILD_SOURCEBRANCH;
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
/\/master$/.test(branch) ? 1 :
2; // Some unexpected branch
const out = cp.execSync(`git rev-list HEAD --count`);
const count = parseInt(out.toString());
// <version number><commit count><branchId (avoid unlikely conflicts)>
// 1.25.1, 1,234,567 commits, master = 1250112345671
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
} catch (e) {
throw new Error('Could not determine build number: ' + e.toString());
}
}
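The encoding comment above checks out arithmetically, assuming util.versionStringToNumber('1.25.1') yields 12501:
// 12501 * 1e8      = 1250100000000   (version number)
// + 1234567 * 10   = 1250112345670   (commit count)
// + 1 (master)     = 1250112345671   (branchId)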
// {{SQL CARBON EDIT}}
// Install service locally before building carbon
@@ -675,28 +667,6 @@ function installService() {
}
gulp.task('install-sqltoolsservice', () => {
return installService();
});
function installSsmsMin() {
const config = require('../extensions/admin-tool-ext-win/src/config.json');
return platformInfo.getCurrent().then(p => {
const runtime = p.runtimeId;
// rebase the relative installDirectory from config.json onto the extension folder
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
var installer = new serviceDownloader(config);
const serviceInstallFolder = installer.getInstallDirectory(runtime);
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
return del(serviceCleanupFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('Failed to delete the install folder. Error: ' + delError);
});
});
}
gulp.task('install-ssmsmin', () => {
return installSsmsMin();
});

View File

@@ -12,39 +12,32 @@ const shell = require('gulp-shell');
const es = require('event-stream');
const vfs = require('vinyl-fs');
const util = require('./lib/util');
const task = require('./lib/task');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const packageJson = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
const path = require('path');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
}
function prepareDebPackage(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../azuredatastudio-linux-' + arch;
// {{SQL CARBON EDIT}}
const binaryDir = '../sqlops-linux-' + arch;
const debArch = getDebPackageArch(arch);
const destination = '.build/linux/deb/' + debArch + '/' + product.applicationName + '-' + debArch;
return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename('usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
.pipe(replace('@@ICON@@', product.applicationName))
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
@@ -53,13 +46,7 @@ function prepareDebPackage(arch) {
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('usr/share/bash-completion/completions/code'));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('usr/share/zsh/vendor-completions/_code'));
.pipe(rename('usr/share/pixmaps/' + product.applicationName + '.png'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -89,13 +76,11 @@ function prepareDebPackage(arch) {
const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ARCHITECTURE@@', debArch))
// @ts-ignore JSON checking: quality is optional
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
// @ts-ignore JSON checking: updateUrl is optional
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst'));
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, /* bash_completion, zsh_completion, */ code);
const all = es.merge(control, postinst, postrm, prerm, desktop, appdata, icon, code);
return all.pipe(vfs.dest(destination));
};
@@ -115,27 +100,21 @@ function getRpmBuildPath(rpmArch) {
}
function getRpmPackageArch(arch) {
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf' }[arch];
}
function prepareRpmPackage(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../azuredatastudio-linux-' + arch;
const binaryDir = '../sqlops-linux-' + arch;
const rpmArch = getRpmPackageArch(arch);
return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
.pipe(replace('@@ICON@@', product.applicationName))
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
@@ -144,13 +123,7 @@ function prepareRpmPackage(arch) {
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('BUILD/usr/share/bash-completion/completions/code'));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('BUILD/usr/share/zsh/site-functions/_code'));
.pipe(rename('BUILD/usr/share/pixmaps/' + product.applicationName + '.png'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -158,14 +131,11 @@ function prepareRpmPackage(arch) {
const spec = gulp.src('resources/linux/rpm/code.spec.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@VERSION@@', packageJson.version))
.pipe(replace('@@RELEASE@@', linuxPackageRevision))
.pipe(replace('@@ARCHITECTURE@@', rpmArch))
.pipe(replace('@@LICENSE@@', product.licenseName))
// @ts-ignore JSON checking: quality is optional
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
// @ts-ignore JSON checking: updateUrl is optional
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(replace('@@DEPENDENCIES@@', rpmDependencies[rpmArch].join(', ')))
.pipe(rename('SPECS/' + product.applicationName + '.spec'));
@@ -173,7 +143,7 @@ function prepareRpmPackage(arch) {
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
const all = es.merge(code, desktops, appdata, icon, /* bash_completion, zsh_completion, */ spec, specIcon);
const all = es.merge(code, desktop, appdata, icon, spec, specIcon);
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
};
@@ -191,45 +161,37 @@ function buildRpmPackage(arch) {
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/'
]);
}
function getSnapBuildPath(arch) {
return `.build/linux/snap/${arch}/${product.applicationName}-${arch}`;
}
function prepareSnapPackage(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../azuredatastudio-linux-' + arch;
const binaryDir = '../VSCode-linux-' + arch;
const destination = getSnapBuildPath(arch);
return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.applicationName}.png`))
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
.pipe(rename(`usr/share/pixmaps/${product.applicationName}.png`));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@VERSION@@', commit.substr(0, 8)))
.pipe(replace('@@VERSION@@', packageJson.version))
.pipe(rename('snap/snapcraft.yaml'));
const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })
.pipe(rename('electron-launch'));
const all = es.merge(desktops, icon, code, snapcraft, electronLaunch);
const all = es.merge(desktop, icon, code, snapcraft, electronLaunch);
return all.pipe(vfs.dest(destination));
};
@@ -237,39 +199,117 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
return shell.task([
`chmod +x ${snapBuildPath}/electron-launch`,
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}`
]);
}
const BUILD_TARGETS = [
{ arch: 'ia32' },
{ arch: 'x64' },
{ arch: 'arm' },
{ arch: 'arm64' },
];
function getFlatpakArch(arch) {
return { x64: 'x86_64', ia32: 'i386', arm: 'arm' }[arch];
}
BUILD_TARGETS.forEach((buildTarget) => {
const arch = buildTarget.arch;
function prepareFlatpak(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../sqlops-linux-' + arch;
const flatpakArch = getFlatpakArch(arch);
const destination = '.build/linux/flatpak/' + flatpakArch;
{
const debArch = getDebPackageArch(arch);
const prepareDebTask = task.define(`vscode-linux-${arch}-prepare-deb`, task.series(util.rimraf(`.build/linux/deb/${debArch}`), prepareDebPackage(arch)));
// gulp.task(prepareDebTask);
const buildDebTask = task.define(`vscode-linux-${arch}-build-deb`, task.series(prepareDebTask, buildDebPackage(arch)));
gulp.task(buildDebTask);
return function () {
// This is not imported in the global scope to avoid requiring ImageMagick
// (or GraphicsMagick) when not building Flatpak bundles.
const imgResize = require('gulp-image-resize');
const all = [16, 24, 32, 48, 64, 128, 192, 256, 512].map(function (size) {
return gulp.src('resources/linux/code.png', { base: '.' })
.pipe(imgResize({ width: size, height: size, format: "png", noProfile: true }))
.pipe(rename('share/icons/hicolor/' + size + 'x' + size + '/apps/' + flatpakManifest.appId + '.png'));
});
all.push(gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(replace('Exec=/usr/share/@@NAME@@/@@NAME@@', 'Exec=' + product.applicationName))
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('share/applications/' + flatpakManifest.appId + '.desktop')));
all.push(gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME@@', flatpakManifest.appId))
.pipe(replace('@@LICENSE@@', product.licenseName))
.pipe(rename('share/appdata/' + flatpakManifest.appId + '.appdata.xml')));
all.push(gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) {
p.dirname = 'share/' + product.applicationName + '/' + p.dirname;
})));
return es.merge(all).pipe(vfs.dest(destination));
};
}
function buildFlatpak(arch) {
const flatpakArch = getFlatpakArch(arch);
const manifest = {};
for (var k in flatpakManifest) {
manifest[k] = flatpakManifest[k];
}
{
const rpmArch = getRpmPackageArch(arch);
const prepareRpmTask = task.define(`vscode-linux-${arch}-prepare-rpm`, task.series(util.rimraf(`.build/linux/rpm/${rpmArch}`), prepareRpmPackage(arch)));
// gulp.task(prepareRpmTask);
const buildRpmTask = task.define(`vscode-linux-${arch}-build-rpm`, task.series(prepareRpmTask, buildRpmPackage(arch)));
gulp.task(buildRpmTask);
manifest.files = [
['.build/linux/flatpak/' + flatpakArch, '/'],
];
const buildOptions = {
arch: flatpakArch,
subject: product.nameLong + ' ' + packageJson.version + '.' + linuxPackageRevision,
};
// If requested, use the configured path for the OSTree repository.
if (process.env.FLATPAK_REPO) {
buildOptions.repoDir = process.env.FLATPAK_REPO;
} else {
buildOptions.bundlePath = manifest.appId + '-' + flatpakArch + '.flatpak';
}
{
const prepareSnapTask = task.define(`vscode-linux-${arch}-prepare-snap`, task.series(util.rimraf(`.build/linux/snap/${arch}`), prepareSnapPackage(arch)));
gulp.task(prepareSnapTask);
const buildSnapTask = task.define(`vscode-linux-${arch}-build-snap`, task.series(prepareSnapTask, buildSnapPackage(arch)));
gulp.task(buildSnapTask);
// Setup PGP signing if requested.
if (process.env.GPG_KEY_ID !== undefined) {
buildOptions.gpgSign = process.env.GPG_KEY_ID;
if (process.env.GPG_HOMEDIR) {
buildOptions.gpgHomedir = process.env.GPG_HOMEDIR;
}
}
});
return function (cb) {
require('flatpak-bundler').bundle(manifest, buildOptions, cb);
};
}
gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386'));
gulp.task('clean-vscode-linux-x64-deb', util.rimraf('.build/linux/deb/amd64'));
gulp.task('clean-vscode-linux-arm-deb', util.rimraf('.build/linux/deb/armhf'));
gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386'));
gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64'));
gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf'));
gulp.task('clean-vscode-linux-ia32-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-x64-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-arm-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-ia32-flatpak', util.rimraf('.build/linux/flatpak/i386'));
gulp.task('clean-vscode-linux-x64-flatpak', util.rimraf('.build/linux/flatpak/x86_64'));
gulp.task('clean-vscode-linux-arm-flatpak', util.rimraf('.build/linux/flatpak/arm'));
gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64'));
gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm'));
gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32'));
gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64'));
gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm'));
gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64'));
gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm'));
gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32'));
gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64'));
gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm'));
gulp.task('vscode-linux-ia32-prepare-snap', ['clean-vscode-linux-ia32-snap'], prepareSnapPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-snap', ['clean-vscode-linux-x64-snap'], prepareSnapPackage('x64'));
gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prepareSnapPackage('arm'));
gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32'));
gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64'));
gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm'));

View File

@@ -7,76 +7,45 @@
const gulp = require('gulp');
const path = require('path');
const fs = require('fs');
const assert = require('assert');
const cp = require('child_process');
const _7z = require('7zip')['7z'];
const util = require('./lib/util');
const task = require('./lib/task');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json');
const vfs = require('vinyl-fs');
const rcedit = require('rcedit');
const mkdirp = require('mkdirp');
const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}}
const buildPath = arch => path.join(path.dirname(repoPath), `azuredatastudio-win32-${arch}`);
const buildPath = arch => path.join(path.dirname(repoPath), `sqlops-win32-${arch}`);
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const setupDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'setup');
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
// const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
function packageInnoSetup(iss, options, cb) {
options = options || {};
const definitions = options.definitions || {};
if (process.argv.some(arg => arg === '--debug-inno')) {
definitions['Debug'] = 'true';
}
if (process.argv.some(arg => arg === '--sign')) {
definitions['Sign'] = 'true';
}
const keys = Object.keys(definitions);
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
const defs = keys.map(key => `/d${key}=${definitions[key]}`);
const args = [
iss,
...defs
//,
//`/sesrp=powershell.exe -ExecutionPolicy bypass ${signPS1} $f`
];
const args = [iss].concat(defs);
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] })
cp.spawn(innoSetupPath, args, { stdio: 'inherit' })
.on('error', cb)
.on('exit', () => cb(null));
}
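Each definitions entry becomes a /dKey=value preprocessor flag on the Inno Setup compiler command line, matching the mapping above. For example (values hypothetical):
const definitions = { NameLong: 'Azure Data Studio', Arch: 'x64' };
const args = [issPath, ...Object.keys(definitions).map(key => `/d${key}=${definitions[key]}`)];
// Spawns: ISCC.exe build/win32/code.iss /dNameLong=Azure Data Studio /dArch=x64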
function buildWin32Setup(arch, target) {
if (target !== 'system' && target !== 'user') {
throw new Error('Invalid setup target');
}
function buildWin32Setup(arch) {
return cb => {
const ia32AppId = target === 'system' ? product.win32AppId : product.win32UserAppId;
const x64AppId = target === 'system' ? product.win32x64AppId : product.win32x64UserAppId;
const sourcePath = buildPath(arch);
const outputPath = setupDir(arch, target);
mkdirp.sync(outputPath);
const originalProductJsonPath = path.join(sourcePath, 'resources/app/product.json');
const productJsonPath = path.join(outputPath, 'product.json');
const productJson = JSON.parse(fs.readFileSync(originalProductJsonPath, 'utf8'));
productJson['target'] = target;
fs.writeFileSync(productJsonPath, JSON.stringify(productJson, undefined, '\t'));
const ia32AppId = product.win32AppId;
const x64AppId = product.win32x64AppId;
const definitions = {
NameLong: product.nameLong,
@@ -84,42 +53,35 @@ function buildWin32Setup(arch, target) {
DirName: product.win32DirName,
Version: pkg.version,
RawVersion: pkg.version.replace(/-\w+$/, ''),
NameVersion: product.win32NameVersion + (target === 'user' ? ' (User)' : ''),
NameVersion: product.win32NameVersion,
ExeBasename: product.nameShort,
RegValueName: product.win32RegValueName,
ShellNameShort: product.win32ShellNameShort,
AppMutex: product.win32MutexName,
Arch: arch,
AppId: arch === 'ia32' ? ia32AppId : x64AppId,
IncompatibleTargetAppId: arch === 'ia32' ? product.win32AppId : product.win32x64AppId,
IncompatibleArchAppId: arch === 'ia32' ? x64AppId : ia32AppId,
IncompatibleAppId: arch === 'ia32' ? x64AppId : ia32AppId,
AppUserId: product.win32AppUserModelId,
ArchitecturesAllowed: arch === 'ia32' ? '' : 'x64',
ArchitecturesInstallIn64BitMode: arch === 'ia32' ? '' : 'x64',
SourceDir: sourcePath,
SourceDir: buildPath(arch),
RepoDir: repoPath,
OutputDir: outputPath,
InstallTarget: target,
ProductJsonPath: productJsonPath
OutputDir: setupDir(arch)
};
packageInnoSetup(issPath, { definitions }, cb);
};
}
function defineWin32SetupTasks(arch, target) {
const cleanTask = util.rimraf(setupDir(arch, target));
gulp.task(task.define(`vscode-win32-${arch}-${target}-setup`, task.series(cleanTask, buildWin32Setup(arch, target))));
}
gulp.task('clean-vscode-win32-ia32-setup', util.rimraf(setupDir('ia32')));
gulp.task('vscode-win32-ia32-setup', ['clean-vscode-win32-ia32-setup'], buildWin32Setup('ia32'));
defineWin32SetupTasks('ia32', 'system');
defineWin32SetupTasks('x64', 'system');
defineWin32SetupTasks('ia32', 'user');
defineWin32SetupTasks('x64', 'user');
gulp.task('clean-vscode-win32-x64-setup', util.rimraf(setupDir('x64')));
gulp.task('vscode-win32-x64-setup', ['clean-vscode-win32-x64-setup'], buildWin32Setup('x64'));
function archiveWin32Setup(arch) {
return cb => {
const args = ['a', '-tzip', zipPath(arch), '-x!CodeSignSummary*.md', '.', '-r'];
const args = ['a', '-tzip', zipPath(arch), '.', '-r'];
cp.spawn(_7z, args, { stdio: 'inherit', cwd: buildPath(arch) })
.on('error', cb)
@@ -127,8 +89,11 @@ function archiveWin32Setup(arch) {
};
}
gulp.task(task.define('vscode-win32-ia32-archive', task.series(util.rimraf(zipDir('ia32')), archiveWin32Setup('ia32'))));
gulp.task(task.define('vscode-win32-x64-archive', task.series(util.rimraf(zipDir('x64')), archiveWin32Setup('x64'))));
gulp.task('clean-vscode-win32-ia32-archive', util.rimraf(zipDir('ia32')));
gulp.task('vscode-win32-ia32-archive', ['clean-vscode-win32-ia32-archive'], archiveWin32Setup('ia32'));
gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64')));
gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64'));
function copyInnoUpdater(arch) {
return () => {
@@ -137,12 +102,5 @@ function copyInnoUpdater(arch) {
};
}
function patchInnoUpdater(arch) {
return cb => {
const icon = path.join(repoPath, 'resources', 'win32', 'code.ico');
rcedit(path.join(buildPath(arch), 'tools', 'inno_updater.exe'), { icon }, cb);
};
}
gulp.task(task.define('vscode-win32-ia32-inno-updater', task.series(copyInnoUpdater('ia32'), patchInnoUpdater('ia32'))));
gulp.task(task.define('vscode-win32-x64-inno-updater', task.series(copyInnoUpdater('x64'), patchInnoUpdater('x64'))));
gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));
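For reference, patchInnoUpdater above drives the rcedit package, which rewrites resources (icons, version info) inside a Windows executable. A sketch of the callback form used in this file; the paths are illustrative (in the gulpfile they come from buildPath(arch) and repoPath):

const path = require('path');
const rcedit = require('rcedit');

const buildDir = '.build/win32-x64';   // illustrative
const repoDir = '.';                   // illustrative

rcedit(path.join(buildDir, 'tools', 'inno_updater.exe'),
    { icon: path.join(repoDir, 'resources', 'win32', 'code.ico') },
    err => { if (err) { console.error('rcedit failed:', err); } });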

View File

@@ -1,15 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es2017",
"jsx": "preserve",
"checkJs": true
},
"include": [
"**/*.js"
],
"exclude": [
"node_modules",
"**/node_modules/*"
]
}
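The file deleted here is a jsconfig.json that opted the surrounding folder's JavaScript into TypeScript's checker via "checkJs": true. With it gone, type errors in plain .js build scripts are no longer surfaced; a per-file opt-in remains possible (a sketch, not something this diff adds):

// @ts-check
// With the folder-wide checkJs gone, a file can still ask to be checked:
let retries = 3;
retries = 'three'; // flagged by the checker; plain node would run it silently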

View File

@@ -4,33 +4,33 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const es = require("event-stream");
const pickle = require('chromium-pickle-js');
const Filesystem = require('asar/lib/filesystem');
const VinylFile = require("vinyl");
const minimatch = require("minimatch");
var path = require("path");
var es = require("event-stream");
var pickle = require("chromium-pickle-js");
var Filesystem = require("asar/lib/filesystem");
var VinylFile = require("vinyl");
var minimatch = require("minimatch");
function createAsar(folderPath, unpackGlobs, destFilename) {
const shouldUnpackFile = (file) => {
for (let i = 0; i < unpackGlobs.length; i++) {
var shouldUnpackFile = function (file) {
for (var i = 0; i < unpackGlobs.length; i++) {
if (minimatch(file.relative, unpackGlobs[i])) {
return true;
}
}
return false;
};
const filesystem = new Filesystem(folderPath);
const out = [];
var filesystem = new Filesystem(folderPath);
var out = [];
// Keep track of pending inserts
let pendingInserts = 0;
let onFileInserted = () => { pendingInserts--; };
var pendingInserts = 0;
var onFileInserted = function () { pendingInserts--; };
// Do not insert the same directory twice

const seenDir = {};
const insertDirectoryRecursive = (dir) => {
var seenDir = {};
var insertDirectoryRecursive = function (dir) {
if (seenDir[dir]) {
return;
}
let lastSlash = dir.lastIndexOf('/');
var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
@@ -40,8 +40,8 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
seenDir[dir] = true;
filesystem.insertDirectory(dir);
};
const insertDirectoryForFile = (file) => {
let lastSlash = file.lastIndexOf('/');
var insertDirectoryForFile = function (file) {
var lastSlash = file.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = file.lastIndexOf('\\');
}
@@ -49,7 +49,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
insertDirectoryRecursive(file.substring(0, lastSlash));
}
};
const insertFile = (relativePath, stat, shouldUnpack) => {
var insertFile = function (relativePath, stat, shouldUnpack) {
insertDirectoryForFile(relativePath);
pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
@@ -59,13 +59,13 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
return;
}
if (!file.stat.isFile()) {
throw new Error(`unknown item in stream!`);
throw new Error("unknown item in stream!");
}
const shouldUnpack = shouldUnpackFile(file);
var shouldUnpack = shouldUnpackFile(file);
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
if (shouldUnpack) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path);
var relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
@@ -79,33 +79,34 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
out.push(file.contents);
}
}, function () {
let finish = () => {
var _this = this;
var finish = function () {
{
const headerPickle = pickle.createEmpty();
var headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));
const headerBuf = headerPickle.toBuffer();
const sizePickle = pickle.createEmpty();
var headerBuf = headerPickle.toBuffer();
var sizePickle = pickle.createEmpty();
sizePickle.writeUInt32(headerBuf.length);
const sizeBuf = sizePickle.toBuffer();
var sizeBuf = sizePickle.toBuffer();
out.unshift(headerBuf);
out.unshift(sizeBuf);
}
const contents = Buffer.concat(out);
var contents = Buffer.concat(out);
out.length = 0;
this.queue(new VinylFile({
_this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: destFilename,
contents: contents
}));
this.queue(null);
_this.queue(null);
};
// Call finish() only when all file inserts have finished...
if (pendingInserts === 0) {
finish();
}
else {
onFileInserted = () => {
onFileInserted = function () {
pendingInserts--;
if (pendingInserts === 0) {
finish();
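The finish() logic above fixes the on-disk asar layout: an 8-byte pickle whose payload is the UInt32 length of the header pickle, then the header pickle (a JSON directory), then the raw file contents in insertion order. A sketch of reading that header back, assuming the same chromium-pickle-js API the file already uses:

const fs = require('fs');
const pickle = require('chromium-pickle-js');

function readAsarHeader(asarPath) {
    const fd = fs.openSync(asarPath, 'r');
    // Bytes 0-7: a pickle whose payload is a UInt32 -- the header pickle's length.
    const sizeBuf = Buffer.alloc(8);
    fs.readSync(fd, sizeBuf, 0, 8, 0);
    const headerSize = pickle.createFromBuffer(sizeBuf).createIterator().readUInt32();
    // The next headerSize bytes: a pickle whose payload is the JSON directory.
    const headerBuf = Buffer.alloc(headerSize);
    fs.readSync(fd, headerBuf, 0, headerSize, 8);
    const json = pickle.createFromBuffer(headerBuf).createIterator().readString();
    fs.closeSync(fd);
    return JSON.parse(json); // { files: { ... } }; file offsets point past the header
}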

View File

@@ -7,8 +7,8 @@
import * as path from 'path';
import * as es from 'event-stream';
const pickle = require('chromium-pickle-js');
const Filesystem = require('asar/lib/filesystem');
import * as pickle from 'chromium-pickle-js';
import * as Filesystem from 'asar/lib/filesystem';
import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch';
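The import churn here trades `import * as X` for plain require calls. The practical difference: a bare require yields an any-typed binding and never consults type declarations, while the import form fails to compile when no .d.ts can be resolved for the package. Roughly:

// Resolved through the type system; needs matching declarations to compile:
import * as minimatch from 'minimatch';

// Bypasses declaration lookup entirely; the result is typed as any:
const Filesystem = require('asar/lib/filesystem');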

View File

@@ -14,10 +14,10 @@ const es = require('event-stream');
const rename = require('gulp-rename');
const vfs = require('vinyl-fs');
const ext = require('./extensions');
const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');
const util = require('gulp-util');
const root = path.dirname(path.dirname(__dirname));
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('../builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
@@ -44,22 +44,22 @@ function isUpToDate(extension) {
function syncMarketplaceExtension(extension) {
if (isUpToDate(extension)) {
fancyLog(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
util.log(util.colors.blue('[marketplace]'), `${extension.name}@${extension.version}`, util.colors.green('✔︎'));
return es.readArray([]);
}
rimraf.sync(getExtensionPath(extension));
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
return ext.fromMarketplace(extension.name, extension.version)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => fancyLog(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));
}
function syncExtension(extension, controlState) {
switch (controlState) {
case 'disabled':
fancyLog(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
util.log(util.colors.blue('[disabled]'), util.colors.gray(extension.name));
return es.readArray([]);
case 'marketplace':
@@ -67,15 +67,15 @@ function syncExtension(extension, controlState) {
default:
if (!fs.existsSync(controlState)) {
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
return es.readArray([]);
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
return es.readArray([]);
}
fancyLog(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
util.log(util.colors.blue('[local]'), `${extension.name}: ${util.colors.cyan(controlState)}`, util.colors.green('✔︎'));
return es.readArray([]);
}
}
@@ -94,8 +94,8 @@ function writeControlFile(control) {
}
function main() {
fancyLog('Synchronizing built-in extensions...');
fancyLog(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
util.log('Synchronizing built-in extensions...');
util.log(`You can manage built-in extensions with the ${util.colors.cyan('--builtin')} flag`);
const control = readControlFile();
const streams = [];
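For context, syncExtension's three-way switch above is driven by a per-developer control file at ~/.vscode-oss-dev/extensions/control.json. A hypothetical file exercising each state (the extension ids and local path are made up for illustration):

// Shape of a hypothetical control.json, as consumed by syncExtension:
const exampleControl = {
    'ms-vscode.node-debug': 'marketplace',        // re-download the pinned version
    'ms-vscode.references-view': 'disabled',      // skip this extension entirely
    'my.experimental-ext': '/home/me/src/my-ext'  // run from a local checkout
};

Any value other than 'marketplace' or 'disabled' is treated as a filesystem path, which is why the default branch validates that the path exists and contains a package.json.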

View File

@@ -4,19 +4,19 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const vm = require("vm");
var fs = require("fs");
var path = require("path");
var vm = require("vm");
/**
* Bundle `entryPoints` given config `config`.
*/
function bundle(entryPoints, config, callback) {
const entryPointsMap = {};
entryPoints.forEach((module) => {
var entryPointsMap = {};
entryPoints.forEach(function (module) {
entryPointsMap[module.name] = module;
});
const allMentionedModulesMap = {};
entryPoints.forEach((module) => {
var allMentionedModulesMap = {};
entryPoints.forEach(function (module) {
allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true;
@@ -25,30 +25,26 @@ function bundle(entryPoints, config, callback) {
allMentionedModulesMap[excludedModule] = true;
});
});
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
const r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
const loaderModule = { exports: {} };
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
var r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports);
const loader = loaderModule.exports;
var loader = loaderModule.exports;
config.isBuild = true;
config.paths = config.paths || {};
if (!config.paths['vs/nls']) {
config.paths['vs/nls'] = 'out-build/vs/nls.build';
}
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/css'] = 'out-build/vs/css.build';
loader.config(config);
loader(['require'], (localRequire) => {
const resolvePath = (path) => {
const r = localRequire.toUrl(path);
loader(['require'], function (localRequire) {
var resolvePath = function (path) {
var r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
}
return r;
};
for (const moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId];
for (var moduleId in entryPointsMap) {
var entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath);
}
@@ -57,59 +53,59 @@ function bundle(entryPoints, config, callback) {
}
}
});
loader(Object.keys(allMentionedModulesMap), () => {
const modules = loader.getBuildInfo();
const partialResult = emitEntryPoints(modules, entryPointsMap);
const cssInlinedResources = loader('vs/css').getInlinedResources();
loader(Object.keys(allMentionedModulesMap), function () {
var modules = loader.getBuildInfo();
var partialResult = emitEntryPoints(modules, entryPointsMap);
var cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, {
files: partialResult.files,
cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData
});
}, (err) => callback(err, null));
}, function (err) { return callback(err, null); });
}
exports.bundle = bundle;
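Worth calling out: bundle() boots the AMD loader by wrapping its source in a CommonJS-style function and evaluating it with vm, rather than require()-ing it, so the build gets a fresh loader instance with build hooks enabled. The pattern, extracted (loadModuleFromSource is a name invented for this sketch):

const fs = require('fs');
const vm = require('vm');

function loadModuleFromSource(filePath) {
    const code = fs.readFileSync(filePath, 'utf8');
    // The same wrapper Node applies to CommonJS modules, built by hand:
    const wrapper = vm.runInThisContext(
        '(function(require, module, exports) { ' + code + '\n});');
    const mod = { exports: {} };
    wrapper.call({}, require, mod, mod.exports);
    return mod.exports;
}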
function emitEntryPoints(modules, entryPoints) {
const modulesMap = {};
modules.forEach((m) => {
var modulesMap = {};
modules.forEach(function (m) {
modulesMap[m.id] = m;
});
const modulesGraph = {};
modules.forEach((m) => {
var modulesGraph = {};
modules.forEach(function (m) {
modulesGraph[m.id] = m.dependencies;
});
const sortedModules = topologicalSort(modulesGraph);
let result = [];
const usedPlugins = {};
const bundleData = {
var sortedModules = topologicalSort(modulesGraph);
var result = [];
var usedPlugins = {};
var bundleData = {
graph: modulesGraph,
bundles: {}
};
Object.keys(entryPoints).forEach((moduleToBundle) => {
const info = entryPoints[moduleToBundle];
const rootNodes = [moduleToBundle].concat(info.include || []);
const allDependencies = visit(rootNodes, modulesGraph);
const excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach((excludeRoot) => {
const allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude) => {
Object.keys(entryPoints).forEach(function (moduleToBundle) {
var info = entryPoints[moduleToBundle];
var rootNodes = [moduleToBundle].concat(info.include || []);
var allDependencies = visit(rootNodes, modulesGraph);
var excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach(function (excludeRoot) {
var allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach(function (exclude) {
delete allDependencies[exclude];
});
});
const includedModules = sortedModules.filter((module) => {
var includedModules = sortedModules.filter(function (module) {
return allDependencies[module];
});
bundleData.bundles[moduleToBundle] = includedModules;
const res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend || [], info.append || [], info.dest);
var res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend, info.append, info.dest);
result = result.concat(res.files);
for (const pluginName in res.usedPlugins) {
for (var pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
}
});
Object.keys(usedPlugins).forEach((pluginName) => {
const plugin = usedPlugins[pluginName];
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') {
const write = (filename, contents) => {
var write = function (filename, contents) {
result.push({
dest: filename,
sources: [{
@@ -128,16 +124,16 @@ function emitEntryPoints(modules, entryPoints) {
};
}
function extractStrings(destFiles) {
const parseDefineCall = (moduleMatch, depsMatch) => {
const module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(',');
deps = deps.map((dep) => {
var parseDefineCall = function (moduleMatch, depsMatch) {
var module = moduleMatch.replace(/^"|"$/g, '');
var deps = depsMatch.split(',');
deps = deps.map(function (dep) {
dep = dep.trim();
dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, '');
let prefix = null;
let _path = null;
const pieces = dep.split('!');
var prefix = null;
var _path = null;
var pieces = dep.split('!');
if (pieces.length > 1) {
prefix = pieces[0] + '!';
_path = pieces[1];
@@ -147,7 +143,7 @@ function extractStrings(destFiles) {
_path = pieces[0];
}
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
var res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res;
}
return prefix + _path;
@@ -157,7 +153,7 @@ function extractStrings(destFiles) {
deps: deps
};
};
destFiles.forEach((destFile) => {
destFiles.forEach(function (destFile, index) {
if (!/\.js$/.test(destFile.dest)) {
return;
}
@@ -165,44 +161,44 @@ function extractStrings(destFiles) {
return;
}
// Do one pass to record the usage counts for each module id
const useCounts = {};
destFile.sources.forEach((source) => {
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
var useCounts = {};
destFile.sources.forEach(function (source) {
var matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) {
return;
}
const defineCall = parseDefineCall(matches[1], matches[2]);
var defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach((dep) => {
defineCall.deps.forEach(function (dep) {
useCounts[dep] = (useCounts[dep] || 0) + 1;
});
});
const sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort((a, b) => {
var sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort(function (a, b) {
return useCounts[b] - useCounts[a];
});
const replacementMap = {};
sortedByUseModules.forEach((module, index) => {
var replacementMap = {};
sortedByUseModules.forEach(function (module, index) {
replacementMap[module] = index;
});
destFile.sources.forEach((source) => {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
const defineCall = parseDefineCall(moduleMatch, depsMatch);
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
destFile.sources.forEach(function (source) {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, function (_, moduleMatch, depsMatch) {
var defineCall = parseDefineCall(moduleMatch, depsMatch);
return "define(__m[" + replacementMap[defineCall.module] + "/*" + defineCall.module + "*/], __M([" + defineCall.deps.map(function (dep) { return replacementMap[dep] + '/*' + dep + '*/'; }).join(',') + "])";
});
});
destFile.sources.unshift({
path: null,
contents: [
'(function() {',
`var __m = ${JSON.stringify(sortedByUseModules)};`,
`var __M = function(deps) {`,
` var result = [];`,
` for (var i = 0, len = deps.length; i < len; i++) {`,
` result[i] = __m[deps[i]];`,
` }`,
` return result;`,
`};`
"var __m = " + JSON.stringify(sortedByUseModules) + ";",
"var __M = function(deps) {",
" var result = [];",
" for (var i = 0, len = deps.length; i < len; i++) {",
" result[i] = __m[deps[i]];",
" }",
" return result;",
"};"
].join('\n')
});
destFile.sources.push({
@@ -214,7 +210,7 @@ function extractStrings(destFiles) {
}
function removeDuplicateTSBoilerplate(destFiles) {
// Taken from typescript compiler => emitFiles
const BOILERPLATE = [
var BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ },
@@ -223,14 +219,14 @@ function removeDuplicateTSBoilerplate(destFiles) {
{ start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ },
];
destFiles.forEach((destFile) => {
const SEEN_BOILERPLATE = [];
destFile.sources.forEach((source) => {
const lines = source.contents.split(/\r\n|\n|\r/);
const newLines = [];
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
destFiles.forEach(function (destFile) {
var SEEN_BOILERPLATE = [];
destFile.sources.forEach(function (source) {
var lines = source.contents.split(/\r\n|\n|\r/);
var newLines = [];
var IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
if (END_BOILERPLATE.test(line)) {
@@ -238,8 +234,8 @@ function removeDuplicateTSBoilerplate(destFiles) {
}
}
else {
for (let j = 0; j < BOILERPLATE.length; j++) {
const boilerplate = BOILERPLATE[j];
for (var j = 0; j < BOILERPLATE.length; j++) {
var boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true;
@@ -267,45 +263,45 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
if (!dest) {
dest = entryPoint + '.js';
}
const mainResult = {
var mainResult = {
sources: [],
dest: dest
}, results = [mainResult];
const usedPlugins = {};
const getLoaderPlugin = (pluginName) => {
var usedPlugins = {};
var getLoaderPlugin = function (pluginName) {
if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports;
}
return usedPlugins[pluginName];
};
includedModules.forEach((c) => {
const bangIndex = c.indexOf('!');
includedModules.forEach(function (c) {
var bangIndex = c.indexOf('!');
if (bangIndex >= 0) {
const pluginName = c.substr(0, bangIndex);
const plugin = getLoaderPlugin(pluginName);
var pluginName = c.substr(0, bangIndex);
var plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return;
}
const module = modulesMap[c];
var module = modulesMap[c];
if (module.path === 'empty:') {
return;
}
const contents = readFileAndRemoveBOM(module.path);
var contents = readFileAndRemoveBOM(module.path);
if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
}
else {
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
}
});
Object.keys(usedPlugins).forEach((pluginName) => {
const plugin = usedPlugins[pluginName];
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') {
const req = (() => {
var req = (function () {
throw new Error('no-no!');
});
req.toUrl = something => something;
const write = (filename, contents) => {
req.toUrl = function (something) { return something; };
var write = function (filename, contents) {
results.push({
dest: filename,
sources: [{
@@ -317,15 +313,15 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
plugin.writeFile(pluginName, entryPoint, req, write, {});
}
});
const toIFile = (path) => {
const contents = readFileAndRemoveBOM(path);
var toIFile = function (path) {
var contents = readFileAndRemoveBOM(path);
return {
path: path,
contents: contents
};
};
const toPrepend = (prepend || []).map(toIFile);
const toAppend = (append || []).map(toIFile);
var toPrepend = (prepend || []).map(toIFile);
var toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
return {
files: results,
@@ -333,8 +329,8 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
};
}
function readFileAndRemoveBOM(path) {
const BOM_CHAR_CODE = 65279;
let contents = fs.readFileSync(path, 'utf8');
var BOM_CHAR_CODE = 65279;
var contents = fs.readFileSync(path, 'utf8');
// Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1);
@@ -342,15 +338,15 @@ function readFileAndRemoveBOM(path) {
return contents;
}
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
let result = '';
var result = '';
if (typeof plugin.write === 'function') {
const write = ((what) => {
var write = (function (what) {
result += what;
});
write.getEntryPoint = () => {
write.getEntryPoint = function () {
return entryPoint;
};
write.asModule = (moduleId, code) => {
write.asModule = function (moduleId, code) {
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
result += code;
};
@@ -361,20 +357,20 @@ function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
contents: result
};
}
function emitNamedModule(moduleId, defineCallPosition, path, contents) {
function emitNamedModule(moduleId, myDeps, defineCallPosition, path, contents) {
// `defineCallPosition` is the position in code: |define()
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
var defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|()
const parensOffset = contents.indexOf('(', defineCallOffset);
const insertStr = '"' + moduleId + '", ';
var parensOffset = contents.indexOf('(', defineCallOffset);
var insertStr = '"' + moduleId + '", ';
return {
path: path,
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
};
}
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
var strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
var strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return {
path: path,
contents: contents + '\n;\n' + strDefine
@@ -387,8 +383,7 @@ function positionToOffset(str, desiredLine, desiredCol) {
if (desiredLine === 1) {
return desiredCol - 1;
}
let line = 1;
let lastNewLineOffset = -1;
var line = 1, lastNewLineOffset = -1;
do {
if (desiredLine === line) {
return lastNewLineOffset + 1 + desiredCol - 1;
@@ -402,15 +397,14 @@ function positionToOffset(str, desiredLine, desiredCol) {
* Return a set of reachable nodes in `graph` starting from `rootNodes`
*/
function visit(rootNodes, graph) {
const result = {};
const queue = rootNodes;
rootNodes.forEach((node) => {
var result = {}, queue = rootNodes;
rootNodes.forEach(function (node) {
result[node] = true;
});
while (queue.length > 0) {
const el = queue.shift();
const myEdges = graph[el] || [];
myEdges.forEach((toNode) => {
var el = queue.shift();
var myEdges = graph[el] || [];
myEdges.forEach(function (toNode) {
if (!result[toNode]) {
result[toNode] = true;
queue.push(toNode);
@@ -423,11 +417,11 @@ function visit(rootNodes, graph) {
* Perform a topological sort on `graph`
*/
function topologicalSort(graph) {
const allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
Object.keys(graph).forEach((fromNode) => {
var allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
Object.keys(graph).forEach(function (fromNode) {
allNodes[fromNode] = true;
outgoingEdgeCount[fromNode] = graph[fromNode].length;
graph[fromNode].forEach((toNode) => {
graph[fromNode].forEach(function (toNode) {
allNodes[toNode] = true;
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
inverseEdges[toNode] = inverseEdges[toNode] || [];
@@ -435,8 +429,8 @@ function topologicalSort(graph) {
});
});
// https://en.wikipedia.org/wiki/Topological_sorting
const S = [], L = [];
Object.keys(allNodes).forEach((node) => {
var S = [], L = [];
Object.keys(allNodes).forEach(function (node) {
if (outgoingEdgeCount[node] === 0) {
delete outgoingEdgeCount[node];
S.push(node);
@@ -445,10 +439,10 @@ function topologicalSort(graph) {
while (S.length > 0) {
// Ensure the exact same order all the time with the same inputs
S.sort();
const n = S.shift();
var n = S.shift();
L.push(n);
const myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach((m) => {
var myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach(function (m) {
outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) {
delete outgoingEdgeCount[m];
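topologicalSort above is Kahn's algorithm run against the dependency direction: edges point from a module to its dependencies, so a node with zero outgoing edges has nothing left to wait on and can be emitted, unblocking its dependents through the inverse-edge map. A self-contained restatement (kahnSort is a name for this sketch, not a function in the repo):

function kahnSort(graph) {
    const outDegree = {};
    const dependents = {};
    for (const from of Object.keys(graph)) {
        outDegree[from] = (outDegree[from] || 0) + graph[from].length;
        for (const to of graph[from]) {
            outDegree[to] = outDegree[to] || 0;
            (dependents[to] = dependents[to] || []).push(from);
        }
    }
    const ready = Object.keys(outDegree).filter(n => outDegree[n] === 0);
    const order = [];
    while (ready.length > 0) {
        ready.sort();                 // deterministic output, as in the original
        const n = ready.shift();
        order.push(n);
        for (const m of dependents[n] || []) {
            if (--outDegree[m] === 0) { ready.push(m); }
        }
    }
    return order;  // shorter than the node count => the graph has a cycle
}
// kahnSort({ app: ['lib'], lib: [] }) -> ['lib', 'app']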

View File

@@ -3,9 +3,9 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as fs from 'fs';
import * as path from 'path';
import * as vm from 'vm';
import fs = require('fs');
import path = require('path');
import vm = require('vm');
interface IPosition {
line: number;
@@ -46,7 +46,7 @@ export interface IEntryPoint {
name: string;
include?: string[];
exclude?: string[];
prepend?: string[];
prepend: string[];
append?: string[];
dest?: string;
}
@@ -64,7 +64,7 @@ interface INodeSet {
}
export interface IFile {
path: string | null;
path: string;
contents: string;
}
@@ -97,13 +97,13 @@ export interface ILoaderConfig {
/**
* Bundle `entryPoints` given config `config`.
*/
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult | null) => void): void {
const entryPointsMap: IEntryPointMap = {};
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult) => void): void {
let entryPointsMap: IEntryPointMap = {};
entryPoints.forEach((module: IEntryPoint) => {
entryPointsMap[module.name] = module;
});
const allMentionedModulesMap: { [modules: string]: boolean; } = {};
let allMentionedModulesMap: { [modules: string]: boolean; } = {};
entryPoints.forEach((module: IEntryPoint) => {
allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) {
@@ -115,32 +115,28 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
});
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
const r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
const loaderModule = { exports: {} };
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
var r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports);
const loader: any = loaderModule.exports;
var loader: any = loaderModule.exports;
config.isBuild = true;
config.paths = config.paths || {};
if (!config.paths['vs/nls']) {
config.paths['vs/nls'] = 'out-build/vs/nls.build';
}
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/css'] = 'out-build/vs/css.build';
loader.config(config);
loader(['require'], (localRequire: any) => {
const resolvePath = (path: string) => {
const r = localRequire.toUrl(path);
loader(['require'], (localRequire) => {
let resolvePath = (path: string) => {
let r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
}
return r;
};
for (const moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId];
for (let moduleId in entryPointsMap) {
let entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath);
}
@@ -151,76 +147,76 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
});
loader(Object.keys(allMentionedModulesMap), () => {
const modules = <IBuildModuleInfo[]>loader.getBuildInfo();
const partialResult = emitEntryPoints(modules, entryPointsMap);
const cssInlinedResources = loader('vs/css').getInlinedResources();
let modules = <IBuildModuleInfo[]>loader.getBuildInfo();
let partialResult = emitEntryPoints(modules, entryPointsMap);
let cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, {
files: partialResult.files,
cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData
});
}, (err: any) => callback(err, null));
}, (err) => callback(err, null));
}
function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult {
const modulesMap: IBuildModuleInfoMap = {};
let modulesMap: IBuildModuleInfoMap = {};
modules.forEach((m: IBuildModuleInfo) => {
modulesMap[m.id] = m;
});
const modulesGraph: IGraph = {};
let modulesGraph: IGraph = {};
modules.forEach((m: IBuildModuleInfo) => {
modulesGraph[m.id] = m.dependencies;
});
const sortedModules = topologicalSort(modulesGraph);
let sortedModules = topologicalSort(modulesGraph);
let result: IConcatFile[] = [];
const usedPlugins: IPluginMap = {};
const bundleData: IBundleData = {
let usedPlugins: IPluginMap = {};
let bundleData: IBundleData = {
graph: modulesGraph,
bundles: {}
};
Object.keys(entryPoints).forEach((moduleToBundle: string) => {
const info = entryPoints[moduleToBundle];
const rootNodes = [moduleToBundle].concat(info.include || []);
const allDependencies = visit(rootNodes, modulesGraph);
const excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
let info = entryPoints[moduleToBundle];
let rootNodes = [moduleToBundle].concat(info.include || []);
let allDependencies = visit(rootNodes, modulesGraph);
let excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach((excludeRoot: string) => {
const allExcludes = visit([excludeRoot], modulesGraph);
let allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude: string) => {
delete allDependencies[exclude];
});
});
const includedModules = sortedModules.filter((module: string) => {
let includedModules = sortedModules.filter((module: string) => {
return allDependencies[module];
});
bundleData.bundles[moduleToBundle] = includedModules;
const res = emitEntryPoint(
let res = emitEntryPoint(
modulesMap,
modulesGraph,
moduleToBundle,
includedModules,
info.prepend || [],
info.append || [],
info.prepend,
info.append,
info.dest
);
result = result.concat(res.files);
for (const pluginName in res.usedPlugins) {
for (let pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
}
});
Object.keys(usedPlugins).forEach((pluginName: string) => {
const plugin = usedPlugins[pluginName];
let plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') {
const write = (filename: string, contents: string) => {
let write = (filename: string, contents: string) => {
result.push({
dest: filename,
sources: [{
@@ -241,16 +237,16 @@ function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMa
}
function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
const parseDefineCall = (moduleMatch: string, depsMatch: string) => {
const module = moduleMatch.replace(/^"|"$/g, '');
let parseDefineCall = (moduleMatch: string, depsMatch: string) => {
let module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(',');
deps = deps.map((dep) => {
dep = dep.trim();
dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, '');
let prefix: string | null = null;
let _path: string | null = null;
const pieces = dep.split('!');
let prefix: string = null;
let _path: string = null;
let pieces = dep.split('!');
if (pieces.length > 1) {
prefix = pieces[0] + '!';
_path = pieces[1];
@@ -260,7 +256,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
}
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
let res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res;
}
return prefix + _path;
@@ -271,7 +267,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
};
};
destFiles.forEach((destFile) => {
destFiles.forEach((destFile, index) => {
if (!/\.js$/.test(destFile.dest)) {
return;
}
@@ -280,33 +276,33 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
}
// Do one pass to record the usage counts for each module id
const useCounts: { [moduleId: string]: number; } = {};
let useCounts: { [moduleId: string]: number; } = {};
destFile.sources.forEach((source) => {
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
let matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) {
return;
}
const defineCall = parseDefineCall(matches[1], matches[2]);
let defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach((dep) => {
useCounts[dep] = (useCounts[dep] || 0) + 1;
});
});
const sortedByUseModules = Object.keys(useCounts);
let sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort((a, b) => {
return useCounts[b] - useCounts[a];
});
const replacementMap: { [moduleId: string]: number; } = {};
let replacementMap: { [moduleId: string]: number; } = {};
sortedByUseModules.forEach((module, index) => {
replacementMap[module] = index;
});
destFile.sources.forEach((source) => {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
const defineCall = parseDefineCall(moduleMatch, depsMatch);
let defineCall = parseDefineCall(moduleMatch, depsMatch);
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
});
});
@@ -336,7 +332,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
// Taken from typescript compiler => emitFiles
const BOILERPLATE = [
let BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ },
@@ -347,22 +343,22 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
];
destFiles.forEach((destFile) => {
const SEEN_BOILERPLATE: boolean[] = [];
let SEEN_BOILERPLATE = [];
destFile.sources.forEach((source) => {
const lines = source.contents.split(/\r\n|\n|\r/);
const newLines: string[] = [];
let lines = source.contents.split(/\r\n|\n|\r/);
let newLines: string[] = [];
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
let line = lines[i];
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
if (END_BOILERPLATE!.test(line)) {
if (END_BOILERPLATE.test(line)) {
IS_REMOVING_BOILERPLATE = false;
}
} else {
for (let j = 0; j < BOILERPLATE.length; j++) {
const boilerplate = BOILERPLATE[j];
let boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true;
@@ -402,19 +398,19 @@ function emitEntryPoint(
includedModules: string[],
prepend: string[],
append: string[],
dest: string | undefined
dest: string
): IEmitEntryPointResult {
if (!dest) {
dest = entryPoint + '.js';
}
const mainResult: IConcatFile = {
let mainResult: IConcatFile = {
sources: [],
dest: dest
},
results: IConcatFile[] = [mainResult];
const usedPlugins: IPluginMap = {};
const getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
let usedPlugins: IPluginMap = {};
let getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports;
}
@@ -422,39 +418,39 @@ function emitEntryPoint(
};
includedModules.forEach((c: string) => {
const bangIndex = c.indexOf('!');
let bangIndex = c.indexOf('!');
if (bangIndex >= 0) {
const pluginName = c.substr(0, bangIndex);
const plugin = getLoaderPlugin(pluginName);
let pluginName = c.substr(0, bangIndex);
let plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return;
}
const module = modulesMap[c];
let module = modulesMap[c];
if (module.path === 'empty:') {
return;
}
const contents = readFileAndRemoveBOM(module.path);
let contents = readFileAndRemoveBOM(module.path);
if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
} else {
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
}
});
Object.keys(usedPlugins).forEach((pluginName: string) => {
const plugin = usedPlugins[pluginName];
let plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') {
const req: ILoaderPluginReqFunc = <any>(() => {
let req: ILoaderPluginReqFunc = <any>(() => {
throw new Error('no-no!');
});
req.toUrl = something => something;
const write = (filename: string, contents: string) => {
let write = (filename: string, contents: string) => {
results.push({
dest: filename,
sources: [{
@@ -467,16 +463,16 @@ function emitEntryPoint(
}
});
const toIFile = (path: string): IFile => {
const contents = readFileAndRemoveBOM(path);
let toIFile = (path): IFile => {
let contents = readFileAndRemoveBOM(path);
return {
path: path,
contents: contents
};
};
const toPrepend = (prepend || []).map(toIFile);
const toAppend = (append || []).map(toIFile);
let toPrepend = (prepend || []).map(toIFile);
let toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
@@ -487,8 +483,8 @@ function emitEntryPoint(
}
function readFileAndRemoveBOM(path: string): string {
const BOM_CHAR_CODE = 65279;
let contents = fs.readFileSync(path, 'utf8');
var BOM_CHAR_CODE = 65279;
var contents = fs.readFileSync(path, 'utf8');
// Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1);
@@ -499,7 +495,7 @@ function readFileAndRemoveBOM(path: string): string {
function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile {
let result = '';
if (typeof plugin.write === 'function') {
const write: ILoaderPluginWriteFunc = <any>((what: string) => {
let write: ILoaderPluginWriteFunc = <any>((what) => {
result += what;
});
write.getEntryPoint = () => {
@@ -517,15 +513,15 @@ function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: strin
};
}
function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path: string, contents: string): IFile {
function emitNamedModule(moduleId: string, myDeps: string[], defineCallPosition: IPosition, path: string, contents: string): IFile {
// `defineCallPosition` is the position in code: |define()
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
let defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|()
const parensOffset = contents.indexOf('(', defineCallOffset);
let parensOffset = contents.indexOf('(', defineCallOffset);
const insertStr = '"' + moduleId + '", ';
let insertStr = '"' + moduleId + '", ';
return {
path: path,
@@ -534,8 +530,8 @@ function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path:
}
function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile {
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
let strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
let strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return {
path: path,
contents: contents + '\n;\n' + strDefine
@@ -550,8 +546,8 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
return desiredCol - 1;
}
let line = 1;
let lastNewLineOffset = -1;
let line = 1,
lastNewLineOffset = -1;
do {
if (desiredLine === line) {
@@ -569,16 +565,16 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
* Return a set of reachable nodes in `graph` starting from `rootNodes`
*/
function visit(rootNodes: string[], graph: IGraph): INodeSet {
const result: INodeSet = {};
const queue = rootNodes;
let result: INodeSet = {},
queue = rootNodes;
rootNodes.forEach((node) => {
result[node] = true;
});
while (queue.length > 0) {
const el = queue.shift();
const myEdges = graph[el!] || [];
let el = queue.shift();
let myEdges = graph[el] || [];
myEdges.forEach((toNode) => {
if (!result[toNode]) {
result[toNode] = true;
@@ -595,7 +591,7 @@ function visit(rootNodes: string[], graph: IGraph): INodeSet {
*/
function topologicalSort(graph: IGraph): string[] {
const allNodes: INodeSet = {},
let allNodes: INodeSet = {},
outgoingEdgeCount: { [node: string]: number; } = {},
inverseEdges: IGraph = {};
@@ -613,7 +609,7 @@ function topologicalSort(graph: IGraph): string[] {
});
// https://en.wikipedia.org/wiki/Topological_sorting
const S: string[] = [],
let S: string[] = [],
L: string[] = [];
Object.keys(allNodes).forEach((node: string) => {
@@ -627,10 +623,10 @@ function topologicalSort(graph: IGraph): string[] {
// Ensure the exact same order all the time with the same inputs
S.sort();
const n: string = S.shift()!;
let n: string = S.shift();
L.push(n);
const myInverseEdges = inverseEdges[n] || [];
let myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach((m: string) => {
outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) {
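The type-level churn in this file (dropping `| null` unions and `!` assertions on one side) is a strict-null-checking difference between the two branches. What those annotations buy, in brief, under strictNullChecks:

const graph: { [node: string]: string[] } = { a: [] };
const queue: string[] = ['a'];

// Array.prototype.shift() is typed string | undefined:
const el = queue.shift();
// graph[el];             // rejected: 'el' is possibly undefined
const edges = graph[el!]; // '!' asserts non-null -- fine here, the queue was non-empty

// Absence becomes part of the type rather than a convention:
let prefix: string | null = null;
prefix = 'vs/css!';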

View File

@@ -4,54 +4,41 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");
const bom = require("gulp-bom");
const sourcemaps = require("gulp-sourcemaps");
const tsb = require("gulp-tsb");
const path = require("path");
const _ = require("underscore");
const monacodts = require("../monaco/api");
const nls = require("./nls");
const reporter_1 = require("./reporter");
const util = require("./util");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const watch = require('./watch');
const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options;
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
var gulp = require("gulp");
var tsb = require("gulp-tsb");
var es = require("event-stream");
var watch = require('./watch');
var nls = require("./nls");
var util = require("./util");
var reporter_1 = require("./reporter");
var path = require("path");
var bom = require("gulp-bom");
var sourcemaps = require("gulp-sourcemaps");
var _ = require("underscore");
var monacodts = require("../monaco/api");
var fs = require("fs");
var reporter = reporter_1.createReporter();
var rootDir = path.join(__dirname, '../../src');
var options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
function createCompile(src, build, emitError) {
const opts = _.clone(getTypeScriptCompilerOptions(src));
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
function createCompile(build, emitError) {
var opts = _.clone(options);
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
return function (token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
const input = es.through();
const output = input
var utf8Filter = util.filter(function (data) { return /(\/|\\)test(\/|\\).*utf8/.test(data.path); });
var tsFilter = util.filter(function (data) { return /\.ts$/.test(data.path); });
var noDeclarationsFilter = util.filter(function (data) { return !(/\.d\.ts$/.test(data.path)); });
var input = es.through();
var output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(utf8Filter.restore)
@@ -64,139 +51,93 @@ function createCompile(src, build, emitError) {
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: options.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
.pipe(reporter.end(emitError));
return es.duplex(input, output);
};
}
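createCompile's return value uses a small event-stream idiom worth naming: assemble an internal pipeline, then expose it as a single stream whose writable end is the pipeline's input and whose readable end is its output. A minimal sketch with the same library:

const es = require('event-stream');

function upperCaser() {
    const input = es.through();
    const output = input.pipe(es.mapSync(chunk => String(chunk).toUpperCase()));
    return es.duplex(input, output);  // write plain chunks in, read upper-cased out
}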
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) {
function compileTask(out, build) {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
}
return srcPipe
.pipe(generator.stream)
var compile = createCompile(build, true);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return src
.pipe(compile())
.pipe(gulp.dest(out));
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, false));
};
}
exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
const compile = createCompile('src', build);
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true);
generator.execute();
var compile = createCompile(build);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
var watchSrc = watch('src/**', { base: 'src' });
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return watchSrc
.pipe(generator.stream)
.pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out));
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
};
}
exports.watchTask = watchTask;
const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
class MonacoGenerator {
constructor(isWatch) {
this._executeSoonTimer = null;
this._isWatch = isWatch;
this.stream = es.through();
this._watchers = [];
this._watchedFiles = {};
let onWillReadFile = (moduleId, filePath) => {
if (!this._isWatch) {
return;
}
if (this._watchedFiles[filePath]) {
return;
}
this._watchedFiles[filePath] = true;
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
readFileSync(moduleId, filePath) {
onWillReadFile(moduleId, filePath);
return super.readFileSync(moduleId, filePath);
}
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
function monacodtsTask(out, isWatch) {
var basePath = path.resolve(process.cwd(), out);
var neededFiles = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
neededFiles[filePath] = true;
});
var inputFiles = {};
for (var filePath in neededFiles) {
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
// This file is needed from source => simply read it now
inputFiles[filePath] = fs.readFileSync(filePath).toString();
}
}
_executeSoon() {
if (this._executeSoonTimer !== null) {
clearTimeout(this._executeSoonTimer);
this._executeSoonTimer = null;
}
this._executeSoonTimer = setTimeout(() => {
this._executeSoonTimer = null;
this.execute();
}, 20);
}
dispose() {
this._watchers.forEach(watcher => watcher.close());
}
_run() {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}
_log(message, ...rest) {
fancyLog(ansiColors.cyan('[monaco.d.ts]'), message, ...rest);
}
execute() {
const startTime = Date.now();
const result = this._run();
if (!result) {
// nothing really changed
var setInputFile = function (filePath, contents) {
if (inputFiles[filePath] === contents) {
// no change
return;
}
if (result.isTheSame) {
return;
inputFiles[filePath] = contents;
var neededInputFilesCount = Object.keys(neededFiles).length;
var availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
}
fs.writeFileSync(result.filePath, result.content);
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
if (!this._isWatch) {
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
};
var run = function () {
var result = monacodts.run(out, inputFiles);
if (!result.isTheSame) {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
}
else {
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
};
var resultStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
}
resultStream = es.through(function (data) {
var filePath = path.normalize(path.resolve(basePath, data.relative));
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
}
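The getTypeScriptCompilerOptions variant above resolves a tsconfig "extends" reference with a single shallow Object.assign, so only one level of inheritance is honored. The values below are illustrative:

const base = { module: 'amd', sourceMap: false, strict: true };  // extended config
const child = { sourceMap: true };                                // local tsconfig
const merged = Object.assign({}, base, child);
// -> { module: 'amd', sourceMap: true, strict: true }
// Child keys win wholesale; nested objects such as "paths" would be
// replaced outright rather than deep-merged, and the chain is not recursive.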

View File

@@ -5,52 +5,39 @@
'use strict';
import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp';
import * as tsb from 'gulp-tsb';
import * as es from 'event-stream';
const watch = require('./watch');
import * as nls from './nls';
import * as util from './util';
import { createReporter } from './reporter';
import * as path from 'path';
import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as _ from 'underscore';
import * as monacodts from '../monaco/api';
import * as nls from './nls';
import { createReporter } from './reporter';
import * as util from './util';
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
const watch = require('./watch');
import * as fs from 'fs';
const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string) {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options: { [key: string]: any };
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
} else {
options = tsconfig.compilerOptions;
}
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
const rootDir = path.join(__dirname, '../../src');
const options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(getTypeScriptCompilerOptions(src));
function createCompile(build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(options);
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
const ts = tsb.create(opts, null, null, err => reporter(err.toString()));
return function (token?: util.ICancellationToken) {
@@ -72,173 +59,117 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: options.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
.pipe(reporter.end(emitError));
return es.duplex(input, output);
};
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
export function compileTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile(src, build, true);
const compile = createCompile(build, true);
const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }),
gulp.src(typesDts),
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
}
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return srcPipe
.pipe(generator.stream)
return src
.pipe(compile())
.pipe(gulp.dest(out));
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, false));
};
}
export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile('src', build);
const compile = createCompile(build);
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts),
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);
const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true);
generator.execute();
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return watchSrc
.pipe(generator.stream)
.pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out));
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
};
}
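For orientation, a sketch of how these exported tasks might be registered in a gulpfile; the task names and the require path are assumptions, not taken from this diff:

const gulp = require('gulp');
const compilation = require('./build/lib/compilation');
// compile ./src into ./out once, without the extra "build" optimizations
gulp.task('compile-client', compilation.compileTask('src', 'out', false));
// recompile incrementally as files under ./src change
gulp.task('watch-client', compilation.watchTask('out', false));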
const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
class MonacoGenerator {
private readonly _isWatch: boolean;
public readonly stream: NodeJS.ReadWriteStream;
const basePath = path.resolve(process.cwd(), out);
private readonly _watchers: fs.FSWatcher[];
private readonly _watchedFiles: { [filePath: string]: boolean; };
private readonly _fsProvider: monacodts.FSProvider;
private readonly _declarationResolver: monacodts.DeclarationResolver;
const neededFiles: { [file: string]: boolean; } = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
neededFiles[filePath] = true;
});
constructor(isWatch: boolean) {
this._isWatch = isWatch;
this.stream = es.through();
this._watchers = [];
this._watchedFiles = {};
let onWillReadFile = (moduleId: string, filePath: string) => {
if (!this._isWatch) {
return;
}
if (this._watchedFiles[filePath]) {
return;
}
this._watchedFiles[filePath] = true;
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
public readFileSync(moduleId: string, filePath: string): Buffer {
onWillReadFile(moduleId, filePath);
return super.readFileSync(moduleId, filePath);
}
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
const inputFiles: { [file: string]: string; } = {};
for (let filePath in neededFiles) {
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
// This file is needed from source => simply read it now
inputFiles[filePath] = fs.readFileSync(filePath).toString();
}
}
private _executeSoonTimer: NodeJS.Timer | null = null;
private _executeSoon(): void {
if (this._executeSoonTimer !== null) {
clearTimeout(this._executeSoonTimer);
this._executeSoonTimer = null;
}
this._executeSoonTimer = setTimeout(() => {
this._executeSoonTimer = null;
this.execute();
}, 20);
}
public dispose(): void {
this._watchers.forEach(watcher => watcher.close());
}
private _run(): monacodts.IMonacoDeclarationResult | null {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}
private _log(message: any, ...rest: any[]): void {
fancyLog(ansiColors.cyan('[monaco.d.ts]'), message, ...rest);
}
public execute(): void {
const startTime = Date.now();
const result = this._run();
if (!result) {
// nothing really changed
const setInputFile = (filePath: string, contents: string) => {
if (inputFiles[filePath] === contents) {
// no change
return;
}
if (result.isTheSame) {
return;
inputFiles[filePath] = contents;
const neededInputFilesCount = Object.keys(neededFiles).length;
const availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
}
};
fs.writeFileSync(result.filePath, result.content);
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
if (!this._isWatch) {
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
const run = () => {
const result = monacodts.run(out, inputFiles);
if (!result.isTheSame) {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
} else {
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
};
let resultStream: NodeJS.ReadWriteStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
}
resultStream = es.through(function (data) {
const filePath = path.normalize(path.resolve(basePath, data.relative));
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
}

View File

@@ -11,7 +11,6 @@ const root = path.dirname(path.dirname(__dirname));
function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
@@ -20,7 +19,6 @@ function getElectronVersion() {
module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) {
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');

View File

@@ -4,330 +4,115 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const fs = require("fs");
const glob = require("glob");
const gulp = require("gulp");
const path = require("path");
const File = require("vinyl");
const vsce = require("vsce");
const stats_1 = require("./stats");
const util2 = require("./util");
const remote = require("gulp-remote-src");
const vzip = require('gulp-vinyl-zip');
const filter = require("gulp-filter");
const rename = require("gulp-rename");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const buffer = require('gulp-buffer');
const json = require("gulp-json-editor");
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const root = path.resolve(path.join(__dirname, '..', '..'));
// {{SQL CARBON EDIT}}
const _ = require("underscore");
const vfs = require("vinyl-fs");
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
const visxDirectory = path.join(path.dirname(root), 'vsix');
try {
if (!fs.existsSync(visxDirectory)) {
fs.mkdirSync(visxDirectory);
}
}
catch (err) {
// don't fail the build if the output directory already exists
console.warn(err);
}
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
console.info('Creating vsix for ' + element.path + ' result: ' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
}
exports.packageBuiltInExtensions = packageBuiltInExtensions;
function packageExtensionTask(extensionName, platform, arch) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
}
else {
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
}
platform = platform || process.platform;
return () => {
const root = path.resolve(path.join(__dirname, '../..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => extensionName === name);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util2.skipDirectories())
.pipe(util2.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
}
exports.packageExtensionTask = packageExtensionTask;
// {{SQL CARBON EDIT}} - End
function fromLocal(extensionPath, sourceMappingURLBase) {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
}
else {
return fromLocalNormal(extensionPath);
}
}
function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
const result = es.through();
const packagedDependencies = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
const filesStream = es.readArray(files);
// check for webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = glob.sync(path.join(extensionPath, '/**/extension.webpack.config.js'), { ignore: ['**/node_modules'] });
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', '/dist/');
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackDone = (err, stats) => {
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
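For context, a minimal extension.webpack.config.js of the shape fromLocalWebpack probes for might look like the sketch below; the entry point, loader, and paths are illustrative assumptions:

'use strict';
const path = require('path');
module.exports = {
    mode: 'none', // the pipeline above overrides this with 'production'
    target: 'node',
    entry: { extension: './src/extension.ts' },
    output: {
        path: path.join(__dirname, 'dist'), // read back as webpackConfig.output.path
        filename: 'extension.js',
        libraryTarget: 'commonjs'
    },
    // keys listed here that are also package.json dependencies stay packaged in node_modules
    externals: { vscode: 'commonjs vscode' },
    resolve: { extensions: ['.ts', '.js'] },
    module: { rules: [{ test: /\.ts$/, exclude: /node_modules/, use: ['ts-loader'] }] }
};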
function fromLocalNormal(extensionPath) {
const result = es.through();
var es = require("event-stream");
var assign = require("object-assign");
var remote = require("gulp-remote-src");
var flatmap = require('gulp-flatmap');
var vzip = require('gulp-vinyl-zip');
var filter = require('gulp-filter');
var rename = require('gulp-rename');
var util = require('gulp-util');
var buffer = require('gulp-buffer');
var json = require('gulp-json-editor');
var fs = require("fs");
var path = require("path");
var vsce = require("vsce");
var File = require("vinyl");
function fromLocal(extensionPath) {
var result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
.then(function (fileNames) {
var files = fileNames
.map(function (fileName) { return path.join(extensionPath, fileName); })
.map(function (filePath) { return new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
}); });
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
function fromMarketplace(extensionName, version, metadata) {
const [publisher, name] = extensionName.split('.');
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: url,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
const packageJsonFilter = filter('package.json', { restore: true });
return remote('', options)
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}
exports.fromMarketplace = fromMarketplace;
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'vscode-test-resolver',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
// {{SQL CARBON EDIT}}
'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// these extensions will be excluded from the SQLOps package and will have separate vsix packages
'admin-tool-ext-win',
'agent',
'import',
'profiler',
'admin-pack',
'big-data-cluster',
'dacpac',
'schema-compare',
'resource-deployment',
'cms'
];
const builtInExtensions = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
* marketplace server cuts off the http request. So, we sequentialize the extension tasks.
*/
function sequence(streamProviders) {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
}
else {
const fn = streamProviders.shift();
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
.catch(function (err) { return result.emit('error', err); });
return result;
}
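A hedged usage sketch: each entry is a zero-argument stream factory, so only one extension stream is live at a time (the extension names are made up):

const packaged = sequence([
    () => fromLocal(path.join(root, 'extensions', 'git')),
    () => fromLocal(path.join(root, 'extensions', 'html'))
]);
packaged.pipe(vfs.dest('out-extensions'));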
function packageExtensionsStream(optsIn) {
const opts = optsIn || {};
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map((d) => path.relative(root, d.path)).map((d) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']));
// Original code commented out here
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
// const marketplaceExtensions = () => es.merge(
// ...builtInExtensions
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
// .map(extension => {
// return fromMarketplace(extension.name, extension.version, extension.metadata)
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
// })
// );
return sequence([localExtensions, localExtensionDependencies,])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
// {{SQL CARBON EDIT}} - End
exports.fromLocal = fromLocal;
function error(err) {
var result = es.through();
setTimeout(function () { return result.emit('error', err); });
return result;
}
exports.packageExtensionsStream = packageExtensionsStream;
var baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
};
function fromMarketplace(extensionName, version) {
var filterType = 7;
var value = extensionName;
var criterium = { filterType: filterType, value: value };
var criteria = [criterium];
var pageNumber = 1;
var pageSize = 1;
var sortBy = 0;
var sortOrder = 0;
var flags = 0x1 | 0x2 | 0x80;
var assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
var filters = [{ criteria: criteria, pageNumber: pageNumber, pageSize: pageSize, sortBy: sortBy, sortOrder: sortOrder }];
var body = JSON.stringify({ filters: filters, assetTypes: assetTypes, flags: flags });
var headers = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
var options = {
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: {
method: 'POST',
gzip: true,
headers: headers,
body: body
}
};
return remote('/extensionquery', options)
.pipe(flatmap(function (stream, f) {
var rawResult = f.contents.toString('utf8');
var result = JSON.parse(rawResult);
var extension = result.results[0].extensions[0];
if (!extension) {
return error("No such extension: " + extension);
}
var metadata = {
id: extension.extensionId,
publisherId: extension.publisher,
publisherDisplayName: extension.publisher.displayName
};
var extensionVersion = extension.versions.filter(function (v) { return v.version === version; })[0];
if (!extensionVersion) {
return error("No such extension version: " + extensionName + " @ " + version);
}
var asset = extensionVersion.files.filter(function (f) { return f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage'; })[0];
if (!asset) {
return error("No VSIX found for extension version: " + extensionName + " @ " + version);
}
util.log('Downloading extension:', util.colors.yellow(extensionName + "@" + version), '...');
var options = {
base: asset.source,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
return remote('', options)
.pipe(flatmap(function (stream) {
var packageJsonFilter = filter('package.json', { restore: true });
return stream
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(function (p) { return p.dirname = p.dirname.replace(/^extension\/?/, ''); }))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}));
}));
}
exports.fromMarketplace = fromMarketplace;
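To make the query above concrete: filterType 7 appears to select "filter by fully qualified extension name", and flags 0x1 | 0x2 | 0x80 appear to request versions, files, and asset URIs. The POST body for a hypothetical download would then look roughly like this sketch, not a captured request:

var exampleBody = {
    filters: [{
        criteria: [{ filterType: 7, value: 'ms-vscode.node-debug2' }],
        pageNumber: 1,
        pageSize: 1,
        sortBy: 0,
        sortOrder: 0
    }],
    assetTypes: ['Microsoft.VisualStudio.Services.VSIXPackage'],
    flags: 131 // 0x1 | 0x2 | 0x80, assumed to mean include versions, files and asset URIs
};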

View File

@@ -4,232 +4,22 @@
*--------------------------------------------------------------------------------------------*/
import * as es from 'event-stream';
import * as fs from 'fs';
import * as glob from 'glob';
import * as gulp from 'gulp';
import * as path from 'path';
import { Stream } from 'stream';
import * as File from 'vinyl';
import * as vsce from 'vsce';
import { createStatsStream } from './stats';
import * as util2 from './util';
import assign = require('object-assign');
import remote = require('gulp-remote-src');
const flatmap = require('gulp-flatmap');
const vzip = require('gulp-vinyl-zip');
import filter = require('gulp-filter');
import rename = require('gulp-rename');
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
const filter = require('gulp-filter');
const rename = require('gulp-rename');
const util = require('gulp-util');
const buffer = require('gulp-buffer');
import json = require('gulp-json-editor');
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const json = require('gulp-json-editor');
import * as fs from 'fs';
import * as path from 'path';
import * as vsce from 'vsce';
import * as File from 'vinyl';
const root = path.resolve(path.join(__dirname, '..', '..'));
// {{SQL CARBON EDIT}}
import * as _ from 'underscore';
import * as vfs from 'vinyl-fs';
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
export function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
const visxDirectory = path.join(path.dirname(root), 'vsix');
try {
if (!fs.existsSync(visxDirectory)) {
fs.mkdirSync(visxDirectory);
}
} catch (err) {
// don't fail the build if the output directory already exists
console.warn(err);
}
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
console.info('Creating vsix for ' + element.path + ' result: ' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
}
export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
} else {
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
}
platform = platform || process.platform;
return () => {
const root = path.resolve(path.join(__dirname, '../..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => extensionName === name);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util2.skipDirectories())
.pipe(util2.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
}
// {{SQL CARBON EDIT}} - End
function fromLocal(extensionPath: string, sourceMappingURLBase?: string): Stream {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
} else {
return fromLocalNormal(extensionPath);
}
}
function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string | undefined): Stream {
const result = es.through();
const packagedDependencies: string[] = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
const filesStream = es.readArray(files);
// check for webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = (<string[]>glob.sync(
path.join(extensionPath, '/**/extension.webpack.config.js'),
{ ignore: ['**/node_modules'] }
));
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data: any) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', '/dist/');
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackDone = (err: any, stats: any) => {
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = {
...require(webpackConfigPath),
...{ mode: 'production' }
};
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data: File) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = (<Buffer>data.contents).toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath: string): Stream {
export function fromLocal(extensionPath: string): Stream {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
@@ -247,151 +37,96 @@ function fromLocalNormal(extensionPath: string): Stream {
})
.catch(err => result.emit('error', err));
return result.pipe(createStatsStream(path.basename(extensionPath)));
return result;
}
function error(err: any): Stream {
const result = es.through();
setTimeout(() => result.emit('error', err));
return result;
}
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream {
const [publisher, name] = extensionName.split('.');
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
export function fromMarketplace(extensionName: string, version: string): Stream {
const filterType = 7;
const value = extensionName;
const criterium = { filterType, value };
const criteria = [criterium];
const pageNumber = 1;
const pageSize = 1;
const sortBy = 0;
const sortOrder = 0;
const flags = 0x1 | 0x2 | 0x80;
const assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
const filters = [{ criteria, pageNumber, pageSize, sortBy, sortOrder }];
const body = JSON.stringify({ filters, assetTypes, flags });
const headers: any = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
const options = {
base: url,
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: {
method: 'POST',
gzip: true,
headers: baseHeaders
headers,
body: body
}
};
const packageJsonFilter = filter('package.json', { restore: true });
return remote('/extensionquery', options)
.pipe(flatmap((stream, f) => {
const rawResult = f.contents.toString('utf8');
const result = JSON.parse(rawResult);
const extension = result.results[0].extensions[0];
if (!extension) {
return error(`No such extension: ${extensionName}`);
}
return remote('', options)
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname!.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}
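By contrast, the rewritten fromMarketplace above downloads in a single GET; for a hypothetical ms-vscode.node-debug2 at version 1.29.0 the constructed URL would be:

const [publisher, name] = 'ms-vscode.node-debug2'.split('.');
// publisher === 'ms-vscode', name === 'node-debug2', so url becomes:
// https://marketplace.visualstudio.com/_apis/public/gallery/publishers/ms-vscode/vsextensions/node-debug2/1.29.0/vspackage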
interface IPackageExtensionsOptions {
/**
* Set to undefined to package all of them.
*/
desiredExtensions?: string[];
sourceMappingURLBase?: string;
}
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'vscode-test-resolver',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
// {{SQL CARBON EDIT}}
'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// these extensions will be excluded from the SQLOps package and will have separate vsix packages
'admin-tool-ext-win',
'agent',
'import',
'profiler',
'admin-pack',
'big-data-cluster',
'dacpac',
'schema-compare',
'resource-deployment',
'cms'
];
// {{SQL CARBON EDIT}} - End
interface IBuiltInExtension {
name: string;
version: string;
repo: string;
metadata: any;
}
const builtInExtensions: IBuiltInExtension[] = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
* marketplace server cuts off the http request. So, we sequentialize the extension tasks.
*/
function sequence(streamProviders: { (): Stream }[]): Stream {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
} else {
const fn = streamProviders.shift()!;
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result;
}
export function packageExtensionsStream(optsIn?: IPackageExtensionsOptions): NodeJS.ReadWriteStream {
const opts = optsIn || {};
const localExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map((d: any) => path.relative(root, d.path)).map((d: any) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
// Original code commented out here
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
// const marketplaceExtensions = () => es.merge(
// ...builtInExtensions
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
// .map(extension => {
// return fromMarketplace(extension.name, extension.version, extension.metadata)
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
// })
// );
return sequence([localExtensions, localExtensionDependencies, /*marketplaceExtensions*/])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
// {{SQL CARBON EDIT}} - End
const metadata = {
id: extension.extensionId,
publisherId: extension.publisher,
publisherDisplayName: extension.publisher.displayName
};
const extensionVersion = extension.versions.filter(v => v.version === version)[0];
if (!extensionVersion) {
return error(`No such extension version: ${extensionName} @ ${version}`);
}
const asset = extensionVersion.files.filter(f => f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage')[0];
if (!asset) {
return error(`No VSIX found for extension version: ${extensionName} @ ${version}`);
}
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: asset.source,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
return remote('', options)
.pipe(flatmap(stream => {
const packageJsonFilter = filter('package.json', { restore: true });
return stream
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}));
}));
}

View File

@@ -4,47 +4,47 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const fs = require("fs");
var path = require("path");
var fs = require("fs");
/**
* Returns the sha1 commit version of a repository or undefined in case of failure.
*/
function getVersion(repo) {
const git = path.join(repo, '.git');
const headPath = path.join(git, 'HEAD');
let head;
var git = path.join(repo, '.git');
var headPath = path.join(git, 'HEAD');
var head;
try {
head = fs.readFileSync(headPath, 'utf8').trim();
}
catch (e) {
return undefined;
return void 0;
}
if (/^[0-9a-f]{40}$/i.test(head)) {
return head;
}
const refMatch = /^ref: (.*)$/.exec(head);
var refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) {
return undefined;
return void 0;
}
const ref = refMatch[1];
const refPath = path.join(git, ref);
var ref = refMatch[1];
var refPath = path.join(git, ref);
try {
return fs.readFileSync(refPath, 'utf8').trim();
}
catch (e) {
// noop
}
const packedRefsPath = path.join(git, 'packed-refs');
let refsRaw;
var packedRefsPath = path.join(git, 'packed-refs');
var refsRaw;
try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
}
catch (e) {
return undefined;
return void 0;
}
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch;
let refs = {};
var refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
var refsMatch;
var refs = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];
}
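A quick sketch of the two on-disk formats getVersion parses (contents illustrative, not from this repository):

// .git/HEAD with a branch checked out:
//   ref: refs/heads/master
// .git/HEAD in detached state is a bare 40-hex-digit sha:
//   0123456789abcdef0123456789abcdef01234567
// .git/packed-refs holds one "<sha> <ref>" pair per line:
//   0123456789abcdef0123456789abcdef01234567 refs/heads/master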

View File

@@ -10,7 +10,7 @@ import * as fs from 'fs';
/**
* Returns the sha1 commit version of a repository or undefined in case of failure.
*/
export function getVersion(repo: string): string | undefined {
export function getVersion(repo: string): string {
const git = path.join(repo, '.git');
const headPath = path.join(git, 'HEAD');
let head: string;
@@ -18,7 +18,7 @@ export function getVersion(repo: string): string | undefined {
try {
head = fs.readFileSync(headPath, 'utf8').trim();
} catch (e) {
return undefined;
return void 0;
}
if (/^[0-9a-f]{40}$/i.test(head)) {
@@ -28,7 +28,7 @@ export function getVersion(repo: string): string | undefined {
const refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) {
return undefined;
return void 0;
}
const ref = refMatch[1];
@@ -46,11 +46,11 @@ export function getVersion(repo: string): string | undefined {
try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
} catch (e) {
return undefined;
return void 0;
}
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch: RegExpExecArray | null;
let refsMatch: RegExpExecArray;
let refs: { [ref: string]: string } = {};
while (refsMatch = refsRegex.exec(refsRaw)) {

File diff suppressed because it is too large

View File

@@ -27,173 +27,129 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/api/common",
"name": "vs/workbench/parts/cli",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/cli",
"name": "vs/workbench/parts/codeEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/codeEditor",
"name": "vs/workbench/parts/debug",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/codeinset",
"name": "vs/workbench/parts/emmet",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/callHierarchy",
"name": "vs/workbench/parts/execution",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/comments",
"name": "vs/workbench/parts/extensions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/debug",
"name": "vs/workbench/parts/feedback",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/emmet",
"name": "vs/workbench/parts/files",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/extensions",
"name": "vs/workbench/parts/html",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/externalTerminal",
"name": "vs/workbench/parts/markers",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/feedback",
"name": "vs/workbench/parts/localizations",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/files",
"name": "vs/workbench/parts/logs",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/html",
"name": "vs/workbench/parts/output",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/issue",
"name": "vs/workbench/parts/performance",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/markers",
"name": "vs/workbench/parts/preferences",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/localizations",
"name": "vs/workbench/parts/quickopen",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/logs",
"name": "vs/workbench/parts/relauncher",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/output",
"name": "vs/workbench/parts/scm",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/performance",
"name": "vs/workbench/parts/search",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/preferences",
"name": "vs/workbench/parts/snippets",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/quickopen",
"name": "vs/workbench/parts/surveys",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/remote",
"name": "vs/workbench/parts/tasks",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/relauncher",
"name": "vs/workbench/parts/terminal",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/scm",
"name": "vs/workbench/parts/themes",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/search",
"name": "vs/workbench/parts/trust",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/snippets",
"name": "vs/workbench/parts/update",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/format",
"name": "vs/workbench/parts/url",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/stats",
"name": "vs/workbench/parts/watermark",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/surveys",
"name": "vs/workbench/parts/webview",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/tasks",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/terminal",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/themes",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/trust",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/update",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/url",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/watermark",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/webview",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/welcome",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/outline",
"name": "vs/workbench/parts/welcome",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/bulkEdit",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/commands",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/configuration",
"project": "vscode-workbench"
@@ -219,17 +175,13 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/extensionManagement",
"name": "vs/workbench/services/jsonschemas",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/files",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/integrity",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/keybinding",
"project": "vscode-workbench"
@@ -242,10 +194,6 @@
"name": "vs/workbench/services/progress",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/remote",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/textfile",
"project": "vscode-workbench"
@@ -266,13 +214,9 @@
"name": "vs/workbench/services/decorations",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/label",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/preferences",
"project": "vscode-preferences"
}
]
}
}

View File

@@ -7,25 +7,25 @@ import * as path from 'path';
import * as fs from 'fs';
import { through, readable, ThroughStream } from 'event-stream';
import * as File from 'vinyl';
import File = require('vinyl');
import * as Is from 'is';
import * as xml2js from 'xml2js';
import * as glob from 'glob';
import * as https from 'https';
import * as gulp from 'gulp';
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
import * as iconv from 'iconv-lite';
var util = require('gulp-util');
var iconv = require('iconv-lite');
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
function log(message: any, ...rest: any[]): void {
fancyLog(ansiColors.green('[i18n]'), message, ...rest);
util.log(util.colors.green('[i18n]'), message, ...rest);
}
export interface Language {
id: string; // language id, e.g. zh-tw, de
translationId?: string; // language id used in translation tools, e.g zh-hant, de (optional, if not set, the id is used)
id: string; // language id, e.g. zh-tw, de
transifexId?: string; // language id used in transifex, e.g zh-hant, de (optional, if not set, the id is used)
folderName?: string; // language specific folder name, e.g. cht, deu (optional, if not set, the id is used)
}
@@ -38,8 +38,8 @@ export interface InnoSetup {
}
export const defaultLanguages: Language[] = [
{ id: 'zh-tw', folderName: 'cht', translationId: 'zh-hant' },
{ id: 'zh-cn', folderName: 'chs', translationId: 'zh-hans' },
{ id: 'zh-tw', folderName: 'cht', transifexId: 'zh-hant' },
{ id: 'zh-cn', folderName: 'chs', transifexId: 'zh-hans' },
{ id: 'ja', folderName: 'jpn' },
{ id: 'ko', folderName: 'kor' },
{ id: 'de', folderName: 'deu' },
@@ -57,7 +57,7 @@ export const extraLanguages: Language[] = [
];
// non-built-in extensions that are on transifex and also need to be part of the language packs
export const externalExtensionsWithTranslations = {
const externalExtensionsWithTranslations = {
'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
'vscode-node-debug': 'ms-vscode.node-debug',
'vscode-node-debug2': 'ms-vscode.node-debug2'
@@ -71,7 +71,7 @@ interface Map<V> {
interface Item {
id: string;
message: string;
comment?: string;
comment: string;
}
export interface Resource {
@@ -137,6 +137,27 @@ module PackageJsonFormat {
}
}
interface ModuleJsonFormat {
messages: string[];
keys: (string | LocalizeInfo)[];
}
module ModuleJsonFormat {
export function is(value: any): value is ModuleJsonFormat {
let candidate = value as ModuleJsonFormat;
return Is.defined(candidate)
&& Is.array(candidate.messages) && candidate.messages.every(message => Is.string(message))
&& Is.array(candidate.keys) && candidate.keys.every(key => Is.string(key) || LocalizeInfo.is(key));
}
}
interface BundledExtensionHeaderFormat {
id: string;
type: string;
hash: string;
outDir: string;
}
interface BundledExtensionFormat {
[key: string]: {
messages: string[];
@@ -144,19 +165,10 @@ interface BundledExtensionFormat {
};
}
interface I18nFormat {
version: string;
contents: {
[module: string]: {
[messageKey: string]: string;
};
};
}
export class Line {
private buffer: string[] = [];
constructor(indent: number = 0) {
constructor(private indent: number = 0) {
if (indent > 0) {
this.buffer.push(new Array(indent + 1).join(' '));
}
@@ -223,8 +235,8 @@ export class XLF {
let existingKeys = new Set<string>();
for (let i = 0; i < keys.length; i++) {
let key = keys[i];
let realKey: string | undefined;
let comment: string | undefined;
let realKey: string;
let comment: string;
if (Is.string(key)) {
realKey = key;
comment = undefined;
@@ -274,17 +286,17 @@ export class XLF {
}
static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
return new Promise((resolve) => {
return new Promise((resolve, reject) => {
let parser = new xml2js.Parser();
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
parser.parseString(xlfString, function (_err: any, result: any) {
parser.parseString(xlfString, function (err, result) {
const fileNodes: any[] = result['xliff']['file'];
fileNodes.forEach(file => {
const originalFilePath = file.$.original;
const messages: Map<string> = {};
const transUnits = file.body[0]['trans-unit'];
if (transUnits) {
transUnits.forEach((unit: any) => {
transUnits.forEach(unit => {
const key = unit.$.id;
const val = pseudify(unit.source[0]['_'].toString());
if (key && val) {
@@ -305,7 +317,7 @@ export class XLF {
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
parser.parseString(xlfString, function (err: any, result: any) {
parser.parseString(xlfString, function (err, result) {
if (err) {
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
}
@@ -328,20 +340,17 @@ export class XLF {
const transUnits = file.body[0]['trans-unit'];
if (transUnits) {
transUnits.forEach((unit: any) => {
transUnits.forEach(unit => {
const key = unit.$.id;
if (!unit.target) {
return; // No translation available
}
let val = unit.target[0];
if (typeof val !== 'string') {
val = val._;
}
const val = unit.target.toString();
if (key && val) {
messages[key] = decodeEntities(val);
} else {
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
reject(new Error(`XLF parsing error: XLIFF file does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
}
});
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
@@ -360,7 +369,7 @@ export interface ITask<T> {
interface ILimitedTaskFactory<T> {
factory: ITask<Promise<T>>;
c: (value?: T | Promise<T>) => void;
c: (value?: T | Thenable<T>) => void;
e: (error?: any) => void;
}
@@ -382,7 +391,7 @@ export class Limiter<T> {
private consume(): void {
while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
const iLimitedTask = this.outstandingPromises.shift()!;
const iLimitedTask = this.outstandingPromises.shift();
this.runningPromises++;
const promise = iLimitedTask.factory();
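A hedged usage sketch of the limiter, assuming it exposes the usual queue(factory) method from vs/base; the URL list and download helper are invented:

const limiter = new Limiter(NUMBER_OF_CONCURRENT_DOWNLOADS);
const downloads = xlfUrls.map(url =>
    limiter.queue(() => downloadXlf(url)) // at most 4 downloads run concurrently
);
Promise.all(downloads).then(xlfFiles => { /* write them to disk */ });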
@@ -410,8 +419,8 @@ function stripComments(content: string): string {
* Third matches block comments
* Fourth matches line comments
*/
const regexp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
var regexp: RegExp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
let result = content.replace(regexp, (match, m1, m2, m3, m4) => {
// Only one of m1, m2, m3, m4 matches
if (m3) {
// A block comment. Replace with nothing
@@ -433,9 +442,9 @@ function stripComments(content: string): string {
}
function escapeCharacters(value: string): string {
const result: string[] = [];
for (let i = 0; i < value.length; i++) {
const ch = value.charAt(i);
var result: string[] = [];
for (var i = 0; i < value.length; i++) {
var ch = value.charAt(i);
switch (ch) {
case '\'':
result.push('\\\'');
@@ -475,6 +484,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
let statistics: Map<number> = Object.create(null);
let total: number = 0;
let defaultMessages: Map<Map<string>> = Object.create(null);
let modules = Object.keys(keysSection);
modules.forEach((module) => {
@@ -487,6 +497,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
let messageMap: Map<string> = Object.create(null);
defaultMessages[module] = messageMap;
keys.map((key, i) => {
total++;
if (typeof key === 'string') {
messageMap[key] = messages[i];
} else {
@@ -495,11 +506,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
});
});
let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
if (!fs.existsSync(languageDirectory)) {
log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
}
let languageDirectory = path.join(__dirname, '..', '..', 'i18n');
let sortedLanguages = sortLanguages(languages);
sortedLanguages.forEach((language) => {
if (process.env['VSCODE_BUILD_VERBOSE']) {
@@ -508,35 +515,31 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
statistics[language.id] = 0;
let localizedModules: Map<string[]> = Object.create(null);
let languageFolderName = language.translationId || language.id;
let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
let allMessages: I18nFormat | undefined;
if (fs.existsSync(i18nFile)) {
let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
allMessages = JSON.parse(content);
}
let languageFolderName = language.folderName || language.id;
let cwd = path.join(languageDirectory, languageFolderName, 'src');
modules.forEach((module) => {
let order = keysSection[module];
let moduleMessage: { [messageKey: string]: string } | undefined;
if (allMessages) {
moduleMessage = allMessages.contents[module];
}
if (!moduleMessage) {
let i18nFile = path.join(cwd, module) + '.i18n.json';
let messages: Map<string> = null;
if (fs.existsSync(i18nFile)) {
let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
messages = JSON.parse(content);
} else {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log(`No localized messages found for module ${module}. Using default messages.`);
}
moduleMessage = defaultMessages[module];
statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
messages = defaultMessages[module];
statistics[language.id] = statistics[language.id] + Object.keys(messages).length;
}
let localizedMessages: string[] = [];
order.forEach((keyInfo) => {
let key: string | null = null;
let key: string = null;
if (typeof keyInfo === 'string') {
key = keyInfo;
} else {
key = keyInfo.key;
}
let message: string = moduleMessage![key];
let message: string = messages[key];
if (!message) {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log(`No localized message found for key ${key} in module ${module}. Using default message.`);
@@ -622,7 +625,7 @@ export function getResource(sourceFile: string): Resource {
return { name: 'vs/base', project: editorProject };
} else if (/^vs\/code/.test(sourceFile)) {
return { name: 'vs/code', project: workbenchProject };
} else if (/^vs\/workbench\/contrib/.test(sourceFile)) {
} else if (/^vs\/workbench\/parts/.test(sourceFile)) {
resource = sourceFile.split('/', 4).join('/');
return { name: resource, project: workbenchProject };
} else if (/^vs\/workbench\/services/.test(sourceFile)) {
@@ -709,7 +712,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
}
return _xlf;
}
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`]).pipe(through(function (file: File) {
if (file.isBuffer()) {
const buffer: Buffer = file.contents as Buffer;
const basename = path.basename(file.path);
@@ -821,8 +824,8 @@ export function createXlfFilesForIsl(): ThroughStream {
}
export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
let tryGetPromises: Array<Promise<boolean>> = [];
let updateCreatePromises: Array<Promise<boolean>> = [];
let tryGetPromises = [];
let updateCreatePromises = [];
return through(function (this: ThroughStream, file: File) {
const project = path.dirname(file.relative);
@@ -887,7 +890,7 @@ function getAllResources(project: string, apiHostname: string, username: string,
export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
let resourcesByProject: Map<string[]> = Object.create(null);
resourcesByProject[extensionsProject] = ([] as any[]).concat(externalExtensionsWithTranslations); // clone
resourcesByProject[extensionsProject] = [].concat(externalExtensionsWithTranslations); // clone
return through(function (this: ThroughStream, file: File) {
const project = path.dirname(file.relative);
@@ -904,7 +907,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
let extractedResources: string[] = [];
let extractedResources = [];
for (let project of [workbenchProject, editorProject]) {
for (let resource of resourcesByProject[project]) {
if (resource !== 'setup_messages') {
@@ -917,7 +920,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
}
let promises: Array<Promise<void>> = [];
let promises = [];
for (let project in resourcesByProject) {
promises.push(
getAllResources(project, apiHostname, username, password).then(resources => {
@@ -962,7 +965,7 @@ function tryGetResource(project: string, slug: string, apiHostname: string, cred
}
function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> {
return new Promise((_resolve, reject) => {
return new Promise((resolve, reject) => {
const data = JSON.stringify({
'content': xlfFile.contents.toString(),
'name': slug,
@@ -1053,8 +1056,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
// extensions
let extensionsToLocalize = Object.create(null);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/**/*.nls.json', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@@ -1082,7 +1085,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
let expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false;
return readable(function (_count: any, callback: any) {
return readable(function (count, callback) {
// Mark end of stream when all resources were retrieved
if (translationsRetrieved === expectedTranslationsCount) {
return this.emit('end');
@@ -1092,7 +1095,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
called = true;
const stream = this;
resources.map(function (resource) {
retrieveResource(language, resource, apiHostname, credentials).then((file: File | null) => {
retrieveResource(language, resource, apiHostname, credentials).then((file: File) => {
if (file) {
stream.emit('data', file);
}
@@ -1104,13 +1107,13 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
callback();
});
}
const limiter = new Limiter<File | null>(NUMBER_OF_CONCURRENT_DOWNLOADS);
const limiter = new Limiter<File>(NUMBER_OF_CONCURRENT_DOWNLOADS);
function retrieveResource(language: Language, resource: Resource, apiHostname: string, credentials: string): Promise<File | null> {
return limiter.queue(() => new Promise<File | null>((resolve, reject) => {
function retrieveResource(language: Language, resource: Resource, apiHostname, credentials): Promise<File> {
return limiter.queue(() => new Promise<File>((resolve, reject) => {
const slug = resource.name.replace(/\//g, '_');
const project = resource.project;
let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
let transifexLanguageId = language.id === 'ps' ? 'en' : language.transifexId || language.id;
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
@@ -1209,10 +1212,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
let parsePromises: Promise<ParsedXLF[]>[] = [];
let mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
let extensionsPacks: Map<I18nPack> = {};
let errors: any[] = [];
return through(function (this: ThroughStream, xlf: File) {
let project = path.basename(path.dirname(xlf.relative));
let resource = path.basename(xlf.relative, '.xlf');
let stream = this;
let project = path.dirname(xlf.path);
let resource = path.basename(xlf.path, '.xlf');
let contents = xlf.contents.toString();
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
parsePromises.push(parsePromise);
@@ -1239,15 +1242,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
}
});
}
).catch(reason => {
errors.push(reason);
});
);
}, function () {
Promise.all(parsePromises)
.then(() => {
if (errors.length > 0) {
throw errors;
}
const translatedMainFile = createI18nFile('./main', mainPack);
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
@@ -1266,9 +1264,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
}
this.queue(null);
})
.catch((reason) => {
this.emit('error', reason);
});
.catch(reason => { throw new Error(reason); });
});
}
@@ -1289,15 +1285,11 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
stream.queue(translatedFile);
});
}
).catch(reason => {
this.emit('error', reason);
});
);
}, function () {
Promise.all(parsePromises)
.then(() => { this.queue(null); })
.catch(reason => {
this.emit('error', reason);
});
.catch(reason => { throw new Error(reason); });
});
}
@@ -1314,7 +1306,7 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
let firstChar = line.charAt(0);
if (firstChar === '[' || firstChar === ';') {
if (line === '; *** Inno Setup version 5.5.3+ English messages ***') {
content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo!.name} messages ***`);
content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo.name} messages ***`);
} else {
content.push(line);
}
@@ -1324,9 +1316,9 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
let translated = line;
if (key) {
if (key === 'LanguageName') {
translated = `${key}=${innoSetup.defaultInfo!.name}`;
translated = `${key}=${innoSetup.defaultInfo.name}`;
} else if (key === 'LanguageID') {
translated = `${key}=${innoSetup.defaultInfo!.id}`;
translated = `${key}=${innoSetup.defaultInfo.id}`;
} else if (key === 'LanguageCodePage') {
translated = `${key}=${innoSetup.codePage.substr(2)}`;
} else {
@@ -1347,14 +1339,14 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
return new File({
path: filePath,
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage)
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8'), innoSetup.codePage)
});
}
function encodeEntities(value: string): string {
let result: string[] = [];
for (let i = 0; i < value.length; i++) {
let ch = value[i];
var result: string[] = [];
for (var i = 0; i < value.length; i++) {
var ch = value[i];
switch (ch) {
case '<':
result.push('&lt;');
@@ -1378,4 +1370,4 @@ function decodeEntities(value: string): string {
function pseudify(message: string) {
return '\uFF3B' + message.replace(/[aouei]/g, '$&$&') + '\uFF3D';
}
}
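For context on the hunk above: pseudify implements the pseudo-localization mode (selected by the pseudo flag in prepareI18nPackFiles) by doubling every lowercase vowel and wrapping the message in fullwidth brackets, so strings that bypass the NLS pipeline stand out at runtime. A minimal standalone illustration, mirroring the function as shown:

function pseudify(message: string): string {
	// Double each lowercase vowel and wrap in fullwidth brackets (U+FF3B / U+FF3D).
	return '\uFF3B' + message.replace(/[aouei]/g, '$&$&') + '\uFF3D';
}

console.log(pseudify('Open File')); // ［Opeen Fiilee］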

View File

@@ -3,12 +3,13 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const ts = require("typescript");
const lazy = require("lazy.js");
const event_stream_1 = require("event-stream");
const File = require("vinyl");
const sm = require("source-map");
const path = require("path");
var ts = require("typescript");
var lazy = require("lazy.js");
var event_stream_1 = require("event-stream");
var File = require("vinyl");
var sm = require("source-map");
var assign = require("object-assign");
var path = require("path");
var CollectStepResult;
(function (CollectStepResult) {
CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
@@ -17,9 +18,9 @@ var CollectStepResult;
CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
})(CollectStepResult || (CollectStepResult = {}));
function collect(node, fn) {
const result = [];
var result = [];
function loop(node) {
const stepResult = fn(node);
var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node);
}
@@ -31,45 +32,43 @@ function collect(node, fn) {
return result;
}
function clone(object) {
const result = {};
for (const id in object) {
var result = {};
for (var id in object) {
result[id] = object[id];
}
return result;
}
function template(lines) {
let indent = '', wrap = '';
var indent = '', wrap = '';
if (lines.length > 1) {
indent = '\t';
wrap = '\n';
}
return `/*---------------------------------------------------------
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/
define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
return "/*---------------------------------------------------------\n * Copyright (C) Microsoft Corporation. All rights reserved.\n *--------------------------------------------------------*/\ndefine([], [" + (wrap + lines.map(function (l) { return indent + l; }).join(',\n') + wrap) + "]);";
}
/**
* Returns a stream containing the patched JavaScript and source maps.
*/
function nls() {
const input = event_stream_1.through();
const output = input.pipe(event_stream_1.through(function (f) {
var input = event_stream_1.through();
var output = input.pipe(event_stream_1.through(function (f) {
var _this = this;
if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
return this.emit('error', new Error("File " + f.relative + " does not have sourcemaps."));
}
let source = f.sourceMap.sources[0];
var source = f.sourceMap.sources[0];
if (!source) {
return this.emit('error', new Error(`File ${f.relative} does not have a source in the source map.`));
return this.emit('error', new Error("File " + f.relative + " does not have a source in the source map."));
}
const root = f.sourceMap.sourceRoot;
var root = f.sourceMap.sourceRoot;
if (root) {
source = path.join(root, source);
}
const typescript = f.sourceMap.sourcesContent[0];
var typescript = f.sourceMap.sourcesContent[0];
if (!typescript) {
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
return this.emit('error', new Error("File " + f.relative + " does not have the original content in the source map."));
}
nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
nls.patchFiles(f, typescript).forEach(function (f) { return _this.emit('data', f); });
}));
return event_stream_1.duplex(input, output);
}
@@ -77,7 +76,8 @@ function isImportNode(node) {
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
}
(function (nls_1) {
function fileFrom(file, contents, path = file.path) {
function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; }
return new File({
contents: Buffer.from(contents),
base: file.base,
@@ -87,27 +87,29 @@ function isImportNode(node) {
}
nls_1.fileFrom = fileFrom;
function mappedPositionFrom(source, lc) {
return { source, line: lc.line + 1, column: lc.character };
return { source: source, line: lc.line + 1, column: lc.character };
}
nls_1.mappedPositionFrom = mappedPositionFrom;
function lcFrom(position) {
return { line: position.line - 1, character: position.column };
}
nls_1.lcFrom = lcFrom;
class SingleFileServiceHost {
constructor(options, filename, contents) {
var SingleFileServiceHost = /** @class */ (function () {
function SingleFileServiceHost(options, filename, contents) {
var _this = this;
this.options = options;
this.filename = filename;
this.getCompilationSettings = () => this.options;
this.getScriptFileNames = () => [this.filename];
this.getScriptVersion = () => '1';
this.getScriptSnapshot = (name) => name === this.filename ? this.file : this.lib;
this.getCurrentDirectory = () => '';
this.getDefaultLibFileName = () => 'lib.d.ts';
this.getCompilationSettings = function () { return _this.options; };
this.getScriptFileNames = function () { return [_this.filename]; };
this.getScriptVersion = function () { return '1'; };
this.getScriptSnapshot = function (name) { return name === _this.filename ? _this.file : _this.lib; };
this.getCurrentDirectory = function () { return ''; };
this.getDefaultLibFileName = function () { return 'lib.d.ts'; };
this.file = ts.ScriptSnapshot.fromString(contents);
this.lib = ts.ScriptSnapshot.fromString('');
}
}
return SingleFileServiceHost;
}());
nls_1.SingleFileServiceHost = SingleFileServiceHost;
function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
@@ -115,96 +117,97 @@ function isImportNode(node) {
}
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
}
function analyze(contents, options = {}) {
const filename = 'file.ts';
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents);
const service = ts.createLanguageService(serviceHost);
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
function analyze(contents, options) {
if (options === void 0) { options = {}; }
var filename = 'file.ts';
var serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
var service = ts.createLanguageService(serviceHost);
var sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
// all imports
const imports = lazy(collect(sourceFile, n => isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
var imports = lazy(collect(sourceFile, function (n) { return isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; }));
// import nls = require('vs/nls');
const importEqualsDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration)
.map(n => n)
.filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference)
.filter(d => d.moduleReference.expression.getText() === '\'vs/nls\'');
var importEqualsDeclarations = imports
.filter(function (n) { return n.kind === ts.SyntaxKind.ImportEqualsDeclaration; })
.map(function (n) { return n; })
.filter(function (d) { return d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference; })
.filter(function (d) { return d.moduleReference.expression.getText() === '\'vs/nls\''; });
// import ... from 'vs/nls';
const importDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportDeclaration)
.map(n => n)
.filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral)
.filter(d => d.moduleSpecifier.getText() === '\'vs/nls\'')
.filter(d => !!d.importClause && !!d.importClause.namedBindings);
const nlsExpressions = importEqualsDeclarations
.map(d => d.moduleReference.expression)
.concat(importDeclarations.map(d => d.moduleSpecifier))
.map(d => ({
var importDeclarations = imports
.filter(function (n) { return n.kind === ts.SyntaxKind.ImportDeclaration; })
.map(function (n) { return n; })
.filter(function (d) { return d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral; })
.filter(function (d) { return d.moduleSpecifier.getText() === '\'vs/nls\''; })
.filter(function (d) { return !!d.importClause && !!d.importClause.namedBindings; });
var nlsExpressions = importEqualsDeclarations
.map(function (d) { return d.moduleReference.expression; })
.concat(importDeclarations.map(function (d) { return d.moduleSpecifier; }))
.map(function (d) { return ({
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
}));
}); });
// `nls.localize(...)` calls
const nlsLocalizeCallExpressions = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport))
.map(d => d.importClause.namedBindings.name)
.concat(importEqualsDeclarations.map(d => d.name))
var nlsLocalizeCallExpressions = importDeclarations
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
.map(function (d) { return d.importClause.namedBindings.name; })
.concat(importEqualsDeclarations.map(function (d) { return d.name; }))
// find read-only references to `nls`
.map(n => service.getReferencesAtPosition(filename, n.pos + 1))
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(r => !r.isWriteAccess)
.filter(function (r) { return !r.isWriteAccess; })
// find the deepest call expressions AST nodes that contain those references
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
.map(a => lazy(a).last())
.filter(n => !!n)
.map(n => n)
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; })
// only `localize` calls
.filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize');
.filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
// `localize` named imports
const allLocalizeImportDeclarations = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports))
.map(d => [].concat(d.importClause.namedBindings.elements))
var allLocalizeImportDeclarations = importDeclarations
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports; })
.map(function (d) { return [].concat(d.importClause.namedBindings.elements); })
.flatten();
// `localize` read-only references
const localizeReferences = allLocalizeImportDeclarations
.filter(d => d.name.getText() === 'localize')
.map(n => service.getReferencesAtPosition(filename, n.pos + 1))
var localizeReferences = allLocalizeImportDeclarations
.filter(function (d) { return d.name.getText() === 'localize'; })
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(r => !r.isWriteAccess);
.filter(function (r) { return !r.isWriteAccess; });
// custom named `localize` read-only references
const namedLocalizeReferences = allLocalizeImportDeclarations
.filter(d => d.propertyName && d.propertyName.getText() === 'localize')
.map(n => service.getReferencesAtPosition(filename, n.name.pos + 1))
var namedLocalizeReferences = allLocalizeImportDeclarations
.filter(function (d) { return d.propertyName && d.propertyName.getText() === 'localize'; })
.map(function (n) { return service.getReferencesAtPosition(filename, n.name.pos + 1); })
.flatten()
.filter(r => !r.isWriteAccess);
.filter(function (r) { return !r.isWriteAccess; });
// find the deepest call expressions AST nodes that contain those references
const localizeCallExpressions = localizeReferences
var localizeCallExpressions = localizeReferences
.concat(namedLocalizeReferences)
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
.map(a => lazy(a).last())
.filter(n => !!n)
.map(n => n);
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; });
// collect everything
const localizeCalls = nlsLocalizeCallExpressions
var localizeCalls = nlsLocalizeCallExpressions
.concat(localizeCallExpressions)
.map(e => e.arguments)
.filter(a => a.length > 1)
.sort((a, b) => a[0].getStart() - b[0].getStart())
.map(a => ({
.map(function (e) { return e.arguments; })
.filter(function (a) { return a.length > 1; })
.sort(function (a, b) { return a[0].getStart() - b[0].getStart(); })
.map(function (a) { return ({
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
key: a[0].getText(),
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
value: a[1].getText()
}));
}); });
return {
localizeCalls: localizeCalls.toArray(),
nlsExpressions: nlsExpressions.toArray()
};
}
nls_1.analyze = analyze;
class TextModel {
constructor(contents) {
const regex = /\r\n|\r|\n/g;
let index = 0;
let match;
var TextModel = /** @class */ (function () {
function TextModel(contents) {
var regex = /\r\n|\r|\n/g;
var index = 0;
var match;
this.lines = [];
this.lineEndings = [];
while (match = regex.exec(contents)) {
@@ -217,80 +220,85 @@ function isImportNode(node) {
this.lineEndings.push('');
}
}
get(index) {
TextModel.prototype.get = function (index) {
return this.lines[index];
}
set(index, line) {
};
TextModel.prototype.set = function (index, line) {
this.lines[index] = line;
}
get lineCount() {
return this.lines.length;
}
};
Object.defineProperty(TextModel.prototype, "lineCount", {
get: function () {
return this.lines.length;
},
enumerable: true,
configurable: true
});
/**
* Applies patch(es) to the model.
* Multiple patches must be ordered.
* Does not support patches spanning multiple lines.
*/
apply(patch) {
const startLineNumber = patch.span.start.line;
const endLineNumber = patch.span.end.line;
const startLine = this.lines[startLineNumber] || '';
const endLine = this.lines[endLineNumber] || '';
TextModel.prototype.apply = function (patch) {
var startLineNumber = patch.span.start.line;
var endLineNumber = patch.span.end.line;
var startLine = this.lines[startLineNumber] || '';
var endLine = this.lines[endLineNumber] || '';
this.lines[startLineNumber] = [
startLine.substring(0, patch.span.start.character),
patch.content,
endLine.substring(patch.span.end.character)
].join('');
for (let i = startLineNumber + 1; i <= endLineNumber; i++) {
for (var i = startLineNumber + 1; i <= endLineNumber; i++) {
this.lines[i] = '';
}
}
toString() {
};
TextModel.prototype.toString = function () {
return lazy(this.lines).zip(this.lineEndings)
.flatten().toArray().join('');
}
}
};
return TextModel;
}());
nls_1.TextModel = TextModel;
function patchJavascript(patches, contents, moduleId) {
const model = new nls.TextModel(contents);
var model = new nls.TextModel(contents);
// patch the localize calls
lazy(patches).reverse().each(p => model.apply(p));
lazy(patches).reverse().each(function (p) { return model.apply(p); });
// patch the 'vs/nls' imports
const firstLine = model.get(0);
const patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
var firstLine = model.get(0);
var patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
model.set(0, patchedFirstLine);
return model.toString();
}
nls_1.patchJavascript = patchJavascript;
function patchSourcemap(patches, rsm, smc) {
const smg = new sm.SourceMapGenerator({
var smg = new sm.SourceMapGenerator({
file: rsm.file,
sourceRoot: rsm.sourceRoot
});
patches = patches.reverse();
let currentLine = -1;
let currentLineDiff = 0;
let source = null;
smc.eachMapping(m => {
const patch = patches[patches.length - 1];
const original = { line: m.originalLine, column: m.originalColumn };
const generated = { line: m.generatedLine, column: m.generatedColumn };
var currentLine = -1;
var currentLineDiff = 0;
var source = null;
smc.eachMapping(function (m) {
var patch = patches[patches.length - 1];
var original = { line: m.originalLine, column: m.originalColumn };
var generated = { line: m.generatedLine, column: m.generatedColumn };
if (currentLine !== generated.line) {
currentLineDiff = 0;
}
currentLine = generated.line;
generated.column += currentLineDiff;
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
const originalLength = patch.span.end.character - patch.span.start.character;
const modifiedLength = patch.content.length;
const lengthDiff = modifiedLength - originalLength;
var originalLength = patch.span.end.character - patch.span.start.character;
var modifiedLength = patch.content.length;
var lengthDiff = modifiedLength - originalLength;
currentLineDiff += lengthDiff;
generated.column += lengthDiff;
patches.pop();
}
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
source = source.replace(/\\/g, '/');
smg.addMapping({ source, name: m.name, original, generated });
smg.addMapping({ source: source, name: m.name, original: original, generated: generated });
}, null, sm.SourceMapConsumer.GENERATED_ORDER);
if (source) {
smg.setSourceContent(source, smc.sourceContentFor(source));
@@ -299,47 +307,47 @@ function isImportNode(node) {
}
nls_1.patchSourcemap = patchSourcemap;
function patch(moduleId, typescript, javascript, sourcemap) {
const { localizeCalls, nlsExpressions } = analyze(typescript);
var _a = analyze(typescript), localizeCalls = _a.localizeCalls, nlsExpressions = _a.nlsExpressions;
if (localizeCalls.length === 0) {
return { javascript, sourcemap };
return { javascript: javascript, sourcemap: sourcemap };
}
const nlsKeys = template(localizeCalls.map(lc => lc.key));
const nls = template(localizeCalls.map(lc => lc.value));
const smc = new sm.SourceMapConsumer(sourcemap);
const positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
let i = 0;
var nlsKeys = template(localizeCalls.map(function (lc) { return lc.key; }));
var nls = template(localizeCalls.map(function (lc) { return lc.value; }));
var smc = new sm.SourceMapConsumer(sourcemap);
var positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
var i = 0;
// build patches
const patches = lazy(localizeCalls)
.map(lc => ([
var patches = lazy(localizeCalls)
.map(function (lc) { return ([
{ range: lc.keySpan, content: '' + (i++) },
{ range: lc.valueSpan, content: 'null' }
]))
]); })
.flatten()
.map(c => {
const start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
const end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
return { span: { start, end }, content: c.content };
.map(function (c) {
var start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
var end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
return { span: { start: start, end: end }, content: c.content };
})
.toArray();
javascript = patchJavascript(patches, javascript, moduleId);
// since imports are not within the sourcemap information,
// we must do this MacGyver style
if (nlsExpressions.length) {
javascript = javascript.replace(/^define\(.*$/m, line => {
return line.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
javascript = javascript.replace(/^define\(.*$/m, function (line) {
return line.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
});
}
sourcemap = patchSourcemap(patches, sourcemap, smc);
return { javascript, sourcemap, nlsKeys, nls };
return { javascript: javascript, sourcemap: sourcemap, nlsKeys: nlsKeys, nls: nls };
}
nls_1.patch = patch;
function patchFiles(javascriptFile, typescript) {
// hack?
const moduleId = javascriptFile.relative
var moduleId = javascriptFile.relative
.replace(/\.js$/, '')
.replace(/\\/g, '/');
const { javascript, sourcemap, nlsKeys, nls } = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap);
const result = [fileFrom(javascriptFile, javascript)];
var _a = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap), javascript = _a.javascript, sourcemap = _a.sourcemap, nlsKeys = _a.nlsKeys, nls = _a.nls;
var result = [fileFrom(javascriptFile, javascript)];
result[0].sourceMap = sourcemap;
if (nlsKeys) {
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));
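To make the patching in this file concrete: analyze collects every localize call, patchJavascript replaces each key argument with a running index and each value argument with null, the 'vs/nls' import is rewritten to 'vs/nls!<moduleId>', and template emits the collected keys and values as companion modules. A before/after sketch on a hypothetical module (illustrative only; the copyright header emitted by template is omitted, and the values-module path is inferred by analogy with the '.nls.keys.js' path above):

// AMD global, declared here so the sketch type-checks on its own.
declare function define(deps: string[], factory?: unknown): void;

// before: myModule.js (hypothetical input)
define(['vs/nls'], function (nls: any) {
	return nls.localize('greeting', 'Hello World');
});

// after patching: the key is replaced by index 0, the value by null
define(['vs/nls!myModule'], function (nls: any) {
	return nls.localize(0, null);
});

// companion values module (presumably myModule.nls.js), from template():
define([], ['Hello World']);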

View File

@@ -6,9 +6,10 @@
import * as ts from 'typescript';
import * as lazy from 'lazy.js';
import { duplex, through } from 'event-stream';
import * as File from 'vinyl';
import File = require('vinyl');
import * as sm from 'source-map';
import * as path from 'path';
import assign = require('object-assign');
import path = require('path');
declare class FileSourceMap extends File {
public sourceMap: sm.RawSourceMap;
@@ -25,7 +26,7 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
const result: ts.Node[] = [];
function loop(node: ts.Node) {
const stepResult = fn(node);
var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node);
@@ -41,8 +42,8 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
}
function clone<T>(object: T): T {
const result = <T>{};
for (const id in object) {
var result = <T>{};
for (var id in object) {
result[id] = object[id];
}
return result;
@@ -66,8 +67,8 @@ define([], [${ wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
* Returns a stream containing the patched JavaScript and source maps.
*/
function nls(): NodeJS.ReadWriteStream {
const input = through();
const output = input.pipe(through(function (f: FileSourceMap) {
var input = through();
var output = input.pipe(through(function (f: FileSourceMap) {
if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
}
@@ -82,7 +83,7 @@ function nls(): NodeJS.ReadWriteStream {
source = path.join(root, source);
}
const typescript = f.sourceMap.sourcesContent![0];
const typescript = f.sourceMap.sourcesContent[0];
if (!typescript) {
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
}
@@ -173,7 +174,7 @@ module nls {
export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult {
const filename = 'file.ts';
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents);
const serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
const service = ts.createLanguageService(serviceHost);
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
@@ -205,8 +206,8 @@ module nls {
// `nls.localize(...)` calls
const nlsLocalizeCallExpressions = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport))
.map(d => (<ts.NamespaceImport>d.importClause!.namedBindings).name)
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)
.map(d => (<ts.NamespaceImport>d.importClause.namedBindings).name)
.concat(importEqualsDeclarations.map(d => d.name))
// find read-only references to `nls`
@@ -225,8 +226,8 @@ module nls {
// `localize` named imports
const allLocalizeImportDeclarations = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports))
.map(d => ([] as any[]).concat((<ts.NamedImports>d.importClause!.namedBindings!).elements))
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)
.map(d => [].concat((<ts.NamedImports>d.importClause.namedBindings).elements))
.flatten();
// `localize` read-only references
@@ -278,7 +279,7 @@ module nls {
constructor(contents: string) {
const regex = /\r\n|\r|\n/g;
let index = 0;
let match: RegExpExecArray | null;
let match: RegExpExecArray;
this.lines = [];
this.lineEndings = [];
@@ -359,7 +360,7 @@ module nls {
patches = patches.reverse();
let currentLine = -1;
let currentLineDiff = 0;
let source: string | null = null;
let source = null;
smc.eachMapping(m => {
const patch = patches[patches.length - 1];
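TextModel.apply (shown in full in the compiled nls.js hunks above) splices one patch into the line buffer: the start line keeps its prefix up to the patch, the end line keeps its suffix after it, and any lines strictly in between are blanked. A toy example with hypothetical values, assuming the class as shown:

const model = new TextModel("const a = localize('key', 'value');\n");
model.apply({
	span: { start: { line: 0, character: 19 }, end: { line: 0, character: 24 } },
	content: '0'
});
console.log(model.get(0)); // const a = localize(0, 'value');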

View File

@@ -4,32 +4,30 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const gulp = require("gulp");
const concat = require("gulp-concat");
const minifyCSS = require("gulp-cssnano");
const filter = require("gulp-filter");
const flatmap = require("gulp-flatmap");
const sourcemaps = require("gulp-sourcemaps");
const uglify = require("gulp-uglify");
const composer = require("gulp-uglify/composer");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const path = require("path");
const pump = require("pump");
const uglifyes = require("uglify-es");
const VinylFile = require("vinyl");
const bundle = require("./bundle");
const i18n_1 = require("./i18n");
const stats_1 = require("./stats");
const util = require("./util");
const REPO_ROOT_PATH = path.join(__dirname, '../..');
var path = require("path");
var gulp = require("gulp");
var sourcemaps = require("gulp-sourcemaps");
var filter = require("gulp-filter");
var minifyCSS = require("gulp-cssnano");
var uglify = require("gulp-uglify");
var composer = require("gulp-uglify/composer");
var uglifyes = require("uglify-es");
var es = require("event-stream");
var concat = require("gulp-concat");
var VinylFile = require("vinyl");
var bundle = require("./bundle");
var util = require("./util");
var i18n = require("./i18n");
var gulpUtil = require("gulp-util");
var flatmap = require("gulp-flatmap");
var pump = require("pump");
var REPO_ROOT_PATH = path.join(__dirname, '../..');
function log(prefix, message) {
fancyLog(ansiColors.cyan('[' + prefix + ']'), message);
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
}
// {{SQL CARBON EDIT}}
function loaderConfig(emptyPaths) {
const result = {
var result = {
paths: {
'vs': 'out-build/vs',
'sql': 'out-build/sql',
@@ -41,26 +39,26 @@ function loaderConfig(emptyPaths) {
return result;
}
exports.loaderConfig = loaderConfig;
const IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(src, bundledFileHeader, bundleLoader) {
let sources = [
`${src}/vs/loader.js`
var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(bundledFileHeader, bundleLoader) {
var sources = [
'out-build/vs/loader.js'
];
if (bundleLoader) {
sources = sources.concat([
`${src}/vs/css.js`,
`${src}/vs/nls.js`
'out-build/vs/css.js',
'out-build/vs/nls.js'
]);
}
let isFirst = true;
var isFirst = true;
return (gulp
.src(sources, { base: `${src}` })
.src(sources, { base: 'out-build' })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: undefined,
base: '',
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
@@ -76,13 +74,13 @@ function loader(src, bundledFileHeader, bundleLoader) {
return f;
})));
}
function toConcatStream(src, bundledFileHeader, sources, dest) {
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
function toConcatStream(bundledFileHeader, sources, dest) {
var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then
// insert a fake source at the beginning of each bundle with our copyright
let containsOurCopyright = false;
for (let i = 0, len = sources.length; i < len; i++) {
const fileContents = sources[i].contents;
var containsOurCopyright = false;
for (var i = 0, len = sources.length; i < len; i++) {
var fileContents = sources[i].contents;
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
containsOurCopyright = true;
break;
@@ -94,9 +92,9 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
contents: bundledFileHeader
});
}
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : undefined;
var treatedSources = sources.map(function (source) {
var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
var base = source.path ? root + '/out-build' : '';
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
@@ -105,41 +103,40 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
});
return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest))
.pipe(stats_1.createStatsStream(dest));
.pipe(concat(dest));
}
function toBundleStream(src, bundledFileHeader, bundles) {
function toBundleStream(bundledFileHeader, bundles) {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
}));
}
function optimizeTask(opts) {
const src = opts.src;
const entryPoints = opts.entryPoints;
const resources = opts.resources;
const loaderConfig = opts.loaderConfig;
const bundledFileHeader = opts.header;
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const out = opts.out;
var entryPoints = opts.entryPoints;
var otherSources = opts.otherSources;
var resources = opts.resources;
var loaderConfig = opts.loaderConfig;
var bundledFileHeader = opts.header;
var bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
var out = opts.out;
return function () {
const bundlesStream = es.through(); // this stream will contain the bundled files
const resourcesStream = es.through(); // this stream will contain the resources
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
var bundlesStream = es.through(); // this stream will contain the bundled files
var resourcesStream = es.through(); // this stream will contain the resources
var bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) {
if (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
const filteredResources = resources.slice();
var filteredResources = resources.slice();
result.cssInlinedResources.forEach(function (resource) {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log('optimizer', 'excluding inlined: ' + resource);
}
filteredResources.push('!' + resource);
});
gulp.src(filteredResources, { base: `${src}`, allowEmpty: true }).pipe(resourcesStream);
const bundleInfoArray = [];
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
var bundleInfoArray = [];
if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
@@ -149,17 +146,30 @@ function optimizeTask(opts) {
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
});
const result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, resourcesStream, bundleInfoStream);
var otherSourcesStream = es.through();
var otherSourcesStreamArr = [];
gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
}, function () {
if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
}
else {
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
}
}));
var result = es.merge(loader(bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
return result
.pipe(sourcemaps.write('./', {
sourceRoot: undefined,
sourceRoot: null,
addComment: true,
includeContent: true
}))
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({
.pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}) : es.through())
}))
.pipe(gulp.dest(out));
};
}
@@ -169,14 +179,14 @@ exports.optimizeTask = optimizeTask;
* to have a file "context" to include our copyright only once per file.
*/
function uglifyWithCopyrights() {
const preserveComments = (f) => {
return (_node, comment) => {
const text = comment.value;
const type = comment.type;
var preserveComments = function (f) {
return function (node, comment) {
var text = comment.value;
var type = comment.type;
if (/@minifier_do_not_preserve/.test(text)) {
return false;
}
const isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
var isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
if (isOurCopyright) {
if (f.__hasOurCopyright) {
return false;
@@ -194,10 +204,10 @@ function uglifyWithCopyrights() {
return false;
};
};
const minify = composer(uglifyes);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {
var minify = composer(uglifyes);
var input = es.through();
var output = input
.pipe(flatmap(function (stream, f) {
return stream.pipe(minify({
output: {
comments: preserveComments(f),
@@ -208,23 +218,18 @@ function uglifyWithCopyrights() {
return es.duplex(input, output);
}
function minifyTask(src, sourceMapBaseUrl) {
const sourceMappingURL = sourceMapBaseUrl ? ((f) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined;
return cb => {
const jsFilter = filter('**/*.js', { restore: true });
const cssFilter = filter('**/*.css', { restore: true });
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.mapSources((sourcePath) => {
if (sourcePath === 'bootstrap-fork.js') {
return 'bootstrap-fork.orig.js';
}
return sourcePath;
}), sourcemaps.write('./', {
sourceMappingURL,
sourceRoot: undefined,
var sourceMappingURL = sourceMapBaseUrl && (function (f) { return sourceMapBaseUrl + "/" + f.relative + ".map"; });
return function (cb) {
var jsFilter = filter('**/*.js', { restore: true });
var cssFilter = filter('**/*.css', { restore: true });
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', {
sourceMappingURL: sourceMappingURL,
sourceRoot: null,
includeContent: true,
addComment: true
}), gulp.dest(src + '-min'), (err) => {
}), gulp.dest(src + '-min'), function (err) {
if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
console.error("Uglify error in '" + (err.cause && err.cause.filename) + "'");
}
cb(err);
});
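The preserveComments factory in uglifyWithCopyrights above decides, per output file, which comments survive minification: anything tagged @minifier_do_not_preserve is dropped, and the Microsoft copyright banner is kept exactly once per file via a flag stashed on the file object. A condensed sketch of the visible logic (branches elided by the hunk boundary are not reproduced; the flag-setting step is an assumption):

function preserveComments(f: { __hasOurCopyright?: boolean }) {
	return (_node: unknown, comment: { value: string; type: string }): boolean => {
		if (/@minifier_do_not_preserve/.test(comment.value)) {
			return false; // explicitly marked as safe to drop
		}
		if (/Copyright \(C\) Microsoft Corporation/i.test(comment.value)) {
			if (f.__hasOurCopyright) {
				return false; // banner already emitted for this file
			}
			f.__hasOurCopyright = true; // assumed completion of the elided branch
			return true;
		}
		return false; // other comment kinds: handling elided in the hunk
	};
}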

Some files were not shown because too many files have changed in this diff