Compare commits

..

3 Commits

Author SHA1 Message Date
Karl Burtram
088cac030f Merge branch 'master' into release/0.30 2018-06-18 13:07:21 -07:00
Karl Burtram
bd3c293f94 Merge branch 'master' into release/0.30 2018-06-18 10:28:15 -07:00
Karl Burtram
17db0b7d09 Revert SlickGrid to 2.3.16 to workaround Edit Data TAB issue 2018-06-12 16:37:42 -07:00
12821 changed files with 353097 additions and 411324 deletions

View File

@@ -1,4 +1,4 @@
# EditorConfig is awesome: https://EditorConfig.org # EditorConfig is awesome: http://EditorConfig.org
# top-most EditorConfig file # top-most EditorConfig file
root = true root = true
@@ -6,6 +6,7 @@ root = true
# Tab indentation # Tab indentation
[*] [*]
indent_style = tab indent_style = tab
indent_size = 4
trim_trailing_whitespace = true trim_trailing_whitespace = true
# The indent size used in the `package.json` file cannot be changed # The indent size used in the `package.json` file cannot be changed

19
.eslintrc Normal file
View File

@@ -0,0 +1,19 @@
{
"env": {
"node": true,
"es6": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}

View File

@@ -1,20 +0,0 @@
{
"root": true,
"env": {
"node": true,
"es6": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}

View File

@@ -1,18 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Also please test using the latest insiders build to make sure your issue has not already been fixed. -->
<!-- Use Help > Report Issue to prefill these. -->
- Azure Data Studio Version:
Steps to Reproduce:
1.

View File

@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: feature request
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution or feature you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View File

@@ -1,49 +0,0 @@
{
perform: true,
alwaysRequireAssignee: false,
labelsRequiringAssignee: [],
autoAssignees: {
accessibility: [],
acquisition: [],
agent: [],
azure: [],
backup: [],
bcdr: [],
'chart viewer': [],
connection: [],
dacfx: [],
dashboard: [],
'data explorer': [],
documentation: [],
'edit data': [],
export: [],
extensibility: [],
extensionManager: [],
globalization: [],
grid: [],
import: [],
insights: [],
intellisense: [],
localization: [],
'managed instance': [],
notebooks: [],
'object explorer': [],
performance: [],
profiler: [],
'query editor': [],
'query execution': [],
reliability: [],
restore: [],
scripting: [],
'server group': [],
settings: [],
setup: [],
shell: [],
showplan: [],
snippet: [],
sql2019Preview: [],
sqldw: [],
supportability: [],
ux: []
}
}

12
.github/commands.yml vendored
View File

@@ -1,12 +0,0 @@
{
perform: false,
commands: [
{
type: 'label',
name: 'duplicate',
allowTriggerByBot: true,
action: 'close',
comment: "Thanks for creating this issue! We figured it's covering the same as another one we already have. Thus, we closed this one as a duplicate. You can search for existing issues [here](https://aka.ms/vscodeissuesearch). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
}
]
}

5
.github/copycat.yml vendored
View File

@@ -1,5 +0,0 @@
{
perform: true,
target_owner: 'anthonydresser',
target_repo: 'testissues'
}

6
.github/locker.yml vendored
View File

@@ -1,6 +0,0 @@
{
daysAfterClose: 45,
daysSinceLastUpdate: 3,
ignoredLabels: [],
perform: true
}

View File

@@ -1,6 +0,0 @@
{
daysUntilClose: 7,
needsMoreInfoLabel: 'needs more info',
perform: true,
closeComment: "This issue has been closed automatically because it needs more information and has not had recent activity in the last 7 days. If you have more info to help resolve the issue, leave a comment"
}

View File

@@ -1,6 +0,0 @@
{
newReleaseLabel: 'new-release',
newReleaseColor: '006b75',
daysAfterRelease: 5,
perform: true
}

View File

@@ -1,5 +0,0 @@
{
perform: true,
whenCreatedByTeam: true,
comment: "Thanks for submitting this issue. Please also check if it is already covered by an existing one, like:\n${potentialDuplicates}"
}

6
.gitignore vendored
View File

@@ -3,12 +3,9 @@ npm-debug.log
Thumbs.db Thumbs.db
node_modules/ node_modules/
.build/ .build/
extensions/**/dist/
out/ out/
out-build/ out-build/
out-editor/ out-editor/
out-editor-src/
out-editor-build/
out-editor-esm/ out-editor-esm/
out-editor-min/ out-editor-min/
out-monaco-editor-core/ out-monaco-editor-core/
@@ -17,5 +14,4 @@ out-vscode-min/
build/node_modules build/node_modules
coverage/ coverage/
test_data/ test_data/
test-results/ yarn-error.log
yarn-error.log

2
.nvmrc
View File

@@ -1 +1 @@
8 8.9.2

58
.travis.yml Normal file
View File

@@ -0,0 +1,58 @@
sudo: false
language: cpp
os:
- linux
- osx
cache:
directories:
- $HOME/.cache/yarn
notifications:
email: false
webhooks:
- http://vscode-probot.westus.cloudapp.azure.com:3450/travis/notifications
- http://vscode-test-probot.westus.cloudapp.azure.com:3450/travis/notifications
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-4.9
- g++-4.9
- gcc-4.9-multilib
- g++-4.9-multilib
- zip
- libgtk2.0-0
- libx11-dev
- libxkbfile-dev
- libsecret-1-dev
before_install:
- git submodule update --init --recursive
- nvm install 8.9.1
- nvm use 8.9.1
- npm i -g yarn
# - npm config set python `which python`
- if [ $TRAVIS_OS_NAME == "linux" ]; then
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0;
sh -e /etc/init.d/xvfb start;
sleep 3;
fi
# Make npm logs less verbose
# - npm config set depth 0
# - npm config set loglevel warn
install:
- yarn
script:
- node_modules/.bin/gulp electron --silent
- node_modules/.bin/gulp compile --silent --max_old_space_size=4096
- node_modules/.bin/gulp optimize-vscode --silent --max_old_space_size=4096
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi
after_success:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then node_modules/.bin/coveralls < .build/coverage/lcov.info; fi

View File

@@ -1,23 +0,0 @@
{
"type": "array",
"items": {
"type": "object",
"required": [
"name",
"licenseDetail"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
"licenseDetail": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
}
}
}
}

View File

@@ -1,142 +0,0 @@
{
"type": "object",
"properties": {
"registrations": {
"type": "array",
"items": {
"type": "object",
"properties": {
"component": {
"oneOf": [
{
"type": "object",
"required": [
"type",
"git"
],
"properties": {
"type": {
"type": "string",
"enum": [
"git"
]
},
"git": {
"type": "object",
"required": [
"name",
"repositoryUrl",
"commitHash"
],
"properties": {
"name": {
"type": "string"
},
"repositoryUrl": {
"type": "string"
},
"commitHash": {
"type": "string"
}
}
}
}
},
{
"type": "object",
"required": [
"type",
"npm"
],
"properties": {
"type": {
"type": "string",
"enum": [
"npm"
]
},
"npm": {
"type": "object",
"required": [
"name",
"version"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
}
}
}
}
},
{
"type": "object",
"required": [
"type",
"other"
],
"properties": {
"type": {
"type": "string",
"enum": [
"other"
]
},
"other": {
"type": "object",
"required": [
"name",
"downloadUrl",
"version"
],
"properties": {
"name": {
"type": "string"
},
"downloadUrl": {
"type": "string"
},
"version": {
"type": "string"
}
}
}
}
}
]
},
"repositoryUrl": {
"type": "string",
"description": "The git url of the component"
},
"version": {
"type": "string",
"description": "The version of the component"
},
"license": {
"type": "string",
"description": "The name of the license"
},
"developmentDependency": {
"type": "boolean",
"description": "This component is inlined in the vscode repo and **is not shipped**."
},
"isOnlyProductionDependency": {
"type": "boolean",
"description": "This component is shipped and **is not inlined in the vscode repo**."
},
"licenseDetail": {
"type": "array",
"items": {
"type": "string"
},
"description": "The license text"
}
}
}
}
}
}

View File

@@ -2,7 +2,7 @@
// See https://go.microsoft.com/fwlink/?LinkId=827846 // See https://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format // for the documentation about the extensions.json format
"recommendations": [ "recommendations": [
"ms-vscode.vscode-typescript-tslint-plugin", "eg2.tslint",
"dbaeumer.vscode-eslint", "dbaeumer.vscode-eslint",
"msjsdiag.debugger-for-chrome" "msjsdiag.debugger-for-chrome"
] ]

145
.vscode/launch.json vendored
View File

@@ -9,12 +9,14 @@
"stopOnEntry": true, "stopOnEntry": true,
"args": [ "args": [
"hygiene" "hygiene"
] ],
"cwd": "${workspaceFolder}"
}, },
{ {
"type": "node", "type": "node",
"request": "attach", "request": "attach",
"name": "Attach to Extension Host", "name": "Attach to Extension Host",
"protocol": "inspector",
"port": 5870, "port": 5870,
"restart": true, "restart": true,
"outFiles": [ "outFiles": [
@@ -22,15 +24,19 @@
] ]
}, },
{ {
"type": "chrome", "type": "node",
"request": "attach", "request": "attach",
"name": "Attach to Shared Process", "name": "Attach to Shared Process",
"port": 9222, "protocol": "inspector",
"urlFilter": "*" "port": 5871,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
}, },
{ {
"type": "node", "type": "node",
"request": "attach", "request": "attach",
"protocol": "inspector",
"name": "Attach to Search Process", "name": "Attach to Search Process",
"port": 5876, "port": 5876,
"outFiles": [ "outFiles": [
@@ -41,6 +47,7 @@
"type": "node", "type": "node",
"request": "attach", "request": "attach",
"name": "Attach to CLI Process", "name": "Attach to CLI Process",
"protocol": "inspector",
"port": 5874, "port": 5874,
"outFiles": [ "outFiles": [
"${workspaceFolder}/out/**/*.js" "${workspaceFolder}/out/**/*.js"
@@ -50,6 +57,7 @@
"type": "node", "type": "node",
"request": "attach", "request": "attach",
"name": "Attach to Main Process", "name": "Attach to Main Process",
"protocol": "inspector",
"port": 5875, "port": 5875,
"outFiles": [ "outFiles": [
"${workspaceFolder}/out/**/*.js" "${workspaceFolder}/out/**/*.js"
@@ -58,47 +66,13 @@
{ {
"type": "chrome", "type": "chrome",
"request": "attach", "request": "attach",
"name": "Attach to azuredatastudio", "name": "Attach to sqlops",
"port": 9222 "port": 9222
}, },
{ {
"type": "chrome", "type": "chrome",
"request": "launch", "request": "launch",
"name": "Launch azuredatastudio", "name": "Launch sqlops",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
"timeout": 20000
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
},
"breakOnLoad": false,
"urlFilter": "*workbench.html*",
"runtimeArgs": [
"--inspect=5875",
"--no-cached-data"
],
"webRoot": "${workspaceFolder}"
},
{
"type": "node",
"request": "launch",
"name": "Launch ADS (Main Process)",
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"runtimeArgs": [
"--no-cached-data"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"type": "chrome",
"request": "launch",
"name": "Launch azuredatastudio with new notebook command",
"windows": { "windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat" "runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
}, },
@@ -110,107 +84,56 @@
}, },
"urlFilter": "*index.html*", "urlFilter": "*index.html*",
"runtimeArgs": [ "runtimeArgs": [
"--inspect=5875", "--inspect=5875"
"--command=notebook.command.new"
], ],
"skipFiles": [ "skipFiles": [
"**/winjs*.js" "**/winjs*.js"
], ],
"webRoot": "${workspaceFolder}", "webRoot": "${workspaceFolder}",
"timeout": 45000 "timeout": 15000
},
{
"name": "Launch Built-in Extension",
"type": "extensionHost",
"request": "launch",
"runtimeExecutable": "${execPath}",
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
]
}, },
{ {
"type": "node", "type": "node",
"request": "launch", "request": "launch",
"name": "Launch Smoke Test", "name": "Unit Tests",
"program": "${workspaceFolder}/test/smoke/test/index.js", "protocol": "inspector",
"cwd": "${workspaceFolder}/test/smoke", "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"env": { "runtimeExecutable": "${workspaceFolder}/.build/electron/SQL Operations Studio.app/Contents/MacOS/Electron",
"BUILD_ARTIFACTSTAGINGDIRECTORY": "${workspaceFolder}"
}
},
{
"type": "node",
"request": "launch",
"name": "Run Unit Tests",
"program": "${workspaceFolder}/test/electron/index.js",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
"windows": { "windows": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe" "runtimeExecutable": "${workspaceFolder}/.build/electron/sqlops.exe"
}, },
"linux": { "linux": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio" "runtimeExecutable": "${workspaceFolder}/.build/electron/sqlops"
}, },
"outputCapture": "std", "stopOnEntry": false,
"args": [ "args": [
"--remote-debugging-port=9222" "--delay",
"--timeout",
"2000"
], ],
"cwd": "${workspaceFolder}", "cwd": "${workspaceFolder}",
"env": {
"ELECTRON_RUN_AS_NODE": "true"
},
"outFiles": [ "outFiles": [
"${workspaceFolder}/out/**/*.js" "${workspaceFolder}/out/**/*.js"
] ]
}, }
{
"type": "chrome",
"request": "launch",
"name": "Run Extension Unit Tests",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
},
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
], ],
"compounds": [ "compounds": [
{ {
"name": "Debug Unit Tests", "name": "Debug sqlops Main and Renderer",
"configurations": [ "configurations": [
"Attach to azuredatastudio", "Launch sqlops",
"Run Unit Tests"
]
},
{
"name": "Debug Extension Unit Tests",
"configurations": [
"Attach to Extension Host",
"Run Extension Unit Tests"
]
},
{
"name": "Debug azuredatastudio Main and Renderer",
"configurations": [
"Launch azuredatastudio",
"Attach to Main Process" "Attach to Main Process"
] ]
}, },
{ {
"name": "Search and Renderer processes", "name": "Search and Renderer processes",
"configurations": [ "configurations": [
"Launch azuredatastudio", "Launch sqlops",
"Attach to Search Process" "Attach to Search Process"
] ]
},
{
"name": "Renderer and Extension Host processes",
"configurations": [
"Launch azuredatastudio",
"Attach to Extension Host"
]
} }
] ]
} }

22
.vscode/settings.json vendored
View File

@@ -11,7 +11,7 @@
} }
}, },
"files.associations": { "files.associations": {
"cglicenses.json": "jsonc" "OSSREADME.json": "jsonc"
}, },
"search.exclude": { "search.exclude": {
"**/node_modules": true, "**/node_modules": true,
@@ -22,9 +22,9 @@
"out-vscode/**": true, "out-vscode/**": true,
"i18n/**": true, "i18n/**": true,
"extensions/**/out/**": true, "extensions/**/out/**": true,
"test/smoke/out/**": true, "test/smoke/out/**": true
"src/vs/base/test/node/uri.test.data.txt": true
}, },
"tslint.enable": true,
"lcov.path": [ "lcov.path": [
"./.build/coverage/lcov.info", "./.build/coverage/lcov.info",
"./.build/coverage-single/lcov.info" "./.build/coverage-single/lcov.info"
@@ -38,19 +38,5 @@
} }
} }
], ],
"typescript.tsdk": "node_modules/typescript/lib", "typescript.tsdk": "node_modules/typescript/lib"
"npm.exclude": "**/extensions/**",
"git.ignoreLimitWarning": true,
"emmet.excludeLanguages": [],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.quoteStyle": "single",
"json.schemas": [{
"fileMatch": [ "cgmanifest.json" ],
"url": "./.vscode/cgmanifest.schema.json"
}, {
"fileMatch": [ "cglicenses.json" ],
"url": "./.vscode/cglicenses.schema.json"
}
],
"git.ignoreLimitWarning": true
} }

View File

@@ -1,40 +0,0 @@
{
// Each snippet is defined under a snippet name and has a scope, prefix, body and
// description. The scope defines in watch languages the snippet is applicable. The prefix is what is
// used to trigger the snippet and the body will be expanded and inserted.Possible variables are:
// $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
// Placeholders with the same ids are connected.
// Example:
"MSFT Copyright Header": {
"scope": "javascript,typescript,css",
"prefix": [
"header",
"stub",
"copyright"
],
"body": [
"/*---------------------------------------------------------------------------------------------",
" * Copyright (c) Microsoft Corporation. All rights reserved.",
" * Licensed under the Source EULA. See License.txt in the project root for license information.",
" *--------------------------------------------------------------------------------------------*/",
"",
"$0"
],
"description": "Insert Copyright Statement"
},
"TS -> Inject Service": {
"scope": "typescript",
"description": "Constructor Injection Pattern",
"prefix": "@inject",
"body": "@$1 private readonly _$2: ${1},$0"
},
"TS -> Event & Emitter": {
"scope": "typescript",
"prefix": "emitter",
"description": "Add emitter and event properties",
"body": [
"private readonly _onDid$1 = new Emitter<$2>();",
"readonly onDid$1: Event<$2> = this._onDid$1.event;"
],
}
}

17
.vscode/tasks.json vendored
View File

@@ -28,23 +28,6 @@
} }
} }
}, },
{
"type": "npm",
"script": "strict-null-check-watch",
"label": "TS - Strict Null Checks",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-strict-null",
"applyTo": "allDocuments"
},
"runOptions": {
"runOn": "folderOpen"
}
},
{ {
"type": "gulp", "type": "gulp",
"task": "tslint", "task": "tslint",

View File

@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron" disturl "https://atom.io/download/electron"
target "3.1.2" target "1.7.12"
runtime "electron" runtime "electron"

View File

@@ -1,188 +1,5 @@
# Change Log # Change Log
## Version 1.5.1
* Release date: March 18, 2019
* Release status: General Availability
## What's new in this version
* Announcing T-SQL Notebooks
* Announcing PostgreSQL extension
* Announcing SQL Server Dacpac extension
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/25?closed=1).
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* GeoffYoung for `Fix sqlDropColumn description #4422`
## Version 1.4.5
* Release date: February 13, 2019
* Release status: General Availability
## What's new in this version
* Added **Admin pack for SQL Server** extension pack to make it easier to install SQL Server admin-related extensions. This includes:
* [SQL Server Agent](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-agent-extension?view=sql-server-2017)
* [SQL Server Profiler](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-profiler-extension?view=sql-server-2017)
* [SQL Server Import](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-import-extension?view=sql-server-2017)
* Added filtering extended event support in Profiler extension
* Added Save as XML feature that can save T-SQL results as XML
* Added Data-Tier Application Wizard improvements
* Added Generate script button
* Added view to give warnings of possible data loss during deployment
* Updates to the [SQL Server 2019 Preview extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
* Results streaming enabled by default for long running queries
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/23?closed=1).
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
* sadedil for `Missing feature request: Save as XML #3729`
* gbritton1 for `Removed reference to object explorer #3463`
## Version 1.3.8
* Release date: January 9, 2019
* Release status: General Availability
## What's new in this version
* #13 Feature Request: Azure Active Directory Authentication
* #1040 Stream initial query results as they become available
* #3298 Сan't add an azure account.
* #2387 Support Per-User Installer
* SQL Server Import updates for DACPAC\BACPAC
* SQL Server Profiler UI and UX improvements
* Updates to [SQL Server 2019 extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
* **sp_executesql to SQL** and **New Database** extensions
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
* oltruong for `typo fix #3025'`
* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
* Thomas-S-B for `Simplified code #750`
## Version 1.2.4
* Release date: November 6, 2018
* Release status: General Availability
## What's new in this version
* Update to the SQL Server 2019 Preview extension
* Introducing Paste the Plan extension
* Introducing High Color queries extension, including SSMS editor theme
* Fixes in SQL Server Agent, Profiler, and Import extensions
* Fix .Net Core Socket KeepAlive issue causing dropped inactive connections on macOS
* Upgrade SQL Tools Service to .Net Core 2.2 Preview 3 (for eventual AAD support)
* Fix customer reported GitHub issues
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* rdaniels6813 for `Add query plan theme support #3031`
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
## Version 1.1.3
* Release date: October 18, 2018
* Release status: General Availability
## What's new in this version
* Introducing the Azure Resource Explorer to browse Azure SQL Databases
* Improve Object Explorer and Query Editor connectivity robustness
* SQL Server 2019 and SQL Agent extension improvements
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* philoushka for `center the icon #2760`
* anthonypants for `Typo #2775`
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
* kstolte for `Fixing a reference to SQL Ops Studio #2788`
## Version 1.0.0
* Release date: September 24, 2018
* Release status: General Availability
## What's new in this version
* Announcing the SQL Server 2019 Preview extension.
* Support for SQL Server 2019 preview features including big data cluster support.
* Azure Data Studio Notebooks
* The Azure Resource Explorer viewlets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported.
* SQL Server Polybase Create External Table Wizard
* Query Results Grid performance and UX improvements for large number of result sets.
* Visual Studio Code source code refresh from 1.23 to 1.26.1 with Grid Layout and Improved Settings Editor (preview).
* Accessibility improvements for screen reader, keyboard navigation and high-contrast.
* Added Connection name option to provide an alternative display name in the Servers viewlet.
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* AlexFsmn `Feature: Ability to add connection name #2332`
* AlexFsmn `Disabled connection name input when connecting to a server. #2566`
## Version 0.33.7
* Release date: August 30, 2018
* Release status: Public Preview
## What's new in this version
* Announcing the SQL Server Import Extension
* SQL Server Profiler Session management
* SQL Server Agent improvements
* New community extension: First Responder Kit
* Quality of Life improvements: Connection strings
* Fix many customer reported GitHub issues
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* SebastianPfliegel `Added more saveAsCsv options #2099`
* ianychoi `Fixes a typo: Mimunum -> Minimum #1994`
* AlexFsmn `Fixed bug where proper file extension wasn't appended to filename. #2151`
* AlexFsmn `Added functionality for adding any file to import wizard #2329`
* AlexFsmn `Fixed background issue when copying a chart to clipboard #2215`
* AlexFsmn `Fixed problem where vertical charts didn't display labels correctly. #2263`
* AlexFsmn `Fixed Initial values for charts to match visuals #2266`
* AlexFsmn `Renamed chart option labels #2264`
* AlexFsmn `Added feature for opening file after exporting to CSV/XLS/JSON & query files #2216`
* AlexFsmm `Get Connection String should copy to clipboard #2175`
## Version 0.31.4
* Release date: July 19, 2018
* Release status: Public Preview
## What's new in this version
* SQL Server Agent for Azure Data Studio extension improvements
* Added view of Alerts, Operators, and Proxies and icons on left pane
* Added dialogs for New Job, New Job Step, New Alert, and New Operator
* Added Delete Job, Delete Alert, and Delete Operator (right-click)
* Added Previous Runs visualization
* Added Filters for each column name
* SQL Server Profiler for Azure Data Studio extension improvements
* Added Hotkeys to quickly launch and start/stop Profiler
* Added 5 Default Templates to view Extended Events
* Added Server/Database connection name
* Added support for Azure SQL Database instances
* Added suggestion to exit Profiler when tab is closed when Profiler is still running
* Release of Combine Scripts Extension
* Wizard and Dialog Extensibility
* Fix GitHub Issues
## Version 0.30.6
* Release date: June 20, 2018
* Release status: Public Preview
## What's new in this version
* **SQL Server Profiler for Azure Data Studio *Preview*** extension initial release
* The new **SQL Data Warehouse** extension includes rich customizable dashboard widgets surfacing insights to your data warehouse. This unlocks key scenarios around managing and tuning your data warehouse to ensure it is optimized for consistent performance.
* **Edit Data "Filtering and Sorting"** support
* **SQL Server Agent for Azure Data Studio *Preview*** extension enhancements for Jobs and Job History views
* Improved **Wizard & Dialog UI Builder Framework** extensibility APIs
* Update VS Code Platform source code integrating [March 2018 (1.22)](https://code.visualstudio.com/updates/v1_22) and [April 2018 (1.23)](https://code.visualstudio.com/updates/v1_23) releases
* Fix GitHub Issues
## Version 0.29.3 ## Version 0.29.3
* Release date: May 7, 2018 * Release date: May 7, 2018
* Release status: Public Preview * Release status: Public Preview
@@ -192,7 +9,7 @@ The May release is focused on stabilization and bug fixes leading up to the Buil
* Announcing **Redgate SQL Search** extension available in Extension Manager * Announcing **Redgate SQL Search** extension available in Extension Manager
* Community Localization available for 10 languages: **German, Spanish, French, Italian, Japanese, Korean, Portuguese, Russian, Simplified Chinese and Traditional Chinese!** * Community Localization available for 10 languages: **German, Spanish, French, Italian, Japanese, Korean, Portuguese, Russian, Simplified Chinese and Traditional Chinese!**
* Reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) * **GDPR-compliant** build has reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement)
* Extension Manager has improved Marketplace experience to easily discover community extensions * Extension Manager has improved Marketplace experience to easily discover community extensions
* SQL Agent extension Jobs and Job History view improvement * SQL Agent extension Jobs and Job History view improvement
* Updates for **whoisactive** and **Server Reports** extensions * Updates for **whoisactive** and **Server Reports** extensions
@@ -218,8 +35,8 @@ The April Public Preview release contains some of the following highlights.
* Release status: Public Preview * Release status: Public Preview
## What's new in this version ## What's new in this version
The March Public Preview release enables some key aspects of the Azure Data Studio The March Public Preview release enables some key aspects of the SQL Operations
extensibility story. Here are some highlights in this release. Studio extensibility story. Here are some highlights in this release.
* Enhance the Manage Dashboard extensibility model to support tabbed Insights and Configuration panes * Enhance the Manage Dashboard extensibility model to support tabbed Insights and Configuration panes
* Dashboard Insights extensions for `sp_whoisactive` from [whoisactive.com](http://whoisactive.com) * Dashboard Insights extensions for `sp_whoisactive` from [whoisactive.com](http://whoisactive.com)

View File

@@ -1,13 +1,13 @@
## Contributing Issues ## Contributing Issues
### Before Submitting an Issue ### Before Submitting an Issue
First, please do a search in [open issues](https://github.com/Microsoft/azuredatastudio/issues) to see if the issue or feature request has already been filed. Use this [query](https://github.com/Microsoft/azuredatastudio/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc) to search for the most popular feature requests. First, please do a search in [open issues](https://github.com/Microsoft/sqlopsstudio/issues) to see if the issue or feature request has already been filed. Use this [query](https://github.com/Microsoft/sqlopsstudio/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc) to search for the most popular feature requests.
If you find your issue already exists, make relevant comments and add your [reaction](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments). Use a reaction in place of a "+1" comment. If you find your issue already exists, make relevant comments and add your [reaction](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments). Use a reaction in place of a "+1" comment.
:+1: - upvote 👍 - upvote
:-1: - downvote 👎 - downvote
If you cannot find an existing issue that describes your bug or feature, submit an issue using the guidelines below. If you cannot find an existing issue that describes your bug or feature, submit an issue using the guidelines below.
@@ -18,33 +18,29 @@ File a single issue per problem and feature request.
* Do not enumerate multiple bugs or feature requests in the same issue. * Do not enumerate multiple bugs or feature requests in the same issue.
* Do not add your issue as a comment to an existing issue unless it's for the identical input. Many issues look similar, but have different causes. * Do not add your issue as a comment to an existing issue unless it's for the identical input. Many issues look similar, but have different causes.
The more information you can provide, the more likely someone will be successful at reproducing the issue and finding a fix. The more information you can provide, the more likely someone will be successful reproducing the issue and finding a fix.
The built-in tool for reporting an issue, which you can access by using `Report Issue` in Azure Data Studio's Help menu, can help streamline this process by automatically providing the version of Azure Data Studio, all your installed extensions, and your system info. Please include the following with each issue.
Please include the following with each issue. * Version of SQL Ops Studio
* Version of Azure Data Studio (formerly SQL Operations Studio) > **Tip:** You can easily create an issue using `Report Issues` from SQL Operations Studio Help menu.
* Your operating system * Reproducible steps (1... 2... 3...) and what you expected versus what you actually saw.
* Images, animations, or a link to a video.
* A code snippet that demonstrates the issue or a link to a code repository we can easily pull down onto our machine to recreate the issue.
> **Tip:** You can easily create an issue using `Report Issues` from Azure Data Studio Help menu. > **Note:** Because we need to copy and paste the code snippet, including a code snippet as a media file (i.e. .gif) is not sufficient.
* Reproducible steps (1... 2... 3...) and what you expected versus what you actually saw.
* Images, animations, or a link to a video.
* A code snippet that demonstrates the issue or a link to a code repository we can easily pull down onto our machine to recreate the issue.
> **Note:** Because we need to copy and paste the code snippet, including a code snippet as a media file (i.e. .gif) is not sufficient.
* Errors in the Dev Tools Console (Help | Toggle Developer Tools) * Errors in the Dev Tools Console (Help | Toggle Developer Tools)
Please remember to do the following: Please remember to do the following:
* Search the issue repository to see if there exists a duplicate. * Search the issue repository to see if there exists a duplicate.
* Simplify your scripts around the issue so we can better isolate the problem. * Simplify your scripts around the issue so we can better isolate the problem.
Don't feel bad if we can't reproduce the issue and ask for more information! Don't feel bad if we can't reproduce the issue and ask for more information!
## Contributing Fixes ## Contributing Fixes
If you are interested in fixing issues and contributing directly to the code base, If you are interested in fixing issues and contributing directly to the code base,
please see the document [How to Contribute](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute). please see the document [How to Contribute](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute).

View File

@@ -1,6 +1,6 @@
MICROSOFT SOFTWARE LICENSE TERMS MICROSOFT SOFTWARE LICENSE TERMS
MICROSOFT AZURE DATA STUDIO MICROSOFT SQL OPERATIONS STUDIO
Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual, Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us royalty-free right to use, copy, and modify the software code provided by us

1196
OSSREADME.json Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,32 +1,25 @@
# Azure Data Studio # SQL Operations Studio
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status](https://dev.azure.com/ms/azuredatastudio/_apis/build/status/Microsoft.azuredatastudio)](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=4)
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux. SQL Operations Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
**Download the latest Azure Data Studio release** **Download SQL Operations Studio May Public Preview**
Platform | Link Platform | Link
-- | -- -- | --
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2083322 Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=873386
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2083323 Windows ZIP | https://go.microsoft.com/fwlink/?linkid=873387
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2083324 macOS ZIP | https://go.microsoft.com/fwlink/?linkid=873388
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2083325 Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=873389
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2083424 Linux RPM | https://go.microsoft.com/fwlink/?linkid=873390
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2083326 Linux DEB | https://go.microsoft.com/fwlink/?linkid=873391
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2083327
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions. Go to our [download page](https://aka.ms/sqlopsstudio) for more specific instructions.
Try out the latest insiders build from `master`: Try out the latest insiders build from `master` at https://github.com/Microsoft/sqlopsstudio/releases.
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
- [Windows System Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider)
- [Windows ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider)
- [macOS ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider)
- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CHANGELOG.md) for additional details of what's in this release. See the [change log](https://github.com/Microsoft/sqlopsstudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
**Feature Highlights** **Feature Highlights**
@@ -41,90 +34,60 @@ See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CH
- Task History window to view current task execution status, completion results with error messages and task T-SQL scripting - Task History window to view current task execution status, completion results with error messages and task T-SQL scripting
- Scripting support to generate CREATE, SELECT, ALTER and DROP statements for database objects - Scripting support to generate CREATE, SELECT, ALTER and DROP statements for database objects
- Workspaces with full Git integration and Find In Files support to managing T-SQL script libraries - Workspaces with full Git integration and Find In Files support to managing T-SQL script libraries
- Modern light-weight shell with theming, user settings, full-screen support, integrated terminal and numerous other features - Modern light-weight shell with theming, user settings, full screen support, integrated terminal and numerous other features
Here are some of these features in action. Here's some of these features in action.
<img src='https://github.com/Microsoft/azuredatastudio/blob/master/docs/overview_screen.jpg' width='800px'> <img src='https://github.com/Microsoft/sqlopsstudio/blob/master/docs/overview_screen.jpg' width='800px'>
## Contributing ## Contributing
If you are interested in fixing issues and contributing directly to the code base, If you are interested in fixing issues and contributing directly to the code base,
please see the document [How to Contribute](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute), which covers the following: please see the document [How to Contribute](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute), which covers the following:
* [How to build and run from source](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#Build-and-Run-From-Source) * [How to build and run from source](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute#Build-and-Run-From-Source)
* [The development workflow, including debugging and running tests](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#development-workflow) * [The development workflow, including debugging and running tests](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute#development-workflow)
* [Submitting pull requests](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests) * [Submitting pull requests](https://github.com/Microsoft/sqlopsstudio/wiki/How-to-Contribute#pull-requests)
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## Localization ## Localization
Azure Data Studio localization is now open for community contributions. You can contribute to localization for both software and docs. https://aka.ms/SQLOpsStudioLoc SQL Operations Studio localization is now open for community contributions. You can contribute to localization for both software and docs. https://aka.ms/SQLOpsStudioLoc
Localization is now opened for 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). Help us make Azure Data Studio available in your language! Localization is now opened for 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). Help us make SQL Operations Studio available in your language!
## Privacy Statement ## Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software. The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
## Contributions and "Thank You" ## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes: We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* GeoffYoung for `Fix sqlDropColumn description #4422`
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
* sadedil for `Missing feature request: Save as XML #3729`
* gbritton1 for `Removed reference to object explorer #3463`
* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
* oltruong for `typo fix #3025'`
* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
* Thomas-S-B for `Simplified code #750`
* rdaniels6813 for `Add query plan theme support #3031`
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
* philoushka for `center the icon #2760`
* anthonypants for `Typo #2775`
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
* kstolte for `Fixing a reference to SQL Ops Studio #2788`
* AlexFsmn `Feature: Ability to add connection name #2332`
* AlexFsmn `Disabled connection name input when connecting to a server. #2566`
* SebastianPfliegel `Added more saveAsCsv options #2099`
* ianychoi `Fixes a typo: Mimunum -> Minimum #1994`
* AlexFsmn `Fixed bug where proper file extension wasn't appended to the filename. #2151`
* AlexFsmn `Added functionality for adding any file to import wizard #2329`
* AlexFsmn `Fixed background issue when copying a chart to clipboard #2215`
* AlexFsmn `Fixed problem where vertical charts didn't display labels correctly. #2263`
* AlexFsmn `Fixed Initial values for charts to match visuals #2266`
* AlexFsmn `Renamed chart option labels #2264`
* AlexFsmn `Added feature for the opening file after exporting to CSV/XLS/JSON & query files #2216`
* AlexFsmm `Get Connection String should copy to clipboard #2175`
* lanceklinger `Fix for double-clicking column handle in results table #1504`
* westerncj for `Removed duplicate contribution from README.md (#753)` * westerncj for `Removed duplicate contribution from README.md (#753)`
* ntovas for `Fix for duplicate extensions shown in "Save File" dialog. (#779)` * ntovas for `Fix for duplicate extensions shown in "Save File" dialog. (#779)`
* SebastianPfliegel for `Add cursor snippet (#475)` * SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for the fix: `revert README and CONTRIBUTING after last VSCode merge (#574)` * mikaoelitiana for fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)` * alextercete for `Reinstate menu item to install from VSIX (#682)`
* alextercete for `Fix "No extension gallery service configured" error (#427)` * alextercete for `Fix "No extension gallery service configured" error (#427)`
* mwiedemeyer for `Fix #58: Default sort order for DB size widget (#111)` * mwiedemeyer for `Fix #58: Default sort order for DB size widget (#111)`
* AlexTroshkin for `Show disconnect in context menu only when connectionProfile connected (#150)` * AlexTroshkin for `Show disconnect in context menu only when connectionProfile connected (#150)`
* AlexTroshkin for `Fix #138: Invalid syntax color highlighting (identity not highlighting) (#140))` * AlexTroshkin for `Fix #138: Invalid syntax color highlighting (identity not highlighting) (#140))`
* stebet for `Fix #153: Fixing sql snippets that failed on a DB with a case-sensitive collation. (#152)` * stebet for `Fix #153: Fixing sql snippets that failed on a DB with case-sensitive collation. (#152)`
* SebastianPfliegel `Remove sqlExtensionHelp (#312)` * SebastianPfliegel `Remove sqlExtensionHelp (#312)`
* olljanat for `Implemented npm version check (#314)` * olljanat for `Implemented npm version check (#314)`
* Adam Machanic for helping with the `whoisactive` extension * Adam Mechanic for helping with the `whoisactive` extension
* All community localization contributors: * All community localization contributors
* French: Adrien Clerbois, ANAS BELABBES, Antoine Griffard, Arian Papillon, Eric Macarez, Eric Van Thorre, Jérémy LANDON, Matthias GROSPERRIN, Maxime COQUEREL, Olivier Guinart, thierry DEMAN-BARCELÒ, Thomas Potier * French: Adrien Clerbois, ANAS BELABBES, Antoine Griffard, Arian Papillon, Eric Macarez, Eric Van Thorre, Jérémy LANDON, Matthias GROSPERRIN, Maxime COQUEREL, Olivier Guinart, thierry DEMAN-BARCELÒ, Thomas Potier
* Italian: Aldo Donetti, Alessandro Alpi, Andrea Dottor, Bruni Luca, Gianluca Hotz, Luca Nardi, Luigi Bruno, Marco Dal Pino, Mirco Vanini, Pasquale Ceglie, Riccardo Cappello, Sergio Govoni, Stefano Demiliani * Italian: Aldo Donetti, Alessandro Alpi, Andrea Dottor, Bruni Luca, Gianluca Hotz, Luca Nardi, Luigi Bruno, Marco Dal Pino, Mirco Vanini, Pasquale Ceglie, Riccardo Cappello, Sergio Govoni, Stefano Demiliani
* German: Anna Henke-Gunvaldson, Ben Weissman, David Ullmer, J.M. ., Kai Modo, Konstantin Staschill, Kostja Klein, Lennart Trunk, Markus Ehrenmüller-Jensen, Mascha Kroenlein, Matthias Knoll, Mourad Louha, Thomas Hütter, Wolfgang Straßer * German: Anna Henke-Gunvaldson, Ben Weissman, David Ullmer, J.M. ., Kai Modo, Konstantin Staschill, Kostja Klein, Lennart Trunk, Markus Ehrenmüller-Jensen, Mascha Kroenlein, Matthias Knoll, Mourad Louha, Thomas Hütter, Wolfgang Straßer
* Spanish: Alberto Poblacion, Andy Gonzalez, Carlos Mendible, Christian Araujo, Daniel D, Eickhel Mendoza, Ernesto Cardenas, Ivan Toledo Ivanovic, Fran Diaz, JESUS GIL, Jorge Serrano Pérez, José Saturnino Pimentel Juárez, Mauricio Hidalgo, Pablo Iglesias, Rikhardo Estrada Rdez, Thierry DEMAN, YOLANDA CUESTA ALTIERI * Spanish: Alberto Poblacion, Andy Gonzalez, Carlos Mendible, Christian Araujo, Daniel D, Eickhel Mendoza, Ernesto Cardenas, Ivan Toledo Ivanovic, Fran Diaz, JESUS GIL, Jorge Serrano Pérez, José Saturnino Pimentel Juárez, Mauricio Hidalgo, Pablo Iglesias, Rikhardo Estrada Rdez, Thierry DEMAN, YOLANDA CUESTA ALTIERI
* Japanese: Fujio Kojima, Kazushi KAMEGAWA, Masayoshi Yamada, Masayuki Ozawa, Seiji Momoto, Takashi Kanai, Takayoshi Tanaka, Yoshihisa Ozaki, 庄垣内治 * Japanese: Fujio Kojima, Kazushi KAMEGAWA, Masayoshi Yamada, Masayuki Ozawa , Seiji Momoto, Takashi Kanai, Takayoshi Tanaka, Yoshihisa Ozaki, 庄垣内治
* Chinese (simplified): DAN YE, Joel Yang, Lynne Dong, RyanYu Zhang, Sheng Jiang, Wei Zhang, Zhiliang Xu * Chinese (simplified): DAN YE, Joel Yang, Lynne Dong, RyanYu Zhang, Sheng Jiang, Wei Zhang, Zhiliang Xu
* Chinese (Traditional): Bruce Chen, Chiayi Yen, Kevin Yang, Winnie Lin, 保哥 Will, 謝政廷 * Chinese (Traditional): Bruce Chen, Chiayi Yen, Kevin Yang, Winnie Lin, 保哥 Will, 謝政廷
* Korean: Do-Kyun Kim, Evelyn Kim, Helen Jung, Hong Jmee, jeongwoo choi, Jun Hyoung Lee, Jungsun Kim정선, Justin Yoo, Kavrith mucha, Kiwoong Youm, MinGyu Ju, MVP_JUNO BEA, Sejun Kim, SOONMAN KWON, sung man ko, Yeongrak Choi, younggun kim, Youngjae Kim, 소영 이 * Korean: Do-Kyun Kim, Evelyn Kim, Helen Jung, Hong Jmee, jeongwoo choi, Jun Hyoung Lee, Jungsun Kim정선, Justin Yoo, Kavrith mucha, Kiwoong Youm, MinGyu Ju, MVP_JUNO BEA, Sejun Kim, SOONMAN KWON, sung man ko, Yeongrak Choi, younggun kim, Youngjae Kim, 소영 이
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov * Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi * Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
And of course, we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/ThirdPartyNotices.txt)
And of course we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/sqlopsstudio/master/ThirdPartyNotices.txt)
## License ## License

View File

@@ -1,4 +1,4 @@
MICROSOFT Azure Data Studio MICROSOFT SQL OPERATIONS STUDIO
THIRD-PARTY SOFTWARE NOTICES AND INFORMATION THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
Do Not Translate or Localize Do Not Translate or Localize
@@ -17,13 +17,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
chokidar: https://github.com/paulmillr/chokidar chokidar: https://github.com/paulmillr/chokidar
comment-json: https://github.com/kaelzhang/node-comment-json comment-json: https://github.com/kaelzhang/node-comment-json
core-js: https://github.com/zloirock/core-js core-js: https://github.com/zloirock/core-js
decompress: https://github.com/kevva/decompress
emmet: https://github.com/emmetio/emmet emmet: https://github.com/emmetio/emmet
error-ex: https://github.com/Qix-/node-error-ex error-ex: https://github.com/Qix-/node-error-ex
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
fast-plist: https://github.com/Microsoft/node-fast-plist fast-plist: https://github.com/Microsoft/node-fast-plist
figures: https://github.com/sindresorhus/figures
find-remove: https://www.npmjs.com/package/find-remove
fs-extra: https://github.com/jprichardson/node-fs-extra fs-extra: https://github.com/jprichardson/node-fs-extra
gc-signals: https://github.com/Microsoft/node-gc-signals gc-signals: https://github.com/Microsoft/node-gc-signals
getmac: https://github.com/bevry/getmac getmac: https://github.com/bevry/getmac
@@ -36,35 +33,28 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jquery-ui: https://github.com/jquery/jquery-ui jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet jschardet: https://github.com/aadsm/jschardet
JupyterLab: https://github.com/jupyterlab/jupyterlab
make-error: https://github.com/JsCommunity/make-error make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment moment: https://github.com/moment/moment
native-keymap: https://github.com/Microsoft/node-native-keymap native-keymap: https://github.com/Microsoft/node-native-keymap
native-watchdog: https://github.com/Microsoft/node-native-watchdog native-watchdog: https://github.com/Microsoft/node-native-watchdog
ng2-charts: https://github.com/valor-software/ng2-charts ng2-charts: https://github.com/valor-software/ng2-charts
node-fetch: https://github.com/bitinn/node-fetch
node-pty: https://github.com/Tyriar/node-pty node-pty: https://github.com/Tyriar/node-pty
nsfw: https://github.com/Axosoft/nsfw nsfw: https://github.com/Axosoft/nsfw
pretty-data: https://github.com/vkiryukhin/pretty-data pretty-data: https://github.com/vkiryukhin/pretty-data
primeng: https://github.com/primefaces/primeng primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js pty.js: https://github.com/chjj/pty.js
reflect-metadata: https://github.com/rbuckton/reflect-metadata reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS rxjs: https://github.com/ReactiveX/RxJS
semver: https://github.com/npm/node-semver semver: https://github.com/npm/node-semver
slickgrid: https://github.com/6pac/SlickGrid slickgrid: https://github.com/6pac/SlickGrid
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
svg.js: https://github.com/svgdotjs/svg.js svg.js: https://github.com/svgdotjs/svg.js
systemjs: https://github.com/systemjs/systemjs systemjs: https://github.com/systemjs/systemjs
temp-write: https://github.com/sindresorhus/temp-write
underscore: https://github.com/jashkenas/underscore underscore: https://github.com/jashkenas/underscore
v8-profiler: https://github.com/node-inspector/v8-profiler v8-profiler: https://github.com/node-inspector/v8-profiler
vscode: https://github.com/microsoft/vscode vscode: https://github.com/microsoft/vscode
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
vscode-nls: https://github.com/Microsoft/vscode-nls
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
vscode-textmate: https://github.com/Microsoft/vscode-textmate vscode-textmate: https://github.com/Microsoft/vscode-textmate
winreg: https://github.com/fresc81/node-winreg winreg: https://github.com/fresc81/node-winreg
@@ -72,9 +62,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
yauzl: https://github.com/thejoshwolfe/yauzl yauzl: https://github.com/thejoshwolfe/yauzl
zone.js: https://www.npmjs.com/package/zone zone.js: https://www.npmjs.com/package/zone
Microsoft PROSE SDK: https://microsoft.github.io/prose
%% angular NOTICES AND INFORMATION BEGIN HERE %% angular NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License
Copyright (c) 2014-2017 Google, Inc. http://angular.io Copyright (c) 2014-2017 Google, Inc. http://angular.io
@@ -300,20 +291,6 @@ THE SOFTWARE.
========================================= =========================================
END OF core-js NOTICES AND INFORMATION END OF core-js NOTICES AND INFORMATION
%% decompress NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) Kevin Mårtensson <kevinmartensson@gmail.com> (github.com/kevva)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF decompress NOTICES AND INFORMATION
%% emmet NOTICES AND INFORMATION BEGIN HERE %% emmet NOTICES AND INFORMATION BEGIN HERE
========================================= =========================================
The MIT License (MIT) The MIT License (MIT)
@@ -415,20 +392,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
========================================= =========================================
END OF fast-plist NOTICES AND INFORMATION END OF fast-plist NOTICES AND INFORMATION
%% figures NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF figures NOTICES AND INFORMATION
%% fs-extra NOTICES AND INFORMATION BEGIN HERE %% fs-extra NOTICES AND INFORMATION BEGIN HERE
========================================= =========================================
(The MIT License) (The MIT License)
@@ -1202,43 +1165,6 @@ That's all there is to it!
========================================= =========================================
END OF jschardet NOTICES AND INFORMATION END OF jschardet NOTICES AND INFORMATION
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
Copyright (c) 2015 Project Jupyter Contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Semver File License
===================
The semver.py file is from https://github.com/podhmo/python-semver
which is licensed under the "MIT" license. See the semver.py file for details.
END OF JupyterLab NOTICES AND INFORMATION
%% make-error NOTICES AND INFORMATION BEGIN HERE %% make-error NOTICES AND INFORMATION BEGIN HERE
========================================= =========================================
ISC © Julien Fontanet ISC © Julien Fontanet
@@ -1370,32 +1296,6 @@ SOFTWARE.
========================================= =========================================
END OF ng2-charts NOTICES AND INFORMATION END OF ng2-charts NOTICES AND INFORMATION
%% node-fetch NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2016 David Frank
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF node-fetch NOTICES AND INFORMATION
%% node-pty NOTICES AND INFORMATION BEGIN HERE %% node-pty NOTICES AND INFORMATION BEGIN HERE
========================================= =========================================
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/) Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
@@ -1470,30 +1370,6 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
========================================= =========================================
END OF primeng NOTICES AND INFORMATION END OF primeng NOTICES AND INFORMATION
%% process-nextick-args NOTICES AND INFORMATION BEGIN HERE
=========================================
# Copyright (c) 2015 Calvin Metcalf
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.**
=========================================
END OF process-nextick-args NOTICES AND INFORMATION
%% pty.js NOTICES AND INFORMATION BEGIN HERE %% pty.js NOTICES AND INFORMATION BEGIN HERE
========================================= =========================================
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/) Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
@@ -1578,66 +1454,6 @@ END OF TERMS AND CONDITIONS
========================================= =========================================
END OF reflect-metadata NOTICES AND INFORMATION END OF reflect-metadata NOTICES AND INFORMATION
%% request NOTICES AND INFORMATION BEGIN HERE
=========================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
You must give any other recipients of the Work or Derivative Works a copy of this License; and
You must cause any modified files to carry prominent notices stating that You changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
=========================================
END OF request NOTICES AND INFORMATION
%% rxjs NOTICES AND INFORMATION BEGIN HERE %% rxjs NOTICES AND INFORMATION BEGIN HERE
========================================= =========================================
Apache License Apache License
@@ -1963,20 +1779,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
========================================= =========================================
END OF systemjs NOTICES AND INFORMATION END OF systemjs NOTICES AND INFORMATION
%% temp-write NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF temp-write NOTICES AND INFORMATION
%% underscore NOTICES AND INFORMATION BEGIN HERE %% underscore NOTICES AND INFORMATION BEGIN HERE
========================================= =========================================
Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
@@ -2079,50 +1881,6 @@ OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWA
========================================= =========================================
END OF vscode-debugprotocol NOTICES AND INFORMATION END OF vscode-debugprotocol NOTICES AND INFORMATION
%% vscode-languageclient NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF vscode-languageclient NOTICES AND INFORMATION
%% vscode-nls NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) Microsoft Corporation
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF vscode-nls NOTICES AND INFORMATION
%% vscode-ripgrep NOTICES AND INFORMATION BEGIN HERE %% vscode-ripgrep NOTICES AND INFORMATION BEGIN HERE
========================================= =========================================
vscode-ripgrep vscode-ripgrep
@@ -2281,188 +2039,4 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. THE SOFTWARE.
========================================= =========================================
END OF zone.js NOTICES AND INFORMATION END OF zone.js NOTICES AND INFORMATION
%% Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION BEGIN HERE
=========================================
NOTICES AND INFORMATION
Do Not Translate or Localize
This software incorporates material from third parties. Microsoft makes certain
open source code available at http://3rdpartysource.microsoft.com, or you may
send a check or money order for US $5.00, including the product name, the open
source component name, and version number, to:
Source Code Compliance Team
Microsoft Corporation
One Microsoft Way
Redmond, WA 98052
USA
Notwithstanding any other terms, you may reverse engineer this software to the
extent required to debug changes to any libraries licensed under the GNU Lesser
General Public License.
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
===================================CoreFx (BEGIN)
The MIT License (MIT)
Copyright (c) .NET Foundation and Contributors
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===================================CoreFx (END)
===================================CoreFxLab (BEGIN)
The MIT License (MIT)
Copyright (c) Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===================================CoreFxLab (END)
===================================Reactive Extensions (BEGIN)
Copyright (c) .NET Foundation and Contributors
All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License"); you
may not use this file except in compliance with the License. You may
obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing permissions
and limitations under the License.
List of contributors to the Rx libraries
Rx and Ix.NET:
Wes Dyer
Jeffrey van Gogh
Matthew Podwysocki
Bart De Smet
Danny van Velzen
Erik Meijer
Brian Beckman
Aaron Lahman
Georgi Chkodrov
Arthur Watson
Gert Drapers
Mark Shields
Eric Rozell
Rx.js and Ix.js:
Matthew Podwysocki
Jeffrey van Gogh
Bart De Smet
Brian Beckman
Wes Dyer
Erik Meijer
Tx:
Georgi Chkodrov
Bart De Smet
Aaron Lahman
Erik Meijer
Brian Grunkemeyer
Beysim Sezgin
Tiho Tarnavski
Collin Meek
Sajay Anthony
Karen Albrecht
John Allen
Zach Kramer
Rx++ and Ix++:
Aaron Lahman
===================================Reactive Extensions (END)
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
=========================================
END OF Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION
%% Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION BEGIN HERE
=========================================
NOTICES AND INFORMATION
Do Not Translate or Localize
This software incorporates material from third parties. Microsoft makes certain
open source code available at http://3rdpartysource.microsoft.com, or you may
send a check or money order for US $5.00, including the product name, the open
source component name, and version number, to:
Source Code Compliance Team
Microsoft Corporation
One Microsoft Way
Redmond, WA 98052
USA
Notwithstanding any other terms, you may reverse engineer this software to the
extent required to debug changes to any libraries licensed under the GNU Lesser
General Public License.
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
The MIT License (MIT)
Copyright (c) 2014 ExcelDataReader
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===================================ExcelDataReader (END)
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
=========================================
END OF Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION

19
appveyor.yml Normal file
View File

@@ -0,0 +1,19 @@
environment:
ELECTRON_RUN_AS_NODE: 1
VSCODE_BUILD_VERBOSE: true
cache:
- '%LOCALAPPDATA%\Yarn\cache'
install:
- ps: Install-Product node 8.9.1 x64
build_script:
- yarn
- .\node_modules\.bin\gulp electron
- npm run compile
test_script:
- node --version
- .\scripts\test.bat
- .\scripts\test-integration.bat

View File

@@ -1,46 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '8.x'
displayName: 'Install Node.js'
- script: |
git submodule update --init --recursive
nvm install 8.9.1
nvm use 8.9.1
npm i -g yarn
displayName: 'preinstall'
- script: |
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
# sh -e /etc/init.d/xvfb start
# sleep 3
displayName: 'Linux preinstall'
condition: eq(variables['Agent.OS'], 'Linux')
- script: |
yarn
displayName: 'Install'
- script: |
node_modules/.bin/gulp electron
node_modules/.bin/gulp compile --max_old_space_size=4096
node_modules/.bin/gulp optimize-vscode --max_old_space_size=4096
displayName: 'Scripts'
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
displayName: 'Tests'
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
condition: succeededOrFailed()
- script: |
yarn run tslint
displayName: 'Run TSLint'

View File

@@ -1,30 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '8.9'
displayName: 'Install Node.js'
- script: |
yarn
displayName: 'Yarn Install'
- script: |
.\node_modules\.bin\gulp electron
displayName: 'Electron'
- script: |
npm run compile
displayName: 'Compile'
- script: |
.\scripts\test.bat --reporter mocha-junit-reporter
displayName: 'Test'
- task: PublishTestResults@2
inputs:
testResultsFiles: 'test-results.xml'
condition: succeededOrFailed()
- script: |
yarn run tslint
displayName: 'Run TSLint'

View File

@@ -1,29 +0,0 @@
trigger:
- master
- releases/*
jobs:
# All tasks on Windows
- job: build_all_windows
displayName: Build all tasks (Windows)
pool:
vmImage: vs2017-win2016
steps:
- template: azure-pipelines-windows.yml
# All tasks on Linux
- job: build_all_linux
displayName: Build all tasks (Linux)
pool:
vmImage: 'Ubuntu 16.04'
steps:
- template: azure-pipelines-linux-mac.yml
# All tasks on macOS
- job: build_all_darwin
displayName: Build all tasks (macOS)
pool:
vmImage: macos-10.13
steps:
- template: azure-pipelines-linux-mac.yml

View File

@@ -1,40 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- script: |
yarn
displayName: Install Dependencies
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn strict-null-check
displayName: Run Strict Null Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- script: |
./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,91 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- script: |
set -e
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
yarn gulp -- hygiene
yarn monaco-compile-check
yarn strict-null-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" yarn gulp -- mixin
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
displayName: Prepare build
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
yarn gulp -- vscode-darwin-min upload-vscode-sourcemaps
displayName: Build
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
# APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
displayName: Run unit tests
- script: |
set -e
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd
displayName: Archive build
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: 'VSCode-darwin.zip'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: Codesign
- script: |
set -e
# remove pkg from archive
zip -d ../VSCode-darwin.zip "*.pkg"
# publish the build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js \
"$(VSCODE_QUALITY)" \
darwin \
archive \
"VSCode-darwin-$(VSCODE_QUALITY).zip" \
$VERSION \
true \
../VSCode-darwin.zip
# publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_MACOS)"
# upload configuration
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
yarn gulp -- upload-vscode-configuration
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View File

@@ -1 +0,0 @@
pat

View File

@@ -1,45 +0,0 @@
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- script: |
yarn
displayName: Install Dependencies
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn strict-null-check
displayName: Run Strict Null Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,40 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const documentdb_1 = require("documentdb");
function createDefaultConfig(quality) {
return {
id: quality,
frozen: false
};
}
function getConfig(quality) {
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) {
return e(err);
}
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
});
});
}
getConfig(process.argv[2])
.then(config => {
console.log(config.frozen);
process.exit(0);
})
.catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -1,112 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
CHILD_CONCURRENCY=1 yarn
npm run gulp -- hygiene
npm run monaco-compile-check
npm run strict-null-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
name: build
- script: |
set -e
npm run gulp -- "electron-$(VSCODE_ARCH)"
# xvfb seems to be crashing often, let's make sure it's always up
service xvfb start
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
name: test
- script: |
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
ARCH="$(VSCODE_ARCH)"
# Publish tarball
PLATFORM_LINUX="linux-$(VSCODE_ARCH)"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
# Publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX64)"
# Publish DEB
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
PLATFORM_DEB="linux-deb-$ARCH"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
# Publish RPM
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
PLATFORM_RPM="linux-rpm-$ARCH"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
# Publish Snap
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$(VSCODE_ARCH).tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball

View File

@@ -1,42 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball
- script: |
set -e
REPO="$(pwd)"
ARCH="$(VSCODE_ARCH)"
SNAP_ROOT="$REPO/.build/linux/snap/$ARCH"
# Install build dependencies
(cd build && yarn)
# Unpack snap tarball artifact, in order to preserve file perms
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$ARCH.tar.gz"
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)
# Create snap package
BUILD_VERSION="$(date +%s)"
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
(cd $SNAP_ROOT/code-* && snapcraft snap --output "$SNAP_PATH")
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-$ARCH" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"

View File

@@ -1,65 +0,0 @@
resources:
containers:
- container: vscode-x64
image: joaomoreno/vscode-linux-build-agent:x64
- container: vscode-ia32
image: joaomoreno/vscode-linux-build-agent:ia32
- container: snapcraft
image: snapcore/snapcraft
jobs:
- job: Windows
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: x64
steps:
- template: win32/product-build-win32.yml
- job: Windows32
condition: eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: ia32
steps:
- template: win32/product-build-win32.yml
- job: Linux
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: vscode-x64
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnap
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: snapcraft
dependsOn: Linux
steps:
- template: linux/snap-build-linux.yml
- job: Linux32
condition: eq(variables['VSCODE_BUILD_LINUX_32BIT'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: ia32
container: vscode-ia32
steps:
- template: linux/product-build-linux.yml
- job: macOS
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
pool:
vmImage: macOS 10.13
steps:
- template: darwin/product-build-darwin.yml

View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
</configuration>

View File

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="EsrpClient" version="1.0.27" />
</packages>

View File

@@ -1,44 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- powershell: |
yarn
displayName: Install Dependencies
- powershell: |
yarn gulp electron
displayName: Download Electron
- powershell: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- powershell: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn strict-null-check
displayName: Run Strict Null Checks
- powershell: |
yarn compile
displayName: Compile Sources
- powershell: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests
- powershell: |
.\scripts\test-integration.bat --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,24 +0,0 @@
# Taken from psake https://github.com/psake/psake
<#
.SYNOPSIS
This is a helper function that runs a scriptblock and checks the PS variable $lastexitcode
to see if an error occcured. If an error is detected then an exception is thrown.
This function allows you to run command-line programs without having to
explicitly check the $lastexitcode variable.
.EXAMPLE
exec { svn info $repository_trunk } "Error executing SVN. Please verify SVN command-line client is installed"
#>
function Exec
{
[CmdletBinding()]
param(
[Parameter(Position=0,Mandatory=1)][scriptblock]$cmd,
[Parameter(Position=1,Mandatory=0)][string]$errorMessage = ($msgs.error_bad_command -f $cmd)
)
& $cmd
if ($lastexitcode -ne 0) {
throw ("Exec: " + $errorMessage)
}
}

View File

@@ -1,14 +0,0 @@
Param(
[string]$AuthCertificateBase64,
[string]$AuthCertificateKey
)
# Import auth certificate
$AuthCertificateFileName = [System.IO.Path]::GetTempFileName()
$AuthCertificateBytes = [Convert]::FromBase64String($AuthCertificateBase64)
[IO.File]::WriteAllBytes($AuthCertificateFileName, $AuthCertificateBytes)
$AuthCertificate = Import-PfxCertificate -FilePath $AuthCertificateFileName -CertStoreLocation Cert:\LocalMachine\My -Password (ConvertTo-SecureString $AuthCertificateKey -AsPlainText -Force)
rm $AuthCertificateFileName
$ESRPAuthCertificateSubjectName = $AuthCertificate.Subject
Write-Output ("##vso[task.setvariable variable=ESRPAuthCertificateSubjectName;]$ESRPAuthCertificateSubjectName")

View File

@@ -1,152 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
exec { yarn }
exec { npm run gulp -- hygiene }
exec { npm run monaco-compile-check }
exec { npm run strict-null-check }
exec { npm run gulp -- mixin }
exec { node build/azure-pipelines/common/installDistro.js }
exec { node build/lib/builtInExtensions.js }
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min" }
exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
name: build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npm run gulp -- "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
# yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
name: test
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
inputs:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
restoreDirectory: packages
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: 'ESRP CodeSign'
- powershell: |
$ErrorActionPreference = "Stop"
.\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(ESRP_AUTH_CERTIFICATE) -AuthCertificateKey $(ESRP_AUTH_CERTIFICATE_KEY)
displayName: Import ESRP Auth Certificate
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-system-setup" "vscode-win32-$(VSCODE_ARCH)-user-setup" --sign }
$Repo = "$(pwd)"
$Root = "$Repo\.."
$SystemExe = "$Repo\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe"
$UserExe = "$Repo\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
$Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
$env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"
$assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }
exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip }
exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $SystemExe }
exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform-user" setup "VSCodeUserSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $UserExe }
# publish hockeyapp symbols
$hockeyAppId = if ("$(VSCODE_ARCH)" -eq "ia32") { "$(VSCODE_HOCKEYAPP_ID_WIN32)" } else { "$(VSCODE_HOCKEYAPP_ID_WIN64)" }
exec { node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" $hockeyAppId }
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View File

@@ -1,70 +0,0 @@
function Create-TmpJson($Obj) {
$FileName = [System.IO.Path]::GetTempFileName()
ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
return $FileName
}
$Auth = Create-TmpJson @{
Version = "1.0.0"
AuthenticationType = "AAD_CERT"
ClientId = $env:ESRPClientId
AuthCert = @{
SubjectName = $env:ESRPAuthCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
}
RequestSigningCert = @{
SubjectName = $env:ESRPCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
}
}
$Policy = Create-TmpJson @{
Version = "1.0.0"
}
$Input = Create-TmpJson @{
Version = "1.0.0"
SignBatches = @(
@{
SourceLocationType = "UNC"
SignRequestFiles = @(
@{
SourceLocation = $args[0]
}
)
SigningInfo = @{
Operations = @(
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolSign"
Parameters = @{
OpusName = "VS Code"
OpusInfo = "https://code.visualstudio.com/"
Append = "/as"
FileDigest = "/fd `"SHA256`""
PageHash = "/NPH"
TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
}
ToolName = "sign"
ToolVersion = "1.0"
},
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolVerify"
Parameters = @{
VerifyAll = "/all"
}
ToolName = "sign"
ToolVersion = "1.0"
}
)
}
}
)
}
$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\EsrpClient.1.0.27\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output

View File

@@ -1,2 +1,12 @@
[ [
{
"name": "ms-vscode.node-debug",
"version": "1.23.3",
"repo": "https://github.com/Microsoft/vscode-node-debug"
},
{
"name": "ms-vscode.node-debug2",
"version": "1.23.5",
"repo": "https://github.com/Microsoft/vscode-node-debug2"
}
] ]

View File

@@ -43,7 +43,7 @@ function asYarnDependency(prefix, tree) {
} }
function getYarnProductionDependencies(cwd) { function getYarnProductionDependencies(cwd) {
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] }); const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'ignore'] });
const match = /^{"type":"tree".*$/m.exec(raw); const match = /^{"type":"tree".*$/m.exec(raw);
if (!match || match.length !== 1) { if (!match || match.length !== 1) {

View File

@@ -12,12 +12,10 @@ const File = require('vinyl');
const i18n = require('./lib/i18n'); const i18n = require('./lib/i18n');
const standalone = require('./lib/standalone'); const standalone = require('./lib/standalone');
const cp = require('child_process'); const cp = require('child_process');
const compilation = require('./lib/compilation');
const monacoapi = require('./monaco/api');
const fs = require('fs');
var root = path.dirname(__dirname); var root = path.dirname(__dirname);
var sha1 = util.getVersion(root); var sha1 = util.getVersion(root);
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
var semver = require('./monaco/package.json').version; var semver = require('./monaco/package.json').version;
var headerVersion = semver + '(' + sha1 + ')'; var headerVersion = semver + '(' + sha1 + ')';
@@ -28,7 +26,7 @@ var editorEntryPoints = [
name: 'vs/editor/editor.main', name: 'vs/editor/editor.main',
include: [], include: [],
exclude: ['vs/css', 'vs/nls'], exclude: ['vs/css', 'vs/nls'],
prepend: ['out-editor-build/vs/css.js', 'out-editor-build/vs/nls.js'], prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'],
}, },
{ {
name: 'vs/base/common/worker/simpleWorker', name: 'vs/base/common/worker/simpleWorker',
@@ -61,63 +59,29 @@ var BUNDLED_FILE_HEADER = [
'' ''
].join('\n'); ].join('\n');
function editorLoaderConfig() {
var result = common.loaderConfig();
// never ship octicons in editor
result.paths['vs/base/browser/ui/octiconLabel/octiconLabel'] = 'out-build/vs/base/browser/ui/octiconLabel/octiconLabel.mock';
// force css inlining to use base64 -- see https://github.com/Microsoft/monaco-editor/issues/148
result['vs/css'] = {
inlineResources: 'base64',
inlineResourcesLimit: 3000 // see https://github.com/Microsoft/monaco-editor/issues/336
};
return result;
}
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []); const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
gulp.task('clean-editor-src', util.rimraf('out-editor-src'));
gulp.task('extract-editor-src', ['clean-editor-src'], function () {
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
const apiusages = monacoapi.execute().usageContent;
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
standalone.extractEditor({
sourcesRoot: path.join(root, 'src'),
entryPoints: [
'vs/editor/editor.main',
'vs/editor/editor.worker',
'vs/base/worker/workerMain',
],
inlineEntryPoints: [
apiusages,
extrausages
],
typings: [
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require-monaco.d.ts',
'vs/monaco.d.ts'
],
libs: [
`lib.es5.d.ts`,
`lib.dom.d.ts`,
`lib.webworker.importscripts.d.ts`
],
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
},
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
destRoot: path.join(root, 'out-editor-src')
});
});
// Full compile, including nls and inline sources in sourcemaps, for build
gulp.task('clean-editor-build', util.rimraf('out-editor-build'));
gulp.task('compile-editor-build', ['clean-editor-build', 'extract-editor-src'], compilation.compileTask('out-editor-src', 'out-editor-build', true));
gulp.task('clean-optimized-editor', util.rimraf('out-editor')); gulp.task('clean-optimized-editor', util.rimraf('out-editor'));
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-editor-build'], common.optimizeTask({ gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'], common.optimizeTask({
src: 'out-editor-build',
entryPoints: editorEntryPoints, entryPoints: editorEntryPoints,
otherSources: editorOtherSources, otherSources: editorOtherSources,
resources: editorResources, resources: editorResources,
loaderConfig: { loaderConfig: editorLoaderConfig(),
paths: {
'vs': 'out-editor-build/vs',
'vs/css': 'out-editor-build/vs/css.build',
'vs/nls': 'out-editor-build/vs/nls.build',
'vscode': 'empty:'
}
},
bundleLoader: false, bundleLoader: false,
header: BUNDLED_FILE_HEADER, header: BUNDLED_FILE_HEADER,
bundleInfo: true, bundleInfo: true,
@@ -129,79 +93,27 @@ gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor')); gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));
gulp.task('clean-editor-esm', util.rimraf('out-editor-esm')); gulp.task('clean-editor-esm', util.rimraf('out-editor-esm'));
gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro', 'extract-editor-src'], function () { gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro'], function () {
standalone.createESMSourcesAndResources2({ standalone.createESMSourcesAndResources({
srcFolder: './out-editor-src', entryPoints: [
outFolder: './out-editor-esm', 'vs/editor/editor.main',
outResourcesFolder: './out-monaco-editor-core/esm', 'vs/editor/editor.worker'
ignores: [
'inlineEntryPoint:0.ts',
'inlineEntryPoint:1.ts',
'vs/loader.js',
'vs/nls.ts',
'vs/nls.build.js',
'vs/nls.d.ts',
'vs/css.js',
'vs/css.build.js',
'vs/css.d.ts',
'vs/base/worker/workerMain.ts',
], ],
renames: { outFolder: './out-editor-esm/src',
'vs/nls.mock.ts': 'vs/nls.ts' outResourcesFolder: './out-monaco-editor-core/esm',
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
'vs/nls': 'vs/nls.mock',
} }
}); });
}); });
gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () { gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () {
if (process.platform === 'win32') { const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, { cwd: path.join(__dirname, '../out-editor-esm')
cwd: path.join(__dirname, '../out-editor-esm') });
}); console.log(result.stdout.toString());
console.log(result.stdout.toString());
console.log(result.stderr.toString());
} else {
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
console.log(result.stderr.toString());
}
}); });
function toExternalDTS(contents) {
let lines = contents.split('\n');
let killNextCloseCurlyBrace = false;
for (let i = 0; i < lines.length; i++) {
let line = lines[i];
if (killNextCloseCurlyBrace) {
if ('}' === line) {
lines[i] = '';
killNextCloseCurlyBrace = false;
continue;
}
if (line.indexOf(' ') === 0) {
lines[i] = line.substr(4);
} else if (line.charAt(0) === '\t') {
lines[i] = line.substr(1);
}
continue;
}
if ('declare namespace monaco {' === line) {
lines[i] = '';
killNextCloseCurlyBrace = true;
continue;
}
if (line.indexOf('declare namespace monaco.') === 0) {
lines[i] = line.replace('declare namespace monaco.', 'export namespace ');
}
}
return lines.join('\n');
}
gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core')); gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify-editor', 'optimize-editor'], function () { gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify-editor', 'optimize-editor'], function () {
return es.merge( return es.merge(
@@ -218,7 +130,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
this.emit('data', new File({ this.emit('data', new File({
path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'), path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
base: data.base, base: data.base,
contents: Buffer.from(toExternalDTS(data.contents.toString())) contents: data.contents
})); }));
})) }))
.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')), .pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
@@ -283,7 +195,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
}); });
gulp.task('analyze-editor-distro', function () { gulp.task('analyze-editor-distro', function () {
// @ts-ignore // @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
var bundleInfo = require('../out-editor/bundleInfo.json'); var bundleInfo = require('../out-editor/bundleInfo.json');
var graph = bundleInfo.graph; var graph = bundleInfo.graph;
var bundles = bundleInfo.bundles; var bundles = bundleInfo.bundles;

View File

@@ -20,8 +20,8 @@ const sourcemaps = require('gulp-sourcemaps');
const nlsDev = require('vscode-nls-dev'); const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname); const root = path.dirname(__dirname);
const commit = util.getVersion(root); const commit = util.getVersion(root);
const i18n = require('./lib/i18n');
const plumber = require('gulp-plumber'); const plumber = require('gulp-plumber');
const _ = require('underscore');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions'); const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@@ -32,12 +32,13 @@ const compilations = glob.sync('**/tsconfig.json', {
const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`; const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`;
const languages = i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
const tasks = compilations.map(function (tsconfigFile) { const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile); const absolutePath = path.join(extensionsPath, tsconfigFile);
const relativeDirname = path.dirname(tsconfigFile); const relativeDirname = path.dirname(tsconfigFile);
const tsconfig = require(absolutePath); const tsOptions = require(absolutePath).compilerOptions;
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
tsOptions.verbose = false; tsOptions.verbose = false;
tsOptions.sourceMap = true; tsOptions.sourceMap = true;
@@ -57,6 +58,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const srcBase = path.join(root, 'src'); const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**'); const src = path.join(srcBase, '**');
const out = path.join(root, 'out'); const out = path.join(root, 'out');
const i18nPath = path.join(__dirname, '..', 'i18n');
const baseUrl = getBaseUrl(out); const baseUrl = getBaseUrl(out);
let headerId, headerOut; let headerId, headerOut;
@@ -100,9 +102,9 @@ const tasks = compilations.map(function (tsconfigFile) {
sourceRoot: '../src' sourceRoot: '../src'
})) }))
.pipe(tsFilter.restore) .pipe(tsFilter.restore)
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18nPath, out) : es.through())
.pipe(build ? nlsDev.bundleMetaDataFiles(headerId, headerOut) : es.through()) .pipe(build ? nlsDev.bundleMetaDataFiles(headerId, headerOut) : es.through())
// Filter out *.nls.json file. We needed them only to bundle meta data file. .pipe(build ? nlsDev.bundleLanguageFiles() : es.through())
.pipe(filter(['**', '!**/*.nls.json']))
.pipe(reporter.end(emitError)); .pipe(reporter.end(emitError));
return es.duplex(input, output); return es.duplex(input, output);
@@ -169,4 +171,4 @@ gulp.task('watch-extensions', tasks.map(t => t.watch));
gulp.task('clean-extensions-build', tasks.map(t => t.cleanBuild)); gulp.task('clean-extensions-build', tasks.map(t => t.cleanBuild));
gulp.task('compile-extensions-build', tasks.map(t => t.compileBuild)); gulp.task('compile-extensions-build', tasks.map(t => t.compileBuild));
gulp.task('watch-extensions-build', tasks.map(t => t.watchBuild)); gulp.task('watch-extensions-build', tasks.map(t => t.watchBuild));

View File

@@ -43,15 +43,12 @@ const indentationFilter = [
// except specific files // except specific files
'!ThirdPartyNotices.txt', '!ThirdPartyNotices.txt',
'!LICENSE.txt', '!LICENSE.txt',
'!**/LICENSE',
'!src/vs/nls.js', '!src/vs/nls.js',
'!src/vs/nls.build.js',
'!src/vs/css.js', '!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js', '!src/vs/loader.js',
'!src/vs/base/common/marked/marked.js', '!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/winjs.base.js',
'!src/vs/base/node/terminateProcess.sh', '!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/assert.js', '!test/assert.js',
// except specific folders // except specific folders
@@ -80,15 +77,12 @@ const indentationFilter = [
'!src/vs/*/**/*.d.ts', '!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts', '!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts', '!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns}', '!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
'!build/{lib,tslintRules}/**/*.js', '!build/{lib,tslintRules}/**/*.js',
'!build/**/*.sh', '!build/**/*.sh',
'!build/azure-pipelines/**/*.js', '!build/tfs/**/*.js',
'!build/azure-pipelines/**/*.config',
'!**/Dockerfile', '!**/Dockerfile',
'!**/*.Dockerfile', '!extensions/markdown/media/*.js'
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js'
]; ];
const copyrightFilter = [ const copyrightFilter = [
@@ -100,8 +94,6 @@ const copyrightFilter = [
'!**/*.md', '!**/*.md',
'!**/*.bat', '!**/*.bat',
'!**/*.cmd', '!**/*.cmd',
'!**/*.ico',
'!**/*.icns',
'!**/*.xml', '!**/*.xml',
'!**/*.sh', '!**/*.sh',
'!**/*.txt', '!**/*.txt',
@@ -109,13 +101,10 @@ const copyrightFilter = [
'!**/*.opts', '!**/*.opts',
'!**/*.disabled', '!**/*.disabled',
'!**/*.code-workspace', '!**/*.code-workspace',
'!**/promise-polyfill/polyfill.js',
'!build/**/*.init', '!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml', '!resources/linux/snap/snapcraft.yaml',
'!resources/linux/snap/electron-launch',
'!resources/win32/bin/code.js', '!resources/win32/bin/code.js',
'!resources/completions/**', '!extensions/markdown-language-features/media/tomorrow.css',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*', '!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*' '!extensions/*/server/bin/*'
]; ];
@@ -128,6 +117,7 @@ const eslintFilter = [
'!src/vs/nls.js', '!src/vs/nls.js',
'!src/vs/css.build.js', '!src/vs/css.build.js',
'!src/vs/nls.build.js', '!src/vs/nls.build.js',
'!src/**/winjs.base.js',
'!src/**/marked.js', '!src/**/marked.js',
'!**/test/**' '!**/test/**'
]; ];
@@ -146,7 +136,6 @@ const tslintFilter = [
'!extensions/html-language-features/server/lib/jquery.d.ts' '!extensions/html-language-features/server/lib/jquery.d.ts'
]; ];
// {{SQL CARBON EDIT}}
const copyrightHeaderLines = [ const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------', '/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.', ' * Copyright (c) Microsoft Corporation. All rights reserved.',

View File

@@ -13,10 +13,12 @@ const es = require('event-stream');
const util = require('./lib/util'); const util = require('./lib/util');
const remote = require('gulp-remote-src'); const remote = require('gulp-remote-src');
const zip = require('gulp-vinyl-zip'); const zip = require('gulp-vinyl-zip');
const assign = require('object-assign');
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const jeditor = require('gulp-json-editor'); const jeditor = require('gulp-json-editor');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json'); const pkg = require('../package.json');
gulp.task('mixin', function () { gulp.task('mixin', function () {
@@ -50,4 +52,4 @@ gulp.task('mixin', function () {
return gulp.src('./product.json') return gulp.src('./product.json')
.pipe(jeditor(newValues)) .pipe(jeditor(newValues))
.pipe(gulp.dest('.')); .pipe(gulp.dest('.'));
}); });

View File

@@ -17,15 +17,19 @@ const vfs = require('vinyl-fs');
const rename = require('gulp-rename'); const rename = require('gulp-rename');
const replace = require('gulp-replace'); const replace = require('gulp-replace');
const filter = require('gulp-filter'); const filter = require('gulp-filter');
const buffer = require('gulp-buffer');
const json = require('gulp-json-editor'); const json = require('gulp-json-editor');
const _ = require('underscore'); const _ = require('underscore');
const util = require('./lib/util'); const util = require('./lib/util');
const ext = require('./lib/extensions'); const ext = require('./lib/extensions');
const buildfile = require('../src/buildfile'); const buildfile = require('../src/buildfile');
const common = require('./lib/optimize'); const common = require('./lib/optimize');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname); const root = path.dirname(__dirname);
const commit = util.getVersion(root); const commit = util.getVersion(root);
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const packageJson = require('../package.json'); const packageJson = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json'); const product = require('../product.json');
const crypto = require('crypto'); const crypto = require('crypto');
const i18n = require('./lib/i18n'); const i18n = require('./lib/i18n');
@@ -33,17 +37,16 @@ const i18n = require('./lib/i18n');
const serviceDownloader = require('service-downloader').ServiceDownloadProvider; const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation; const platformInfo = require('service-downloader/out/platform').PlatformInformation;
const glob = require('glob'); const glob = require('glob');
// {{SQL CARBON EDIT}} - End
const deps = require('./dependencies'); const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion; const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar; const createAsar = require('./lib/asar').createAsar;
const minimist = require('minimist');
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname)); const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// @ts-ignore // @ts-ignore
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
var del = require('del'); var del = require('del');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n)); const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const nodeModules = [ const nodeModules = [
@@ -57,7 +60,27 @@ const nodeModules = [
.concat(_.uniq(productionDependencies.map(d => d.name))) .concat(_.uniq(productionDependencies.map(d => d.name)))
.concat(baseModules); .concat(baseModules);
// Build // Build
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('./builtInExtensions.json');
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
];
// {{SQL CARBON EDIT}}
const vsce = require('vsce');
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
'agent',
'profiler'
];
const vscodeEntryPoints = _.flatten([ const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.main'), buildfile.entrypoint('vs/workbench/workbench.main'),
buildfile.base, buildfile.base,
@@ -70,24 +93,23 @@ const vscodeResources = [
'out-build/cli.js', 'out-build/cli.js',
'out-build/driver.js', 'out-build/driver.js',
'out-build/bootstrap.js', 'out-build/bootstrap.js',
'out-build/bootstrap-fork.js',
'out-build/bootstrap-amd.js', 'out-build/bootstrap-amd.js',
'out-build/bootstrap-window.js',
'out-build/paths.js', 'out-build/paths.js',
'out-build/vs/**/*.{svg,png,cur,html}', 'out-build/vs/**/*.{svg,png,cur,html}',
'out-build/vs/base/common/performance.js', 'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh}', 'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**', 'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css', 'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/electron-browser/bootstrap/**',
'out-build/vs/workbench/parts/debug/**/*.json', 'out-build/vs/workbench/parts/debug/**/*.json',
'out-build/vs/workbench/parts/execution/**/*.scpt', 'out-build/vs/workbench/parts/execution/**/*.scpt',
'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js', 'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js',
'out-build/vs/**/markdown.css', 'out-build/vs/**/markdown.css',
'out-build/vs/workbench/parts/tasks/**/*.json', 'out-build/vs/workbench/parts/tasks/**/*.json',
'out-build/vs/workbench/parts/terminal/electron-browser/terminalProcess.js',
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md', 'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/services/files/**/*.exe', 'out-build/vs/workbench/services/files/**/*.exe',
'out-build/vs/workbench/services/files/**/*.md', 'out-build/vs/workbench/services/files/**/*.md',
'out-build/vs/code/electron-browser/workbench/**',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js', 'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js', 'out-build/vs/code/electron-browser/issue/issueReporter.js',
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js', 'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
@@ -111,7 +133,6 @@ const vscodeResources = [
'out-build/sql/parts/jobManagement/common/media/*.svg', 'out-build/sql/parts/jobManagement/common/media/*.svg',
'out-build/sql/media/objectTypes/*.svg', 'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg', 'out-build/sql/media/icons/*.svg',
'out-build/sql/parts/notebook/media/**/*.svg',
'!**/test/**' '!**/test/**'
]; ];
@@ -121,72 +142,63 @@ const BUNDLED_FILE_HEADER = [
' *--------------------------------------------------------*/' ' *--------------------------------------------------------*/'
].join('\n'); ].join('\n');
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode')); gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({ gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
src: 'out-build',
entryPoints: vscodeEntryPoints, entryPoints: vscodeEntryPoints,
otherSources: [], otherSources: [],
resources: vscodeResources, resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules), loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER, header: BUNDLED_FILE_HEADER,
out: 'out-vscode', out: 'out-vscode',
languages: languages,
bundleInfo: undefined bundleInfo: undefined
})); }));
gulp.task('optimize-index-js', ['optimize-vscode'], () => { gulp.task('optimize-index-js', ['optimize-vscode'], () => {
const fullpath = path.join(process.cwd(), 'out-vscode/bootstrap-window.js'); const fullpath = path.join(process.cwd(), 'out-vscode/vs/workbench/electron-browser/bootstrap/index.js');
const contents = fs.readFileSync(fullpath).toString(); const contents = fs.readFileSync(fullpath).toString();
const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules)); const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
fs.writeFileSync(fullpath, newContents); fs.writeFileSync(fullpath, newContents);
}); });
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`; const baseUrl = `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`;
gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min')); gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min'));
gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', `${sourceMappingURLBase}/core`)); gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', baseUrl));
// Package // Package
// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8')); const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
const config = { const config = {
version: getElectronVersion(), version: getElectronVersion(),
productAppName: product.nameLong, productAppName: product.nameLong,
companyName: 'Microsoft Corporation', companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved', copyright: 'Copyright (C) 2018 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns', darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier, darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools', darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook', darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook', darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [ darwinBundleDocumentTypes: [{
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours name: product.nameLong + ' document',
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'), role: 'Editor',
], ostypes: ["TEXT", "utxt", "TUTX", "****"],
// {{SQL CARBON EDIT}}
extensions: ["csv", "json", "sqlplan", "sql", "xml"],
iconFile: 'resources/darwin/code_file.icns'
}],
darwinBundleURLTypes: [{ darwinBundleURLTypes: [{
role: 'Viewer', role: 'Viewer',
name: product.nameLong, name: product.nameLong,
urlSchemes: [product.urlProtocol] urlSchemes: [product.urlProtocol]
}], }],
darwinForceDarkModeSupport: true, darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName, linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico', winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined, token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
repo: product.electronRepository || void 0
// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || undefined
}; };
function getElectron(arch) { function getElectron(arch) {
@@ -210,8 +222,6 @@ gulp.task('clean-electron', util.rimraf('.build/electron'));
gulp.task('electron', ['clean-electron'], getElectron(process.arch)); gulp.task('electron', ['clean-electron'], getElectron(process.arch));
gulp.task('electron-ia32', ['clean-electron'], getElectron('ia32')); gulp.task('electron-ia32', ['clean-electron'], getElectron('ia32'));
gulp.task('electron-x64', ['clean-electron'], getElectron('x64')); gulp.task('electron-x64', ['clean-electron'], getElectron('x64'));
gulp.task('electron-arm', ['clean-electron'], getElectron('arm'));
gulp.task('electron-arm64', ['clean-electron'], getElectron('arm64'));
/** /**
@@ -248,11 +258,31 @@ function computeChecksum(filename) {
return hash; return hash;
} }
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath
});
});
}
function packageTask(platform, arch, opts) { function packageTask(platform, arch, opts) {
opts = opts || {}; opts = opts || {};
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : ''); const destination = path.join(path.dirname(root), 'sqlops') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
platform = platform || process.platform; platform = platform || process.platform;
return () => { return () => {
@@ -261,53 +291,69 @@ function packageTask(platform, arch, opts) {
const checksums = computeChecksums(out, [ const checksums = computeChecksums(out, [
'vs/workbench/workbench.main.js', 'vs/workbench/workbench.main.js',
'vs/workbench/workbench.main.css', 'vs/workbench/workbench.main.css',
'vs/code/electron-browser/workbench/workbench.html', 'vs/workbench/electron-browser/bootstrap/index.html',
'vs/code/electron-browser/workbench/workbench.js' 'vs/workbench/electron-browser/bootstrap/index.js',
'vs/workbench/electron-browser/bootstrap/preload.js'
]); ]);
const src = gulp.src(out + '/**', { base: '.' }) const src = gulp.src(out + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); })) .pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }));
const root = path.resolve(path.join(__dirname, '..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
packageBuiltInExtensions();
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
const nlsFilter = filter('**/*.nls.json', { restore: true });
return ext.fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`))
// // TODO@Dirk: this filter / buffer is here to make sure the nls.json files are buffered
.pipe(nlsFilter)
.pipe(buffer())
.pipe(nlsDev.createAdditionalLanguageFiles(languages, path.join(__dirname, '..', 'i18n')))
.pipe(nlsFilter.restore);
}));
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
const sources = es.merge(src, localExtensions, localExtensionDependencies)
.pipe(util.setExecutableBit(['**/*.sh'])) .pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map'])); .pipe(filter(['**', '!**/*.js.map']));
const root = path.resolve(path.join(__dirname, '..'));
// {{SQL CARBON EDIT}}
ext.packageBuiltInExtensions();
const sources = es.merge(src, ext.packageExtensionsStream({
sourceMappingURLBase: sourceMappingURLBase
}));
let version = packageJson.version; let version = packageJson.version;
// @ts-ignore JSON checking: quality is optional
const quality = product.quality; const quality = product.quality;
if (quality && quality !== 'stable') { if (quality && quality !== 'stable') {
version += '-' + quality; version += '-' + quality;
} }
// {{SQL CARBON EDIT}} const name = product.nameShort;
const name = (platform === 'darwin') ? 'Azure Data Studio' : product.nameShort;
const packageJsonUpdates = { name, version };
// for linux url handling
if (platform === 'linux') {
packageJsonUpdates.desktopName = `${product.applicationName}-url-handler.desktop`;
}
const packageJsonStream = gulp.src(['package.json'], { base: '.' }) const packageJsonStream = gulp.src(['package.json'], { base: '.' })
.pipe(json(packageJsonUpdates)); .pipe(json({ name, version }));
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
const date = new Date().toISOString(); const date = new Date().toISOString();
const productJsonUpdate = { commit, date, checksums };
if (shouldSetupSettingsSearch()) {
productJsonUpdate.settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
}
const productJsonStream = gulp.src(['product.json'], { base: '.' }) const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json(productJsonUpdate)); .pipe(json({ commit, date, checksums, settingsSearchBuildId }));
const license = gulp.src(['LICENSES.chromium.html', 'LICENSE.txt', 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.' }); const license = gulp.src(['LICENSES.chromium.html', 'LICENSE.txt', 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.' });
@@ -315,29 +361,27 @@ function packageTask(platform, arch, opts) {
// TODO the API should be copied to `out` during compile, not here // TODO the API should be copied to `out` during compile, not here
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts')); const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts')); const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
const depsSrc = [ const depsSrc = [
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])), ..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
// @ts-ignore JSON checking: dependencies is optional
..._.flatten(Object.keys(product.dependencies || {}).map(d => [`node_modules/${d}/**`, `!node_modules/${d}/**/{test,tests}/**`])) ..._.flatten(Object.keys(product.dependencies || {}).map(d => [`node_modules/${d}/**`, `!node_modules/${d}/**/{test,tests}/**`]))
]; ];
const deps = gulp.src(depsSrc, { base: '.', dot: true }) const deps = gulp.src(depsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json'])) .pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('vscode-sqlite3', ['binding.gyp', 'benchmark/**', 'cloudformation/**', 'deps/**', 'test/**', 'build/**', 'src/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('jschardet', ['dist/**'])) .pipe(util.cleanNodeModule('jschardet', ['dist/**']))
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node', 'src/index.js'])) .pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node'])) .pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
@@ -349,7 +393,6 @@ function packageTask(platform, arch, opts) {
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined)) .pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a'])) .pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a'])) .pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
// {{SQL CARBON EDIT}} - End
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node'])) .pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar')); .pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
@@ -360,7 +403,7 @@ function packageTask(platform, arch, opts) {
'node_modules/slickgrid/**/*.*', 'node_modules/slickgrid/**/*.*',
'node_modules/underscore/**/*.*', 'node_modules/underscore/**/*.*',
'node_modules/zone.js/**/*.*', 'node_modules/zone.js/**/*.*',
'node_modules/chart.js/**/*.*', 'node_modules/chart.js/**/*.*'
], { base: '.', dot: true }); ], { base: '.', dot: true });
let all = es.merge( let all = es.merge(
@@ -369,7 +412,7 @@ function packageTask(platform, arch, opts) {
license, license,
watermark, watermark,
api, api,
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
copiedModules, copiedModules,
dataApi, dataApi,
sources, sources,
@@ -377,37 +420,7 @@ function packageTask(platform, arch, opts) {
); );
if (platform === 'win32') { if (platform === 'win32') {
all = es.merge(all, gulp.src([ all = es.merge(all, gulp.src(['resources/win32/code_file.ico', 'resources/win32/code_70x70.png', 'resources/win32/code_150x150.png'], { base: '.' }));
'resources/win32/bower.ico',
'resources/win32/c.ico',
'resources/win32/config.ico',
'resources/win32/cpp.ico',
'resources/win32/csharp.ico',
'resources/win32/css.ico',
'resources/win32/default.ico',
'resources/win32/go.ico',
'resources/win32/html.ico',
'resources/win32/jade.ico',
'resources/win32/java.ico',
'resources/win32/javascript.ico',
'resources/win32/json.ico',
'resources/win32/less.ico',
'resources/win32/markdown.ico',
'resources/win32/php.ico',
'resources/win32/powershell.ico',
'resources/win32/python.ico',
'resources/win32/react.ico',
'resources/win32/ruby.ico',
'resources/win32/sass.ico',
'resources/win32/shell.ico',
'resources/win32/sql.ico',
'resources/win32/typescript.ico',
'resources/win32/vue.ico',
'resources/win32/xml.ico',
'resources/win32/yaml.ico',
'resources/win32/code_70x70.png',
'resources/win32/code_150x150.png'
], { base: '.' }));
} else if (platform === 'linux') { } else if (platform === 'linux') {
all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' })); all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' }));
} else if (platform === 'darwin') { } else if (platform === 'darwin') {
@@ -423,8 +436,6 @@ function packageTask(platform, arch, opts) {
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true }))) .pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'])); .pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
// result = es.merge(result, gulp.src('resources/completions/**', { base: '.' }));
if (platform === 'win32') { if (platform === 'win32') {
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32' })); result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32' }));
@@ -434,8 +445,6 @@ function packageTask(platform, arch, opts) {
result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' }) result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' })
.pipe(replace('@@NAME@@', product.nameShort)) .pipe(replace('@@NAME@@', product.nameShort))
.pipe(replace('@@COMMIT@@', commit))
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; }))); .pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));
result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' }) result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' })
@@ -446,15 +455,6 @@ function packageTask(platform, arch, opts) {
.pipe(rename('bin/' + product.applicationName))); .pipe(rename('bin/' + product.applicationName)));
} }
// submit all stats that have been collected
// during the build phase
if (opts.stats) {
result.on('end', () => {
const { submitAllStats } = require('./lib/stats');
submitAllStats(product, commit).then(() => console.log('Submitted bundle stats!'));
});
}
return result.pipe(vfs.dest(destination)); return result.pipe(vfs.dest(destination));
}; };
} }
@@ -462,37 +462,26 @@ function packageTask(platform, arch, opts) {
const buildRoot = path.dirname(root); const buildRoot = path.dirname(root);
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
gulp.task('vscode-win32-x64-azurecore', ['optimize-vscode'], ext.packageExtensionTask('azurecore', 'win32', 'x64')); gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'sqlops-win32-ia32')));
gulp.task('vscode-darwin-azurecore', ['optimize-vscode'], ext.packageExtensionTask('azurecore', 'darwin')); gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'sqlops-win32-x64')));
gulp.task('vscode-linux-x64-azurecore', ['optimize-vscode'], ext.packageExtensionTask('azurecore', 'linux', 'x64')); gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'sqlops-darwin')));
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'sqlops-linux-ia32')));
gulp.task('vscode-win32-x64-mssql', ['vscode-linux-x64-azurecore', 'optimize-vscode'], ext.packageExtensionTask('mssql', 'win32', 'x64')); gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'sqlops-linux-x64')));
gulp.task('vscode-darwin-mssql', ['vscode-linux-x64-azurecore', 'optimize-vscode'], ext.packageExtensionTask('mssql', 'darwin')); gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'sqlops-linux-arm')));
gulp.task('vscode-linux-x64-mssql', ['vscode-linux-x64-azurecore', 'optimize-vscode'], ext.packageExtensionTask('mssql', 'linux', 'x64'));
gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-ia32')));
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-x64')));
gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'azuredatastudio-darwin')));
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-ia32')));
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-x64')));
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm')));
gulp.task('clean-vscode-linux-arm64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm64')));
gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32')); gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32'));
gulp.task('vscode-win32-x64', ['vscode-win32-x64-azurecore', 'vscode-win32-x64-mssql', 'optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64')); gulp.task('vscode-win32-x64', ['optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['vscode-darwin-azurecore', 'vscode-darwin-mssql', 'optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { stats: true })); gulp.task('vscode-darwin', ['optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32')); gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32'));
gulp.task('vscode-linux-x64', ['vscode-linux-x64-azurecore', 'vscode-linux-x64-mssql', 'optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64')); gulp.task('vscode-linux-x64', ['optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm')); gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm'));
gulp.task('vscode-linux-arm64', ['optimize-vscode', 'clean-vscode-linux-arm64'], packageTask('linux', 'arm64'));
gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true })); gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true }));
gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true })); gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true }));
gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true, stats: true })); gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true }));
gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true })); gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true }));
gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true })); gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true }));
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true })); gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
gulp.task('vscode-linux-arm64-min', ['minify-vscode', 'clean-vscode-linux-arm64'], packageTask('linux', 'arm64', { minified: true }));
// Transifex Localizations // Transifex Localizations
@@ -528,7 +517,7 @@ gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken)); ).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
}); });
gulp.task('vscode-translations-export', ['optimize-vscode'], function () { gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json'; const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*'; const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}'; const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
@@ -537,44 +526,44 @@ gulp.task('vscode-translations-export', ['optimize-vscode'], function () {
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()), gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()), gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions()) gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(vfs.dest('../vscode-translations-export')); ).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(vfs.dest('../vscode-transifex-input'));
}); });
gulp.task('vscode-translations-pull', function () { gulp.task('vscode-translations-pull', function () {
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => { [...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`));
let includeDefault = !!innoSetupConfig[language.id].defaultInfo; let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
return i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-translations-import/${language.id}/setup`)); i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`));
})); });
}); });
gulp.task('vscode-translations-import', function () { gulp.task('vscode-translations-import', function () {
// {{SQL CARBON EDIT}} - Replace function body with our own
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => { [...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`) gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
.pipe(i18n.prepareI18nFiles()) .pipe(i18n.prepareI18nFiles())
.pipe(vfs.dest(`./i18n/${language.folderName}`)); .pipe(vfs.dest(`./i18n/${language.folderName}`));
// {{SQL CARBON EDIT}}
// gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
// .pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
// .pipe(vfs.dest(`./build/win32/i18n`));
}); });
// {{SQL CARBON EDIT}} - End
}); });
// Sourcemaps // Sourcemaps
gulp.task('upload-vscode-sourcemaps', ['vscode-darwin-min', 'minify-vscode'], () => { gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' }) const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
.pipe(es.mapSync(f => { .pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`; f.path = `${f.base}/core/${f.relative}`;
return f; return f;
})); }));
const extensionsOut = gulp.src('extensions/**/out/**/*.map', { base: '.' }); const extensions = gulp.src('extensions/**/out/**/*.map', { base: '.' });
const extensionsDist = gulp.src('extensions/**/dist/**/*.map', { base: '.' });
return es.merge(vs, extensionsOut, extensionsDist) return es.merge(vs, extensions)
.pipe(es.through(function (data) {
// debug
console.log('Uploading Sourcemap', data.relative);
this.emit('data', data);
}))
.pipe(azure.upload({ .pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT, account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY, key: process.env.AZURE_STORAGE_ACCESS_KEY,
@@ -585,8 +574,9 @@ gulp.task('upload-vscode-sourcemaps', ['vscode-darwin-min', 'minify-vscode'], ()
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json'); const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => { gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => {
if (!shouldSetupSettingsSearch()) { const branch = process.env.BUILD_SOURCEBRANCH;
const branch = process.env.BUILD_SOURCEBRANCH;
if (!/\/master$/.test(branch) && branch.indexOf('/release/') < 0) {
console.log(`Only runs on master and release branches, not ${branch}`); console.log(`Only runs on master and release branches, not ${branch}`);
return; return;
} }
@@ -609,24 +599,13 @@ gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () =
})); }));
}); });
function shouldSetupSettingsSearch() {
const branch = process.env.BUILD_SOURCEBRANCH;
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
}
function getSettingsSearchBuildId(packageJson) { function getSettingsSearchBuildId(packageJson) {
const previous = util.getPreviousVersion(packageJson.version);
try { try {
const branch = process.env.BUILD_SOURCEBRANCH; const out = cp.execSync(`git rev-list ${previous}..HEAD --count`);
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
/\/master$/.test(branch) ? 1 :
2; // Some unexpected branch
const out = cp.execSync(`git rev-list HEAD --count`);
const count = parseInt(out.toString()); const count = parseInt(out.toString());
return util.versionStringToNumber(packageJson.version) * 1e4 + count;
// <version number><commit count><branchId (avoid unlikely conflicts)>
// 1.25.1, 1,234,567 commits, master = 1250112345671
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
} catch (e) { } catch (e) {
throw new Error('Could not determine build number: ' + e.toString()); throw new Error('Could not determine build number: ' + e.toString());
} }
@@ -640,10 +619,6 @@ gulp.task('generate-vscode-configuration', () => {
return reject(new Error('$AGENT_BUILDDIRECTORY not set')); return reject(new Error('$AGENT_BUILDDIRECTORY not set'));
} }
if (process.env.VSCODE_QUALITY !== 'insider' && process.env.VSCODE_QUALITY !== 'stable') {
return resolve();
}
const userDataDir = path.join(os.tmpdir(), 'tmpuserdata'); const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
const extensionsDir = path.join(os.tmpdir(), 'tmpextdir'); const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app'; const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
@@ -692,5 +667,6 @@ function installService() {
} }
gulp.task('install-sqltoolsservice', () => { gulp.task('install-sqltoolsservice', () => {
return installService(); return installService();
}); });

View File

@@ -12,38 +12,32 @@ const shell = require('gulp-shell');
const es = require('event-stream'); const es = require('event-stream');
const vfs = require('vinyl-fs'); const vfs = require('vinyl-fs');
const util = require('./lib/util'); const util = require('./lib/util');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const packageJson = require('../package.json'); const packageJson = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json'); const product = require('../product.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const rpmDependencies = require('../resources/linux/rpm/dependencies.json'); const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
const path = require('path');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000); const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
function getDebPackageArch(arch) { function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch]; return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
} }
function prepareDebPackage(arch) { function prepareDebPackage(arch) {
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const binaryDir = '../azuredatastudio-linux-' + arch; const binaryDir = '../sqlops-linux-' + arch;
const debArch = getDebPackageArch(arch); const debArch = getDebPackageArch(arch);
const destination = '.build/linux/deb/' + debArch + '/' + product.applicationName + '-' + debArch; const destination = '.build/linux/deb/' + debArch + '/' + product.applicationName + '-' + debArch;
return function () { return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' }) const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename('usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort)) .pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.applicationName)) .pipe(replace('@@ICON@@', product.applicationName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol)); .pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' }) const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
@@ -54,12 +48,6 @@ function prepareDebPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' }) const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('usr/share/pixmaps/' + product.applicationName + '.png')); .pipe(rename('usr/share/pixmaps/' + product.applicationName + '.png'));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('usr/share/bash-completion/completions/code'));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('usr/share/zsh/vendor-completions/_code'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir }) const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; })); .pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -88,13 +76,11 @@ function prepareDebPackage(arch) {
const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' }) const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ARCHITECTURE@@', debArch)) .pipe(replace('@@ARCHITECTURE@@', debArch))
// @ts-ignore JSON checking: quality is optional
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@')) .pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
// @ts-ignore JSON checking: updateUrl is optional
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@')) .pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst')); .pipe(rename('DEBIAN/postinst'));
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, /* bash_completion, zsh_completion, */ code); const all = es.merge(control, postinst, postrm, prerm, desktop, appdata, icon, code);
return all.pipe(vfs.dest(destination)); return all.pipe(vfs.dest(destination));
}; };
@@ -114,27 +100,21 @@ function getRpmBuildPath(rpmArch) {
} }
function getRpmPackageArch(arch) { function getRpmPackageArch(arch) {
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch]; return { x64: 'x86_64', ia32: 'i386', arm: 'armhf' }[arch];
} }
function prepareRpmPackage(arch) { function prepareRpmPackage(arch) {
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const binaryDir = '../azuredatastudio-linux-' + arch; const binaryDir = '../sqlops-linux-' + arch;
const rpmArch = getRpmPackageArch(arch); const rpmArch = getRpmPackageArch(arch);
return function () { return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' }) const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort)) .pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.applicationName)) .pipe(replace('@@ICON@@', product.applicationName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol)); .pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' }) const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
@@ -145,12 +125,6 @@ function prepareRpmPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' }) const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('BUILD/usr/share/pixmaps/' + product.applicationName + '.png')); .pipe(rename('BUILD/usr/share/pixmaps/' + product.applicationName + '.png'));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('BUILD/usr/share/bash-completion/completions/code'));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('BUILD/usr/share/zsh/site-functions/_code'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir }) const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; })); .pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -161,9 +135,7 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@RELEASE@@', linuxPackageRevision)) .pipe(replace('@@RELEASE@@', linuxPackageRevision))
.pipe(replace('@@ARCHITECTURE@@', rpmArch)) .pipe(replace('@@ARCHITECTURE@@', rpmArch))
.pipe(replace('@@LICENSE@@', product.licenseName)) .pipe(replace('@@LICENSE@@', product.licenseName))
// @ts-ignore JSON checking: quality is optional
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@')) .pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
// @ts-ignore JSON checking: updateUrl is optional
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@')) .pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(replace('@@DEPENDENCIES@@', rpmDependencies[rpmArch].join(', '))) .pipe(replace('@@DEPENDENCIES@@', rpmDependencies[rpmArch].join(', ')))
.pipe(rename('SPECS/' + product.applicationName + '.spec')); .pipe(rename('SPECS/' + product.applicationName + '.spec'));
@@ -171,7 +143,7 @@ function prepareRpmPackage(arch) {
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' }) const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
.pipe(rename('SOURCES/' + product.applicationName + '.xpm')); .pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
const all = es.merge(code, desktops, appdata, icon, /* bash_completion, zsh_completion, */ spec, specIcon); const all = es.merge(code, desktop, appdata, icon, spec, specIcon);
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch))); return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
}; };
@@ -189,7 +161,6 @@ function buildRpmPackage(arch) {
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/' 'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/'
]); ]);
} }
function getSnapBuildPath(arch) { function getSnapBuildPath(arch) {
return `.build/linux/snap/${arch}/${product.applicationName}-${arch}`; return `.build/linux/snap/${arch}/${product.applicationName}-${arch}`;
} }
@@ -210,11 +181,11 @@ function prepareSnapPackage(arch) {
.pipe(rename(`usr/share/pixmaps/${product.applicationName}.png`)); .pipe(rename(`usr/share/pixmaps/${product.applicationName}.png`));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir }) const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; })); .pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' }) const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@VERSION@@', commit.substr(0, 8))) .pipe(replace('@@VERSION@@', packageJson.version))
.pipe(rename('snap/snapcraft.yaml')); .pipe(rename('snap/snapcraft.yaml'));
const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' }) const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })
@@ -228,7 +199,11 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) { function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch); const snapBuildPath = getSnapBuildPath(arch);
return shell.task(`cd ${snapBuildPath} && snapcraft build`); const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
return shell.task([
`chmod +x ${snapBuildPath}/electron-launch`,
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}`
]);
} }
function getFlatpakArch(arch) { function getFlatpakArch(arch) {
@@ -237,7 +212,7 @@ function getFlatpakArch(arch) {
function prepareFlatpak(arch) { function prepareFlatpak(arch) {
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const binaryDir = '../azuredatastudio-linux-' + arch; const binaryDir = '../sqlops-linux-' + arch;
const flatpakArch = getFlatpakArch(arch); const flatpakArch = getFlatpakArch(arch);
const destination = '.build/linux/flatpak/' + flatpakArch; const destination = '.build/linux/flatpak/' + flatpakArch;
@@ -308,39 +283,33 @@ function buildFlatpak(arch) {
gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386')); gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386'));
gulp.task('clean-vscode-linux-x64-deb', util.rimraf('.build/linux/deb/amd64')); gulp.task('clean-vscode-linux-x64-deb', util.rimraf('.build/linux/deb/amd64'));
gulp.task('clean-vscode-linux-arm-deb', util.rimraf('.build/linux/deb/armhf')); gulp.task('clean-vscode-linux-arm-deb', util.rimraf('.build/linux/deb/armhf'));
gulp.task('clean-vscode-linux-arm64-deb', util.rimraf('.build/linux/deb/arm64'));
gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386')); gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386'));
gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64')); gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64'));
gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf')); gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf'));
gulp.task('clean-vscode-linux-arm64-rpm', util.rimraf('.build/linux/rpm/arm64'));
gulp.task('clean-vscode-linux-ia32-snap', util.rimraf('.build/linux/snap/x64')); gulp.task('clean-vscode-linux-ia32-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-x64-snap', util.rimraf('.build/linux/snap/x64')); gulp.task('clean-vscode-linux-x64-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-arm-snap', util.rimraf('.build/linux/snap/x64')); gulp.task('clean-vscode-linux-arm-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-arm64-snap', util.rimraf('.build/linux/snap/x64')); gulp.task('clean-vscode-linux-ia32-flatpak', util.rimraf('.build/linux/flatpak/i386'));
gulp.task('clean-vscode-linux-x64-flatpak', util.rimraf('.build/linux/flatpak/x86_64'));
gulp.task('clean-vscode-linux-arm-flatpak', util.rimraf('.build/linux/flatpak/arm'));
gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32')); gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64')); gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64'));
gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm')); gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm'));
gulp.task('vscode-linux-arm64-prepare-deb', ['clean-vscode-linux-arm64-deb'], prepareDebPackage('arm64'));
gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32')); gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32'));
gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64')); gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64'));
gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm')); gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm'));
gulp.task('vscode-linux-arm64-build-deb', ['vscode-linux-arm64-prepare-deb'], buildDebPackage('arm64'));
gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32')); gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64')); gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64'));
gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm')); gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm'));
gulp.task('vscode-linux-arm64-prepare-rpm', ['clean-vscode-linux-arm64-rpm'], prepareRpmPackage('arm64'));
gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32')); gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32'));
gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64')); gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64'));
gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm')); gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm'));
gulp.task('vscode-linux-arm64-build-rpm', ['vscode-linux-arm64-prepare-rpm'], buildRpmPackage('arm64'));
gulp.task('vscode-linux-ia32-prepare-snap', ['clean-vscode-linux-ia32-snap'], prepareSnapPackage('ia32')); gulp.task('vscode-linux-ia32-prepare-snap', ['clean-vscode-linux-ia32-snap'], prepareSnapPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-snap', ['clean-vscode-linux-x64-snap'], prepareSnapPackage('x64')); gulp.task('vscode-linux-x64-prepare-snap', ['clean-vscode-linux-x64-snap'], prepareSnapPackage('x64'));
gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prepareSnapPackage('arm')); gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prepareSnapPackage('arm'));
gulp.task('vscode-linux-arm64-prepare-snap', ['clean-vscode-linux-arm64-snap'], prepareSnapPackage('arm64'));
gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32')); gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32'));
gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64')); gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64'));
gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm')); gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm'));
gulp.task('vscode-linux-arm64-build-snap', ['vscode-linux-arm64-prepare-snap'], buildSnapPackage('arm64'));

View File

@@ -7,75 +7,45 @@
const gulp = require('gulp'); const gulp = require('gulp');
const path = require('path'); const path = require('path');
const fs = require('fs');
const assert = require('assert'); const assert = require('assert');
const cp = require('child_process'); const cp = require('child_process');
const _7z = require('7zip')['7z']; const _7z = require('7zip')['7z'];
const util = require('./lib/util'); const util = require('./lib/util');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json'); const pkg = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json'); const product = require('../product.json');
const vfs = require('vinyl-fs'); const vfs = require('vinyl-fs');
const rcedit = require('rcedit');
const mkdirp = require('mkdirp');
const repoPath = path.dirname(__dirname); const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const buildPath = arch => path.join(path.dirname(repoPath), `azuredatastudio-win32-${arch}`); const buildPath = arch => path.join(path.dirname(repoPath), `sqlops-win32-${arch}`);
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive'); const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`); const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`); const setupDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'setup');
const issPath = path.join(__dirname, 'win32', 'code.iss'); const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe'); const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
function packageInnoSetup(iss, options, cb) { function packageInnoSetup(iss, options, cb) {
options = options || {}; options = options || {};
const definitions = options.definitions || {}; const definitions = options.definitions || {};
if (process.argv.some(arg => arg === '--debug-inno')) {
definitions['Debug'] = 'true';
}
if (process.argv.some(arg => arg === '--sign')) {
definitions['Sign'] = 'true';
}
const keys = Object.keys(definitions); const keys = Object.keys(definitions);
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`)); keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
const defs = keys.map(key => `/d${key}=${definitions[key]}`); const defs = keys.map(key => `/d${key}=${definitions[key]}`);
const args = [ const args = [iss].concat(defs);
iss,
...defs
//,
//`/sesrp=powershell.exe -ExecutionPolicy bypass ${signPS1} $f`
];
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] }) cp.spawn(innoSetupPath, args, { stdio: 'inherit' })
.on('error', cb) .on('error', cb)
.on('exit', () => cb(null)); .on('exit', () => cb(null));
} }
function buildWin32Setup(arch, target) { function buildWin32Setup(arch) {
if (target !== 'system' && target !== 'user') {
throw new Error('Invalid setup target');
}
return cb => { return cb => {
const ia32AppId = target === 'system' ? product.win32AppId : product.win32UserAppId; const ia32AppId = product.win32AppId;
const x64AppId = target === 'system' ? product.win32x64AppId : product.win32x64UserAppId; const x64AppId = product.win32x64AppId;
const sourcePath = buildPath(arch);
const outputPath = setupDir(arch, target);
mkdirp.sync(outputPath);
const originalProductJsonPath = path.join(sourcePath, 'resources/app/product.json');
const productJsonPath = path.join(outputPath, 'product.json');
const productJson = JSON.parse(fs.readFileSync(originalProductJsonPath, 'utf8'));
productJson['target'] = target;
fs.writeFileSync(productJsonPath, JSON.stringify(productJson, undefined, '\t'));
const definitions = { const definitions = {
NameLong: product.nameLong, NameLong: product.nameLong,
@@ -83,42 +53,35 @@ function buildWin32Setup(arch, target) {
DirName: product.win32DirName, DirName: product.win32DirName,
Version: pkg.version, Version: pkg.version,
RawVersion: pkg.version.replace(/-\w+$/, ''), RawVersion: pkg.version.replace(/-\w+$/, ''),
NameVersion: product.win32NameVersion + (target === 'user' ? ' (User)' : ''), NameVersion: product.win32NameVersion,
ExeBasename: product.nameShort, ExeBasename: product.nameShort,
RegValueName: product.win32RegValueName, RegValueName: product.win32RegValueName,
ShellNameShort: product.win32ShellNameShort, ShellNameShort: product.win32ShellNameShort,
AppMutex: product.win32MutexName, AppMutex: product.win32MutexName,
Arch: arch, Arch: arch,
AppId: arch === 'ia32' ? ia32AppId : x64AppId, AppId: arch === 'ia32' ? ia32AppId : x64AppId,
IncompatibleTargetAppId: arch === 'ia32' ? product.win32AppId : product.win32x64AppId, IncompatibleAppId: arch === 'ia32' ? x64AppId : ia32AppId,
IncompatibleArchAppId: arch === 'ia32' ? x64AppId : ia32AppId,
AppUserId: product.win32AppUserModelId, AppUserId: product.win32AppUserModelId,
ArchitecturesAllowed: arch === 'ia32' ? '' : 'x64', ArchitecturesAllowed: arch === 'ia32' ? '' : 'x64',
ArchitecturesInstallIn64BitMode: arch === 'ia32' ? '' : 'x64', ArchitecturesInstallIn64BitMode: arch === 'ia32' ? '' : 'x64',
SourceDir: sourcePath, SourceDir: buildPath(arch),
RepoDir: repoPath, RepoDir: repoPath,
OutputDir: outputPath, OutputDir: setupDir(arch)
InstallTarget: target,
ProductJsonPath: productJsonPath
}; };
packageInnoSetup(issPath, { definitions }, cb); packageInnoSetup(issPath, { definitions }, cb);
}; };
} }
function defineWin32SetupTasks(arch, target) { gulp.task('clean-vscode-win32-ia32-setup', util.rimraf(setupDir('ia32')));
gulp.task(`clean-vscode-win32-${arch}-${target}-setup`, util.rimraf(setupDir(arch, target))); gulp.task('vscode-win32-ia32-setup', ['clean-vscode-win32-ia32-setup'], buildWin32Setup('ia32'));
gulp.task(`vscode-win32-${arch}-${target}-setup`, [`clean-vscode-win32-${arch}-${target}-setup`], buildWin32Setup(arch, target));
}
defineWin32SetupTasks('ia32', 'system'); gulp.task('clean-vscode-win32-x64-setup', util.rimraf(setupDir('x64')));
defineWin32SetupTasks('x64', 'system'); gulp.task('vscode-win32-x64-setup', ['clean-vscode-win32-x64-setup'], buildWin32Setup('x64'));
defineWin32SetupTasks('ia32', 'user');
defineWin32SetupTasks('x64', 'user');
function archiveWin32Setup(arch) { function archiveWin32Setup(arch) {
return cb => { return cb => {
const args = ['a', '-tzip', zipPath(arch), '-x!CodeSignSummary*.md', '.', '-r']; const args = ['a', '-tzip', zipPath(arch), '.', '-r'];
cp.spawn(_7z, args, { stdio: 'inherit', cwd: buildPath(arch) }) cp.spawn(_7z, args, { stdio: 'inherit', cwd: buildPath(arch) })
.on('error', cb) .on('error', cb)
@@ -140,14 +103,4 @@ function copyInnoUpdater(arch) {
} }
gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32')); gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64')); gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));
function patchInnoUpdater(arch) {
return cb => {
const icon = path.join(repoPath, 'resources', 'win32', 'code.ico');
rcedit(path.join(buildPath(arch), 'tools', 'inno_updater.exe'), { icon }, cb);
};
}
gulp.task('vscode-win32-ia32-inno-updater', ['vscode-win32-ia32-copy-inno-updater'], patchInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-inno-updater', ['vscode-win32-x64-copy-inno-updater'], patchInnoUpdater('x64'));

View File

@@ -1,15 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es2017",
"jsx": "preserve",
"checkJs": true
},
"include": [
"**/*.js"
],
"exclude": [
"node_modules",
"**/node_modules/*"
]
}

View File

@@ -4,33 +4,33 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path"); var path = require("path");
const es = require("event-stream"); var es = require("event-stream");
const pickle = require('chromium-pickle-js'); var pickle = require("chromium-pickle-js");
const Filesystem = require('asar/lib/filesystem'); var Filesystem = require("asar/lib/filesystem");
const VinylFile = require("vinyl"); var VinylFile = require("vinyl");
const minimatch = require("minimatch"); var minimatch = require("minimatch");
function createAsar(folderPath, unpackGlobs, destFilename) { function createAsar(folderPath, unpackGlobs, destFilename) {
const shouldUnpackFile = (file) => { var shouldUnpackFile = function (file) {
for (let i = 0; i < unpackGlobs.length; i++) { for (var i = 0; i < unpackGlobs.length; i++) {
if (minimatch(file.relative, unpackGlobs[i])) { if (minimatch(file.relative, unpackGlobs[i])) {
return true; return true;
} }
} }
return false; return false;
}; };
const filesystem = new Filesystem(folderPath); var filesystem = new Filesystem(folderPath);
const out = []; var out = [];
// Keep track of pending inserts // Keep track of pending inserts
let pendingInserts = 0; var pendingInserts = 0;
let onFileInserted = () => { pendingInserts--; }; var onFileInserted = function () { pendingInserts--; };
// Do not insert twice the same directory // Do not insert twice the same directory
const seenDir = {}; var seenDir = {};
const insertDirectoryRecursive = (dir) => { var insertDirectoryRecursive = function (dir) {
if (seenDir[dir]) { if (seenDir[dir]) {
return; return;
} }
let lastSlash = dir.lastIndexOf('/'); var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) { if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\'); lastSlash = dir.lastIndexOf('\\');
} }
@@ -40,8 +40,8 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
seenDir[dir] = true; seenDir[dir] = true;
filesystem.insertDirectory(dir); filesystem.insertDirectory(dir);
}; };
const insertDirectoryForFile = (file) => { var insertDirectoryForFile = function (file) {
let lastSlash = file.lastIndexOf('/'); var lastSlash = file.lastIndexOf('/');
if (lastSlash === -1) { if (lastSlash === -1) {
lastSlash = file.lastIndexOf('\\'); lastSlash = file.lastIndexOf('\\');
} }
@@ -49,7 +49,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
insertDirectoryRecursive(file.substring(0, lastSlash)); insertDirectoryRecursive(file.substring(0, lastSlash));
} }
}; };
const insertFile = (relativePath, stat, shouldUnpack) => { var insertFile = function (relativePath, stat, shouldUnpack) {
insertDirectoryForFile(relativePath); insertDirectoryForFile(relativePath);
pendingInserts++; pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted); filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
@@ -59,13 +59,13 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
return; return;
} }
if (!file.stat.isFile()) { if (!file.stat.isFile()) {
throw new Error(`unknown item in stream!`); throw new Error("unknown item in stream!");
} }
const shouldUnpack = shouldUnpackFile(file); var shouldUnpack = shouldUnpackFile(file);
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack); insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
if (shouldUnpack) { if (shouldUnpack) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked // The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path); var relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({ this.queue(new VinylFile({
cwd: folderPath, cwd: folderPath,
base: folderPath, base: folderPath,
@@ -79,33 +79,34 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
out.push(file.contents); out.push(file.contents);
} }
}, function () { }, function () {
let finish = () => { var _this = this;
var finish = function () {
{ {
const headerPickle = pickle.createEmpty(); var headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header)); headerPickle.writeString(JSON.stringify(filesystem.header));
const headerBuf = headerPickle.toBuffer(); var headerBuf = headerPickle.toBuffer();
const sizePickle = pickle.createEmpty(); var sizePickle = pickle.createEmpty();
sizePickle.writeUInt32(headerBuf.length); sizePickle.writeUInt32(headerBuf.length);
const sizeBuf = sizePickle.toBuffer(); var sizeBuf = sizePickle.toBuffer();
out.unshift(headerBuf); out.unshift(headerBuf);
out.unshift(sizeBuf); out.unshift(sizeBuf);
} }
const contents = Buffer.concat(out); var contents = Buffer.concat(out);
out.length = 0; out.length = 0;
this.queue(new VinylFile({ _this.queue(new VinylFile({
cwd: folderPath, cwd: folderPath,
base: folderPath, base: folderPath,
path: destFilename, path: destFilename,
contents: contents contents: contents
})); }));
this.queue(null); _this.queue(null);
}; };
// Call finish() only when all file inserts have finished... // Call finish() only when all file inserts have finished...
if (pendingInserts === 0) { if (pendingInserts === 0) {
finish(); finish();
} }
else { else {
onFileInserted = () => { onFileInserted = function () {
pendingInserts--; pendingInserts--;
if (pendingInserts === 0) { if (pendingInserts === 0) {
finish(); finish();

View File

@@ -7,8 +7,8 @@
import * as path from 'path'; import * as path from 'path';
import * as es from 'event-stream'; import * as es from 'event-stream';
const pickle = require('chromium-pickle-js'); import * as pickle from 'chromium-pickle-js';
const Filesystem = require('asar/lib/filesystem'); import * as Filesystem from 'asar/lib/filesystem';
import * as VinylFile from 'vinyl'; import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch'; import * as minimatch from 'minimatch';

View File

@@ -17,6 +17,7 @@ const ext = require('./extensions');
const util = require('gulp-util'); const util = require('gulp-util');
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(__dirname));
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('../builtInExtensions.json'); const builtInExtensions = require('../builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json'); const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
@@ -49,7 +50,7 @@ function syncMarketplaceExtension(extension) {
rimraf.sync(getExtensionPath(extension)); rimraf.sync(getExtensionPath(extension));
return ext.fromMarketplace(extension.name, extension.version, extension.metadata) return ext.fromMarketplace(extension.name, extension.version)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`)) .pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions')) .pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎'))); .on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));

View File

@@ -4,19 +4,19 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs"); var fs = require("fs");
const path = require("path"); var path = require("path");
const vm = require("vm"); var vm = require("vm");
/** /**
* Bundle `entryPoints` given config `config`. * Bundle `entryPoints` given config `config`.
*/ */
function bundle(entryPoints, config, callback) { function bundle(entryPoints, config, callback) {
const entryPointsMap = {}; var entryPointsMap = {};
entryPoints.forEach((module) => { entryPoints.forEach(function (module) {
entryPointsMap[module.name] = module; entryPointsMap[module.name] = module;
}); });
const allMentionedModulesMap = {}; var allMentionedModulesMap = {};
entryPoints.forEach((module) => { entryPoints.forEach(function (module) {
allMentionedModulesMap[module.name] = true; allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) { (module.include || []).forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true; allMentionedModulesMap[includedModule] = true;
@@ -25,30 +25,26 @@ function bundle(entryPoints, config, callback) {
allMentionedModulesMap[excludedModule] = true; allMentionedModulesMap[excludedModule] = true;
}); });
}); });
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js')); var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
const r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});'); var r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
const loaderModule = { exports: {} }; var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports); r.call({}, require, loaderModule, loaderModule.exports);
const loader = loaderModule.exports; var loader = loaderModule.exports;
config.isBuild = true; config.isBuild = true;
config.paths = config.paths || {}; config.paths = config.paths || {};
if (!config.paths['vs/nls']) { config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/nls'] = 'out-build/vs/nls.build'; config.paths['vs/css'] = 'out-build/vs/css.build';
}
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
loader.config(config); loader.config(config);
loader(['require'], (localRequire) => { loader(['require'], function (localRequire) {
const resolvePath = (path) => { var resolvePath = function (path) {
const r = localRequire.toUrl(path); var r = localRequire.toUrl(path);
if (!/\.js/.test(r)) { if (!/\.js/.test(r)) {
return r + '.js'; return r + '.js';
} }
return r; return r;
}; };
for (const moduleId in entryPointsMap) { for (var moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId]; var entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) { if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath); entryPoint.append = entryPoint.append.map(resolvePath);
} }
@@ -57,59 +53,59 @@ function bundle(entryPoints, config, callback) {
} }
} }
}); });
loader(Object.keys(allMentionedModulesMap), () => { loader(Object.keys(allMentionedModulesMap), function () {
const modules = loader.getBuildInfo(); var modules = loader.getBuildInfo();
const partialResult = emitEntryPoints(modules, entryPointsMap); var partialResult = emitEntryPoints(modules, entryPointsMap);
const cssInlinedResources = loader('vs/css').getInlinedResources(); var cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, { callback(null, {
files: partialResult.files, files: partialResult.files,
cssInlinedResources: cssInlinedResources, cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData bundleData: partialResult.bundleData
}); });
}, (err) => callback(err, null)); }, function (err) { return callback(err, null); });
} }
exports.bundle = bundle; exports.bundle = bundle;
function emitEntryPoints(modules, entryPoints) { function emitEntryPoints(modules, entryPoints) {
const modulesMap = {}; var modulesMap = {};
modules.forEach((m) => { modules.forEach(function (m) {
modulesMap[m.id] = m; modulesMap[m.id] = m;
}); });
const modulesGraph = {}; var modulesGraph = {};
modules.forEach((m) => { modules.forEach(function (m) {
modulesGraph[m.id] = m.dependencies; modulesGraph[m.id] = m.dependencies;
}); });
const sortedModules = topologicalSort(modulesGraph); var sortedModules = topologicalSort(modulesGraph);
let result = []; var result = [];
const usedPlugins = {}; var usedPlugins = {};
const bundleData = { var bundleData = {
graph: modulesGraph, graph: modulesGraph,
bundles: {} bundles: {}
}; };
Object.keys(entryPoints).forEach((moduleToBundle) => { Object.keys(entryPoints).forEach(function (moduleToBundle) {
const info = entryPoints[moduleToBundle]; var info = entryPoints[moduleToBundle];
const rootNodes = [moduleToBundle].concat(info.include || []); var rootNodes = [moduleToBundle].concat(info.include || []);
const allDependencies = visit(rootNodes, modulesGraph); var allDependencies = visit(rootNodes, modulesGraph);
const excludes = ['require', 'exports', 'module'].concat(info.exclude || []); var excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach((excludeRoot) => { excludes.forEach(function (excludeRoot) {
const allExcludes = visit([excludeRoot], modulesGraph); var allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude) => { Object.keys(allExcludes).forEach(function (exclude) {
delete allDependencies[exclude]; delete allDependencies[exclude];
}); });
}); });
const includedModules = sortedModules.filter((module) => { var includedModules = sortedModules.filter(function (module) {
return allDependencies[module]; return allDependencies[module];
}); });
bundleData.bundles[moduleToBundle] = includedModules; bundleData.bundles[moduleToBundle] = includedModules;
const res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend || [], info.append || [], info.dest); var res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend, info.append, info.dest);
result = result.concat(res.files); result = result.concat(res.files);
for (const pluginName in res.usedPlugins) { for (var pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName]; usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
} }
}); });
Object.keys(usedPlugins).forEach((pluginName) => { Object.keys(usedPlugins).forEach(function (pluginName) {
const plugin = usedPlugins[pluginName]; var plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') { if (typeof plugin.finishBuild === 'function') {
const write = (filename, contents) => { var write = function (filename, contents) {
result.push({ result.push({
dest: filename, dest: filename,
sources: [{ sources: [{
@@ -128,16 +124,16 @@ function emitEntryPoints(modules, entryPoints) {
}; };
} }
function extractStrings(destFiles) { function extractStrings(destFiles) {
const parseDefineCall = (moduleMatch, depsMatch) => { var parseDefineCall = function (moduleMatch, depsMatch) {
const module = moduleMatch.replace(/^"|"$/g, ''); var module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(','); var deps = depsMatch.split(',');
deps = deps.map((dep) => { deps = deps.map(function (dep) {
dep = dep.trim(); dep = dep.trim();
dep = dep.replace(/^"|"$/g, ''); dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, ''); dep = dep.replace(/^'|'$/g, '');
let prefix = null; var prefix = null;
let _path = null; var _path = null;
const pieces = dep.split('!'); var pieces = dep.split('!');
if (pieces.length > 1) { if (pieces.length > 1) {
prefix = pieces[0] + '!'; prefix = pieces[0] + '!';
_path = pieces[1]; _path = pieces[1];
@@ -147,7 +143,7 @@ function extractStrings(destFiles) {
_path = pieces[0]; _path = pieces[0];
} }
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) { if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/'); var res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res; return prefix + res;
} }
return prefix + _path; return prefix + _path;
@@ -157,7 +153,7 @@ function extractStrings(destFiles) {
deps: deps deps: deps
}; };
}; };
destFiles.forEach((destFile) => { destFiles.forEach(function (destFile, index) {
if (!/\.js$/.test(destFile.dest)) { if (!/\.js$/.test(destFile.dest)) {
return; return;
} }
@@ -165,44 +161,44 @@ function extractStrings(destFiles) {
return; return;
} }
// Do one pass to record the usage counts for each module id // Do one pass to record the usage counts for each module id
const useCounts = {}; var useCounts = {};
destFile.sources.forEach((source) => { destFile.sources.forEach(function (source) {
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/); var matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) { if (!matches) {
return; return;
} }
const defineCall = parseDefineCall(matches[1], matches[2]); var defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1; useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach((dep) => { defineCall.deps.forEach(function (dep) {
useCounts[dep] = (useCounts[dep] || 0) + 1; useCounts[dep] = (useCounts[dep] || 0) + 1;
}); });
}); });
const sortedByUseModules = Object.keys(useCounts); var sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort((a, b) => { sortedByUseModules.sort(function (a, b) {
return useCounts[b] - useCounts[a]; return useCounts[b] - useCounts[a];
}); });
const replacementMap = {}; var replacementMap = {};
sortedByUseModules.forEach((module, index) => { sortedByUseModules.forEach(function (module, index) {
replacementMap[module] = index; replacementMap[module] = index;
}); });
destFile.sources.forEach((source) => { destFile.sources.forEach(function (source) {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => { source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, function (_, moduleMatch, depsMatch) {
const defineCall = parseDefineCall(moduleMatch, depsMatch); var defineCall = parseDefineCall(moduleMatch, depsMatch);
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`; return "define(__m[" + replacementMap[defineCall.module] + "/*" + defineCall.module + "*/], __M([" + defineCall.deps.map(function (dep) { return replacementMap[dep] + '/*' + dep + '*/'; }).join(',') + "])";
}); });
}); });
destFile.sources.unshift({ destFile.sources.unshift({
path: null, path: null,
contents: [ contents: [
'(function() {', '(function() {',
`var __m = ${JSON.stringify(sortedByUseModules)};`, "var __m = " + JSON.stringify(sortedByUseModules) + ";",
`var __M = function(deps) {`, "var __M = function(deps) {",
` var result = [];`, " var result = [];",
` for (var i = 0, len = deps.length; i < len; i++) {`, " for (var i = 0, len = deps.length; i < len; i++) {",
` result[i] = __m[deps[i]];`, " result[i] = __m[deps[i]];",
` }`, " }",
` return result;`, " return result;",
`};` "};"
].join('\n') ].join('\n')
}); });
destFile.sources.push({ destFile.sources.push({
@@ -214,7 +210,7 @@ function extractStrings(destFiles) {
} }
function removeDuplicateTSBoilerplate(destFiles) { function removeDuplicateTSBoilerplate(destFiles) {
// Taken from typescript compiler => emitFiles // Taken from typescript compiler => emitFiles
const BOILERPLATE = [ var BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ }, { start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ }, { start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ }, { start: /^var __decorate/, end: /^};$/ },
@@ -223,14 +219,14 @@ function removeDuplicateTSBoilerplate(destFiles) {
{ start: /^var __awaiter/, end: /^};$/ }, { start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ }, { start: /^var __generator/, end: /^};$/ },
]; ];
destFiles.forEach((destFile) => { destFiles.forEach(function (destFile) {
const SEEN_BOILERPLATE = []; var SEEN_BOILERPLATE = [];
destFile.sources.forEach((source) => { destFile.sources.forEach(function (source) {
const lines = source.contents.split(/\r\n|\n|\r/); var lines = source.contents.split(/\r\n|\n|\r/);
const newLines = []; var newLines = [];
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE; var IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
for (let i = 0; i < lines.length; i++) { for (var i = 0; i < lines.length; i++) {
const line = lines[i]; var line = lines[i];
if (IS_REMOVING_BOILERPLATE) { if (IS_REMOVING_BOILERPLATE) {
newLines.push(''); newLines.push('');
if (END_BOILERPLATE.test(line)) { if (END_BOILERPLATE.test(line)) {
@@ -238,8 +234,8 @@ function removeDuplicateTSBoilerplate(destFiles) {
} }
} }
else { else {
for (let j = 0; j < BOILERPLATE.length; j++) { for (var j = 0; j < BOILERPLATE.length; j++) {
const boilerplate = BOILERPLATE[j]; var boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) { if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) { if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true; IS_REMOVING_BOILERPLATE = true;
@@ -267,45 +263,45 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
if (!dest) { if (!dest) {
dest = entryPoint + '.js'; dest = entryPoint + '.js';
} }
const mainResult = { var mainResult = {
sources: [], sources: [],
dest: dest dest: dest
}, results = [mainResult]; }, results = [mainResult];
const usedPlugins = {}; var usedPlugins = {};
const getLoaderPlugin = (pluginName) => { var getLoaderPlugin = function (pluginName) {
if (!usedPlugins[pluginName]) { if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports; usedPlugins[pluginName] = modulesMap[pluginName].exports;
} }
return usedPlugins[pluginName]; return usedPlugins[pluginName];
}; };
includedModules.forEach((c) => { includedModules.forEach(function (c) {
const bangIndex = c.indexOf('!'); var bangIndex = c.indexOf('!');
if (bangIndex >= 0) { if (bangIndex >= 0) {
const pluginName = c.substr(0, bangIndex); var pluginName = c.substr(0, bangIndex);
const plugin = getLoaderPlugin(pluginName); var plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1))); mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return; return;
} }
const module = modulesMap[c]; var module = modulesMap[c];
if (module.path === 'empty:') { if (module.path === 'empty:') {
return; return;
} }
const contents = readFileAndRemoveBOM(module.path); var contents = readFileAndRemoveBOM(module.path);
if (module.shim) { if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents)); mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
} }
else { else {
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents)); mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
} }
}); });
Object.keys(usedPlugins).forEach((pluginName) => { Object.keys(usedPlugins).forEach(function (pluginName) {
const plugin = usedPlugins[pluginName]; var plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') { if (typeof plugin.writeFile === 'function') {
const req = (() => { var req = (function () {
throw new Error('no-no!'); throw new Error('no-no!');
}); });
req.toUrl = something => something; req.toUrl = function (something) { return something; };
const write = (filename, contents) => { var write = function (filename, contents) {
results.push({ results.push({
dest: filename, dest: filename,
sources: [{ sources: [{
@@ -317,15 +313,15 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
plugin.writeFile(pluginName, entryPoint, req, write, {}); plugin.writeFile(pluginName, entryPoint, req, write, {});
} }
}); });
const toIFile = (path) => { var toIFile = function (path) {
const contents = readFileAndRemoveBOM(path); var contents = readFileAndRemoveBOM(path);
return { return {
path: path, path: path,
contents: contents contents: contents
}; };
}; };
const toPrepend = (prepend || []).map(toIFile); var toPrepend = (prepend || []).map(toIFile);
const toAppend = (append || []).map(toIFile); var toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend); mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
return { return {
files: results, files: results,
@@ -333,8 +329,8 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
}; };
} }
function readFileAndRemoveBOM(path) { function readFileAndRemoveBOM(path) {
const BOM_CHAR_CODE = 65279; var BOM_CHAR_CODE = 65279;
let contents = fs.readFileSync(path, 'utf8'); var contents = fs.readFileSync(path, 'utf8');
// Remove BOM // Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) { if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1); contents = contents.substring(1);
@@ -342,15 +338,15 @@ function readFileAndRemoveBOM(path) {
return contents; return contents;
} }
function emitPlugin(entryPoint, plugin, pluginName, moduleName) { function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
let result = ''; var result = '';
if (typeof plugin.write === 'function') { if (typeof plugin.write === 'function') {
const write = ((what) => { var write = (function (what) {
result += what; result += what;
}); });
write.getEntryPoint = () => { write.getEntryPoint = function () {
return entryPoint; return entryPoint;
}; };
write.asModule = (moduleId, code) => { write.asModule = function (moduleId, code) {
code = code.replace(/^define\(/, 'define("' + moduleId + '",'); code = code.replace(/^define\(/, 'define("' + moduleId + '",');
result += code; result += code;
}; };
@@ -361,20 +357,20 @@ function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
contents: result contents: result
}; };
} }
function emitNamedModule(moduleId, defineCallPosition, path, contents) { function emitNamedModule(moduleId, myDeps, defineCallPosition, path, contents) {
// `defineCallPosition` is the position in code: |define() // `defineCallPosition` is the position in code: |define()
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col); var defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|() // `parensOffset` is the position in code: define|()
const parensOffset = contents.indexOf('(', defineCallOffset); var parensOffset = contents.indexOf('(', defineCallOffset);
const insertStr = '"' + moduleId + '", '; var insertStr = '"' + moduleId + '", ';
return { return {
path: path, path: path,
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1) contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
}; };
} }
function emitShimmedModule(moduleId, myDeps, factory, path, contents) { function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : ''); var strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');'; var strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return { return {
path: path, path: path,
contents: contents + '\n;\n' + strDefine contents: contents + '\n;\n' + strDefine
@@ -387,8 +383,7 @@ function positionToOffset(str, desiredLine, desiredCol) {
if (desiredLine === 1) { if (desiredLine === 1) {
return desiredCol - 1; return desiredCol - 1;
} }
let line = 1; var line = 1, lastNewLineOffset = -1;
let lastNewLineOffset = -1;
do { do {
if (desiredLine === line) { if (desiredLine === line) {
return lastNewLineOffset + 1 + desiredCol - 1; return lastNewLineOffset + 1 + desiredCol - 1;
@@ -402,15 +397,14 @@ function positionToOffset(str, desiredLine, desiredCol) {
* Return a set of reachable nodes in `graph` starting from `rootNodes` * Return a set of reachable nodes in `graph` starting from `rootNodes`
*/ */
function visit(rootNodes, graph) { function visit(rootNodes, graph) {
const result = {}; var result = {}, queue = rootNodes;
const queue = rootNodes; rootNodes.forEach(function (node) {
rootNodes.forEach((node) => {
result[node] = true; result[node] = true;
}); });
while (queue.length > 0) { while (queue.length > 0) {
const el = queue.shift(); var el = queue.shift();
const myEdges = graph[el] || []; var myEdges = graph[el] || [];
myEdges.forEach((toNode) => { myEdges.forEach(function (toNode) {
if (!result[toNode]) { if (!result[toNode]) {
result[toNode] = true; result[toNode] = true;
queue.push(toNode); queue.push(toNode);
@@ -423,11 +417,11 @@ function visit(rootNodes, graph) {
* Perform a topological sort on `graph` * Perform a topological sort on `graph`
*/ */
function topologicalSort(graph) { function topologicalSort(graph) {
const allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {}; var allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
Object.keys(graph).forEach((fromNode) => { Object.keys(graph).forEach(function (fromNode) {
allNodes[fromNode] = true; allNodes[fromNode] = true;
outgoingEdgeCount[fromNode] = graph[fromNode].length; outgoingEdgeCount[fromNode] = graph[fromNode].length;
graph[fromNode].forEach((toNode) => { graph[fromNode].forEach(function (toNode) {
allNodes[toNode] = true; allNodes[toNode] = true;
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0; outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
inverseEdges[toNode] = inverseEdges[toNode] || []; inverseEdges[toNode] = inverseEdges[toNode] || [];
@@ -435,8 +429,8 @@ function topologicalSort(graph) {
}); });
}); });
// https://en.wikipedia.org/wiki/Topological_sorting // https://en.wikipedia.org/wiki/Topological_sorting
const S = [], L = []; var S = [], L = [];
Object.keys(allNodes).forEach((node) => { Object.keys(allNodes).forEach(function (node) {
if (outgoingEdgeCount[node] === 0) { if (outgoingEdgeCount[node] === 0) {
delete outgoingEdgeCount[node]; delete outgoingEdgeCount[node];
S.push(node); S.push(node);
@@ -445,10 +439,10 @@ function topologicalSort(graph) {
while (S.length > 0) { while (S.length > 0) {
// Ensure the exact same order all the time with the same inputs // Ensure the exact same order all the time with the same inputs
S.sort(); S.sort();
const n = S.shift(); var n = S.shift();
L.push(n); L.push(n);
const myInverseEdges = inverseEdges[n] || []; var myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach((m) => { myInverseEdges.forEach(function (m) {
outgoingEdgeCount[m]--; outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) { if (outgoingEdgeCount[m] === 0) {
delete outgoingEdgeCount[m]; delete outgoingEdgeCount[m];

View File

@@ -3,9 +3,9 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import * as fs from 'fs'; import fs = require('fs');
import * as path from 'path'; import path = require('path');
import * as vm from 'vm'; import vm = require('vm');
interface IPosition { interface IPosition {
line: number; line: number;
@@ -46,7 +46,7 @@ export interface IEntryPoint {
name: string; name: string;
include?: string[]; include?: string[];
exclude?: string[]; exclude?: string[];
prepend?: string[]; prepend: string[];
append?: string[]; append?: string[];
dest?: string; dest?: string;
} }
@@ -64,7 +64,7 @@ interface INodeSet {
} }
export interface IFile { export interface IFile {
path: string | null; path: string;
contents: string; contents: string;
} }
@@ -97,13 +97,13 @@ export interface ILoaderConfig {
/** /**
* Bundle `entryPoints` given config `config`. * Bundle `entryPoints` given config `config`.
*/ */
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult | null) => void): void { export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult) => void): void {
const entryPointsMap: IEntryPointMap = {}; let entryPointsMap: IEntryPointMap = {};
entryPoints.forEach((module: IEntryPoint) => { entryPoints.forEach((module: IEntryPoint) => {
entryPointsMap[module.name] = module; entryPointsMap[module.name] = module;
}); });
const allMentionedModulesMap: { [modules: string]: boolean; } = {}; let allMentionedModulesMap: { [modules: string]: boolean; } = {};
entryPoints.forEach((module: IEntryPoint) => { entryPoints.forEach((module: IEntryPoint) => {
allMentionedModulesMap[module.name] = true; allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) { (module.include || []).forEach(function (includedModule) {
@@ -115,32 +115,28 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
}); });
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js')); var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
const r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});'); var r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
const loaderModule = { exports: {} }; var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports); r.call({}, require, loaderModule, loaderModule.exports);
const loader: any = loaderModule.exports; var loader: any = loaderModule.exports;
config.isBuild = true; config.isBuild = true;
config.paths = config.paths || {}; config.paths = config.paths || {};
if (!config.paths['vs/nls']) { config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/nls'] = 'out-build/vs/nls.build'; config.paths['vs/css'] = 'out-build/vs/css.build';
}
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
loader.config(config); loader.config(config);
loader(['require'], (localRequire: any) => { loader(['require'], (localRequire) => {
const resolvePath = (path: string) => { let resolvePath = (path: string) => {
const r = localRequire.toUrl(path); let r = localRequire.toUrl(path);
if (!/\.js/.test(r)) { if (!/\.js/.test(r)) {
return r + '.js'; return r + '.js';
} }
return r; return r;
}; };
for (const moduleId in entryPointsMap) { for (let moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId]; let entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) { if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath); entryPoint.append = entryPoint.append.map(resolvePath);
} }
@@ -151,76 +147,76 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
}); });
loader(Object.keys(allMentionedModulesMap), () => { loader(Object.keys(allMentionedModulesMap), () => {
const modules = <IBuildModuleInfo[]>loader.getBuildInfo(); let modules = <IBuildModuleInfo[]>loader.getBuildInfo();
const partialResult = emitEntryPoints(modules, entryPointsMap); let partialResult = emitEntryPoints(modules, entryPointsMap);
const cssInlinedResources = loader('vs/css').getInlinedResources(); let cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, { callback(null, {
files: partialResult.files, files: partialResult.files,
cssInlinedResources: cssInlinedResources, cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData bundleData: partialResult.bundleData
}); });
}, (err: any) => callback(err, null)); }, (err) => callback(err, null));
} }
function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult { function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult {
const modulesMap: IBuildModuleInfoMap = {}; let modulesMap: IBuildModuleInfoMap = {};
modules.forEach((m: IBuildModuleInfo) => { modules.forEach((m: IBuildModuleInfo) => {
modulesMap[m.id] = m; modulesMap[m.id] = m;
}); });
const modulesGraph: IGraph = {}; let modulesGraph: IGraph = {};
modules.forEach((m: IBuildModuleInfo) => { modules.forEach((m: IBuildModuleInfo) => {
modulesGraph[m.id] = m.dependencies; modulesGraph[m.id] = m.dependencies;
}); });
const sortedModules = topologicalSort(modulesGraph); let sortedModules = topologicalSort(modulesGraph);
let result: IConcatFile[] = []; let result: IConcatFile[] = [];
const usedPlugins: IPluginMap = {}; let usedPlugins: IPluginMap = {};
const bundleData: IBundleData = { let bundleData: IBundleData = {
graph: modulesGraph, graph: modulesGraph,
bundles: {} bundles: {}
}; };
Object.keys(entryPoints).forEach((moduleToBundle: string) => { Object.keys(entryPoints).forEach((moduleToBundle: string) => {
const info = entryPoints[moduleToBundle]; let info = entryPoints[moduleToBundle];
const rootNodes = [moduleToBundle].concat(info.include || []); let rootNodes = [moduleToBundle].concat(info.include || []);
const allDependencies = visit(rootNodes, modulesGraph); let allDependencies = visit(rootNodes, modulesGraph);
const excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []); let excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach((excludeRoot: string) => { excludes.forEach((excludeRoot: string) => {
const allExcludes = visit([excludeRoot], modulesGraph); let allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude: string) => { Object.keys(allExcludes).forEach((exclude: string) => {
delete allDependencies[exclude]; delete allDependencies[exclude];
}); });
}); });
const includedModules = sortedModules.filter((module: string) => { let includedModules = sortedModules.filter((module: string) => {
return allDependencies[module]; return allDependencies[module];
}); });
bundleData.bundles[moduleToBundle] = includedModules; bundleData.bundles[moduleToBundle] = includedModules;
const res = emitEntryPoint( let res = emitEntryPoint(
modulesMap, modulesMap,
modulesGraph, modulesGraph,
moduleToBundle, moduleToBundle,
includedModules, includedModules,
info.prepend || [], info.prepend,
info.append || [], info.append,
info.dest info.dest
); );
result = result.concat(res.files); result = result.concat(res.files);
for (const pluginName in res.usedPlugins) { for (let pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName]; usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
} }
}); });
Object.keys(usedPlugins).forEach((pluginName: string) => { Object.keys(usedPlugins).forEach((pluginName: string) => {
const plugin = usedPlugins[pluginName]; let plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') { if (typeof plugin.finishBuild === 'function') {
const write = (filename: string, contents: string) => { let write = (filename: string, contents: string) => {
result.push({ result.push({
dest: filename, dest: filename,
sources: [{ sources: [{
@@ -241,16 +237,16 @@ function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMa
} }
function extractStrings(destFiles: IConcatFile[]): IConcatFile[] { function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
const parseDefineCall = (moduleMatch: string, depsMatch: string) => { let parseDefineCall = (moduleMatch: string, depsMatch: string) => {
const module = moduleMatch.replace(/^"|"$/g, ''); let module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(','); let deps = depsMatch.split(',');
deps = deps.map((dep) => { deps = deps.map((dep) => {
dep = dep.trim(); dep = dep.trim();
dep = dep.replace(/^"|"$/g, ''); dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, ''); dep = dep.replace(/^'|'$/g, '');
let prefix: string | null = null; let prefix: string = null;
let _path: string | null = null; let _path: string = null;
const pieces = dep.split('!'); let pieces = dep.split('!');
if (pieces.length > 1) { if (pieces.length > 1) {
prefix = pieces[0] + '!'; prefix = pieces[0] + '!';
_path = pieces[1]; _path = pieces[1];
@@ -260,7 +256,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
} }
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) { if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/'); let res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res; return prefix + res;
} }
return prefix + _path; return prefix + _path;
@@ -271,7 +267,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
}; };
}; };
destFiles.forEach((destFile) => { destFiles.forEach((destFile, index) => {
if (!/\.js$/.test(destFile.dest)) { if (!/\.js$/.test(destFile.dest)) {
return; return;
} }
@@ -280,33 +276,33 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
} }
// Do one pass to record the usage counts for each module id // Do one pass to record the usage counts for each module id
const useCounts: { [moduleId: string]: number; } = {}; let useCounts: { [moduleId: string]: number; } = {};
destFile.sources.forEach((source) => { destFile.sources.forEach((source) => {
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/); let matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) { if (!matches) {
return; return;
} }
const defineCall = parseDefineCall(matches[1], matches[2]); let defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1; useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach((dep) => { defineCall.deps.forEach((dep) => {
useCounts[dep] = (useCounts[dep] || 0) + 1; useCounts[dep] = (useCounts[dep] || 0) + 1;
}); });
}); });
const sortedByUseModules = Object.keys(useCounts); let sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort((a, b) => { sortedByUseModules.sort((a, b) => {
return useCounts[b] - useCounts[a]; return useCounts[b] - useCounts[a];
}); });
const replacementMap: { [moduleId: string]: number; } = {}; let replacementMap: { [moduleId: string]: number; } = {};
sortedByUseModules.forEach((module, index) => { sortedByUseModules.forEach((module, index) => {
replacementMap[module] = index; replacementMap[module] = index;
}); });
destFile.sources.forEach((source) => { destFile.sources.forEach((source) => {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => { source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
const defineCall = parseDefineCall(moduleMatch, depsMatch); let defineCall = parseDefineCall(moduleMatch, depsMatch);
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`; return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
}); });
}); });
@@ -336,7 +332,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] { function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
// Taken from typescript compiler => emitFiles // Taken from typescript compiler => emitFiles
const BOILERPLATE = [ let BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ }, { start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ }, { start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ }, { start: /^var __decorate/, end: /^};$/ },
@@ -347,22 +343,22 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
]; ];
destFiles.forEach((destFile) => { destFiles.forEach((destFile) => {
const SEEN_BOILERPLATE: boolean[] = []; let SEEN_BOILERPLATE = [];
destFile.sources.forEach((source) => { destFile.sources.forEach((source) => {
const lines = source.contents.split(/\r\n|\n|\r/); let lines = source.contents.split(/\r\n|\n|\r/);
const newLines: string[] = []; let newLines: string[] = [];
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp; let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp;
for (let i = 0; i < lines.length; i++) { for (let i = 0; i < lines.length; i++) {
const line = lines[i]; let line = lines[i];
if (IS_REMOVING_BOILERPLATE) { if (IS_REMOVING_BOILERPLATE) {
newLines.push(''); newLines.push('');
if (END_BOILERPLATE!.test(line)) { if (END_BOILERPLATE.test(line)) {
IS_REMOVING_BOILERPLATE = false; IS_REMOVING_BOILERPLATE = false;
} }
} else { } else {
for (let j = 0; j < BOILERPLATE.length; j++) { for (let j = 0; j < BOILERPLATE.length; j++) {
const boilerplate = BOILERPLATE[j]; let boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) { if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) { if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true; IS_REMOVING_BOILERPLATE = true;
@@ -402,19 +398,19 @@ function emitEntryPoint(
includedModules: string[], includedModules: string[],
prepend: string[], prepend: string[],
append: string[], append: string[],
dest: string | undefined dest: string
): IEmitEntryPointResult { ): IEmitEntryPointResult {
if (!dest) { if (!dest) {
dest = entryPoint + '.js'; dest = entryPoint + '.js';
} }
const mainResult: IConcatFile = { let mainResult: IConcatFile = {
sources: [], sources: [],
dest: dest dest: dest
}, },
results: IConcatFile[] = [mainResult]; results: IConcatFile[] = [mainResult];
const usedPlugins: IPluginMap = {}; let usedPlugins: IPluginMap = {};
const getLoaderPlugin = (pluginName: string): ILoaderPlugin => { let getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
if (!usedPlugins[pluginName]) { if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports; usedPlugins[pluginName] = modulesMap[pluginName].exports;
} }
@@ -422,39 +418,39 @@ function emitEntryPoint(
}; };
includedModules.forEach((c: string) => { includedModules.forEach((c: string) => {
const bangIndex = c.indexOf('!'); let bangIndex = c.indexOf('!');
if (bangIndex >= 0) { if (bangIndex >= 0) {
const pluginName = c.substr(0, bangIndex); let pluginName = c.substr(0, bangIndex);
const plugin = getLoaderPlugin(pluginName); let plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1))); mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return; return;
} }
const module = modulesMap[c]; let module = modulesMap[c];
if (module.path === 'empty:') { if (module.path === 'empty:') {
return; return;
} }
const contents = readFileAndRemoveBOM(module.path); let contents = readFileAndRemoveBOM(module.path);
if (module.shim) { if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents)); mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
} else { } else {
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents)); mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
} }
}); });
Object.keys(usedPlugins).forEach((pluginName: string) => { Object.keys(usedPlugins).forEach((pluginName: string) => {
const plugin = usedPlugins[pluginName]; let plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') { if (typeof plugin.writeFile === 'function') {
const req: ILoaderPluginReqFunc = <any>(() => { let req: ILoaderPluginReqFunc = <any>(() => {
throw new Error('no-no!'); throw new Error('no-no!');
}); });
req.toUrl = something => something; req.toUrl = something => something;
const write = (filename: string, contents: string) => { let write = (filename: string, contents: string) => {
results.push({ results.push({
dest: filename, dest: filename,
sources: [{ sources: [{
@@ -467,16 +463,16 @@ function emitEntryPoint(
} }
}); });
const toIFile = (path: string): IFile => { let toIFile = (path): IFile => {
const contents = readFileAndRemoveBOM(path); let contents = readFileAndRemoveBOM(path);
return { return {
path: path, path: path,
contents: contents contents: contents
}; };
}; };
const toPrepend = (prepend || []).map(toIFile); let toPrepend = (prepend || []).map(toIFile);
const toAppend = (append || []).map(toIFile); let toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend); mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
@@ -487,8 +483,8 @@ function emitEntryPoint(
} }
function readFileAndRemoveBOM(path: string): string { function readFileAndRemoveBOM(path: string): string {
const BOM_CHAR_CODE = 65279; var BOM_CHAR_CODE = 65279;
let contents = fs.readFileSync(path, 'utf8'); var contents = fs.readFileSync(path, 'utf8');
// Remove BOM // Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) { if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1); contents = contents.substring(1);
@@ -499,7 +495,7 @@ function readFileAndRemoveBOM(path: string): string {
function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile { function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile {
let result = ''; let result = '';
if (typeof plugin.write === 'function') { if (typeof plugin.write === 'function') {
const write: ILoaderPluginWriteFunc = <any>((what: string) => { let write: ILoaderPluginWriteFunc = <any>((what) => {
result += what; result += what;
}); });
write.getEntryPoint = () => { write.getEntryPoint = () => {
@@ -517,15 +513,15 @@ function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: strin
}; };
} }
function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path: string, contents: string): IFile { function emitNamedModule(moduleId: string, myDeps: string[], defineCallPosition: IPosition, path: string, contents: string): IFile {
// `defineCallPosition` is the position in code: |define() // `defineCallPosition` is the position in code: |define()
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col); let defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|() // `parensOffset` is the position in code: define|()
const parensOffset = contents.indexOf('(', defineCallOffset); let parensOffset = contents.indexOf('(', defineCallOffset);
const insertStr = '"' + moduleId + '", '; let insertStr = '"' + moduleId + '", ';
return { return {
path: path, path: path,
@@ -534,8 +530,8 @@ function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path:
} }
function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile { function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile {
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : ''); let strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');'; let strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return { return {
path: path, path: path,
contents: contents + '\n;\n' + strDefine contents: contents + '\n;\n' + strDefine
@@ -550,8 +546,8 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
return desiredCol - 1; return desiredCol - 1;
} }
let line = 1; let line = 1,
let lastNewLineOffset = -1; lastNewLineOffset = -1;
do { do {
if (desiredLine === line) { if (desiredLine === line) {
@@ -569,16 +565,16 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
* Return a set of reachable nodes in `graph` starting from `rootNodes` * Return a set of reachable nodes in `graph` starting from `rootNodes`
*/ */
function visit(rootNodes: string[], graph: IGraph): INodeSet { function visit(rootNodes: string[], graph: IGraph): INodeSet {
const result: INodeSet = {}; let result: INodeSet = {},
const queue = rootNodes; queue = rootNodes;
rootNodes.forEach((node) => { rootNodes.forEach((node) => {
result[node] = true; result[node] = true;
}); });
while (queue.length > 0) { while (queue.length > 0) {
const el = queue.shift(); let el = queue.shift();
const myEdges = graph[el!] || []; let myEdges = graph[el] || [];
myEdges.forEach((toNode) => { myEdges.forEach((toNode) => {
if (!result[toNode]) { if (!result[toNode]) {
result[toNode] = true; result[toNode] = true;
@@ -595,7 +591,7 @@ function visit(rootNodes: string[], graph: IGraph): INodeSet {
*/ */
function topologicalSort(graph: IGraph): string[] { function topologicalSort(graph: IGraph): string[] {
const allNodes: INodeSet = {}, let allNodes: INodeSet = {},
outgoingEdgeCount: { [node: string]: number; } = {}, outgoingEdgeCount: { [node: string]: number; } = {},
inverseEdges: IGraph = {}; inverseEdges: IGraph = {};
@@ -613,7 +609,7 @@ function topologicalSort(graph: IGraph): string[] {
}); });
// https://en.wikipedia.org/wiki/Topological_sorting // https://en.wikipedia.org/wiki/Topological_sorting
const S: string[] = [], let S: string[] = [],
L: string[] = []; L: string[] = [];
Object.keys(allNodes).forEach((node: string) => { Object.keys(allNodes).forEach((node: string) => {
@@ -627,10 +623,10 @@ function topologicalSort(graph: IGraph): string[] {
// Ensure the exact same order all the time with the same inputs // Ensure the exact same order all the time with the same inputs
S.sort(); S.sort();
const n: string = S.shift()!; let n: string = S.shift();
L.push(n); L.push(n);
const myInverseEdges = inverseEdges[n] || []; let myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach((m: string) => { myInverseEdges.forEach((m: string) => {
outgoingEdgeCount[m]--; outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) { if (outgoingEdgeCount[m] === 0) {

View File

@@ -4,53 +4,41 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream"); var gulp = require("gulp");
const fs = require("fs"); var tsb = require("gulp-tsb");
const gulp = require("gulp"); var es = require("event-stream");
const bom = require("gulp-bom"); var watch = require('./watch');
const sourcemaps = require("gulp-sourcemaps"); var nls = require("./nls");
const tsb = require("gulp-tsb"); var util = require("./util");
const path = require("path"); var reporter_1 = require("./reporter");
const _ = require("underscore"); var path = require("path");
const monacodts = require("../monaco/api"); var bom = require("gulp-bom");
const nls = require("./nls"); var sourcemaps = require("gulp-sourcemaps");
const reporter_1 = require("./reporter"); var _ = require("underscore");
const util = require("./util"); var monacodts = require("../monaco/api");
const util2 = require("gulp-util"); var fs = require("fs");
const watch = require('./watch'); var reporter = reporter_1.createReporter();
const reporter = reporter_1.createReporter(); var rootDir = path.join(__dirname, '../../src');
function getTypeScriptCompilerOptions(src) { var options = require('../../src/tsconfig.json').compilerOptions;
const rootDir = path.join(__dirname, `../../${src}`); options.verbose = false;
const tsconfig = require(`../../${src}/tsconfig.json`); options.sourceMap = true;
let options; if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
if (tsconfig.extends) { options.sourceMap = false;
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
} }
function createCompile(src, build, emitError) { options.rootDir = rootDir;
const opts = _.clone(getTypeScriptCompilerOptions(src)); options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
function createCompile(build, emitError) {
var opts = _.clone(options);
opts.inlineSources = !!build; opts.inlineSources = !!build;
opts.noFilesystemLookup = true; opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString())); var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
return function (token) { return function (token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path)); var utf8Filter = util.filter(function (data) { return /(\/|\\)test(\/|\\).*utf8/.test(data.path); });
const tsFilter = util.filter(data => /\.ts$/.test(data.path)); var tsFilter = util.filter(function (data) { return /\.ts$/.test(data.path); });
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path))); var noDeclarationsFilter = util.filter(function (data) { return !(/\.d\.ts$/.test(data.path)); });
const input = es.through(); var input = es.through();
const output = input var output = input
.pipe(utf8Filter) .pipe(utf8Filter)
.pipe(bom()) .pipe(bom())
.pipe(utf8Filter.restore) .pipe(utf8Filter.restore)
@@ -63,139 +51,93 @@ function createCompile(src, build, emitError) {
.pipe(sourcemaps.write('.', { .pipe(sourcemaps.write('.', {
addComment: false, addComment: false,
includeContent: !!build, includeContent: !!build,
sourceRoot: opts.sourceRoot sourceRoot: options.sourceRoot
})) }))
.pipe(tsFilter.restore) .pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError)); .pipe(reporter.end(emitError));
return es.duplex(input, output); return es.duplex(input, output);
}; };
} }
const typesDts = [ function compileTask(out, build) {
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) {
return function () { return function () {
const compile = createCompile(src, build, true); var compile = createCompile(build, true);
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts)); var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
let generator = new MonacoGenerator(false); // Do not write .d.ts files to disk, as they are not needed there.
if (src === 'src') { var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
generator.execute(); return src
}
return srcPipe
.pipe(generator.stream)
.pipe(compile()) .pipe(compile())
.pipe(gulp.dest(out)); .pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, false));
}; };
} }
exports.compileTask = compileTask; exports.compileTask = compileTask;
function watchTask(out, build) { function watchTask(out, build) {
return function () { return function () {
const compile = createCompile('src', build); var compile = createCompile(build);
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts)); var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
const watchSrc = watch('src/**', { base: 'src' }); var watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true); // Do not write .d.ts files to disk, as they are not needed there.
generator.execute(); var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return watchSrc return watchSrc
.pipe(generator.stream)
.pipe(util.incremental(compile, src, true)) .pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out)); .pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
}; };
} }
exports.watchTask = watchTask; exports.watchTask = watchTask;
const REPO_SRC_FOLDER = path.join(__dirname, '../../src'); function monacodtsTask(out, isWatch) {
class MonacoGenerator { var basePath = path.resolve(process.cwd(), out);
constructor(isWatch) { var neededFiles = {};
this._executeSoonTimer = null; monacodts.getFilesToWatch(out).forEach(function (filePath) {
this._isWatch = isWatch; filePath = path.normalize(filePath);
this.stream = es.through(); neededFiles[filePath] = true;
this._watchers = []; });
this._watchedFiles = {}; var inputFiles = {};
let onWillReadFile = (moduleId, filePath) => { for (var filePath in neededFiles) {
if (!this._isWatch) { if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
return; // This file is needed from source => simply read it now
} inputFiles[filePath] = fs.readFileSync(filePath).toString();
if (this._watchedFiles[filePath]) {
return;
}
this._watchedFiles[filePath] = true;
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
readFileSync(moduleId, filePath) {
onWillReadFile(moduleId, filePath);
return super.readFileSync(moduleId, filePath);
}
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
} }
} }
_executeSoon() { var setInputFile = function (filePath, contents) {
if (this._executeSoonTimer !== null) { if (inputFiles[filePath] === contents) {
clearTimeout(this._executeSoonTimer); // no change
this._executeSoonTimer = null;
}
this._executeSoonTimer = setTimeout(() => {
this._executeSoonTimer = null;
this.execute();
}, 20);
}
dispose() {
this._watchers.forEach(watcher => watcher.close());
}
_run() {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}
_log(message, ...rest) {
util2.log(util2.colors.cyan('[monaco.d.ts]'), message, ...rest);
}
execute() {
const startTime = Date.now();
const result = this._run();
if (!result) {
// nothing really changed
return; return;
} }
if (result.isTheSame) { inputFiles[filePath] = contents;
return; var neededInputFilesCount = Object.keys(neededFiles).length;
var availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
} }
fs.writeFileSync(result.filePath, result.content); };
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums); var run = function () {
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`); var result = monacodts.run(out, inputFiles);
if (!this._isWatch) { if (!result.isTheSame) {
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.'); if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
}
else {
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
} }
};
var resultStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
} }
resultStream = es.through(function (data) {
var filePath = path.normalize(path.resolve(basePath, data.relative));
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
} }

View File

@@ -5,50 +5,39 @@
'use strict'; 'use strict';
import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp'; import * as gulp from 'gulp';
import * as tsb from 'gulp-tsb';
import * as es from 'event-stream';
const watch = require('./watch');
import * as nls from './nls';
import * as util from './util';
import { createReporter } from './reporter';
import * as path from 'path';
import * as bom from 'gulp-bom'; import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps'; import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as _ from 'underscore'; import * as _ from 'underscore';
import * as monacodts from '../monaco/api'; import * as monacodts from '../monaco/api';
import * as nls from './nls'; import * as fs from 'fs';
import { createReporter } from './reporter';
import * as util from './util';
import * as util2 from 'gulp-util';
const watch = require('./watch');
const reporter = createReporter(); const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string) { const rootDir = path.join(__dirname, '../../src');
const rootDir = path.join(__dirname, `../../${src}`); const options = require('../../src/tsconfig.json').compilerOptions;
const tsconfig = require(`../../${src}/tsconfig.json`); options.verbose = false;
let options: { [key: string]: any }; options.sourceMap = true;
if (tsconfig.extends) { if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions); options.sourceMap = false;
} else {
options = tsconfig.compilerOptions;
}
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
} }
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream { function createCompile(build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(getTypeScriptCompilerOptions(src)); const opts = _.clone(options);
opts.inlineSources = !!build; opts.inlineSources = !!build;
opts.noFilesystemLookup = true; opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString())); const ts = tsb.create(opts, null, null, err => reporter(err.toString()));
return function (token?: util.ICancellationToken) { return function (token?: util.ICancellationToken) {
@@ -70,173 +59,117 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
.pipe(sourcemaps.write('.', { .pipe(sourcemaps.write('.', {
addComment: false, addComment: false,
includeContent: !!build, includeContent: !!build,
sourceRoot: opts.sourceRoot sourceRoot: options.sourceRoot
})) }))
.pipe(tsFilter.restore) .pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError)); .pipe(reporter.end(emitError));
return es.duplex(input, output); return es.duplex(input, output);
}; };
} }
const typesDts = [ export function compileTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () { return function () {
const compile = createCompile(src, build, true); const compile = createCompile(build, true);
const srcPipe = es.merge( const src = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }), gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts), gulp.src('node_modules/typescript/lib/lib.d.ts'),
); );
let generator = new MonacoGenerator(false); // Do not write .d.ts files to disk, as they are not needed there.
if (src === 'src') { const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
generator.execute();
}
return srcPipe return src
.pipe(generator.stream)
.pipe(compile()) .pipe(compile())
.pipe(gulp.dest(out)); .pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, false));
}; };
} }
export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteStream { export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () { return function () {
const compile = createCompile('src', build); const compile = createCompile(build);
const src = es.merge( const src = es.merge(
gulp.src('src/**', { base: 'src' }), gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts), gulp.src('node_modules/typescript/lib/lib.d.ts'),
); );
const watchSrc = watch('src/**', { base: 'src' }); const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true); // Do not write .d.ts files to disk, as they are not needed there.
generator.execute(); const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return watchSrc return watchSrc
.pipe(generator.stream)
.pipe(util.incremental(compile, src, true)) .pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out)); .pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
}; };
} }
const REPO_SRC_FOLDER = path.join(__dirname, '../../src'); function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
class MonacoGenerator { const basePath = path.resolve(process.cwd(), out);
private readonly _isWatch: boolean;
public readonly stream: NodeJS.ReadWriteStream;
private readonly _watchers: fs.FSWatcher[]; const neededFiles: { [file: string]: boolean; } = {};
private readonly _watchedFiles: { [filePath: string]: boolean; }; monacodts.getFilesToWatch(out).forEach(function (filePath) {
private readonly _fsProvider: monacodts.FSProvider; filePath = path.normalize(filePath);
private readonly _declarationResolver: monacodts.DeclarationResolver; neededFiles[filePath] = true;
});
constructor(isWatch: boolean) { const inputFiles: { [file: string]: string; } = {};
this._isWatch = isWatch; for (let filePath in neededFiles) {
this.stream = es.through(); if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
this._watchers = []; // This file is needed from source => simply read it now
this._watchedFiles = {}; inputFiles[filePath] = fs.readFileSync(filePath).toString();
let onWillReadFile = (moduleId: string, filePath: string) => {
if (!this._isWatch) {
return;
}
if (this._watchedFiles[filePath]) {
return;
}
this._watchedFiles[filePath] = true;
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
public readFileSync(moduleId: string, filePath: string): Buffer {
onWillReadFile(moduleId, filePath);
return super.readFileSync(moduleId, filePath);
}
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
} }
} }
private _executeSoonTimer: NodeJS.Timer | null = null; const setInputFile = (filePath: string, contents: string) => {
private _executeSoon(): void { if (inputFiles[filePath] === contents) {
if (this._executeSoonTimer !== null) { // no change
clearTimeout(this._executeSoonTimer);
this._executeSoonTimer = null;
}
this._executeSoonTimer = setTimeout(() => {
this._executeSoonTimer = null;
this.execute();
}, 20);
}
public dispose(): void {
this._watchers.forEach(watcher => watcher.close());
}
private _run(): monacodts.IMonacoDeclarationResult | null {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}
private _log(message: any, ...rest: any[]): void {
util2.log(util2.colors.cyan('[monaco.d.ts]'), message, ...rest);
}
public execute(): void {
const startTime = Date.now();
const result = this._run();
if (!result) {
// nothing really changed
return; return;
} }
if (result.isTheSame) { inputFiles[filePath] = contents;
return; const neededInputFilesCount = Object.keys(neededFiles).length;
const availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
} }
};
fs.writeFileSync(result.filePath, result.content); const run = () => {
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums); const result = monacodts.run(out, inputFiles);
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`); if (!result.isTheSame) {
if (!this._isWatch) { if (isWatch) {
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.'); fs.writeFileSync(result.filePath, result.content);
} else {
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
} }
};
let resultStream: NodeJS.ReadWriteStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
} }
resultStream = es.through(function (data) {
const filePath = path.normalize(path.resolve(basePath, data.relative));
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
} }

View File

@@ -11,7 +11,6 @@ const root = path.dirname(path.dirname(__dirname));
function getElectronVersion() { function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8'); const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1]; const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target; return target;
@@ -20,7 +19,6 @@ function getElectronVersion() {
module.exports.getElectronVersion = getElectronVersion; module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron // returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) { if (require.main === module) {
const version = getElectronVersion(); const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version'); const versionFile = path.join(root, '.build', 'electron', 'version');

View File

@@ -4,317 +4,115 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream"); var es = require("event-stream");
const fs = require("fs"); var assign = require("object-assign");
const glob = require("glob"); var remote = require("gulp-remote-src");
const gulp = require("gulp"); var flatmap = require('gulp-flatmap');
const path = require("path"); var vzip = require('gulp-vinyl-zip');
const File = require("vinyl"); var filter = require('gulp-filter');
const vsce = require("vsce"); var rename = require('gulp-rename');
const stats_1 = require("./stats"); var util = require('gulp-util');
const util2 = require("./util"); var buffer = require('gulp-buffer');
const remote = require("gulp-remote-src"); var json = require('gulp-json-editor');
const vzip = require('gulp-vinyl-zip'); var fs = require("fs");
const filter = require("gulp-filter"); var path = require("path");
const rename = require("gulp-rename"); var vsce = require("vsce");
const util = require('gulp-util'); var File = require("vinyl");
const buffer = require('gulp-buffer'); function fromLocal(extensionPath) {
const json = require("gulp-json-editor"); var result = es.through();
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const root = path.resolve(path.join(__dirname, '..', '..'));
// {{SQL CARBON EDIT}}
const _ = require("underscore");
const vfs = require("vinyl-fs");
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
}
exports.packageBuiltInExtensions = packageBuiltInExtensions;
function packageExtensionTask(extensionName, platform, arch) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
}
else {
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
}
platform = platform || process.platform;
return () => {
const root = path.resolve(path.join(__dirname, '../..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => extensionName === name);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util2.skipDirectories())
.pipe(util2.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
}
exports.packageExtensionTask = packageExtensionTask;
// {{SQL CARBON EDIT}} - End
function fromLocal(extensionPath, sourceMappingURLBase) {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
}
else {
return fromLocalNormal(extensionPath);
}
}
function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
const result = es.through();
const packagedDependencies = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
const filesStream = es.readArray(files);
// check for a webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = glob.sync(path.join(extensionPath, '/**/extension.webpack.config.js'), { ignore: ['**/node_modules'] });
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', /dist/);
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackDone = (err, stats) => {
util.log(`Bundled extension: ${util.colors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath) {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn }) vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => { .then(function (fileNames) {
const files = fileNames var files = fileNames
.map(fileName => path.join(extensionPath, fileName)) .map(function (fileName) { return path.join(extensionPath, fileName); })
.map(filePath => new File({ .map(function (filePath) { return new File({
path: filePath, path: filePath,
stat: fs.statSync(filePath), stat: fs.statSync(filePath),
base: extensionPath, base: extensionPath,
contents: fs.createReadStream(filePath) contents: fs.createReadStream(filePath)
})); }); });
es.readArray(files).pipe(result); es.readArray(files).pipe(result);
}) })
.catch(err => result.emit('error', err)); .catch(function (err) { return result.emit('error', err); });
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
function fromMarketplace(extensionName, version, metadata) {
const [publisher, name] = extensionName.split('.');
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: url,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
const packageJsonFilter = filter('package.json', { restore: true });
return remote('', options)
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}
exports.fromMarketplace = fromMarketplace;
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
// {{SQL CARBON EDIT}}
'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
'agent',
'import',
'profiler',
'admin-pack',
'big-data-cluster',
'dacpac'
];
var azureExtensions = ['azurecore', 'mssql'];
const builtInExtensions = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
* marketplace server cuts off the http request. So, we sequentialize the extensino tasks.
*/
function sequence(streamProviders) {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
}
else {
const fn = streamProviders.shift();
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result; return result;
} }
function packageExtensionsStream(optsIn) { exports.fromLocal = fromLocal;
const opts = optsIn || {}; function error(err) {
const localExtensionDescriptions = glob.sync('extensions/*/package.json') var result = es.through();
.map(manifestPath => { setTimeout(function () { return result.emit('error', err); });
const extensionPath = path.dirname(path.join(root, manifestPath)); return result;
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
.filter(({ name }) => azureExtensions.indexOf(name) === -1);
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map((d) => path.relative(root, d.path)).map((d) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util2.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util2.cleanNodeModule('typescript', ['**/**'], undefined));
// Original code commented out here
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
// const marketplaceExtensions = () => es.merge(
// ...builtInExtensions
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
// .map(extension => {
// return fromMarketplace(extension.name, extension.version, extension.metadata)
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
// })
// );
return sequence([localExtensions, localExtensionDependencies,])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
// {{SQL CARBON EDIT}} - End
} }
exports.packageExtensionsStream = packageExtensionsStream; var baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
};
function fromMarketplace(extensionName, version) {
var filterType = 7;
var value = extensionName;
var criterium = { filterType: filterType, value: value };
var criteria = [criterium];
var pageNumber = 1;
var pageSize = 1;
var sortBy = 0;
var sortOrder = 0;
var flags = 0x1 | 0x2 | 0x80;
var assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
var filters = [{ criteria: criteria, pageNumber: pageNumber, pageSize: pageSize, sortBy: sortBy, sortOrder: sortOrder }];
var body = JSON.stringify({ filters: filters, assetTypes: assetTypes, flags: flags });
var headers = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
var options = {
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: {
method: 'POST',
gzip: true,
headers: headers,
body: body
}
};
return remote('/extensionquery', options)
.pipe(flatmap(function (stream, f) {
var rawResult = f.contents.toString('utf8');
var result = JSON.parse(rawResult);
var extension = result.results[0].extensions[0];
if (!extension) {
return error("No such extension: " + extension);
}
var metadata = {
id: extension.extensionId,
publisherId: extension.publisher,
publisherDisplayName: extension.publisher.displayName
};
var extensionVersion = extension.versions.filter(function (v) { return v.version === version; })[0];
if (!extensionVersion) {
return error("No such extension version: " + extensionName + " @ " + version);
}
var asset = extensionVersion.files.filter(function (f) { return f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage'; })[0];
if (!asset) {
return error("No VSIX found for extension version: " + extensionName + " @ " + version);
}
util.log('Downloading extension:', util.colors.yellow(extensionName + "@" + version), '...');
var options = {
base: asset.source,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
return remote('', options)
.pipe(flatmap(function (stream) {
var packageJsonFilter = filter('package.json', { restore: true });
return stream
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(function (p) { return p.dirname = p.dirname.replace(/^extension\/?/, ''); }))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}));
}));
}
exports.fromMarketplace = fromMarketplace;

View File

@@ -4,221 +4,22 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import * as es from 'event-stream'; import * as es from 'event-stream';
import * as fs from 'fs';
import * as glob from 'glob';
import * as gulp from 'gulp';
import * as path from 'path';
import { Stream } from 'stream'; import { Stream } from 'stream';
import * as File from 'vinyl'; import assign = require('object-assign');
import * as vsce from 'vsce';
import { createStatsStream } from './stats';
import * as util2 from './util';
import remote = require('gulp-remote-src'); import remote = require('gulp-remote-src');
const flatmap = require('gulp-flatmap');
const vzip = require('gulp-vinyl-zip'); const vzip = require('gulp-vinyl-zip');
import filter = require('gulp-filter'); const filter = require('gulp-filter');
import rename = require('gulp-rename'); const rename = require('gulp-rename');
const util = require('gulp-util'); const util = require('gulp-util');
const buffer = require('gulp-buffer'); const buffer = require('gulp-buffer');
import json = require('gulp-json-editor'); const json = require('gulp-json-editor');
const webpack = require('webpack'); import * as fs from 'fs';
const webpackGulp = require('webpack-stream'); import * as path from 'path';
import * as vsce from 'vsce';
import * as File from 'vinyl';
const root = path.resolve(path.join(__dirname, '..', '..')); export function fromLocal(extensionPath: string): Stream {
// {{SQL CARBON EDIT}}
import * as _ from 'underscore';
import * as vfs from 'vinyl-fs';
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
export function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
}
export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
} else {
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
}
platform = platform || process.platform;
return () => {
const root = path.resolve(path.join(__dirname, '../..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => extensionName === name);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util2.skipDirectories())
.pipe(util2.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
}
// {{SQL CARBON EDIT}} - End
function fromLocal(extensionPath: string, sourceMappingURLBase?: string): Stream {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
} else {
return fromLocalNormal(extensionPath);
}
}
function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string | undefined): Stream {
const result = es.through();
const packagedDependencies: string[] = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
const filesStream = es.readArray(files);
// check for a webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = (<string[]>glob.sync(
path.join(extensionPath, '/**/extension.webpack.config.js'),
{ ignore: ['**/node_modules'] }
));
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data: any) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', /dist/);
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackDone = (err: any, stats: any) => {
util.log(`Bundled extension: ${util.colors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = {
...require(webpackConfigPath),
...{ mode: 'production' }
};
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data: File) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = (<Buffer>data.contents).toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath: string): Stream {
const result = es.through(); const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn }) vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
@@ -236,150 +37,96 @@ function fromLocalNormal(extensionPath: string): Stream {
}) })
.catch(err => result.emit('error', err)); .catch(err => result.emit('error', err));
return result.pipe(createStatsStream(path.basename(extensionPath))); return result;
}
function error(err: any): Stream {
const result = es.through();
setTimeout(() => result.emit('error', err));
return result;
} }
const baseHeaders = { const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build', 'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build', 'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
}; };
export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream { export function fromMarketplace(extensionName: string, version: string): Stream {
const [publisher, name] = extensionName.split('.'); const filterType = 7;
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`; const value = extensionName;
const criterium = { filterType, value };
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...'); const criteria = [criterium];
const pageNumber = 1;
const pageSize = 1;
const sortBy = 0;
const sortOrder = 0;
const flags = 0x1 | 0x2 | 0x80;
const assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
const filters = [{ criteria, pageNumber, pageSize, sortBy, sortOrder }];
const body = JSON.stringify({ filters, assetTypes, flags });
const headers: any = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
const options = { const options = {
base: url, base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: { requestOptions: {
method: 'POST',
gzip: true, gzip: true,
headers: baseHeaders headers,
body: body
} }
}; };
const packageJsonFilter = filter('package.json', { restore: true }); return remote('/extensionquery', options)
.pipe(flatmap((stream, f) => {
const rawResult = f.contents.toString('utf8');
const result = JSON.parse(rawResult);
const extension = result.results[0].extensions[0];
if (!extension) {
return error(`No such extension: ${extension}`);
}
return remote('', options) const metadata = {
.pipe(vzip.src()) id: extension.extensionId,
.pipe(filter('extension/**')) publisherId: extension.publisher,
.pipe(rename(p => p.dirname = p.dirname!.replace(/^extension\/?/, ''))) publisherDisplayName: extension.publisher.displayName
.pipe(packageJsonFilter) };
.pipe(buffer())
.pipe(json({ __metadata: metadata })) const extensionVersion = extension.versions.filter(v => v.version === version)[0];
.pipe(packageJsonFilter.restore); if (!extensionVersion) {
} return error(`No such extension version: ${extensionName} @ ${version}`);
}
interface IPackageExtensionsOptions {
/** const asset = extensionVersion.files.filter(f => f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage')[0];
* Set to undefined to package all of them. if (!asset) {
*/ return error(`No VSIX found for extension version: ${extensionName} @ ${version}`);
desiredExtensions?: string[]; }
sourceMappingURLBase?: string;
} util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
const excludedExtensions = [ const options = {
'vscode-api-tests', base: asset.source,
'vscode-colorize-tests', requestOptions: {
'ms-vscode.node-debug', gzip: true,
'ms-vscode.node-debug2', headers: baseHeaders
// {{SQL CARBON EDIT}} }
'integration-tests' };
];
return remote('', options)
// {{SQL CARBON EDIT}} .pipe(flatmap(stream => {
const sqlBuiltInExtensions = [ const packageJsonFilter = filter('package.json', { restore: true });
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages return stream
'agent', .pipe(vzip.src())
'import', .pipe(filter('extension/**'))
'profiler', .pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
'admin-pack', .pipe(packageJsonFilter)
'big-data-cluster', .pipe(buffer())
'dacpac' .pipe(json({ __metadata: metadata }))
]; .pipe(packageJsonFilter.restore);
var azureExtensions = ['azurecore', 'mssql']; }));
// {{SQL CARBON EDIT}} - End }));
interface IBuiltInExtension {
name: string;
version: string;
repo: string;
metadata: any;
}
const builtInExtensions: IBuiltInExtension[] = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
* marketplace server cuts off the http request. So, we sequentialize the extensino tasks.
*/
function sequence(streamProviders: { (): Stream }[]): Stream {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
} else {
const fn = streamProviders.shift()!;
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result;
}
export function packageExtensionsStream(optsIn?: IPackageExtensionsOptions): NodeJS.ReadWriteStream {
const opts = optsIn || {};
const localExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
.filter(({ name }) => azureExtensions.indexOf(name) === -1);
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map((d: any) => path.relative(root, d.path)).map((d: any) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util2.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util2.cleanNodeModule('typescript', ['**/**'], undefined));
// Original code commented out here
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
// const marketplaceExtensions = () => es.merge(
// ...builtInExtensions
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
// .map(extension => {
// return fromMarketplace(extension.name, extension.version, extension.metadata)
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
// })
// );
return sequence([localExtensions, localExtensionDependencies, /*marketplaceExtensions*/])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
// {{SQL CARBON EDIT}} - End
} }

View File

@@ -4,47 +4,47 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path"); var path = require("path");
const fs = require("fs"); var fs = require("fs");
/** /**
* Returns the sha1 commit version of a repository or undefined in case of failure. * Returns the sha1 commit version of a repository or undefined in case of failure.
*/ */
function getVersion(repo) { function getVersion(repo) {
const git = path.join(repo, '.git'); var git = path.join(repo, '.git');
const headPath = path.join(git, 'HEAD'); var headPath = path.join(git, 'HEAD');
let head; var head;
try { try {
head = fs.readFileSync(headPath, 'utf8').trim(); head = fs.readFileSync(headPath, 'utf8').trim();
} }
catch (e) { catch (e) {
return undefined; return void 0;
} }
if (/^[0-9a-f]{40}$/i.test(head)) { if (/^[0-9a-f]{40}$/i.test(head)) {
return head; return head;
} }
const refMatch = /^ref: (.*)$/.exec(head); var refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) { if (!refMatch) {
return undefined; return void 0;
} }
const ref = refMatch[1]; var ref = refMatch[1];
const refPath = path.join(git, ref); var refPath = path.join(git, ref);
try { try {
return fs.readFileSync(refPath, 'utf8').trim(); return fs.readFileSync(refPath, 'utf8').trim();
} }
catch (e) { catch (e) {
// noop // noop
} }
const packedRefsPath = path.join(git, 'packed-refs'); var packedRefsPath = path.join(git, 'packed-refs');
let refsRaw; var refsRaw;
try { try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim(); refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
} }
catch (e) { catch (e) {
return undefined; return void 0;
} }
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm; var refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch; var refsMatch;
let refs = {}; var refs = {};
while (refsMatch = refsRegex.exec(refsRaw)) { while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1]; refs[refsMatch[2]] = refsMatch[1];
} }

View File

@@ -10,7 +10,7 @@ import * as fs from 'fs';
/** /**
* Returns the sha1 commit version of a repository or undefined in case of failure. * Returns the sha1 commit version of a repository or undefined in case of failure.
*/ */
export function getVersion(repo: string): string | undefined { export function getVersion(repo: string): string {
const git = path.join(repo, '.git'); const git = path.join(repo, '.git');
const headPath = path.join(git, 'HEAD'); const headPath = path.join(git, 'HEAD');
let head: string; let head: string;
@@ -18,7 +18,7 @@ export function getVersion(repo: string): string | undefined {
try { try {
head = fs.readFileSync(headPath, 'utf8').trim(); head = fs.readFileSync(headPath, 'utf8').trim();
} catch (e) { } catch (e) {
return undefined; return void 0;
} }
if (/^[0-9a-f]{40}$/i.test(head)) { if (/^[0-9a-f]{40}$/i.test(head)) {
@@ -28,7 +28,7 @@ export function getVersion(repo: string): string | undefined {
const refMatch = /^ref: (.*)$/.exec(head); const refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) { if (!refMatch) {
return undefined; return void 0;
} }
const ref = refMatch[1]; const ref = refMatch[1];
@@ -46,11 +46,11 @@ export function getVersion(repo: string): string | undefined {
try { try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim(); refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
} catch (e) { } catch (e) {
return undefined; return void 0;
} }
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm; const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch: RegExpExecArray | null; let refsMatch: RegExpExecArray;
let refs: { [ref: string]: string } = {}; let refs: { [ref: string]: string } = {};
while (refsMatch = refsRegex.exec(refsRaw)) { while (refsMatch = refsRegex.exec(refsRaw)) {

File diff suppressed because it is too large Load Diff

View File

@@ -34,10 +34,6 @@
"name": "vs/workbench/parts/codeEditor", "name": "vs/workbench/parts/codeEditor",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/parts/comments",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/parts/debug", "name": "vs/workbench/parts/debug",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -110,10 +106,6 @@
"name": "vs/workbench/parts/snippets", "name": "vs/workbench/parts/snippets",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/parts/stats",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/parts/surveys", "name": "vs/workbench/parts/surveys",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -154,22 +146,10 @@
"name": "vs/workbench/parts/welcome", "name": "vs/workbench/parts/welcome",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/parts/outline",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/actions", "name": "vs/workbench/services/actions",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/bulkEdit",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/commands",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/configuration", "name": "vs/workbench/services/configuration",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -214,10 +194,6 @@
"name": "vs/workbench/services/progress", "name": "vs/workbench/services/progress",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/remote",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/textfile", "name": "vs/workbench/services/textfile",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -238,13 +214,9 @@
"name": "vs/workbench/services/decorations", "name": "vs/workbench/services/decorations",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/label",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/preferences", "name": "vs/workbench/services/preferences",
"project": "vscode-preferences" "project": "vscode-preferences"
} }
] ]
} }

View File

@@ -7,15 +7,15 @@ import * as path from 'path';
import * as fs from 'fs'; import * as fs from 'fs';
import { through, readable, ThroughStream } from 'event-stream'; import { through, readable, ThroughStream } from 'event-stream';
import * as File from 'vinyl'; import File = require('vinyl');
import * as Is from 'is'; import * as Is from 'is';
import * as xml2js from 'xml2js'; import * as xml2js from 'xml2js';
import * as glob from 'glob'; import * as glob from 'glob';
import * as https from 'https'; import * as https from 'https';
import * as gulp from 'gulp'; import * as gulp from 'gulp';
import * as util from 'gulp-util'; var util = require('gulp-util');
import * as iconv from 'iconv-lite'; var iconv = require('iconv-lite');
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4; const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
@@ -57,7 +57,7 @@ export const extraLanguages: Language[] = [
]; ];
// non built-in extensions also that are transifex and need to be part of the language packs // non built-in extensions also that are transifex and need to be part of the language packs
export const externalExtensionsWithTranslations = { const externalExtensionsWithTranslations = {
'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome', 'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
'vscode-node-debug': 'ms-vscode.node-debug', 'vscode-node-debug': 'ms-vscode.node-debug',
'vscode-node-debug2': 'ms-vscode.node-debug2' 'vscode-node-debug2': 'ms-vscode.node-debug2'
@@ -71,7 +71,7 @@ interface Map<V> {
interface Item { interface Item {
id: string; id: string;
message: string; message: string;
comment?: string; comment: string;
} }
export interface Resource { export interface Resource {
@@ -137,6 +137,27 @@ module PackageJsonFormat {
} }
} }
interface ModuleJsonFormat {
messages: string[];
keys: (string | LocalizeInfo)[];
}
module ModuleJsonFormat {
export function is(value: any): value is ModuleJsonFormat {
let candidate = value as ModuleJsonFormat;
return Is.defined(candidate)
&& Is.array(candidate.messages) && candidate.messages.every(message => Is.string(message))
&& Is.array(candidate.keys) && candidate.keys.every(key => Is.string(key) || LocalizeInfo.is(key));
}
}
interface BundledExtensionHeaderFormat {
id: string;
type: string;
hash: string;
outDir: string;
}
interface BundledExtensionFormat { interface BundledExtensionFormat {
[key: string]: { [key: string]: {
messages: string[]; messages: string[];
@@ -147,7 +168,7 @@ interface BundledExtensionFormat {
export class Line { export class Line {
private buffer: string[] = []; private buffer: string[] = [];
constructor(indent: number = 0) { constructor(private indent: number = 0) {
if (indent > 0) { if (indent > 0) {
this.buffer.push(new Array(indent + 1).join(' ')); this.buffer.push(new Array(indent + 1).join(' '));
} }
@@ -214,8 +235,8 @@ export class XLF {
let existingKeys = new Set<string>(); let existingKeys = new Set<string>();
for (let i = 0; i < keys.length; i++) { for (let i = 0; i < keys.length; i++) {
let key = keys[i]; let key = keys[i];
let realKey: string | undefined; let realKey: string;
let comment: string | undefined; let comment: string;
if (Is.string(key)) { if (Is.string(key)) {
realKey = key; realKey = key;
comment = undefined; comment = undefined;
@@ -265,17 +286,17 @@ export class XLF {
} }
static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> { static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
return new Promise((resolve) => { return new Promise((resolve, reject) => {
let parser = new xml2js.Parser(); let parser = new xml2js.Parser();
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = []; let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
parser.parseString(xlfString, function (_err: any, result: any) { parser.parseString(xlfString, function (err, result) {
const fileNodes: any[] = result['xliff']['file']; const fileNodes: any[] = result['xliff']['file'];
fileNodes.forEach(file => { fileNodes.forEach(file => {
const originalFilePath = file.$.original; const originalFilePath = file.$.original;
const messages: Map<string> = {}; const messages: Map<string> = {};
const transUnits = file.body[0]['trans-unit']; const transUnits = file.body[0]['trans-unit'];
if (transUnits) { if (transUnits) {
transUnits.forEach((unit: any) => { transUnits.forEach(unit => {
const key = unit.$.id; const key = unit.$.id;
const val = pseudify(unit.source[0]['_'].toString()); const val = pseudify(unit.source[0]['_'].toString());
if (key && val) { if (key && val) {
@@ -296,7 +317,7 @@ export class XLF {
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = []; let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
parser.parseString(xlfString, function (err: any, result: any) { parser.parseString(xlfString, function (err, result) {
if (err) { if (err) {
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`)); reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
} }
@@ -319,20 +340,17 @@ export class XLF {
const transUnits = file.body[0]['trans-unit']; const transUnits = file.body[0]['trans-unit'];
if (transUnits) { if (transUnits) {
transUnits.forEach((unit: any) => { transUnits.forEach(unit => {
const key = unit.$.id; const key = unit.$.id;
if (!unit.target) { if (!unit.target) {
return; // No translation available return; // No translation available
} }
let val = unit.target[0]; const val = unit.target.toString();
if (typeof val !== 'string') {
val = val._;
}
if (key && val) { if (key && val) {
messages[key] = decodeEntities(val); messages[key] = decodeEntities(val);
} else { } else {
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`)); reject(new Error(`XLF parsing error: XLIFF file does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
} }
}); });
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() }); files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
@@ -351,7 +369,7 @@ export interface ITask<T> {
interface ILimitedTaskFactory<T> { interface ILimitedTaskFactory<T> {
factory: ITask<Promise<T>>; factory: ITask<Promise<T>>;
c: (value?: T | Promise<T>) => void; c: (value?: T | Thenable<T>) => void;
e: (error?: any) => void; e: (error?: any) => void;
} }
@@ -373,7 +391,7 @@ export class Limiter<T> {
private consume(): void { private consume(): void {
while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) { while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
const iLimitedTask = this.outstandingPromises.shift()!; const iLimitedTask = this.outstandingPromises.shift();
this.runningPromises++; this.runningPromises++;
const promise = iLimitedTask.factory(); const promise = iLimitedTask.factory();
@@ -401,8 +419,8 @@ function stripComments(content: string): string {
* Third matches block comments * Third matches block comments
* Fourth matches line comments * Fourth matches line comments
*/ */
const regexp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g; var regexp: RegExp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => { let result = content.replace(regexp, (match, m1, m2, m3, m4) => {
// Only one of m1, m2, m3, m4 matches // Only one of m1, m2, m3, m4 matches
if (m3) { if (m3) {
// A block comment. Replace with nothing // A block comment. Replace with nothing
@@ -424,9 +442,9 @@ function stripComments(content: string): string {
} }
function escapeCharacters(value: string): string { function escapeCharacters(value: string): string {
const result: string[] = []; var result: string[] = [];
for (let i = 0; i < value.length; i++) { for (var i = 0; i < value.length; i++) {
const ch = value.charAt(i); var ch = value.charAt(i);
switch (ch) { switch (ch) {
case '\'': case '\'':
result.push('\\\''); result.push('\\\'');
@@ -466,6 +484,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
let statistics: Map<number> = Object.create(null); let statistics: Map<number> = Object.create(null);
let total: number = 0;
let defaultMessages: Map<Map<string>> = Object.create(null); let defaultMessages: Map<Map<string>> = Object.create(null);
let modules = Object.keys(keysSection); let modules = Object.keys(keysSection);
modules.forEach((module) => { modules.forEach((module) => {
@@ -478,6 +497,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
let messageMap: Map<string> = Object.create(null); let messageMap: Map<string> = Object.create(null);
defaultMessages[module] = messageMap; defaultMessages[module] = messageMap;
keys.map((key, i) => { keys.map((key, i) => {
total++;
if (typeof key === 'string') { if (typeof key === 'string') {
messageMap[key] = messages[i]; messageMap[key] = messages[i];
} else { } else {
@@ -500,7 +520,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
modules.forEach((module) => { modules.forEach((module) => {
let order = keysSection[module]; let order = keysSection[module];
let i18nFile = path.join(cwd, module) + '.i18n.json'; let i18nFile = path.join(cwd, module) + '.i18n.json';
let messages: Map<string> | null = null; let messages: Map<string> = null;
if (fs.existsSync(i18nFile)) { if (fs.existsSync(i18nFile)) {
let content = stripComments(fs.readFileSync(i18nFile, 'utf8')); let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
messages = JSON.parse(content); messages = JSON.parse(content);
@@ -513,13 +533,13 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
} }
let localizedMessages: string[] = []; let localizedMessages: string[] = [];
order.forEach((keyInfo) => { order.forEach((keyInfo) => {
let key: string | null = null; let key: string = null;
if (typeof keyInfo === 'string') { if (typeof keyInfo === 'string') {
key = keyInfo; key = keyInfo;
} else { } else {
key = keyInfo.key; key = keyInfo.key;
} }
let message: string = messages![key]; let message: string = messages[key];
if (!message) { if (!message) {
if (process.env['VSCODE_BUILD_VERBOSE']) { if (process.env['VSCODE_BUILD_VERBOSE']) {
log(`No localized message found for key ${key} in module ${module}. Using default message.`); log(`No localized message found for key ${key} in module ${module}. Using default message.`);
@@ -804,8 +824,8 @@ export function createXlfFilesForIsl(): ThroughStream {
} }
export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream { export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
let tryGetPromises: Array<Promise<boolean>> = []; let tryGetPromises = [];
let updateCreatePromises: Array<Promise<boolean>> = []; let updateCreatePromises = [];
return through(function (this: ThroughStream, file: File) { return through(function (this: ThroughStream, file: File) {
const project = path.dirname(file.relative); const project = path.dirname(file.relative);
@@ -870,7 +890,7 @@ function getAllResources(project: string, apiHostname: string, username: string,
export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream { export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
let resourcesByProject: Map<string[]> = Object.create(null); let resourcesByProject: Map<string[]> = Object.create(null);
resourcesByProject[extensionsProject] = ([] as any[]).concat(externalExtensionsWithTranslations); // clone resourcesByProject[extensionsProject] = [].concat(externalExtensionsWithTranslations); // clone
return through(function (this: ThroughStream, file: File) { return through(function (this: ThroughStream, file: File) {
const project = path.dirname(file.relative); const project = path.dirname(file.relative);
@@ -887,7 +907,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8')); const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_')); let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
let extractedResources: string[] = []; let extractedResources = [];
for (let project of [workbenchProject, editorProject]) { for (let project of [workbenchProject, editorProject]) {
for (let resource of resourcesByProject[project]) { for (let resource of resourcesByProject[project]) {
if (resource !== 'setup_messages') { if (resource !== 'setup_messages') {
@@ -900,7 +920,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`); console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
} }
let promises: Array<Promise<void>> = []; let promises = [];
for (let project in resourcesByProject) { for (let project in resourcesByProject) {
promises.push( promises.push(
getAllResources(project, apiHostname, username, password).then(resources => { getAllResources(project, apiHostname, username, password).then(resources => {
@@ -945,7 +965,7 @@ function tryGetResource(project: string, slug: string, apiHostname: string, cred
} }
function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> { function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> {
return new Promise((_resolve, reject) => { return new Promise((resolve, reject) => {
const data = JSON.stringify({ const data = JSON.stringify({
'content': xlfFile.contents.toString(), 'content': xlfFile.contents.toString(),
'name': slug, 'name': slug,
@@ -1036,8 +1056,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
// extensions // extensions
let extensionsToLocalize = Object.create(null); let extensionsToLocalize = Object.create(null);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true); glob.sync('./extensions/**/*.nls.json', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true); glob.sync('./extensions/*/node_modules/vscode-nls', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => { Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject }); _coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@@ -1065,7 +1085,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
let expectedTranslationsCount = resources.length; let expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false; let translationsRetrieved = 0, called = false;
return readable(function (_count: any, callback: any) { return readable(function (count, callback) {
// Mark end of stream when all resources were retrieved // Mark end of stream when all resources were retrieved
if (translationsRetrieved === expectedTranslationsCount) { if (translationsRetrieved === expectedTranslationsCount) {
return this.emit('end'); return this.emit('end');
@@ -1075,7 +1095,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
called = true; called = true;
const stream = this; const stream = this;
resources.map(function (resource) { resources.map(function (resource) {
retrieveResource(language, resource, apiHostname, credentials).then((file: File | null) => { retrieveResource(language, resource, apiHostname, credentials).then((file: File) => {
if (file) { if (file) {
stream.emit('data', file); stream.emit('data', file);
} }
@@ -1087,10 +1107,10 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
callback(); callback();
}); });
} }
const limiter = new Limiter<File | null>(NUMBER_OF_CONCURRENT_DOWNLOADS); const limiter = new Limiter<File>(NUMBER_OF_CONCURRENT_DOWNLOADS);
function retrieveResource(language: Language, resource: Resource, apiHostname: string, credentials: string): Promise<File | null> { function retrieveResource(language: Language, resource: Resource, apiHostname, credentials): Promise<File> {
return limiter.queue(() => new Promise<File | null>((resolve, reject) => { return limiter.queue(() => new Promise<File>((resolve, reject) => {
const slug = resource.name.replace(/\//g, '_'); const slug = resource.name.replace(/\//g, '_');
const project = resource.project; const project = resource.project;
let transifexLanguageId = language.id === 'ps' ? 'en' : language.transifexId || language.id; let transifexLanguageId = language.id === 'ps' ? 'en' : language.transifexId || language.id;
@@ -1192,10 +1212,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
let parsePromises: Promise<ParsedXLF[]>[] = []; let parsePromises: Promise<ParsedXLF[]>[] = [];
let mainPack: I18nPack = { version: i18nPackVersion, contents: {} }; let mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
let extensionsPacks: Map<I18nPack> = {}; let extensionsPacks: Map<I18nPack> = {};
let errors: any[] = [];
return through(function (this: ThroughStream, xlf: File) { return through(function (this: ThroughStream, xlf: File) {
let project = path.dirname(xlf.relative); let stream = this;
let resource = path.basename(xlf.relative, '.xlf'); let project = path.dirname(xlf.path);
let resource = path.basename(xlf.path, '.xlf');
let contents = xlf.contents.toString(); let contents = xlf.contents.toString();
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents); let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
parsePromises.push(parsePromise); parsePromises.push(parsePromise);
@@ -1222,15 +1242,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
} }
}); });
} }
).catch(reason => { );
errors.push(reason);
});
}, function () { }, function () {
Promise.all(parsePromises) Promise.all(parsePromises)
.then(() => { .then(() => {
if (errors.length > 0) {
throw errors;
}
const translatedMainFile = createI18nFile('./main', mainPack); const translatedMainFile = createI18nFile('./main', mainPack);
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' }); resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
@@ -1249,9 +1264,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
} }
this.queue(null); this.queue(null);
}) })
.catch((reason) => { .catch(reason => { throw new Error(reason); });
this.emit('error', reason);
});
}); });
} }
@@ -1272,15 +1285,11 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
stream.queue(translatedFile); stream.queue(translatedFile);
}); });
} }
).catch(reason => { );
this.emit('error', reason);
});
}, function () { }, function () {
Promise.all(parsePromises) Promise.all(parsePromises)
.then(() => { this.queue(null); }) .then(() => { this.queue(null); })
.catch(reason => { .catch(reason => { throw new Error(reason); });
this.emit('error', reason);
});
}); });
} }
@@ -1297,7 +1306,7 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
let firstChar = line.charAt(0); let firstChar = line.charAt(0);
if (firstChar === '[' || firstChar === ';') { if (firstChar === '[' || firstChar === ';') {
if (line === '; *** Inno Setup version 5.5.3+ English messages ***') { if (line === '; *** Inno Setup version 5.5.3+ English messages ***') {
content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo!.name} messages ***`); content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo.name} messages ***`);
} else { } else {
content.push(line); content.push(line);
} }
@@ -1307,9 +1316,9 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
let translated = line; let translated = line;
if (key) { if (key) {
if (key === 'LanguageName') { if (key === 'LanguageName') {
translated = `${key}=${innoSetup.defaultInfo!.name}`; translated = `${key}=${innoSetup.defaultInfo.name}`;
} else if (key === 'LanguageID') { } else if (key === 'LanguageID') {
translated = `${key}=${innoSetup.defaultInfo!.id}`; translated = `${key}=${innoSetup.defaultInfo.id}`;
} else if (key === 'LanguageCodePage') { } else if (key === 'LanguageCodePage') {
translated = `${key}=${innoSetup.codePage.substr(2)}`; translated = `${key}=${innoSetup.codePage.substr(2)}`;
} else { } else {
@@ -1330,14 +1339,14 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
return new File({ return new File({
path: filePath, path: filePath,
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage) contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8'), innoSetup.codePage)
}); });
} }
function encodeEntities(value: string): string { function encodeEntities(value: string): string {
let result: string[] = []; var result: string[] = [];
for (let i = 0; i < value.length; i++) { for (var i = 0; i < value.length; i++) {
let ch = value[i]; var ch = value[i];
switch (ch) { switch (ch) {
case '<': case '<':
result.push('&lt;'); result.push('&lt;');

View File

@@ -3,12 +3,13 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
const ts = require("typescript"); var ts = require("typescript");
const lazy = require("lazy.js"); var lazy = require("lazy.js");
const event_stream_1 = require("event-stream"); var event_stream_1 = require("event-stream");
const File = require("vinyl"); var File = require("vinyl");
const sm = require("source-map"); var sm = require("source-map");
const path = require("path"); var assign = require("object-assign");
var path = require("path");
var CollectStepResult; var CollectStepResult;
(function (CollectStepResult) { (function (CollectStepResult) {
CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes"; CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
@@ -17,9 +18,9 @@ var CollectStepResult;
CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse"; CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
})(CollectStepResult || (CollectStepResult = {})); })(CollectStepResult || (CollectStepResult = {}));
function collect(node, fn) { function collect(node, fn) {
const result = []; var result = [];
function loop(node) { function loop(node) {
const stepResult = fn(node); var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) { if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node); result.push(node);
} }
@@ -31,45 +32,43 @@ function collect(node, fn) {
return result; return result;
} }
function clone(object) { function clone(object) {
const result = {}; var result = {};
for (const id in object) { for (var id in object) {
result[id] = object[id]; result[id] = object[id];
} }
return result; return result;
} }
function template(lines) { function template(lines) {
let indent = '', wrap = ''; var indent = '', wrap = '';
if (lines.length > 1) { if (lines.length > 1) {
indent = '\t'; indent = '\t';
wrap = '\n'; wrap = '\n';
} }
return `/*--------------------------------------------------------- return "/*---------------------------------------------------------\n * Copyright (C) Microsoft Corporation. All rights reserved.\n *--------------------------------------------------------*/\ndefine([], [" + (wrap + lines.map(function (l) { return indent + l; }).join(',\n') + wrap) + "]);";
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/
define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
} }
/** /**
* Returns a stream containing the patched JavaScript and source maps. * Returns a stream containing the patched JavaScript and source maps.
*/ */
function nls() { function nls() {
const input = event_stream_1.through(); var input = event_stream_1.through();
const output = input.pipe(event_stream_1.through(function (f) { var output = input.pipe(event_stream_1.through(function (f) {
var _this = this;
if (!f.sourceMap) { if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`)); return this.emit('error', new Error("File " + f.relative + " does not have sourcemaps."));
} }
let source = f.sourceMap.sources[0]; var source = f.sourceMap.sources[0];
if (!source) { if (!source) {
return this.emit('error', new Error(`File ${f.relative} does not have a source in the source map.`)); return this.emit('error', new Error("File " + f.relative + " does not have a source in the source map."));
} }
const root = f.sourceMap.sourceRoot; var root = f.sourceMap.sourceRoot;
if (root) { if (root) {
source = path.join(root, source); source = path.join(root, source);
} }
const typescript = f.sourceMap.sourcesContent[0]; var typescript = f.sourceMap.sourcesContent[0];
if (!typescript) { if (!typescript) {
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`)); return this.emit('error', new Error("File " + f.relative + " does not have the original content in the source map."));
} }
nls.patchFiles(f, typescript).forEach(f => this.emit('data', f)); nls.patchFiles(f, typescript).forEach(function (f) { return _this.emit('data', f); });
})); }));
return event_stream_1.duplex(input, output); return event_stream_1.duplex(input, output);
} }
@@ -77,7 +76,8 @@ function isImportNode(node) {
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration; return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
} }
(function (nls_1) { (function (nls_1) {
function fileFrom(file, contents, path = file.path) { function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; }
return new File({ return new File({
contents: Buffer.from(contents), contents: Buffer.from(contents),
base: file.base, base: file.base,
@@ -87,27 +87,29 @@ function isImportNode(node) {
} }
nls_1.fileFrom = fileFrom; nls_1.fileFrom = fileFrom;
function mappedPositionFrom(source, lc) { function mappedPositionFrom(source, lc) {
return { source, line: lc.line + 1, column: lc.character }; return { source: source, line: lc.line + 1, column: lc.character };
} }
nls_1.mappedPositionFrom = mappedPositionFrom; nls_1.mappedPositionFrom = mappedPositionFrom;
function lcFrom(position) { function lcFrom(position) {
return { line: position.line - 1, character: position.column }; return { line: position.line - 1, character: position.column };
} }
nls_1.lcFrom = lcFrom; nls_1.lcFrom = lcFrom;
class SingleFileServiceHost { var SingleFileServiceHost = /** @class */ (function () {
constructor(options, filename, contents) { function SingleFileServiceHost(options, filename, contents) {
var _this = this;
this.options = options; this.options = options;
this.filename = filename; this.filename = filename;
this.getCompilationSettings = () => this.options; this.getCompilationSettings = function () { return _this.options; };
this.getScriptFileNames = () => [this.filename]; this.getScriptFileNames = function () { return [_this.filename]; };
this.getScriptVersion = () => '1'; this.getScriptVersion = function () { return '1'; };
this.getScriptSnapshot = (name) => name === this.filename ? this.file : this.lib; this.getScriptSnapshot = function (name) { return name === _this.filename ? _this.file : _this.lib; };
this.getCurrentDirectory = () => ''; this.getCurrentDirectory = function () { return ''; };
this.getDefaultLibFileName = () => 'lib.d.ts'; this.getDefaultLibFileName = function () { return 'lib.d.ts'; };
this.file = ts.ScriptSnapshot.fromString(contents); this.file = ts.ScriptSnapshot.fromString(contents);
this.lib = ts.ScriptSnapshot.fromString(''); this.lib = ts.ScriptSnapshot.fromString('');
} }
} return SingleFileServiceHost;
}());
nls_1.SingleFileServiceHost = SingleFileServiceHost; nls_1.SingleFileServiceHost = SingleFileServiceHost;
function isCallExpressionWithinTextSpanCollectStep(textSpan, node) { function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) { if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
@@ -115,96 +117,97 @@ function isImportNode(node) {
} }
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
} }
function analyze(contents, options = {}) { function analyze(contents, options) {
const filename = 'file.ts'; if (options === void 0) { options = {}; }
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents); var filename = 'file.ts';
const service = ts.createLanguageService(serviceHost); var serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true); var service = ts.createLanguageService(serviceHost);
var sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
// all imports // all imports
const imports = lazy(collect(sourceFile, n => isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse)); var imports = lazy(collect(sourceFile, function (n) { return isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; }));
// import nls = require('vs/nls'); // import nls = require('vs/nls');
const importEqualsDeclarations = imports var importEqualsDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration) .filter(function (n) { return n.kind === ts.SyntaxKind.ImportEqualsDeclaration; })
.map(n => n) .map(function (n) { return n; })
.filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) .filter(function (d) { return d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference; })
.filter(d => d.moduleReference.expression.getText() === '\'vs/nls\''); .filter(function (d) { return d.moduleReference.expression.getText() === '\'vs/nls\''; });
// import ... from 'vs/nls'; // import ... from 'vs/nls';
const importDeclarations = imports var importDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportDeclaration) .filter(function (n) { return n.kind === ts.SyntaxKind.ImportDeclaration; })
.map(n => n) .map(function (n) { return n; })
.filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral) .filter(function (d) { return d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral; })
.filter(d => d.moduleSpecifier.getText() === '\'vs/nls\'') .filter(function (d) { return d.moduleSpecifier.getText() === '\'vs/nls\''; })
.filter(d => !!d.importClause && !!d.importClause.namedBindings); .filter(function (d) { return !!d.importClause && !!d.importClause.namedBindings; });
const nlsExpressions = importEqualsDeclarations var nlsExpressions = importEqualsDeclarations
.map(d => d.moduleReference.expression) .map(function (d) { return d.moduleReference.expression; })
.concat(importDeclarations.map(d => d.moduleSpecifier)) .concat(importDeclarations.map(function (d) { return d.moduleSpecifier; }))
.map(d => ({ .map(function (d) { return ({
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()), start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd()) end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
})); }); });
// `nls.localize(...)` calls // `nls.localize(...)` calls
const nlsLocalizeCallExpressions = importDeclarations var nlsLocalizeCallExpressions = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)) .filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
.map(d => d.importClause.namedBindings.name) .map(function (d) { return d.importClause.namedBindings.name; })
.concat(importEqualsDeclarations.map(d => d.name)) .concat(importEqualsDeclarations.map(function (d) { return d.name; }))
// find read-only references to `nls` // find read-only references to `nls`
.map(n => service.getReferencesAtPosition(filename, n.pos + 1)) .map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten() .flatten()
.filter(r => !r.isWriteAccess) .filter(function (r) { return !r.isWriteAccess; })
// find the deepest call expressions AST nodes that contain those references // find the deepest call expressions AST nodes that contain those references
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n))) .map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(a => lazy(a).last()) .map(function (a) { return lazy(a).last(); })
.filter(n => !!n) .filter(function (n) { return !!n; })
.map(n => n) .map(function (n) { return n; })
// only `localize` calls // only `localize` calls
.filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'); .filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
// `localize` named imports // `localize` named imports
const allLocalizeImportDeclarations = importDeclarations var allLocalizeImportDeclarations = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)) .filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports; })
.map(d => [].concat(d.importClause.namedBindings.elements)) .map(function (d) { return [].concat(d.importClause.namedBindings.elements); })
.flatten(); .flatten();
// `localize` read-only references // `localize` read-only references
const localizeReferences = allLocalizeImportDeclarations var localizeReferences = allLocalizeImportDeclarations
.filter(d => d.name.getText() === 'localize') .filter(function (d) { return d.name.getText() === 'localize'; })
.map(n => service.getReferencesAtPosition(filename, n.pos + 1)) .map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten() .flatten()
.filter(r => !r.isWriteAccess); .filter(function (r) { return !r.isWriteAccess; });
// custom named `localize` read-only references // custom named `localize` read-only references
const namedLocalizeReferences = allLocalizeImportDeclarations var namedLocalizeReferences = allLocalizeImportDeclarations
.filter(d => d.propertyName && d.propertyName.getText() === 'localize') .filter(function (d) { return d.propertyName && d.propertyName.getText() === 'localize'; })
.map(n => service.getReferencesAtPosition(filename, n.name.pos + 1)) .map(function (n) { return service.getReferencesAtPosition(filename, n.name.pos + 1); })
.flatten() .flatten()
.filter(r => !r.isWriteAccess); .filter(function (r) { return !r.isWriteAccess; });
// find the deepest call expressions AST nodes that contain those references // find the deepest call expressions AST nodes that contain those references
const localizeCallExpressions = localizeReferences var localizeCallExpressions = localizeReferences
.concat(namedLocalizeReferences) .concat(namedLocalizeReferences)
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n))) .map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(a => lazy(a).last()) .map(function (a) { return lazy(a).last(); })
.filter(n => !!n) .filter(function (n) { return !!n; })
.map(n => n); .map(function (n) { return n; });
// collect everything // collect everything
const localizeCalls = nlsLocalizeCallExpressions var localizeCalls = nlsLocalizeCallExpressions
.concat(localizeCallExpressions) .concat(localizeCallExpressions)
.map(e => e.arguments) .map(function (e) { return e.arguments; })
.filter(a => a.length > 1) .filter(function (a) { return a.length > 1; })
.sort((a, b) => a[0].getStart() - b[0].getStart()) .sort(function (a, b) { return a[0].getStart() - b[0].getStart(); })
.map(a => ({ .map(function (a) { return ({
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) }, keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
key: a[0].getText(), key: a[0].getText(),
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) }, valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
value: a[1].getText() value: a[1].getText()
})); }); });
return { return {
localizeCalls: localizeCalls.toArray(), localizeCalls: localizeCalls.toArray(),
nlsExpressions: nlsExpressions.toArray() nlsExpressions: nlsExpressions.toArray()
}; };
} }
nls_1.analyze = analyze; nls_1.analyze = analyze;
class TextModel { var TextModel = /** @class */ (function () {
constructor(contents) { function TextModel(contents) {
const regex = /\r\n|\r|\n/g; var regex = /\r\n|\r|\n/g;
let index = 0; var index = 0;
let match; var match;
this.lines = []; this.lines = [];
this.lineEndings = []; this.lineEndings = [];
while (match = regex.exec(contents)) { while (match = regex.exec(contents)) {
@@ -217,80 +220,85 @@ function isImportNode(node) {
this.lineEndings.push(''); this.lineEndings.push('');
} }
} }
get(index) { TextModel.prototype.get = function (index) {
return this.lines[index]; return this.lines[index];
} };
set(index, line) { TextModel.prototype.set = function (index, line) {
this.lines[index] = line; this.lines[index] = line;
} };
get lineCount() { Object.defineProperty(TextModel.prototype, "lineCount", {
return this.lines.length; get: function () {
} return this.lines.length;
},
enumerable: true,
configurable: true
});
/** /**
* Applies patch(es) to the model. * Applies patch(es) to the model.
* Multiple patches must be ordered. * Multiple patches must be ordered.
* Does not support patches spanning multiple lines. * Does not support patches spanning multiple lines.
*/ */
apply(patch) { TextModel.prototype.apply = function (patch) {
const startLineNumber = patch.span.start.line; var startLineNumber = patch.span.start.line;
const endLineNumber = patch.span.end.line; var endLineNumber = patch.span.end.line;
const startLine = this.lines[startLineNumber] || ''; var startLine = this.lines[startLineNumber] || '';
const endLine = this.lines[endLineNumber] || ''; var endLine = this.lines[endLineNumber] || '';
this.lines[startLineNumber] = [ this.lines[startLineNumber] = [
startLine.substring(0, patch.span.start.character), startLine.substring(0, patch.span.start.character),
patch.content, patch.content,
endLine.substring(patch.span.end.character) endLine.substring(patch.span.end.character)
].join(''); ].join('');
for (let i = startLineNumber + 1; i <= endLineNumber; i++) { for (var i = startLineNumber + 1; i <= endLineNumber; i++) {
this.lines[i] = ''; this.lines[i] = '';
} }
} };
toString() { TextModel.prototype.toString = function () {
return lazy(this.lines).zip(this.lineEndings) return lazy(this.lines).zip(this.lineEndings)
.flatten().toArray().join(''); .flatten().toArray().join('');
} };
} return TextModel;
}());
nls_1.TextModel = TextModel; nls_1.TextModel = TextModel;
function patchJavascript(patches, contents, moduleId) { function patchJavascript(patches, contents, moduleId) {
const model = new nls.TextModel(contents); var model = new nls.TextModel(contents);
// patch the localize calls // patch the localize calls
lazy(patches).reverse().each(p => model.apply(p)); lazy(patches).reverse().each(function (p) { return model.apply(p); });
// patch the 'vs/nls' imports // patch the 'vs/nls' imports
const firstLine = model.get(0); var firstLine = model.get(0);
const patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`); var patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
model.set(0, patchedFirstLine); model.set(0, patchedFirstLine);
return model.toString(); return model.toString();
} }
nls_1.patchJavascript = patchJavascript; nls_1.patchJavascript = patchJavascript;
function patchSourcemap(patches, rsm, smc) { function patchSourcemap(patches, rsm, smc) {
const smg = new sm.SourceMapGenerator({ var smg = new sm.SourceMapGenerator({
file: rsm.file, file: rsm.file,
sourceRoot: rsm.sourceRoot sourceRoot: rsm.sourceRoot
}); });
patches = patches.reverse(); patches = patches.reverse();
let currentLine = -1; var currentLine = -1;
let currentLineDiff = 0; var currentLineDiff = 0;
let source = null; var source = null;
smc.eachMapping(m => { smc.eachMapping(function (m) {
const patch = patches[patches.length - 1]; var patch = patches[patches.length - 1];
const original = { line: m.originalLine, column: m.originalColumn }; var original = { line: m.originalLine, column: m.originalColumn };
const generated = { line: m.generatedLine, column: m.generatedColumn }; var generated = { line: m.generatedLine, column: m.generatedColumn };
if (currentLine !== generated.line) { if (currentLine !== generated.line) {
currentLineDiff = 0; currentLineDiff = 0;
} }
currentLine = generated.line; currentLine = generated.line;
generated.column += currentLineDiff; generated.column += currentLineDiff;
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) { if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
const originalLength = patch.span.end.character - patch.span.start.character; var originalLength = patch.span.end.character - patch.span.start.character;
const modifiedLength = patch.content.length; var modifiedLength = patch.content.length;
const lengthDiff = modifiedLength - originalLength; var lengthDiff = modifiedLength - originalLength;
currentLineDiff += lengthDiff; currentLineDiff += lengthDiff;
generated.column += lengthDiff; generated.column += lengthDiff;
patches.pop(); patches.pop();
} }
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source; source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
source = source.replace(/\\/g, '/'); source = source.replace(/\\/g, '/');
smg.addMapping({ source, name: m.name, original, generated }); smg.addMapping({ source: source, name: m.name, original: original, generated: generated });
}, null, sm.SourceMapConsumer.GENERATED_ORDER); }, null, sm.SourceMapConsumer.GENERATED_ORDER);
if (source) { if (source) {
smg.setSourceContent(source, smc.sourceContentFor(source)); smg.setSourceContent(source, smc.sourceContentFor(source));
@@ -299,47 +307,47 @@ function isImportNode(node) {
} }
nls_1.patchSourcemap = patchSourcemap; nls_1.patchSourcemap = patchSourcemap;
function patch(moduleId, typescript, javascript, sourcemap) { function patch(moduleId, typescript, javascript, sourcemap) {
const { localizeCalls, nlsExpressions } = analyze(typescript); var _a = analyze(typescript), localizeCalls = _a.localizeCalls, nlsExpressions = _a.nlsExpressions;
if (localizeCalls.length === 0) { if (localizeCalls.length === 0) {
return { javascript, sourcemap }; return { javascript: javascript, sourcemap: sourcemap };
} }
const nlsKeys = template(localizeCalls.map(lc => lc.key)); var nlsKeys = template(localizeCalls.map(function (lc) { return lc.key; }));
const nls = template(localizeCalls.map(lc => lc.value)); var nls = template(localizeCalls.map(function (lc) { return lc.value; }));
const smc = new sm.SourceMapConsumer(sourcemap); var smc = new sm.SourceMapConsumer(sourcemap);
const positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]); var positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
let i = 0; var i = 0;
// build patches // build patches
const patches = lazy(localizeCalls) var patches = lazy(localizeCalls)
.map(lc => ([ .map(function (lc) { return ([
{ range: lc.keySpan, content: '' + (i++) }, { range: lc.keySpan, content: '' + (i++) },
{ range: lc.valueSpan, content: 'null' } { range: lc.valueSpan, content: 'null' }
])) ]); })
.flatten() .flatten()
.map(c => { .map(function (c) {
const start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start))); var start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
const end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end))); var end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
return { span: { start, end }, content: c.content }; return { span: { start: start, end: end }, content: c.content };
}) })
.toArray(); .toArray();
javascript = patchJavascript(patches, javascript, moduleId); javascript = patchJavascript(patches, javascript, moduleId);
// since imports are not within the sourcemap information, // since imports are not within the sourcemap information,
// we must do this MacGyver style // we must do this MacGyver style
if (nlsExpressions.length) { if (nlsExpressions.length) {
javascript = javascript.replace(/^define\(.*$/m, line => { javascript = javascript.replace(/^define\(.*$/m, function (line) {
return line.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`); return line.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
}); });
} }
sourcemap = patchSourcemap(patches, sourcemap, smc); sourcemap = patchSourcemap(patches, sourcemap, smc);
return { javascript, sourcemap, nlsKeys, nls }; return { javascript: javascript, sourcemap: sourcemap, nlsKeys: nlsKeys, nls: nls };
} }
nls_1.patch = patch; nls_1.patch = patch;
function patchFiles(javascriptFile, typescript) { function patchFiles(javascriptFile, typescript) {
// hack? // hack?
const moduleId = javascriptFile.relative var moduleId = javascriptFile.relative
.replace(/\.js$/, '') .replace(/\.js$/, '')
.replace(/\\/g, '/'); .replace(/\\/g, '/');
const { javascript, sourcemap, nlsKeys, nls } = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap); var _a = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap), javascript = _a.javascript, sourcemap = _a.sourcemap, nlsKeys = _a.nlsKeys, nls = _a.nls;
const result = [fileFrom(javascriptFile, javascript)]; var result = [fileFrom(javascriptFile, javascript)];
result[0].sourceMap = sourcemap; result[0].sourceMap = sourcemap;
if (nlsKeys) { if (nlsKeys) {
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js'))); result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));

View File

@@ -6,9 +6,10 @@
import * as ts from 'typescript'; import * as ts from 'typescript';
import * as lazy from 'lazy.js'; import * as lazy from 'lazy.js';
import { duplex, through } from 'event-stream'; import { duplex, through } from 'event-stream';
import * as File from 'vinyl'; import File = require('vinyl');
import * as sm from 'source-map'; import * as sm from 'source-map';
import * as path from 'path'; import assign = require('object-assign');
import path = require('path');
declare class FileSourceMap extends File { declare class FileSourceMap extends File {
public sourceMap: sm.RawSourceMap; public sourceMap: sm.RawSourceMap;
@@ -25,7 +26,7 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
const result: ts.Node[] = []; const result: ts.Node[] = [];
function loop(node: ts.Node) { function loop(node: ts.Node) {
const stepResult = fn(node); var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) { if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node); result.push(node);
@@ -41,8 +42,8 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
} }
function clone<T>(object: T): T { function clone<T>(object: T): T {
const result = <T>{}; var result = <T>{};
for (const id in object) { for (var id in object) {
result[id] = object[id]; result[id] = object[id];
} }
return result; return result;
@@ -66,8 +67,8 @@ define([], [${ wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
* Returns a stream containing the patched JavaScript and source maps. * Returns a stream containing the patched JavaScript and source maps.
*/ */
function nls(): NodeJS.ReadWriteStream { function nls(): NodeJS.ReadWriteStream {
const input = through(); var input = through();
const output = input.pipe(through(function (f: FileSourceMap) { var output = input.pipe(through(function (f: FileSourceMap) {
if (!f.sourceMap) { if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`)); return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
} }
@@ -82,7 +83,7 @@ function nls(): NodeJS.ReadWriteStream {
source = path.join(root, source); source = path.join(root, source);
} }
const typescript = f.sourceMap.sourcesContent![0]; const typescript = f.sourceMap.sourcesContent[0];
if (!typescript) { if (!typescript) {
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`)); return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
} }
@@ -173,7 +174,7 @@ module nls {
export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult { export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult {
const filename = 'file.ts'; const filename = 'file.ts';
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents); const serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
const service = ts.createLanguageService(serviceHost); const service = ts.createLanguageService(serviceHost);
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true); const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
@@ -205,8 +206,8 @@ module nls {
// `nls.localize(...)` calls // `nls.localize(...)` calls
const nlsLocalizeCallExpressions = importDeclarations const nlsLocalizeCallExpressions = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)) .filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)
.map(d => (<ts.NamespaceImport>d.importClause!.namedBindings).name) .map(d => (<ts.NamespaceImport>d.importClause.namedBindings).name)
.concat(importEqualsDeclarations.map(d => d.name)) .concat(importEqualsDeclarations.map(d => d.name))
// find read-only references to `nls` // find read-only references to `nls`
@@ -225,8 +226,8 @@ module nls {
// `localize` named imports // `localize` named imports
const allLocalizeImportDeclarations = importDeclarations const allLocalizeImportDeclarations = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)) .filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)
.map(d => ([] as any[]).concat((<ts.NamedImports>d.importClause!.namedBindings!).elements)) .map(d => [].concat((<ts.NamedImports>d.importClause.namedBindings).elements))
.flatten(); .flatten();
// `localize` read-only references // `localize` read-only references
@@ -278,7 +279,7 @@ module nls {
constructor(contents: string) { constructor(contents: string) {
const regex = /\r\n|\r|\n/g; const regex = /\r\n|\r|\n/g;
let index = 0; let index = 0;
let match: RegExpExecArray | null; let match: RegExpExecArray;
this.lines = []; this.lines = [];
this.lineEndings = []; this.lineEndings = [];
@@ -359,7 +360,7 @@ module nls {
patches = patches.reverse(); patches = patches.reverse();
let currentLine = -1; let currentLine = -1;
let currentLineDiff = 0; let currentLineDiff = 0;
let source: string | null = null; let source = null;
smc.eachMapping(m => { smc.eachMapping(m => {
const patch = patches[patches.length - 1]; const patch = patches[patches.length - 1];

View File

@@ -4,31 +4,30 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream"); var path = require("path");
const gulp = require("gulp"); var gulp = require("gulp");
const concat = require("gulp-concat"); var sourcemaps = require("gulp-sourcemaps");
const minifyCSS = require("gulp-cssnano"); var filter = require("gulp-filter");
const filter = require("gulp-filter"); var minifyCSS = require("gulp-cssnano");
const flatmap = require("gulp-flatmap"); var uglify = require("gulp-uglify");
const sourcemaps = require("gulp-sourcemaps"); var composer = require("gulp-uglify/composer");
const uglify = require("gulp-uglify"); var uglifyes = require("uglify-es");
const composer = require("gulp-uglify/composer"); var es = require("event-stream");
const gulpUtil = require("gulp-util"); var concat = require("gulp-concat");
const path = require("path"); var VinylFile = require("vinyl");
const pump = require("pump"); var bundle = require("./bundle");
const uglifyes = require("uglify-es"); var util = require("./util");
const VinylFile = require("vinyl"); var i18n = require("./i18n");
const bundle = require("./bundle"); var gulpUtil = require("gulp-util");
const i18n_1 = require("./i18n"); var flatmap = require("gulp-flatmap");
const stats_1 = require("./stats"); var pump = require("pump");
const util = require("./util"); var REPO_ROOT_PATH = path.join(__dirname, '../..');
const REPO_ROOT_PATH = path.join(__dirname, '../..');
function log(prefix, message) { function log(prefix, message) {
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message); gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
} }
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
function loaderConfig(emptyPaths) { function loaderConfig(emptyPaths) {
const result = { var result = {
paths: { paths: {
'vs': 'out-build/vs', 'vs': 'out-build/vs',
'sql': 'out-build/sql', 'sql': 'out-build/sql',
@@ -40,26 +39,26 @@ function loaderConfig(emptyPaths) {
return result; return result;
} }
exports.loaderConfig = loaderConfig; exports.loaderConfig = loaderConfig;
const IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i; var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(src, bundledFileHeader, bundleLoader) { function loader(bundledFileHeader, bundleLoader) {
let sources = [ var sources = [
`${src}/vs/loader.js` 'out-build/vs/loader.js'
]; ];
if (bundleLoader) { if (bundleLoader) {
sources = sources.concat([ sources = sources.concat([
`${src}/vs/css.js`, 'out-build/vs/css.js',
`${src}/vs/nls.js` 'out-build/vs/nls.js'
]); ]);
} }
let isFirst = true; var isFirst = true;
return (gulp return (gulp
.src(sources, { base: `${src}` }) .src(sources, { base: 'out-build' })
.pipe(es.through(function (data) { .pipe(es.through(function (data) {
if (isFirst) { if (isFirst) {
isFirst = false; isFirst = false;
this.emit('data', new VinylFile({ this.emit('data', new VinylFile({
path: 'fake', path: 'fake',
base: undefined, base: '',
contents: Buffer.from(bundledFileHeader) contents: Buffer.from(bundledFileHeader)
})); }));
this.emit('data', data); this.emit('data', data);
@@ -75,13 +74,13 @@ function loader(src, bundledFileHeader, bundleLoader) {
return f; return f;
}))); })));
} }
function toConcatStream(src, bundledFileHeader, sources, dest) { function toConcatStream(bundledFileHeader, sources, dest) {
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest); var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then // If a bundle ends up including in any of the sources our copyright, then
// insert a fake source at the beginning of each bundle with our copyright // insert a fake source at the beginning of each bundle with our copyright
let containsOurCopyright = false; var containsOurCopyright = false;
for (let i = 0, len = sources.length; i < len; i++) { for (var i = 0, len = sources.length; i < len; i++) {
const fileContents = sources[i].contents; var fileContents = sources[i].contents;
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) { if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
containsOurCopyright = true; containsOurCopyright = true;
break; break;
@@ -93,9 +92,9 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
contents: bundledFileHeader contents: bundledFileHeader
}); });
} }
const treatedSources = sources.map(function (source) { var treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : ''; var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : undefined; var base = source.path ? root + '/out-build' : '';
return new VinylFile({ return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake', path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base, base: base,
@@ -104,42 +103,40 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
}); });
return es.readArray(treatedSources) return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through()) .pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest)) .pipe(concat(dest));
.pipe(stats_1.createStatsStream(dest));
} }
function toBundleStream(src, bundledFileHeader, bundles) { function toBundleStream(bundledFileHeader, bundles) {
return es.merge(bundles.map(function (bundle) { return es.merge(bundles.map(function (bundle) {
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest); return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
})); }));
} }
function optimizeTask(opts) { function optimizeTask(opts) {
const src = opts.src; var entryPoints = opts.entryPoints;
const entryPoints = opts.entryPoints; var otherSources = opts.otherSources;
const otherSources = opts.otherSources; var resources = opts.resources;
const resources = opts.resources; var loaderConfig = opts.loaderConfig;
const loaderConfig = opts.loaderConfig; var bundledFileHeader = opts.header;
const bundledFileHeader = opts.header; var bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader); var out = opts.out;
const out = opts.out;
return function () { return function () {
const bundlesStream = es.through(); // this stream will contain the bundled files var bundlesStream = es.through(); // this stream will contain the bundled files
const resourcesStream = es.through(); // this stream will contain the resources var resourcesStream = es.through(); // this stream will contain the resources
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json var bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) { bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { if (err) {
return bundlesStream.emit('error', JSON.stringify(err)); return bundlesStream.emit('error', JSON.stringify(err));
} }
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream); toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources // Remove css inlined resources
const filteredResources = resources.slice(); var filteredResources = resources.slice();
result.cssInlinedResources.forEach(function (resource) { result.cssInlinedResources.forEach(function (resource) {
if (process.env['VSCODE_BUILD_VERBOSE']) { if (process.env['VSCODE_BUILD_VERBOSE']) {
log('optimizer', 'excluding inlined: ' + resource); log('optimizer', 'excluding inlined: ' + resource);
} }
filteredResources.push('!' + resource); filteredResources.push('!' + resource);
}); });
gulp.src(filteredResources, { base: `${src}` }).pipe(resourcesStream); gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
const bundleInfoArray = []; var bundleInfoArray = [];
if (opts.bundleInfo) { if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({ bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json', path: 'bundleInfo.json',
@@ -149,11 +146,11 @@ function optimizeTask(opts) {
} }
es.readArray(bundleInfoArray).pipe(bundleInfoStream); es.readArray(bundleInfoArray).pipe(bundleInfoStream);
}); });
const otherSourcesStream = es.through(); var otherSourcesStream = es.through();
const otherSourcesStreamArr = []; var otherSourcesStreamArr = [];
gulp.src(otherSources, { base: `${src}` }) gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) { .pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative)); otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
}, function () { }, function () {
if (!otherSourcesStreamArr.length) { if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0); setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
@@ -162,17 +159,17 @@ function optimizeTask(opts) {
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream); es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
} }
})); }));
const result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream); var result = es.merge(loader(bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
return result return result
.pipe(sourcemaps.write('./', { .pipe(sourcemaps.write('./', {
sourceRoot: undefined, sourceRoot: null,
addComment: true, addComment: true,
includeContent: true includeContent: true
})) }))
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({ .pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader, fileHeader: bundledFileHeader,
languages: opts.languages languages: opts.languages
}) : es.through()) }))
.pipe(gulp.dest(out)); .pipe(gulp.dest(out));
}; };
} }
@@ -182,14 +179,14 @@ exports.optimizeTask = optimizeTask;
* to have a file "context" to include our copyright only once per file. * to have a file "context" to include our copyright only once per file.
*/ */
function uglifyWithCopyrights() { function uglifyWithCopyrights() {
const preserveComments = (f) => { var preserveComments = function (f) {
return (_node, comment) => { return function (node, comment) {
const text = comment.value; var text = comment.value;
const type = comment.type; var type = comment.type;
if (/@minifier_do_not_preserve/.test(text)) { if (/@minifier_do_not_preserve/.test(text)) {
return false; return false;
} }
const isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text); var isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
if (isOurCopyright) { if (isOurCopyright) {
if (f.__hasOurCopyright) { if (f.__hasOurCopyright) {
return false; return false;
@@ -207,10 +204,10 @@ function uglifyWithCopyrights() {
return false; return false;
}; };
}; };
const minify = composer(uglifyes); var minify = composer(uglifyes);
const input = es.through(); var input = es.through();
const output = input var output = input
.pipe(flatmap((stream, f) => { .pipe(flatmap(function (stream, f) {
return stream.pipe(minify({ return stream.pipe(minify({
output: { output: {
comments: preserveComments(f), comments: preserveComments(f),
@@ -221,18 +218,18 @@ function uglifyWithCopyrights() {
return es.duplex(input, output); return es.duplex(input, output);
} }
function minifyTask(src, sourceMapBaseUrl) { function minifyTask(src, sourceMapBaseUrl) {
const sourceMappingURL = sourceMapBaseUrl ? ((f) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined; var sourceMappingURL = sourceMapBaseUrl && (function (f) { return sourceMapBaseUrl + "/" + f.relative + ".map"; });
return cb => { return function (cb) {
const jsFilter = filter('**/*.js', { restore: true }); var jsFilter = filter('**/*.js', { restore: true });
const cssFilter = filter('**/*.css', { restore: true }); var cssFilter = filter('**/*.css', { restore: true });
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', { pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', {
sourceMappingURL, sourceMappingURL: sourceMappingURL,
sourceRoot: undefined, sourceRoot: null,
includeContent: true, includeContent: true,
addComment: true addComment: true
}), gulp.dest(src + '-min'), (err) => { }), gulp.dest(src + '-min'), function (err) {
if (err instanceof uglify.GulpUglifyError) { if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${err.cause && err.cause.filename}'`); console.error("Uglify error in '" + (err.cause && err.cause.filename) + "'");
} }
cb(err); cb(err);
}); });

View File

@@ -5,25 +5,24 @@
'use strict'; 'use strict';
import * as es from 'event-stream'; import * as path from 'path';
import * as gulp from 'gulp'; import * as gulp from 'gulp';
import * as concat from 'gulp-concat';
import * as minifyCSS from 'gulp-cssnano';
import * as filter from 'gulp-filter';
import * as flatmap from 'gulp-flatmap';
import * as sourcemaps from 'gulp-sourcemaps'; import * as sourcemaps from 'gulp-sourcemaps';
import * as filter from 'gulp-filter';
import * as minifyCSS from 'gulp-cssnano';
import * as uglify from 'gulp-uglify'; import * as uglify from 'gulp-uglify';
import * as composer from 'gulp-uglify/composer'; import * as composer from 'gulp-uglify/composer';
import * as gulpUtil from 'gulp-util';
import * as path from 'path';
import * as pump from 'pump';
import * as sm from 'source-map';
import * as uglifyes from 'uglify-es'; import * as uglifyes from 'uglify-es';
import * as es from 'event-stream';
import * as concat from 'gulp-concat';
import * as VinylFile from 'vinyl'; import * as VinylFile from 'vinyl';
import * as bundle from './bundle'; import * as bundle from './bundle';
import { Language, processNlsFiles } from './i18n';
import { createStatsStream } from './stats';
import * as util from './util'; import * as util from './util';
import * as i18n from './i18n';
import * as gulpUtil from 'gulp-util';
import * as flatmap from 'gulp-flatmap';
import * as pump from 'pump';
import * as sm from 'source-map';
const REPO_ROOT_PATH = path.join(__dirname, '../..'); const REPO_ROOT_PATH = path.join(__dirname, '../..');
@@ -33,7 +32,7 @@ function log(prefix: string, message: string): void {
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
export function loaderConfig(emptyPaths?: string[]) { export function loaderConfig(emptyPaths?: string[]) {
const result: any = { const result = {
paths: { paths: {
'vs': 'out-build/vs', 'vs': 'out-build/vs',
'sql': 'out-build/sql', 'sql': 'out-build/sql',
@@ -53,27 +52,27 @@ declare class FileSourceMap extends VinylFile {
public sourceMap: sm.RawSourceMap; public sourceMap: sm.RawSourceMap;
} }
function loader(src: string, bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream { function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream {
let sources = [ let sources = [
`${src}/vs/loader.js` 'out-build/vs/loader.js'
]; ];
if (bundleLoader) { if (bundleLoader) {
sources = sources.concat([ sources = sources.concat([
`${src}/vs/css.js`, 'out-build/vs/css.js',
`${src}/vs/nls.js` 'out-build/vs/nls.js'
]); ]);
} }
let isFirst = true; let isFirst = true;
return ( return (
gulp gulp
.src(sources, { base: `${src}` }) .src(sources, { base: 'out-build' })
.pipe(es.through(function (data) { .pipe(es.through(function (data) {
if (isFirst) { if (isFirst) {
isFirst = false; isFirst = false;
this.emit('data', new VinylFile({ this.emit('data', new VinylFile({
path: 'fake', path: 'fake',
base: undefined, base: '',
contents: Buffer.from(bundledFileHeader) contents: Buffer.from(bundledFileHeader)
})); }));
this.emit('data', data); this.emit('data', data);
@@ -90,7 +89,7 @@ function loader(src: string, bundledFileHeader: string, bundleLoader: boolean):
); );
} }
function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream { function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream {
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest); const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then // If a bundle ends up including in any of the sources our copyright, then
@@ -113,7 +112,7 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
const treatedSources = sources.map(function (source) { const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : ''; const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : undefined; const base = source.path ? root + '/out-build' : '';
return new VinylFile({ return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake', path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
@@ -124,21 +123,16 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
return es.readArray(treatedSources) return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through()) .pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest)) .pipe(concat(dest));
.pipe(createStatsStream(dest));
} }
function toBundleStream(src: string, bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream { function toBundleStream(bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
return es.merge(bundles.map(function (bundle) { return es.merge(bundles.map(function (bundle) {
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest); return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
})); }));
} }
export interface IOptimizeTaskOpts { export interface IOptimizeTaskOpts {
/**
* The folder to read files from.
*/
src: string;
/** /**
* (for AMD files, will get bundled and get Copyright treatment) * (for AMD files, will get bundled and get Copyright treatment)
*/ */
@@ -169,13 +163,11 @@ export interface IOptimizeTaskOpts {
*/ */
out: string; out: string;
/** /**
* (out folder name) * (languages to process)
*/ */
languages?: Language[]; languages: i18n.Language[];
} }
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream { export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
const src = opts.src;
const entryPoints = opts.entryPoints; const entryPoints = opts.entryPoints;
const otherSources = opts.otherSources; const otherSources = opts.otherSources;
const resources = opts.resources; const resources = opts.resources;
@@ -190,9 +182,9 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) { bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); } if (err) { return bundlesStream.emit('error', JSON.stringify(err)); }
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream); toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources // Remove css inlined resources
const filteredResources = resources.slice(); const filteredResources = resources.slice();
@@ -202,7 +194,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
} }
filteredResources.push('!' + resource); filteredResources.push('!' + resource);
}); });
gulp.src(filteredResources, { base: `${src}` }).pipe(resourcesStream); gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
const bundleInfoArray: VinylFile[] = []; const bundleInfoArray: VinylFile[] = [];
if (opts.bundleInfo) { if (opts.bundleInfo) {
@@ -218,9 +210,9 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
const otherSourcesStream = es.through(); const otherSourcesStream = es.through();
const otherSourcesStreamArr: NodeJS.ReadWriteStream[] = []; const otherSourcesStreamArr: NodeJS.ReadWriteStream[] = [];
gulp.src(otherSources, { base: `${src}` }) gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) { .pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative)); otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
}, function () { }, function () {
if (!otherSourcesStreamArr.length) { if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0); setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
@@ -230,7 +222,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
})); }));
const result = es.merge( const result = es.merge(
loader(src, bundledFileHeader, bundleLoader), loader(bundledFileHeader, bundleLoader),
bundlesStream, bundlesStream,
otherSourcesStream, otherSourcesStream,
resourcesStream, resourcesStream,
@@ -239,14 +231,14 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
return result return result
.pipe(sourcemaps.write('./', { .pipe(sourcemaps.write('./', {
sourceRoot: undefined, sourceRoot: null,
addComment: true, addComment: true,
includeContent: true includeContent: true
})) }))
.pipe(opts.languages && opts.languages.length ? processNlsFiles({ .pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader, fileHeader: bundledFileHeader,
languages: opts.languages languages: opts.languages
}) : es.through()) }))
.pipe(gulp.dest(out)); .pipe(gulp.dest(out));
}; };
} }
@@ -260,7 +252,7 @@ declare class FileWithCopyright extends VinylFile {
*/ */
function uglifyWithCopyrights(): NodeJS.ReadWriteStream { function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
const preserveComments = (f: FileWithCopyright) => { const preserveComments = (f: FileWithCopyright) => {
return (_node: any, comment: { value: string; type: string; }) => { return (node, comment: { value: string; type: string; }) => {
const text = comment.value; const text = comment.value;
const type = comment.type; const type = comment.type;
@@ -288,7 +280,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
}; };
}; };
const minify = (composer as any)(uglifyes); const minify = composer(uglifyes);
const input = es.through(); const input = es.through();
const output = input const output = input
.pipe(flatmap((stream, f) => { .pipe(flatmap((stream, f) => {
@@ -304,7 +296,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
} }
export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void { export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
const sourceMappingURL = sourceMapBaseUrl ? ((f: any) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined; const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);
return cb => { return cb => {
const jsFilter = filter('**/*.js', { restore: true }); const jsFilter = filter('**/*.js', { restore: true });
@@ -321,13 +313,13 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
cssFilter.restore, cssFilter.restore,
sourcemaps.write('./', { sourcemaps.write('./', {
sourceMappingURL, sourceMappingURL,
sourceRoot: undefined, sourceRoot: null,
includeContent: true, includeContent: true,
addComment: true addComment: true
} as any), }),
gulp.dest(src + '-min') gulp.dest(src + '-min')
, (err: any) => { , (err: any) => {
if (err instanceof (uglify as any).GulpUglifyError) { if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${err.cause && err.cause.filename}'`); console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
} }

View File

@@ -4,20 +4,20 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream"); var es = require("event-stream");
const _ = require("underscore"); var _ = require("underscore");
const util = require("gulp-util"); var util = require("gulp-util");
const fs = require("fs"); var fs = require("fs");
const path = require("path"); var path = require("path");
const allErrors = []; var allErrors = [];
let startTime = null; var startTime = null;
let count = 0; var count = 0;
function onStart() { function onStart() {
if (count++ > 0) { if (count++ > 0) {
return; return;
} }
startTime = new Date().getTime(); startTime = new Date().getTime();
util.log(`Starting ${util.colors.green('compilation')}...`); util.log("Starting " + util.colors.green('compilation') + "...");
} }
function onEnd() { function onEnd() {
if (--count > 0) { if (--count > 0) {
@@ -25,7 +25,7 @@ function onEnd() {
} }
log(); log();
} }
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log'); var buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
try { try {
fs.mkdirSync(path.dirname(buildLogPath)); fs.mkdirSync(path.dirname(buildLogPath));
} }
@@ -33,52 +33,61 @@ catch (err) {
// ignore // ignore
} }
function log() { function log() {
const errors = _.flatten(allErrors); var errors = _.flatten(allErrors);
const seen = new Set(); var seen = new Set();
errors.map(err => { errors.map(function (err) {
if (!seen.has(err)) { if (!seen.has(err)) {
seen.add(err); seen.add(err);
util.log(`${util.colors.red('Error')}: ${err}`); util.log(util.colors.red('Error') + ": " + err);
} }
}); });
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/; var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors var messages = errors
.map(err => regex.exec(err)) .map(function (err) { return regex.exec(err); })
.filter(match => !!match) .filter(function (match) { return !!match; })
.map(x => x) .map(function (_a) {
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message })); var path = _a[1], line = _a[2], column = _a[3], message = _a[4];
return ({ path: path, line: parseInt(line), column: parseInt(column), message: message });
});
try { try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages)); fs.writeFileSync(buildLogPath, JSON.stringify(messages));
} }
catch (err) { catch (err) {
//noop //noop
} }
util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime) + ' ms')}`); util.log("Finished " + util.colors.green('compilation') + " with " + errors.length + " errors after " + util.colors.magenta((new Date().getTime() - startTime) + ' ms'));
} }
function createReporter() { function createReporter() {
const errors = []; var errors = [];
allErrors.push(errors); allErrors.push(errors);
const result = (err) => errors.push(err); var ReportFunc = /** @class */ (function () {
result.hasErrors = () => errors.length > 0; function ReportFunc(err) {
result.end = (emitError) => { errors.push(err);
errors.length = 0; }
onStart(); ReportFunc.hasErrors = function () {
return es.through(undefined, function () { return errors.length > 0;
onEnd(); };
if (emitError && errors.length > 0) { ReportFunc.end = function (emitError) {
if (!errors.__logged__) { errors.length = 0;
log(); onStart();
return es.through(null, function () {
onEnd();
if (emitError && errors.length > 0) {
errors.__logged__ = true;
if (!errors.__logged__) {
log();
}
var err = new Error("Found " + errors.length + " errors");
err.__reporter__ = true;
this.emit('error', err);
} }
errors.__logged__ = true; else {
const err = new Error(`Found ${errors.length} errors`); this.emit('end');
err.__reporter__ = true; }
this.emit('error', err); });
} };
else { return ReportFunc;
this.emit('end'); }());
} return ReportFunc;
});
};
return result;
} }
exports.createReporter = createReporter; exports.createReporter = createReporter;

View File

@@ -12,7 +12,7 @@ import * as fs from 'fs';
import * as path from 'path'; import * as path from 'path';
const allErrors: string[][] = []; const allErrors: string[][] = [];
let startTime: number | null = null; let startTime: number = null;
let count = 0; let count = 0;
function onStart(): void { function onStart(): void {
@@ -55,7 +55,6 @@ function log(): void {
const messages = errors const messages = errors
.map(err => regex.exec(err)) .map(err => regex.exec(err))
.filter(match => !!match) .filter(match => !!match)
.map(x => x as string[])
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message })); .map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try { try {
@@ -65,7 +64,7 @@ function log(): void {
//noop //noop
} }
util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime!) + ' ms')}`); util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime) + ' ms')}`);
} }
export interface IReporter { export interface IReporter {
@@ -78,32 +77,38 @@ export function createReporter(): IReporter {
const errors: string[] = []; const errors: string[] = [];
allErrors.push(errors); allErrors.push(errors);
const result = (err: string) => errors.push(err); class ReportFunc {
constructor(err: string) {
errors.push(err);
}
result.hasErrors = () => errors.length > 0; static hasErrors(): boolean {
return errors.length > 0;
}
result.end = (emitError: boolean): NodeJS.ReadWriteStream => { static end(emitError: boolean): NodeJS.ReadWriteStream {
errors.length = 0; errors.length = 0;
onStart(); onStart();
return es.through(undefined, function () { return es.through(null, function () {
onEnd(); onEnd();
if (emitError && errors.length > 0) { if (emitError && errors.length > 0) {
if (!(errors as any).__logged__) { (errors as any).__logged__ = true;
log();
if (!(errors as any).__logged__) {
log();
}
const err = new Error(`Found ${errors.length} errors`);
(err as any).__reporter__ = true;
this.emit('error', err);
} else {
this.emit('end');
} }
});
}
}
(errors as any).__logged__ = true; return <IReporter><any>ReportFunc;
const err = new Error(`Found ${errors.length} errors`);
(err as any).__reporter__ = true;
this.emit('error', err);
} else {
this.emit('end');
}
});
};
return result;
} }

View File

@@ -5,51 +5,35 @@
'use strict'; 'use strict';
var snaps; var snaps;
(function (snaps) { (function (snaps) {
const fs = require('fs'); var fs = require('fs');
const path = require('path'); var path = require('path');
const os = require('os'); var os = require('os');
const cp = require('child_process'); var cp = require('child_process');
const mksnapshot = path.join(__dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'}`); var mksnapshot = path.join(__dirname, "../../node_modules/.bin/" + (process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'));
const product = require('../../product.json'); var product = require('../../product.json');
const arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1]; var arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
// //
let loaderFilepath; var loaderFilepath;
let startupBlobFilepath; var startupBlobFilepath;
switch (process.platform) { switch (process.platform) {
case 'darwin': case 'darwin':
loaderFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Resources/app/out/vs/loader.js`; loaderFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Resources/app/out/vs/loader.js";
startupBlobFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin`; startupBlobFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin";
break; break;
case 'win32': case 'win32':
case 'linux': case 'linux':
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`; loaderFilepath = "VSCode-" + process.platform + "-" + arch + "/resources/app/out/vs/loader.js";
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`; startupBlobFilepath = "VSCode-" + process.platform + "-" + arch + "/snapshot_blob.bin";
break;
default:
throw new Error('Unknown platform');
} }
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath); loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath); startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
snapshotLoader(loaderFilepath, startupBlobFilepath); snapshotLoader(loaderFilepath, startupBlobFilepath);
function snapshotLoader(loaderFilepath, startupBlobFilepath) { function snapshotLoader(loaderFilepath, startupBlobFilepath) {
const inputFile = fs.readFileSync(loaderFilepath); var inputFile = fs.readFileSync(loaderFilepath);
const wrappedInputFile = ` var wrappedInputFile = "\n\t\tvar Monaco_Loader_Init;\n\t\t(function() {\n\t\t\tvar doNotInitLoader = true;\n\t\t\t" + inputFile.toString() + ";\n\t\t\tMonaco_Loader_Init = function() {\n\t\t\t\tAMDLoader.init();\n\t\t\t\tCSSLoaderPlugin.init();\n\t\t\t\tNLSLoaderPlugin.init();\n\n\t\t\t\treturn { define, require };\n\t\t\t}\n\t\t})();\n\t\t";
var Monaco_Loader_Init; var wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
(function() {
var doNotInitLoader = true;
${inputFile.toString()};
Monaco_Loader_Init = function() {
AMDLoader.init();
CSSLoaderPlugin.init();
NLSLoaderPlugin.init();
return { define, require };
}
})();
`;
const wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
console.log(wrappedInputFilepath); console.log(wrappedInputFilepath);
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile); fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]); cp.execFileSync(mksnapshot, [wrappedInputFilepath, "--startup_blob", startupBlobFilepath]);
} }
})(snaps || (snaps = {})); })(snaps || (snaps = {}));

View File

@@ -30,10 +30,6 @@ namespace snaps {
case 'linux': case 'linux':
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`; loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`; startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
break;
default:
throw new Error('Unknown platform');
} }
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath); loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);

View File

@@ -4,273 +4,201 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript"); var ts = require("typescript");
const fs = require("fs"); var fs = require("fs");
const path = require("path"); var path = require("path");
const tss = require("./treeshaking"); var REPO_ROOT = path.join(__dirname, '../../');
const REPO_ROOT = path.join(__dirname, '../../'); var SRC_DIR = path.join(REPO_ROOT, 'src');
const SRC_DIR = path.join(REPO_ROOT, 'src'); var OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
let dirCache = {}; function createESMSourcesAndResources(options) {
function writeFile(filePath, contents) { var OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
function ensureDirs(dirPath) { var OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
if (dirCache[dirPath]) { var in_queue = Object.create(null);
var queue = [];
var enqueue = function (module) {
if (in_queue[module]) {
return; return;
} }
dirCache[dirPath] = true; in_queue[module] = true;
ensureDirs(path.dirname(dirPath)); queue.push(module);
if (fs.existsSync(dirPath)) { };
var seenDir = {};
var createDirectoryRecursive = function (dir) {
if (seenDir[dir]) {
return; return;
} }
fs.mkdirSync(dirPath); var lastSlash = dir.lastIndexOf('/');
} if (lastSlash === -1) {
ensureDirs(path.dirname(filePath)); lastSlash = dir.lastIndexOf('\\');
fs.writeFileSync(filePath, contents);
}
function extractEditor(options) {
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
let compilerOptions;
if (tsConfig.extends) {
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
}
else {
compilerOptions = tsConfig.compilerOptions;
}
tsConfig.compilerOptions = compilerOptions;
compilerOptions.noUnusedLocals = false;
compilerOptions.preserveConstEnums = false;
compilerOptions.declaration = false;
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
delete compilerOptions.types;
delete tsConfig.extends;
tsConfig.exclude = [];
options.compilerOptions = compilerOptions;
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
} }
} if (lastSlash !== -1) {
let copied = {}; createDirectoryRecursive(dir.substring(0, lastSlash));
const copyFile = (fileName) => {
if (copied[fileName]) {
return;
} }
copied[fileName] = true; seenDir[dir] = true;
const srcPath = path.join(options.sourcesRoot, fileName); try {
const dstPath = path.join(options.destRoot, fileName); fs.mkdirSync(dir);
writeFile(dstPath, fs.readFileSync(srcPath)); }
catch (err) { }
}; };
const writeOutputFile = (fileName, contents) => { seenDir[REPO_ROOT] = true;
writeFile(path.join(options.destRoot, fileName), contents); var toggleComments = function (fileContents) {
var lines = fileContents.split(/\r\n|\r|\n/);
var mode = 0;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
return lines.join('\n');
}; };
for (let fileName in result) { var write = function (filePath, contents) {
if (result.hasOwnProperty(fileName)) { var absoluteFilePath;
const fileContents = result[fileName]; if (/\.ts$/.test(filePath)) {
const info = ts.preProcessFile(fileContents); absoluteFilePath = path.join(OUT_FOLDER, filePath);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFileName = info.importedFiles[i].fileName;
let importedFilePath;
if (/^vs\/css!/.test(importedFileName)) {
importedFilePath = importedFileName.substr('vs/css!'.length) + '.css';
}
else {
importedFilePath = importedFileName;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilePath)) {
importedFilePath = path.join(path.dirname(fileName), importedFilePath);
}
if (/\.css$/.test(importedFilePath)) {
transportCSS(importedFilePath, copyFile, writeOutputFile);
}
else {
if (fs.existsSync(path.join(options.sourcesRoot, importedFilePath + '.js'))) {
copyFile(importedFilePath + '.js');
}
}
}
} }
} else {
delete tsConfig.compilerOptions.moduleResolution; absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
[
'vs/css.build.js',
'vs/css.d.ts',
'vs/css.js',
'vs/loader.js',
'vs/nls.build.js',
'vs/nls.d.ts',
'vs/nls.js',
'vs/nls.mock.ts',
].forEach(copyFile);
}
exports.extractEditor = extractEditor;
function createESMSourcesAndResources2(options) {
const SRC_FOLDER = path.join(REPO_ROOT, options.srcFolder);
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const getDestAbsoluteFilePath = (file) => {
let dest = options.renames[file.replace(/\\/g, '/')] || file;
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`);
} }
if (/\.ts$/.test(dest)) { createDirectoryRecursive(path.dirname(absoluteFilePath));
return path.join(OUT_FOLDER, dest); if (/(\.ts$)|(\.js$)/.test(filePath)) {
}
return path.join(OUT_RESOURCES_FOLDER, dest);
};
const allFiles = walkDirRecursive(SRC_FOLDER);
for (const file of allFiles) {
if (options.ignores.indexOf(file.replace(/\\/g, '/')) >= 0) {
continue;
}
if (file === 'tsconfig.json') {
const tsConfig = JSON.parse(fs.readFileSync(path.join(SRC_FOLDER, file)).toString());
tsConfig.compilerOptions.module = 'es6';
tsConfig.compilerOptions.outDir = path.join(path.relative(OUT_FOLDER, OUT_RESOURCES_FOLDER), 'vs').replace(/\\/g, '/');
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
}
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
}
if (/\.ts$/.test(file)) {
// Transform the .ts file
let fileContents = fs.readFileSync(path.join(SRC_FOLDER, file)).toString();
const info = ts.preProcessFile(fileContents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFilename = info.importedFiles[i].fileName;
const pos = info.importedFiles[i].pos;
const end = info.importedFiles[i].end;
let importedFilepath;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
}
else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(file), importedFilepath);
}
let relativePath;
if (importedFilepath === path.dirname(file).replace(/\\/g, '/')) {
relativePath = '../' + path.basename(path.dirname(file));
}
else if (importedFilepath === path.dirname(path.dirname(file)).replace(/\\/g, '/')) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(file)));
}
else {
relativePath = path.relative(path.dirname(file), importedFilepath);
}
relativePath = relativePath.replace(/\\/g, '/');
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1));
}
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return `import * as ${m1} from ${m2};`;
});
write(getDestAbsoluteFilePath(file), fileContents);
continue;
}
console.log(`UNKNOWN FILE: ${file}`);
}
function walkDirRecursive(dir) {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
function _walkDirRecursive(dir, result, trimPos) {
const files = fs.readdirSync(dir);
for (let i = 0; i < files.length; i++) {
const file = path.join(dir, files[i]);
if (fs.statSync(file).isDirectory()) {
_walkDirRecursive(file, result, trimPos);
}
else {
result.push(file.substr(trimPos));
}
}
}
function write(absoluteFilePath, contents) {
if (/(\.ts$)|(\.js$)/.test(absoluteFilePath)) {
contents = toggleComments(contents.toString()); contents = toggleComments(contents.toString());
} }
writeFile(absoluteFilePath, contents); fs.writeFileSync(absoluteFilePath, contents);
function toggleComments(fileContents) { };
let lines = fileContents.split(/\r\n|\r|\n/); options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
let mode = 0; while (queue.length > 0) {
for (let i = 0; i < lines.length; i++) { var module_1 = queue.shift();
const line = lines[i]; if (transportCSS(options, module_1, enqueue, write)) {
if (mode === 0) { continue;
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
return lines.join('\n');
} }
if (transportResource(options, module_1, enqueue, write)) {
continue;
}
if (transportDTS(options, module_1, enqueue, write)) {
continue;
}
var filename = void 0;
if (options.redirects[module_1]) {
filename = path.join(SRC_DIR, options.redirects[module_1] + '.ts');
}
else {
filename = path.join(SRC_DIR, module_1 + '.ts');
}
var fileContents = fs.readFileSync(filename).toString();
var info = ts.preProcessFile(fileContents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFilename = info.importedFiles[i].fileName;
var pos = info.importedFiles[i].pos;
var end = info.importedFiles[i].end;
var importedFilepath = void 0;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
}
else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module_1), importedFilepath);
}
enqueue(importedFilepath);
var relativePath = void 0;
if (importedFilepath === path.dirname(module_1)) {
relativePath = '../' + path.basename(path.dirname(module_1));
}
else if (importedFilepath === path.dirname(path.dirname(module_1))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module_1)));
}
else {
relativePath = path.relative(path.dirname(module_1), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1));
}
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return "import * as " + m1 + " from " + m2 + ";";
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module_1 + '.ts', fileContents);
} }
var esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": []
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
var monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
} }
exports.createESMSourcesAndResources2 = createESMSourcesAndResources2; exports.createESMSourcesAndResources = createESMSourcesAndResources;
function transportCSS(module, enqueue, write) { function transportCSS(options, module, enqueue, write) {
if (!/\.css/.test(module)) { if (!/\.css/.test(module)) {
return false; return false;
} }
const filename = path.join(SRC_DIR, module); var filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString(); var fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148 var inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336 var inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit); var newContents = _rewriteOrInlineUrls(filename, fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents); write(module, newContents);
return true; return true;
function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) { function _rewriteOrInlineUrls(originalFileFSPath, contents, forceBase64, inlineByteLimit) {
return _replaceURL(contents, (url) => { return _replaceURL(contents, function (url) {
let imagePath = path.join(path.dirname(module), url); var imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath)); var fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) { if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png'; var MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64'); var DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) { if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString() var newText = fileContents.toString()
.replace(/"/g, '\'') .replace(/"/g, '\'')
.replace(/</g, '%3C') .replace(/</g, '%3C')
.replace(/>/g, '%3E') .replace(/>/g, '%3E')
.replace(/&/g, '%26') .replace(/&/g, '%26')
.replace(/#/g, '%23') .replace(/#/g, '%23')
.replace(/\s+/g, ' '); .replace(/\s+/g, ' ');
let encodedData = ',' + newText; var encodedData = ',' + newText;
if (encodedData.length < DATA.length) { if (encodedData.length < DATA.length) {
DATA = encodedData; DATA = encodedData;
} }
@@ -283,8 +211,12 @@ function transportCSS(module, enqueue, write) {
} }
function _replaceURL(contents, replacer) { function _replaceURL(contents, replacer) {
// Use ")" as the terminator as quotes are oftentimes not used at all // Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_, ...matches) => { return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, function (_) {
let url = matches[0]; var matches = [];
for (var _i = 1; _i < arguments.length; _i++) {
matches[_i - 1] = arguments[_i];
}
var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured) // Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') { if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1); url = url.substring(1);
@@ -307,3 +239,27 @@ function transportCSS(module, enqueue, write) {
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle; return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
} }
} }
/**
 * Copies .svg resources verbatim from the source tree to the output.
 * @returns {boolean} true when the module was handled as a resource,
 *   false so another transport function can claim it
 */
function transportResource(options, module, enqueue, write) {
    const isSvg = /\.svg/.test(module);
    if (isSvg) {
        const contents = fs.readFileSync(path.join(SRC_DIR, module));
        write(module, contents);
        return true;
    }
    return false;
}
/**
 * Transports a handwritten module (one shipped as a .d.ts + .js pair
 * instead of a .ts source): copies the .d.ts and the matching .js,
 * honoring any configured redirect for the .js side.
 * @returns {boolean} true when the module was handled as a d.ts pair
 */
function transportDTS(options, module, enqueue, write) {
    // A redirect that points at a real .ts file is handled by the regular
    // TypeScript transport path, not here.
    if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
        return false;
    }
    if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
        return false;
    }
    write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
    // (an unused `var filename;` declaration was removed here)
    if (options.redirects[module]) {
        write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
    }
    else {
        write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
    }
    return true;
}

View File

@@ -6,281 +6,199 @@
import * as ts from 'typescript'; import * as ts from 'typescript';
import * as fs from 'fs'; import * as fs from 'fs';
import * as path from 'path'; import * as path from 'path';
import * as tss from './treeshaking';
const REPO_ROOT = path.join(__dirname, '../../'); const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src'); const SRC_DIR = path.join(REPO_ROOT, 'src');
const OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
let dirCache: { [dir: string]: boolean; } = {}; export interface IOptions {
entryPoints: string[];
/**
 * Writes `contents` to `filePath`, creating any missing parent directories
 * first. Directories already handled are remembered in the module-level
 * `dirCache` so repeated writes into the same tree skip the fs checks.
 */
function writeFile(filePath: string, contents: Buffer | string): void {
	const ensureDirs = (dirPath: string): void => {
		if (dirCache[dirPath]) {
			return;
		}
		dirCache[dirPath] = true;
		// Create ancestors first, then this directory if it is missing.
		ensureDirs(path.dirname(dirPath));
		if (!fs.existsSync(dirPath)) {
			fs.mkdirSync(dirPath);
		}
	};
	ensureDirs(path.dirname(filePath));
	fs.writeFileSync(filePath, contents);
}
/**
 * Extracts a standalone-editor source tree: runs the tree shaker
 * (`tss.shake`) over `options.sourcesRoot`, writes the surviving sources
 * into `options.destRoot`, copies the CSS/JS assets those sources import,
 * and emits an adjusted tsconfig.json plus the loader/nls/css runtime files.
 */
export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: string }): void {
	const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
	// Flatten an `extends`-based tsconfig into a single compilerOptions object.
	let compilerOptions: { [key: string]: any };
	if (tsConfig.extends) {
		compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
	} else {
		compilerOptions = tsConfig.compilerOptions;
	}
	tsConfig.compilerOptions = compilerOptions;
	// Tweak options for the shaking pass: tolerate unused locals in shaken
	// output, skip declarations, and force Classic resolution.
	compilerOptions.noUnusedLocals = false;
	compilerOptions.preserveConstEnums = false;
	compilerOptions.declaration = false;
	compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
	delete compilerOptions.types;
	delete tsConfig.extends;
	tsConfig.exclude = [];
	options.compilerOptions = compilerOptions;
	// First pass: write every file the tree shaker kept.
	let result = tss.shake(options);
	for (let fileName in result) {
		if (result.hasOwnProperty(fileName)) {
			writeFile(path.join(options.destRoot, fileName), result[fileName]);
		}
	}
	// Copies a file from sourcesRoot to destRoot at most once.
	let copied: { [fileName: string]: boolean; } = {};
	const copyFile = (fileName: string) => {
		if (copied[fileName]) {
			return;
		}
		copied[fileName] = true;
		const srcPath = path.join(options.sourcesRoot, fileName);
		const dstPath = path.join(options.destRoot, fileName);
		writeFile(dstPath, fs.readFileSync(srcPath));
	};
	const writeOutputFile = (fileName: string, contents: string | Buffer) => {
		writeFile(path.join(options.destRoot, fileName), contents);
	};
	// Second pass: scan the kept files' imports and transport their
	// non-TypeScript dependencies (CSS via transportCSS, plain .js verbatim).
	for (let fileName in result) {
		if (result.hasOwnProperty(fileName)) {
			const fileContents = result[fileName];
			const info = ts.preProcessFile(fileContents);
			for (let i = info.importedFiles.length - 1; i >= 0; i--) {
				const importedFileName = info.importedFiles[i].fileName;
				let importedFilePath: string;
				if (/^vs\/css!/.test(importedFileName)) {
					// `vs/css!foo` loader-plugin imports refer to foo.css.
					importedFilePath = importedFileName.substr('vs/css!'.length) + '.css';
				} else {
					importedFilePath = importedFileName;
				}
				if (/(^\.\/)|(^\.\.\/)/.test(importedFilePath)) {
					// Resolve relative imports against the importing file's dir.
					importedFilePath = path.join(path.dirname(fileName), importedFilePath);
				}
				if (/\.css$/.test(importedFilePath)) {
					transportCSS(importedFilePath, copyFile, writeOutputFile);
				} else {
					if (fs.existsSync(path.join(options.sourcesRoot, importedFilePath + '.js'))) {
						copyFile(importedFilePath + '.js');
					}
				}
			}
		}
	}
	// The forced Classic resolution was only for shaking; drop it from the
	// tsconfig that ships with the extracted sources.
	delete tsConfig.compilerOptions.moduleResolution;
	writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
	// Runtime support files (AMD loader, nls, css plugin) copied verbatim.
	[
		'vs/css.build.js',
		'vs/css.d.ts',
		'vs/css.js',
		'vs/loader.js',
		'vs/nls.build.js',
		'vs/nls.d.ts',
		'vs/nls.js',
		'vs/nls.mock.ts',
	].forEach(copyFile);
}
export interface IOptions2 {
srcFolder: string;
outFolder: string; outFolder: string;
outResourcesFolder: string; outResourcesFolder: string;
ignores: string[]; redirects: { [module: string]: string; };
renames: { [filename: string]: string; };
} }
export function createESMSourcesAndResources2(options: IOptions2): void { export function createESMSourcesAndResources(options: IOptions): void {
const SRC_FOLDER = path.join(REPO_ROOT, options.srcFolder);
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder); const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder); const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const getDestAbsoluteFilePath = (file: string): string => { let in_queue: { [module: string]: boolean; } = Object.create(null);
let dest = options.renames[file.replace(/\\/g, '/')] || file; let queue: string[] = [];
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`); const enqueue = (module: string) => {
if (in_queue[module]) {
return;
} }
if (/\.ts$/.test(dest)) { in_queue[module] = true;
return path.join(OUT_FOLDER, dest); queue.push(module);
}
return path.join(OUT_RESOURCES_FOLDER, dest);
}; };
const allFiles = walkDirRecursive(SRC_FOLDER); const seenDir: { [key: string]: boolean; } = {};
for (const file of allFiles) { const createDirectoryRecursive = (dir: string) => {
if (seenDir[dir]) {
if (options.ignores.indexOf(file.replace(/\\/g, '/')) >= 0) { return;
continue;
} }
if (file === 'tsconfig.json') { let lastSlash = dir.lastIndexOf('/');
const tsConfig = JSON.parse(fs.readFileSync(path.join(SRC_FOLDER, file)).toString()); if (lastSlash === -1) {
tsConfig.compilerOptions.module = 'es6'; lastSlash = dir.lastIndexOf('\\');
tsConfig.compilerOptions.outDir = path.join(path.relative(OUT_FOLDER, OUT_RESOURCES_FOLDER), 'vs').replace(/\\/g, '/');
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
} }
if (lastSlash !== -1) {
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) { createDirectoryRecursive(dir.substring(0, lastSlash));
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
} }
seenDir[dir] = true;
try { fs.mkdirSync(dir); } catch (err) { }
};
if (/\.ts$/.test(file)) { seenDir[REPO_ROOT] = true;
// Transform the .ts file
let fileContents = fs.readFileSync(path.join(SRC_FOLDER, file)).toString();
const info = ts.preProcessFile(fileContents); const toggleComments = (fileContents: string) => {
let lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
for (let i = info.importedFiles.length - 1; i >= 0; i--) { if (mode === 0) {
const importedFilename = info.importedFiles[i].fileName; if (/\/\/ ESM-comment-begin/.test(line)) {
const pos = info.importedFiles[i].pos; mode = 1;
const end = info.importedFiles[i].end; continue;
let importedFilepath: string;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
} else {
importedFilepath = importedFilename;
} }
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) { if (/\/\/ ESM-uncomment-begin/.test(line)) {
importedFilepath = path.join(path.dirname(file), importedFilepath); mode = 2;
continue;
} }
continue;
let relativePath: string;
if (importedFilepath === path.dirname(file).replace(/\\/g, '/')) {
relativePath = '../' + path.basename(path.dirname(file));
} else if (importedFilepath === path.dirname(path.dirname(file)).replace(/\\/g, '/')) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(file)));
} else {
relativePath = path.relative(path.dirname(file), importedFilepath);
}
relativePath = relativePath.replace(/\\/g, '/');
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (
fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1)
);
} }
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) { if (mode === 1) {
return `import * as ${m1} from ${m2};`; if (/\/\/ ESM-comment-end/.test(line)) {
}); mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
write(getDestAbsoluteFilePath(file), fileContents); if (mode === 2) {
continue; if (/\/\/ ESM-uncomment-end/.test(line)) {
} mode = 0;
continue;
console.log(`UNKNOWN FILE: ${file}`); }
} lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
function walkDirRecursive(dir: string): string[] {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result: string[] = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
function _walkDirRecursive(dir: string, result: string[], trimPos: number): void {
const files = fs.readdirSync(dir);
for (let i = 0; i < files.length; i++) {
const file = path.join(dir, files[i]);
if (fs.statSync(file).isDirectory()) {
_walkDirRecursive(file, result, trimPos);
} else {
result.push(file.substr(trimPos));
} }
} }
}
function write(absoluteFilePath: string, contents: string | Buffer): void { return lines.join('\n');
if (/(\.ts$)|(\.js$)/.test(absoluteFilePath)) { };
const write = (filePath: string, contents: string | Buffer) => {
let absoluteFilePath: string;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
} else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString()); contents = toggleComments(contents.toString());
} }
writeFile(absoluteFilePath, contents); fs.writeFileSync(absoluteFilePath, contents);
};
function toggleComments(fileContents: string): string { options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
let lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) { while (queue.length > 0) {
if (/\/\/ ESM-comment-end/.test(line)) { const module = queue.shift();
mode = 0; if (transportCSS(options, module, enqueue, write)) {
continue; continue;
} }
lines[i] = '// ' + line; if (transportResource(options, module, enqueue, write)) {
continue; continue;
} }
if (transportDTS(options, module, enqueue, write)) {
continue;
}
if (mode === 2) { let filename: string;
if (/\/\/ ESM-uncomment-end/.test(line)) { if (options.redirects[module]) {
mode = 0; filename = path.join(SRC_DIR, options.redirects[module] + '.ts');
continue; } else {
} filename = path.join(SRC_DIR, module + '.ts');
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) { }
return indent; let fileContents = fs.readFileSync(filename).toString();
});
} const info = ts.preProcessFile(fileContents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFilename = info.importedFiles[i].fileName;
const pos = info.importedFiles[i].pos;
const end = info.importedFiles[i].end;
let importedFilepath: string;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
} else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module), importedFilepath);
} }
return lines.join('\n'); enqueue(importedFilepath);
let relativePath: string;
if (importedFilepath === path.dirname(module)) {
relativePath = '../' + path.basename(path.dirname(module));
} else if (importedFilepath === path.dirname(path.dirname(module))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module)));
} else {
relativePath = path.relative(path.dirname(module), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (
fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1)
);
} }
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return `import * as ${m1} from ${m2};`;
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module + '.ts', fileContents);
} }
const esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": [
]
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
const monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
} }
function transportCSS(module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean { function transportCSS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
if (!/\.css/.test(module)) { if (!/\.css/.test(module)) {
return false; return false;
@@ -291,11 +209,11 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148 const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336 const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit); const newContents = _rewriteOrInlineUrls(filename, fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents); write(module, newContents);
return true; return true;
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean, inlineByteLimit: number): string { function _rewriteOrInlineUrls(originalFileFSPath: string, contents: string, forceBase64: boolean, inlineByteLimit: number): string {
return _replaceURL(contents, (url) => { return _replaceURL(contents, (url) => {
let imagePath = path.join(path.dirname(module), url); let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath)); let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
@@ -329,7 +247,7 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
function _replaceURL(contents: string, replacer: (url: string) => string): string { function _replaceURL(contents: string, replacer: (url: string) => string): string {
// Use ")" as the terminator as quotes are oftentimes not used at all // Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_: string, ...matches: string[]) => { return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_: string, ...matches: string[]) => {
let url = matches[0]; var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured) // Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') { if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1); url = url.substring(1);
@@ -355,3 +273,33 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle; return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
} }
} }
/**
 * Copies .svg resources verbatim from the source tree to the output.
 * Returns false for non-resource modules so another transport can claim them.
 */
function transportResource(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
	if (/\.svg/.test(module)) {
		const contents = fs.readFileSync(path.join(SRC_DIR, module));
		write(module, contents);
		return true;
	}
	return false;
}
/**
 * Transports a handwritten module (shipped as a .d.ts + .js pair instead of
 * a .ts source): copies the .d.ts and the matching .js, honoring any
 * configured redirect for the .js side.
 * Returns true when the module was handled as a d.ts pair.
 */
function transportDTS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
	// A redirect that points at a real .ts file is handled by the regular
	// TypeScript transport path, not here.
	if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
		return false;
	}
	if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
		return false;
	}
	write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
	// (an unused `let filename: string;` declaration was removed here)
	if (options.redirects[module]) {
		write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
	} else {
		write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
	}
	return true;
}

View File

@@ -1,135 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const util = require("gulp-util");
const appInsights = require("applicationinsights");
/**
 * Aggregated size statistics for a named group of build-output files.
 * Instances are stored in the module-level `_entries` map and rendered by
 * `toString`.
 */
class Entry {
    /**
     * @param {string} name group name
     * @param {number} totalCount number of files counted so far
     * @param {number} totalSize total bytes counted so far
     */
    constructor(name, totalCount, totalSize) {
        this.name = name;
        this.totalCount = totalCount;
        this.totalSize = totalSize;
    }
    /**
     * Renders the entry. Without `pretty`, sizes are reported in raw bytes;
     * with `pretty`, the name is colored and the size is rounded to KB.
     * NOTE: the divisor was `1204` (a typo for 1024, since the label is
     * "KB"); fixed to 1024.
     * @param {boolean} [pretty] colored, KB-rounded output when true
     * @returns {string}
     */
    toString(pretty) {
        if (!pretty) {
            if (this.totalCount === 1) {
                return `${this.name}: ${this.totalSize} bytes`;
            }
            else {
                return `${this.name}: ${this.totalCount} files with ${this.totalSize} bytes`;
            }
        }
        else {
            if (this.totalCount === 1) {
                return `Stats for '${util.colors.grey(this.name)}': ${Math.round(this.totalSize / 1024)}KB`;
            }
            else {
                // Color the file count green below 100 files, red at or above.
                const count = this.totalCount < 100
                    ? util.colors.green(this.totalCount.toString())
                    : util.colors.red(this.totalCount.toString());
                return `Stats for '${util.colors.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1024)}KB`;
            }
        }
    }
}
const _entries = new Map();
/**
 * Creates a pass-through stream that counts the files and bytes flowing
 * through it under the given `group` name, recording the totals in the
 * module-level `_entries` map (consumed later by `submitAllStats`).
 * @param {string} group name under which to aggregate the stats
 * @param {boolean} [log] when true, prints the totals once the stream ends
 */
function createStatsStream(group, log) {
    const entry = new Entry(group, 0, 0);
    _entries.set(entry.name, entry);
    return es.through(function (data) {
        const file = data;
        if (typeof file.path === 'string') {
            entry.totalCount += 1;
            if (Buffer.isBuffer(file.contents)) {
                entry.totalSize += file.contents.length;
            }
            else if (file.stat && typeof file.stat.size === 'number') {
                entry.totalSize += file.stat.size;
            }
            else {
                // funky file: no buffer contents and no stat size — counted
                // but contributes 0 bytes
            }
        }
        this.emit('data', data);
    }, function () {
        if (log) {
            // Reuse Entry#toString(true) instead of duplicating its
            // formatting inline (the previous code repeated it verbatim).
            util.log(entry.toString(true));
        }
        this.emit('end');
    });
}
exports.createStatsStream = createStatsStream;
/**
 * Prints all collected build-size stats and, when the product config has an
 * `aiConfig.asimovKey`, sends them as a single Application Insights event.
 * @param {any} productJson product configuration (read for aiConfig.asimovKey)
 * @param {string} commit commit hash attached to the telemetry event
 * @returns {Promise<boolean>} resolves true after a successful flush,
 *   false when telemetry is disabled or sending failed (never rejects)
 */
function submitAllStats(productJson, commit) {
    const sorted = [];
    // move entries for single files to the front
    _entries.forEach(value => {
        if (value.totalCount === 1) {
            sorted.unshift(value);
        }
        else {
            sorted.push(value);
        }
    });
    // print to console
    for (const entry of sorted) {
        console.log(entry.toString(true));
    }
    // send data as telemetry event only when the
    // product is configured to send telemetry
    if (!productJson || !productJson.aiConfig || typeof productJson.aiConfig.asimovKey !== 'string') {
        return Promise.resolve(false);
    }
    return new Promise(resolve => {
        try {
            // One property per group: total bytes and file counts keyed by name.
            const sizes = {};
            const counts = {};
            for (const entry of sorted) {
                sizes[entry.name] = entry.totalSize;
                counts[entry.name] = entry.totalCount;
            }
            // Disable all automatic collection — this client is used for a
            // single hand-crafted event only.
            appInsights.setup(productJson.aiConfig.asimovKey)
                .setAutoCollectConsole(false)
                .setAutoCollectExceptions(false)
                .setAutoCollectPerformance(false)
                .setAutoCollectRequests(false)
                .setAutoCollectDependencies(false)
                .setAutoDependencyCorrelation(false)
                .start();
            appInsights.defaultClient.config.endpointUrl = 'https://vortex.data.microsoft.com/collect/v1';
            /* __GDPR__
                "monacoworkbench/packagemetrics" : {
                    "commit" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
                    "size" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
                    "count" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
                }
            */
            appInsights.defaultClient.trackEvent({
                name: 'monacoworkbench/packagemetrics',
                properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
            });
            // Resolve only after the client has flushed the event to the wire.
            appInsights.defaultClient.flush({
                callback: () => {
                    appInsights.dispose();
                    resolve(true);
                }
            });
        }
        catch (err) {
            // Telemetry failures must not fail the build — log and resolve false.
            console.error('ERROR sending build stats as telemetry event!');
            console.error(err);
            resolve(false);
        }
    });
}
exports.submitAllStats = submitAllStats;

View File

@@ -1,147 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as es from 'event-stream';
import * as util from 'gulp-util';
import * as File from 'vinyl';
import * as appInsights from 'applicationinsights';
/**
 * Aggregated size statistics for a named group of build-output files.
 * Instances are stored in the module-level `_entries` map and rendered by
 * `toString`.
 */
class Entry {
	constructor(readonly name: string, public totalCount: number, public totalSize: number) { }

	/**
	 * Renders the entry. Without `pretty`, sizes are reported in raw bytes;
	 * with `pretty`, the name is colored and the size is rounded to KB.
	 * NOTE: the divisor was `1204` (a typo for 1024, since the label is
	 * "KB"); fixed to 1024.
	 */
	toString(pretty?: boolean): string {
		if (!pretty) {
			if (this.totalCount === 1) {
				return `${this.name}: ${this.totalSize} bytes`;
			} else {
				return `${this.name}: ${this.totalCount} files with ${this.totalSize} bytes`;
			}
		} else {
			if (this.totalCount === 1) {
				return `Stats for '${util.colors.grey(this.name)}': ${Math.round(this.totalSize / 1024)}KB`;
			} else {
				// Color the file count green below 100 files, red at or above.
				const count = this.totalCount < 100
					? util.colors.green(this.totalCount.toString())
					: util.colors.red(this.totalCount.toString());
				return `Stats for '${util.colors.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1024)}KB`;
			}
		}
	}
}
const _entries = new Map<string, Entry>();

/**
 * Creates a pass-through stream that counts the files and bytes flowing
 * through it under the given `group` name, recording the totals in the
 * module-level `_entries` map (consumed later by `submitAllStats`).
 * @param group name under which to aggregate the stats
 * @param log when true, prints the totals once the stream ends
 */
export function createStatsStream(group: string, log?: boolean): es.ThroughStream {
	const entry = new Entry(group, 0, 0);
	_entries.set(entry.name, entry);
	return es.through(function (data) {
		const file = data as File;
		if (typeof file.path === 'string') {
			entry.totalCount += 1;
			if (Buffer.isBuffer(file.contents)) {
				entry.totalSize += file.contents.length;
			} else if (file.stat && typeof file.stat.size === 'number') {
				entry.totalSize += file.stat.size;
			} else {
				// funky file: no buffer contents and no stat size — counted
				// but contributes 0 bytes
			}
		}
		this.emit('data', data);
	}, function () {
		if (log) {
			// Reuse Entry#toString(true) instead of duplicating its
			// formatting inline (the previous code repeated it verbatim).
			util.log(entry.toString(true));
		}
		this.emit('end');
	});
}
/**
 * Prints all collected build-size stats and, when the product config has an
 * `aiConfig.asimovKey`, sends them as a single Application Insights event.
 * @param productJson product configuration (read for aiConfig.asimovKey)
 * @param commit commit hash attached to the telemetry event
 * @returns resolves true after a successful flush, false when telemetry is
 *   disabled or sending failed (never rejects)
 */
export function submitAllStats(productJson: any, commit: string): Promise<boolean> {
	const sorted: Entry[] = [];
	// move entries for single files to the front
	_entries.forEach(value => {
		if (value.totalCount === 1) {
			sorted.unshift(value);
		} else {
			sorted.push(value);
		}
	});
	// print to console
	for (const entry of sorted) {
		console.log(entry.toString(true));
	}
	// send data as telemetry event only when the
	// product is configured to send telemetry
	if (!productJson || !productJson.aiConfig || typeof productJson.aiConfig.asimovKey !== 'string') {
		return Promise.resolve(false);
	}
	return new Promise(resolve => {
		try {
			// One property per group: total bytes and file counts keyed by name.
			const sizes: any = {};
			const counts: any = {};
			for (const entry of sorted) {
				sizes[entry.name] = entry.totalSize;
				counts[entry.name] = entry.totalCount;
			}
			// Disable all automatic collection — this client is used for a
			// single hand-crafted event only.
			appInsights.setup(productJson.aiConfig.asimovKey)
				.setAutoCollectConsole(false)
				.setAutoCollectExceptions(false)
				.setAutoCollectPerformance(false)
				.setAutoCollectRequests(false)
				.setAutoCollectDependencies(false)
				.setAutoDependencyCorrelation(false)
				.start();
			appInsights.defaultClient.config.endpointUrl = 'https://vortex.data.microsoft.com/collect/v1';
			/* __GDPR__
				"monacoworkbench/packagemetrics" : {
					"commit" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
					"size" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
					"count" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
				}
			*/
			appInsights.defaultClient.trackEvent({
				name: 'monacoworkbench/packagemetrics',
				properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
			});
			// Resolve only after the client has flushed the event to the wire.
			appInsights.defaultClient.flush({
				callback: () => {
					appInsights.dispose();
					resolve(true);
				}
			});
		} catch (err) {
			// Telemetry failures must not fail the build — log and resolve false.
			console.error('ERROR sending build stats as telemetry event!');
			console.error(err);
			resolve(false);
		}
	});
}

View File

@@ -4,30 +4,30 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const assert = require("assert"); var assert = require("assert");
const i18n = require("../i18n"); var i18n = require("../i18n");
suite('XLF Parser Tests', () => { suite('XLF Parser Tests', function () {
const sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source></trans-unit></body></file></xliff>'; var sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source></trans-unit></body></file></xliff>';
const sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source><target>Кнопка #2 &amp;</target></trans-unit></body></file></xliff>'; var sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source><target>Кнопка #2 &amp;</target></trans-unit></body></file></xliff>';
const originalFilePath = 'vs/base/common/keybinding'; var originalFilePath = 'vs/base/common/keybinding';
const keys = ['key1', 'key2']; var keys = ['key1', 'key2'];
const messages = ['Key #1', 'Key #2 &']; var messages = ['Key #1', 'Key #2 &'];
const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' }; var translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
test('Keys & messages to XLF conversion', () => { test('Keys & messages to XLF conversion', function () {
const xlf = new i18n.XLF('vscode-workbench'); var xlf = new i18n.XLF('vscode-workbench');
xlf.addFile(originalFilePath, keys, messages); xlf.addFile(originalFilePath, keys, messages);
const xlfString = xlf.toString(); var xlfString = xlf.toString();
assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf); assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf);
}); });
test('XLF to keys & messages conversion', () => { test('XLF to keys & messages conversion', function () {
i18n.XLF.parse(sampleTranslatedXlf).then(function (resolvedFiles) { i18n.XLF.parse(sampleTranslatedXlf).then(function (resolvedFiles) {
assert.deepEqual(resolvedFiles[0].messages, translatedMessages); assert.deepEqual(resolvedFiles[0].messages, translatedMessages);
assert.strictEqual(resolvedFiles[0].originalFilePath, originalFilePath); assert.strictEqual(resolvedFiles[0].originalFilePath, originalFilePath);
}); });
}); });
test('JSON file source path to Transifex resource match', () => { test('JSON file source path to Transifex resource match', function () {
const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench'; var editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench';
const platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject }; var platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform); assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib); assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib);
assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor); assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor);

View File

@@ -15,7 +15,7 @@ suite('XLF Parser Tests', () => {
const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' }; const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
test('Keys & messages to XLF conversion', () => { test('Keys & messages to XLF conversion', () => {
const xlf = new i18n.XLF('vscode-workbench'); let xlf = new i18n.XLF('vscode-workbench');
xlf.addFile(originalFilePath, keys, messages); xlf.addFile(originalFilePath, keys, messages);
const xlfString = xlf.toString(); const xlfString = xlf.toString();

View File

@@ -0,0 +1,56 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var assert = require("assert");
var util = require("../util");
// Builds a fake `tagExists` predicate: true iff the tag is in `tags`.
function getMockTagExists(tags) {
    return (tag) => tags.includes(tag);
}
suite('util tests', () => {
    // Runs `fn` and succeeds only if it throws; mirrors the original
    // try/catch/"Expected an exception" pattern of each "invalid" test.
    function expectException(fn) {
        try {
            fn();
        }
        catch (e) {
            // expected
            return;
        }
        throw new Error('Expected an exception');
    }
    test('getPreviousVersion - patch', () => {
        assert.equal(util.getPreviousVersion('1.2.3', getMockTagExists(['1.2.2', '1.2.1', '1.2.0', '1.1.0'])), '1.2.2');
    });
    test('getPreviousVersion - patch invalid', () => {
        expectException(() => util.getPreviousVersion('1.2.2', getMockTagExists(['1.2.0', '1.1.0'])));
    });
    test('getPreviousVersion - minor', () => {
        assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.2', '1.1.3'])), '1.1.3');
        assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.0.0'])), '1.1.0');
    });
    test('getPreviousVersion - minor gap', () => {
        assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.3'])), '1.1.1');
    });
    test('getPreviousVersion - minor invalid', () => {
        expectException(() => util.getPreviousVersion('1.2.0', getMockTagExists(['1.0.0'])));
    });
    test('getPreviousVersion - major', () => {
        assert.equal(util.getPreviousVersion('2.0.0', getMockTagExists(['1.0.0', '1.1.0', '1.2.0', '1.2.1', '1.2.2'])), '1.2.2');
    });
    test('getPreviousVersion - major invalid', () => {
        expectException(() => util.getPreviousVersion('3.0.0', getMockTagExists(['1.0.0'])));
    });
});

View File

@@ -0,0 +1,79 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import assert = require('assert');
import util = require('../util');
// Builds a fake `tagExists` predicate: true iff the tag is in `tags`.
function getMockTagExists(tags: string[]) {
    return (tag: string): boolean => tags.includes(tag);
}
suite('util tests', () => {
    test('getPreviousVersion - patch', () => {
        assert.equal(
            util.getPreviousVersion('1.2.3', getMockTagExists(['1.2.2', '1.2.1', '1.2.0', '1.1.0'])),
            '1.2.2'
        );
    });
    test('getPreviousVersion - patch invalid', () => {
        // Idiom: assert.throws replaces the manual try/catch/"Expected an exception" pattern.
        assert.throws(() => util.getPreviousVersion('1.2.2', getMockTagExists(['1.2.0', '1.1.0'])));
    });
    test('getPreviousVersion - minor', () => {
        assert.equal(
            util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.2', '1.1.3'])),
            '1.1.3'
        );
        assert.equal(
            util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.0.0'])),
            '1.1.0'
        );
    });
    test('getPreviousVersion - minor gap', () => {
        assert.equal(
            util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.3'])),
            '1.1.1'
        );
    });
    test('getPreviousVersion - minor invalid', () => {
        assert.throws(() => util.getPreviousVersion('1.2.0', getMockTagExists(['1.0.0'])));
    });
    test('getPreviousVersion - major', () => {
        assert.equal(
            util.getPreviousVersion('2.0.0', getMockTagExists(['1.0.0', '1.1.0', '1.2.0', '1.2.1', '1.2.2'])),
            '1.2.2'
        );
    });
    test('getPreviousVersion - major invalid', () => {
        assert.throws(() => util.getPreviousVersion('3.0.0', getMockTagExists(['1.0.0'])));
    });
});

View File

@@ -1,681 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const ts = require("typescript");
const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
var ShakeLevel;
(function (ShakeLevel) {
ShakeLevel[ShakeLevel["Files"] = 0] = "Files";
ShakeLevel[ShakeLevel["InnerFile"] = 1] = "InnerFile";
ShakeLevel[ShakeLevel["ClassMembers"] = 2] = "ClassMembers";
})(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {}));
// Logs one line per diagnostic: optional "<file>: ", optional "- <line>,<col> - ",
// then the JSON-encoded message text.
function printDiagnostics(diagnostics) {
    for (const diag of diagnostics) {
        const parts = [];
        if (diag.file) {
            parts.push(`${diag.file.fileName}: `);
        }
        if (diag.file && diag.start) {
            const { line, character } = diag.file.getLineAndCharacterOfPosition(diag.start);
            parts.push(`- ${line + 1},${character} - `);
        }
        parts.push(JSON.stringify(diag.messageText));
        console.log(parts.join(''));
    }
}
/**
 * Runs the tree shaker: builds the language service, fails fast on any
 * compilation diagnostics (global, then syntactic, then semantic), marks
 * reachable nodes and emits the shaken sources.
 * @param options Tree shaking options.
 * @returns Map from file name to shaken file contents.
 */
function shake(options) {
    const languageService = createTypeScriptLanguageService(options);
    const program = languageService.getProgram();
    // Each source is queried lazily so later (more expensive) diagnostic
    // passes are skipped once an earlier pass reports errors.
    const diagnosticSources = [
        () => program.getGlobalDiagnostics(),
        () => program.getSyntacticDiagnostics(),
        () => program.getSemanticDiagnostics()
    ];
    for (const getDiagnostics of diagnosticSources) {
        const diagnostics = getDiagnostics();
        if (diagnostics.length > 0) {
            printDiagnostics(diagnostics);
            throw new Error(`Compilation Errors encountered.`);
        }
    }
    markNodes(languageService, options);
    return generateResult(languageService, options.shakeLevel);
}
exports.shake = shake;
//#region Discovery, LanguageService & Setup
/**
 * Builds a `ts.LanguageService` over the transitive import closure of the
 * entry points, plus the inline entry points, extra typings and requested
 * TypeScript libs — all served from in-memory maps.
 * @param options Tree shaking options.
 * @returns The configured language service.
 */
function createTypeScriptLanguageService(options) {
    // Discover referenced files
    const FILES = discoverAndReadFiles(options);
    // Add fake usage files
    options.inlineEntryPoints.forEach((inlineEntryPoint, index) => {
        FILES[`inlineEntryPoint.${index}.ts`] = inlineEntryPoint;
    });
    // Add additional typings
    options.typings.forEach((typing) => {
        const filePath = path.join(options.sourcesRoot, typing);
        FILES[typing] = fs.readFileSync(filePath).toString();
    });
    // Resolve libs
    const RESOLVED_LIBS = {};
    options.libs.forEach((filename) => {
        const filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
        // FIX: key each lib by its own name (e.g. `defaultLib:lib.d.ts`).
        // The previous literal key made every lib overwrite the same map entry
        // and broke the host's `getDefaultLibFileName` lookup.
        RESOLVED_LIBS[`defaultLib:${filename}`] = fs.readFileSync(filepath).toString();
    });
    const compilerOptions = ts.convertCompilerOptionsFromJson(options.compilerOptions, options.sourcesRoot).options;
    const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, compilerOptions);
    return ts.createLanguageService(host);
}
/**
 * Read imports and follow them until all files have been handled.
 *
 * Performs a breadth-first walk of the import graph starting from
 * `options.entryPoints`. For each module id it prefers an existing `.d.ts`
 * file (taken verbatim, imports not followed), skips modules backed by a
 * `.js` file, and otherwise reads the `.ts` source (honouring
 * `options.redirects`) and enqueues its imports.
 * @param options Tree shaking options (sourcesRoot, entryPoints, redirects, importIgnorePattern, ...).
 * @returns Map from relative file name (e.g. `vs/foo/bar.ts`) to file contents.
 */
function discoverAndReadFiles(options) {
    const FILES = {};
    // `in_queue` records every module ever enqueued so each is visited once.
    const in_queue = Object.create(null);
    const queue = [];
    const enqueue = (moduleId) => {
        if (in_queue[moduleId]) {
            return;
        }
        in_queue[moduleId] = true;
        queue.push(moduleId);
    };
    options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
    while (queue.length > 0) {
        const moduleId = queue.shift();
        // A hand-written .d.ts wins over the .ts source; its imports are not followed.
        const dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
        if (fs.existsSync(dts_filename)) {
            const dts_filecontents = fs.readFileSync(dts_filename).toString();
            FILES[`${moduleId}.d.ts`] = dts_filecontents;
            continue;
        }
        const js_filename = path.join(options.sourcesRoot, moduleId + '.js');
        if (fs.existsSync(js_filename)) {
            // This is an import for a .js file, so ignore it...
            continue;
        }
        let ts_filename;
        if (options.redirects[moduleId]) {
            // Load the redirected module's source in place of this module id.
            ts_filename = path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts');
        }
        else {
            ts_filename = path.join(options.sourcesRoot, moduleId + '.ts');
        }
        const ts_filecontents = fs.readFileSync(ts_filename).toString();
        // Use the cheap pre-processor (no full parse) to extract import specifiers.
        const info = ts.preProcessFile(ts_filecontents);
        for (let i = info.importedFiles.length - 1; i >= 0; i--) {
            const importedFileName = info.importedFiles[i].fileName;
            if (options.importIgnorePattern.test(importedFileName)) {
                // Ignore vs/css! imports
                continue;
            }
            let importedModuleId = importedFileName;
            if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) {
                // Resolve relative imports against the importing module's directory.
                importedModuleId = path.join(path.dirname(moduleId), importedModuleId);
            }
            enqueue(importedModuleId);
        }
        FILES[`${moduleId}.ts`] = ts_filecontents;
    }
    return FILES;
}
/**
 * A TypeScript language service host backed entirely by in-memory maps:
 * `libs` (keys like `defaultLib:lib.d.ts`) and `files` (discovered sources).
 * Files never change, so script/project versions are constant.
 */
class TypeScriptLanguageServiceHost {
    constructor(libs, files, compilerOptions) {
        this._libs = libs;
        this._files = files;
        this._compilerOptions = compilerOptions;
    }
    // --- language service host ---------------
    getCompilationSettings() {
        return this._compilerOptions;
    }
    getScriptFileNames() {
        // Libs first, then the discovered source files.
        return [...Object.keys(this._libs), ...Object.keys(this._files)];
    }
    getScriptVersion(_fileName) {
        // Content is immutable; a constant version disables invalidation.
        return '1';
    }
    getProjectVersion() {
        return '1';
    }
    getScriptSnapshot(fileName) {
        let contents;
        if (Object.prototype.hasOwnProperty.call(this._files, fileName)) {
            contents = this._files[fileName];
        }
        else if (Object.prototype.hasOwnProperty.call(this._libs, fileName)) {
            contents = this._libs[fileName];
        }
        else {
            // Unknown files resolve to an empty snapshot rather than an error.
            contents = '';
        }
        return ts.ScriptSnapshot.fromString(contents);
    }
    getScriptKind(_fileName) {
        return ts.ScriptKind.TS;
    }
    getCurrentDirectory() {
        return '';
    }
    getDefaultLibFileName(_options) {
        return 'defaultLib:lib.d.ts';
    }
    isDefaultLibFileName(fileName) {
        return fileName === this.getDefaultLibFileName(this._compilerOptions);
    }
}
//#endregion
//#region Tree Shaking
// Tri-color marking used by the tree shaker:
//   White = not reached, Gray = possibly reachable (pending decision),
//   Black = reachable (will be kept in the output).
var NodeColor;
(function (NodeColor) {
    NodeColor[NodeColor["White"] = 0] = "White";
    NodeColor[NodeColor["Gray"] = 1] = "Gray";
    NodeColor[NodeColor["Black"] = 2] = "Black";
})(NodeColor || (NodeColor = {}));
// Reads the marking color stashed on the AST node; unmarked nodes are White.
function getColor(node) {
    return typeof node.$$$color === 'number' ? node.$$$color : 0 /* White */;
}
// Stashes the marking color directly on the AST node.
function setColor(target, nextColor) {
    target.$$$color = nextColor;
}
// True if this node, or any ancestor on the parent chain, is marked Black.
function nodeOrParentIsBlack(node) {
    for (let current = node; current; current = current.parent) {
        if (getColor(current) === 2 /* Black */) {
            return true;
        }
    }
    return false;
}
// True if this node, or any node in its subtree, is marked Black.
function nodeOrChildIsBlack(node) {
    if (getColor(node) === 2 /* Black */) {
        return true;
    }
    return node.getChildren().some((child) => nodeOrChildIsBlack(child));
}
/**
 * Marks reachable AST nodes starting from the entry points.
 *
 * Nodes proven reachable are colored Black and their referenced symbols are
 * visited transitively. At ShakeLevel.ClassMembers, class/interface members
 * referenced from somewhere are first colored Gray and only promoted to Black
 * once their declaring class/interface is itself (partly) Black.
 * @param languageService Language service over the discovered files.
 * @param options Tree shaking options (entry points, shake level, ignore pattern).
 */
function markNodes(languageService, options) {
    const program = languageService.getProgram();
    if (!program) {
        throw new Error('Could not get program from language service');
    }
    if (options.shakeLevel === 0 /* Files */) {
        // Mark all source files Black
        program.getSourceFiles().forEach((sourceFile) => {
            setColor(sourceFile, 2 /* Black */);
        });
        return;
    }
    const black_queue = [];
    const gray_queue = [];
    const sourceFilesLoaded = {};
    // Seeds the always-kept top-level statements of a file the first time any
    // node from that file becomes reachable.
    function enqueueTopLevelModuleStatements(sourceFile) {
        sourceFile.forEachChild((node) => {
            if (ts.isImportDeclaration(node)) {
                if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
                    // Bare side-effect import: keep it and follow the module.
                    setColor(node, 2 /* Black */);
                    enqueueImport(node, node.moduleSpecifier.text);
                }
                return;
            }
            if (ts.isExportDeclaration(node)) {
                if (node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
                    setColor(node, 2 /* Black */);
                    enqueueImport(node, node.moduleSpecifier.text);
                }
                return;
            }
            if (ts.isExpressionStatement(node)
                || ts.isIfStatement(node)
                || ts.isIterationStatement(node, true)
                || ts.isExportAssignment(node)) {
                enqueue_black(node);
            }
            if (ts.isImportEqualsDeclaration(node)) {
                if (/export/.test(node.getFullText(sourceFile))) {
                    // e.g. "export import Severity = BaseSeverity;"
                    enqueue_black(node);
                }
            }
        });
    }
    function enqueue_gray(node) {
        if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* Gray */) {
            return;
        }
        setColor(node, 1 /* Gray */);
        gray_queue.push(node);
    }
    function enqueue_black(node) {
        const previousColor = getColor(node);
        if (previousColor === 2 /* Black */) {
            return;
        }
        if (previousColor === 1 /* Gray */) {
            // remove from gray queue
            gray_queue.splice(gray_queue.indexOf(node), 1);
            setColor(node, 0 /* White */);
            // add to black queue
            enqueue_black(node);
            // // move from one queue to the other
            // black_queue.push(node);
            // setColor(node, NodeColor.Black);
            return;
        }
        if (nodeOrParentIsBlack(node)) {
            return;
        }
        const fileName = node.getSourceFile().fileName;
        if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
            // Lib/typings files are kept whole; no need to walk them.
            setColor(node, 2 /* Black */);
            return;
        }
        const sourceFile = node.getSourceFile();
        if (!sourceFilesLoaded[sourceFile.fileName]) {
            sourceFilesLoaded[sourceFile.fileName] = true;
            enqueueTopLevelModuleStatements(sourceFile);
        }
        if (ts.isSourceFile(node)) {
            return;
        }
        setColor(node, 2 /* Black */);
        black_queue.push(node);
        if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
            // A kept member keeps (as Gray candidates) the same-named members of
            // other classes/interfaces that reference it (overrides, implementers).
            const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
            if (references) {
                for (let i = 0, len = references.length; i < len; i++) {
                    const reference = references[i];
                    const referenceSourceFile = program.getSourceFile(reference.fileName);
                    if (!referenceSourceFile) {
                        continue;
                    }
                    const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
                    if (ts.isMethodDeclaration(referenceNode.parent)
                        || ts.isPropertyDeclaration(referenceNode.parent)
                        || ts.isGetAccessor(referenceNode.parent)
                        || ts.isSetAccessor(referenceNode.parent)) {
                        enqueue_gray(referenceNode.parent);
                    }
                }
            }
        }
    }
    function enqueueFile(filename) {
        const sourceFile = program.getSourceFile(filename);
        if (!sourceFile) {
            // FIX: restore the interpolated file name in the warning
            // (the string previously contained a broken `$(unknown)` literal).
            console.warn(`Cannot find source file ${filename}`);
            return;
        }
        enqueue_black(sourceFile);
    }
    function enqueueImport(node, importText) {
        if (options.importIgnorePattern.test(importText)) {
            // this import should be ignored
            return;
        }
        const nodeSourceFile = node.getSourceFile();
        let fullPath;
        if (/(^\.\/)|(^\.\.\/)/.test(importText)) {
            fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts';
        }
        else {
            fullPath = importText + '.ts';
        }
        enqueueFile(fullPath);
    }
    options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts'));
    // Add fake usage files
    options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint.${index}.ts`));
    let step = 0;
    const checker = program.getTypeChecker();
    while (black_queue.length > 0 || gray_queue.length > 0) {
        ++step;
        let node;
        if (step % 100 === 0) {
            // Progress logging for long runs.
            console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
        }
        if (black_queue.length === 0) {
            // Promote Gray members whose declaring class/interface is now (partly) Black.
            for (let i = 0; i < gray_queue.length; i++) {
                const node = gray_queue[i];
                const nodeParent = node.parent;
                if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
                    gray_queue.splice(i, 1);
                    black_queue.push(node);
                    setColor(node, 2 /* Black */);
                    i--;
                }
            }
        }
        if (black_queue.length > 0) {
            node = black_queue.shift();
        }
        else {
            // only gray nodes remaining...
            break;
        }
        const nodeSourceFile = node.getSourceFile();
        // Visit every identifier under the Black node and enqueue the
        // declarations its symbol resolves to.
        const loop = (node) => {
            const [symbol, symbolImportNode] = getRealNodeSymbol(checker, node);
            if (symbolImportNode) {
                setColor(symbolImportNode, 2 /* Black */);
            }
            if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
                for (let i = 0, len = symbol.declarations.length; i < len; i++) {
                    const declaration = symbol.declarations[i];
                    if (ts.isSourceFile(declaration)) {
                        // Do not enqueue full source files
                        // (they can be the declaration of a module import)
                        continue;
                    }
                    if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
                        // Keep only the class shell plus structurally required members.
                        enqueue_black(declaration.name);
                        for (let j = 0; j < declaration.members.length; j++) {
                            const member = declaration.members[j];
                            const memberName = member.name ? member.name.getText() : null;
                            if (ts.isConstructorDeclaration(member)
                                || ts.isConstructSignatureDeclaration(member)
                                || ts.isIndexSignatureDeclaration(member)
                                || ts.isCallSignatureDeclaration(member)
                                || memberName === 'toJSON'
                                || memberName === 'toString'
                                || memberName === 'dispose' // TODO: keeping all `dispose` methods
                            ) {
                                enqueue_black(member);
                            }
                        }
                        // queue the heritage clauses
                        if (declaration.heritageClauses) {
                            for (let heritageClause of declaration.heritageClauses) {
                                enqueue_black(heritageClause);
                            }
                        }
                    }
                    else {
                        enqueue_black(declaration);
                    }
                }
            }
            node.forEachChild(loop);
        };
        node.forEachChild(loop);
    }
}
// True if `node` sits inside one of `symbol`'s own declarations in the same
// source file (e.g. an identifier used within its own defining body).
function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) {
    return symbol.declarations.some((declaration) => {
        return (declaration.getSourceFile() === nodeSourceFile
            && declaration.pos <= node.pos
            && node.end <= declaration.end);
    });
}
/**
 * Emits the tree-shaken sources: each file of the program is rewritten so that
 * only Black (reachable) nodes — plus a few always-kept statements — survive.
 * @param languageService Language service over the discovered files.
 * @param shakeLevel Granularity of shaking (Files | InnerFile | ClassMembers).
 * @returns Map from file name to the shaken file contents.
 */
function generateResult(languageService, shakeLevel) {
    const program = languageService.getProgram();
    if (!program) {
        throw new Error('Could not get program from language service');
    }
    let result = {};
    const writeFile = (filePath, contents) => {
        result[filePath] = contents;
    };
    program.getSourceFiles().forEach((sourceFile) => {
        const fileName = sourceFile.fileName;
        if (/^defaultLib:/.test(fileName)) {
            // Never emit the TypeScript lib files.
            return;
        }
        const destination = fileName;
        if (/\.d\.ts$/.test(fileName)) {
            // .d.ts files are all-or-nothing: copied verbatim if anything in them is Black.
            if (nodeOrChildIsBlack(sourceFile)) {
                writeFile(destination, sourceFile.text);
            }
            return;
        }
        let text = sourceFile.text;
        // NOTE: this inner `result` (the per-file output string) deliberately
        // shadows the outer `result` map.
        let result = '';
        // Appends the node's original text (including leading trivia) to the output.
        function keep(node) {
            result += text.substring(node.pos, node.end);
        }
        // Appends synthesized text to the output.
        function write(data) {
            result += data;
        }
        function writeMarkedNodes(node) {
            if (getColor(node) === 2 /* Black */) {
                return keep(node);
            }
            // Always keep certain top-level statements
            if (ts.isSourceFile(node.parent)) {
                if (ts.isExpressionStatement(node) && ts.isStringLiteral(node.expression) && node.expression.text === 'use strict') {
                    return keep(node);
                }
                if (ts.isVariableStatement(node) && nodeOrChildIsBlack(node)) {
                    return keep(node);
                }
            }
            // Keep the entire import in import * as X cases
            if (ts.isImportDeclaration(node)) {
                if (node.importClause && node.importClause.namedBindings) {
                    if (ts.isNamespaceImport(node.importClause.namedBindings)) {
                        if (getColor(node.importClause.namedBindings) === 2 /* Black */) {
                            return keep(node);
                        }
                    }
                    else {
                        // Named imports: re-emit the declaration with only the
                        // surviving (Black) import specifiers.
                        let survivingImports = [];
                        for (const importNode of node.importClause.namedBindings.elements) {
                            if (getColor(importNode) === 2 /* Black */) {
                                survivingImports.push(importNode.getFullText(sourceFile));
                            }
                        }
                        const leadingTriviaWidth = node.getLeadingTriviaWidth();
                        const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
                        if (survivingImports.length > 0) {
                            if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
                                // Keep the default import alongside the surviving named ones.
                                return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
                            }
                            return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
                        }
                        else {
                            if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
                                // Only the default import survives.
                                return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
                            }
                        }
                    }
                }
                else {
                    if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
                        return keep(node);
                    }
                }
            }
            if (shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
                // Emit the class/interface shell but splice out unmarked members
                // (iterating backwards so earlier offsets stay valid).
                let toWrite = node.getFullText();
                for (let i = node.members.length - 1; i >= 0; i--) {
                    const member = node.members[i];
                    if (getColor(member) === 2 /* Black */ || !member.name) {
                        // keep method
                        continue;
                    }
                    if (/^_(.*)Brand$/.test(member.name.getText())) {
                        // TODO: keep all members ending with `Brand`...
                        continue;
                    }
                    let pos = member.pos - node.pos;
                    let end = member.end - node.pos;
                    toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
                }
                return write(toWrite);
            }
            if (ts.isFunctionDeclaration(node)) {
                // Do not go inside functions if they haven't been marked
                return;
            }
            node.forEachChild(writeMarkedNodes);
        }
        if (getColor(sourceFile) !== 2 /* Black */) {
            if (!nodeOrChildIsBlack(sourceFile)) {
                // none of the elements are reachable => don't write this file at all!
                return;
            }
            sourceFile.forEachChild(writeMarkedNodes);
            result += sourceFile.endOfFileToken.getFullText(sourceFile);
        }
        else {
            // The whole file is reachable — copy it verbatim.
            result = text;
        }
        writeFile(destination, result);
    });
    return result;
}
//#endregion
//#region Utils
/**
 * Returns the node's symbol and the `import` node (if the symbol resolved from a different module).
 *
 * Mirrors the language service's go-to-definition resolution: aliases from
 * imports are followed to the aliased symbol, shorthand property assignments,
 * object binding patterns and contextually-typed object literals are resolved
 * to the underlying property declarations.
 * @param checker The program's type checker.
 * @param node The AST node to resolve (typically an identifier).
 * @returns `[symbol, importNode]`; both `null` when no symbol with declarations is found.
 */
function getRealNodeSymbol(checker, node) {
    // NOTE(review): these helpers are internal TypeScript APIs (not in the
    // public typings) — confirm they still exist when upgrading TypeScript.
    const getPropertySymbolsFromContextualType = ts.getPropertySymbolsFromContextualType;
    const getContainingObjectLiteralElement = ts.getContainingObjectLiteralElement;
    const getNameFromPropertyName = ts.getNameFromPropertyName;
    // Go to the original declaration for cases:
    //
    //   (1) when the aliased symbol was declared in the location(parent).
    //   (2) when the aliased symbol is originating from an import.
    //
    function shouldSkipAlias(node, declaration) {
        if (node.kind !== ts.SyntaxKind.Identifier) {
            return false;
        }
        if (node.parent === declaration) {
            return true;
        }
        switch (declaration.kind) {
            case ts.SyntaxKind.ImportClause:
            case ts.SyntaxKind.ImportEqualsDeclaration:
                return true;
            case ts.SyntaxKind.ImportSpecifier:
                return declaration.parent.kind === ts.SyntaxKind.NamedImports;
            default:
                return false;
        }
    }
    // Only leaf tokens (or shorthand property assignments) carry a resolvable symbol here.
    if (!ts.isShorthandPropertyAssignment(node)) {
        if (node.getChildCount() !== 0) {
            return [null, null];
        }
    }
    const { parent } = node;
    let symbol = checker.getSymbolAtLocation(node);
    let importNode = null;
    // If this is an alias, and the request came at the declaration location
    // get the aliased symbol instead. This allows for goto def on an import e.g.
    //   import {A, B} from "mod";
    // to jump to the implementation directly.
    if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
        const aliased = checker.getAliasedSymbol(symbol);
        if (aliased.declarations) {
            // We should mark the import as visited
            importNode = symbol.declarations[0];
            symbol = aliased;
        }
    }
    if (symbol) {
        // Because name in short-hand property assignment has two different meanings: property name and property value,
        // using go-to-definition at such position should go to the variable declaration of the property value rather than
        // go to the declaration of the property name (in this case stay at the same position). However, if go-to-definition
        // is performed at the location of property access, we would like to go to definition of the property in the short-hand
        // assignment. This case and others are handled by the following code.
        if (node.parent.kind === ts.SyntaxKind.ShorthandPropertyAssignment) {
            symbol = checker.getShorthandAssignmentValueSymbol(symbol.valueDeclaration);
        }
        // If the node is the name of a BindingElement within an ObjectBindingPattern instead of just returning the
        // declaration the symbol (which is itself), we should try to get to the original type of the ObjectBindingPattern
        // and return the property declaration for the referenced property.
        // For example:
        //      import('./foo').then(({ b/*goto*/ar }) => undefined); => should get use to the declaration in file "./foo"
        //
        //      function bar<T>(onfulfilled: (value: T) => void) { //....}
        //      interface Test {
        //          pr/*destination*/op1: number
        //      }
        //      bar<Test>(({pr/*goto*/op1})=>{});
        if (ts.isPropertyName(node) && ts.isBindingElement(parent) && ts.isObjectBindingPattern(parent.parent) &&
            (node === (parent.propertyName || parent.name))) {
            const name = getNameFromPropertyName(node);
            const type = checker.getTypeAtLocation(parent.parent);
            if (name && type) {
                if (type.isUnion()) {
                    // For unions, resolve the property against the first constituent.
                    const prop = type.types[0].getProperty(name);
                    if (prop) {
                        symbol = prop;
                    }
                }
                else {
                    const prop = type.getProperty(name);
                    if (prop) {
                        symbol = prop;
                    }
                }
            }
        }
        // If the current location we want to find its definition is in an object literal, try to get the contextual type for the
        // object literal, lookup the property symbol in the contextual type, and use this for goto-definition.
        // For example
        //      interface Props{
        //          /*first*/prop1: number
        //          prop2: boolean
        //      }
        //      function Foo(arg: Props) {}
        //      Foo( { pr/*1*/op1: 10, prop2: false })
        const element = getContainingObjectLiteralElement(node);
        if (element) {
            const contextualType = element && checker.getContextualType(element.parent);
            if (contextualType) {
                const propertySymbols = getPropertySymbolsFromContextualType(element, checker, contextualType, /*unionSymbolOk*/ false);
                if (propertySymbols) {
                    symbol = propertySymbols[0];
                }
            }
        }
    }
    if (symbol && symbol.declarations) {
        return [symbol, importNode];
    }
    return [null, null];
}
/**
 * Get the token whose text contains the position.
 *
 * Descends from the source file into the deepest child whose span contains
 * `position`; when no child contains it, the current node is returned.
 * @param sourceFile File to search.
 * @param position Absolute character offset.
 * @param allowPositionInLeadingTrivia When true, a child's span starts at its full start (including trivia).
 * @param includeEndPosition When true, a position exactly at a child's end still matches that child.
 */
function getTokenAtPosition(sourceFile, position, allowPositionInLeadingTrivia, includeEndPosition) {
    let current = sourceFile;
    outer: while (true) {
        // find the child that contains 'position'
        for (const child of current.getChildren()) {
            const start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
            if (start > position) {
                // If this child begins after position, then all subsequent children will as well.
                break;
            }
            const end = child.getEnd();
            if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) {
                // Descend into this child and keep searching.
                current = child;
                continue outer;
            }
        }
        // No child contains the position — `current` is the deepest match.
        return current;
    }
}

View File

@@ -1,834 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as ts from 'typescript';
const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
export const enum ShakeLevel {
	// Keep whole files: every reachable source file is marked and emitted unmodified.
	Files = 0,
	// Shake at the granularity of declarations inside a file.
	// NOTE(review): exact semantics inferred from the name — confirm against callers.
	InnerFile = 1,
	// Additionally drop class/interface members that are never referenced.
	ClassMembers = 2
}
export interface ITreeShakingOptions {
	/**
	 * The full path to the root where sources are.
	 */
	sourcesRoot: string;
	/**
	 * Module ids.
	 * e.g. `vs/editor/editor.main` or `index`
	 */
	entryPoints: string[];
	/**
	 * Inline usages.
	 */
	inlineEntryPoints: string[];
	/**
	 * TypeScript libs.
	 * e.g. `lib.d.ts`, `lib.es2015.collection.d.ts`
	 */
	libs: string[];
	/**
	 * Other .d.ts files
	 */
	typings: string[];
	/**
	 * TypeScript compiler options.
	 */
	compilerOptions?: any;
	/**
	 * The shake level to perform.
	 */
	shakeLevel: ShakeLevel;
	/**
	 * regex pattern to ignore certain imports e.g. `vs/css!` imports
	 */
	importIgnorePattern: RegExp;
	/**
	 * Module id redirects (`from` -> `to`); the redirected id is resolved
	 * as `<sourcesRoot>/<to>.ts` when reading sources.
	 */
	redirects: { [module: string]: string; };
}
export interface ITreeShakingResult {
	// Maps destination file path -> shaken file contents.
	[file: string]: string;
}
/**
 * Log each diagnostic to the console as `<file>: - <line>,<col> - <message>`.
 * File and location segments are omitted when the diagnostic carries none.
 */
function printDiagnostics(diagnostics: ReadonlyArray<ts.Diagnostic>): void {
	for (const diag of diagnostics) {
		let result = '';
		if (diag.file) {
			result += `${diag.file.fileName}: `;
		}
		// `start` is a 0-based offset and may legitimately be 0 (start of file),
		// so test against `undefined` rather than truthiness.
		if (diag.file && diag.start !== undefined) {
			const location = diag.file.getLineAndCharacterOfPosition(diag.start);
			result += `- ${location.line + 1},${location.character} - `;
		}
		result += JSON.stringify(diag.messageText);
		console.log(result);
	}
}
export function shake(options: ITreeShakingOptions): ITreeShakingResult {
const languageService = createTypeScriptLanguageService(options);
const program = languageService.getProgram()!;
const globalDiagnostics = program.getGlobalDiagnostics();
if (globalDiagnostics.length > 0) {
printDiagnostics(globalDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
const syntacticDiagnostics = program.getSyntacticDiagnostics();
if (syntacticDiagnostics.length > 0) {
printDiagnostics(syntacticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
const semanticDiagnostics = program.getSemanticDiagnostics();
if (semanticDiagnostics.length > 0) {
printDiagnostics(semanticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
markNodes(languageService, options);
return generateResult(languageService, options.shakeLevel);
}
//#region Discovery, LanguageService & Setup
/**
 * Build a ts.LanguageService over the transitive closure of the entry points,
 * plus inline entry points, extra typings and the requested default libs.
 */
function createTypeScriptLanguageService(options: ITreeShakingOptions): ts.LanguageService {
	// Discover referenced files
	const FILES = discoverAndReadFiles(options);
	// Add fake usage files
	options.inlineEntryPoints.forEach((inlineEntryPoint, index) => {
		FILES[`inlineEntryPoint.${index}.ts`] = inlineEntryPoint;
	});
	// Add additional typings
	options.typings.forEach((typing) => {
		const filePath = path.join(options.sourcesRoot, typing);
		FILES[typing] = fs.readFileSync(filePath).toString();
	});
	// Resolve libs. Keys use the `defaultLib:<name>` scheme (e.g.
	// `defaultLib:lib.d.ts`) that the host's getDefaultLibFileName/
	// getScriptSnapshot expect.
	const RESOLVED_LIBS: ILibMap = {};
	options.libs.forEach((filename) => {
		const filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
		RESOLVED_LIBS[`defaultLib:${filename}`] = fs.readFileSync(filepath).toString();
	});
	const compilerOptions = ts.convertCompilerOptionsFromJson(options.compilerOptions, options.sourcesRoot).options;
	const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, compilerOptions);
	return ts.createLanguageService(host);
}
/**
 * Read imports and follow them until all files have been handled.
 * Returns a map of `<moduleId>.ts` / `<moduleId>.d.ts` -> file contents.
 */
function discoverAndReadFiles(options: ITreeShakingOptions): IFileMap {
	const discovered: IFileMap = {};
	const seen = new Set<string>();
	const pending: string[] = [];
	// Schedule a module exactly once.
	const schedule = (moduleId: string) => {
		if (seen.has(moduleId)) {
			return;
		}
		seen.add(moduleId);
		pending.push(moduleId);
	};
	options.entryPoints.forEach((entryPoint) => schedule(entryPoint));
	while (pending.length > 0) {
		const moduleId = pending.shift()!;
		// A sibling .d.ts wins: record it and do not follow its imports.
		const dtsFilename = path.join(options.sourcesRoot, moduleId + '.d.ts');
		if (fs.existsSync(dtsFilename)) {
			discovered[`${moduleId}.d.ts`] = fs.readFileSync(dtsFilename).toString();
			continue;
		}
		// This is an import for a .js file, so ignore it...
		const jsFilename = path.join(options.sourcesRoot, moduleId + '.js');
		if (fs.existsSync(jsFilename)) {
			continue;
		}
		// Apply redirects before resolving the .ts source path.
		const tsFilename = options.redirects[moduleId]
			? path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts')
			: path.join(options.sourcesRoot, moduleId + '.ts');
		const tsContents = fs.readFileSync(tsFilename).toString();
		const info = ts.preProcessFile(tsContents);
		for (let i = info.importedFiles.length - 1; i >= 0; i--) {
			const importedFileName = info.importedFiles[i].fileName;
			if (options.importIgnorePattern.test(importedFileName)) {
				// Ignore vs/css! imports
				continue;
			}
			let importedModuleId = importedFileName;
			if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) {
				// Relative import: resolve against the importing module's folder.
				importedModuleId = path.join(path.dirname(moduleId), importedModuleId);
			}
			schedule(importedModuleId);
		}
		discovered[`${moduleId}.ts`] = tsContents;
	}
	return discovered;
}
// Maps `defaultLib:<name>` keys to default-lib file contents.
interface ILibMap { [libName: string]: string; }
// Maps source file name (module path + extension) to file contents.
interface IFileMap { [fileName: string]: string; }
/**
 * A TypeScript language service host backed entirely by two in-memory maps:
 * one for default libs (keyed `defaultLib:<name>`) and one for source files.
 * Unknown files resolve to an empty snapshot rather than an error.
 */
class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
	private readonly _libs: ILibMap;
	private readonly _files: IFileMap;
	private readonly _compilerOptions: ts.CompilerOptions;
	constructor(libs: ILibMap, files: IFileMap, compilerOptions: ts.CompilerOptions) {
		this._libs = libs;
		this._files = files;
		this._compilerOptions = compilerOptions;
	}
	// --- language service host ---------------
	getCompilationSettings(): ts.CompilerOptions {
		return this._compilerOptions;
	}
	getScriptFileNames(): string[] {
		// Libs first, then sources — concatenation order matches registration order.
		return [...Object.keys(this._libs), ...Object.keys(this._files)];
	}
	getScriptVersion(_fileName: string): string {
		// Content never changes, so a constant version disables invalidation.
		return '1';
	}
	getProjectVersion(): string {
		return '1';
	}
	getScriptSnapshot(fileName: string): ts.IScriptSnapshot {
		const contents = this._files.hasOwnProperty(fileName)
			? this._files[fileName]
			: this._libs.hasOwnProperty(fileName)
				? this._libs[fileName]
				: '';
		return ts.ScriptSnapshot.fromString(contents);
	}
	getScriptKind(_fileName: string): ts.ScriptKind {
		return ts.ScriptKind.TS;
	}
	getCurrentDirectory(): string {
		return '';
	}
	getDefaultLibFileName(_options: ts.CompilerOptions): string {
		return 'defaultLib:lib.d.ts';
	}
	isDefaultLibFileName(fileName: string): boolean {
		return fileName === this.getDefaultLibFileName(this._compilerOptions);
	}
}
//#endregion
//#region Tree Shaking
// Tri-color marking used by the reachability walk in markNodes():
const enum NodeColor {
	White = 0, // not visited — will be dropped from the output
	Gray = 1,  // queued as possibly-needed (class/interface member reference)
	Black = 2  // reachable — kept verbatim by generateResult()
}
// Read the marker color stashed on the node; unmarked nodes count as White.
function getColor(node: ts.Node): NodeColor {
	const stored: NodeColor | undefined = (node as any).$$$color;
	return typeof stored === 'number' ? stored : NodeColor.White;
}
// Stash the marker color directly on the node object.
function setColor(node: ts.Node, color: NodeColor): void {
	(node as any).$$$color = color;
}
// True when the node itself or any ancestor up the parent chain is Black.
function nodeOrParentIsBlack(node: ts.Node): boolean {
	for (let current: ts.Node = node; current; current = current.parent) {
		if (getColor(current) === NodeColor.Black) {
			return true;
		}
	}
	return false;
}
// True when the node itself or any descendant is Black (depth-first,
// short-circuits on the first hit like the original loop).
function nodeOrChildIsBlack(node: ts.Node): boolean {
	if (getColor(node) === NodeColor.Black) {
		return true;
	}
	return node.getChildren().some(child => nodeOrChildIsBlack(child));
}
/**
 * Mark-phase of the shaker: starting from the entry points, color every node
 * that must survive Black. Gray nodes are class/interface members that are
 * only promoted to Black once their declaring type is known to be reachable.
 * At ShakeLevel.Files all source files are simply marked Black wholesale.
 */
function markNodes(languageService: ts.LanguageService, options: ITreeShakingOptions) {
	const program = languageService.getProgram();
	if (!program) {
		throw new Error('Could not get program from language service');
	}
	if (options.shakeLevel === ShakeLevel.Files) {
		// Mark all source files Black
		program.getSourceFiles().forEach((sourceFile) => {
			setColor(sourceFile, NodeColor.Black);
		});
		return;
	}
	const black_queue: ts.Node[] = [];
	const gray_queue: ts.Node[] = [];
	// Guards against re-walking a file's top-level statements more than once.
	const sourceFilesLoaded: { [fileName: string]: boolean } = {};
	// When a file is touched for the first time, force-keep its side-effectful
	// top-level statements (bare imports, re-exports, expressions, loops, ...).
	function enqueueTopLevelModuleStatements(sourceFile: ts.SourceFile): void {
		sourceFile.forEachChild((node: ts.Node) => {
			if (ts.isImportDeclaration(node)) {
				if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
					setColor(node, NodeColor.Black);
					enqueueImport(node, node.moduleSpecifier.text);
				}
				return;
			}
			if (ts.isExportDeclaration(node)) {
				if (node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
					setColor(node, NodeColor.Black);
					enqueueImport(node, node.moduleSpecifier.text);
				}
				return;
			}
			if (
				ts.isExpressionStatement(node)
				|| ts.isIfStatement(node)
				|| ts.isIterationStatement(node, true)
				|| ts.isExportAssignment(node)
			) {
				enqueue_black(node);
			}
			if (ts.isImportEqualsDeclaration(node)) {
				if (/export/.test(node.getFullText(sourceFile))) {
					// e.g. "export import Severity = BaseSeverity;"
					enqueue_black(node);
				}
			}
		});
	}
	function enqueue_gray(node: ts.Node): void {
		if (nodeOrParentIsBlack(node) || getColor(node) === NodeColor.Gray) {
			return;
		}
		setColor(node, NodeColor.Gray);
		gray_queue.push(node);
	}
	function enqueue_black(node: ts.Node): void {
		const previousColor = getColor(node);
		if (previousColor === NodeColor.Black) {
			return;
		}
		if (previousColor === NodeColor.Gray) {
			// remove from gray queue, reset to White and re-enqueue as Black
			gray_queue.splice(gray_queue.indexOf(node), 1);
			setColor(node, NodeColor.White);
			enqueue_black(node);
			return;
		}
		if (nodeOrParentIsBlack(node)) {
			return;
		}
		const fileName = node.getSourceFile().fileName;
		// Default libs and .d.ts files are kept whole; no need to walk them.
		if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
			setColor(node, NodeColor.Black);
			return;
		}
		const sourceFile = node.getSourceFile();
		if (!sourceFilesLoaded[sourceFile.fileName]) {
			sourceFilesLoaded[sourceFile.fileName] = true;
			enqueueTopLevelModuleStatements(sourceFile);
		}
		if (ts.isSourceFile(node)) {
			return;
		}
		setColor(node, NodeColor.Black);
		black_queue.push(node);
		// At ClassMembers level, referencing one member pulls in same-named
		// members elsewhere (overrides/implementations) as Gray candidates.
		if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
			const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
			if (references) {
				for (let i = 0, len = references.length; i < len; i++) {
					const reference = references[i];
					const referenceSourceFile = program!.getSourceFile(reference.fileName);
					if (!referenceSourceFile) {
						continue;
					}
					const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
					if (
						ts.isMethodDeclaration(referenceNode.parent)
						|| ts.isPropertyDeclaration(referenceNode.parent)
						|| ts.isGetAccessor(referenceNode.parent)
						|| ts.isSetAccessor(referenceNode.parent)
					) {
						enqueue_gray(referenceNode.parent);
					}
				}
			}
		}
	}
	function enqueueFile(filename: string): void {
		const sourceFile = program!.getSourceFile(filename);
		if (!sourceFile) {
			console.warn(`Cannot find source file ${filename}`);
			return;
		}
		enqueue_black(sourceFile);
	}
	function enqueueImport(node: ts.Node, importText: string): void {
		if (options.importIgnorePattern.test(importText)) {
			// this import should be ignored
			return;
		}
		const nodeSourceFile = node.getSourceFile();
		let fullPath: string;
		if (/(^\.\/)|(^\.\.\/)/.test(importText)) {
			fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts';
		} else {
			fullPath = importText + '.ts';
		}
		enqueueFile(fullPath);
	}
	options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts'));
	// Add fake usage files
	options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint.${index}.ts`));
	let step = 0;
	const checker = program.getTypeChecker();
	while (black_queue.length > 0 || gray_queue.length > 0) {
		++step;
		let node: ts.Node;
		if (step % 100 === 0) {
			console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
		}
		if (black_queue.length === 0) {
			// No certain work left: promote Gray members whose declaring
			// class/interface has become (partially) Black.
			for (let i = 0; i < gray_queue.length; i++) {
				const node = gray_queue[i];
				const nodeParent = node.parent;
				if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
					gray_queue.splice(i, 1);
					black_queue.push(node);
					setColor(node, NodeColor.Black);
					i--;
				}
			}
		}
		if (black_queue.length > 0) {
			node = black_queue.shift()!;
		} else {
			// only gray nodes remaining...
			break;
		}
		const nodeSourceFile = node.getSourceFile();
		// Walk the subtree: resolve every identifier to its declarations and
		// enqueue them (marking the import that brought the symbol in, if any).
		const loop = (node: ts.Node) => {
			const [symbol, symbolImportNode] = getRealNodeSymbol(checker, node);
			if (symbolImportNode) {
				setColor(symbolImportNode, NodeColor.Black);
			}
			if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
				for (let i = 0, len = symbol.declarations.length; i < len; i++) {
					const declaration = symbol.declarations[i];
					if (ts.isSourceFile(declaration)) {
						// Do not enqueue full source files
						// (they can be the declaration of a module import)
						continue;
					}
					if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
						enqueue_black(declaration.name!);
						for (let j = 0; j < declaration.members.length; j++) {
							const member = declaration.members[j];
							const memberName = member.name ? member.name.getText() : null;
							if (
								ts.isConstructorDeclaration(member)
								|| ts.isConstructSignatureDeclaration(member)
								|| ts.isIndexSignatureDeclaration(member)
								|| ts.isCallSignatureDeclaration(member)
								|| memberName === 'toJSON'
								|| memberName === 'toString'
								|| memberName === 'dispose'// TODO: keeping all `dispose` methods
							) {
								enqueue_black(member);
							}
						}
						// queue the heritage clauses
						if (declaration.heritageClauses) {
							for (let heritageClause of declaration.heritageClauses) {
								enqueue_black(heritageClause);
							}
						}
					} else {
						enqueue_black(declaration);
					}
				}
			}
			node.forEachChild(loop);
		};
		node.forEachChild(loop);
	}
}
// True when `node` lies textually inside one of `symbol`'s own declarations
// in the same file — i.e. the symbol is being referenced from within itself.
function nodeIsInItsOwnDeclaration(nodeSourceFile: ts.SourceFile, node: ts.Node, symbol: ts.Symbol): boolean {
	for (const declaration of symbol.declarations) {
		if (declaration.getSourceFile() === nodeSourceFile
			&& declaration.pos <= node.pos
			&& node.end <= declaration.end) {
			return true;
		}
	}
	return false;
}
/**
 * Emit-phase of the shaker: for every source file, write out only the marked
 * (Black) regions. Default libs are skipped, .d.ts files are kept whole when
 * anything in them is reachable, named imports are trimmed to their surviving
 * bindings, and at ClassMembers level unmarked members are cut out of
 * class/interface bodies by raw text surgery.
 */
function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLevel): ITreeShakingResult {
	const program = languageService.getProgram();
	if (!program) {
		throw new Error('Could not get program from language service');
	}
	let result: ITreeShakingResult = {};
	const writeFile = (filePath: string, contents: string): void => {
		result[filePath] = contents;
	};
	program.getSourceFiles().forEach((sourceFile) => {
		const fileName = sourceFile.fileName;
		// In-memory default libs never appear in the output.
		if (/^defaultLib:/.test(fileName)) {
			return;
		}
		const destination = fileName;
		// .d.ts files are all-or-nothing: emit verbatim if anything is reachable.
		if (/\.d\.ts$/.test(fileName)) {
			if (nodeOrChildIsBlack(sourceFile)) {
				writeFile(destination, sourceFile.text);
			}
			return;
		}
		let text = sourceFile.text;
		// NOTE(review): this per-file accumulator shadows the outer `result`
		// map — intentional here, but easy to misread.
		let result = '';
		// Append the node's original source text (including leading trivia).
		function keep(node: ts.Node): void {
			result += text.substring(node.pos, node.end);
		}
		// Append synthesized replacement text.
		function write(data: string): void {
			result += data;
		}
		// Recursive emitter: copies Black nodes, rewrites imports, prunes
		// class/interface members, and otherwise recurses into children.
		function writeMarkedNodes(node: ts.Node): void {
			if (getColor(node) === NodeColor.Black) {
				return keep(node);
			}
			// Always keep certain top-level statements
			if (ts.isSourceFile(node.parent)) {
				if (ts.isExpressionStatement(node) && ts.isStringLiteral(node.expression) && node.expression.text === 'use strict') {
					return keep(node);
				}
				if (ts.isVariableStatement(node) && nodeOrChildIsBlack(node)) {
					return keep(node);
				}
			}
			// Keep the entire import in import * as X cases
			if (ts.isImportDeclaration(node)) {
				if (node.importClause && node.importClause.namedBindings) {
					if (ts.isNamespaceImport(node.importClause.namedBindings)) {
						if (getColor(node.importClause.namedBindings) === NodeColor.Black) {
							return keep(node);
						}
					} else {
						// Named imports: re-synthesize the declaration with only
						// the bindings that were marked Black.
						let survivingImports: string[] = [];
						for (const importNode of node.importClause.namedBindings.elements) {
							if (getColor(importNode) === NodeColor.Black) {
								survivingImports.push(importNode.getFullText(sourceFile));
							}
						}
						const leadingTriviaWidth = node.getLeadingTriviaWidth();
						const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
						if (survivingImports.length > 0) {
							// Preserve a marked default import alongside the named ones.
							if (node.importClause && node.importClause.name && getColor(node.importClause) === NodeColor.Black) {
								return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
							}
							return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
						} else {
							// Only the default import (if marked) survives.
							if (node.importClause && node.importClause.name && getColor(node.importClause) === NodeColor.Black) {
								return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
							}
						}
					}
				} else {
					if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
						return keep(node);
					}
				}
			}
			// ClassMembers level: cut unmarked members out of the raw text,
			// iterating backwards so earlier offsets stay valid.
			if (shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
				let toWrite = node.getFullText();
				for (let i = node.members.length - 1; i >= 0; i--) {
					const member = node.members[i];
					if (getColor(member) === NodeColor.Black || !member.name) {
						// keep method
						continue;
					}
					if (/^_(.*)Brand$/.test(member.name.getText())) {
						// TODO: keep all members ending with `Brand`...
						continue;
					}
					let pos = member.pos - node.pos;
					let end = member.end - node.pos;
					toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
				}
				return write(toWrite);
			}
			if (ts.isFunctionDeclaration(node)) {
				// Do not go inside functions if they haven't been marked
				return;
			}
			node.forEachChild(writeMarkedNodes);
		}
		if (getColor(sourceFile) !== NodeColor.Black) {
			if (!nodeOrChildIsBlack(sourceFile)) {
				// none of the elements are reachable => don't write this file at all!
				return;
			}
			sourceFile.forEachChild(writeMarkedNodes);
			result += sourceFile.endOfFileToken.getFullText(sourceFile);
		} else {
			// Whole file was marked: emit it untouched.
			result = text;
		}
		writeFile(destination, result);
	});
	return result;
}
//#endregion
//#region Utils
/**
 * Returns the node's symbol and the `import` node (if the symbol resolved from a different module).
 * Mirrors the TypeScript services' goto-definition resolution so that marking
 * follows the same edges the editor would.
 */
function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol | null, ts.Declaration | null] {
	// Use some TypeScript internals to avoid code duplication.
	// These helpers are not part of the public API, hence the `(<any>ts)` casts.
	type ObjectLiteralElementWithName = ts.ObjectLiteralElement & { name: ts.PropertyName; parent: ts.ObjectLiteralExpression | ts.JsxAttributes };
	const getPropertySymbolsFromContextualType: (node: ObjectLiteralElementWithName, checker: ts.TypeChecker, contextualType: ts.Type, unionSymbolOk: boolean) => ReadonlyArray<ts.Symbol> = (<any>ts).getPropertySymbolsFromContextualType;
	const getContainingObjectLiteralElement: (node: ts.Node) => ObjectLiteralElementWithName | undefined = (<any>ts).getContainingObjectLiteralElement;
	const getNameFromPropertyName: (name: ts.PropertyName) => string | undefined = (<any>ts).getNameFromPropertyName;
	// Go to the original declaration for cases:
	//
	// (1) when the aliased symbol was declared in the location(parent).
	// (2) when the aliased symbol is originating from an import.
	//
	function shouldSkipAlias(node: ts.Node, declaration: ts.Node): boolean {
		if (node.kind !== ts.SyntaxKind.Identifier) {
			return false;
		}
		if (node.parent === declaration) {
			return true;
		}
		switch (declaration.kind) {
			case ts.SyntaxKind.ImportClause:
			case ts.SyntaxKind.ImportEqualsDeclaration:
				return true;
			case ts.SyntaxKind.ImportSpecifier:
				return declaration.parent.kind === ts.SyntaxKind.NamedImports;
			default:
				return false;
		}
	}
	// Only resolve leaf tokens (identifiers etc.); shorthand property
	// assignments are the one non-leaf shape handled below.
	if (!ts.isShorthandPropertyAssignment(node)) {
		if (node.getChildCount() !== 0) {
			return [null, null];
		}
	}
	const { parent } = node;
	let symbol = checker.getSymbolAtLocation(node);
	let importNode: ts.Declaration | null = null;
	// If this is an alias, and the request came at the declaration location
	// get the aliased symbol instead. This allows for goto def on an import e.g.
	//   import {A, B} from "mod";
	// to jump to the implementation directly.
	if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
		const aliased = checker.getAliasedSymbol(symbol);
		if (aliased.declarations) {
			// We should mark the import as visited
			importNode = symbol.declarations[0];
			symbol = aliased;
		}
	}
	if (symbol) {
		// Because name in short-hand property assignment has two different meanings: property name and property value,
		// using go-to-definition at such position should go to the variable declaration of the property value rather than
		// go to the declaration of the property name (in this case stay at the same position). However, if go-to-definition
		// is performed at the location of property access, we would like to go to definition of the property in the short-hand
		// assignment. This case and others are handled by the following code.
		if (node.parent.kind === ts.SyntaxKind.ShorthandPropertyAssignment) {
			symbol = checker.getShorthandAssignmentValueSymbol(symbol.valueDeclaration);
		}
		// If the node is the name of a BindingElement within an ObjectBindingPattern instead of just returning the
		// declaration the symbol (which is itself), we should try to get to the original type of the ObjectBindingPattern
		// and return the property declaration for the referenced property.
		// For example:
		//      import('./foo').then(({ b/*goto*/ar }) => undefined); => should get use to the declaration in file "./foo"
		//
		//      function bar<T>(onfulfilled: (value: T) => void) { //....}
		//      interface Test {
		//          pr/*destination*/op1: number
		//      }
		//      bar<Test>(({pr/*goto*/op1})=>{});
		if (ts.isPropertyName(node) && ts.isBindingElement(parent) && ts.isObjectBindingPattern(parent.parent) &&
			(node === (parent.propertyName || parent.name))) {
			const name = getNameFromPropertyName(node);
			const type = checker.getTypeAtLocation(parent.parent);
			if (name && type) {
				// For unions, arbitrarily resolve against the first constituent.
				if (type.isUnion()) {
					const prop = type.types[0].getProperty(name);
					if (prop) {
						symbol = prop;
					}
				} else {
					const prop = type.getProperty(name);
					if (prop) {
						symbol = prop;
					}
				}
			}
		}
		// If the current location we want to find its definition is in an object literal, try to get the contextual type for the
		// object literal, lookup the property symbol in the contextual type, and use this for goto-definition.
		// For example
		//      interface Props{
		//          /*first*/prop1: number
		//          prop2: boolean
		//      }
		//      function Foo(arg: Props) {}
		//      Foo( { pr/*1*/op1: 10, prop2: false })
		const element = getContainingObjectLiteralElement(node);
		if (element) {
			const contextualType = element && checker.getContextualType(element.parent);
			if (contextualType) {
				const propertySymbols = getPropertySymbolsFromContextualType(element, checker, contextualType, /*unionSymbolOk*/ false);
				if (propertySymbols) {
					symbol = propertySymbols[0];
				}
			}
		}
	}
	// Only symbols with declarations are useful to the marker.
	if (symbol && symbol.declarations) {
		return [symbol, importNode];
	}
	return [null, null];
}
/** Get the token whose text contains the position */
function getTokenAtPosition(sourceFile: ts.SourceFile, position: number, allowPositionInLeadingTrivia: boolean, includeEndPosition: boolean): ts.Node {
	let node: ts.Node = sourceFile;
	for (;;) {
		// Find the child whose span covers `position` and descend into it.
		let next: ts.Node | undefined = undefined;
		for (const child of node.getChildren()) {
			const childStart = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
			if (childStart > position) {
				// Children are ordered by position; once one starts past
				// `position`, no later sibling can contain it.
				break;
			}
			const childEnd = child.getEnd();
			const matchesAtEnd = position === childEnd && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition);
			if (position < childEnd || matchesAtEnd) {
				next = child;
				break;
			}
		}
		if (next === undefined) {
			// No child contains the position: `node` is the innermost match.
			return node;
		}
		node = next;
	}
}
//#endregion

View File

@@ -3,30 +3,48 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = require("path"); var path_1 = require("path");
const Lint = require("tslint"); var Lint = require("tslint");
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions())); return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions()));
} };
} return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
class ImportPatterns extends Lint.RuleWalker { var ImportPatterns = /** @class */ (function (_super) {
constructor(file, opts) { __extends(ImportPatterns, _super);
super(file, opts); function ImportPatterns(file, opts) {
this.imports = Object.create(null); var _this = _super.call(this, file, opts) || this;
_this.imports = Object.create(null);
return _this;
} }
visitImportDeclaration(node) { ImportPatterns.prototype.visitImportDeclaration = function (node) {
let path = node.moduleSpecifier.getText(); var path = node.moduleSpecifier.getText();
// remove quotes // remove quotes
path = path.slice(1, -1); path = path.slice(1, -1);
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path); path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
} }
if (this.imports[path]) { if (this.imports[path]) {
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Duplicate imports for '${path}'.`)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Duplicate imports for '" + path + "'."));
} }
this.imports[path] = true; this.imports[path] = true;
} };
} return ImportPatterns;
}(Lint.RuleWalker));

View File

@@ -3,60 +3,79 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript"); var ts = require("typescript");
const Lint = require("tslint"); var Lint = require("tslint");
const minimatch = require("minimatch"); var minimatch = require("minimatch");
const path_1 = require("path"); var path_1 = require("path");
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
const configs = this.getOptions().ruleArguments; function Rule() {
for (const config of configs) { return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var configs = this.getOptions().ruleArguments;
for (var _i = 0, configs_1 = configs; _i < configs_1.length; _i++) {
var config = configs_1[_i];
if (minimatch(sourceFile.fileName, config.target)) { if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions(), config)); return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions(), config));
} }
} }
return []; return [];
} };
} return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
class ImportPatterns extends Lint.RuleWalker { var ImportPatterns = /** @class */ (function (_super) {
constructor(file, opts, _config) { __extends(ImportPatterns, _super);
super(file, opts); function ImportPatterns(file, opts, _config) {
this._config = _config; var _this = _super.call(this, file, opts) || this;
_this._config = _config;
return _this;
} }
visitImportEqualsDeclaration(node) { ImportPatterns.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) { if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node); this._validateImport(node.moduleReference.expression.getText(), node);
} }
} };
visitImportDeclaration(node) { ImportPatterns.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node); this._validateImport(node.moduleSpecifier.getText(), node);
} };
visitCallExpression(node) { ImportPatterns.prototype.visitCallExpression = function (node) {
super.visitCallExpression(node); _super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code // import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) { if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments; var path = node.arguments[0];
this._validateImport(path.getText(), node); this._validateImport(path.getText(), node);
} }
} };
_validateImport(path, node) { ImportPatterns.prototype._validateImport = function (path, node) {
// remove quotes // remove quotes
path = path.slice(1, -1); path = path.slice(1, -1);
// resolve relative paths // resolve relative paths
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(this.getSourceFile().fileName, path); path = path_1.join(this.getSourceFile().fileName, path);
} }
let restrictions; var restrictions;
if (typeof this._config.restrictions === 'string') { if (typeof this._config.restrictions === 'string') {
restrictions = [this._config.restrictions]; restrictions = [this._config.restrictions];
} }
else { else {
restrictions = this._config.restrictions; restrictions = this._config.restrictions;
} }
let matched = false; var matched = false;
for (const pattern of restrictions) { for (var _i = 0, restrictions_1 = restrictions; _i < restrictions_1.length; _i++) {
var pattern = restrictions_1[_i];
if (minimatch(path, pattern)) { if (minimatch(path, pattern)) {
matched = true; matched = true;
break; break;
@@ -64,7 +83,8 @@ class ImportPatterns extends Lint.RuleWalker {
} }
if (!matched) { if (!matched) {
// None of the restrictions matched // None of the restrictions matched
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Imports violates '${restrictions.join(' or ')}' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization`)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Imports violates '" + restrictions.join(' or ') + "' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization"));
} }
} };
} return ImportPatterns;
}(Lint.RuleWalker));

View File

@@ -3,22 +3,36 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript"); var ts = require("typescript");
const Lint = require("tslint"); var Lint = require("tslint");
const path_1 = require("path"); var path_1 = require("path");
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
const parts = path_1.dirname(sourceFile.fileName).split(/\\|\//); function Rule() {
const ruleArgs = this.getOptions().ruleArguments[0]; return _super !== null && _super.apply(this, arguments) || this;
let config; }
for (let i = parts.length - 1; i >= 0; i--) { Rule.prototype.apply = function (sourceFile) {
var parts = path_1.dirname(sourceFile.fileName).split(/\\|\//);
var ruleArgs = this.getOptions().ruleArguments[0];
var config;
for (var i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) { if (ruleArgs[parts[i]]) {
config = { config = {
allowed: new Set(ruleArgs[parts[i]]).add(parts[i]), allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
disallowed: new Set() disallowed: new Set()
}; };
Object.keys(ruleArgs).forEach(key => { Object.keys(ruleArgs).forEach(function (key) {
if (!config.allowed.has(key)) { if (!config.allowed.has(key)) {
config.disallowed.add(key); config.disallowed.add(key);
} }
@@ -30,54 +44,58 @@ class Rule extends Lint.Rules.AbstractRule {
return []; return [];
} }
return this.applyWithWalker(new LayeringRule(sourceFile, config, this.getOptions())); return this.applyWithWalker(new LayeringRule(sourceFile, config, this.getOptions()));
} };
} return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
class LayeringRule extends Lint.RuleWalker { var LayeringRule = /** @class */ (function (_super) {
constructor(file, config, opts) { __extends(LayeringRule, _super);
super(file, opts); function LayeringRule(file, config, opts) {
this._config = config; var _this = _super.call(this, file, opts) || this;
_this._config = config;
return _this;
} }
visitImportEqualsDeclaration(node) { LayeringRule.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) { if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node); this._validateImport(node.moduleReference.expression.getText(), node);
} }
} };
visitImportDeclaration(node) { LayeringRule.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node); this._validateImport(node.moduleSpecifier.getText(), node);
} };
visitCallExpression(node) { LayeringRule.prototype.visitCallExpression = function (node) {
super.visitCallExpression(node); _super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code // import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) { if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments; var path = node.arguments[0];
this._validateImport(path.getText(), node); this._validateImport(path.getText(), node);
} }
} };
_validateImport(path, node) { LayeringRule.prototype._validateImport = function (path, node) {
// remove quotes // remove quotes
path = path.slice(1, -1); path = path.slice(1, -1);
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path); path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
} }
const parts = path_1.dirname(path).split(/\\|\//); var parts = path_1.dirname(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) { for (var i = parts.length - 1; i >= 0; i--) {
const part = parts[i]; var part = parts[i];
if (this._config.allowed.has(part)) { if (this._config.allowed.has(part)) {
// GOOD - same layer // GOOD - same layer
return; return;
} }
if (this._config.disallowed.has(part)) { if (this._config.disallowed.has(part)) {
// BAD - wrong layer // BAD - wrong layer
const message = `Bad layering. You are not allowed to access '${part}' from here, allowed layers are: [${LayeringRule._print(this._config.allowed)}]`; var message = "Bad layering. You are not allowed to access '" + part + "' from here, allowed layers are: [" + LayeringRule._print(this._config.allowed) + "]";
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), message)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), message));
return; return;
} }
} }
} };
static _print(set) { LayeringRule._print = function (set) {
const r = []; var r = [];
set.forEach(e => r.push(e)); set.forEach(function (e) { return r.push(e); });
return r.join(', '); return r.join(', ');
} };
} return LayeringRule;
}(Lint.RuleWalker));

View File

@@ -16,9 +16,9 @@ export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] { public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
const parts = dirname(sourceFile.fileName).split(/\\|\//); const parts = dirname(sourceFile.fileName).split(/\\|\//);
const ruleArgs = this.getOptions().ruleArguments[0]; let ruleArgs = this.getOptions().ruleArguments[0];
let config: Config | undefined; let config: Config;
for (let i = parts.length - 1; i >= 0; i--) { for (let i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) { if (ruleArgs[parts[i]]) {
config = { config = {
@@ -26,8 +26,8 @@ export class Rule extends Lint.Rules.AbstractRule {
disallowed: new Set<string>() disallowed: new Set<string>()
}; };
Object.keys(ruleArgs).forEach(key => { Object.keys(ruleArgs).forEach(key => {
if (!config!.allowed.has(key)) { if (!config.allowed.has(key)) {
config!.disallowed.add(key); config.disallowed.add(key);
} }
}); });
break; break;
@@ -98,7 +98,7 @@ class LayeringRule extends Lint.RuleWalker {
} }
static _print(set: Set<string>): string { static _print(set: Set<string>): string {
const r: string[] = []; let r: string[] = [];
set.forEach(e => r.push(e)); set.forEach(e => r.push(e));
return r.join(', '); return r.join(', ');
} }

View File

@@ -1,22 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
return this.applyWithWalker(new NewBufferRuleWalker(sourceFile, this.getOptions()));
}
}
exports.Rule = Rule;
class NewBufferRuleWalker extends Lint.RuleWalker {
visitNewExpression(node) {
if (node.expression.kind === ts.SyntaxKind.Identifier && node.expression && node.expression.text === 'Buffer') {
this.addFailureAtNode(node, '`new Buffer` is deprecated. Consider Buffer.From or Buffer.alloc instead.');
}
super.visitNewExpression(node);
}
}

View File

@@ -1,23 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
export class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
return this.applyWithWalker(new NewBufferRuleWalker(sourceFile, this.getOptions()));
}
}
class NewBufferRuleWalker extends Lint.RuleWalker {
visitNewExpression(node: ts.NewExpression) {
if (node.expression.kind === ts.SyntaxKind.Identifier && node.expression && (node.expression as ts.Identifier).text === 'Buffer') {
this.addFailureAtNode(node, '`new Buffer` is deprecated. Consider Buffer.From or Buffer.alloc instead.');
}
super.visitNewExpression(node);
}
}

View File

@@ -1,57 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
const path_1 = require("path");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
if (/vs(\/|\\)editor/.test(sourceFile.fileName)) {
// the vs/editor folder is allowed to use the standalone editor
return [];
}
return this.applyWithWalker(new NoStandaloneEditorRuleWalker(sourceFile, this.getOptions()));
}
}
exports.Rule = Rule;
class NoStandaloneEditorRuleWalker extends Lint.RuleWalker {
constructor(file, opts) {
super(file, opts);
}
visitImportEqualsDeclaration(node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
visitImportDeclaration(node) {
this._validateImport(node.moduleSpecifier.getText(), node);
}
visitCallExpression(node) {
super.visitCallExpression(node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
this._validateImport(path.getText(), node);
}
}
// {{SQL CARBON EDIT}} - Rename node argument to _node to prevent errors since it is not used
_validateImport(path, _node) {
// remove quotes
path = path.slice(1, -1);
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(this.getSourceFile().fileName, path);
}
if (/vs(\/|\\)editor(\/|\\)standalone/.test(path)
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(path)) {
// {{SQL CARBON EDIT}}
//this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Not allowed to import standalone editor modules. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
}
}
}

View File

@@ -1,67 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import { join } from 'path';
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
if (/vs(\/|\\)editor/.test(sourceFile.fileName)) {
// the vs/editor folder is allowed to use the standalone editor
return [];
}
return this.applyWithWalker(new NoStandaloneEditorRuleWalker(sourceFile, this.getOptions()));
}
}
class NoStandaloneEditorRuleWalker extends Lint.RuleWalker {
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
}
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
this._validateImport(node.moduleSpecifier.getText(), node);
}
protected visitCallExpression(node: ts.CallExpression): void {
super.visitCallExpression(node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
this._validateImport(path.getText(), node);
}
}
// {{SQL CARBON EDIT}} - Rename node argument to _node to prevent errors since it is not used
private _validateImport(path: string, _node: ts.Node): void {
// remove quotes
path = path.slice(1, -1);
// resolve relative paths
if (path[0] === '.') {
path = join(this.getSourceFile().fileName, path);
}
if (
/vs(\/|\\)editor(\/|\\)standalone/.test(path)
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(path)
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(path)
) {
// {{SQL CARBON EDIT}}
//this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Not allowed to import standalone editor modules. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
}
}
}

View File

@@ -3,17 +3,32 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript"); var ts = require("typescript");
const Lint = require("tslint"); var Lint = require("tslint");
/** /**
* Implementation of the no-unexternalized-strings rule. * Implementation of the no-unexternalized-strings rule.
*/ */
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions())); function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
} }
} Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
function isStringLiteral(node) { function isStringLiteral(node) {
return node && node.kind === ts.SyntaxKind.StringLiteral; return node && node.kind === ts.SyntaxKind.StringLiteral;
@@ -24,76 +39,73 @@ function isObjectLiteral(node) {
function isPropertyAssignment(node) { function isPropertyAssignment(node) {
return node && node.kind === ts.SyntaxKind.PropertyAssignment; return node && node.kind === ts.SyntaxKind.PropertyAssignment;
} }
class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker { var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
constructor(file, opts) { __extends(NoUnexternalizedStringsRuleWalker, _super);
super(file, opts); function NoUnexternalizedStringsRuleWalker(file, opts) {
this.signatures = Object.create(null); var _this = _super.call(this, file, opts) || this;
this.ignores = Object.create(null); _this.signatures = Object.create(null);
this.messageIndex = undefined; _this.ignores = Object.create(null);
this.keyIndex = undefined; _this.messageIndex = undefined;
this.usedKeys = Object.create(null); _this.keyIndex = undefined;
const options = this.getOptions(); _this.usedKeys = Object.create(null);
const first = options && options.length > 0 ? options[0] : null; var options = _this.getOptions();
var first = options && options.length > 0 ? options[0] : null;
if (first) { if (first) {
if (Array.isArray(first.signatures)) { if (Array.isArray(first.signatures)) {
first.signatures.forEach((signature) => this.signatures[signature] = true); first.signatures.forEach(function (signature) { return _this.signatures[signature] = true; });
} }
if (Array.isArray(first.ignores)) { if (Array.isArray(first.ignores)) {
first.ignores.forEach((ignore) => this.ignores[ignore] = true); first.ignores.forEach(function (ignore) { return _this.ignores[ignore] = true; });
} }
if (typeof first.messageIndex !== 'undefined') { if (typeof first.messageIndex !== 'undefined') {
this.messageIndex = first.messageIndex; _this.messageIndex = first.messageIndex;
} }
if (typeof first.keyIndex !== 'undefined') { if (typeof first.keyIndex !== 'undefined') {
this.keyIndex = first.keyIndex; _this.keyIndex = first.keyIndex;
} }
} }
return _this;
} }
visitSourceFile(node) { NoUnexternalizedStringsRuleWalker.prototype.visitSourceFile = function (node) {
super.visitSourceFile(node); var _this = this;
Object.keys(this.usedKeys).forEach(key => { _super.prototype.visitSourceFile.call(this, node);
// Keys are quoted. Object.keys(this.usedKeys).forEach(function (key) {
let identifier = key.substr(1, key.length - 2); var occurrences = _this.usedKeys[key];
if (!NoUnexternalizedStringsRuleWalker.IDENTIFIER.test(identifier)) {
let occurrence = this.usedKeys[key][0];
this.addFailure(this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `The key ${occurrence.key.getText()} doesn't conform to a valid localize identifier`));
}
const occurrences = this.usedKeys[key];
if (occurrences.length > 1) { if (occurrences.length > 1) {
occurrences.forEach(occurrence => { occurrences.forEach(function (occurrence) {
this.addFailure((this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `Duplicate key ${occurrence.key.getText()} with different message value.`))); _this.addFailure((_this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), "Duplicate key " + occurrence.key.getText() + " with different message value.")));
}); });
} }
}); });
} };
visitStringLiteral(node) { NoUnexternalizedStringsRuleWalker.prototype.visitStringLiteral = function (node) {
this.checkStringLiteral(node); this.checkStringLiteral(node);
super.visitStringLiteral(node); _super.prototype.visitStringLiteral.call(this, node);
} };
checkStringLiteral(node) { NoUnexternalizedStringsRuleWalker.prototype.checkStringLiteral = function (node) {
const text = node.getText(); var text = node.getText();
const doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE; var doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
const info = this.findDescribingParent(node); var info = this.findDescribingParent(node);
// Ignore strings in import and export nodes. // Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) { if (info && info.isImport && doubleQuoted) {
const fix = [ var fix = [
Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''), Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''),
Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''), Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''),
]; ];
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, fix); this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, fix);
return; return;
} }
const callInfo = info ? info.callInfo : null; var callInfo = info ? info.callInfo : null;
const functionName = callInfo ? callInfo.callExpression.expression.getText() : null; var functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
if (functionName && this.ignores[functionName]) { if (functionName && this.ignores[functionName]) {
return; return;
} }
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) { if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
const s = node.getText(); var s = node.getText();
const fix = [ var fix = [
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`), Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")"),
]; ];
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Unexternalized string found: ${node.getText()}`, fix)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix));
return; return;
} }
// We have a single quoted string outside a localize function name. // We have a single quoted string outside a localize function name.
@@ -101,21 +113,22 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
return; return;
} }
// We have a string that is a direct argument into the localize call. // We have a string that is a direct argument into the localize call.
const keyArg = callInfo && callInfo.argIndex === this.keyIndex var keyArg = callInfo.argIndex === this.keyIndex
? callInfo.callExpression.arguments[this.keyIndex] ? callInfo.callExpression.arguments[this.keyIndex]
: null; : null;
if (keyArg) { if (keyArg) {
if (isStringLiteral(keyArg)) { if (isStringLiteral(keyArg)) {
this.recordKey(keyArg, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined); this.recordKey(keyArg, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
} }
else if (isObjectLiteral(keyArg)) { else if (isObjectLiteral(keyArg)) {
for (const property of keyArg.properties) { for (var i = 0; i < keyArg.properties.length; i++) {
var property = keyArg.properties[i];
if (isPropertyAssignment(property)) { if (isPropertyAssignment(property)) {
const name = property.name.getText(); var name_1 = property.name.getText();
if (name === 'key') { if (name_1 === 'key') {
const initializer = property.initializer; var initializer = property.initializer;
if (isStringLiteral(initializer)) { if (isStringLiteral(initializer)) {
this.recordKey(initializer, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined); this.recordKey(initializer, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
} }
break; break;
} }
@@ -123,42 +136,42 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
} }
} }
} }
const messageArg = callInfo.callExpression.arguments[this.messageIndex]; var messageArg = callInfo.callExpression.arguments[this.messageIndex];
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) { if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) {
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), `Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`)); this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal."));
return; return;
} }
} };
recordKey(keyNode, messageNode) { NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) {
const text = keyNode.getText(); var text = keyNode.getText();
// We have an empty key // We have an empty key
if (text.match(/(['"]) *\1/)) { if (text.match(/(['"]) *\1/)) {
if (messageNode) { if (messageNode) {
this.addFailureAtNode(keyNode, `Key is empty for message: ${messageNode.getText()}`); this.addFailureAtNode(keyNode, "Key is empty for message: " + messageNode.getText());
} }
else { else {
this.addFailureAtNode(keyNode, `Key is empty.`); this.addFailureAtNode(keyNode, "Key is empty.");
} }
return; return;
} }
let occurrences = this.usedKeys[text]; var occurrences = this.usedKeys[text];
if (!occurrences) { if (!occurrences) {
occurrences = []; occurrences = [];
this.usedKeys[text] = occurrences; this.usedKeys[text] = occurrences;
} }
if (messageNode) { if (messageNode) {
if (occurrences.some(pair => pair.message ? pair.message.getText() === messageNode.getText() : false)) { if (occurrences.some(function (pair) { return pair.message ? pair.message.getText() === messageNode.getText() : false; })) {
return; return;
} }
} }
occurrences.push({ key: keyNode, message: messageNode }); occurrences.push({ key: keyNode, message: messageNode });
} };
findDescribingParent(node) { NoUnexternalizedStringsRuleWalker.prototype.findDescribingParent = function (node) {
let parent; var parent;
while ((parent = node.parent)) { while ((parent = node.parent)) {
const kind = parent.kind; var kind = parent.kind;
if (kind === ts.SyntaxKind.CallExpression) { if (kind === ts.SyntaxKind.CallExpression) {
const callExpression = parent; var callExpression = parent;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(node) } }; return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(node) } };
} }
else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) { else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
@@ -172,9 +185,8 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
} }
node = parent; node = parent;
} }
return null; };
} NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.';
} NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.'; return NoUnexternalizedStringsRuleWalker;
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"'; }(Lint.RuleWalker));
NoUnexternalizedStringsRuleWalker.IDENTIFIER = /^[_a-zA-Z0-9][ .\-_a-zA-Z0-9]*$/;

Some files were not shown because too many files have changed in this diff Show More