Fix regressions in web build (#15920)
* Update build for web issues
* Fix remote\web package.json
* Remove extension activate test
* Add terminal entry point
* Bump distro
* Update docker files and pipelines
* Add linux task
* Update build pool
* Correct image name
* Update pipelines
* Fix indentation
* Update sql-web-build.yml for Azure Pipelines: add new build variable
* Update sql-web-build.yml for Azure Pipelines: revert previous commit
* Disable sourcemaps upload
* Remove compile step
* Fix yaml typo
* Fix dependsOn error
* Update yaml
* Add notebook parameters
* Remove web from desktop builds
* Add web createDrop script
* Bump distro
* Change execution mode on createDrop script
* Code review feedback
* Update CI hygiene cache key
* Bump distro
* Fix merge conflicts
* Revert "Fix merge conflicts" (reverts commit 06f7a58b6e0a065520b7686e8469e4e7682e157a)
* Bump distro to before smoke test update
@@ -83,7 +83,7 @@ async function main() {
console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
- await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
+ await (0, retry_1.retry)(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
}
main().then(() => {
console.log('Asset successfully created');
@@ -40,7 +40,7 @@ async function main() {
};
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
- await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
+ await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
console.log('Build successfully created');
@@ -39,7 +39,7 @@ async function publish(commit, files) {
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
- const blobName = path_1.basename(file);
+ const blobName = (0, path_1.basename)(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
@@ -58,7 +58,7 @@ function main() {
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
- const files = fileNames.map(fileName => path_1.join(directory, fileName));
+ const files = fileNames.map(fileName => (0, path_1.join)(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
@@ -39,7 +39,7 @@ async function main() {
}
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
- await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
+ await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {
console.log('Build successfully released');
@@ -62,7 +62,7 @@ async function sync(commit, quality) {
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
log(` Updating build in DB...`);
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
- await retry_1.retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
+ await (0, retry_1.retry)(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
log(` Done ✔️`);
}
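The only change in these compiled common scripts is how imported helpers are invoked: `retry_1.retry(...)` becomes `(0, retry_1.retry)(...)`. This matches the indirect-call form that newer TypeScript toolchains (such as the 4.3 dev build referenced in build/package.json) emit for calls to imported bindings, so the module namespace object is not passed as `this`; runtime behaviour is unchanged. A minimal sketch of the before/after emit, using a hypothetical retry helper:

```typescript
// retry.ts -- hypothetical stand-in for the build's retry helper
export async function retry<T>(fn: () => Promise<T>, attempts = 5): Promise<T> {
	let lastError: unknown;
	for (let i = 0; i < attempts; i++) {
		try {
			return await fn();
		} catch (err) {
			lastError = err; // a real implementation would back off before retrying
		}
	}
	throw lastError;
}

// caller.ts
import { retry } from './retry';
retry(() => Promise.resolve('ok'));

// CommonJS emit with an older tsc:   retry_1.retry(() => Promise.resolve('ok'));
// CommonJS emit with a newer tsc:    (0, retry_1.retry)(() => Promise.resolve('ok'));
// The (0, fn)(...) comma expression calls the function without binding `this`
// to the imported namespace object.
```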
@@ -1,10 +1,14 @@
- #Download base image ubuntu 18.04
- FROM ubuntu:18.04
+ #Download base image ubuntu 21.04
+ FROM ubuntu:21.04
ENV TZ=America/Los_Angeles
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

# Update Software repository
- RUN apt-get update
+ RUN apt-get update && apt-get upgrade -y

- RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0
+ RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
+     libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
+     libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++ python3-dev python3-pip

ADD ./ /opt/ads-server
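This appears to be the server container image consumed by the Docker job: the base moves from Ubuntu 18.04 to 21.04 and picks up xvfb plus the toolchain needed to build native modules. A quick local sanity check, run from the directory containing this Dockerfile (the image tag is illustrative):

```bash
# Build the image (tag name is arbitrary)
docker build -t ads-server-build:local .

# Confirm the base version and a few of the newly added packages landed
docker run --rm ads-server-build:local bash -c \
  'grep VERSION_ID /etc/os-release; dpkg -l | grep -E "xvfb|libkrb5-dev|libsecret-1-dev"'

# The ADS sources are copied to /opt/ads-server by the ADD step
docker run --rm ads-server-build:local ls /opt/ads-server | head
```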
@@ -14,22 +14,4 @@ TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

# # Publish Remote Extension Host
# LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
# SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
# SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
# SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"

# rm -rf $ROOT/azuredatastudio-server-*.tar.*
# (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME_WEB="azuredatastudio-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH_WEB="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME_WEB"

rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv vscode-reh-web-linux-x64 $SERVER_BUILD_NAME_WEB && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH_WEB $SERVER_BUILD_NAME_WEB)

node build/azure-pipelines/common/copyArtifacts.js
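This hunk trims the Linux createDrop.sh so it only produces the desktop client archive; the web remote-extension-host packaging moves to the new build/azure-pipelines/web/createDrop.sh added later in this diff. A quick check of what the trimmed script should leave behind, using the paths declared in the script itself (run from the repo root after a build):

```bash
# Desktop client archive is still produced here
ls -lh .build/linux/archive/azuredatastudio-linux-x64.tar.gz

# The server/ directory is no longer populated by this script
ls .build/linux/server 2>/dev/null \
  || echo "no server tarballs here anymore (they come from web/createDrop.sh)"
```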
@@ -94,8 +94,6 @@ steps:
  - script: |
      set -e
      yarn gulp vscode-linux-x64-min-ci
      yarn gulp vscode-web-min-ci
      yarn gulp vscode-reh-web-linux-x64-min
    displayName: Build
    env:
      VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

@@ -224,19 +222,6 @@ steps:
    displayName: 'Signing Extensions and Langpacks'
    condition: and(succeeded(), eq(variables['signed'], true))

  # - script: |
  #   set -e
  #   cd ./extensions/mssql/node_modules/@microsoft/ads-kerberos
  #   # npx node-gyp rebuild
  #   yarn install
  #   displayName: Recompile native node modules

  # - script: |
  #   set -e
  #   VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
  #   yarn gulp vscode-reh-web-linux-x64-min
  #   displayName: Build web server

  - script: |
      set -e
      ./build/azure-pipelines/linux/createDrop.sh
@@ -60,7 +60,7 @@ function getNewFileHeader(tag) {
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
- ` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
+ ` * See https://github.com/Microsoft/vscode/blob/main/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
@@ -50,30 +50,6 @@ jobs:
    extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects", "data-workspace"]
  timeoutInMinutes: 90

- job: LinuxWeb
  condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
  pool:
    vmImage: 'Ubuntu-18.04'
  container: linux-x64
  variables:
    VSCODE_ARCH: x64
  dependsOn:
  - Compile
  steps:
  - template: web/sql-product-build-web.yml
  timeoutInMinutes: 90

- job: Docker
  condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
  pool:
    vmImage: 'Ubuntu-18.04'
  container: linux-x64
  dependsOn:
  - Linux
  steps:
  - template: docker/sql-product-build-docker.yml
  timeoutInMinutes: 90

- job: Windows
  condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
  pool:

@@ -102,10 +78,8 @@ jobs:
  dependsOn:
  - macOS
  - Linux
  # - Docker
  - Windows
  - Windows_Test
  - LinuxWeb
  - macOS_Signing
  steps:
  - template: sql-release.yml
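The LinuxWeb and Docker jobs shown above are removed from the desktop pipeline (they now live in the standalone sql-web-build.yml added below), so the release job's dependsOn list has to shrink in the same commit; otherwise the stage would wait on jobs that no longer exist. A minimal sketch of the pattern, with illustrative job names:

```yaml
# Sketch: when a job moves to another pipeline, drop it from every dependsOn
# that referenced it (job names here are illustrative).
jobs:
- job: Build
  steps:
  - script: echo build

- job: Release
  dependsOn:
  - Build          # keep only jobs that still exist in this pipeline
  # - LinuxWeb     # removed: now runs in sql-web-build.yml
  steps:
  - script: echo release
```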
@@ -91,8 +91,6 @@ steps:
      yarn gulp compile-build
      yarn gulp compile-extensions-build
      yarn gulp minify-vscode
      yarn gulp vscode-reh-linux-x64-min
      yarn gulp vscode-reh-web-linux-x64-min
    displayName: Compile

  - script: |
build/azure-pipelines/sql-web-build.yml (new file, 29 lines)
@@ -0,0 +1,29 @@
resources:
  containers:
  - container: linux-x64
    image: sqltoolscontainers.azurecr.io/web-build-image:1
    endpoint: ContainerRegistry

jobs:
- job: LinuxWeb
  pool:
    vmImage: 'Ubuntu-18.04'
  container: linux-x64
  variables:
    VSCODE_ARCH: x64
  steps:
  - template: web/sql-product-build-web.yml
  timeoutInMinutes: 90

- job: Docker
  pool:
    vmImage: 'Ubuntu-18.04'
  container: linux-x64
  dependsOn:
  - LinuxWeb
  steps:
  - template: docker/sql-product-build-docker.yml
  timeoutInMinutes: 90

trigger: none
pr: none
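With trigger: none and pr: none this web pipeline never runs on pushes or pull requests; it has to be queued manually from Azure DevOps, by a schedule, or by another pipeline. For example, assuming the definition is registered under a name like "sql-web-build" and the azure-devops CLI extension is installed (both names illustrative):

```bash
# Queue the standalone web build by hand (pipeline and branch names are illustrative)
az pipelines run --name "sql-web-build" --branch main
```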
build/azure-pipelines/web/build/Dockerfile (new file, 24 lines)
@@ -0,0 +1,24 @@
#Download base image ubuntu 21.04
FROM ubuntu:21.04
ENV TZ=America/Los_Angeles
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

# Update Software repository
RUN apt-get update && apt-get upgrade -y

RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
    libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
    libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++ python

#docker
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN apt-key fingerprint 0EBFCD88
RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
RUN apt-get update
RUN apt-get -y install docker-ce docker-ce-cli containerd.io

# This image needs to be built on a linux host; some weird stuff happens and the xvfb service won't start
# if built on a windows host.
ADD ./xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults
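This is the container the LinuxWeb and Docker jobs run in: Ubuntu 21.04 with the native-module toolchain, the Docker client packages, and an Xvfb init script baked in. As the comment warns, it should be built on a Linux host. A hedged local smoke test (image tag is illustrative):

```bash
# Build from build/azure-pipelines/web/build on a Linux host
docker build -t web-build-image:local .

# Start the baked-in xvfb init script and confirm a display socket appears
docker run --rm web-build-image:local bash -c \
  '/etc/init.d/xvfb start; sleep 1; ls /tmp/.X11-unix/'
```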
build/azure-pipelines/web/build/xvfb.init (new file, 53 lines)
@@ -0,0 +1,53 @@
#!/bin/bash
#
# /etc/rc.d/init.d/xvfbd
#
# chkconfig: 345 95 28
# description: Starts/Stops X Virtual Framebuffer server
# processname: Xvfb
#
### BEGIN INIT INFO
# Provides: xvfb
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start xvfb at boot time
# Description: Enable xvfb provided by daemon.
### END INIT INFO

[ "${NETWORKING}" = "no" ] && exit 0

PROG="/usr/bin/Xvfb"
PROG_OPTIONS=":10 -ac -screen 0 1024x768x24"
PROG_OUTPUT="/tmp/Xvfb.out"

case "$1" in
    start)
        echo "Starting : X Virtual Frame Buffer "
        $PROG $PROG_OPTIONS>>$PROG_OUTPUT 2>&1 &
        disown -ar
        ;;
    stop)
        echo "Shutting down : X Virtual Frame Buffer"
        killproc $PROG
        RETVAL=$?
        [ $RETVAL -eq 0 ] && /bin/rm -f /var/lock/subsys/Xvfb
        /var/run/Xvfb.pid
        echo
        ;;
    restart|reload)
        $0 stop
        $0 start
        RETVAL=$?
        ;;
    status)
        status Xvfb
        RETVAL=$?
        ;;
    *)
        echo $"Usage: $0 (start|stop|restart|reload|status)"
        exit 1
esac

exit $RETVAL
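Inside the build container the virtual display is managed through this init script, so any UI-dependent step just needs to start the service and export a DISPLAY that matches the ":10" configured in PROG_OPTIONS. Roughly (the test command is illustrative):

```bash
# Start the virtual framebuffer registered by the Dockerfile above
service xvfb start            # or: /etc/init.d/xvfb start

# Point GUI/Electron steps at the display configured in PROG_OPTIONS
export DISPLAY=:10
yarn smoketest                # illustrative: any step that needs an X display
```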
build/azure-pipelines/web/createDrop.sh (new executable file, 35 lines)
@@ -0,0 +1,35 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."

# Publish tarball
mkdir -p $REPO/.build/linux/{archive,server}
PLATFORM_LINUX="linux-x64"
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"

rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

# # Publish Remote Extension Host
# LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
# SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
# SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
# SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"

# rm -rf $ROOT/azuredatastudio-server-*.tar.*
# (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME_WEB="azuredatastudio-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH_WEB="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME_WEB"

rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv vscode-reh-web-linux-x64 $SERVER_BUILD_NAME_WEB && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH_WEB $SERVER_BUILD_NAME_WEB)

node build/azure-pipelines/common/copyArtifacts.js
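The web createDrop step renames the vscode-reh-web-linux-x64 output to the azuredatastudio-server naming scheme and tars it with owner/group forced to 0, so the archive unpacks with root-owned files regardless of the CI user. A quick way to confirm the drop contents after the script runs (paths come from the script itself):

```bash
# The web server tarball produced by this script
ls -lh .build/linux/server/azuredatastudio-server-linux-x64-web.tar.gz

# Spot-check contents and the normalized ownership (--owner=0 --group=0)
tar -tvzf .build/linux/server/azuredatastudio-server-linux-x64-web.tar.gz | head
```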
@@ -1,4 +1,3 @@
|
||||
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
@@ -13,17 +12,6 @@ steps:
|
||||
inputs:
|
||||
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
||||
KeyVaultName: ado-secrets
|
||||
SecretsFilter: 'github-distro-mixin-password'
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
displayName: Download compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Pipeline.Workspace)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -44,33 +32,46 @@ steps:
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
# displayName: Restore Cache - Node Modules
|
||||
# inputs:
|
||||
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
# vstsFeed: 'npm-vscode'
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache key
|
||||
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore Cache - Node Modules
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||
displayName: Install dependencies
|
||||
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables['NODE_MODULES_RESTORED'], 'true'))
|
||||
|
||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
# displayName: Save Cache - Node Modules
|
||||
# inputs:
|
||||
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
# vstsFeed: 'npm-vscode'
|
||||
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
# - script: |
|
||||
# set -e
|
||||
# yarn postinstall
|
||||
# displayName: Run postinstall scripts
|
||||
# condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['NODE_MODULES_RESTORED'], 'true'))
|
||||
|
||||
# Mixin must run before optimize, because the CSS loader will
|
||||
# inline small SVGs
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/mixin
|
||||
@@ -78,25 +79,128 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-web-min-ci
|
||||
displayName: Build
|
||||
yarn sqllint
|
||||
yarn gulp hygiene
|
||||
yarn strict-vscode
|
||||
yarn valid-layers-check
|
||||
displayName: Run hygiene, eslint
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
# upload only the workbench.web.api.js source maps because
|
||||
# we just compiled these bits in the previous step and the
|
||||
# general task to upload source maps has already been run
|
||||
- script: |
|
||||
set -e
|
||||
AZURE_STORAGE_ACCOUNT="$(sourcemap-storage-account)" \
|
||||
AZURE_STORAGE_ACCESS_KEY="$(sourcemap-storage-key)" \
|
||||
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.api.js.map
|
||||
displayName: Upload sourcemaps (Web)
|
||||
yarn gulp compile-build
|
||||
yarn gulp compile-extensions-build
|
||||
yarn gulp minify-vscode
|
||||
yarn gulp vscode-linux-x64-min-ci
|
||||
yarn gulp vscode-web-min-ci
|
||||
yarn gulp vscode-reh-linux-x64-min
|
||||
yarn gulp vscode-reh-web-linux-x64-yarnrc-extensions
|
||||
yarn gulp vscode-reh-web-linux-x64-min
|
||||
displayName: Compile
|
||||
|
||||
# - script: |
|
||||
# set -e
|
||||
# AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||
# AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||
# VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
# ./build/azure-pipelines/web/publish.sh
|
||||
# displayName: Publish
|
||||
# AZURE_STORAGE_ACCOUNT="$(sourcemap-storage-account)" \
|
||||
# AZURE_STORAGE_ACCESS_KEY="$(sourcemap-storage-key)" \
|
||||
# node build/azure-pipelines/upload-sourcemaps
|
||||
# displayName: Upload sourcemaps
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
VERSION=$(node -p "require(\"./package.json\").version")
|
||||
|
||||
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$BUILD_SOURCEVERSION\" }" > ".build/version.json"
|
||||
|
||||
node build/azure-pipelines/common/copyArtifacts.js
|
||||
displayName: Write Version Information
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: 'Publish Artifact: drop'
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
|
||||
displayName: Compress compilation artifact
|
||||
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
artifactName: Compilation
|
||||
displayName: Publish compilation artifact
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp package-rebuild-extensions
|
||||
yarn gulp compile-extensions
|
||||
yarn gulp package-external-extensions
|
||||
displayName: Package External extensions
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp package-langpacks
|
||||
displayName: Package Langpacks
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp vscode-linux-x64-build-deb
|
||||
displayName: Build Deb
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp vscode-linux-x64-build-rpm
|
||||
displayName: Build Rpm
|
||||
|
||||
- task: UseDotNet@2
|
||||
displayName: 'Install .NET Core sdk for signing'
|
||||
inputs:
|
||||
packageType: sdk
|
||||
version: 2.1.x
|
||||
installationPath: $(Agent.ToolsDirectory)/dotnet
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
ConnectedServiceName: 'Code Signing'
|
||||
FolderPath: '$(Build.SourcesDirectory)/.build'
|
||||
Pattern: 'extensions/*.vsix,langpacks/*.vsix'
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: |
|
||||
[
|
||||
{
|
||||
"keyCode": "CP-233016",
|
||||
"operationSetCode": "OpcSign",
|
||||
"parameters": [
|
||||
{
|
||||
"parameterName": "FileDigest",
|
||||
"parameterValue": "/fd \"SHA256\""
|
||||
}
|
||||
],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
},
|
||||
{
|
||||
"keyCode": "CP-233016",
|
||||
"operationSetCode": "OpcVerify",
|
||||
"parameters": [],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
SessionTimeout: 120
|
||||
displayName: 'Signing Extensions and Langpacks'
|
||||
condition: and(succeeded(), eq(variables['signed'], true))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
./build/azure-pipelines/web/createDrop.sh
|
||||
displayName: Create Drop
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: 'Publish Artifact: drop'
|
||||
continueOnError: true
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: 'Component Detection'
|
||||
inputs:
|
||||
failOnAlert: true
|
||||
|
||||
@@ -23,7 +23,7 @@ async function main() {
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
- await vscode_universal_1.makeUniversalApp({
+ await (0, vscode_universal_1.makeUniversalApp)({
x64AppPath,
arm64AppPath,
x64AsarPath,

@@ -33,7 +33,7 @@ async function main() {
'Credits.rtf',
'CodeResources',
'fsevents.node',
- 'Info.plist', // TODO@deepak1556: regressed with 11.4.2 internal builds
+ 'Info.plist',
'.npmrc'
],
outAppPath,
@@ -28,6 +28,7 @@ const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
const { vscodeWebEntryPoints, vscodeWebResourceIncludes, createVSCodeWebFileContentMapper } = require('./gulpfile.vscode.web');
const cp = require('child_process');
const { rollupAngular } = require('./lib/rollup');

const REPO_ROOT = path.dirname(__dirname);
const commit = util.getVersion(REPO_ROOT);

@@ -114,6 +115,10 @@ const serverEntryPoints = [
	{
		name: 'vs/platform/files/node/watcher/nsfw/watcherApp',
		exclude: ['vs/css', 'vs/nls']
	},
	{
		name: 'vs/platform/terminal/node/ptyHostMain',
		exclude: ['vs/css', 'vs/nls']
	}
];

@@ -437,8 +442,46 @@ function packagePkgTask(platform, arch, pkgTarget) {
	const sourceFolderName = `out-vscode-${type}${dashed(minified)}`;
	const destinationFolderName = `vscode-${type}${dashed(platform)}${dashed(arch)}`;

	const rollupAngularTask = task.define(`vscode-web-${type}${dashed(platform)}${dashed(arch)}-angular-rollup`, () => {
		return rollupAngular(REMOTE_FOLDER);
	});
	gulp.task(rollupAngularTask);

	// rebuild extensions that contain native npm modules or have conditional webpack rules
	// when building with the web .yarnrc settings (e.g. runtime=node, etc.)
	// this is needed to have correct module set published with desired ABI
	const rebuildExtensions = ['big-data-cluster', 'mssql', 'notebook'];
	const EXTENSIONS = path.join(REPO_ROOT, 'extensions');
	function exec(cmdLine, cwd) {
		console.log(cmdLine);
		cp.execSync(cmdLine, { stdio: 'inherit', cwd: cwd });
	}
	const tasks = [];
	rebuildExtensions.forEach(scope => {
		const root = path.join(EXTENSIONS, scope);
		tasks.push(
			() => gulp.src(path.join(REMOTE_FOLDER, '.yarnrc')).pipe(gulp.dest(root)),
			util.rimraf(path.join(root, 'node_modules')),
			() => exec('yarn', root)
		);
	});
	const yarnrcExtensions = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}-yarnrc-extensions`, task.series(...tasks));
	gulp.task(yarnrcExtensions);

	const cleanupExtensions = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}-cleanup-extensions`, () => {
		return Promise.all(rebuildExtensions.map(scope => {
			const root = path.join(EXTENSIONS, scope);
			return util.rimraf(path.join(root, '.yarnrc'))();
		}));
	});
	gulp.task(cleanupExtensions);

	const serverTaskCI = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}${dashed(minified)}-ci`, task.series(
		gulp.task(`node-${platform}-${platform === 'darwin' ? 'x64' : arch}`),
		yarnrcExtensions,
		compileExtensionsBuildTask,
		cleanupExtensions,
		rollupAngularTask,
		util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
		packageTask(type, platform, arch, sourceFolderName, destinationFolderName)
	));
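The yarnrc-extensions / cleanup-extensions pair exists because big-data-cluster, mssql and notebook carry native npm modules: before compiling extensions for the reh-web target, the remote .yarnrc is copied into each extension folder, node_modules is wiped, and yarn reinstalls against whatever runtime/ABI that .yarnrc pins; the temporary .yarnrc is removed again afterwards. For reference, a remote .yarnrc of this kind looks roughly like the following; the exact values are whatever the repo's remote/.yarnrc pins, not something this diff shows:

```
# illustrative shape of remote/.yarnrc (values are assumptions)
disturl "https://nodejs.org/dist"
target "14.16.0"
runtime "node"
```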
@@ -25,7 +25,7 @@ async function downloadExtensionDetails(extension) {
|
||||
const promises = [];
|
||||
for (const fileName of contentFileNames) {
|
||||
promises.push(new Promise(resolve => {
|
||||
got_1.default(`${repositoryContentBaseUrl}/${fileName}`)
|
||||
(0, got_1.default)(`${repositoryContentBaseUrl}/${fileName}`)
|
||||
.then(response => {
|
||||
resolve({ fileName, body: response.rawBody });
|
||||
})
|
||||
|
||||
@@ -17,7 +17,7 @@ const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const os = require("os");
|
||||
const watch = require('./watch');
|
||||
const reporter = reporter_1.createReporter();
|
||||
const reporter = (0, reporter_1.createReporter)();
|
||||
function getTypeScriptCompilerOptions(src) {
|
||||
const rootDir = path.join(__dirname, `../../${src}`);
|
||||
let options = {};
|
||||
|
||||
@@ -21,7 +21,7 @@ module.exports = new class {
|
||||
const configs = context.options;
|
||||
for (const config of configs) {
|
||||
if (minimatch(context.getFilename(), config.target)) {
|
||||
return utils_1.createImportRuleListener((node, value) => this._checkImport(context, config, node, value));
|
||||
return (0, utils_1.createImportRuleListener)((node, value) => this._checkImport(context, config, node, value));
|
||||
}
|
||||
}
|
||||
return {};
|
||||
@@ -29,7 +29,7 @@ module.exports = new class {
|
||||
_checkImport(context, config, node, path) {
|
||||
// resolve relative paths
|
||||
if (path[0] === '.') {
|
||||
path = path_1.join(context.getFilename(), path);
|
||||
path = (0, path_1.join)(context.getFilename(), path);
|
||||
}
|
||||
let restrictions;
|
||||
if (typeof config.restrictions === 'string') {
|
||||
|
||||
@@ -17,7 +17,7 @@ module.exports = new class {
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const fileDirname = path_1.dirname(context.getFilename());
|
||||
const fileDirname = (0, path_1.dirname)(context.getFilename());
|
||||
const parts = fileDirname.split(/\\|\//);
|
||||
const ruleArgs = context.options[0];
|
||||
let config;
|
||||
@@ -39,11 +39,11 @@ module.exports = new class {
|
||||
// nothing
|
||||
return {};
|
||||
}
|
||||
return utils_1.createImportRuleListener((node, path) => {
|
||||
return (0, utils_1.createImportRuleListener)((node, path) => {
|
||||
if (path[0] === '.') {
|
||||
path = path_1.join(path_1.dirname(context.getFilename()), path);
|
||||
path = (0, path_1.join)((0, path_1.dirname)(context.getFilename()), path);
|
||||
}
|
||||
const parts = path_1.dirname(path).split(/\\|\//);
|
||||
const parts = (0, path_1.dirname)(path).split(/\\|\//);
|
||||
for (let i = parts.length - 1; i >= 0; i--) {
|
||||
const part = parts[i];
|
||||
if (config.allowed.has(part)) {
|
||||
|
||||
@@ -20,10 +20,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
|
||||
return utils_1.createImportRuleListener((node, path) => {
|
||||
return (0, utils_1.createImportRuleListener)((node, path) => {
|
||||
// resolve relative paths
|
||||
if (path[0] === '.') {
|
||||
path = path_1.join(context.getFilename(), path);
|
||||
path = (0, path_1.join)(context.getFilename(), path);
|
||||
}
|
||||
if (/vs(\/|\\)nls/.test(path)) {
|
||||
context.report({
|
||||
|
||||
@@ -21,10 +21,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
|
||||
// the vs/editor folder is allowed to use the standalone editor
|
||||
return {};
|
||||
}
|
||||
return utils_1.createImportRuleListener((node, path) => {
|
||||
return (0, utils_1.createImportRuleListener)((node, path) => {
|
||||
// resolve relative paths
|
||||
if (path[0] === '.') {
|
||||
path = path_1.join(context.getFilename(), path);
|
||||
path = (0, path_1.join)(context.getFilename(), path);
|
||||
}
|
||||
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path)
|
||||
|
||||
@@ -15,7 +15,7 @@ module.exports = new (_a = class TranslationRemind {
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return utils_1.createImportRuleListener((node, path) => this._checkImport(context, node, path));
|
||||
return (0, utils_1.createImportRuleListener)((node, path) => this._checkImport(context, node, path));
|
||||
}
|
||||
_checkImport(context, node, path) {
|
||||
if (path !== TranslationRemind.NLS_MODULE) {
|
||||
@@ -31,7 +31,7 @@ module.exports = new (_a = class TranslationRemind {
|
||||
let resourceDefined = false;
|
||||
let json;
|
||||
try {
|
||||
json = fs_1.readFileSync('./build/lib/i18n.resources.json', 'utf8');
|
||||
json = (0, fs_1.readFileSync)('./build/lib/i18n.resources.json', 'utf8');
|
||||
}
|
||||
catch (e) {
|
||||
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
|
||||
|
||||
@@ -144,7 +144,7 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
|
||||
console.error(packagedDependencies);
|
||||
result.emit('error', err);
|
||||
});
|
||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
||||
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
|
||||
}
|
||||
function fromLocalNormal(extensionPath) {
|
||||
const result = es.through();
|
||||
@@ -162,7 +162,7 @@ function fromLocalNormal(extensionPath) {
|
||||
es.readArray(files).pipe(result);
|
||||
})
|
||||
.catch(err => result.emit('error', err));
|
||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
||||
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
|
||||
}
|
||||
exports.fromLocalNormal = fromLocalNormal;
|
||||
const baseHeaders = {
|
||||
|
||||
@@ -53,8 +53,8 @@ define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
|
||||
* Returns a stream containing the patched JavaScript and source maps.
|
||||
*/
|
||||
function nls() {
|
||||
const input = event_stream_1.through();
|
||||
const output = input.pipe(event_stream_1.through(function (f) {
|
||||
const input = (0, event_stream_1.through)();
|
||||
const output = input.pipe((0, event_stream_1.through)(function (f) {
|
||||
if (!f.sourceMap) {
|
||||
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
|
||||
}
|
||||
@@ -72,7 +72,7 @@ function nls() {
|
||||
}
|
||||
_nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
|
||||
}));
|
||||
return event_stream_1.duplex(input, output);
|
||||
return (0, event_stream_1.duplex)(input, output);
|
||||
}
|
||||
exports.nls = nls;
|
||||
function isImportNode(ts, node) {
|
||||
|
||||
@@ -98,7 +98,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest, fileContentMapper
|
||||
return es.readArray(treatedSources)
|
||||
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
|
||||
.pipe(concat(dest))
|
||||
.pipe(stats_1.createStatsStream(dest));
|
||||
.pipe((0, stats_1.createStatsStream)(dest));
|
||||
}
|
||||
function toBundleStream(src, bundledFileHeader, bundles, fileContentMapper) {
|
||||
return es.merge(bundles.map(function (bundle) {
|
||||
@@ -155,7 +155,7 @@ function optimizeTask(opts) {
|
||||
addComment: true,
|
||||
includeContent: true
|
||||
}))
|
||||
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({
|
||||
.pipe(opts.languages && opts.languages.length ? (0, i18n_1.processNlsFiles)({
|
||||
fileHeader: bundledFileHeader,
|
||||
languages: opts.languages
|
||||
}) : es.through())
|
||||
|
||||
@@ -12,7 +12,7 @@ const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
|
||||
const rootDir = path.resolve(__dirname, '..', '..');
|
||||
function runProcess(command, args = []) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const child = child_process_1.spawn(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
|
||||
const child = (0, child_process_1.spawn)(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
|
||||
child.on('exit', err => !err ? resolve() : process.exit(err !== null && err !== void 0 ? err : 1));
|
||||
child.on('error', reject);
|
||||
});
|
||||
|
||||
build/lib/rollup.js (new file, 89 lines; compiled output of build/lib/rollup.ts below)
@@ -0,0 +1,89 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.rollupAngular = void 0;
|
||||
const fs = require("fs");
|
||||
const rollup = require("rollup");
|
||||
const path = require("path");
|
||||
// getting around stupid import rules
|
||||
const nodeResolve = require('rollup-plugin-node-resolve');
|
||||
const commonjs = require('rollup-plugin-commonjs');
|
||||
async function rollupModule(options) {
|
||||
const moduleName = options.moduleName;
|
||||
try {
|
||||
const inputFile = options.inputFile;
|
||||
const outputDirectory = options.outputDirectory;
|
||||
await fs.promises.mkdir(outputDirectory, {
|
||||
recursive: true
|
||||
});
|
||||
const outputFileName = options.outputFileName;
|
||||
const outputMapName = `${outputFileName}.map`;
|
||||
const external = options.external || [];
|
||||
const outputFilePath = path.resolve(outputDirectory, outputFileName);
|
||||
const outputMapPath = path.resolve(outputDirectory, outputMapName);
|
||||
const bundle = await rollup.rollup({
|
||||
input: inputFile,
|
||||
plugins: [
|
||||
nodeResolve(),
|
||||
commonjs(),
|
||||
],
|
||||
external,
|
||||
});
|
||||
const generatedBundle = await bundle.generate({
|
||||
name: moduleName,
|
||||
format: 'umd',
|
||||
sourcemap: true
|
||||
});
|
||||
const result = generatedBundle.output[0];
|
||||
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
|
||||
await fs.promises.writeFile(outputFilePath, result.code);
|
||||
await fs.promises.writeFile(outputMapPath, result.map);
|
||||
return {
|
||||
name: moduleName,
|
||||
result: true
|
||||
};
|
||||
}
|
||||
catch (ex) {
|
||||
return {
|
||||
name: moduleName,
|
||||
result: false,
|
||||
exception: ex
|
||||
};
|
||||
}
|
||||
}
|
||||
function rollupAngular(root) {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
|
||||
const tasks = modules.map((module) => {
|
||||
return rollupModule({
|
||||
moduleName: `ng.${module}`,
|
||||
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
|
||||
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
|
||||
outputFileName: `${module}.umd.js`,
|
||||
external: modules.map(mn => `@angular/${mn}`)
|
||||
});
|
||||
});
|
||||
// array of booleans
|
||||
const x = await Promise.all(tasks);
|
||||
const result = x.reduce((prev, current) => {
|
||||
if (!current.result) {
|
||||
prev.fails.push(current.name);
|
||||
prev.exceptions.push(current.exception);
|
||||
prev.result = false;
|
||||
}
|
||||
return prev;
|
||||
}, {
|
||||
fails: [],
|
||||
exceptions: [],
|
||||
result: true,
|
||||
});
|
||||
if (!result.result) {
|
||||
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
exports.rollupAngular = rollupAngular;
|
||||
build/lib/rollup.ts (new file, 109 lines)
@@ -0,0 +1,109 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as fs from 'fs';
import * as rollup from 'rollup';
import * as path from 'path';

// getting around import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');

export interface IRollupOptions {
	moduleName: string;
	inputFile: string;
	outputDirectory: string;
	outputFileName: string;
	external?: string[];
}

async function rollupModule(options: IRollupOptions) {
	const moduleName = options.moduleName;
	try {
		const inputFile = options.inputFile;
		const outputDirectory = options.outputDirectory;

		await fs.promises.mkdir(outputDirectory, {
			recursive: true
		});

		const outputFileName = options.outputFileName;
		const outputMapName = `${outputFileName}.map`;
		const external = options.external || [];

		const outputFilePath = path.resolve(outputDirectory, outputFileName);
		const outputMapPath = path.resolve(outputDirectory, outputMapName);

		const bundle = await rollup.rollup({
			input: inputFile,
			plugins: [
				nodeResolve(),
				commonjs(),
			],
			external,
		});

		const generatedBundle = await bundle.generate({
			name: moduleName,
			format: 'umd',
			sourcemap: true
		});

		const result = generatedBundle.output[0];
		result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);

		await fs.promises.writeFile(outputFilePath, result.code);
		await fs.promises.writeFile(outputMapPath, <any>result.map);

		return {
			name: moduleName,
			result: true
		};
	} catch (ex) {
		return {
			name: moduleName,
			result: false,
			exception: ex
		};
	}
}

export function rollupAngular(root: string): Promise<void> {
	return new Promise(async (resolve, reject) => {

		const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
		const tasks = modules.map((module) => {
			return rollupModule({
				moduleName: `ng.${module}`,
				inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
				outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
				outputFileName: `${module}.umd.js`,
				external: modules.map(mn => `@angular/${mn}`)
			});
		});

		// array of booleans
		const x = await Promise.all(tasks);

		const result = x.reduce<{ fails: string[]; exceptions: string[]; result: boolean }>((prev, current) => {
			if (!current.result) {
				prev.fails.push(current.name);
				prev.exceptions.push(current.exception);
				prev.result = false;
			}
			return prev;
		}, {
			fails: [],
			exceptions: [],
			result: true,
		});

		if (!result.result) {
			return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
		}
		resolve();
	});

}
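rollupAngular is consumed by the new vscode-web-*-angular-rollup gulp task above, which calls it with REMOTE_FOLDER so the @angular packages under the remote node_modules get regenerated UMD bundles. A standalone invocation would look roughly like this (the root path here is an assumption for illustration, matching what REMOTE_FOLDER points at in the gulpfile):

```typescript
import * as path from 'path';
import { rollupAngular } from './build/lib/rollup';

// Rebuild the UMD bundles for every @angular package under <root>/node_modules.
rollupAngular(path.join(__dirname, 'remote'))
	.then(() => console.log('Angular UMD bundles written to node_modules/@angular/*/bundles'))
	.catch(err => {
		console.error(err);
		process.exit(1);
	});
```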
@@ -52,6 +52,9 @@
    "mkdirp": "^1.0.4",
    "p-limit": "^3.1.0",
    "plist": "^3.0.1",
+   "rollup": "^1.20.3",
+   "rollup-plugin-commonjs": "^10.1.0",
+   "rollup-plugin-node-resolve": "^5.2.0",
    "source-map": "0.6.1",
    "typescript": "^4.3.0-dev.20210426",
    "vsce": "1.48.0",
build/yarn.lock (116 lines changed)
@@ -425,6 +425,13 @@
|
||||
"@types/node" "*"
|
||||
"@types/tough-cookie" "*"
|
||||
|
||||
"@types/resolve@0.0.8":
|
||||
version "0.0.8"
|
||||
resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194"
|
||||
integrity sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==
|
||||
dependencies:
|
||||
"@types/node" "*"
|
||||
|
||||
"@types/responselike@*", "@types/responselike@^1.0.0":
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29"
|
||||
@@ -576,6 +583,11 @@
|
||||
dependencies:
|
||||
eslint-visitor-keys "^1.1.0"
|
||||
|
||||
acorn@^7.1.0:
|
||||
version "7.4.1"
|
||||
resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
|
||||
integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==
|
||||
|
||||
ajv@^6.12.3:
|
||||
version "6.12.6"
|
||||
resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
|
||||
@@ -736,6 +748,11 @@ buffer-fill@^1.0.0:
|
||||
resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c"
|
||||
integrity sha1-+PeLdniYiO858gXNY39o5wISKyw=
|
||||
|
||||
builtin-modules@^3.1.0:
|
||||
version "3.2.0"
|
||||
resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887"
|
||||
integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==
|
||||
|
||||
byline@^5.0.0:
|
||||
version "5.0.0"
|
||||
resolved "https://registry.yarnpkg.com/byline/-/byline-5.0.0.tgz#741c5216468eadc457b03410118ad77de8c1ddb1"
|
||||
@@ -1063,6 +1080,11 @@ estraverse@^5.2.0:
|
||||
resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880"
|
||||
integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==
|
||||
|
||||
estree-walker@^0.6.1:
|
||||
version "0.6.1"
|
||||
resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.6.1.tgz#53049143f40c6eb918b23671d1fe3219f3a1b362"
|
||||
integrity sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==
|
||||
|
||||
events@^3.0.0:
|
||||
version "3.2.0"
|
||||
resolved "https://registry.yarnpkg.com/events/-/events-3.2.0.tgz#93b87c18f8efcd4202a461aec4dfc0556b639379"
|
||||
@@ -1138,6 +1160,11 @@ fs.realpath@^1.0.0:
|
||||
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
|
||||
integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
|
||||
|
||||
function-bind@^1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
|
||||
integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
|
||||
|
||||
get-stream@^5.1.0:
|
||||
version "5.2.0"
|
||||
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3"
|
||||
@@ -1204,6 +1231,13 @@ har-validator@~5.1.3:
|
||||
ajv "^6.12.3"
|
||||
har-schema "^2.0.0"
|
||||
|
||||
has@^1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
|
||||
integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
|
||||
dependencies:
|
||||
function-bind "^1.1.1"
|
||||
|
||||
hash-base@^3.0.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33"
|
||||
@@ -1263,6 +1297,13 @@ inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1:
|
||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
|
||||
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
|
||||
|
||||
is-core-module@^2.2.0:
|
||||
version "2.4.0"
|
||||
resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.4.0.tgz#8e9fc8e15027b011418026e98f0e6f4d86305cc1"
|
||||
integrity sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A==
|
||||
dependencies:
|
||||
has "^1.0.3"
|
||||
|
||||
is-extglob@^2.1.1:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
|
||||
@@ -1275,6 +1316,18 @@ is-glob@^4.0.1:
|
||||
dependencies:
|
||||
is-extglob "^2.1.1"
|
||||
|
||||
is-module@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591"
|
||||
integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=
|
||||
|
||||
is-reference@^1.1.2:
|
||||
version "1.2.1"
|
||||
resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-1.2.1.tgz#8b2dac0b371f4bc994fdeaba9eb542d03002d0b7"
|
||||
integrity sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==
|
||||
dependencies:
|
||||
"@types/estree" "*"
|
||||
|
||||
is-typedarray@~1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
|
||||
@@ -1404,6 +1457,13 @@ lru-cache@^6.0.0:
|
||||
dependencies:
|
||||
yallist "^4.0.0"
|
||||
|
||||
magic-string@^0.25.2:
|
||||
version "0.25.7"
|
||||
resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051"
|
||||
integrity sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==
|
||||
dependencies:
|
||||
sourcemap-codec "^1.4.4"
|
||||
|
||||
markdown-it@^8.3.1:
|
||||
version "8.4.2"
|
||||
resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-8.4.2.tgz#386f98998dc15a37722aa7722084f4020bdd9b54"
|
||||
@@ -1580,6 +1640,11 @@ path-key@^3.1.0:
|
||||
resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
|
||||
integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
|
||||
|
||||
path-parse@^1.0.6:
|
||||
version "1.0.7"
|
||||
resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
|
||||
integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
|
||||
|
||||
pend@~1.2.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50"
|
||||
@@ -1706,6 +1771,14 @@ resolve-alpn@^1.0.0:
|
||||
resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.0.0.tgz#745ad60b3d6aff4b4a48e01b8c0bdc70959e0e8c"
|
||||
integrity sha512-rTuiIEqFmGxne4IovivKSDzld2lWW9QCjqv80SYjPgf+gS35eaCAjaP54CCwGAwBtnCsvNLYtqxe1Nw+i6JEmA==
|
||||
|
||||
resolve@^1.11.0, resolve@^1.11.1:
|
||||
version "1.20.0"
|
||||
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975"
|
||||
integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==
|
||||
dependencies:
|
||||
is-core-module "^2.2.0"
|
||||
path-parse "^1.0.6"
|
||||
|
||||
responselike@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.0.tgz#26391bcc3174f750f9a79eacc40a12a5c42d7723"
|
||||
@@ -1713,6 +1786,44 @@ responselike@^2.0.0:
|
||||
dependencies:
|
||||
lowercase-keys "^2.0.0"
|
||||
|
||||
rollup-plugin-commonjs@^10.1.0:
|
||||
version "10.1.0"
|
||||
resolved "https://registry.yarnpkg.com/rollup-plugin-commonjs/-/rollup-plugin-commonjs-10.1.0.tgz#417af3b54503878e084d127adf4d1caf8beb86fb"
|
||||
integrity sha512-jlXbjZSQg8EIeAAvepNwhJj++qJWNJw1Cl0YnOqKtP5Djx+fFGkp3WRh+W0ASCaFG5w1jhmzDxgu3SJuVxPF4Q==
|
||||
dependencies:
|
||||
estree-walker "^0.6.1"
|
||||
is-reference "^1.1.2"
|
||||
magic-string "^0.25.2"
|
||||
resolve "^1.11.0"
|
||||
rollup-pluginutils "^2.8.1"
|
||||
|
||||
rollup-plugin-node-resolve@^5.2.0:
|
||||
version "5.2.0"
|
||||
resolved "https://registry.yarnpkg.com/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-5.2.0.tgz#730f93d10ed202473b1fb54a5997a7db8c6d8523"
|
||||
integrity sha512-jUlyaDXts7TW2CqQ4GaO5VJ4PwwaV8VUGA7+km3n6k6xtOEacf61u0VXwN80phY/evMcaS+9eIeJ9MOyDxt5Zw==
|
||||
dependencies:
|
||||
"@types/resolve" "0.0.8"
|
||||
builtin-modules "^3.1.0"
|
||||
is-module "^1.0.0"
|
||||
resolve "^1.11.1"
|
||||
rollup-pluginutils "^2.8.1"
|
||||
|
||||
rollup-pluginutils@^2.8.1:
|
||||
version "2.8.2"
|
||||
resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz#72f2af0748b592364dbd3389e600e5a9444a351e"
|
||||
integrity sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==
|
||||
dependencies:
|
||||
estree-walker "^0.6.1"
|
||||
|
||||
rollup@^1.20.3:
|
||||
version "1.32.1"
|
||||
resolved "https://registry.yarnpkg.com/rollup/-/rollup-1.32.1.tgz#4480e52d9d9e2ae4b46ba0d9ddeaf3163940f9c4"
|
||||
integrity sha512-/2HA0Ec70TvQnXdzynFffkjA6XN+1e2pEv/uKS5Ulca40g2L7KuOE3riasHoNVHOsFD5KKZgDsMk1CP3Tw9s+A==
|
||||
dependencies:
|
||||
"@types/estree" "*"
|
||||
"@types/node" "*"
|
||||
acorn "^7.1.0"
|
||||
|
||||
safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0:
|
||||
version "5.2.1"
|
||||
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
|
||||
@@ -1777,6 +1888,11 @@ source-map@0.6.1:
|
||||
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
|
||||
integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
|
||||
|
||||
sourcemap-codec@^1.4.4:
|
||||
version "1.4.8"
|
||||
resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4"
|
||||
integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==
|
||||
|
||||
sprintf-js@~1.0.2:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
|
||||
|
||||