mirror of
https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-16 18:46:40 -05:00
Add scripts for creating artifacts (#8602)
* add remote build to the pipeline * add a separate compile step * fix darwin build * add linux container and fix docker creation * fix distro * remove system install and add xvfb start * distro * add logic to only run tests on hosted machine * fix yml * fix yml * add linux docker container * fix docker file * fix docker * fix darwin * fix linux build * add cache salt to npm cache * intentionally ignore kerberos binaries * disable docker for now * remove vsix from win32 * fix linux and win32 * fix linux and win32 * fix linux and win32 * fix linux * maybe fix win32 * fix linux * fix linux image; disable server package for now * fix minimatch for win32 test * fix linux build * add back in docker * fix test * use tmp directory instead of workspace * change name of docker image * try a different folder * fix download * add a git clean step * bump cache * fix issues with building * re-add windows build, revert signing changes * simplify win32 server package * some more optimizations * use decompress task * add back in install for windows test * fix linux build * add integration test to bat file * make platform the same * add copy extension to windows test * revert tests back * fix vsix drop acquisition * initial changes * fix download * fix dependent on for release * just download everything which makes it easier * setup pipeline artifacts * more clean up * fix linux * add logic to install extensions for integration tests * fix extension install * fix build failures * fix some issues * fix darwin drop * change linux build copy to js * fix darwin archive * fix copy artifacts and use it for windows * use for darwin * fix darwin * add dep on linux * fix win32 * fix darwin * fix copy artifacts * mkdir -p darwin * fix copy * add error handler * add more binaries * add more binaries * fix archive path on linux * add more options to integration extension install * add more binaries * add verbose to installer copy * fix ip rate issues * fix bat file for including extensions * move echo * 
add windows test condition * use powershell cmdlet rather than cp * remove verbose * remove compiling of extensions * fix pipelines * update docker location * fix copy item * fix signing on win32 * fix locations * move back to using cp * ensure the docker folder exists * test a createDrop script on darwin * wip * fix copy * add drop for linux * fix builds * fix drop * fix docker on linux * fix darwin * let's try this again * fix linux drop * i guess try the copy files task * add create drop for win32 * ensure windows drop location exists * fix extension install * just use mkdir * add better logic for installing extensions * ignore errors? * try force * testing * ok this should work * use production cli * fix liveshare vscodeignore * fix integration test script * revert changes to integration tests to fix them * try New-Item * remove exec * explicitly clear last exit code * fix test build * revert publish scripts * add version json * fix tests * add back sources creation * this is stupid * fix clean positioning * add version information to drop * fix locations of artifacts in publish scripts
This commit is contained in:
36
build/azure-pipelines/common/copyArtifacts.ts
Normal file
36
build/azure-pipelines/common/copyArtifacts.ts
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
import * as vfs from 'vinyl-fs';
|
||||||
|
|
||||||
|
const files = [
|
||||||
|
'.build/extensions/**/*.vsix', // external extensions
|
||||||
|
'.build/win32-x64/**/*.{exe,zip}', // windows binaries
|
||||||
|
'.build/linux/sha256hashes.txt', // linux hashes
|
||||||
|
'.build/linux/deb/amd64/deb/*', // linux debs
|
||||||
|
'.build/linux/rpm/x86_64/*', // linux rpms
|
||||||
|
'.build/linux/server/*', // linux server
|
||||||
|
'.build/linux/archive/*', // linux archive
|
||||||
|
'.build/docker/**', // docker images
|
||||||
|
'.build/darwin/**', // darwin binaries
|
||||||
|
'.build/version.json' // version information
|
||||||
|
];
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const stream = vfs.src(files, { base: '.build', allowEmpty: true })
|
||||||
|
.pipe(vfs.dest(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!));
|
||||||
|
|
||||||
|
stream.on('end', () => resolve());
|
||||||
|
stream.on('error', e => reject(e));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch(err => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
14
build/azure-pipelines/darwin/createDrop.sh
Executable file
14
build/azure-pipelines/darwin/createDrop.sh
Executable file
@@ -0,0 +1,14 @@
|
|||||||
|
#!/usr/bin/env bash
# Creates the darwin drop: strips the installer .pkg out of the client
# archive, packages the Remote Extension Host, then stages all artifacts
# for publishing via copyArtifacts.js.
set -e

REPO="$(pwd)"

# ensure drop directories exist before anything writes into them
mkdir -p "$REPO"/.build/darwin/{archive,server}

# remove pkg from archive; `zip -d` exits 12 when nothing matched, which
# must not abort the drop under `set -e` (the pkg may already be absent)
zip -d "$REPO/.build/darwin/archive/azuredatastudio-darwin.zip" "*.pkg" || [ $? -eq 12 ]

# package Remote Extension Host (rename first so the top-level folder
# inside the zip carries the server name)
pushd ..
mv azuredatastudio-reh-darwin azuredatastudio-server-darwin
zip -Xry "$REPO/.build/darwin/server/azuredatastudio-server-darwin.zip" azuredatastudio-server-darwin
popd

# stage everything into $BUILD_ARTIFACTSTAGINGDIRECTORY
node build/azure-pipelines/common/copyArtifacts.js
|
||||||
@@ -99,38 +99,23 @@ steps:
|
|||||||
env:
|
env:
|
||||||
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||||
|
|
||||||
- task: ArchiveFiles@2 # WHY ARE WE DOING THIS?
|
- script: |
|
||||||
displayName: 'Archive build scripts source'
|
set -e
|
||||||
inputs:
|
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter
|
||||||
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
|
displayName: Run unit tests
|
||||||
archiveType: tar
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
|
|
||||||
|
|
||||||
- task: PublishBuildArtifacts@1 # WHY ARE WE DOING THIS?
|
|
||||||
displayName: 'Publish Artifact: build scripts source'
|
|
||||||
inputs:
|
|
||||||
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
|
|
||||||
ArtifactName: source
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
cd $(Build.SourcesDirectory)/../azuredatastudio-darwin
|
mkdir -p .build/darwin/archive
|
||||||
zip -r -y $(Build.ArtifactStagingDirectory)/azuredatastudio-darwin.zip "Azure Data Studio.app"
|
pushd ../azuredatastudio-darwin && zip -r -X -y $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip * && popd
|
||||||
displayName: 'Stable - Run zip'
|
displayName: 'Archive'
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_QUALITY'], 'stable'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
cd $(Build.SourcesDirectory)/../azuredatastudio-darwin
|
|
||||||
zip -r -y $(Build.ArtifactStagingDirectory)/azuredatastudio-darwin.zip "Azure Data Studio - Insiders.app"
|
|
||||||
displayName: 'Insiders - Run zip'
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_QUALITY'], 'insider'))
|
|
||||||
|
|
||||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
displayName: 'ESRP CodeSigning'
|
displayName: 'ESRP CodeSigning'
|
||||||
inputs:
|
inputs:
|
||||||
ConnectedServiceName: 'Code Signing'
|
ConnectedServiceName: 'Code Signing'
|
||||||
FolderPath: '$(Build.ArtifactStagingDirectory)'
|
FolderPath: '$(Build.SourcesDirectory)/.build/darwin/archive'
|
||||||
Pattern: 'azuredatastudio-darwin.zip'
|
Pattern: 'azuredatastudio-darwin.zip'
|
||||||
signConfigType: inlineSignParams
|
signConfigType: inlineSignParams
|
||||||
inlineOperation: |
|
inlineOperation: |
|
||||||
@@ -145,48 +130,10 @@ steps:
|
|||||||
]
|
]
|
||||||
SessionTimeout: 20
|
SessionTimeout: 20
|
||||||
|
|
||||||
- script: | # WHY ARE WE DOING THIS?
|
|
||||||
set -e
|
|
||||||
zip -d $(Build.ArtifactStagingDirectory)/azuredatastudio-darwin.zip "Azure Data Studio.pkg"
|
|
||||||
displayName: 'Stable - Remove .pkg from signed zip'
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_QUALITY'], 'stable'))
|
|
||||||
|
|
||||||
- script: | # WHY ARE WE DOING THIS?
|
|
||||||
set -e
|
|
||||||
zip -d $(Build.ArtifactStagingDirectory)/azuredatastudio-darwin.zip "Azure Data Studio - Insiders.pkg"
|
|
||||||
displayName: 'Insiders - Remove .pkg from signed zip'
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_QUALITY'], 'insider'))
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter
|
./build/azure-pipelines/darwin/createDrop.sh
|
||||||
displayName: Run unit tests
|
displayName: Create Drop
|
||||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
|
||||||
timeoutInMinutes: 20
|
|
||||||
continueOnError: true
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
pushd .. && mv azuredatastudio-reh-darwin azuredatastudio-server-darwin && zip -Xry $(Build.ArtifactStagingDirectory)/azuredatastudio-server-darwin.zip azuredatastudio-server-darwin && popd
|
|
||||||
displayName: 'Package server'
|
|
||||||
|
|
||||||
- script: | # WHY ARE WE DOING THIS?
|
|
||||||
set -e
|
|
||||||
PACKAGEJSON=`ls $(Build.SourcesDirectory)/package.json`
|
|
||||||
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
|
|
||||||
COMMIT_ID=`git rev-parse HEAD`
|
|
||||||
|
|
||||||
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$(Build.ArtifactStagingDirectory)/version.json"
|
|
||||||
displayName: 'Create version.json'
|
|
||||||
|
|
||||||
- script: | # WHY ARE WE DOING THIS?
|
|
||||||
set -e
|
|
||||||
for f in *
|
|
||||||
do
|
|
||||||
shasum -a 256 "$f" >> sha256hashes.txt
|
|
||||||
done
|
|
||||||
workingDirectory: '$(Build.ArtifactStagingDirectory)'
|
|
||||||
displayName: 'Get SHA256 Hashes'
|
|
||||||
continueOnError: true
|
|
||||||
|
|
||||||
- task: PublishBuildArtifacts@1
|
- task: PublishBuildArtifacts@1
|
||||||
displayName: 'Publish Artifact: drop'
|
displayName: 'Publish Artifact: drop'
|
||||||
@@ -220,7 +167,3 @@ steps:
|
|||||||
displayName: 'Component Detection'
|
displayName: 'Component Detection'
|
||||||
inputs:
|
inputs:
|
||||||
failOnAlert: true
|
failOnAlert: true
|
||||||
|
|
||||||
- script: 'echo "##vso[build.addbuildtag]Scheduled" '
|
|
||||||
displayName: 'Add scheduled tag if needed'
|
|
||||||
condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['Build.Reason'], 'Schedule'))
|
|
||||||
|
|||||||
@@ -14,6 +14,6 @@ $Quality = $VersionJson.quality
|
|||||||
$CommitId = $VersionJson.commit
|
$CommitId = $VersionJson.commit
|
||||||
|
|
||||||
$ZipName = "azuredatastudio-darwin.zip"
|
$ZipName = "azuredatastudio-darwin.zip"
|
||||||
$Zip = "$artifactsDir\$ZipName"
|
$Zip = "$artifactsDir\darwin\archive\$ZipName"
|
||||||
|
|
||||||
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive $ZipName $Version true $Zip $CommitId
|
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive $ZipName $Version true $Zip $CommitId
|
||||||
|
|||||||
37
build/azure-pipelines/linux/createDrop.sh
Executable file
37
build/azure-pipelines/linux/createDrop.sh
Executable file
@@ -0,0 +1,37 @@
|
|||||||
|
#!/usr/bin/env bash
# Creates the linux drop: tars the client build, writes version.json,
# packages the Remote Extension Host, builds the server docker image, then
# stages all artifacts for publishing via copyArtifacts.js.
set -e

REPO="$(pwd)"
ROOT="$REPO/.."

# Publish tarball
mkdir -p "$REPO"/.build/linux/{archive,server}
PLATFORM_LINUX="linux-x64"
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"

# create version — read the version out of the built app's package.json so
# the drop describes exactly what was built
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
COMMIT_ID=$(git rev-parse HEAD)
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$REPO/.build/version.json"

# drop any stale tarballs from a previous run, then archive the client
rm -rf "$ROOT"/code-*.tar.*
(cd "$ROOT" && tar -czf "$TARBALL_PATH" "$BUILDNAME")

# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"

rm -rf "$ROOT"/azuredatastudio-server-*.tar.*
# --owner=0 --group=0 keeps the server tarball free of agent-local uids
(cd "$ROOT" && mv "$LEGACY_SERVER_BUILD_NAME" "$SERVER_BUILD_NAME" && tar --owner=0 --group=0 -czf "$SERVER_TARBALL_PATH" "$SERVER_BUILD_NAME")

# create docker image from the server build and save it as a gzipped tar
mkdir -p "$REPO/.build/docker"
docker build -t azuredatastudio-server -f "$REPO/build/azure-pipelines/docker/Dockerfile" "$ROOT/$SERVER_BUILD_NAME"
docker save azuredatastudio-server | gzip > "$REPO/.build/docker/azuredatastudio-server-docker.tar.gz"

# stage everything into $BUILD_ARTIFACTSTAGINGDIRECTORY
node build/azure-pipelines/common/copyArtifacts.js
|
||||||
@@ -111,20 +111,8 @@ steps:
|
|||||||
yarn gulp package-external-extensions
|
yarn gulp package-external-extensions
|
||||||
displayName: Package External extensions
|
displayName: Package External extensions
|
||||||
|
|
||||||
- task: ArchiveFiles@2 # WHY ARE WE DOING THIS?
|
|
||||||
displayName: 'Archive build scripts source'
|
|
||||||
inputs:
|
|
||||||
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
|
|
||||||
archiveType: tar
|
|
||||||
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
|
|
||||||
|
|
||||||
- task: PublishBuildArtifacts@1 # WHY ARE WE DOING THIS?
|
|
||||||
displayName: 'Publish Artifact: build scripts source'
|
|
||||||
inputs:
|
|
||||||
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
|
|
||||||
ArtifactName: source
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
|
set -e
|
||||||
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
||||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||||
@@ -133,6 +121,7 @@ steps:
|
|||||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
|
set -e
|
||||||
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
||||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||||
@@ -151,65 +140,10 @@ steps:
|
|||||||
yarn gulp vscode-linux-x64-build-rpm
|
yarn gulp vscode-linux-x64-build-rpm
|
||||||
displayName: Build Rpm
|
displayName: Build Rpm
|
||||||
|
|
||||||
- task: ArchiveFiles@1 # WHY ARE WE DOING THIS?
|
|
||||||
displayName: 'Archive files '
|
|
||||||
inputs:
|
|
||||||
rootFolder: '$(Build.SourcesDirectory)/../azuredatastudio-linux-x64'
|
|
||||||
archiveType: tar
|
|
||||||
archiveFile: '$(Build.ArtifactStagingDirectory)/azuredatastudio-linux-x64.tar.gz'
|
|
||||||
|
|
||||||
- task: CopyFiles@2
|
|
||||||
displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory) (deb)'
|
|
||||||
inputs:
|
|
||||||
SourceFolder: '$(Build.SourcesDirectory)/.build/linux/deb/amd64/deb'
|
|
||||||
Contents: '*.deb'
|
|
||||||
TargetFolder: '$(Build.ArtifactStagingDirectory)'
|
|
||||||
|
|
||||||
- task: CopyFiles@2
|
|
||||||
displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory) (rpm)'
|
|
||||||
inputs:
|
|
||||||
SourceFolder: '$(Build.SourcesDirectory)/.build/linux/rpm/x86_64/'
|
|
||||||
Contents: '*.rpm'
|
|
||||||
TargetFolder: '$(Build.ArtifactStagingDirectory)'
|
|
||||||
|
|
||||||
- task: CopyFiles@2
|
|
||||||
displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory)/vsix'
|
|
||||||
inputs:
|
|
||||||
SourceFolder: '$(Build.SourcesDirectory)/../vsix'
|
|
||||||
TargetFolder: '$(Build.ArtifactStagingDirectory)/vsix'
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
docker build -t azuredatastudio-server -f build/azure-pipelines/docker/Dockerfile $(agent.builddirectory)/azuredatastudio-reh-linux-x64
|
./build/azure-pipelines/linux/createDrop.sh
|
||||||
docker save azuredatastudio-server | gzip > azuredatastudio-server-docker.tar.gz
|
displayName: Create Drop
|
||||||
cp azuredatastudio-server-docker.tar.gz $(Build.ArtifactStagingDirectory)
|
|
||||||
displayName: "Create docker image"
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
cd $(agent.builddirectory)
|
|
||||||
tar --owner=0 --group=0 -czf azuredatastudio-server-linux-x64.tar.gz azuredatastudio-reh-linux-x64
|
|
||||||
cp azuredatastudio-server-linux-x64.tar.gz $(Build.ArtifactStagingDirectory)
|
|
||||||
displayName: 'Package server'
|
|
||||||
|
|
||||||
- script: | # WHY ARE WE DOING THIS?
|
|
||||||
set -e
|
|
||||||
PACKAGEJSON="$(Build.SourcesDirectory)/package.json"
|
|
||||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
|
||||||
COMMIT_ID=$(git rev-parse HEAD)
|
|
||||||
|
|
||||||
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$(Build.ArtifactStagingDirectory)/version.json"
|
|
||||||
displayName: 'Create version.json'
|
|
||||||
|
|
||||||
- script: | # WHY ARE WE DOING THIS?
|
|
||||||
set -e
|
|
||||||
for f in *
|
|
||||||
do
|
|
||||||
shasum -a 256 "$f" >> sha256hashes.txt
|
|
||||||
done
|
|
||||||
workingDirectory: '$(Build.ArtifactStagingDirectory)'
|
|
||||||
displayName: 'Get SHA256 Hashes'
|
|
||||||
continueOnError: true
|
|
||||||
|
|
||||||
- task: PublishBuildArtifacts@1
|
- task: PublishBuildArtifacts@1
|
||||||
displayName: 'Publish Artifact: drop'
|
displayName: 'Publish Artifact: drop'
|
||||||
@@ -234,11 +168,3 @@ steps:
|
|||||||
displayName: 'Component Detection'
|
displayName: 'Component Detection'
|
||||||
inputs:
|
inputs:
|
||||||
failOnAlert: true
|
failOnAlert: true
|
||||||
|
|
||||||
- script: 'echo "##vso[build.addbuildtag]Scheduled" '
|
|
||||||
displayName: 'Add scheduled tag if needed'
|
|
||||||
condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['Build.Reason'], 'Schedule'))
|
|
||||||
|
|
||||||
- script: 'echo "##vso[build.addbuildtag]PerfTestCandidate" '
|
|
||||||
displayName: 'Add PerfTestCandidate tag if needed'
|
|
||||||
condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['VSCODE_QUALITY'], 'insider'))
|
|
||||||
|
|||||||
@@ -17,20 +17,20 @@ $Arch = "x64"
|
|||||||
# Publish tarball
|
# Publish tarball
|
||||||
$PlatformLinux = "linux-$Arch"
|
$PlatformLinux = "linux-$Arch"
|
||||||
$TarballFilename = "azuredatastudio-linux-$Arch.tar.gz"
|
$TarballFilename = "azuredatastudio-linux-$Arch.tar.gz"
|
||||||
$TarballPath = "$artifactsDir\$TarballFilename"
|
$TarballPath = "$artifactsDir\linux\archive\$TarballFilename"
|
||||||
|
|
||||||
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned $TarballFilename $Version true $TarballPath $CommitId
|
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned $TarballFilename $Version true $TarballPath $CommitId
|
||||||
|
|
||||||
# Publish DEB
|
# Publish DEB
|
||||||
$PlatformDeb = "linux-deb-$Arch"
|
$PlatformDeb = "linux-deb-$Arch"
|
||||||
$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\*.deb)"
|
$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
|
||||||
$DebPath = "$artifactsDir\$DebFilename"
|
$DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"
|
||||||
|
|
||||||
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package $DebFilename $Version true $DebPath $CommitId
|
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package $DebFilename $Version true $DebPath $CommitId
|
||||||
|
|
||||||
# Publish RPM
|
# Publish RPM
|
||||||
$PlatformRpm = "linux-rpm-$Arch"
|
$PlatformRpm = "linux-rpm-$Arch"
|
||||||
$RpmFilename = "$(Get-ChildItem -File -Name $artifactsDir\*.rpm)"
|
$RpmFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\rpm\x86_64\*.rpm)"
|
||||||
$RpmPath = "$artifactsDir\$RpmFilename"
|
$RpmPath = "$artifactsDir\linux\rpm\x86_64\$RpmFilename"
|
||||||
|
|
||||||
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package $RpmFilename $Version true $RpmPath $CommitId
|
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package $RpmFilename $Version true $RpmPath $CommitId
|
||||||
|
|||||||
20
build/azure-pipelines/win32/createDrop.ps1
Normal file
20
build/azure-pipelines/win32/createDrop.ps1
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
|
||||||
|
$Arch = "x64"
|
||||||
|
|
||||||
|
$Repo = "$(pwd)"
|
||||||
|
$Root = "$Repo\.."
|
||||||
|
$LegacyServer = "$Root\azuredatastudio-reh-win32-$Arch"
|
||||||
|
$ServerName = "azuredatastudio-server-win32-$Arch"
|
||||||
|
$Server = "$Root\$ServerName"
|
||||||
|
$ServerZipLocation = "$Repo\.build\win32-$Arch\server"
|
||||||
|
$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"
|
||||||
|
|
||||||
|
# Create server archive
|
||||||
|
New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
|
||||||
|
$global:LASTEXITCODE = 0
|
||||||
|
exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
|
||||||
|
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"
|
||||||
|
|
||||||
|
exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"
|
||||||
@@ -105,19 +105,6 @@ steps:
|
|||||||
env:
|
env:
|
||||||
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||||
|
|
||||||
- task: ArchiveFiles@2 # WHY
|
|
||||||
displayName: 'Archive build scripts source'
|
|
||||||
inputs:
|
|
||||||
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
|
|
||||||
archiveType: tar
|
|
||||||
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
|
|
||||||
|
|
||||||
- task: PublishBuildArtifacts@1 # WHY
|
|
||||||
displayName: 'Publish Artifact: build scripts source'
|
|
||||||
inputs:
|
|
||||||
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
|
|
||||||
ArtifactName: source
|
|
||||||
|
|
||||||
- powershell: |
|
- powershell: |
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
@@ -189,39 +176,11 @@ steps:
|
|||||||
exec { yarn gulp "vscode-win32-x64-archive" }
|
exec { yarn gulp "vscode-win32-x64-archive" }
|
||||||
displayName: Archive & User & System setup
|
displayName: Archive & User & System setup
|
||||||
|
|
||||||
- task: CopyFiles@2
|
|
||||||
displayName: 'Copy Archive to: $(Build.ArtifactStagingDirectory)'
|
|
||||||
inputs:
|
|
||||||
SourceFolder: '$(Build.SourcesDirectory)/.build/win32-x64/archive/'
|
|
||||||
TargetFolder: '$(Build.ArtifactStagingDirectory)' # our release scripts expect the archive to be in the root
|
|
||||||
|
|
||||||
- task: CopyFiles@2
|
|
||||||
displayName: 'Copy User Installer to: $(Build.ArtifactStagingDirectory)'
|
|
||||||
inputs:
|
|
||||||
SourceFolder: '$(Build.SourcesDirectory)/.build/win32-x64/user-setup/'
|
|
||||||
TargetFolder: '$(Build.ArtifactStagingDirectory)/user-setup/'
|
|
||||||
|
|
||||||
- task: CopyFiles@2
|
|
||||||
displayName: 'Copy System Install to: $(Build.ArtifactStagingDirectory)'
|
|
||||||
inputs:
|
|
||||||
SourceFolder: '$(Build.SourcesDirectory)/.build/win32-x64/system-setup/'
|
|
||||||
TargetFolder: '$(Build.ArtifactStagingDirectory)/' # our release scripts except system exe to be in root and user setup to be under /user-setup
|
|
||||||
|
|
||||||
- powershell: |
|
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
$Repo = "$(pwd)"
|
|
||||||
$Root = "$Repo\.."
|
|
||||||
$LegacyServer = "$Root\azuredatastudio-reh-win32-x64"
|
|
||||||
$ServerZip = "$(Build.ArtifactStagingDirectory)\azuredatastudio-server-win32-x64.zip"
|
|
||||||
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $LegacyServer -r }
|
|
||||||
displayName: 'Package server'
|
|
||||||
|
|
||||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
displayName: 'Sign installers'
|
displayName: 'Sign installers'
|
||||||
inputs:
|
inputs:
|
||||||
ConnectedServiceName: 'Code Signing'
|
ConnectedServiceName: 'Code Signing'
|
||||||
FolderPath: '$(Build.ArtifactStagingDirectory)'
|
FolderPath: '.build'
|
||||||
Pattern: '*.exe'
|
Pattern: '*.exe'
|
||||||
signConfigType: inlineSignParams
|
signConfigType: inlineSignParams
|
||||||
inlineOperation: |
|
inlineOperation: |
|
||||||
@@ -272,26 +231,24 @@ steps:
|
|||||||
MaxRetryAttempts: 20
|
MaxRetryAttempts: 20
|
||||||
condition: and(succeeded(), eq(variables['signed'], true))
|
condition: and(succeeded(), eq(variables['signed'], true))
|
||||||
|
|
||||||
- powershell: | # WHY!
|
- task: ArchiveFiles@2
|
||||||
$PackageJson = Get-Content -Raw -Path "$(Build.SourcesDirectory)\package.json" | ConvertFrom-Json
|
displayName: 'Archive build scripts source'
|
||||||
|
inputs:
|
||||||
|
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
|
||||||
|
archiveType: tar
|
||||||
|
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
|
||||||
|
|
||||||
$jsonResult = @{
|
- task: PublishBuildArtifacts@1
|
||||||
version = $PackageJson.version
|
displayName: 'Publish Artifact: build scripts source'
|
||||||
quality = $env:VSCODE_QUALITY
|
inputs:
|
||||||
commit = "$(git rev-parse HEAD)"
|
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
|
||||||
}
|
ArtifactName: source
|
||||||
|
|
||||||
$jsonResult | ConvertTo-Json | Out-File "$(Build.ArtifactStagingDirectory)\version.json"
|
- powershell: |
|
||||||
displayName: 'Create version.json'
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
- powershell: | # WHY
|
.\build\azure-pipelines\win32\createDrop.ps1
|
||||||
Get-ChildItem "." |
|
displayName: Create Drop
|
||||||
ForEach-Object {
|
|
||||||
certutil.exe -hashfile $_.FullName SHA256 >> sha256hashes.txt
|
|
||||||
}
|
|
||||||
workingDirectory: '$(Build.ArtifactStagingDirectory)'
|
|
||||||
displayName: 'Get SHA256 Hashes'
|
|
||||||
continueOnError: true
|
|
||||||
|
|
||||||
- task: PublishBuildArtifacts@1
|
- task: PublishBuildArtifacts@1
|
||||||
displayName: 'Publish Artifact: drop'
|
displayName: 'Publish Artifact: drop'
|
||||||
@@ -319,11 +276,3 @@ steps:
|
|||||||
displayName: 'Component Detection'
|
displayName: 'Component Detection'
|
||||||
inputs:
|
inputs:
|
||||||
failOnAlert: true
|
failOnAlert: true
|
||||||
|
|
||||||
- powershell: 'Write-Host "##vso[build.addbuildtag]Scheduled" '
|
|
||||||
displayName: 'Tag build if scheduled'
|
|
||||||
condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['Build.Reason'], 'Schedule'))
|
|
||||||
|
|
||||||
- powershell: 'Write-Host "##vso[build.addbuildtag]PerfTestCandidate" '
|
|
||||||
displayName: 'Tag build for PerfTestCandidate if needed'
|
|
||||||
condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['VSCODE_QUALITY'], 'insider'))
|
|
||||||
|
|||||||
@@ -1,10 +1,4 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: DownloadPipelineArtifact@2
|
|
||||||
inputs:
|
|
||||||
buildType: 'current'
|
|
||||||
targetPath: '$(Agent.TempDirectory)'
|
|
||||||
artifactName: drop
|
|
||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "10.15.1"
|
||||||
@@ -20,6 +14,12 @@ steps:
|
|||||||
exec { git clean -fxd }
|
exec { git clean -fxd }
|
||||||
displayName: Clean repo
|
displayName: Clean repo
|
||||||
|
|
||||||
|
- task: DownloadPipelineArtifact@2
|
||||||
|
inputs:
|
||||||
|
buildType: 'current'
|
||||||
|
targetPath: '$(Build.SourcesDirectory)\.build'
|
||||||
|
artifactName: drop
|
||||||
|
|
||||||
- powershell: |
|
- powershell: |
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
@@ -30,7 +30,7 @@ steps:
|
|||||||
- powershell: |
|
- powershell: |
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
exec { .\node_modules\7zip\7zip-lite\7z.exe x $(Agent.TempDirectory)\azuredatastudio-win32-x64.zip -o$(Agent.TempDirectory)\azuredatastudio-win32-x64 }
|
exec { .\node_modules\7zip\7zip-lite\7z.exe x $(Build.SourcesDirectory)\.build\win32-x64/archive/azuredatastudio-win32-x64.zip -o$(Agent.TempDirectory)\azuredatastudio-win32-x64 }
|
||||||
displayName: Unzip artifact
|
displayName: Unzip artifact
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
|
|||||||
@@ -9,11 +9,11 @@ $env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
|
|||||||
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
|
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
|
||||||
|
|
||||||
$ExeName = "AzureDataStudioSetup.exe"
|
$ExeName = "AzureDataStudioSetup.exe"
|
||||||
$SystemExe = "$artifactsDir\$ExeName"
|
$SystemExe = "$artifactsDir\win32-x64\system-setup\$ExeName"
|
||||||
$UserExe = "$artifactsDir\user-setup\$ExeName"
|
$UserExe = "$artifactsDir\win32-x64\user-setup\$ExeName"
|
||||||
$UserExeName = "AzureDataStudioUserSetup.exe"
|
$UserExeName = "AzureDataStudioUserSetup.exe"
|
||||||
$ZipName = "azuredatastudio-win32-x64.zip"
|
$ZipName = "azuredatastudio-win32-x64.zip"
|
||||||
$Zip = "$artifactsDir\$ZipName"
|
$Zip = "$artifactsDir\win32-x64\archive\$ZipName"
|
||||||
|
|
||||||
$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
|
$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
|
||||||
$Version = $VersionJson.version
|
$Version = $VersionJson.version
|
||||||
|
|||||||
@@ -146,7 +146,7 @@ gulp.task('package-external-extensions', task.series(
|
|||||||
return { name: extensionName, path: extensionPath };
|
return { name: extensionName, path: extensionPath };
|
||||||
}).map(element => {
|
}).map(element => {
|
||||||
const pkgJson = require(path.join(element.path, 'package.json'));
|
const pkgJson = require(path.join(element.path, 'package.json'));
|
||||||
const vsixDirectory = path.join(path.dirname(root), 'vsix');
|
const vsixDirectory = path.join(root, '.build', 'extensions');
|
||||||
mkdirp.sync(vsixDirectory);
|
mkdirp.sync(vsixDirectory);
|
||||||
const packagePath = path.join(vsixDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
const packagePath = path.join(vsixDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
||||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
||||||
|
|||||||
@@ -18,8 +18,8 @@ if "%INTEGRATION_TEST_ELECTRON_PATH%"=="" (
|
|||||||
:: Run from a built: need to compile all test extensions
|
:: Run from a built: need to compile all test extensions
|
||||||
call yarn gulp compile-extension:integration-tests
|
call yarn gulp compile-extension:integration-tests
|
||||||
if NOT "%INTEGRATION_TEST_CLI_PATH%"=="" (
|
if NOT "%INTEGRATION_TEST_CLI_PATH%"=="" (
|
||||||
echo "using vsix directory %AGENT_TEMPDIRECTORY%\vsix"
|
echo "using vsix directory .build\extensions"
|
||||||
for /f %%f IN ('dir /b /s "%AGENT_TEMPDIRECTORY%\vsix\*"') DO (
|
for /f %%f IN ('dir /b /s ".build\extensions\*"') DO (
|
||||||
echo "installing extension %%f"
|
echo "installing extension %%f"
|
||||||
:: use the source cli, we could potentially change this if we ever care about testing this, but this is easier atm
|
:: use the source cli, we could potentially change this if we ever care about testing this, but this is easier atm
|
||||||
call %INTEGRATION_TEST_CLI_PATH% --install-extension "%%f" --force --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR%
|
call %INTEGRATION_TEST_CLI_PATH% --install-extension "%%f" --force --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR%
|
||||||
|
|||||||
Reference in New Issue
Block a user