Add scripts for creating artifacts (#8602)

* add remote build to the pipeline

* add a separate compile step

* fix darwin build

* add linux container and fix docker creation

* fix distro

* remove system install and add xvfb start

* distro

* add logic to only run tests on hosted machine

* fix yml

* fix yml

* add linux docker container

* fix docker file

* fix docker

* fix darwin

* fix linux build

* add cache salt to npm cache

* intentionally ignore kerberos binaries

* disable docker for now

* remove vsix from win32

* fix linux and win32

* fix linux and win32

* fix linux and win32

* fix linux

* maybe fix win32

* fix linux

* fix linux image; disable server package for now

* fix minimatch for win32 test

* fix linux build

* add back in docker

* fix test

* use tmp directory instead of workspace

* change name of docker image

* try a different folder

* fix download

* add a git clean step

* bump cache

* fix issues with building

* re-add windows build, revert signing changes

* simplify win32 server package

* some more optimizations

* use decompress task

* add back in install for windows test

* fix linux build

* add integration test to bat file

* make platform the same

* add copy extension to windows test

* revert tests back

* fix vsix drop acquisition

* initial changes

* fix download

* fix dependent on for release

* just download everything, which makes it easier

* setup pipeline artifacts

* more clean up

* fix linux

* add logic to install extensions for integration tests

* fix extension install

* fix build failures

* fix some issues

* fix darwin drop

* change linux build copy to js

* fix darwin archive

* fix copy artifacts and use it for windows

* use for darwin

* fix darwin

* add dep on linux

* fix win32

* fix darwin

* fix copy artifacts

* mkdir -p on darwin

* fix copy

* add error handler

* add more binaries

* add more binaries

* fix archive path on linux

* add more options to integration extension install

* add more binaries

* add verbose to installer copy

* fix IP rate issues

* fix bat file for including extensions

* move echo

* add windows test condition

* use powershell cmdlet rather than cp

* remove verbose

* remove compiling of extensions

* fix pipelines

* update docker location

* fix copy item

* fix signing on win32

* fix locations

* move back to using cp

* ensure the docker folder exists

* test a createDrop script on darwin

* wip

* fix copy

* add drop for linux

* fix builds

* fix drop

* fix docker on linux

* fix darwin

* let's try this again

* fix linux drop

* i guess try the copy files task

* add create drop for win32

* ensure windows drop location exists

* fix extension install

* just use mkdir

* add better logic for installing extensions (see the sketch after this list)

* ignore errors?

* try force

* testing

* ok this should work

* use production cli

* fix liveshare vscodeignore

* fix integration test script

* revert changes to integration tests to fix them

* try New-Item

* remove exec

* explicitly clear last exit code

* fix test build

* revert publish scripts

* add version json

* fix tests

* add back sources creation

* this is stupid

* fix clean positioning

* add version information to drop

* fix locations of artifacts in publish scripts
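
Several of the bullets above cover installing the packaged extensions before the integration tests run. As a minimal sketch only: this assumes the packaged VSIX files are staged in a vsix folder next to the build (as the copy task in the diff below suggests) and that the application exposes the standard VS Code --install-extension and --force CLI flags. The loop and folder layout are assumptions, not the actual pipeline code.

    set -e
    # Written as the body of an inline "- script:" step, so $(agent.builddirectory)
    # is an Azure Pipelines macro, exactly as in the inline steps shown in the diff below.
    APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
    APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
    VSIX_DIR=$(agent.builddirectory)/vsix   # assumed staging folder for the packaged VSIX files

    # Install every packaged extension into the build under test;
    # --force skips confirmation prompts, mirroring the "try force" change above.
    for vsix in "$VSIX_DIR"/*.vsix; do
      "$APP_ROOT/$APP_NAME" --install-extension "$vsix" --force
    done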
Anthony Dresser
2019-12-09 14:34:46 -08:00
committed by GitHub
parent a1b5af0445
commit adad11c725
13 changed files with 158 additions and 233 deletions


@@ -111,20 +111,8 @@ steps:
yarn gulp package-external-extensions
displayName: Package External extensions
- task: ArchiveFiles@2 # WHY ARE WE DOING THIS?
displayName: 'Archive build scripts source'
inputs:
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
archiveType: tar
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
- task: PublishBuildArtifacts@1 # WHY ARE WE DOING THIS?
displayName: 'Publish Artifact: build scripts source'
inputs:
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
ArtifactName: source
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
@@ -133,6 +121,7 @@ steps:
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
@@ -151,65 +140,10 @@ steps:
yarn gulp vscode-linux-x64-build-rpm
displayName: Build Rpm
- task: ArchiveFiles@1 # WHY ARE WE DOING THIS?
displayName: 'Archive files '
inputs:
rootFolder: '$(Build.SourcesDirectory)/../azuredatastudio-linux-x64'
archiveType: tar
archiveFile: '$(Build.ArtifactStagingDirectory)/azuredatastudio-linux-x64.tar.gz'
- task: CopyFiles@2
displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory) (deb)'
inputs:
SourceFolder: '$(Build.SourcesDirectory)/.build/linux/deb/amd64/deb'
Contents: '*.deb'
TargetFolder: '$(Build.ArtifactStagingDirectory)'
- task: CopyFiles@2
displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory) (rpm)'
inputs:
SourceFolder: '$(Build.SourcesDirectory)/.build/linux/rpm/x86_64/'
Contents: '*.rpm'
TargetFolder: '$(Build.ArtifactStagingDirectory)'
- task: CopyFiles@2
displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory)/vsix'
inputs:
SourceFolder: '$(Build.SourcesDirectory)/../vsix'
TargetFolder: '$(Build.ArtifactStagingDirectory)/vsix'
- script: |
set -e
docker build -t azuredatastudio-server -f build/azure-pipelines/docker/Dockerfile $(agent.builddirectory)/azuredatastudio-reh-linux-x64
docker save azuredatastudio-server | gzip > azuredatastudio-server-docker.tar.gz
cp azuredatastudio-server-docker.tar.gz $(Build.ArtifactStagingDirectory)
displayName: "Create docker image"
- script: |
set -e
cd $(agent.builddirectory)
tar --owner=0 --group=0 -czf azuredatastudio-server-linux-x64.tar.gz azuredatastudio-reh-linux-x64
cp azuredatastudio-server-linux-x64.tar.gz $(Build.ArtifactStagingDirectory)
displayName: 'Package server'
- script: | # WHY ARE WE DOING THIS?
set -e
PACKAGEJSON="$(Build.SourcesDirectory)/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
COMMIT_ID=$(git rev-parse HEAD)
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$(Build.ArtifactStagingDirectory)/version.json"
displayName: 'Create version.json'
- script: | # WHY ARE WE DOING THIS?
set -e
for f in *
do
shasum -a 256 "$f" >> sha256hashes.txt
done
workingDirectory: '$(Build.ArtifactStagingDirectory)'
displayName: 'Get SHA256 Hashes'
continueOnError: true
./build/azure-pipelines/linux/createDrop.sh
displayName: Create Drop
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
@@ -234,11 +168,3 @@ steps:
displayName: 'Component Detection'
inputs:
failOnAlert: true
- script: 'echo "##vso[build.addbuildtag]Scheduled" '
displayName: 'Add scheduled tag if needed'
condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['Build.Reason'], 'Schedule'))
- script: 'echo "##vso[build.addbuildtag]PerfTestCandidate" '
displayName: 'Add PerfTestCandidate tag if needed'
condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['VSCODE_QUALITY'], 'insider'))
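
The consolidated createDrop.sh itself is not part of this excerpt. As a rough sketch, assuming the Linux variant simply folds in the inline steps and tasks removed above (client tarball, server tarball and docker image, deb/rpm copies, version.json, SHA256 hashes) and reads the predefined pipeline variables from the environment, it might look roughly like this; every path and variable name here is inferred from the removed steps, not taken from the actual script.

    #!/usr/bin/env bash
    # Hypothetical sketch of build/azure-pipelines/linux/createDrop.sh.
    # Predefined pipeline variables are read from the environment
    # (AGENT_BUILDDIRECTORY, BUILD_SOURCESDIRECTORY, BUILD_ARTIFACTSTAGINGDIRECTORY).
    set -e

    REPO="$BUILD_SOURCESDIRECTORY"
    ROOT="$AGENT_BUILDDIRECTORY"
    STAGING="$BUILD_ARTIFACTSTAGINGDIRECTORY"

    # Client tarball (mirrors the removed ArchiveFiles@1 task)
    cd "$ROOT"
    tar --owner=0 --group=0 -czf "$STAGING/azuredatastudio-linux-x64.tar.gz" azuredatastudio-linux-x64

    # Server tarball and docker image (mirrors the removed "Package server" / "Create docker image" steps)
    tar --owner=0 --group=0 -czf "$STAGING/azuredatastudio-server-linux-x64.tar.gz" azuredatastudio-reh-linux-x64
    docker build -t azuredatastudio-server -f "$REPO/build/azure-pipelines/docker/Dockerfile" azuredatastudio-reh-linux-x64
    docker save azuredatastudio-server | gzip > "$STAGING/azuredatastudio-server-docker.tar.gz"

    # deb/rpm packages (mirrors the removed CopyFiles@2 tasks)
    cp "$REPO"/.build/linux/deb/amd64/deb/*.deb "$STAGING"
    cp "$REPO"/.build/linux/rpm/x86_64/*.rpm "$STAGING"

    # version.json (mirrors the removed "Create version.json" step)
    VERSION=$(node -p "require(\"$REPO/package.json\").version")
    COMMIT_ID=$(git -C "$REPO" rev-parse HEAD)
    echo "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$STAGING/version.json"

    # SHA256 hashes of everything in the drop (mirrors the removed "Get SHA256 Hashes" step)
    cd "$STAGING"
    for f in *; do
      shasum -a 256 "$f" >> sha256hashes.txt
    done

Moving these steps into a checked-in script keeps the YAML short and lets sibling createDrop scripts produce the same drop layout for darwin and win32, which is what the commit titles above suggest.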