Mirror of https://github.com/ckaczor/azuredatastudio.git, synced 2026-02-17 02:51:36 -05:00
Build yamls (#8360)
* add darwin build
* add global product build
* add linux build
* fix build
* fix linux; add win32
* formatting
* formatting
* formatting
* formatting
* fix win32
* fix windows
* fix python
* fix linux display
* fix linux
* fix windows naming
* fix windows timeout
* add tagging
* add env for building
* add schedule
build/azure-pipelines/linux/sql-product-build-linux.yml (new file, 162 lines added)
@@ -0,0 +1,162 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: '10.15.1'

- script: |
    set -e
    sudo apt-get update
    sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
    sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
    sudo chmod +x /etc/init.d/xvfb
    sudo update-rc.d xvfb defaults
    sudo service xvfb start
  displayName: 'System Installs'

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.x"

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'

- script: |
    set -e
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
  displayName: Install dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    set -e
    yarn postinstall
  displayName: Run postinstall scripts
  condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
    set -e
    yarn gulp mixin
  displayName: Mix in quality

- script: |
    set -e
    yarn gulp vscode-linux-x64-min
  displayName: Build

- script: |
    set -e
    yarn gulp install-sqltoolsservice
  displayName: Install sqltoolsservice

- task: ArchiveFiles@2 # WHY ARE WE DOING THIS?
  displayName: 'Archive build scripts source'
  inputs:
    rootFolderOrFile: '$(Build.SourcesDirectory)/build'
    archiveType: tar
    archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'

- task: PublishBuildArtifacts@1 # WHY ARE WE DOING THIS?
  displayName: 'Publish Artifact: build scripts source'
  inputs:
    PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
    ArtifactName: source

- script: DISPLAY=:10 ./scripts/test-extensions-unit.sh
  displayName: 'Run Stable Extension Unit Tests'
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
  displayName: 'Run Unstable Extension Unit Tests'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))

- script: |
    set -e
    yarn gulp vscode-linux-x64-build-deb
  displayName: Build Deb

- script: |
    set -e
    yarn gulp vscode-linux-x64-build-rpm
  displayName: Build Rpm

- task: ArchiveFiles@1 # WHY ARE WE DOING THIS?
  displayName: 'Archive files '
  inputs:
    rootFolder: '$(Build.SourcesDirectory)/../azuredatastudio-linux-x64'
    archiveType: tar
    archiveFile: '$(Build.ArtifactStagingDirectory)/azuredatastudio-linux-x64.tar.gz'

- task: CopyFiles@2
  displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory) (deb)'
  inputs:
    SourceFolder: '$(Build.SourcesDirectory)/.build/linux/deb/amd64/deb'
    Contents: '*.deb'
    TargetFolder: '$(Build.ArtifactStagingDirectory)'

- task: CopyFiles@2
  displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory) (rpm)'
  inputs:
    SourceFolder: '$(Build.SourcesDirectory)/.build/linux/rpm/x86_64/'
    Contents: '*.rpm'
    TargetFolder: '$(Build.ArtifactStagingDirectory)'

- script: | # WHY ARE WE DOING THIS?
    set -e
    BUILD="$(Build.SourcesDirectory)/../azuredatastudio-linux-x64"
    PACKAGEJSON="$BUILD/resources/app/package.json"
    VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
    COMMIT_ID=$(git rev-parse HEAD)

    echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$(Build.ArtifactStagingDirectory)/version.json"
  displayName: 'Create version.json'

- script: | # WHY ARE WE DOING THIS?
    set -e
    for f in *
    do
      shasum -a 256 "$f" >> sha256hashes.txt
    done
  workingDirectory: '$(Build.ArtifactStagingDirectory)'
  displayName: 'Get SHA256 Hashes'
  continueOnError: true

- task: PublishBuildArtifacts@1
  displayName: 'Publish Artifact: drop'

- task: PublishTestResults@2
  displayName: 'Publish Test Results test-results.xml'
  inputs:
    testResultsFiles: 'test-results.xml'
    searchFolder: '$(Build.SourcesDirectory)'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishCodeCoverageResults@1
  displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
  inputs:
    codeCoverageTool: Cobertura
    summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
    reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
  continueOnError: true

- script: 'echo "##vso[build.addbuildtag]Scheduled" '
  displayName: 'Add scheduled tag if needed'
  condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['Build.Reason'], 'Schedule'))

- script: 'echo "##vso[build.addbuildtag]AutoRelease" '
  displayName: 'Add AutoRelease tag if needed copy'
  condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['AUTO_RELEASE'], 'true'))

- script: 'echo "##vso[build.addbuildtag]PerfTestCandidate" '
  displayName: 'Add PerfTestCandidate tag if needed'
  condition: and(in(variables['Agent.JobStatus'], 'Succeeded'), eq(variables['VSCODE_QUALITY'], 'insider'))
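For context: several steps above are gated on pipeline variables (RUN_TESTS, RUN_UNSTABLE_TESTS, AUTO_RELEASE, VSCODE_QUALITY) and on Build.Reason being 'Schedule'. The "add schedule" item in the commit message refers to a scheduled trigger, which would live in the top-level pipeline definition rather than in this steps file. Below is a minimal sketch of what such a trigger and queue-time variables could look like; the cron expression, branch name, and variable values are illustrative assumptions, not taken from this commit.

# Hypothetical example only: a scheduled trigger and variables like these would make
# Build.Reason equal 'Schedule' and enable the test/tagging steps in the file above.
schedules:
- cron: "0 5 * * *"          # nightly at 05:00 UTC (illustrative)
  displayName: Nightly build
  branches:
    include:
    - main                   # branch name is an assumption
  always: true               # run even when there are no new changes

variables:
  RUN_TESTS: true            # illustrative values; the real pipeline presumably
  RUN_UNSTABLE_TESTS: false  # sets these in the pipeline definition or at queue time
  AUTO_RELEASE: false

Build.Reason is set automatically by Azure DevOps for scheduled runs, so the "Add scheduled tag" step only fires on those builds. Similarly, CacheRestored appears to be set by the RestoreCache task on a cache hit, which is why the yarn install and SaveCache steps run only on a miss while the postinstall step runs only on a hit.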