azuredatastudio/build/azure-pipelines/darwin/sql-product-build-darwin.yml

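# macOS (darwin) product-build steps for Azure Data Studio: restore the compilation
# output, install dependencies, build and test the app, optionally codesign it, and
# publish the resulting drop.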
steps:
- task: InstallAppleCertificate@2
  displayName: 'Install developer certificate'
  inputs:
    certSecureFile: 'osx_signing_key.p12'
  condition: eq(variables['signed'], true)
- task: DownloadPipelineArtifact@2
  inputs:
    artifact: Compilation
  displayName: Download compilation output
- script: |
    set -e
    tar -xzf $(Pipeline.Workspace)/compilation.tar.gz
  displayName: Extract compilation output
- task: NodeTool@0
  inputs:
    versionSpec: "16.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.x"
- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
    KeyVaultName: ado-secrets
    SecretsFilter: 'github-distro-mixin-password'
- script: |
    set -e
    cat << EOF > ~/.netrc
    machine github.com
    login azuredatastudio
    password $(github-distro-mixin-password)
    EOF
    git config user.email "sqltools@service.microsoft.com"
    git config user.name "AzureDataStudio"
  displayName: Prepare tooling
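# The distro merge pulls in the commit recorded in the "distro" field of package.json from
# the $(VSCODE_MIXIN_REPO) remote, bringing this branch up to date with the VS Code merge-base.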
- script: |
    set -e
    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
    git fetch distro
    git merge $(node -p "require('./package.json').distro")
  displayName: Merge distro
- script: |
    mkdir -p .build
    node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js $(NPM_CONFIG_ARCH) > .build/yarnlockhash
  displayName: Prepare yarn cache key
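# node_modules caching: the cache key combines the target arch with a hash produced by
# sql-computeNodeModulesCacheKey.js above (presumably derived from the yarn lockfiles).
# On a cache hit NODE_MODULES_RESTORED is set to 'true', the archived node_modules are
# extracted, and the yarn install and archive-creation steps below are skipped.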
- task: Cache@2
  displayName: Restore Cache - Node Modules
  inputs:
    key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
    path: .build/node_modules_cache
    cacheHitVar: NODE_MODULES_RESTORED
  continueOnError: true
- script: |
    set -e
    tar -xzf .build/node_modules_cache/cache.tgz
  condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
  displayName: Extract node_modules archive
- script: |
    set -e
    export npm_config_arch=$(NPM_CONFIG_ARCH)
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
  displayName: Install dependencies
  env:
    GITHUB_TOKEN: $(github-distro-mixin-password)
  condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
    set -e
    node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
    mkdir -p .build/node_modules_cache
    tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
  condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
  displayName: Create node_modules archive
- script: |
    set -e
    yarn postinstall
  displayName: Run postinstall scripts
  condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
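# 'Mix in quality' applies the quality-specific product configuration (presumably sourced
# from the distro repository merged above), and the Build step produces the minified darwin
# app for $(VSCODE_ARCH). The build and the later extension compile are skipped when
# VSCODE_ARCH is 'universal', since that flavor is assembled from the arm64 and x64 outputs
# further down.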
- script: |
    set -e
    node build/azure-pipelines/sql-mixin
  displayName: Mix in quality
- script: |
    set -e
    yarn gulp package-rebuild-extensions
    yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
  displayName: Build
  env:
    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
  condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
- script: |
    set -e
    ./scripts/test.sh --build --tfs "Unit Tests" --coverage
  displayName: Run unit tests
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
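# test.sh runs the unit tests against the built product (--build); with --coverage it is
# expected to write the Cobertura report under .build/coverage, which the
# PublishCodeCoverageResults task at the end of this file picks up.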
# {{SQL CARBON TODO}} Reenable "Run Core Integration Tests"
# - script: |
#     # Figure out the full absolute path of the product we just built,
#     # including the remote server, and configure the integration tests
#     # to run with these builds instead of running out of sources.
#     set -e
#     APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
#     APP_NAME="`ls $APP_ROOT | head -n 1`"
#     INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
#     VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-darwin" \
#     ./scripts/test-integration.sh --build --tfs "Integration Tests"
#   displayName: Run core integration tests
#   condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
    set -e
    yarn gulp compile-extensions
  displayName: Compile Extensions
  condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
# Per https://developercommunity.visualstudio.com/t/variablesexpressions-dont-work-with-continueonerro/1187733 we can't use variables
# in continueOnError directly, so instead we make two copies of the task and run only one or the other based on the SMOKE_FAIL_ON_ERROR value.
# {{SQL CARBON TODO}} -- reenable
# - script: |
#     set -e
#     APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-$(VSCODE_ARCH)
#     APP_NAME="`ls $APP_ROOT | head -n 1`"
#     yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions" --extraArgs "--disable-extension Microsoft.kusto --disable-extension Microsoft.azuremonitor"
#   displayName: Run core smoke tests (Continue on Error)
#   continueOnError: true
#   condition: and(succeeded(), and(or(eq(variables['RUN_TESTS'], 'true'), eq(variables['RUN_SMOKE_TESTS'], 'true')), ne(variables['SMOKE_FAIL_ON_ERROR'], 'true')))
# - script: |
#     set -e
#     APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-$(VSCODE_ARCH)
#     APP_NAME="`ls $APP_ROOT | head -n 1`"
#     yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions"
#   displayName: Run core smoke tests (Fail on Error)
#   condition: and(succeeded(), and(or(eq(variables['RUN_TESTS'], 'true'), eq(variables['RUN_SMOKE_TESTS'], 'true')), eq(variables['SMOKE_FAIL_ON_ERROR'], 'true')))
# - script: |
#     set -e
#     node ./node_modules/playwright/install.js
#     VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-web-darwin" \
#     yarn smoketest --web --headless --screenshots "$(build.artifactstagingdirectory)/smokeshots"
#   displayName: Run smoke tests (Browser)
#   continueOnError: true
#   condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
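# Universal-build path: when VSCODE_ARCH is 'universal', the unsigned archives (presumably
# produced by the matching arm64 and x64 builds) are downloaded, unpacked, and merged into
# a single universal .app by build/darwin/create-universal-app.js.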
- task: DownloadBuildArtifacts@0
  displayName: 'Download arm64 and x64 packages'
  inputs:
    downloadType: specific
    itemPattern: 'drop/darwin/archive/azuredatastudio-darwin-@(arm64|x64)-unsigned.zip'
    downloadPath: $(agent.builddirectory)
  condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- script: |
    set -e
    unzip $(agent.builddirectory)/drop/darwin/archive/azuredatastudio-darwin-x64-unsigned.zip -d $(agent.builddirectory)/azuredatastudio-darwin-x64
    unzip $(agent.builddirectory)/drop/darwin/archive/azuredatastudio-darwin-arm64-unsigned.zip -d $(agent.builddirectory)/azuredatastudio-darwin-arm64
    DEBUG=* node build/darwin/create-universal-app.js
  displayName: Create Universal App
  condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
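# Manual codesign sequence (only when 'signed' is true): clear extended attributes and stray
# AppleDouble (._*) files, deep-sign the bundle, re-sign each Mach-O binary and the Electron
# Framework individually, then re-sign the outer .app. Every invocation uses the hardened
# runtime (--options runtime), the checked-in entitlements file, and the LPV3BJJYXS identity.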
- script: |
    set -e
    pushd ../azuredatastudio-darwin-$(VSCODE_ARCH)
    ls
    echo "Cleaning the application"
    xattr -cr *.app
    cd *.app
    find . -name '._*' -print0 | xargs -0 rm -rf --
    cd ..
    echo "Signing the application with deep"
    codesign --deep --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS *.app
    cd *.app
    ls
    echo "Signing specific components"
    find . -type f -print0 | xargs -0 file | grep ': *Mach-O' | sed 's/: *Mach-O.*//' | while read -r file; do codesign --options runtime --timestamp --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS --force "$file" || break; done
    echo "Signing Electron again..."
    codesign --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS Contents/Frameworks/Electron\ Framework.framework
    cd ..
    echo "Signing the entire application one more time"
    codesign --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS *.app
    popd
  displayName: 'Manual codesign'
  condition: and(succeeded(), eq(variables['signed'], true))
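# A signature verification pass is not part of this pipeline; if one were wanted, a minimal
# sketch using standard codesign flags could look like the commented step below (illustrative
# only, not checked in):
# - script: |
#     set -e
#     pushd ../azuredatastudio-darwin-$(VSCODE_ARCH)
#     codesign --verify --deep --strict --verbose=2 *.app
#     popd
#   displayName: Verify code signature (example)
#   condition: and(succeeded(), eq(variables['signed'], true))
# The next two steps zip the .app with ditto (keeping the parent folder so the archive
# unpacks to a single bundle) and run createDrop.sh, which presumably stages the build
# output for the publish tasks below.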
- script: |
    set -e
    mkdir -p .build/darwin/archive
    pushd ../azuredatastudio-darwin-$(VSCODE_ARCH)
    ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin-$(VSCODE_ARCH)-unsigned.zip
    popd
  displayName: 'Archive'
- script: |
    set -e
    ./build/azure-pipelines/darwin/createDrop.sh
  displayName: Create Drop
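# Publishing: the drop is uploaded unconditionally (condition: always()) so artifacts are
# available even when an earlier step failed; test results and code coverage are published
# only for runs where the corresponding tests were enabled.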
- task: PublishBuildArtifacts@1
  displayName: 'Publish Artifact: drop'
  condition: always()
- task: PublishTestResults@2
  displayName: 'Publish Test Results'
  inputs:
    testResultsFiles: "*-results.xml"
    searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
  continueOnError: true
  condition: and(succeededOrFailed(), or(eq(variables['RUN_TESTS'], 'true'), eq(variables['RUN_SMOKE_TESTS'], 'true')))
- task: PublishCodeCoverageResults@1
  displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
  inputs:
    codeCoverageTool: Cobertura
    summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
    reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
  displayName: 'Component Detection'
  inputs:
    failOnAlert: true