parameters:
  extensionsToUnitTest: []

steps:
  - task: NodeTool@0
    inputs:
      versionSpec: "14.x"

  - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
    inputs:
      versionSpec: "1.x"

  - task: AzureKeyVault@1
    displayName: 'Azure Key Vault: Get Secrets'
    inputs:
      azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
      KeyVaultName: ado-secrets
      SecretsFilter: 'github-distro-mixin-password'

  - task: DownloadPipelineArtifact@2
    inputs:
      artifact: Compilation
    displayName: Download compilation output

  - script: |
      set -e
      tar -xzf $(Pipeline.Workspace)/compilation.tar.gz
    displayName: Extract compilation output

  # Write a ~/.netrc so git can authenticate to github.com with the
  # distro-mixin password pulled from Key Vault above.
  - script: |
      set -e
      cat << EOF > ~/.netrc
      machine github.com
      login azuredatastudio
      password $(github-distro-mixin-password)
      EOF

      git config user.email "sqltools@service.microsoft.com"
      git config user.name "AzureDataStudio"
    displayName: Prepare tooling

  - script: |
      set -e
      git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
      git fetch distro
      git merge $(node -p "require('./package.json').distro")
    displayName: Merge distro

  # Cache key is derived from the lockfiles so the node_modules cache is
  # invalidated whenever dependencies change.
  - script: |
      mkdir -p .build
      node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash
    displayName: Prepare yarn cache key

  - task: Cache@2
    displayName: Restore Cache - Node Modules
    inputs:
      key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
      path: .build/node_modules_cache
      cacheHitVar: NODE_MODULES_RESTORED
    continueOnError: true

  - script: |
      set -e
      tar -xzf .build/node_modules_cache/cache.tgz
    condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
    displayName: Extract node_modules archive

  # Only run a full install when the cache missed.
  - script: |
      set -e
      CHILD_CONCURRENCY=1 yarn --frozen-lockfile
    displayName: Install dependencies
    env:
      GITHUB_TOKEN: $(github-distro-mixin-password)
    condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

  - script: |
      set -e
      node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
      mkdir -p .build/node_modules_cache
      tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
    condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
    displayName: Create node_modules archive

  # On a cache hit yarn install is skipped, so postinstall scripts must be
  # run explicitly.
  - script: |
      set -e
      yarn postinstall
    displayName: Run postinstall scripts
    condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))

  - script: |
      set -e
      node build/azure-pipelines/mixin
    displayName: Mix in quality

  - script: |
      set -e
      yarn gulp vscode-linux-x64-min-ci
    displayName: Build
    env:
      VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

  - script: |
      set -e
      yarn gulp package-rebuild-extensions
      yarn gulp compile-extensions
      yarn gulp package-external-extensions
    displayName: Package External extensions

  - script: |
      set -e
      yarn gulp package-langpacks
    displayName: Package Langpacks

  # Tests need a display; start the X virtual framebuffer service.
  - script: |
      set -e
      service xvfb start
    displayName: Start xvfb
    condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

  - script: |
      set -e
      DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests" # Disable code coverage since it's currently broken --coverage
    displayName: Run unit tests (Electron)
    condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

  - script: |
      # Figure out the full absolute path of the product we just built
      # including the remote server and configure the integration tests
      # to run with these builds instead of running out of sources.
      set -e
      APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
      APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
      INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
      VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-linux-x64" \
      DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
    displayName: Run integration tests (Electron)
    condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

  - script: |
      # Figure out the full absolute path of the product we just built
      # including the remote server and configure the unit tests
      # to run with these builds instead of running out of sources.
      set -e
      APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
      APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
      INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
      NO_CLEANUP=1 \
      VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-linux-x64" \
      DISPLAY=:10 ./scripts/test-extensions-unit.sh --build --tfs "Extension Unit Tests"
    displayName: 'Run Stable Extension Unit Tests'
    condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

  - script: |
      set -e
      APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
      APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
      INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
      DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
    displayName: 'Run Unstable Extension Unit Tests'
    continueOnError: true
    condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))

  - bash: |
      set -e
      mkdir -p $(Build.ArtifactStagingDirectory)/logs/linux-x64
      cd /tmp
      for folder in adsuser*/
      do
        folder=${folder%/}
        # Only archive directories we want for debugging purposes
        tar -czvf $(Build.ArtifactStagingDirectory)/logs/linux-x64/$folder.tar.gz $folder/User $folder/logs
      done
    displayName: Archive Logs
    continueOnError: true
    condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

  - script: |
      set -e
      yarn gulp vscode-linux-x64-build-deb
    displayName: Build Deb

  - script: |
      set -e
      yarn gulp vscode-linux-x64-build-rpm
    displayName: Build Rpm

  - task: UseDotNet@2
    displayName: 'Install .NET Core sdk for signing'
    inputs:
      packageType: sdk
      version: 5.0.x
      installationPath: $(Agent.ToolsDirectory)/dotnet

  - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
    inputs:
      ConnectedServiceName: 'Code Signing'
      FolderPath: '$(Build.SourcesDirectory)/.build'
      Pattern: 'extensions/*.vsix,langpacks/*.vsix'
      signConfigType: inlineSignParams
      inlineOperation: |
        [
          {
            "keyCode": "CP-233016",
            "operationSetCode": "OpcSign",
            "parameters": [
              {
                "parameterName": "FileDigest",
                "parameterValue": "/fd \"SHA256\""
              }
            ],
            "toolName": "sign",
            "toolVersion": "1.0"
          },
          {
            "keyCode": "CP-233016",
            "operationSetCode": "OpcVerify",
            "parameters": [],
            "toolName": "sign",
            "toolVersion": "1.0"
          }
        ]
      SessionTimeout: 120
    displayName: 'Signing Extensions and Langpacks'
    condition: and(succeeded(), eq(variables['signed'], true))

  - script: |
      set -e
      ./build/azure-pipelines/linux/createDrop.sh
    displayName: Create Drop

  - script: |
      set -e
      shopt -s globstar
      mkdir -p $(Build.ArtifactStagingDirectory)/test-results/coverage
      cp --parents -r $(Build.SourcesDirectory)/extensions/*/coverage/** $(Build.ArtifactStagingDirectory)/test-results/coverage
    displayName: Copy Coverage
    condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

  - task: PublishTestResults@2
    displayName: 'Publish Test Results test-results.xml'
    inputs:
      testResultsFiles: '*.xml'
      searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
    continueOnError: true
    condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))

  - task: PublishBuildArtifacts@1
    displayName: 'Publish Artifact: crash reports'
    inputs:
      PathtoPublish: '$(Build.SourcesDirectory)/.build/crashes'
      ArtifactName: crashes
    condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))

  # NOTE(review): no inputs (PathtoPublish/ArtifactName) are present for this
  # step in the source being fixed — confirm against the original file.
  - task: PublishBuildArtifacts@1
    displayName: 'Publish Artifact: drop'
    condition: succeededOrFailed()

  - task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
    displayName: 'Component Detection'
    inputs:
      failOnAlert: true