Mirror of https://github.com/ckaczor/azuredatastudio.git (synced 2026-02-16 10:58:30 -05:00)
@@ -1 +1 @@
-2019-08-30T20:24:23.714Z
+2019-12-01T02:20:58.491Z
@@ -1,4 +1,24 @@
 steps:
+- script: |
+    mkdir -p .build
+    echo -n $BUILD_SOURCEVERSION > .build/commit
+    echo -n $VSCODE_QUALITY > .build/quality
+  displayName: Prepare cache flag
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
+    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
+    vstsFeed: 'BuildCache'
+    platformIndependent: true
+    alias: 'Compilation'
+
+- script: |
+    set -e
+    exit 1
+  displayName: Check RestoreCache
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
 - task: NodeTool@0
   inputs:
     versionSpec: '10.15.3'
@@ -7,12 +27,6 @@ steps:
   inputs:
     versionSpec: '1.x'
 
-- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
-  inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
-    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
-    vstsFeed: 'BuildCache'
-
 - task: AzureKeyVault@1
   displayName: 'Azure Key Vault: Get Secrets'
   inputs:
@@ -40,6 +54,12 @@ steps:
     git merge $(node -p "require('./package.json').distro")
   displayName: Merge distro
 
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'BuildCache'
+
 - script: |
     set -e
     CHILD_CONCURRENCY=1 yarn --frozen-lockfile
@@ -48,7 +68,7 @@ steps:
 
 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
     vstsFeed: 'BuildCache'
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
@@ -71,8 +91,13 @@ steps:
 
 - script: |
     set -e
-    yarn gulp vscode-darwin-min
+    yarn gulp package-rebuild-extensions
+    yarn gulp vscode-darwin-min-ci
+    yarn gulp vscode-reh-darwin-min-ci
+    yarn gulp vscode-reh-web-darwin-min-ci
   displayName: Build
+  env:
+    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
 
 - task: ArchiveFiles@2 # WHY ARE WE DOING THIS?
   displayName: 'Archive build scripts source'
@@ -140,6 +165,10 @@ steps:
   timeoutInMinutes: 20
   continueOnError: true
 
+- script: |
+    pushd .. && mv azuredatastudio-reh-darwin azuredatastudio-server-darwin && zip -Xry $(Build.ArtifactStagingDirectory)/azuredatastudio-server-darwin.zip azuredatastudio-server-darwin && popd
+  displayName: 'Package server'
+
 - script: | # WHY ARE WE DOING THIS?
     set -e
     PACKAGEJSON=`ls $(Build.SourcesDirectory)/package.json`
build/azure-pipelines/docker/Dockerfile (new file, 16 lines)
@@ -0,0 +1,16 @@
+#Download base image ubuntu 16.04
+FROM ubuntu:16.04
+
+# Update Software repository
+RUN apt-get update
+
+RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0
+
+ADD ./ /opt/ads-server
+
+RUN chmod +x /opt/ads-server/server.sh && chmod +x /opt/ads-server/node
+
+CMD ["/opt/ads-server/server.sh"]
+
+EXPOSE 8000:8000
+EXPOSE 8001:8001
build/azure-pipelines/linux/Dockerfile (new file, 20 lines)
@@ -0,0 +1,20 @@
+#Download base image ubuntu 16.04
+FROM ubuntu:16.04
+
+# Update Software repository
+RUN apt-get update --fix-missing
+
+RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
+    libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
+    libnss3 libasound2 make gcc libx11-dev fakeroot rpm
+
+#docker
+RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
+RUN apt-key fingerprint 0EBFCD88
+RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
+RUN apt-get update
+RUN apt-get -y install docker-ce docker-ce-cli containerd.io
+
+ADD ./xvfb.init /etc/init.d/xvfb
+RUN chmod +x /etc/init.d/xvfb
+RUN update-rc.d xvfb defaults
@@ -1,17 +1,27 @@
 steps:
-- task: NodeTool@0
-  inputs:
-    versionSpec: '10.15.1'
-
-- script: |
-    set -e
-    sudo apt-get update
-    sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
-    sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
-    sudo chmod +x /etc/init.d/xvfb
-    sudo update-rc.d xvfb defaults
-    sudo service xvfb start
-  displayName: 'System Installs'
+- script: |
+    mkdir -p .build
+    echo -n $BUILD_SOURCEVERSION > .build/commit
+    echo -n $VSCODE_QUALITY > .build/quality
+  displayName: Prepare cache flag
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
+    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
+    vstsFeed: 'BuildCache'
+    platformIndependent: true
+    alias: 'Compilation'
+
+- script: |
+    set -e
+    exit 1
+  displayName: Check RestoreCache
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: NodeTool@0
+  inputs:
+    versionSpec: '10.15.1'
 
 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
   inputs:
@@ -45,7 +55,7 @@ steps:
 
 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
   inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'
 
@@ -57,7 +67,7 @@ steps:
 
 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
     vstsFeed: 'BuildCache'
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
@@ -76,25 +86,28 @@ steps:
 - script: |
     set -e
     yarn gulp install-sqltoolsservice
-  displayName: Install sqltoolsservice
-
-- script: |
-    set -e
     yarn gulp install-ssmsmin
-  displayName: Install ssmsmin
+  displayName: Install extension binaries
 
 - script: |
     set -e
-    yarn gulp vscode-linux-x64-min
+    yarn gulp vscode-linux-x64-min-ci
+    yarn gulp vscode-reh-linux-x64-min-ci
+    yarn gulp vscode-reh-web-linux-x64-min-ci
   displayName: Build
+  env:
+    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
 
 - script: |
     set -e
+    service xvfb start
+  displayName: Start xvfb
+  condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
+
+- script: |
+    set -e
+    yarn gulp package-rebuild-extensions
     yarn gulp compile-extensions
-  displayName: Compile Extensions
-
-- script: |
-    set -e
     yarn gulp package-external-extensions
   displayName: Package External extensions
 
@@ -111,11 +124,19 @@ steps:
     PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
     ArtifactName: source
 
-- script: DISPLAY=:10 ./scripts/test-extensions-unit.sh
+- script: |
+    APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
+    APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
+    INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
+    DISPLAY=:10 ./scripts/test-extensions-unit.sh
   displayName: 'Run Stable Extension Unit Tests'
   condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
 
-- script: DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
+- script: |
+    APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
+    APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
+    INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
+    DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
   displayName: 'Run Unstable Extension Unit Tests'
   continueOnError: true
   condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
@@ -157,6 +178,20 @@ steps:
     SourceFolder: '$(Build.SourcesDirectory)/../vsix'
     TargetFolder: '$(Build.ArtifactStagingDirectory)/vsix'
 
+- script: |
+    set -e
+    docker build -t azuredatastudio-server -f build/azure-pipelines/docker/Dockerfile $(agent.builddirectory)/azuredatastudio-reh-linux-x64
+    docker save azuredatastudio-server | gzip > azuredatastudio-server-docker.tar.gz
+    cp azuredatastudio-server-docker.tar.gz $(Build.ArtifactStagingDirectory)
+  displayName: "Create docker image"
+
+- script: |
+    set -e
+    cd $(agent.builddirectory)
+    tar --owner=0 --group=0 -czf azuredatastudio-server-linux-x64.tar.gz azuredatastudio-reh-linux-x64
+    cp azuredatastudio-server-linux-x64.tar.gz $(Build.ArtifactStagingDirectory)
+  displayName: 'Package server'
+
 - script: | # WHY ARE WE DOING THIS?
     set -e
     PACKAGEJSON="$(Build.SourcesDirectory)/package.json"
@@ -1,30 +1,54 @@
+resources:
+  containers:
+  - container: linux-x64
+    image: sqltoolscontainers.azurecr.io/linux-build-agent:x64
+    endpoint: ContainerRegistry
+
 jobs:
+- job: Compile
+  pool:
+    vmImage: 'Ubuntu-16.04'
+  container: linux-x64
+  steps:
+  - template: sql-product-compile.yml
+
 - job: macOS
   condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
   pool:
     vmImage: macOS 10.13
+  dependsOn:
+  - Compile
   steps:
   - template: darwin/sql-product-build-darwin.yml
-    timeoutInMinutes: 90
-    cancelTimeoutInMinutes: 5
 
 - job: Linux
   condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
   pool:
     vmImage: 'Ubuntu-16.04'
+  container: linux-x64
+  dependsOn:
+  - Compile
   steps:
   - template: linux/sql-product-build-linux.yml
-    timeoutInMinutes: 90
-    cancelTimeoutInMinutes: 5
 
 - job: Windows
   condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
   pool:
-    name: mssqltools
+    vmImage: VS2017-Win2016
+  dependsOn:
+  - Compile
   steps:
   - template: win32/sql-product-build-win32.yml
-    timeoutInMinutes: 90
-    cancelTimeoutInMinutes: 5
+
+- job: Windows_Test
+  condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
+  pool:
+    name: mssqltools
+  dependsOn:
+  - Linux
+  - Windows
+  steps:
+  - template: win32/sql-product-test-win32.yml
 
 - job: Release
   condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
@@ -34,6 +58,7 @@ jobs:
   - macOS
   - Linux
   - Windows
+  - Windows_Test
   steps:
   - template: sql-release.yml
 
build/azure-pipelines/sql-product-compile.yml (new file, 112 lines)
@@ -0,0 +1,112 @@
+steps:
+- script: |
+    mkdir -p .build
+    echo -n $BUILD_SOURCEVERSION > .build/commit
+    echo -n $VSCODE_QUALITY > .build/quality
+  displayName: Prepare cache flag
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
+    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
+    vstsFeed: 'BuildCache'
+    platformIndependent: true
+    alias: 'Compilation'
+
+- task: NodeTool@0
+  inputs:
+    versionSpec: "10.15.1"
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
+  inputs:
+    versionSpec: "1.x"
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: AzureKeyVault@1
+  displayName: 'Azure Key Vault: Get Secrets'
+  inputs:
+    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
+    KeyVaultName: ado-secrets
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- script: |
+    set -e
+    cat << EOF > ~/.netrc
+    machine github.com
+    login azuredatastudio
+    password $(github-distro-mixin-password)
+    EOF
+
+    git config user.email "andresse@microsoft.com"
+    git config user.name "AzureDataStudio"
+  displayName: Prepare tooling
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- script: |
+    set -e
+    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
+    git fetch distro
+    git merge $(node -p "require('./package.json').distro")
+  displayName: Merge distro
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'BuildCache'
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- script: |
+    set -e
+    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
+  displayName: Install dependencies
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'BuildCache'
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
+
+- script: |
+    set -e
+    yarn postinstall
+  displayName: Run postinstall scripts
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
+
+# Mixin must run before optimize, because the CSS loader will
+# inline small SVGs
+- script: |
+    set -e
+    node build/azure-pipelines/mixin
+  displayName: Mix in quality
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- script: |
+    set -e
+    yarn gulp hygiene --skip-tslint
+    yarn gulp tslint
+  displayName: Run hygiene, tslint
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
+
+- script: |
+    set -e
+    yarn gulp compile-build
+    yarn gulp compile-extensions-build
+    yarn gulp minify-vscode
+    yarn gulp minify-vscode-reh
+    yarn gulp minify-vscode-reh-web
+  displayName: Compile
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
+    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
+    vstsFeed: 'BuildCache'
+    platformIndependent: true
+    alias: 'Compilation'
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
@@ -1,4 +1,24 @@
 steps:
+- powershell: |
+    mkdir .build -ea 0
+    "$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
+    "$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
+  displayName: Prepare cache flag
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
+    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
+    vstsFeed: 'BuildCache'
+    platformIndependent: true
+    alias: 'Compilation'
+
+- powershell: |
+    $ErrorActionPreference = "Stop"
+    exit 1
+  displayName: Check RestoreCache
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
 - task: NodeTool@0
   inputs:
     versionSpec: "10.15.1"
@@ -7,14 +27,10 @@ steps:
   inputs:
     versionSpec: "1.x"
 
-- powershell: |
-    git clean -fxd
-  displayName: Clean repo
-
-# - task: UsePythonVersion@0
-#   inputs:
-#     versionSpec: '2.x'
-#     addToPath: true
+- task: UsePythonVersion@0
+  inputs:
+    versionSpec: '2.x'
+    addToPath: true
 
 - task: AzureKeyVault@1
   displayName: 'Azure Key Vault: Get Secrets'
@@ -38,11 +54,11 @@ steps:
     git merge $(node -p "require('./package.json').distro")
   displayName: Merge distro
 
-# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
-#   inputs:
-#     keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
-#     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
-#     vstsFeed: 'BuildCache'
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'BuildCache'
 
 - powershell: |
     . build/azure-pipelines/win32/exec.ps1
@@ -50,21 +66,21 @@ steps:
     $env:CHILD_CONCURRENCY="1"
     exec { yarn --frozen-lockfile }
   displayName: Install dependencies
-  # condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 
-# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
-#   inputs:
-#     keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
-#     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
-#     vstsFeed: 'BuildCache'
-#   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
+- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'BuildCache'
+  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 
-# - powershell: |
-#     . build/azure-pipelines/win32/exec.ps1
-#     $ErrorActionPreference = "Stop"
-#     exec { yarn postinstall }
-#   displayName: Run postinstall scripts
-#   condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
+- powershell: |
+    . build/azure-pipelines/win32/exec.ps1
+    $ErrorActionPreference = "Stop"
+    exec { yarn postinstall }
+  displayName: Run postinstall scripts
+  condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
 
 - powershell: |
     . build/azure-pipelines/win32/exec.ps1
@@ -81,8 +97,13 @@ steps:
 - powershell: |
     . build/azure-pipelines/win32/exec.ps1
     $ErrorActionPreference = "Stop"
-    exec { yarn gulp "vscode-win32-x64-min" }
+    exec { yarn gulp "package-rebuild-extensions" }
+    exec { yarn gulp "vscode-win32-x64-min-ci" }
+    exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
+    exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
   displayName: Build
+  env:
+    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
 
 - task: ArchiveFiles@2 # WHY
   displayName: 'Archive build scripts source'
@@ -105,69 +126,6 @@ steps:
   condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
   displayName: Run unstable tests
 
-- task: AzureKeyVault@1
-  displayName: 'Azure Key Vault: SqlToolsSecretStore'
-  inputs:
-    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
-    KeyVaultName: SqlToolsSecretStore
-    SecretsFilter: 'ads-integration-test-azure-server,ads-integration-test-azure-server-password,ads-integration-test-azure-server-username,ads-integration-test-bdc-server,ads-integration-test-bdc-server-password,ads-integration-test-bdc-server-username,ads-integration-test-standalone-server,ads-integration-test-standalone-server-password,ads-integration-test-standalone-server-username'
-
-- powershell: |
-    . build/azure-pipelines/win32/exec.ps1
-    $ErrorActionPreference = "Stop"
-    exec { .\scripts\sql-test-integration.bat }
-  continueOnError: true
-  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
-  displayName: Run stable tests
-  env:
-    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
-    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
-    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
-    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
-    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
-    STANDALONE_SQL: $(ads-integration-test-standalone-server)
-    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
-    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
-    AZURE_SQL: $(ads-integration-test-azure-server)
-
-- powershell: |
-    . build/azure-pipelines/win32/exec.ps1
-    $ErrorActionPreference = "Stop"
-    exec { .\scripts\sql-test-integration.bat }
-  continueOnError: true
-  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
-  displayName: Run release tests
-  env:
-    ADS_TEST_GREP: (.*@REL@|integration test setup)
-    ADS_TEST_INVERT_GREP: 0
-    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
-    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
-    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
-    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
-    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
-    STANDALONE_SQL: $(ads-integration-test-standalone-server)
-    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
-    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
-    AZURE_SQL: $(ads-integration-test-azure-server)
-
-- powershell: |
-    . build/azure-pipelines/win32/exec.ps1
-    $ErrorActionPreference = "Stop"
-    exec { .\scripts\sql-test-integration-unstable.bat }
-  continueOnError: true
-  condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
-  displayName: Run unstable integration tests
-  env:
-    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
-    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
-    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
-    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
-    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
-    STANDALONE_SQL: $(ads-integration-test-standalone-server)
-    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
-    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
-    AZURE_SQL: $(ads-integration-test-azure-server)
-
 - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
   displayName: 'Sign out code'
   inputs:
@@ -249,11 +207,15 @@ steps:
     SourceFolder: '$(Build.SourcesDirectory)/.build/win32-x64/system-setup/'
     TargetFolder: '$(Build.ArtifactStagingDirectory)/' # our release scripts except system exe to be in root and user setup to be under /user-setup
 
-- task: CopyFiles@2
-  displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory)/vsix'
-  inputs:
-    SourceFolder: '$(Build.SourcesDirectory)/../vsix'
-    TargetFolder: '$(Build.ArtifactStagingDirectory)/vsix'
+- powershell: |
+    . build/azure-pipelines/win32/exec.ps1
+    $ErrorActionPreference = "Stop"
+    $Repo = "$(pwd)"
+    $Root = "$Repo\.."
+    $LegacyServer = "$Root\azuredatastudio-reh-win32-x64"
+    $ServerZip = "$(Build.ArtifactStagingDirectory)\azuredatastudio-server-win32-x64.zip"
+    exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $LegacyServer -r }
+  displayName: 'Package server'
 
 - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
   displayName: 'Sign installers'
build/azure-pipelines/win32/sql-product-test-win32.yml (new file, 106 lines)
@@ -0,0 +1,106 @@
+steps:
+- task: DownloadPipelineArtifact@2
+  inputs:
+    buildType: 'current'
+    targetPath: '$(Agent.TempDirectory)'
+    artifactName: drop
+
+- task: NodeTool@0
+  inputs:
+    versionSpec: "10.15.1"
+
+- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
+  inputs:
+    versionSpec: "1.x"
+
+- powershell: |
+    . build/azure-pipelines/win32/exec.ps1
+    $ErrorActionPreference = "Stop"
+    $env:CHILD_CONCURRENCY="1"
+    exec { git clean -fxd }
+  displayName: Clean repo
+
+- powershell: |
+    . build/azure-pipelines/win32/exec.ps1
+    $ErrorActionPreference = "Stop"
+    $env:CHILD_CONCURRENCY="1"
+    exec { yarn --frozen-lockfile }
+  displayName: Install dependencies
+
+- powershell: |
+    . build/azure-pipelines/win32/exec.ps1
+    $ErrorActionPreference = "Stop"
+    exec { .\node_modules\7zip\7zip-lite\7z.exe x $(Agent.TempDirectory)\azuredatastudio-win32-x64.zip -o$(Agent.TempDirectory)\azuredatastudio-win32-x64 }
+  displayName: Unzip artifact
+
+- task: AzureKeyVault@1
+  displayName: 'Azure Key Vault: SqlToolsSecretStore'
+  inputs:
+    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
+    KeyVaultName: SqlToolsSecretStore
+    SecretsFilter: 'ads-integration-test-azure-server,ads-integration-test-azure-server-password,ads-integration-test-azure-server-username,ads-integration-test-bdc-server,ads-integration-test-bdc-server-password,ads-integration-test-bdc-server-username,ads-integration-test-standalone-server,ads-integration-test-standalone-server-password,ads-integration-test-standalone-server-username'
+
+- powershell: |
+    . build/azure-pipelines/win32/exec.ps1
+    $ErrorActionPreference = "Stop"
+    $AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
+    $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
+    $AppNameShort = $AppProductJson.nameShort
+    exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
+  continueOnError: true
+  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
+  displayName: Run stable tests
+  env:
+    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
+    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
+    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
+    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
+    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
+    STANDALONE_SQL: $(ads-integration-test-standalone-server)
+    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
+    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
+    AZURE_SQL: $(ads-integration-test-azure-server)
+
+- powershell: |
+    . build/azure-pipelines/win32/exec.ps1
+    $ErrorActionPreference = "Stop"
+    $AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
+    $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
+    $AppNameShort = $AppProductJson.nameShort
+    exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
+  continueOnError: true
+  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
+  displayName: Run release tests
+  env:
+    ADS_TEST_GREP: (.*@REL@|integration test setup)
+    ADS_TEST_INVERT_GREP: 0
+    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
+    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
+    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
+    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
+    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
+    STANDALONE_SQL: $(ads-integration-test-standalone-server)
+    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
+    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
+    AZURE_SQL: $(ads-integration-test-azure-server)
+
+- powershell: |
+    . build/azure-pipelines/win32/exec.ps1
+    $ErrorActionPreference = "Stop"
+    $AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
+    $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
+    $AppNameShort = $AppProductJson.nameShort
+    exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; .\scripts\sql-test-integration-unstable.bat }
+  continueOnError: true
+  condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
+  displayName: Run unstable integration tests
+  env:
+    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
+    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
+    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
+    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
+    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
+    STANDALONE_SQL: $(ads-integration-test-standalone-server)
+    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
+    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
+    AZURE_SQL: $(ads-integration-test-azure-server)
||||||
@@ -160,3 +160,8 @@ gulp.task('package-external-extensions', task.series(
|
|||||||
return Promise.all(vsixes);
|
return Promise.all(vsixes);
|
||||||
})
|
})
|
||||||
));
|
));
|
||||||
|
|
||||||
|
gulp.task('package-rebuild-extensions', task.series(
|
||||||
|
task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
|
||||||
|
task.define('rebuild-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))),
|
||||||
|
));
|
||||||
|
|||||||
@@ -206,6 +206,11 @@ const externalExtensions = [
     'liveshare',
     'database-project'
 ];
+// extensions that require a rebuild since they have native parts
+const rebuildExtensions = [
+    'big-data-cluster',
+    'mssql'
+];
 const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
 // {{SQL CARBON EDIT}} - End
 function packageLocalExtensionsStream() {
@@ -252,3 +257,24 @@ function packageExternalExtensionsStream() {
 }
 exports.packageExternalExtensionsStream = packageExternalExtensionsStream;
 // {{SQL CARBON EDIT}} - End
+function cleanRebuildExtensions(root) {
+    return Promise.all(rebuildExtensions.map(async (e) => {
+        await util2.rimraf(path.join(root, e))();
+    })).then();
+}
+exports.cleanRebuildExtensions = cleanRebuildExtensions;
+function packageRebuildExtensionsStream() {
+    const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
+        .map(manifestPath => {
+            const extensionPath = path.dirname(path.join(root, manifestPath));
+            const extensionName = path.basename(extensionPath);
+            return { name: extensionName, path: extensionPath };
+        })
+        .filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
+    const builtExtensions = extenalExtensionDescriptions.map(extension => {
+        return fromLocal(extension.path)
+            .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
+    });
+    return es.merge(builtExtensions);
+}
+exports.packageRebuildExtensionsStream = packageRebuildExtensionsStream;
@@ -243,6 +243,12 @@ const externalExtensions = [
     'database-project'
 ];
 
+// extensions that require a rebuild since they have native parts
+const rebuildExtensions = [
+    'big-data-cluster',
+    'mssql'
+];
+
 interface IBuiltInExtension {
     name: string;
     version: string;
@@ -303,3 +309,26 @@ export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
     return es.merge(builtExtensions);
 }
 // {{SQL CARBON EDIT}} - End
+
+export function cleanRebuildExtensions(root: string): Promise<void> {
+    return Promise.all(rebuildExtensions.map(async e => {
+        await util2.rimraf(path.join(root, e))();
+    })).then();
+}
+
+export function packageRebuildExtensionsStream(): NodeJS.ReadWriteStream {
+    const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
+        .map(manifestPath => {
+            const extensionPath = path.dirname(path.join(root, manifestPath));
+            const extensionName = path.basename(extensionPath);
+            return { name: extensionName, path: extensionPath };
+        })
+        .filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
+
+    const builtExtensions = extenalExtensionDescriptions.map(extension => {
+        return fromLocal(extension.path)
+            .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
+    });
+
+    return es.merge(builtExtensions);
+}
@@ -1,5 +1,3 @@
 src/**
-out/**
 tsconfig.json
-extension.webpack.config.js
 yarn.lock
@@ -1,3 +1,4 @@
+@echo OFF
 setlocal
 
 pushd %~dp0\..
@@ -7,6 +8,29 @@ set VSCODEEXTENSIONSDIR=%TMP%\adsext-%RANDOM%-%TIME:~6,5%
 echo VSCODEUSERDATADIR=%VSCODEUSERDATADIR%
 echo VSCODEEXTENSIONSDIR=%VSCODEEXTENSIONSDIR%
 
+:: Figure out which Electron to use for running tests
+if "%INTEGRATION_TEST_ELECTRON_PATH%"=="" (
+  :: Run out of sources: no need to compile as code.sh takes care of it
+  set INTEGRATION_TEST_ELECTRON_PATH=.\scripts\code.bat
+
+  echo "Running integration tests out of sources."
+) else (
+  :: Run from a built: need to compile all test extensions
+  call yarn gulp compile-extension:integration-tests
+  if NOT "%INTEGRATION_TEST_CLI_PATH%"=="" (
+    echo "using vsix directory %AGENT_TEMPDIRECTORY%\vsix"
+    for /f %%f IN ('dir /b /s "%AGENT_TEMPDIRECTORY%\vsix\*"') DO (
+      echo "installing extension %%f"
+      :: use the source cli, we could potentially change this if we ever care about testing this, but this is easier atm
+      call %INTEGRATION_TEST_CLI_PATH% --install-extension "%%f" --force --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR%
+    )
+  ) else (
+    echo "Not installing external extensions"
+  )
+
+  echo "Running integration tests with '%INTEGRATION_TEST_ELECTRON_PATH%' as build."
+)
+
 :: Default to only running stable tests if test grep isn't set
 if "%ADS_TEST_GREP%" == "" (
   echo Running stable tests only
@@ -14,17 +38,16 @@ if "%ADS_TEST_GREP%" == "" (
   SET ADS_TEST_INVERT_GREP=1
 )
 
-@echo OFF
-
 if "%SKIP_PYTHON_INSTALL_TEST%" == "1" (
   echo Skipping Python installation tests.
 ) else (
   set PYTHON_TEST_PATH=%VSCODEUSERDATADIR%\TestPythonInstallation
   echo %PYTHON_TEST_PATH%
-  call .\scripts\code.bat --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\notebook --extensionTestsPath=%~dp0\..\extensions\notebook\out\integrationTest --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
+  call %INTEGRATION_TEST_ELECTRON_PATH% --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\notebook --extensionTestsPath=%~dp0\..\extensions\notebook\out\integrationTest --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
 )
 
-call .\scripts\code.bat -nogpu --extensionDevelopmentPath=%~dp0\..\extensions\integration-tests --extensionTestsPath=%~dp0\..\extensions\integration-tests\out --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
+call %INTEGRATION_TEST_ELECTRON_PATH% -nogpu --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222 ^
+  --extensionDevelopmentPath=%~dp0\..\extensions\integration-tests --extensionTestsPath=%~dp0\..\extensions\integration-tests\out
 
 rmdir /s /q %VSCODEUSERDATADIR%
 rmdir /s /q %VSCODEEXTENSIONSDIR%
@@ -10,6 +10,7 @@ else
   ROOT=$(dirname $(dirname $(readlink -f $0)))
   VSCODEUSERDATADIR=`mktemp -d 2>/dev/null`
   VSCODEEXTDIR=`mktemp -d 2>/dev/null`
+  LINUX_NO_SANDBOX="--no-sandbox" # Electron 6 introduces a chrome-sandbox that requires root to run. This can fail. Disable sandbox via --no-sandbox.
 fi
 
 # Default to only running stable tests if test grep isn't set
@@ -42,10 +43,10 @@ else
   export PYTHON_TEST_PATH=$VSCODEUSERDATADIR/TestPythonInstallation
   echo $PYTHON_TEST_PATH
 
-  $INTEGRATION_TEST_ELECTRON_PATH --nogpu --extensionDevelopmentPath=$ROOT/extensions/notebook --extensionTestsPath=$ROOT/extensions/notebook/out/integrationTest --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR --remote-debugging-port=9222 --disable-telemetry --disable-crash-reporter --disable-updates --skip-getting-started --disable-inspect
+  "$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/notebook --extensionTestsPath=$ROOT/extensions/notebook/out/integrationTest --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR --remote-debugging-port=9222 --disable-telemetry --disable-crash-reporter --disable-updates --skip-getting-started --disable-inspect
 fi
 
-$INTEGRATION_TEST_ELECTRON_PATH --nogpu --extensionDevelopmentPath=$ROOT/extensions/admin-pack \
+"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/admin-pack \
 --extensionDevelopmentPath=$ROOT/extensions/admin-tool-ext-win \
 --extensionDevelopmentPath=$ROOT/extensions/agent \
 --extensionDevelopmentPath=$ROOT/extensions/azurecore \
@@ -9,6 +9,26 @@ set VSCODEEXTENSIONSDIR=%TMP%\adsext-%RANDOM%-%TIME:~6,5%
|
|||||||
echo %VSCODEUSERDATADIR%
|
echo %VSCODEUSERDATADIR%
|
||||||
echo %VSCODEEXTENSIONSDIR%
|
echo %VSCODEEXTENSIONSDIR%
|
||||||
|
|
||||||
|
:: Figure out which Electron to use for running tests
|
||||||
|
if "%INTEGRATION_TEST_ELECTRON_PATH%"=="" (
|
||||||
|
:: Run out of sources: no need to compile as code.sh takes care of it
|
||||||
|
set INTEGRATION_TEST_ELECTRON_PATH=.\scripts\code.bat
|
||||||
|
|
||||||
|
echo "Running integration tests out of sources."
|
||||||
|
) else (
|
||||||
|
:: Run from a built: need to compile all test extensions
|
||||||
|
call yarn gulp compile-extension:admin-tool-ext-win
|
||||||
|
call yarn gulp compile-extension:agent
|
||||||
|
call yarn gulp compile-extension:azurecore
|
||||||
|
call yarn gulp compile-extension:cms
|
||||||
|
call yarn gulp compile-extension:dacpac
|
||||||
|
call yarn gulp compile-extension:schema-compare
|
||||||
|
call yarn gulp compile-extension:notebook
|
||||||
|
call yarn gulp compile-extension:resource-deployment
|
||||||
|
|
||||||
|
echo "Running integration tests with '%INTEGRATION_TEST_ELECTRON_PATH%' as build."
|
||||||
|
)
|
||||||
|
|
||||||
:: Default to only running stable tests if test grep isn't set
|
:: Default to only running stable tests if test grep isn't set
|
||||||
if "%ADS_TEST_GREP%" == "" (
|
if "%ADS_TEST_GREP%" == "" (
|
||||||
echo Running stable tests only
|
echo Running stable tests only
|
||||||
@@ -21,42 +41,42 @@ if "%ADS_TEST_GREP%" == "" (
|
|||||||
echo ***************************************************
|
echo ***************************************************
|
||||||
echo *** starting admin tool extension windows tests ***
|
echo *** starting admin tool extension windows tests ***
|
||||||
echo ***************************************************
|
echo ***************************************************
|
||||||
call .\scripts\code.bat --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\admin-tool-ext-win --extensionTestsPath=%~dp0\..\extensions\admin-tool-ext-win\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --disableExtensions --remote-debugging-port=9222
|
call "%INTEGRATION_TEST_ELECTRON_PATH%" --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\admin-tool-ext-win --extensionTestsPath=%~dp0\..\extensions\admin-tool-ext-win\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --disableExtensions --remote-debugging-port=9222
|
||||||
|
|
||||||
echo ****************************
|
echo ****************************
|
||||||
echo *** starting agent tests ***
|
echo *** starting agent tests ***
|
||||||
echo ****************************
|
echo ****************************
|
||||||
call .\scripts\code.bat --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\agent --extensionTestsPath=%~dp0\..\extensions\agent\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
call "%INTEGRATION_TEST_ELECTRON_PATH%" --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\agent --extensionTestsPath=%~dp0\..\extensions\agent\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
||||||
|
|
||||||
echo ********************************
|
echo ********************************
|
||||||
echo *** starting azurecore tests ***
|
echo *** starting azurecore tests ***
|
||||||
echo ********************************
|
echo ********************************
|
||||||
call .\scripts\code.bat --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\azurecore --extensionTestsPath=%~dp0\..\extensions\azurecore\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
call "%INTEGRATION_TEST_ELECTRON_PATH%" --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\azurecore --extensionTestsPath=%~dp0\..\extensions\azurecore\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
||||||
|
|
||||||
echo **************************
|
echo **************************
|
||||||
echo *** starting cms tests ***
|
echo *** starting cms tests ***
|
||||||
echo **************************
|
echo **************************
|
||||||
call .\scripts\code.bat --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\cms --extensionTestsPath=%~dp0\..\extensions\cms\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
call "%INTEGRATION_TEST_ELECTRON_PATH%" --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\cms --extensionTestsPath=%~dp0\..\extensions\cms\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
||||||
|
|
||||||
echo *****************************
|
echo *****************************
|
||||||
echo *** starting dacpac tests ***
|
echo *** starting dacpac tests ***
|
||||||
echo *****************************
|
echo *****************************
|
||||||
call .\scripts\code.bat --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\dacpac --extensionTestsPath=%~dp0\..\extensions\dacpac\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
call "%INTEGRATION_TEST_ELECTRON_PATH%" --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\dacpac --extensionTestsPath=%~dp0\..\extensions\dacpac\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
||||||
|
|
||||||
echo *************************************
|
echo *************************************
|
||||||
echo *** starting schema compare tests ***
|
echo *** starting schema compare tests ***
|
||||||
echo *************************************
|
echo *************************************
|
||||||
call .\scripts\code.bat --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\schema-compare --extensionTestsPath=%~dp0\..\extensions\schema-compare\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
call "%INTEGRATION_TEST_ELECTRON_PATH%" --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\schema-compare --extensionTestsPath=%~dp0\..\extensions\schema-compare\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
||||||
|
|
||||||
echo *******************************
|
echo *******************************
|
||||||
echo *** starting notebook tests ***
|
echo *** starting notebook tests ***
|
||||||
echo *******************************
|
echo *******************************
|
||||||
call .\scripts\code.bat --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\notebook --extensionTestsPath=%~dp0\..\extensions\notebook\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
call "%INTEGRATION_TEST_ELECTRON_PATH%" --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\notebook --extensionTestsPath=%~dp0\..\extensions\notebook\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
||||||
|
|
||||||
echo ******************************************
|
echo ******************************************
|
||||||
echo *** starting resource deployment tests ***
|
echo *** starting resource deployment tests ***
|
||||||
echo ******************************************
|
echo ******************************************
|
||||||
call .\scripts\code.bat --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\resource-deployment --extensionTestsPath=%~dp0\..\extensions\resource-deployment\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
call "%INTEGRATION_TEST_ELECTRON_PATH%" --nogpu --extensionDevelopmentPath=%~dp0\..\extensions\resource-deployment --extensionTestsPath=%~dp0\..\extensions\resource-deployment\out\test --user-data-dir=%VSCODEUSERDATADIR% --extensions-dir=%VSCODEEXTENSIONSDIR% --remote-debugging-port=9222
|
||||||
|
|
||||||
if %errorlevel% neq 0 exit /b %errorlevel%
|
if %errorlevel% neq 0 exit /b %errorlevel%
|
||||||
|
|
||||||
@@ -12,6 +12,7 @@ else
 	ROOT=$(dirname $(dirname $(readlink -f $0)))
 	VSCODEUSERDATADIR=`mktemp -d 2>/dev/null`
 	VSCODEEXTDIR=`mktemp -d 2>/dev/null`
+	LINUX_NO_SANDBOX="--no-sandbox" # Electron 6 introduces a chrome-sandbox that requires root to run. This can fail. Disable sandbox via --no-sandbox.
 fi

 # Default to only running stable tests if test grep isn't set
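Note on the hunk above: LINUX_NO_SANDBOX is expanded unquoted when the tests are launched further down, so on platforms where the variable is never set the flag simply drops out of the command line. A minimal standalone sketch of that optional-flag pattern, with a placeholder command standing in for the real Electron invocation:

# Sketch of the optional-flag pattern (placeholder command, not the repo's script).
EXTRA_FLAGS=""
if [ "$(uname)" = "Linux" ]; then
	EXTRA_FLAGS="--no-sandbox"   # Electron 6's chrome-sandbox needs root; skip it in CI
fi
# Unquoted on purpose: expands to nothing when EXTRA_FLAGS is empty.
my-electron-app $EXTRA_FLAGS --nogpu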
@@ -21,6 +22,19 @@ if [[ "$ADS_TEST_GREP" == "" ]]; then
 	export ADS_TEST_INVERT_GREP=1
 fi

+# Figure out which Electron to use for running tests
+if [ -z "$INTEGRATION_TEST_ELECTRON_PATH" ]
+then
+	# Run out of sources: no need to compile as code.sh takes care of it
+	INTEGRATION_TEST_ELECTRON_PATH="./scripts/code.sh"
+
+	echo "Running integration tests out of sources."
+else
+	# Run from a built: need to compile all test extensions
+
+	echo "Running integration tests with '$INTEGRATION_TEST_ELECTRON_PATH' as build."
+fi
+
 cd $ROOT
 echo $VSCODEUSERDATADIR
 echo $VSCODEEXTDIR
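The block added in this hunk makes the built Electron opt-in: when INTEGRATION_TEST_ELECTRON_PATH is unset, the script falls back to ./scripts/code.sh and runs straight out of sources. A hypothetical way to point a run at a build instead; both the binary path and the script name below are placeholders, not taken from this diff:

# Illustrative only: run the integration tests against a built Electron.
export INTEGRATION_TEST_ELECTRON_PATH="/path/to/built/azuredatastudio"
./scripts/your-integration-test-script.sh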
@@ -28,42 +42,42 @@ echo $VSCODEEXTDIR
 echo ***************************************************
 echo *** starting admin tool extension windows tests ***
 echo ***************************************************
-./scripts/code.sh --nogpu --extensionDevelopmentPath=$ROOT/extensions/admin-tool-ext-win --extensionTestsPath=$ROOT/extensions/admin-tool-ext-win/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR
+"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/admin-tool-ext-win --extensionTestsPath=$ROOT/extensions/admin-tool-ext-win/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR

 echo ****************************
 echo *** starting agent tests ***
 echo ****************************
-./scripts/code.sh --nogpu --extensionDevelopmentPath=$ROOT/extensions/agent --extensionTestsPath=$ROOT/extensions/agent/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR
+"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/agent --extensionTestsPath=$ROOT/extensions/agent/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR

 echo ********************************
 echo *** starting azurecore tests ***
 echo ********************************
-./scripts/code.sh --nogpu --extensionDevelopmentPath=$ROOT/extensions/azurecore --extensionTestsPath=$ROOT/extensions/azurecore/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR
+"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/azurecore --extensionTestsPath=$ROOT/extensions/azurecore/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR

 echo **************************
 echo *** starting cms tests ***
 echo **************************
-./scripts/code.sh --nogpu --extensionDevelopmentPath=$ROOT/extensions/cms --extensionTestsPath=$ROOT/extensions/cms/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR
+"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/cms --extensionTestsPath=$ROOT/extensions/cms/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR

 echo *****************************
 echo *** starting dacpac tests ***
 echo *****************************
-./scripts/code.sh --nogpu --extensionDevelopmentPath=$ROOT/extensions/dacpac --extensionTestsPath=$ROOT/extensions/dacpac/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR
+"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/dacpac --extensionTestsPath=$ROOT/extensions/dacpac/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR

 echo *************************************
 echo *** starting schema compare tests ***
 echo *************************************
-./scripts/code.sh --nogpu --extensionDevelopmentPath=$ROOT/extensions/schema-compare --extensionTestsPath=$ROOT/extensions/schema-compare/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR
+"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/schema-compare --extensionTestsPath=$ROOT/extensions/schema-compare/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR

 echo *******************************
 echo *** starting notebook tests ***
 echo *******************************
-./scripts/code.sh --nogpu --extensionDevelopmentPath=$ROOT/extensions/notebook --extensionTestsPath=$ROOT/extensions/notebook/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR
+"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/notebook --extensionTestsPath=$ROOT/extensions/notebook/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR

 echo ******************************************
 echo *** starting resource deployment tests ***
 echo ******************************************
-./scripts/code.sh --nogpu --extensionDevelopmentPath=$ROOT/extensions/resource-deployment --extensionTestsPath=$ROOT/extensions/resource-deployment/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR
+"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu --extensionDevelopmentPath=$ROOT/extensions/resource-deployment --extensionTestsPath=$ROOT/extensions/resource-deployment/out/test --user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR

 rm -r $VSCODEUSERDATADIR
 rm -r $VSCODEEXTDIR
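Every extension above is launched with an identical command line, with only the extension folder changing, and the temporary directories are removed once the last run finishes. A compact equivalent written as a loop, purely as a sketch rather than how the script is actually structured:

# Sketch: loop form of the per-extension invocations above (not the actual script).
for ext in admin-tool-ext-win agent azurecore cms dacpac schema-compare notebook resource-deployment; do
	"$INTEGRATION_TEST_ELECTRON_PATH" $LINUX_NO_SANDBOX --nogpu \
		--extensionDevelopmentPath=$ROOT/extensions/$ext \
		--extensionTestsPath=$ROOT/extensions/$ext/out/test \
		--user-data-dir=$VSCODEUSERDATADIR --extensions-dir=$VSCODEEXTDIR
done
rm -r $VSCODEUSERDATADIR $VSCODEEXTDIR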