Add Apple Silicon build and Universal build for macOS (#20776)
* use stages
* fix indention
* add jobs section
* fix error
* indention
* arm64 for macos
* other stages not run
* fix container
* fix
* fix stage
* skip tests
* variable
* dependency
* fix name error
* sts
* const
* fall back to x64
* pass in variable
* universal flavor
* fix universal
* fix path
* remove
* special processing
* return on error
* copy instead of move
* restore sts
* release
* fix error
* Fix readme
* remove commented code
* add comments
* add issue
* update comment
* pr comments
* delete universal yml
* update the generated js file
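The commit restructures the build pipeline from a flat job list into stages and splits the macOS build into x64, arm64, and universal flavors. For orientation, here is a condensed sketch of the resulting stage/job layout, paraphrased from the sql-product-build pipeline diff further down this page; conditions, pools, variables, and timeouts are elided and list syntax is abbreviated:

# Condensed sketch only; see the full pipeline diff below for the exact YAML.
stages:
- stage: Compile
  jobs:
  - job: Compile
- stage: macOS
  dependsOn: [Compile]
  jobs:
  - job: macOS                    # VSCODE_ARCH: x64, NPM_CONFIG_ARCH: x64
  - job: macOS_Signing            # signs the x64 archive
  - job: macOS_ARM64              # VSCODE_ARCH: arm64, tests skipped
  - job: macOS_Signing_ARM64
  - job: macOS_Universal          # VSCODE_ARCH: universal, depends on macOS + macOS_ARM64
  - job: macOS_Signing_Universal
- stage: Linux
  dependsOn: [Compile]
- stage: Windows
  dependsOn: [Compile]
- stage: Release
  dependsOn: [macOS, Linux, Windows]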
@@ -25,7 +25,9 @@ Go to our [download page](https://aka.ms/getazuredatastudio) for more specific i
 - [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
 - [Windows System Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider)
 - [Windows ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider)
-- [macOS ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider)
+- [macOS ZIP (Universal) - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin-universal/insider)
+- [macOS ZIP (Intel Chip) - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider)
+- [macOS ZIP (Apple Silicon) - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin-arm64/insider)
 - [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)
 
 See the [change log](https://github.com/Microsoft/azuredatastudio/blob/main/CHANGELOG.md) for additional details of what's in this release.
@@ -1 +1 @@
-2021-11-19T02:27:18.022Z
+2022-10-06T02:27:18.022Z
@@ -9,12 +9,12 @@ steps:
 displayName: 'Download Build Artifacts'
 inputs:
 downloadType: specific
-itemPattern: 'drop/darwin/archive/azuredatastudio-darwin-unsigned.zip'
+itemPattern: 'drop/darwin/archive/azuredatastudio-darwin-$(VSCODE_ARCH)-unsigned.zip'
 downloadPath: '$(Build.SourcesDirectory)/.build/'
 
 - script: |
 pushd $(Build.SourcesDirectory)/.build/drop/darwin/archive
-mv azuredatastudio-darwin-unsigned.zip azuredatastudio-darwin.zip
+mv azuredatastudio-darwin-$(VSCODE_ARCH)-unsigned.zip azuredatastudio-darwin-$(VSCODE_ARCH).zip
 displayName: 'Rename the file'
 
 - task: UseDotNet@2
@@ -29,7 +29,7 @@ steps:
 inputs:
 ConnectedServiceName: 'Code Signing'
 FolderPath: '$(Build.SourcesDirectory)/.build/drop/darwin/archive'
-Pattern: 'azuredatastudio-darwin.zip'
+Pattern: 'azuredatastudio-darwin-$(VSCODE_ARCH).zip'
 signConfigType: inlineSignParams
 inlineOperation: |
 [
@@ -47,7 +47,7 @@ steps:
 condition: and(succeeded(), eq(variables['signed'], true))
 
 - script: |
-zip -d $(Build.SourcesDirectory)/.build/drop/darwin/archive/azuredatastudio-darwin.zip "*.pkg"
+zip -d $(Build.SourcesDirectory)/.build/drop/darwin/archive/azuredatastudio-darwin-$(VSCODE_ARCH).zip "*.pkg"
 displayName: Clean Archive
 condition: and(succeeded(), eq(variables['signed'], true))
 
@@ -56,7 +56,7 @@ steps:
 inputs:
 ConnectedServiceName: 'Code Signing'
 FolderPath: '$(Build.SourcesDirectory)/.build/drop/darwin/archive'
-Pattern: 'azuredatastudio-darwin.zip'
+Pattern: 'azuredatastudio-darwin-$(VSCODE_ARCH).zip'
 signConfigType: inlineSignParams
 inlineOperation: |
 [
@@ -52,7 +52,7 @@ steps:
 
 - script: |
 mkdir -p .build
-node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash
+node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js $(NPM_CONFIG_ARCH) > .build/yarnlockhash
 displayName: Prepare yarn cache key
 
 - task: Cache@2
@@ -71,6 +71,7 @@ steps:
 
 - script: |
 set -e
+export npm_config_arch=$(NPM_CONFIG_ARCH)
 CHILD_CONCURRENCY=1 yarn --frozen-lockfile
 displayName: Install dependencies
 env:
@@ -99,10 +100,11 @@ steps:
 - script: |
 set -e
 yarn gulp package-rebuild-extensions
-yarn gulp vscode-darwin-x64-min-ci
+yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
 displayName: Build
 env:
 VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
+condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
 
 - script: |
 set -e
@@ -115,7 +117,7 @@ steps:
 # including the remote server and configure the integration tests
 # to run with these builds instead of running out of sources.
 set -e
-APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
+APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-$(VSCODE_ARCH)
 APP_NAME="`ls $APP_ROOT | head -n 1`"
 INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
 VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-darwin" \
@@ -127,12 +129,13 @@ steps:
 set -e
 yarn gulp compile-extensions
 displayName: Compile Extensions
+condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
 
 # Per https://developercommunity.visualstudio.com/t/variablesexpressions-dont-work-with-continueonerro/1187733 we can't use variables
 # in continueOnError directly so instead make two copies of the task and only run one or the other based on the SMOKE_FAIL_ON_ERROR value
 - script: |
 set -e
-APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
+APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-$(VSCODE_ARCH)
 APP_NAME="`ls $APP_ROOT | head -n 1`"
 yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions"
 displayName: Run smoke tests (Electron) (Continue on Error)
@@ -141,7 +144,7 @@ steps:
 
 - script: |
 set -e
-APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
+APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-$(VSCODE_ARCH)
 APP_NAME="`ls $APP_ROOT | head -n 1`"
 yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions"
 displayName: Run smoke tests (Electron) (Fail on Error)
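As the comment above the first smoke-test copy notes, continueOnError cannot take a variable expression, so the smoke-test step exists in two copies: one that continues on error and one that fails the build. A minimal sketch of that pattern, assuming the gating conditions key off the SMOKE_FAIL_ON_ERROR variable named in the comment (the actual condition lines sit outside the visible hunks):

# Hypothetical sketch; the SMOKE_FAIL_ON_ERROR-based conditions are assumed, not shown in the diff.
- script: yarn smoketest --build "$APP_ROOT/$APP_NAME"
  displayName: Run smoke tests (Electron) (Continue on Error)
  continueOnError: true
  condition: and(succeeded(), ne(variables['SMOKE_FAIL_ON_ERROR'], 'true'))

- script: yarn smoketest --build "$APP_ROOT/$APP_NAME"
  displayName: Run smoke tests (Electron) (Fail on Error)
  condition: and(succeeded(), eq(variables['SMOKE_FAIL_ON_ERROR'], 'true'))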
@@ -156,9 +159,25 @@ steps:
 # continueOnError: true
 # condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
 
+- task: DownloadBuildArtifacts@0
+displayName: 'Download arm64 and x64 packages'
+inputs:
+downloadType: specific
+itemPattern: 'drop/darwin/archive/azuredatastudio-darwin-@(arm64|x64)-unsigned.zip'
+downloadPath: $(agent.builddirectory)
+condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
+
 - script: |
 set -e
-pushd ../azuredatastudio-darwin-x64
+unzip $(agent.builddirectory)/drop/darwin/archive/azuredatastudio-darwin-x64-unsigned.zip -d $(agent.builddirectory)/azuredatastudio-darwin-x64
+unzip $(agent.builddirectory)/drop/darwin/archive/azuredatastudio-darwin-arm64-unsigned.zip -d $(agent.builddirectory)/azuredatastudio-darwin-arm64
+DEBUG=* node build/darwin/create-universal-app.js
+displayName: Create Universal App
+condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
+
+- script: |
+set -e
+pushd ../azuredatastudio-darwin-$(VSCODE_ARCH)
 ls
 
 echo "Cleaning the application"
@@ -188,8 +207,8 @@ steps:
 - script: |
 set -e
 mkdir -p .build/darwin/archive
-pushd ../azuredatastudio-darwin-x64
-ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
+pushd ../azuredatastudio-darwin-$(VSCODE_ARCH)
+ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin-$(VSCODE_ARCH).zip
 popd
 displayName: 'Archive (no signing)'
 condition: and(succeeded(), eq(variables['signed'], false))
@@ -197,8 +216,8 @@ steps:
 - script: |
 set -e
 mkdir -p .build/darwin/archive
-pushd ../azuredatastudio-darwin-x64
-ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin-unsigned.zip
+pushd ../azuredatastudio-darwin-$(VSCODE_ARCH)
+ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin-$(VSCODE_ARCH)-unsigned.zip
 popd
 displayName: 'Archive'
 condition: and(succeeded(), eq(variables['signed'], true))
@@ -13,12 +13,22 @@ $Version = $VersionJson.version
 $Quality = $VersionJson.quality
 $CommitId = $VersionJson.commit
 
-$ZipName = "azuredatastudio-darwin.zip"
-$Zip = "$artifactsDir\darwin\archive\$ZipName"
-$UploadName = "azuredatastudio-macos-$Version"
+$Flavors = "x64","arm64","universal"
+$FlavorSuffixes = "","-arm64","-universal"
 
-If (-NOT ($Quality -eq "stable")) {
-$UploadName = "$UploadName-$Quality"
+For($i = 0; $i -lt $Flavors.Length; $i++)
+{
+$Flavor = $Flavors[$i]
+$FlavorSuffix = $FlavorSuffixes[$i]
+$ZipName = "azuredatastudio-darwin-$Flavor.zip"
+$Zip = "$artifactsDir\darwin\archive\$ZipName"
+$UploadName = "azuredatastudio-macos$FlavorSuffix-$Version"
+
+If (-NOT ($Quality -eq "stable")) {
+$UploadName = "$UploadName-$Quality"
+}
+
+$Platform = "darwin$FlavorSuffix"
+
+node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $Platform archive "$UploadName.zip" $Version true $Zip $CommitId
 }
-
-node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive "$UploadName.zip" $Version true $Zip $CommitId
@@ -4,87 +4,142 @@ resources:
 image: sqltoolscontainers.azurecr.io/linux-build-agent:6
 endpoint: SqlToolsContainers
 
-jobs:
-- job: Compile
-pool:
-vmImage: 'Ubuntu-20.04'
-container: linux-x64
-steps:
-- script: |
-set -e
-echo "##vso[build.addbuildtag]$(VSCODE_QUALITY)"
-displayName: Add Quality Build Tag
-- template: sql-product-compile.yml
-timeoutInMinutes: 120
+stages:
+- stage: Compile
+jobs:
+- job: Compile
+pool:
+vmImage: 'Ubuntu-20.04'
+container: linux-x64
+steps:
+- script: |
+set -e
+echo "##vso[build.addbuildtag]$(VSCODE_QUALITY)"
+displayName: Add Quality Build Tag
+- template: sql-product-compile.yml
+timeoutInMinutes: 120
 
-- job: macOS
+- stage: macOS
 condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
 pool:
 vmImage: 'macos-latest'
 dependsOn:
 - Compile
-steps:
-- template: darwin/sql-product-build-darwin.yml
-timeoutInMinutes: 90
+jobs:
+- job: macOS
+variables:
+NPM_CONFIG_ARCH: x64
+VSCODE_ARCH: x64
+steps:
+- template: darwin/sql-product-build-darwin.yml
+timeoutInMinutes: 90
 
 - job: macOS_Signing
-condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), eq(variables['signed'], true), ne(variables['VSCODE_QUALITY'], 'saw'))
-pool:
-vmImage: 'macos-latest'
+variables:
+VSCODE_ARCH: x64
+condition: and(succeeded(), eq(variables['signed'], true))
 dependsOn:
 - macOS
 steps:
 - template: darwin/sql-product-build-darwin-signing.yml
 timeoutInMinutes: 60
 
-- job: Linux
-condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
-pool:
-vmImage: 'Ubuntu-20.04'
-container: linux-x64
-dependsOn:
-- Compile
+- job: macOS_ARM64
+variables:
+NPM_CONFIG_ARCH: arm64
+VSCODE_ARCH: arm64
+# Do not run tests for arm64 build
+RUN_TESTS: false
+RUN_SMOKE_TESTS: false
 steps:
-- template: linux/sql-product-build-linux.yml
-parameters:
-extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azcli", "azurecore", "cms", "dacpac", "data-workspace", "import", "machine-learning", "notebook", "resource-deployment", "schema-compare", "sql-bindings", "sql-database-projects"]
-timeoutInMinutes: 90
+- template: darwin/sql-product-build-darwin.yml
+timeoutInMinutes: 90
 
-- job: Windows
-condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
-pool:
-vmImage: 'windows-2019'
+- job: macOS_Signing_ARM64
+variables:
+VSCODE_ARCH: arm64
+condition: and(succeeded(), eq(variables['signed'], true))
 dependsOn:
-- Compile
+- macOS_ARM64
 steps:
-- template: win32/sql-product-build-win32.yml
-timeoutInMinutes: 90
+- template: darwin/sql-product-build-darwin-signing.yml
+timeoutInMinutes: 60
 
-# disable due to invalid machine pool (karlb 3/9/2022)
-# - job: Windows_Test
-# condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
-# pool:
-# name: mssqltools
-# dependsOn:
-# - Linux
-# - Windows
-# steps:
-# - template: win32/sql-product-test-win32.yml
-# timeoutInMinutes: 90
+- job: macOS_Universal
+variables:
+NPM_CONFIG_ARCH: x64
+VSCODE_ARCH: universal
+# Do not run tests for universal build
+RUN_TESTS: false
+RUN_SMOKE_TESTS: false
+dependsOn:
+- macOS
+- macOS_ARM64
+steps:
+- template: darwin/sql-product-build-darwin.yml
+timeoutInMinutes: 90
 
-- job: Release
-condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
-pool:
-vmImage: 'Ubuntu-20.04'
+- job: macOS_Signing_Universal
+variables:
+VSCODE_ARCH: universal
+condition: and(succeeded(), eq(variables['signed'], true))
 dependsOn:
-- macOS
-- Linux
-- Windows
-# disable due to invalid machine pool (karlb 3/9/2022)
-# - Windows_Test
-- macOS_Signing
-steps:
-- template: sql-release.yml
+- macOS_Universal
+steps:
+- template: darwin/sql-product-build-darwin-signing.yml
+timeoutInMinutes: 60
+- stage: Linux
+condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], true))
+dependsOn:
+- Compile
+jobs:
+- job: Linux
+pool:
+vmImage: 'Ubuntu-20.04'
+container: linux-x64
+steps:
+- template: linux/sql-product-build-linux.yml
+parameters:
+extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azcli", "azurecore", "cms", "dacpac", "data-workspace", "import", "machine-learning", "notebook", "resource-deployment", "schema-compare", "sql-bindings", "sql-database-projects"]
+timeoutInMinutes: 90
+
+- stage: Windows
+condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], true))
+pool:
+vmImage: 'windows-2019'
+dependsOn:
+- Compile
+jobs:
+- job: Windows
+steps:
+- template: win32/sql-product-build-win32.yml
+timeoutInMinutes: 90
+
+# disable due to invalid machine pool (karlb 3/9/2022)
+# - job: Windows_Test
+# condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
+# pool:
+# name: mssqltools
+# dependsOn:
+# - Linux
+# - Windows
+# steps:
+# - template: win32/sql-product-test-win32.yml
+# timeoutInMinutes: 90
+
+- stage: Release
+condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
+pool:
+vmImage: 'Ubuntu-20.04'
+dependsOn:
+- macOS
+- Linux
+- Windows
+jobs:
+- job: Release
+steps:
+- template: sql-release.yml
 
 trigger: none
 pr: none
@@ -10,20 +10,53 @@ const fs = require("fs-extra");
 const path = require("path");
 const plist = require("plist");
 const product = require("../../product.json");
+const glob = require("glob"); // {{SQL CARBON EDIT}}
 async function main() {
 const buildDir = process.env['AGENT_BUILDDIRECTORY'];
 const arch = process.env['VSCODE_ARCH'];
 if (!buildDir) {
 throw new Error('$AGENT_BUILDDIRECTORY not set');
 }
+// {{SQL CARBON EDIT}}
+const x64AppNameBase = 'azuredatastudio-darwin-x64';
+const arm64AppNameBase = 'azuredatastudio-darwin-arm64';
+// {{SQL CARBON EDIT}} - END
 const appName = product.nameLong + '.app';
-const x64AppPath = path.join(buildDir, 'VSCode-darwin-x64', appName);
-const arm64AppPath = path.join(buildDir, 'VSCode-darwin-arm64', appName);
+const x64AppPath = path.join(buildDir, x64AppNameBase, appName); // {{SQL CARBON EDIT}} - CHANGE VSCode to azuredatastudio
+const arm64AppPath = path.join(buildDir, arm64AppNameBase, appName); // {{SQL CARBON EDIT}} - CHANGE VSCode to azuredatastudio
 const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
 const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
-const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
+const outAppPath = path.join(buildDir, `azuredatastudio-darwin-${arch}`, appName); // {{SQL CARBON EDIT}} - CHANGE VSCode to azuredatastudio
 const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
 const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
+// {{SQL CARBON EDIT}}
+// Current STS arm64 builds doesn't work on osx-arm64, we need to use the x64 version of STS on osx-arm64 until the issue is fixed.
+// Tracked by: https://github.com/microsoft/azuredatastudio/issues/20775
+// makeUniversalApp function will complain if the x64 ADS and arm64 ADS have the same STS binaries, to workaround the issue, we need
+// to delete STS from both of them and then copy it to the universal app.
+const stsPath = '/Contents/Resources/app/extensions/mssql/sqltoolsservice';
+const tempSTSDir = path.join(buildDir, 'sqltoolsservice');
+const x64STSDir = path.join(x64AppPath, stsPath);
+const arm64STSDir = path.join(arm64AppPath, stsPath);
+const targetSTSDirs = [x64STSDir, arm64STSDir];
+// backup the x64 STS to a temporary directory, later it will be copied to the universal app directory.
+await fs.copy(x64STSDir, tempSTSDir);
+// delete STS directories from both x64 ADS and arm64 ADS.
+console.debug(`Removing SqlToolsService folders.`);
+targetSTSDirs.forEach(async (dir) => {
+await fs.remove(dir);
+});
+// makeUniversalApp requires the non-binary files in arm64 and x64 versions to be exactly the same,
+// but sometimes the content of nls.metadata.json files could be different(only the order of the entries).
+// To workaround the issue, we need to replace these files in arm64 ADS with the files from x64 ADS.
+// Tracked by issue: https://github.com/microsoft/azuredatastudio/issues/20792
+const sourceFiles = glob.sync(path.join(x64AppPath, '/Contents/Resources/app/**/nls.metadata.json'));
+sourceFiles.forEach(source => {
+const target = source.replace(x64AppNameBase, arm64AppNameBase);
+console.debug(`Replacing file '${target}' with '${source}'`);
+fs.copySync(source, target, { overwrite: true });
+});
+// {{SQL CARBON EDIT}} - END
 await (0, vscode_universal_bundler_1.makeUniversalApp)({
 x64AppPath,
 arm64AppPath,
@@ -57,6 +90,9 @@ async function main() {
 if (lipoOutput.replace(/\n$/, "") !== 'x86_64 arm64') {
 throw new Error(`Invalid arch, got : ${lipoOutput}`);
 }
+// {{SQL CARBON EDIT}}
+console.debug(`Copying SqlToolsService to the universal app folder.`);
+await fs.copy(tempSTSDir, path.join(outAppPath, stsPath), { overwrite: true });
 }
 if (require.main === module) {
 main().catch(err => {
@@ -11,6 +11,7 @@ import * as fs from 'fs-extra';
 import * as path from 'path';
 import * as plist from 'plist';
 import * as product from '../../product.json';
+import * as glob from 'glob'; // {{SQL CARBON EDIT}}
 
 async function main() {
 const buildDir = process.env['AGENT_BUILDDIRECTORY'];
@@ -20,15 +21,50 @@ async function main() {
 throw new Error('$AGENT_BUILDDIRECTORY not set');
 }
 
+// {{SQL CARBON EDIT}}
+const x64AppNameBase = 'azuredatastudio-darwin-x64';
+const arm64AppNameBase = 'azuredatastudio-darwin-arm64';
+// {{SQL CARBON EDIT}} - END
+
 const appName = product.nameLong + '.app';
-const x64AppPath = path.join(buildDir, 'VSCode-darwin-x64', appName);
-const arm64AppPath = path.join(buildDir, 'VSCode-darwin-arm64', appName);
+const x64AppPath = path.join(buildDir, x64AppNameBase, appName); // {{SQL CARBON EDIT}} - CHANGE VSCode to azuredatastudio
+const arm64AppPath = path.join(buildDir, arm64AppNameBase, appName); // {{SQL CARBON EDIT}} - CHANGE VSCode to azuredatastudio
 const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
 const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
-const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
+const outAppPath = path.join(buildDir, `azuredatastudio-darwin-${arch}`, appName); // {{SQL CARBON EDIT}} - CHANGE VSCode to azuredatastudio
 const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
 const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
 
+// {{SQL CARBON EDIT}}
+// Current STS arm64 builds doesn't work on osx-arm64, we need to use the x64 version of STS on osx-arm64 until the issue is fixed.
+// Tracked by: https://github.com/microsoft/azuredatastudio/issues/20775
+// makeUniversalApp function will complain if the x64 ADS and arm64 ADS have the same STS binaries, to workaround the issue, we need
+// to delete STS from both of them and then copy it to the universal app.
+const stsPath = '/Contents/Resources/app/extensions/mssql/sqltoolsservice';
+const tempSTSDir = path.join(buildDir, 'sqltoolsservice');
+const x64STSDir = path.join(x64AppPath, stsPath);
+const arm64STSDir = path.join(arm64AppPath, stsPath);
+const targetSTSDirs = [x64STSDir, arm64STSDir];
+// backup the x64 STS to a temporary directory, later it will be copied to the universal app directory.
+await fs.copy(x64STSDir, tempSTSDir);
+// delete STS directories from both x64 ADS and arm64 ADS.
+console.debug(`Removing SqlToolsService folders.`);
+targetSTSDirs.forEach(async dir => {
+await fs.remove(dir);
+});
+
+// makeUniversalApp requires the non-binary files in arm64 and x64 versions to be exactly the same,
+// but sometimes the content of nls.metadata.json files could be different(only the order of the entries).
+// To workaround the issue, we need to replace these files in arm64 ADS with the files from x64 ADS.
+// Tracked by issue: https://github.com/microsoft/azuredatastudio/issues/20792
+const sourceFiles = glob.sync(path.join(x64AppPath, '/Contents/Resources/app/**/nls.metadata.json'));
+sourceFiles.forEach(source => {
+const target = source.replace(x64AppNameBase, arm64AppNameBase);
+console.debug(`Replacing file '${target}' with '${source}'`);
+fs.copySync(source, target, { overwrite: true });
+});
+// {{SQL CARBON EDIT}} - END
+
 await makeUniversalApp({
 x64AppPath,
 arm64AppPath,
@@ -65,6 +101,10 @@ async function main() {
 if (lipoOutput.replace(/\n$/, "") !== 'x86_64 arm64') {
 throw new Error(`Invalid arch, got : ${lipoOutput}`)
 }
+
+// {{SQL CARBON EDIT}}
+console.debug(`Copying SqlToolsService to the universal app folder.`);
+await fs.copy(tempSTSDir, path.join(outAppPath, stsPath), { overwrite: true });
 }
 
 if (require.main === module) {
@@ -5,7 +5,7 @@
 
 (async () => {
 const serviceDownloader = require('@microsoft/ads-service-downloader').ServiceDownloadProvider;
-const platform = require('@microsoft/ads-service-downloader/out/platform').PlatformInformation;
+const platform = require('@microsoft/ads-service-downloader/out/platform');
 const path = require('path');
 const fs = require('fs').promises;
 const rimraf = require('rimraf');
@@ -15,7 +15,15 @@
 async function installService() {
 const absoluteConfigPath = require.resolve('../config.json');
 const config = require(absoluteConfigPath);
-const runtime = (await platform.getCurrent()).runtimeId;
+let runtime = (await platform.PlatformInformation.getCurrent()).runtimeId;
+const arch = process.env['npm_config_arch'];
+
+// In the build pipeline, macOS x64 image is used to produce arm64 build,
+// we need to check the environment variable to determine the actual target runtime.
+if (runtime === platform.Runtime.OSX && arch === 'arm64') {
+console.log(`Set the target runtime to OSX_ARM64`);
+runtime = platform.Runtime.OSX_ARM64;
+}
 // fix path since it won't be correct
 config.installDirectory = path.join(path.dirname(absoluteConfigPath), config.installDirectory);
 let installer = new serviceDownloader(config);
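This runtime check works because the pipeline passes the target architecture through to extension install scripts: each macOS job sets NPM_CONFIG_ARCH, and the dependency-install step exports it as npm_config_arch before running yarn (see the pipeline hunks earlier on this page). A condensed sketch of that wiring, with unrelated steps omitted:

# Condensed from the pipeline diff above; only the arch-related pieces are shown.
variables:
  NPM_CONFIG_ARCH: arm64          # x64, arm64, or universal depending on the job

steps:
- script: |
    set -e
    export npm_config_arch=$(NPM_CONFIG_ARCH)
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
  displayName: Install dependencies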