Mirror of https://github.com/ckaczor/azuredatastudio.git (synced 2026-02-17 11:01:37 -05:00)

Compare commits
17 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 6783aa6967 |  |
|  | ccbc2f74fe |  |
|  | d7283a6e56 |  |
|  | 0684040d34 |  |
|  | 625eb00be2 |  |
|  | c5a27a89f3 |  |
|  | b9a7d5e4bd |  |
|  | f876c00ca1 |  |
|  | 83ae789aa0 |  |
|  | 6fe4d0a561 |  |
|  | 2eaec9f41d |  |
|  | 2edafe50bb |  |
|  | 24c5686bd6 |  |
|  | 1731aeffbe |  |
|  | b35ff6451a |  |
|  | 07aa256f4c |  |
|  | 473764de9a |  |
@@ -1,18 +1,14 @@
# Code - OSS Development Container

[](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/microsoft/vscode)

This repository includes configuration for a development container for working with Code - OSS in a local container or using [GitHub Codespaces](https://github.com/features/codespaces).

> **Tip:** The default VNC password is `vscode`. The VNC server runs on port `5901` and a web client is available on port `6080`.

## Quick start - local

If you already have VS Code and Docker installed, you can click the badge above or [here](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/microsoft/vscode) to get started. Clicking these links will cause VS Code to automatically install the Remote - Containers extension if needed, clone the source code into a container volume, and spin up a dev container for use.

1. Install Docker Desktop or Docker for Linux on your local machine. (See [docs](https://aka.ms/vscode-remote/containers/getting-started) for additional details.)

2. **Important**: Docker needs at least **4 Cores and 8 GB of RAM** to run a full build. If you are on macOS, or are using the old Hyper-V engine for Windows, update these values for Docker Desktop by right-clicking on the Docker status bar item and going to **Preferences/Settings > Resources > Advanced**.
2. **Important**: Docker needs at least **4 Cores and 6 GB of RAM (8 GB recommended)** to run a full build. If you are on macOS, or are using the old Hyper-V engine for Windows, update these values for Docker Desktop by right-clicking on the Docker status bar item and going to **Preferences/Settings > Resources > Advanced**.

> **Note:** The [Resource Monitor](https://marketplace.visualstudio.com/items?itemName=mutantdino.resourcemonitor) extension is included in the container so you can keep an eye on CPU/Memory in the status bar.

@@ -62,12 +58,12 @@ You may see improved VNC responsiveness when accessing a codespace from VS Code

2. After the VS Code is up and running, press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd> or <kbd>F1</kbd>, choose **Codespaces: Create New Codespace**, and use the following settings:
- `microsoft/vscode` for the repository.
- Select any branch (e.g. **main**) - you can select a different one later.
- Select any branch (e.g. **main**) - you select a different one later.
- Choose **Standard** (4-core, 8GB) as the size.

4. After you have connected to the codespace, you can use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.

> **Tip:** You may also need change your VNC client's **Picture Quality** setting to **High** to get a full color desktop.
> **Tip:** You may also need change your VNC client's **Picture Quaility** setting to **High** to get a full color desktop.

5. Anything you start in VS Code, or the integrated terminal, will appear here.
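The core/memory requirement above can be checked from the command line before opening the container. A minimal sketch using standard `docker info` Go-template fields (nothing repository-specific is assumed):

```bash
# A full Code - OSS build wants at least 4 cores and 6-8 GB of RAM allocated to Docker.
docker info --format 'CPUs: {{.NCPU}}'
docker info --format 'Memory: {{.MemTotal}} bytes'
```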
1  .devcontainer/cache/.gitignore  vendored  Normal file
@@ -0,0 +1 @@
*.manifest
10  .devcontainer/cache/before-cache.sh  vendored
@@ -1,15 +1,15 @@
#!/usr/bin/env bash
#!/bin/bash

# This file establishes a basline for the repository before any steps in the "prepare.sh"
# This file establishes a basline for the reposuitory before any steps in the "prepare.sh"
# are run. Its just a find command that filters out a few things we don't need to watch.

set -e

SCRIPT_PATH="$(cd "$(dirname $0)" && pwd)"
SOURCE_FOLDER="${1:-"."}"
CACHE_FOLDER="${2:-"$HOME/.devcontainer-cache"}"

cd "${SOURCE_FOLDER}"
echo "[$(date)] Generating ""before"" manifest..."
mkdir -p "${CACHE_FOLDER}"
find -L . -not -path "*/.git/*" -and -not -path "${CACHE_FOLDER}/*.manifest" -type f > "${CACHE_FOLDER}/before.manifest"
find -L . -not -path "*/.git/*" -and -not -path "${SCRIPT_PATH}/*.manifest" -type f > "${SCRIPT_PATH}/before.manifest"
echo "[$(date)] Done!"
18  .devcontainer/cache/build-cache-image.sh  vendored
@@ -1,12 +1,12 @@
#!/bin/bash

# This file simply wraps the docker build command to build an image that includes
# a cache.tar file with the result of "prepare.sh" inside of it. See cache.Dockerfile
# for the steps that are actually taken to do this.
# This file simply wraps the dockeer build command used to build the image with the
# cached result of the commands from "prepare.sh" and pushes it to the specified
# container image registry.

set -e

SCRIPT_PATH="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
SCRIPT_PATH="$(cd "$(dirname $0)" && pwd)"
CONTAINER_IMAGE_REPOSITORY="$1"
BRANCH="${2:-"main"}"

@@ -19,10 +19,10 @@ TAG="branch-${BRANCH//\//-}"
echo "[$(date)] ${BRANCH} => ${TAG}"
cd "${SCRIPT_PATH}/../.."

echo "[$(date)] Starting image build and push..."
export DOCKER_BUILDKIT=1
docker buildx create --use --name vscode-dev-containers
docker run --privileged --rm tonistiigi/binfmt --install all
docker buildx build --push --platform linux/amd64,linux/arm64 -t ${CONTAINER_IMAGE_REPOSITORY}:"${TAG}" -f "${SCRIPT_PATH}/cache.Dockerfile" .
echo "[$(date)] Starting image build..."
docker build -t ${CONTAINER_IMAGE_REPOSITORY}:"${TAG}" -f "${SCRIPT_PATH}/cache.Dockerfile" .
echo "[$(date)] Image build complete."

echo "[$(date)] Pushing image..."
docker push ${CONTAINER_IMAGE_REPOSITORY}:"${TAG}"
echo "[$(date)] Done!"
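Per the script above, the first argument is the container image repository and the second is the branch (defaulting to `main`). A hypothetical invocation, where the registry name is a placeholder rather than anything used by this repository:

```bash
# Build the cache image for the "main" branch and tag it for a (made-up) registry.
.devcontainer/cache/build-cache-image.sh myregistry.azurecr.io/vscode-devcontainer-cache main
```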
20  .devcontainer/cache/cache-diff.sh  vendored
@@ -1,23 +1,21 @@
#!/usr/bin/env bash
#!/bin/bash

# This file is used to archive off a copy of any differences in the source tree into another location
# in the image. Once the codespace / container is up, this will be restored into its proper location.
# in the image. Once the codespace is up, this will be restored into its proper location (which is
# quick and happens parallel to other startup activities)

set -e

SCRIPT_PATH="$(cd "$(dirname $0)" && pwd)"
SOURCE_FOLDER="${1:-"."}"
CACHE_FOLDER="${2:-"$HOME/.devcontainer-cache"}"

if [ ! -d "${CACHE_FOLDER}" ]; then
echo "No cache folder found. Be sure to run before-cache.sh to set one up."
exit 1
fi
CACHE_FOLDER="${2:-"/usr/local/etc/devcontainer-cache"}"

echo "[$(date)] Starting cache operation..."
cd "${SOURCE_FOLDER}"
echo "[$(date)] Determining diffs..."
find -L . -not -path "*/.git/*" -and -not -path "${CACHE_FOLDER}/*.manifest" -type f > "${CACHE_FOLDER}/after.manifest"
grep -Fxvf "${CACHE_FOLDER}/before.manifest" "${CACHE_FOLDER}/after.manifest" > "${CACHE_FOLDER}/cache.manifest"
find -L . -not -path "*/.git/*" -and -not -path "${SCRIPT_PATH}/*.manifest" -type f > "${SCRIPT_PATH}/after.manifest"
grep -Fxvf "${SCRIPT_PATH}/before.manifest" "${SCRIPT_PATH}/after.manifest" > "${SCRIPT_PATH}/cache.manifest"
echo "[$(date)] Archiving diffs..."
tar -cf "${CACHE_FOLDER}/cache.tar" --totals --files-from "${CACHE_FOLDER}/cache.manifest"
mkdir -p "${CACHE_FOLDER}"
tar -cf "${CACHE_FOLDER}/cache.tar" --totals --files-from "${SCRIPT_PATH}/cache.manifest"
echo "[$(date)] Done! $(du -h "${CACHE_FOLDER}/cache.tar")"
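The before/after manifest technique used by before-cache.sh and cache-diff.sh can be distilled into a small self-contained sketch. The cache directory and the "expensive step" below are placeholders for illustration; the real scripts run `.devcontainer/prepare.sh` between the two `find` passes:

```bash
#!/bin/bash
set -e

CACHE_DIR="/tmp/demo-cache"   # placeholder cache location
mkdir -p "${CACHE_DIR}"

# 1. Record every file that exists before the expensive step.
find -L . -not -path "*/.git/*" -type f | sort > "${CACHE_DIR}/before.manifest"

# 2. Run the expensive step (stand-in for .devcontainer/prepare.sh).
echo "generated output" > generated-file.txt

# 3. Record the file list again and keep only the lines not present before
#    (-F fixed strings, -x whole-line match, -v invert, -f patterns from file).
find -L . -not -path "*/.git/*" -type f | sort > "${CACHE_DIR}/after.manifest"
grep -Fxvf "${CACHE_DIR}/before.manifest" "${CACHE_DIR}/after.manifest" > "${CACHE_DIR}/cache.manifest"

# 4. Archive just the new files; this tarball is what gets baked into the image.
tar -cf "${CACHE_DIR}/cache.tar" --files-from "${CACHE_DIR}/cache.manifest"
```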
26  .devcontainer/cache/cache.Dockerfile  vendored
@@ -1,24 +1,14 @@
# This dockerfile is used to build up from a base image to create an image a cache.tar file containing the results of running "prepare.sh".
# This dockerfile is used to build up from a base image to create an image with cached results of running "prepare.sh".
# Other image contents: https://github.com/microsoft/vscode-dev-containers/blob/master/repository-containers/images/github.com/microsoft/vscode/.devcontainer/base.Dockerfile
FROM mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:dev

# This first stage generates cache.tar
FROM mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:dev as cache
ARG USERNAME=node
ARG CACHE_FOLDER="/home/${USERNAME}/.devcontainer-cache"
COPY --chown=${USERNAME}:${USERNAME} . /repo-source-tmp/
RUN mkdir -p ${CACHE_FOLDER} && chown ${USERNAME} ${CACHE_FOLDER} /repo-source-tmp \
RUN mkdir /usr/local/etc/devcontainer-cache \
&& chown ${USERNAME} /usr/local/etc/devcontainer-cache /repo-source-tmp \
&& su ${USERNAME} -c "\
cd /repo-source-tmp \
&& .devcontainer/cache/before-cache.sh . ${CACHE_FOLDER} \
&& .devcontainer/prepare.sh . ${CACHE_FOLDER} \
&& .devcontainer/cache/cache-diff.sh . ${CACHE_FOLDER}"

# This second stage starts fresh and just copies in cache.tar from the previous stage. The related
# devcontainer.json file is then setup to have postCreateCommand fire restore-diff.sh to expand it.
FROM mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:dev as dev-container
ARG USERNAME=node
ARG CACHE_FOLDER="/home/${USERNAME}/.devcontainer-cache"
RUN mkdir -p "${CACHE_FOLDER}" \
&& chown "${USERNAME}:${USERNAME}" "${CACHE_FOLDER}" \
&& su ${USERNAME} -c "git config --global codespaces-theme.hide-status 1"
COPY --from=cache ${CACHE_FOLDER}/cache.tar ${CACHE_FOLDER}/
&& .devcontainer/cache/before-cache.sh \
&& .devcontainer/prepare.sh \
&& .devcontainer/cache/cache-diff.sh" \
&& rm -rf /repo-source-tmp
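For local experimentation with the two-stage variant shown above, the Dockerfile could be built from the repository root roughly as follows. The `--target` flag and the tag name are illustrative assumptions; build-cache-image.sh is the wrapper the repository actually uses:

```bash
# Build from the repo root so COPY of the source tree into /repo-source-tmp works,
# stopping at the final "dev-container" stage. The tag is a made-up local name.
docker build \
  -f .devcontainer/cache/cache.Dockerfile \
  --target dev-container \
  -t local/vscode-devcontainer-cache:test \
  .
```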
24  .devcontainer/cache/restore-diff.sh  vendored
@@ -1,12 +1,14 @@
#!/usr/bin/env bash
#!/bin/bash

# This file expands the cache.tar file in the image that contains the results of "prepare.sh"
# on top of the source tree. It runs as a postCreateCommand which runs after the container/codespace
# is already up where you would typically run a command like "yarn install".
# This file restores the results of the "prepare.sh" into their proper locations
# once the container has been created. It runs as a postCreateCommand which
# in GitHub Codespaces occurs parallel to other startup activities and does not
# really add to the overal startup time given how quick the operation ends up being.

set -e

SOURCE_FOLDER="$(cd "${1:-"."}" && pwd)"
CACHE_FOLDER="${2:-"$HOME/.devcontainer-cache"}"
CACHE_FOLDER="${2:-"/usr/local/etc/devcontainer-cache"}"

if [ ! -d "${CACHE_FOLDER}" ]; then
echo "No cache folder found."
@@ -15,15 +17,7 @@ fi

echo "[$(date)] Expanding $(du -h "${CACHE_FOLDER}/cache.tar") file to ${SOURCE_FOLDER}..."
cd "${SOURCE_FOLDER}"
# Ensure user/group is correct if the UID/GID was changed for some reason
echo "+1000 +$(id -u)" > "${CACHE_FOLDER}/cache-owner-map"
echo "+1000 +$(id -g)" > "${CACHE_FOLDER}/cache-group-map"
# Untar to workspace folder, preserving permissions and order, but mapping GID/UID if required
tar --owner-map="${CACHE_FOLDER}/cache-owner-map" --group-map="${CACHE_FOLDER}/cache-group-map" -xpsf "${CACHE_FOLDER}/cache.tar"
rm -rf "${CACHE_FOLDER}"
tar -xf "${CACHE_FOLDER}/cache.tar"
rm -f "${CACHE_FOLDER}/cache.tar"
echo "[$(date)] Done!"

# Change ownership of chrome-sandbox
sudo chown root .build/electron/chrome-sandbox
sudo chmod 4755 .build/electron/chrome-sandbox
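The UID/GID remapping above relies on GNU tar's `--owner-map` / `--group-map` options. A minimal sketch of the same restore step in isolation, assuming the archive was created as UID/GID 1000 and that `/tmp/demo-cache` is a placeholder path:

```bash
# Map owner/group 1000 in the archive to the current user, then extract in place.
CACHE_DIR="/tmp/demo-cache"
echo "+1000 +$(id -u)" > "${CACHE_DIR}/cache-owner-map"
echo "+1000 +$(id -g)" > "${CACHE_DIR}/cache-group-map"
tar --owner-map="${CACHE_DIR}/cache-owner-map" \
    --group-map="${CACHE_DIR}/cache-group-map" \
    -xpsf "${CACHE_DIR}/cache.tar"   # -x extract, -p keep permissions, -s same order, -f archive
```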
@@ -4,7 +4,7 @@
// Image contents: https://github.com/microsoft/vscode-dev-containers/blob/master/repository-containers/images/github.com/microsoft/vscode/.devcontainer/base.Dockerfile
"image": "mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:branch-main",
"overrideCommand": false,
"runArgs": [ "--init", "--security-opt", "seccomp=unconfined", "--shm-size=1g"],
"runArgs": [ "--init", "--security-opt", "seccomp=unconfined"],

"settings": {
"resmon.show.battery": false,
@@ -32,9 +32,5 @@
// Optionally loads a cached yarn install for the repo
"postCreateCommand": ".devcontainer/cache/restore-diff.sh",

"remoteUser": "node",

"hostRequirements": {
"memory": "8gb"
}
"remoteUser": "node"
}
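A quick way to confirm the effect of the `--shm-size=1g` run argument from inside the running dev container; this is only a sanity check, not part of the repository's scripts:

```bash
# /dev/shm should report roughly 1.0G when started with --shm-size=1g (Docker's default is 64M).
df -h /dev/shm
```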
@@ -1,9 +1,10 @@
#!/usr/bin/env bash
#!/bin/bash

# This file contains the steps that should be run when building a "cache" image with contents that should be
# layered directly **on top of the source tree** once a dev container is created. This avoids having to run long
# running commands like "yarn install" from the ground up. Developers (and should) still run these commands
# after the actual dev container is created, but only differences will be processed.
# This file contains the steps that should be run when creating the intermediary image that contains
# contents for that should be in the image by default. It will be used to build up from the base image
# to create an image that speeds up first time use of the dev container by "caching" the results
# of these commands. Developers can still run these commands without an issue once the container is
# up, but only differences will be processed which also speeds up the first time these operations occur.

yarn install
yarn electron
@@ -1,58 +1,20 @@
|
||||
**/build/*/**/*.js
|
||||
**/dist/**/*.js
|
||||
**/extensions/**/*.d.ts
|
||||
**/extensions/**/build/**
|
||||
**/extensions/**/colorize-fixtures/**
|
||||
**/extensions/azurecore/extension.webpack.config.js
|
||||
**/extensions/css-language-features/server/test/pathCompletionFixtures/**
|
||||
**/extensions/html-language-features/server/lib/jquery.d.ts
|
||||
**/extensions/html-language-features/server/src/test/pathCompletionFixtures/**
|
||||
**/extensions/markdown-language-features/media/**
|
||||
**/extensions/markdown-language-features/notebook-out/**
|
||||
**/extensions/markdown-math/notebook-out/**
|
||||
**/extensions/notebook-renderers/renderer-out/index.js
|
||||
**/extensions/simple-browser/media/index.js
|
||||
**/extensions/typescript-language-features/test-workspace/**
|
||||
**/vs/nls.build.js
|
||||
**/vs/nls.js
|
||||
**/vs/css.build.js
|
||||
**/vs/css.js
|
||||
**/vs/loader.js
|
||||
**/dompurify/**
|
||||
**/insane/**
|
||||
**/marked/**
|
||||
**/semver/**
|
||||
**/test/**/*.js
|
||||
**/node_modules/**
|
||||
**/vscode-api-tests/testWorkspace/**
|
||||
**/vscode-api-tests/testWorkspace2/**
|
||||
**/extensions/**/out/**
|
||||
**/extensions/**/build/**
|
||||
**/extensions/**/colorize-fixtures/**
|
||||
**/extensions/html-language-features/server/lib/jquery.d.ts
|
||||
/extensions/markdown-language-features/media/**
|
||||
/extensions/markdown-language-features/notebook-out/**
|
||||
**/extensions/markdown-math/notebook-out/**
|
||||
**/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts
|
||||
**/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts
|
||||
**/extensions/markdown-language-features/media/**
|
||||
**/extensions/markdown-language-features/notebook-out/**
|
||||
**/extensions/typescript-basics/test/colorize-fixtures/**
|
||||
**/extensions/**/dist/**
|
||||
/extensions/types
|
||||
/extensions/typescript-language-features/test-workspace/**
|
||||
/test/automation/out
|
||||
/resources/web/code-web.js
|
||||
**/extensions/vscode-api-tests/testWorkspace/**
|
||||
**/extensions/vscode-api-tests/testWorkspace2/**
|
||||
**/fixtures/**
|
||||
**/node_modules/**
|
||||
**/out-*/**/*.js
|
||||
**/out-editor-*/**
|
||||
**/out/**/*.js
|
||||
**/src/**/dompurify.js
|
||||
**/src/**/marked.js
|
||||
**/src/**/semver.js
|
||||
**/src/typings/**/*.d.ts
|
||||
**/src/vs/*/**/*.d.ts
|
||||
**/src/vs/base/test/common/filters.perf.data.js
|
||||
**/src/vs/css.build.js
|
||||
**/src/vs/css.js
|
||||
**/src/vs/loader.js
|
||||
**/src/vs/nls.build.js
|
||||
**/src/vs/nls.js
|
||||
**/test/unit/assert.js
|
||||
**/typings/**
|
||||
|
||||
152  .eslintrc.json  Executable file → Normal file
@@ -7,27 +7,23 @@
|
||||
},
|
||||
"plugins": [
|
||||
"@typescript-eslint",
|
||||
"jsdoc",
|
||||
"header"
|
||||
"jsdoc"
|
||||
],
|
||||
"rules": {
|
||||
"no-undef": "off",
|
||||
"no-unused-vars": "off",
|
||||
"constructor-super": "warn",
|
||||
"curly": "off",
|
||||
"curly": "warn",
|
||||
"eqeqeq": "warn",
|
||||
"no-buffer-constructor": "warn",
|
||||
"no-caller": "warn",
|
||||
"no-debugger": "warn",
|
||||
"no-duplicate-case": "warn",
|
||||
"no-duplicate-imports": "off",
|
||||
"no-duplicate-imports": "warn",
|
||||
"no-eval": "warn",
|
||||
"no-async-promise-executor": "off",
|
||||
"no-extra-semi": "warn",
|
||||
"no-new-wrappers": "warn",
|
||||
"no-redeclare": "off",
|
||||
"no-sparse-arrays": "warn",
|
||||
"no-throw-literal": "off",
|
||||
"no-throw-literal": "warn",
|
||||
"no-unsafe-finally": "warn",
|
||||
"no-unused-labels": "warn",
|
||||
"no-restricted-globals": [
|
||||
@@ -42,10 +38,10 @@
|
||||
"orientation",
|
||||
"context"
|
||||
], // non-complete list of globals that are easy to access unintentionally
|
||||
"no-var": "off",
|
||||
"no-var": "warn",
|
||||
"jsdoc/no-types": "warn",
|
||||
"semi": "off",
|
||||
"@typescript-eslint/semi": "off",
|
||||
"@typescript-eslint/semi": "warn",
|
||||
"@typescript-eslint/naming-convention": [
|
||||
"warn",
|
||||
{
|
||||
@@ -56,15 +52,15 @@
|
||||
}
|
||||
],
|
||||
"code-no-unused-expressions": [
|
||||
"off",
|
||||
"warn",
|
||||
{
|
||||
"allowTernary": true
|
||||
}
|
||||
],
|
||||
"code-translation-remind": "off",
|
||||
"code-translation-remind": "warn",
|
||||
"code-no-nls-in-standalone-editor": "warn",
|
||||
"code-no-standalone-editor": "warn",
|
||||
"code-no-unexternalized-strings": "off",
|
||||
"code-no-unexternalized-strings": "warn",
|
||||
"code-layering": [
|
||||
"warn",
|
||||
{
|
||||
@@ -92,7 +88,7 @@
|
||||
}
|
||||
],
|
||||
"code-import-patterns": [
|
||||
"off",
|
||||
"warn",
|
||||
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
// !!! Do not relax these rules !!!
|
||||
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
@@ -137,7 +133,7 @@
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,node}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -179,7 +175,7 @@
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,node}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,node}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -198,7 +194,7 @@
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -207,7 +203,7 @@
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -215,13 +211,9 @@
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"mssql",
|
||||
"azurecore",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/parts/*/common/**",
|
||||
"**/{vs,sql}/platform/*/common/**",
|
||||
"@microsoft/1ds-post-js",
|
||||
"@microsoft/1ds-core-js"
|
||||
"**/{vs,sql}/platform/*/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -259,9 +251,7 @@
|
||||
"**/{vs,sql}/base/{common,node}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,node}/**",
|
||||
"**/{vs,sql}/platform/*/{common,node}/**",
|
||||
"@vscode/*", "@parcel/*", "*", // node modules
|
||||
"@microsoft/1ds-post-js",
|
||||
"@microsoft/1ds-core-js"
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -283,7 +273,7 @@
|
||||
"**/{vs,sql}/base/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -294,7 +284,8 @@
|
||||
"**/{vs,sql}/base/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/platform/*/{common,node,electron-main}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"**/{vs,sql}/code/**",
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -310,9 +301,7 @@
|
||||
"**/{vs,sql}/base/test/{common,browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/common/**", // {{SQL CARBON EDIT}}
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/test/{common,browser}/**",
|
||||
"@microsoft/1ds-post-js",
|
||||
"@microsoft/1ds-core-js"
|
||||
"**/{vs,sql}/platform/*/test/{common,browser}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -483,8 +472,6 @@
|
||||
"restrictions": [
|
||||
"vscode",
|
||||
"azdata",
|
||||
"mssql",
|
||||
"azurecore",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/platform/*/common/**",
|
||||
@@ -533,7 +520,7 @@
|
||||
"**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/workbench/services/*/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -548,7 +535,7 @@
|
||||
"vs/workbench/contrib/files/browser/editors/fileEditorInput",
|
||||
"**/{vs,sql}/workbench/services/**",
|
||||
"**/{vs,sql}/workbench/test/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -563,7 +550,6 @@
|
||||
"**/{vs,sql}/workbench/common/**",
|
||||
"**/{vs,sql}/workbench/services/**/common/**",
|
||||
"**/{vs,sql}/workbench/api/**/common/**",
|
||||
"**/{vs,sql}/workbench/contrib/**/common/**",
|
||||
"vs/workbench/contrib/files/common/editors/fileEditorInput", // this should be fine, it only accesses constants from contrib
|
||||
"vscode-textmate",
|
||||
"vscode-oniguruma",
|
||||
@@ -591,8 +577,6 @@
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"azdata",
|
||||
"mssql",
|
||||
"azurecore",
|
||||
"vscode",
|
||||
"**/{vs,sql}/base/**/{common,browser,worker}/**",
|
||||
"**/{vs,sql}/platform/**/{common,browser}/**",
|
||||
@@ -601,17 +585,14 @@
|
||||
"**/{vs,sql}/workbench/{common,browser}/**",
|
||||
"**/{vs,sql}/workbench/api/{common,browser}/**",
|
||||
"**/{vs,sql}/workbench/services/**/{common,browser}/**",
|
||||
"**/{vs,sql}/workbench/contrib/**/common/**",
|
||||
"vscode-textmate",
|
||||
"vscode-oniguruma",
|
||||
"iconv-lite-umd",
|
||||
"jschardet",
|
||||
"@vscode/vscode-languagedetection",
|
||||
"@angular/*",
|
||||
"rxjs/**",
|
||||
"sanitize-html",
|
||||
"ansi_up",
|
||||
"@microsoft/applicationinsights-web"
|
||||
"ansi_up"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -624,7 +605,7 @@
|
||||
"**/{vs,sql}/workbench/{common,node}/**",
|
||||
"**/{vs,sql}/workbench/api/{common,node}/**",
|
||||
"**/{vs,sql}/workbench/services/**/{common,node}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -655,7 +636,7 @@
|
||||
"**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -770,7 +751,7 @@
|
||||
"chart.js",
|
||||
"plotly.js",
|
||||
"angular2-grid",
|
||||
"kburtram-query-plan",
|
||||
"html-query-plan",
|
||||
"html-to-image",
|
||||
"turndown",
|
||||
"gridstack",
|
||||
@@ -778,8 +759,7 @@
|
||||
"vscode-textmate",
|
||||
"vscode-oniguruma",
|
||||
"iconv-lite-umd",
|
||||
"jschardet",
|
||||
"azdataGraph"
|
||||
"jschardet"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -794,7 +774,7 @@
|
||||
"**/{vs,sql}/workbench/api/{common,node}/**",
|
||||
"**/{vs,sql}/workbench/services/**/{common,node}/**",
|
||||
"**/{vs,sql}/workbench/contrib/**/{common,node}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -827,7 +807,7 @@
|
||||
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/workbench/contrib/**/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -850,7 +830,7 @@
|
||||
"**/{vs,sql}/base/parts/**/{common,node}/**",
|
||||
"**/{vs,sql}/platform/**/{common,node}/**",
|
||||
"**/{vs,sql}/code/**/{common,node}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -862,7 +842,7 @@
|
||||
"**/{vs,sql}/base/parts/**/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/platform/**/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"**/{vs,sql}/code/**/{common,browser,node,electron-sandbox,electron-browser}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -873,7 +853,7 @@
|
||||
"**/{vs,sql}/base/parts/**/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/platform/**/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/code/**/{common,node,electron-main}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -885,7 +865,8 @@
|
||||
"**/{vs,sql}/platform/**/{common,node}/**",
|
||||
"**/{vs,sql}/workbench/**/{common,node}/**",
|
||||
"**/{vs,sql}/server/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"**/{vs,sql}/code/**/{common,node}/**",
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -956,28 +937,28 @@
|
||||
"target": "**/test/smoke/**",
|
||||
"restrictions": [
|
||||
"**/test/smoke/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/test/automation/**",
|
||||
"restrictions": [
|
||||
"**/test/automation/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/test/integration/**",
|
||||
"restrictions": [
|
||||
"**/test/integration/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/test/monaco/**",
|
||||
"restrictions": [
|
||||
"**/test/monaco/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -997,7 +978,7 @@
|
||||
"target": "**/{node,electron-browser,electron-main}/**/*.test.ts",
|
||||
"restrictions": [
|
||||
"**/{vs,sql}/**",
|
||||
"@vscode/*", "@parcel/*", "*", // node modules
|
||||
"*", // node modules
|
||||
"@angular/*" // {{SQL CARBON EDIT}}
|
||||
]
|
||||
},
|
||||
@@ -1005,14 +986,14 @@
|
||||
"target": "**/{node,electron-browser,electron-main}/**/test/**",
|
||||
"restrictions": [
|
||||
"**/{vs,sql}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/test/{node,electron-browser,electron-main}/**",
|
||||
"restrictions": [
|
||||
"**/{vs,sql}/**",
|
||||
"@vscode/*", "@parcel/*", "*" // node modules
|
||||
"*" // node modules
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -1040,16 +1021,6 @@
|
||||
"xterm*"
|
||||
]
|
||||
}
|
||||
],
|
||||
"header/header": [
|
||||
2,
|
||||
"block",
|
||||
[
|
||||
"---------------------------------------------------------------------------------------------",
|
||||
" * Copyright (c) Microsoft Corporation. All rights reserved.",
|
||||
" * Licensed under the Source EULA. See License.txt in the project root for license information.",
|
||||
" *--------------------------------------------------------------------------------------------"
|
||||
]
|
||||
]
|
||||
},
|
||||
"overrides": [
|
||||
@@ -1138,47 +1109,6 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": [
|
||||
"src/{vs,sql}/server/*",
|
||||
|
||||
// {{SQL CARBON EDIT}} Ignore our own that don't use our copyright
|
||||
"extensions/azuremonitor/src/prompts/**",
|
||||
"extensions/azuremonitor/src/typings/findRemove.d.ts",
|
||||
"extensions/kusto/src/prompts/**",
|
||||
"extensions/mssql/src/hdfs/webhdfs.ts",
|
||||
"extensions/mssql/src/prompts/**",
|
||||
"extensions/mssql/src/typings/bufferStreamReader.d.ts",
|
||||
"extensions/mssql/src/typings/findRemove.d.ts",
|
||||
"extensions/notebook/resources/jupyter_config/**",
|
||||
"extensions/notebook/src/intellisense/text.ts",
|
||||
"extensions/notebook/src/prompts/**",
|
||||
"extensions/resource-deployment/src/typings/linuxReleaseInfo.d.ts",
|
||||
"src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts",
|
||||
"src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts",
|
||||
"src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts",
|
||||
"src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts",
|
||||
"src/sql/base/browser/ui/table/plugins/rowDetailView.ts",
|
||||
"src/sql/base/browser/ui/table/plugins/rowMoveManager.plugin.ts",
|
||||
"src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts",
|
||||
"src/sql/workbench/services/notebook/browser/outputs/factories.ts",
|
||||
"src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts",
|
||||
"src/sql/workbench/services/notebook/browser/outputs/registry.ts",
|
||||
"src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts",
|
||||
"src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts",
|
||||
"src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts",
|
||||
"src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts",
|
||||
"src/sql/workbench/services/notebook/common/nbformat.ts",
|
||||
"src/sql/workbench/services/notebook/browser/outputs/renderers.ts",
|
||||
"src/sql/workbench/services/notebook/browser/outputs/tableRenderers.ts"
|
||||
],
|
||||
"rules": {
|
||||
"header/header": [
|
||||
// hygiene.js still checks that all files (even those in this directory) are MIT licensed.
|
||||
"off"
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -16,7 +16,6 @@
{
"ignoreVoid": true
}
],
"jsdoc/check-param-names": "error"
]
}
}
@@ -1,25 +0,0 @@
# https://git-scm.com/docs/git-blame#Documentation/git-blame.txt---ignore-revs-fileltfilegt

# mjbvz: Fix spacing
13f4f052582bcec3d6c6c6a70d995c9dee2cac13

# mjbvz: Add script to run build with noImplicitOverride
ae1452eea678f5266ef513f22dacebb90955d6c9

# alexdima: Revert "bump version"
537ba0ef1791c090bb18bc68d727816c0451c117

# alexdima: bump version
387a0dcb82df729e316ca2518a9ed81a75482b18

# joaomoreno: add ghooks dev dependency
0dfc06e0f9de5925de792cdf9f0e6597bb25908f

# mjbvz: organize imports
494cbbd02d67e87727ec885f98d19551aa33aad1
a3cb14be7f2cceadb17adf843675b1a59537dbbd
ee1655a82ebdfd38bf8792088a6602c69f7bbd94

# jrieken: new eslint-rule
4a130c40ed876644ed8af2943809d08221375408
3  .gitattributes  vendored
@@ -7,5 +7,4 @@ ThirdPartyNotices.txt eol=crlf
*.cmd eol=crlf
*.ps1 eol=lf
*.sh eol=lf
*.rtf -text
**/*.json linguist-language=jsonc
*.rtf -text
7  .github/CODEOWNERS  vendored
@@ -5,18 +5,13 @@
/extensions/admin-tool-ext-win @Charles-Gagnon
/extensions/arc/ @Charles-Gagnon @swells @candiceye
/extensions/azcli/ @Charles-Gagnon @swells @candiceye
/extensions/azurecore/ @cssuh @cheenamalhotra
/extensions/big-data-cluster/ @Charles-Gagnon
/extensions/dacpac/ @kisantia
/extensions/datavirtualization @Charles-Gagnon
/extensions/import @aasimkhan30
/extensions/machine-learning @llali
/extensions/notebook @azure-data-studio-notebook-devs
/extensions/query-history/ @Charles-Gagnon
/extensions/resource-deployment/ @Charles-Gagnon
/extensions/schema-compare/ @kisantia
/extensions/sql-bindings/ @vasubhog @Charles-Gagnon @lucyzhang929 @chlafreniere @MaddyDev
/extensions/sql-database-projects/ @Benjin @kisantia
/extensions/sql-migration @AkshayMata @raymondtruong @brian-harris @junierch @siyangMicrosoft
/extensions/mssql/config.json @Charles-Gagnon @alanrenmsft @kburtram

/src/sql/*.d.ts @alanrenmsft @Charles-Gagnon
25  .github/label-actions.yml  vendored
@@ -2,9 +2,8 @@
|
||||
Needs Logs:
|
||||
comment: "We need more info to debug your particular issue. If you could attach your logs to the issue (ensure no private data is in them), it would help us fix the issue much faster.
|
||||
|
||||
First open the Settings page, find the `Mssql: Tracing Level` setting and change that to `All` then restart ADS and repro your issue.
|
||||
|
||||
Next there are two types of logs to collect:
|
||||
There are two types of logs to collect:
|
||||
|
||||
|
||||
**Console Logs**
|
||||
@@ -18,7 +17,6 @@ Next there are two types of logs to collect:
|
||||
|
||||
- Save this text into a file named console.log and attach it to this issue.
|
||||
|
||||
- Developer Tools can be closed via Help -> Toggle Developer Tools
|
||||
|
||||
**Application Logs**
|
||||
|
||||
@@ -29,23 +27,10 @@ Next there are two types of logs to collect:
|
||||
|
||||
- This will open the log folder locally. Please zip up this folder and attach it to the issue."
|
||||
|
||||
# actions for Needs Logs - Azure label
|
||||
Needs Logs - Azure:
|
||||
comment: "We need more info to debug your Azure Active Directory issue. If you could attach your logs to the issue (ensure no private data is in them), it would help us fix the issue much faster.
|
||||
|
||||
- In the settings menu, find the setting titled `Azure: Logging Level` and select the `Verbose` option
|
||||
|
||||
- Run the process that produces your error
|
||||
|
||||
- Open command palette (Click **View** -> **Command Palette**)
|
||||
|
||||
- Run the command: **`Developer: Open Logs Folder`**
|
||||
|
||||
- Follow this path to find the Azure Accounts log file: `[default log folder]/exthost1/output_logging_[earliest timestamp]/#-Azure Acounts.log`
|
||||
|
||||
- Please attach the Azure-Accounts.log file to the issue."
|
||||
|
||||
# actions for Out of Scope label
|
||||
Out of Scope:
|
||||
comment: "Thank you for your feedback! This feature is currently out of scope and we do not plan to work on it in a currently planned release. We will close this issue to keep our backlog focused on requests that we are planning to work on. Please note that users can continue to vote and comment on closed issues, which we encourage as it helps us understand user interest and can provide more details about why a feature is requested."
|
||||
comment: "Thank you for opening this suggestion! This enhancement is not planned in our
|
||||
medium-term roadmap. The issue is being closed to reduce active issues to focus on
|
||||
enhancements that are being considered for an upcoming release. We will review closed issues
|
||||
with the 'Out of Scope' label when doing long-term planning."
|
||||
close: true
|
||||
|
||||
3  .github/port-labeler.yml  vendored
@@ -1,3 +0,0 @@
# Add 'repo' label to any root file changes
Port Request:
- '**/*'
2  .github/pull_request_template.md  vendored
@@ -5,3 +5,5 @@
* Ensure that the code is up-to-date with the `main` branch.
* Include a description of the proposed changes and how to test them.
-->

This PR fixes #
8  .github/subscribers.json  vendored
@@ -1,2 +1,10 @@
{
"notebook": [
"claudiaregio",
"rchiodo",
"greazer",
"donjayamanne",
"jilljac",
"IanMatthewHuff"
]
}
22  .github/workflows/bad-tag.yml  vendored
@@ -1,22 +0,0 @@
|
||||
name: Bad Tag
|
||||
on:
|
||||
create
|
||||
|
||||
jobs:
|
||||
main:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.ref == '1.999.0'
|
||||
steps:
|
||||
- name: Checkout Actions
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: "microsoft/vscode-github-triage-actions"
|
||||
ref: stable
|
||||
path: ./actions
|
||||
- name: Install Actions
|
||||
run: npm install --production --prefix ./actions
|
||||
- name: Run Bad Tag
|
||||
uses: ./actions/tag-alert
|
||||
with:
|
||||
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
|
||||
tag-name: '1.999.0'
|
||||
177  .github/workflows/basic.yml  vendored
@@ -1,177 +0,0 @@
|
||||
name: Basic checks
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
main:
|
||||
if: github.ref != 'refs/heads/main'
|
||||
name: Compilation, Unit and Integration Tests
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 40
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
|
||||
- name: Setup Build Environment
|
||||
run: |
|
||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||
sudo chmod +x /etc/init.d/xvfb
|
||||
sudo update-rc.d xvfb defaults
|
||||
sudo service xvfb start
|
||||
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 16
|
||||
|
||||
- name: Compute node modules cache key
|
||||
id: nodeModulesCacheKey
|
||||
run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
|
||||
- name: Cache node modules
|
||||
id: cacheNodeModules
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: "**/node_modules"
|
||||
key: ${{ runner.os }}-cacheNodeModules23-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
restore-keys: ${{ runner.os }}-cacheNodeModules23-
|
||||
- name: Get yarn cache directory path
|
||||
id: yarnCacheDirPath
|
||||
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||
- name: Cache yarn directory
|
||||
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: ${{ steps.yarnCacheDirPath.outputs.dir }}
|
||||
key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
restore-keys: ${{ runner.os }}-yarnCacheDir-
|
||||
- name: Execute yarn
|
||||
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
env:
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
run: yarn --frozen-lockfile --network-timeout 180000
|
||||
|
||||
- name: Compile and Download
|
||||
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64"
|
||||
|
||||
- name: Run Unit Tests
|
||||
id: electron-unit-tests
|
||||
run: DISPLAY=:10 ./scripts/test.sh
|
||||
|
||||
- name: Run Integration Tests (Electron)
|
||||
id: electron-integration-tests
|
||||
run: DISPLAY=:10 ./scripts/test-integration.sh
|
||||
|
||||
# {{SQL CARBON TODO}} Bring back "Hygiene and Layering" and "Warm up node modules cache"
|
||||
# hygiene:
|
||||
# if: github.ref != 'refs/heads/main'
|
||||
# name: Hygiene and Layering
|
||||
# runs-on: ubuntu-latest
|
||||
# timeout-minutes: 40
|
||||
# env:
|
||||
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# steps:
|
||||
# - uses: actions/checkout@v3
|
||||
|
||||
# - uses: actions/setup-node@v3
|
||||
# with:
|
||||
# node-version: 16
|
||||
|
||||
# - name: Compute node modules cache key
|
||||
# id: nodeModulesCacheKey
|
||||
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
|
||||
# - name: Cache node modules
|
||||
# id: cacheNodeModules
|
||||
# uses: actions/cache@v3
|
||||
# with:
|
||||
# path: "**/node_modules"
|
||||
# key: ${{ runner.os }}-cacheNodeModules23-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
# restore-keys: ${{ runner.os }}-cacheNodeModules23-
|
||||
# - name: Get yarn cache directory path
|
||||
# id: yarnCacheDirPath
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||
# - name: Cache yarn directory
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# uses: actions/cache@v3
|
||||
# with:
|
||||
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
|
||||
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
# restore-keys: ${{ runner.os }}-yarnCacheDir-
|
||||
# - name: Execute yarn
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# env:
|
||||
# PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
# ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
# run: yarn --frozen-lockfile --network-timeout 180000
|
||||
|
||||
# - name: Run Hygiene Checks
|
||||
# run: yarn gulp hygiene
|
||||
|
||||
# - name: Run Valid Layers Checks
|
||||
# run: yarn valid-layers-check
|
||||
|
||||
# - name: Compile /build/
|
||||
# run: yarn --cwd build compile
|
||||
|
||||
# - name: Check clean git state
|
||||
# run: ./.github/workflows/check-clean-git-state.sh
|
||||
|
||||
# - name: Run eslint
|
||||
# run: yarn eslint
|
||||
|
||||
# - name: Run vscode-dts Compile Checks
|
||||
# run: yarn vscode-dts-compile-check
|
||||
|
||||
# - name: Run Trusted Types Checks
|
||||
# run: yarn tsec-compile-check
|
||||
|
||||
# warm-cache:
|
||||
# name: Warm up node modules cache
|
||||
# if: github.ref == 'refs/heads/main'
|
||||
# runs-on: ubuntu-latest
|
||||
# timeout-minutes: 40
|
||||
# env:
|
||||
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# steps:
|
||||
# - uses: actions/checkout@v3
|
||||
|
||||
# - uses: actions/setup-node@v3
|
||||
# with:
|
||||
# node-version: 16
|
||||
|
||||
# - name: Compute node modules cache key
|
||||
# id: nodeModulesCacheKey
|
||||
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
|
||||
# - name: Cache node modules
|
||||
# id: cacheNodeModules
|
||||
# uses: actions/cache@v3
|
||||
# with:
|
||||
# path: "**/node_modules"
|
||||
# key: ${{ runner.os }}-cacheNodeModules23-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
# restore-keys: ${{ runner.os }}-cacheNodeModules23-
|
||||
# - name: Get yarn cache directory path
|
||||
# id: yarnCacheDirPath
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||
# - name: Cache yarn directory
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# uses: actions/cache@v3
|
||||
# with:
|
||||
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
|
||||
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
# restore-keys: ${{ runner.os }}-yarnCacheDir-
|
||||
# - name: Execute yarn
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# env:
|
||||
# PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
# ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
# run: yarn --frozen-lockfile --network-timeout 180000
|
||||
235  .github/workflows/ci.yml  vendored
@@ -1,87 +1,84 @@
|
||||
name: CI
|
||||
|
||||
on: workflow_dispatch
|
||||
|
||||
# on:
|
||||
# push:
|
||||
# branches:
|
||||
# - main
|
||||
# - release/*
|
||||
# pull_request:
|
||||
# branches:
|
||||
# - main
|
||||
# - release/*
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release/*
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- release/*
|
||||
|
||||
jobs:
|
||||
windows:
|
||||
name: Windows
|
||||
runs-on: windows-2022
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
CHILD_CONCURRENCY: "1"
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
name: Windows
|
||||
runs-on: windows-latest
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
CHILD_CONCURRENCY: "1"
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v2.2.0
|
||||
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 16
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 12
|
||||
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: "2.x"
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: "2.x"
|
||||
|
||||
# {{SQL CARBON EDIT}} Skip caching for now
|
||||
# - name: Compute node modules cache key
|
||||
# id: nodeModulesCacheKey
|
||||
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
|
||||
# - name: Cache node_modules archive
|
||||
# id: cacheNodeModules
|
||||
# uses: actions/cache@v2
|
||||
# with:
|
||||
# path: ".build/node_modules_cache"
|
||||
# key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
|
||||
# - name: Extract node_modules archive
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
|
||||
# run: 7z.exe x .build/node_modules_cache/cache.7z -aos
|
||||
# - name: Get yarn cache directory path
|
||||
# id: yarnCacheDirPath
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||
# - name: Cache yarn directory
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# uses: actions/cache@v2
|
||||
# with:
|
||||
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
|
||||
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
# restore-keys: ${{ runner.os }}-yarnCacheDir-
|
||||
# {{SQL CARBON EDIT}} Skip caching for now
|
||||
# - name: Compute node modules cache key
|
||||
# id: nodeModulesCacheKey
|
||||
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
|
||||
# - name: Cache node_modules archive
|
||||
# id: cacheNodeModules
|
||||
# uses: actions/cache@v2
|
||||
# with:
|
||||
# path: ".build/node_modules_cache"
|
||||
# key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
|
||||
# - name: Extract node_modules archive
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
|
||||
# run: 7z.exe x .build/node_modules_cache/cache.7z -aos
|
||||
# - name: Get yarn cache directory path
|
||||
# id: yarnCacheDirPath
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||
# - name: Cache yarn directory
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# uses: actions/cache@v2
|
||||
# with:
|
||||
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
|
||||
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
# restore-keys: ${{ runner.os }}-yarnCacheDir-
|
||||
|
||||
- name: Execute yarn
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skipping caching for now
|
||||
env:
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
run: yarn --frozen-lockfile --network-timeout 180000
|
||||
# - name: Create node_modules archive {{SQL CARBON EDIT}} Skip caching for now
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# run: |
|
||||
# mkdir -Force .build
|
||||
# node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
# mkdir -Force .build/node_modules_cache
|
||||
# 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
|
||||
- name: Execute yarn
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skipping caching for now
|
||||
env:
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
run: yarn --frozen-lockfile --network-timeout 180000
|
||||
# - name: Create node_modules archive {{SQL CARBON EDIT}} Skip caching for now
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
# run: |
|
||||
# mkdir -Force .build
|
||||
# node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
# mkdir -Force .build/node_modules_cache
|
||||
# 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
|
||||
|
||||
- name: Compile and Download
|
||||
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
|
||||
- name: Compile and Download
|
||||
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
|
||||
|
||||
- name: Run Core Unit Tests # {{SQL CARBON EDIT}} Rename to core for clarity
|
||||
run: .\scripts\test.bat
|
||||
- name: Run Unit Tests (Electron)
|
||||
run: .\scripts\test.bat
|
||||
|
||||
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} disable for now
|
||||
# run: yarn test-browser --browser chromium
|
||||
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} disable for now
|
||||
# run: yarn test-browser --browser chromium
|
||||
|
||||
# {{SQL CARBON EDIT}} Rename to core for clarity
|
||||
# - name: Run Core Integration Tests {{SQL CARBON EDIT}} disable for now
|
||||
# run: .\scripts\test-integration.bat
|
||||
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} disable for now
|
||||
# run: .\scripts\test-integration.bat
|
||||
|
||||
linux:
|
||||
name: Linux
|
||||
@@ -104,7 +101,7 @@ jobs:
|
||||
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 16
|
||||
node-version: 12
|
||||
# {{SQL CARBON EDIT}} Skip caching for now
|
||||
# - name: Compute node modules cache key
|
||||
# id: nodeModulesCacheKey
|
||||
@@ -114,8 +111,8 @@ jobs:
|
||||
# uses: actions/cache@v2
|
||||
# with:
|
||||
# path: "**/node_modules"
|
||||
# key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
# restore-keys: ${{ runner.os }}-cacheNodeModules14-
|
||||
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
|
||||
# - name: Get yarn cache directory path
|
||||
# id: yarnCacheDirPath
|
||||
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||
@@ -134,17 +131,14 @@ jobs:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
run: yarn --frozen-lockfile --network-timeout 180000
|
||||
|
||||
# Don't inline source maps so that we generate code coverage for ts files
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
env:
SQL_NO_INLINE_SOURCEMAP: 1
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions

- name: Run Core Unit Tests # {{SQL CARBON EDIT}} Rename to core for clarity
- name: Run Unit Tests (Electron)
id: electron-unit-tests
run: DISPLAY=:10 ./scripts/test.sh --runGlob "**/sql/**/*.test.js" --coverage
run: DISPLAY=:10 ./scripts/test.sh --coverage --runGlob "**/sql/**/*.test.js" # {{SQL CARBON EDIT}} Run only our tests with coverage

- name: Run Extension Unit Tests # {{SQL CARBON EDIT}} Rename to extension for clarity
- name: Run Extension Unit Tests (Electron)
id: electron-extension-unit-tests
run: DISPLAY=:10 ./scripts/test-extensions-unit.sh

@@ -161,8 +155,7 @@ jobs:
# id: browser-unit-tests
# run: DISPLAY=:10 yarn test-browser --browser chromium

# {{SQL CARBON EDIT}} Rename to core for clarity
# - name: Run Core Integration Tests {{SQL CARBON EDIT}} Skip for now
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} Skip for now
# id: electron-integration-tests
# run: DISPLAY=:10 ./scripts/test-integration.sh

@@ -177,7 +170,7 @@ jobs:

- uses: actions/setup-node@v2
with:
node-version: 16
node-version: 12

# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
@@ -188,8 +181,8 @@ jobs:
# uses: actions/cache@v2
# with:
# path: "**/node_modules"
# key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules14-
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -202,14 +195,14 @@ jobs:
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skip caching for now
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000

- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions

# This is required for keytar unittests, otherwise we hit
# https://github.com/atom/node-keytar/issues/76
@@ -219,28 +212,27 @@ jobs:
security default-keychain -s $RUNNER_TEMP/buildagent.keychain
security unlock-keychain -p pwd $RUNNER_TEMP/buildagent.keychain

- name: Run Core Unit Tests # {{SQL CARBON EDIT}} Rename to core for clarity
- name: Run Unit Tests (Electron)
run: DISPLAY=:10 ./scripts/test.sh

# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} Skip for now
# run: DISPLAY=:10 yarn test-browser --browser chromium

# {{SQL CARBON EDIT}} Rename to core for clarity
# - name: Run Core Integration Tests {{SQL CARBON EDIT}} Skip for now
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} Skip for now
# id: electron-integration-tests
# run: DISPLAY=:10 ./scripts/test-integration.sh

hygiene:
name: Hygiene and Layering
runs-on: ubuntu-latest
timeout-minutes: 40
timeout-minutes: 30
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2

- uses: actions/setup-node@v3
- uses: actions/setup-node@v2
with:
node-version: 16
node-version: 12

- name: Compute node modules cache key
id: nodeModulesCacheKey
@@ -250,8 +242,7 @@ jobs:
uses: actions/cache@v2
with:
path: "**/node_modules"
key: ${{ runner.os }}-cacheNodeModules23-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-cacheNodeModules23-
key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
- name: Get yarn cache directory path
id: yarnCacheDirPath
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -275,27 +266,51 @@ jobs:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000

- name: Download Playwright
run: yarn playwright-install

- name: Run Hygiene Checks
run: yarn gulp hygiene

- name: Run Valid Layers Checks
run: yarn valid-layers-check

- name: Run Strict Compile Options # {{SQL CARBON EDIT}} add step
run: yarn strict-vscode

# - name: Run Monaco Editor Checks {{SQL CARBON EDIT}} Remove Monaco checks
# run: yarn monaco-compile-check

- name: Compile /build/
run: yarn --cwd build compile

- name: Check clean git state
run: ./.github/workflows/check-clean-git-state.sh

- name: Run eslint
run: yarn eslint

# {{SQL CARBON EDIT}} Don't need this
# - name: Run vscode-dts Compile Checks
# run: yarn vscode-dts-compile-check

- name: Run Trusted Types Checks
run: yarn tsec-compile-check

# - name: Editor Distro & ESM Bundle {{SQL CARBON EDIT}} Remove Monaco checks
# run: yarn gulp editor-esm-bundle

# - name: Typings validation prep {{SQL CARBON EDIT}} Remove Monaco checks
# run: |
# mkdir typings-test

# - name: Typings validation {{SQL CARBON EDIT}} Remove Monaco checks
# working-directory: ./typings-test
# run: |
# yarn init -yp
# ../node_modules/.bin/tsc --init
# echo "import '../out-monaco-editor-core';" > a.ts
# ../node_modules/.bin/tsc --noEmit

# - name: Webpack Editor {{SQL CARBON EDIT}} Remove Monaco checks
# working-directory: ./test/monaco
# run: yarn run bundle

# - name: Compile Editor Tests {{SQL CARBON EDIT}} Remove Monaco checks
# working-directory: ./test/monaco
# run: yarn run compile

# - name: Download Playwright {{SQL CARBON EDIT}} Remove Monaco checks
# run: yarn playwright-install

# - name: Run Editor Tests {{SQL CARBON EDIT}} Remove Monaco checks
# timeout-minutes: 5
# working-directory: ./test/monaco
# run: yarn test

6  .gitignore  vendored
@@ -7,13 +7,11 @@ node_modules/
extensions/**/dist/
/out*/
/extensions/**/out/
src/vs/server
resources/server
build/node_modules
coverage/
test_data/
test-results/
yarn-error.log
*.vsix
vscode.lsif
vscode.db
/.profile-oss
*.orig

@@ -1,6 +0,0 @@
{
  "project": "src/tsconfig.json",
  "source": "./package.json",
  "package": "package.json",
  "out": "vscode.lsif"
}
142  .vscode/cgmanifest.schema.json  vendored  Normal file
@@ -0,0 +1,142 @@
{
  "type": "object",
  "properties": {
    "registrations": {
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "component": {
            "oneOf": [
              {
                "type": "object",
                "required": [
                  "type",
                  "git"
                ],
                "properties": {
                  "type": {
                    "type": "string",
                    "enum": [
                      "git"
                    ]
                  },
                  "git": {
                    "type": "object",
                    "required": [
                      "name",
                      "repositoryUrl",
                      "commitHash"
                    ],
                    "properties": {
                      "name": {
                        "type": "string"
                      },
                      "repositoryUrl": {
                        "type": "string"
                      },
                      "commitHash": {
                        "type": "string"
                      }
                    }
                  }
                }
              },
              {
                "type": "object",
                "required": [
                  "type",
                  "npm"
                ],
                "properties": {
                  "type": {
                    "type": "string",
                    "enum": [
                      "npm"
                    ]
                  },
                  "npm": {
                    "type": "object",
                    "required": [
                      "name",
                      "version"
                    ],
                    "properties": {
                      "name": {
                        "type": "string"
                      },
                      "version": {
                        "type": "string"
                      }
                    }
                  }
                }
              },
              {
                "type": "object",
                "required": [
                  "type",
                  "other"
                ],
                "properties": {
                  "type": {
                    "type": "string",
                    "enum": [
                      "other"
                    ]
                  },
                  "other": {
                    "type": "object",
                    "required": [
                      "name",
                      "downloadUrl",
                      "version"
                    ],
                    "properties": {
                      "name": {
                        "type": "string"
                      },
                      "downloadUrl": {
                        "type": "string"
                      },
                      "version": {
                        "type": "string"
                      }
                    }
                  }
                }
              }
            ]
          },
          "repositoryUrl": {
            "type": "string",
            "description": "The git url of the component"
          },
          "version": {
            "type": "string",
            "description": "The version of the component"
          },
          "license": {
            "type": "string",
            "description": "The name of the license"
          },
          "developmentDependency": {
            "type": "boolean",
            "description": "This component is inlined in the vscode repo and **is not shipped**."
          },
          "isOnlyProductionDependency": {
            "type": "boolean",
            "description": "This component is shipped and **is not inlined in the vscode repo**."
          },
          "licenseDetail": {
            "type": "array",
            "items": {
              "type": "string"
            },
            "description": "The license text"
          }
        }
      }
    }
  }
}
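The schema above defines the accepted shapes for `cgmanifest.json` registrations: each `component` matches one of the `git`, `npm`, or `other` branches of the `oneOf`. As a reading aid, a minimal manifest that would validate against this schema could look like the sketch below; the component names, repository URL, version numbers, and commit hash are illustrative placeholders, not entries from this repository.

```json
{
  "registrations": [
    {
      "component": {
        "type": "git",
        "git": {
          "name": "example-library",
          "repositoryUrl": "https://github.com/example/example-library",
          "commitHash": "0000000000000000000000000000000000000000"
        }
      },
      "version": "1.0.0",
      "license": "MIT"
    },
    {
      "component": {
        "type": "npm",
        "npm": {
          "name": "example-package",
          "version": "2.3.4"
        }
      },
      "developmentDependency": true
    }
  ]
}
```

Per the schema's `description` fields, `developmentDependency` and `isOnlyProductionDependency` only flag whether a component is inlined in the repo or shipped; they do not change the component shape itself.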
3  .vscode/extensions.json  vendored
@@ -3,7 +3,6 @@
  // for the documentation about the extensions.json format
  "recommendations": [
    "dbaeumer.vscode-eslint",
    "EditorConfig.EditorConfig",
    "ms-vscode.vscode-selfhost-test-provider"
    "EditorConfig.EditorConfig"
  ]
}

223  .vscode/launch.json  vendored
@@ -1,107 +1,6 @@
|
||||
{
|
||||
"version": "0.1.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Gulp Build",
|
||||
"program": "${workspaceFolder}/node_modules/gulp/bin/gulp.js",
|
||||
"stopOnEntry": true,
|
||||
"args": [
|
||||
"hygiene"
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"name": "VS Code Git Tests",
|
||||
"runtimeExecutable": "${execPath}",
|
||||
"args": [
|
||||
"/tmp/my4g9l",
|
||||
"--extensionDevelopmentPath=${workspaceFolder}/extensions/git",
|
||||
"--extensionTestsPath=${workspaceFolder}/extensions/git/out/test"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/extensions/git/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "5_tests",
|
||||
"order": 6
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"name": "VS Code Github Tests",
|
||||
"runtimeExecutable": "${execPath}",
|
||||
"args": [
|
||||
"${workspaceFolder}/extensions/github/testWorkspace",
|
||||
"--extensionDevelopmentPath=${workspaceFolder}/extensions/github",
|
||||
"--extensionTestsPath=${workspaceFolder}/extensions/github/out/test"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/extensions/github/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "5_tests",
|
||||
"order": 6
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"name": "VS Code API Tests (single folder)",
|
||||
"runtimeExecutable": "${execPath}",
|
||||
"args": [
|
||||
// "${workspaceFolder}", // Uncomment for running out of sources.
|
||||
"${workspaceFolder}/extensions/vscode-api-tests/testWorkspace",
|
||||
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-api-tests",
|
||||
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-api-tests/out/singlefolder-tests",
|
||||
"--disable-extensions"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "5_tests",
|
||||
"order": 3
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"name": "VS Code API Tests (workspace)",
|
||||
"runtimeExecutable": "${execPath}",
|
||||
"args": [
|
||||
"${workspaceFolder}/extensions/vscode-api-tests/testworkspace.code-workspace",
|
||||
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-api-tests",
|
||||
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-api-tests/out/workspace-tests"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "5_tests",
|
||||
"order": 4
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"name": "VS Code Tokenizer Tests",
|
||||
"runtimeExecutable": "${execPath}",
|
||||
"args": [
|
||||
"${workspaceFolder}/extensions/vscode-colorize-tests/test",
|
||||
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-colorize-tests",
|
||||
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-colorize-tests/out"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "5_tests"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
@@ -110,18 +9,8 @@
|
||||
"windows": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
|
||||
},
|
||||
"osx": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
},
|
||||
"linux": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
},
|
||||
"runtimeArgs": [
|
||||
"--inspect=5875",
|
||||
"--no-cached-data",
|
||||
"--crash-reporter-directory=${workspaceFolder}/.profile-oss/crashes",
|
||||
// for general runtime freezes: https://github.com/microsoft/vscode/issues/127861#issuecomment-904144910
|
||||
"--disable-features=CalculateNativeWinOcclusion",
|
||||
"--no-cached-data"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
@@ -154,15 +43,12 @@
|
||||
"runtimeArgs": [
|
||||
"--inspect=5875",
|
||||
"--no-cached-data",
|
||||
"--crash-reporter-directory=${workspaceFolder}/.profile-oss/crashes",
|
||||
// for general runtime freezes: https://github.com/microsoft/vscode/issues/127861#issuecomment-904144910
|
||||
"--disable-features=CalculateNativeWinOcclusion",
|
||||
],
|
||||
"webRoot": "${workspaceFolder}",
|
||||
"cascadeTerminateToConfigurations": [
|
||||
"Attach to Extension Host"
|
||||
],
|
||||
"userDataDir": "${workspaceFolder}/.profile-oss",
|
||||
"userDataDir": false,
|
||||
"pauseForSourceMap": false,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
@@ -212,18 +98,6 @@
|
||||
"group": "2_attach"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"name": "Attach to CLI Process",
|
||||
"port": 5874,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "2_attach"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "pwa-chrome",
|
||||
"request": "attach",
|
||||
@@ -244,49 +118,6 @@
|
||||
"order": 2
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"name": "Attach to Search Process",
|
||||
"port": 5876,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "2_attach"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "attach",
|
||||
"name": "Attach to Pty Host Process",
|
||||
"port": 5877,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "2_attach"
|
||||
}
|
||||
},
|
||||
{
|
||||
/* Added for "VS Code Selfhost Test Provider" extension support */
|
||||
"type": "pwa-chrome",
|
||||
"request": "attach",
|
||||
"name": "Attach to VS Code",
|
||||
"browserAttachLocation": "workspace",
|
||||
"port": 9222,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "2_attach",
|
||||
"hidden": true
|
||||
},
|
||||
"resolveSourceMapLocations": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"perScriptSourcemaps": "yes"
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
@@ -438,56 +269,6 @@
|
||||
"presentation": {
|
||||
"group": "4_web"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Run Sample Resource Deployment Extension",
|
||||
"type": "sqlopsExtensionHost",
|
||||
"request": "launch",
|
||||
"windows": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
|
||||
},
|
||||
"osx": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
},
|
||||
"linux": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
},
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceRoot}/samples/sample-resource-deployment"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceRoot}/samples/sample-resource-deployment/out/**/*.js"
|
||||
],
|
||||
"preLaunchTask": "Watch sample-resource-deployment",
|
||||
"presentation": {
|
||||
"group": "5_samples"
|
||||
},
|
||||
"timeout": 30000
|
||||
},
|
||||
{
|
||||
"name": "Run Sample Notebook Provider Extension",
|
||||
"type": "sqlopsExtensionHost",
|
||||
"request": "launch",
|
||||
"windows": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
|
||||
},
|
||||
"osx": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
},
|
||||
"linux": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
},
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceRoot}/samples/sample-notebook-provider"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceRoot}/samples/sample-notebook-provider/out/**/*.js"
|
||||
],
|
||||
"preLaunchTask": "Watch sample-notebook-provider",
|
||||
"presentation": {
|
||||
"group": "5_samples"
|
||||
},
|
||||
"timeout": 30000
|
||||
}
|
||||
],
|
||||
"compounds": [
|
||||
|
||||
4  .vscode/notebooks/api.github-issues  vendored
@@ -7,7 +7,7 @@
  {
    "kind": 2,
    "language": "github-issues",
    "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"July 2022\""
    "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"June 2021\""
  },
  {
    "kind": 1,
@@ -27,6 +27,6 @@
  {
    "kind": 2,
    "language": "github-issues",
    "value": "$repo $milestone is:open label:api-proposal sort:created-asc"
    "value": "$repo $milestone is:open label:api-proposal "
  }
]
36  .vscode/notebooks/endgame.github-issues  vendored
@@ -7,7 +7,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-dev repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub repo:microsoft/vscode-remote-repositories-github repo:microsoft/vscode-livepreview repo:microsoft/vscode-python repo:microsoft/vscode-jupyter repo:microsoft/vscode-jupyter-internal repo:microsoft/vscode-unpkg\n\n$MILESTONE=milestone:\"July 2022\""
|
||||
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\""
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
@@ -24,26 +24,6 @@
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS $MILESTONE is:pr is:open"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "## Unverified Older Insiders-Released Issues"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS -$MILESTONE is:issue is:closed label:bug label:insiders-released -label:verified -label:*duplicate -label:*as-designed -label:z-author-verified -label:on-testplan"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "## Unverified Older Insiders-Released Feature Requests"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS -$MILESTONE is:issue is:closed label:feature-request label:insiders-released -label:on-testplan -label:verified -label:*duplicate"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
@@ -64,16 +44,6 @@
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request -label:verification-needed -label:on-testplan -label:verified -label:*duplicate"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "## Open Test Plan Items without milestone"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS $MILESTONE is:issue is:open label:testplan-item no:milestone"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
@@ -87,7 +57,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS is:issue is:open label:testplan-item"
|
||||
"value": "$REPOS $MILESTONE is:issue is:open label:testplan-item"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
@@ -97,7 +67,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS $MILESTONE is:issue is:closed label:verification-needed -label:verified"
|
||||
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request label:verification-needed -label:verified"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
|
||||
425  .vscode/notebooks/grooming-delta.github-issues  vendored
@@ -2,666 +2,769 @@
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "## Config"
|
||||
"value": "## Config",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$since=2021-10-01"
|
||||
"value": "$since=2020-10-01",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode\n\nQuery exceeds the maximum result. Run the query manually: `is:issue is:open closed:>2021-10-01`"
|
||||
"value": "# vscode\n\nQuery exceeds the maximum result. Run the query manually: `is:issue is:open closed:>2020-10-01`",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "//repo:microsoft/vscode is:issue closed:>$since"
|
||||
"value": "//repo:microsoft/vscode is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "//repo:microsoft/vscode is:issue created:>$since"
|
||||
"value": "//repo:microsoft/vscode is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-remote-release"
|
||||
"value": "# vscode-remote-release",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-remote-release is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-remote-release is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-remote-release is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-remote-release is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# monaco-editor"
|
||||
"value": "# monaco-editor",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-editor is:issue closed:>$since"
|
||||
"value": "repo:microsoft/monaco-editor is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-editor is:issue created:>$since"
|
||||
"value": "repo:microsoft/monaco-editor is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-docs"
|
||||
"value": "# vscode-docs",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-docs is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-docs is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-docs is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-docs is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-js-debug"
|
||||
"value": "# vscode-js-debug",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-js-debug is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-js-debug is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-js-debug is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-js-debug is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# language-server-protocol"
|
||||
"value": "# language-server-protocol",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/language-server-protocol is:issue closed:>$since"
|
||||
"value": "repo:microsoft/language-server-protocol is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/language-server-protocol is:issue created:>$since"
|
||||
"value": "repo:microsoft/language-server-protocol is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-eslint"
|
||||
"value": "# vscode-eslint",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-eslint is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-eslint is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-eslint is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-eslint is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-css-languageservice"
|
||||
"value": "# vscode-css-languageservice",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-css-languageservice is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-css-languageservice is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-css-languageservice is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-css-languageservice is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-test"
|
||||
"value": "# vscode-test",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-test is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-test is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-test is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-test is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-pull-request-github"
|
||||
"value": "# vscode-pull-request-github",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-pull-request-github is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-pull-request-github is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-test is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-test is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-chrome-debug-core"
|
||||
"value": "# vscode-chrome-debug (deprecated)",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-chrome-debug-core is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-chrome-debug is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-chrome-debug-core is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-chrome-debug is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-debugadapter-node"
|
||||
"value": "# vscode-chrome-debug-core",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-debugadapter-node is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-chrome-debug-core is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-debugadapter-node is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-chrome-debug-core is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-emmet-helper"
|
||||
"value": "# vscode-debugadapter-node",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-emmet-helper is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-debugadapter-node is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-emmet-helper is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-debugadapter-node is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-extension-vscode\n\nDeprecated"
|
||||
"value": "# vscode-emmet-helper",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-extension-vscode is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-emmet-helper is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-extension-vscode is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-emmet-helper is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-extension-samples"
|
||||
"value": "# vscode-extension-vscode\n\nDeprecated",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-extension-samples is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-extension-vscode is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-extension-samples is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-extension-vscode is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-filewatcher-windows"
|
||||
"value": "# vscode-extension-samples",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-filewatcher-windows is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-extension-samples is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-filewatcher-windows is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-extension-samples is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-generator-code"
|
||||
"value": "# vscode-filewatcher-windows",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-generator-code is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-filewatcher-windows is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-generator-code is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-filewatcher-windows is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-html-languageservice"
|
||||
"value": "# vscode-generator-code",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-html-languageservice is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-generator-code is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-html-languageservice is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-generator-code is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-json-languageservice"
|
||||
"value": "# vscode-html-languageservice",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-json-languageservice is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-html-languageservice is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-json-languageservice is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-html-languageservice is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-languageserver-node"
|
||||
"value": "# vscode-jshint",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-languageserver-node is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-jshint is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-jshint is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": ""
|
||||
"value": "# vscode-json-languageservice",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-languageserver-node is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-json-languageservice is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-json-languageservice is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-loader"
|
||||
"value": "# vscode-languageserver-node",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-loader is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-languageserver-node is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-loader is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-languageserver-node is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-mono-debug"
|
||||
"value": "# vscode-loader",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-mono-debug is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-loader is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-mono-debug is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-loader is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-node-debug"
|
||||
"value": "# vscode-mono-debug",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-node-debug is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-mono-debug is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-node-debug is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-mono-debug is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-node-debug2"
|
||||
"value": "# vscode-node-debug",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-node-debug2 is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-node-debug is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-node-debug2 is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-node-debug is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-recipes"
|
||||
"value": "# vscode-node-debug2",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-recipes is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-node-debug2 is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-recipes is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-node-debug2 is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-textmate"
|
||||
"value": "# vscode-recipes",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-textmate is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-recipes is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-textmate is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-recipes is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-themes"
|
||||
"value": "# vscode-textmate",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-themes is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-textmate is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-themes is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-textmate is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-vsce"
|
||||
"value": "# vscode-themes",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-vsce is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-themes is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-vsce is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-themes is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-website"
|
||||
"value": "# vscode-vsce",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-website is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-vsce is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-website is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-vsce is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-windows-process-tree"
|
||||
"value": "# vscode-website",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-windows-process-tree is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-website is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-windows-process-tree is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-website is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# debug-adapter-protocol"
|
||||
"value": "# vscode-windows-process-tree",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/debug-adapter-protocol is:issue closed:>$since"
|
||||
"value": "repo:microsoft/vscode-windows-process-tree is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/debug-adapter-protocol is:issue created:>$since"
|
||||
"value": "repo:microsoft/vscode-windows-process-tree is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# inno-updater"
|
||||
"value": "# debug-adapter-protocol",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/inno-updater is:issue closed:>$since"
|
||||
"value": "repo:microsoft/debug-adapter-protocol is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/inno-updater is:issue created:>$since"
|
||||
"value": "repo:microsoft/debug-adapter-protocol is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# monaco-languages"
|
||||
"value": "# inno-updater",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-languages is:issue closed:>$since"
|
||||
"value": "repo:microsoft/inno-updater is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-languages is:issue created:>$since"
|
||||
"value": "repo:microsoft/inno-updater is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# monaco-typescript"
|
||||
"value": "# language-server-protocol-inspector",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-typescript is:issue closed:>$since"
|
||||
"value": "repo:microsoft/language-server-protocol-inspector is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-typescript is:issue created:>$since"
|
||||
"value": "repo:microsoft/language-server-protocol-inspector is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# monaco-css"
|
||||
"value": "# monaco-languages",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-css is:issue closed:>$since"
|
||||
"value": "repo:microsoft/monaco-languages is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-css is:issue created:>$since"
|
||||
"value": "repo:microsoft/monaco-languages is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# monaco-json"
|
||||
"value": "# monaco-typescript",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-json is:issue closed:>$since"
|
||||
"value": "repo:microsoft/monaco-typescript is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-json is:issue created:>$since"
|
||||
"value": "repo:microsoft/monaco-typescript is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# monaco-html"
|
||||
"value": "# monaco-css",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-html is:issue closed:>$since"
|
||||
"value": "repo:microsoft/monaco-css is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-html is:issue created:>$since"
|
||||
"value": "repo:microsoft/monaco-css is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# monaco-editor-webpack-plugin"
|
||||
"value": "# monaco-json",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-editor-webpack-plugin is:issue closed:>$since"
|
||||
"value": "repo:microsoft/monaco-json is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/monaco-editor-webpack-plugin is:issue created:>$since"
|
||||
"value": "repo:microsoft/monaco-json is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# node-jsonc-parser"
|
||||
"value": "# monaco-html",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/node-jsonc-parser is:issue closed:>$since"
|
||||
"value": "repo:microsoft/monaco-html is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/node-jsonc-parser is:issue created:>$since"
|
||||
"value": "repo:microsoft/monaco-html is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-jupyter"
|
||||
"value": "# monaco-editor-webpack-plugin",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-jupyter is:issue closed:>$since"
|
||||
"value": "repo:microsoft/monaco-editor-webpack-plugin is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-jupyter is:issue created:>$since"
|
||||
"value": "repo:microsoft/monaco-editor-webpack-plugin is:issue created:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-python"
|
||||
"value": "# node-jsonc-parser",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-python is:issue closed:>$since"
|
||||
"value": "repo:microsoft/node-jsonc-parser is:issue closed:>$since",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-python is:issue created:>$since"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-livepreview"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-livepreview is:issue closed:>$since"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-livepreview is:issue created:>$since"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": ""
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode-test"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-test is:issue closed:>$since"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-test is:issue created:>$since"
|
||||
"value": "repo:microsoft/node-jsonc-parser is:issue created:>$since",
|
||||
"editable": true
|
||||
}
|
||||
]
|
||||
30  .vscode/notebooks/grooming.github-issues  vendored  Normal file
@@ -0,0 +1,30 @@
|
||||
[
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "### Categorizing Issues\n\nEach issue must have a type label. Most type labels are grey, some are yellow. Bugs are grey with a touch of red.",
|
||||
"editable": true,
|
||||
"outputs": []
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode is:open is:issue assignee:@me -label:\"needs more info\" -label:bug -label:feature-request -label:under-discussion -label:debt -label:*question -label:upstream -label:electron -label:engineering -label:plan-item ",
|
||||
"editable": true,
|
||||
"outputs": []
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "### Feature Areas\n\nEach issue should be assigned to a feature area",
|
||||
"editable": true,
|
||||
"outputs": []
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode is:open is:issue assignee:@me -label:L10N -label:VIM -label:api -label:api-finalization -label:api-proposal -label:authentication -label:breadcrumbs -label:callhierarchy -label:code-lens -label:color-palette -label:comments -label:config -label:context-keys -label:css-less-scss -label:custom-editors -label:debug -label:debug-console -label:dialogs -label:diff-editor -label:dropdown -label:editor -label:editor-RTL -label:editor-autoclosing -label:editor-autoindent -label:editor-bracket-matching -label:editor-clipboard -label:editor-code-actions -label:editor-color-picker -label:editor-columnselect -label:editor-commands -label:editor-comments -label:editor-contrib -label:editor-core -label:editor-drag-and-drop -label:editor-error-widget -label:editor-find -label:editor-folding -label:editor-highlight -label:editor-hover -label:editor-indent-detection -label:editor-indent-guides -label:editor-input -label:editor-input-IME -label:editor-insets -label:editor-minimap -label:editor-multicursor -label:editor-parameter-hints -label:editor-render-whitespace -label:editor-rendering -label:editor-scrollbar -label:editor-symbols -label:editor-synced-region -label:editor-textbuffer -label:editor-theming -label:editor-wordnav -label:editor-wrapping -label:emmet -label:error-list -label:explorer-custom -label:extension-host -label:extension-recommendations -label:extensions -label:extensions-development -label:file-decorations -label:file-encoding -label:file-explorer -label:file-glob -label:file-guess-encoding -label:file-io -label:file-watcher -label:font-rendering -label:formatting -label:git -label:github -label:gpu -label:grammar -label:grid-view -label:html -label:i18n -label:icon-brand -label:icons-product -label:install-update -label:integrated-terminal -label:integrated-terminal-conpty -label:integrated-terminal-links -label:integrated-terminal-rendering -label:integrated-terminal-winpty -label:intellisense-config -label:ipc -label:issue-bot -label:issue-reporter -label:javascript -label:json -label:keybindings -label:keybindings-editor -label:keyboard-layout -label:label-provider -label:languages-basic -label:languages-diagnostics -label:languages-guessing -label:layout -label:lcd-text-rendering -label:list -label:log -label:markdown -label:marketplace -label:menus -label:merge-conflict -label:notebook -label:outline -label:output -label:perf -label:perf-bloat -label:perf-startup -label:php -label:portable-mode -label:proxy -label:quick-pick -label:references-viewlet -label:release-notes -label:remote -label:remote-explorer -label:rename -label:sandbox -label:scm -label:screencast-mode -label:search -label:search-api -label:search-editor -label:search-replace -label:semantic-tokens -label:settings-editor -label:settings-sync -label:settings-sync-server -label:shared-process -label:simple-file-dialog -label:smart-select -label:snap -label:snippets -label:splitview -label:suggest -label:sync-error-handling -label:tasks -label:telemetry -label:themes -label:timeline -label:timeline-git -label:titlebar -label:tokenization -label:touch/pointer -label:trackpad/scroll -label:tree -label:typescript -label:undo-redo -label:uri -label:ux -label:variable-resolving -label:vscode-build -label:vscode-website -label:web -label:webview -label:workbench-actions -label:workbench-cli -label:workbench-diagnostics -label:workbench-dnd -label:workbench-editor-grid -label:workbench-editors -label:workbench-electron -label:workbench-feedback -label:workbench-history 
-label:workbench-hot-exit -label:workbench-hover -label:workbench-launch -label:workbench-link -label:workbench-multiroot -label:workbench-notifications -label:workbench-os-integration -label:workbench-rapid-render -label:workbench-run-as-admin -label:workbench-state -label:workbench-status -label:workbench-tabs -label:workbench-touchbar -label:workbench-views -label:workbench-welcome -label:workbench-window -label:workbench-zen -label:workspace-edit -label:workspace-symbols -label:zoom",
|
||||
"editable": true,
|
||||
"outputs": []
|
||||
}
|
||||
]
|
||||
53  .vscode/notebooks/inbox.github-issues  vendored
@@ -2,46 +2,49 @@
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "## tl;dr: Triage Inbox\n\nAll inbox issues but not those that need more information. These issues need to be triaged, e.g assigned to a user or ask for more information"
|
||||
"value": "## tl;dr: Triage Inbox\n\nAll inbox issues but not those that need more information. These issues need to be triaged, e.g assigned to a user or ask for more information",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$inbox -label:\"info-needed\" sort:created-desc"
|
||||
"value": "$inbox -label:\"needs more info\"",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "##### `Config`: defines the inbox query",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode label:triage-needed is:open"
|
||||
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item ",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "##### `Config`: defines the inbox query"
|
||||
"value": "## Inbox tracking and Issue triage",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/main/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions.",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "## All Inbox Items\n\nAll issues that have no assignee and that have neither **feature requests** nor **test plan items** nor **plan items**.",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item "
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "## Inbox tracking and Issue triage"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/main/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions."
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "## All Inbox Items\n\nAll issues that have no assignee and that have neither **feature requests** nor **test plan items** nor **plan items**."
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$inbox"
|
||||
"value": "$inbox",
|
||||
"editable": true
|
||||
}
|
||||
]
|
||||
14
.vscode/notebooks/my-endgame.github-issues
vendored
14
.vscode/notebooks/my-endgame.github-issues
vendored
@@ -7,7 +7,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-dev repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub repo:microsoft/vscode-remote-repositories-github repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-livepreview repo:microsoft/vscode-python repo:microsoft/vscode-jupyter repo:microsoft/vscode-jupyter-internal\n\n$MILESTONE=milestone:\"July 2022\"\n\n$MINE=assignee:@me"
|
||||
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\"\n\n$MINE=assignee:@me"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
@@ -52,7 +52,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS is:issue is:open author:@me label:testplan-item"
|
||||
"value": "$REPOS $MILESTONE is:issue is:open author:@me label:testplan-item"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
@@ -62,7 +62,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed -label:verified"
|
||||
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
@@ -77,7 +77,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS $MINE is:issue is:open label:testplan-item"
|
||||
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:testplan-item"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
@@ -147,7 +147,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:unreleased -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:triage-needed -label:verification-found"
|
||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
@@ -157,7 +157,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:unreleased -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:chrisdias -author:chrmarti -author:Chuxel -author:claudiaregio -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:DonJayamanne -author:dynamicwebpaige -author:eamodio -author:egamma -author:fiveisprime -author:greazer -author:gregvanl -author:hediet -author:IanMatthewHuff -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:joyceerhl -author:jrieken -author:karrtikr-author:kieferrm -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rchiodo -author:rebornix -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:tanhakabir -author:TylerLeonhardt -author:Tyriar -author:weinand -author:kimadeline -author:amunger"
|
||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15 -author:hediet"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
@@ -167,7 +167,7 @@
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:unreleased -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found"
|
||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
|
||||
56
.vscode/notebooks/my-work.github-issues
vendored
56
.vscode/notebooks/my-work.github-issues
vendored
File diff suppressed because one or more lines are too long
27
.vscode/notebooks/verification.github-issues
vendored
27
.vscode/notebooks/verification.github-issues
vendored
@@ -2,46 +2,55 @@
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "### Bug Verification Queries\n\nBefore shipping we want to verify _all_ bugs. That means when a bug is fixed we check that the fix actually works. It's always best to start with bugs that you have filed and the proceed with bugs that have been filed from users outside the development team. "
|
||||
"value": "### Bug Verification Queries\n\nBefore shipping we want to verify _all_ bugs. That means when a bug is fixed we check that the fix actually works. It's always best to start with bugs that you have filed and the proceed with bugs that have been filed from users outside the development team. ",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "#### Config: update list of `repos` and the `milestone`"
|
||||
"value": "#### Config: update list of `repos` and the `milestone`",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-dev repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-jupyter repo:microsoft/vscode-python\n$milestone=milestone:\"May 2022\""
|
||||
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"March 2021\"",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "### Bugs You Filed"
|
||||
"value": "### Bugs You Filed",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate author:@me"
|
||||
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate author:@me",
|
||||
"editable": false
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "### Bugs From Outside"
|
||||
"value": "### Bugs From Outside",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh -author:hediet -author:joyceerhl -author:rchiodo -author:IanMatthewHuff"
|
||||
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh",
|
||||
"editable": false
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "### All"
|
||||
"value": "### All",
|
||||
"editable": true
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate"
|
||||
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate",
|
||||
"editable": false
|
||||
}
|
||||
]
|
||||
42
.vscode/notebooks/vscode-dev.github-issues
vendored
42
.vscode/notebooks/vscode-dev.github-issues
vendored
@@ -1,42 +0,0 @@
|
||||
[
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# vscode.dev repo"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-dev milestone:\"May 2022\" is:open"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-dev milestone:\"Backlog\" is:open"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# VS Code repo"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode label:vscode.dev is:open"
|
||||
},
|
||||
{
|
||||
"kind": 1,
|
||||
"language": "markdown",
|
||||
"value": "# GitHub Repositories repos"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-remote-repositories-github milestone:\"May 2022\" is:open"
|
||||
},
|
||||
{
|
||||
"kind": 2,
|
||||
"language": "github-issues",
|
||||
"value": "repo:microsoft/vscode-remotehub milestone:\"May 2022\" is:open"
|
||||
}
|
||||
]
|
||||
101
.vscode/searches/TrustedTypes.code-search
vendored
Normal file
101
.vscode/searches/TrustedTypes.code-search
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
# Query: .innerHTML =
|
||||
# Flags: CaseSensitive WordMatch
|
||||
# Including: src/vs/**/*.{t,j}s
|
||||
# Excluding: *.test.ts, **/test/**
|
||||
# ContextLines: 3
|
||||
|
||||
12 results - 9 files
|
||||
|
||||
src/vs/base/browser/dom.ts:
|
||||
1359 );
|
||||
1360
|
||||
1361 const html = _ttpSafeInnerHtml?.createHTML(value, options) ?? insane(value, options);
|
||||
1362: node.innerHTML = html as unknown as string;
|
||||
1363 }
|
||||
|
||||
src/vs/base/browser/markdownRenderer.ts:
|
||||
272 };
|
||||
273
|
||||
274 if (_ttpInsane) {
|
||||
275: element.innerHTML = _ttpInsane.createHTML(renderedMarkdown, insaneOptions) as unknown as string;
|
||||
276 } else {
|
||||
277: element.innerHTML = insane(renderedMarkdown, insaneOptions);
|
||||
278 }
|
||||
279
|
||||
280 // signal that async code blocks can be now be inserted
|
||||
|
||||
src/vs/editor/browser/core/markdownRenderer.ts:
|
||||
88
|
||||
89 const element = document.createElement('span');
|
||||
90
|
||||
91: element.innerHTML = MarkdownRenderer._ttpTokenizer
|
||||
92 ? MarkdownRenderer._ttpTokenizer.createHTML(value, tokenization) as unknown as string
|
||||
93 : tokenizeToString(value, tokenization);
|
||||
94
|
||||
|
||||
src/vs/editor/browser/view/domLineBreaksComputer.ts:
|
||||
107 allCharOffsets[i] = tmp[0];
|
||||
108 allVisibleColumns[i] = tmp[1];
|
||||
109 }
|
||||
110: containerDomNode.innerHTML = sb.build();
|
||||
111
|
||||
112 containerDomNode.style.position = 'absolute';
|
||||
113 containerDomNode.style.top = '10000';
|
||||
|
||||
src/vs/editor/browser/view/viewLayer.ts:
|
||||
512 }
|
||||
513 const lastChild = <HTMLElement>this.domNode.lastChild;
|
||||
514 if (domNodeIsEmpty || !lastChild) {
|
||||
515: this.domNode.innerHTML = newLinesHTML;
|
||||
516 } else {
|
||||
517 lastChild.insertAdjacentHTML('afterend', newLinesHTML);
|
||||
518 }
|
||||
|
||||
533 if (ViewLayerRenderer._ttPolicy) {
|
||||
534 invalidLinesHTML = ViewLayerRenderer._ttPolicy.createHTML(invalidLinesHTML) as unknown as string;
|
||||
535 }
|
||||
536: hugeDomNode.innerHTML = invalidLinesHTML;
|
||||
537
|
||||
538 for (let i = 0; i < ctx.linesLength; i++) {
|
||||
539 const line = ctx.lines[i];
|
||||
|
||||
src/vs/editor/browser/widget/diffEditorWidget.ts:
|
||||
2157
|
||||
2158 let domNode = document.createElement('div');
|
||||
2159 domNode.className = `view-lines line-delete ${MOUSE_CURSOR_TEXT_CSS_CLASS_NAME}`;
|
||||
2160: domNode.innerHTML = sb.build();
|
||||
2161 Configuration.applyFontInfoSlow(domNode, fontInfo);
|
||||
2162
|
||||
2163 let marginDomNode = document.createElement('div');
|
||||
2164 marginDomNode.className = 'inline-deleted-margin-view-zone';
|
||||
2165: marginDomNode.innerHTML = marginHTML.join('');
|
||||
2166 Configuration.applyFontInfoSlow(marginDomNode, fontInfo);
|
||||
2167
|
||||
2168 return {
|
||||
|
||||
src/vs/editor/standalone/browser/colorizer.ts:
|
||||
40 let text = domNode.firstChild ? domNode.firstChild.nodeValue : '';
|
||||
41 domNode.className += ' ' + theme;
|
||||
42 let render = (str: string) => {
|
||||
43: domNode.innerHTML = str;
|
||||
44 };
|
||||
45 return this.colorize(modeService, text || '', mimeType, options).then(render, (err) => console.error(err));
|
||||
46 }
|
||||
|
||||
src/vs/workbench/contrib/notebook/browser/view/renderers/cellRenderer.ts:
|
||||
580 const element = DOM.$('div', { style });
|
||||
581
|
||||
582 const linesHtml = this.getRichTextLinesAsHtml(model, modelRange, colorMap);
|
||||
583: element.innerHTML = linesHtml as unknown as string;
|
||||
584 return element;
|
||||
585 }
|
||||
586
|
||||
|
||||
src/vs/workbench/contrib/notebook/browser/view/renderers/webviewPreloads.ts:
|
||||
375 addMouseoverListeners(outputNode, outputId);
|
||||
376 const content = data.content;
|
||||
377 if (content.type === RenderOutputType.Html) {
|
||||
378: outputNode.innerHTML = content.htmlContent;
|
||||
379 cellOutputContainer.appendChild(outputNode);
|
||||
380 domEval(outputNode);
|
||||
381 } else if (preloadErrs.some(e => !!e)) {
|
||||
44
.vscode/settings.json
vendored
44
.vscode/settings.json
vendored
@@ -4,11 +4,11 @@
|
||||
"files.exclude": {
|
||||
".git": true,
|
||||
".build": true,
|
||||
".profile-oss": true,
|
||||
"**/.DS_Store": true,
|
||||
"build/**/*.js": {
|
||||
"when": "$(basename).ts"
|
||||
}
|
||||
},
|
||||
"src/vs/server": false
|
||||
},
|
||||
"files.associations": {
|
||||
"cglicenses.json": "jsonc"
|
||||
@@ -26,7 +26,8 @@
|
||||
"test/automation/out/**": true,
|
||||
"test/integration/browser/out/**": true,
|
||||
"src/vs/base/test/node/uri.test.data.txt": true,
|
||||
"src/vs/workbench/api/test/browser/extHostDocumentData.test.perf-data.ts": true
|
||||
"src/vs/workbench/test/browser/api/extHostDocumentData.test.perf-data.ts": true,
|
||||
"src/vs/server": false
|
||||
},
|
||||
"lcov.path": [
|
||||
"./.build/coverage/lcov.info",
|
||||
@@ -57,7 +58,7 @@
|
||||
"fileMatch": [
|
||||
"cgmanifest.json"
|
||||
],
|
||||
"url": "https://json.schemastore.org/component-detection-manifest.json"
|
||||
"url": "./.vscode/cgmanifest.schema.json"
|
||||
},
|
||||
{
|
||||
"fileMatch": [
|
||||
@@ -67,47 +68,18 @@
|
||||
}
|
||||
],
|
||||
"git.ignoreLimitWarning": true,
|
||||
"git.branchProtection": [
|
||||
"main",
|
||||
"release/*"
|
||||
],
|
||||
"git.branchProtectionPrompt": "alwaysCommitToNewBranch",
|
||||
"git.branchRandomName.enable": true,
|
||||
"git.mergeEditor": true,
|
||||
"remote.extensionKind": {
|
||||
"msjsdiag.debugger-for-chrome": "workspace"
|
||||
},
|
||||
"gulp.autoDetect": "off",
|
||||
"files.insertFinalNewline": true,
|
||||
"[plaintext]": {
|
||||
"files.insertFinalNewline": false
|
||||
"files.insertFinalNewline": false,
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "vscode.typescript-language-features",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "vscode.typescript-language-features",
|
||||
"editor.formatOnSave": true
|
||||
"editor.defaultFormatter": "vscode.typescript-language-features"
|
||||
},
|
||||
"typescript.tsc.autoDetect": "off",
|
||||
"notebook.experimental.useMarkdownRenderer": true,
|
||||
"testing.autoRun.mode": "rerun",
|
||||
"conventionalCommits.scopes": [
|
||||
"tree",
|
||||
"scm",
|
||||
"grid",
|
||||
"splitview",
|
||||
"table",
|
||||
"list",
|
||||
"git",
|
||||
"sash"
|
||||
],
|
||||
"editor.quickSuggestions": {
|
||||
"other": "inline",
|
||||
"comments": "inline",
|
||||
"strings": "inline"
|
||||
},
|
||||
"yaml.schemas": {
|
||||
"https://raw.githubusercontent.com/microsoft/azure-pipelines-vscode/master/service-schema.json": "build/azure-pipelines/**/*.yml"
|
||||
},
|
||||
}
|
||||
|
||||
62
.vscode/tasks.json
vendored
62
.vscode/tasks.json
vendored
@@ -8,8 +8,7 @@
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never",
|
||||
"group": "buildWatchers",
|
||||
"close": false
|
||||
"group": "buildWatchers"
|
||||
},
|
||||
"problemMatcher": {
|
||||
"owner": "typescript",
|
||||
@@ -24,8 +23,8 @@
|
||||
"message": 3
|
||||
},
|
||||
"background": {
|
||||
"beginsPattern": "Starting compilation...",
|
||||
"endsPattern": "Finished compilation with"
|
||||
"beginsPattern": "Starting compilation",
|
||||
"endsPattern": "Finished compilation"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -36,8 +35,7 @@
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never",
|
||||
"group": "buildWatchers",
|
||||
"close": false
|
||||
"group": "buildWatchers"
|
||||
},
|
||||
"problemMatcher": {
|
||||
"owner": "typescript",
|
||||
@@ -103,14 +101,18 @@
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Restart VS Code - Build",
|
||||
"dependsOn": [
|
||||
"Kill VS Code - Build",
|
||||
"VS Code - Build"
|
||||
],
|
||||
"group": "build",
|
||||
"dependsOrder": "sequence",
|
||||
"problemMatcher": []
|
||||
"type": "npm",
|
||||
"script": "strict-vscode-watch",
|
||||
"label": "TS - Strict VSCode",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
},
|
||||
"problemMatcher": {
|
||||
"base": "$tsc-watch",
|
||||
"owner": "typescript-vscode",
|
||||
"applyTo": "allDocuments"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
@@ -183,12 +185,8 @@
|
||||
},
|
||||
{
|
||||
"type": "shell",
|
||||
"command": "./scripts/code-server.sh",
|
||||
"windows": {
|
||||
"command": ".\\scripts\\code-server.bat"
|
||||
},
|
||||
"args": ["--no-launch", "--connection-token", "dev-token", "--port", "8080"],
|
||||
"label": "Run code server",
|
||||
"command": "yarn web --no-launch",
|
||||
"label": "Run web",
|
||||
"isBackground": true,
|
||||
"problemMatcher": {
|
||||
"pattern": {
|
||||
@@ -233,30 +231,6 @@
|
||||
"group": "build",
|
||||
"label": "npm: tsec-compile-check",
|
||||
"detail": "node_modules/tsec/bin/tsec -p src/tsconfig.json --noEmit"
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "watch",
|
||||
"label": "Watch sample-resource-deployment",
|
||||
"path": "./samples/sample-resource-deployment/package.json",
|
||||
"problemMatcher": "$tsc-watch",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
},
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "watch",
|
||||
"label": "Watch sample-notebook-provider",
|
||||
"path": "./samples/sample-notebook-provider/package.json",
|
||||
"problemMatcher": "$tsc-watch",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
},
|
||||
"group": "build"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
3
.yarnrc
3
.yarnrc
@@ -1,4 +1,3 @@
|
||||
disturl "https://electronjs.org/headers"
|
||||
target "19.1.8"
|
||||
target "12.0.9"
|
||||
runtime "electron"
|
||||
build_from_source "true"
|
||||
|
||||
509
CHANGELOG.md
509
CHANGELOG.md
@@ -1,440 +1,11 @@
|
||||
# Change Log
|
||||
|
||||
## Version 1.43.0
|
||||
* Release number: 1.43.0
|
||||
* Release date: April 12, 2023
|
||||
|
||||
### What's new in 1.43.0
|
||||
|
||||
| New Item | Details |
|
||||
| --- | --- |
|
||||
| Connection | Added notation for required properties (e.g. Attestation protocol and Attestation URL) when Secure Enclaves are enabled |
|
||||
| SQL Database Projects extension | General Availability |
|
||||
| SQL Database Projects extension | Move and rename files within Database Projects view |
|
||||
| SQL Database Projects extension | SQLCMD variables available for editing in Database Projects view |
|
||||
| Object Explorer | Double-clicking on a user or table in Object Explorer will open the designer for the object |
|
||||
| Query Editor | Added a Parse button to the Query Editor toolbar for parsing queries before execution |
|
||||
| Query Results | Added support to select a row in query results via double click |
|
||||
|
||||
### Bug fixes in 1.43.0
|
||||
|
||||
| New Item | Details |
|
||||
| --- | --- |
|
||||
| Connection | Added support for linked accounts with same username but different domains |
|
||||
| Connection | Fixed issue with incorrect cache write path |
|
||||
| Connection | Added ability to include optional name and grouping when creating a new connection using a connection string |
|
||||
| Connection | Updating username in MSSQL connections to use Preferred username for the display name |
|
||||
| Connection | Fixed issue with encoding for OSX keychain on macOS |
|
||||
| Connection | Added support for Azure MFA and ‘Sql Authentication Provider’ on Linux |
|
||||
| Dataverse | Addressed error generated when expanding the database node for a Dataverse database in Object Explorer |
|
||||
| IntelliCode extension | Fixed error that occurred when launching Azure Data Studio with Visual Studio Code IntelliCode extension installed |
|
||||
| PostgreSQL extension | Implemented support for exporting query results on Apple M1 from a notebook |
|
||||
| SQL Database Projects extension | Added Accessibility Fixes related to screen reader, label names, and improved focus when navigating |
|
||||
|
||||
For a full list of bug fixes addressed for the April 2023 release, visit the [April 2023 Release on GitHub](https://github.com/microsoft/azuredatastudio/milestone/99?closed=1).
|
||||
|
||||
## Version 1.42.0
|
||||
* Release number: 1.42.0
|
||||
* Release date: March 22, 2023
|
||||
|
||||
### What's new in 1.42.0
|
||||
|
||||
| New Item | Details |
|
||||
| --- | --- |
|
||||
| ARM64 Support for macOS | Implemented native arm64 SqlToolsService support for arm64 Windows and macOS. |
|
||||
| Connection | Changed the icon under Linked Accounts when adding a new Azure account. |
|
||||
| Connection | Introduced support for the Command Timeout connection property. |
|
||||
| Connection | Added support for all three connection encryption options: Strict, Mandatory, and Optional. |
|
||||
| Connection | Introduced HostNameInCertificate connection property under Security on the Advanced tab, for server with a certificate configured. |
|
||||
| Connection | Added new advanced option in the Connection dialog to support Secure Enclaves. |
|
||||
| Connection | Introduced a new setting, Mssql Enable Sql Authentication Provider to allow connections to be maintained without the concern of losing access token lifetime or getting dropped by server. Access tokens will be refreshed internally by the SqlClient driver whenever they are found to be expired. This option is disabled by default. |
|
||||
| Connection | Added support for connections to Microsoft Dataverse using the TDS endpoint. |
|
||||
| Connection | Introduced additional error reporting for Azure connections. |
|
||||
| Connection | Introduced support for change password. |
|
||||
| Connection | Added support for encryption options for Arc SQL Managed Instance when server certificates are not installed. |
|
||||
| Deployment | Moved the New Deployment option from the Connections breadcrumb to the File Menu. |
|
||||
| Object Explorer | Introduced ability to group objects in Object Explorer by database schema. This applies to all MSSQL connections when enabled or disabled. |
|
||||
| Object Explorer | Introduced a new option to allow a custom timeout to be configured for Object Explorer. Within Settings, enable Mssql > Object Explorer: Expand Timeout. |
|
||||
| Query Results | Added option to disable special handling for JSON strings. |
|
||||
|
||||
### Bug fixes in 1.42.0
|
||||
|
||||
| New Item | Details |
|
||||
| --- | --- |
|
||||
| Accessibility | Updated server group color display to improve visibility and contrast. |
|
||||
| Backup | Addressed inability to select "Backup Set" checkbox. |
|
||||
| Connection | Removed refresh action for connections which are disconnected. |
|
||||
| Connection | Fixed issue with MSAL not properly set for connections. |
|
||||
| Connection | Added ability to delete a server group if no connections exist for it. |
|
||||
| Connection | Added connection display name to the Delete Connection dialog. |
|
||||
| Connection | Azure connections with "Do not save" for the server group are no longer added to the default server group list. |
|
||||
| Connection | Improved error handling in the connection dialog. |
|
||||
| Connection | Fixed issue where saved passwords were not retained for Azure SQL connections. |
|
||||
| Connection | Improved method to retrieve database access when connecting to Azure SQL. |
|
||||
| Connection | Improved connection experience for cloud users. |
|
||||
| Connection | Improved account and tenant selection when connecting to Azure SQL in the connection dialog. |
|
||||
| Deployment | Improved narration for deployment wizard. |
|
||||
| Installation | Updated default install location for the Windows on ARM installer. |
|
||||
| MySQL Extension | Addressed issue where dialog boxes in the MySQL connection pane were not editable. |
|
||||
| Notebooks | Fixed issue with updating the relative path in a Notebook cell. |
|
||||
| Notebooks | Fixed issue that caused internal notebook links to break when editing characters in the page. |
|
||||
| Notebooks | Addressed error thrown when opening a Notebook via a link. |
|
||||
| Object Explorer | Fixed issue with Object Explorer node not expanding. |
|
||||
| Query Editor | Fixed database dropdown list for contained users to display correctly. |
|
||||
| Query Editor | Addressed issue where database dropdown list was not ordered the same as in Object Explorer. |
|
||||
| Query Editor | Added the ability to properly escape special characters when they exist in object names. |
|
||||
| Query Editor | Fixed issue which caused query timer to continue to run even though execution was complete. |
|
||||
| Query Plan Viewer | Addressed an issue where a query plan would not render when opened via a URL. |
|
||||
| Query Results | Improved precision formatting for datetimeoffset data type. |
|
||||
|
||||
For a full list of bug fixes addressed for the March 2023 release, visit the [bugs and issues list on GitHub](https://github.com/microsoft/azuredatastudio/milestone/95?closed=1).
|
||||
|
||||
#### Known issues
|
||||
|
||||
For a list of the current known issues, visit the [issues list on GitHub](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue).
|
||||
|
||||
## Version 1.41.2
|
||||
* Release date: February 10, 2023
|
||||
* Release status: General Availability
|
||||
|
||||
### Bug fixes in 1.41.2
|
||||
|
||||
| New Item | Details |
|
||||
| --- | --- |
|
||||
| Connection | Fixed a regression blocking connections to sovereign Azure clouds |
|
||||
| Query Editor | Fixed a regression causing the Output window to display on each query execution |
|
||||
|
||||
## Version 1.41.1
|
||||
* Release date: January 30, 2023
|
||||
* Release status: General Availability
|
||||
|
||||
### Bug fixes in 1.41.1
|
||||
|
||||
| New Item | Details |
|
||||
| --- | --- |
|
||||
| Connection | Fixed a bug causing incorrect Azure account tenant selection when connecting to server through Azure view |
|
||||
| Object Explorer | Fixed a regression causing Object Explorer to not show database objects for Azure SQL DB Basic SLO |
|
||||
|
||||
## Version 1.41.0
|
||||
* Release date: January 25, 2023
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in 1.41.0
|
||||
|
||||
| New Item | Details |
|
||||
| --- | --- |
|
||||
| Azure Subscriptions | Introduced Azure Synapse Analytics and Dedicated SQL Pools nodes. |
|
||||
| Azure SQL Migration Extension | Premium series memory optimized SQL MI SKUs included in recommendations. |
|
||||
| Connection | Migrated Azure authentication library from ADAL to MSAL. MSAL is the library used by default starting with release 1.41. However, if you encounter issues, you can change back to ADAL within **Settings > Azure: Authentication Library**. |
|
||||
| Connection | Added ability to provide a description when creating a firewall rule from Azure Data Studio. |
|
||||
| Connection | Include ability to change password for new or expired login. |
|
||||
| Connection | Add support for SQL Server Alias use when connecting to a server. |
|
||||
| MongoDB Atlas Extension | Provides the ability to connect to and query data on MongoDB Atlas (Preview). |
|
||||
| Notebooks | Provide option for users to convert markdown to a table or not when HMTL table tag is present. |
|
||||
| Object Explorer | Databases are no longer brought online in serverless Azure SQL when Databases node is expanded. |
|
||||
| Object Explorer | Added support for Ledger views. |
|
||||
| Object Explorer | Fixed issue with key binding for objectExplorer.manage not working. |
|
||||
| Query Editor | Fixes and updates to SQL grammar (colorization and auto-complete). |
|
||||
| Query Plan Viewer | Changed default folder to be user’s home directory when saving a query plan. |
|
||||
| Query Results | Added ability to only copy Column Headers, and only for cells that are highlighted. |
|
||||
| Query Results | Added option to show or hide the action bar in the results window. |
|
||||
| Query Results | Increased height of horizontal scrollbar in results window. |
|
||||
| Query Results | Added new aggregate details in the results toolbar when selecting multiple cells. |
|
||||
| SQL Projects Extension | Provide the ability select an existing project via a new dropdown. |
|
||||
|
||||
### Bug fixes in 1.41.0
|
||||
|
||||
| New Item | Details |
|
||||
| --- | --- |
|
||||
| Accessibility | Accessibility improvements were made in the Query Plan Viewer, Query History Extension and Migration Extension. |
|
||||
| Big Data Cluster | Fix missing connect icon in BDC view header bar. |
|
||||
| Big Data Cluster | Fixed issue preventing HDFS nodes for BDC servers in Object Explorer from expanding. |
|
||||
| Connection | Added ability to delete a connection that has expired AAD credentials. |
|
||||
| Connection | Improved experience when Azure Active Directory token expiration occurs. |
|
||||
| Connection | Improved connection experience when using multiple Azure tenants. |
|
||||
| Connection | Addressed problem with adding a firewall exception for a non-default Azure subscription. |
|
||||
| Migration Extension | Added support for non-public clouds for migration scenarios. |
|
||||
| MySQL Extension | Updated resource endpoints to support AAD logins in the MySQL extension. |
|
||||
| Notebooks | Improve Intellisense refresh in Notebook cells. |
|
||||
| Notebooks | Address issue with "New Notebook Job" resulting in an empty form. |
|
||||
| Object Explorer | Fixed issue with database list not loading. |
|
||||
| Query Execution | Fixed error generated when executing a query with LEFT JOIN and NULL values. |
|
||||
| Query Plan Viewer | When saving query plans (.sqlplan file), the filename will numerically increment to prevent duplicate filenames. |
|
||||
| Query Results | Fixed issue where users were unable to open JSON data as a new file. |
|
||||
| Query Results | Provide proper cell selection and navigation in the query results grid. |
|
||||
| Query Results | Improved the handling of line breaks when copying cell contents. |
|
||||
| Query Results | Addressed issue where a column would re-size incorrectly when auto-sizing in the results output. |
|
||||
| Query Results | Improved JSON cell handling from query results. |
|
||||
| Query Results | Fixed behavior where focus was incorrectly set on a cell using keyboard navigation. |
|
||||
| Resource Deployment | Remove 'Preview' flag for SQL Server 2022 deployment types. |
|
||||
| Schema Compare Extension | Fixed problem where differences in schema compare were not being highlighted. |
|
||||
| Schema Compare Extension | Permissions are now included in schema compare when the "Include Permissions" option is selected. |
|
||||
| SQL Projects Extension | Changes to db_datawriter or db_datareader roles are now supported. |
|
||||
| SQL Projects Extension | Updated Database Projects Net Core SDK Location dialog to be more descriptive. |
|
||||
| Table Designer | Updated Table Designer to disable transaction support for Azure Synapse databases. |
|
||||
| Table Designer | Addressed problem of the table name not refreshing after being updated prior to publishing. |
|
||||
| Table Designer | Fixed issue where table designer could not be opened for existing Ledger tables. |
|
||||
|
||||
## Version 1.40.2
|
||||
* Release date: December 27, 2022
|
||||
* Release status: General Availability
|
||||
|
||||
### Bug fixes in 1.40.2
|
||||
* Fix potential elevation of privilege issue using Bash shell on Windows. VS Code issue [#160827](https://github.com/microsoft/vscode/issues/160827)
|
||||
|
||||
## Version 1.40.1
|
||||
* Release date: November 22, 2022
|
||||
* Release status: General Availability
|
||||
|
||||
### Bug fixes in 1.40.1
|
||||
* Fixed bug that caused folders in the servers tree to display incorrect contents [#21245](https://github.com/microsoft/azuredatastudio/issues/21245)
|
||||
|
||||
## Version 1.40.0
|
||||
* Release date: November 16, 2022
|
||||
* Release status: General Availability
|
||||
### What's new in 1.40.0
|
||||
| New Item | Details |
|
||||
|----------|---------|
|
||||
| Connections | Connections for SQL now default to Encrypt = 'True'. |
|
||||
| ARM64 Support for macOS | ARM64 build for macOS is now available. |
|
||||
| Table Designer | Announcing the General Availability of the Table Designer. |
|
||||
| Table Designer | Period columns now added by default when System-Versioning table option is selected. |
|
||||
| Table Designer | Added support for hash indexes for In-Memory tables, and added support for columnstore indexes. |
|
||||
| Table Designer | New checkbox added, "Preview Database Updates", when making database changes to ensure that users are aware of potential risks prior to updating the database.|
|
||||
| Table Designer | "Move Up" and "Move Down" buttons added to support column reordering for Primary Keys. |
|
||||
| Query Plan Viewer | Announcing the General Availability of the Query Plan Viewer in Azure Data Studio. |
|
||||
| Query Plan Viewer | Added support for identification of most expensive operator(s) in a plan. |
|
||||
| Query Plan Viewer | Updates were made to the properties window to allow for full display of text upon hovering over a cell. Full text can also be copied. |
|
||||
| Query Plan Viewer | Implemented filter functionality in the Properties pane for an execution plan. |
|
||||
| Query Plan Viewer | Added support for collapsing and expanding all subcategories within the Plan Comparison Properties window. |
|
||||
| Query History Extension | Announcing the General Availability of the SQL History Extension. |
|
||||
| Query History Extension | Now includes ability to persist history across multiple user sessions. |
|
||||
| Query History Extension | Added the ability to limit the number of entries stored in the history. |
|
||||
| Schema Compare | Users can now open .scmp files directly from the context menu for existing files in the file explorer. |
|
||||
| Query Editor | Now allows full display for text strings larger than 65,535 characters. |
|
||||
| Query Editor | Added support for the SHIFT key when making multiple cell selections. |
|
||||
| MySQL Extension | Support for MySQL extension is now available in preview. |
|
||||
| Azure SQL Migration Extension | Azure SQL Database Offline Migrations is now available in preview. Customers can use this new capability to save and share reports as needed. |
|
||||
| Azure SQL Migration Extension | Addition of elastic Azure recommendations model. |
|
||||
| Database Migration Assessment for Oracle | Assessment tooling for Oracle database migrations to Azure Database for PostgreSQL and Azure SQL available in preview. |
|
||||
| VS Code merge | VS Code merges to version 1.67. Read [their release notes](https://code.visualstudio.com/updates/v1_67) to learn more. |
|
||||
| SQL Database Projects | Adds SQL projects support for syntax introduced in SQL Server 2022.|
|
||||
|
||||
### Bug fixes in 1.40.0
|
||||
| New Item | Details |
|
||||
|----------|---------|
|
||||
| Connections | Fixed bug that occurred when trying to connect to the Dedicated Admin Connection (DAC) on SQL Server. |
|
||||
| Connections | Fixed issue with wrong tenant showing up while trying to connect to a database with Azure Active Directory login. |
|
||||
| Connections | Fixed zoom reset behavior when adding a new connection. |
|
||||
| Connections | Fixed loading bug what occurred when attempting to sign in to Azure via proxy. |
|
||||
| Connections | Fixed issue encountered while attempting to connect to a "sleeping" Azure SQL Database. |
|
||||
| Object Explorer | Fixed the SELECT script generation issue for Synapse Databases. |
|
||||
| Schema Compare | Fixed error that caused duplication of comment headers when applying schema changes on stored procedure objects. |
|
||||
| Schema Compare | Fixed issue that prevented schema compare issues when creating a new empty schema with a "DOMAIN\User" pattern. |
|
||||
| Query Editor | Fixed bug that caused results to be lost upon saving query files. |
|
||||
| Table Designer | Fixed a bug that caused creation of a new table when renaming an existing table. |
|
||||
| Query Plan Viewer | Fixed missing index recommendation T-SQL syntax. |
|
||||
| SQL Projects | Fixed bug in SQL Projects that led to extension not using output path when publishing a project. |
|
||||
| SQL Projects | Fixed bug that caused .NET install to not be found when using the SQL Projects extension on Linux platforms. |
|
||||
|
||||
## Version 1.39.1
|
||||
* Release date: August 30, 2022
|
||||
* Release status: General Availability
|
||||
|
||||
### Bug fixes in 1.39.1
|
||||
* Fixed bug that caused Database Trees in server connections to not expand in the Object Explorer.
|
||||
|
||||
## Version 1.39.0
|
||||
* Release date: August 24, 2022
|
||||
* Release status: General Availability
|
||||
### What's new in 1.39.0
|
||||
* New Features:
|
||||
* Deployment Wizard - Azure Data Studio now supports SQL Server 2022 (Preview) in the Deployment Wizard for both local and container installation.
|
||||
* Object Explorer - Added Ledger icons and scripting support to Object Explorer for Ledger objects.
|
||||
* Dashboard - Added hexadecimal values to support color detection.
|
||||
* Query Plan Viewer - Added the ability to copy text from cells in the Properties Pane of a query plan.
|
||||
* Query Plan Viewer - Introduced a "find node" option in plan comparison to search for nodes in either the original or added plan.
|
||||
* Table Designer - Now supports the ability to add included columns to a nonclustered index, and the ability to create filtered indexes.
|
||||
* SQL Projects - Publish options were added to the Publish Dialog.
|
||||
* Query History Extension - Added double-click support for query history to either open the query or immediately execute it, based on user configuration.
|
||||
|
||||
* Bug Fixes:
|
||||
* Dashboard - Fixed an accessibility issue that prevented users from being able to access tooltip information using the keyboard.
|
||||
* Voiceover - Fixed a bug that caused voiceover errors across the Dashboard, SQL Projects, SQL Import Wizard, and SQL Migration extensions.
|
||||
* Schema Compare - Fixed a bug that caused the UI to jump back to the top of the options list after selecting/deselecting any option.
|
||||
* Schema Compare - Fixed a bug involving Schema Compare (.SCMP) file incompatibility with Database Project information causing errors when reading and using information stored in this file type.
|
||||
* Object Explorer - Fixed a bug that caused menu items in Object Explorer not to show up for non-English languages.
|
||||
* Table Designer - Fixed a bug that caused the History Table name not to be consistent with the current table name when working with System-Versioned Tables.
|
||||
* Table Designer - Fixed a bug in the Primary Key settings that caused the "Allow Nulls" option to be checked, but disabled, preventing users from changing this option.
|
||||
* Query Editor - Fixed a bug that prevented the SQLCMD in T-SQL from working correctly, giving false errors when running scripts in Azure Data Studio.
|
||||
* Query Editor - Fixed a bug that caused user-specified zoom settings to reset to default when selecting JSON values after query that returned JSON dataset was ran.
|
||||
* SQL Projects - Fixed a bug that caused the "Generate Script" command to not work correctly when targeting a new Azure SQL Database.
|
||||
* Notebooks - Fixed a bug that caused pasted images to disappear from editor after going out of edit mode.
|
||||
* Notebooks - Fixed a bug that caused a console error message to appear after opening a markdown file.
|
||||
* Notebooks - Fixed a bug that prevented markdown cell toolbar shortcuts from working after creating a new split view cell.
|
||||
* Notebooks - Fixed a bug that caused text cells to be erroneously created in split view mode when the notebook default text edit mode was set to "Markdown".
|
||||
|
||||
## Version 1.38.0
|
||||
* Release date: July 27, 2022
|
||||
* Release status: General Availability
|
||||
### What's new in 1.38.0
|
||||
* New Features:
|
||||
* VS Code merges to 1.62 - This release includes updates to VS Code from the three previous VS Code releases. Read [their release notes](https://code.visualstudio.com/updates/v1_62) to learn more.
|
||||
* Table Designer - New column added to Table Designer for easier access to additional actions specific to individual rows.
|
||||
* Query Plan Viewer - The Top Operations pane view now includes clickable links to operations in each of its rows to show the runtime statistics which can be used to evaluate estimated and actual rows when analyzing a plan.
|
||||
* Query Plan Viewer - Improved UI on selected operation node in the Execution Plan.
|
||||
* Query Plan Viewer - The keyboard command **CTRL + M** no longer executes queries. It now just enables or disables the actual execution plan creation when a query is executed.
|
||||
* Query Plan Viewer - Plan labels are now updated in the Properties window when plans are compared and the orientation is toggled from horizontal to vertical, and back.
|
||||
* Query Plan Viewer - Updates were made to the Command Palette. All execution plan commands are prefixed with "Execution Plan", so that they are easier to find and use.
|
||||
* Query Plan Viewer - A collapse/expand functionality is now available at the operator level to allow users to hide or display sections of the plan during analysis.
|
||||
* Query History - The Query History extension was refactored to be fully implemented in an extension. This makes the history view behave like all other extension views and also allows for searching and filtering in the view by selecting the view and typing in your search text.
|
||||
|
||||
* Bug Fixes:
|
||||
* Table Designer - Error found in edit data tab when switching back to previously selected column when adding a new row. To fix this, editing the table is now disabled while new rows are being added and only reenabled afterwards.
|
||||
* Query Editor - Fixed coloring issues for new T-SQL functions in the Query Editor.
|
||||
* Query Plan Viewer - Fixed bug that caused custom zoom level spinner to allow values outside valid range.
|
||||
* Dashboard - Fixed issue that caused incorrect displaying of insight widgets on the dashboard.
|
||||
* Notebooks - Fixed issue where keyboard shortcuts and toolbar buttons were not working when first creating a Split View markdown cell.
|
||||
* Notebooks - Fixed issue where cell languages were not being set correctly when opening an ADS .NET Interactive notebook in VS Code.
|
||||
* Notebooks - Fixed issue where notebook was being opened as empty when exporting a SQL query as a notebook.
|
||||
* Notebooks - Disables install and uninstall buttons in Manage Packages dialog while a package is being installed or uninstalled.
|
||||
* Notebooks - Fixed issue where cell toolbar buttons were not refreshing when converting cell type.
|
||||
* Notebooks - Fixed issue where notebook was not opening if a cell contains an unsupported output type.
|
||||
* Schema Compare - Fixed issue where views and stored procedures were not correctly recognized by schema compare after applying changes.
|
||||
|
||||
## Version 1.37.0
|
||||
* Release date: June 15, 2022
|
||||
* Release status: General Availability
|
||||
### What's new in this version
|
||||
* New Features:
|
||||
* Backup & Restore - Backup & Restore to URL is now available in preview for Azure SQL Managed Instances.
|
||||
* Table Designer - Added API to support computed column capabilities on Table Designer.
|
||||
* Table Designer - Can now specify where to add new columns and columns can now be re-arranged by mouse dragging.
|
||||
* Table Designer - Table Designer is now supported by SQL Projects to add or modify database schema without need to be connected to a server instance.
|
||||
* Query Plan Viewer - Smart plan comparison is now available. Can now compare execution plans and view detailed differences between plans in the Properties Table.
|
||||
* Query Plan Viewer - Added toggle button to switch between estimated and actual execution plans.
|
||||
* Query Plan Viewer - Query Plan now comes with improved precision to operator costs for larger plans.
|
||||
* MongoDB Extension for Azure Cosmos DB (Preview) - This extension introduces support for access to Mongo resources for Cosmos DB.
|
||||
|
||||
* Bug Fixes:
|
||||
* Table Designer - Fixed issue that caused app to not prompt user to save before closing.
|
||||
* Table Designer - Fixed issue that returned empty data set upon attempting to edit the first cell of a new row.
|
||||
* Table Designer - Improved resize to fit experience when zooming in on user interface as well as tab behavior issues.
|
||||
* Query Plan Viewer - Fixed bug that caused custom zoom level spinner to allow values outside valid range.
|
||||
* Schema Compare - Fixed issue with indexes not being added correctly when updating project from database.
|
||||
* Notebooks - Fixed inconsistencies with notebook cell behavior and toolbars.
|
||||
* Notebooks - Fixed issues with keyboard navigation.
|
||||
|
||||
## Version 1.36.2
|
||||
* Release date: May 20, 2022
|
||||
* Release status: General Availability
|
||||
### What's new in this version
|
||||
- Fix connectivity issue with PBI data source
|
||||
- Fix query plan zoom and icon issues
|
||||
- Issues fixed in this release https://github.com/microsoft/azuredatastudio/milestone/89?closed=1
|
||||
|
||||
## Version 1.36.1
|
||||
* Release date: April 22, 2022
|
||||
* Release status: General Availability
|
||||
### What's new in this version
|
||||
* April Hotfix addressing these issues https://github.com/microsoft/azuredatastudio/milestone/88?closed=1.
|
||||
* Hotfix RCA - https://github.com/microsoft/azuredatastudio/wiki/ADS-April-2022-Hotfix-RCA
|
||||
|
||||
## Version 1.36.0
|
||||
* Release date: April 20, 2022
|
||||
* Release status: General Availability
|
||||
### What's new in this version
|
||||
- General Availability of the Azure SQL Migration Extension for ADS
|
||||
- Support for .NET Interactive Notebooks Extension
|
||||
- New Table Designer Features including support for System Versioned, Graph and Memory Optomized Tables
|
||||
- Query Plan Viewer Updates includign warning and parallelism icons, the option to disable tooltips and support for opening .sqlplan files
|
||||
- Improvements in SQL Projects and Schema Compare
|
||||
|
||||
## Version 1.35.1
|
||||
* Release date: March 17, 2022
|
||||
* Release status: General Availability
|
||||
### Hotfix release
|
||||
- Fix for [Excel number format #18615](https://github.com/microsoft/azuredatastudio/issues/18615)
|
||||
- Fix for [Geometry Data Type Returned as Unknown Charset in Results Grid #18630](https://github.com/microsoft/azuredatastudio/issues/18630)
|
||||
|
||||
## Version 1.35.0
|
||||
* Release date: February 24, 2022
|
||||
* Release status: General Availability
|
||||
### What's new in this version
|
||||
* New Features:
|
||||
* Table Designer - Added functionality for creation and management of tables for SQL Servers. Built using DacFx framework
|
||||
* Query Plan Viewer - Added functionality for users to view a graphic view of estimated and actual query plans without need for an extension
|
||||
* Azure Arc Extension - Updated the Data Controller deployment wizard and the SQL Managed Instance - Azure Arc deployment wizard to reflect the deployment experience in Azure Portal
|
||||
|
||||
* Bug Fixes:
|
||||
* Azure Arc Extension - SQL Managed Instance-Azure Arc is now fixed for both indirect connectivity mode and direct connectivity mode
|
||||
* Notebooks - Support for keyboard navigation between cells to minimize mouse clicking
|
||||
|
||||
## Version 1.34.0
|
||||
* Release date: December 15, 2021
|
||||
* Release status: General Availability
|
||||
### What's new in this version
|
||||
* New Features:
|
||||
* Added “Currently restoring backup file” in the migration progress details page of Azure SQL Migration extension when backup files location is Azure Storage blob container
|
||||
* Enhancements to diagnostics in Azure SQL Migration extension
|
||||
* Support for project build with .NET 6 in SQL Database Projects extension
|
||||
* Publish to container in SQL Database Projects extension
|
||||
* Undo and redo support for notebook cell-level operations
|
||||
|
||||
* Extension Updates:
|
||||
* Azure SQL Migration
|
||||
* Langpacks
|
||||
* SQL Database Projects
|
||||
|
||||
* Bug Fixes:
|
||||
* Fix for multiple database migrations when using network share as backup files location in Azure SQL Migration extension
|
||||
* Fix for multiple database migrations when using blob storage containers as backup files location in Azure SQL Migration extension
|
||||
* Fix to pre-populate target database names in the migration wizard in Azure SQL Migration extension
|
||||
* Fix to column sorting in grids where the presence of null values could lead to unexpected results
|
||||
* Fix for Python upgrades when two or more notebooks were open
|
||||
|
||||
## Version 1.33.1
|
||||
* Release date: November 4, 2021
|
||||
* Release status: General Availability
|
||||
|
||||
### Hotfix release
|
||||
- Fix for [#17535 Unable to See Saved Connections in Restricted Mode](https://github.com/microsoft/azuredatastudio/issues/17535)
|
||||
- Fix for [#17579 Can't type in Notebook code cell after editing text cell](https://github.com/microsoft/azuredatastudio/issues/17579)
|
||||
|
||||
## Version 1.33.0
|
||||
* Release date: October 27, 2021
|
||||
* Release status: General Availability
|
||||
### What's new in this version
|
||||
* New Notebook Features:
|
||||
* Notebook Views
|
||||
* Split cell support
|
||||
* Keyboard shortcuts for Markdown Toolbar Cells
|
||||
* Ctrl/Cmd + B = Bold Text
|
||||
* Ctrl/Cmd + I = Italicize Text
|
||||
* Ctrl/Cmd + U = Underline Text
|
||||
* Ctrl/Cmd + Shift + K = Add Code Block
|
||||
* Ctrl/Cmd + Shift + H = Highlight Text
|
||||
* Book improvements
|
||||
* Add a new section
|
||||
* Drag and Drop
|
||||
|
||||
* Extension Updates:
|
||||
* Import
|
||||
* Langpacks
|
||||
* Schema Compare
|
||||
* SQL Database Projects
|
||||
|
||||
* Bug Fixes
|
||||
* Notebook linking improvements
|
||||
* Horizontal Scrollbar improvement (when word wrap is off in MD Splitview / MD mode) in Notebooks
|
||||
* Vertical Scrollbar improvement for MD Splitview in Notebooks
|
||||
|
||||
## Version 1.32.0
|
||||
* Release date: August 18, 2021
|
||||
* Release status: General Availability
|
||||
* Extension Updates:
|
||||
* Arc/Az CLI extensions - Azure Arc extension now uses Azure CLI instead of Azure Data CLI for deploying and interacting with Azure Arc
|
||||
instances
|
||||
* Langpacks
|
||||
* SQL Database Projects
|
||||
* Azure Monitor
|
||||
@@ -443,7 +14,7 @@ For a list of the current known issues, visit the [issues list on GitHub](https:
|
||||
## Version 1.31.1
|
||||
* Release date: July 29, 2021
|
||||
* Release status: General Availability
|
||||
### Hotfix Release
|
||||
## Hotfix Release
|
||||
- Fix for [#16436 Database Connection Toolbar Missing](https://github.com/microsoft/azuredatastudio/issues/16436)
|
||||
|
||||
## Version 1.31.0
|
||||
@@ -734,7 +305,7 @@ For a list of the current known issues, visit the [issues list on GitHub](https:
|
||||
* GA status for Big Data Cluster/SQL 2019 features [#8269](https://github.com/microsoft/azuredatastudio/issues/8269)
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/44?closed=1).
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
## Version 1.13.1
|
||||
@@ -748,7 +319,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* General Availability release for Schema Compare and DACPAC extensions
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/43?closed=1).
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
* aspnerd for `Use selected DB for import wizard schema list` [#7878](https://github.com/microsoft/azuredatastudio/pull/7878)
|
||||
|
||||
@@ -766,7 +337,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: October 2, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Announcing the Query History panel
|
||||
* Improved Query Results Grid copy selection support
|
||||
* TempDB page added to Server Reports extension
|
||||
@@ -777,14 +348,14 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: September 10, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/41?closed=1).
|
||||
|
||||
## Version 1.10.0
|
||||
* Release date: August 14, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* [SandDance](https://github.com/microsoft/SandDance) integration — A new way to interact with data. Download the extension [here](https://docs.microsoft.com/sql/azure-data-studio/sanddance-extension)
|
||||
* Notebook improvements
|
||||
* Better loading performance
|
||||
@@ -800,7 +371,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: July 11, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Release of [SentryOne Plan Explorer Extension](https://www.sentryone.com/products/sentryone-plan-explorer-extension-azure-data-studio)
|
||||
* **Schema Compare**
|
||||
* Schema Compare File Support (.SCMP)
|
||||
@@ -826,7 +397,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: June 6, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Initial release of the Database Admin Tool Extensions for Windows *Preview* extension
|
||||
* Initial release of the Central Management Servers extension
|
||||
* **Schema Compare**
|
||||
@@ -847,24 +418,24 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: May 8, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Announcing Schema Compare *Preview* extension
|
||||
* Tasks Panel UX improvement
|
||||
* Announcing new Welcome page
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/31?closed=1).
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues.
|
||||
|
||||
## Version 1.6.0
|
||||
* Release date: April 18, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Align with latest VS Code editor platform (currently 1.33.1)
|
||||
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/26?closed=1).
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* yamatoya for `fix the format (#4899)`
|
||||
@@ -873,13 +444,13 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: March 18, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Announcing T-SQL Notebooks
|
||||
* Announcing PostgreSQL extension
|
||||
* Announcing SQL Server Dacpac extension
|
||||
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/25?closed=1).
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* GeoffYoung for `Fix sqlDropColumn description #4422`
|
||||
@@ -888,7 +459,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: February 13, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Added **Admin pack for SQL Server** extension pack to make it easier to install SQL Server admin-related extensions. This includes:
|
||||
* [SQL Server Agent](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-agent-extension?view=sql-server-2017)
|
||||
* [SQL Server Profiler](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-profiler-extension?view=sql-server-2017)
|
||||
@@ -902,7 +473,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Results streaming enabled by default for long running queries
|
||||
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/23?closed=1).
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
|
||||
@@ -913,7 +484,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: January 9, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* #13 Feature Request: Azure Active Directory Authentication
|
||||
* #1040 Stream initial query results as they become available
|
||||
* #3298 Can't add an azure account.
|
||||
@@ -923,7 +494,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Updates to [SQL Server 2019 extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
|
||||
* **sp_executesql to SQL** and **New Database** extensions
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
|
||||
@@ -935,7 +506,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: November 6, 2018
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Update to the SQL Server 2019 Preview extension
|
||||
* Introducing Paste the Plan extension
|
||||
* Introducing High Color queries extension, including SSMS editor theme
|
||||
@@ -944,7 +515,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Upgrade SQL Tools Service to .Net Core 2.2 Preview 3 (for eventual AAD support)
|
||||
* Fix customer reported GitHub issues
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* rdaniels6813 for `Add query plan theme support #3031`
|
||||
@@ -958,12 +529,12 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: October 18, 2018
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Introducing the Azure Resource Explorer to browse Azure SQL Databases
|
||||
* Improve Object Explorer and Query Editor connectivity robustness
|
||||
* SQL Server 2019 and SQL Agent extension improvements
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* philoushka for `center the icon #2760`
|
||||
@@ -975,7 +546,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: September 24, 2018
|
||||
* Release status: General Availability
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Announcing the SQL Server 2019 Preview extension.
|
||||
* Support for SQL Server 2019 preview features including Big Data Cluster support.
|
||||
* Azure Data Studio Notebooks
|
||||
@@ -986,7 +557,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Accessibility improvements for screen reader, keyboard navigation and high-contrast.
|
||||
* Added Connection name option to provide an alternative display name in the Servers viewlet.
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* AlexFsmn `Feature: Ability to add connection name #2332`
|
||||
@@ -996,7 +567,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: August 30, 2018
|
||||
* Release status: Public Preview
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Announcing the SQL Server Import Extension
|
||||
* SQL Server Profiler Session management
|
||||
* SQL Server Agent improvements
|
||||
@@ -1004,7 +575,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Quality of Life improvements: Connection strings
|
||||
* Fix many customer reported GitHub issues
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* SebastianPfliegel `Added more saveAsCsv options #2099`
|
||||
@@ -1022,7 +593,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: July 19, 2018
|
||||
* Release status: Public Preview
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* SQL Server Agent for Azure Data Studio extension improvements
|
||||
* Added view of Alerts, Operators, and Proxies and icons on left pane
|
||||
* Added dialogs for New Job, New Job Step, New Alert, and New Operator
|
||||
@@ -1043,7 +614,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: June 20, 2018
|
||||
* Release status: Public Preview
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* **SQL Server Profiler for Azure Data Studio *Preview*** extension initial release
|
||||
* The new **SQL Data Warehouse** extension includes rich customizable dashboard widgets surfacing insights to your data warehouse. This unlocks key scenarios around managing and tuning your data warehouse to ensure it is optimized for consistent performance.
|
||||
* **Edit Data "Filtering and Sorting"** support
|
||||
@@ -1056,7 +627,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: May 7, 2018
|
||||
* Release status: Public Preview
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
The May release is focused on stabilization and bug fixes leading up to the Build conference. This build contains the following highlights.
|
||||
|
||||
* Announcing **Redgate SQL Search** extension available in Extension Manager
|
||||
@@ -1071,7 +642,7 @@ The May release is focused on stabilization and bug fixes leading up to the Buil
|
||||
* Release date: April 25, 2018
|
||||
* Release status: Public Preview
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
The April Public Preview release contains some of the following highlights.
|
||||
|
||||
* Improvements to SQL Agent *Preview* extension
|
||||
@@ -1086,7 +657,7 @@ The April Public Preview release contains some of the following highlights.
|
||||
* Release date: March 28, 2018
|
||||
* Release status: Public Preview
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
The March Public Preview release enables some key aspects of the Azure Data Studio
|
||||
extensibility story. Here are some highlights in this release.
|
||||
|
||||
@@ -1101,14 +672,14 @@ extensibility story. Here are some highlights in this release.
|
||||
* Release date: February 16, 2018
|
||||
* Release status: Public Preview Hotfix 1
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Bug fix for `#717 Selecting partial query and hitting Cmd or Ctrl+C opens terminal with Error message`
|
||||
|
||||
## Version 0.26.6
|
||||
* Release date: February 15, 2018
|
||||
* Release status: Public Preview
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
The February release fixes several important customer-reported issues and includes various feature improvements. We've also introduced auto-update support in February, which will simplify keeping up to date with the latest changes.
|
||||
|
||||
Here's some of the highlights in the February release.
|
||||
@@ -1128,7 +699,7 @@ Here's some of the highlights in the February release.
|
||||
* VS Code Editor 1.19 integration
|
||||
* Update JustinPealing/html-query-plan component to pick-up several Query Plan viewer improvements
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* SebastianPfliegel for `Add cursor snippet (#475)`
|
||||
@@ -1139,7 +710,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: January 17, 2018
|
||||
* Release status: Public Preview
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
The January release focuses on addressing a few of the top upvoted feature suggestions, as well as fixing high-priority bugs. This release period coincides with holiday vacations, so the churn in this release is
|
||||
relatively scoped.
|
||||
|
||||
@@ -1154,7 +725,7 @@ Here's some of the highlights in the January release.
|
||||
* Fix missing Azure Account branding icon
|
||||
* Change "Server name" to "Server" in Connection Dialog
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* alextercete for `Fix "No extension gallery service configured" error (#427)`
|
||||
@@ -1164,7 +735,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Release date: December 19, 2017
|
||||
* Release status: Public Preview
|
||||
|
||||
### What's new in this version
|
||||
## What's new in this version
|
||||
* Azure Integration with Create Firewall Rule
|
||||
* Windows Setup, Linux DEB and Linux RPM installation packages
|
||||
* Manage Dashboard visual layout editor
|
||||
@@ -1188,7 +759,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
* Allow expanding databases not in certain non-Online states
|
||||
* Connection Dialog selects most common default authentication method based on platform
|
||||
|
||||
### Contributions and "thank you"
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
* mwiedemeyer for `Fix #58: Default sort order for DB size widget (#111)`
|
||||
* AlexTroshkin for `Show disconnect in context menu only when connectionProfile connected (#150)`
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
# Data Collection
|
||||
|
||||
The software may collect information about you and your use of the software and send it to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may turn off the telemetry as described in the repository. There are also some features in the software that may enable you and Microsoft to collect data from users of your applications. If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with a copy of Microsoft's privacy statement. Our privacy statement is located at <https://go.microsoft.com/fwlink/?LinkID=824704>. You can learn more about data collection and use in the help documentation and our privacy statement. Your use of the software operates as your consent to these practices.
|
||||
README.md
@@ -1,66 +1,36 @@
|
||||
# Azure Data Studio
|
||||
|
||||
[](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=453&branchName=main)
|
||||
[](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=main)
|
||||
[](https://twitter.com/azuredatastudio)
|
||||
|
||||
Azure Data Studio is a data management and development tool with connectivity to popular cloud and on-premises databases. Azure Data Studio supports Windows, macOS, and Linux, with immediate capability to connect to Azure SQL and SQL Server. Browse the [extension library](wiki/List-of-Extensions) for additional database support options including MySQL, PostgreSQL, and MongoDB.
|
||||
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
|
||||
|
||||
## **Download the latest Azure Data Studio release**
|
||||
|
||||
|Platform |Type |Download |
|
||||
| --------|-----------------|----------------------- |
|
||||
|Windows |User Installer |[64 bit][win-user] [ARM][win-user-arm64] |
|
||||
| |System Installer |[64 bit][win-system] [ARM][win-system-arm64] |
|
||||
| |.zip |[64 bit][win-zip] [ARM][win-zip-arm64] |
|
||||
|Linux |.tar.gz |[64 bit][linux-zip] |
|
||||
| |.deb |[64 bit][linux-deb] |
|
||||
| |.rpm |[64 bit][linux-rpm] |
|
||||
|Mac |.zip |[Universal][osx-universal] [Intel Chip][osx-zip] [Apple Silicon][osx-arm64] |
|
||||
| Platform |
|
||||
| --------------------------------------- |
|
||||
| [Windows User Installer][win-user] |
|
||||
| [Windows System Installer][win-system] |
|
||||
| [Windows ZIP][win-zip] |
|
||||
| [macOS ZIP][osx-zip] |
|
||||
| [Linux TAR.GZ][linux-zip] |
|
||||
| [Linux RPM][linux-rpm] |
|
||||
| [Linux DEB][linux-deb] |
|
||||
|
||||
[win-user]: https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/stable
|
||||
[win-system]: https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/stable
|
||||
[win-zip]: https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/stable
|
||||
[win-user-arm64]: https://azuredatastudio-update.azurewebsites.net/latest/win32-arm64-user/stable
|
||||
[win-system-arm64]: https://azuredatastudio-update.azurewebsites.net/latest/win32-arm64/stable
|
||||
[win-zip-arm64]: https://azuredatastudio-update.azurewebsites.net/latest/win32-arm64-archive/stable
|
||||
[linux-zip]: https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/stable
|
||||
[linux-deb]: https://azuredatastudio-update.azurewebsites.net/latest/linux-deb-x64/stable
|
||||
[linux-rpm]: https://azuredatastudio-update.azurewebsites.net/latest/linux-rpm-x64/stable
|
||||
[osx-universal]: https://azuredatastudio-update.azurewebsites.net/latest/darwin-universal/stable
|
||||
[osx-zip]: https://azuredatastudio-update.azurewebsites.net/latest/darwin/stable
|
||||
[osx-arm64]: https://azuredatastudio-update.azurewebsites.net/latest/darwin-arm64/stable
|
||||
|
||||
Go to our [download page](https://aka.ms/getazuredatastudio) for more specific instructions.
|
||||
|
||||
## Try out the latest insiders build from `main` branch:
|
||||
## Try out the latest insiders build from `main`:
|
||||
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
|
||||
- [Windows System Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider)
|
||||
- [Windows ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider)
|
||||
- [macOS ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider)
|
||||
- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)
|
||||
|
||||
|Platform |Type |Download - Insiders Build |
|
||||
| --------|-----------------|----------------------- |
|
||||
|Windows |User Installer |[64 bit][in-win-user] [ARM][in-win-user-arm64] |
|
||||
| |System Installer |[64 bit][in-win-system] [ARM][in-win-system-arm64] |
|
||||
| |.zip |[64 bit][in-win-zip] [ARM][in-win-zip-arm64] |
|
||||
|Linux |.tar.gz |[64 bit][in-linux-zip] |
|
||||
| |.deb |[64 bit][in-linux-deb] |
|
||||
| |.rpm |[64 bit][in-linux-rpm] |
|
||||
|Mac |.zip |[Universal][in-osx-universal] [Intel Chip][in-osx-zip] [Apple Silicon][in-osx-arm64] |
|
||||
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/main/CHANGELOG.md) for additional details of what's in this release.
|
||||
Go to our [download page](https://aka.ms/getazuredatastudio) for more specific instructions.
|
||||
|
||||
[in-win-user]: https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider
|
||||
[in-win-system]: https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider
|
||||
[in-win-zip]: https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider
|
||||
[in-win-user-arm64]: https://azuredatastudio-update.azurewebsites.net/latest/win32-arm64-user/insider
|
||||
[in-win-system-arm64]: https://azuredatastudio-update.azurewebsites.net/latest/win32-arm64/insider
|
||||
[in-win-zip-arm64]: https://azuredatastudio-update.azurewebsites.net/latest/win32-arm64-archive/insider
|
||||
[in-linux-zip]:https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider
|
||||
[in-linux-deb]:https://azuredatastudio-update.azurewebsites.net/latest/linux-deb-x64/insider
|
||||
[in-linux-rpm]:https://azuredatastudio-update.azurewebsites.net/latest/linux-rpm-x64/insider
|
||||
[in-osx-universal]: https://azuredatastudio-update.azurewebsites.net/latest/darwin-universal/insider
|
||||
[in-osx-zip]: https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider
|
||||
[in-osx-arm64]: https://azuredatastudio-update.azurewebsites.net/latest/darwin-arm64/insider
|
||||
|
||||
Please visit our [download page](https://aka.ms/getazuredatastudio) for more specific installation instructions.
|
||||
Check out the [change log](https://github.com/Microsoft/azuredatastudio/blob/main/CHANGELOG.md) or [release notes](https://learn.microsoft.com/sql/azure-data-studio/release-notes-azure-data-studio) for additional details of what's in each release.
|
||||
The [Azure Data Studio documentation](https://learn.microsoft.com/sql/azure-data-studio) includes complete details on getting started, connection quickstarts, and feature tutorials.
|
||||
|
||||
## **Feature Highlights**
|
||||
|
||||
@@ -94,12 +64,8 @@ This project has adopted the [Microsoft Open Source Code of Conduct](https://ope
|
||||
## Localization
|
||||
Azure Data Studio is localized into 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). The language packs are available in the Extension Manager marketplace. Simply search for the specific language in the extension marketplace and install it. Once you install the selected language, Azure Data Studio will prompt you to restart with the new language.
|
||||
|
||||
## Telemetry
|
||||
|
||||
Azure Data Studio collects telemetry data, which is used to help understand how to improve the product. For example, this usage data helps to debug issues, such as slow start-up times, and to prioritize new features. While we appreciate the insights this data provides, we also know that not everyone wants to send usage data and you can disable telemetry as described in the [disable telemetry reporting](https://aka.ms/ads-disable-telemetry) documentation.
|
||||
|
||||
## Privacy Statement
|
||||
The [Microsoft Privacy Statement](https://go.microsoft.com/fwlink/?LinkID=824704) describes the privacy statement of this software.
|
||||
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/privacystatement) describes the privacy statement of this software.
|
||||
|
||||
## Contributions and "Thank You"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
@@ -164,3 +130,11 @@ And of course, we'd like to thank the authors of all upstream dependencies. Ple
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Licensed under the [Source EULA](LICENSE.txt).
|
||||
|
||||
[win-user]: https://go.microsoft.com/fwlink/?linkid=2170400
|
||||
[win-system]: https://go.microsoft.com/fwlink/?linkid=2170401
|
||||
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2170402
|
||||
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2169955
|
||||
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2170045
|
||||
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2170403
|
||||
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2169956
|
||||
|
||||
File diff suppressed because it is too large
@@ -1 +1 @@
|
||||
2022-07-19T07:55:26.168Z
|
||||
2021-08-23T03:52:18.011Z
|
||||
|
||||
build/.gitignore
@@ -1 +0,0 @@
|
||||
.yarnrc
|
||||
@@ -12,14 +12,14 @@ fsevents/src/**
|
||||
fsevents/test/**
|
||||
!fsevents/**/*.node
|
||||
|
||||
@vscode/sqlite3/binding.gyp
|
||||
@vscode/sqlite3/benchmark/**
|
||||
@vscode/sqlite3/cloudformation/**
|
||||
@vscode/sqlite3/deps/**
|
||||
@vscode/sqlite3/test/**
|
||||
@vscode/sqlite3/build/**
|
||||
@vscode/sqlite3/src/**
|
||||
!@vscode/sqlite3/build/Release/*.node
|
||||
vscode-sqlite3/binding.gyp
|
||||
vscode-sqlite3/benchmark/**
|
||||
vscode-sqlite3/cloudformation/**
|
||||
vscode-sqlite3/deps/**
|
||||
vscode-sqlite3/test/**
|
||||
vscode-sqlite3/build/**
|
||||
vscode-sqlite3/src/**
|
||||
!vscode-sqlite3/build/Release/*.node
|
||||
|
||||
windows-mutex/binding.gyp
|
||||
windows-mutex/build/**
|
||||
@@ -110,18 +110,6 @@ nsfw/src/**
|
||||
nsfw/includes/**
|
||||
!nsfw/build/Release/*.node
|
||||
|
||||
vscode-nsfw/binding.gyp
|
||||
vscode-nsfw/build/**
|
||||
vscode-nsfw/src/**
|
||||
vscode-nsfw/includes/**
|
||||
!vscode-nsfw/build/Release/*.node
|
||||
|
||||
@parcel/watcher/binding.gyp
|
||||
@parcel/watcher/build/**
|
||||
@parcel/watcher/prebuilds/**
|
||||
@parcel/watcher/src/**
|
||||
!@parcel/watcher/build/Release/*.node
|
||||
|
||||
vsda/build/**
|
||||
vsda/ci/**
|
||||
vsda/src/**
|
||||
@@ -139,14 +127,6 @@ vscode-encrypt/binding.gyp
|
||||
vscode-encrypt/README.md
|
||||
!vscode-encrypt/build/Release/vscode-encrypt-native.node
|
||||
|
||||
vscode-policy-watcher/build/**
|
||||
vscode-policy-watcher/.husky/**
|
||||
vscode-policy-watcher/src/**
|
||||
vscode-policy-watcher/binding.gyp
|
||||
vscode-policy-watcher/README.md
|
||||
vscode-policy-watcher/index.d.ts
|
||||
!vscode-policy-watcher/build/Release/vscode-policy-watcher.node
|
||||
|
||||
vscode-windows-ca-certs/**/*
|
||||
!vscode-windows-ca-certs/package.json
|
||||
!vscode-windows-ca-certs/**/*.node
|
||||
|
||||
@@ -29,6 +29,3 @@ xterm-addon-unicode11/out/**
|
||||
|
||||
xterm-addon-webgl/src/**
|
||||
xterm-addon-webgl/out/**
|
||||
|
||||
# This makes sure the model is included in the package
|
||||
!@vscode/vscode-languagedetection/model/**
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.OctoKitIssue = exports.OctoKit = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
const github_1 = require("@actions/github");
|
||||
const child_process_1 = require("child_process");
|
||||
@@ -21,6 +20,7 @@ class OctoKit {
|
||||
}
|
||||
async *query(query) {
|
||||
const q = query.q + ` repo:${this.params.owner}/${this.params.repo}`;
|
||||
console.log(`Querying for ${q}:`);
|
||||
const options = this.octokit.search.issuesAndPullRequests.endpoint.merge({
|
||||
...query,
|
||||
q,
|
||||
@@ -41,17 +41,19 @@ class OctoKit {
|
||||
};
|
||||
for await (const pageResponse of this.octokit.paginate.iterator(options)) {
|
||||
await timeout();
|
||||
await (0, utils_1.logRateLimit)(this.token);
|
||||
await utils_1.logRateLimit(this.token);
|
||||
const page = pageResponse.data;
|
||||
console.log(`Page ${++pageNum}: ${page.map(({ number }) => number).join(' ')}`);
|
||||
yield page.map((issue) => new OctoKitIssue(this.token, this.params, this.octokitIssueToIssue(issue)));
|
||||
}
|
||||
}
|
||||
async createIssue(owner, repo, title, body) {
|
||||
(0, core_1.debug)(`Creating issue \`${title}\` on ${owner}/${repo}`);
|
||||
core_1.debug(`Creating issue \`${title}\` on ${owner}/${repo}`);
|
||||
if (!this.options.readonly)
|
||||
await this.octokit.issues.create({ owner, repo, title, body });
|
||||
}
|
||||
octokitIssueToIssue(issue) {
|
||||
var _a, _b, _c, _d, _e, _f;
|
||||
return {
|
||||
author: { name: issue.user.login, isGitHubApp: issue.user.type === 'Bot' },
|
||||
body: issue.body,
|
||||
@@ -62,8 +64,8 @@ class OctoKit {
|
||||
locked: issue.locked,
|
||||
numComments: issue.comments,
|
||||
reactions: issue.reactions,
|
||||
assignee: issue.assignee?.login ?? issue.assignees?.[0]?.login,
|
||||
milestoneId: issue.milestone?.number ?? null,
|
||||
assignee: (_b = (_a = issue.assignee) === null || _a === void 0 ? void 0 : _a.login) !== null && _b !== void 0 ? _b : (_d = (_c = issue.assignees) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d.login,
|
||||
milestoneId: (_f = (_e = issue.milestone) === null || _e === void 0 ? void 0 : _e.number) !== null && _f !== void 0 ? _f : null,
|
||||
createdAt: +new Date(issue.created_at),
|
||||
updatedAt: +new Date(issue.updated_at),
|
||||
closedAt: issue.closed_at ? +new Date(issue.closed_at) : undefined,
|
||||
@@ -71,10 +73,10 @@ class OctoKit {
|
||||
}
|
||||
async hasWriteAccess(user) {
|
||||
if (user.name in this.writeAccessCache) {
|
||||
(0, core_1.debug)('Got permissions from cache for ' + user);
|
||||
core_1.debug('Got permissions from cache for ' + user);
|
||||
return this.writeAccessCache[user.name];
|
||||
}
|
||||
(0, core_1.debug)('Fetching permissions for ' + user);
|
||||
core_1.debug('Fetching permissions for ' + user);
|
||||
const permissions = (await this.octokit.repos.getCollaboratorPermissionLevel({
|
||||
...this.params,
|
||||
username: user.name,
|
||||
@@ -94,14 +96,14 @@ class OctoKit {
|
||||
}
|
||||
}
|
||||
async createLabel(name, color, description) {
|
||||
(0, core_1.debug)('Creating label ' + name);
|
||||
core_1.debug('Creating label ' + name);
|
||||
if (!this.options.readonly)
|
||||
await this.octokit.issues.createLabel({ ...this.params, color, description, name });
|
||||
else
|
||||
this.mockLabels.add(name);
|
||||
}
|
||||
async deleteLabel(name) {
|
||||
(0, core_1.debug)('Deleting label ' + name);
|
||||
core_1.debug('Deleting label ' + name);
|
||||
try {
|
||||
if (!this.options.readonly)
|
||||
await this.octokit.issues.deleteLabel({ ...this.params, name });
|
||||
@@ -114,7 +116,7 @@ class OctoKit {
|
||||
}
|
||||
}
|
||||
async readConfig(path) {
|
||||
(0, core_1.debug)('Reading config at ' + path);
|
||||
core_1.debug('Reading config at ' + path);
|
||||
const repoPath = `.github/${path}.json`;
|
||||
const data = (await this.octokit.repos.getContents({ ...this.params, path: repoPath })).data;
|
||||
if ('type' in data && data.type === 'file') {
|
||||
@@ -126,10 +128,10 @@ class OctoKit {
|
||||
throw Error('Found directory at config path when expecting file' + JSON.stringify(data));
|
||||
}
|
||||
async releaseContainsCommit(release, commit) {
|
||||
if ((0, utils_1.getInput)('commitReleasedDebuggingOverride')) {
|
||||
if (utils_1.getInput('commitReleasedDebuggingOverride')) {
|
||||
return true;
|
||||
}
|
||||
return new Promise((resolve, reject) => (0, child_process_1.exec)(`git -C ./repo merge-base --is-ancestor ${commit} ${release}`, (err) => !err || err.code === 1 ? resolve(!err) : reject(err)));
|
||||
return new Promise((resolve, reject) => child_process_1.exec(`git -C ./repo merge-base --is-ancestor ${commit} ${release}`, (err) => !err || err.code === 1 ? resolve(!err) : reject(err)));
|
||||
}
|
||||
}
|
||||
exports.OctoKit = OctoKit;
|
||||
@@ -140,7 +142,7 @@ class OctoKitIssue extends OctoKit {
|
||||
this.issueData = issueData;
|
||||
}
|
||||
async addAssignee(assignee) {
|
||||
(0, core_1.debug)('Adding assignee ' + assignee + ' to ' + this.issueData.number);
|
||||
core_1.debug('Adding assignee ' + assignee + ' to ' + this.issueData.number);
|
||||
if (!this.options.readonly) {
|
||||
await this.octokit.issues.addAssignees({
|
||||
...this.params,
|
||||
@@ -150,7 +152,7 @@ class OctoKitIssue extends OctoKit {
|
||||
}
|
||||
}
|
||||
async closeIssue() {
|
||||
(0, core_1.debug)('Closing issue ' + this.issueData.number);
|
||||
core_1.debug('Closing issue ' + this.issueData.number);
|
||||
if (!this.options.readonly)
|
||||
await this.octokit.issues.update({
|
||||
...this.params,
|
||||
@@ -159,15 +161,16 @@ class OctoKitIssue extends OctoKit {
|
||||
});
|
||||
}
|
||||
async lockIssue() {
|
||||
(0, core_1.debug)('Locking issue ' + this.issueData.number);
|
||||
core_1.debug('Locking issue ' + this.issueData.number);
|
||||
if (!this.options.readonly)
|
||||
await this.octokit.issues.lock({ ...this.params, issue_number: this.issueData.number });
|
||||
}
|
||||
async getIssue() {
|
||||
if (isIssue(this.issueData)) {
|
||||
(0, core_1.debug)('Got issue data from query result ' + this.issueData.number);
|
||||
core_1.debug('Got issue data from query result ' + this.issueData.number);
|
||||
return this.issueData;
|
||||
}
|
||||
console.log('Fetching issue ' + this.issueData.number);
|
||||
const issue = (await this.octokit.issues.get({
|
||||
...this.params,
|
||||
issue_number: this.issueData.number,
|
||||
@@ -176,7 +179,7 @@ class OctoKitIssue extends OctoKit {
|
||||
return (this.issueData = this.octokitIssueToIssue(issue));
|
||||
}
|
||||
async postComment(body) {
|
||||
(0, core_1.debug)(`Posting comment ${body} on ${this.issueData.number}`);
|
||||
core_1.debug(`Posting comment ${body} on ${this.issueData.number}`);
|
||||
if (!this.options.readonly)
|
||||
await this.octokit.issues.createComment({
|
||||
...this.params,
|
||||
@@ -185,7 +188,7 @@ class OctoKitIssue extends OctoKit {
|
||||
});
|
||||
}
|
||||
async deleteComment(id) {
|
||||
(0, core_1.debug)(`Deleting comment ${id} on ${this.issueData.number}`);
|
||||
core_1.debug(`Deleting comment ${id} on ${this.issueData.number}`);
|
||||
if (!this.options.readonly)
|
||||
await this.octokit.issues.deleteComment({
|
||||
owner: this.params.owner,
|
||||
@@ -194,7 +197,7 @@ class OctoKitIssue extends OctoKit {
|
||||
});
|
||||
}
|
||||
async setMilestone(milestoneId) {
|
||||
(0, core_1.debug)(`Setting milestone for ${this.issueData.number} to ${milestoneId}`);
|
||||
core_1.debug(`Setting milestone for ${this.issueData.number} to ${milestoneId}`);
|
||||
if (!this.options.readonly)
|
||||
await this.octokit.issues.update({
|
||||
...this.params,
|
||||
@@ -203,7 +206,7 @@ class OctoKitIssue extends OctoKit {
|
||||
});
|
||||
}
|
||||
async *getComments(last) {
|
||||
(0, core_1.debug)('Fetching comments for ' + this.issueData.number);
|
||||
core_1.debug('Fetching comments for ' + this.issueData.number);
|
||||
const response = this.octokit.paginate.iterator(this.octokit.issues.listComments.endpoint.merge({
|
||||
...this.params,
|
||||
issue_number: this.issueData.number,
|
||||
@@ -220,7 +223,7 @@ class OctoKitIssue extends OctoKit {
|
||||
}
|
||||
}
|
||||
async addLabel(name) {
|
||||
(0, core_1.debug)(`Adding label ${name} to ${this.issueData.number}`);
|
||||
core_1.debug(`Adding label ${name} to ${this.issueData.number}`);
|
||||
if (!(await this.repoHasLabel(name))) {
|
||||
throw Error(`Action could not execute because label ${name} is not defined.`);
|
||||
}
|
||||
@@ -232,7 +235,7 @@ class OctoKitIssue extends OctoKit {
|
||||
});
|
||||
}
|
||||
async removeLabel(name) {
|
||||
(0, core_1.debug)(`Removing label ${name} from ${this.issueData.number}`);
|
||||
core_1.debug(`Removing label ${name} from ${this.issueData.number}`);
|
||||
try {
|
||||
if (!this.options.readonly)
|
||||
await this.octokit.issues.removeLabel({
|
||||
@@ -243,12 +246,14 @@ class OctoKitIssue extends OctoKit {
|
||||
}
|
||||
catch (err) {
|
||||
if (err.status === 404) {
|
||||
console.log(`Label ${name} not found on issue`);
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
async getClosingInfo() {
|
||||
var _a;
|
||||
if ((await this.getIssue()).open) {
|
||||
return;
|
||||
}
|
||||
@@ -262,12 +267,13 @@ class OctoKitIssue extends OctoKit {
|
||||
for (const timelineEvent of timelineEvents) {
|
||||
if (timelineEvent.event === 'closed') {
|
||||
closingCommit = {
|
||||
hash: timelineEvent.commit_id ?? undefined,
|
||||
hash: (_a = timelineEvent.commit_id) !== null && _a !== void 0 ? _a : undefined,
|
||||
timestamp: +new Date(timelineEvent.created_at),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
console.log(`Got ${closingCommit} as closing commit of ${this.issueData.number}`);
|
||||
return closingCommit;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,6 +25,7 @@ export class OctoKit implements GitHub {
|
||||
|
||||
async *query(query: Query): AsyncIterableIterator<GitHubIssue[]> {
|
||||
const q = query.q + ` repo:${this.params.owner}/${this.params.repo}`
|
||||
console.log(`Querying for ${q}:`)
|
||||
|
||||
const options = this.octokit.search.issuesAndPullRequests.endpoint.merge({
|
||||
...query,
|
||||
@@ -49,6 +50,7 @@ export class OctoKit implements GitHub {
|
||||
await timeout()
|
||||
await logRateLimit(this.token)
|
||||
const page: Array<Octokit.SearchIssuesAndPullRequestsResponseItemsItem> = pageResponse.data
|
||||
console.log(`Page ${++pageNum}: ${page.map(({ number }) => number).join(' ')}`)
|
||||
yield page.map(
|
||||
(issue) => new OctoKitIssue(this.token, this.params, this.octokitIssueToIssue(issue)),
|
||||
)
|
||||
@@ -197,6 +199,7 @@ export class OctoKitIssue extends OctoKit implements GitHubIssue {
|
||||
return this.issueData
|
||||
}
|
||||
|
||||
console.log('Fetching issue ' + this.issueData.number)
|
||||
const issue = (
|
||||
await this.octokit.issues.get({
|
||||
...this.params,
|
||||
@@ -283,6 +286,7 @@ export class OctoKitIssue extends OctoKit implements GitHubIssue {
|
||||
})
|
||||
} catch (err) {
|
||||
if (err.status === 404) {
|
||||
console.log(`Label ${name} not found on issue`)
|
||||
return
|
||||
}
|
||||
throw err
|
||||
@@ -310,6 +314,7 @@ export class OctoKitIssue extends OctoKit implements GitHubIssue {
|
||||
}
|
||||
}
|
||||
}
|
||||
console.log(`Got ${closingCommit} as closing commit of ${this.issueData.number}`)
|
||||
return closingCommit
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,18 +4,17 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TestbedIssue = exports.Testbed = void 0;
|
||||
class Testbed {
|
||||
constructor(config) {
|
||||
var _a, _b, _c, _d, _e;
|
||||
this.config = {
|
||||
globalLabels: config?.globalLabels ?? [],
|
||||
configs: config?.configs ?? {},
|
||||
writers: config?.writers ?? [],
|
||||
releasedCommits: config?.releasedCommits ?? [],
|
||||
queryRunner: config?.queryRunner ??
|
||||
async function* () {
|
||||
yield [];
|
||||
},
|
||||
globalLabels: (_a = config === null || config === void 0 ? void 0 : config.globalLabels) !== null && _a !== void 0 ? _a : [],
|
||||
configs: (_b = config === null || config === void 0 ? void 0 : config.configs) !== null && _b !== void 0 ? _b : {},
|
||||
writers: (_c = config === null || config === void 0 ? void 0 : config.writers) !== null && _c !== void 0 ? _c : [],
|
||||
releasedCommits: (_d = config === null || config === void 0 ? void 0 : config.releasedCommits) !== null && _d !== void 0 ? _d : [],
|
||||
queryRunner: (_e = config === null || config === void 0 ? void 0 : config.queryRunner) !== null && _e !== void 0 ? _e : async function* () {
|
||||
yield [];
|
||||
},
|
||||
};
|
||||
}
|
||||
async *query(query) {
|
||||
@@ -48,15 +47,16 @@ class Testbed {
|
||||
exports.Testbed = Testbed;
|
||||
class TestbedIssue extends Testbed {
|
||||
constructor(globalConfig, issueConfig) {
|
||||
var _a, _b, _c;
|
||||
super(globalConfig);
|
||||
issueConfig = issueConfig ?? {};
|
||||
issueConfig.comments = issueConfig?.comments ?? [];
|
||||
issueConfig.labels = issueConfig?.labels ?? [];
|
||||
issueConfig = issueConfig !== null && issueConfig !== void 0 ? issueConfig : {};
|
||||
issueConfig.comments = (_a = issueConfig === null || issueConfig === void 0 ? void 0 : issueConfig.comments) !== null && _a !== void 0 ? _a : [];
|
||||
issueConfig.labels = (_b = issueConfig === null || issueConfig === void 0 ? void 0 : issueConfig.labels) !== null && _b !== void 0 ? _b : [];
|
||||
issueConfig.issue = {
|
||||
author: { name: 'JacksonKearl' },
|
||||
body: 'issue body',
|
||||
locked: false,
|
||||
numComments: issueConfig?.comments?.length || 0,
|
||||
numComments: ((_c = issueConfig === null || issueConfig === void 0 ? void 0 : issueConfig.comments) === null || _c === void 0 ? void 0 : _c.length) || 0,
|
||||
number: 1,
|
||||
open: true,
|
||||
title: 'issue title',
|
||||
@@ -90,7 +90,7 @@ class TestbedIssue extends Testbed {
|
||||
}
|
||||
async postComment(body, author) {
|
||||
this.issueConfig.comments.push({
|
||||
author: { name: author ?? 'bot' },
|
||||
author: { name: author !== null && author !== void 0 ? author : 'bot' },
|
||||
body,
|
||||
id: Math.random(),
|
||||
timestamp: +new Date(),
|
||||
|
||||
@@ -8,15 +8,15 @@ const core = require("@actions/core");
|
||||
const github_1 = require("@actions/github");
|
||||
const octokit_1 = require("../api/octokit");
|
||||
const utils_1 = require("../utils/utils");
|
||||
const token = (0, utils_1.getRequiredInput)('token');
|
||||
const label = (0, utils_1.getRequiredInput)('label');
|
||||
const token = utils_1.getRequiredInput('token');
|
||||
const label = utils_1.getRequiredInput('label');
|
||||
async function main() {
|
||||
const pr = new octokit_1.OctoKitIssue(token, github_1.context.repo, { number: github_1.context.issue.number });
|
||||
pr.addLabel(label);
|
||||
}
|
||||
main()
|
||||
.then(() => (0, utils_1.logRateLimit)(token))
|
||||
.then(() => utils_1.logRateLimit(token))
|
||||
.catch(async (error) => {
|
||||
core.setFailed(error.message);
|
||||
await (0, utils_1.logErrorToIssue)(error.message, true, token);
|
||||
await utils_1.logErrorToIssue(error.message, true, token);
|
||||
});
|
||||
|
||||
@@ -1,25 +1,24 @@
|
||||
{
|
||||
"name": "github-actions",
|
||||
"version": "1.0.0",
|
||||
"description": "GitHub Actions",
|
||||
"scripts": {
|
||||
"test": "mocha -r ts-node/register **/*.test.ts",
|
||||
"build": "tsc -p ./tsconfig.json",
|
||||
"compile": "tsc -p ./tsconfig.json",
|
||||
"lint": "eslint -c .eslintrc --fix --ext .ts .",
|
||||
"watch-typecheck": "tsc --watch"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/microsoft/azuredatastudio.git"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.2.6",
|
||||
"@actions/github": "^2.1.1",
|
||||
"axios": "^0.21.4",
|
||||
"name": "github-actions",
|
||||
"version": "1.0.0",
|
||||
"description": "GitHub Actions",
|
||||
"scripts": {
|
||||
"test": "mocha -r ts-node/register **/*.test.ts",
|
||||
"build": "tsc -p ./tsconfig.json",
|
||||
"lint": "eslint -c .eslintrc --fix --ext .ts .",
|
||||
"watch-typecheck": "tsc --watch"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/microsoft/azuredatastudio.git"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.2.6",
|
||||
"@actions/github": "^2.1.1",
|
||||
"axios": "^0.21.4",
|
||||
"ts-node": "^8.6.2",
|
||||
"typescript": "^3.8.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,16 +4,13 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.logErrorToIssue = exports.logRateLimit = exports.daysAgoToHumanReadbleDate = exports.daysAgoToTimestamp = exports.loadLatestRelease = exports.normalizeIssue = exports.getRequiredInput = exports.getInput = void 0;
|
||||
const core = require("@actions/core");
|
||||
const github_1 = require("@actions/github");
|
||||
const axios_1 = require("axios");
|
||||
const octokit_1 = require("../api/octokit");
|
||||
const getInput = (name) => core.getInput(name) || undefined;
|
||||
exports.getInput = getInput;
|
||||
const getRequiredInput = (name) => core.getInput(name, { required: true });
|
||||
exports.getRequiredInput = getRequiredInput;
|
||||
const normalizeIssue = (issue) => {
|
||||
exports.getInput = (name) => core.getInput(name) || undefined;
|
||||
exports.getRequiredInput = (name) => core.getInput(name, { required: true });
|
||||
exports.normalizeIssue = (issue) => {
|
||||
const { body, title } = issue;
|
||||
const isBug = body.includes('bug_report_template') || /Issue Type:.*Bug.*/.test(body);
|
||||
const isFeatureRequest = body.includes('feature_request_template') || /Issue Type:.*Feature Request.*/.test(body);
|
||||
@@ -36,25 +33,23 @@ const normalizeIssue = (issue) => {
|
||||
issueType: isBug ? 'bug' : isFeatureRequest ? 'feature_request' : 'unknown',
|
||||
};
|
||||
};
|
||||
exports.normalizeIssue = normalizeIssue;
|
||||
const loadLatestRelease = async (quality) => (await axios_1.default.get(`https://vscode-update.azurewebsites.net/api/update/darwin/${quality}/latest`)).data;
|
||||
exports.loadLatestRelease = loadLatestRelease;
|
||||
const daysAgoToTimestamp = (days) => +new Date(Date.now() - days * 24 * 60 * 60 * 1000);
|
||||
exports.daysAgoToTimestamp = daysAgoToTimestamp;
|
||||
const daysAgoToHumanReadbleDate = (days) => new Date(Date.now() - days * 24 * 60 * 60 * 1000).toISOString().replace(/\.\d{3}\w$/, '');
|
||||
exports.daysAgoToHumanReadbleDate = daysAgoToHumanReadbleDate;
|
||||
const logRateLimit = async (token) => {
|
||||
exports.loadLatestRelease = async (quality) => (await axios_1.default.get(`https://vscode-update.azurewebsites.net/api/update/darwin/${quality}/latest`)).data;
|
||||
exports.daysAgoToTimestamp = (days) => +new Date(Date.now() - days * 24 * 60 * 60 * 1000);
|
||||
exports.daysAgoToHumanReadbleDate = (days) => new Date(Date.now() - days * 24 * 60 * 60 * 1000).toISOString().replace(/\.\d{3}\w$/, '');
|
||||
exports.logRateLimit = async (token) => {
|
||||
const usageData = (await new github_1.GitHub(token).rateLimit.get()).data.resources;
|
||||
['core', 'graphql', 'search'].forEach(async (category) => {
|
||||
const usage = 1 - usageData[category].remaining / usageData[category].limit;
|
||||
const message = `Usage at ${usage} for ${category}`;
|
||||
if (usage > 0) {
|
||||
console.log(message);
|
||||
}
|
||||
if (usage > 0.5) {
|
||||
await (0, exports.logErrorToIssue)(message, false, token);
|
||||
await exports.logErrorToIssue(message, false, token);
|
||||
}
|
||||
});
|
||||
};
|
||||
exports.logRateLimit = logRateLimit;
|
||||
const logErrorToIssue = async (message, ping, token) => {
|
||||
exports.logErrorToIssue = async (message, ping, token) => {
|
||||
// Attempt to wait out abuse detection timeout if present
|
||||
await new Promise((resolve) => setTimeout(resolve, 10000));
|
||||
const dest = github_1.context.repo.repo === 'vscode-internalbacklog'
|
||||
@@ -75,4 +70,3 @@ ${JSON.stringify(github_1.context, null, 2).replace(/<!--/gu, '<@--').replace(/-
|
||||
-->
|
||||
`);
|
||||
};
|
||||
exports.logErrorToIssue = logErrorToIssue;
|
||||
|
||||
@@ -58,10 +58,13 @@ export const daysAgoToHumanReadbleDate = (days: number) =>
|
||||
new Date(Date.now() - days * 24 * 60 * 60 * 1000).toISOString().replace(/\.\d{3}\w$/, '')
|
||||
|
||||
export const logRateLimit = async (token: string) => {
|
||||
const usageData = (await new GitHub(token).rateLimit.get()).data.resources;
|
||||
(['core', 'graphql', 'search'] as const).forEach(async (category) => {
|
||||
const usageData = (await new GitHub(token).rateLimit.get()).data.resources
|
||||
;(['core', 'graphql', 'search'] as const).forEach(async (category) => {
|
||||
const usage = 1 - usageData[category].remaining / usageData[category].limit
|
||||
const message = `Usage at ${usage} for ${category}`
|
||||
if (usage > 0) {
|
||||
console.log(message)
|
||||
}
|
||||
if (usage > 0.5) {
|
||||
await logErrorToIssue(message, false, token)
|
||||
}
|
||||
|
||||
@@ -3,12 +3,9 @@

"@actions/core@^1.2.6":
version "1.9.1"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.9.1.tgz#97c0201b1f9856df4f7c3a375cdcdb0c2a2f750b"
integrity sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==
dependencies:
"@actions/http-client" "^2.0.1"
uuid "^8.3.2"
version "1.2.6"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.6.tgz#a78d49f41a4def18e88ce47c2cac615d5694bf09"
integrity sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA==

"@actions/github@^2.1.1":
version "2.1.1"
@@ -26,13 +23,6 @@
dependencies:
tunnel "0.0.6"

"@actions/http-client@^2.0.1":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.0.1.tgz#873f4ca98fe32f6839462a6f046332677322f99c"
integrity sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==
dependencies:
tunnel "^0.0.6"

"@octokit/auth-token@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.4.0.tgz#b64178975218b99e4dfe948253f0673cbbb59d9f"
@@ -218,9 +208,9 @@ execa@^1.0.0:
strip-eof "^1.0.0"

follow-redirects@^1.14.0:
version "1.14.8"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.8.tgz#016996fb9a11a100566398b1c6839337d7bfa8fc"
integrity sha512-1x0S9UVJHsQprFcEC/qnNzBLcIxsjAV905f/UkQxbclCsoTWlacCNOpQa/anodLl2uaEKFhfWOvM2Qg77+15zA==
version "1.14.3"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.3.tgz#6ada78118d8d24caee595595accdc0ac6abd022e"
integrity sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw==

get-stream@^4.0.0:
version "4.1.0"
@@ -282,11 +272,9 @@ nice-try@^1.0.4:
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==

node-fetch@^2.3.0:
version "2.6.7"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
dependencies:
whatwg-url "^5.0.0"
version "2.6.1"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==

npm-run-path@^2.0.0:
version "2.0.2"
@@ -373,11 +361,6 @@ strip-eof@^1.0.0:
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=

tr46@~0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==

ts-node@^8.6.2:
version "8.9.0"
resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.9.0.tgz#d7bf7272dcbecd3a2aa18bd0b96c7d2f270c15d4"
@@ -389,15 +372,15 @@ ts-node@^8.6.2:
source-map-support "^0.5.17"
yn "3.1.1"

tunnel@0.0.6, tunnel@^0.0.6:
tunnel@0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c"
integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==

typescript@^3.8.3:
version "3.9.10"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.10.tgz#70f3910ac7a51ed6bef79da7800690b19bf778b8"
integrity sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==
version "3.8.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.8.3.tgz#409eb8544ea0335711205869ec458ab109ee1061"
integrity sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==

universal-user-agent@^4.0.0:
version "4.0.1"
@@ -413,24 +396,6 @@ universal-user-agent@^5.0.0:
dependencies:
os-name "^3.1.0"

uuid@^8.3.2:
version "8.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==

webidl-conversions@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==

whatwg-url@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d"
integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==
dependencies:
tr46 "~0.0.3"
webidl-conversions "^3.0.0"

which@^1.2.9:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
@@ -1,21 +0,0 @@
{
"codebaseName": "vscode-client",
"ppe": false,
"notificationAliases": [
"sbatten@microsoft.com"
],
"codebaseAdmins": [
"REDMOND\\stbatt",
"REDMOND\\monacotools",
],
"instanceUrl": "https://msazure.visualstudio.com/defaultcollection",
"projectName": "One",
"areaPath": "One\\VSCode\\Visual Studio Code Client",
"iterationPath": "One",
"notifyAlways": true,
"tools": [
"BinSkim",
"CredScan",
"CodeQL"
]
}
@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
@@ -13,17 +13,8 @@ const shasum = crypto.createHash('sha1');
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `package.json` and `yarn.lock` files
for (const dir of dirs) {
const packageJsonPath = path.join(ROOT, dir, 'package.json');
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString());
const relevantPackageJsonSections = {
dependencies: packageJson.dependencies,
devDependencies: packageJson.devDependencies,
optionalDependencies: packageJson.optionalDependencies,
resolutions: packageJson.resolutions
};
shasum.update(JSON.stringify(relevantPackageJsonSections));
// Add `yarn.lock` files
for (let dir of dirs) {
const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
shasum.update(fs.readFileSync(yarnLockPath));
}
@@ -3,6 +3,8 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import * as path from 'path';
import * as crypto from 'crypto';
@@ -16,18 +18,8 @@ shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));

// Add `package.json` and `yarn.lock` files
for (const dir of dirs) {
const packageJsonPath = path.join(ROOT, dir, 'package.json');
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString());
const relevantPackageJsonSections = {
dependencies: packageJson.dependencies,
devDependencies: packageJson.devDependencies,
optionalDependencies: packageJson.optionalDependencies,
resolutions: packageJson.resolutions
};
shasum.update(JSON.stringify(relevantPackageJsonSections));

// Add `yarn.lock` files
for (let dir of dirs) {
const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
shasum.update(fs.readFileSync(yarnLockPath));
}
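The two cache-key hunks above hash only the dependency-related sections of each package.json (plus every yarn.lock), so unrelated edits such as a changed `scripts` block do not invalidate the node_modules cache. A small standalone sketch of that idea; the function name and sample inputs here are illustrative, not taken from the build:

```ts
// Sketch: hash only the sections that affect what yarn installs.
import * as crypto from 'crypto';

function cacheKeyFor(packageJson: any, yarnLock: string): string {
	const relevantSections = {
		dependencies: packageJson.dependencies,
		devDependencies: packageJson.devDependencies,
		optionalDependencies: packageJson.optionalDependencies,
		resolutions: packageJson.resolutions
	};
	const shasum = crypto.createHash('sha1');
	shasum.update(JSON.stringify(relevantSections));
	shasum.update(yarnLock);
	return shasum.digest('hex');
}

// Changing "scripts" leaves the key untouched; changing a dependency does not.
const lock = '# yarn lockfile v1\n';
const a = cacheKeyFor({ dependencies: { mime: '^1.0.0' }, scripts: { build: 'gulp' } }, lock);
const b = cacheKeyFor({ dependencies: { mime: '^1.0.0' }, scripts: { build: 'tsc' } }, lock);
console.log(a === b); // true
```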
@@ -12,7 +12,6 @@ const files = [
'.build/langpacks/**/*.vsix',
'.build/extensions/**/*.vsix',
'.build/win32-x64/**/*.{exe,zip}',
'.build/win32-arm64/**/*.{exe,zip}',
'.build/linux/sha256hashes.txt',
'.build/linux/deb/amd64/deb/*.deb',
'.build/linux/rpm/x86_64/*.rpm',
@@ -13,8 +13,7 @@ import * as fs from 'fs';
const files = [
'.build/langpacks/**/*.vsix', // langpacks
'.build/extensions/**/*.vsix', // external extensions
'.build/win32-x64/**/*.{exe,zip}', // windows x64 binaries
'.build/win32-arm64/**/*.{exe,zip}', // windows arm64 binaries
'.build/win32-x64/**/*.{exe,zip}', // windows binaries
'.build/linux/sha256hashes.txt', // linux hashes
'.build/linux/deb/amd64/deb/*.deb', // linux debs
'.build/linux/rpm/x86_64/*.rpm', // linux rpms
@@ -1,15 +1,15 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const url = require("url");
const crypto = require("crypto");
const storage_blob_1 = require("@azure/storage-blob");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const identity_1 = require("@azure/identity");
const retry_1 = require("./retry");
if (process.argv.length !== 8) {
console.error('Usage: node createAsset.js PRODUCT OS ARCH TYPE NAME FILE');
@@ -20,7 +20,7 @@ function getPlatform(product, os, arch, type) {
switch (os) {
case 'win32':
switch (product) {
case 'client': {
case 'client':
const asset = arch === 'ia32' ? 'win32' : `win32-${arch}`;
switch (type) {
case 'archive':
@@ -30,30 +30,20 @@ function getPlatform(product, os, arch, type) {
case 'user-setup':
return `${asset}-user`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
}
case 'server':
if (arch === 'arm64') {
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
return arch === 'ia32' ? 'server-win32' : `server-win32-${arch}`;
case 'web':
if (arch === 'arm64') {
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
return arch === 'ia32' ? 'server-win32-web' : `server-win32-${arch}-web`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'alpine':
switch (product) {
case 'server':
return `server-alpine-${arch}`;
case 'web':
return `server-alpine-${arch}-web`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
case 'linux':
switch (type) {
@@ -68,14 +58,14 @@ function getPlatform(product, os, arch, type) {
case 'web':
return arch === 'standalone' ? 'web-standalone' : `server-linux-${arch}-web`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
case 'deb-package':
return `linux-deb-${arch}`;
case 'rpm-package':
return `linux-rpm-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
case 'darwin':
switch (product) {
@@ -85,20 +75,17 @@ function getPlatform(product, os, arch, type) {
}
return `darwin-${arch}`;
case 'server':
if (arch === 'x64') {
return 'server-darwin';
}
return `server-darwin-${arch}`;
return 'server-darwin';
case 'web':
if (arch === 'x64') {
return 'server-darwin-web';
if (arch !== 'x64') {
throw `What should the platform be?: ${product} ${os} ${arch} ${type}`;
}
return `server-darwin-${arch}-web`;
return 'server-darwin-web';
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
}
// Contains all of the logic for mapping types to our actual types in CosmosDB
@@ -122,6 +109,20 @@ function hashStream(hashName, stream) {
.on('close', () => c(shasum.digest('hex')));
});
}
async function doesAssetExist(blobService, quality, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, quality, blobName, filePath, fileName) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(filePath),
contentDisposition: `attachment; filename="${fileName}"`,
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
}
function getEnv(name) {
const result = process.env[name];
if (typeof result === 'undefined') {
@@ -135,7 +136,7 @@ async function main() {
const platform = getPlatform(product, os, arch, unprocessedType);
const type = getRealType(unprocessedType);
const quality = getEnv('VSCODE_QUALITY');
const commit = process.env['VSCODE_DISTRO_COMMIT'] || getEnv('BUILD_SOURCEVERSION');
const commit = getEnv('BUILD_SOURCEVERSION');
console.log('Creating asset...');
const stat = await new Promise((c, e) => fs.stat(filePath, (err, stat) => err ? e(err) : c(stat)));
const size = stat.size;
@@ -145,48 +146,28 @@ async function main() {
console.log('SHA1:', sha1hash);
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + fileName;
const storagePipelineOptions = { retryOptions: { retryPolicyType: storage_blob_1.StorageRetryPolicyType.EXPONENTIAL, maxTries: 6, tryTimeoutInMs: 10 * 60 * 1000 } };
const credential = new identity_1.ClientSecretCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_CLIENT_SECRET']);
const blobServiceClient = new storage_blob_1.BlobServiceClient(`https://vscode.blob.core.windows.net`, credential, storagePipelineOptions);
const containerClient = blobServiceClient.getContainerClient(quality);
const blobClient = containerClient.getBlockBlobClient(blobName);
const blobExists = await blobClient.exists();
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const blobExists = await doesAssetExist(blobService, quality, blobName);
if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
const blobOptions = {
blobHTTPHeaders: {
blobContentType: mime.lookup(filePath),
blobContentDisposition: `attachment; filename="${fileName}"`,
blobCacheControl: 'max-age=31536000, public'
}
};
const uploadPromises = [
(0, retry_1.retry)(async () => {
await blobClient.uploadFile(filePath, blobOptions);
console.log('Blob successfully uploaded to Azure storage.');
})
];
const shouldUploadToMooncake = /true/i.test(process.env['VSCODE_PUBLISH_TO_MOONCAKE'] ?? 'true');
if (shouldUploadToMooncake) {
const mooncakeCredential = new identity_1.ClientSecretCredential(process.env['AZURE_MOONCAKE_TENANT_ID'], process.env['AZURE_MOONCAKE_CLIENT_ID'], process.env['AZURE_MOONCAKE_CLIENT_SECRET']);
const mooncakeBlobServiceClient = new storage_blob_1.BlobServiceClient(`https://vscode.blob.core.chinacloudapi.cn`, mooncakeCredential, storagePipelineOptions);
const mooncakeContainerClient = mooncakeBlobServiceClient.getContainerClient(quality);
const mooncakeBlobClient = mooncakeContainerClient.getBlockBlobClient(blobName);
uploadPromises.push((0, retry_1.retry)(async () => {
await mooncakeBlobClient.uploadFile(filePath, blobOptions);
console.log('Blob successfully uploaded to Mooncake Azure storage.');
}));
console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
}
else {
console.log('Uploading blobs to Azure storage...');
}
await Promise.all(uploadPromises);
console.log('All blobs successfully uploaded.');
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// mooncake is fussy and far away, this is needed!
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
await retry_1.retry(() => Promise.all([
uploadBlob(blobService, quality, blobName, filePath, fileName),
uploadBlob(mooncakeBlobService, quality, blobName, filePath, fileName)
]));
console.log('Blobs successfully uploaded.');
// TODO: Understand if blobName and blobPath are the same and replace blobPath with blobName if so.
const assetUrl = `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`;
const blobPath = new URL(assetUrl).pathname;
const blobPath = url.parse(assetUrl).path;
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
const asset = {
platform,
@@ -202,9 +183,9 @@ async function main() {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, '  '));
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], aadCredentials: credential });
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await (0, retry_1.retry)(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
console.log(`  Done ✔️`);
}
main().then(() => {
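The `hashStream` helper is only partially visible in the hunk above; its opening lines fall outside the diff context. Reconstructed from the visible `.on('close', ...)` line and standard Node stream/crypto APIs, a complete version would look roughly like the sketch below. Treat it as an approximation, not the repository's exact code:

```ts
import * as crypto from 'crypto';
import { Readable } from 'stream';

// Sketch: feed a readable stream through a hash and resolve with the hex digest.
function hashStream(hashName: string, stream: Readable): Promise<string> {
	return new Promise<string>((c, e) => {
		const shasum = crypto.createHash(hashName);
		stream
			.on('data', chunk => shasum.update(chunk))
			.on('error', e)
			.on('close', () => c(shasum.digest('hex')));
	});
}
```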
@@ -3,13 +3,15 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import * as url from 'url';
import { Readable } from 'stream';
import * as crypto from 'crypto';
import { BlobServiceClient, BlockBlobParallelUploadOptions, StoragePipelineOptions, StorageRetryPolicyType } from '@azure/storage-blob';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';
import { ClientSecretCredential } from '@azure/identity';
import { retry } from './retry';

interface Asset {
@@ -33,7 +35,7 @@ function getPlatform(product: string, os: string, arch: string, type: string): s
switch (os) {
case 'win32':
switch (product) {
case 'client': {
case 'client':
const asset = arch === 'ia32' ? 'win32' : `win32-${arch}`;
switch (type) {
case 'archive':
@@ -43,30 +45,20 @@ function getPlatform(product: string, os: string, arch: string, type: string): s
case 'user-setup':
return `${asset}-user`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
}
case 'server':
if (arch === 'arm64') {
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
return arch === 'ia32' ? 'server-win32' : `server-win32-${arch}`;
case 'web':
if (arch === 'arm64') {
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
return arch === 'ia32' ? 'server-win32-web' : `server-win32-${arch}-web`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'alpine':
switch (product) {
case 'server':
return `server-alpine-${arch}`;
case 'web':
return `server-alpine-${arch}-web`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
case 'linux':
switch (type) {
@@ -81,14 +73,14 @@ function getPlatform(product: string, os: string, arch: string, type: string): s
case 'web':
return arch === 'standalone' ? 'web-standalone' : `server-linux-${arch}-web`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
case 'deb-package':
return `linux-deb-${arch}`;
case 'rpm-package':
return `linux-rpm-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
case 'darwin':
switch (product) {
@@ -98,20 +90,17 @@ function getPlatform(product: string, os: string, arch: string, type: string): s
}
return `darwin-${arch}`;
case 'server':
if (arch === 'x64') {
return 'server-darwin';
}
return `server-darwin-${arch}`;
return 'server-darwin';
case 'web':
if (arch === 'x64') {
return 'server-darwin-web';
if (arch !== 'x64') {
throw `What should the platform be?: ${product} ${os} ${arch} ${type}`;
}
return `server-darwin-${arch}-web`;
return 'server-darwin-web';
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
}
}

@@ -139,6 +128,23 @@ function hashStream(hashName: string, stream: Readable): Promise<string> {
});
}

async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}

async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, filePath: string, fileName: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(filePath),
contentDisposition: `attachment; filename="${fileName}"`,
cacheControl: 'max-age=31536000, public'
}
};

await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
}

function getEnv(name: string): string {
const result = process.env[name];

@@ -155,7 +161,7 @@ async function main(): Promise<void> {
const platform = getPlatform(product, os, arch, unprocessedType);
const type = getRealType(unprocessedType);
const quality = getEnv('VSCODE_QUALITY');
const commit = process.env['VSCODE_DISTRO_COMMIT'] || getEnv('BUILD_SOURCEVERSION');
const commit = getEnv('BUILD_SOURCEVERSION');

console.log('Creating asset...');

@@ -171,58 +177,37 @@ async function main(): Promise<void> {
console.log('SHA256:', sha256hash);

const blobName = commit + '/' + fileName;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;

const storagePipelineOptions: StoragePipelineOptions = { retryOptions: { retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, maxTries: 6, tryTimeoutInMs: 10 * 60 * 1000 } };
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));

const credential = new ClientSecretCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, process.env['AZURE_CLIENT_SECRET']!);
const blobServiceClient = new BlobServiceClient(`https://vscode.blob.core.windows.net`, credential, storagePipelineOptions);
const containerClient = blobServiceClient.getContainerClient(quality);
const blobClient = containerClient.getBlockBlobClient(blobName);
const blobExists = await blobClient.exists();
const blobExists = await doesAssetExist(blobService, quality, blobName);

if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}

const blobOptions: BlockBlobParallelUploadOptions = {
blobHTTPHeaders: {
blobContentType: mime.lookup(filePath),
blobContentDisposition: `attachment; filename="${fileName}"`,
blobCacheControl: 'max-age=31536000, public'
}
};
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));

const uploadPromises: Promise<void>[] = [
retry(async () => {
await blobClient.uploadFile(filePath, blobOptions);
console.log('Blob successfully uploaded to Azure storage.');
})
];
// mooncake is fussy and far away, this is needed!
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;

const shouldUploadToMooncake = /true/i.test(process.env['VSCODE_PUBLISH_TO_MOONCAKE'] ?? 'true');
console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');

if (shouldUploadToMooncake) {
const mooncakeCredential = new ClientSecretCredential(process.env['AZURE_MOONCAKE_TENANT_ID']!, process.env['AZURE_MOONCAKE_CLIENT_ID']!, process.env['AZURE_MOONCAKE_CLIENT_SECRET']!);
const mooncakeBlobServiceClient = new BlobServiceClient(`https://vscode.blob.core.chinacloudapi.cn`, mooncakeCredential, storagePipelineOptions);
const mooncakeContainerClient = mooncakeBlobServiceClient.getContainerClient(quality);
const mooncakeBlobClient = mooncakeContainerClient.getBlockBlobClient(blobName);
await retry(() => Promise.all([
uploadBlob(blobService, quality, blobName, filePath, fileName),
uploadBlob(mooncakeBlobService, quality, blobName, filePath, fileName)
]));

uploadPromises.push(retry(async () => {
await mooncakeBlobClient.uploadFile(filePath, blobOptions);
console.log('Blob successfully uploaded to Mooncake Azure storage.');
}));

console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
} else {
console.log('Uploading blobs to Azure storage...');
}

await Promise.all(uploadPromises);
console.log('All blobs successfully uploaded.');
console.log('Blobs successfully uploaded.');

// TODO: Understand if blobName and blobPath are the same and replace blobPath with blobName if so.
const assetUrl = `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`;
const blobPath = new URL(assetUrl).pathname;
const blobPath = url.parse(assetUrl).path;
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;

const asset: Asset = {
@@ -242,7 +227,7 @@ async function main(): Promise<void> {

console.log('Asset:', JSON.stringify(asset, null, '  '));

const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, aadCredentials: credential });
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
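The createAsset.ts hunk above swaps the legacy `azure-storage` blob service for `@azure/storage-blob` with an AAD `ClientSecretCredential`. A condensed sketch of just that upload path, with the account URL and environment variable names carried over from the diff; the wrapper function itself is illustrative:

```ts
import { BlobServiceClient, BlockBlobParallelUploadOptions, StorageRetryPolicyType } from '@azure/storage-blob';
import { ClientSecretCredential } from '@azure/identity';
import * as mime from 'mime';

async function uploadAsset(quality: string, blobName: string, filePath: string, fileName: string): Promise<void> {
	const credential = new ClientSecretCredential(
		process.env['AZURE_TENANT_ID']!,
		process.env['AZURE_CLIENT_ID']!,
		process.env['AZURE_CLIENT_SECRET']!
	);
	const serviceClient = new BlobServiceClient('https://vscode.blob.core.windows.net', credential, {
		retryOptions: { retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, maxTries: 6, tryTimeoutInMs: 10 * 60 * 1000 }
	});
	const blobClient = serviceClient.getContainerClient(quality).getBlockBlobClient(blobName);

	if (await blobClient.exists()) {
		return; // same "already published" short-circuit as in the diff
	}

	const options: BlockBlobParallelUploadOptions = {
		blobHTTPHeaders: {
			blobContentType: mime.lookup(filePath),
			blobContentDisposition: `attachment; filename="${fileName}"`,
			blobCacheControl: 'max-age=31536000, public'
		}
	};
	await blobClient.uploadFile(filePath, options);
}
```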
@@ -1,10 +1,9 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const identity_1 = require("@azure/identity");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
if (process.argv.length !== 3) {
@@ -21,9 +20,9 @@ function getEnv(name) {
async function main() {
const [, , _version] = process.argv;
const quality = getEnv('VSCODE_QUALITY');
const commit = process.env['VSCODE_DISTRO_COMMIT']?.trim() || getEnv('BUILD_SOURCEVERSION');
const commit = getEnv('BUILD_SOURCEVERSION');
const queuedBy = getEnv('BUILD_QUEUEDBY');
const sourceBranch = process.env['VSCODE_DISTRO_REF']?.trim() || getEnv('BUILD_SOURCEBRANCH');
const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
const version = _version + (quality === 'stable' ? '' : `-${quality}`);
console.log('Creating build...');
console.log('Quality:', quality);
@@ -34,16 +33,14 @@ async function main() {
timestamp: (new Date()).getTime(),
version,
isReleased: false,
private: Boolean(process.env['VSCODE_DISTRO_REF']?.trim()),
sourceBranch,
queuedBy,
assets: [],
updates: {}
};
const aadCredentials = new identity_1.ClientSecretCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_CLIENT_SECRET']);
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], aadCredentials });
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]));
await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
console.log('Build successfully created');
@@ -3,7 +3,8 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { ClientSecretCredential } from '@azure/identity';
'use strict';

import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry';

@@ -25,9 +26,9 @@ function getEnv(name: string): string {
async function main(): Promise<void> {
const [, , _version] = process.argv;
const quality = getEnv('VSCODE_QUALITY');
const commit = process.env['VSCODE_DISTRO_COMMIT']?.trim() || getEnv('BUILD_SOURCEVERSION');
const commit = getEnv('BUILD_SOURCEVERSION');
const queuedBy = getEnv('BUILD_QUEUEDBY');
const sourceBranch = process.env['VSCODE_DISTRO_REF']?.trim() || getEnv('BUILD_SOURCEBRANCH');
const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
const version = _version + (quality === 'stable' ? '' : `-${quality}`);

console.log('Creating build...');
@@ -40,15 +41,13 @@ async function main(): Promise<void> {
timestamp: (new Date()).getTime(),
version,
isReleased: false,
private: Boolean(process.env['VSCODE_DISTRO_REF']?.trim()),
sourceBranch,
queuedBy,
assets: [],
updates: {}
};

const aadCredentials = new ClientSecretCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, process.env['AZURE_CLIENT_SECRET']!);
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, aadCredentials });
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry(() => scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]));
}
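Both createBuild hunks replace the Cosmos DB master-key constructor with an AAD service principal. A minimal sketch showing the two constructors side by side, using the environment variable names and the builds database layout already present in the diff; the wrapper function is illustrative:

```ts
import { CosmosClient } from '@azure/cosmos';
import { ClientSecretCredential } from '@azure/identity';

const endpoint = process.env['AZURE_DOCUMENTDB_ENDPOINT']!;

// Old style: account master key taken from the pipeline's secret variables.
const keyClient = new CosmosClient({ endpoint, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });

// New style: AAD service principal instead of the account master key.
const aadCredentials = new ClientSecretCredential(
	process.env['AZURE_TENANT_ID']!,
	process.env['AZURE_CLIENT_ID']!,
	process.env['AZURE_CLIENT_SECRET']!
);
const aadClient = new CosmosClient({ endpoint, aadCredentials });

// Either client executes the stored procedure the same way.
async function createBuild(quality: string, build: object): Promise<void> {
	const scripts = aadClient.database('builds').container(quality).scripts;
	await scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]);
}
```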
@@ -4,9 +4,11 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const retry_1 = require("./retry");
const { installDefaultBrowsersForNpmInstall } = require('playwright-core/lib/server');
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));
async function install() {
await (0, retry_1.retry)(() => installDefaultBrowsersForNpmInstall());
await retry_1.retry(() => installBrowsersWithProgressBar(playwrightPath));
}
install();
@@ -1,13 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { retry } from './retry';
const { installDefaultBrowsersForNpmInstall } = require('playwright-core/lib/server');

async function install() {
await retry(() => installDefaultBrowsersForNpmInstall());
}

install();
@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");

@@ -3,6 +3,8 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import * as path from 'path';
build/azure-pipelines/common/publish-webview.js (new file, 71 lines)
@@ -0,0 +1,71 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const path_1 = require("path");
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService, container) {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService, container, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, container, blobName, file) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, files) {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = path_1.basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main() {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => path_1.join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();
build/azure-pipelines/common/publish-webview.sh (new executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"

# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"
build/azure-pipelines/common/publish-webview.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';

const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];

async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise<void>((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}

async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}

async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};

await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}

async function publish(commit: string, files: readonly string[]): Promise<void> {

console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;

const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));

await assertContainer(blobService, commit);

for (const file of files) {
const blobName = basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}

console.log('Blobs successfully uploaded.');
}

function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];

if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}

const opts = minimist(process.argv.slice(2));
const [directory] = opts._;

const files = fileNames.map(fileName => join(directory, fileName));

publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}

if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();
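publish-webview.ts wraps every callback-style `azure-storage` call in a hand-written `new Promise((c, e) => ...)` shim. The same effect can be had with Node's `util.promisify`; a hedged sketch under that assumption, with illustrative helper names and an explicit callback signature to keep the overloads out of the way:

```ts
import * as azure from 'azure-storage';
import { promisify } from 'util';

// Sketch: bind first so promisify keeps the blob service as `this`.
async function blobExists(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
	const doesBlobExist = promisify(
		blobService.doesBlobExist.bind(blobService) as
			(container: string, blob: string, cb: (err: Error | null, result: azure.BlobService.BlobResult) => void) => void
	);
	const result = await doesBlobExist(container, blobName);
	return result.exists;
}
```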
@@ -6,10 +6,10 @@
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const crypto = require("crypto");
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const documentdb_1 = require("documentdb");
const storage_blob_1 = require("@azure/storage-blob");
// {{SQL CARBON EDIT}}
if (process.argv.length < 9) {
console.error('Usage: node publish.js <product_quality> <platform> <file_type> <file_name> <version> <is_update> <file> [commit_id]');
@@ -104,23 +104,21 @@ function createOrUpdate(commit, quality, platform, type, release, asset, isUpdat
});
}));
}
async function assertContainer(containerClient) {
let containerResponse = await containerClient.createIfNotExists({ access: 'blob' });
return containerResponse && !!containerResponse.errorCode;
async function assertContainer(blobService, quality) {
await new Promise((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function uploadBlob(blobClient, file) {
const result = await blobClient.uploadFile(file, {
blobHTTPHeaders: {
blobContentType: mime.lookup(file),
blobCacheControl: 'max-age=31536000, public'
async function doesAssetExist(blobService, quality, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, quality, blobName, file) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
});
if (result && !result.errorCode) {
console.log(`Blobs uploaded successfully, response status: ${result?._response?.status}`);
}
else {
console.error(`Blobs failed to upload, response status: ${result?._response?.status}, errorcode: ${result?.errorCode}`);
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, quality, platform, type, name, version, _isUpdate, file, opts) {
const isUpdate = _isUpdate === 'true';
@@ -144,62 +142,54 @@ async function publish(commit, quality, platform, type, name, version, _isUpdate
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + name;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
const storageKey = process.env['AZURE_STORAGE_ACCESS_KEY_2'];
const connectionString = `DefaultEndpointsProtocol=https;AccountName=${storageAccount};AccountKey=${storageKey};EndpointSuffix=core.windows.net`;
let blobServiceClient = storage_blob_1.BlobServiceClient.fromConnectionString(connectionString, {
retryOptions: {
maxTries: 20,
retryPolicyType: storage_blob_1.StorageRetryPolicyType.EXPONENTIAL
}
});
let containerClient = blobServiceClient.getContainerClient(quality);
if (await assertContainer(containerClient)) {
const blobClient = containerClient.getBlockBlobClient(blobName);
const blobExists = await blobClient.exists();
if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');
await uploadBlob(blobClient, file);
const config = await getConfig(quality);
console.log('Quality config:', config);
const asset = {
platform: platform,
type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
};
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, '  '));
// {{SQL CARBON EDIT}}
// Insiders: nightly build from main
const isReleased = (((quality === 'insider' && /^main$|^refs\/heads\/main$/.test(sourceBranch)) ||
(quality === 'rc1' && /^release\/|^refs\/heads\/release\//.test(sourceBranch))) &&
/Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy));
const release = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: isReleased,
sourceBranch,
queuedBy,
assets: [],
updates: {}
};
if (!opts['upload-only']) {
release.assets.push(asset);
if (isUpdate) {
release.updates[platform] = type;
}
}
await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, quality);
const blobExists = await doesAssetExist(blobService, quality, blobName);
if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');
await uploadBlob(blobService, quality, blobName, file);
console.log('Blobs successfully uploaded.');
const config = await getConfig(quality);
console.log('Quality config:', config);
const asset = {
platform: platform,
type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
};
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, '  '));
// {{SQL CARBON EDIT}}
// Insiders: nightly build from main
const isReleased = (((quality === 'insider' && /^main$|^refs\/heads\/main$/.test(sourceBranch)) ||
(quality === 'rc1' && /^release\/|^refs\/heads\/release\//.test(sourceBranch))) &&
/Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy));
const release = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: isReleased,
sourceBranch,
queuedBy,
assets: [],
updates: {}
};
if (!opts['upload-only']) {
release.assets.push(asset);
if (isUpdate) {
release.updates[platform] = type;
}
}
await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
}
const RETRY_TIMES = 10;
async function retry(fn) {
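The hunk above ends just as the local `retry` helper is declared (`RETRY_TIMES = 10`), and the shared `./retry` module the other scripts import is not shown anywhere in this compare. For orientation only, a minimal sketch of what such a helper typically looks like; the delay value and messages are assumptions, not the repository's implementation:

```ts
// Sketch only: the real build/azure-pipelines/common/retry module may differ.
async function retry<T>(fn: () => Promise<T>, retries = 10, delayMs = 6000): Promise<T> {
	let lastError: unknown;
	for (let attempt = 1; attempt <= retries; attempt++) {
		try {
			return await fn();
		} catch (err) {
			lastError = err;
			console.warn(`Attempt ${attempt} failed, retrying in ${delayMs}ms...`);
			await new Promise(resolve => setTimeout(resolve, delayMs));
		}
	}
	throw lastError;
}
```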
@@ -8,10 +8,10 @@
|
||||
import * as fs from 'fs';
|
||||
import { Readable } from 'stream';
|
||||
import * as crypto from 'crypto';
|
||||
import * as azure from 'azure-storage';
|
||||
import * as mime from 'mime';
|
||||
import * as minimist from 'minimist';
|
||||
import { DocumentClient, NewDocument } from 'documentdb';
|
||||
import { BlobServiceClient, BlockBlobClient, ContainerClient, StorageRetryPolicyType } from '@azure/storage-blob';
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
if (process.argv.length < 9) {
|
||||
@@ -127,23 +127,24 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
|
||||
}));
|
||||
}
|
||||
|
||||
async function assertContainer(containerClient: ContainerClient): Promise<boolean> {
|
||||
let containerResponse = await containerClient.createIfNotExists({ access: 'blob' });
|
||||
return containerResponse && !!containerResponse.errorCode;
|
||||
async function assertContainer(blobService: azure.BlobService, quality: string): Promise<void> {
|
||||
await new Promise<void>((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
async function uploadBlob(blobClient: BlockBlobClient, file: string): Promise<void> {
|
||||
const result = await blobClient.uploadFile(file, {
|
||||
blobHTTPHeaders: {
|
||||
blobContentType: mime.lookup(file),
|
||||
blobCacheControl: 'max-age=31536000, public'
|
||||
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
|
||||
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
|
||||
return existsResult.exists;
|
||||
}
|
||||
|
||||
async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
|
||||
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
|
||||
contentSettings: {
|
||||
contentType: mime.lookup(file),
|
||||
cacheControl: 'max-age=31536000, public'
|
||||
}
|
||||
});
|
||||
if (result && !result.errorCode) {
|
||||
console.log(`Blobs uploaded successfully, response status: ${result?._response?.status}`);
|
||||
} else {
|
||||
console.error(`Blobs failed to upload, response status: ${result?._response?.status}, errorcode: ${result?.errorCode}`)
|
||||
}
|
||||
};
|
||||
|
||||
await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
interface PublishOptions {
|
||||
@@ -179,78 +180,74 @@ async function publish(commit: string, quality: string, platform: string, type:

const blobName = commit + '/' + name;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
const storageKey = process.env['AZURE_STORAGE_ACCESS_KEY_2']!;
const connectionString = `DefaultEndpointsProtocol=https;AccountName=${storageAccount};AccountKey=${storageKey};EndpointSuffix=core.windows.net`;

let blobServiceClient = BlobServiceClient.fromConnectionString(connectionString, {
retryOptions: {
maxTries: 20,
retryPolicyType: StorageRetryPolicyType.EXPONENTIAL
}
});
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));

let containerClient = blobServiceClient.getContainerClient(quality);
if (await assertContainer(containerClient)) {
const blobClient = containerClient.getBlockBlobClient(blobName);
const blobExists = await blobClient.exists();
await assertContainer(blobService, quality);

if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
const blobExists = await doesAssetExist(blobService, quality, blobName);

console.log('Uploading blobs to Azure storage...');
await uploadBlob(blobClient, file);
const config = await getConfig(quality);

console.log('Quality config:', config);
const asset: Asset = {
platform: platform,
type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
};

// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}

console.log('Asset:', JSON.stringify(asset, null, ' '));

// {{SQL CARBON EDIT}}
// Insiders: nightly build from main
const isReleased = (
(
(quality === 'insider' && /^main$|^refs\/heads\/main$/.test(sourceBranch)) ||
(quality === 'rc1' && /^release\/|^refs\/heads\/release\//.test(sourceBranch))
) &&
/Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy)
);

const release = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: isReleased,
sourceBranch,
queuedBy,
assets: [] as Array<Asset>,
updates: {} as any
};

if (!opts['upload-only']) {
release.assets.push(asset);

if (isUpdate) {
release.updates[platform] = type;
}
}

await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');

await uploadBlob(blobService, quality, blobName, file);

console.log('Blobs successfully uploaded.');

const config = await getConfig(quality);

console.log('Quality config:', config);

const asset: Asset = {
platform: platform,
type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
};

// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}

console.log('Asset:', JSON.stringify(asset, null, ' '));

// {{SQL CARBON EDIT}}
// Insiders: nightly build from main
const isReleased = (
(
(quality === 'insider' && /^main$|^refs\/heads\/main$/.test(sourceBranch)) ||
(quality === 'rc1' && /^release\/|^refs\/heads\/release\//.test(sourceBranch))
) &&
/Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy)
);

const release = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: isReleased,
sourceBranch,
queuedBy,
assets: [] as Array<Asset>,
updates: {} as any
};

if (!opts['upload-only']) {
release.assets.push(asset);

if (isUpdate) {
release.updates[platform] = type;
}
}

await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
}

const RETRY_TIMES = 10;
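The hunk above swaps the legacy `azure-storage` client (`createBlobService` with its `doesAssetExist`/`uploadBlob` helpers) for the newer `@azure/storage-blob` SDK (`BlobServiceClient` → `ContainerClient` → `BlockBlobClient`). A minimal sketch of the new-style flow, assuming the same environment variables as the diff; `getBlobClient` and `uploadIfMissing` are illustrative names, not the repo's actual helpers, and the upload options are placeholders:

```ts
import { BlobServiceClient, BlockBlobClient, StorageRetryPolicyType } from '@azure/storage-blob';

// Illustrative sketch only: the real createAsset.ts defines its own assertContainer/uploadBlob.
function getBlobClient(quality: string, blobName: string): BlockBlobClient {
	const account = process.env['AZURE_STORAGE_ACCOUNT_2']!;
	const key = process.env['AZURE_STORAGE_ACCESS_KEY_2']!;
	const connectionString = `DefaultEndpointsProtocol=https;AccountName=${account};AccountKey=${key};EndpointSuffix=core.windows.net`;

	const service = BlobServiceClient.fromConnectionString(connectionString, {
		retryOptions: { maxTries: 20, retryPolicyType: StorageRetryPolicyType.EXPONENTIAL }
	});

	return service.getContainerClient(quality).getBlockBlobClient(blobName);
}

async function uploadIfMissing(blobClient: BlockBlobClient, filePath: string): Promise<void> {
	if (await blobClient.exists()) {
		console.log(`Blob ${blobClient.name} already exists, skipping upload.`);
		return;
	}
	// uploadFile streams the local file into a block blob (Node.js-only API).
	await blobClient.uploadFile(filePath, {
		blobHTTPHeaders: { blobCacheControl: 'max-age=31536000, public' }
	});
}
```

The retry policy moves into the SDK's pipeline options here, which is why the explicit `ExponentialRetryPolicyFilter(20)` disappears from the new code path.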
@@ -1,10 +1,9 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const identity_1 = require("@azure/identity");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
function getEnv(name) {
@@ -29,10 +28,9 @@ async function getConfig(client, quality) {
return res.resources[0];
}
async function main() {
const commit = process.env['VSCODE_DISTRO_COMMIT'] || getEnv('BUILD_SOURCEVERSION');
const commit = getEnv('BUILD_SOURCEVERSION');
const quality = getEnv('VSCODE_QUALITY');
const aadCredentials = new identity_1.ClientSecretCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_CLIENT_SECRET']);
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], aadCredentials });
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const config = await getConfig(client, quality);
console.log('Quality config:', config);
if (config.frozen) {
@@ -41,7 +39,7 @@ async function main() {
}
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {
console.log('Build successfully released');
@@ -3,7 +3,8 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { ClientSecretCredential } from '@azure/identity';
'use strict';

import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry';

@@ -42,11 +43,10 @@ async function getConfig(client: CosmosClient, quality: string): Promise<Config>
}

async function main(): Promise<void> {
const commit = process.env['VSCODE_DISTRO_COMMIT'] || getEnv('BUILD_SOURCEVERSION');
const commit = getEnv('BUILD_SOURCEVERSION');
const quality = getEnv('VSCODE_QUALITY');

const aadCredentials = new ClientSecretCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, process.env['AZURE_CLIENT_SECRET']!);
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, aadCredentials });
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const config = await getConfig(client, quality);

console.log('Quality config:', config);
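Both `releaseBuild.js` and its TypeScript source show the same change: the Cosmos DB client moves from the `AZURE_DOCUMENTDB_MASTERKEY` shared key to an AAD service principal via `@azure/identity`. A hedged, stand-alone sketch of the two construction styles, reusing the environment variable names from the diff:

```ts
import { ClientSecretCredential } from '@azure/identity';
import { CosmosClient } from '@azure/cosmos';

const endpoint = process.env['AZURE_DOCUMENTDB_ENDPOINT']!;

// Key-based auth: a single shared secret grants full access to the account.
const keyClient = new CosmosClient({ endpoint, key: process.env['AZURE_DOCUMENTDB_MASTERKEY']! });

// AAD-based auth: a service principal (tenant/client id + secret) is exchanged for tokens,
// so access can be scoped and rotated without touching the account key.
const aadCredentials = new ClientSecretCredential(
	process.env['AZURE_TENANT_ID']!,
	process.env['AZURE_CLIENT_ID']!,
	process.env['AZURE_CLIENT_SECRET']!
);
const aadClient = new CosmosClient({ endpoint, aadCredentials });
```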
@@ -1,28 +1,25 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.retry = void 0;
async function retry(fn) {
let lastError;
for (let run = 1; run <= 10; run++) {
try {
return await fn();
}
catch (err) {
if (!/ECONNRESET|CredentialUnavailableError|Audience validation failed/i.test(err.message)) {
if (!/ECONNRESET/.test(err.message)) {
throw err;
}
lastError = err;
const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
console.log(`Request failed, retrying in ${millis}ms...`);
console.log(`Failed with ECONNRESET, retrying in ${millis}ms...`);
// maximum delay is 10th retry: ~3 seconds
await new Promise(c => setTimeout(c, millis));
}
}
console.log(`Too many retries, aborting.`);
throw lastError;
throw new Error('Retried too many times');
}
exports.retry = retry;
@@ -3,26 +3,24 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

export async function retry<T>(fn: () => Promise<T>): Promise<T> {
let lastError: Error | undefined;
'use strict';

export async function retry<T>(fn: () => Promise<T>): Promise<T> {
for (let run = 1; run <= 10; run++) {
try {
return await fn();
} catch (err) {
if (!/ECONNRESET|CredentialUnavailableError|Audience validation failed/i.test(err.message)) {
if (!/ECONNRESET/.test(err.message)) {
throw err;
}

lastError = err;
const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
console.log(`Request failed, retrying in ${millis}ms...`);
console.log(`Failed with ECONNRESET, retrying in ${millis}ms...`);

// maximum delay is 10th retry: ~3 seconds
await new Promise(c => setTimeout(c, millis));
}
}

console.log(`Too many retries, aborting.`);
throw lastError;
throw new Error('Retried too many times');
}
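The retry helper above caps at 10 attempts with jittered exponential backoff: `50 * 1.5^run` milliseconds plus up to 200 ms of random jitter, so the 10th retry waits roughly 50 * 1.5^10 ≈ 2.9 s, which is where the "~3 seconds" comment comes from. A small sketch of the schedule and of the call-site shape used by `releaseBuild.ts` in this diff (the structural type on `scripts` is an assumption for self-containment):

```ts
import { retry } from './retry';

// Worked example of the backoff schedule (without the 0-200 ms jitter):
// run 1 → 75 ms, run 5 → ~380 ms, run 10 → ~2883 ms.
for (let run = 1; run <= 10; run++) {
	console.log(`run ${run}: ${(50 * Math.pow(1.5, run)).toFixed(0)} ms`);
}

// Call-site shape mirroring releaseBuild.ts: the stored procedure is retried only while
// the error matches the transient patterns (ECONNRESET, credential/audience failures).
async function releaseWithRetry(
	scripts: { storedProcedure(name: string): { execute(partitionKey: string, params: unknown[]): Promise<unknown> } },
	commit: string
) {
	return retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
```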
@@ -1,17 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const sign_1 = require("./sign");
const path = require("path");
(0, sign_1.main)([
process.env['EsrpCliDllPath'],
'windows',
process.env['ESRPPKI'],
process.env['ESRPAADUsername'],
process.env['ESRPAADPassword'],
path.dirname(process.argv[2]),
path.basename(process.argv[2])
]);
@@ -1,17 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { main } from './sign';
import * as path from 'path';

main([
process.env['EsrpCliDllPath']!,
'windows',
process.env['ESRPPKI']!,
process.env['ESRPAADUsername']!,
process.env['ESRPAADPassword']!,
path.dirname(process.argv[2]),
path.basename(process.argv[2])
]);
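The two deleted `sign-win32` wrappers only forward ESRP environment variables to `main` from `./sign`, splitting the single CLI argument into a folder and a file pattern. An annotated restatement of that positional tuple, matching the `main([esrpCliPath, type, cert, username, password, folderPath, pattern])` signature seen in the `sign.ts` hunk below; the example argument is hypothetical:

```ts
import * as path from 'path';

// Positional order expected by main: [esrpCliPath, type, cert, username, password, folderPath, pattern].
// process.argv[2] is the file to sign, e.g. a built installer (illustrative, supplied by the pipeline).
const target = process.argv[2];
const folderPath = path.dirname(target);  // handed to the ESRP CLI as -f: folder containing the file
const pattern = path.basename(target);    // handed to the ESRP CLI as -p: pattern matching just that file

console.log(`signing ${pattern} in ${folderPath}`);
```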
@@ -1,85 +0,0 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.main = void 0;
|
||||
const cp = require("child_process");
|
||||
const fs = require("fs");
|
||||
const tmp = require("tmp");
|
||||
const crypto = require("crypto");
|
||||
function getParams(type) {
|
||||
switch (type) {
|
||||
case 'windows':
|
||||
return '[{"keyCode":"CP-230012","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"Append","parameterValue":"/as"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-230012","operationSetCode":"SigntoolVerify","parameters":[{"parameterName":"VerifyAll","parameterValue":"/all"}],"toolName":"sign","toolVersion":"1.0"}]';
|
||||
case 'rpm':
|
||||
return '[{ "keyCode": "CP-450779-Pgp", "operationSetCode": "LinuxSign", "parameters": [], "toolName": "sign", "toolVersion": "1.0" }]';
|
||||
case 'darwin-sign':
|
||||
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppDeveloperSign","parameters":[{"parameterName":"Hardening","parameterValue":"--options=runtime"}],"toolName":"sign","toolVersion":"1.0"}]';
|
||||
case 'darwin-notarize':
|
||||
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppNotarize","parameters":[],"toolName":"sign","toolVersion":"1.0"}]';
|
||||
default:
|
||||
throw new Error(`Sign type ${type} not found`);
|
||||
}
|
||||
}
|
||||
function main([esrpCliPath, type, cert, username, password, folderPath, pattern]) {
|
||||
tmp.setGracefulCleanup();
|
||||
const patternPath = tmp.tmpNameSync();
|
||||
fs.writeFileSync(patternPath, pattern);
|
||||
const paramsPath = tmp.tmpNameSync();
|
||||
fs.writeFileSync(paramsPath, getParams(type));
|
||||
const keyFile = tmp.tmpNameSync();
|
||||
const key = crypto.randomBytes(32);
|
||||
const iv = crypto.randomBytes(16);
|
||||
fs.writeFileSync(keyFile, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));
|
||||
const clientkeyPath = tmp.tmpNameSync();
|
||||
const clientkeyCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
|
||||
let clientkey = clientkeyCypher.update(password, 'utf8', 'hex');
|
||||
clientkey += clientkeyCypher.final('hex');
|
||||
fs.writeFileSync(clientkeyPath, clientkey);
|
||||
const clientcertPath = tmp.tmpNameSync();
|
||||
const clientcertCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
|
||||
let clientcert = clientcertCypher.update(cert, 'utf8', 'hex');
|
||||
clientcert += clientcertCypher.final('hex');
|
||||
fs.writeFileSync(clientcertPath, clientcert);
|
||||
const args = [
|
||||
esrpCliPath,
|
||||
'vsts.sign',
|
||||
'-a', username,
|
||||
'-k', clientkeyPath,
|
||||
'-z', clientcertPath,
|
||||
'-f', folderPath,
|
||||
'-p', patternPath,
|
||||
'-u', 'false',
|
||||
'-x', 'regularSigning',
|
||||
'-b', 'input.json',
|
||||
'-l', 'AzSecPack_PublisherPolicyProd.xml',
|
||||
'-y', 'inlineSignParams',
|
||||
'-j', paramsPath,
|
||||
'-c', '9997',
|
||||
'-t', '120',
|
||||
'-g', '10',
|
||||
'-v', 'Tls12',
|
||||
'-s', 'https://api.esrp.microsoft.com/api/v1',
|
||||
'-m', '0',
|
||||
'-o', 'Microsoft',
|
||||
'-i', 'https://www.microsoft.com',
|
||||
'-n', '5',
|
||||
'-r', 'true',
|
||||
'-e', keyFile,
|
||||
];
|
||||
try {
|
||||
cp.execFileSync('dotnet', args, { stdio: 'inherit' });
|
||||
}
|
||||
catch (err) {
|
||||
console.error('ESRP failed');
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
exports.main = main;
|
||||
if (require.main === module) {
|
||||
main(process.argv.slice(2));
|
||||
process.exit(0);
|
||||
}
|
||||
@@ -1,91 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as cp from 'child_process';
|
||||
import * as fs from 'fs';
|
||||
import * as tmp from 'tmp';
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
function getParams(type: string): string {
|
||||
switch (type) {
|
||||
case 'windows':
|
||||
return '[{"keyCode":"CP-230012","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"Append","parameterValue":"/as"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-230012","operationSetCode":"SigntoolVerify","parameters":[{"parameterName":"VerifyAll","parameterValue":"/all"}],"toolName":"sign","toolVersion":"1.0"}]';
|
||||
case 'rpm':
|
||||
return '[{ "keyCode": "CP-450779-Pgp", "operationSetCode": "LinuxSign", "parameters": [], "toolName": "sign", "toolVersion": "1.0" }]';
|
||||
case 'darwin-sign':
|
||||
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppDeveloperSign","parameters":[{"parameterName":"Hardening","parameterValue":"--options=runtime"}],"toolName":"sign","toolVersion":"1.0"}]';
|
||||
case 'darwin-notarize':
|
||||
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppNotarize","parameters":[],"toolName":"sign","toolVersion":"1.0"}]';
|
||||
default:
|
||||
throw new Error(`Sign type ${type} not found`);
|
||||
}
|
||||
}
|
||||
|
||||
export function main([esrpCliPath, type, cert, username, password, folderPath, pattern]: string[]) {
|
||||
tmp.setGracefulCleanup();
|
||||
|
||||
const patternPath = tmp.tmpNameSync();
|
||||
fs.writeFileSync(patternPath, pattern);
|
||||
|
||||
const paramsPath = tmp.tmpNameSync();
|
||||
fs.writeFileSync(paramsPath, getParams(type));
|
||||
|
||||
const keyFile = tmp.tmpNameSync();
|
||||
const key = crypto.randomBytes(32);
|
||||
const iv = crypto.randomBytes(16);
|
||||
fs.writeFileSync(keyFile, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));
|
||||
|
||||
const clientkeyPath = tmp.tmpNameSync();
|
||||
const clientkeyCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
|
||||
let clientkey = clientkeyCypher.update(password, 'utf8', 'hex');
|
||||
clientkey += clientkeyCypher.final('hex');
|
||||
fs.writeFileSync(clientkeyPath, clientkey);
|
||||
|
||||
const clientcertPath = tmp.tmpNameSync();
|
||||
const clientcertCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
|
||||
let clientcert = clientcertCypher.update(cert, 'utf8', 'hex');
|
||||
clientcert += clientcertCypher.final('hex');
|
||||
fs.writeFileSync(clientcertPath, clientcert);
|
||||
|
||||
const args = [
|
||||
esrpCliPath,
|
||||
'vsts.sign',
|
||||
'-a', username,
|
||||
'-k', clientkeyPath,
|
||||
'-z', clientcertPath,
|
||||
'-f', folderPath,
|
||||
'-p', patternPath,
|
||||
'-u', 'false',
|
||||
'-x', 'regularSigning',
|
||||
'-b', 'input.json',
|
||||
'-l', 'AzSecPack_PublisherPolicyProd.xml',
|
||||
'-y', 'inlineSignParams',
|
||||
'-j', paramsPath,
|
||||
'-c', '9997',
|
||||
'-t', '120',
|
||||
'-g', '10',
|
||||
'-v', 'Tls12',
|
||||
'-s', 'https://api.esrp.microsoft.com/api/v1',
|
||||
'-m', '0',
|
||||
'-o', 'Microsoft',
|
||||
'-i', 'https://www.microsoft.com',
|
||||
'-n', '5',
|
||||
'-r', 'true',
|
||||
'-e', keyFile,
|
||||
];
|
||||
|
||||
try {
|
||||
cp.execFileSync('dotnet', args, { stdio: 'inherit' });
|
||||
} catch (err) {
|
||||
console.error('ESRP failed');
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
main(process.argv.slice(2));
|
||||
process.exit(0);
|
||||
}
|
||||
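Before invoking the ESRP CLI, the deleted `sign.ts` above writes the client certificate and password to temp files encrypted with AES-256-CBC, using a random key/IV pair that is itself persisted and passed via `-e`. A minimal sketch of that encryption step, using the same cipher parameters and only Node's built-in `crypto` plus `tmp` (helper name `encryptToFile` is illustrative):

```ts
import * as crypto from 'crypto';
import * as fs from 'fs';
import * as tmp from 'tmp';

// Same shape as sign.ts: 32-byte key + 16-byte IV for aes-256-cbc, persisted as hex JSON.
const key = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
const keyFile = tmp.tmpNameSync();
fs.writeFileSync(keyFile, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));

function encryptToFile(secret: string): string {
	const cipher = crypto.createCipheriv('aes-256-cbc', key, iv);
	const encrypted = cipher.update(secret, 'utf8', 'hex') + cipher.final('hex');
	const outPath = tmp.tmpNameSync();
	fs.writeFileSync(outPath, encrypted);
	return outPath; // handed to the ESRP CLI via -k (password) or -z (certificate)
}
```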
@@ -1,11 +0,0 @@
{
"tool": "Credential Scanner",
"suppressions": [
{
"file": [
"src/vs/base/test/common/uri.test.ts"
],
"_justification": "These are not passwords, they are URIs."
}
]
}
@@ -1,12 +0,0 @@
{
"instanceUrl": "https://msazure.visualstudio.com/defaultcollection",
"projectName": "One",
"areaPath": "One\\VSCode\\Client",
"iterationPath": "One",
"notificationAliases": [
"sbatten@microsoft.com"
],
"ppe": "false",
"template": "TFSMSAzure",
"codebaseName": "vscode-client"
}
@@ -8,8 +8,6 @@
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<key>com.apple.security.device.audio-input</key>
<true/>
<key>com.apple.security.device.camera</key>
@@ -1,39 +1,39 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "14.x"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
|
||||
inputs:
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
|
||||
inputs:
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||
|
||||
- script: |
|
||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||
displayName: Install Dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
|
||||
inputs:
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
|
||||
inputs:
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- script: |
|
||||
yarn electron x64
|
||||
displayName: Download Electron
|
||||
- script: |
|
||||
yarn electron x64
|
||||
displayName: Download Electron
|
||||
|
||||
# - script: | {{SQL CARBON EDIT}} remove editor checks
|
||||
# yarn monaco-compile-check
|
||||
# displayName: Run Monaco Editor Checks
|
||||
# - script: | {{SQL CARBON EDIT}} remove editor checks
|
||||
# yarn monaco-compile-check
|
||||
# displayName: Run Monaco Editor Checks
|
||||
|
||||
- script: |
|
||||
yarn valid-layers-check
|
||||
@@ -43,21 +43,21 @@ steps:
|
||||
yarn compile
|
||||
displayName: Compile Sources
|
||||
|
||||
# - script: | {{SQL CARBON EDIT}} remove step
|
||||
# yarn download-builtin-extensions
|
||||
# displayName: Download Built-in Extensions
|
||||
# - script: | {{SQL CARBON EDIT}} remove step
|
||||
# yarn download-builtin-extensions
|
||||
# displayName: Download Built-in Extensions
|
||||
|
||||
- script: |
|
||||
./scripts/test.sh --tfs "Unit Tests"
|
||||
displayName: Run Core Unit Tests # {{SQL CARBON EDIT}} Rename to core for clarity
|
||||
displayName: Run Unit Tests (Electron)
|
||||
|
||||
# - script: | {{SQL CARBON EDIT}} disable
|
||||
# yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
|
||||
# displayName: Run Unit Tests (Browser)
|
||||
# - script: | {{SQL CARBON EDIT}} disable
|
||||
# yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
|
||||
# displayName: Run Unit Tests (Browser)
|
||||
|
||||
# - script: | {{SQL CARBON EDIT}} disable
|
||||
# ./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
# displayName: Run Core Integration Tests # {{SQL CARBON EDIT}} Rename to core for clarity
|
||||
# - script: | {{SQL CARBON EDIT}} disable
|
||||
# ./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
# displayName: Run Integration Tests (Electron)
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
inputs:
|
||||
|
||||
@@ -4,5 +4,11 @@
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
</dict>
</plist>
@@ -1,14 +1,13 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "16.x"
|
||||
versionSpec: "14.x"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
inputs:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
SecretsFilter: "github-distro-mixin-password,ESRP-PKI,esrp-aad-username,esrp-aad-password"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -22,79 +21,18 @@ steps:
|
||||
git config user.name "VSCode"
|
||||
displayName: Prepare tooling
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git fetch https://github.com/$(VSCODE_MIXIN_REPO).git $VSCODE_DISTRO_REF
|
||||
echo "##vso[task.setvariable variable=VSCODE_DISTRO_COMMIT;]$(git rev-parse FETCH_HEAD)"
|
||||
git checkout FETCH_HEAD
|
||||
condition: and(succeeded(), ne(variables.VSCODE_DISTRO_REF, ' '))
|
||||
displayName: Checkout override commit
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js x64 $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
key: "nodeModules | $(Agent.OS) | .build/yarnlockhash"
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
displayName: Extract node_modules cache
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm install -g node-gyp@latest
|
||||
node-gyp --version
|
||||
displayName: Update node-gyp
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
retryCountOnTaskFailure: 3
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch=$(VSCODE_ARCH)
|
||||
export npm_config_node_gyp=$(which node-gyp)
|
||||
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile --check-files && break
|
||||
if [ $i -eq 3 ]; then
|
||||
echo "Yarn failed too many times" >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
GITHUB_TOKEN: "$(github-distro-mixin-password)"
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
pushd build \
|
||||
&& yarn \
|
||||
&& npm install -g typescript \
|
||||
&& tsc azure-pipelines/common/createAsset.ts \
|
||||
&& popd
|
||||
displayName: Restore modules for just build folder and compile it
|
||||
|
||||
- download: current
|
||||
artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
|
||||
@@ -106,22 +44,60 @@ steps:
|
||||
mv $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
|
||||
displayName: Unzip & move
|
||||
|
||||
- task: UseDotNet@2
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
version: 2.x
|
||||
|
||||
- task: EsrpClientTool@1
|
||||
displayName: Download ESRPClient
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-sign $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(agent.builddirectory) VSCode-darwin-$(VSCODE_ARCH).zip
|
||||
ConnectedServiceName: "ESRP CodeSign"
|
||||
FolderPath: "$(agent.builddirectory)"
|
||||
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: |
|
||||
[
|
||||
{
|
||||
"keyCode": "CP-401337-Apple",
|
||||
"operationSetCode": "MacAppDeveloperSign",
|
||||
"parameters": [
|
||||
{
|
||||
"parameterName": "Hardening",
|
||||
"parameterValue": "--options=runtime"
|
||||
}
|
||||
],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
SessionTimeout: 60
|
||||
displayName: Codesign
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-notarize $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(agent.builddirectory) VSCode-darwin-$(VSCODE_ARCH).zip
|
||||
displayName: Notarize
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
|
||||
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
|
||||
displayName: Export bundle identifier
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
ConnectedServiceName: "ESRP CodeSign"
|
||||
FolderPath: "$(agent.builddirectory)"
|
||||
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: |
|
||||
[
|
||||
{
|
||||
"keyCode": "CP-401337-Apple",
|
||||
"operationSetCode": "MacAppNotarize",
|
||||
"parameters": [
|
||||
{
|
||||
"parameterName": "BundleId",
|
||||
"parameterValue": "$(BundleIdentifier)"
|
||||
}
|
||||
],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
SessionTimeout: 60
|
||||
displayName: Notarization
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
@@ -1,217 +0,0 @@
|
||||
parameters:
|
||||
- name: VSCODE_QUALITY
|
||||
type: string
|
||||
- name: VSCODE_RUN_UNIT_TESTS
|
||||
type: boolean
|
||||
- name: VSCODE_RUN_INTEGRATION_TESTS
|
||||
type: boolean
|
||||
- name: VSCODE_RUN_SMOKE_TESTS
|
||||
type: boolean
|
||||
|
||||
steps:
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
|
||||
displayName: Download Electron and Playwright
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_RUN_UNIT_TESTS, true) }}:
|
||||
- ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
./scripts/test.sh --tfs "Unit Tests"
|
||||
displayName: Run unit tests (Electron)
|
||||
timeoutInMinutes: 15
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn test-node
|
||||
displayName: Run unit tests (node.js)
|
||||
timeoutInMinutes: 15
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
DEBUG=*browser* yarn test-browser-no-install --sequential --browser chromium --browser webkit --tfs "Browser Unit Tests"
|
||||
displayName: Run unit tests (Browser, Chromium & Webkit)
|
||||
timeoutInMinutes: 30
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
./scripts/test.sh --build --tfs "Unit Tests"
|
||||
displayName: Run unit tests (Electron)
|
||||
timeoutInMinutes: 15
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn test-node --build
|
||||
displayName: Run unit tests (node.js)
|
||||
timeoutInMinutes: 15
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
DEBUG=*browser* yarn test-browser-no-install --sequential --build --browser chromium --browser webkit --tfs "Browser Unit Tests"
|
||||
displayName: Run unit tests (Browser, Chromium & Webkit)
|
||||
timeoutInMinutes: 30
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, true) }}:
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp \
|
||||
compile-extension:configuration-editing \
|
||||
compile-extension:css-language-features-server \
|
||||
compile-extension:emmet \
|
||||
compile-extension:git \
|
||||
compile-extension:github-authentication \
|
||||
compile-extension:html-language-features-server \
|
||||
compile-extension:ipynb \
|
||||
compile-extension:json-language-features-server \
|
||||
compile-extension:markdown-language-features-server \
|
||||
compile-extension:markdown-language-features \
|
||||
compile-extension-media \
|
||||
compile-extension:microsoft-authentication \
|
||||
compile-extension:typescript-language-features \
|
||||
compile-extension:vscode-api-tests \
|
||||
compile-extension:vscode-colorize-tests \
|
||||
compile-extension:vscode-notebook-tests \
|
||||
compile-extension:vscode-test-resolver
|
||||
displayName: Build integration tests
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
displayName: Run integration tests (Electron)
|
||||
timeoutInMinutes: 20
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
# Figure out the full absolute path of the product we just built
|
||||
# including the remote server and configure the integration tests
|
||||
# to run with these builds instead of running out of sources.
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin-$(VSCODE_ARCH)" \
|
||||
./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||
displayName: Run integration tests (Electron)
|
||||
timeoutInMinutes: 20
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin-$(VSCODE_ARCH)" \
|
||||
./scripts/test-web-integration.sh --browser webkit
|
||||
displayName: Run integration tests (Browser, Webkit)
|
||||
timeoutInMinutes: 20
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin-$(VSCODE_ARCH)" \
|
||||
./scripts/test-remote-integration.sh
|
||||
displayName: Run integration tests (Remote)
|
||||
timeoutInMinutes: 20
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_RUN_SMOKE_TESTS, true) }}:
|
||||
- script: |
|
||||
set -e
|
||||
ps -ef
|
||||
displayName: Diagnostics before smoke test run
|
||||
continueOnError: true
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
yarn --cwd test/smoke compile
|
||||
displayName: Compile smoke tests
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn smoketest-no-compile --tracing
|
||||
timeoutInMinutes: 20
|
||||
displayName: Run smoke tests (Electron)
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
yarn smoketest-no-compile --tracing --build "$APP_ROOT/$APP_NAME"
|
||||
timeoutInMinutes: 20
|
||||
displayName: Run smoke tests (Electron)
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin-$(VSCODE_ARCH)" \
|
||||
yarn smoketest-no-compile --web --tracing --headless
|
||||
timeoutInMinutes: 20
|
||||
displayName: Run smoke tests (Browser, Chromium)
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp compile-extension:vscode-test-resolver
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin-$(VSCODE_ARCH)" \
|
||||
yarn smoketest-no-compile --tracing --remote --build "$APP_ROOT/$APP_NAME"
|
||||
timeoutInMinutes: 20
|
||||
displayName: Run smoke tests (Remote)
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
ps -ef
|
||||
displayName: Diagnostics after smoke test run
|
||||
continueOnError: true
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- ${{ if or(eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, true), eq(parameters.VSCODE_RUN_SMOKE_TESTS, true)) }}:
|
||||
- task: PublishPipelineArtifact@0
|
||||
inputs:
|
||||
targetPath: .build/crashes
|
||||
${{ if and(eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, true), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
|
||||
artifactName: crash-dump-macos-$(VSCODE_ARCH)-integration-$(System.JobAttempt)
|
||||
${{ elseif and(eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, true)) }}:
|
||||
artifactName: crash-dump-macos-$(VSCODE_ARCH)-smoke-$(System.JobAttempt)
|
||||
${{ else }}:
|
||||
artifactName: crash-dump-macos-$(VSCODE_ARCH)-$(System.JobAttempt)
|
||||
displayName: "Publish Crash Reports"
|
||||
continueOnError: true
|
||||
condition: failed()
|
||||
|
||||
# In order to properly symbolify above crash reports
|
||||
# (if any), we need the compiled native modules too
|
||||
- task: PublishPipelineArtifact@0
|
||||
inputs:
|
||||
targetPath: node_modules
|
||||
${{ if and(eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, true), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
|
||||
artifactName: node-modules-macos-$(VSCODE_ARCH)-integration-$(System.JobAttempt)
|
||||
${{ elseif and(eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, true)) }}:
|
||||
artifactName: node-modules-macos-$(VSCODE_ARCH)-smoke-$(System.JobAttempt)
|
||||
${{ else }}:
|
||||
artifactName: node-modules-macos-$(VSCODE_ARCH)-$(System.JobAttempt)
|
||||
displayName: "Publish Node Modules"
|
||||
continueOnError: true
|
||||
condition: failed()
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
inputs:
|
||||
targetPath: .build/logs
|
||||
${{ if and(eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, true), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
|
||||
artifactName: logs-macos-$(VSCODE_ARCH)-integration-$(System.JobAttempt)
|
||||
${{ elseif and(eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, true)) }}:
|
||||
artifactName: logs-macos-$(VSCODE_ARCH)-smoke-$(System.JobAttempt)
|
||||
${{ else }}:
|
||||
artifactName: logs-macos-$(VSCODE_ARCH)-$(System.JobAttempt)
|
||||
displayName: "Publish Log Files"
|
||||
continueOnError: true
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Tests Results
|
||||
inputs:
|
||||
testResultsFiles: "*-results.xml"
|
||||
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
|
||||
condition: succeededOrFailed()
|
||||
@@ -1,139 +0,0 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "16.x"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
inputs:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
SecretsFilter: "github-distro-mixin-password,macos-developer-certificate,macos-developer-certificate-key"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login vscode
|
||||
password $(github-distro-mixin-password)
|
||||
EOF
|
||||
|
||||
git config user.email "vscode@microsoft.com"
|
||||
git config user.name "VSCode"
|
||||
displayName: Prepare tooling
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git fetch https://github.com/$(VSCODE_MIXIN_REPO).git $VSCODE_DISTRO_REF
|
||||
echo "##vso[task.setvariable variable=VSCODE_DISTRO_COMMIT;]$(git rev-parse FETCH_HEAD)"
|
||||
git checkout FETCH_HEAD
|
||||
condition: and(succeeded(), ne(variables.VSCODE_DISTRO_REF, ' '))
|
||||
displayName: Checkout override commit
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js x64 $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
key: "nodeModules | $(Agent.OS) | .build/yarnlockhash"
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
displayName: Extract node_modules cache
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm install -g node-gyp@latest
|
||||
node-gyp --version
|
||||
displayName: Update node-gyp
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
retryCountOnTaskFailure: 3
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch=$(VSCODE_ARCH)
|
||||
export npm_config_node_gyp=$(which node-gyp)
|
||||
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile --check-files && break
|
||||
if [ $i -eq 3 ]; then
|
||||
echo "Yarn failed too many times" >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
GITHUB_TOKEN: "$(github-distro-mixin-password)"
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/mixin
|
||||
displayName: Mix in quality
|
||||
|
||||
- download: current
|
||||
artifact: unsigned_vscode_client_darwin_x64_archive
|
||||
displayName: Download x64 artifact
|
||||
|
||||
- download: current
|
||||
artifact: unsigned_vscode_client_darwin_arm64_archive
|
||||
displayName: Download arm64 artifact
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
cp $(Pipeline.Workspace)/unsigned_vscode_client_darwin_x64_archive/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin-x64.zip
|
||||
cp $(Pipeline.Workspace)/unsigned_vscode_client_darwin_arm64_archive/VSCode-darwin-arm64.zip $(agent.builddirectory)/VSCode-darwin-arm64.zip
|
||||
unzip $(agent.builddirectory)/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64
|
||||
unzip $(agent.builddirectory)/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64
|
||||
DEBUG=* node build/darwin/create-universal-app.js
|
||||
displayName: Create Universal App
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
|
||||
security default-keychain -s $(agent.tempdirectory)/buildagent.keychain
|
||||
security unlock-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
|
||||
echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
|
||||
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
|
||||
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
|
||||
VSCODE_ARCH=$(VSCODE_ARCH) DEBUG=electron-osx-sign* node build/darwin/sign.js
|
||||
displayName: Set Hardened Entitlements
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip * && popd
|
||||
displayName: Archive build
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
|
||||
artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
|
||||
displayName: Publish client archive
|
||||
@@ -1,73 +1,51 @@
|
||||
parameters:
|
||||
- name: VSCODE_PUBLISH
|
||||
type: boolean
|
||||
- name: VSCODE_QUALITY
|
||||
type: string
|
||||
- name: VSCODE_RUN_UNIT_TESTS
|
||||
type: boolean
|
||||
- name: VSCODE_RUN_INTEGRATION_TESTS
|
||||
type: boolean
|
||||
- name: VSCODE_RUN_SMOKE_TESTS
|
||||
type: boolean
|
||||
|
||||
steps:
|
||||
- ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- checkout: self
|
||||
fetchDepth: 1
|
||||
retryCountOnTaskFailure: 3
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "16.x"
|
||||
versionSpec: "14.x"
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
inputs:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
SecretsFilter: "github-distro-mixin-password,macos-developer-certificate,macos-developer-certificate-key"
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
inputs:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login vscode
|
||||
password $(github-distro-mixin-password)
|
||||
EOF
|
||||
# Set up the credentials to retrieve distro repo and setup git persona
|
||||
# to create a merge commit for when we merge distro into oss
|
||||
- script: |
|
||||
set -e
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login vscode
|
||||
password $(github-distro-mixin-password)
|
||||
EOF
|
||||
|
||||
git config user.email "vscode@microsoft.com"
|
||||
git config user.name "VSCode"
|
||||
displayName: Prepare tooling
|
||||
git config user.email "vscode@microsoft.com"
|
||||
git config user.name "VSCode"
|
||||
displayName: Prepare tooling
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
git fetch https://github.com/$(VSCODE_MIXIN_REPO).git $VSCODE_DISTRO_REF
|
||||
echo "##vso[task.setvariable variable=VSCODE_DISTRO_COMMIT;]$(git rev-parse FETCH_HEAD)"
|
||||
git checkout FETCH_HEAD
|
||||
condition: and(succeeded(), ne(variables.VSCODE_DISTRO_REF, ' '))
|
||||
displayName: Checkout override commit
|
||||
- script: |
|
||||
set -e
|
||||
sudo xcode-select -s /Applications/Xcode_12.2.app
|
||||
displayName: Switch to Xcode 12
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
- script: |
|
||||
set -e
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
@@ -76,7 +54,7 @@ steps:
|
||||
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
key: "nodeModules | $(Agent.OS) | .build/yarnlockhash"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
@@ -87,11 +65,17 @@ steps:
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm install -g node-gyp@latest
|
||||
node-gyp --version
|
||||
displayName: Update node-gyp
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
retryCountOnTaskFailure: 3
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
@@ -99,9 +83,11 @@ steps:
|
||||
set -e
|
||||
export npm_config_arch=$(VSCODE_ARCH)
|
||||
export npm_config_node_gyp=$(which node-gyp)
|
||||
export npm_config_build_from_source=true
|
||||
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
||||
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile --check-files && break
|
||||
yarn --frozen-lockfile && break
|
||||
if [ $i -eq 3 ]; then
|
||||
echo "Yarn failed too many times" >&2
|
||||
exit 1
|
||||
@@ -111,7 +97,6 @@ steps:
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
GITHUB_TOKEN: "$(github-distro-mixin-password)"
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
@@ -123,142 +108,206 @@ steps:
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
# This script brings in the right resources (images, icons, etc) based on the quality (insiders, stable, exploration)
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/mixin
|
||||
displayName: Mix in quality
|
||||
# This script brings in the right resources (images, icons, etc) based on the quality (insiders, stable, exploration)
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/mixin
|
||||
displayName: Mix in quality
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
|
||||
displayName: Build client
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
|
||||
displayName: Build client
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/mixin --server
|
||||
displayName: Mix in server quality
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-darwin-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-web-darwin-min-ci
|
||||
displayName: Build Server
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||
|
||||
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-darwin-$(VSCODE_ARCH)-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-web-darwin-$(VSCODE_ARCH)-min-ci
|
||||
displayName: Build Server
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
|
||||
displayName: Download Electron and Playwright
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}:
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp "transpile-client" "transpile-extensions"
|
||||
displayName: Transpile
|
||||
- download: current
|
||||
artifact: unsigned_vscode_client_darwin_x64_archive
|
||||
displayName: Download x64 artifact
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- ${{ if or(eq(parameters.VSCODE_RUN_UNIT_TESTS, true), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, true), eq(parameters.VSCODE_RUN_SMOKE_TESTS, true)) }}:
|
||||
- template: product-build-darwin-test.yml
|
||||
parameters:
|
||||
VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }}
|
||||
VSCODE_RUN_UNIT_TESTS: ${{ parameters.VSCODE_RUN_UNIT_TESTS }}
|
||||
VSCODE_RUN_INTEGRATION_TESTS: ${{ parameters.VSCODE_RUN_INTEGRATION_TESTS }}
|
||||
VSCODE_RUN_SMOKE_TESTS: ${{ parameters.VSCODE_RUN_SMOKE_TESTS }}
|
||||
- download: current
|
||||
artifact: unsigned_vscode_client_darwin_arm64_archive
|
||||
displayName: Download arm64 artifact
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_PUBLISH, true) }}:
|
||||
# Setting hardened entitlements is a requirement for:
|
||||
# * Apple notarization
|
||||
# * Running tests on Big Sur (because Big Sur has additional security precautions)
|
||||
- script: |
|
||||
set -e
|
||||
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
|
||||
security default-keychain -s $(agent.tempdirectory)/buildagent.keychain
|
||||
security unlock-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
|
||||
echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
|
||||
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
|
||||
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
|
||||
VSCODE_ARCH=$(VSCODE_ARCH) DEBUG=electron-osx-sign* node build/darwin/sign.js
|
||||
displayName: Set Hardened Entitlements
|
||||
- script: |
|
||||
set -e
|
||||
cp $(Pipeline.Workspace)/unsigned_vscode_client_darwin_x64_archive/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin-x64.zip
|
||||
cp $(Pipeline.Workspace)/unsigned_vscode_client_darwin_arm64_archive/VSCode-darwin-arm64.zip $(agent.builddirectory)/VSCode-darwin-arm64.zip
|
||||
unzip $(agent.builddirectory)/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64
|
||||
unzip $(agent.builddirectory)/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64
|
||||
DEBUG=* node build/darwin/create-universal-app.js
|
||||
displayName: Create Universal App
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- script: |
set -e
pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip * && popd
displayName: Archive build
# Setting hardened entitlements is a requirement for:
# * Apple notarization
# * Running tests on Big Sur (because Big Sur has additional security precautions)
- script: |
set -e
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
security default-keychain -s $(agent.tempdirectory)/buildagent.keychain
security unlock-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
VSCODE_ARCH=$(VSCODE_ARCH) DEBUG=electron-osx-sign* node build/darwin/sign.js
displayName: Set Hardened Entitlements

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- script: |
set -e
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

# package Remote Extension Host
pushd .. && mv vscode-reh-darwin-$(VSCODE_ARCH) vscode-server-darwin-$(VSCODE_ARCH) && zip -Xry vscode-server-darwin-$(VSCODE_ARCH).zip vscode-server-darwin-$(VSCODE_ARCH) && popd
- script: |
set -e
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

# package Remote Extension Host (Web)
pushd .. && mv vscode-reh-web-darwin-$(VSCODE_ARCH) vscode-server-darwin-$(VSCODE_ARCH)-web && zip -Xry vscode-server-darwin-$(VSCODE_ARCH)-web.zip vscode-server-darwin-$(VSCODE_ARCH)-web && popd
displayName: Prepare to publish servers
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
displayName: Generate SBOM (client)
inputs:
BuildDropPath: $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
PackageName: Visual Studio Code
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- publish: $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)/_manifest
displayName: Publish SBOM (client)
artifact: vscode_client_darwin_$(VSCODE_ARCH)_sbom
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
./resources/server/test/test-web-integration.sh --browser webkit
displayName: Run integration tests (Browser)
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
displayName: Generate SBOM (server)
inputs:
BuildDropPath: $(agent.builddirectory)/vscode-server-darwin-$(VSCODE_ARCH)
PackageName: Visual Studio Code Server
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- publish: $(agent.builddirectory)/vscode-server-darwin-$(VSCODE_ARCH)/_manifest
displayName: Publish SBOM (server)
artifact: vscode_server_darwin_$(VSCODE_ARCH)_sbom
- script: |
set -e
yarn --cwd test/smoke compile
displayName: Compile smoke tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
displayName: Publish client archive
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME"
timeoutInMinutes: 5
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- publish: $(Agent.BuildDirectory)/vscode-server-darwin-$(VSCODE_ARCH).zip
artifact: vscode_server_darwin_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish server archive
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --remote
timeoutInMinutes: 5
displayName: Run smoke tests (Remote)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- publish: $(Agent.BuildDirectory)/vscode-server-darwin-$(VSCODE_ARCH)-web.zip
artifact: vscode_web_darwin_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish web server archive
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
yarn smoketest-no-compile --web --headless
timeoutInMinutes: 5
displayName: Run smoke tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- task: AzureCLI@2
inputs:
azureSubscription: "vscode-builds-subscription"
scriptType: pscore
scriptLocation: inlineScript
addSpnToEnvironment: true
inlineScript: |
Write-Host "##vso[task.setvariable variable=AZURE_TENANT_ID]$env:tenantId"
Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_ID]$env:servicePrincipalId"
Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_SECRET;issecret=true]$env:servicePrincipalKey"
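# Note on the inline script above: the Write-Host calls rely on Azure DevOps logging commands.
# Writing a line of the form ##vso[task.setvariable variable=NAME;issecret=true]VALUE to stdout
# exposes VALUE as a pipeline variable ($(NAME)) to later steps, which is how the AZURE_* values
# reach the upload step below. A minimal sketch (step contents are illustrative, not from this pipeline):
#   - bash: echo "##vso[task.setvariable variable=MY_TENANT]contoso"
#   - bash: echo "$(MY_TENANT)"   # prints "contoso" in a later step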
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-macos-$(VSCODE_ARCH)
targetPath: .build/crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()

- ${{ if and(eq(parameters.VSCODE_PUBLISH, true), eq(parameters.VSCODE_RUN_UNIT_TESTS, false), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, false), eq(parameters.VSCODE_RUN_SMOKE_TESTS, false)) }}:
- script: |
set -e
AZURE_STORAGE_ACCOUNT="ticino" \
AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
AZURE_CLIENT_SECRET="$(AZURE_CLIENT_SECRET)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
node build/azure-pipelines/upload-configuration
displayName: Upload configuration (for Bing settings search)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
continueOnError: true
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip * && popd
displayName: Archive build
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))

- script: |
set -e

# package Remote Extension Host
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd

# package Remote Extension Host (Web)
pushd .. && mv vscode-reh-web-darwin vscode-server-darwin-web && zip -Xry vscode-server-darwin-web.zip vscode-server-darwin-web && popd
displayName: Prepare to publish servers
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))

- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
displayName: Publish client archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))

- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
artifact: vscode_server_darwin_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))

- publish: $(Agent.BuildDirectory)/vscode-server-darwin-web.zip
artifact: vscode_web_darwin_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish web server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))

- script: |
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
yarn gulp upload-vscode-configuration
displayName: Upload configuration (for Bing settings search)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
continueOnError: true

@@ -9,27 +9,20 @@ steps:
displayName: 'Download Build Artifacts'
inputs:
downloadType: specific
itemPattern: 'drop/darwin/archive/azuredatastudio-darwin-$(VSCODE_ARCH)-unsigned.zip'
itemPattern: 'drop/darwin/archive/azuredatastudio-darwin-unsigned.zip'
downloadPath: '$(Build.SourcesDirectory)/.build/'

- script: |
pushd $(Build.SourcesDirectory)/.build/drop/darwin/archive
mv azuredatastudio-darwin-$(VSCODE_ARCH)-unsigned.zip azuredatastudio-darwin-$(VSCODE_ARCH).zip
mv azuredatastudio-darwin-unsigned.zip azuredatastudio-darwin.zip
displayName: 'Rename the file'

- task: UseDotNet@2
displayName: 'Install .NET Core sdk for signing'
inputs:
packageType: sdk
version: 2.1.x
installationPath: $(Agent.ToolsDirectory)/dotnet

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'ESRP CodeSigning'
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(Build.SourcesDirectory)/.build/drop/darwin/archive'
Pattern: 'azuredatastudio-darwin-$(VSCODE_ARCH).zip'
Pattern: 'azuredatastudio-darwin.zip'
signConfigType: inlineSignParams
inlineOperation: |
[
@@ -47,7 +40,7 @@ steps:
condition: and(succeeded(), eq(variables['signed'], true))

- script: |
zip -d $(Build.SourcesDirectory)/.build/drop/darwin/archive/azuredatastudio-darwin-$(VSCODE_ARCH).zip "*.pkg"
zip -d $(Build.SourcesDirectory)/.build/drop/darwin/archive/azuredatastudio-darwin.zip "*.pkg"
displayName: Clean Archive
condition: and(succeeded(), eq(variables['signed'], true))

@@ -56,7 +49,7 @@ steps:
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(Build.SourcesDirectory)/.build/drop/darwin/archive'
Pattern: 'azuredatastudio-darwin-$(VSCODE_ARCH).zip'
Pattern: 'azuredatastudio-darwin.zip'
signConfigType: inlineSignParams
inlineOperation: |
[

@@ -17,7 +17,7 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "16.x"
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -52,7 +52,7 @@ steps:

- script: |
mkdir -p .build
node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js $(NPM_CONFIG_ARCH) > .build/yarnlockhash
node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash
displayName: Prepare yarn cache key

- task: Cache@2
@@ -61,7 +61,6 @@ steps:
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
continueOnError: true

- script: |
set -e
@@ -71,7 +70,6 @@ steps:

- script: |
set -e
export npm_config_arch=$(NPM_CONFIG_ARCH)
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
env:
@@ -94,63 +92,49 @@ steps:

- script: |
set -e
node build/azure-pipelines/sql-mixin
node build/azure-pipelines/mixin
displayName: Mix in quality

- script: |
set -e
yarn gulp package-rebuild-extensions
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
yarn gulp vscode-darwin-x64-min-ci
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))

- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests" --coverage
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests"
displayName: Run unit tests
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

# {{SQL CARBON TODO}} Reenable "Run Core Integration Tests"
# - script: |
# # Figure out the full absolute path of the product we just built
# # including the remote server and configure the integration tests
# # to run with these builds instead of running out of sources.
# set -e
# APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
# APP_NAME="`ls $APP_ROOT | head -n 1`"
# INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
# VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-darwin" \
# ./scripts/test-integration.sh --build --tfs "Integration Tests"
# displayName: Run core integration tests
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
set -e
yarn gulp compile-extensions
displayName: Compile Extensions
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))

# Per https://developercommunity.visualstudio.com/t/variablesexpressions-dont-work-with-continueonerro/1187733 we can't use variables
# in continueOnError directly so instead make two copies of the task and only run one or the other based on the SMOKE_FAIL_ON_ERROR value
# {{SQL CARBON TODO}} -- reenable
# - script: |
# set -e
# APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-$(VSCODE_ARCH)
# APP_NAME="`ls $APP_ROOT | head -n 1`"
# yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions" --extraArgs "--disable-extension Microsoft.kusto --disable-extension Microsoft.azuremonitor"
# displayName: Run core smoke tests (Continue on Error)
# continueOnError: true
# condition: and(succeeded(), and(or(eq(variables['RUN_TESTS'], 'true'), eq(variables['RUN_SMOKE_TESTS'], 'true')), ne(variables['SMOKE_FAIL_ON_ERROR'], 'true')))

# - script: |
# set -e
# APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-$(VSCODE_ARCH)
# APP_NAME="`ls $APP_ROOT | head -n 1`"
# yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions"
# displayName: Run core smoke tests (Fail on Error)
# condition: and(succeeded(), and(or(eq(variables['RUN_TESTS'], 'true'), eq(variables['RUN_SMOKE_TESTS'], 'true')), eq(variables['SMOKE_FAIL_ON_ERROR'], 'true')))
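# A minimal sketch of the duplicate-task workaround described in the comment above: because
# continueOnError cannot be driven by a runtime variable, the step is declared twice with opposite
# continueOnError values and mutually exclusive conditions on SMOKE_FAIL_ON_ERROR (the script name
# is illustrative, not from this pipeline):
#   - script: ./run-smoke-tests.sh
#     continueOnError: true
#     condition: and(succeeded(), ne(variables['SMOKE_FAIL_ON_ERROR'], 'true'))
#   - script: ./run-smoke-tests.sh
#     continueOnError: false
#     condition: and(succeeded(), eq(variables['SMOKE_FAIL_ON_ERROR'], 'true'))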
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions"
displayName: Run smoke tests (Electron)
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

# - script: |
# set -e
@@ -161,25 +145,9 @@ steps:
# continueOnError: true
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: DownloadBuildArtifacts@0
displayName: 'Download arm64 and x64 packages'
inputs:
downloadType: specific
itemPattern: 'drop/darwin/archive/azuredatastudio-darwin-@(arm64|x64)-unsigned.zip'
downloadPath: $(agent.builddirectory)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))

- script: |
set -e
unzip $(agent.builddirectory)/drop/darwin/archive/azuredatastudio-darwin-x64-unsigned.zip -d $(agent.builddirectory)/azuredatastudio-darwin-x64
unzip $(agent.builddirectory)/drop/darwin/archive/azuredatastudio-darwin-arm64-unsigned.zip -d $(agent.builddirectory)/azuredatastudio-darwin-arm64
DEBUG=* node build/darwin/create-universal-app.js
displayName: Create Universal App
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))

- script: |
set -e
pushd ../azuredatastudio-darwin-$(VSCODE_ARCH)
pushd ../azuredatastudio-darwin-x64
ls

echo "Cleaning the application"
@@ -209,10 +177,20 @@ steps:
- script: |
set -e
mkdir -p .build/darwin/archive
pushd ../azuredatastudio-darwin-$(VSCODE_ARCH)
ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin-$(VSCODE_ARCH)-unsigned.zip
pushd ../azuredatastudio-darwin-x64
ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
popd
displayName: 'Archive (no signing)'
condition: and(succeeded(), eq(variables['signed'], false))

- script: |
set -e
mkdir -p .build/darwin/archive
pushd ../azuredatastudio-darwin-x64
ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin-unsigned.zip
popd
displayName: 'Archive'
condition: and(succeeded(), eq(variables['signed'], true))

- script: |
set -e
@@ -224,12 +202,12 @@ steps:
condition: always()

- task: PublishTestResults@2
displayName: 'Publish Test Results'
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
testResultsFiles: 'test-results.xml'
searchFolder: '$(Build.SourcesDirectory)'
continueOnError: true
condition: and(succeededOrFailed(), or(eq(variables['RUN_TESTS'], 'true'), eq(variables['RUN_SMOKE_TESTS'], 'true')))
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishCodeCoverageResults@1
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'

@@ -13,22 +13,12 @@ $Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit

$Flavors = "x64","arm64","universal"
$FlavorSuffixes = "","-arm64","-universal"
$ZipName = "azuredatastudio-darwin.zip"
$Zip = "$artifactsDir\darwin\archive\$ZipName"
$UploadName = "azuredatastudio-macos-$Version"

For($i = 0; $i -lt $Flavors.Length; $i++)
{
$Flavor = $Flavors[$i]
$FlavorSuffix = $FlavorSuffixes[$i]
$ZipName = "azuredatastudio-darwin-$Flavor.zip"
$Zip = "$artifactsDir\darwin\archive\$ZipName"
$UploadName = "azuredatastudio-macos$FlavorSuffix-$Version"

If (-NOT ($Quality -eq "stable")) {
$UploadName = "$UploadName-$Quality"
}

$Platform = "darwin$FlavorSuffix"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $Platform archive "$UploadName.zip" $Version true $Zip $CommitId
If (-NOT ($Quality -eq "stable")) {
$UploadName = "$UploadName-$Quality"
}

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive "$UploadName.zip" $Version true $Zip $CommitId

@@ -1,22 +1,23 @@
pool:
vmImage: 'Ubuntu-20.04'
vmImage: 'Ubuntu-18.04'

trigger:
branches:
include: ["main", "release/*"]
pr: none
pr:
branches:
include: ["main", "release/*"]

steps:
- task: NodeTool@0
inputs:
versionSpec: "16.x"
versionSpec: "14.x"

- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password"

- script: |
set -e

@@ -1,5 +1,5 @@
#Download base image ubuntu 22.04
FROM mcr.microsoft.com/mirror/docker/library/ubuntu:22.04
#Download base image ubuntu 21.04
FROM ubuntu:21.04
ENV TZ=America/Los_Angeles
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "16.x"
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -64,7 +64,7 @@ steps:

- script: |
set -e
node build/azure-pipelines/sql-mixin
node build/azure-pipelines/mixin
displayName: Mix in quality

- task: DownloadBuildArtifacts@0

@@ -11,14 +11,13 @@ pr:
steps:
- task: NodeTool@0
inputs:
versionSpec: "16.x"
versionSpec: "14.x"

- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password"

- script: |
set -e

@@ -1,17 +1,12 @@
#Download base image ubuntu 20.04
FROM mcr.microsoft.com/mirror/docker/library/ubuntu:20.04

#Adding apt repos for g++-4.9
RUN echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial main" >> /etc/apt/sources.list
RUN echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial universe" >> /etc/apt/sources.list
#Download base image ubuntu 18.04
FROM ubuntu:18.04

# Update Software repository
RUN apt-get update && apt-get upgrade -y

RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.9 python-dev \
libgbm-dev
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.9

RUN rm /usr/bin/gcc
RUN rm /usr/bin/g++
