Merge from vscode 2cd495805cf99b31b6926f08ff4348124b2cf73d
Committed by AzureDataStudio (parent a8a7559229, commit 1388493cc1)
.devcontainer/Dockerfile (new file, 121 lines)
@@ -0,0 +1,121 @@
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------

FROM mcr.microsoft.com/vscode/devcontainers/typescript-node:0-10

ARG TARGET_DISPLAY=":1"

# VNC options
ARG MAX_VNC_RESOLUTION=1920x1080x16
ARG TARGET_VNC_RESOLUTION=1920x1080
ARG TARGET_VNC_DPI=72
ARG TARGET_VNC_PORT=5901
ARG VNC_PASSWORD="vscode"

# noVNC (VNC web client) options
ARG INSTALL_NOVNC="true"
ARG NOVNC_VERSION=1.1.0
ARG TARGET_NOVNC_PORT=6080
ARG WEBSOCKETIFY_VERSION=0.9.0

# Firefox is useful for testing things like browser launch events, but optional
ARG INSTALL_FIREFOX="false"

# Expected non-root username from base image
ARG USERNAME=node

# Core environment variables for X11, VNC, and fluxbox
ENV DBUS_SESSION_BUS_ADDRESS="autolaunch:" \
	MAX_VNC_RESOLUTION="${MAX_VNC_RESOLUTION}" \
	VNC_RESOLUTION="${TARGET_VNC_RESOLUTION}" \
	VNC_DPI="${TARGET_VNC_DPI}" \
	VNC_PORT="${TARGET_VNC_PORT}" \
	NOVNC_PORT="${TARGET_NOVNC_PORT}" \
	DISPLAY="${TARGET_DISPLAY}" \
	LANG="en_US.UTF-8" \
	LANGUAGE="en_US.UTF-8" \
	VISUAL="nano" \
	EDITOR="nano"

# Configure apt and install packages
RUN apt-get update \
	&& export DEBIAN_FRONTEND=noninteractive \
	#
	# Install the Cascadia Code fonts - https://github.com/microsoft/cascadia-code
	&& curl -sSL https://github.com/microsoft/cascadia-code/releases/download/v2004.30/CascadiaCode_2004.30.zip -o /tmp/cascadia-fonts.zip \
	&& unzip /tmp/cascadia-fonts.zip -d /tmp/cascadia-fonts \
	&& mkdir -p /usr/share/fonts/truetype/cascadia \
	&& mv /tmp/cascadia-fonts/ttf/* /usr/share/fonts/truetype/cascadia/ \
	&& rm -rf /tmp/cascadia-fonts.zip /tmp/cascadia-fonts \
	#
	# Install X11, fluxbox and VS Code dependencies
	&& apt-get -y install --no-install-recommends \
		xvfb \
		x11vnc \
		fluxbox \
		dbus-x11 \
		x11-utils \
		x11-xserver-utils \
		xdg-utils \
		fbautostart \
		xterm \
		eterm \
		gnome-terminal \
		gnome-keyring \
		seahorse \
		nautilus \
		libx11-dev \
		libxkbfile-dev \
		libsecret-1-dev \
		libnotify4 \
		libnss3 \
		libxss1 \
		libasound2 \
		xfonts-base \
		xfonts-terminus \
		fonts-noto \
		fonts-wqy-microhei \
		fonts-droid-fallback \
		vim-tiny \
		nano \
	#
	# [Optional] Install noVNC
	&& if [ "${INSTALL_NOVNC}" = "true" ]; then \
		mkdir -p /usr/local/novnc \
		&& curl -sSL https://github.com/novnc/noVNC/archive/v${NOVNC_VERSION}.zip -o /tmp/novnc-install.zip \
		&& unzip /tmp/novnc-install.zip -d /usr/local/novnc \
		&& cp /usr/local/novnc/noVNC-${NOVNC_VERSION}/vnc_lite.html /usr/local/novnc/noVNC-${NOVNC_VERSION}/index.html \
		&& rm /tmp/novnc-install.zip \
		&& curl -sSL https://github.com/novnc/websockify/archive/v${WEBSOCKETIFY_VERSION}.zip -o /tmp/websockify-install.zip \
		&& unzip /tmp/websockify-install.zip -d /usr/local/novnc \
		&& apt-get -y install --no-install-recommends python-numpy \
		&& ln -s /usr/local/novnc/websockify-${WEBSOCKETIFY_VERSION} /usr/local/novnc/noVNC-${NOVNC_VERSION}/utils/websockify \
		&& rm /tmp/websockify-install.zip; \
	fi \
	#
	# [Optional] Install Firefox
	&& if [ "${INSTALL_FIREFOX}" = "true" ]; then \
		apt-get -y install --no-install-recommends firefox-esr; \
	fi \
	#
	# Clean up
	&& apt-get autoremove -y \
	&& apt-get clean -y \
	&& rm -rf /var/lib/apt/lists/*

COPY bin/init-dev-container.sh /usr/local/share/
COPY bin/set-resolution /usr/local/bin/
COPY fluxbox/* /root/.fluxbox/
COPY fluxbox/* /home/${USERNAME}/.fluxbox/

# Update privs, owners of config files
RUN mkdir -p /var/run/dbus /root/.vnc /home/${USERNAME}/.vnc \
	&& touch /root/.Xmodmap /home/${USERNAME}/.Xmodmap \
	&& echo "${VNC_PASSWORD}" | tee /root/.vnc/passwd > /home/${USERNAME}/.vnc/passwd \
	&& chown -R ${USERNAME}:${USERNAME} /home/${USERNAME}/.Xmodmap /home/${USERNAME}/.fluxbox /home/${USERNAME}/.vnc \
	&& chmod +x /usr/local/share/init-dev-container.sh /usr/local/bin/set-resolution

ENTRYPOINT ["/usr/local/share/init-dev-container.sh"]
CMD ["sleep", "infinity"]
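The image above is normally built for you by the Remote - Containers extension, but it can also be exercised by hand. The commands below are a minimal sketch for building and running it directly with Docker; the `code-oss-dev` tag and the choice of overridden build args are illustrative, not part of this commit.

```bash
# Build the dev container image, overriding a couple of the ARGs declared above.
# The build context is the .devcontainer folder so the COPY bin/... and fluxbox/... lines resolve.
docker build \
    --build-arg TARGET_VNC_RESOLUTION=1280x768 \
    --build-arg INSTALL_FIREFOX=true \
    -t code-oss-dev .devcontainer

# Run it detached with an init process; the entrypoint starts dbus, Xvfb, fluxbox and x11vnc,
# and port 6080 is where the noVNC web client listens when INSTALL_NOVNC is left enabled.
docker run --init -d -p 6080:6080 code-oss-dev
```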
.devcontainer/README.md (new file, 82 lines)
@@ -0,0 +1,82 @@
# Code - OSS Development Container

This repository includes configuration for a development container for working with Code - OSS in an isolated local container or using [Visual Studio Codespaces](https://aka.ms/vso).

> **Tip:** The default VNC password is `vscode`. The VNC server runs on port `5901` with a web client at `6080`. For better performance, we recommend using a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/). Applications like the macOS Screen Sharing app will not perform as well. [Chicken](https://sourceforge.net/projects/chicken/) is a good macOS alternative.

## Quick start - local

1. Install Docker Desktop or Docker on your local machine. (See the [docs](https://aka.ms/vscode-remote/containers/getting-started) for additional details.)

2. [Docker Desktop] If you are not using the new WSL2 Docker Desktop engine, increase the resources allocated to Docker Desktop to at least **4 Cores and 4 GB of RAM (8 GB recommended)**. Right-click on the Docker status bar item and go to **Preferences/Settings > Resources > Advanced** to do so.

   > **Note:** The [Resource Monitor](https://marketplace.visualstudio.com/items?itemName=mutantdino.resourcemonitor) extension is included in the container so you can keep an eye on CPU/memory in the status bar.

3. Install [Visual Studio Code Stable](https://code.visualstudio.com/) or [Insiders](https://code.visualstudio.com/insiders/) and the [Remote - Containers](https://aka.ms/vscode-remote/download/containers) extension.

   > Note that the Remote - Containers extension requires the Visual Studio Code distribution of Code - OSS. See the [FAQ](https://aka.ms/vscode-remote/faq/license) for details.

4. Press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd> and select **Remote - Containers: Open Repository in Container...**.

   > **Tip:** While you can use your local source tree instead, operations like `yarn install` can be slow on macOS or when using the Hyper-V engine on Windows. We recommend the "open repository" approach since it uses a "named volume" rather than the local filesystem.

5. Type `https://github.com/microsoft/vscode` (or a branch or PR URL) in the input box and press <kbd>Enter</kbd>.

6. After the container is running, open a web browser and go to [http://localhost:6080](http://localhost:6080), or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.

   Anything you start in VS Code or the integrated terminal will appear here.

   Next: **[Try it out!](#try-it)**

## Quick start - Codespaces

> Note that the Codespaces browser-based editor cannot currently access the desktop environment in this container (due to a [missing feature](https://github.com/MicrosoftDocs/vsonline/issues/117)). We recommend using Visual Studio Code from the desktop to connect instead in the near term.

1. Install [Visual Studio Code Stable](https://code.visualstudio.com/) or [Insiders](https://code.visualstudio.com/insiders/) and the [Visual Studio Codespaces](https://aka.ms/vscs-ext-vscode) extension.

   > Note that the Visual Studio Codespaces extension requires the Visual Studio Code distribution of Code - OSS.

2. Sign in by pressing <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd> and selecting **Codespaces: Sign In**. You may also need to use the **Codespaces: Create Plan** command if you do not have a plan. See the [Codespaces docs](https://aka.ms/vso-docs/vscode) for details.

3. Press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd> and select **Codespaces: Create New Codespace**.

4. Use the default settings, select a plan, and then enter the repository URL `https://github.com/microsoft/vscode` (or a branch or PR URL) in the input box when prompted.

5. After the container is running, open a web browser and go to [http://localhost:6080](http://localhost:6080), or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.

6. Anything you start in VS Code or the integrated terminal will appear here.

## Try it!

This container uses the [Fluxbox](http://fluxbox.org/) window manager to keep things lean. **Right-click on the desktop** to see menu options. It works with GNOME and GTK applications, so other tools can be installed if needed.

Note that you can also set the resolution from the command line by typing `set-resolution`.

To start working with Code - OSS, follow these steps:

1. In your local VS Code, open a terminal (<kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>\`</kbd>) and type the following commands:

   ```bash
   yarn install
   bash scripts/code.sh
   ```

2. After the build is complete, open a web browser and go to [http://localhost:6080](http://localhost:6080), or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.

3. You should now see Code - OSS!

Next, let's try debugging.

1. Shut down Code - OSS by clicking the box in the upper right corner of the Code - OSS window through your browser or VNC viewer.

2. Go to your local VS Code client, and use the Run / Debug view to launch the **VS Code** configuration. (It is typically the default, so you can likely just press <kbd>F5</kbd>.)

   > **Note:** If launching times out, you can increase the value of `timeout` in the "VS Code", "Attach Main Process", "Attach Extension Host", and "Attach to Shared Process" configurations in [launch.json](../.vscode/launch.json). However, running `scripts/code.sh` first will set up Electron, which usually solves timeout issues.

3. After a bit, Code - OSS will appear with the debugger attached!

Enjoy!
.devcontainer/bin/init-dev-container.sh (new file, 91 lines)
@@ -0,0 +1,91 @@
#!/bin/bash

NONROOT_USER=node
LOG=/tmp/container-init.log

# Execute the command if it is not already running
startInBackgroundIfNotRunning()
{
	log "Starting $1."
	echo -e "\n** $(date) **" | sudoIf tee -a /tmp/$1.log > /dev/null
	if ! pidof $1 > /dev/null; then
		keepRunningInBackground "$@"
		while ! pidof $1 > /dev/null; do
			sleep 1
		done
		log "$1 started."
	else
		echo "$1 is already running." | sudoIf tee -a /tmp/$1.log > /dev/null
		log "$1 is already running."
	fi
}

# Keep the command running in the background, restarting it if it exits
keepRunningInBackground()
{
	($2 sh -c "while :; do echo [\$(date)] Process started.; $3; echo [\$(date)] Process exited!; sleep 5; done 2>&1" | sudoIf tee -a /tmp/$1.log > /dev/null & echo "$!" | sudoIf tee /tmp/$1.pid > /dev/null)
}

# Use sudo to run as root when required
sudoIf()
{
	if [ "$(id -u)" -ne 0 ]; then
		sudo "$@"
	else
		"$@"
	fi
}

# Use sudo to run as the non-root user when currently running as root
sudoUserIf()
{
	if [ "$(id -u)" -eq 0 ]; then
		sudo -u ${NONROOT_USER} "$@"
	else
		"$@"
	fi
}

# Log messages
log()
{
	echo -e "[$(date)] $@" | sudoIf tee -a $LOG > /dev/null
}

log "** SCRIPT START **"

# Start dbus.
log 'Running "/etc/init.d/dbus start".'
if [ -f "/var/run/dbus/pid" ] && ! pidof dbus-daemon > /dev/null; then
	sudoIf rm -f /var/run/dbus/pid
fi
sudoIf /etc/init.d/dbus start 2>&1 | sudoIf tee -a /tmp/dbus-daemon-system.log > /dev/null
while ! pidof dbus-daemon > /dev/null; do
	sleep 1
done

# Set up Xvfb.
startInBackgroundIfNotRunning "Xvfb" sudoIf "Xvfb ${DISPLAY:-:1} +extension RANDR -screen 0 ${MAX_VNC_RESOLUTION:-1920x1080x16}"

# Start fluxbox as a lightweight window manager.
startInBackgroundIfNotRunning "fluxbox" sudoUserIf "dbus-launch startfluxbox"

# Start x11vnc
startInBackgroundIfNotRunning "x11vnc" sudoIf "x11vnc -display ${DISPLAY:-:1} -rfbport ${VNC_PORT:-5901} -localhost -no6 -xkb -shared -forever -passwdfile $HOME/.vnc/passwd"

# Set resolution
/usr/local/bin/set-resolution ${VNC_RESOLUTION:-1280x720} ${VNC_DPI:-72}

# Spin up noVNC if installed and not running.
if [ -d "/usr/local/novnc" ] && [ "$(ps -ef | grep /usr/local/novnc/noVNC*/utils/launch.sh | grep -v grep)" = "" ]; then
	keepRunningInBackground "noVNC" sudoIf "/usr/local/novnc/noVNC*/utils/launch.sh --listen ${NOVNC_PORT:-6080} --vnc localhost:${VNC_PORT:-5901}"
	log "noVNC started."
else
	log "noVNC is already running or not installed."
fi

# Run whatever was passed in
log "Executing \"$@\"."
"$@"
log "** SCRIPT EXIT **"
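Each service the script launches gets its own log under `/tmp` (via the `tee -a /tmp/$1.log` calls above), and the script itself writes to `/tmp/container-init.log`. If part of the desktop environment fails to come up, a quick way to check from the integrated terminal looks like the following (illustrative commands, not part of the commit):

```bash
# Overall init log, then the per-process logs written by startInBackgroundIfNotRunning
cat /tmp/container-init.log
tail -n 20 /tmp/Xvfb.log /tmp/fluxbox.log /tmp/x11vnc.log /tmp/noVNC.log
```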
.devcontainer/bin/set-resolution (new file, 25 lines)
@@ -0,0 +1,25 @@
#!/bin/bash
RESOLUTION=${1:-${VNC_RESOLUTION:-1920x1080}}
DPI=${2:-${VNC_DPI:-72}}
if [ -z "$1" ]; then
	echo -e "** Current Settings **\n"
	xrandr
	echo -n -e "\nEnter new resolution (WIDTHxHEIGHT, blank for ${RESOLUTION}, Ctrl+C to abort).\n> "
	read NEW_RES
	if [ "${NEW_RES}" != "" ]; then
		RESOLUTION=${NEW_RES}
	fi
	if [ -z "$2" ]; then
		echo -n -e "\nEnter new DPI (blank for ${DPI}, Ctrl+C to abort).\n> "
		read NEW_DPI
		if [ "${NEW_DPI}" != "" ]; then
			DPI=${NEW_DPI}
		fi
	fi
fi

xrandr --fb ${RESOLUTION} --dpi ${DPI} > /dev/null 2>&1

echo -e "\n** New Settings **\n"
xrandr
echo
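Run with no arguments, the script prompts interactively (this is what the Fluxbox "Set Resolution" menu entry does); with arguments it applies them directly. A sample non-interactive call, with values chosen purely for illustration:

```bash
# Resize the virtual X display to 1600x900 at 96 DPI
set-resolution 1600x900 96
```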
.devcontainer/devcontainer.json (new file, 41 lines)
@@ -0,0 +1,41 @@
{
	"name": "Code - OSS",
	"build": {
		"dockerfile": "Dockerfile",
		"args": {
			"MAX_VNC_RESOLUTION": "1920x1080x16",
			"TARGET_VNC_RESOLUTION": "1280x768",
			"TARGET_VNC_PORT": "5901",
			"TARGET_NOVNC_PORT": "6080",
			"VNC_PASSWORD": "vscode",
			"INSTALL_FIREFOX": "true"
		}
	},
	"overrideCommand": false,
	"runArgs": ["--init"],

	"settings": {
		// zsh is also available
		"terminal.integrated.shell.linux": "/bin/bash",
		"resmon.show.battery": false,
		"resmon.show.cpufreq": false,
		"remote.extensionKind": {
			"ms-vscode.js-debug-nightly": "workspace",
			"msjsdiag.debugger-for-chrome": "workspace"
		},
		"debug.chrome.useV3": true
	},

	// noVNC, VNC ports
	"forwardPorts": [6080, 5901],

	"extensions": [
		"dbaeumer.vscode-eslint",
		"EditorConfig.EditorConfig",
		"msjsdiag.debugger-for-chrome",
		"mutantdino.resourcemonitor",
		"GitHub.vscode-pull-request-github"
	],

	"remoteUser": "node"
}
.devcontainer/fluxbox/apps (new file, 9 lines)
@@ -0,0 +1,9 @@
[app] (name=code-oss-dev)
[Position] (CENTER) {0 0}
[Maximized] {yes}
[Dimensions] {100% 100%}
[end]
[transient] (role=GtkFileChooserDialog)
[Position] (CENTER) {0 0}
[Dimensions] {70% 70%}
[end]
.devcontainer/fluxbox/init (new file, 9 lines)
@@ -0,0 +1,9 @@
session.menuFile: ~/.fluxbox/menu
session.keyFile: ~/.fluxbox/keys
session.styleFile: /usr/share/fluxbox/styles//Squared_for_Debian
session.configVersion: 13
session.screen0.workspaces: 1
session.screen0.workspacewarping: false
session.screen0.toolbar.widthPercent: 100
session.screen0.strftimeFormat: %d %b, %a %02k:%M:%S
session.screen0.toolbar.tools: prevworkspace, workspacename, nextworkspace, clock, prevwindow, nextwindow, iconbar, systemtray
.devcontainer/fluxbox/menu (new file, 16 lines)
@@ -0,0 +1,16 @@
[begin] ( Code - OSS Development Container )
[exec] (File Manager) { nautilus ~ } <>
[exec] (Terminal) {/usr/bin/gnome-terminal --working-directory=~ } <>
[exec] (Start Code - OSS) { x-terminal-emulator -T "Code - OSS Build" -e bash /workspaces/vscode*/scripts/code.sh } <>
[submenu] (System >) {}
[exec] (Set Resolution) { x-terminal-emulator -T "Set Resolution" -e bash /usr/local/bin/set-resolution } <>
[exec] (Passwords and Keys) { seahorse } <>
[exec] (Top) { x-terminal-emulator -T "Top" -e /usr/bin/top } <>
[exec] (Editres) {editres} <>
[exec] (Xfontsel) {xfontsel} <>
[exec] (Xkill) {xkill} <>
[exec] (Xrefresh) {xrefresh} <>
[end]
[config] (Configuration >)
[workspaces] (Workspaces >)
[end]
@@ -542,7 +542,9 @@
"**/{vs,sql}/workbench/api/**/common/**",
"vs/workbench/contrib/files/common/editors/fileEditorInput", // this should be fine, it only accesses constants from contrib
"vscode-textmate",
"vscode-oniguruma"
"vscode-oniguruma",
"iconv-lite-umd",
"semver-umd"
]
},
{

.vscode/launch.json (vendored, 2 lines changed)
@@ -26,6 +26,7 @@
"type": "pwa-chrome",
"request": "attach",
"name": "Attach to Shared Process",
"timeout": 30000,
"port": 9222,
"urlFilter": "*sharedProcess.html*",
"presentation": {
@@ -57,6 +58,7 @@
"type": "node",
"request": "attach",
"name": "Attach to Main Process",
"timeout": 30000,
"port": 5875,
"outFiles": [
"${workspaceFolder}/out/**/*.js"

.vscode/settings.json (vendored, 1 line changed)
@@ -26,6 +26,7 @@
"test/automation/out/**": true,
"test/integration/browser/out/**": true,
"src/vs/base/test/node/uri.test.data.txt": true,
"src/vs/workbench/test/browser/api/extHostDocumentData.test.perf-data.ts": true,
"src/vs/server": false
},
"lcov.path": [

.vscode/tasks.json (vendored, 37 lines changed)
@@ -55,6 +55,43 @@
},
"problemMatcher": "$tsc"
},
{
"type": "npm",
"script": "watch-webd",
"label": "Build Web Extensions",
"group": "build",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"owner": "typescript",
"applyTo": "closedDocuments",
"fileLocation": [
"absolute"
],
"pattern": {
"regexp": "Error: ([^(]+)\\((\\d+|\\d+,\\d+|\\d+,\\d+,\\d+,\\d+)\\): (.*)$",
"file": 1,
"location": 2,
"message": 3
},
"background": {
"beginsPattern": "Starting compilation",
"endsPattern": "Finished compilation"
}
}
},
{
"type": "npm",
"script": "kill-watch-webd",
"label": "Kill Build Web Extensions",
"group": "build",
"presentation": {
"reveal": "never",
},
"problemMatcher": "$tsc"
},
{
"label": "Run tests",
"type": "shell",

.yarnrc (2 lines changed)
@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "7.3.1"
target "7.3.2"
runtime "electron"

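The `.yarnrc` hunk bumps the Electron target from 7.3.1 to 7.3.2. After pulling this change, the README's normal flow already picks up the new runtime; the commands below simply restate that flow and are illustrative rather than part of the diff:

```bash
yarn install          # reinstall/rebuild native modules against the new Electron target
bash scripts/code.sh  # first launch downloads and sets up Electron 7.3.2
```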
build/.webignore (new file, 27 lines)
@@ -0,0 +1,27 @@
# cleanup rules for web node modules, .gitignore style

**/*.txt
**/*.json
**/*.md
**/*.d.ts
**/*.js.map
**/LICENSE
**/CONTRIBUTORS

jschardet/index.js
jschardet/src/**
jschardet/dist/jschardet.js

vscode-textmate/webpack.config.js

xterm/src/**

xterm-addon-search/src/**
xterm-addon-search/out/**
xterm-addon-search/fixtures/**

xterm-addon-unicode11/src/**
xterm-addon-unicode11/out/**

xterm-addon-webgl/src/**
xterm-addon-webgl/out/**
@@ -10,8 +10,8 @@ git clone --depth 1 https://github.com/Microsoft/vscode-node-debug2.git
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug.git
git clone --depth 1 https://github.com/Microsoft/vscode-html-languageservice.git
git clone --depth 1 https://github.com/Microsoft/vscode-json-languageservice.git
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json
mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json

@@ -31,10 +31,10 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"

git checkout origin/electron-8.0.x
git merge origin/main
git checkout origin/electron-x.y.z
git merge origin/master

# Push main branch into exploration branch
git push origin HEAD:electron-8.0.x
# Push master branch into exploration branch
git push origin HEAD:electron-x.y.z

displayName: Sync & Merge Exploration

@@ -218,7 +218,7 @@ steps:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
externalFeedCredentials: 'ESRP Nuget'
restoreDirectory: packages

- task: ESRPImportCertTask@1

@@ -8,9 +8,11 @@ require('events').EventEmitter.defaultMaxListeners = 100;

const gulp = require('gulp');
const path = require('path');
const nodeUtil = require('util');
const tsb = require('gulp-tsb');
const es = require('event-stream');
const filter = require('gulp-filter');
const webpack = require('webpack');
const util = require('./lib/util');
const task = require('./lib/task');
const watcher = require('./lib/watch');
@@ -21,6 +23,8 @@ const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');
const ext = require('./lib/extensions');

const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@@ -173,3 +177,78 @@ const compileExtensionsBuildTask = task.define('compile-extensions-build', task.

gulp.task(compileExtensionsBuildTask);
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;

const compileWebExtensionsTask = task.define('compile-web', () => buildWebExtensions(false));
gulp.task(compileWebExtensionsTask);
exports.compileWebExtensionsTask = compileWebExtensionsTask;

const watchWebExtensionsTask = task.define('watch-web', () => buildWebExtensions(true));
gulp.task(watchWebExtensionsTask);
exports.watchWebExtensionsTask = watchWebExtensionsTask;

async function buildWebExtensions(isWatch) {

	const webpackConfigLocations = await nodeUtil.promisify(glob)(
		path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),
		{ ignore: ['**/node_modules'] }
	);

	const webpackConfigs = [];

	for (const webpackConfigPath of webpackConfigLocations) {
		const configOrFnOrArray = require(webpackConfigPath);
		function addConfig(configOrFn) {
			if (typeof configOrFn === 'function') {
				webpackConfigs.push(configOrFn({}, {}));
			} else {
				webpackConfigs.push(configOrFn);
			}
		}
		addConfig(configOrFnOrArray);
	}
	function reporter(fullStats) {
		if (Array.isArray(fullStats.children)) {
			for (const stats of fullStats.children) {
				const outputPath = stats.outputPath;
				if (outputPath) {
					const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
					const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
					fancyLog(`Finished ${ansiColors.green('packaging web extension')} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
				}
				if (Array.isArray(stats.errors)) {
					stats.errors.forEach(error => {
						fancyLog.error(error);
					});
				}
				if (Array.isArray(stats.warnings)) {
					stats.warnings.forEach(warning => {
						fancyLog.warn(warning);
					});
				}
			}
		}
	}
	return new Promise((resolve, reject) => {
		if (isWatch) {
			webpack(webpackConfigs).watch({}, (err, stats) => {
				if (err) {
					reject();
				} else {
					reporter(stats.toJson());
				}
			});
		} else {
			webpack(webpackConfigs).run((err, stats) => {
				if (err) {
					fancyLog.error(err);
					reject();
				} else {
					reporter(stats.toJson());
					resolve();
				}
			});
		}
	});
}

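These `compile-web` and `watch-web` gulp tasks appear to back the new `watch-webd` / `kill-watch-webd` npm scripts referenced in the tasks.json change above. Assuming gulp is invoked through yarn as elsewhere in this repository (an assumption, not something this diff states), a one-off run would look like:

```bash
# Package every extension that provides an extension-browser.webpack.config.js
yarn gulp compile-web

# Or rebuild them continuously while editing
yarn gulp watch-web
```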
@@ -58,6 +58,7 @@ const nodeModules = [ // {{SQL CARBON EDIT}}
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
buildfile.base,
buildfile.workerExtensionHost,
buildfile.workbenchDesktop,
buildfile.code
]);
@@ -69,6 +70,7 @@ const vscodeResources = [
'out-build/bootstrap.js',
'out-build/bootstrap-fork.js',
'out-build/bootstrap-amd.js',
'out-build/bootstrap-node.js',
'out-build/bootstrap-window.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,html}',
@@ -83,6 +85,7 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/platform/files/**/*.exe',

@@ -13,6 +13,10 @@ module.exports = new class ApiLiteralOrTypes {
create(context) {
return {
['TSTypeAnnotation TSUnionType TSLiteralType']: (node) => {
var _a;
if (((_a = node.literal) === null || _a === void 0 ? void 0 : _a.type) === 'TSNullKeyword') {
return;
}
context.report({
node: node,
messageId: 'useEnum'

@@ -15,6 +15,9 @@ export = new class ApiLiteralOrTypes implements eslint.Rule.RuleModule {
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
return {
['TSTypeAnnotation TSUnionType TSLiteralType']: (node: any) => {
if (node.literal?.type === 'TSNullKeyword') {
return;
}
context.report({
node: node,
messageId: 'useEnum'

@@ -4,7 +4,7 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.packageRebuildExtensionsStream = exports.cleanRebuildExtensions = exports.packageExternalExtensionsStream = exports.packageMarketplaceWebExtensionsStream = exports.packageMarketplaceExtensionsStream = exports.packageLocalWebExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
|
||||
exports.translatePackageJSON = exports.packageRebuildExtensionsStream = exports.cleanRebuildExtensions = exports.packageExternalExtensionsStream = exports.scanBuiltinExtensions = exports.packageMarketplaceWebExtensionsStream = exports.packageMarketplaceExtensionsStream = exports.packageLocalWebExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
|
||||
const es = require("event-stream");
|
||||
const fs = require("fs");
|
||||
const glob = require("glob");
|
||||
@@ -291,6 +291,38 @@ function packageMarketplaceWebExtensionsStream(builtInExtensions) {
|
||||
return es.merge(extensions);
|
||||
}
|
||||
exports.packageMarketplaceWebExtensionsStream = packageMarketplaceWebExtensionsStream;
|
||||
function scanBuiltinExtensions(extensionsRoot, forWeb) {
|
||||
const scannedExtensions = [];
|
||||
const extensionsFolders = fs.readdirSync(extensionsRoot);
|
||||
for (const extensionFolder of extensionsFolders) {
|
||||
const packageJSONPath = path.join(extensionsRoot, extensionFolder, 'package.json');
|
||||
if (!fs.existsSync(packageJSONPath)) {
|
||||
continue;
|
||||
}
|
||||
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
|
||||
const extensionKind = packageJSON['extensionKind'] || [];
|
||||
if (forWeb && extensionKind.indexOf('web') === -1) {
|
||||
continue;
|
||||
}
|
||||
const children = fs.readdirSync(path.join(extensionsRoot, extensionFolder));
|
||||
const packageNLS = children.filter(child => child === 'package.nls.json')[0];
|
||||
const readme = children.filter(child => /^readme(\.txt|\.md|)$/i.test(child))[0];
|
||||
const changelog = children.filter(child => /^changelog(\.txt|\.md|)$/i.test(child))[0];
|
||||
if (packageNLS) {
|
||||
// temporary
|
||||
packageJSON = translatePackageJSON(packageJSON, path.join(extensionsRoot, extensionFolder, packageNLS));
|
||||
}
|
||||
scannedExtensions.push({
|
||||
extensionPath: extensionFolder,
|
||||
packageJSON,
|
||||
packageNLSPath: packageNLS ? path.join(extensionFolder, packageNLS) : undefined,
|
||||
readmePath: readme ? path.join(extensionFolder, readme) : undefined,
|
||||
changelogPath: changelog ? path.join(extensionFolder, changelog) : undefined,
|
||||
});
|
||||
}
|
||||
return scannedExtensions;
|
||||
}
|
||||
exports.scanBuiltinExtensions = scanBuiltinExtensions;
|
||||
function packageExternalExtensionsStream() {
|
||||
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
@@ -306,6 +338,7 @@ function packageExternalExtensionsStream() {
|
||||
return es.merge(builtExtensions);
|
||||
}
|
||||
exports.packageExternalExtensionsStream = packageExternalExtensionsStream;
|
||||
// {{SQL CARBON EDIT}} start
|
||||
function cleanRebuildExtensions(root) {
|
||||
return Promise.all(rebuildExtensions.map(async (e) => {
|
||||
await util2.rimraf(path.join(root, e))();
|
||||
@@ -327,3 +360,28 @@ function packageRebuildExtensionsStream() {
|
||||
return es.merge(builtExtensions);
|
||||
}
|
||||
exports.packageRebuildExtensionsStream = packageRebuildExtensionsStream;
|
||||
// {{SQL CARBON EDIT}} end
|
||||
function translatePackageJSON(packageJSON, packageNLSPath) {
|
||||
const CharCode_PC = '%'.charCodeAt(0);
|
||||
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
|
||||
const translate = (obj) => {
|
||||
for (let key in obj) {
|
||||
const val = obj[key];
|
||||
if (Array.isArray(val)) {
|
||||
val.forEach(translate);
|
||||
}
|
||||
else if (val && typeof val === 'object') {
|
||||
translate(val);
|
||||
}
|
||||
else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
|
||||
const translated = packageNls[val.substr(1, val.length - 2)];
|
||||
if (translated) {
|
||||
obj[key] = translated;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
translate(packageJSON);
|
||||
return packageJSON;
|
||||
}
|
||||
exports.translatePackageJSON = translatePackageJSON;
|
||||
|
||||
@@ -340,6 +340,47 @@ export function packageMarketplaceWebExtensionsStream(builtInExtensions: IBuiltI
|
||||
return es.merge(extensions);
|
||||
}
|
||||
|
||||
export interface IScannedBuiltinExtension {
|
||||
extensionPath: string,
|
||||
packageJSON: any,
|
||||
packageNLSPath?: string,
|
||||
readmePath?: string,
|
||||
changelogPath?: string,
|
||||
}
|
||||
|
||||
export function scanBuiltinExtensions(extensionsRoot: string, forWeb: boolean): IScannedBuiltinExtension[] {
|
||||
const scannedExtensions: IScannedBuiltinExtension[] = [];
|
||||
const extensionsFolders = fs.readdirSync(extensionsRoot);
|
||||
for (const extensionFolder of extensionsFolders) {
|
||||
const packageJSONPath = path.join(extensionsRoot, extensionFolder, 'package.json');
|
||||
if (!fs.existsSync(packageJSONPath)) {
|
||||
continue;
|
||||
}
|
||||
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
|
||||
const extensionKind: string[] = packageJSON['extensionKind'] || [];
|
||||
if (forWeb && extensionKind.indexOf('web') === -1) {
|
||||
continue;
|
||||
}
|
||||
const children = fs.readdirSync(path.join(extensionsRoot, extensionFolder));
|
||||
const packageNLS = children.filter(child => child === 'package.nls.json')[0];
|
||||
const readme = children.filter(child => /^readme(\.txt|\.md|)$/i.test(child))[0];
|
||||
const changelog = children.filter(child => /^changelog(\.txt|\.md|)$/i.test(child))[0];
|
||||
|
||||
if (packageNLS) {
|
||||
// temporary
|
||||
packageJSON = translatePackageJSON(packageJSON, path.join(extensionsRoot, extensionFolder, packageNLS));
|
||||
}
|
||||
scannedExtensions.push({
|
||||
extensionPath: extensionFolder,
|
||||
packageJSON,
|
||||
packageNLSPath: packageNLS ? path.join(extensionFolder, packageNLS) : undefined,
|
||||
readmePath: readme ? path.join(extensionFolder, readme) : undefined,
|
||||
changelogPath: changelog ? path.join(extensionFolder, changelog) : undefined,
|
||||
});
|
||||
}
|
||||
return scannedExtensions;
|
||||
}
|
||||
|
||||
export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
|
||||
const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
|
||||
.map(manifestPath => {
|
||||
@@ -357,6 +398,7 @@ export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
|
||||
return es.merge(builtExtensions);
|
||||
}
|
||||
|
||||
// {{SQL CARBON EDIT}} start
|
||||
export function cleanRebuildExtensions(root: string): Promise<void> {
|
||||
return Promise.all(rebuildExtensions.map(async e => {
|
||||
await util2.rimraf(path.join(root, e))();
|
||||
@@ -379,3 +421,26 @@ export function packageRebuildExtensionsStream(): NodeJS.ReadWriteStream {
|
||||
|
||||
return es.merge(builtExtensions);
|
||||
}
|
||||
// {{SQL CARBON EDIT}} end
|
||||
|
||||
export function translatePackageJSON(packageJSON: string, packageNLSPath: string) {
|
||||
const CharCode_PC = '%'.charCodeAt(0);
|
||||
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
|
||||
const translate = (obj: any) => {
|
||||
for (let key in obj) {
|
||||
const val = obj[key];
|
||||
if (Array.isArray(val)) {
|
||||
val.forEach(translate);
|
||||
} else if (val && typeof val === 'object') {
|
||||
translate(val);
|
||||
} else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
|
||||
const translated = packageNls[val.substr(1, val.length - 2)];
|
||||
if (translated) {
|
||||
obj[key] = translated;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
translate(packageJSON);
|
||||
return packageJSON;
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ const https = require("https");
|
||||
const gulp = require("gulp");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const iconv = require("iconv-lite");
|
||||
const iconv = require("iconv-lite-umd");
|
||||
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
|
||||
function log(message, ...rest) {
|
||||
fancyLog(ansiColors.green('[i18n]'), message, ...rest);
|
||||
@@ -1181,9 +1181,10 @@ function createIslFile(originalFilePath, messages, language, innoSetup) {
|
||||
});
|
||||
const basename = path.basename(originalFilePath);
|
||||
const filePath = `${basename}.${language.id}.isl`;
|
||||
const encoded = iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage);
|
||||
return new File({
|
||||
path: filePath,
|
||||
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage)
|
||||
contents: Buffer.from(encoded),
|
||||
});
|
||||
}
|
||||
function encodeEntities(value) {
|
||||
|
||||
@@ -218,6 +218,10 @@
|
||||
"name": "vs/workbench/contrib/userDataSync",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/views",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/actions",
|
||||
"project": "vscode-workbench"
|
||||
|
||||
@@ -15,7 +15,7 @@ import * as https from 'https';
|
||||
import * as gulp from 'gulp';
|
||||
import * as fancyLog from 'fancy-log';
|
||||
import * as ansiColors from 'ansi-colors';
|
||||
import * as iconv from 'iconv-lite';
|
||||
import * as iconv from 'iconv-lite-umd';
|
||||
|
||||
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
|
||||
|
||||
@@ -1347,10 +1347,11 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
|
||||
|
||||
const basename = path.basename(originalFilePath);
|
||||
const filePath = `${basename}.${language.id}.isl`;
|
||||
const encoded = iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage);
|
||||
|
||||
return new File({
|
||||
path: filePath,
|
||||
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage)
|
||||
contents: Buffer.from(encoded),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
"gulp-bom": "^1.0.0",
|
||||
"gulp-sourcemaps": "^1.11.0",
|
||||
"gulp-uglify": "^3.0.0",
|
||||
"iconv-lite": "0.6.0",
|
||||
"iconv-lite-umd": "0.6.5",
|
||||
"mime": "^1.3.4",
|
||||
"minimatch": "3.0.4",
|
||||
"minimist": "^1.2.3",
|
||||
@@ -49,9 +49,9 @@
|
||||
"rollup-plugin-commonjs": "^10.1.0",
|
||||
"rollup-plugin-node-resolve": "^5.2.0",
|
||||
"terser": "4.3.8",
|
||||
"typescript": "^4.0.0-dev.20200615",
|
||||
"typescript": "^4.0.0-dev.20200629",
|
||||
"vsce": "1.48.0",
|
||||
"vscode-telemetry-extractor": "^1.5.4",
|
||||
"vscode-telemetry-extractor": "^1.6.0",
|
||||
"xml2js": "^0.4.17"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -265,7 +265,7 @@ begin
|
||||
end;
|
||||
end;
|
||||
|
||||
// http://stackoverflow.com/a/23838239/261019
|
||||
// https://stackoverflow.com/a/23838239/261019
|
||||
procedure Explode(var Dest: TArrayOfString; Text: String; Separator: String);
|
||||
var
|
||||
i, p: Integer;
|
||||
|
||||
@@ -420,6 +420,11 @@ acorn@^7.0.0:
|
||||
resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.1.0.tgz#949d36f2c292535da602283586c2477c57eb2d6c"
|
||||
integrity sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==
|
||||
|
||||
agent-base@5:
|
||||
version "5.1.1"
|
||||
resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-5.1.1.tgz#e8fb3f242959db44d63be665db7a8e739537a32c"
|
||||
integrity sha512-TMeqbNl2fMW0nMjTEPOwe3J/PRFP4vqeoNuQMG0HlMrtm5QxKqdvAkZ1pRBQ/ulIyDD5Yq0nJ7YbdD8ey0TO3g==
|
||||
|
||||
ajv@^4.9.1:
|
||||
version "4.11.8"
|
||||
resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536"
|
||||
@@ -1043,7 +1048,7 @@ debug@2.X, debug@^2.1.2, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8:
|
||||
dependencies:
|
||||
ms "2.0.0"
|
||||
|
||||
debug@^4.1.1:
|
||||
debug@4, debug@^4.1.1:
|
||||
version "4.1.1"
|
||||
resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791"
|
||||
integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==
|
||||
@@ -1804,12 +1809,18 @@ http-signature@~1.2.0:
|
||||
jsprim "^1.2.2"
|
||||
sshpk "^1.7.0"
|
||||
|
||||
iconv-lite@0.6.0:
|
||||
version "0.6.0"
|
||||
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.0.tgz#66a93b80df0bd05d2a43a7426296b7f91073f125"
|
||||
integrity sha512-43ZpGYZ9QtuutX5l6WC1DSO8ane9N+Ct5qPLF2OV7vM9abM69gnAbVkh66ibaZd3aOGkoP1ZmringlKhLBkw2Q==
|
||||
https-proxy-agent@^4.0.0:
|
||||
version "4.0.0"
|
||||
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-4.0.0.tgz#702b71fb5520a132a66de1f67541d9e62154d82b"
|
||||
integrity sha512-zoDhWrkR3of1l9QAL8/scJZyLu8j/gBkcwcaQOZh7Gyh/+uJQzGVETdgT30akuwkpL8HTRfssqI3BZuV18teDg==
|
||||
dependencies:
|
||||
safer-buffer ">= 2.1.2 < 3"
|
||||
agent-base "5"
|
||||
debug "4"
|
||||
|
||||
iconv-lite-umd@0.6.5:
|
||||
version "0.6.5"
|
||||
resolved "https://registry.yarnpkg.com/iconv-lite-umd/-/iconv-lite-umd-0.6.5.tgz#6a1f621a3b4d125f72feff813a9839e1ebd6c722"
|
||||
integrity sha512-WDegH4al+e3n3jTOStRvm+jzDA3JMUQGgzdAsMxAgcgB0Oi72HjfdsoX08ieKsy3rKexXVjWZr41aOIUaCZnMg==
|
||||
|
||||
iconv-lite@^0.4.4:
|
||||
version "0.4.24"
|
||||
@@ -2809,6 +2820,11 @@ process-nextick-args@~2.0.0:
|
||||
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa"
|
||||
integrity sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==
|
||||
|
||||
proxy-from-env@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2"
|
||||
integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==
|
||||
|
||||
pump@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
|
||||
@@ -3523,10 +3539,10 @@ typescript@^3.0.1:
|
||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977"
|
||||
integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g==
|
||||
|
||||
typescript@^4.0.0-dev.20200615:
|
||||
version "4.0.0-dev.20200615"
|
||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.0.0-dev.20200615.tgz#5c06a0d5f25a29a018767970c6531fbbed7240e3"
|
||||
integrity sha512-OD7KRTLimUwW5E1xHsAqXNjw0O0Krk9CgRVFYkqANv4fZisaN1LJI06u30D5QiNnHBzm2nBSzZIAhjj4MUqaRA==
|
||||
typescript@^4.0.0-dev.20200629:
|
||||
version "4.0.0-dev.20200629"
|
||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.0.0-dev.20200629.tgz#4631667ebffe3a340beee885a4bebe3a73b6f18e"
|
||||
integrity sha512-c4DUu7KvTcx4x7V8sBWexYNkCfioiH1huOJL6WFAA8Oot0Gr/+PlKKDBS9fYjsadEv1JI1qboJKobwLQn0kQXw==
|
||||
|
||||
typical@^4.0.0:
|
||||
version "4.0.0"
|
||||
@@ -3686,19 +3702,22 @@ vsce@1.48.0:
|
||||
yauzl "^2.3.1"
|
||||
yazl "^2.2.2"
|
||||
|
||||
vscode-ripgrep@^1.5.6:
|
||||
version "1.5.7"
|
||||
resolved "https://registry.yarnpkg.com/vscode-ripgrep/-/vscode-ripgrep-1.5.7.tgz#acb6b548af488a4bca5d0f1bb5faf761343289ce"
|
||||
integrity sha512-/Vsz/+k8kTvui0q3O74pif9FK0nKopgFTiGNVvxicZANxtSA8J8gUE9GQ/4dpi7D/2yI/YVORszwVskFbz46hQ==
|
||||
vscode-ripgrep@^1.6.2:
|
||||
version "1.6.2"
|
||||
resolved "https://registry.yarnpkg.com/vscode-ripgrep/-/vscode-ripgrep-1.6.2.tgz#fb912c7465699f10ce0218a6676cc632c77369b4"
|
||||
integrity sha512-jkZEWnQFcE+QuQFfxQXWcWtDafTmgkp3DjMKawDkajZwgnDlGKpFp15ybKrZNVTi1SLEF/12BzxYSZVVZ2XrkA==
|
||||
dependencies:
|
||||
https-proxy-agent "^4.0.0"
|
||||
proxy-from-env "^1.1.0"
|
||||
|
||||
vscode-telemetry-extractor@^1.5.4:
|
||||
version "1.5.4"
|
||||
resolved "https://registry.yarnpkg.com/vscode-telemetry-extractor/-/vscode-telemetry-extractor-1.5.4.tgz#bcb0d17667fa1b77715e3a3bf372ade18f846782"
|
||||
integrity sha512-MN9LNPo0Rc6cy3sIWTAG97PTWkEKdRnP0VeYoS8vjKSNtG9CAsrUxHgFfYoHm2vNK/ijd0a4NzETyVGO2kT6hw==
|
||||
vscode-telemetry-extractor@^1.6.0:
|
||||
version "1.6.0"
|
||||
resolved "https://registry.yarnpkg.com/vscode-telemetry-extractor/-/vscode-telemetry-extractor-1.6.0.tgz#e9d9c1d24863cce8d3d715f0287de3b31eb90c56"
|
||||
integrity sha512-zSxvkbyAMa1lTRGIHfGg7gW2e9Sey+2zGYD19uNWCsVEfoXAr2NB6uzb0sNHtbZ2SSqxSePmFXzBAavsudT5fw==
|
||||
dependencies:
|
||||
command-line-args "^5.1.1"
|
||||
ts-morph "^3.1.3"
|
||||
vscode-ripgrep "^1.5.6"
|
||||
vscode-ripgrep "^1.6.2"
|
||||
|
||||
vso-node-api@6.1.2-preview:
|
||||
version "6.1.2-preview"
|
||||
|
||||
131
cglicenses.json
131
cglicenses.json
@@ -200,6 +200,135 @@
|
||||
},
|
||||
{
|
||||
"name": "big-integer",
|
||||
"prependLicenseText": ["Copyright released to public domain"]
|
||||
"prependLicenseText": [
|
||||
"Copyright released to public domain"
|
||||
]
|
||||
},
|
||||
{
|
||||
// Reason: The license at https://github.com/justmoon/node-extend/blob/main/LICENSE
|
||||
// cannot be found by the OSS tool automatically.
|
||||
"name": "extend",
|
||||
"fullLicenseText": [
|
||||
"The MIT License (MIT)",
|
||||
"",
|
||||
"Copyright (c) 2014 Stefan Thomas",
|
||||
"",
|
||||
"Permission is hereby granted, free of charge, to any person obtaining",
|
||||
"a copy of this software and associated documentation files (the",
|
||||
"\"Software\"), to deal in the Software without restriction, including",
|
||||
"without limitation the rights to use, copy, modify, merge, publish,",
|
||||
"distribute, sublicense, and/or sell copies of the Software, and to",
|
||||
"permit persons to whom the Software is furnished to do so, subject to",
|
||||
"the following conditions:",
|
||||
"",
|
||||
"The above copyright notice and this permission notice shall be",
|
||||
"included in all copies or substantial portions of the Software.",
|
||||
"",
|
||||
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,",
|
||||
"EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF",
|
||||
"MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND",
|
||||
"NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE",
|
||||
"LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION",
|
||||
"OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION",
|
||||
"WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
|
||||
]
|
||||
},
|
||||
{
|
||||
// Reason: The license at https://github.com/retep998/winapi-rs/blob/0.3/LICENSE-MIT
|
||||
// cannot be found by the OSS tool automatically.
|
||||
"name": "retep998/winapi-rs",
|
||||
"fullLicenseText": [
|
||||
"Copyright (c) 2015-2018 The winapi-rs Developers",
|
||||
"",
|
||||
"Permission is hereby granted, free of charge, to any person obtaining a copy",
|
||||
"of this software and associated documentation files (the \"Software\"), to deal",
|
||||
"in the Software without restriction, including without limitation the rights",
|
||||
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
|
||||
"copies of the Software, and to permit persons to whom the Software is",
|
||||
"furnished to do so, subject to the following conditions:",
|
||||
"",
|
||||
"The above copyright notice and this permission notice shall be included in all",
|
||||
"copies or substantial portions of the Software.",
|
||||
"",
|
||||
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
|
||||
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
|
||||
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
|
||||
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
|
||||
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
|
||||
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
|
||||
"SOFTWARE."
|
||||
]
|
||||
},
|
||||
{
|
||||
// Reason: The license at https://github.com/digitaldesignlabs/es6-promisify/blob/main/LICENSE
|
||||
// cannot be found by the OSS tool automatically.
|
||||
"name": "es6-promisify",
|
||||
"fullLicenseText": [
|
||||
"Copyright (c) 2014 Mike Hall / Digital Design Labs",
|
||||
"",
|
||||
"Permission is hereby granted, free of charge, to any person obtaining a copy",
|
||||
"of this software and associated documentation files (the \"Software\"), to deal",
|
||||
"in the Software without restriction, including without limitation the rights",
|
||||
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
|
||||
"copies of the Software, and to permit persons to whom the Software is",
|
||||
"furnished to do so, subject to the following conditions:",
|
||||
"",
|
||||
"The above copyright notice and this permission notice shall be included in all",
|
||||
"copies or substantial portions of the Software.",
|
||||
"",
|
||||
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
|
||||
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
|
||||
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
|
||||
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
|
||||
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
|
||||
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
|
||||
"SOFTWARE."
|
||||
]
|
||||
},
|
||||
{
|
||||
// Reason: The license at https://github.com/zkat/json-parse-better-errors/blob/latest/LICENSE.md
|
||||
// cannot be found by the OSS tool automatically.
|
||||
"name": "json-parse-better-errors",
|
||||
"fullLicenseText": [
|
||||
"Copyright 2017 Kat Marchán",
|
||||
"",
|
||||
"Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the",
|
||||
"\"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute,",
|
||||
"sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following",
|
||||
"conditions:",
|
||||
"",
|
||||
"The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.",
|
||||
"",
|
||||
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE",
|
||||
"WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS",
|
||||
"OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR",
|
||||
"OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
|
||||
]
|
||||
},
|
||||
{
|
||||
// Reason: The license at https://github.com/time-rs/time/blob/main/LICENSE-MIT
|
||||
// cannot be found by the OSS tool automatically.
|
||||
"name": "time-rs/time",
|
||||
"fullLicenseText": [
|
||||
"Copyright (c) 2019 Jacob Pratt",
|
||||
"",
|
||||
"Permission is hereby granted, free of charge, to any person obtaining a copy",
|
||||
"of this software and associated documentation files (the \"Software\"), to deal",
|
||||
"in the Software without restriction, including without limitation the rights",
|
||||
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
|
||||
"copies of the Software, and to permit persons to whom the Software is",
|
||||
"furnished to do so, subject to the following conditions:",
|
||||
"",
|
||||
"The above copyright notice and this permission notice shall be included in all",
|
||||
"copies or substantial portions of the Software.",
|
||||
"",
|
||||
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
|
||||
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
|
||||
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
|
||||
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
|
||||
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
|
||||
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
|
||||
"SOFTWARE."
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
@@ -60,12 +60,12 @@
|
||||
"git": {
|
||||
"name": "electron",
|
||||
"repositoryUrl": "https://github.com/electron/electron",
|
||||
"commitHash": "bc8fc0d406d32e4c02f3ec9f161deaacbe4f5989"
|
||||
"commitHash": "5f93e889020d279d5a9cd1ecab080ab467312447"
|
||||
}
|
||||
},
|
||||
"isOnlyProductionDependency": true,
|
||||
"license": "MIT",
|
||||
"version": "7.3.1"
|
||||
"version": "7.3.2"
|
||||
},
|
||||
{
|
||||
"component": {
|
||||
@@ -77,6 +77,40 @@
|
||||
}
|
||||
},
|
||||
"isOnlyProductionDependency": true,
|
||||
"licenseDetail": [
|
||||
"Inno Setup License",
|
||||
"==================",
|
||||
"",
|
||||
"Except where otherwise noted, all of the documentation and software included in the Inno Setup",
|
||||
"package is copyrighted by Jordan Russell.",
|
||||
"",
|
||||
"Copyright (C) 1997-2020 Jordan Russell. All rights reserved.",
|
||||
"Portions Copyright (C) 2000-2020 Martijn Laan. All rights reserved.",
|
||||
"",
|
||||
"This software is provided \"as-is,\" without any express or implied warranty. In no event shall the",
|
||||
"author be held liable for any damages arising from the use of this software.",
|
||||
"",
|
||||
"Permission is granted to anyone to use this software for any purpose, including commercial",
|
||||
"applications, and to alter and redistribute it, provided that the following conditions are met:",
|
||||
"",
|
||||
"1. All redistributions of source code files must retain all copyright notices that are currently in",
|
||||
" place, and this list of conditions without modification.",
|
||||
"",
|
||||
"2. All redistributions in binary form must retain all occurrences of the above copyright notice and",
|
||||
" web site addresses that are currently in place (for example, in the About boxes).",
|
||||
"",
|
||||
"3. The origin of this software must not be misrepresented; you must not claim that you wrote the",
|
||||
" original software. If you use this software to distribute a product, an acknowledgment in the",
|
||||
" product documentation would be appreciated but is not required.",
|
||||
"",
|
||||
"4. Modified versions in source or binary form must be plainly marked as such, and must not be",
|
||||
" misrepresented as being the original software.",
|
||||
"",
|
||||
"",
|
||||
"Jordan Russell",
|
||||
"jr-2010 AT jrsoftware.org",
|
||||
"https://jrsoftware.org/"
|
||||
],
|
||||
"version": "5.5.6"
|
||||
},
|
||||
{
|
||||
|
||||
@@ -7,27 +7,15 @@

'use strict';

const withDefaults = require('../shared.webpack.config');
const path = require('path');
const withBrowserDefaults = require('../shared.webpack.config').browser;

const clientConfig = withDefaults({
target: 'webworker',
module.exports = withBrowserDefaults({
context: __dirname,
entry: {
extension: './src/configurationEditingMain.ts'
},
output: {
filename: 'configurationEditingMain.js'
},
performance: {
hints: false
},
resolve: {
alias: {
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
}
}
});
clientConfig.module.rules[0].use.shift(); // remove nls loader

module.exports = clientConfig;

@@ -13,7 +13,7 @@
|
||||
"onLanguage:jsonc"
|
||||
],
|
||||
"main": "./out/configurationEditingMain",
|
||||
"browser": "./dist/configurationEditingMain",
|
||||
"browser": "./dist/browser/configurationEditingMain",
|
||||
"scripts": {
|
||||
"compile": "gulp compile-extension:configuration-editing",
|
||||
"watch": "gulp watch-extension:configuration-editing"
|
||||
@@ -117,6 +117,10 @@
|
||||
"fileMatch": "/.devcontainer.json",
|
||||
"url": "./schemas/devContainer.schema.json"
|
||||
},
|
||||
{
|
||||
"fileMatch": "%APP_SETTINGS_HOME%/globalStorage/ms-vscode-remote.remote-containers/nameConfigs/*.json",
|
||||
"url": "./schemas/attachContainer.schema.json"
|
||||
},
|
||||
{
|
||||
"fileMatch": "%APP_SETTINGS_HOME%/globalStorage/ms-vscode-remote.remote-containers/imageConfigs/*.json",
|
||||
"url": "./schemas/attachContainer.schema.json"
|
||||
|
||||
@@ -42,8 +42,8 @@
"description": "An array of extensions that should be installed into the container.",
"items": {
"type": "string",
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)$",
"errorMessage": "Expected format '${publisher}.${name}'. Example: 'vscode.csharp'."
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)(@(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)?$",
"errorMessage": "Expected format: '${publisher}.${name}' or '${publisher}.${name}@${version}'. Example: 'ms-dotnettools.csharp'."
}
},
"postAttachCommand": {

@@ -17,8 +17,8 @@
"description": "An array of extensions that should be installed into the container.",
"items": {
"type": "string",
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)$",
"errorMessage": "Expected format '${publisher}.${name}'. Example: 'vscode.csharp'."
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)(@(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)?$",
"errorMessage": "Expected format: '${publisher}.${name}' or '${publisher}.${name}@${version}'. Example: 'ms-dotnettools.csharp'."
}
},
"settings": {

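Note (not part of the commit): the two schema hunks above relax the extension-ID pattern so an optional @major.minor.patch suffix (with semver pre-release/build metadata) is accepted. A minimal sketch exercising the same regex, copied verbatim from the schema; the sample IDs are illustrative only.

// TypeScript sketch, runnable with ts-node; the pattern is the schema's, split for readability.
const extensionIdPattern = new RegExp(
	'^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)' +
	'(@(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)' +
	'(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?' +
	'(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)?$'
);

for (const id of ['ms-dotnettools.csharp', 'ms-dotnettools.csharp@1.21.17', 'ms-dotnettools.csharp@1.21', 'not an id']) {
	console.log(id, extensionIdPattern.test(id)); // first two match, last two do not
}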
@@ -432,8 +432,8 @@
|
||||
},
|
||||
{
|
||||
"command": "git.unstageSelectedRanges",
|
||||
"key": "ctrl+k ctrl+u",
|
||||
"mac": "cmd+k cmd+u",
|
||||
"key": "ctrl+k ctrl+n",
|
||||
"mac": "cmd+k cmd+n",
|
||||
"when": "isInDiffEditor"
|
||||
},
|
||||
{
|
||||
@@ -1878,7 +1878,7 @@
|
||||
"dependencies": {
|
||||
"byline": "^5.0.0",
|
||||
"file-type": "^7.2.0",
|
||||
"iconv-lite": "0.6.0",
|
||||
"iconv-lite-umd": "0.6.5",
|
||||
"jschardet": "2.1.1",
|
||||
"vscode-extension-telemetry": "0.1.1",
|
||||
"vscode-nls": "^4.0.0",
|
||||
|
||||
@@ -8,15 +8,15 @@ import * as path from 'path';
|
||||
import { Repository, GitResourceGroup } from './repository';
|
||||
import { Model } from './model';
|
||||
import { debounce } from './decorators';
|
||||
import { filterEvent, dispose, anyEvent, fireEvent } from './util';
|
||||
import { filterEvent, dispose, anyEvent, fireEvent, PromiseSource } from './util';
|
||||
import { GitErrorCodes, Status } from './api/git';
|
||||
|
||||
type Callback = { resolve: (status: boolean) => void, reject: (err: any) => void };
|
||||
|
||||
class GitIgnoreDecorationProvider implements DecorationProvider {
|
||||
|
||||
private static Decoration: Decoration = { priority: 3, color: new ThemeColor('gitDecoration.ignoredResourceForeground') };
|
||||
|
||||
readonly onDidChangeDecorations: Event<Uri[]>;
|
||||
private queue = new Map<string, { repository: Repository; queue: Map<string, Callback>; }>();
|
||||
private queue = new Map<string, { repository: Repository; queue: Map<string, PromiseSource<Decoration | undefined>>; }>();
|
||||
private disposables: Disposable[] = [];
|
||||
|
||||
constructor(private model: Model) {
|
||||
@@ -29,32 +29,29 @@ class GitIgnoreDecorationProvider implements DecorationProvider {
|
||||
this.disposables.push(window.registerDecorationProvider(this));
|
||||
}
|
||||
|
||||
provideDecoration(uri: Uri): Promise<Decoration | undefined> {
|
||||
async provideDecoration(uri: Uri): Promise<Decoration | undefined> {
|
||||
const repository = this.model.getRepository(uri);
|
||||
|
||||
if (!repository) {
|
||||
return Promise.resolve(undefined);
|
||||
return;
|
||||
}
|
||||
|
||||
let queueItem = this.queue.get(repository.root);
|
||||
|
||||
if (!queueItem) {
|
||||
queueItem = { repository, queue: new Map<string, Callback>() };
|
||||
queueItem = { repository, queue: new Map<string, PromiseSource<Decoration | undefined>>() };
|
||||
this.queue.set(repository.root, queueItem);
|
||||
}
|
||||
|
||||
return new Promise<boolean>((resolve, reject) => {
|
||||
queueItem!.queue.set(uri.fsPath, { resolve, reject });
|
||||
let promiseSource = queueItem.queue.get(uri.fsPath);
|
||||
|
||||
if (!promiseSource) {
|
||||
promiseSource = new PromiseSource();
|
||||
queueItem!.queue.set(uri.fsPath, promiseSource);
|
||||
this.checkIgnoreSoon();
|
||||
}).then(ignored => {
|
||||
if (ignored) {
|
||||
return <Decoration>{
|
||||
priority: 3,
|
||||
color: new ThemeColor('gitDecoration.ignoredResourceForeground')
|
||||
};
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
|
||||
return await promiseSource.promise;
|
||||
}
|
||||
|
||||
@debounce(500)
|
||||
@@ -66,16 +63,16 @@ class GitIgnoreDecorationProvider implements DecorationProvider {
|
||||
const paths = [...item.queue.keys()];
|
||||
|
||||
item.repository.checkIgnore(paths).then(ignoreSet => {
|
||||
for (const [key, value] of item.queue.entries()) {
|
||||
value.resolve(ignoreSet.has(key));
|
||||
for (const [path, promiseSource] of item.queue.entries()) {
|
||||
promiseSource.resolve(ignoreSet.has(path) ? GitIgnoreDecorationProvider.Decoration : undefined);
|
||||
}
|
||||
}, err => {
|
||||
if (err.gitErrorCode !== GitErrorCodes.IsInSubmodule) {
|
||||
console.error(err);
|
||||
}
|
||||
|
||||
for (const [, value] of item.queue.entries()) {
|
||||
value.reject(err);
|
||||
for (const [, promiseSource] of item.queue.entries()) {
|
||||
promiseSource.reject(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ import * as os from 'os';
|
||||
import * as cp from 'child_process';
|
||||
import * as which from 'which';
|
||||
import { EventEmitter } from 'events';
|
||||
import iconv = require('iconv-lite');
|
||||
import * as iconv from 'iconv-lite-umd';
|
||||
import * as filetype from 'file-type';
|
||||
import { assign, groupBy, IDisposable, toDisposable, dispose, mkdirp, readBytes, detectUnicodeEncoding, Encoding, onceEvent, splitInChunks, Limiter } from './util';
|
||||
import { CancellationToken, Progress, Uri } from 'vscode';
|
||||
@@ -1939,6 +1939,17 @@ export class Repository {
|
||||
return message.replace(/^\s*#.*$\n?/gm, '').trim();
|
||||
}
|
||||
|
||||
async getSquashMessage(): Promise<string | undefined> {
|
||||
const squashMsgPath = path.join(this.repositoryRoot, '.git', 'SQUASH_MSG');
|
||||
|
||||
try {
|
||||
const raw = await fs.readFile(squashMsgPath, 'utf8');
|
||||
return this.stripCommitMessageComments(raw);
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async getMergeMessage(): Promise<string | undefined> {
|
||||
const mergeMsgPath = path.join(this.repositoryRoot, '.git', 'MERGE_MSG');
|
||||
|
||||
|
||||
@@ -537,7 +537,7 @@ class DotGitWatcher implements IFileWatcher {
|
||||
upstreamWatcher.event(this.emitter.fire, this.emitter, this.transientDisposables);
|
||||
} catch (err) {
|
||||
if (Log.logLevel <= LogLevel.Error) {
|
||||
this.outputChannel.appendLine(`Failed to watch ref '${upstreamPath}', is most likely packed.\n${err.stack || err}`);
|
||||
this.outputChannel.appendLine(`Warning: Failed to watch ref '${upstreamPath}', is most likely packed.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -729,10 +729,10 @@ export class Repository implements Disposable {
|
||||
this.updateInputBoxPlaceholder();
|
||||
this.disposables.push(this.onDidRunGitStatus(() => this.updateInputBoxPlaceholder()));
|
||||
|
||||
this._mergeGroup = this._sourceControl.createResourceGroup('merge', localize('merge changes', "MERGE CHANGES"));
|
||||
this._indexGroup = this._sourceControl.createResourceGroup('index', localize('staged changes', "STAGED CHANGES"));
|
||||
this._workingTreeGroup = this._sourceControl.createResourceGroup('workingTree', localize('changes', "CHANGES"));
|
||||
this._untrackedGroup = this._sourceControl.createResourceGroup('untracked', localize('untracked changes', "UNTRACKED CHANGES"));
|
||||
this._mergeGroup = this._sourceControl.createResourceGroup('merge', localize('merge changes', "Merge Changes"));
|
||||
this._indexGroup = this._sourceControl.createResourceGroup('index', localize('staged changes', "Staged Changes"));
|
||||
this._workingTreeGroup = this._sourceControl.createResourceGroup('workingTree', localize('changes', "Changes"));
|
||||
this._untrackedGroup = this._sourceControl.createResourceGroup('untracked', localize('untracked changes', "Untracked Changes"));
|
||||
|
||||
const updateIndexGroupVisibility = () => {
|
||||
const config = workspace.getConfiguration('git', root);
|
||||
@@ -865,10 +865,10 @@ export class Repository implements Disposable {
|
||||
}
|
||||
|
||||
async getInputTemplate(): Promise<string> {
|
||||
const mergeMessage = await this.repository.getMergeMessage();
|
||||
const commitMessage = (await Promise.all([this.repository.getMergeMessage(), this.repository.getSquashMessage()])).find(msg => msg !== undefined);
|
||||
|
||||
if (mergeMessage) {
|
||||
return mergeMessage;
|
||||
if (commitMessage) {
|
||||
return commitMessage;
|
||||
}
|
||||
|
||||
return await this.repository.getCommitTemplate();
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { Event, Disposable } from 'vscode';
|
||||
import { Event, Disposable, EventEmitter } from 'vscode';
|
||||
import { dirname, sep } from 'path';
|
||||
import { Readable } from 'stream';
|
||||
import { promises as fs, createReadStream } from 'fs';
|
||||
@@ -400,3 +400,39 @@ export class Limiter<T> {
}
}
}

type Completion<T> = { success: true, value: T } | { success: false, err: any };

export class PromiseSource<T> {

private _onDidComplete = new EventEmitter<Completion<T>>();

private _promise: Promise<T> | undefined;
get promise(): Promise<T> {
if (this._promise) {
return this._promise;
}

return eventToPromise(this._onDidComplete.event).then(completion => {
if (completion.success) {
return completion.value;
} else {
throw completion.err;
}
});
}

resolve(value: T): void {
if (!this._promise) {
this._promise = Promise.resolve(value);
this._onDidComplete.fire({ success: true, value });
}
}

reject(err: any): void {
if (!this._promise) {
this._promise = Promise.reject(err);
this._onDidComplete.fire({ success: false, err });
}
}
}

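Aside (not part of the commit): the PromiseSource added to the git extension's util.ts above behaves like a deferred — the promise is created lazily from the completion event and only the first resolve/reject takes effect. A minimal sketch of the batching shape GitIgnoreDecorationProvider now uses, assuming it runs inside the git extension (PromiseSource relies on vscode's EventEmitter) and imports from './util':

import { PromiseSource } from './util';

// Queue one PromiseSource per path, then answer the whole batch in one pass —
// the same shape GitIgnoreDecorationProvider uses for .gitignore checks.
const pending = new Map<string, PromiseSource<boolean>>();

export function queryIgnored(path: string): Promise<boolean> {
	let source = pending.get(path);
	if (!source) {
		source = new PromiseSource<boolean>();
		pending.set(path, source);
	}
	return source.promise;
}

export function completeBatch(ignored: Set<string>): void {
	for (const [path, source] of pending) {
		source.resolve(ignored.has(path)); // later resolve/reject calls on the same source are ignored
	}
	pending.clear();
}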
@@ -425,12 +425,10 @@ https-proxy-agent@^2.2.1:
|
||||
agent-base "^4.3.0"
|
||||
debug "^3.1.0"
|
||||
|
||||
iconv-lite@0.6.0:
|
||||
version "0.6.0"
|
||||
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.0.tgz#66a93b80df0bd05d2a43a7426296b7f91073f125"
|
||||
integrity sha512-43ZpGYZ9QtuutX5l6WC1DSO8ane9N+Ct5qPLF2OV7vM9abM69gnAbVkh66ibaZd3aOGkoP1ZmringlKhLBkw2Q==
|
||||
dependencies:
|
||||
safer-buffer ">= 2.1.2 < 3"
|
||||
iconv-lite-umd@0.6.5:
|
||||
version "0.6.5"
|
||||
resolved "https://registry.yarnpkg.com/iconv-lite-umd/-/iconv-lite-umd-0.6.5.tgz#6a1f621a3b4d125f72feff813a9839e1ebd6c722"
|
||||
integrity sha512-WDegH4al+e3n3jTOStRvm+jzDA3JMUQGgzdAsMxAgcgB0Oi72HjfdsoX08ieKsy3rKexXVjWZr41aOIUaCZnMg==
|
||||
|
||||
inflight@^1.0.4:
|
||||
version "1.0.6"
|
||||
@@ -748,7 +746,7 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.2:
|
||||
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519"
|
||||
integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==
|
||||
|
||||
"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
|
||||
safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
|
||||
version "2.1.2"
|
||||
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
|
||||
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
|
||||
|
||||
@@ -8,11 +8,10 @@
|
||||
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
const withDefaults = require('../shared.webpack.config');
|
||||
const withBrowserDefaults = require('../shared.webpack.config').browser;
|
||||
|
||||
module.exports = withDefaults({
|
||||
module.exports = withBrowserDefaults({
|
||||
context: __dirname,
|
||||
target: 'webworker',
|
||||
node: false,
|
||||
entry: {
|
||||
extension: './src/extension.ts',
|
||||
@@ -20,30 +19,10 @@ module.exports = withDefaults({
|
||||
externals: {
|
||||
'keytar': 'commonjs keytar',
|
||||
},
|
||||
// TODO@eamodio Deal with nls properly for the browser
|
||||
// Specify module here, so we can stop the vscode-nls-dev loader from overwriting nls calls
|
||||
module: {
|
||||
rules: [{
|
||||
test: /\.ts$/,
|
||||
exclude: /node_modules/,
|
||||
use: [{
|
||||
// configure TypeScript loader:
|
||||
// * enable sources maps for end-to-end source maps
|
||||
loader: 'ts-loader',
|
||||
options: {
|
||||
compilerOptions: {
|
||||
'sourceMap': true,
|
||||
}
|
||||
}
|
||||
}]
|
||||
}]
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
'node-fetch': path.resolve(__dirname, 'node_modules/node-fetch/browser.js'),
|
||||
'vscode-extension-telemetry': path.resolve(__dirname, '../../build/polyfills/vscode-extension-telemetry.js'),
|
||||
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js'),
|
||||
'uuid': path.resolve(__dirname, 'node_modules/uuid/dist/esm-browser/index.js')
|
||||
},
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
},
|
||||
"aiKey": "AIF-d9b70cd4-b9f9-4d70-929b-a071c400b217",
|
||||
"main": "./out/extension.js",
|
||||
"browser": "./dist/extension.js",
|
||||
"browser": "./dist/browser/extension.js",
|
||||
"scripts": {
|
||||
"compile": "gulp compile-extension:github-authentication",
|
||||
"compile-web": "npx webpack-cli --config extension-browser.webpack.config --mode none",
|
||||
|
||||
@@ -22,7 +22,7 @@ interface SessionData {
|
||||
}
|
||||
|
||||
export class GitHubAuthenticationProvider {
|
||||
private _sessions: vscode.AuthenticationSession2[] = [];
|
||||
private _sessions: vscode.AuthenticationSession[] = [];
|
||||
private _githubServer = new GitHubServer();
|
||||
|
||||
public async initialize(): Promise<void> {
|
||||
@@ -37,7 +37,7 @@ export class GitHubAuthenticationProvider {
|
||||
|
||||
private pollForChange() {
|
||||
setTimeout(async () => {
|
||||
let storedSessions: vscode.AuthenticationSession2[];
|
||||
let storedSessions: vscode.AuthenticationSession[];
|
||||
try {
|
||||
storedSessions = await this.readSessions();
|
||||
} catch (e) {
|
||||
@@ -80,12 +80,12 @@ export class GitHubAuthenticationProvider {
|
||||
}, 1000 * 30);
|
||||
}
|
||||
|
||||
private async readSessions(): Promise<vscode.AuthenticationSession2[]> {
|
||||
private async readSessions(): Promise<vscode.AuthenticationSession[]> {
|
||||
const storedSessions = await keychain.getToken();
|
||||
if (storedSessions) {
|
||||
try {
|
||||
const sessionData: SessionData[] = JSON.parse(storedSessions);
|
||||
const sessionPromises = sessionData.map(async (session: SessionData): Promise<vscode.AuthenticationSession2> => {
|
||||
const sessionPromises = sessionData.map(async (session: SessionData): Promise<vscode.AuthenticationSession> => {
|
||||
const needsUserInfo = !session.account;
|
||||
let userInfo: { id: string, accountName: string };
|
||||
if (needsUserInfo) {
|
||||
@@ -121,11 +121,11 @@ export class GitHubAuthenticationProvider {
|
||||
await keychain.setToken(JSON.stringify(this._sessions));
|
||||
}
|
||||
|
||||
get sessions(): vscode.AuthenticationSession2[] {
|
||||
get sessions(): vscode.AuthenticationSession[] {
|
||||
return this._sessions;
|
||||
}
|
||||
|
||||
public async login(scopes: string): Promise<vscode.AuthenticationSession2> {
|
||||
public async login(scopes: string): Promise<vscode.AuthenticationSession> {
|
||||
const token = await this._githubServer.login(scopes);
|
||||
const session = await this.tokenToSession(token, scopes.split(' '));
|
||||
await this.setToken(session);
|
||||
@@ -136,12 +136,12 @@ export class GitHubAuthenticationProvider {
|
||||
this._githubServer.manuallyProvideToken();
|
||||
}
|
||||
|
||||
private async tokenToSession(token: string, scopes: string[]): Promise<vscode.AuthenticationSession2> {
|
||||
private async tokenToSession(token: string, scopes: string[]): Promise<vscode.AuthenticationSession> {
|
||||
const userInfo = await this._githubServer.getUserInfo(token);
|
||||
return new vscode.AuthenticationSession2(uuid(), token, { displayName: userInfo.accountName, id: userInfo.id }, scopes);
|
||||
return new vscode.AuthenticationSession(uuid(), token, { displayName: userInfo.accountName, id: userInfo.id }, scopes);
|
||||
}
|
||||
|
||||
private async setToken(session: vscode.AuthenticationSession2): Promise<void> {
|
||||
private async setToken(session: vscode.AuthenticationSession): Promise<void> {
|
||||
const sessionIndex = this._sessions.findIndex(s => s.id === session.id);
|
||||
if (sessionIndex > -1) {
|
||||
this._sessions.splice(sessionIndex, 1, session);
|
||||
|
||||
@@ -7,18 +7,19 @@
|
||||
|
||||
'use strict';
|
||||
const path = require('path');
|
||||
const withDefaults = require('../shared.webpack.config');
|
||||
const withBrowserDefaults = require('../shared.webpack.config').browser;
|
||||
|
||||
module.exports = withDefaults({
|
||||
const config = withBrowserDefaults({
|
||||
context: __dirname,
|
||||
target: 'webworker',
|
||||
node: false,
|
||||
entry: {
|
||||
extension: './src/extension.ts',
|
||||
extension: './src/extension.ts'
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
'node-fetch': path.resolve(__dirname, 'node_modules/node-fetch/browser.js'),
|
||||
},
|
||||
'node-fetch': path.resolve(__dirname, 'node_modules/node-fetch/browser.js')
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = config;
|
||||
|
||||
@@ -13,11 +13,93 @@
|
||||
"Other"
|
||||
],
|
||||
"activationEvents": [
|
||||
"onFileSystem:github"
|
||||
"onFileSystem:codespace",
|
||||
"onFileSystem:github",
|
||||
"onCommand:githubBrowser.openRepository"
|
||||
],
|
||||
"browser": "./dist/extension.js",
|
||||
"browser": "./dist/browser/extension.js",
|
||||
"main": "./out/extension.js",
|
||||
"contributes": {
|
||||
"commands": [
|
||||
{
|
||||
"command": "githubBrowser.commit",
|
||||
"title": "Commit",
|
||||
"icon": "$(check)",
|
||||
"category": "GitHub Browser"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.discardChanges",
|
||||
"title": "Discard Changes",
|
||||
"icon": "$(discard)",
|
||||
"category": "GitHub Browser"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.openChanges",
|
||||
"title": "Open Changes",
|
||||
"icon": "$(git-compare)",
|
||||
"category": "GitHub Browser"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.openFile",
|
||||
"title": "Open File",
|
||||
"icon": "$(go-to-file)",
|
||||
"category": "GitHub Browser"
|
||||
}
|
||||
],
|
||||
"menus": {
|
||||
"commandPalette": [
|
||||
{
|
||||
"command": "githubBrowser.commit",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.discardChanges",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.openChanges",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.openFile",
|
||||
"when": "false"
|
||||
}
|
||||
],
|
||||
"scm/title": [
|
||||
{
|
||||
"command": "githubBrowser.commit",
|
||||
"group": "navigation",
|
||||
"when": "scmProvider == github"
|
||||
}
|
||||
],
|
||||
"scm/resourceState/context": [
|
||||
{
|
||||
"command": "githubBrowser.openFile",
|
||||
"when": "scmProvider == github && scmResourceGroup == github.changes",
|
||||
"group": "inline@0"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.discardChanges",
|
||||
"when": "scmProvider == github && scmResourceGroup == github.changes",
|
||||
"group": "inline@1"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.openChanges",
|
||||
"when": "scmProvider == github && scmResourceGroup == github.changes",
|
||||
"group": "navigation@0"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.openFile",
|
||||
"when": "scmProvider == github && scmResourceGroup == github.changes",
|
||||
"group": "navigation@1"
|
||||
},
|
||||
{
|
||||
"command": "githubBrowser.discardChanges",
|
||||
"when": "scmProvider == github && scmResourceGroup == github.changes",
|
||||
"group": "1_modification@0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"resourceLabelFormatters": [
|
||||
{
|
||||
"scheme": "github",
|
||||
@@ -36,6 +118,24 @@
|
||||
"separator": "/",
|
||||
"workspaceSuffix": "GitHub"
|
||||
}
|
||||
},
|
||||
{
|
||||
"scheme": "codespace",
|
||||
"authority": "HEAD",
|
||||
"formatting": {
|
||||
"label": "github.com${path}",
|
||||
"separator": "/",
|
||||
"workspaceSuffix": "GitHub"
|
||||
}
|
||||
},
|
||||
{
|
||||
"scheme": "codespace",
|
||||
"authority": "*",
|
||||
"formatting": {
|
||||
"label": "github.com${path} (${authority})",
|
||||
"separator": "/",
|
||||
"workspaceSuffix": "GitHub"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -47,14 +147,13 @@
|
||||
"vscode:prepublish": "npm run compile"
|
||||
},
|
||||
"dependencies": {
|
||||
"@octokit/graphql": "4.5.0",
|
||||
"@octokit/rest": "17.11.0",
|
||||
"@octokit/graphql": "4.5.1",
|
||||
"@octokit/rest": "18.0.0",
|
||||
"fuzzysort": "1.1.4",
|
||||
"node-fetch": "2.6.0"
|
||||
"node-fetch": "2.6.0",
|
||||
"vscode-nls": "4.1.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node-fetch": "2.5.7",
|
||||
"webpack": "4.43.0",
|
||||
"webpack-cli": "3.3.11"
|
||||
"@types/node-fetch": "2.5.7"
|
||||
}
|
||||
}
|
||||
|
||||
380
extensions/github-browser/src/changeStore.ts
Normal file
@@ -0,0 +1,380 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
import { commands, Event, EventEmitter, FileStat, FileType, Memento, TextDocumentShowOptions, Uri, ViewColumn } from 'vscode';
|
||||
import { getRootUri, getRelativePath, isChild } from './extension';
|
||||
import { sha1 } from './sha1';
|
||||
|
||||
const textDecoder = new TextDecoder();
|
||||
|
||||
interface CreateOperation<T extends string | Uri = string> {
|
||||
type: 'created';
|
||||
size: number;
|
||||
timestamp: number;
|
||||
uri: T;
|
||||
hash: string;
|
||||
originalHash: string;
|
||||
}
|
||||
|
||||
interface ChangeOperation<T extends string | Uri = string> {
|
||||
type: 'changed';
|
||||
size: number;
|
||||
timestamp: number;
|
||||
uri: T;
|
||||
hash: string;
|
||||
originalHash: string;
|
||||
}
|
||||
|
||||
interface DeleteOperation<T extends string | Uri = string> {
|
||||
type: 'deleted';
|
||||
size: undefined;
|
||||
timestamp: number;
|
||||
uri: T;
|
||||
hash: undefined;
|
||||
originalHash: undefined;
|
||||
}
|
||||
|
||||
export type Operation = CreateOperation<Uri> | ChangeOperation<Uri> | DeleteOperation<Uri>;
|
||||
type StoredOperation = CreateOperation | ChangeOperation | DeleteOperation;
|
||||
|
||||
const workingOperationsKeyPrefix = 'github.working.changes|';
|
||||
const workingFileKeyPrefix = 'github.working|';
|
||||
|
||||
function fromSerialized(operations: StoredOperation): Operation {
|
||||
return { ...operations, uri: Uri.parse(operations.uri) };
|
||||
}
|
||||
|
||||
interface CreatedFileChangeStoreEvent {
|
||||
type: 'created';
|
||||
rootUri: Uri;
|
||||
uri: Uri;
|
||||
}
|
||||
|
||||
interface ChangedFileChangeStoreEvent {
|
||||
type: 'changed';
|
||||
rootUri: Uri;
|
||||
uri: Uri;
|
||||
}
|
||||
|
||||
interface DeletedFileChangeStoreEvent {
|
||||
type: 'deleted';
|
||||
rootUri: Uri;
|
||||
uri: Uri;
|
||||
}
|
||||
|
||||
type ChangeStoreEvent = CreatedFileChangeStoreEvent | ChangedFileChangeStoreEvent | DeletedFileChangeStoreEvent;
|
||||
|
||||
function toChangeStoreEvent(operation: Operation | StoredOperation, rootUri: Uri, uri?: Uri): ChangeStoreEvent {
|
||||
return {
|
||||
type: operation.type,
|
||||
rootUri: rootUri,
|
||||
uri: uri ?? (typeof operation.uri === 'string' ? Uri.parse(operation.uri) : operation.uri)
|
||||
};
|
||||
}
|
||||
|
||||
export interface IChangeStore {
|
||||
onDidChange: Event<ChangeStoreEvent>;
|
||||
|
||||
acceptAll(rootUri: Uri): Promise<void>;
|
||||
discard(uri: Uri): Promise<void>;
|
||||
discardAll(rootUri: Uri): Promise<void>;
|
||||
|
||||
getChanges(rootUri: Uri): Operation[];
|
||||
getContent(uri: Uri): string | undefined;
|
||||
|
||||
openChanges(uri: Uri, original: Uri): void;
|
||||
openFile(uri: Uri): void;
|
||||
}
|
||||
|
||||
export interface IWritableChangeStore {
|
||||
onDidChange: Event<ChangeStoreEvent>;
|
||||
|
||||
hasChanges(rootUri: Uri): boolean;
|
||||
|
||||
getContent(uri: Uri): string | undefined;
|
||||
getStat(uri: Uri): FileStat | undefined;
|
||||
updateDirectoryEntries(uri: Uri, entries: [string, FileType][]): [string, FileType][];
|
||||
|
||||
onFileChanged(uri: Uri, content: Uint8Array, originalContent: () => Uint8Array | Thenable<Uint8Array>): Promise<void>;
|
||||
onFileCreated(uri: Uri, content: Uint8Array): Promise<void>;
|
||||
onFileDeleted(uri: Uri): Promise<void>;
|
||||
}
|
||||
|
||||
export class ChangeStore implements IChangeStore, IWritableChangeStore {
|
||||
private _onDidChange = new EventEmitter<ChangeStoreEvent>();
|
||||
get onDidChange(): Event<ChangeStoreEvent> {
|
||||
return this._onDidChange.event;
|
||||
}
|
||||
|
||||
constructor(private readonly memento: Memento) { }
|
||||
|
||||
async acceptAll(rootUri: Uri): Promise<void> {
|
||||
const operations = this.getChanges(rootUri);
|
||||
|
||||
await this.saveWorkingOperations(rootUri, undefined);
|
||||
|
||||
for (const operation of operations) {
|
||||
await this.discardWorkingContent(operation.uri);
|
||||
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri));
|
||||
}
|
||||
}
|
||||
|
||||
async discard(uri: Uri): Promise<void> {
|
||||
const rootUri = getRootUri(uri);
|
||||
if (rootUri === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const key = uri.toString();
|
||||
|
||||
const operations = this.getWorkingOperations(rootUri);
|
||||
const index = operations.findIndex(c => c.uri === key);
|
||||
if (index === -1) {
|
||||
return;
|
||||
}
|
||||
|
||||
const [operation] = operations.splice(index, 1);
|
||||
await this.saveWorkingOperations(rootUri, operations);
|
||||
await this.discardWorkingContent(uri);
|
||||
|
||||
this._onDidChange.fire({
|
||||
type: operation.type === 'created' ? 'deleted' : operation.type === 'deleted' ? 'created' : 'changed',
|
||||
rootUri: rootUri,
|
||||
uri: uri
|
||||
});
|
||||
}
|
||||
|
||||
async discardAll(rootUri: Uri): Promise<void> {
|
||||
const operations = this.getChanges(rootUri);
|
||||
|
||||
await this.saveWorkingOperations(rootUri, undefined);
|
||||
|
||||
for (const operation of operations) {
|
||||
await this.discardWorkingContent(operation.uri);
|
||||
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri));
|
||||
}
|
||||
}
|
||||
|
||||
getChanges(rootUri: Uri) {
|
||||
return this.getWorkingOperations(rootUri).map(c => fromSerialized(c));
|
||||
}
|
||||
|
||||
getContent(uri: Uri): string | undefined {
|
||||
return this.memento.get(`${workingFileKeyPrefix}${uri.toString()}`);
|
||||
}
|
||||
|
||||
getStat(uri: Uri): FileStat | undefined {
|
||||
const key = uri.toString();
|
||||
const operation = this.getChanges(getRootUri(uri)!).find(c => c.uri.toString() === key);
|
||||
if (operation === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return {
|
||||
type: FileType.File,
|
||||
size: operation.size ?? 0,
|
||||
ctime: 0,
|
||||
mtime: operation.timestamp
|
||||
};
|
||||
}
|
||||
|
||||
hasChanges(rootUri: Uri): boolean {
|
||||
return this.getWorkingOperations(rootUri).length !== 0;
|
||||
}
|
||||
|
||||
updateDirectoryEntries(uri: Uri, entries: [string, FileType][]): [string, FileType][] {
|
||||
const rootUri = getRootUri(uri);
|
||||
if (rootUri === undefined) {
|
||||
return entries;
|
||||
}
|
||||
|
||||
const folderPath = getRelativePath(rootUri, uri);
|
||||
|
||||
const operations = this.getChanges(rootUri);
|
||||
for (const operation of operations) {
|
||||
switch (operation.type) {
|
||||
case 'changed':
|
||||
continue;
|
||||
|
||||
case 'created': {
|
||||
const filePath = getRelativePath(rootUri, operation.uri);
|
||||
if (isChild(folderPath, filePath)) {
|
||||
entries.push([filePath, FileType.File]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'deleted': {
|
||||
const filePath = getRelativePath(rootUri, operation.uri);
|
||||
if (isChild(folderPath, filePath)) {
|
||||
const index = entries.findIndex(([path]) => path === filePath);
|
||||
if (index !== -1) {
|
||||
entries.splice(index, 1);
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
async onFileChanged(uri: Uri, content: Uint8Array, originalContent: () => Uint8Array | Thenable<Uint8Array>): Promise<void> {
|
||||
const rootUri = getRootUri(uri);
|
||||
if (rootUri === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const key = uri.toString();
|
||||
|
||||
const operations = this.getWorkingOperations(rootUri);
|
||||
|
||||
const hash = await sha1(content);
|
||||
|
||||
let operation = operations.find(c => c.uri === key);
|
||||
if (operation === undefined) {
|
||||
const originalHash = await sha1(await originalContent!());
|
||||
if (hash === originalHash) {
|
||||
return;
|
||||
}
|
||||
|
||||
operation = {
|
||||
type: 'changed',
|
||||
size: content.byteLength,
|
||||
timestamp: Date.now(),
|
||||
uri: key,
|
||||
hash: hash!,
|
||||
originalHash: originalHash
|
||||
} as ChangeOperation;
|
||||
operations.push(operation);
|
||||
|
||||
await this.saveWorkingOperations(rootUri, operations);
|
||||
await this.saveWorkingContent(uri, textDecoder.decode(content));
|
||||
} else if (hash! === operation.originalHash) {
|
||||
operations.splice(operations.indexOf(operation), 1);
|
||||
|
||||
await this.saveWorkingOperations(rootUri, operations);
|
||||
await this.discardWorkingContent(uri);
|
||||
} else if (operation.hash !== hash) {
|
||||
operation.hash = hash!;
|
||||
operation.timestamp = Date.now();
|
||||
|
||||
await this.saveWorkingOperations(rootUri, operations);
|
||||
await this.saveWorkingContent(uri, textDecoder.decode(content));
|
||||
}
|
||||
|
||||
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri, uri));
|
||||
}
|
||||
|
||||
async onFileCreated(uri: Uri, content: Uint8Array): Promise<void> {
|
||||
const rootUri = getRootUri(uri);
|
||||
if (rootUri === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const key = uri.toString();
|
||||
|
||||
const operations = this.getWorkingOperations(rootUri);
|
||||
|
||||
const hash = await sha1(content);
|
||||
|
||||
let operation = operations.find(c => c.uri === key);
|
||||
if (operation === undefined) {
|
||||
operation = {
|
||||
type: 'created',
|
||||
size: content.byteLength,
|
||||
timestamp: Date.now(),
|
||||
uri: key,
|
||||
hash: hash!,
|
||||
originalHash: hash!
|
||||
} as CreateOperation;
|
||||
operations.push(operation);
|
||||
|
||||
await this.saveWorkingOperations(rootUri, operations);
|
||||
await this.saveWorkingContent(uri, textDecoder.decode(content));
|
||||
} else {
|
||||
// Shouldn't happen, but if it does just update the contents
|
||||
operation.hash = hash!;
|
||||
operation.timestamp = Date.now();
|
||||
|
||||
await this.saveWorkingOperations(rootUri, operations);
|
||||
await this.saveWorkingContent(uri, textDecoder.decode(content));
|
||||
}
|
||||
|
||||
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri, uri));
|
||||
}
|
||||
|
||||
async onFileDeleted(uri: Uri): Promise<void> {
|
||||
const rootUri = getRootUri(uri);
|
||||
if (rootUri === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const key = uri.toString();
|
||||
|
||||
const operations = this.getWorkingOperations(rootUri);
|
||||
|
||||
let operation = operations.find(c => c.uri === key);
|
||||
if (operation !== undefined) {
|
||||
operations.splice(operations.indexOf(operation), 1);
|
||||
}
|
||||
|
||||
const wasCreated = operation?.type === 'created';
|
||||
|
||||
operation = {
|
||||
type: 'deleted',
|
||||
timestamp: Date.now(),
|
||||
uri: key,
|
||||
} as DeleteOperation;
|
||||
|
||||
// Only track the delete, if we weren't tracking the create
|
||||
if (!wasCreated) {
|
||||
operations.push(operation);
|
||||
}
|
||||
|
||||
await this.saveWorkingOperations(rootUri, operations);
|
||||
await this.discardWorkingContent(uri);
|
||||
|
||||
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri, uri));
|
||||
}
|
||||
|
||||
async openChanges(uri: Uri, original: Uri) {
|
||||
const opts: TextDocumentShowOptions = {
|
||||
preserveFocus: false,
|
||||
preview: true,
|
||||
viewColumn: ViewColumn.Active
|
||||
};
|
||||
|
||||
await commands.executeCommand('vscode.diff', original, uri, `${uri.fsPath} (Working Tree)`, opts);
|
||||
}
|
||||
|
||||
async openFile(uri: Uri) {
|
||||
const opts: TextDocumentShowOptions = {
|
||||
preserveFocus: false,
|
||||
preview: false,
|
||||
viewColumn: ViewColumn.Active
|
||||
};
|
||||
|
||||
await commands.executeCommand('vscode.open', uri, opts);
|
||||
}
|
||||
|
||||
private getWorkingOperations(rootUri: Uri): StoredOperation[] {
|
||||
return this.memento.get(`${workingOperationsKeyPrefix}${rootUri.toString()}`, []);
|
||||
}
|
||||
|
||||
private async saveWorkingOperations(rootUri: Uri, operations: StoredOperation[] | undefined): Promise<void> {
|
||||
await this.memento.update(`${workingOperationsKeyPrefix}${rootUri.toString()}`, operations);
|
||||
}
|
||||
|
||||
private async saveWorkingContent(uri: Uri, content: string): Promise<void> {
|
||||
await this.memento.update(`${workingFileKeyPrefix}${uri.toString()}`, content);
|
||||
}
|
||||
|
||||
private async discardWorkingContent(uri: Uri): Promise<void> {
|
||||
await this.memento.update(`${workingFileKeyPrefix}${uri.toString()}`, undefined);
|
||||
}
|
||||
}
|
||||
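Aside (not part of the commit): the ChangeStore above persists pending edits in the workspace Memento — operations under 'github.working.changes|<rootUri>' and edited file contents under 'github.working|<uri>'. A hedged sketch of listing pending operations for a workspace root; the helper name and wiring are illustrative:

import { ExtensionContext, Uri } from 'vscode';
import { ChangeStore } from './changeStore';

// Sketch: wire a ChangeStore to workspace state and dump its pending operations.
export function listPendingChanges(context: ExtensionContext, rootUri: Uri): void {
	const store = new ChangeStore(context.workspaceState);
	for (const op of store.getChanges(rootUri)) {
		console.log(`${op.type}: ${op.uri.toString()} @ ${new Date(op.timestamp).toISOString()}`);
	}
}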
36
extensions/github-browser/src/contextStore.ts
Normal file
@@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';
import { Event, EventEmitter, Memento, Uri } from 'vscode';

export const contextKeyPrefix = 'github.context|';

export class ContextStore<T> {
private _onDidChange = new EventEmitter<Uri>();
get onDidChange(): Event<Uri> {
return this._onDidChange.event;
}

constructor(private readonly memento: Memento, private readonly scheme: string) { }

delete(uri: Uri) {
return this.set(uri, undefined);
}

get(uri: Uri): T | undefined {
return this.memento.get<T>(`${contextKeyPrefix}${uri.toString()}`);
}


async set(uri: Uri, context: T | undefined) {
if (uri.scheme !== this.scheme) {
throw new Error(`Invalid context scheme: ${uri.scheme}`);
}

await this.memento.update(`${contextKeyPrefix}${uri.toString()}`, context);
this._onDidChange.fire(uri);
}
}
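Aside (not part of the commit): a small sketch of using the ContextStore above to persist a GitHubApiContext per root, roughly as extension.ts wires it; the function name and the placeholder sha are illustrative:

import { ExtensionContext, Uri } from 'vscode';
import { ContextStore } from './contextStore';
import { GitHubApiContext } from './github/api';

// Sketch: remember the last-seen commit sha for a root URI in workspace state.
// The 40-zero sha is a placeholder, not a real commit.
export async function rememberSha(context: ExtensionContext, root: Uri): Promise<void> {
	const store = new ContextStore<GitHubApiContext>(context.workspaceState, root.scheme);
	store.onDidChange(uri => console.log(`context changed for ${uri.toString()}`));
	await store.set(root, { sha: '0000000000000000000000000000000000000000', timestamp: Date.now() });
	console.log(store.get(root));
}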
@@ -3,9 +3,71 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as vscode from 'vscode';
|
||||
import { GitHubFS } from './githubfs';
|
||||
import { ExtensionContext, Uri, workspace } from 'vscode';
|
||||
import { ChangeStore } from './changeStore';
|
||||
import { ContextStore } from './contextStore';
|
||||
import { VirtualFS } from './fs';
|
||||
import { GitHubApiContext, GitHubApi } from './github/api';
|
||||
import { GitHubFS } from './github/fs';
|
||||
import { VirtualSCM } from './scm';
|
||||
|
||||
export function activate(context: vscode.ExtensionContext) {
|
||||
context.subscriptions.push(new GitHubFS());
|
||||
// const repositoryRegex = /^(?:(?:https:\/\/)?github.com\/)?([^\/]+)\/([^\/]+?)(?:\/|.git|$)/i;
|
||||
|
||||
export function activate(context: ExtensionContext) {
|
||||
const contextStore = new ContextStore<GitHubApiContext>(context.workspaceState, GitHubFS.scheme);
|
||||
const changeStore = new ChangeStore(context.workspaceState);
|
||||
|
||||
const githubApi = new GitHubApi(contextStore);
|
||||
const gitHubFS = new GitHubFS(githubApi);
|
||||
const virtualFS = new VirtualFS('codespace', GitHubFS.scheme, contextStore, changeStore, gitHubFS);
|
||||
|
||||
context.subscriptions.push(
|
||||
githubApi,
|
||||
gitHubFS,
|
||||
virtualFS,
|
||||
new VirtualSCM(GitHubFS.scheme, githubApi, changeStore)
|
||||
);
|
||||
|
||||
// commands.registerCommand('githubBrowser.openRepository', async () => {
|
||||
// const value = await window.showInputBox({
|
||||
// placeHolder: 'e.g. https://github.com/microsoft/vscode',
|
||||
// prompt: 'Enter a GitHub repository url',
|
||||
// validateInput: value => repositoryRegex.test(value) ? undefined : 'Invalid repository url'
|
||||
// });
|
||||
|
||||
// if (value) {
|
||||
// const match = repositoryRegex.exec(value);
|
||||
// if (match) {
|
||||
// const [, owner, repo] = match;
|
||||
|
||||
// const uri = Uri.parse(`codespace://HEAD/${owner}/${repo}`);
|
||||
// openWorkspace(uri, repo, 'currentWindow');
|
||||
// }
|
||||
// }
|
||||
// });
|
||||
}
|
||||
|
||||
export function getRelativePath(rootUri: Uri, uri: Uri) {
|
||||
return uri.path.substr(rootUri.path.length + 1);
|
||||
}
|
||||
|
||||
export function getRootUri(uri: Uri) {
|
||||
return workspace.getWorkspaceFolder(uri)?.uri;
|
||||
}
|
||||
|
||||
export function isChild(folderPath: string, filePath: string) {
|
||||
return isDescendent(folderPath, filePath) && filePath.substr(folderPath.length + (folderPath.endsWith('/') ? 0 : 1)).split('/').length === 1;
|
||||
}
|
||||
|
||||
export function isDescendent(folderPath: string, filePath: string) {
|
||||
return folderPath.length === 0 || filePath.startsWith(folderPath.endsWith('/') ? folderPath : `${folderPath}/`);
|
||||
}
|
||||
|
||||
// function openWorkspace(uri: Uri, name: string, location: 'currentWindow' | 'newWindow' | 'addToCurrentWorkspace') {
|
||||
// if (location === 'addToCurrentWorkspace') {
|
||||
// const count = (workspace.workspaceFolders && workspace.workspaceFolders.length) || 0;
|
||||
// return workspace.updateWorkspaceFolders(count, 0, { uri: uri, name: name });
|
||||
// }
|
||||
|
||||
// return commands.executeCommand('vscode.openFolder', uri, location === 'newWindow');
|
||||
// }
|
||||
|
||||
220
extensions/github-browser/src/fs.ts
Normal file
@@ -0,0 +1,220 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
import {
|
||||
CancellationToken,
|
||||
Disposable,
|
||||
Event,
|
||||
EventEmitter,
|
||||
FileChangeEvent,
|
||||
FileChangeType,
|
||||
FileSearchOptions,
|
||||
FileSearchProvider,
|
||||
FileSearchQuery,
|
||||
FileStat,
|
||||
FileSystemError,
|
||||
FileSystemProvider,
|
||||
FileType,
|
||||
Progress,
|
||||
TextSearchOptions,
|
||||
TextSearchProvider,
|
||||
TextSearchQuery,
|
||||
TextSearchResult,
|
||||
Uri,
|
||||
workspace,
|
||||
} from 'vscode';
|
||||
import { IWritableChangeStore } from './changeStore';
|
||||
import { ContextStore } from './contextStore';
|
||||
import { GitHubApiContext } from './github/api';
|
||||
|
||||
const emptyDisposable = { dispose: () => { /* noop */ } };
|
||||
const textEncoder = new TextEncoder();
|
||||
|
||||
export class VirtualFS implements FileSystemProvider, FileSearchProvider, TextSearchProvider, Disposable {
|
||||
private _onDidChangeFile = new EventEmitter<FileChangeEvent[]>();
|
||||
get onDidChangeFile(): Event<FileChangeEvent[]> {
|
||||
return this._onDidChangeFile.event;
|
||||
}
|
||||
|
||||
private readonly disposable: Disposable;
|
||||
|
||||
constructor(
|
||||
readonly scheme: string,
|
||||
private readonly originalScheme: string,
|
||||
contextStore: ContextStore<GitHubApiContext>,
|
||||
private readonly changeStore: IWritableChangeStore,
|
||||
private readonly fs: FileSystemProvider & FileSearchProvider & TextSearchProvider
|
||||
) {
|
||||
// TODO@eamodio listen for workspace folder changes
|
||||
for (const folder of workspace.workspaceFolders ?? []) {
|
||||
const uri = this.getOriginalResource(folder.uri);
|
||||
|
||||
// If we have a saved context, but no longer have any changes, reset the context
|
||||
// We only do this on startup/reload to keep things consistent
|
||||
if (contextStore.get(uri) !== undefined && !changeStore.hasChanges(folder.uri)) {
|
||||
contextStore.delete(uri);
|
||||
}
|
||||
}
|
||||
|
||||
this.disposable = Disposable.from(
|
||||
workspace.registerFileSystemProvider(scheme, this, {
|
||||
isCaseSensitive: true,
|
||||
}),
|
||||
workspace.registerFileSearchProvider(scheme, this),
|
||||
workspace.registerTextSearchProvider(scheme, this),
|
||||
changeStore.onDidChange(e => {
|
||||
switch (e.type) {
|
||||
case 'created':
|
||||
this._onDidChangeFile.fire([{ type: FileChangeType.Created, uri: e.uri }]);
|
||||
break;
|
||||
case 'changed':
|
||||
this._onDidChangeFile.fire([{ type: FileChangeType.Changed, uri: e.uri }]);
|
||||
break;
|
||||
case 'deleted':
|
||||
this._onDidChangeFile.fire([{ type: FileChangeType.Deleted, uri: e.uri }]);
|
||||
break;
|
||||
}
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this.disposable?.dispose();
|
||||
}
|
||||
|
||||
private getOriginalResource(uri: Uri): Uri {
|
||||
return uri.with({ scheme: this.originalScheme });
|
||||
}
|
||||
|
||||
private getVirtualResource(uri: Uri): Uri {
|
||||
return uri.with({ scheme: this.scheme });
|
||||
}
|
||||
|
||||
//#region FileSystemProvider
|
||||
|
||||
watch(): Disposable {
|
||||
return emptyDisposable;
|
||||
}
|
||||
|
||||
async stat(uri: Uri): Promise<FileStat> {
|
||||
let stat = this.changeStore.getStat(uri);
|
||||
if (stat !== undefined) {
|
||||
return stat;
|
||||
}
|
||||
|
||||
stat = await this.fs.stat(this.getOriginalResource(uri));
|
||||
return stat;
|
||||
}
|
||||
|
||||
async readDirectory(uri: Uri): Promise<[string, FileType][]> {
|
||||
let entries = await this.fs.readDirectory(this.getOriginalResource(uri));
|
||||
entries = this.changeStore.updateDirectoryEntries(uri, entries);
|
||||
return entries;
|
||||
}
|
||||
|
||||
createDirectory(_uri: Uri): void | Thenable<void> {
|
||||
// TODO@eamodio only support files for now
|
||||
throw FileSystemError.NoPermissions();
|
||||
}
|
||||
|
||||
async readFile(uri: Uri): Promise<Uint8Array> {
|
||||
const content = this.changeStore.getContent(uri);
|
||||
if (content !== undefined) {
|
||||
return textEncoder.encode(content);
|
||||
}
|
||||
|
||||
const data = await this.fs.readFile(this.getOriginalResource(uri));
|
||||
return data;
|
||||
}
|
||||
|
||||
async writeFile(uri: Uri, content: Uint8Array, options: { create: boolean, overwrite: boolean }): Promise<void> {
|
||||
let stat;
|
||||
try {
|
||||
stat = await this.stat(uri);
|
||||
if (!options.overwrite) {
|
||||
throw FileSystemError.FileExists();
|
||||
}
|
||||
} catch (ex) {
|
||||
if (ex instanceof FileSystemError && ex.code === 'FileNotFound') {
|
||||
if (!options.create) {
|
||||
throw FileSystemError.FileNotFound();
|
||||
}
|
||||
} else {
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
|
||||
if (stat === undefined) {
|
||||
await this.changeStore.onFileCreated(uri, content);
|
||||
} else {
|
||||
await this.changeStore.onFileChanged(uri, content, () => this.fs.readFile(this.getOriginalResource(uri)));
|
||||
}
|
||||
}
|
||||
|
||||
async delete(uri: Uri, _options: { recursive: boolean }): Promise<void> {
|
||||
const stat = await this.stat(uri);
|
||||
if (stat.type !== FileType.File) {
|
||||
throw FileSystemError.NoPermissions();
|
||||
}
|
||||
|
||||
await this.changeStore.onFileDeleted(uri);
|
||||
}
|
||||
|
||||
async rename(oldUri: Uri, newUri: Uri, options: { overwrite: boolean }): Promise<void> {
|
||||
const stat = await this.stat(oldUri);
|
||||
// TODO@eamodio only support files for now
|
||||
if (stat.type !== FileType.File) {
|
||||
throw FileSystemError.NoPermissions();
|
||||
}
|
||||
|
||||
const content = await this.readFile(oldUri);
|
||||
await this.writeFile(newUri, content, { create: true, overwrite: options.overwrite });
|
||||
await this.delete(oldUri, { recursive: false });
|
||||
}
|
||||
|
||||
async copy(source: Uri, destination: Uri, options: { overwrite: boolean }): Promise<void> {
|
||||
const stat = await this.stat(source);
|
||||
// TODO@eamodio only support files for now
|
||||
if (stat.type !== FileType.File) {
|
||||
throw FileSystemError.NoPermissions();
|
||||
}
|
||||
|
||||
const content = await this.readFile(source);
|
||||
await this.writeFile(destination, content, { create: true, overwrite: options.overwrite });
|
||||
}
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region FileSearchProvider
|
||||
|
||||
provideFileSearchResults(
|
||||
query: FileSearchQuery,
|
||||
options: FileSearchOptions,
|
||||
token: CancellationToken,
|
||||
) {
|
||||
return this.fs.provideFileSearchResults(query, { ...options, folder: this.getOriginalResource(options.folder) }, token);
|
||||
}
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region TextSearchProvider
|
||||
|
||||
provideTextSearchResults(
|
||||
query: TextSearchQuery,
|
||||
options: TextSearchOptions,
|
||||
progress: Progress<TextSearchResult>,
|
||||
token: CancellationToken,
|
||||
) {
|
||||
return this.fs.provideTextSearchResults(
|
||||
query,
|
||||
{ ...options, folder: this.getOriginalResource(options.folder) },
|
||||
{ report: (result: TextSearchResult) => progress.report({ ...result, uri: this.getVirtualResource(result.uri) }) },
|
||||
token
|
||||
);
|
||||
}
|
||||
|
||||
//#endregion
|
||||
}
|
||||
87
extensions/github-browser/src/gate.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const emptyStr = '';
|
||||
|
||||
function defaultResolver(...args: any[]): string {
|
||||
if (args.length === 1) {
|
||||
const arg0 = args[0];
|
||||
if (arg0 === undefined || arg0 === null) {
|
||||
return emptyStr;
|
||||
}
|
||||
if (typeof arg0 === 'string') {
|
||||
return arg0;
|
||||
}
|
||||
if (typeof arg0 === 'number' || typeof arg0 === 'boolean') {
|
||||
return String(arg0);
|
||||
}
|
||||
|
||||
return JSON.stringify(arg0);
|
||||
}
|
||||
|
||||
return JSON.stringify(args);
|
||||
}
|
||||
|
||||
function iPromise<T>(obj: T | Promise<T>): obj is Promise<T> {
|
||||
return typeof (obj as Promise<T>)?.then === 'function';
|
||||
}
|
||||
|
||||
export function gate<T extends (...arg: any) => any>(resolver?: (...args: Parameters<T>) => string) {
|
||||
return (_target: any, key: string, descriptor: PropertyDescriptor) => {
|
||||
let fn: Function | undefined;
|
||||
if (typeof descriptor.value === 'function') {
|
||||
fn = descriptor.value;
|
||||
} else if (typeof descriptor.get === 'function') {
|
||||
fn = descriptor.get;
|
||||
}
|
||||
if (fn === undefined || fn === null) {
|
||||
throw new Error('Not supported');
|
||||
}
|
||||
|
||||
const gateKey = `$gate$${key}`;
|
||||
|
||||
descriptor.value = function (this: any, ...args: any[]) {
|
||||
const prop =
|
||||
args.length === 0 ? gateKey : `${gateKey}$${(resolver ?? defaultResolver)(...(args as Parameters<T>))}`;
|
||||
|
||||
if (!Object.prototype.hasOwnProperty.call(this, prop)) {
|
||||
Object.defineProperty(this, prop, {
|
||||
configurable: false,
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
value: undefined,
|
||||
});
|
||||
}
|
||||
|
||||
let promise = this[prop];
|
||||
if (promise === undefined) {
|
||||
let result;
|
||||
try {
|
||||
result = fn!.apply(this, args);
|
||||
if (result === undefined || fn === null || !iPromise(result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
this[prop] = promise = result
|
||||
.then((r: any) => {
|
||||
this[prop] = undefined;
|
||||
return r;
|
||||
})
|
||||
.catch(ex => {
|
||||
this[prop] = undefined;
|
||||
throw ex;
|
||||
});
|
||||
} catch (ex) {
|
||||
this[prop] = undefined;
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
|
||||
return promise;
|
||||
};
|
||||
};
|
||||
}
|
||||
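Aside (not part of the commit): gate.ts above adds a method decorator that caches the in-flight promise per argument key, so overlapping calls share one execution and the cache clears once the promise settles. A hedged usage sketch, assuming experimentalDecorators is enabled and a global fetch is available (web worker or Node 18+); the class and endpoint are illustrative:

import { gate } from './gate';

class RepoDataProvider {
	// Concurrent calls with the same ref share a single in-flight request;
	// the cached promise is dropped when it resolves or rejects.
	@gate()
	async getTree(ref: string): Promise<string[]> {
		const resp = await fetch(`https://api.github.com/repos/microsoft/vscode/git/trees/${ref}`);
		const data = await resp.json();
		return data.tree.map((t: { path: string }) => t.path);
	}
}

Two overlapping getTree('main') calls would share one request, while getTree('release/1.47') gets its own gate key because the default resolver stringifies the arguments.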
491
extensions/github-browser/src/github/api.ts
Normal file
@@ -0,0 +1,491 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { authentication, AuthenticationSession, Disposable, Event, EventEmitter, Range, Uri } from 'vscode';
|
||||
import { graphql } from '@octokit/graphql';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
import { fromGitHubUri } from './fs';
|
||||
import { ContextStore } from '../contextStore';
|
||||
import { Iterables } from '../iterables';
|
||||
|
||||
export const shaRegex = /^[0-9a-f]{40}$/;
|
||||
|
||||
export interface GitHubApiContext {
|
||||
sha: string;
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
interface CreateCommitOperation {
|
||||
type: 'created';
|
||||
path: string;
|
||||
content: string
|
||||
}
|
||||
|
||||
interface ChangeCommitOperation {
|
||||
type: 'changed';
|
||||
path: string;
|
||||
content: string
|
||||
}
|
||||
|
||||
interface DeleteCommitOperation {
|
||||
type: 'deleted';
|
||||
path: string;
|
||||
content: undefined
|
||||
}
|
||||
|
||||
export type CommitOperation = CreateCommitOperation | ChangeCommitOperation | DeleteCommitOperation;
|
||||
|
||||
type ArrayElement<T extends Array<unknown>> = T extends (infer U)[] ? U : never;
|
||||
type GitCreateTreeParamsTree = ArrayElement<NonNullable<Parameters<Octokit['git']['createTree']>[0]>['tree']>;
|
||||
|
||||
function getGitHubRootUri(uri: Uri) {
|
||||
const rootIndex = uri.path.indexOf('/', uri.path.indexOf('/', 1) + 1);
|
||||
return uri.with({
|
||||
path: uri.path.substring(0, rootIndex === -1 ? undefined : rootIndex),
|
||||
query: ''
|
||||
});
|
||||
}
|
||||
|
||||
export class GitHubApi implements Disposable {
|
||||
private _onDidChangeContext = new EventEmitter<Uri>();
|
||||
get onDidChangeContext(): Event<Uri> {
|
||||
return this._onDidChangeContext.event;
|
||||
}
|
||||
|
||||
private readonly disposable: Disposable;
|
||||
|
||||
constructor(private readonly context: ContextStore<GitHubApiContext>) {
|
||||
this.disposable = Disposable.from(
|
||||
context.onDidChange(e => this._onDidChangeContext.fire(e))
|
||||
);
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this.disposable.dispose();
|
||||
}
|
||||
|
||||
private _session: AuthenticationSession | undefined;
|
||||
async ensureAuthenticated() {
|
||||
if (this._session === undefined) {
|
||||
const providers = await authentication.getProviderIds();
|
||||
if (!providers.includes('github')) {
|
||||
await new Promise(resolve => {
|
||||
authentication.onDidChangeAuthenticationProviders(e => {
|
||||
if (e.added.includes('github')) {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
this._session = await authentication.getSession('github', ['repo'], { createIfNone: true });
|
||||
}
|
||||
|
||||
return this._session;
|
||||
}
|
||||
|
||||
private _graphql: typeof graphql | undefined;
|
||||
private async graphql() {
|
||||
if (this._graphql === undefined) {
|
||||
const session = await this.ensureAuthenticated();
|
||||
this._graphql = graphql.defaults({
|
||||
headers: {
|
||||
Authorization: `Bearer ${session.accessToken}`,
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return this._graphql;
|
||||
}
|
||||
|
||||
private _octokit: typeof Octokit | undefined;
|
||||
private async octokit(options?: ConstructorParameters<typeof Octokit>[0]) {
|
||||
if (this._octokit === undefined) {
|
||||
const session = await this.ensureAuthenticated();
|
||||
this._octokit = Octokit.defaults({ auth: `token ${session.accessToken}` });
|
||||
}
|
||||
return new this._octokit(options);
|
||||
}
|
||||
|
||||
async commit(rootUri: Uri, message: string, operations: CommitOperation[]): Promise<string | undefined> {
|
||||
let { owner, repo, ref } = fromGitHubUri(rootUri);
|
||||
|
||||
try {
|
||||
if (ref === undefined || ref === 'HEAD') {
|
||||
ref = await this.defaultBranchQuery(rootUri);
|
||||
if (ref === undefined) {
|
||||
throw new Error('Cannot commit — invalid ref');
|
||||
}
|
||||
}
|
||||
|
||||
const context = await this.getContext(rootUri);
|
||||
if (context.sha === undefined) {
|
||||
throw new Error('Cannot commit — invalid context');
|
||||
}
|
||||
|
||||
const hasDeletes = operations.some(op => op.type === 'deleted');
|
||||
|
||||
const github = await this.octokit();
|
||||
const treeResp = await github.git.getTree({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
tree_sha: context.sha,
|
||||
recursive: hasDeletes ? 'true' : undefined,
|
||||
});
|
||||
|
||||
// 0100000000000000 (040000): Directory
|
||||
// 1000000110100100 (100644): Regular non-executable file
|
||||
// 1000000110110100 (100664): Regular non-executable group-writeable file
|
||||
// 1000000111101101 (100755): Regular executable file
|
||||
// 1010000000000000 (120000): Symbolic link
|
||||
// 1110000000000000 (160000): Gitlink
|
||||
let updatedTree: GitCreateTreeParamsTree[];
|
||||
|
||||
if (hasDeletes) {
|
||||
updatedTree = treeResp.data.tree as GitCreateTreeParamsTree[];
|
||||
|
||||
for (const operation of operations) {
|
||||
switch (operation.type) {
|
||||
case 'created':
|
||||
updatedTree.push({ path: operation.path, mode: '100644', type: 'blob', content: operation.content });
|
||||
break;
|
||||
|
||||
case 'changed': {
|
||||
const index = updatedTree.findIndex(item => item.path === operation.path);
|
||||
if (index !== -1) {
|
||||
const { path, mode, type } = updatedTree[index];
|
||||
updatedTree.splice(index, 1, { path: path, mode: mode, type: type, content: operation.content });
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'deleted': {
|
||||
const index = updatedTree.findIndex(item => item.path === operation.path);
|
||||
if (index !== -1) {
|
||||
updatedTree.splice(index, 1);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
updatedTree = [];
|
||||
|
||||
for (const operation of operations) {
|
||||
switch (operation.type) {
|
||||
case 'created':
|
||||
updatedTree.push({ path: operation.path, mode: '100644', type: 'blob', content: operation.content });
|
||||
break;
|
||||
|
||||
case 'changed':
|
||||
const item = treeResp.data.tree.find(item => item.path === operation.path) as GitCreateTreeParamsTree;
|
||||
if (item !== undefined) {
|
||||
const { path, mode, type } = item;
|
||||
updatedTree.push({ path: path, mode: mode, type: type, content: operation.content });
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const updatedTreeResp = await github.git.createTree({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
base_tree: hasDeletes ? undefined : treeResp.data.sha,
|
||||
tree: updatedTree
|
||||
});
|
||||
|
||||
const resp = await github.git.createCommit({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
message: message,
|
||||
tree: updatedTreeResp.data.sha,
|
||||
parents: [context.sha]
|
||||
});
|
||||
|
||||
this.updateContext(rootUri, { sha: resp.data.sha, timestamp: Date.now() });
|
||||
|
||||
// TODO@eamodio need to send a file change for any open files
|
||||
|
||||
await github.git.updateRef({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
ref: `heads/${ref}`,
|
||||
sha: resp.data.sha
|
||||
});
|
||||
|
||||
return resp.data.sha;
|
||||
} catch (ex) {
|
||||
console.log(ex);
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
|
||||
async defaultBranchQuery(uri: Uri) {
|
||||
const { owner, repo } = fromGitHubUri(uri);
|
||||
|
||||
try {
|
||||
const query = `query defaultBranch($owner: String!, $repo: String!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
defaultBranchRef {
|
||||
name
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
const rsp = await this.gqlQuery<{
|
||||
repository: { defaultBranchRef: { name: string; target: { oid: string } } | null | undefined };
|
||||
}>(query, {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
});
|
||||
return rsp?.repository?.defaultBranchRef?.name ?? undefined;
|
||||
} catch (ex) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async filesQuery(uri: Uri) {
|
||||
const { owner, repo, ref } = fromGitHubUri(uri);
|
||||
|
||||
try {
|
||||
const context = await this.getContext(uri);
|
||||
|
||||
const resp = await (await this.octokit()).git.getTree({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
recursive: '1',
|
||||
tree_sha: context?.sha ?? ref ?? 'HEAD',
|
||||
});
|
||||
return Iterables.filterMap(resp.data.tree, p => p.type === 'blob' ? p.path : undefined);
|
||||
} catch (ex) {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async fsQuery<T>(uri: Uri, innerQuery: string): Promise<T | undefined> {
|
||||
const { owner, repo, path, ref } = fromGitHubUri(uri);
|
||||
|
||||
try {
|
||||
const context = await this.getContext(uri);
|
||||
|
||||
const query = `query fs($owner: String!, $repo: String!, $path: String) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
object(expression: $path) {
|
||||
${innerQuery}
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
const rsp = await this.gqlQuery<{
|
||||
repository: { object: T | null | undefined };
|
||||
}>(query, {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
path: `${context.sha ?? ref ?? 'HEAD'}:${path}`,
|
||||
});
|
||||
return rsp?.repository?.object ?? undefined;
|
||||
} catch (ex) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async latestCommitQuery(uri: Uri) {
|
||||
const { owner, repo, ref } = fromGitHubUri(uri);
|
||||
|
||||
try {
|
||||
if (ref === undefined || ref === 'HEAD') {
|
||||
const query = `query latest($owner: String!, $repo: String!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
defaultBranchRef {
|
||||
target {
|
||||
oid
|
||||
}
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
const rsp = await this.gqlQuery<{
|
||||
repository: { defaultBranchRef: { name: string; target: { oid: string } } | null | undefined };
|
||||
}>(query, {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
});
|
||||
return rsp?.repository?.defaultBranchRef?.target.oid ?? undefined;
|
||||
}
|
||||
|
||||
const query = `query latest($owner: String!, $repo: String!, $ref: String!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
ref(qualifiedName: $ref) {
|
||||
target {
|
||||
oid
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
const rsp = await this.gqlQuery<{
|
||||
repository: { ref: { target: { oid: string } } | null | undefined };
|
||||
}>(query, {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
ref: ref ?? 'HEAD',
|
||||
});
|
||||
return rsp?.repository?.ref?.target.oid ?? undefined;
|
||||
} catch (ex) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async searchQuery(
|
||||
query: string,
|
||||
uri: Uri,
|
||||
options: { maxResults?: number; context?: { before?: number; after?: number } },
|
||||
): Promise<SearchQueryResults> {
|
||||
const { owner, repo, ref } = fromGitHubUri(uri);
|
||||
|
||||
// If we have a specific ref, don't try to search, because GitHub search only works against the default branch
|
||||
if (ref === undefined) {
|
||||
return { matches: [], limitHit: true };
|
||||
}
|
||||
|
||||
try {
|
||||
const resp = await (await this.octokit({
|
||||
request: {
|
||||
headers: {
|
||||
accept: 'application/vnd.github.v3.text-match+json',
|
||||
},
|
||||
}
|
||||
})).search.code({
|
||||
q: `${query} repo:${owner}/${repo}`,
|
||||
});
|
||||
|
||||
// Since GitHub doesn't return ANY line numbers just fake it at the top of the file 😢
|
||||
const range = new Range(0, 0, 0, 0);
|
||||
|
||||
const matches: SearchQueryMatch[] = [];
|
||||
|
||||
let counter = 0;
|
||||
let match: SearchQueryMatch;
|
||||
for (const item of resp.data.items) {
|
||||
for (const m of (item as typeof item & { text_matches: GitHubSearchTextMatch[] }).text_matches) {
|
||||
counter++;
|
||||
if (options.maxResults !== undefined && counter > options.maxResults) {
|
||||
return { matches: matches, limitHit: true };
|
||||
}
|
||||
|
||||
match = {
|
||||
path: item.path,
|
||||
ranges: [],
|
||||
preview: m.fragment,
|
||||
matches: [],
|
||||
};
|
||||
|
||||
for (const lm of m.matches) {
|
||||
let line = 0;
|
||||
let startChar = 0;
|
||||
let endChar = 0;
|
||||
for (let i = 0; i < lm.indices[1]; i++) {
|
||||
if (i === lm.indices[0]) {
|
||||
startChar = endChar;
|
||||
}
|
||||
|
||||
if (m.fragment[i] === '\n') {
|
||||
line++;
|
||||
endChar = 0;
|
||||
} else {
|
||||
endChar++;
|
||||
}
|
||||
}
|
||||
|
||||
match.ranges.push(range);
|
||||
match.matches.push(new Range(line, startChar, line, endChar));
|
||||
}
|
||||
|
||||
matches.push(match);
|
||||
}
|
||||
}
|
||||
|
||||
return { matches: matches, limitHit: false };
|
||||
} catch (ex) {
|
||||
return { matches: [], limitHit: true };
|
||||
}
|
||||
}
|
||||
|
||||
private async gqlQuery<T>(query: string, variables: { [key: string]: string | number }): Promise<T | undefined> {
|
||||
return (await this.graphql())<T>(query, variables);
|
||||
}
|
||||
|
||||
private readonly pendingContextRequests = new Map<string, Promise<GitHubApiContext>>();
|
||||
async getContext(uri: Uri): Promise<GitHubApiContext> {
|
||||
const rootUri = getGitHubRootUri(uri);
|
||||
|
||||
let pending = this.pendingContextRequests.get(rootUri.toString());
|
||||
if (pending === undefined) {
|
||||
pending = this.getContextCore(rootUri);
|
||||
this.pendingContextRequests.set(rootUri.toString(), pending);
|
||||
}
|
||||
|
||||
try {
|
||||
return await pending;
|
||||
} finally {
|
||||
this.pendingContextRequests.delete(rootUri.toString());
|
||||
}
|
||||
}
|
||||
|
||||
private readonly rootUriToContextMap = new Map<string, GitHubApiContext>();
|
||||
|
||||
private async getContextCore(rootUri: Uri): Promise<GitHubApiContext> {
|
||||
let context = this.rootUriToContextMap.get(rootUri.toString());
|
||||
if (context === undefined) {
|
||||
const { ref } = fromGitHubUri(rootUri);
|
||||
if (ref !== undefined && shaRegex.test(ref)) {
|
||||
context = { sha: ref, timestamp: Date.now() };
|
||||
} else {
|
||||
context = this.context.get(rootUri);
|
||||
if (context?.sha === undefined) {
|
||||
const sha = await this.latestCommitQuery(rootUri);
|
||||
if (sha !== undefined) {
|
||||
context = { sha: sha, timestamp: Date.now() };
|
||||
} else {
|
||||
context = undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (context !== undefined) {
|
||||
this.updateContext(rootUri, context);
|
||||
}
|
||||
}
|
||||
|
||||
return context ?? { sha: rootUri.authority, timestamp: Date.now() };
|
||||
}
|
||||
|
||||
private updateContext(rootUri: Uri, context: GitHubApiContext) {
|
||||
this.rootUriToContextMap.set(rootUri.toString(), context);
|
||||
this.context.set(rootUri, context);
|
||||
}
|
||||
}
|
||||
|
||||
interface GitHubSearchTextMatch {
|
||||
object_url: string;
|
||||
object_type: string;
|
||||
property: string;
|
||||
fragment: string;
|
||||
matches: {
|
||||
text: string;
|
||||
indices: number[];
|
||||
}[];
|
||||
}
|
||||
|
||||
interface SearchQueryMatch {
|
||||
path: string;
|
||||
ranges: Range[];
|
||||
preview: string;
|
||||
matches: Range[];
|
||||
}
|
||||
|
||||
interface SearchQueryResults {
|
||||
matches: SearchQueryMatch[];
|
||||
limitHit: boolean;
|
||||
}
|
||||
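To make the shape of the API above concrete, a minimal usage sketch; the root URI, file paths, contents, and the './github/api' import path are illustrative assumptions, and the ContextStore/authentication wiring is omitted.

import { Uri } from 'vscode';
import { GitHubApi, CommitOperation } from './github/api';

// Hypothetical root URI and file contents; a real caller takes the root URI from
// the workspace folder and receives the GitHubApi instance from activation code.
async function commitExample(github: GitHubApi): Promise<void> {
	const rootUri = Uri.parse('github://HEAD/microsoft/vscode');

	const operations: CommitOperation[] = [
		{ type: 'created', path: 'docs/new-file.md', content: '# New file\n' },
		{ type: 'changed', path: 'README.md', content: '# Updated readme\n' },
		{ type: 'deleted', path: 'old-file.txt', content: undefined },
	];

	// commit() resolves the branch, rebuilds the tree, creates the commit,
	// and fast-forwards the ref; it resolves to the new commit sha.
	const sha = await github.commit(rootUri, 'Update docs', operations);
	console.log(sha);
}
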
@@ -5,8 +5,6 @@
|
||||
|
||||
'use strict';
|
||||
import {
|
||||
authentication,
|
||||
AuthenticationSession2,
|
||||
CancellationToken,
|
||||
Disposable,
|
||||
Event,
|
||||
@@ -20,7 +18,6 @@ import {
|
||||
FileSystemProvider,
|
||||
FileType,
|
||||
Progress,
|
||||
Range,
|
||||
TextSearchComplete,
|
||||
TextSearchOptions,
|
||||
TextSearchProvider,
|
||||
@@ -29,11 +26,11 @@ import {
|
||||
Uri,
|
||||
workspace,
|
||||
} from 'vscode';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
import { graphql } from '@octokit/graphql/';
|
||||
import * as fuzzySort from 'fuzzysort';
|
||||
import fetch from 'node-fetch';
|
||||
import { Iterables } from './iterables';
|
||||
import { GitHubApi } from './api';
|
||||
import { Iterables } from '../iterables';
|
||||
import { getRootUri } from '../extension';
|
||||
|
||||
const emptyDisposable = { dispose: () => { /* noop */ } };
|
||||
const replaceBackslashRegex = /(\/|\\)/g;
|
||||
@@ -53,16 +50,17 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
}
|
||||
|
||||
private readonly disposable: Disposable;
|
||||
private fsCache = new Map<string, any>();
|
||||
private fsCache = new Map<string, Map<string, any>>();
|
||||
|
||||
constructor() {
|
||||
constructor(private readonly github: GitHubApi) {
|
||||
this.disposable = Disposable.from(
|
||||
workspace.registerFileSystemProvider(GitHubFS.scheme, this, {
|
||||
isCaseSensitive: true,
|
||||
isReadonly: true,
|
||||
isReadonly: true
|
||||
}),
|
||||
workspace.registerFileSearchProvider(GitHubFS.scheme, this),
|
||||
workspace.registerTextSearchProvider(GitHubFS.scheme, this),
|
||||
github.onDidChangeContext(e => this.fsCache.delete(e.toString()))
|
||||
);
|
||||
}
|
||||
|
||||
@@ -70,22 +68,18 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
this.disposable?.dispose();
|
||||
}
|
||||
|
||||
private _github: Promise<GitHubApi | undefined> | undefined;
|
||||
get github(): Promise<GitHubApi | undefined> {
|
||||
if (this._github === undefined) {
|
||||
this._github = this.getGitHubApi();
|
||||
}
|
||||
return this._github;
|
||||
private getCache(uri: Uri) {
|
||||
const rootUri = getRootUri(uri);
|
||||
if (rootUri === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
private async getGitHubApi(): Promise<GitHubApi | undefined> {
|
||||
try {
|
||||
const session = await authentication.getSession('github', ['repo'], { createIfNone: true });
|
||||
return new GitHubApi(session);
|
||||
} catch (ex) {
|
||||
this._github = undefined;
|
||||
throw ex;
|
||||
let cache = this.fsCache.get(rootUri.toString());
|
||||
if (cache === undefined) {
|
||||
cache = new Map<string, any>();
|
||||
this.fsCache.set(rootUri.toString(), cache);
|
||||
}
|
||||
return cache;
|
||||
}
|
||||
|
||||
//#region FileSystemProvider
|
||||
@@ -96,7 +90,8 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
|
||||
async stat(uri: Uri): Promise<FileStat> {
|
||||
if (uri.path === '' || uri.path.lastIndexOf('/') === 0) {
|
||||
return { type: FileType.Directory, size: 0, ctime: 0, mtime: 0 };
|
||||
const context = await this.github.getContext(uri);
|
||||
return { type: FileType.Directory, size: 0, ctime: 0, mtime: context?.timestamp };
|
||||
}
|
||||
|
||||
const data = await this.fsQuery<{
|
||||
@@ -108,14 +103,20 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
...on Blob {
|
||||
byteSize
|
||||
}`,
|
||||
this.fsCache,
|
||||
this.getCache(uri),
|
||||
);
|
||||
|
||||
if (data === undefined) {
|
||||
throw FileSystemError.FileNotFound();
|
||||
}
|
||||
|
||||
const context = await this.github.getContext(uri);
|
||||
|
||||
return {
|
||||
type: typenameToFileType(data?.__typename),
|
||||
size: data?.byteSize ?? 0,
|
||||
type: typenameToFileType(data.__typename),
|
||||
size: data.byteSize ?? 0,
|
||||
ctime: 0,
|
||||
mtime: 0,
|
||||
mtime: context?.timestamp,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -130,7 +131,7 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
type
|
||||
}
|
||||
}`,
|
||||
this.fsCache,
|
||||
this.getCache(uri),
|
||||
);
|
||||
|
||||
return (data?.entries ?? []).map<[string, FileType]>(e => [
|
||||
@@ -139,8 +140,8 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
]);
|
||||
}
|
||||
|
||||
createDirectory(): void | Thenable<void> {
|
||||
throw FileSystemError.NoPermissions;
|
||||
createDirectory(_uri: Uri): void | Thenable<void> {
|
||||
throw FileSystemError.NoPermissions();
|
||||
}
|
||||
|
||||
async readFile(uri: Uri): Promise<Uint8Array> {
|
||||
@@ -172,20 +173,20 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
return textEncoder.encode(data?.text ?? '');
|
||||
}
|
||||
|
||||
writeFile(): void | Thenable<void> {
|
||||
throw FileSystemError.NoPermissions;
|
||||
async writeFile(_uri: Uri, _content: Uint8Array, _options: { create: boolean, overwrite: boolean }): Promise<void> {
|
||||
throw FileSystemError.NoPermissions();
|
||||
}
|
||||
|
||||
delete(): void | Thenable<void> {
|
||||
throw FileSystemError.NoPermissions;
|
||||
delete(_uri: Uri, _options: { recursive: boolean }): void | Thenable<void> {
|
||||
throw FileSystemError.NoPermissions();
|
||||
}
|
||||
|
||||
rename(): void | Thenable<void> {
|
||||
throw FileSystemError.NoPermissions;
|
||||
rename(_oldUri: Uri, _newUri: Uri, _options: { overwrite: boolean }): void | Thenable<void> {
|
||||
throw FileSystemError.NoPermissions();
|
||||
}
|
||||
|
||||
copy?(): void | Thenable<void> {
|
||||
throw FileSystemError.NoPermissions;
|
||||
copy(_source: Uri, _destination: Uri, _options: { overwrite: boolean }): void | Thenable<void> {
|
||||
throw FileSystemError.NoPermissions();
|
||||
}
|
||||
|
||||
//#endregion
|
||||
@@ -201,8 +202,10 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
): Promise<Uri[]> {
|
||||
let searchable = this.fileSearchCache.get(options.folder.toString(true));
|
||||
if (searchable === undefined) {
|
||||
const matches = await (await this.github)?.filesQuery(options.folder);
|
||||
if (matches === undefined || token.isCancellationRequested) { return []; }
|
||||
const matches = await this.github.filesQuery(options.folder);
|
||||
if (matches === undefined || token.isCancellationRequested) {
|
||||
return [];
|
||||
}
|
||||
|
||||
searchable = [...Iterables.map(matches, m => (fuzzySort as Fuzzysort).prepareSlow(m))];
|
||||
this.fileSearchCache.set(options.folder.toString(true), searchable);
|
||||
@@ -233,13 +236,12 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
query: TextSearchQuery,
|
||||
options: TextSearchOptions,
|
||||
progress: Progress<TextSearchResult>,
|
||||
token: CancellationToken,
|
||||
_token: CancellationToken,
|
||||
): Promise<TextSearchComplete> {
|
||||
const results = await (await this.github)?.searchQuery(
|
||||
const results = await this.github.searchQuery(
|
||||
query.pattern,
|
||||
options.folder,
|
||||
{ maxResults: options.maxResults, context: { before: options.beforeContext, after: options.afterContext } },
|
||||
token,
|
||||
);
|
||||
if (results === undefined) { return { limitHit: true }; }
|
||||
|
||||
@@ -266,9 +268,11 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
|
||||
const key = `${uri.toString()}:${getHashCode(query)}`;
|
||||
|
||||
let data = cache?.get(key);
|
||||
if (data !== undefined) { return data as T; }
|
||||
if (data !== undefined) {
|
||||
return data as T;
|
||||
}
|
||||
|
||||
data = await (await this.github)?.fsQuery<T>(uri, query);
|
||||
data = await this.github.fsQuery<T>(uri, query);
|
||||
cache?.set(key, data);
|
||||
return data;
|
||||
}
|
||||
@@ -296,12 +300,16 @@ function typenameToFileType(typename: string | undefined | null) {
|
||||
}
|
||||
|
||||
type RepoInfo = { owner: string; repo: string; path: string | undefined; ref?: string };
|
||||
function fromGitHubUri(uri: Uri): RepoInfo {
|
||||
export function fromGitHubUri(uri: Uri): RepoInfo {
|
||||
const [, owner, repo, ...rest] = uri.path.split('/');
|
||||
|
||||
let ref;
|
||||
if (uri.authority) {
|
||||
ref = uri.authority;
|
||||
// The casing of HEAD is important for the GitHub api to work
|
||||
if (/HEAD/i.test(ref)) {
|
||||
ref = 'HEAD';
|
||||
}
|
||||
}
|
||||
return { owner: owner, repo: repo, path: rest.join('/'), ref: ref };
|
||||
}
|
||||
@@ -322,175 +330,3 @@ function getHashCode(s: string): number {
|
||||
}
|
||||
return hash;
|
||||
}
|
||||
|
||||
interface SearchQueryMatch {
|
||||
path: string;
|
||||
ranges: Range[];
|
||||
preview: string;
|
||||
matches: Range[];
|
||||
}
|
||||
|
||||
interface SearchQueryResults {
|
||||
matches: SearchQueryMatch[];
|
||||
limitHit: boolean;
|
||||
}
|
||||
|
||||
class GitHubApi {
|
||||
constructor(private readonly session: AuthenticationSession2) { }
|
||||
|
||||
private _graphql: typeof graphql | undefined;
|
||||
private get graphql() {
|
||||
if (this._graphql === undefined) {
|
||||
this._graphql = graphql.defaults({
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return this._graphql;
|
||||
}
|
||||
|
||||
get token() {
|
||||
return this.session.accessToken;
|
||||
}
|
||||
|
||||
async filesQuery(uri: Uri) {
|
||||
const { owner, repo, ref } = fromGitHubUri(uri);
|
||||
try {
|
||||
const resp = await new Octokit({
|
||||
auth: `token ${this.token}`,
|
||||
}).git.getTree({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
recursive: '1',
|
||||
tree_sha: ref ?? 'HEAD',
|
||||
});
|
||||
return Iterables.filterMap(resp.data.tree, p => p.type === 'blob' ? p.path : undefined);
|
||||
} catch (ex) {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async searchQuery(
|
||||
query: string,
|
||||
uri: Uri,
|
||||
options: { maxResults?: number; context?: { before?: number; after?: number } },
|
||||
_token: CancellationToken,
|
||||
): Promise<SearchQueryResults> {
|
||||
const { owner, repo, ref } = fromGitHubUri(uri);
|
||||
|
||||
// If we have a specific ref, don't try to search, because GitHub search only works against the default branch
|
||||
if (ref === undefined) {
|
||||
return { matches: [], limitHit: true };
|
||||
}
|
||||
|
||||
try {
|
||||
const resp = await new Octokit({
|
||||
auth: `token ${this.token}`,
|
||||
request: {
|
||||
headers: {
|
||||
accept: 'application/vnd.github.v3.text-match+json',
|
||||
},
|
||||
}
|
||||
}).search.code({
|
||||
q: `${query} repo:${owner}/${repo}`,
|
||||
});
|
||||
|
||||
// Since GitHub doesn't return ANY line numbers just fake it at the top of the file 😢
|
||||
const range = new Range(0, 0, 0, 0);
|
||||
|
||||
const matches: SearchQueryMatch[] = [];
|
||||
|
||||
console.log(resp.data.items.length, resp.data.items);
|
||||
|
||||
let counter = 0;
|
||||
let match: SearchQueryMatch;
|
||||
for (const item of resp.data.items) {
|
||||
for (const m of (item as typeof item & { text_matches: GitHubSearchTextMatch[] }).text_matches) {
|
||||
counter++;
|
||||
if (options.maxResults !== undefined && counter > options.maxResults) {
|
||||
return { matches: matches, limitHit: true };
|
||||
}
|
||||
|
||||
match = {
|
||||
path: item.path,
|
||||
ranges: [],
|
||||
preview: m.fragment,
|
||||
matches: [],
|
||||
};
|
||||
|
||||
for (const lm of m.matches) {
|
||||
let line = 0;
|
||||
let shartChar = 0;
|
||||
let endChar = 0;
|
||||
for (let i = 0; i < lm.indices[1]; i++) {
|
||||
if (i === lm.indices[0]) {
|
||||
shartChar = endChar;
|
||||
}
|
||||
|
||||
if (m.fragment[i] === '\n') {
|
||||
line++;
|
||||
endChar = 0;
|
||||
} else {
|
||||
endChar++;
|
||||
}
|
||||
}
|
||||
|
||||
match.ranges.push(range);
|
||||
match.matches.push(new Range(line, shartChar, line, endChar));
|
||||
}
|
||||
|
||||
matches.push(match);
|
||||
}
|
||||
}
|
||||
|
||||
return { matches: matches, limitHit: false };
|
||||
} catch (ex) {
|
||||
return { matches: [], limitHit: true };
|
||||
}
|
||||
}
|
||||
|
||||
async fsQuery<T>(uri: Uri, innerQuery: string): Promise<T | undefined> {
|
||||
try {
|
||||
const query = `query fs($owner: String!, $repo: String!, $path: String) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
object(expression: $path) {
|
||||
${innerQuery}
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
const { owner, repo, path, ref } = fromGitHubUri(uri);
|
||||
const variables = {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
path: `${ref ?? 'HEAD'}:${path}`,
|
||||
};
|
||||
|
||||
const rsp = await this.query<{
|
||||
repository: { object: T | null | undefined };
|
||||
}>(query, variables);
|
||||
return rsp?.repository?.object ?? undefined;
|
||||
} catch (ex) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
query<T>(query: string, variables: { [key: string]: string | number }): Promise<T | undefined> {
|
||||
return this.graphql(query, variables) as Promise<T | undefined>;
|
||||
}
|
||||
}
|
||||
|
||||
interface GitHubSearchTextMatch {
|
||||
object_url: string;
|
||||
object_type: string;
|
||||
property: string;
|
||||
fragment: string;
|
||||
matches: GitHubSearchMatch[];
|
||||
}
|
||||
|
||||
interface GitHubSearchMatch {
|
||||
text: string;
|
||||
indices: number[];
|
||||
}
|
||||
168
extensions/github-browser/src/scm.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
import { CancellationToken, commands, Disposable, scm, SourceControl, SourceControlResourceGroup, SourceControlResourceState, Uri, window, workspace } from 'vscode';
|
||||
import * as nls from 'vscode-nls';
|
||||
import { IChangeStore } from './changeStore';
|
||||
import { GitHubApi, CommitOperation } from './github/api';
|
||||
import { getRelativePath } from './extension';
|
||||
|
||||
const localize = nls.loadMessageBundle();
|
||||
|
||||
interface ScmProvider {
|
||||
sourceControl: SourceControl,
|
||||
groups: SourceControlResourceGroup[]
|
||||
}
|
||||
|
||||
export class VirtualSCM implements Disposable {
|
||||
private readonly providers: ScmProvider[] = [];
|
||||
|
||||
private disposable: Disposable;
|
||||
|
||||
constructor(
|
||||
private readonly originalScheme: string,
|
||||
private readonly github: GitHubApi,
|
||||
private readonly changeStore: IChangeStore,
|
||||
) {
|
||||
this.registerCommands();
|
||||
|
||||
// TODO@eamodio listen for workspace folder changes
|
||||
for (const folder of workspace.workspaceFolders ?? []) {
|
||||
this.createScmProvider(folder.uri, folder.name);
|
||||
}
|
||||
|
||||
this.disposable = Disposable.from(
|
||||
changeStore.onDidChange(e => this.update(e.rootUri, e.uri)),
|
||||
);
|
||||
|
||||
for (const { uri } of workspace.workspaceFolders ?? []) {
|
||||
for (const operation of changeStore.getChanges(uri)) {
|
||||
this.update(uri, operation.uri);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this.disposable.dispose();
|
||||
}
|
||||
|
||||
private registerCommands() {
|
||||
commands.registerCommand('githubBrowser.commit', (...args: any[]) => this.commitChanges(args[0]));
|
||||
|
||||
commands.registerCommand('githubBrowser.discardChanges', (resourceState: SourceControlResourceState) =>
|
||||
this.discardChanges(resourceState.resourceUri)
|
||||
);
|
||||
|
||||
commands.registerCommand('githubBrowser.openChanges', (resourceState: SourceControlResourceState) =>
|
||||
this.openChanges(resourceState.resourceUri)
|
||||
);
|
||||
|
||||
commands.registerCommand('githubBrowser.openFile', (resourceState: SourceControlResourceState) =>
|
||||
this.openFile(resourceState.resourceUri)
|
||||
);
|
||||
}
|
||||
|
||||
async commitChanges(sourceControl: SourceControl): Promise<void> {
|
||||
const operations = this.changeStore
|
||||
.getChanges(sourceControl.rootUri!)
|
||||
.map<CommitOperation>(operation => {
|
||||
const path = getRelativePath(sourceControl.rootUri!, operation.uri);
|
||||
switch (operation.type) {
|
||||
case 'created':
|
||||
return { type: operation.type, path: path, content: this.changeStore.getContent(operation.uri)! };
|
||||
case 'changed':
|
||||
return { type: operation.type, path: path, content: this.changeStore.getContent(operation.uri)! };
|
||||
case 'deleted':
|
||||
return { type: operation.type, path: path };
|
||||
}
|
||||
});
|
||||
if (!operations.length) {
|
||||
window.showInformationMessage(localize('no changes', "There are no changes to commit."));
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const message = sourceControl.inputBox.value;
|
||||
if (message) {
|
||||
const sha = await this.github.commit(this.getOriginalResource(sourceControl.rootUri!), message, operations);
|
||||
if (sha !== undefined) {
|
||||
this.changeStore.acceptAll(sourceControl.rootUri!);
|
||||
sourceControl.inputBox.value = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
discardChanges(uri: Uri): Promise<void> {
|
||||
return this.changeStore.discard(uri);
|
||||
}
|
||||
|
||||
openChanges(uri: Uri) {
|
||||
return this.changeStore.openChanges(uri, this.getOriginalResource(uri));
|
||||
}
|
||||
|
||||
openFile(uri: Uri) {
|
||||
return this.changeStore.openFile(uri);
|
||||
}
|
||||
|
||||
private update(rootUri: Uri, uri: Uri) {
|
||||
const folder = workspace.getWorkspaceFolder(uri);
|
||||
if (folder === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const provider = this.createScmProvider(rootUri, folder.name);
|
||||
const group = this.createChangesGroup(provider);
|
||||
group.resourceStates = this.changeStore.getChanges(rootUri).map<SourceControlResourceState>(op => {
|
||||
const rs: SourceControlResourceState = {
|
||||
decorations: {
|
||||
strikeThrough: op.type === 'deleted'
|
||||
},
|
||||
resourceUri: op.uri,
|
||||
command: {
|
||||
command: 'githubBrowser.openChanges',
|
||||
title: 'Open Changes',
|
||||
}
|
||||
};
|
||||
rs.command!.arguments = [rs];
|
||||
return rs;
|
||||
});
|
||||
}
|
||||
|
||||
private createScmProvider(rootUri: Uri, name: string) {
|
||||
let provider = this.providers.find(sc => sc.sourceControl.rootUri?.toString() === rootUri.toString());
|
||||
if (provider === undefined) {
|
||||
const sourceControl = scm.createSourceControl('github', name, rootUri);
|
||||
sourceControl.quickDiffProvider = { provideOriginalResource: uri => this.getOriginalResource(uri) };
|
||||
sourceControl.acceptInputCommand = {
|
||||
command: 'githubBrowser.commit',
|
||||
title: 'Commit',
|
||||
arguments: [sourceControl]
|
||||
};
|
||||
sourceControl.inputBox.placeholder = `Message (Ctrl+Enter to commit '${name}')`;
|
||||
// sourceControl.inputBox.validateInput = value => value ? undefined : 'Invalid commit message';
|
||||
|
||||
provider = { sourceControl: sourceControl, groups: [] };
|
||||
this.createChangesGroup(provider);
|
||||
this.providers.push(provider);
|
||||
}
|
||||
|
||||
return provider;
|
||||
}
|
||||
|
||||
private createChangesGroup(provider: ScmProvider) {
|
||||
let group = provider.groups.find(g => g.id === 'github.changes');
|
||||
if (group === undefined) {
|
||||
group = provider.sourceControl.createResourceGroup('github.changes', 'Changes');
|
||||
provider.groups.push(group);
|
||||
}
|
||||
|
||||
return group;
|
||||
}
|
||||
|
||||
private getOriginalResource(uri: Uri, _token?: CancellationToken): Uri {
|
||||
return uri.with({ scheme: this.originalScheme });
|
||||
}
|
||||
}
|
||||
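A hedged sketch of how this source-control provider might be wired up during activation; the 'github' original-scheme string, the helper name, and the change-store instance are assumptions, since the actual activation code is not part of this hunk.

import { ExtensionContext } from 'vscode';
import { GitHubApi } from './github/api';
import { IChangeStore } from './changeStore';
import { VirtualSCM } from './scm';

// Hypothetical wiring: create one VirtualSCM for the session and let it register
// a SourceControl per workspace folder; quick-diff resolves against the
// (assumed) read-only original scheme.
export function registerScm(context: ExtensionContext, github: GitHubApi, changeStore: IChangeStore): void {
	context.subscriptions.push(new VirtualSCM('github', github, changeStore));
}
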
29
extensions/github-browser/src/sha1.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';

const textDecoder = new TextDecoder();
const textEncoder = new TextEncoder();

declare let WEBWORKER: boolean;

export async function sha1(s: string | Uint8Array): Promise<string> {
	while (true) {
		try {
			if (WEBWORKER) {
				const hash = await globalThis.crypto.subtle.digest({ name: 'sha-1' }, typeof s === 'string' ? textEncoder.encode(s) : s);
				// Use encodeURIComponent to avoid issues with btoa and Latin-1 characters
				return globalThis.btoa(encodeURIComponent(textDecoder.decode(hash)));
			} else {
				return (await import('crypto')).createHash('sha1').update(s).digest('base64');
			}
		} catch (ex) {
			if (ex instanceof ReferenceError) {
				(global as any).WEBWORKER = false;
			}
		}
	}
}
|
||||
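A small usage sketch for the helper above; in the browser bundle the WEBWORKER constant is injected by the DefinePlugin added to the shared browser webpack config later in this change, while under Node the ReferenceError path flips the flag and the 'crypto' branch runs on the next loop iteration.

import { sha1 } from './sha1';

async function example(): Promise<void> {
	// Returns a base64 string derived from the SHA-1 digest; note the exact
	// encoding differs between the web-crypto and Node 'crypto' branches.
	const digest = await sha1('hello world');
	console.log(digest);
}
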
@@ -1,11 +1,12 @@
|
||||
{
|
||||
"extends": "../shared.tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./out",
|
||||
// "experimentalDecorators": true,
|
||||
// "typeRoots": [
|
||||
// "./node_modules/@types"
|
||||
// ]
|
||||
"experimentalDecorators": true,
|
||||
"lib": [
|
||||
"es2018",
|
||||
"dom"
|
||||
],
|
||||
"outDir": "./out"
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
|
||||
File diff suppressed because it is too large
@@ -27,21 +27,15 @@ function getAgent(url: string | undefined = process.env.HTTPS_PROXY): Agent {
|
||||
const scopes = ['repo', 'workflow'];
|
||||
|
||||
export async function getSession(): Promise<AuthenticationSession> {
|
||||
const authenticationSessions = await authentication.getSessions('github', scopes);
|
||||
|
||||
if (authenticationSessions.length) {
|
||||
return await authenticationSessions[0];
|
||||
} else {
|
||||
return await authentication.login('github', scopes);
|
||||
}
|
||||
return await authentication.getSession('github', scopes, { createIfNone: true });
|
||||
}
|
||||
|
||||
let _octokit: Promise<Octokit> | undefined;
|
||||
|
||||
export function getOctokit(): Promise<Octokit> {
|
||||
if (!_octokit) {
|
||||
_octokit = getSession().then(async session => {
|
||||
const token = await session.getAccessToken();
|
||||
_octokit = getSession().then(session => {
|
||||
const token = session.accessToken;
|
||||
const agent = getAgent();
|
||||
|
||||
return new Octokit({
|
||||
|
||||
@@ -17,7 +17,7 @@ class GitHubCredentialProvider implements CredentialsProvider {
|
||||
}
|
||||
|
||||
const session = await getSession();
|
||||
return { username: session.account.id, password: await session.getAccessToken() };
|
||||
return { username: session.account.id, password: session.accessToken };
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -7,23 +7,11 @@
|
||||
|
||||
'use strict';
|
||||
|
||||
const withDefaults = require('../shared.webpack.config');
|
||||
const path = require('path');
|
||||
const withBrowserDefaults = require('../shared.webpack.config').browser;
|
||||
|
||||
const clientConfig = withDefaults({
|
||||
module.exports = withBrowserDefaults({
|
||||
context: __dirname,
|
||||
target: 'webworker',
|
||||
entry: {
|
||||
extension: './src/extension.ts'
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
'vscode-extension-telemetry': path.resolve(__dirname, '../../build/polyfills/vscode-extension-telemetry.js'),
|
||||
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js'),
|
||||
},
|
||||
}
|
||||
});
|
||||
|
||||
clientConfig.module.rules[0].use.shift(); // remove nls loader
|
||||
|
||||
module.exports = clientConfig;
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
"vscode": "^1.39.0"
|
||||
},
|
||||
"main": "./out/extension",
|
||||
"browser": "./dist/extension.js",
|
||||
"browser": "./dist/browser/extension.js",
|
||||
"categories": [
|
||||
"Other"
|
||||
],
|
||||
|
||||
@@ -7,10 +7,10 @@
|
||||
|
||||
'use strict';
|
||||
|
||||
const withDefaults = require('../shared.webpack.config');
|
||||
const withBrowserDefaults = require('../shared.webpack.config').browser;
|
||||
const path = require('path');
|
||||
|
||||
const clientConfig = withDefaults({
|
||||
module.exports = withBrowserDefaults({
|
||||
target: 'webworker',
|
||||
context: path.join(__dirname, 'client'),
|
||||
entry: {
|
||||
@@ -19,16 +19,5 @@ const clientConfig = withDefaults({
|
||||
output: {
|
||||
filename: 'jsonClientMain.js',
|
||||
path: path.join(__dirname, 'client', 'dist', 'browser')
|
||||
},
|
||||
performance: {
|
||||
hints: false
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
|
||||
}
|
||||
}
|
||||
});
|
||||
clientConfig.module.rules[0].use.shift(); // remove nls loader
|
||||
|
||||
module.exports = clientConfig;
|
||||
|
||||
@@ -129,7 +129,7 @@
|
||||
"dependencies": {
|
||||
"request-light": "^0.3.0",
|
||||
"vscode-extension-telemetry": "0.1.1",
|
||||
"vscode-languageclient": "7.0.0-next.5",
|
||||
"vscode-languageclient": "7.0.0-next.5.1",
|
||||
"vscode-nls": "^4.1.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -7,11 +7,10 @@
|
||||
|
||||
'use strict';
|
||||
|
||||
const withDefaults = require('../../shared.webpack.config');
|
||||
const withBrowserDefaults = require('../../shared.webpack.config').browser;
|
||||
const path = require('path');
|
||||
|
||||
const serverConfig = withDefaults({
|
||||
target: 'webworker',
|
||||
module.exports = withBrowserDefaults({
|
||||
context: __dirname,
|
||||
entry: {
|
||||
extension: './src/browser/jsonServerMain.ts',
|
||||
@@ -20,16 +19,5 @@ const serverConfig = withDefaults({
|
||||
filename: 'jsonServerMain.js',
|
||||
path: path.join(__dirname, 'dist', 'browser'),
|
||||
libraryTarget: 'var'
|
||||
},
|
||||
performance: {
|
||||
hints: false
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
'vscode-nls': path.resolve(__dirname, '../../../build/polyfills/vscode-nls.js')
|
||||
}
|
||||
}
|
||||
});
|
||||
serverConfig.module.rules[0].use.shift(); // remove nls loader
|
||||
|
||||
module.exports = serverConfig;
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
"bin": {
|
||||
"vscode-json-languageserver": "./bin/vscode-json-languageserver"
|
||||
},
|
||||
"main": "./out/jsonServerMain",
|
||||
"main": "./out/node/jsonServerMain",
|
||||
"dependencies": {
|
||||
"jsonc-parser": "^2.2.1",
|
||||
"request-light": "^0.3.0",
|
||||
|
||||
@@ -125,10 +125,10 @@ vscode-jsonrpc@6.0.0-next.2:
|
||||
resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-6.0.0-next.2.tgz#3d73f86d812304cb91b9fb1efee40ec60b09ed7f"
|
||||
integrity sha512-dKQXRYNUY6BHALQJBJlyZyv9oWlYpbJ2vVoQNNVNPLAYQ3hzNp4zy+iSo7zGx1BPXByArJQDWTKLQh8dz3dnNw==
|
||||
|
||||
vscode-languageclient@7.0.0-next.5:
|
||||
version "7.0.0-next.5"
|
||||
resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-7.0.0-next.5.tgz#7ae84c598dff360bd2bc64322b74e10e5d0b9cd6"
|
||||
integrity sha512-ec+fJg+JiNBIdbeKbzssSuORUaVdtLValtiYdNEUCUjpYE+Y6xXPtXwiZOlS/0OB9pC/RLCMxsj16UwWncQhYQ==
|
||||
vscode-languageclient@7.0.0-next.5.1:
|
||||
version "7.0.0-next.5.1"
|
||||
resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-7.0.0-next.5.1.tgz#ed93f14e4c2cdccedf15002c7bf8ef9cb638f36c"
|
||||
integrity sha512-OONvbk3IFpubwF8/Y5uPQaq5J5CEskpeET3SfK4iGlv5OUK+44JawH/SEW5wXuEPpfdMLEMZLuGLU5v5d7N7PQ==
|
||||
dependencies:
|
||||
semver "^6.3.0"
|
||||
vscode-languageserver-protocol "3.16.0-next.4"
|
||||
|
||||
@@ -7,26 +7,11 @@
|
||||
|
||||
'use strict';
|
||||
|
||||
const withDefaults = require('../shared.webpack.config');
|
||||
const path = require('path');
|
||||
const withBrowserDefaults = require('../shared.webpack.config').browser;
|
||||
|
||||
const clientConfig = withDefaults({
|
||||
module.exports = withBrowserDefaults({
|
||||
context: __dirname,
|
||||
target: 'webworker',
|
||||
entry: {
|
||||
extension: './src/extension.ts'
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
'vscode-extension-telemetry': path.resolve(__dirname, '../../build/polyfills/vscode-extension-telemetry.js'),
|
||||
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js'),
|
||||
},
|
||||
},
|
||||
performance: {
|
||||
hints: false
|
||||
},
|
||||
}
|
||||
});
|
||||
|
||||
clientConfig.module.rules[0].use.shift(); // remove nls loader
|
||||
|
||||
module.exports = clientConfig;
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
"vscode": "^1.20.0"
|
||||
},
|
||||
"main": "./out/extension",
|
||||
"browser": "./dist/extension.js",
|
||||
"browser": "./dist/browser/extension",
|
||||
"categories": [
|
||||
"Programming Languages"
|
||||
],
|
||||
|
||||
@@ -7,27 +7,14 @@
|
||||
|
||||
'use strict';
|
||||
|
||||
const withDefaults = require('../shared.webpack.config');
|
||||
const path = require('path');
|
||||
const withBrowserDefaults = require('../shared.webpack.config').browser;
|
||||
|
||||
const clientConfig = withDefaults({
|
||||
target: 'webworker',
|
||||
module.exports = withBrowserDefaults({
|
||||
context: __dirname,
|
||||
entry: {
|
||||
extension: './src/mergeConflictMain.ts'
|
||||
},
|
||||
output: {
|
||||
filename: 'mergeConflictMain.js'
|
||||
},
|
||||
performance: {
|
||||
hints: false
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
|
||||
}
|
||||
}
|
||||
});
|
||||
clientConfig.module.rules[0].use.shift(); // remove nls loader
|
||||
|
||||
module.exports = clientConfig;
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"*"
|
||||
],
|
||||
"main": "./out/mergeConflictMain",
|
||||
"browser": "./dist/mergeConflictMain",
|
||||
"browser": "./dist/browser/mergeConflictMain",
|
||||
"scripts": {
|
||||
"compile": "gulp compile-extension:merge-conflict",
|
||||
"watch": "gulp watch-extension:merge-conflict"
|
||||
|
||||
@@ -205,9 +205,9 @@ export class AzureActiveDirectoryService {
|
||||
}, 1000 * 30);
|
||||
}
|
||||
|
||||
private async convertToSession(token: IToken): Promise<vscode.AuthenticationSession2> {
|
||||
private async convertToSession(token: IToken): Promise<vscode.AuthenticationSession> {
|
||||
const resolvedToken = await this.resolveAccessToken(token);
|
||||
return new vscode.AuthenticationSession2(token.sessionId, resolvedToken, token.account, token.scope.split(' '));
|
||||
return new vscode.AuthenticationSession(token.sessionId, resolvedToken, token.account, token.scope.split(' '));
|
||||
}
|
||||
|
||||
private async resolveAccessToken(token: IToken): Promise<string> {
|
||||
@@ -240,11 +240,11 @@ export class AzureActiveDirectoryService {
|
||||
}
|
||||
}
|
||||
|
||||
get sessions(): Promise<vscode.AuthenticationSession2[]> {
|
||||
get sessions(): Promise<vscode.AuthenticationSession[]> {
|
||||
return Promise.all(this._tokens.map(token => this.convertToSession(token)));
|
||||
}
|
||||
|
||||
public async login(scope: string): Promise<vscode.AuthenticationSession2> {
|
||||
public async login(scope: string): Promise<vscode.AuthenticationSession> {
|
||||
Logger.info('Logging in...');
|
||||
if (!scope.includes('offline_access')) {
|
||||
Logger.info('Warning: The \'offline_access\' scope was not included, so the generated token will not be able to be refreshed.');
|
||||
@@ -338,7 +338,7 @@ export class AzureActiveDirectoryService {
|
||||
}
|
||||
}
|
||||
|
||||
private async loginWithoutLocalServer(scope: string): Promise<vscode.AuthenticationSession2> {
|
||||
private async loginWithoutLocalServer(scope: string): Promise<vscode.AuthenticationSession> {
|
||||
const callbackUri = await vscode.env.asExternalUri(vscode.Uri.parse(`${vscode.env.uriScheme}://vscode.microsoft-authentication`));
|
||||
const nonce = crypto.randomBytes(16).toString('base64');
|
||||
const port = (callbackUri.authority.match(/:([0-9]*)$/) || [])[1] || (callbackUri.scheme === 'https' ? 443 : 80);
|
||||
@@ -353,7 +353,7 @@ export class AzureActiveDirectoryService {
|
||||
});
|
||||
vscode.env.openExternal(uri);
|
||||
|
||||
const timeoutPromise = new Promise((_: (value: vscode.AuthenticationSession2) => void, reject) => {
|
||||
const timeoutPromise = new Promise((_: (value: vscode.AuthenticationSession) => void, reject) => {
|
||||
const wait = setTimeout(() => {
|
||||
clearTimeout(wait);
|
||||
reject('Login timed out.');
|
||||
@@ -363,9 +363,9 @@ export class AzureActiveDirectoryService {
|
||||
return Promise.race([this.handleCodeResponse(state, codeVerifier, scope), timeoutPromise]);
|
||||
}
|
||||
|
||||
private async handleCodeResponse(state: string, codeVerifier: string, scope: string): Promise<vscode.AuthenticationSession2> {
|
||||
private async handleCodeResponse(state: string, codeVerifier: string, scope: string): Promise<vscode.AuthenticationSession> {
|
||||
let uriEventListener: vscode.Disposable;
|
||||
return new Promise((resolve: (value: vscode.AuthenticationSession2) => void, reject) => {
|
||||
return new Promise((resolve: (value: vscode.AuthenticationSession) => void, reject) => {
|
||||
uriEventListener = this._uriHandler.event(async (uri: vscode.Uri) => {
|
||||
try {
|
||||
const query = parseQuery(uri);
|
||||
|
||||
@@ -7,27 +7,14 @@
|
||||
|
||||
'use strict';
|
||||
|
||||
const withDefaults = require('../shared.webpack.config');
|
||||
const path = require('path');
|
||||
const withBrowserDefaults = require('../shared.webpack.config').browser;
|
||||
|
||||
const clientConfig = withDefaults({
|
||||
target: 'webworker',
|
||||
module.exports = withBrowserDefaults({
|
||||
context: __dirname,
|
||||
entry: {
|
||||
extension: './src/pythonMain.ts'
|
||||
},
|
||||
output: {
|
||||
filename: 'pythonMain.js'
|
||||
},
|
||||
performance: {
|
||||
hints: false
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
|
||||
}
|
||||
}
|
||||
});
|
||||
clientConfig.module.rules[0].use.shift(); // remove nls loader
|
||||
|
||||
module.exports = clientConfig;
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"engines": { "vscode": "*" },
|
||||
"activationEvents": ["onLanguage:python"],
|
||||
"main": "./out/pythonMain",
|
||||
"browser": "./dist/pythonMain",
|
||||
"browser": "./dist/browser/pythonMain",
|
||||
"extensionKind": [ "ui", "workspace" ],
|
||||
"contributes": {
|
||||
"languages": [{
|
||||
|
||||
@@ -13,8 +13,9 @@ const fs = require('fs');
|
||||
const merge = require('merge-options');
|
||||
const CopyWebpackPlugin = require('copy-webpack-plugin');
|
||||
const { NLSBundlePlugin } = require('vscode-nls-dev/lib/webpack-bundler');
|
||||
const { DefinePlugin } = require('webpack');
|
||||
|
||||
module.exports = function withDefaults(/**@type WebpackConfig*/extConfig) {
|
||||
function withNodeDefaults(/**@type WebpackConfig*/extConfig) {
|
||||
// Need to find the top-most `package.json` file
|
||||
const folderName = path.relative(__dirname, extConfig.context).split(/[\\\/]/)[0];
|
||||
const pkgPath = path.join(__dirname, folderName, 'package.json');
|
||||
@@ -79,3 +80,66 @@ module.exports = function withDefaults(/**@type WebpackConfig*/extConfig) {
|
||||
|
||||
return merge(defaultConfig, extConfig);
|
||||
};
|
||||
|
||||
|
||||
function withBrowserDefaults(/**@type WebpackConfig*/extConfig) {
|
||||
/** @type WebpackConfig */
|
||||
let defaultConfig = {
|
||||
mode: 'none', // this leaves the source code as close as possible to the original (when packaging we set this to 'production')
|
||||
target: 'webworker', // extensions run in a webworker context
|
||||
resolve: {
|
||||
mainFields: ['module', 'main'],
|
||||
extensions: ['.ts', '.js'], // support ts-files and js-files
|
||||
alias: {
|
||||
'vscode-nls': path.resolve(__dirname, '../build/polyfills/vscode-nls.js'),
|
||||
'vscode-extension-telemetry': path.resolve(__dirname, '../build/polyfills/vscode-extension-telemetry.js')
|
||||
}
|
||||
},
|
||||
module: {
|
||||
rules: [{
|
||||
test: /\.ts$/,
|
||||
exclude: /node_modules/,
|
||||
use: [{
|
||||
// configure TypeScript loader:
|
||||
// * enable sources maps for end-to-end source maps
|
||||
loader: 'ts-loader',
|
||||
options: {
|
||||
compilerOptions: {
|
||||
'sourceMap': true,
|
||||
}
|
||||
}
|
||||
}]
|
||||
}]
|
||||
},
|
||||
externals: {
|
||||
'vscode': 'commonjs vscode', // ignored because it doesn't exist
|
||||
},
|
||||
performance: {
|
||||
hints: false
|
||||
},
|
||||
output: {
|
||||
// all output goes into `dist`.
|
||||
// packaging depends on that and this must always be like it
|
||||
filename: '[name].js',
|
||||
path: path.join(extConfig.context, 'dist', 'browser'),
|
||||
libraryTarget: 'commonjs',
|
||||
},
|
||||
// yes, really source maps
|
||||
devtool: 'source-map',
|
||||
plugins: [
|
||||
// @ts-expect-error
|
||||
new CopyWebpackPlugin([
|
||||
{ from: 'src', to: '.', ignore: ['**/test/**', '*.ts'] }
|
||||
]),
|
||||
new DefinePlugin({ WEBWORKER: JSON.stringify(true) })
|
||||
]
|
||||
};
|
||||
|
||||
return merge(defaultConfig, extConfig);
|
||||
};
|
||||
|
||||
|
||||
module.exports = withNodeDefaults;
|
||||
module.exports.node = withNodeDefaults;
|
||||
module.exports.browser = withBrowserDefaults;
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"git": {
|
||||
"name": "Microsoft/vscode-mssql",
|
||||
"repositoryUrl": "https://github.com/Microsoft/vscode-mssql",
|
||||
"commitHash": "37a22725186b5b481b2882a78c7b9fe024c13946"
|
||||
"commitHash": "750d30dc48c4c0317b63bb5f1ed3e71487bb84a1"
|
||||
}
|
||||
},
|
||||
"license": "MIT",
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
"If you want to provide a fix or improvement, please create a pull request against the original repository.",
|
||||
"Once accepted there, we are happy to receive an update request."
|
||||
],
|
||||
"version": "https://github.com/Microsoft/vscode-mssql/commit/37a22725186b5b481b2882a78c7b9fe024c13946",
|
||||
"version": "https://github.com/Microsoft/vscode-mssql/commit/750d30dc48c4c0317b63bb5f1ed3e71487bb84a1",
|
||||
"name": "SQL",
|
||||
"scopeName": "source.sql",
|
||||
"patterns": [
|
||||
@@ -404,7 +404,7 @@
|
||||
}
|
||||
},
|
||||
"comment": "this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.",
|
||||
"match": "(N)?(')(?:[^'\\\\]|\\\\.)*(')",
|
||||
"match": "(N)?(')[^']*(')",
|
||||
"name": "string.quoted.single.sql"
|
||||
},
|
||||
{
|
||||
@@ -437,7 +437,7 @@
|
||||
}
|
||||
},
|
||||
"comment": "this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.",
|
||||
"match": "(`)(?:[^`\\\\]|\\\\.)*(`)",
|
||||
"match": "(`)[^`\\\\]*(`)",
|
||||
"name": "string.quoted.other.backtick.sql"
|
||||
},
|
||||
{
|
||||
@@ -470,7 +470,7 @@
|
||||
}
|
||||
},
|
||||
"comment": "this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.",
|
||||
"match": "(\")(?:[^\"#\\\\]|\\\\.)*(\")",
|
||||
"match": "(\")[^\"#]*(\")",
|
||||
"name": "string.quoted.double.sql"
|
||||
},
|
||||
{
|
||||
|
||||
@@ -19,7 +19,11 @@
|
||||
"statusBarItem.remoteForeground": "#FFF",
|
||||
"statusBarItem.remoteBackground": "#16825D",
|
||||
"sideBarSectionHeader.background": "#0000",
|
||||
"sideBarSectionHeader.border": "#61616130"
|
||||
"sideBarSectionHeader.border": "#61616130",
|
||||
"notebook.cellFocusBackground": "#c8ddf150",
|
||||
"notebook.cellBorderColor": "#dae3e9",
|
||||
"notebook.outputContainerBackgroundColor": "#c8ddf150",
|
||||
"notebook.focusedCellShadow": "#00315040"
|
||||
},
|
||||
"semanticHighlighting": true
|
||||
}
|
||||
|
||||
@@ -7,11 +7,10 @@
|
||||
|
||||
'use strict';
|
||||
const path = require('path');
|
||||
const withDefaults = require('../shared.webpack.config');
|
||||
const withBrowserDefaults = require('../shared.webpack.config').browser;
|
||||
|
||||
module.exports = withDefaults({
|
||||
module.exports = withBrowserDefaults({
|
||||
context: __dirname,
|
||||
target: 'webworker',
|
||||
node: false,
|
||||
entry: {
|
||||
extension: './src/extension.ts',
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"onFileSystem:github",
|
||||
"onDebug"
|
||||
],
|
||||
"browser": "./dist/extension",
|
||||
"browser": "./dist/browser/extension",
|
||||
"main": "./out/extension",
|
||||
"engines": {
|
||||
"vscode": "^1.25.0"
|
||||
|
||||
@@ -15,10 +15,10 @@
|
||||
import * as vscode from 'vscode';
|
||||
import { MemFS } from './memfs';
|
||||
|
||||
declare const window: unknown;
|
||||
declare const navigator: unknown;
|
||||
|
||||
export function activate(context: vscode.ExtensionContext) {
|
||||
if (typeof window !== 'undefined') { // do not run under node.js
|
||||
if (typeof navigator === 'object') { // do not run under node.js
|
||||
const memFs = enableFs(context);
|
||||
|
||||
if (vscode.workspace.workspaceFolders?.some(f => f.uri.scheme === MemFS.scheme)) {
|
||||
|
||||
23
package.json
@@ -16,8 +16,11 @@
|
||||
"compile": "gulp compile --max_old_space_size=4095",
|
||||
"watch": "gulp watch --max_old_space_size=4095",
|
||||
"watchd": "deemon yarn watch",
|
||||
"watch-webd": "deemon yarn watch-web",
|
||||
"kill-watchd": "deemon --kill yarn watch",
|
||||
"kill-watch-webd": "deemon --kill yarn watch-web",
|
||||
"restart-watchd": "deemon --restart yarn watch",
|
||||
"restart-watch-webd": "deemon --restart yarn watch-web",
|
||||
"watch-client": "gulp watch-client --max_old_space_size=4095",
|
||||
"mocha": "mocha test/unit/node/all.js --delay",
|
||||
"precommit": "node build/gulpfile.hygiene.js",
|
||||
@@ -35,6 +38,8 @@
|
||||
"strict-function-types-watch": "tsc --watch -p src/tsconfig.json --noEmit --strictFunctionTypes",
|
||||
"update-distro": "node build/npm/update-distro.js",
|
||||
"web": "node resources/serverless/code-web.js",
|
||||
"compile-web": "gulp compile-web --max_old_space_size=4095",
|
||||
"watch-web": "gulp watch-web --max_old_space_size=4095",
|
||||
"eslint": "eslint -c .eslintrc.json --rulesdir ./build/lib/eslint --ext .ts --ext .js ./src/vs ./extensions",
|
||||
"sqllint": "eslint --no-eslintrc -c .eslintrc.sql.ts.json --rulesdir ./build/lib/eslint --ext .ts ./src/sql"
|
||||
},
|
||||
@@ -56,7 +61,7 @@
|
||||
"html-query-plan": "git://github.com/anthonydresser/html-query-plan.git#2.6",
|
||||
"http-proxy-agent": "^2.1.0",
|
||||
"https-proxy-agent": "^2.2.3",
|
||||
"iconv-lite": "0.6.0",
|
||||
"iconv-lite-umd": "0.6.5",
|
||||
"jquery": "3.5.0",
|
||||
"jschardet": "2.1.1",
|
||||
"keytar": "^5.5.0",
|
||||
@@ -70,7 +75,7 @@
|
||||
"reflect-metadata": "^0.1.8",
|
||||
"rxjs": "5.4.0",
|
||||
"sanitize-html": "^1.19.1",
|
||||
"semver-umd": "^5.5.6",
|
||||
"semver-umd": "^5.5.7",
|
||||
"slickgrid": "github:anthonydresser/SlickGrid#2.3.33",
|
||||
"spdlog": "^0.11.1",
|
||||
"sudo-prompt": "9.1.1",
|
||||
@@ -78,9 +83,9 @@
|
||||
"vscode-nsfw": "1.2.8",
|
||||
"vscode-oniguruma": "1.3.1",
|
||||
"vscode-proxy-agent": "^0.5.2",
|
||||
"vscode-ripgrep": "^1.5.8",
|
||||
"vscode-ripgrep": "^1.7.0",
|
||||
"vscode-sqlite3": "4.0.10",
|
||||
"vscode-textmate": "5.1.1",
|
||||
"vscode-textmate": "5.2.0",
|
||||
"xterm": "4.7.0-beta.3",
|
||||
"xterm-addon-search": "0.7.0",
|
||||
"xterm-addon-unicode11": "0.2.0",
|
||||
@@ -98,7 +103,6 @@
|
||||
"@types/debug": "^4.1.5",
|
||||
"@types/graceful-fs": "4.1.2",
|
||||
"@types/http-proxy-agent": "^2.0.1",
|
||||
"@types/iconv-lite": "0.0.1",
|
||||
"@types/keytar": "^4.4.0",
|
||||
"@types/minimist": "^1.2.0",
|
||||
"@types/mocha": "2.2.39",
|
||||
@@ -120,12 +124,11 @@
|
||||
"asar": "^0.14.0",
|
||||
"chromium-pickle-js": "^0.2.0",
|
||||
"copy-webpack-plugin": "^4.5.2",
|
||||
"coveralls": "^2.11.11",
|
||||
"cson-parser": "^1.3.3",
|
||||
"css-loader": "^3.2.0",
|
||||
"debounce": "^1.0.0",
|
||||
"deemon": "^1.4.0",
|
||||
"electron": "7.3.1",
|
||||
"electron": "7.3.2",
|
||||
"eslint": "6.8.0",
|
||||
"eslint-plugin-jsdoc": "^19.1.0",
|
||||
"event-stream": "3.3.4",
|
||||
@@ -190,9 +193,9 @@
|
||||
"vsce": "1.48.0",
|
||||
"vscode-debugprotocol": "1.41.0",
|
||||
"vscode-nls-dev": "^3.3.1",
|
||||
"webpack": "^4.16.5",
|
||||
"webpack-cli": "^3.3.8",
|
||||
"webpack-stream": "^5.1.1",
|
||||
"webpack": "^4.43.0",
|
||||
"webpack-cli": "^3.3.12",
|
||||
"webpack-stream": "^5.2.1",
|
||||
"yaserver": "^0.2.0"
|
||||
},
|
||||
"repository": {
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
"html-query-plan": "git://github.com/anthonydresser/html-query-plan.git#2.6",
|
||||
"http-proxy-agent": "^2.1.0",
|
||||
"https-proxy-agent": "^2.2.3",
|
||||
"iconv-lite": "0.6.0",
|
||||
"iconv-lite-umd": "0.6.5",
|
||||
"jquery": "3.5.0",
|
||||
"jschardet": "2.1.1",
|
||||
"minimist": "^1.2.5",
|
||||
@@ -30,14 +30,14 @@
|
||||
"reflect-metadata": "^0.1.8",
|
||||
"rxjs": "5.4.0",
|
||||
"sanitize-html": "^1.19.1",
|
||||
"semver-umd": "^5.5.6",
|
||||
"semver-umd": "^5.5.7",
|
||||
"slickgrid": "github:anthonydresser/SlickGrid#2.3.33",
|
||||
"spdlog": "^0.11.1",
|
||||
"vscode-nsfw": "1.2.8",
|
||||
"vscode-oniguruma": "1.3.1",
|
||||
"vscode-proxy-agent": "^0.5.2",
|
||||
"vscode-ripgrep": "^1.5.8",
|
||||
"vscode-textmate": "5.1.1",
|
||||
"vscode-textmate": "5.2.0",
|
||||
"xterm": "4.7.0-beta.3",
|
||||
"xterm-addon-search": "0.7.0",
|
||||
"xterm-addon-unicode11": "0.2.0",
|
||||
|
||||
@@ -14,15 +14,17 @@
|
||||
"ansi_up": "^3.0.0",
|
||||
"chart.js": "^2.6.0",
|
||||
"html-query-plan": "git://github.com/anthonydresser/html-query-plan.git#2.6",
|
||||
"iconv-lite-umd": "0.6.5",
|
||||
"jschardet": "2.1.1",
|
||||
"jquery": "3.5.0",
|
||||
"ng2-charts": "^1.6.0",
|
||||
"reflect-metadata": "^0.1.8",
|
||||
"rxjs": "5.4.0",
|
||||
"sanitize-html": "^1.19.1",
|
||||
"semver-umd": "^5.5.6",
|
||||
"semver-umd": "^5.5.7",
|
||||
"slickgrid": "github:anthonydresser/SlickGrid#2.3.33",
|
||||
"vscode-oniguruma": "1.3.1",
|
||||
"vscode-textmate": "5.1.1",
|
||||
"vscode-textmate": "5.2.0",
|
||||
"xterm": "4.7.0-beta.3",
|
||||
"xterm-addon-search": "0.7.0",
|
||||
"xterm-addon-unicode11": "0.2.0",
|
||||
|
||||
@@ -182,6 +182,11 @@ htmlparser2@^3.10.0:
|
||||
inherits "^2.0.1"
|
||||
readable-stream "^3.1.1"
|
||||
|
||||
iconv-lite-umd@0.6.5:
|
||||
version "0.6.5"
|
||||
resolved "https://registry.yarnpkg.com/iconv-lite-umd/-/iconv-lite-umd-0.6.5.tgz#6a1f621a3b4d125f72feff813a9839e1ebd6c722"
|
||||
integrity sha512-WDegH4al+e3n3jTOStRvm+jzDA3JMUQGgzdAsMxAgcgB0Oi72HjfdsoX08ieKsy3rKexXVjWZr41aOIUaCZnMg==
|
||||
|
||||
inherits@^2.0.1, inherits@^2.0.3:
|
||||
version "2.0.4"
|
||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
|
||||
@@ -192,6 +197,11 @@ jquery@3.5.0:
|
||||
resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.5.0.tgz#9980b97d9e4194611c36530e7dc46a58d7340fc9"
|
||||
integrity sha512-Xb7SVYMvygPxbFMpTFQiHh1J7HClEaThguL15N/Gg37Lri/qKyhRGZYzHRyLH8Stq3Aow0LsHO2O2ci86fCrNQ==
|
||||
|
||||
jschardet@2.1.1:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/jschardet/-/jschardet-2.1.1.tgz#af6f8fd0b3b0f5d46a8fd9614a4fce490575c184"
|
||||
integrity sha512-pA5qG9Zwm8CBpGlK/lo2GE9jPxwqRgMV7Lzc/1iaPccw6v4Rhj8Zg2BTyrdmHmxlJojnbLupLeRnaPLsq03x6Q==
|
||||
|
||||
lodash.clonedeep@^4.5.0:
|
||||
version "4.5.0"
|
||||
resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef"
|
||||
@@ -285,10 +295,10 @@ sanitize-html@^1.19.1:
|
||||
srcset "^1.0.0"
|
||||
xtend "^4.0.1"
|
||||
|
||||
semver-umd@^5.5.6:
|
||||
version "5.5.6"
|
||||
resolved "https://registry.yarnpkg.com/semver-umd/-/semver-umd-5.5.6.tgz#1d185bbd2caec825c564b54907cd09e14083f228"
|
||||
integrity sha512-6ARYXVi4Y4VO5HfyCjT/6xyykBtJwEXSGQ8ON4UPQSFOjZUDsbAE0J614QcBBsLTTyQMEqvsXN804vAqpydjzw==
|
||||
semver-umd@^5.5.7:
|
||||
version "5.5.7"
|
||||
resolved "https://registry.yarnpkg.com/semver-umd/-/semver-umd-5.5.7.tgz#966beb5e96c7da6fbf09c3da14c2872d6836c528"
|
||||
integrity sha512-XgjPNlD0J6aIc8xoTN6GQGwWc2Xg0kq8NzrqMVuKG/4Arl6ab1F8+Am5Y/XKKCR+FceFr2yN/Uv5ZJBhRyRqKg==
|
||||
|
||||
"slickgrid@github:anthonydresser/SlickGrid#2.3.33":
|
||||
version "2.3.33"
|
||||
@@ -343,10 +353,10 @@ vscode-oniguruma@1.3.1:
|
||||
resolved "https://registry.yarnpkg.com/vscode-oniguruma/-/vscode-oniguruma-1.3.1.tgz#e2383879c3485b19f533ec34efea9d7a2b14be8f"
|
||||
integrity sha512-gz6ZBofA7UXafVA+m2Yt2zHKgXC2qedArprIsHAPKByTkwq9l5y/izAGckqxYml7mSbYxTRTfdRwsFq3cwF4LQ==
|
||||
|
||||
vscode-textmate@5.1.1:
|
||||
version "5.1.1"
|
||||
resolved "https://registry.yarnpkg.com/vscode-textmate/-/vscode-textmate-5.1.1.tgz#d88dbf271bee7cede455a21bd4894ba5724a4a7e"
|
||||
integrity sha512-5VHjF+Fglf9d2JI5OyQ7FHutK6/29G0qYyD920K0SWO7uY8JTWbqyKAHEtfB/ZDk2fOe/E23n3wz9fHXKi63yg==
|
||||
vscode-textmate@5.2.0:
|
||||
version "5.2.0"
|
||||
resolved "https://registry.yarnpkg.com/vscode-textmate/-/vscode-textmate-5.2.0.tgz#01f01760a391e8222fe4f33fbccbd1ad71aed74e"
|
||||
integrity sha512-Uw5ooOQxRASHgu6C7GVvUxisKXfSgW4oFlO+aa+PAkgmH89O3CXxEEzNRNtHSqtXFTl0nAC1uYj0GMSH27uwtQ==
|
||||
|
||||
xtend@^4.0.1:
|
||||
version "4.0.2"
|
||||
|
||||
@@ -356,12 +356,10 @@ https-proxy-agent@^2.2.3:
|
||||
agent-base "^4.3.0"
|
||||
debug "^3.1.0"
|
||||
|
||||
iconv-lite@0.6.0:
|
||||
version "0.6.0"
|
||||
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.0.tgz#66a93b80df0bd05d2a43a7426296b7f91073f125"
|
||||
integrity sha512-43ZpGYZ9QtuutX5l6WC1DSO8ane9N+Ct5qPLF2OV7vM9abM69gnAbVkh66ibaZd3aOGkoP1ZmringlKhLBkw2Q==
|
||||
dependencies:
|
||||
safer-buffer ">= 2.1.2 < 3"
|
||||
iconv-lite-umd@0.6.5:
|
||||
version "0.6.5"
|
||||
resolved "https://registry.yarnpkg.com/iconv-lite-umd/-/iconv-lite-umd-0.6.5.tgz#6a1f621a3b4d125f72feff813a9839e1ebd6c722"
|
||||
integrity sha512-WDegH4al+e3n3jTOStRvm+jzDA3JMUQGgzdAsMxAgcgB0Oi72HjfdsoX08ieKsy3rKexXVjWZr41aOIUaCZnMg==
|
||||
|
||||
inherits@^2.0.1, inherits@^2.0.3:
|
||||
version "2.0.4"
|
||||
@@ -567,11 +565,6 @@ safe-buffer@~5.2.0:
|
||||
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519"
|
||||
integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==
|
||||
|
||||
"safer-buffer@>= 2.1.2 < 3":
|
||||
version "2.1.2"
|
||||
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
|
||||
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
|
||||
|
||||
sanitize-html@^1.19.1:
|
||||
version "1.20.1"
|
||||
resolved "https://registry.yarnpkg.com/sanitize-html/-/sanitize-html-1.20.1.tgz#f6effdf55dd398807171215a62bfc21811bacf85"
|
||||
@@ -588,10 +581,10 @@ sanitize-html@^1.19.1:
|
||||
srcset "^1.0.0"
|
||||
xtend "^4.0.1"
|
||||
|
||||
semver-umd@^5.5.6:
|
||||
version "5.5.6"
|
||||
resolved "https://registry.yarnpkg.com/semver-umd/-/semver-umd-5.5.6.tgz#1d185bbd2caec825c564b54907cd09e14083f228"
|
||||
integrity sha512-6ARYXVi4Y4VO5HfyCjT/6xyykBtJwEXSGQ8ON4UPQSFOjZUDsbAE0J614QcBBsLTTyQMEqvsXN804vAqpydjzw==
|
||||
semver-umd@^5.5.7:
|
||||
version "5.5.7"
|
||||
resolved "https://registry.yarnpkg.com/semver-umd/-/semver-umd-5.5.7.tgz#966beb5e96c7da6fbf09c3da14c2872d6836c528"
|
||||
integrity sha512-XgjPNlD0J6aIc8xoTN6GQGwWc2Xg0kq8NzrqMVuKG/4Arl6ab1F8+Am5Y/XKKCR+FceFr2yN/Uv5ZJBhRyRqKg==
|
||||
|
||||
semver@^5.3.0:
|
||||
version "5.6.0"
|
||||
@@ -718,10 +711,10 @@ vscode-ripgrep@^1.5.8:
|
||||
resolved "https://registry.yarnpkg.com/vscode-ripgrep/-/vscode-ripgrep-1.5.8.tgz#32cb33da6d1a9ca8f5de8c2813ed5114fd55fc11"
|
||||
integrity sha512-l6Pv/t1Jk63RU+kEkMO04XxnNRYdyzuesizj9AzFpcfrUxxpAjEJBK1qO9Mov30UUGZl7uDUBn+uCv9koaHPPA==
|
||||
|
||||
vscode-textmate@5.1.1:
|
||||
version "5.1.1"
|
||||
resolved "https://registry.yarnpkg.com/vscode-textmate/-/vscode-textmate-5.1.1.tgz#d88dbf271bee7cede455a21bd4894ba5724a4a7e"
|
||||
integrity sha512-5VHjF+Fglf9d2JI5OyQ7FHutK6/29G0qYyD920K0SWO7uY8JTWbqyKAHEtfB/ZDk2fOe/E23n3wz9fHXKi63yg==
|
||||
vscode-textmate@5.2.0:
|
||||
version "5.2.0"
|
||||
resolved "https://registry.yarnpkg.com/vscode-textmate/-/vscode-textmate-5.2.0.tgz#01f01760a391e8222fe4f33fbccbd1ad71aed74e"
|
||||
integrity sha512-Uw5ooOQxRASHgu6C7GVvUxisKXfSgW4oFlO+aa+PAkgmH89O3CXxEEzNRNtHSqtXFTl0nAC1uYj0GMSH27uwtQ==
|
||||
|
||||
vscode-windows-ca-certs@0.2.0:
|
||||
version "0.2.0"
|
||||
|
||||
@@ -1,28 +1,36 @@
#!/usr/bin/env bash
#!/usr/bin/env sh
#
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the Source EULA. See License.txt in the project root for license information.

# test that VSCode wasn't installed inside WSL
if grep -qi Microsoft /proc/version && [ -z "$DONT_PROMPT_WSL_INSTALL" ]; then
echo "To use VS Code with the Windows Subsystem for Linux, please install VS Code in Windows and uninstall the Linux version in WSL. You can then use the '@@PRODNAME@@' command in a WSL terminal just as you would in a normal command prompt." 1>&2
read -e -p "Do you want to continue anyways ? [y/N] " YN

[[ $YN == "n" || $YN == "N" || $YN == "" ]] && exit 1
echo "To no longer see this prompt, start @@PRODNAME@@ with the environment variable DONT_PROMPT_WSL_INSTALL defined."
echo "To use @@PRODNAME@@ with the Windows Subsystem for Linux, please install @@PRODNAME@@ in Windows and uninstall the Linux version in WSL. You can then use the \`@@NAME@@\` command in a WSL terminal just as you would in a normal command prompt." 1>&2
printf "Do you want to continue anyway? [y/N] " 1>&2
read -r YN
YN=$(printf '%s' "$YN" | tr '[:upper:]' '[:lower:]')
case "$YN" in
y | yes )
;;
* )
exit 1
;;
esac
echo "To no longer see this prompt, start @@PRODNAME@@ with the environment variable DONT_PROMPT_WSL_INSTALL defined." 1>&2
fi


# If root, ensure that --user-data-dir or --file-write is specified
if [ "$(id -u)" = "0" ]; then
for i in $@
for i in "$@"
do
if [[ $i == --user-data-dir || $i == --user-data-dir=* || $i == --file-write ]]; then
case "$i" in
--user-data-dir | --user-data-dir=* | --file-write )
CAN_LAUNCH_AS_ROOT=1
fi
;;
esac
done
if [ -z $CAN_LAUNCH_AS_ROOT ]; then
echo "You are trying to start Azure Data Studio as a super user which is not recommended. If you really want to, you must specify an alternate user data directory using the --user-data-dir argument." 1>&2
echo "You are trying to start @@PRODNAME@@ as a super user which isn't recommended. If this was intended, please specify an alternate user data directory using the \`--user-data-dir\` argument." 1>&2
exit 1
fi
fi
@@ -33,7 +41,7 @@ if [ ! -L "$0" ]; then
else
if command -v readlink >/dev/null; then
# if readlink exists, follow the symlink and find relatively
VSCODE_PATH="$(dirname $(readlink -f "$0"))/.."
VSCODE_PATH="$(dirname "$(readlink -f "$0")")/.."
else
# else use the standard install location
VSCODE_PATH="/usr/share/@@NAME@@"

@@ -24,6 +24,7 @@ parts:
plugin: dump
source: .
stage-packages:
- ibus-gtk3
- fcitx-frontend-gtk3
- gvfs-libs
- libasound2

@@ -6,17 +6,18 @@
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
// @ts-check
|
||||
/** @typedef {import('../../src/vs/workbench/workbench.web.api').IWorkbenchConstructionOptions} WebConfiguration **/
|
||||
|
||||
const http = require('http');
|
||||
const url = require('url');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const util = require('util');
|
||||
const glob = require('glob');
|
||||
const opn = require('opn');
|
||||
const minimist = require('minimist');
|
||||
const webpack = require('webpack');
|
||||
const fancyLog = require('fancy-log');
|
||||
const ansiColors = require('ansi-colors');
|
||||
|
||||
const extensions = require('../../build/lib/extensions');
|
||||
|
||||
const APP_ROOT = path.join(__dirname, '..', '..');
|
||||
const EXTENSIONS_ROOT = path.join(APP_ROOT, 'extensions');
|
||||
@@ -60,20 +61,35 @@ const AUTHORITY = process.env.VSCODE_AUTHORITY || `${HOST}:${PORT}`;
|
||||
|
||||
const exists = (path) => util.promisify(fs.exists)(path);
|
||||
const readFile = (path) => util.promisify(fs.readFile)(path);
|
||||
const CharCode_PC = '%'.charCodeAt(0);
|
||||
|
||||
let unbuiltExensions = [];
|
||||
|
||||
async function initialize() {
|
||||
const extensionFolders = await util.promisify(fs.readdir)(EXTENSIONS_ROOT);
|
||||
const builtinExtensions = [];
|
||||
|
||||
const staticExtensions = [];
|
||||
const children = await util.promisify(fs.readdir)(EXTENSIONS_ROOT, { withFileTypes: true });
|
||||
const folders = children.filter(c => !c.isFile());
|
||||
await Promise.all(folders.map(async folder => {
|
||||
const folderName = folder.name;
|
||||
const extensionPath = path.join(EXTENSIONS_ROOT, folderName);
|
||||
|
||||
const webpackConfigs = [];
|
||||
let children = [];
|
||||
try {
|
||||
children = await util.promisify(fs.readdir)(extensionPath);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return;
|
||||
}
|
||||
|
||||
await Promise.all(extensionFolders.map(async extensionFolder => {
|
||||
const packageJSONPath = path.join(EXTENSIONS_ROOT, extensionFolder, 'package.json');
|
||||
const readme = children.filter(child => /^readme(\.txt|\.md|)$/i.test(child))[0];
|
||||
const readmePath = readme ? path.join(extensionPath, readme) : undefined;
|
||||
const changelog = children.filter(child => /^changelog(\.txt|\.md|)$/i.test(child))[0];
|
||||
const changelogPath = changelog ? path.join(extensionPath, changelog) : undefined;
|
||||
|
||||
const packageJSONPath = path.join(EXTENSIONS_ROOT, folderName, 'package.json');
|
||||
if (await exists(packageJSONPath)) {
|
||||
try {
|
||||
const packageJSON = JSON.parse((await readFile(packageJSONPath)).toString());
|
||||
let packageJSON = JSON.parse((await readFile(packageJSONPath)).toString());
|
||||
if (packageJSON.main && !packageJSON.browser) {
|
||||
return; // unsupported
|
||||
}
|
||||
@@ -81,82 +97,40 @@ async function initialize() {
|
||||
if (packageJSON.browser) {
|
||||
packageJSON.main = packageJSON.browser;
|
||||
|
||||
const webpackConfigLocations = await util.promisify(glob)(
|
||||
path.join(EXTENSIONS_ROOT, extensionFolder, '**', 'extension-browser.webpack.config.js'),
|
||||
{ ignore: ['**/node_modules'] }
|
||||
);
|
||||
|
||||
for (const webpackConfigPath of webpackConfigLocations) {
|
||||
const configOrFnOrArray = require(webpackConfigPath);
|
||||
function addConfig(configOrFn) {
|
||||
if (typeof configOrFn === 'function') {
|
||||
webpackConfigs.push(configOrFn({}, {}));
|
||||
} else {
|
||||
webpackConfigs.push(configOrFn);
|
||||
let mainFilePath = path.join(EXTENSIONS_ROOT, folderName, packageJSON.browser);
|
||||
if (path.extname(mainFilePath) !== '.js') {
|
||||
mainFilePath += '.js';
|
||||
}
|
||||
if (!await exists(mainFilePath)) {
|
||||
unbuiltExensions.push(path.relative(EXTENSIONS_ROOT, mainFilePath));
|
||||
}
|
||||
addConfig(configOrFnOrArray);
|
||||
}
|
||||
}
|
||||
|
||||
const packageNlsPath = path.join(EXTENSIONS_ROOT, extensionFolder, 'package.nls.json');
|
||||
if (await exists(packageNlsPath)) {
|
||||
const packageNls = JSON.parse((await readFile(packageNlsPath)).toString());
|
||||
const translate = (obj) => {
|
||||
for (let key in obj) {
|
||||
const val = obj[key];
|
||||
if (Array.isArray(val)) {
|
||||
val.forEach(translate);
|
||||
} else if (val && typeof val === 'object') {
|
||||
translate(val);
|
||||
} else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
|
||||
const translated = packageNls[val.substr(1, val.length - 2)];
|
||||
if (translated) {
|
||||
obj[key] = translated;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
translate(packageJSON);
|
||||
}
|
||||
packageJSON.extensionKind = ['web']; // enable for Web
|
||||
staticExtensions.push({
|
||||
|
||||
const packageNLSPath = path.join(folderName, 'package.nls.json');
|
||||
const packageNLSExists = await exists(path.join(EXTENSIONS_ROOT, packageNLSPath));
|
||||
if (packageNLSExists) {
|
||||
packageJSON = extensions.translatePackageJSON(packageJSON, path.join(EXTENSIONS_ROOT, packageNLSPath)); // temporary, until fixed in core
|
||||
}
|
||||
builtinExtensions.push({
|
||||
extensionPath: folderName,
|
||||
packageJSON,
|
||||
extensionLocation: { scheme: SCHEME, authority: AUTHORITY, path: `/static-extension/${extensionFolder}` },
|
||||
isBuiltin: true
|
||||
packageNLSPath: packageNLSExists ? packageNLSPath : undefined,
|
||||
readmePath,
|
||||
changelogPath
|
||||
});
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
if (args.watch) {
|
||||
webpack(webpackConfigs).watch({}, (err, stats) => {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
reject();
|
||||
} else {
|
||||
console.log(stats.toString());
|
||||
resolve(staticExtensions);
|
||||
if (unbuiltExensions.length) {
|
||||
fancyLog(`${ansiColors.yellow('Warning')}: Make sure to run ${ansiColors.cyan('yarn gulp watch-web')}\nCould not find the following browser main files: \n${unbuiltExensions.join('\n')}`);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
webpack(webpackConfigs).run((err, stats) => {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
reject();
|
||||
} else {
|
||||
console.log(stats.toString());
|
||||
resolve(staticExtensions);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
return builtinExtensions;
|
||||
}
|
||||
|
||||
const staticExtensionsPromise = initialize();
|
||||
const builtinExtensionsPromise = initialize();
|
||||
|
||||
const mapCallbackUriToRequestId = new Map();
|
||||
|
||||
@@ -252,31 +226,40 @@ function handleStaticExtension(req, res, parsedUrl) {
|
||||
* @param {import('http').ServerResponse} res
|
||||
*/
|
||||
async function handleRoot(req, res) {
|
||||
let folderUri = { scheme: 'memfs', path: `/sample-folder` };
|
||||
|
||||
const match = req.url && req.url.match(/\?([^#]+)/);
|
||||
let ghPath;
|
||||
if (match) {
|
||||
const qs = new URLSearchParams(match[1]);
|
||||
ghPath = qs.get('gh');
|
||||
if (ghPath && !ghPath.startsWith('/')) {
|
||||
|
||||
let ghPath = qs.get('gh');
|
||||
if (ghPath) {
|
||||
if (!ghPath.startsWith('/')) {
|
||||
ghPath = '/' + ghPath;
|
||||
}
|
||||
folderUri = { scheme: 'github', authority: 'HEAD', path: ghPath };
|
||||
} else {
|
||||
|
||||
let csPath = qs.get('cs');
|
||||
if (csPath) {
|
||||
if (!csPath.startsWith('/')) {
|
||||
csPath = '/' + csPath;
|
||||
}
|
||||
folderUri = { scheme: 'codespace', authority: 'HEAD', path: csPath };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const staticExtensions = await staticExtensionsPromise;
|
||||
/** @type {WebConfiguration} */
|
||||
const webConfig = {
|
||||
staticExtensions: staticExtensions,
|
||||
};
|
||||
const builtinExtensions = await builtinExtensionsPromise;
|
||||
|
||||
const webConfigJSON = escapeAttribute(JSON.stringify({
|
||||
...webConfig,
|
||||
folderUri: ghPath
|
||||
? { scheme: 'github', authority: 'HEAD', path: ghPath }
|
||||
: { scheme: 'memfs', path: `/sample-folder` },
|
||||
folderUri: folderUri,
|
||||
builtinExtensionsServiceUrl: `${SCHEME}://${AUTHORITY}/static-extension`
|
||||
}));
|
||||
|
||||
const data = (await util.promisify(fs.readFile)(WEB_MAIN)).toString()
|
||||
.replace('{{WORKBENCH_WEB_CONFIGURATION}}', () => webConfigJSON) // use a replace function to avoid that regexp replace patterns ($&, $0, ...) are applied
|
||||
.replace('{{WORKBENCH_BUILTIN_EXTENSIONS}}', () => escapeAttribute(JSON.stringify(builtinExtensions)))
|
||||
.replace('{{WEBVIEW_ENDPOINT}}', '')
|
||||
.replace('{{REMOTE_USER_DATA_URI}}', '');
|
||||
|
||||
|
||||
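Note: the code-web.js changes above switch from a plain readdir over folder names to a readdir with { withFileTypes: true }, so non-directory entries can be filtered out before each extension's package.json is read. A minimal standalone sketch of that scan, with an assumed ./extensions root and an illustrative "web-capable" check (not the repo's API):

const fs = require('fs');
const path = require('path');

async function scanExtensions(extensionsRoot) {
	// List folders only, then read each folder's package.json.
	const children = await fs.promises.readdir(extensionsRoot, { withFileTypes: true });
	const result = [];
	for (const child of children.filter(c => c.isDirectory())) {
		const packageJSONPath = path.join(extensionsRoot, child.name, 'package.json');
		try {
			const packageJSON = JSON.parse(await fs.promises.readFile(packageJSONPath, 'utf8'));
			if (packageJSON.main && !packageJSON.browser) {
				continue; // node-only extension, skipped for the web build
			}
			result.push({ extensionPath: child.name, packageJSON });
		} catch (err) {
			// folders without a readable package.json are ignored
		}
	}
	return result;
}

scanExtensions(path.join(__dirname, 'extensions')).then(exts => console.log(exts.length));
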
@@ -13,40 +13,18 @@ NAME="@@NAME@@"
DATAFOLDER="@@DATAFOLDER@@"
VSCODE_PATH="$(dirname "$(dirname "$(realpath "$0")")")"
ELECTRON="$VSCODE_PATH/$NAME.exe"
if grep -qi Microsoft /proc/version; then
# in a wsl shell
WSL_BUILD=$(uname -r | sed -E 's/^[0-9.]+-([0-9]+)-Microsoft.*|([0-9]+).([0-9]+).([0-9]+)-microsoft-standard.*|.*/\1\2\3\4/')
if [ -z "$WSL_BUILD" ]; then
WSL_BUILD=0
fi

if [ $WSL_BUILD -ge 17063 ]; then
IN_WSL=false
if [ -n "$WSL_DISTRO_NAME" ]; then
# $WSL_DISTRO_NAME is available since WSL builds 18362, also for WSL2
IN_WSL=true
else
WSL_BUILD=$(uname -r | sed -E 's/^[0-9.]+-([0-9]+)-Microsoft.*|.*/\1/')
if [ -n "$WSL_BUILD" ]; then
if [ "$WSL_BUILD" -ge 17063 ]; then
# WSLPATH is available since WSL build 17046
# WSLENV is available since WSL build 17063
export WSLENV=ELECTRON_RUN_AS_NODE/w:$WSLENV
CLI=$(wslpath -m "$VSCODE_PATH/resources/app/out/cli.js")

# use the Remote WSL extension if installed
WSL_EXT_ID="ms-vscode-remote.remote-wsl"

if [ $WSL_BUILD -ge 41955 -a $WSL_BUILD -lt 41959 ]; then
# WSL2 workaround for https://github.com/microsoft/WSL/issues/4337
CWD="$(pwd)"
cd "$VSCODE_PATH"
cmd.exe /C ".\\bin\\$APP_NAME.cmd --locate-extension $WSL_EXT_ID >%TEMP%\\remote-wsl-loc.txt"
WSL_EXT_WLOC=$(cmd.exe /C type %TEMP%\\remote-wsl-loc.txt)
cd "$CWD"
else
ELECTRON_RUN_AS_NODE=1 "$ELECTRON" "$CLI" --locate-extension $WSL_EXT_ID >/tmp/remote-wsl-loc.txt 2>/dev/null
WSL_EXT_WLOC=$(cat /tmp/remote-wsl-loc.txt)
fi
if [ -n "$WSL_EXT_WLOC" ]; then
# replace \r\n with \n in WSL_EXT_WLOC
WSL_CODE=$(wslpath -u "${WSL_EXT_WLOC%%[[:cntrl:]]}")/scripts/wslCode.sh
"$WSL_CODE" "$COMMIT" "$QUALITY" "$ELECTRON" "$APP_NAME" "$DATAFOLDER" "$@"
exit $?
fi
IN_WSL=true
else
# If running under older WSL, don't pass cli.js to Electron as
# environment vars cannot be transferred from WSL to Windows
@@ -55,6 +33,26 @@ if grep -qi Microsoft /proc/version; then
"$ELECTRON" "$@"
exit $?
fi
fi
fi
if [ $IN_WSL = true ]; then

export WSLENV=ELECTRON_RUN_AS_NODE/w:$WSLENV
CLI=$(wslpath -m "$VSCODE_PATH/resources/app/out/cli.js")

# use the Remote WSL extension if installed
WSL_EXT_ID="ms-vscode-remote.remote-wsl"

ELECTRON_RUN_AS_NODE=1 "$ELECTRON" "$CLI" --locate-extension $WSL_EXT_ID >/tmp/remote-wsl-loc.txt 2>/dev/null
WSL_EXT_WLOC=$(cat /tmp/remote-wsl-loc.txt)

if [ -n "$WSL_EXT_WLOC" ]; then
# replace \r\n with \n in WSL_EXT_WLOC
WSL_CODE=$(wslpath -u "${WSL_EXT_WLOC%%[[:cntrl:]]}")/scripts/wslCode.sh
"$WSL_CODE" "$COMMIT" "$QUALITY" "$ELECTRON" "$APP_NAME" "$DATAFOLDER" "$@"
exit $?
fi

elif [ -x "$(command -v cygpath)" ]; then
CLI=$(cygpath -m "$VSCODE_PATH/resources/app/out/cli.js")
else

@@ -7,7 +7,8 @@ if [[ "$OSTYPE" == "darwin"* ]]; then
ROOT=$(dirname "$(dirname "$(realpath "$0")")")
else
ROOT=$(dirname "$(dirname "$(readlink -f $0)")")
if grep -qi Microsoft /proc/version; then
# If the script is running in Docker using the WSL2 engine, powershell.exe won't exist
if grep -qi Microsoft /proc/version && type powershell.exe > /dev/null 2>&1; then
IN_WSL=true
fi
fi

@@ -37,8 +37,8 @@ if "%INTEGRATION_TEST_ELECTRON_PATH%"=="" (

:: Tests in the extension host

REM call "%INTEGRATION_TEST_ELECTRON_PATH%" %~dp0\..\extensions\vscode-notebook-tests\test --enable-proposed-api=vscode.vscode-notebook-tests --extensionDevelopmentPath=%~dp0\..\extensions\vscode-notebook-tests --extensionTestsPath=%~dp0\..\extensions\vscode-notebook-tests\out --disable-telemetry --crash-reporter-directory=%VSCODECRASHDIR% --no-cached-data --disable-updates --disable-extensions --user-data-dir=%VSCODEUSERDATADIR%
REM if %errorlevel% neq 0 exit /b %errorlevel%
:: call "%INTEGRATION_TEST_ELECTRON_PATH%" %~dp0\..\extensions\vscode-notebook-tests\test --enable-proposed-api=vscode.vscode-notebook-tests --extensionDevelopmentPath=%~dp0\..\extensions\vscode-notebook-tests --extensionTestsPath=%~dp0\..\extensions\vscode-notebook-tests\out --disable-telemetry --crash-reporter-directory=%VSCODECRASHDIR% --no-cached-data --disable-updates --disable-extensions --user-data-dir=%VSCODEUSERDATADIR%
:: if %errorlevel% neq 0 exit /b %errorlevel%

REM call "%INTEGRATION_TEST_ELECTRON_PATH%" %~dp0\..\extensions\vscode-api-tests\testWorkspace --enable-proposed-api=vscode.vscode-api-tests --extensionDevelopmentPath=%~dp0\..\extensions\vscode-api-tests --extensionTestsPath=%~dp0\..\extensions\vscode-api-tests\out\singlefolder-tests --disable-telemetry --crash-reporter-directory=%VSCODECRASHDIR% --no-cached-data --disable-updates --disable-extensions --user-data-dir=%VSCODEUSERDATADIR%
REM if %errorlevel% neq 0 exit /b %errorlevel%

18
src/bootstrap-fork.js
vendored
@@ -7,15 +7,16 @@
'use strict';

const bootstrap = require('./bootstrap');
const bootstrapNode = require('./bootstrap-node');

// Remove global paths from the node module lookup
bootstrap.removeGlobalNodeModuleLookupPaths();
bootstrapNode.removeGlobalNodeModuleLookupPaths();

// Enable ASAR in our forked processes
bootstrap.enableASARSupport();

if (process.env['VSCODE_INJECT_NODE_MODULE_LOOKUP_PATH']) {
bootstrap.injectNodeModuleLookupPath(process.env['VSCODE_INJECT_NODE_MODULE_LOOKUP_PATH']);
bootstrapNode.injectNodeModuleLookupPath(process.env['VSCODE_INJECT_NODE_MODULE_LOOKUP_PATH']);
}

// Configure: pipe logging to parent process
@@ -39,6 +40,7 @@ configureCrashReporter();
// Load AMD entry point
require('./bootstrap-amd').load(process.env['AMD_ENTRYPOINT']);


//#region Helpers

function pipeLoggingToParent() {
@@ -49,8 +51,6 @@ function pipeLoggingToParent() {
const seen = [];
const argsArray = [];

let res;

// Massage some arguments with special treatment
if (args.length) {
for (let i = 0; i < args.length; i++) {
@@ -85,7 +85,7 @@ function pipeLoggingToParent() {
}

try {
res = JSON.stringify(argsArray, function (key, value) {
const res = JSON.stringify(argsArray, function (key, value) {

// Objects get special treatment to prevent circles
if (isObject(value) || Array.isArray(value)) {
@@ -98,15 +98,15 @@ function pipeLoggingToParent() {

return value;
});
} catch (error) {
return 'Output omitted for an object that cannot be inspected (' + error.toString() + ')';
}

if (res && res.length > MAX_LENGTH) {
if (res.length > MAX_LENGTH) {
return 'Output omitted for a large object that exceeds the limits';
}

return res;
} catch (error) {
return `Output omitted for an object that cannot be inspected ('${error.toString()}')`;
}
}

/**

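Note: in the pipeLoggingToParent hunk above, the length check moves inside the try block, so a stringify failure and an oversized payload are handled in one place. A minimal standalone sketch of the same pattern (names and the size limit are illustrative, not the repo's API):

// Serialize console arguments safely before piping them to a parent process:
// break circular references and cap the payload size.
function safeStringify(args, maxLength = 100000) {
	const seen = new Set();
	try {
		const res = JSON.stringify(args, (key, value) => {
			if (value && typeof value === 'object') {
				if (seen.has(value)) {
					return '[Circular]'; // prevent circles
				}
				seen.add(value);
			}
			return value;
		});
		if (res.length > maxLength) {
			return 'Output omitted for a large object that exceeds the limits';
		}
		return res;
	} catch (error) {
		return `Output omitted for an object that cannot be inspected ('${error}')`;
	}
}

console.log(safeStringify([{ a: 1 }, 'hello']));
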
60
src/bootstrap-node.js
vendored
Normal file
@@ -0,0 +1,60 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

//@ts-check
'use strict';

/**
 * Add support for redirecting the loading of node modules
 *
 * @param {string} injectPath
 */
exports.injectNodeModuleLookupPath = function (injectPath) {
if (!injectPath) {
throw new Error('Missing injectPath');
}

const Module = require('module');
const path = require('path');

const nodeModulesPath = path.join(__dirname, '../node_modules');

// @ts-ignore
const originalResolveLookupPaths = Module._resolveLookupPaths;

// @ts-ignore
Module._resolveLookupPaths = function (moduleName, parent) {
const paths = originalResolveLookupPaths(moduleName, parent);
if (Array.isArray(paths)) {
for (let i = 0, len = paths.length; i < len; i++) {
if (paths[i] === nodeModulesPath) {
paths.splice(i, 0, injectPath);
break;
}
}
}

return paths;
};
};

exports.removeGlobalNodeModuleLookupPaths = function () {
const Module = require('module');
// @ts-ignore
const globalPaths = Module.globalPaths;

// @ts-ignore
const originalResolveLookupPaths = Module._resolveLookupPaths;

// @ts-ignore
Module._resolveLookupPaths = function (moduleName, parent) {
const paths = originalResolveLookupPaths(moduleName, parent);
let commonSuffixLength = 0;
while (commonSuffixLength < paths.length && paths[paths.length - 1 - commonSuffixLength] === globalPaths[globalPaths.length - 1 - commonSuffixLength]) {
commonSuffixLength++;
}
return paths.slice(0, paths.length - commonSuffixLength);
};
};
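Note: a minimal sketch of how a lookup-path injection like the one in bootstrap-node.js above can be exercised; the extra_modules directory name is hypothetical and the hook relies on Node's internal Module._resolveLookupPaths:

const Module = require('module');
const path = require('path');

function injectLookupPath(injectPath) {
	const nodeModulesPath = path.join(__dirname, 'node_modules');
	const original = Module._resolveLookupPaths;
	Module._resolveLookupPaths = function (moduleName, parent) {
		const paths = original(moduleName, parent);
		if (Array.isArray(paths)) {
			const index = paths.indexOf(nodeModulesPath);
			if (index !== -1) {
				// modules in injectPath win over the app's own node_modules
				paths.splice(index, 0, injectPath);
			}
		}
		return paths;
	};
}

injectLookupPath(path.join(__dirname, 'extra_modules'));
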
93
src/bootstrap-window.js
vendored
@@ -3,30 +3,35 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
/// <reference path="typings/require.d.ts" />
|
||||
|
||||
//@ts-check
|
||||
'use strict';
|
||||
|
||||
const bootstrap = require('./bootstrap');
|
||||
// Simple module style to support node.js and browser environments
|
||||
(function (globalThis, factory) {
|
||||
|
||||
/**
|
||||
* @param {object} destination
|
||||
* @param {object} source
|
||||
* @returns {object}
|
||||
*/
|
||||
exports.assign = function assign(destination, source) {
|
||||
return Object.keys(source).reduce(function (r, key) { r[key] = source[key]; return r; }, destination);
|
||||
};
|
||||
// Node.js
|
||||
if (typeof exports === 'object') {
|
||||
module.exports = factory();
|
||||
}
|
||||
|
||||
/**
|
||||
// Browser
|
||||
else {
|
||||
globalThis.MonacoBootstrapWindow = factory();
|
||||
}
|
||||
}(this, function () {
|
||||
const path = require.__$__nodeRequire('path');
|
||||
const webFrame = require.__$__nodeRequire('electron').webFrame;
|
||||
const ipc = require.__$__nodeRequire('electron').ipcRenderer;
|
||||
const bootstrap = globalThis.MonacoBootstrap;
|
||||
|
||||
/**
|
||||
* @param {string[]} modulePaths
|
||||
* @param {(result, configuration: object) => any} resultCallback
|
||||
* @param {{ forceEnableDeveloperKeybindings?: boolean, disallowReloadKeybinding?: boolean, removeDeveloperKeybindingsAfterLoad?: boolean, canModifyDOM?: (config: object) => void, beforeLoaderConfig?: (config: object, loaderConfig: object) => void, beforeRequire?: () => void }=} options
|
||||
*/
|
||||
exports.load = function (modulePaths, resultCallback, options) {
|
||||
|
||||
const webFrame = require('electron').webFrame;
|
||||
const path = require('path');
|
||||
|
||||
function load(modulePaths, resultCallback, options) {
|
||||
const args = parseURLQueryArgs();
|
||||
/**
|
||||
* // configuration: INativeWindowConfiguration
|
||||
@@ -59,7 +64,7 @@ exports.load = function (modulePaths, resultCallback, options) {
|
||||
}
|
||||
|
||||
// Correctly inherit the parent's environment
|
||||
exports.assign(process.env, configuration.userEnv);
|
||||
Object.assign(process.env, configuration.userEnv);
|
||||
|
||||
// Enable ASAR support
|
||||
bootstrap.enableASARSupport(path.join(configuration.appRoot, 'node_modules'));
|
||||
@@ -80,22 +85,16 @@ exports.load = function (modulePaths, resultCallback, options) {
|
||||
|
||||
window.document.documentElement.setAttribute('lang', locale);
|
||||
|
||||
// Load the loader
|
||||
const amdLoader = require(configuration.appRoot + '/out/vs/loader.js');
|
||||
const amdRequire = amdLoader.require;
|
||||
const amdDefine = amdLoader.require.define;
|
||||
const nodeRequire = amdLoader.require.nodeRequire;
|
||||
|
||||
window['nodeRequire'] = nodeRequire;
|
||||
window['require'] = amdRequire;
|
||||
// do not advertise AMD to avoid confusing UMD modules loaded with nodejs
|
||||
window['define'] = undefined;
|
||||
|
||||
// replace the patched electron fs with the original node fs for all AMD code
|
||||
amdDefine('fs', ['original-fs'], function (originalFS) { return originalFS; });
|
||||
require.define('fs', ['original-fs'], function (originalFS) { return originalFS; });
|
||||
|
||||
window['MonacoEnvironment'] = {};
|
||||
|
||||
const loaderConfig = {
|
||||
baseUrl: bootstrap.uriFromPath(configuration.appRoot) + '/out',
|
||||
baseUrl: `${bootstrap.uriFromPath(configuration.appRoot)}/out`,
|
||||
'vs/nls': nlsConfig,
|
||||
nodeModules: [/*BUILD->INSERT_NODE_MODULES*/]
|
||||
};
|
||||
@@ -118,7 +117,6 @@ exports.load = function (modulePaths, resultCallback, options) {
|
||||
'slickgrid/slick.editors',
|
||||
'slickgrid/slick.dataview'
|
||||
]);
|
||||
// {{SQL CARBON EDIT}} - End
|
||||
|
||||
// cached data config
|
||||
if (configuration.nodeCachedDataDir) {
|
||||
@@ -132,10 +130,10 @@ exports.load = function (modulePaths, resultCallback, options) {
|
||||
options.beforeLoaderConfig(configuration, loaderConfig);
|
||||
}
|
||||
|
||||
amdRequire.config(loaderConfig);
|
||||
require.config(loaderConfig);
|
||||
|
||||
if (nlsConfig.pseudo) {
|
||||
amdRequire(['vs/nls'], function (nlsPlugin) {
|
||||
require(['vs/nls'], function (nlsPlugin) {
|
||||
nlsPlugin.setPseudoTranslation(nlsConfig.pseudo);
|
||||
});
|
||||
}
|
||||
@@ -144,7 +142,7 @@ exports.load = function (modulePaths, resultCallback, options) {
|
||||
options.beforeRequire();
|
||||
}
|
||||
|
||||
amdRequire(modulePaths, result => {
|
||||
require(modulePaths, result => {
|
||||
try {
|
||||
const callbackResult = resultCallback(result, configuration);
|
||||
if (callbackResult && typeof callbackResult.then === 'function') {
|
||||
@@ -160,12 +158,12 @@ exports.load = function (modulePaths, resultCallback, options) {
|
||||
onUnexpectedError(error, enableDeveloperTools);
|
||||
}
|
||||
}, onUnexpectedError);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* @returns {{[param: string]: string }}
|
||||
*/
|
||||
function parseURLQueryArgs() {
|
||||
function parseURLQueryArgs() {
|
||||
const search = window.location.search || '';
|
||||
|
||||
return search.split(/[?&]/)
|
||||
@@ -173,16 +171,13 @@ function parseURLQueryArgs() {
|
||||
.map(function (param) { return param.split('='); })
|
||||
.filter(function (param) { return param.length === 2; })
|
||||
.reduce(function (r, param) { r[param[0]] = decodeURIComponent(param[1]); return r; }, {});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* @param {boolean} disallowReloadKeybinding
|
||||
* @returns {() => void}
|
||||
*/
|
||||
function registerDeveloperKeybindings(disallowReloadKeybinding) {
|
||||
|
||||
const ipc = require('electron').ipcRenderer;
|
||||
|
||||
function registerDeveloperKeybindings(disallowReloadKeybinding) {
|
||||
const extractKey = function (e) {
|
||||
return [
|
||||
e.ctrlKey ? 'ctrl-' : '',
|
||||
@@ -215,23 +210,25 @@ function registerDeveloperKeybindings(disallowReloadKeybinding) {
|
||||
listener = undefined;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* @param {string | Error} error
|
||||
* @param {boolean} enableDeveloperTools
|
||||
* @param {boolean} [enableDeveloperTools]
|
||||
*/
|
||||
function onUnexpectedError(error, enableDeveloperTools) {
|
||||
|
||||
const ipc = require('electron').ipcRenderer;
|
||||
|
||||
function onUnexpectedError(error, enableDeveloperTools) {
|
||||
if (enableDeveloperTools) {
|
||||
ipc.send('vscode:openDevTools');
|
||||
}
|
||||
|
||||
console.error('[uncaught exception]: ' + error);
|
||||
console.error(`[uncaught exception]: ${error}`);
|
||||
|
||||
if (error && typeof error !== 'string' && error.stack) {
|
||||
console.error(error.stack);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
load
|
||||
};
|
||||
}));
|
||||
|
||||
242
src/bootstrap.js
vendored
@@ -6,90 +6,54 @@
|
||||
//@ts-check
|
||||
'use strict';
|
||||
|
||||
//#region global bootstrapping
|
||||
// Simple module style to support node.js and browser environments
|
||||
(function (globalThis, factory) {
|
||||
|
||||
// increase number of stack frames(from 10, https://github.com/v8/v8/wiki/Stack-Trace-API)
|
||||
Error.stackTraceLimit = 100;
|
||||
|
||||
// Workaround for Electron not installing a handler to ignore SIGPIPE
|
||||
// (https://github.com/electron/electron/issues/13254)
|
||||
process.on('SIGPIPE', () => {
|
||||
console.error(new Error('Unexpected SIGPIPE'));
|
||||
});
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region Add support for redirecting the loading of node modules
|
||||
|
||||
exports.injectNodeModuleLookupPath = function (injectPath) {
|
||||
if (!injectPath) {
|
||||
throw new Error('Missing injectPath');
|
||||
// Node.js
|
||||
if (typeof exports === 'object') {
|
||||
module.exports = factory();
|
||||
}
|
||||
|
||||
// Browser
|
||||
else {
|
||||
globalThis.MonacoBootstrap = factory();
|
||||
}
|
||||
}(this, function () {
|
||||
const Module = require('module');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
|
||||
const nodeModulesPath = path.join(__dirname, '../node_modules');
|
||||
//#region global bootstrapping
|
||||
|
||||
// @ts-ignore
|
||||
const originalResolveLookupPaths = Module._resolveLookupPaths;
|
||||
// increase number of stack frames(from 10, https://github.com/v8/v8/wiki/Stack-Trace-API)
|
||||
Error.stackTraceLimit = 100;
|
||||
|
||||
// @ts-ignore
|
||||
Module._resolveLookupPaths = function (moduleName, parent) {
|
||||
const paths = originalResolveLookupPaths(moduleName, parent);
|
||||
if (Array.isArray(paths)) {
|
||||
for (let i = 0, len = paths.length; i < len; i++) {
|
||||
if (paths[i] === nodeModulesPath) {
|
||||
paths.splice(i, 0, injectPath);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Workaround for Electron not installing a handler to ignore SIGPIPE
|
||||
// (https://github.com/electron/electron/issues/13254)
|
||||
process.on('SIGPIPE', () => {
|
||||
console.error(new Error('Unexpected SIGPIPE'));
|
||||
});
|
||||
|
||||
return paths;
|
||||
};
|
||||
};
|
||||
//#endregion
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region Remove global paths from the node lookup paths
|
||||
//#region Add support for using node_modules.asar
|
||||
|
||||
exports.removeGlobalNodeModuleLookupPaths = function () {
|
||||
const Module = require('module');
|
||||
// @ts-ignore
|
||||
const globalPaths = Module.globalPaths;
|
||||
|
||||
// @ts-ignore
|
||||
const originalResolveLookupPaths = Module._resolveLookupPaths;
|
||||
|
||||
// @ts-ignore
|
||||
Module._resolveLookupPaths = function (moduleName, parent) {
|
||||
const paths = originalResolveLookupPaths(moduleName, parent);
|
||||
let commonSuffixLength = 0;
|
||||
while (commonSuffixLength < paths.length && paths[paths.length - 1 - commonSuffixLength] === globalPaths[globalPaths.length - 1 - commonSuffixLength]) {
|
||||
commonSuffixLength++;
|
||||
}
|
||||
return paths.slice(0, paths.length - commonSuffixLength);
|
||||
};
|
||||
};
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region Add support for using node_modules.asar
|
||||
|
||||
/**
|
||||
/**
|
||||
* @param {string=} nodeModulesPath
|
||||
*/
|
||||
exports.enableASARSupport = function (nodeModulesPath) {
|
||||
const Module = require('module');
|
||||
const path = require('path');
|
||||
|
||||
function enableASARSupport(nodeModulesPath) {
|
||||
let NODE_MODULES_PATH = nodeModulesPath;
|
||||
if (!NODE_MODULES_PATH) {
|
||||
NODE_MODULES_PATH = path.join(__dirname, '../node_modules');
|
||||
} else {
|
||||
// use the drive letter casing of __dirname
|
||||
if (process.platform === 'win32') {
|
||||
NODE_MODULES_PATH = __dirname.substr(0, 1) + NODE_MODULES_PATH.substr(1);
|
||||
}
|
||||
}
|
||||
|
||||
const NODE_MODULES_ASAR_PATH = NODE_MODULES_PATH + '.asar';
|
||||
const NODE_MODULES_ASAR_PATH = `${NODE_MODULES_PATH}.asar`;
|
||||
|
||||
// @ts-ignore
|
||||
const originalResolveLookupPaths = Module._resolveLookupPaths;
|
||||
@@ -108,95 +72,43 @@ exports.enableASARSupport = function (nodeModulesPath) {
|
||||
|
||||
return paths;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
//#endregion
|
||||
//#endregion
|
||||
|
||||
//#region URI helpers
|
||||
|
||||
/**
|
||||
//#region URI helpers
|
||||
|
||||
/**
|
||||
* @param {string} _path
|
||||
* @returns {string}
|
||||
*/
|
||||
exports.uriFromPath = function (_path) {
|
||||
const path = require('path');
|
||||
|
||||
function uriFromPath(_path) {
|
||||
let pathName = path.resolve(_path).replace(/\\/g, '/');
|
||||
if (pathName.length > 0 && pathName.charAt(0) !== '/') {
|
||||
pathName = '/' + pathName;
|
||||
pathName = `/${pathName}`;
|
||||
}
|
||||
|
||||
/** @type {string} */
|
||||
let uri;
|
||||
if (process.platform === 'win32' && pathName.startsWith('//')) { // specially handle Windows UNC paths
|
||||
uri = encodeURI('file:' + pathName);
|
||||
uri = encodeURI(`file:${pathName}`);
|
||||
} else {
|
||||
uri = encodeURI('file://' + pathName);
|
||||
uri = encodeURI(`file://${pathName}`);
|
||||
}
|
||||
|
||||
return uri.replace(/#/g, '%23');
|
||||
};
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region FS helpers
|
||||
|
||||
/**
|
||||
* @param {string} file
|
||||
* @returns {Promise<string>}
|
||||
*/
|
||||
exports.readFile = function (file) {
|
||||
const fs = require('fs');
|
||||
|
||||
return new Promise(function (resolve, reject) {
|
||||
fs.readFile(file, 'utf8', function (err, data) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
resolve(data);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {string} file
|
||||
* @param {string} content
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
exports.writeFile = function (file, content) {
|
||||
const fs = require('fs');
|
||||
//#endregion
|
||||
|
||||
return new Promise(function (resolve, reject) {
|
||||
fs.writeFile(file, content, 'utf8', function (err) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {string} dir
|
||||
* @returns {Promise<string>}
|
||||
*/
|
||||
exports.mkdirp = function mkdirp(dir) {
|
||||
const fs = require('fs');
|
||||
//#region NLS helpers
|
||||
|
||||
return new Promise((c, e) => fs.mkdir(dir, { recursive: true }, err => (err && err.code !== 'EEXIST') ? e(err) : c(dir)));
|
||||
};
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region NLS helpers
|
||||
|
||||
/**
|
||||
/**
|
||||
* @returns {{locale?: string, availableLanguages: {[lang: string]: string;}, pseudo?: boolean }}
|
||||
*/
|
||||
exports.setupNLS = function () {
|
||||
const path = require('path');
|
||||
function setupNLS() {
|
||||
|
||||
// Get the nls configuration into the process.env as early as possible.
|
||||
let nlsConfig = { availableLanguages: {} };
|
||||
@@ -219,8 +131,8 @@ exports.setupNLS = function () {
|
||||
return;
|
||||
}
|
||||
|
||||
const bundleFile = path.join(nlsConfig._resolvedLanguagePackCoreLocation, bundle.replace(/\//g, '!') + '.nls.json');
|
||||
exports.readFile(bundleFile).then(function (content) {
|
||||
const bundleFile = path.join(nlsConfig._resolvedLanguagePackCoreLocation, `${bundle.replace(/\//g, '!')}.nls.json`);
|
||||
readFile(bundleFile).then(function (content) {
|
||||
const json = JSON.parse(content);
|
||||
bundles[bundle] = json;
|
||||
|
||||
@@ -228,7 +140,7 @@ exports.setupNLS = function () {
|
||||
}).catch((error) => {
|
||||
try {
|
||||
if (nlsConfig._corruptedFile) {
|
||||
exports.writeFile(nlsConfig._corruptedFile, 'corrupted').catch(function (error) { console.error(error); });
|
||||
writeFile(nlsConfig._corruptedFile, 'corrupted').catch(function (error) { console.error(error); });
|
||||
}
|
||||
} finally {
|
||||
cb(error, undefined);
|
||||
@@ -238,20 +150,35 @@ exports.setupNLS = function () {
|
||||
}
|
||||
|
||||
return nlsConfig;
|
||||
};
|
||||
}
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region Portable helpers
|
||||
|
||||
/**
|
||||
* @returns {{ portableDataPath: string, isPortable: boolean }}
|
||||
/**
|
||||
* @param {string} file
|
||||
* @returns {Promise<string>}
|
||||
*/
|
||||
exports.configurePortable = function () {
|
||||
const product = require('../product.json');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
function readFile(file) {
|
||||
return fs.promises.readFile(file, 'utf8');
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} file
|
||||
* @param {string} content
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
function writeFile(file, content) {
|
||||
return fs.promises.writeFile(file, content, 'utf8');
|
||||
}
|
||||
|
||||
//#endregion
|
||||
|
||||
|
||||
//#region Portable helpers
|
||||
|
||||
/**
|
||||
* @param {{ portable: string; applicationName: string; }} product
|
||||
* @returns {{portableDataPath: string;isPortable: boolean;}}
|
||||
*/
|
||||
function configurePortable(product) {
|
||||
const appRoot = path.dirname(__dirname);
|
||||
|
||||
function getApplicationPath() {
|
||||
@@ -304,18 +231,29 @@ exports.configurePortable = function () {
|
||||
portableDataPath,
|
||||
isPortable
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
//#endregion
|
||||
//#endregion
|
||||
|
||||
//#region ApplicationInsights
|
||||
|
||||
// Prevents appinsights from monkey patching modules.
|
||||
// This should be called before importing the applicationinsights module
|
||||
exports.avoidMonkeyPatchFromAppInsights = function () {
|
||||
//#region ApplicationInsights
|
||||
|
||||
// Prevents appinsights from monkey patching modules.
|
||||
// This should be called before importing the applicationinsights module
|
||||
function avoidMonkeyPatchFromAppInsights() {
|
||||
// @ts-ignore
|
||||
process.env['APPLICATION_INSIGHTS_NO_DIAGNOSTIC_CHANNEL'] = true; // Skip monkey patching of 3rd party modules by appinsights
|
||||
global['diagnosticsSource'] = {}; // Prevents diagnostic channel (which patches "require") from initializing entirely
|
||||
};
|
||||
}
|
||||
|
||||
//#endregion
|
||||
//#endregion
|
||||
|
||||
|
||||
return {
|
||||
enableASARSupport,
|
||||
avoidMonkeyPatchFromAppInsights,
|
||||
configurePortable,
|
||||
setupNLS,
|
||||
uriFromPath
|
||||
};
|
||||
}));
|
||||
|
||||
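Note: the bootstrap.js rewrite above wraps the helpers in a small Node/browser factory so the same file can be require()'d under Node or attached as a browser global. A minimal sketch of that module style (the MonacoExample global name is illustrative):

(function (globalThis, factory) {
	if (typeof exports === 'object') {
		module.exports = factory(); // Node.js / CommonJS
	} else {
		globalThis.MonacoExample = factory(); // browser global
	}
}(this, function () {
	function greet(name) {
		return `Hello, ${name}`;
	}
	return { greet };
}));
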
@@ -7,12 +7,13 @@
'use strict';

const bootstrap = require('./bootstrap');
const product = require('../product.json');

// Avoid Monkey Patches from Application Insights
bootstrap.avoidMonkeyPatchFromAppInsights();

// Enable portable support
bootstrap.configurePortable();
bootstrap.configurePortable(product);

// Enable ASAR support
bootstrap.enableASARSupport();

19
src/main.js
@@ -21,7 +21,7 @@ const product = require('../product.json');
const { app, protocol } = require('electron');

// Enable portable support
const portable = bootstrap.configurePortable();
const portable = bootstrap.configurePortable(product);

// Enable ASAR support
bootstrap.enableASARSupport();
@@ -93,8 +93,9 @@ setCurrentWorkingDirectory();
// Register custom schemes with privileges
protocol.registerSchemesAsPrivileged([
{
scheme: 'vscode-resource',
scheme: 'vscode-webview',
privileges: {
standard: true,
secure: true,
supportFetchAPI: true,
corsEnabled: true,
@@ -473,7 +474,7 @@ function getNodeCachedDir() {

async ensureExists() {
try {
await bootstrap.mkdirp(this.value);
await mkdirp(this.value);

return this.value;
} catch (error) {
@@ -502,6 +503,18 @@ function getNodeCachedDir() {
};
}

/**
 * @param {string} dir
 * @returns {Promise<string>}
 */
function mkdirp(dir) {
const fs = require('fs');

return new Promise((resolve, reject) => {
fs.mkdir(dir, { recursive: true }, err => (err && err.code !== 'EEXIST') ? reject(err) : resolve(dir));
});
}

//#region NLS Support

/**

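Note: the getNodeCachedDir hunk above replaces bootstrap.mkdirp with a local helper built on fs.mkdir and { recursive: true }. An equivalent promise-based sketch (the path below is illustrative only):

const fs = require('fs');

async function ensureDir(dir) {
	try {
		// recursive: true creates missing parents and is a no-op if dir exists
		await fs.promises.mkdir(dir, { recursive: true });
	} catch (error) {
		if (error.code !== 'EEXIST') {
			throw error;
		}
	}
	return dir;
}

ensureDir('/tmp/example-cache-dir').then(dir => console.log('ready:', dir));
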
@@ -69,14 +69,4 @@ export class BrowserClipboardService implements IClipboardService {
hasResources(): Promise<boolean> {
return this._vsClipboardService.hasResources();
}

readFindTextSync(): string {
// eslint-disable-next-line no-sync
return this._vsClipboardService.readFindTextSync();
}

writeFindTextSync(text: string): void {
// eslint-disable-next-line no-sync
return this._vsClipboardService.writeFindTextSync(text);
}
}

@@ -77,6 +77,14 @@ export class ExtHostModelViewTreeViews implements ExtHostModelViewTreeViewsShape
$setVisible(treeViewId: string, visible: boolean): void {
}

$hasResolve(treeViewId: string): Promise<boolean> {
return Promise.resolve(false);
}

$resolve(treeViewId: string, treeItemHandle: string): Promise<ITreeComponentItem | undefined> {
return Promise.resolve(undefined);
}

private createExtHostTreeViewer<T>(handle: number, id: string, dataProvider: azdata.TreeComponentDataProvider<T>, extension: IExtensionDescription, logService: ILogService): ExtHostTreeView<T> {
const treeView = new ExtHostTreeView<T>(handle, id, dataProvider, this._proxy, undefined, extension, logService);
this.treeViews.set(`${handle}-${id}`, treeView);

@@ -561,10 +561,10 @@ export function createAdsApiFactory(accessor: ServicesAccessor): IAdsExtensionAp
CardType: sqlExtHostTypes.CardType,
Orientation: sqlExtHostTypes.Orientation,
SqlThemeIcon: sqlExtHostTypes.SqlThemeIcon,
TreeComponentItem: sqlExtHostTypes.TreeComponentItem,
TreeComponentItem: sqlExtHostTypes.TreeComponentItem as any, // work around
nb: nb,
AzureResource: sqlExtHostTypes.AzureResource,
TreeItem: sqlExtHostTypes.TreeItem,
TreeItem: sqlExtHostTypes.TreeItem as any, // work around
extensions: extensions,
ColumnType: sqlExtHostTypes.ColumnType,
ActionOnCellCheckboxCheck: sqlExtHostTypes.ActionOnCellCheckboxCheck,

Some files were not shown because too many files have changed in this diff.