Merge from vscode 2cd495805cf99b31b6926f08ff4348124b2cf73d

Author:       ADS Merger
Date:         2020-06-30 04:40:21 +00:00
Committed by: AzureDataStudio
Parent:       a8a7559229
Commit:       1388493cc1

602 changed files with 16375 additions and 12940 deletions


@@ -7,27 +7,15 @@
'use strict';
const withDefaults = require('../shared.webpack.config');
const path = require('path');
const withBrowserDefaults = require('../shared.webpack.config').browser;
const clientConfig = withDefaults({
target: 'webworker',
module.exports = withBrowserDefaults({
context: __dirname,
entry: {
extension: './src/configurationEditingMain.ts'
},
output: {
filename: 'configurationEditingMain.js'
},
performance: {
hints: false
},
resolve: {
alias: {
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
}
}
});
clientConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = clientConfig;


@@ -13,7 +13,7 @@
"onLanguage:jsonc"
],
"main": "./out/configurationEditingMain",
"browser": "./dist/configurationEditingMain",
"browser": "./dist/browser/configurationEditingMain",
"scripts": {
"compile": "gulp compile-extension:configuration-editing",
"watch": "gulp watch-extension:configuration-editing"
@@ -117,6 +117,10 @@
"fileMatch": "/.devcontainer.json",
"url": "./schemas/devContainer.schema.json"
},
{
"fileMatch": "%APP_SETTINGS_HOME%/globalStorage/ms-vscode-remote.remote-containers/nameConfigs/*.json",
"url": "./schemas/attachContainer.schema.json"
},
{
"fileMatch": "%APP_SETTINGS_HOME%/globalStorage/ms-vscode-remote.remote-containers/imageConfigs/*.json",
"url": "./schemas/attachContainer.schema.json"


@@ -42,8 +42,8 @@
"description": "An array of extensions that should be installed into the container.",
"items": {
"type": "string",
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)$",
"errorMessage": "Expected format '${publisher}.${name}'. Example: 'vscode.csharp'."
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)(@(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)?$",
"errorMessage": "Expected format: '${publisher}.${name}' or '${publisher}.${name}@${version}'. Example: 'ms-dotnettools.csharp'."
}
},
"postAttachCommand": {


@@ -17,8 +17,8 @@
"description": "An array of extensions that should be installed into the container.",
"items": {
"type": "string",
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)$",
"errorMessage": "Expected format '${publisher}.${name}'. Example: 'vscode.csharp'."
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)(@(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)?$",
"errorMessage": "Expected format: '${publisher}.${name}' or '${publisher}.${name}@${version}'. Example: 'ms-dotnettools.csharp'."
}
},
"settings": {


@@ -432,8 +432,8 @@
},
{
"command": "git.unstageSelectedRanges",
"key": "ctrl+k ctrl+u",
"mac": "cmd+k cmd+u",
"key": "ctrl+k ctrl+n",
"mac": "cmd+k cmd+n",
"when": "isInDiffEditor"
},
{
@@ -1878,7 +1878,7 @@
"dependencies": {
"byline": "^5.0.0",
"file-type": "^7.2.0",
"iconv-lite": "0.6.0",
"iconv-lite-umd": "0.6.5",
"jschardet": "2.1.1",
"vscode-extension-telemetry": "0.1.1",
"vscode-nls": "^4.0.0",


@@ -8,15 +8,15 @@ import * as path from 'path';
import { Repository, GitResourceGroup } from './repository';
import { Model } from './model';
import { debounce } from './decorators';
import { filterEvent, dispose, anyEvent, fireEvent } from './util';
import { filterEvent, dispose, anyEvent, fireEvent, PromiseSource } from './util';
import { GitErrorCodes, Status } from './api/git';
type Callback = { resolve: (status: boolean) => void, reject: (err: any) => void };
class GitIgnoreDecorationProvider implements DecorationProvider {
private static Decoration: Decoration = { priority: 3, color: new ThemeColor('gitDecoration.ignoredResourceForeground') };
readonly onDidChangeDecorations: Event<Uri[]>;
private queue = new Map<string, { repository: Repository; queue: Map<string, Callback>; }>();
private queue = new Map<string, { repository: Repository; queue: Map<string, PromiseSource<Decoration | undefined>>; }>();
private disposables: Disposable[] = [];
constructor(private model: Model) {
@@ -29,32 +29,29 @@ class GitIgnoreDecorationProvider implements DecorationProvider {
this.disposables.push(window.registerDecorationProvider(this));
}
provideDecoration(uri: Uri): Promise<Decoration | undefined> {
async provideDecoration(uri: Uri): Promise<Decoration | undefined> {
const repository = this.model.getRepository(uri);
if (!repository) {
return Promise.resolve(undefined);
return;
}
let queueItem = this.queue.get(repository.root);
if (!queueItem) {
queueItem = { repository, queue: new Map<string, Callback>() };
queueItem = { repository, queue: new Map<string, PromiseSource<Decoration | undefined>>() };
this.queue.set(repository.root, queueItem);
}
return new Promise<boolean>((resolve, reject) => {
queueItem!.queue.set(uri.fsPath, { resolve, reject });
let promiseSource = queueItem.queue.get(uri.fsPath);
if (!promiseSource) {
promiseSource = new PromiseSource();
queueItem!.queue.set(uri.fsPath, promiseSource);
this.checkIgnoreSoon();
}).then(ignored => {
if (ignored) {
return <Decoration>{
priority: 3,
color: new ThemeColor('gitDecoration.ignoredResourceForeground')
};
}
return undefined;
});
}
return await promiseSource.promise;
}
@debounce(500)
@@ -66,16 +63,16 @@ class GitIgnoreDecorationProvider implements DecorationProvider {
const paths = [...item.queue.keys()];
item.repository.checkIgnore(paths).then(ignoreSet => {
for (const [key, value] of item.queue.entries()) {
value.resolve(ignoreSet.has(key));
for (const [path, promiseSource] of item.queue.entries()) {
promiseSource.resolve(ignoreSet.has(path) ? GitIgnoreDecorationProvider.Decoration : undefined);
}
}, err => {
if (err.gitErrorCode !== GitErrorCodes.IsInSubmodule) {
console.error(err);
}
for (const [, value] of item.queue.entries()) {
value.reject(err);
for (const [, promiseSource] of item.queue.entries()) {
promiseSource.reject(err);
}
});
}


@@ -9,7 +9,7 @@ import * as os from 'os';
import * as cp from 'child_process';
import * as which from 'which';
import { EventEmitter } from 'events';
import iconv = require('iconv-lite');
import * as iconv from 'iconv-lite-umd';
import * as filetype from 'file-type';
import { assign, groupBy, IDisposable, toDisposable, dispose, mkdirp, readBytes, detectUnicodeEncoding, Encoding, onceEvent, splitInChunks, Limiter } from './util';
import { CancellationToken, Progress, Uri } from 'vscode';
@@ -1939,6 +1939,17 @@ export class Repository {
return message.replace(/^\s*#.*$\n?/gm, '').trim();
}
async getSquashMessage(): Promise<string | undefined> {
const squashMsgPath = path.join(this.repositoryRoot, '.git', 'SQUASH_MSG');
try {
const raw = await fs.readFile(squashMsgPath, 'utf8');
return this.stripCommitMessageComments(raw);
} catch {
return undefined;
}
}
async getMergeMessage(): Promise<string | undefined> {
const mergeMsgPath = path.join(this.repositoryRoot, '.git', 'MERGE_MSG');


@@ -537,7 +537,7 @@ class DotGitWatcher implements IFileWatcher {
upstreamWatcher.event(this.emitter.fire, this.emitter, this.transientDisposables);
} catch (err) {
if (Log.logLevel <= LogLevel.Error) {
this.outputChannel.appendLine(`Failed to watch ref '${upstreamPath}', is most likely packed.\n${err.stack || err}`);
this.outputChannel.appendLine(`Warning: Failed to watch ref '${upstreamPath}', is most likely packed.`);
}
}
}
@@ -729,10 +729,10 @@ export class Repository implements Disposable {
this.updateInputBoxPlaceholder();
this.disposables.push(this.onDidRunGitStatus(() => this.updateInputBoxPlaceholder()));
this._mergeGroup = this._sourceControl.createResourceGroup('merge', localize('merge changes', "MERGE CHANGES"));
this._indexGroup = this._sourceControl.createResourceGroup('index', localize('staged changes', "STAGED CHANGES"));
this._workingTreeGroup = this._sourceControl.createResourceGroup('workingTree', localize('changes', "CHANGES"));
this._untrackedGroup = this._sourceControl.createResourceGroup('untracked', localize('untracked changes', "UNTRACKED CHANGES"));
this._mergeGroup = this._sourceControl.createResourceGroup('merge', localize('merge changes', "Merge Changes"));
this._indexGroup = this._sourceControl.createResourceGroup('index', localize('staged changes', "Staged Changes"));
this._workingTreeGroup = this._sourceControl.createResourceGroup('workingTree', localize('changes', "Changes"));
this._untrackedGroup = this._sourceControl.createResourceGroup('untracked', localize('untracked changes', "Untracked Changes"));
const updateIndexGroupVisibility = () => {
const config = workspace.getConfiguration('git', root);
@@ -865,10 +865,10 @@ export class Repository implements Disposable {
}
async getInputTemplate(): Promise<string> {
const mergeMessage = await this.repository.getMergeMessage();
const commitMessage = (await Promise.all([this.repository.getMergeMessage(), this.repository.getSquashMessage()])).find(msg => msg !== undefined);
if (mergeMessage) {
return mergeMessage;
if (commitMessage) {
return commitMessage;
}
return await this.repository.getCommitTemplate();


@@ -3,7 +3,7 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Event, Disposable } from 'vscode';
import { Event, Disposable, EventEmitter } from 'vscode';
import { dirname, sep } from 'path';
import { Readable } from 'stream';
import { promises as fs, createReadStream } from 'fs';
@@ -400,3 +400,39 @@ export class Limiter<T> {
}
}
}
type Completion<T> = { success: true, value: T } | { success: false, err: any };
export class PromiseSource<T> {
private _onDidComplete = new EventEmitter<Completion<T>>();
private _promise: Promise<T> | undefined;
get promise(): Promise<T> {
if (this._promise) {
return this._promise;
}
return eventToPromise(this._onDidComplete.event).then(completion => {
if (completion.success) {
return completion.value;
} else {
throw completion.err;
}
});
}
resolve(value: T): void {
if (!this._promise) {
this._promise = Promise.resolve(value);
this._onDidComplete.fire({ success: true, value });
}
}
reject(err: any): void {
if (!this._promise) {
this._promise = Promise.reject(err);
this._onDidComplete.fire({ success: false, err });
}
}
}
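
A small usage sketch of PromiseSource (illustrative only, not taken from the diff): several callers can await the same pending promise before a batched producer settles it, which is how GitIgnoreDecorationProvider above shares one checkIgnore result per path.

// Illustrative consumer; assumes PromiseSource is exported from './util' as above.
import { PromiseSource } from './util';

const source = new PromiseSource<boolean>();

// Both consumers subscribe before the result exists.
const first = source.promise.then(ignored => console.log('first consumer:', ignored));
const second = source.promise.then(ignored => console.log('second consumer:', ignored));

// A later producer settles every waiter; once settled, further resolve/reject calls are no-ops.
source.resolve(true);
source.reject(new Error('too late'));   // ignored, the source already resolved

void Promise.all([first, second]);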


@@ -425,12 +425,10 @@ https-proxy-agent@^2.2.1:
agent-base "^4.3.0"
debug "^3.1.0"
iconv-lite@0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.0.tgz#66a93b80df0bd05d2a43a7426296b7f91073f125"
integrity sha512-43ZpGYZ9QtuutX5l6WC1DSO8ane9N+Ct5qPLF2OV7vM9abM69gnAbVkh66ibaZd3aOGkoP1ZmringlKhLBkw2Q==
dependencies:
safer-buffer ">= 2.1.2 < 3"
iconv-lite-umd@0.6.5:
version "0.6.5"
resolved "https://registry.yarnpkg.com/iconv-lite-umd/-/iconv-lite-umd-0.6.5.tgz#6a1f621a3b4d125f72feff813a9839e1ebd6c722"
integrity sha512-WDegH4al+e3n3jTOStRvm+jzDA3JMUQGgzdAsMxAgcgB0Oi72HjfdsoX08ieKsy3rKexXVjWZr41aOIUaCZnMg==
inflight@^1.0.4:
version "1.0.6"
@@ -748,7 +746,7 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.2:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519"
integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==
"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==


@@ -8,11 +8,10 @@
'use strict';
const path = require('path');
const withDefaults = require('../shared.webpack.config');
const withBrowserDefaults = require('../shared.webpack.config').browser;
module.exports = withDefaults({
module.exports = withBrowserDefaults({
context: __dirname,
target: 'webworker',
node: false,
entry: {
extension: './src/extension.ts',
@@ -20,30 +19,10 @@ module.exports = withDefaults({
externals: {
'keytar': 'commonjs keytar',
},
// TODO@eamodio Deal with nls properly for the browser
// Specify module here, so we can stop the vscode-nls-dev loader from overwriting nls calls
module: {
rules: [{
test: /\.ts$/,
exclude: /node_modules/,
use: [{
// configure TypeScript loader:
// * enable sources maps for end-to-end source maps
loader: 'ts-loader',
options: {
compilerOptions: {
'sourceMap': true,
}
}
}]
}]
},
resolve: {
alias: {
'node-fetch': path.resolve(__dirname, 'node_modules/node-fetch/browser.js'),
'vscode-extension-telemetry': path.resolve(__dirname, '../../build/polyfills/vscode-extension-telemetry.js'),
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js'),
'uuid': path.resolve(__dirname, 'node_modules/uuid/dist/esm-browser/index.js')
},
}
}
});


@@ -32,7 +32,7 @@
},
"aiKey": "AIF-d9b70cd4-b9f9-4d70-929b-a071c400b217",
"main": "./out/extension.js",
"browser": "./dist/extension.js",
"browser": "./dist/browser/extension.js",
"scripts": {
"compile": "gulp compile-extension:github-authentication",
"compile-web": "npx webpack-cli --config extension-browser.webpack.config --mode none",


@@ -22,7 +22,7 @@ interface SessionData {
}
export class GitHubAuthenticationProvider {
private _sessions: vscode.AuthenticationSession2[] = [];
private _sessions: vscode.AuthenticationSession[] = [];
private _githubServer = new GitHubServer();
public async initialize(): Promise<void> {
@@ -37,7 +37,7 @@ export class GitHubAuthenticationProvider {
private pollForChange() {
setTimeout(async () => {
let storedSessions: vscode.AuthenticationSession2[];
let storedSessions: vscode.AuthenticationSession[];
try {
storedSessions = await this.readSessions();
} catch (e) {
@@ -80,12 +80,12 @@ export class GitHubAuthenticationProvider {
}, 1000 * 30);
}
private async readSessions(): Promise<vscode.AuthenticationSession2[]> {
private async readSessions(): Promise<vscode.AuthenticationSession[]> {
const storedSessions = await keychain.getToken();
if (storedSessions) {
try {
const sessionData: SessionData[] = JSON.parse(storedSessions);
const sessionPromises = sessionData.map(async (session: SessionData): Promise<vscode.AuthenticationSession2> => {
const sessionPromises = sessionData.map(async (session: SessionData): Promise<vscode.AuthenticationSession> => {
const needsUserInfo = !session.account;
let userInfo: { id: string, accountName: string };
if (needsUserInfo) {
@@ -121,11 +121,11 @@ export class GitHubAuthenticationProvider {
await keychain.setToken(JSON.stringify(this._sessions));
}
get sessions(): vscode.AuthenticationSession2[] {
get sessions(): vscode.AuthenticationSession[] {
return this._sessions;
}
public async login(scopes: string): Promise<vscode.AuthenticationSession2> {
public async login(scopes: string): Promise<vscode.AuthenticationSession> {
const token = await this._githubServer.login(scopes);
const session = await this.tokenToSession(token, scopes.split(' '));
await this.setToken(session);
@@ -136,12 +136,12 @@ export class GitHubAuthenticationProvider {
this._githubServer.manuallyProvideToken();
}
private async tokenToSession(token: string, scopes: string[]): Promise<vscode.AuthenticationSession2> {
private async tokenToSession(token: string, scopes: string[]): Promise<vscode.AuthenticationSession> {
const userInfo = await this._githubServer.getUserInfo(token);
return new vscode.AuthenticationSession2(uuid(), token, { displayName: userInfo.accountName, id: userInfo.id }, scopes);
return new vscode.AuthenticationSession(uuid(), token, { displayName: userInfo.accountName, id: userInfo.id }, scopes);
}
private async setToken(session: vscode.AuthenticationSession2): Promise<void> {
private async setToken(session: vscode.AuthenticationSession): Promise<void> {
const sessionIndex = this._sessions.findIndex(s => s.id === session.id);
if (sessionIndex > -1) {
this._sessions.splice(sessionIndex, 1, session);


@@ -7,18 +7,19 @@
'use strict';
const path = require('path');
const withDefaults = require('../shared.webpack.config');
const withBrowserDefaults = require('../shared.webpack.config').browser;
module.exports = withDefaults({
const config = withBrowserDefaults({
context: __dirname,
target: 'webworker',
node: false,
entry: {
extension: './src/extension.ts',
extension: './src/extension.ts'
},
resolve: {
alias: {
'node-fetch': path.resolve(__dirname, 'node_modules/node-fetch/browser.js'),
},
'node-fetch': path.resolve(__dirname, 'node_modules/node-fetch/browser.js')
}
}
});
module.exports = config;


@@ -13,11 +13,93 @@
"Other"
],
"activationEvents": [
"onFileSystem:github"
"onFileSystem:codespace",
"onFileSystem:github",
"onCommand:githubBrowser.openRepository"
],
"browser": "./dist/extension.js",
"browser": "./dist/browser/extension.js",
"main": "./out/extension.js",
"contributes": {
"commands": [
{
"command": "githubBrowser.commit",
"title": "Commit",
"icon": "$(check)",
"category": "GitHub Browser"
},
{
"command": "githubBrowser.discardChanges",
"title": "Discard Changes",
"icon": "$(discard)",
"category": "GitHub Browser"
},
{
"command": "githubBrowser.openChanges",
"title": "Open Changes",
"icon": "$(git-compare)",
"category": "GitHub Browser"
},
{
"command": "githubBrowser.openFile",
"title": "Open File",
"icon": "$(go-to-file)",
"category": "GitHub Browser"
}
],
"menus": {
"commandPalette": [
{
"command": "githubBrowser.commit",
"when": "false"
},
{
"command": "githubBrowser.discardChanges",
"when": "false"
},
{
"command": "githubBrowser.openChanges",
"when": "false"
},
{
"command": "githubBrowser.openFile",
"when": "false"
}
],
"scm/title": [
{
"command": "githubBrowser.commit",
"group": "navigation",
"when": "scmProvider == github"
}
],
"scm/resourceState/context": [
{
"command": "githubBrowser.openFile",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "inline@0"
},
{
"command": "githubBrowser.discardChanges",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "inline@1"
},
{
"command": "githubBrowser.openChanges",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "navigation@0"
},
{
"command": "githubBrowser.openFile",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "navigation@1"
},
{
"command": "githubBrowser.discardChanges",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "1_modification@0"
}
]
},
"resourceLabelFormatters": [
{
"scheme": "github",
@@ -36,6 +118,24 @@
"separator": "/",
"workspaceSuffix": "GitHub"
}
},
{
"scheme": "codespace",
"authority": "HEAD",
"formatting": {
"label": "github.com${path}",
"separator": "/",
"workspaceSuffix": "GitHub"
}
},
{
"scheme": "codespace",
"authority": "*",
"formatting": {
"label": "github.com${path} (${authority})",
"separator": "/",
"workspaceSuffix": "GitHub"
}
}
]
},
@@ -47,14 +147,13 @@
"vscode:prepublish": "npm run compile"
},
"dependencies": {
"@octokit/graphql": "4.5.0",
"@octokit/rest": "17.11.0",
"@octokit/graphql": "4.5.1",
"@octokit/rest": "18.0.0",
"fuzzysort": "1.1.4",
"node-fetch": "2.6.0"
"node-fetch": "2.6.0",
"vscode-nls": "4.1.2"
},
"devDependencies": {
"@types/node-fetch": "2.5.7",
"webpack": "4.43.0",
"webpack-cli": "3.3.11"
"@types/node-fetch": "2.5.7"
}
}


@@ -0,0 +1,380 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { commands, Event, EventEmitter, FileStat, FileType, Memento, TextDocumentShowOptions, Uri, ViewColumn } from 'vscode';
import { getRootUri, getRelativePath, isChild } from './extension';
import { sha1 } from './sha1';
const textDecoder = new TextDecoder();
interface CreateOperation<T extends string | Uri = string> {
type: 'created';
size: number;
timestamp: number;
uri: T;
hash: string;
originalHash: string;
}
interface ChangeOperation<T extends string | Uri = string> {
type: 'changed';
size: number;
timestamp: number;
uri: T;
hash: string;
originalHash: string;
}
interface DeleteOperation<T extends string | Uri = string> {
type: 'deleted';
size: undefined;
timestamp: number;
uri: T;
hash: undefined;
originalHash: undefined;
}
export type Operation = CreateOperation<Uri> | ChangeOperation<Uri> | DeleteOperation<Uri>;
type StoredOperation = CreateOperation | ChangeOperation | DeleteOperation;
const workingOperationsKeyPrefix = 'github.working.changes|';
const workingFileKeyPrefix = 'github.working|';
function fromSerialized(operations: StoredOperation): Operation {
return { ...operations, uri: Uri.parse(operations.uri) };
}
interface CreatedFileChangeStoreEvent {
type: 'created';
rootUri: Uri;
uri: Uri;
}
interface ChangedFileChangeStoreEvent {
type: 'changed';
rootUri: Uri;
uri: Uri;
}
interface DeletedFileChangeStoreEvent {
type: 'deleted';
rootUri: Uri;
uri: Uri;
}
type ChangeStoreEvent = CreatedFileChangeStoreEvent | ChangedFileChangeStoreEvent | DeletedFileChangeStoreEvent;
function toChangeStoreEvent(operation: Operation | StoredOperation, rootUri: Uri, uri?: Uri): ChangeStoreEvent {
return {
type: operation.type,
rootUri: rootUri,
uri: uri ?? (typeof operation.uri === 'string' ? Uri.parse(operation.uri) : operation.uri)
};
}
export interface IChangeStore {
onDidChange: Event<ChangeStoreEvent>;
acceptAll(rootUri: Uri): Promise<void>;
discard(uri: Uri): Promise<void>;
discardAll(rootUri: Uri): Promise<void>;
getChanges(rootUri: Uri): Operation[];
getContent(uri: Uri): string | undefined;
openChanges(uri: Uri, original: Uri): void;
openFile(uri: Uri): void;
}
export interface IWritableChangeStore {
onDidChange: Event<ChangeStoreEvent>;
hasChanges(rootUri: Uri): boolean;
getContent(uri: Uri): string | undefined;
getStat(uri: Uri): FileStat | undefined;
updateDirectoryEntries(uri: Uri, entries: [string, FileType][]): [string, FileType][];
onFileChanged(uri: Uri, content: Uint8Array, originalContent: () => Uint8Array | Thenable<Uint8Array>): Promise<void>;
onFileCreated(uri: Uri, content: Uint8Array): Promise<void>;
onFileDeleted(uri: Uri): Promise<void>;
}
export class ChangeStore implements IChangeStore, IWritableChangeStore {
private _onDidChange = new EventEmitter<ChangeStoreEvent>();
get onDidChange(): Event<ChangeStoreEvent> {
return this._onDidChange.event;
}
constructor(private readonly memento: Memento) { }
async acceptAll(rootUri: Uri): Promise<void> {
const operations = this.getChanges(rootUri);
await this.saveWorkingOperations(rootUri, undefined);
for (const operation of operations) {
await this.discardWorkingContent(operation.uri);
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri));
}
}
async discard(uri: Uri): Promise<void> {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return;
}
const key = uri.toString();
const operations = this.getWorkingOperations(rootUri);
const index = operations.findIndex(c => c.uri === key);
if (index === -1) {
return;
}
const [operation] = operations.splice(index, 1);
await this.saveWorkingOperations(rootUri, operations);
await this.discardWorkingContent(uri);
this._onDidChange.fire({
type: operation.type === 'created' ? 'deleted' : operation.type === 'deleted' ? 'created' : 'changed',
rootUri: rootUri,
uri: uri
});
}
async discardAll(rootUri: Uri): Promise<void> {
const operations = this.getChanges(rootUri);
await this.saveWorkingOperations(rootUri, undefined);
for (const operation of operations) {
await this.discardWorkingContent(operation.uri);
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri));
}
}
getChanges(rootUri: Uri) {
return this.getWorkingOperations(rootUri).map(c => fromSerialized(c));
}
getContent(uri: Uri): string | undefined {
return this.memento.get(`${workingFileKeyPrefix}${uri.toString()}`);
}
getStat(uri: Uri): FileStat | undefined {
const key = uri.toString();
const operation = this.getChanges(getRootUri(uri)!).find(c => c.uri.toString() === key);
if (operation === undefined) {
return undefined;
}
return {
type: FileType.File,
size: operation.size ?? 0,
ctime: 0,
mtime: operation.timestamp
};
}
hasChanges(rootUri: Uri): boolean {
return this.getWorkingOperations(rootUri).length !== 0;
}
updateDirectoryEntries(uri: Uri, entries: [string, FileType][]): [string, FileType][] {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return entries;
}
const folderPath = getRelativePath(rootUri, uri);
const operations = this.getChanges(rootUri);
for (const operation of operations) {
switch (operation.type) {
case 'changed':
continue;
case 'created': {
const filePath = getRelativePath(rootUri, operation.uri);
if (isChild(folderPath, filePath)) {
entries.push([filePath, FileType.File]);
}
break;
}
case 'deleted': {
const filePath = getRelativePath(rootUri, operation.uri);
if (isChild(folderPath, filePath)) {
const index = entries.findIndex(([path]) => path === filePath);
if (index !== -1) {
entries.splice(index, 1);
}
}
break;
}
}
}
return entries;
}
async onFileChanged(uri: Uri, content: Uint8Array, originalContent: () => Uint8Array | Thenable<Uint8Array>): Promise<void> {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return;
}
const key = uri.toString();
const operations = this.getWorkingOperations(rootUri);
const hash = await sha1(content);
let operation = operations.find(c => c.uri === key);
if (operation === undefined) {
const originalHash = await sha1(await originalContent!());
if (hash === originalHash) {
return;
}
operation = {
type: 'changed',
size: content.byteLength,
timestamp: Date.now(),
uri: key,
hash: hash!,
originalHash: originalHash
} as ChangeOperation;
operations.push(operation);
await this.saveWorkingOperations(rootUri, operations);
await this.saveWorkingContent(uri, textDecoder.decode(content));
} else if (hash! === operation.originalHash) {
operations.splice(operations.indexOf(operation), 1);
await this.saveWorkingOperations(rootUri, operations);
await this.discardWorkingContent(uri);
} else if (operation.hash !== hash) {
operation.hash = hash!;
operation.timestamp = Date.now();
await this.saveWorkingOperations(rootUri, operations);
await this.saveWorkingContent(uri, textDecoder.decode(content));
}
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri, uri));
}
async onFileCreated(uri: Uri, content: Uint8Array): Promise<void> {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return;
}
const key = uri.toString();
const operations = this.getWorkingOperations(rootUri);
const hash = await sha1(content);
let operation = operations.find(c => c.uri === key);
if (operation === undefined) {
operation = {
type: 'created',
size: content.byteLength,
timestamp: Date.now(),
uri: key,
hash: hash!,
originalHash: hash!
} as CreateOperation;
operations.push(operation);
await this.saveWorkingOperations(rootUri, operations);
await this.saveWorkingContent(uri, textDecoder.decode(content));
} else {
// Shouldn't happen, but if it does just update the contents
operation.hash = hash!;
operation.timestamp = Date.now();
await this.saveWorkingOperations(rootUri, operations);
await this.saveWorkingContent(uri, textDecoder.decode(content));
}
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri, uri));
}
async onFileDeleted(uri: Uri): Promise<void> {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return;
}
const key = uri.toString();
const operations = this.getWorkingOperations(rootUri);
let operation = operations.find(c => c.uri === key);
if (operation !== undefined) {
operations.splice(operations.indexOf(operation), 1);
}
const wasCreated = operation?.type === 'created';
operation = {
type: 'deleted',
timestamp: Date.now(),
uri: key,
} as DeleteOperation;
// Only track the delete, if we weren't tracking the create
if (!wasCreated) {
operations.push(operation);
}
await this.saveWorkingOperations(rootUri, operations);
await this.discardWorkingContent(uri);
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri, uri));
}
async openChanges(uri: Uri, original: Uri) {
const opts: TextDocumentShowOptions = {
preserveFocus: false,
preview: true,
viewColumn: ViewColumn.Active
};
await commands.executeCommand('vscode.diff', original, uri, `${uri.fsPath} (Working Tree)`, opts);
}
async openFile(uri: Uri) {
const opts: TextDocumentShowOptions = {
preserveFocus: false,
preview: false,
viewColumn: ViewColumn.Active
};
await commands.executeCommand('vscode.open', uri, opts);
}
private getWorkingOperations(rootUri: Uri): StoredOperation[] {
return this.memento.get(`${workingOperationsKeyPrefix}${rootUri.toString()}`, []);
}
private async saveWorkingOperations(rootUri: Uri, operations: StoredOperation[] | undefined): Promise<void> {
await this.memento.update(`${workingOperationsKeyPrefix}${rootUri.toString()}`, operations);
}
private async saveWorkingContent(uri: Uri, content: string): Promise<void> {
await this.memento.update(`${workingFileKeyPrefix}${uri.toString()}`, content);
}
private async discardWorkingContent(uri: Uri): Promise<void> {
await this.memento.update(`${workingFileKeyPrefix}${uri.toString()}`, undefined);
}
}
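
A rough sketch of how ChangeStore is driven (the Memento, Uris and file contents below are placeholders, not values from the commit): the virtual file system feeds write and delete events into the store, and the SCM provider later reads the pending operations back out.

// Hypothetical wiring; `context` is the ExtensionContext, `rootUri`/`fileUri` are placeholder workspace Uris.
const changeStore = new ChangeStore(context.workspaceState);

changeStore.onDidChange(e => console.log(`${e.type}: ${e.uri.toString()}`));

// The file system provider forwards writes into the store...
await changeStore.onFileCreated(fileUri, new TextEncoder().encode('hello world\n'));

// ...and the SCM provider reads them back as pending operations.
const pending = changeStore.getChanges(rootUri);   // Operation[]
const dirty = changeStore.hasChanges(rootUri);     // true while anything is pending

// After a successful commit the pending set is cleared.
await changeStore.acceptAll(rootUri);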


@@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { Event, EventEmitter, Memento, Uri } from 'vscode';
export const contextKeyPrefix = 'github.context|';
export class ContextStore<T> {
private _onDidChange = new EventEmitter<Uri>();
get onDidChange(): Event<Uri> {
return this._onDidChange.event;
}
constructor(private readonly memento: Memento, private readonly scheme: string) { }
delete(uri: Uri) {
return this.set(uri, undefined);
}
get(uri: Uri): T | undefined {
return this.memento.get<T>(`${contextKeyPrefix}${uri.toString()}`);
}
async set(uri: Uri, context: T | undefined) {
if (uri.scheme !== this.scheme) {
throw new Error(`Invalid context scheme: ${uri.scheme}`);
}
await this.memento.update(`${contextKeyPrefix}${uri.toString()}`, context);
this._onDidChange.fire(uri);
}
}
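
For illustration (the memento and Uri below are placeholders, and the scheme string is assumed to be 'github', consistent with the onFileSystem:github activation event): the store keeps one context per repository Uri and refuses Uris of any other scheme.

// Illustrative only; GitHubApiContext is { sha, timestamp } from ./github/api.
const contexts = new ContextStore<GitHubApiContext>(context.workspaceState, 'github');

const repoUri = Uri.parse('github://HEAD/microsoft/vscode');                    // placeholder repository Uri
await contexts.set(repoUri, { sha: '2cd495805cf9...', timestamp: Date.now() }); // placeholder sha
console.log(contexts.get(repoUri));                                             // { sha, timestamp } or undefined
await contexts.delete(repoUri);                                                 // same as set(repoUri, undefined)
// Setting a context for an https: Uri would throw 'Invalid context scheme: https'.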


@@ -3,9 +3,71 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as vscode from 'vscode';
import { GitHubFS } from './githubfs';
import { ExtensionContext, Uri, workspace } from 'vscode';
import { ChangeStore } from './changeStore';
import { ContextStore } from './contextStore';
import { VirtualFS } from './fs';
import { GitHubApiContext, GitHubApi } from './github/api';
import { GitHubFS } from './github/fs';
import { VirtualSCM } from './scm';
export function activate(context: vscode.ExtensionContext) {
context.subscriptions.push(new GitHubFS());
// const repositoryRegex = /^(?:(?:https:\/\/)?github.com\/)?([^\/]+)\/([^\/]+?)(?:\/|.git|$)/i;
export function activate(context: ExtensionContext) {
const contextStore = new ContextStore<GitHubApiContext>(context.workspaceState, GitHubFS.scheme);
const changeStore = new ChangeStore(context.workspaceState);
const githubApi = new GitHubApi(contextStore);
const gitHubFS = new GitHubFS(githubApi);
const virtualFS = new VirtualFS('codespace', GitHubFS.scheme, contextStore, changeStore, gitHubFS);
context.subscriptions.push(
githubApi,
gitHubFS,
virtualFS,
new VirtualSCM(GitHubFS.scheme, githubApi, changeStore)
);
// commands.registerCommand('githubBrowser.openRepository', async () => {
// const value = await window.showInputBox({
// placeHolder: 'e.g. https://github.com/microsoft/vscode',
// prompt: 'Enter a GitHub repository url',
// validateInput: value => repositoryRegex.test(value) ? undefined : 'Invalid repository url'
// });
// if (value) {
// const match = repositoryRegex.exec(value);
// if (match) {
// const [, owner, repo] = match;
// const uri = Uri.parse(`codespace://HEAD/${owner}/${repo}`);
// openWorkspace(uri, repo, 'currentWindow');
// }
// }
// });
}
export function getRelativePath(rootUri: Uri, uri: Uri) {
return uri.path.substr(rootUri.path.length + 1);
}
export function getRootUri(uri: Uri) {
return workspace.getWorkspaceFolder(uri)?.uri;
}
export function isChild(folderPath: string, filePath: string) {
return isDescendent(folderPath, filePath) && filePath.substr(folderPath.length + (folderPath.endsWith('/') ? 0 : 1)).split('/').length === 1;
}
export function isDescendent(folderPath: string, filePath: string) {
return folderPath.length === 0 || filePath.startsWith(folderPath.endsWith('/') ? folderPath : `${folderPath}/`);
}
// function openWorkspace(uri: Uri, name: string, location: 'currentWindow' | 'newWindow' | 'addToCurrentWorkspace') {
// if (location === 'addToCurrentWorkspace') {
// const count = (workspace.workspaceFolders && workspace.workspaceFolders.length) || 0;
// return workspace.updateWorkspaceFolders(count, 0, { uri: uri, name: name });
// }
// return commands.executeCommand('vscode.openFolder', uri, location === 'newWindow');
// }
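
A quick illustration of the two path helpers exported above (sample paths only): isDescendent accepts any depth below the folder, while isChild insists on exactly one path segment below it.

// Sample values; both helpers are exported from './extension' above.
import { isChild, isDescendent } from './extension';

console.log(isDescendent('src', 'src/github/fs.ts'));    // true: any depth under the folder
console.log(isChild('src', 'src/github/fs.ts'));         // false: two segments below 'src'
console.log(isChild('src/github', 'src/github/fs.ts'));  // true: direct child
console.log(isChild('', 'README.md'));                   // true: an empty folder path means the root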


@@ -0,0 +1,220 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import {
CancellationToken,
Disposable,
Event,
EventEmitter,
FileChangeEvent,
FileChangeType,
FileSearchOptions,
FileSearchProvider,
FileSearchQuery,
FileStat,
FileSystemError,
FileSystemProvider,
FileType,
Progress,
TextSearchOptions,
TextSearchProvider,
TextSearchQuery,
TextSearchResult,
Uri,
workspace,
} from 'vscode';
import { IWritableChangeStore } from './changeStore';
import { ContextStore } from './contextStore';
import { GitHubApiContext } from './github/api';
const emptyDisposable = { dispose: () => { /* noop */ } };
const textEncoder = new TextEncoder();
export class VirtualFS implements FileSystemProvider, FileSearchProvider, TextSearchProvider, Disposable {
private _onDidChangeFile = new EventEmitter<FileChangeEvent[]>();
get onDidChangeFile(): Event<FileChangeEvent[]> {
return this._onDidChangeFile.event;
}
private readonly disposable: Disposable;
constructor(
readonly scheme: string,
private readonly originalScheme: string,
contextStore: ContextStore<GitHubApiContext>,
private readonly changeStore: IWritableChangeStore,
private readonly fs: FileSystemProvider & FileSearchProvider & TextSearchProvider
) {
// TODO@eamodio listen for workspace folder changes
for (const folder of workspace.workspaceFolders ?? []) {
const uri = this.getOriginalResource(folder.uri);
// If we have a saved context, but no longer have any changes, reset the context
// We only do this on startup/reload to keep things consistent
if (contextStore.get(uri) !== undefined && !changeStore.hasChanges(folder.uri)) {
contextStore.delete(uri);
}
}
this.disposable = Disposable.from(
workspace.registerFileSystemProvider(scheme, this, {
isCaseSensitive: true,
}),
workspace.registerFileSearchProvider(scheme, this),
workspace.registerTextSearchProvider(scheme, this),
changeStore.onDidChange(e => {
switch (e.type) {
case 'created':
this._onDidChangeFile.fire([{ type: FileChangeType.Created, uri: e.uri }]);
break;
case 'changed':
this._onDidChangeFile.fire([{ type: FileChangeType.Changed, uri: e.uri }]);
break;
case 'deleted':
this._onDidChangeFile.fire([{ type: FileChangeType.Deleted, uri: e.uri }]);
break;
}
}),
);
}
dispose() {
this.disposable?.dispose();
}
private getOriginalResource(uri: Uri): Uri {
return uri.with({ scheme: this.originalScheme });
}
private getVirtualResource(uri: Uri): Uri {
return uri.with({ scheme: this.scheme });
}
//#region FileSystemProvider
watch(): Disposable {
return emptyDisposable;
}
async stat(uri: Uri): Promise<FileStat> {
let stat = this.changeStore.getStat(uri);
if (stat !== undefined) {
return stat;
}
stat = await this.fs.stat(this.getOriginalResource(uri));
return stat;
}
async readDirectory(uri: Uri): Promise<[string, FileType][]> {
let entries = await this.fs.readDirectory(this.getOriginalResource(uri));
entries = this.changeStore.updateDirectoryEntries(uri, entries);
return entries;
}
createDirectory(_uri: Uri): void | Thenable<void> {
// TODO@eamodio only support files for now
throw FileSystemError.NoPermissions();
}
async readFile(uri: Uri): Promise<Uint8Array> {
const content = this.changeStore.getContent(uri);
if (content !== undefined) {
return textEncoder.encode(content);
}
const data = await this.fs.readFile(this.getOriginalResource(uri));
return data;
}
async writeFile(uri: Uri, content: Uint8Array, options: { create: boolean, overwrite: boolean }): Promise<void> {
let stat;
try {
stat = await this.stat(uri);
if (!options.overwrite) {
throw FileSystemError.FileExists();
}
} catch (ex) {
if (ex instanceof FileSystemError && ex.code === 'FileNotFound') {
if (!options.create) {
throw FileSystemError.FileNotFound();
}
} else {
throw ex;
}
}
if (stat === undefined) {
await this.changeStore.onFileCreated(uri, content);
} else {
await this.changeStore.onFileChanged(uri, content, () => this.fs.readFile(this.getOriginalResource(uri)));
}
}
async delete(uri: Uri, _options: { recursive: boolean }): Promise<void> {
const stat = await this.stat(uri);
if (stat.type !== FileType.File) {
throw FileSystemError.NoPermissions();
}
await this.changeStore.onFileDeleted(uri);
}
async rename(oldUri: Uri, newUri: Uri, options: { overwrite: boolean }): Promise<void> {
const stat = await this.stat(oldUri);
// TODO@eamodio only support files for now
if (stat.type !== FileType.File) {
throw FileSystemError.NoPermissions();
}
const content = await this.readFile(oldUri);
await this.writeFile(newUri, content, { create: true, overwrite: options.overwrite });
await this.delete(oldUri, { recursive: false });
}
async copy(source: Uri, destination: Uri, options: { overwrite: boolean }): Promise<void> {
const stat = await this.stat(source);
// TODO@eamodio only support files for now
if (stat.type !== FileType.File) {
throw FileSystemError.NoPermissions();
}
const content = await this.readFile(source);
await this.writeFile(destination, content, { create: true, overwrite: options.overwrite });
}
//#endregion
//#region FileSearchProvider
provideFileSearchResults(
query: FileSearchQuery,
options: FileSearchOptions,
token: CancellationToken,
) {
return this.fs.provideFileSearchResults(query, { ...options, folder: this.getOriginalResource(options.folder) }, token);
}
//#endregion
//#region TextSearchProvider
provideTextSearchResults(
query: TextSearchQuery,
options: TextSearchOptions,
progress: Progress<TextSearchResult>,
token: CancellationToken,
) {
return this.fs.provideTextSearchResults(
query,
{ ...options, folder: this.getOriginalResource(options.folder) },
{ report: (result: TextSearchResult) => progress.report({ ...result, uri: this.getVirtualResource(result.uri) }) },
token
);
}
//#endregion
}


@@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const emptyStr = '';
function defaultResolver(...args: any[]): string {
if (args.length === 1) {
const arg0 = args[0];
if (arg0 === undefined || arg0 === null) {
return emptyStr;
}
if (typeof arg0 === 'string') {
return arg0;
}
if (typeof arg0 === 'number' || typeof arg0 === 'boolean') {
return String(arg0);
}
return JSON.stringify(arg0);
}
return JSON.stringify(args);
}
function iPromise<T>(obj: T | Promise<T>): obj is Promise<T> {
return typeof (obj as Promise<T>)?.then === 'function';
}
export function gate<T extends (...arg: any) => any>(resolver?: (...args: Parameters<T>) => string) {
return (_target: any, key: string, descriptor: PropertyDescriptor) => {
let fn: Function | undefined;
if (typeof descriptor.value === 'function') {
fn = descriptor.value;
} else if (typeof descriptor.get === 'function') {
fn = descriptor.get;
}
if (fn === undefined || fn === null) {
throw new Error('Not supported');
}
const gateKey = `$gate$${key}`;
descriptor.value = function (this: any, ...args: any[]) {
const prop =
args.length === 0 ? gateKey : `${gateKey}$${(resolver ?? defaultResolver)(...(args as Parameters<T>))}`;
if (!Object.prototype.hasOwnProperty.call(this, prop)) {
Object.defineProperty(this, prop, {
configurable: false,
enumerable: false,
writable: true,
value: undefined,
});
}
let promise = this[prop];
if (promise === undefined) {
let result;
try {
result = fn!.apply(this, args);
if (result === undefined || fn === null || !iPromise(result)) {
return result;
}
this[prop] = promise = result
.then((r: any) => {
this[prop] = undefined;
return r;
})
.catch(ex => {
this[prop] = undefined;
throw ex;
});
} catch (ex) {
this[prop] = undefined;
throw ex;
}
}
return promise;
};
};
}
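
A sketch of how the @gate decorator is meant to be consumed (the class and method names are made up for illustration): while a call is in flight, further calls with the same key share the pending promise instead of starting new work, and the gate is released once the promise settles.

// Hypothetical consumer; requires experimentalDecorators in tsconfig.
class RepositoryQueries {
    @gate()
    async filesForSha(sha: string): Promise<string[]> {
        console.log(`querying ${sha}`);   // logged once while a call for this sha is in flight
        // Stand-in for a slow lookup (e.g. a GitHub tree query).
        return new Promise(resolve => setTimeout(() => resolve(['README.md', 'src/extension.ts']), 50));
    }
}

const queries = new RepositoryQueries();
// Both calls reuse one in-flight promise keyed by the default resolver (the sha string).
void Promise.all([queries.filesForSha('abc123'), queries.filesForSha('abc123')]);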


@@ -0,0 +1,491 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { authentication, AuthenticationSession, Disposable, Event, EventEmitter, Range, Uri } from 'vscode';
import { graphql } from '@octokit/graphql';
import { Octokit } from '@octokit/rest';
import { fromGitHubUri } from './fs';
import { ContextStore } from '../contextStore';
import { Iterables } from '../iterables';
export const shaRegex = /^[0-9a-f]{40}$/;
export interface GitHubApiContext {
sha: string;
timestamp: number;
}
interface CreateCommitOperation {
type: 'created';
path: string;
content: string
}
interface ChangeCommitOperation {
type: 'changed';
path: string;
content: string
}
interface DeleteCommitOperation {
type: 'deleted';
path: string;
content: undefined
}
export type CommitOperation = CreateCommitOperation | ChangeCommitOperation | DeleteCommitOperation;
type ArrayElement<T extends Array<unknown>> = T extends (infer U)[] ? U : never;
type GitCreateTreeParamsTree = ArrayElement<NonNullable<Parameters<Octokit['git']['createTree']>[0]>['tree']>;
function getGitHubRootUri(uri: Uri) {
const rootIndex = uri.path.indexOf('/', uri.path.indexOf('/', 1) + 1);
return uri.with({
path: uri.path.substring(0, rootIndex === -1 ? undefined : rootIndex),
query: ''
});
}
export class GitHubApi implements Disposable {
private _onDidChangeContext = new EventEmitter<Uri>();
get onDidChangeContext(): Event<Uri> {
return this._onDidChangeContext.event;
}
private readonly disposable: Disposable;
constructor(private readonly context: ContextStore<GitHubApiContext>) {
this.disposable = Disposable.from(
context.onDidChange(e => this._onDidChangeContext.fire(e))
);
}
dispose() {
this.disposable.dispose();
}
private _session: AuthenticationSession | undefined;
async ensureAuthenticated() {
if (this._session === undefined) {
const providers = await authentication.getProviderIds();
if (!providers.includes('github')) {
await new Promise(resolve => {
authentication.onDidChangeAuthenticationProviders(e => {
if (e.added.includes('github')) {
resolve();
}
});
});
}
this._session = await authentication.getSession('github', ['repo'], { createIfNone: true });
}
return this._session;
}
private _graphql: typeof graphql | undefined;
private async graphql() {
if (this._graphql === undefined) {
const session = await this.ensureAuthenticated();
this._graphql = graphql.defaults({
headers: {
Authorization: `Bearer ${session.accessToken}`,
}
});
}
return this._graphql;
}
private _octokit: typeof Octokit | undefined;
private async octokit(options?: ConstructorParameters<typeof Octokit>[0]) {
if (this._octokit === undefined) {
const session = await this.ensureAuthenticated();
this._octokit = Octokit.defaults({ auth: `token ${session.accessToken}` });
}
return new this._octokit(options);
}
async commit(rootUri: Uri, message: string, operations: CommitOperation[]): Promise<string | undefined> {
let { owner, repo, ref } = fromGitHubUri(rootUri);
try {
if (ref === undefined || ref === 'HEAD') {
ref = await this.defaultBranchQuery(rootUri);
if (ref === undefined) {
throw new Error('Cannot commit — invalid ref');
}
}
const context = await this.getContext(rootUri);
if (context.sha === undefined) {
throw new Error('Cannot commit — invalid context');
}
const hasDeletes = operations.some(op => op.type === 'deleted');
const github = await this.octokit();
const treeResp = await github.git.getTree({
owner: owner,
repo: repo,
tree_sha: context.sha,
recursive: hasDeletes ? 'true' : undefined,
});
// 0100000000000000 (040000): Directory
// 1000000110100100 (100644): Regular non-executable file
// 1000000110110100 (100664): Regular non-executable group-writeable file
// 1000000111101101 (100755): Regular executable file
// 1010000000000000 (120000): Symbolic link
// 1110000000000000 (160000): Gitlink
let updatedTree: GitCreateTreeParamsTree[];
if (hasDeletes) {
updatedTree = treeResp.data.tree as GitCreateTreeParamsTree[];
for (const operation of operations) {
switch (operation.type) {
case 'created':
updatedTree.push({ path: operation.path, mode: '100644', type: 'blob', content: operation.content });
break;
case 'changed': {
const index = updatedTree.findIndex(item => item.path === operation.path);
if (index !== -1) {
const { path, mode, type } = updatedTree[index];
updatedTree.splice(index, 1, { path: path, mode: mode, type: type, content: operation.content });
}
break;
}
case 'deleted': {
const index = updatedTree.findIndex(item => item.path === operation.path);
if (index !== -1) {
updatedTree.splice(index, 1);
}
break;
}
}
}
} else {
updatedTree = [];
for (const operation of operations) {
switch (operation.type) {
case 'created':
updatedTree.push({ path: operation.path, mode: '100644', type: 'blob', content: operation.content });
break;
case 'changed':
const item = treeResp.data.tree.find(item => item.path === operation.path) as GitCreateTreeParamsTree;
if (item !== undefined) {
const { path, mode, type } = item;
updatedTree.push({ path: path, mode: mode, type: type, content: operation.content });
}
break;
}
}
}
const updatedTreeResp = await github.git.createTree({
owner: owner,
repo: repo,
base_tree: hasDeletes ? undefined : treeResp.data.sha,
tree: updatedTree
});
const resp = await github.git.createCommit({
owner: owner,
repo: repo,
message: message,
tree: updatedTreeResp.data.sha,
parents: [context.sha]
});
this.updateContext(rootUri, { sha: resp.data.sha, timestamp: Date.now() });
// TODO@eamodio need to send a file change for any open files
await github.git.updateRef({
owner: owner,
repo: repo,
ref: `heads/${ref}`,
sha: resp.data.sha
});
return resp.data.sha;
} catch (ex) {
console.log(ex);
throw ex;
}
}
async defaultBranchQuery(uri: Uri) {
const { owner, repo } = fromGitHubUri(uri);
try {
const query = `query defaultBranch($owner: String!, $repo: String!) {
repository(owner: $owner, name: $repo) {
defaultBranchRef {
name
}
}
}`;
const rsp = await this.gqlQuery<{
repository: { defaultBranchRef: { name: string; target: { oid: string } } | null | undefined };
}>(query, {
owner: owner,
repo: repo,
});
return rsp?.repository?.defaultBranchRef?.name ?? undefined;
} catch (ex) {
return undefined;
}
}
async filesQuery(uri: Uri) {
const { owner, repo, ref } = fromGitHubUri(uri);
try {
const context = await this.getContext(uri);
const resp = await (await this.octokit()).git.getTree({
owner: owner,
repo: repo,
recursive: '1',
tree_sha: context?.sha ?? ref ?? 'HEAD',
});
return Iterables.filterMap(resp.data.tree, p => p.type === 'blob' ? p.path : undefined);
} catch (ex) {
return [];
}
}
async fsQuery<T>(uri: Uri, innerQuery: string): Promise<T | undefined> {
const { owner, repo, path, ref } = fromGitHubUri(uri);
try {
const context = await this.getContext(uri);
const query = `query fs($owner: String!, $repo: String!, $path: String) {
repository(owner: $owner, name: $repo) {
object(expression: $path) {
${innerQuery}
}
}
}`;
const rsp = await this.gqlQuery<{
repository: { object: T | null | undefined };
}>(query, {
owner: owner,
repo: repo,
path: `${context.sha ?? ref ?? 'HEAD'}:${path}`,
});
return rsp?.repository?.object ?? undefined;
} catch (ex) {
return undefined;
}
}
async latestCommitQuery(uri: Uri) {
const { owner, repo, ref } = fromGitHubUri(uri);
try {
if (ref === undefined || ref === 'HEAD') {
const query = `query latest($owner: String!, $repo: String!) {
repository(owner: $owner, name: $repo) {
defaultBranchRef {
target {
oid
}
}
}
}`;
const rsp = await this.gqlQuery<{
repository: { defaultBranchRef: { name: string; target: { oid: string } } | null | undefined };
}>(query, {
owner: owner,
repo: repo,
});
return rsp?.repository?.defaultBranchRef?.target.oid ?? undefined;
}
const query = `query latest($owner: String!, $repo: String!, $ref: String!) {
repository(owner: $owner, name: $repo) {
ref(qualifiedName: $ref) {
target {
oid
}
}
}`;
const rsp = await this.gqlQuery<{
repository: { ref: { target: { oid: string } } | null | undefined };
}>(query, {
owner: owner,
repo: repo,
ref: ref ?? 'HEAD',
});
return rsp?.repository?.ref?.target.oid ?? undefined;
} catch (ex) {
return undefined;
}
}
async searchQuery(
query: string,
uri: Uri,
options: { maxResults?: number; context?: { before?: number; after?: number } },
): Promise<SearchQueryResults> {
const { owner, repo, ref } = fromGitHubUri(uri);
// If we have a specific ref, don't try to search, because GitHub search only works against the default branch
if (ref === undefined) {
return { matches: [], limitHit: true };
}
try {
const resp = await (await this.octokit({
request: {
headers: {
accept: 'application/vnd.github.v3.text-match+json',
},
}
})).search.code({
q: `${query} repo:${owner}/${repo}`,
});
// Since GitHub doesn't return ANY line numbers just fake it at the top of the file 😢
const range = new Range(0, 0, 0, 0);
const matches: SearchQueryMatch[] = [];
let counter = 0;
let match: SearchQueryMatch;
for (const item of resp.data.items) {
for (const m of (item as typeof item & { text_matches: GitHubSearchTextMatch[] }).text_matches) {
counter++;
if (options.maxResults !== undefined && counter > options.maxResults) {
return { matches: matches, limitHit: true };
}
match = {
path: item.path,
ranges: [],
preview: m.fragment,
matches: [],
};
for (const lm of m.matches) {
let line = 0;
let shartChar = 0;
let endChar = 0;
for (let i = 0; i < lm.indices[1]; i++) {
if (i === lm.indices[0]) {
shartChar = endChar;
}
if (m.fragment[i] === '\n') {
line++;
endChar = 0;
} else {
endChar++;
}
}
match.ranges.push(range);
match.matches.push(new Range(line, shartChar, line, endChar));
}
matches.push(match);
}
}
return { matches: matches, limitHit: false };
} catch (ex) {
return { matches: [], limitHit: true };
}
}
private async gqlQuery<T>(query: string, variables: { [key: string]: string | number }): Promise<T | undefined> {
return (await this.graphql())<T>(query, variables);
}
private readonly pendingContextRequests = new Map<string, Promise<GitHubApiContext>>();
async getContext(uri: Uri): Promise<GitHubApiContext> {
const rootUri = getGitHubRootUri(uri);
let pending = this.pendingContextRequests.get(rootUri.toString());
if (pending === undefined) {
pending = this.getContextCore(rootUri);
this.pendingContextRequests.set(rootUri.toString(), pending);
}
try {
return await pending;
} finally {
this.pendingContextRequests.delete(rootUri.toString());
}
}
private readonly rootUriToContextMap = new Map<string, GitHubApiContext>();
private async getContextCore(rootUri: Uri): Promise<GitHubApiContext> {
let context = this.rootUriToContextMap.get(rootUri.toString());
if (context === undefined) {
const { ref } = fromGitHubUri(rootUri);
if (ref !== undefined && shaRegex.test(ref)) {
context = { sha: ref, timestamp: Date.now() };
} else {
context = this.context.get(rootUri);
if (context?.sha === undefined) {
const sha = await this.latestCommitQuery(rootUri);
if (sha !== undefined) {
context = { sha: sha, timestamp: Date.now() };
} else {
context = undefined;
}
}
}
if (context !== undefined) {
this.updateContext(rootUri, context);
}
}
return context ?? { sha: rootUri.authority, timestamp: Date.now() };
}
private updateContext(rootUri: Uri, context: GitHubApiContext) {
this.rootUriToContextMap.set(rootUri.toString(), context);
this.context.set(rootUri, context);
}
}
interface GitHubSearchTextMatch {
object_url: string;
object_type: string;
property: string;
fragment: string;
matches: {
text: string;
indices: number[];
}[];
}
interface SearchQueryMatch {
path: string;
ranges: Range[];
preview: string;
matches: Range[];
}
interface SearchQueryResults {
matches: SearchQueryMatch[];
limitHit: boolean;
}
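
To make the commit path above concrete, a hedged example of the payload an SCM provider would hand to GitHubApi.commit (the instance, Uri, message and file contents are placeholders): the method turns these operations into a tree, creates a commit with the current context sha as parent, and then updates heads/<branch>.

// Placeholder data only; `github` is an existing GitHubApi instance (see extension.ts above).
const operations: CommitOperation[] = [
    { type: 'created', path: 'docs/notes.md', content: '# Notes\n' },
    { type: 'changed', path: 'README.md', content: '# Updated readme\n' },
    { type: 'deleted', path: 'obsolete.txt', content: undefined }
];

async function commitExample(github: GitHubApi): Promise<void> {
    const rootUri = Uri.parse('github://HEAD/owner/repo');   // placeholder repository root
    const sha = await github.commit(rootUri, 'Update docs', operations);
    console.log(sha ?? 'commit did not produce a sha');
}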


@@ -5,8 +5,6 @@
'use strict';
import {
authentication,
AuthenticationSession2,
CancellationToken,
Disposable,
Event,
@@ -20,7 +18,6 @@ import {
FileSystemProvider,
FileType,
Progress,
Range,
TextSearchComplete,
TextSearchOptions,
TextSearchProvider,
@@ -29,11 +26,11 @@ import {
Uri,
workspace,
} from 'vscode';
import { Octokit } from '@octokit/rest';
import { graphql } from '@octokit/graphql/';
import * as fuzzySort from 'fuzzysort';
import fetch from 'node-fetch';
import { Iterables } from './iterables';
import { GitHubApi } from './api';
import { Iterables } from '../iterables';
import { getRootUri } from '../extension';
const emptyDisposable = { dispose: () => { /* noop */ } };
const replaceBackslashRegex = /(\/|\\)/g;
@@ -53,16 +50,17 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
}
private readonly disposable: Disposable;
private fsCache = new Map<string, any>();
private fsCache = new Map<string, Map<string, any>>();
constructor() {
constructor(private readonly github: GitHubApi) {
this.disposable = Disposable.from(
workspace.registerFileSystemProvider(GitHubFS.scheme, this, {
isCaseSensitive: true,
isReadonly: true,
isReadonly: true
}),
workspace.registerFileSearchProvider(GitHubFS.scheme, this),
workspace.registerTextSearchProvider(GitHubFS.scheme, this),
github.onDidChangeContext(e => this.fsCache.delete(e.toString()))
);
}
@@ -70,22 +68,18 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
this.disposable?.dispose();
}
private _github: Promise<GitHubApi | undefined> | undefined;
get github(): Promise<GitHubApi | undefined> {
if (this._github === undefined) {
this._github = this.getGitHubApi();
private getCache(uri: Uri) {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return undefined;
}
return this._github;
}
private async getGitHubApi(): Promise<GitHubApi | undefined> {
try {
const session = await authentication.getSession('github', ['repo'], { createIfNone: true });
return new GitHubApi(session);
} catch (ex) {
this._github = undefined;
throw ex;
let cache = this.fsCache.get(rootUri.toString());
if (cache === undefined) {
cache = new Map<string, any>();
this.fsCache.set(rootUri.toString(), cache);
}
return cache;
}
//#region FileSystemProvider
@@ -96,7 +90,8 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
async stat(uri: Uri): Promise<FileStat> {
if (uri.path === '' || uri.path.lastIndexOf('/') === 0) {
return { type: FileType.Directory, size: 0, ctime: 0, mtime: 0 };
const context = await this.github.getContext(uri);
return { type: FileType.Directory, size: 0, ctime: 0, mtime: context?.timestamp };
}
const data = await this.fsQuery<{
@@ -108,14 +103,20 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
...on Blob {
byteSize
}`,
this.fsCache,
this.getCache(uri),
);
if (data === undefined) {
throw FileSystemError.FileNotFound();
}
const context = await this.github.getContext(uri);
return {
type: typenameToFileType(data?.__typename),
size: data?.byteSize ?? 0,
type: typenameToFileType(data.__typename),
size: data.byteSize ?? 0,
ctime: 0,
mtime: 0,
mtime: context?.timestamp,
};
}
@@ -130,7 +131,7 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
type
}
}`,
this.fsCache,
this.getCache(uri),
);
return (data?.entries ?? []).map<[string, FileType]>(e => [
@@ -139,8 +140,8 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
]);
}
createDirectory(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
createDirectory(_uri: Uri): void | Thenable<void> {
throw FileSystemError.NoPermissions();
}
async readFile(uri: Uri): Promise<Uint8Array> {
@@ -172,20 +173,20 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
return textEncoder.encode(data?.text ?? '');
}
writeFile(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
async writeFile(_uri: Uri, _content: Uint8Array, _options: { create: boolean, overwrite: boolean }): Promise<void> {
throw FileSystemError.NoPermissions();
}
delete(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
delete(_uri: Uri, _options: { recursive: boolean }): void | Thenable<void> {
throw FileSystemError.NoPermissions();
}
rename(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
rename(_oldUri: Uri, _newUri: Uri, _options: { overwrite: boolean }): void | Thenable<void> {
throw FileSystemError.NoPermissions();
}
copy?(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
copy(_source: Uri, _destination: Uri, _options: { overwrite: boolean }): void | Thenable<void> {
throw FileSystemError.NoPermissions();
}
//#endregion
@@ -201,8 +202,10 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
): Promise<Uri[]> {
let searchable = this.fileSearchCache.get(options.folder.toString(true));
if (searchable === undefined) {
const matches = await (await this.github)?.filesQuery(options.folder);
if (matches === undefined || token.isCancellationRequested) { return []; }
const matches = await this.github.filesQuery(options.folder);
if (matches === undefined || token.isCancellationRequested) {
return [];
}
searchable = [...Iterables.map(matches, m => (fuzzySort as Fuzzysort).prepareSlow(m))];
this.fileSearchCache.set(options.folder.toString(true), searchable);
@@ -233,13 +236,12 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
query: TextSearchQuery,
options: TextSearchOptions,
progress: Progress<TextSearchResult>,
token: CancellationToken,
_token: CancellationToken,
): Promise<TextSearchComplete> {
const results = await (await this.github)?.searchQuery(
const results = await this.github.searchQuery(
query.pattern,
options.folder,
{ maxResults: options.maxResults, context: { before: options.beforeContext, after: options.afterContext } },
token,
);
if (results === undefined) { return { limitHit: true }; }
@@ -266,9 +268,11 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
const key = `${uri.toString()}:${getHashCode(query)}`;
let data = cache?.get(key);
if (data !== undefined) { return data as T; }
if (data !== undefined) {
return data as T;
}
data = await (await this.github)?.fsQuery<T>(uri, query);
data = await this.github.fsQuery<T>(uri, query);
cache?.set(key, data);
return data;
}
@@ -296,12 +300,16 @@ function typenameToFileType(typename: string | undefined | null) {
}
type RepoInfo = { owner: string; repo: string; path: string | undefined; ref?: string };
function fromGitHubUri(uri: Uri): RepoInfo {
export function fromGitHubUri(uri: Uri): RepoInfo {
const [, owner, repo, ...rest] = uri.path.split('/');
let ref;
if (uri.authority) {
ref = uri.authority;
// The casing of HEAD is important for the GitHub api to work
if (/HEAD/i.test(ref)) {
ref = 'HEAD';
}
}
return { owner: owner, repo: repo, path: rest.join('/'), ref: ref };
}
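For illustration, a minimal sketch of how fromGitHubUri decomposes a github: URI (the repository below is hypothetical):
const info = fromGitHubUri(Uri.parse('github://main/octocat/hello-world/src/index.ts'));
// => { owner: 'octocat', repo: 'hello-world', path: 'src/index.ts', ref: 'main' }
// With no authority segment, ref is undefined and callers fall back to 'HEAD';
// an authority matching /HEAD/i in any casing is normalized to 'HEAD' for the GitHub API.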
@@ -322,175 +330,3 @@ function getHashCode(s: string): number {
}
return hash;
}
interface SearchQueryMatch {
path: string;
ranges: Range[];
preview: string;
matches: Range[];
}
interface SearchQueryResults {
matches: SearchQueryMatch[];
limitHit: boolean;
}
class GitHubApi {
constructor(private readonly session: AuthenticationSession2) { }
private _graphql: typeof graphql | undefined;
private get graphql() {
if (this._graphql === undefined) {
this._graphql = graphql.defaults({
headers: {
Authorization: `Bearer ${this.token}`,
}
});
}
return this._graphql;
}
get token() {
return this.session.accessToken;
}
async filesQuery(uri: Uri) {
const { owner, repo, ref } = fromGitHubUri(uri);
try {
const resp = await new Octokit({
auth: `token ${this.token}`,
}).git.getTree({
owner: owner,
repo: repo,
recursive: '1',
tree_sha: ref ?? 'HEAD',
});
return Iterables.filterMap(resp.data.tree, p => p.type === 'blob' ? p.path : undefined);
} catch (ex) {
return [];
}
}
async searchQuery(
query: string,
uri: Uri,
options: { maxResults?: number; context?: { before?: number; after?: number } },
_token: CancellationToken,
): Promise<SearchQueryResults> {
const { owner, repo, ref } = fromGitHubUri(uri);
// If we have a specific ref, don't try to search, because GitHub search only works against the default branch
if (ref === undefined) {
return { matches: [], limitHit: true };
}
try {
const resp = await new Octokit({
auth: `token ${this.token}`,
request: {
headers: {
accept: 'application/vnd.github.v3.text-match+json',
},
}
}).search.code({
q: `${query} repo:${owner}/${repo}`,
});
// Since GitHub doesn't return ANY line numbers just fake it at the top of the file 😢
const range = new Range(0, 0, 0, 0);
const matches: SearchQueryMatch[] = [];
console.log(resp.data.items.length, resp.data.items);
let counter = 0;
let match: SearchQueryMatch;
for (const item of resp.data.items) {
for (const m of (item as typeof item & { text_matches: GitHubSearchTextMatch[] }).text_matches) {
counter++;
if (options.maxResults !== undefined && counter > options.maxResults) {
return { matches: matches, limitHit: true };
}
match = {
path: item.path,
ranges: [],
preview: m.fragment,
matches: [],
};
for (const lm of m.matches) {
let line = 0;
let startChar = 0;
let endChar = 0;
for (let i = 0; i < lm.indices[1]; i++) {
if (i === lm.indices[0]) {
startChar = endChar;
}
if (m.fragment[i] === '\n') {
line++;
endChar = 0;
} else {
endChar++;
}
}
match.ranges.push(range);
match.matches.push(new Range(line, startChar, line, endChar));
}
matches.push(match);
}
}
return { matches: matches, limitHit: false };
} catch (ex) {
return { matches: [], limitHit: true };
}
}
async fsQuery<T>(uri: Uri, innerQuery: string): Promise<T | undefined> {
try {
const query = `query fs($owner: String!, $repo: String!, $path: String) {
repository(owner: $owner, name: $repo) {
object(expression: $path) {
${innerQuery}
}
}
}`;
const { owner, repo, path, ref } = fromGitHubUri(uri);
const variables = {
owner: owner,
repo: repo,
path: `${ref ?? 'HEAD'}:${path}`,
};
const rsp = await this.query<{
repository: { object: T | null | undefined };
}>(query, variables);
return rsp?.repository?.object ?? undefined;
} catch (ex) {
return undefined;
}
}
query<T>(query: string, variables: { [key: string]: string | number }): Promise<T | undefined> {
return this.graphql(query, variables) as Promise<T | undefined>;
}
}
interface GitHubSearchTextMatch {
object_url: string;
object_type: string;
property: string;
fragment: string;
matches: GitHubSearchMatch[];
}
interface GitHubSearchMatch {
text: string;
indices: number[];
}

View File

@@ -0,0 +1,168 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { CancellationToken, commands, Disposable, scm, SourceControl, SourceControlResourceGroup, SourceControlResourceState, Uri, window, workspace } from 'vscode';
import * as nls from 'vscode-nls';
import { IChangeStore } from './changeStore';
import { GitHubApi, CommitOperation } from './github/api';
import { getRelativePath } from './extension';
const localize = nls.loadMessageBundle();
interface ScmProvider {
sourceControl: SourceControl,
groups: SourceControlResourceGroup[]
}
export class VirtualSCM implements Disposable {
private readonly providers: ScmProvider[] = [];
private disposable: Disposable;
constructor(
private readonly originalScheme: string,
private readonly github: GitHubApi,
private readonly changeStore: IChangeStore,
) {
this.registerCommands();
// TODO@eamodio listen for workspace folder changes
for (const folder of workspace.workspaceFolders ?? []) {
this.createScmProvider(folder.uri, folder.name);
}
this.disposable = Disposable.from(
changeStore.onDidChange(e => this.update(e.rootUri, e.uri)),
);
for (const { uri } of workspace.workspaceFolders ?? []) {
for (const operation of changeStore.getChanges(uri)) {
this.update(uri, operation.uri);
}
}
}
dispose() {
this.disposable.dispose();
}
private registerCommands() {
commands.registerCommand('githubBrowser.commit', (...args: any[]) => this.commitChanges(args[0]));
commands.registerCommand('githubBrowser.discardChanges', (resourceState: SourceControlResourceState) =>
this.discardChanges(resourceState.resourceUri)
);
commands.registerCommand('githubBrowser.openChanges', (resourceState: SourceControlResourceState) =>
this.openChanges(resourceState.resourceUri)
);
commands.registerCommand('githubBrowser.openFile', (resourceState: SourceControlResourceState) =>
this.openFile(resourceState.resourceUri)
);
}
async commitChanges(sourceControl: SourceControl): Promise<void> {
const operations = this.changeStore
.getChanges(sourceControl.rootUri!)
.map<CommitOperation>(operation => {
const path = getRelativePath(sourceControl.rootUri!, operation.uri);
switch (operation.type) {
case 'created':
return { type: operation.type, path: path, content: this.changeStore.getContent(operation.uri)! };
case 'changed':
return { type: operation.type, path: path, content: this.changeStore.getContent(operation.uri)! };
case 'deleted':
return { type: operation.type, path: path };
}
});
if (!operations.length) {
window.showInformationMessage(localize('no changes', "There are no changes to commit."));
return;
}
const message = sourceControl.inputBox.value;
if (message) {
const sha = await this.github.commit(this.getOriginalResource(sourceControl.rootUri!), message, operations);
if (sha !== undefined) {
this.changeStore.acceptAll(sourceControl.rootUri!);
sourceControl.inputBox.value = '';
}
}
}
discardChanges(uri: Uri): Promise<void> {
return this.changeStore.discard(uri);
}
openChanges(uri: Uri) {
return this.changeStore.openChanges(uri, this.getOriginalResource(uri));
}
openFile(uri: Uri) {
return this.changeStore.openFile(uri);
}
private update(rootUri: Uri, uri: Uri) {
const folder = workspace.getWorkspaceFolder(uri);
if (folder === undefined) {
return;
}
const provider = this.createScmProvider(rootUri, folder.name);
const group = this.createChangesGroup(provider);
group.resourceStates = this.changeStore.getChanges(rootUri).map<SourceControlResourceState>(op => {
const rs: SourceControlResourceState = {
decorations: {
strikeThrough: op.type === 'deleted'
},
resourceUri: op.uri,
command: {
command: 'githubBrowser.openChanges',
title: 'Open Changes',
}
};
rs.command!.arguments = [rs];
return rs;
});
}
private createScmProvider(rootUri: Uri, name: string) {
let provider = this.providers.find(sc => sc.sourceControl.rootUri?.toString() === rootUri.toString());
if (provider === undefined) {
const sourceControl = scm.createSourceControl('github', name, rootUri);
sourceControl.quickDiffProvider = { provideOriginalResource: uri => this.getOriginalResource(uri) };
sourceControl.acceptInputCommand = {
command: 'githubBrowser.commit',
title: 'Commit',
arguments: [sourceControl]
};
sourceControl.inputBox.placeholder = `Message (Ctrl+Enter to commit '${name}')`;
// sourceControl.inputBox.validateInput = value => value ? undefined : 'Invalid commit message';
provider = { sourceControl: sourceControl, groups: [] };
this.createChangesGroup(provider);
this.providers.push(provider);
}
return provider;
}
private createChangesGroup(provider: ScmProvider) {
let group = provider.groups.find(g => g.id === 'github.changes');
if (group === undefined) {
group = provider.sourceControl.createResourceGroup('github.changes', 'Changes');
provider.groups.push(group);
}
return group;
}
private getOriginalResource(uri: Uri, _token?: CancellationToken): Uri {
return uri.with({ scheme: this.originalScheme });
}
}

View File

@@ -0,0 +1,29 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const textDecoder = new TextDecoder();
const textEncoder = new TextEncoder();
declare let WEBWORKER: boolean;
export async function sha1(s: string | Uint8Array): Promise<string> {
while (true) {
try {
if (WEBWORKER) {
const hash = await globalThis.crypto.subtle.digest({ name: 'sha-1' }, typeof s === 'string' ? textEncoder.encode(s) : s);
// Use encodeURIComponent to avoid issues with btoa and Latin-1 characters
return globalThis.btoa(encodeURIComponent(textDecoder.decode(hash)));
} else {
return (await import('crypto')).createHash('sha1').update(s).digest('base64');
}
} catch (ex) {
if (ex instanceof ReferenceError) {
(global as any).WEBWORKER = false;
}
}
}
}
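A minimal usage sketch (the input is illustrative). WEBWORKER is defined as true by the DefinePlugin in the shared browser webpack config further below; under Node the ReferenceError fallback sets it to false and the 'crypto' branch is used:
const digest = await sha1('some file contents');
// digest is an opaque hash string; note the exact encoding differs between the
// web (btoa of the encoded digest) and Node (base64) branches above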

View File

@@ -1,11 +1,12 @@
{
"extends": "../shared.tsconfig.json",
"compilerOptions": {
"outDir": "./out",
// "experimentalDecorators": true,
// "typeRoots": [
// "./node_modules/@types"
// ]
"experimentalDecorators": true,
"lib": [
"es2018",
"dom"
],
"outDir": "./out"
},
"include": [
"src/**/*"

File diff suppressed because it is too large

View File

@@ -27,21 +27,15 @@ function getAgent(url: string | undefined = process.env.HTTPS_PROXY): Agent {
const scopes = ['repo', 'workflow'];
export async function getSession(): Promise<AuthenticationSession> {
const authenticationSessions = await authentication.getSessions('github', scopes);
if (authenticationSessions.length) {
return await authenticationSessions[0];
} else {
return await authentication.login('github', scopes);
}
return await authentication.getSession('github', scopes, { createIfNone: true });
}
let _octokit: Promise<Octokit> | undefined;
export function getOctokit(): Promise<Octokit> {
if (!_octokit) {
_octokit = getSession().then(async session => {
const token = await session.getAccessToken();
_octokit = getSession().then(session => {
const token = session.accessToken;
const agent = getAgent();
return new Octokit({

View File

@@ -17,7 +17,7 @@ class GitHubCredentialProvider implements CredentialsProvider {
}
const session = await getSession();
return { username: session.account.id, password: await session.getAccessToken() };
return { username: session.account.id, password: session.accessToken };
}
}

View File

@@ -7,23 +7,11 @@
'use strict';
const withDefaults = require('../shared.webpack.config');
const path = require('path');
const withBrowserDefaults = require('../shared.webpack.config').browser;
const clientConfig = withDefaults({
module.exports = withBrowserDefaults({
context: __dirname,
target: 'webworker',
entry: {
extension: './src/extension.ts'
},
resolve: {
alias: {
'vscode-extension-telemetry': path.resolve(__dirname, '../../build/polyfills/vscode-extension-telemetry.js'),
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js'),
},
}
});
clientConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = clientConfig;

View File

@@ -16,7 +16,7 @@
"vscode": "^1.39.0"
},
"main": "./out/extension",
"browser": "./dist/extension.js",
"browser": "./dist/browser/extension.js",
"categories": [
"Other"
],

View File

@@ -7,10 +7,10 @@
'use strict';
const withDefaults = require('../shared.webpack.config');
const withBrowserDefaults = require('../shared.webpack.config').browser;
const path = require('path');
const clientConfig = withDefaults({
module.exports = withBrowserDefaults({
target: 'webworker',
context: path.join(__dirname, 'client'),
entry: {
@@ -19,16 +19,5 @@ const clientConfig = withDefaults({
output: {
filename: 'jsonClientMain.js',
path: path.join(__dirname, 'client', 'dist', 'browser')
},
performance: {
hints: false
},
resolve: {
alias: {
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
}
}
});
clientConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = clientConfig;

View File

@@ -129,7 +129,7 @@
"dependencies": {
"request-light": "^0.3.0",
"vscode-extension-telemetry": "0.1.1",
"vscode-languageclient": "7.0.0-next.5",
"vscode-languageclient": "7.0.0-next.5.1",
"vscode-nls": "^4.1.2"
},
"devDependencies": {

View File

@@ -7,11 +7,10 @@
'use strict';
const withDefaults = require('../../shared.webpack.config');
const withBrowserDefaults = require('../../shared.webpack.config').browser;
const path = require('path');
const serverConfig = withDefaults({
target: 'webworker',
module.exports = withBrowserDefaults({
context: __dirname,
entry: {
extension: './src/browser/jsonServerMain.ts',
@@ -20,16 +19,5 @@ const serverConfig = withDefaults({
filename: 'jsonServerMain.js',
path: path.join(__dirname, 'dist', 'browser'),
libraryTarget: 'var'
},
performance: {
hints: false
},
resolve: {
alias: {
'vscode-nls': path.resolve(__dirname, '../../../build/polyfills/vscode-nls.js')
}
}
});
serverConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = serverConfig;

View File

@@ -10,7 +10,7 @@
"bin": {
"vscode-json-languageserver": "./bin/vscode-json-languageserver"
},
"main": "./out/jsonServerMain",
"main": "./out/node/jsonServerMain",
"dependencies": {
"jsonc-parser": "^2.2.1",
"request-light": "^0.3.0",

View File

@@ -125,10 +125,10 @@ vscode-jsonrpc@6.0.0-next.2:
resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-6.0.0-next.2.tgz#3d73f86d812304cb91b9fb1efee40ec60b09ed7f"
integrity sha512-dKQXRYNUY6BHALQJBJlyZyv9oWlYpbJ2vVoQNNVNPLAYQ3hzNp4zy+iSo7zGx1BPXByArJQDWTKLQh8dz3dnNw==
vscode-languageclient@7.0.0-next.5:
version "7.0.0-next.5"
resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-7.0.0-next.5.tgz#7ae84c598dff360bd2bc64322b74e10e5d0b9cd6"
integrity sha512-ec+fJg+JiNBIdbeKbzssSuORUaVdtLValtiYdNEUCUjpYE+Y6xXPtXwiZOlS/0OB9pC/RLCMxsj16UwWncQhYQ==
vscode-languageclient@7.0.0-next.5.1:
version "7.0.0-next.5.1"
resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-7.0.0-next.5.1.tgz#ed93f14e4c2cdccedf15002c7bf8ef9cb638f36c"
integrity sha512-OONvbk3IFpubwF8/Y5uPQaq5J5CEskpeET3SfK4iGlv5OUK+44JawH/SEW5wXuEPpfdMLEMZLuGLU5v5d7N7PQ==
dependencies:
semver "^6.3.0"
vscode-languageserver-protocol "3.16.0-next.4"

View File

@@ -7,26 +7,11 @@
'use strict';
const withDefaults = require('../shared.webpack.config');
const path = require('path');
const withBrowserDefaults = require('../shared.webpack.config').browser;
const clientConfig = withDefaults({
module.exports = withBrowserDefaults({
context: __dirname,
target: 'webworker',
entry: {
extension: './src/extension.ts'
},
resolve: {
alias: {
'vscode-extension-telemetry': path.resolve(__dirname, '../../build/polyfills/vscode-extension-telemetry.js'),
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js'),
},
},
performance: {
hints: false
},
}
});
clientConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = clientConfig;

View File

@@ -12,7 +12,7 @@
"vscode": "^1.20.0"
},
"main": "./out/extension",
"browser": "./dist/extension.js",
"browser": "./dist/browser/extension",
"categories": [
"Programming Languages"
],

View File

@@ -7,27 +7,14 @@
'use strict';
const withDefaults = require('../shared.webpack.config');
const path = require('path');
const withBrowserDefaults = require('../shared.webpack.config').browser;
const clientConfig = withDefaults({
target: 'webworker',
module.exports = withBrowserDefaults({
context: __dirname,
entry: {
extension: './src/mergeConflictMain.ts'
},
output: {
filename: 'mergeConflictMain.js'
},
performance: {
hints: false
},
resolve: {
alias: {
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
}
}
});
clientConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = clientConfig;

View File

@@ -17,7 +17,7 @@
"*"
],
"main": "./out/mergeConflictMain",
"browser": "./dist/mergeConflictMain",
"browser": "./dist/browser/mergeConflictMain",
"scripts": {
"compile": "gulp compile-extension:merge-conflict",
"watch": "gulp watch-extension:merge-conflict"

View File

@@ -205,9 +205,9 @@ export class AzureActiveDirectoryService {
}, 1000 * 30);
}
private async convertToSession(token: IToken): Promise<vscode.AuthenticationSession2> {
private async convertToSession(token: IToken): Promise<vscode.AuthenticationSession> {
const resolvedToken = await this.resolveAccessToken(token);
return new vscode.AuthenticationSession2(token.sessionId, resolvedToken, token.account, token.scope.split(' '));
return new vscode.AuthenticationSession(token.sessionId, resolvedToken, token.account, token.scope.split(' '));
}
private async resolveAccessToken(token: IToken): Promise<string> {
@@ -240,11 +240,11 @@ export class AzureActiveDirectoryService {
}
}
get sessions(): Promise<vscode.AuthenticationSession2[]> {
get sessions(): Promise<vscode.AuthenticationSession[]> {
return Promise.all(this._tokens.map(token => this.convertToSession(token)));
}
public async login(scope: string): Promise<vscode.AuthenticationSession2> {
public async login(scope: string): Promise<vscode.AuthenticationSession> {
Logger.info('Logging in...');
if (!scope.includes('offline_access')) {
Logger.info('Warning: The \'offline_access\' scope was not included, so the generated token will not be able to be refreshed.');
@@ -338,7 +338,7 @@ export class AzureActiveDirectoryService {
}
}
private async loginWithoutLocalServer(scope: string): Promise<vscode.AuthenticationSession2> {
private async loginWithoutLocalServer(scope: string): Promise<vscode.AuthenticationSession> {
const callbackUri = await vscode.env.asExternalUri(vscode.Uri.parse(`${vscode.env.uriScheme}://vscode.microsoft-authentication`));
const nonce = crypto.randomBytes(16).toString('base64');
const port = (callbackUri.authority.match(/:([0-9]*)$/) || [])[1] || (callbackUri.scheme === 'https' ? 443 : 80);
@@ -353,7 +353,7 @@ export class AzureActiveDirectoryService {
});
vscode.env.openExternal(uri);
const timeoutPromise = new Promise((_: (value: vscode.AuthenticationSession2) => void, reject) => {
const timeoutPromise = new Promise((_: (value: vscode.AuthenticationSession) => void, reject) => {
const wait = setTimeout(() => {
clearTimeout(wait);
reject('Login timed out.');
@@ -363,9 +363,9 @@ export class AzureActiveDirectoryService {
return Promise.race([this.handleCodeResponse(state, codeVerifier, scope), timeoutPromise]);
}
private async handleCodeResponse(state: string, codeVerifier: string, scope: string): Promise<vscode.AuthenticationSession2> {
private async handleCodeResponse(state: string, codeVerifier: string, scope: string): Promise<vscode.AuthenticationSession> {
let uriEventListener: vscode.Disposable;
return new Promise((resolve: (value: vscode.AuthenticationSession2) => void, reject) => {
return new Promise((resolve: (value: vscode.AuthenticationSession) => void, reject) => {
uriEventListener = this._uriHandler.event(async (uri: vscode.Uri) => {
try {
const query = parseQuery(uri);

View File

@@ -7,27 +7,14 @@
'use strict';
const withDefaults = require('../shared.webpack.config');
const path = require('path');
const withBrowserDefaults = require('../shared.webpack.config').browser;
const clientConfig = withDefaults({
target: 'webworker',
module.exports = withBrowserDefaults({
context: __dirname,
entry: {
extension: './src/pythonMain.ts'
},
output: {
filename: 'pythonMain.js'
},
performance: {
hints: false
},
resolve: {
alias: {
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
}
}
});
clientConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = clientConfig;

View File

@@ -8,7 +8,7 @@
"engines": { "vscode": "*" },
"activationEvents": ["onLanguage:python"],
"main": "./out/pythonMain",
"browser": "./dist/pythonMain",
"browser": "./dist/browser/pythonMain",
"extensionKind": [ "ui", "workspace" ],
"contributes": {
"languages": [{

View File

@@ -13,8 +13,9 @@ const fs = require('fs');
const merge = require('merge-options');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const { NLSBundlePlugin } = require('vscode-nls-dev/lib/webpack-bundler');
const { DefinePlugin } = require('webpack');
module.exports = function withDefaults(/**@type WebpackConfig*/extConfig) {
function withNodeDefaults(/**@type WebpackConfig*/extConfig) {
// Need to find the top-most `package.json` file
const folderName = path.relative(__dirname, extConfig.context).split(/[\\\/]/)[0];
const pkgPath = path.join(__dirname, folderName, 'package.json');
@@ -79,3 +80,66 @@ module.exports = function withDefaults(/**@type WebpackConfig*/extConfig) {
return merge(defaultConfig, extConfig);
};
function withBrowserDefaults(/**@type WebpackConfig*/extConfig) {
/** @type WebpackConfig */
let defaultConfig = {
mode: 'none', // this leaves the source code as close as possible to the original (when packaging we set this to 'production')
target: 'webworker', // extensions run in a webworker context
resolve: {
mainFields: ['module', 'main'],
extensions: ['.ts', '.js'], // support ts-files and js-files
alias: {
'vscode-nls': path.resolve(__dirname, '../build/polyfills/vscode-nls.js'),
'vscode-extension-telemetry': path.resolve(__dirname, '../build/polyfills/vscode-extension-telemetry.js')
}
},
module: {
rules: [{
test: /\.ts$/,
exclude: /node_modules/,
use: [{
// configure TypeScript loader:
// * enable sources maps for end-to-end source maps
loader: 'ts-loader',
options: {
compilerOptions: {
'sourceMap': true,
}
}
}]
}]
},
externals: {
'vscode': 'commonjs vscode', // ignored because it doesn't exist
},
performance: {
hints: false
},
output: {
// all output goes into `dist/browser`.
// packaging depends on this layout and it must not change
filename: '[name].js',
path: path.join(extConfig.context, 'dist', 'browser'),
libraryTarget: 'commonjs',
},
// yes, really source maps
devtool: 'source-map',
plugins: [
// @ts-expect-error
new CopyWebpackPlugin([
{ from: 'src', to: '.', ignore: ['**/test/**', '*.ts'] }
]),
new DefinePlugin({ WEBWORKER: JSON.stringify(true) })
]
};
return merge(defaultConfig, extConfig);
};
module.exports = withNodeDefaults;
module.exports.node = withNodeDefaults;
module.exports.browser = withBrowserDefaults;

View File

@@ -6,7 +6,7 @@
"git": {
"name": "Microsoft/vscode-mssql",
"repositoryUrl": "https://github.com/Microsoft/vscode-mssql",
"commitHash": "37a22725186b5b481b2882a78c7b9fe024c13946"
"commitHash": "750d30dc48c4c0317b63bb5f1ed3e71487bb84a1"
}
},
"license": "MIT",

View File

@@ -4,7 +4,7 @@
"If you want to provide a fix or improvement, please create a pull request against the original repository.",
"Once accepted there, we are happy to receive an update request."
],
"version": "https://github.com/Microsoft/vscode-mssql/commit/37a22725186b5b481b2882a78c7b9fe024c13946",
"version": "https://github.com/Microsoft/vscode-mssql/commit/750d30dc48c4c0317b63bb5f1ed3e71487bb84a1",
"name": "SQL",
"scopeName": "source.sql",
"patterns": [
@@ -404,7 +404,7 @@
}
},
"comment": "this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.",
"match": "(N)?(')(?:[^'\\\\]|\\\\.)*(')",
"match": "(N)?(')[^']*(')",
"name": "string.quoted.single.sql"
},
{
@@ -437,7 +437,7 @@
}
},
"comment": "this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.",
"match": "(`)(?:[^`\\\\]|\\\\.)*(`)",
"match": "(`)[^`\\\\]*(`)",
"name": "string.quoted.other.backtick.sql"
},
{
@@ -470,7 +470,7 @@
}
},
"comment": "this is faster than the next begin/end rule since sub-pattern will match till end-of-line and SQL files tend to have very long lines.",
"match": "(\")(?:[^\"#\\\\]|\\\\.)*(\")",
"match": "(\")[^\"#]*(\")",
"name": "string.quoted.double.sql"
},
{

View File

@@ -19,7 +19,11 @@
"statusBarItem.remoteForeground": "#FFF",
"statusBarItem.remoteBackground": "#16825D",
"sideBarSectionHeader.background": "#0000",
"sideBarSectionHeader.border": "#61616130"
"sideBarSectionHeader.border": "#61616130",
"notebook.cellFocusBackground": "#c8ddf150",
"notebook.cellBorderColor": "#dae3e9",
"notebook.outputContainerBackgroundColor": "#c8ddf150",
"notebook.focusedCellShadow": "#00315040"
},
"semanticHighlighting": true
}

View File

@@ -7,11 +7,10 @@
'use strict';
const path = require('path');
const withDefaults = require('../shared.webpack.config');
const withBrowserDefaults = require('../shared.webpack.config').browser;
module.exports = withDefaults({
module.exports = withBrowserDefaults({
context: __dirname,
target: 'webworker',
node: false,
entry: {
extension: './src/extension.ts',

View File

@@ -11,7 +11,7 @@
"onFileSystem:github",
"onDebug"
],
"browser": "./dist/extension",
"browser": "./dist/browser/extension",
"main": "./out/extension",
"engines": {
"vscode": "^1.25.0"

View File

@@ -15,10 +15,10 @@
import * as vscode from 'vscode';
import { MemFS } from './memfs';
declare const window: unknown;
declare const navigator: unknown;
export function activate(context: vscode.ExtensionContext) {
if (typeof window !== 'undefined') { // do not run under node.js
if (typeof navigator === 'object') { // do not run under node.js
const memFs = enableFs(context);
if (vscode.workspace.workspaceFolders?.some(f => f.uri.scheme === MemFS.scheme)) {