mirror of
https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-16 18:46:40 -05:00
SQL Operations Studio Public Preview 1 (0.23) release source code
This commit is contained in:
80
src/vs/workbench/services/backup/common/backup.ts
Normal file
80
src/vs/workbench/services/backup/common/backup.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import Uri from 'vs/base/common/uri';
|
||||
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
|
||||
import { TPromise } from 'vs/base/common/winjs.base';
|
||||
import { IResolveContentOptions, IUpdateContentOptions } from 'vs/platform/files/common/files';
|
||||
import { IRawTextSource } from 'vs/editor/common/model/textSource';
|
||||
|
||||
// Service identifier used to register and inject the backup file service.
export const IBackupFileService = createDecorator<IBackupFileService>('backupFileService');

// Shared file I/O options for backup files: backups are always written and
// read as UTF-8 text, and reads reject binary content (acceptTextOnly).
export const BACKUP_FILE_RESOLVE_OPTIONS: IResolveContentOptions = { acceptTextOnly: true, encoding: 'utf-8' };
export const BACKUP_FILE_UPDATE_OPTIONS: IUpdateContentOptions = { encoding: 'utf-8' };
||||
/**
 * A service that handles any I/O and state associated with the backup system.
 */
export interface IBackupFileService {
	_serviceBrand: any;

	/**
	 * If backups are enabled.
	 */
	backupEnabled: boolean;

	/**
	 * Finds out if there are any backups stored.
	 */
	hasBackups(): TPromise<boolean>;

	/**
	 * Loads the backup resource for a particular resource within the current workspace.
	 *
	 * @param resource The resource that is backed up.
	 * @return The backup resource if any.
	 */
	loadBackupResource(resource: Uri): TPromise<Uri>;

	/**
	 * Backs up a resource.
	 *
	 * @param resource The resource to back up.
	 * @param content The content of the resource.
	 * @param versionId The version id of the resource to backup.
	 */
	backupResource(resource: Uri, content: string, versionId?: number): TPromise<void>;

	/**
	 * Gets a list of file backups for the current workspace.
	 *
	 * @return The list of backups.
	 */
	getWorkspaceFileBackups(): TPromise<Uri[]>;

	/**
	 * Parses backup raw text content into the content, removing the metadata that is also stored
	 * in the file (the first line holds the backed up resource's URI).
	 *
	 * @param textSource The IRawTextSource read from a backup resource.
	 * @return The backup file's backed up content.
	 */
	parseBackupContent(textSource: IRawTextSource): string;

	/**
	 * Discards the backup associated with a resource if it exists.
	 *
	 * @param resource The resource whose backup is being discarded.
	 */
	discardResourceBackup(resource: Uri): TPromise<void>;

	/**
	 * Discards all backups associated with the current workspace and prevents further backups from
	 * being made.
	 */
	discardAllWorkspaceBackups(): TPromise<void>;
}
268
src/vs/workbench/services/backup/node/backupFileService.ts
Normal file
268
src/vs/workbench/services/backup/node/backupFileService.ts
Normal file
@@ -0,0 +1,268 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as path from 'path';
|
||||
import * as crypto from 'crypto';
|
||||
import * as platform from 'vs/base/common/platform';
|
||||
import pfs = require('vs/base/node/pfs');
|
||||
import Uri from 'vs/base/common/uri';
|
||||
import { Queue } from 'vs/base/common/async';
|
||||
import { IBackupFileService, BACKUP_FILE_UPDATE_OPTIONS } from 'vs/workbench/services/backup/common/backup';
|
||||
import { IFileService } from 'vs/platform/files/common/files';
|
||||
import { TPromise } from 'vs/base/common/winjs.base';
|
||||
import { readToMatchingString } from 'vs/base/node/stream';
|
||||
import { TextSource, IRawTextSource } from 'vs/editor/common/model/textSource';
|
||||
import { DefaultEndOfLine } from 'vs/editor/common/editorCommon';
|
||||
|
||||
/**
 * In-memory model of the backups that exist on disk for a workspace,
 * mapping each backup resource to the version id it was backed up at.
 */
export interface IBackupFilesModel {
	// Scans the given backup root folder and fills the model with the backups found.
	resolve(backupRoot: string): TPromise<IBackupFilesModel>;

	// Registers a backup for a resource (versionId defaults to 0 in the implementation).
	add(resource: Uri, versionId?: number): void;
	// Whether a backup is known; when versionId is given it must match the stored one.
	has(resource: Uri, versionId?: number): boolean;
	// All known backup resources.
	get(): Uri[];
	// Forgets the backup for a resource.
	remove(resource: Uri): void;
	// Number of known backups.
	count(): number;
	// Forgets all backups.
	clear(): void;
}
||||
|
||||
export class BackupFilesModel implements IBackupFilesModel {
|
||||
private cache: { [resource: string]: number /* version ID */ } = Object.create(null);
|
||||
|
||||
public resolve(backupRoot: string): TPromise<IBackupFilesModel> {
|
||||
return pfs.readDirsInDir(backupRoot).then(backupSchemas => {
|
||||
|
||||
// For all supported schemas
|
||||
return TPromise.join(backupSchemas.map(backupSchema => {
|
||||
|
||||
// Read backup directory for backups
|
||||
const backupSchemaPath = path.join(backupRoot, backupSchema);
|
||||
return pfs.readdir(backupSchemaPath).then(backupHashes => {
|
||||
|
||||
// Remember known backups in our caches
|
||||
backupHashes.forEach(backupHash => {
|
||||
const backupResource = Uri.file(path.join(backupSchemaPath, backupHash));
|
||||
this.add(backupResource);
|
||||
});
|
||||
});
|
||||
}));
|
||||
}).then(() => this, error => this);
|
||||
}
|
||||
|
||||
public add(resource: Uri, versionId = 0): void {
|
||||
this.cache[resource.toString()] = versionId;
|
||||
}
|
||||
|
||||
public count(): number {
|
||||
return Object.keys(this.cache).length;
|
||||
}
|
||||
|
||||
public has(resource: Uri, versionId?: number): boolean {
|
||||
const cachedVersionId = this.cache[resource.toString()];
|
||||
if (typeof cachedVersionId !== 'number') {
|
||||
return false; // unknown resource
|
||||
}
|
||||
|
||||
if (typeof versionId === 'number') {
|
||||
return versionId === cachedVersionId; // if we are asked with a specific version ID, make sure to test for it
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
public get(): Uri[] {
|
||||
return Object.keys(this.cache).map(k => Uri.parse(k));
|
||||
}
|
||||
|
||||
public remove(resource: Uri): void {
|
||||
delete this.cache[resource.toString()];
|
||||
}
|
||||
|
||||
public clear(): void {
|
||||
this.cache = Object.create(null);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Disk-backed implementation of IBackupFileService. Backups live under
 * backupWorkspacePath/<scheme>/<md5(file path)> and start with a metadata
 * line holding the backed up resource's URI.
 */
export class BackupFileService implements IBackupFileService {

	public _serviceBrand: any;

	// Separates the metadata line (resource URI) from the content in a backup file
	private static readonly META_MARKER = '\n';

	// Set once discardAllWorkspaceBackups ran; suppresses any further backups
	private isShuttingDown: boolean;
	// Resolves once the on-disk backup state has been scanned into a model
	private ready: TPromise<IBackupFilesModel>;
	/**
	 * Ensure IO operations on individual files are performed in order, this could otherwise lead
	 * to unexpected behavior when backups are persisted and discarded in the wrong order.
	 */
	private ioOperationQueues: { [path: string]: Queue<void> };

	constructor(
		private backupWorkspacePath: string,
		@IFileService private fileService: IFileService
	) {
		this.isShuttingDown = false;
		this.ioOperationQueues = {};
		this.ready = this.init();
	}

	public get backupEnabled(): boolean {
		return !!this.backupWorkspacePath; // Hot exit requires a backup path
	}

	// Scans the backup location on disk; yields an empty model when backups are disabled
	private init(): TPromise<IBackupFilesModel> {
		const model = new BackupFilesModel();

		if (!this.backupEnabled) {
			return TPromise.as(model);
		}

		return model.resolve(this.backupWorkspacePath);
	}

	public hasBackups(): TPromise<boolean> {
		return this.ready.then(model => {
			return model.count() > 0;
		});
	}

	public loadBackupResource(resource: Uri): TPromise<Uri> {
		return this.ready.then(model => {
			const backupResource = this.getBackupResource(resource);
			if (!backupResource) {
				return void 0;
			}

			// Return directly if we have a known backup with that resource
			if (model.has(backupResource)) {
				return backupResource;
			}

			// Otherwise: on Windows and Mac pre v1.11 we used to store backups in lowercase format
			// Therefore we also want to check if we have backups of this old format hanging around
			// TODO@Ben migration
			if (platform.isWindows || platform.isMacintosh) {
				const legacyBackupResource = this.getBackupResource(resource, true /* legacyMacWindowsFormat */);
				if (model.has(legacyBackupResource)) {
					return legacyBackupResource;
				}
			}

			return void 0;
		});
	}

	public backupResource(resource: Uri, content: string, versionId?: number): TPromise<void> {
		// No new backups once the workspace backups were discarded for shutdown
		if (this.isShuttingDown) {
			return TPromise.as(void 0);
		}

		return this.ready.then(model => {
			const backupResource = this.getBackupResource(resource);
			if (!backupResource) {
				return void 0;
			}

			if (model.has(backupResource, versionId)) {
				return void 0; // return early if backup version id matches requested one
			}

			// Add metadata to top of file
			content = `${resource.toString()}${BackupFileService.META_MARKER}${content}`;

			// Serialize writes per backup file via its I/O queue
			return this.getResourceIOQueue(backupResource).queue(() => {
				return this.fileService.updateContent(backupResource, content, BACKUP_FILE_UPDATE_OPTIONS).then(() => model.add(backupResource, versionId));
			});
		});
	}

	public discardResourceBackup(resource: Uri): TPromise<void> {
		return this.ready.then(model => {
			const backupResource = this.getBackupResource(resource);
			if (!backupResource) {
				return void 0;
			}

			return this.getResourceIOQueue(backupResource).queue(() => {
				return pfs.del(backupResource.fsPath).then(() => model.remove(backupResource));
			}).then(() => {

				// On Windows and Mac pre v1.11 we used to store backups in lowercase format
				// Therefore we also want to check if we have backups of this old format laying around
				// TODO@Ben migration
				if (platform.isWindows || platform.isMacintosh) {
					const legacyBackupResource = this.getBackupResource(resource, true /* legacyMacWindowsFormat */);
					if (model.has(legacyBackupResource)) {
						return this.getResourceIOQueue(legacyBackupResource).queue(() => {
							return pfs.del(legacyBackupResource.fsPath).then(() => model.remove(legacyBackupResource));
						});
					}
				}

				return TPromise.as(void 0);
			});
		});
	}

	// Lazily creates a per-file I/O queue; the queue disposes itself once drained
	private getResourceIOQueue(resource: Uri) {
		const key = resource.toString();
		if (!this.ioOperationQueues[key]) {
			const queue = new Queue<void>();
			queue.onFinished(() => {
				queue.dispose();
				delete this.ioOperationQueues[key];
			});
			this.ioOperationQueues[key] = queue;
		}
		return this.ioOperationQueues[key];
	}

	public discardAllWorkspaceBackups(): TPromise<void> {
		// Flag first so that in-flight backupResource calls become no-ops
		this.isShuttingDown = true;

		return this.ready.then(model => {
			if (!this.backupEnabled) {
				return void 0;
			}

			return pfs.del(this.backupWorkspacePath).then(() => model.clear());
		});
	}

	public getWorkspaceFileBackups(): TPromise<Uri[]> {
		return this.ready.then(model => {
			const readPromises: TPromise<Uri>[] = [];

			// Each backup's first line (up to META_MARKER) is the original resource's URI
			model.get().forEach(fileBackup => {
				readPromises.push(
					readToMatchingString(fileBackup.fsPath, BackupFileService.META_MARKER, 2000, 10000)
						.then(Uri.parse)
				);
			});

			return TPromise.join(readPromises);
		});
	}

	public parseBackupContent(rawTextSource: IRawTextSource): string {
		const textSource = TextSource.fromRawTextSource(rawTextSource, DefaultEndOfLine.LF);
		return textSource.lines.slice(1).join(textSource.EOL); // The first line of a backup text file is the file name
	}

	// Maps a resource to its backup location; null when backups are disabled
	protected getBackupResource(resource: Uri, legacyMacWindowsFormat?: boolean): Uri {
		if (!this.backupEnabled) {
			return null;
		}

		return Uri.file(path.join(this.backupWorkspacePath, resource.scheme, this.hashPath(resource, legacyMacWindowsFormat)));
	}

	// MD5 of the file's path; the legacy (pre v1.11) format hashed the lowercased path
	private hashPath(resource: Uri, legacyMacWindowsFormat?: boolean): string {
		const caseAwarePath = legacyMacWindowsFormat ? resource.fsPath.toLowerCase() : resource.fsPath;

		return crypto.createHash('md5').update(caseAwarePath).digest('hex');
	}
}
@@ -0,0 +1,382 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as assert from 'assert';
|
||||
import * as platform from 'vs/base/common/platform';
|
||||
import crypto = require('crypto');
|
||||
import os = require('os');
|
||||
import fs = require('fs');
|
||||
import path = require('path');
|
||||
import extfs = require('vs/base/node/extfs');
|
||||
import pfs = require('vs/base/node/pfs');
|
||||
import Uri from 'vs/base/common/uri';
|
||||
import { BackupFileService, BackupFilesModel } from 'vs/workbench/services/backup/node/backupFileService';
|
||||
import { FileService } from 'vs/workbench/services/files/node/fileService';
|
||||
import { EnvironmentService } from 'vs/platform/environment/node/environmentService';
|
||||
import { parseArgs } from 'vs/platform/environment/node/argv';
|
||||
import { RawTextSource } from 'vs/editor/common/model/textSource';
|
||||
import { TestContextService } from 'vs/workbench/test/workbenchTestServices';
|
||||
import { Workspace } from 'vs/platform/workspace/common/workspace';
|
||||
import { TestConfigurationService } from 'vs/platform/configuration/test/common/testConfigurationService';
|
||||
|
||||
class TestEnvironmentService extends EnvironmentService {
|
||||
|
||||
constructor(private _backupHome: string, private _backupWorkspacesPath: string) {
|
||||
super(parseArgs(process.argv), process.execPath);
|
||||
}
|
||||
|
||||
get backupHome(): string { return this._backupHome; }
|
||||
|
||||
get backupWorkspacesPath(): string { return this._backupWorkspacesPath; }
|
||||
}
|
||||
|
||||
// All test state lives under the OS temp folder so it can be wiped safely.
const parentDir = path.join(os.tmpdir(), 'vsctests', 'service');
const backupHome = path.join(parentDir, 'Backups');
const workspacesJsonPath = path.join(backupHome, 'workspaces.json');

// Expected backup layout: <backupHome>/<md5(workspace path)>/<scheme>/<md5(file path)>
const workspaceResource = Uri.file(platform.isWindows ? 'c:\\workspace' : '/workspace');
const workspaceBackupPath = path.join(backupHome, crypto.createHash('md5').update(workspaceResource.fsPath).digest('hex'));
const fooFile = Uri.file(platform.isWindows ? 'c:\\Foo' : '/Foo');
const barFile = Uri.file(platform.isWindows ? 'c:\\Bar' : '/Bar');
const untitledFile = Uri.from({ scheme: 'untitled', path: 'Untitled-1' });
const fooBackupPath = path.join(workspaceBackupPath, 'file', crypto.createHash('md5').update(fooFile.fsPath).digest('hex'));
// Pre-v1.11 backups on Windows/Mac hashed the lowercased path (legacy format)
const fooBackupPathLegacy = path.join(workspaceBackupPath, 'file', crypto.createHash('md5').update(fooFile.fsPath.toLowerCase()).digest('hex'));
const barBackupPath = path.join(workspaceBackupPath, 'file', crypto.createHash('md5').update(barFile.fsPath).digest('hex'));
const untitledBackupPath = path.join(workspaceBackupPath, 'untitled', crypto.createHash('md5').update(untitledFile.fsPath).digest('hex'));
// Service under test, wired to a watcher-less FileService over a test workspace.
class TestBackupFileService extends BackupFileService {
	constructor(workspace: Uri, backupHome: string, workspacesJsonPath: string) {
		const fileService = new FileService(new TestContextService(new Workspace(workspace.fsPath, workspace.fsPath, [workspace])), new TestConfigurationService(), { disableWatcher: true });

		// NOTE(review): the backupHome and workspacesJsonPath parameters are accepted but
		// unused; the service is wired to the module-level workspaceBackupPath instead —
		// confirm this is intended.
		super(workspaceBackupPath, fileService);
	}

	// Widens visibility of the protected base method so tests can call it directly.
	public getBackupResource(resource: Uri, legacyMacWindowsFormat?: boolean): Uri {
		return super.getBackupResource(resource, legacyMacWindowsFormat);
	}
}
||||
|
||||
suite('BackupFileService', () => {
|
||||
let service: TestBackupFileService;

setup(done => {
	service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);

	// Delete any existing backups completely and then re-create it.
	extfs.del(backupHome, os.tmpdir(), () => {
		pfs.mkdirp(backupHome).then(() => {
			pfs.writeFile(workspacesJsonPath, '').then(() => {
				done();
			});
		});
	});
});

teardown(done => {
	// Remove all on-disk state produced by the test
	extfs.del(backupHome, os.tmpdir(), done);
});
suite('getBackupResource', () => {
	test('should get the correct backup path for text files', () => {
		// Format should be: <backupHome>/<workspaceHash>/<scheme>/<filePathHash>
		const backupResource = fooFile;
		const workspaceHash = crypto.createHash('md5').update(workspaceResource.fsPath).digest('hex');
		const filePathHash = crypto.createHash('md5').update(backupResource.fsPath).digest('hex');
		const expectedPath = Uri.file(path.join(backupHome, workspaceHash, 'file', filePathHash)).fsPath;
		assert.equal(service.getBackupResource(backupResource).fsPath, expectedPath);
	});

	test('should get the correct backup path for untitled files', () => {
		// Format should be: <backupHome>/<workspaceHash>/<scheme>/<filePath>
		const backupResource = Uri.from({ scheme: 'untitled', path: 'Untitled-1' });
		const workspaceHash = crypto.createHash('md5').update(workspaceResource.fsPath).digest('hex');
		const filePathHash = crypto.createHash('md5').update(backupResource.fsPath).digest('hex');
		const expectedPath = Uri.file(path.join(backupHome, workspaceHash, 'untitled', filePathHash)).fsPath;
		assert.equal(service.getBackupResource(backupResource).fsPath, expectedPath);
	});
});

suite('loadBackupResource', () => {
	test('should return whether a backup resource exists', done => {
		// A backup written directly to disk must be found by a freshly created service
		pfs.mkdirp(path.dirname(fooBackupPath)).then(() => {
			fs.writeFileSync(fooBackupPath, 'foo');
			service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);
			service.loadBackupResource(fooFile).then(resource => {
				assert.ok(resource);
				assert.equal(path.basename(resource.fsPath), path.basename(fooBackupPath));
				return service.hasBackups().then(hasBackups => {
					assert.ok(hasBackups);
					done();
				});
			});
		});
	});

	test('should return whether a backup resource exists - legacy support (read old lowercase format as fallback)', done => {
		if (platform.isLinux) {
			done();
			return; // only on mac and windows
		}

		// Only the legacy (lowercased-path hash) backup exists; it must be found as fallback
		pfs.mkdirp(path.dirname(fooBackupPath)).then(() => {
			fs.writeFileSync(fooBackupPathLegacy, 'foo');
			service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);
			service.loadBackupResource(fooFile).then(resource => {
				assert.ok(resource);
				assert.equal(path.basename(resource.fsPath), path.basename(fooBackupPathLegacy));
				return service.hasBackups().then(hasBackups => {
					assert.ok(hasBackups);
					done();
				});
			});
		});
	});

	test('should return whether a backup resource exists - legacy support #2 (both cases present, return case sensitive backup)', done => {
		if (platform.isLinux) {
			done();
			return; // only on mac and windows
		}

		// When both formats exist, the case-sensitive one wins
		pfs.mkdirp(path.dirname(fooBackupPath)).then(() => {
			fs.writeFileSync(fooBackupPath, 'foo');
			fs.writeFileSync(fooBackupPathLegacy, 'foo');
			service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);
			service.loadBackupResource(fooFile).then(resource => {
				assert.ok(resource);
				assert.equal(path.basename(resource.fsPath), path.basename(fooBackupPath));
				return service.hasBackups().then(hasBackups => {
					assert.ok(hasBackups);
					done();
				});
			});
		});
	});
});
|
||||
suite('backupResource', () => {
	test('text file', function (done: () => void) {
		service.backupResource(fooFile, 'test').then(() => {
			assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
			assert.equal(fs.existsSync(fooBackupPath), true);
			// Backup file layout: '<resource URI>\n<content>'
			assert.equal(fs.readFileSync(fooBackupPath), `${fooFile.toString()}\ntest`);
			done();
		});
	});

	test('untitled file', function (done: () => void) {
		service.backupResource(untitledFile, 'test').then(() => {
			assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
			assert.equal(fs.existsSync(untitledBackupPath), true);
			assert.equal(fs.readFileSync(untitledBackupPath), `${untitledFile.toString()}\ntest`);
			done();
		});
	});
});
|
||||
suite('discardResourceBackup', () => {
|
||||
test('text file', function (done: () => void) {
|
||||
service.backupResource(fooFile, 'test').then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
service.discardResourceBackup(fooFile).then(() => {
|
||||
assert.equal(fs.existsSync(fooBackupPath), false);
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('untitled file', function (done: () => void) {
|
||||
service.backupResource(untitledFile, 'test').then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
service.discardResourceBackup(untitledFile).then(() => {
|
||||
assert.equal(fs.existsSync(untitledBackupPath), false);
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('text file - legacy support (dicard lowercase backup file if present)', done => {
|
||||
if (platform.isLinux) {
|
||||
done();
|
||||
return; // only on mac and windows
|
||||
}
|
||||
|
||||
pfs.mkdirp(path.dirname(fooBackupPath)).then(() => {
|
||||
fs.writeFileSync(fooBackupPathLegacy, 'foo');
|
||||
service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);
|
||||
service.backupResource(fooFile, 'test').then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 2);
|
||||
service.discardResourceBackup(fooFile).then(() => {
|
||||
assert.equal(fs.existsSync(fooBackupPath), false);
|
||||
assert.equal(fs.existsSync(fooBackupPathLegacy), false);
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
suite('discardAllWorkspaceBackups', () => {
	test('text file', function (done: () => void) {
		service.backupResource(fooFile, 'test').then(() => {
			assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
			service.backupResource(barFile, 'test').then(() => {
				assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 2);
				service.discardAllWorkspaceBackups().then(() => {
					// The whole scheme folder is removed, not just individual backups
					assert.equal(fs.existsSync(fooBackupPath), false);
					assert.equal(fs.existsSync(barBackupPath), false);
					assert.equal(fs.existsSync(path.join(workspaceBackupPath, 'file')), false);
					done();
				});
			});
		});
	});

	test('untitled file', function (done: () => void) {
		service.backupResource(untitledFile, 'test').then(() => {
			assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
			service.discardAllWorkspaceBackups().then(() => {
				assert.equal(fs.existsSync(untitledBackupPath), false);
				assert.equal(fs.existsSync(path.join(workspaceBackupPath, 'untitled')), false);
				done();
			});
		});
	});

	test('should disable further backups', function (done: () => void) {
		service.discardAllWorkspaceBackups().then(() => {
			// After discarding all workspace backups, backupResource becomes a no-op
			service.backupResource(untitledFile, 'test').then(() => {
				assert.equal(fs.existsSync(workspaceBackupPath), false);
				done();
			});
		});
	});
});

suite('getWorkspaceFileBackups', () => {
	test('("file") - text file', done => {
		service.backupResource(fooFile, `test`).then(() => {
			service.getWorkspaceFileBackups().then(textFiles => {
				assert.deepEqual(textFiles.map(f => f.fsPath), [fooFile.fsPath]);
				service.backupResource(barFile, `test`).then(() => {
					service.getWorkspaceFileBackups().then(textFiles => {
						assert.deepEqual(textFiles.map(f => f.fsPath), [fooFile.fsPath, barFile.fsPath]);
						done();
					});
				});
			});
		});
	});

	test('("file") - untitled file', done => {
		service.backupResource(untitledFile, `test`).then(() => {
			service.getWorkspaceFileBackups().then(textFiles => {
				assert.deepEqual(textFiles.map(f => f.fsPath), [untitledFile.fsPath]);
				done();
			});
		});
	});

	test('("untitled") - untitled file', done => {
		service.backupResource(untitledFile, `test`).then(() => {
			service.getWorkspaceFileBackups().then(textFiles => {
				// The untitled resource round-trips through the metadata line as its path only
				assert.deepEqual(textFiles.map(f => f.fsPath), ['Untitled-1']);
				done();
			});
		});
	});
});
||||
test('parseBackupContent', () => {
|
||||
test('should separate metadata from content', () => {
|
||||
const textSource = RawTextSource.fromString('metadata\ncontent');
|
||||
assert.equal(service.parseBackupContent(textSource), 'content');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
suite('BackupFilesModel', () => {
	test('simple', () => {
		const model = new BackupFilesModel();

		const resource1 = Uri.file('test.html');

		assert.equal(model.has(resource1), false);

		model.add(resource1);

		// add() without a versionId records version 0
		assert.equal(model.has(resource1), true);
		assert.equal(model.has(resource1, 0), true);
		assert.equal(model.has(resource1, 1), false);

		model.remove(resource1);

		assert.equal(model.has(resource1), false);

		model.add(resource1);

		assert.equal(model.has(resource1), true);
		assert.equal(model.has(resource1, 0), true);
		assert.equal(model.has(resource1, 1), false);

		model.clear();

		assert.equal(model.has(resource1), false);

		// An explicit versionId must match exactly when queried
		model.add(resource1, 1);

		assert.equal(model.has(resource1), true);
		assert.equal(model.has(resource1, 0), false);
		assert.equal(model.has(resource1, 1), true);

		const resource2 = Uri.file('test1.html');
		const resource3 = Uri.file('test2.html');
		const resource4 = Uri.file('test3.html');

		model.add(resource2);
		model.add(resource3);
		model.add(resource4);

		assert.equal(model.has(resource1), true);
		assert.equal(model.has(resource2), true);
		assert.equal(model.has(resource3), true);
		assert.equal(model.has(resource4), true);
	});

	test('resolve', (done) => {
		// resolve() must pick up backup files already present on disk
		pfs.mkdirp(path.dirname(fooBackupPath)).then(() => {
			fs.writeFileSync(fooBackupPath, 'foo');

			const model = new BackupFilesModel();

			model.resolve(workspaceBackupPath).then(model => {
				assert.equal(model.has(Uri.file(fooBackupPath)), true);

				done();
			});
		});
	});

	test('get', () => {
		const model = new BackupFilesModel();

		assert.deepEqual(model.get(), []);

		const file1 = Uri.file('/root/file/foo.html');
		const file2 = Uri.file('/root/file/bar.html');
		const untitled = Uri.file('/root/untitled/bar.html');

		model.add(file1);
		model.add(file2);
		model.add(untitled);

		assert.deepEqual(model.get().map(f => f.fsPath), [file1.fsPath, file2.fsPath, untitled.fsPath]);
	});
});
Reference in New Issue
Block a user