mirror of
https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-16 10:58:30 -05:00
Revert "Revert "Merge from vscode ada4bddb8edc69eea6ebaaa0e88c5f903cbd43d8 (#5529)" (#5553)" (#5562)
This reverts commit c9a4f8f664.
This commit is contained in:
@@ -9,6 +9,11 @@ import { ITextBufferFactory, ITextSnapshot } from 'vs/editor/common/model';
|
||||
|
||||
export const IBackupFileService = createDecorator<IBackupFileService>('backupFileService');
|
||||
|
||||
export interface IResolvedBackup<T extends object> {
|
||||
value: ITextBufferFactory;
|
||||
meta?: T;
|
||||
}
|
||||
|
||||
/**
|
||||
* A service that handles any I/O and state associated with the backup system.
|
||||
*/
|
||||
@@ -42,8 +47,10 @@ export interface IBackupFileService {
|
||||
* @param resource The resource to back up.
|
||||
* @param content The content of the resource as snapshot.
|
||||
* @param versionId The version id of the resource to backup.
|
||||
* @param meta The (optional) meta data of the resource to backup. This information
|
||||
* can be restored later when loading the backup again.
|
||||
*/
|
||||
backupResource(resource: URI, content: ITextSnapshot, versionId?: number): Promise<void>;
|
||||
backupResource<T extends object>(resource: URI, content: ITextSnapshot, versionId?: number, meta?: T): Promise<void>;
|
||||
|
||||
/**
|
||||
* Gets a list of file backups for the current workspace.
|
||||
@@ -55,10 +62,10 @@ export interface IBackupFileService {
|
||||
/**
|
||||
* Resolves the backup for the given resource.
|
||||
*
|
||||
* @param value The contents from a backup resource as stream.
|
||||
* @return The backup file's backed up content as text buffer factory.
|
||||
* @param resource The resource to get the backup for.
|
||||
* @return The backup file's backed up content and metadata if available.
|
||||
*/
|
||||
resolveBackupContent(backup: URI): Promise<ITextBufferFactory | undefined>;
|
||||
resolveBackupContent<T extends object>(resource: URI): Promise<IResolvedBackup<T>>;
|
||||
|
||||
/**
|
||||
* Discards the backup associated with a resource if it exists..
|
||||
|
||||
@@ -3,94 +3,112 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as path from 'vs/base/common/path';
|
||||
import * as crypto from 'crypto';
|
||||
import * as pfs from 'vs/base/node/pfs';
|
||||
import { URI as Uri } from 'vs/base/common/uri';
|
||||
import { join } from 'vs/base/common/path';
|
||||
import { joinPath } from 'vs/base/common/resources';
|
||||
import { createHash } from 'crypto';
|
||||
import { URI } from 'vs/base/common/uri';
|
||||
import { coalesce } from 'vs/base/common/arrays';
|
||||
import { equals, deepClone } from 'vs/base/common/objects';
|
||||
import { ResourceQueue } from 'vs/base/common/async';
|
||||
import { IBackupFileService } from 'vs/workbench/services/backup/common/backup';
|
||||
import { IBackupFileService, IResolvedBackup } from 'vs/workbench/services/backup/common/backup';
|
||||
import { IFileService } from 'vs/platform/files/common/files';
|
||||
import { readToMatchingString } from 'vs/base/node/stream';
|
||||
import { ITextBufferFactory, ITextSnapshot } from 'vs/editor/common/model';
|
||||
import { ITextSnapshot } from 'vs/editor/common/model';
|
||||
import { createTextBufferFactoryFromStream, createTextBufferFactoryFromSnapshot } from 'vs/editor/common/model/textModel';
|
||||
import { keys } from 'vs/base/common/map';
|
||||
import { keys, ResourceMap } from 'vs/base/common/map';
|
||||
import { Schemas } from 'vs/base/common/network';
|
||||
import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService';
|
||||
import { registerSingleton } from 'vs/platform/instantiation/common/extensions';
|
||||
import { VSBuffer } from 'vs/base/common/buffer';
|
||||
import { TextSnapshotReadable } from 'vs/workbench/services/textfile/common/textfiles';
|
||||
import { ServiceIdentifier } from 'vs/platform/instantiation/common/instantiation';
|
||||
|
||||
export interface IBackupFilesModel {
|
||||
resolve(backupRoot: string): Promise<IBackupFilesModel>;
|
||||
resolve(backupRoot: URI): Promise<IBackupFilesModel>;
|
||||
|
||||
add(resource: Uri, versionId?: number): void;
|
||||
has(resource: Uri, versionId?: number): boolean;
|
||||
get(): Uri[];
|
||||
remove(resource: Uri): void;
|
||||
add(resource: URI, versionId?: number, meta?: object): void;
|
||||
has(resource: URI, versionId?: number, meta?: object): boolean;
|
||||
get(): URI[];
|
||||
remove(resource: URI): void;
|
||||
count(): number;
|
||||
clear(): void;
|
||||
}
|
||||
|
||||
interface IBackupCacheEntry {
|
||||
versionId?: number;
|
||||
meta?: object;
|
||||
}
|
||||
|
||||
export class BackupFilesModel implements IBackupFilesModel {
|
||||
private cache: { [resource: string]: number /* version ID */ } = Object.create(null);
|
||||
private cache: ResourceMap<IBackupCacheEntry> = new ResourceMap();
|
||||
|
||||
resolve(backupRoot: string): Promise<IBackupFilesModel> {
|
||||
return pfs.readDirsInDir(backupRoot).then(backupSchemas => {
|
||||
constructor(private fileService: IFileService) { }
|
||||
|
||||
// For all supported schemas
|
||||
return Promise.all(backupSchemas.map(backupSchema => {
|
||||
async resolve(backupRoot: URI): Promise<IBackupFilesModel> {
|
||||
try {
|
||||
const backupRootStat = await this.fileService.resolve(backupRoot);
|
||||
if (backupRootStat.children) {
|
||||
await Promise.all(backupRootStat.children
|
||||
.filter(child => child.isDirectory)
|
||||
.map(async backupSchema => {
|
||||
|
||||
// Read backup directory for backups
|
||||
const backupSchemaPath = path.join(backupRoot, backupSchema);
|
||||
return pfs.readdir(backupSchemaPath).then(backupHashes => {
|
||||
// Read backup directory for backups
|
||||
const backupSchemaStat = await this.fileService.resolve(backupSchema.resource);
|
||||
|
||||
// Remember known backups in our caches
|
||||
backupHashes.forEach(backupHash => {
|
||||
const backupResource = Uri.file(path.join(backupSchemaPath, backupHash));
|
||||
this.add(backupResource);
|
||||
});
|
||||
});
|
||||
}));
|
||||
}).then(() => this, error => this);
|
||||
// Remember known backups in our caches
|
||||
if (backupSchemaStat.children) {
|
||||
backupSchemaStat.children.forEach(backupHash => this.add(backupHash.resource));
|
||||
}
|
||||
}));
|
||||
}
|
||||
} catch (error) {
|
||||
// ignore any errors
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
add(resource: Uri, versionId = 0): void {
|
||||
this.cache[resource.toString()] = versionId;
|
||||
add(resource: URI, versionId = 0, meta?: object): void {
|
||||
this.cache.set(resource, { versionId, meta: deepClone(meta) }); // make sure to not store original meta in our cache...
|
||||
}
|
||||
|
||||
count(): number {
|
||||
return Object.keys(this.cache).length;
|
||||
return this.cache.size;
|
||||
}
|
||||
|
||||
has(resource: Uri, versionId?: number): boolean {
|
||||
const cachedVersionId = this.cache[resource.toString()];
|
||||
if (typeof cachedVersionId !== 'number') {
|
||||
has(resource: URI, versionId?: number, meta?: object): boolean {
|
||||
const entry = this.cache.get(resource);
|
||||
if (!entry) {
|
||||
return false; // unknown resource
|
||||
}
|
||||
|
||||
if (typeof versionId === 'number') {
|
||||
return versionId === cachedVersionId; // if we are asked with a specific version ID, make sure to test for it
|
||||
if (typeof versionId === 'number' && versionId !== entry.versionId) {
|
||||
return false; // different versionId
|
||||
}
|
||||
|
||||
if (meta && !equals(meta, entry.meta)) {
|
||||
return false; // different metadata
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
get(): Uri[] {
|
||||
return Object.keys(this.cache).map(k => Uri.parse(k));
|
||||
get(): URI[] {
|
||||
return this.cache.keys();
|
||||
}
|
||||
|
||||
remove(resource: Uri): void {
|
||||
delete this.cache[resource.toString()];
|
||||
remove(resource: URI): void {
|
||||
this.cache.delete(resource);
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.cache = Object.create(null);
|
||||
this.cache.clear();
|
||||
}
|
||||
}
|
||||
|
||||
export class BackupFileService implements IBackupFileService {
|
||||
|
||||
_serviceBrand: any;
|
||||
_serviceBrand: ServiceIdentifier<IBackupFileService>;
|
||||
|
||||
private impl: IBackupFileService;
|
||||
|
||||
@@ -116,15 +134,15 @@ export class BackupFileService implements IBackupFileService {
|
||||
return this.impl.hasBackups();
|
||||
}
|
||||
|
||||
loadBackupResource(resource: Uri): Promise<Uri | undefined> {
|
||||
loadBackupResource(resource: URI): Promise<URI | undefined> {
|
||||
return this.impl.loadBackupResource(resource);
|
||||
}
|
||||
|
||||
backupResource(resource: Uri, content: ITextSnapshot, versionId?: number): Promise<void> {
|
||||
return this.impl.backupResource(resource, content, versionId);
|
||||
backupResource<T extends object>(resource: URI, content: ITextSnapshot, versionId?: number, meta?: T): Promise<void> {
|
||||
return this.impl.backupResource(resource, content, versionId, meta);
|
||||
}
|
||||
|
||||
discardResourceBackup(resource: Uri): Promise<void> {
|
||||
discardResourceBackup(resource: URI): Promise<void> {
|
||||
return this.impl.discardResourceBackup(resource);
|
||||
}
|
||||
|
||||
@@ -132,26 +150,28 @@ export class BackupFileService implements IBackupFileService {
|
||||
return this.impl.discardAllWorkspaceBackups();
|
||||
}
|
||||
|
||||
getWorkspaceFileBackups(): Promise<Uri[]> {
|
||||
getWorkspaceFileBackups(): Promise<URI[]> {
|
||||
return this.impl.getWorkspaceFileBackups();
|
||||
}
|
||||
|
||||
resolveBackupContent(backup: Uri): Promise<ITextBufferFactory | undefined> {
|
||||
resolveBackupContent<T extends object>(backup: URI): Promise<IResolvedBackup<T>> {
|
||||
return this.impl.resolveBackupContent(backup);
|
||||
}
|
||||
|
||||
toBackupResource(resource: Uri): Uri {
|
||||
toBackupResource(resource: URI): URI {
|
||||
return this.impl.toBackupResource(resource);
|
||||
}
|
||||
}
|
||||
|
||||
class BackupFileServiceImpl implements IBackupFileService {
|
||||
|
||||
private static readonly META_MARKER = '\n';
|
||||
private static readonly PREAMBLE_END_MARKER = '\n';
|
||||
private static readonly PREAMBLE_META_SEPARATOR = ' '; // using a character that is know to be escaped in a URI as separator
|
||||
private static readonly PREAMBLE_MAX_LENGTH = 10000;
|
||||
|
||||
_serviceBrand: any;
|
||||
|
||||
private backupWorkspacePath: string;
|
||||
private backupWorkspacePath: URI;
|
||||
|
||||
private isShuttingDown: boolean;
|
||||
private ready: Promise<IBackupFilesModel>;
|
||||
@@ -168,115 +188,165 @@ class BackupFileServiceImpl implements IBackupFileService {
|
||||
}
|
||||
|
||||
initialize(backupWorkspacePath: string): void {
|
||||
this.backupWorkspacePath = backupWorkspacePath;
|
||||
this.backupWorkspacePath = URI.file(backupWorkspacePath);
|
||||
|
||||
this.ready = this.init();
|
||||
}
|
||||
|
||||
private init(): Promise<IBackupFilesModel> {
|
||||
const model = new BackupFilesModel();
|
||||
const model = new BackupFilesModel(this.fileService);
|
||||
|
||||
return model.resolve(this.backupWorkspacePath);
|
||||
}
|
||||
|
||||
hasBackups(): Promise<boolean> {
|
||||
return this.ready.then(model => {
|
||||
return model.count() > 0;
|
||||
});
|
||||
async hasBackups(): Promise<boolean> {
|
||||
const model = await this.ready;
|
||||
|
||||
return model.count() > 0;
|
||||
}
|
||||
|
||||
loadBackupResource(resource: Uri): Promise<Uri | undefined> {
|
||||
return this.ready.then(model => {
|
||||
async loadBackupResource(resource: URI): Promise<URI | undefined> {
|
||||
const model = await this.ready;
|
||||
|
||||
// Return directly if we have a known backup with that resource
|
||||
const backupResource = this.toBackupResource(resource);
|
||||
if (model.has(backupResource)) {
|
||||
return backupResource;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
});
|
||||
}
|
||||
|
||||
backupResource(resource: Uri, content: ITextSnapshot, versionId?: number): Promise<void> {
|
||||
if (this.isShuttingDown) {
|
||||
return Promise.resolve();
|
||||
// Return directly if we have a known backup with that resource
|
||||
const backupResource = this.toBackupResource(resource);
|
||||
if (model.has(backupResource)) {
|
||||
return backupResource;
|
||||
}
|
||||
|
||||
return this.ready.then(model => {
|
||||
const backupResource = this.toBackupResource(resource);
|
||||
if (model.has(backupResource, versionId)) {
|
||||
return undefined; // return early if backup version id matches requested one
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async backupResource<T extends object>(resource: URI, content: ITextSnapshot, versionId?: number, meta?: T): Promise<void> {
|
||||
if (this.isShuttingDown) {
|
||||
return;
|
||||
}
|
||||
|
||||
const model = await this.ready;
|
||||
|
||||
const backupResource = this.toBackupResource(resource);
|
||||
if (model.has(backupResource, versionId, meta)) {
|
||||
return; // return early if backup version id matches requested one
|
||||
}
|
||||
|
||||
return this.ioOperationQueues.queueFor(backupResource).queue(async () => {
|
||||
let preamble: string | undefined = undefined;
|
||||
|
||||
// With Metadata: URI + META-START + Meta + END
|
||||
if (meta) {
|
||||
const preambleWithMeta = `${resource.toString()}${BackupFileServiceImpl.PREAMBLE_META_SEPARATOR}${JSON.stringify(meta)}${BackupFileServiceImpl.PREAMBLE_END_MARKER}`;
|
||||
if (preambleWithMeta.length < BackupFileServiceImpl.PREAMBLE_MAX_LENGTH) {
|
||||
preamble = preambleWithMeta;
|
||||
}
|
||||
}
|
||||
|
||||
return this.ioOperationQueues.queueFor(backupResource).queue(() => {
|
||||
const preamble = `${resource.toString()}${BackupFileServiceImpl.META_MARKER}`;
|
||||
// Without Metadata: URI + END
|
||||
if (!preamble) {
|
||||
preamble = `${resource.toString()}${BackupFileServiceImpl.PREAMBLE_END_MARKER}`;
|
||||
}
|
||||
|
||||
// Update content with value
|
||||
return this.fileService.writeFile(backupResource, new TextSnapshotReadable(content, preamble)).then(() => model.add(backupResource, versionId));
|
||||
});
|
||||
// Update content with value
|
||||
await this.fileService.writeFile(backupResource, new TextSnapshotReadable(content, preamble));
|
||||
|
||||
// Update model
|
||||
model.add(backupResource, versionId, meta);
|
||||
});
|
||||
}
|
||||
|
||||
discardResourceBackup(resource: Uri): Promise<void> {
|
||||
return this.ready.then(model => {
|
||||
const backupResource = this.toBackupResource(resource);
|
||||
async discardResourceBackup(resource: URI): Promise<void> {
|
||||
const model = await this.ready;
|
||||
const backupResource = this.toBackupResource(resource);
|
||||
|
||||
return this.ioOperationQueues.queueFor(backupResource).queue(() => {
|
||||
return pfs.rimraf(backupResource.fsPath, pfs.RimRafMode.MOVE).then(() => model.remove(backupResource));
|
||||
});
|
||||
return this.ioOperationQueues.queueFor(backupResource).queue(async () => {
|
||||
await this.fileService.del(backupResource, { recursive: true });
|
||||
|
||||
model.remove(backupResource);
|
||||
});
|
||||
}
|
||||
|
||||
discardAllWorkspaceBackups(): Promise<void> {
|
||||
async discardAllWorkspaceBackups(): Promise<void> {
|
||||
this.isShuttingDown = true;
|
||||
|
||||
return this.ready.then(model => {
|
||||
return pfs.rimraf(this.backupWorkspacePath, pfs.RimRafMode.MOVE).then(() => model.clear());
|
||||
});
|
||||
const model = await this.ready;
|
||||
|
||||
await this.fileService.del(this.backupWorkspacePath, { recursive: true });
|
||||
|
||||
model.clear();
|
||||
}
|
||||
|
||||
getWorkspaceFileBackups(): Promise<Uri[]> {
|
||||
return this.ready.then(model => {
|
||||
const readPromises: Promise<Uri>[] = [];
|
||||
async getWorkspaceFileBackups(): Promise<URI[]> {
|
||||
const model = await this.ready;
|
||||
|
||||
model.get().forEach(fileBackup => {
|
||||
readPromises.push(
|
||||
readToMatchingString(fileBackup.fsPath, BackupFileServiceImpl.META_MARKER, 2000, 10000).then(Uri.parse)
|
||||
);
|
||||
});
|
||||
const backups = await Promise.all(model.get().map(async fileBackup => {
|
||||
const backupPreamble = await readToMatchingString(fileBackup.fsPath, BackupFileServiceImpl.PREAMBLE_END_MARKER, BackupFileServiceImpl.PREAMBLE_MAX_LENGTH / 5, BackupFileServiceImpl.PREAMBLE_MAX_LENGTH);
|
||||
if (!backupPreamble) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return Promise.all(readPromises);
|
||||
});
|
||||
// Preamble with metadata: URI + META-START + Meta + END
|
||||
const metaStartIndex = backupPreamble.indexOf(BackupFileServiceImpl.PREAMBLE_META_SEPARATOR);
|
||||
if (metaStartIndex > 0) {
|
||||
return URI.parse(backupPreamble.substring(0, metaStartIndex));
|
||||
}
|
||||
|
||||
// Preamble without metadata: URI + END
|
||||
else {
|
||||
return URI.parse(backupPreamble);
|
||||
}
|
||||
}));
|
||||
|
||||
return coalesce(backups);
|
||||
}
|
||||
|
||||
resolveBackupContent(backup: Uri): Promise<ITextBufferFactory> {
|
||||
return this.fileService.readFileStream(backup).then(content => {
|
||||
async resolveBackupContent<T extends object>(backup: URI): Promise<IResolvedBackup<T>> {
|
||||
|
||||
// Add a filter method to filter out everything until the meta marker
|
||||
let metaFound = false;
|
||||
const metaPreambleFilter = (chunk: VSBuffer) => {
|
||||
const chunkString = chunk.toString();
|
||||
// Metadata extraction
|
||||
let metaRaw = '';
|
||||
let metaEndFound = false;
|
||||
|
||||
if (!metaFound && chunk) {
|
||||
const metaIndex = chunkString.indexOf(BackupFileServiceImpl.META_MARKER);
|
||||
if (metaIndex === -1) {
|
||||
return VSBuffer.fromString(''); // meta not yet found, return empty string
|
||||
}
|
||||
// Add a filter method to filter out everything until the meta end marker
|
||||
const metaPreambleFilter = (chunk: VSBuffer) => {
|
||||
const chunkString = chunk.toString();
|
||||
|
||||
metaFound = true;
|
||||
return VSBuffer.fromString(chunkString.substr(metaIndex + 1)); // meta found, return everything after
|
||||
if (!metaEndFound) {
|
||||
const metaEndIndex = chunkString.indexOf(BackupFileServiceImpl.PREAMBLE_END_MARKER);
|
||||
if (metaEndIndex === -1) {
|
||||
metaRaw += chunkString;
|
||||
|
||||
return VSBuffer.fromString(''); // meta not yet found, return empty string
|
||||
}
|
||||
|
||||
return chunk;
|
||||
};
|
||||
metaEndFound = true;
|
||||
metaRaw += chunkString.substring(0, metaEndIndex); // ensure to get last chunk from metadata
|
||||
|
||||
return createTextBufferFactoryFromStream(content.value, metaPreambleFilter);
|
||||
});
|
||||
return VSBuffer.fromString(chunkString.substr(metaEndIndex + 1)); // meta found, return everything after
|
||||
}
|
||||
|
||||
return chunk;
|
||||
};
|
||||
|
||||
// Read backup into factory
|
||||
const content = await this.fileService.readFileStream(backup);
|
||||
const factory = await createTextBufferFactoryFromStream(content.value, metaPreambleFilter);
|
||||
|
||||
// Trigger read for meta data extraction from the filter above
|
||||
factory.getFirstLineText(1);
|
||||
|
||||
let meta: T | undefined;
|
||||
const metaStartIndex = metaRaw.indexOf(BackupFileServiceImpl.PREAMBLE_META_SEPARATOR);
|
||||
if (metaStartIndex !== -1) {
|
||||
try {
|
||||
meta = JSON.parse(metaRaw.substr(metaStartIndex + 1));
|
||||
} catch (error) {
|
||||
// ignore JSON parse errors
|
||||
}
|
||||
}
|
||||
|
||||
return { value: factory, meta };
|
||||
}
|
||||
|
||||
toBackupResource(resource: Uri): Uri {
|
||||
return Uri.file(path.join(this.backupWorkspacePath, resource.scheme, hashPath(resource)));
|
||||
toBackupResource(resource: URI): URI {
|
||||
return joinPath(this.backupWorkspacePath, resource.scheme, hashPath(resource));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -290,7 +360,7 @@ export class InMemoryBackupFileService implements IBackupFileService {
|
||||
return Promise.resolve(this.backups.size > 0);
|
||||
}
|
||||
|
||||
loadBackupResource(resource: Uri): Promise<Uri | undefined> {
|
||||
loadBackupResource(resource: URI): Promise<URI | undefined> {
|
||||
const backupResource = this.toBackupResource(resource);
|
||||
if (this.backups.has(backupResource.toString())) {
|
||||
return Promise.resolve(backupResource);
|
||||
@@ -299,27 +369,27 @@ export class InMemoryBackupFileService implements IBackupFileService {
|
||||
return Promise.resolve(undefined);
|
||||
}
|
||||
|
||||
backupResource(resource: Uri, content: ITextSnapshot, versionId?: number): Promise<void> {
|
||||
backupResource<T extends object>(resource: URI, content: ITextSnapshot, versionId?: number, meta?: T): Promise<void> {
|
||||
const backupResource = this.toBackupResource(resource);
|
||||
this.backups.set(backupResource.toString(), content);
|
||||
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
resolveBackupContent(backupResource: Uri): Promise<ITextBufferFactory | undefined> {
|
||||
resolveBackupContent<T extends object>(backupResource: URI): Promise<IResolvedBackup<T>> {
|
||||
const snapshot = this.backups.get(backupResource.toString());
|
||||
if (snapshot) {
|
||||
return Promise.resolve(createTextBufferFactoryFromSnapshot(snapshot));
|
||||
return Promise.resolve({ value: createTextBufferFactoryFromSnapshot(snapshot) });
|
||||
}
|
||||
|
||||
return Promise.resolve(undefined);
|
||||
return Promise.reject('Unexpected backup resource to resolve');
|
||||
}
|
||||
|
||||
getWorkspaceFileBackups(): Promise<Uri[]> {
|
||||
return Promise.resolve(keys(this.backups).map(key => Uri.parse(key)));
|
||||
getWorkspaceFileBackups(): Promise<URI[]> {
|
||||
return Promise.resolve(keys(this.backups).map(key => URI.parse(key)));
|
||||
}
|
||||
|
||||
discardResourceBackup(resource: Uri): Promise<void> {
|
||||
discardResourceBackup(resource: URI): Promise<void> {
|
||||
this.backups.delete(this.toBackupResource(resource).toString());
|
||||
|
||||
return Promise.resolve();
|
||||
@@ -331,17 +401,17 @@ export class InMemoryBackupFileService implements IBackupFileService {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
toBackupResource(resource: Uri): Uri {
|
||||
return Uri.file(path.join(resource.scheme, hashPath(resource)));
|
||||
toBackupResource(resource: URI): URI {
|
||||
return URI.file(join(resource.scheme, hashPath(resource)));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Exported only for testing
|
||||
*/
|
||||
export function hashPath(resource: Uri): string {
|
||||
export function hashPath(resource: URI): string {
|
||||
const str = resource.scheme === Schemas.file || resource.scheme === Schemas.untitled ? resource.fsPath : resource.toString();
|
||||
return crypto.createHash('md5').update(str).digest('hex');
|
||||
return createHash('md5').update(str).digest('hex');
|
||||
}
|
||||
|
||||
registerSingleton(IBackupFileService, BackupFileService);
|
||||
@@ -1,402 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as assert from 'assert';
|
||||
import * as platform from 'vs/base/common/platform';
|
||||
import * as crypto from 'crypto';
|
||||
import * as os from 'os';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'vs/base/common/path';
|
||||
import * as pfs from 'vs/base/node/pfs';
|
||||
import { URI as Uri } from 'vs/base/common/uri';
|
||||
import { BackupFileService, BackupFilesModel, hashPath } from 'vs/workbench/services/backup/node/backupFileService';
|
||||
import { TextModel, createTextBufferFactory } from 'vs/editor/common/model/textModel';
|
||||
import { getRandomTestPath } from 'vs/base/test/node/testUtils';
|
||||
import { DefaultEndOfLine } from 'vs/editor/common/model';
|
||||
import { Schemas } from 'vs/base/common/network';
|
||||
import { IWindowConfiguration } from 'vs/platform/windows/common/windows';
|
||||
import { FileService } from 'vs/workbench/services/files/common/fileService';
|
||||
import { NullLogService } from 'vs/platform/log/common/log';
|
||||
import { DiskFileSystemProvider } from 'vs/workbench/services/files/node/diskFileSystemProvider';
|
||||
import { WorkbenchEnvironmentService } from 'vs/workbench/services/environment/node/environmentService';
|
||||
import { parseArgs } from 'vs/platform/environment/node/argv';
|
||||
import { snapshotToString } from 'vs/workbench/services/textfile/common/textfiles';
|
||||
|
||||
const parentDir = getRandomTestPath(os.tmpdir(), 'vsctests', 'backupfileservice');
|
||||
const backupHome = path.join(parentDir, 'Backups');
|
||||
const workspacesJsonPath = path.join(backupHome, 'workspaces.json');
|
||||
|
||||
const workspaceResource = Uri.file(platform.isWindows ? 'c:\\workspace' : '/workspace');
|
||||
const workspaceBackupPath = path.join(backupHome, hashPath(workspaceResource));
|
||||
const fooFile = Uri.file(platform.isWindows ? 'c:\\Foo' : '/Foo');
|
||||
const barFile = Uri.file(platform.isWindows ? 'c:\\Bar' : '/Bar');
|
||||
const untitledFile = Uri.from({ scheme: Schemas.untitled, path: 'Untitled-1' });
|
||||
const fooBackupPath = path.join(workspaceBackupPath, 'file', hashPath(fooFile));
|
||||
const barBackupPath = path.join(workspaceBackupPath, 'file', hashPath(barFile));
|
||||
const untitledBackupPath = path.join(workspaceBackupPath, 'untitled', hashPath(untitledFile));
|
||||
|
||||
class TestBackupEnvironmentService extends WorkbenchEnvironmentService {
|
||||
|
||||
private config: IWindowConfiguration;
|
||||
|
||||
constructor(workspaceBackupPath: string) {
|
||||
super(parseArgs(process.argv) as IWindowConfiguration, process.execPath);
|
||||
|
||||
this.config = Object.create(null);
|
||||
this.config.backupPath = workspaceBackupPath;
|
||||
}
|
||||
|
||||
get configuration(): IWindowConfiguration {
|
||||
return this.config;
|
||||
}
|
||||
}
|
||||
|
||||
class TestBackupFileService extends BackupFileService {
|
||||
constructor(workspace: Uri, backupHome: string, workspacesJsonPath: string) {
|
||||
const fileService = new FileService(new NullLogService());
|
||||
fileService.registerProvider(Schemas.file, new DiskFileSystemProvider(new NullLogService()));
|
||||
const environmentService = new TestBackupEnvironmentService(workspaceBackupPath);
|
||||
|
||||
super(environmentService, fileService);
|
||||
}
|
||||
|
||||
public toBackupResource(resource: Uri): Uri {
|
||||
return super.toBackupResource(resource);
|
||||
}
|
||||
}
|
||||
|
||||
suite('BackupFileService', () => {
|
||||
let service: TestBackupFileService;
|
||||
|
||||
setup(() => {
|
||||
service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);
|
||||
|
||||
// Delete any existing backups completely and then re-create it.
|
||||
return pfs.rimraf(backupHome, pfs.RimRafMode.MOVE).then(() => {
|
||||
return pfs.mkdirp(backupHome).then(() => {
|
||||
return pfs.writeFile(workspacesJsonPath, '');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
teardown(() => {
|
||||
return pfs.rimraf(backupHome, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
suite('hashPath', () => {
|
||||
test('should correctly hash the path for untitled scheme URIs', () => {
|
||||
const uri = Uri.from({
|
||||
scheme: 'untitled',
|
||||
path: 'Untitled-1'
|
||||
});
|
||||
const actual = hashPath(uri);
|
||||
// If these hashes change people will lose their backed up files!
|
||||
assert.equal(actual, '13264068d108c6901b3592ea654fcd57');
|
||||
assert.equal(actual, crypto.createHash('md5').update(uri.fsPath).digest('hex'));
|
||||
});
|
||||
|
||||
test('should correctly hash the path for file scheme URIs', () => {
|
||||
const uri = Uri.file('/foo');
|
||||
const actual = hashPath(uri);
|
||||
// If these hashes change people will lose their backed up files!
|
||||
if (platform.isWindows) {
|
||||
assert.equal(actual, 'dec1a583f52468a020bd120c3f01d812');
|
||||
} else {
|
||||
assert.equal(actual, '1effb2475fcfba4f9e8b8a1dbc8f3caf');
|
||||
}
|
||||
assert.equal(actual, crypto.createHash('md5').update(uri.fsPath).digest('hex'));
|
||||
});
|
||||
});
|
||||
|
||||
suite('getBackupResource', () => {
|
||||
test('should get the correct backup path for text files', () => {
|
||||
// Format should be: <backupHome>/<workspaceHash>/<scheme>/<filePathHash>
|
||||
const backupResource = fooFile;
|
||||
const workspaceHash = hashPath(workspaceResource);
|
||||
const filePathHash = hashPath(backupResource);
|
||||
const expectedPath = Uri.file(path.join(backupHome, workspaceHash, 'file', filePathHash)).fsPath;
|
||||
assert.equal(service.toBackupResource(backupResource).fsPath, expectedPath);
|
||||
});
|
||||
|
||||
test('should get the correct backup path for untitled files', () => {
|
||||
// Format should be: <backupHome>/<workspaceHash>/<scheme>/<filePath>
|
||||
const backupResource = Uri.from({ scheme: Schemas.untitled, path: 'Untitled-1' });
|
||||
const workspaceHash = hashPath(workspaceResource);
|
||||
const filePathHash = hashPath(backupResource);
|
||||
const expectedPath = Uri.file(path.join(backupHome, workspaceHash, 'untitled', filePathHash)).fsPath;
|
||||
assert.equal(service.toBackupResource(backupResource).fsPath, expectedPath);
|
||||
});
|
||||
});
|
||||
|
||||
suite('loadBackupResource', () => {
|
||||
test('should return whether a backup resource exists', () => {
|
||||
return pfs.mkdirp(path.dirname(fooBackupPath)).then(() => {
|
||||
fs.writeFileSync(fooBackupPath, 'foo');
|
||||
service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);
|
||||
return service.loadBackupResource(fooFile).then(resource => {
|
||||
assert.ok(resource);
|
||||
assert.equal(path.basename(resource!.fsPath), path.basename(fooBackupPath));
|
||||
return service.hasBackups().then(hasBackups => {
|
||||
assert.ok(hasBackups);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
suite('backupResource', () => {
|
||||
test('text file', function () {
|
||||
return service.backupResource(fooFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
assert.equal(fs.existsSync(fooBackupPath), true);
|
||||
assert.equal(fs.readFileSync(fooBackupPath), `${fooFile.toString()}\ntest`);
|
||||
});
|
||||
});
|
||||
|
||||
test('untitled file', function () {
|
||||
return service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
assert.equal(fs.existsSync(untitledBackupPath), true);
|
||||
assert.equal(fs.readFileSync(untitledBackupPath), `${untitledFile.toString()}\ntest`);
|
||||
});
|
||||
});
|
||||
|
||||
test('text file (ITextSnapshot)', function () {
|
||||
const model = TextModel.createFromString('test');
|
||||
|
||||
return service.backupResource(fooFile, model.createSnapshot()).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
assert.equal(fs.existsSync(fooBackupPath), true);
|
||||
assert.equal(fs.readFileSync(fooBackupPath), `${fooFile.toString()}\ntest`);
|
||||
model.dispose();
|
||||
});
|
||||
});
|
||||
|
||||
test('untitled file (ITextSnapshot)', function () {
|
||||
const model = TextModel.createFromString('test');
|
||||
|
||||
return service.backupResource(untitledFile, model.createSnapshot()).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
assert.equal(fs.existsSync(untitledBackupPath), true);
|
||||
assert.equal(fs.readFileSync(untitledBackupPath), `${untitledFile.toString()}\ntest`);
|
||||
model.dispose();
|
||||
});
|
||||
});
|
||||
|
||||
test('text file (large file, ITextSnapshot)', function () {
|
||||
const largeString = (new Array(10 * 1024)).join('Large String\n');
|
||||
const model = TextModel.createFromString(largeString);
|
||||
|
||||
return service.backupResource(fooFile, model.createSnapshot()).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
assert.equal(fs.existsSync(fooBackupPath), true);
|
||||
assert.equal(fs.readFileSync(fooBackupPath), `${fooFile.toString()}\n${largeString}`);
|
||||
model.dispose();
|
||||
});
|
||||
});
|
||||
|
||||
test('untitled file (large file, ITextSnapshot)', function () {
|
||||
const largeString = (new Array(10 * 1024)).join('Large String\n');
|
||||
const model = TextModel.createFromString(largeString);
|
||||
|
||||
return service.backupResource(untitledFile, model.createSnapshot()).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
assert.equal(fs.existsSync(untitledBackupPath), true);
|
||||
assert.equal(fs.readFileSync(untitledBackupPath), `${untitledFile.toString()}\n${largeString}`);
|
||||
model.dispose();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
suite('discardResourceBackup', () => {
|
||||
test('text file', function () {
|
||||
return service.backupResource(fooFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
return service.discardResourceBackup(fooFile).then(() => {
|
||||
assert.equal(fs.existsSync(fooBackupPath), false);
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('untitled file', function () {
|
||||
return service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
return service.discardResourceBackup(untitledFile).then(() => {
|
||||
assert.equal(fs.existsSync(untitledBackupPath), false);
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 0);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
suite('discardAllWorkspaceBackups', () => {
|
||||
test('text file', function () {
|
||||
return service.backupResource(fooFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
return service.backupResource(barFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 2);
|
||||
return service.discardAllWorkspaceBackups().then(() => {
|
||||
assert.equal(fs.existsSync(fooBackupPath), false);
|
||||
assert.equal(fs.existsSync(barBackupPath), false);
|
||||
assert.equal(fs.existsSync(path.join(workspaceBackupPath, 'file')), false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('untitled file', function () {
|
||||
return service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
return service.discardAllWorkspaceBackups().then(() => {
|
||||
assert.equal(fs.existsSync(untitledBackupPath), false);
|
||||
assert.equal(fs.existsSync(path.join(workspaceBackupPath, 'untitled')), false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('should disable further backups', function () {
|
||||
return service.discardAllWorkspaceBackups().then(() => {
|
||||
return service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
assert.equal(fs.existsSync(workspaceBackupPath), false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
suite('getWorkspaceFileBackups', () => {
|
||||
test('("file") - text file', () => {
|
||||
return service.backupResource(fooFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
return service.getWorkspaceFileBackups().then(textFiles => {
|
||||
assert.deepEqual(textFiles.map(f => f.fsPath), [fooFile.fsPath]);
|
||||
return service.backupResource(barFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
return service.getWorkspaceFileBackups().then(textFiles => {
|
||||
assert.deepEqual(textFiles.map(f => f.fsPath), [fooFile.fsPath, barFile.fsPath]);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('("file") - untitled file', () => {
|
||||
return service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
return service.getWorkspaceFileBackups().then(textFiles => {
|
||||
assert.deepEqual(textFiles.map(f => f.fsPath), [untitledFile.fsPath]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('("untitled") - untitled file', () => {
|
||||
return service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
return service.getWorkspaceFileBackups().then(textFiles => {
|
||||
assert.deepEqual(textFiles.map(f => f.fsPath), ['Untitled-1']);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('resolveBackupContent', () => {
|
||||
test('should restore the original contents (untitled file)', () => {
|
||||
const contents = 'test\nand more stuff';
|
||||
service.backupResource(untitledFile, createTextBufferFactory(contents).create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
service.resolveBackupContent(service.toBackupResource(untitledFile)).then(factory => {
|
||||
assert.equal(contents, snapshotToString(factory!.create(platform.isWindows ? DefaultEndOfLine.CRLF : DefaultEndOfLine.LF).createSnapshot(true)));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('should restore the original contents (text file)', () => {
|
||||
const contents = [
|
||||
'Lorem ipsum ',
|
||||
'dolor öäü sit amet ',
|
||||
'consectetur ',
|
||||
'adipiscing ßß elit',
|
||||
].join('');
|
||||
|
||||
service.backupResource(fooFile, createTextBufferFactory(contents).create(DefaultEndOfLine.LF).createSnapshot(false)).then(() => {
|
||||
service.resolveBackupContent(service.toBackupResource(untitledFile)).then(factory => {
|
||||
assert.equal(contents, snapshotToString(factory!.create(platform.isWindows ? DefaultEndOfLine.CRLF : DefaultEndOfLine.LF).createSnapshot(true)));
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
suite('BackupFilesModel', () => {
|
||||
test('simple', () => {
|
||||
const model = new BackupFilesModel();
|
||||
|
||||
const resource1 = Uri.file('test.html');
|
||||
|
||||
assert.equal(model.has(resource1), false);
|
||||
|
||||
model.add(resource1);
|
||||
|
||||
assert.equal(model.has(resource1), true);
|
||||
assert.equal(model.has(resource1, 0), true);
|
||||
assert.equal(model.has(resource1, 1), false);
|
||||
|
||||
model.remove(resource1);
|
||||
|
||||
assert.equal(model.has(resource1), false);
|
||||
|
||||
model.add(resource1);
|
||||
|
||||
assert.equal(model.has(resource1), true);
|
||||
assert.equal(model.has(resource1, 0), true);
|
||||
assert.equal(model.has(resource1, 1), false);
|
||||
|
||||
model.clear();
|
||||
|
||||
assert.equal(model.has(resource1), false);
|
||||
|
||||
model.add(resource1, 1);
|
||||
|
||||
assert.equal(model.has(resource1), true);
|
||||
assert.equal(model.has(resource1, 0), false);
|
||||
assert.equal(model.has(resource1, 1), true);
|
||||
|
||||
const resource2 = Uri.file('test1.html');
|
||||
const resource3 = Uri.file('test2.html');
|
||||
const resource4 = Uri.file('test3.html');
|
||||
|
||||
model.add(resource2);
|
||||
model.add(resource3);
|
||||
model.add(resource4);
|
||||
|
||||
assert.equal(model.has(resource1), true);
|
||||
assert.equal(model.has(resource2), true);
|
||||
assert.equal(model.has(resource3), true);
|
||||
assert.equal(model.has(resource4), true);
|
||||
});
|
||||
|
||||
test('resolve', () => {
|
||||
return pfs.mkdirp(path.dirname(fooBackupPath)).then(() => {
|
||||
fs.writeFileSync(fooBackupPath, 'foo');
|
||||
|
||||
const model = new BackupFilesModel();
|
||||
|
||||
return model.resolve(workspaceBackupPath).then(model => {
|
||||
assert.equal(model.has(Uri.file(fooBackupPath)), true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('get', () => {
|
||||
const model = new BackupFilesModel();
|
||||
|
||||
assert.deepEqual(model.get(), []);
|
||||
|
||||
const file1 = Uri.file('/root/file/foo.html');
|
||||
const file2 = Uri.file('/root/file/bar.html');
|
||||
const untitled = Uri.file('/root/untitled/bar.html');
|
||||
|
||||
model.add(file1);
|
||||
model.add(file2);
|
||||
model.add(untitled);
|
||||
|
||||
assert.deepEqual(model.get().map(f => f.fsPath), [file1.fsPath, file2.fsPath, untitled.fsPath]);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,579 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as assert from 'assert';
|
||||
import * as platform from 'vs/base/common/platform';
|
||||
import * as crypto from 'crypto';
|
||||
import * as os from 'os';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'vs/base/common/path';
|
||||
import * as pfs from 'vs/base/node/pfs';
|
||||
import { URI } from 'vs/base/common/uri';
|
||||
import { BackupFileService, BackupFilesModel, hashPath } from 'vs/workbench/services/backup/node/backupFileService';
|
||||
import { TextModel, createTextBufferFactory } from 'vs/editor/common/model/textModel';
|
||||
import { getRandomTestPath } from 'vs/base/test/node/testUtils';
|
||||
import { DefaultEndOfLine } from 'vs/editor/common/model';
|
||||
import { Schemas } from 'vs/base/common/network';
|
||||
import { IWindowConfiguration } from 'vs/platform/windows/common/windows';
|
||||
import { FileService } from 'vs/workbench/services/files/common/fileService';
|
||||
import { NullLogService } from 'vs/platform/log/common/log';
|
||||
import { DiskFileSystemProvider } from 'vs/workbench/services/files/node/diskFileSystemProvider';
|
||||
import { WorkbenchEnvironmentService } from 'vs/workbench/services/environment/node/environmentService';
|
||||
import { parseArgs } from 'vs/platform/environment/node/argv';
|
||||
import { snapshotToString } from 'vs/workbench/services/textfile/common/textfiles';
|
||||
import { IFileService } from 'vs/platform/files/common/files';
|
||||
|
||||
const parentDir = getRandomTestPath(os.tmpdir(), 'vsctests', 'backupfileservice');
|
||||
const backupHome = path.join(parentDir, 'Backups');
|
||||
const workspacesJsonPath = path.join(backupHome, 'workspaces.json');
|
||||
|
||||
const workspaceResource = URI.file(platform.isWindows ? 'c:\\workspace' : '/workspace');
|
||||
const workspaceBackupPath = path.join(backupHome, hashPath(workspaceResource));
|
||||
const fooFile = URI.file(platform.isWindows ? 'c:\\Foo' : '/Foo');
|
||||
const customFile = URI.parse('customScheme://some/path');
|
||||
const customFileWithFragment = URI.parse('customScheme2://some/path#fragment');
|
||||
const barFile = URI.file(platform.isWindows ? 'c:\\Bar' : '/Bar');
|
||||
const fooBarFile = URI.file(platform.isWindows ? 'c:\\Foo Bar' : '/Foo Bar');
|
||||
const untitledFile = URI.from({ scheme: Schemas.untitled, path: 'Untitled-1' });
|
||||
const fooBackupPath = path.join(workspaceBackupPath, 'file', hashPath(fooFile));
|
||||
const barBackupPath = path.join(workspaceBackupPath, 'file', hashPath(barFile));
|
||||
const untitledBackupPath = path.join(workspaceBackupPath, 'untitled', hashPath(untitledFile));
|
||||
|
||||
class TestBackupEnvironmentService extends WorkbenchEnvironmentService {
|
||||
|
||||
private config: IWindowConfiguration;
|
||||
|
||||
constructor(workspaceBackupPath: string) {
|
||||
super(parseArgs(process.argv) as IWindowConfiguration, process.execPath);
|
||||
|
||||
this.config = Object.create(null);
|
||||
this.config.backupPath = workspaceBackupPath;
|
||||
}
|
||||
|
||||
get configuration(): IWindowConfiguration {
|
||||
return this.config;
|
||||
}
|
||||
}
|
||||
|
||||
class TestBackupFileService extends BackupFileService {
|
||||
|
||||
readonly fileService: IFileService;
|
||||
|
||||
constructor(workspace: URI, backupHome: string, workspacesJsonPath: string) {
|
||||
const fileService = new FileService(new NullLogService());
|
||||
fileService.registerProvider(Schemas.file, new DiskFileSystemProvider(new NullLogService()));
|
||||
const environmentService = new TestBackupEnvironmentService(workspaceBackupPath);
|
||||
|
||||
super(environmentService, fileService);
|
||||
|
||||
this.fileService = fileService;
|
||||
}
|
||||
|
||||
toBackupResource(resource: URI): URI {
|
||||
return super.toBackupResource(resource);
|
||||
}
|
||||
}
|
||||
|
||||
suite('BackupFileService', () => {
|
||||
let service: TestBackupFileService;
|
||||
|
||||
setup(async () => {
|
||||
service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);
|
||||
|
||||
// Delete any existing backups completely and then re-create it.
|
||||
await pfs.rimraf(backupHome, pfs.RimRafMode.MOVE);
|
||||
await pfs.mkdirp(backupHome);
|
||||
|
||||
return pfs.writeFile(workspacesJsonPath, '');
|
||||
});
|
||||
|
||||
teardown(() => {
|
||||
return pfs.rimraf(backupHome, pfs.RimRafMode.MOVE);
|
||||
});
|
||||
|
||||
suite('hashPath', () => {
|
||||
test('should correctly hash the path for untitled scheme URIs', () => {
|
||||
const uri = URI.from({
|
||||
scheme: 'untitled',
|
||||
path: 'Untitled-1'
|
||||
});
|
||||
const actual = hashPath(uri);
|
||||
// If these hashes change people will lose their backed up files!
|
||||
assert.equal(actual, '13264068d108c6901b3592ea654fcd57');
|
||||
assert.equal(actual, crypto.createHash('md5').update(uri.fsPath).digest('hex'));
|
||||
});
|
||||
|
||||
test('should correctly hash the path for file scheme URIs', () => {
|
||||
const uri = URI.file('/foo');
|
||||
const actual = hashPath(uri);
|
||||
// If these hashes change people will lose their backed up files!
|
||||
if (platform.isWindows) {
|
||||
assert.equal(actual, 'dec1a583f52468a020bd120c3f01d812');
|
||||
} else {
|
||||
assert.equal(actual, '1effb2475fcfba4f9e8b8a1dbc8f3caf');
|
||||
}
|
||||
assert.equal(actual, crypto.createHash('md5').update(uri.fsPath).digest('hex'));
|
||||
});
|
||||
});
|
||||
|
||||
suite('getBackupResource', () => {
|
||||
test('should get the correct backup path for text files', () => {
|
||||
// Format should be: <backupHome>/<workspaceHash>/<scheme>/<filePathHash>
|
||||
const backupResource = fooFile;
|
||||
const workspaceHash = hashPath(workspaceResource);
|
||||
const filePathHash = hashPath(backupResource);
|
||||
const expectedPath = URI.file(path.join(backupHome, workspaceHash, 'file', filePathHash)).fsPath;
|
||||
assert.equal(service.toBackupResource(backupResource).fsPath, expectedPath);
|
||||
});
|
||||
|
||||
test('should get the correct backup path for untitled files', () => {
|
||||
// Format should be: <backupHome>/<workspaceHash>/<scheme>/<filePath>
|
||||
const backupResource = URI.from({ scheme: Schemas.untitled, path: 'Untitled-1' });
|
||||
const workspaceHash = hashPath(workspaceResource);
|
||||
const filePathHash = hashPath(backupResource);
|
||||
const expectedPath = URI.file(path.join(backupHome, workspaceHash, 'untitled', filePathHash)).fsPath;
|
||||
assert.equal(service.toBackupResource(backupResource).fsPath, expectedPath);
|
||||
});
|
||||
});
|
||||
|
||||
suite('loadBackupResource', () => {
|
||||
test('should return whether a backup resource exists', async () => {
|
||||
await pfs.mkdirp(path.dirname(fooBackupPath));
|
||||
fs.writeFileSync(fooBackupPath, 'foo');
|
||||
service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);
|
||||
const resource = await service.loadBackupResource(fooFile);
|
||||
assert.ok(resource);
|
||||
assert.equal(path.basename(resource!.fsPath), path.basename(fooBackupPath));
|
||||
const hasBackups = await service.hasBackups();
|
||||
assert.ok(hasBackups);
|
||||
});
|
||||
});
|
||||
|
||||
suite('backupResource', () => {
|
||||
test('text file', async () => {
|
||||
await service.backupResource(fooFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
assert.equal(fs.existsSync(fooBackupPath), true);
|
||||
assert.equal(fs.readFileSync(fooBackupPath), `${fooFile.toString()}\ntest`);
|
||||
});
|
||||
|
||||
test('text file (with meta)', async () => {
|
||||
await service.backupResource(fooFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false), undefined, { etag: '678', orphaned: true });
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
assert.equal(fs.existsSync(fooBackupPath), true);
|
||||
assert.equal(fs.readFileSync(fooBackupPath).toString(), `${fooFile.toString()} {"etag":"678","orphaned":true}\ntest`);
|
||||
});
|
||||
|
||||
test('untitled file', async () => {
|
||||
await service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
assert.equal(fs.existsSync(untitledBackupPath), true);
|
||||
assert.equal(fs.readFileSync(untitledBackupPath), `${untitledFile.toString()}\ntest`);
|
||||
});
|
||||
|
||||
test('text file (ITextSnapshot)', async () => {
|
||||
const model = TextModel.createFromString('test');
|
||||
|
||||
await service.backupResource(fooFile, model.createSnapshot());
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
assert.equal(fs.existsSync(fooBackupPath), true);
|
||||
assert.equal(fs.readFileSync(fooBackupPath), `${fooFile.toString()}\ntest`);
|
||||
model.dispose();
|
||||
});
|
||||
|
||||
test('untitled file (ITextSnapshot)', async () => {
|
||||
const model = TextModel.createFromString('test');
|
||||
|
||||
await service.backupResource(untitledFile, model.createSnapshot());
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
assert.equal(fs.existsSync(untitledBackupPath), true);
|
||||
assert.equal(fs.readFileSync(untitledBackupPath), `${untitledFile.toString()}\ntest`);
|
||||
model.dispose();
|
||||
});
|
||||
|
||||
test('text file (large file, ITextSnapshot)', async () => {
|
||||
const largeString = (new Array(10 * 1024)).join('Large String\n');
|
||||
const model = TextModel.createFromString(largeString);
|
||||
|
||||
await service.backupResource(fooFile, model.createSnapshot());
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
assert.equal(fs.existsSync(fooBackupPath), true);
|
||||
assert.equal(fs.readFileSync(fooBackupPath), `${fooFile.toString()}\n${largeString}`);
|
||||
model.dispose();
|
||||
});
|
||||
|
||||
test('untitled file (large file, ITextSnapshot)', async () => {
|
||||
const largeString = (new Array(10 * 1024)).join('Large String\n');
|
||||
const model = TextModel.createFromString(largeString);
|
||||
|
||||
await service.backupResource(untitledFile, model.createSnapshot());
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
assert.equal(fs.existsSync(untitledBackupPath), true);
|
||||
assert.equal(fs.readFileSync(untitledBackupPath), `${untitledFile.toString()}\n${largeString}`);
|
||||
model.dispose();
|
||||
});
|
||||
});
|
||||
|
||||
suite('discardResourceBackup', () => {
|
||||
test('text file', async () => {
|
||||
await service.backupResource(fooFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
await service.discardResourceBackup(fooFile);
|
||||
assert.equal(fs.existsSync(fooBackupPath), false);
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 0);
|
||||
});
|
||||
|
||||
test('untitled file', async () => {
|
||||
await service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
await service.discardResourceBackup(untitledFile);
|
||||
assert.equal(fs.existsSync(untitledBackupPath), false);
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 0);
|
||||
});
|
||||
});
|
||||
|
||||
suite('discardAllWorkspaceBackups', () => {
|
||||
test('text file', async () => {
|
||||
await service.backupResource(fooFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 1);
|
||||
await service.backupResource(barFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'file')).length, 2);
|
||||
await service.discardAllWorkspaceBackups();
|
||||
assert.equal(fs.existsSync(fooBackupPath), false);
|
||||
assert.equal(fs.existsSync(barBackupPath), false);
|
||||
assert.equal(fs.existsSync(path.join(workspaceBackupPath, 'file')), false);
|
||||
});
|
||||
|
||||
test('untitled file', async () => {
|
||||
await service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
assert.equal(fs.readdirSync(path.join(workspaceBackupPath, 'untitled')).length, 1);
|
||||
await service.discardAllWorkspaceBackups();
|
||||
assert.equal(fs.existsSync(untitledBackupPath), false);
|
||||
assert.equal(fs.existsSync(path.join(workspaceBackupPath, 'untitled')), false);
|
||||
});
|
||||
|
||||
test('should disable further backups', async () => {
|
||||
await service.discardAllWorkspaceBackups();
|
||||
await service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
assert.equal(fs.existsSync(workspaceBackupPath), false);
|
||||
});
|
||||
});
|
||||
|
||||
suite('getWorkspaceFileBackups', () => {
|
||||
test('("file") - text file', async () => {
|
||||
await service.backupResource(fooFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
const textFiles = await service.getWorkspaceFileBackups();
|
||||
assert.deepEqual(textFiles.map(f => f.fsPath), [fooFile.fsPath]);
|
||||
await service.backupResource(barFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
const textFiles_1 = await service.getWorkspaceFileBackups();
|
||||
assert.deepEqual(textFiles_1.map(f => f.fsPath), [fooFile.fsPath, barFile.fsPath]);
|
||||
});
|
||||
|
||||
test('("file") - untitled file', async () => {
|
||||
await service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
const textFiles = await service.getWorkspaceFileBackups();
|
||||
assert.deepEqual(textFiles.map(f => f.fsPath), [untitledFile.fsPath]);
|
||||
});
|
||||
|
||||
test('("untitled") - untitled file', async () => {
|
||||
await service.backupResource(untitledFile, createTextBufferFactory('test').create(DefaultEndOfLine.LF).createSnapshot(false));
|
||||
const textFiles = await service.getWorkspaceFileBackups();
|
||||
assert.deepEqual(textFiles.map(f => f.fsPath), ['Untitled-1']);
|
||||
});
|
||||
});
|
||||
|
||||
	suite('resolveBackupContent', () => {

		interface IBackupTestMetaData {
			mtime?: number;
			size?: number;
			etag?: string;
			orphaned?: boolean;
		}

		test('should restore the original contents (untitled file)', async () => {
			const contents = 'test\nand more stuff';

			await testResolveBackup(untitledFile, contents);
		});

		test('should restore the original contents (untitled file with metadata)', async () => {
			const contents = 'test\nand more stuff';

			const meta = {
				etag: 'the Etag',
				size: 666,
				mtime: Date.now(),
				orphaned: true
			};

			await testResolveBackup(untitledFile, contents, meta);
		});

		test('should restore the original contents (text file)', async () => {
			const contents = [
				'Lorem ipsum ',
				'dolor öäü sit amet ',
				'consectetur ',
				'adipiscing ßß elit'
			].join('');

			await testResolveBackup(fooFile, contents);
		});

		test('should restore the original contents (text file - custom scheme)', async () => {
			const contents = [
				'Lorem ipsum ',
				'dolor öäü sit amet ',
				'consectetur ',
				'adipiscing ßß elit'
			].join('');

			await testResolveBackup(customFile, contents);
		});

		test('should restore the original contents (text file with metadata)', async () => {
			const contents = [
				'Lorem ipsum ',
				'dolor öäü sit amet ',
				'adipiscing ßß elit',
				'consectetur '
			].join('');

			const meta = {
				etag: 'theEtag',
				size: 888,
				mtime: Date.now(),
				orphaned: false
			};

			await testResolveBackup(fooFile, contents, meta);
		});

		test('should restore the original contents (text file with metadata changed once)', async () => {
			const contents = [
				'Lorem ipsum ',
				'dolor öäü sit amet ',
				'adipiscing ßß elit',
				'consectetur '
			].join('');

			const meta = {
				etag: 'theEtag',
				size: 888,
				mtime: Date.now(),
				orphaned: false
			};

			await testResolveBackup(fooFile, contents, meta);

			// Change meta and test again
			meta.size = 999;
			await testResolveBackup(fooFile, contents, meta);
		});

		test('should restore the original contents (text file with broken metadata)', async () => {
			const contents = [
				'Lorem ipsum ',
				'dolor öäü sit amet ',
				'adipiscing ßß elit',
				'consectetur '
			].join('');

			const meta = {
				etag: 'theEtag',
				size: 888,
				mtime: Date.now(),
				orphaned: false
			};

			await service.backupResource(fooFile, createTextBufferFactory(contents).create(DefaultEndOfLine.LF).createSnapshot(false), 1, meta);

			assert.ok(await service.loadBackupResource(fooFile));

			const fileContents = fs.readFileSync(fooBackupPath).toString();
			assert.equal(fileContents.indexOf(fooFile.toString()), 0);

			const metaIndex = fileContents.indexOf('{');
			const newFileContents = fileContents.substring(0, metaIndex) + '{{' + fileContents.substr(metaIndex);
			fs.writeFileSync(fooBackupPath, newFileContents);

			const backup = await service.resolveBackupContent(service.toBackupResource(fooFile));
			assert.equal(contents, snapshotToString(backup.value.create(platform.isWindows ? DefaultEndOfLine.CRLF : DefaultEndOfLine.LF).createSnapshot(true)));
			assert.ok(!backup.meta);
		});

		test('should restore the original contents (text file with metadata and fragment URI)', async () => {
			const contents = [
				'Lorem ipsum ',
				'dolor öäü sit amet ',
				'adipiscing ßß elit',
				'consectetur '
			].join('');

			const meta = {
				etag: 'theEtag',
				size: 888,
				mtime: Date.now(),
				orphaned: false
			};

			await testResolveBackup(customFileWithFragment, contents, meta);
		});

		test('should restore the original contents (text file with space in name with metadata)', async () => {
			const contents = [
				'Lorem ipsum ',
				'dolor öäü sit amet ',
				'adipiscing ßß elit',
				'consectetur '
			].join('');

			const meta = {
				etag: 'theEtag',
				size: 888,
				mtime: Date.now(),
				orphaned: false
			};

			await testResolveBackup(fooBarFile, contents, meta);
		});

		test('should restore the original contents (text file with too large metadata to persist)', async () => {
			const contents = [
				'Lorem ipsum ',
				'dolor öäü sit amet ',
				'adipiscing ßß elit',
				'consectetur '
			].join('');

			const meta = {
				etag: (new Array(100 * 1024)).join('Large String'),
				size: 888,
				mtime: Date.now(),
				orphaned: false
			};

			await testResolveBackup(fooBarFile, contents, meta, null);
		});

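		// Helper: backs up the resource with optional metadata, then resolves the backup again and asserts that contents and metadata round-trip.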
		async function testResolveBackup(resource: URI, contents: string, meta?: IBackupTestMetaData, expectedMeta?: IBackupTestMetaData | null) {
			if (typeof expectedMeta === 'undefined') {
				expectedMeta = meta;
			}

			await service.backupResource(resource, createTextBufferFactory(contents).create(DefaultEndOfLine.LF).createSnapshot(false), 1, meta);

			assert.ok(await service.loadBackupResource(resource));

			const backup = await service.resolveBackupContent<IBackupTestMetaData>(service.toBackupResource(resource));
			assert.equal(contents, snapshotToString(backup.value.create(platform.isWindows ? DefaultEndOfLine.CRLF : DefaultEndOfLine.LF).createSnapshot(true)));

			if (expectedMeta) {
				assert.equal(backup.meta!.etag, expectedMeta.etag);
				assert.equal(backup.meta!.size, expectedMeta.size);
				assert.equal(backup.meta!.mtime, expectedMeta.mtime);
				assert.equal(backup.meta!.orphaned, expectedMeta.orphaned);
			} else {
				assert.ok(!backup.meta);
			}
		}
	});
});

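// Covers the in-memory BackupFilesModel, which tracks backups per resource, version id and metadata.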
suite('BackupFilesModel', () => {

	let service: TestBackupFileService;

	setup(async () => {
		service = new TestBackupFileService(workspaceResource, backupHome, workspacesJsonPath);

		// Delete any existing backups completely and then re-create it.
		await pfs.rimraf(backupHome, pfs.RimRafMode.MOVE);
		await pfs.mkdirp(backupHome);

		return pfs.writeFile(workspacesJsonPath, '');
	});

	teardown(() => {
		return pfs.rimraf(backupHome, pfs.RimRafMode.MOVE);
	});

	test('simple', () => {
		const model = new BackupFilesModel(service.fileService);

		const resource1 = URI.file('test.html');

		assert.equal(model.has(resource1), false);

		model.add(resource1);

		assert.equal(model.has(resource1), true);
		assert.equal(model.has(resource1, 0), true);
		assert.equal(model.has(resource1, 1), false);
		assert.equal(model.has(resource1, 1, { foo: 'bar' }), false);

		model.remove(resource1);

		assert.equal(model.has(resource1), false);

		model.add(resource1);

		assert.equal(model.has(resource1), true);
		assert.equal(model.has(resource1, 0), true);
		assert.equal(model.has(resource1, 1), false);

		model.clear();

		assert.equal(model.has(resource1), false);

		model.add(resource1, 1);

		assert.equal(model.has(resource1), true);
		assert.equal(model.has(resource1, 0), false);
		assert.equal(model.has(resource1, 1), true);

		const resource2 = URI.file('test1.html');
		const resource3 = URI.file('test2.html');
		const resource4 = URI.file('test3.html');

		model.add(resource2);
		model.add(resource3);
		model.add(resource4, undefined, { foo: 'bar' });

		assert.equal(model.has(resource1), true);
		assert.equal(model.has(resource2), true);
		assert.equal(model.has(resource3), true);

		assert.equal(model.has(resource4), true);
		assert.equal(model.has(resource4, undefined, { foo: 'bar' }), true);
		assert.equal(model.has(resource4, undefined, { bar: 'foo' }), false);
	});

	test('resolve', async () => {
		await pfs.mkdirp(path.dirname(fooBackupPath));
		fs.writeFileSync(fooBackupPath, 'foo');
		const model = new BackupFilesModel(service.fileService);

		const resolvedModel = await model.resolve(URI.file(workspaceBackupPath));
		assert.equal(resolvedModel.has(URI.file(fooBackupPath)), true);
	});

	test('get', () => {
		const model = new BackupFilesModel(service.fileService);

		assert.deepEqual(model.get(), []);

		const file1 = URI.file('/root/file/foo.html');
		const file2 = URI.file('/root/file/bar.html');
		const untitled = URI.file('/root/untitled/bar.html');

		model.add(file1);
		model.add(file2);
		model.add(untitled);

		assert.deepEqual(model.get().map(f => f.fsPath), [file1.fsPath, file2.fsPath, untitled.fsPath]);
	});
});