mirror of https://github.com/ckaczor/azuredatastudio.git (synced 2026-02-16 10:58:30 -05:00)
Merge from master
@@ -3,36 +3,31 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as childProcess from 'child_process';
import { StringDecoder, NodeStringDecoder } from 'string_decoder';
import { toErrorMessage } from 'vs/base/common/errorMessage';
import * as fs from 'fs';
import * as path from 'path';
import { isEqualOrParent } from 'vs/base/common/paths';
import { Readable } from 'stream';
import { TPromise } from 'vs/base/common/winjs.base';

import * as objects from 'vs/base/common/objects';
import { NodeStringDecoder, StringDecoder } from 'string_decoder';
import * as arrays from 'vs/base/common/arrays';
import * as platform from 'vs/base/common/platform';
import * as strings from 'vs/base/common/strings';
import * as normalization from 'vs/base/common/normalization';
import * as types from 'vs/base/common/types';
import { toErrorMessage } from 'vs/base/common/errorMessage';
import * as glob from 'vs/base/common/glob';
import { IProgress, IUncachedSearchStats } from 'vs/platform/search/common/search';

import * as normalization from 'vs/base/common/normalization';
import * as objects from 'vs/base/common/objects';
import { isEqualOrParent } from 'vs/base/common/paths';
import * as platform from 'vs/base/common/platform';
import { StopWatch } from 'vs/base/common/stopwatch';
import * as strings from 'vs/base/common/strings';
import * as types from 'vs/base/common/types';
import { URI } from 'vs/base/common/uri';
import * as extfs from 'vs/base/node/extfs';
import * as flow from 'vs/base/node/flow';
import { IRawFileMatch, IRawSearch, ISearchEngine, IFolderSearch, ISerializedSearchSuccess } from './search';
import { IFileQuery, IFolderQuery, IProgress, ISearchEngineStats } from 'vs/platform/search/common/search';
import { IRawFileMatch, ISearchEngine, ISearchEngineSuccess } from 'vs/workbench/services/search/node/search';
import { spawnRipgrepCmd } from './ripgrepFileSearch';
import { rgErrorMsgForDisplay } from './ripgrepTextSearch';

enum Traversal {
	Node = 1,
	MacFind,
	WindowsDir,
	LinuxFind,
	Ripgrep
}
@@ -48,8 +43,13 @@ interface IDirectoryTree {
	pathToEntries: { [relativePath: string]: IDirectoryEntry[] };
}

const killCmds = new Set<() => void>();
process.on('exit', () => {
	killCmds.forEach(cmd => cmd());
});
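The killCmds set above acts as a process-cleanup registry: each command-line traversal registers a callback that kills its spawned child process, and the single 'exit' handler drains the set so no child outlives the host. A minimal standalone sketch of the same pattern (runTracked and its arguments are illustrative, not part of this change):

import * as childProcess from 'child_process';

const killCmds = new Set<() => void>();
process.on('exit', () => killCmds.forEach(kill => kill()));

function runTracked(command: string, args: string[]): childProcess.ChildProcess {
	const cmd = childProcess.spawn(command, args);
	const killCmd = () => cmd.kill();
	killCmds.add(killCmd);
	// Deregister when the child exits on its own so the set does not grow unbounded.
	cmd.on('exit', () => killCmds.delete(killCmd));
	return cmd;
}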

export class FileWalker {
	private config: IRawSearch;
	private config: IFileQuery;
	private useRipgrep: boolean;
	private filePattern: string;
	private normalizedFilePatternLowercase: string;
@@ -60,13 +60,12 @@ export class FileWalker {
	private isLimitHit: boolean;
	private resultCount: number;
	private isCanceled: boolean;
	private fileWalkStartTime: number;
	private fileWalkSW: StopWatch;
	private directoriesWalked: number;
	private filesWalked: number;
	private traversal: Traversal;
	private errors: string[];
	private cmdForkStartTime: number;
	private cmdForkResultTime: number;
	private cmdSW: StopWatch;
	private cmdResultCount: number;

	private folderExcludePatterns: Map<string, AbsoluteAndRelativeParsedExpression>;
@@ -74,14 +73,14 @@ export class FileWalker {

	private walkedPaths: { [path: string]: boolean; };

	constructor(config: IRawSearch) {
	constructor(config: IFileQuery, maxFileSize?: number) {
		this.config = config;
		this.useRipgrep = config.useRipgrep !== false;
		this.filePattern = config.filePattern;
		this.includePattern = config.includePattern && glob.parse(config.includePattern);
		this.maxResults = config.maxResults || null;
		this.exists = config.exists;
		this.maxFilesize = config.maxFilesize || null;
		this.maxFilesize = maxFileSize || null;
		this.walkedPaths = Object.create(null);
		this.resultCount = 0;
		this.isLimitHit = false;
@@ -101,17 +100,18 @@ export class FileWalker {
			const folderExcludeExpression: glob.IExpression = objects.assign({}, folderQuery.excludePattern || {}, this.config.excludePattern || {});

			// Add excludes for other root folders
			const fqPath = folderQuery.folder.fsPath;
			config.folderQueries
				.map(rootFolderQuery => rootFolderQuery.folder)
				.filter(rootFolder => rootFolder !== folderQuery.folder)
				.map(rootFolderQuery => rootFolderQuery.folder.fsPath)
				.filter(rootFolder => rootFolder !== fqPath)
				.forEach(otherRootFolder => {
					// Exclude nested root folders
					if (isEqualOrParent(otherRootFolder, folderQuery.folder)) {
						folderExcludeExpression[path.relative(folderQuery.folder, otherRootFolder)] = true;
					if (isEqualOrParent(otherRootFolder, fqPath)) {
						folderExcludeExpression[path.relative(fqPath, otherRootFolder)] = true;
					}
				});

			this.folderExcludePatterns.set(folderQuery.folder, new AbsoluteAndRelativeParsedExpression(folderExcludeExpression, folderQuery.folder));
			this.folderExcludePatterns.set(fqPath, new AbsoluteAndRelativeParsedExpression(folderExcludeExpression, fqPath));
		});
	}

@@ -119,8 +119,8 @@ export class FileWalker {
		this.isCanceled = true;
	}

	public walk(folderQueries: IFolderSearch[], extraFiles: string[], onResult: (result: IRawFileMatch) => void, onMessage: (message: IProgress) => void, done: (error: Error, isLimitHit: boolean) => void): void {
		this.fileWalkStartTime = Date.now();
	public walk(folderQueries: IFolderQuery[], extraFiles: URI[], onResult: (result: IRawFileMatch) => void, onMessage: (message: IProgress) => void, done: (error: Error, isLimitHit: boolean) => void): void {
		this.fileWalkSW = StopWatch.create(false);

		// Support that the file pattern is a full path to a file that exists
		if (this.isCanceled) {
@@ -130,13 +130,13 @@ export class FileWalker {
		// For each extra file
		if (extraFiles) {
			extraFiles.forEach(extraFilePath => {
				const basename = path.basename(extraFilePath);
				if (this.globalExcludePattern && this.globalExcludePattern(extraFilePath, basename)) {
				const basename = path.basename(extraFilePath.fsPath);
				if (this.globalExcludePattern && this.globalExcludePattern(extraFilePath.fsPath, basename)) {
					return; // excluded
				}

				// File: Check for match on file pattern and include pattern
				this.matchFile(onResult, { relativePath: extraFilePath /* no workspace relative path */, basename });
				this.matchFile(onResult, { relativePath: extraFilePath.fsPath /* no workspace relative path */, basename });
			});
		}

@@ -148,11 +148,7 @@ export class FileWalker {
		} else if (platform.isMacintosh) {
			this.traversal = Traversal.MacFind;
			traverse = this.cmdTraversal;
		// Disable 'dir' for now (#11181, #11179, #11183, #11182).
		} /* else if (platform.isWindows) {
			this.traversal = Traversal.WindowsDir;
			traverse = this.windowsDirTraversal;
		} */ else if (platform.isLinux) {
		} else if (platform.isLinux) {
			this.traversal = Traversal.LinuxFind;
			traverse = this.cmdTraversal;
		}
@@ -160,11 +156,11 @@ export class FileWalker {

		const isNodeTraversal = traverse === this.nodeJSTraversal;
		if (!isNodeTraversal) {
			this.cmdForkStartTime = Date.now();
			this.cmdSW = StopWatch.create(false);
		}

		// For each root folder
		flow.parallel<IFolderSearch, void>(folderQueries, (folderQuery: IFolderSearch, rootFolderDone: (err: Error, result: void) => void) => {
		flow.parallel<IFolderQuery, void>(folderQueries, (folderQuery: IFolderQuery, rootFolderDone: (err: Error, result: void) => void) => {
			this.call(traverse, this, folderQuery, onResult, onMessage, (err?: Error) => {
				if (err) {
					const errorMessage = toErrorMessage(err);
@@ -176,6 +172,7 @@ export class FileWalker {
			}
			});
		}, (errors, result) => {
			this.fileWalkSW.stop();
			const err = errors ? errors.filter(e => !!e)[0] : null;
			done(err, this.isLimitHit);
		});
@@ -189,14 +186,15 @@ export class FileWalker {
		}
	}

	private cmdTraversal(folderQuery: IFolderSearch, onResult: (result: IRawFileMatch) => void, onMessage: (message: IProgress) => void, cb: (err?: Error) => void): void {
		const rootFolder = folderQuery.folder;
	private cmdTraversal(folderQuery: IFolderQuery, onResult: (result: IRawFileMatch) => void, onMessage: (message: IProgress) => void, cb: (err?: Error) => void): void {
		const rootFolder = folderQuery.folder.fsPath;
		const isMac = platform.isMacintosh;
		let cmd: childProcess.ChildProcess;
		const killCmd = () => cmd && cmd.kill();
		killCmds.add(killCmd);

		let done = (err?: Error) => {
			process.removeListener('exit', killCmd);
			killCmds.delete(killCmd);
			done = () => { };
			cb(err);
		};
@@ -207,26 +205,24 @@ export class FileWalker {
		const useRipgrep = this.useRipgrep;
		let noSiblingsClauses: boolean;
		if (useRipgrep) {
			const ripgrep = spawnRipgrepCmd(this.config, folderQuery, this.config.includePattern, this.folderExcludePatterns.get(folderQuery.folder).expression);
			const ripgrep = spawnRipgrepCmd(this.config, folderQuery, this.config.includePattern, this.folderExcludePatterns.get(folderQuery.folder.fsPath).expression);
			cmd = ripgrep.cmd;
			noSiblingsClauses = !Object.keys(ripgrep.siblingClauses).length;

			process.nextTick(() => {
				const escapedArgs = ripgrep.rgArgs.args
					.map(arg => arg.match(/^-/) ? arg : `'${arg}'`)
					.join(' ');
			const escapedArgs = ripgrep.rgArgs.args
				.map(arg => arg.match(/^-/) ? arg : `'${arg}'`)
				.join(' ');

				let rgCmd = `rg ${escapedArgs}\n - cwd: ${ripgrep.cwd}`;
				if (ripgrep.rgArgs.siblingClauses) {
					rgCmd += `\n - Sibling clauses: ${JSON.stringify(ripgrep.rgArgs.siblingClauses)}`;
				}
				onMessage({ message: rgCmd });
			});
			let rgCmd = `rg ${escapedArgs}\n - cwd: ${ripgrep.cwd}`;
			if (ripgrep.rgArgs.siblingClauses) {
				rgCmd += `\n - Sibling clauses: ${JSON.stringify(ripgrep.rgArgs.siblingClauses)}`;
			}
			onMessage({ message: rgCmd });
		} else {
			cmd = this.spawnFindCmd(folderQuery);
		}

		process.on('exit', killCmd);
		this.cmdResultCount = 0;
		this.collectStdout(cmd, 'utf8', useRipgrep, onMessage, (err: Error, stdout?: string, last?: boolean) => {
			if (err) {
				done(err);
@@ -288,38 +284,11 @@ export class FileWalker {
		});
	}
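In the ripgrep branch above, the spawned command is echoed back through onMessage with every non-flag argument shell-quoted, so the effective invocation can be copied and re-run in a terminal. The quoting step in isolation (the sample args here are invented for illustration):

const args = ['--files', '--hidden', 'src/**/*.ts'];
const escapedArgs = args
	.map(arg => arg.match(/^-/) ? arg : `'${arg}'`)
	.join(' ');
// escapedArgs === "--files --hidden 'src/**/*.ts'", reported as: rg --files --hidden 'src/**/*.ts'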

	// protected windowsDirTraversal(rootFolder: string, onResult: (result: IRawFileMatch) => void, done: (err?: Error) => void): void {
	// 	const cmd = childProcess.spawn('cmd', ['/U', '/c', 'dir', '/s', '/b', '/a-d', rootFolder]);
	// 	this.readStdout(cmd, 'ucs2', (err: Error, stdout?: string) => {
	// 		if (err) {
	// 			done(err);
	// 			return;
	// 		}

	// 		const relativeFiles = stdout.split(`\r\n${rootFolder}\\`);
	// 		relativeFiles[0] = relativeFiles[0].trim().substr(rootFolder.length + 1);
	// 		const n = relativeFiles.length;
	// 		relativeFiles[n - 1] = relativeFiles[n - 1].trim();
	// 		if (!relativeFiles[n - 1]) {
	// 			relativeFiles.pop();
	// 		}

	// 		if (relativeFiles.length && relativeFiles[0].indexOf('\n') !== -1) {
	// 			done(new Error('Splitting up files failed'));
	// 			return;
	// 		}

	// 		this.matchFiles(rootFolder, relativeFiles, onResult);

	// 		done();
	// 	});
	// }

	/**
	 * Public for testing.
	 */
	public spawnFindCmd(folderQuery: IFolderSearch) {
		const excludePattern = this.folderExcludePatterns.get(folderQuery.folder);
	public spawnFindCmd(folderQuery: IFolderQuery) {
		const excludePattern = this.folderExcludePatterns.get(folderQuery.folder.fsPath);
		const basenames = excludePattern.getBasenameTerms();
		const pathTerms = excludePattern.getPathTerms();
		let args = ['-L', '.'];
@@ -337,7 +306,7 @@ export class FileWalker {
			args.push(')', '-prune', ')');
		}
		args.push('-type', 'f');
		return childProcess.spawn('find', args, { cwd: folderQuery.folder });
		return childProcess.spawn('find', args, { cwd: folderQuery.folder.fsPath });
	}

	/**
@@ -362,7 +331,10 @@ export class FileWalker {
		let onData = (err: Error, stdout?: string, last?: boolean) => {
			if (err || last) {
				onData = () => { };
				this.cmdForkResultTime = Date.now();

				if (this.cmdSW) {
					this.cmdSW.stop();
				}
			}
			cb(err, stdout, last);
		};
@@ -390,8 +362,8 @@ export class FileWalker {

		cmd.on('close', (code: number) => {
			// ripgrep returns code=1 when no results are found
			let stderrText, displayMsg: string;
			if (isRipgrep ? (!gotData && (stderrText = this.decodeData(stderr, encoding)) && (displayMsg = rgErrorMsgForDisplay(stderrText))) : code !== 0) {
			let stderrText: string;
			if (isRipgrep ? (!gotData && (stderrText = this.decodeData(stderr, encoding)) && rgErrorMsgForDisplay(stderrText)) : code !== 0) {
				onData(new Error(`command failed with error code ${code}: ${this.decodeData(stderr, encoding)}`));
			} else {
				if (isRipgrep && this.exists && code === 0) {
@@ -495,9 +467,9 @@ export class FileWalker {
		matchDirectory(rootEntries);
	}

	private nodeJSTraversal(folderQuery: IFolderSearch, onResult: (result: IRawFileMatch) => void, onMessage: (message: IProgress) => void, done: (err?: Error) => void): void {
	private nodeJSTraversal(folderQuery: IFolderQuery, onResult: (result: IRawFileMatch) => void, onMessage: (message: IProgress) => void, done: (err?: Error) => void): void {
		this.directoriesWalked++;
		extfs.readdir(folderQuery.folder, (error: Error, files: string[]) => {
		extfs.readdir(folderQuery.folder.fsPath, (error: Error, files: string[]) => {
			if (error || this.isCanceled || this.isLimitHit) {
				return done();
			}
@@ -510,23 +482,18 @@ export class FileWalker {
		});
	}

	public getStats(): IUncachedSearchStats {
	public getStats(): ISearchEngineStats {
		return {
			fromCache: false,
			cmdTime: this.cmdSW && this.cmdSW.elapsed(),
			fileWalkTime: this.fileWalkSW.elapsed(),
			traversal: Traversal[this.traversal],
			errors: this.errors,
			fileWalkStartTime: this.fileWalkStartTime,
			fileWalkResultTime: Date.now(),
			directoriesWalked: this.directoriesWalked,
			filesWalked: this.filesWalked,
			resultCount: this.resultCount,
			cmdForkStartTime: this.cmdForkStartTime,
			cmdForkResultTime: this.cmdForkResultTime,
			cmdResultCount: this.cmdResultCount
		};
	}

	private doWalk(folderQuery: IFolderSearch, relativeParentPath: string, files: string[], onResult: (result: IRawFileMatch) => void, done: (error: Error) => void): void {
	private doWalk(folderQuery: IFolderQuery, relativeParentPath: string, files: string[], onResult: (result: IRawFileMatch) => void, done: (error: Error) => void): void {
		const rootFolder = folderQuery.folder;

		// Execute tasks on each file in parallel to optimize throughput
@@ -543,12 +510,12 @@ export class FileWalker {
			// to ignore filtering by siblings because the user seems to know what she
			// is searching for and we want to include the result in that case anyway
			let currentRelativePath = relativeParentPath ? [relativeParentPath, file].join(path.sep) : file;
			if (this.folderExcludePatterns.get(folderQuery.folder).test(currentRelativePath, file, this.config.filePattern !== file ? hasSibling : undefined)) {
			if (this.folderExcludePatterns.get(folderQuery.folder.fsPath).test(currentRelativePath, file, this.config.filePattern !== file ? hasSibling : undefined)) {
				return clb(null, undefined);
			}

			// Use lstat to detect links
			let currentAbsolutePath = [rootFolder, currentRelativePath].join(path.sep);
			let currentAbsolutePath = [rootFolder.fsPath, currentRelativePath].join(path.sep);
			fs.lstat(currentAbsolutePath, (error, lstat) => {
				if (error || this.isCanceled || this.isLimitHit) {
					return clb(null, undefined);
@@ -600,7 +567,7 @@ export class FileWalker {
					return clb(null, undefined); // ignore file if max file size is hit
				}

				this.matchFile(onResult, { base: rootFolder, relativePath: currentRelativePath, basename: file, size: stat.size });
				this.matchFile(onResult, { base: rootFolder.fsPath, relativePath: currentRelativePath, basename: file, size: stat.size });
			}

			// Unwind
@@ -669,21 +636,20 @@ export class FileWalker {
	}

export class Engine implements ISearchEngine<IRawFileMatch> {
	private folderQueries: IFolderSearch[];
	private extraFiles: string[];
	private folderQueries: IFolderQuery[];
	private extraFiles: URI[];
	private walker: FileWalker;

	constructor(config: IRawSearch) {
	constructor(config: IFileQuery) {
		this.folderQueries = config.folderQueries;
		this.extraFiles = config.extraFiles;
		this.extraFiles = config.extraFileResources;

		this.walker = new FileWalker(config);
	}

	public search(onResult: (result: IRawFileMatch) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISerializedSearchSuccess) => void): void {
	public search(onResult: (result: IRawFileMatch) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISearchEngineSuccess) => void): void {
		this.walker.walk(this.folderQueries, this.extraFiles, onResult, onProgress, (err: Error, isLimitHit: boolean) => {
			done(err, {
				type: 'success',
				limitHit: isLimitHit,
				stats: this.walker.getStats()
			});
		});
@@ -730,13 +696,13 @@ class AbsoluteAndRelativeParsedExpression {
		this.relativeParsedExpr = relativeGlobExpr && glob.parse(relativeGlobExpr, { trimForExclusions: true });
	}

	public test(_path: string, basename?: string, hasSibling?: (name: string) => boolean | TPromise<boolean>): string | TPromise<string> {
	public test(_path: string, basename?: string, hasSibling?: (name: string) => boolean | Promise<boolean>): string | Promise<string> {
		return (this.relativeParsedExpr && this.relativeParsedExpr(_path, basename, hasSibling)) ||
			(this.absoluteParsedExpr && this.absoluteParsedExpr(path.join(this.root, _path), basename, hasSibling));
	}

	public getBasenameTerms(): string[] {
		const basenameTerms = [];
		const basenameTerms: string[] = [];
		if (this.absoluteParsedExpr) {
			basenameTerms.push(...glob.getBasenameTerms(this.absoluteParsedExpr));
		}
@@ -749,7 +715,7 @@ class AbsoluteAndRelativeParsedExpression {
	}

	public getPathTerms(): string[] {
		const pathTerms = [];
		const pathTerms: string[] = [];
		if (this.absoluteParsedExpr) {
			pathTerms.push(...glob.getPathTerms(this.absoluteParsedExpr));
		}
@@ -761,3 +727,34 @@ class AbsoluteAndRelativeParsedExpression {
		return pathTerms;
	}
}

export function rgErrorMsgForDisplay(msg: string): string | undefined {
	const lines = msg.trim().split('\n');
	const firstLine = lines[0].trim();

	if (strings.startsWith(firstLine, 'Error parsing regex')) {
		return firstLine;
	}

	if (strings.startsWith(firstLine, 'regex parse error')) {
		return strings.uppercaseFirstLetter(lines[lines.length - 1].trim());
	}

	if (strings.startsWith(firstLine, 'error parsing glob') ||
		strings.startsWith(firstLine, 'unsupported encoding')) {
		// Uppercase first letter
		return firstLine.charAt(0).toUpperCase() + firstLine.substr(1);
	}

	if (firstLine === `Literal '\\n' not allowed.`) {
		// I won't localize this because none of the Ripgrep error messages are localized
		return `Literal '\\n' currently not supported`;
	}

	if (strings.startsWith(firstLine, 'Literal ')) {
		// Other unsupported chars
		return firstLine;
	}

	return undefined;
}
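rgErrorMsgForDisplay separates ripgrep failures worth surfacing (bad regex, bad glob, unsupported encoding or literal) from benign non-zero exits; an undefined return means the stderr text is not shown to the user. Expected behavior on sample inputs (stderr strings abbreviated for illustration):

rgErrorMsgForDisplay('Error parsing regex near ...'); // returned verbatim
rgErrorMsgForDisplay('error parsing glob ...');       // returned with the first letter uppercased
rgErrorMsgForDisplay('some other ripgrep chatter');   // undefined: treated as a normal exit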

373
src/vs/workbench/services/search/node/fileSearchManager.ts
Normal file
@@ -0,0 +1,373 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as path from 'path';
import { CancellationToken, CancellationTokenSource } from 'vs/base/common/cancellation';
import { toErrorMessage } from 'vs/base/common/errorMessage';
import * as glob from 'vs/base/common/glob';
import * as resources from 'vs/base/common/resources';
import { StopWatch } from 'vs/base/common/stopwatch';
import { URI } from 'vs/base/common/uri';
import { IFileMatch, IFileSearchProviderStats, IFolderQuery, ISearchCompleteStats, IFileQuery } from 'vs/platform/search/common/search';
import { QueryGlobTester, resolvePatternsForProvider } from 'vs/workbench/services/search/node/search';
import * as vscode from 'vscode';

export interface IInternalFileMatch {
	base: URI;
	original?: URI;
	relativePath?: string; // Not present for extraFiles or absolute path matches
	basename: string;
	size?: number;
}

export interface IDirectoryEntry {
	base: URI;
	relativePath: string;
	basename: string;
}

export interface IDirectoryTree {
	rootEntries: IDirectoryEntry[];
	pathToEntries: { [relativePath: string]: IDirectoryEntry[] };
}

class FileSearchEngine {
	private filePattern?: string;
	private includePattern?: glob.ParsedExpression;
	private maxResults?: number;
	private exists?: boolean;
	private isLimitHit = false;
	private resultCount = 0;
	private isCanceled = false;

	private activeCancellationTokens: Set<CancellationTokenSource>;

	private globalExcludePattern?: glob.ParsedExpression;

	constructor(private config: IFileQuery, private provider: vscode.FileSearchProvider, private sessionToken?: CancellationToken) {
		this.filePattern = config.filePattern;
		this.includePattern = config.includePattern && glob.parse(config.includePattern);
		this.maxResults = config.maxResults || undefined;
		this.exists = config.exists;
		this.activeCancellationTokens = new Set<CancellationTokenSource>();

		this.globalExcludePattern = config.excludePattern && glob.parse(config.excludePattern);
	}

	public cancel(): void {
		this.isCanceled = true;
		this.activeCancellationTokens.forEach(t => t.cancel());
		this.activeCancellationTokens = new Set();
	}

	public search(_onResult: (match: IInternalFileMatch) => void): Promise<IInternalSearchComplete> {
		const folderQueries = this.config.folderQueries || [];

		return new Promise((resolve, reject) => {
			const onResult = (match: IInternalFileMatch) => {
				this.resultCount++;
				_onResult(match);
			};

			// Support that the file pattern is a full path to a file that exists
			if (this.isCanceled) {
				return resolve({ limitHit: this.isLimitHit });
			}

			// For each extra file
			if (this.config.extraFileResources) {
				this.config.extraFileResources
					.forEach(extraFile => {
						const extraFileStr = extraFile.toString(); // ?
						const basename = path.basename(extraFileStr);
						if (this.globalExcludePattern && this.globalExcludePattern(extraFileStr, basename)) {
							return; // excluded
						}

						// File: Check for match on file pattern and include pattern
						this.matchFile(onResult, { base: extraFile, basename });
					});
			}

			// For each root folder
			Promise.all(folderQueries.map(fq => {
				return this.searchInFolder(fq, onResult);
			})).then(stats => {
				resolve({
					limitHit: this.isLimitHit,
					stats: stats[0] || undefined // Only looking at single-folder workspace stats...
				});
			}, (err: Error) => {
				reject(new Error(toErrorMessage(err)));
			});
		});
	}

	private searchInFolder(fq: IFolderQuery<URI>, onResult: (match: IInternalFileMatch) => void): Promise<IFileSearchProviderStats | null> {
		const cancellation = new CancellationTokenSource();
		return new Promise((resolve, reject) => {
			const options = this.getSearchOptionsForFolder(fq);
			const tree = this.initDirectoryTree();

			const queryTester = new QueryGlobTester(this.config, fq);
			const noSiblingsClauses = !queryTester.hasSiblingExcludeClauses();

			let providerSW: StopWatch;
			new Promise(_resolve => process.nextTick(_resolve))
				.then(() => {
					this.activeCancellationTokens.add(cancellation);

					providerSW = StopWatch.create();
					return this.provider.provideFileSearchResults(
						{
							pattern: this.config.filePattern || ''
						},
						options,
						cancellation.token);
				})
				.then(results => {
					const providerTime = providerSW.elapsed();
					const postProcessSW = StopWatch.create();

					if (this.isCanceled && !this.isLimitHit) {
						return null;
					}

					if (results) {
						results.forEach(result => {
							const relativePath = path.relative(fq.folder.fsPath, result.fsPath);

							if (noSiblingsClauses) {
								const basename = path.basename(result.fsPath);
								this.matchFile(onResult, { base: fq.folder, relativePath, basename });

								return;
							}

							// TODO: Optimize siblings clauses with ripgrep here.
							this.addDirectoryEntries(tree, fq.folder, relativePath, onResult);
						});
					}

					this.activeCancellationTokens.delete(cancellation);
					if (this.isCanceled && !this.isLimitHit) {
						return null;
					}

					this.matchDirectoryTree(tree, queryTester, onResult);
					return <IFileSearchProviderStats>{
						providerTime,
						postProcessTime: postProcessSW.elapsed()
					};
				}).then(
					stats => {
						cancellation.dispose();
						resolve(stats);
					},
					err => {
						cancellation.dispose();
						reject(err);
					});
		});
	}

	private getSearchOptionsForFolder(fq: IFolderQuery<URI>): vscode.FileSearchOptions {
		const includes = resolvePatternsForProvider(this.config.includePattern, fq.includePattern);
		const excludes = resolvePatternsForProvider(this.config.excludePattern, fq.excludePattern);

		return {
			folder: fq.folder,
			excludes,
			includes,
			useIgnoreFiles: !fq.disregardIgnoreFiles,
			useGlobalIgnoreFiles: !fq.disregardGlobalIgnoreFiles,
			followSymlinks: !fq.ignoreSymlinks,
			maxResults: this.config.maxResults,
			session: this.sessionToken
		};
	}

	private initDirectoryTree(): IDirectoryTree {
		const tree: IDirectoryTree = {
			rootEntries: [],
			pathToEntries: Object.create(null)
		};
		tree.pathToEntries['.'] = tree.rootEntries;
		return tree;
	}

	private addDirectoryEntries({ pathToEntries }: IDirectoryTree, base: URI, relativeFile: string, onResult: (result: IInternalFileMatch) => void) {
		// Support relative paths to files from a root resource (ignores excludes)
		if (relativeFile === this.filePattern) {
			const basename = path.basename(this.filePattern);
			this.matchFile(onResult, { base: base, relativePath: this.filePattern, basename });
		}

		function add(relativePath: string) {
			const basename = path.basename(relativePath);
			const dirname = path.dirname(relativePath);
			let entries = pathToEntries[dirname];
			if (!entries) {
				entries = pathToEntries[dirname] = [];
				add(dirname);
			}
			entries.push({
				base,
				relativePath,
				basename
			});
		}

		add(relativeFile);
	}
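addDirectoryEntries materializes each ancestor directory lazily, on the first file that lands inside it, so the tree only contains directories that actually hold results and matchDirectoryTree can apply exclude clauses once per directory rather than once per file. A sketch of the resulting shape (paths invented for illustration):

// After add('a/b/file.txt') on a fresh tree:
// pathToEntries['.']   -> [entry for 'a']            (same array as rootEntries)
// pathToEntries['a']   -> [entry for 'a/b']
// pathToEntries['a/b'] -> [entry for 'a/b/file.txt']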

	private matchDirectoryTree({ rootEntries, pathToEntries }: IDirectoryTree, queryTester: QueryGlobTester, onResult: (result: IInternalFileMatch) => void) {
		const self = this;
		const filePattern = this.filePattern;
		function matchDirectory(entries: IDirectoryEntry[]) {
			const hasSibling = glob.hasSiblingFn(() => entries.map(entry => entry.basename));
			for (let i = 0, n = entries.length; i < n; i++) {
				const entry = entries[i];
				const { relativePath, basename } = entry;

				// Check exclude pattern
				// If the user searches for the exact file name, we adjust the glob matching
				// to ignore filtering by siblings because the user seems to know what she
				// is searching for and we want to include the result in that case anyway
				if (!queryTester.includedInQuerySync(relativePath, basename, filePattern !== basename ? hasSibling : undefined)) {
					continue;
				}

				const sub = pathToEntries[relativePath];
				if (sub) {
					matchDirectory(sub);
				} else {
					if (relativePath === filePattern) {
						continue; // ignore file if its path matches with the file pattern because that is already matched above
					}

					self.matchFile(onResult, entry);
				}

				if (self.isLimitHit) {
					break;
				}
			}
		}
		matchDirectory(rootEntries);
	}

	private matchFile(onResult: (result: IInternalFileMatch) => void, candidate: IInternalFileMatch): void {
		if (!this.includePattern || (candidate.relativePath && this.includePattern(candidate.relativePath, candidate.basename))) {
			if (this.exists || (this.maxResults && this.resultCount >= this.maxResults)) {
				this.isLimitHit = true;
				this.cancel();
			}

			if (!this.isLimitHit) {
				onResult(candidate);
			}
		}
	}
}

interface IInternalSearchComplete {
	limitHit: boolean;
	stats?: IFileSearchProviderStats;
}

export class FileSearchManager {

	private static readonly BATCH_SIZE = 512;

	private readonly sessions = new Map<string, CancellationTokenSource>();

	fileSearch(config: IFileQuery, provider: vscode.FileSearchProvider, onBatch: (matches: IFileMatch[]) => void, token: CancellationToken): Promise<ISearchCompleteStats> {
		const sessionTokenSource = this.getSessionTokenSource(config.cacheKey);
		const engine = new FileSearchEngine(config, provider, sessionTokenSource && sessionTokenSource.token);

		let resultCount = 0;
		const onInternalResult = (batch: IInternalFileMatch[]) => {
			resultCount += batch.length;
			onBatch(batch.map(m => this.rawMatchToSearchItem(m)));
		};

		return this.doSearch(engine, FileSearchManager.BATCH_SIZE, onInternalResult, token).then(
			result => {
				return <ISearchCompleteStats>{
					limitHit: result.limitHit,
					stats: {
						fromCache: false,
						type: 'fileSearchProvider',
						resultCount,
						detailStats: result.stats
					}
				};
			});
	}

	clearCache(cacheKey: string): void {
		const sessionTokenSource = this.getSessionTokenSource(cacheKey);
		if (sessionTokenSource) {
			sessionTokenSource.cancel();
		}
	}

	private getSessionTokenSource(cacheKey: string | undefined): CancellationTokenSource | undefined {
		if (!cacheKey) {
			return undefined;
		}

		if (!this.sessions.has(cacheKey)) {
			this.sessions.set(cacheKey, new CancellationTokenSource());
		}

		return this.sessions.get(cacheKey);
	}

	private rawMatchToSearchItem(match: IInternalFileMatch): IFileMatch {
		if (match.relativePath) {
			return {
				resource: resources.joinPath(match.base, match.relativePath)
			};
		} else {
			// extraFileResources
			return {
				resource: match.base
			};
		}
	}

	private doSearch(engine: FileSearchEngine, batchSize: number, onResultBatch: (matches: IInternalFileMatch[]) => void, token: CancellationToken): Promise<IInternalSearchComplete> {
		token.onCancellationRequested(() => {
			engine.cancel();
		});

		const _onResult = match => {
			if (match) {
				batch.push(match);
				if (batchSize > 0 && batch.length >= batchSize) {
					onResultBatch(batch);
					batch = [];
				}
			}
		};

		let batch: IInternalFileMatch[] = [];
		return engine.search(_onResult).then(result => {
			if (batch.length) {
				onResultBatch(batch);
			}

			return result;
		}, error => {
			if (batch.length) {
				onResultBatch(batch);
			}

			return Promise.reject(error);
		});
	}
}
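A hedged usage sketch of FileSearchManager: matches arrive through the onBatch callback in groups of up to BATCH_SIZE (512), and the returned promise resolves with limitHit plus provider timing stats. The query and provider values are assumed to exist elsewhere; they are not defined in this diff:

import { CancellationToken } from 'vs/base/common/cancellation';

const manager = new FileSearchManager();
manager.fileSearch(query, provider, batch => {
	// Each batch is IFileMatch[]; forward it to the renderer or log it.
	batch.forEach(m => console.log(m.resource.toString()));
}, CancellationToken.None).then(complete => {
	console.log(`limitHit=${complete.limitHit}`);
});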

@@ -0,0 +1,74 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as fs from 'fs';
import * as gracefulFs from 'graceful-fs';
import { CancellationToken } from 'vs/base/common/cancellation';
import { MAX_FILE_SIZE } from 'vs/platform/files/node/files';
import { ITextQuery, QueryType } from 'vs/platform/search/common/search';
import { FileWalker } from 'vs/workbench/services/search/node/fileSearch';
import { Engine } from 'vs/workbench/services/search/node/legacy/textSearch';
import { TextSearchWorkerProvider } from 'vs/workbench/services/search/node/legacy/textSearchWorkerProvider';
import { BatchedCollector } from 'vs/workbench/services/search/node/textSearchManager';
import { ISerializedFileMatch, ISerializedSearchComplete, ISerializedSearchProgressItem, ISerializedSearchSuccess } from '../search';

gracefulFs.gracefulify(fs);

type IProgressCallback = (p: ISerializedSearchProgressItem) => void;

export class LegacyTextSearchService {
	private static readonly BATCH_SIZE = 512;

	private textSearchWorkerProvider: TextSearchWorkerProvider;

	textSearch(config: ITextQuery, progressCallback: IProgressCallback, token?: CancellationToken): Promise<ISerializedSearchComplete> {
		if (!this.textSearchWorkerProvider) {
			this.textSearchWorkerProvider = new TextSearchWorkerProvider();
		}

		let engine = new Engine(
			config,
			new FileWalker({
				type: QueryType.File,
				folderQueries: config.folderQueries,
				extraFileResources: config.extraFileResources,
				includePattern: config.includePattern,
				excludePattern: config.excludePattern,
				useRipgrep: false
			}, MAX_FILE_SIZE),
			this.textSearchWorkerProvider);

		return this.doTextSearch(engine, progressCallback, LegacyTextSearchService.BATCH_SIZE, token);
	}

	private doTextSearch(engine: Engine, progressCallback: IProgressCallback, batchSize: number, token?: CancellationToken): Promise<ISerializedSearchSuccess> {
		if (token) {
			token.onCancellationRequested(() => engine.cancel());
		}

		return new Promise<ISerializedSearchSuccess>((c, e) => {
			// Use BatchedCollector to get new results to the frontend every 2s at least, until 50 results have been returned
			const collector = new BatchedCollector<ISerializedFileMatch>(batchSize, progressCallback);
			engine.search((matches) => {
				const totalMatches = matches.reduce((acc, m) => acc + m.numMatches, 0);
				collector.addItems(matches, totalMatches);
			}, (progress) => {
				progressCallback(progress);
			}, (error, stats) => {
				collector.flush();

				if (error) {
					e(error);
				} else {
					c({
						type: 'success',
						limitHit: stats.limitHit,
						stats: null
					});
				}
			});
		});
	}
}

35
src/vs/workbench/services/search/node/legacy/search.ts
Normal file
@@ -0,0 +1,35 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as glob from 'vs/base/common/glob';
import { IPatternInfo, ITextSearchPreviewOptions } from 'vs/platform/search/common/search';

export interface IFolderSearch {
	folder: string;
	excludePattern?: glob.IExpression;
	includePattern?: glob.IExpression;
	fileEncoding?: string;
	disregardIgnoreFiles?: boolean;
	disregardGlobalIgnoreFiles?: boolean;
}

export interface IRawSearch {
	folderQueries: IFolderSearch[];
	ignoreSymlinks?: boolean;
	extraFiles?: string[];
	filePattern?: string;
	excludePattern?: glob.IExpression;
	includePattern?: glob.IExpression;
	contentPattern?: IPatternInfo;
	maxResults?: number;
	exists?: boolean;
	sortByScore?: boolean;
	cacheKey?: string;
	maxFilesize?: number;
	useRipgrep?: boolean;
	disregardIgnoreFiles?: boolean;
	previewOptions?: ITextSearchPreviewOptions;
	disregardGlobalIgnoreFiles?: boolean;
}
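For reference, a minimal IRawSearch literal as the legacy engine would consume it (all values here are illustrative, not taken from this change):

const rawSearch: IRawSearch = {
	folderQueries: [{ folder: '/home/user/project', disregardIgnoreFiles: false }],
	filePattern: 'index',
	excludePattern: { '**/node_modules': true },
	maxResults: 2048,
	useRipgrep: false
};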

@@ -3,23 +3,21 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as path from 'path';

import { onUnexpectedError } from 'vs/base/common/errors';
import { IProgress } from 'vs/platform/search/common/search';
import { IProgress, ITextQuery } from 'vs/platform/search/common/search';
import { FileWalker } from 'vs/workbench/services/search/node/fileSearch';

import { ISerializedFileMatch, IRawSearch, ISearchEngine, ISerializedSearchSuccess } from './search';
import { ISearchWorker } from './worker/searchWorkerIpc';
import { ISearchEngine, ISearchEngineSuccess, ISerializedFileMatch } from '../search';
import { ITextSearchWorkerProvider } from './textSearchWorkerProvider';
import { ISearchWorker, ISearchWorkerSearchArgs } from './worker/searchWorkerIpc';
import { IRawSearch } from 'vs/workbench/services/search/node/legacy/search';

export class Engine implements ISearchEngine<ISerializedFileMatch[]> {

	private static readonly PROGRESS_FLUSH_CHUNK_SIZE = 50; // optimization: number of files to process before emitting progress event

	private config: IRawSearch;
	private config2: ITextQuery;
	private walker: FileWalker;
	private walkerError: Error;

@@ -37,8 +35,9 @@ export class Engine implements ISearchEngine<ISerializedFileMatch[]> {

	private nextWorker = 0;

	constructor(config: IRawSearch, walker: FileWalker, workerProvider: ITextSearchWorkerProvider) {
		this.config = config;
	constructor(config: ITextQuery, walker: FileWalker, workerProvider: ITextSearchWorkerProvider) {
		this.config = makeRawSearch(config);
		this.config2 = config;
		this.walker = walker;
		this.workerProvider = workerProvider;
	}
@@ -60,7 +59,7 @@ export class Engine implements ISearchEngine<ISerializedFileMatch[]> {
		});
	}

	search(onResult: (match: ISerializedFileMatch[]) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISerializedSearchSuccess) => void): void {
	search(onResult: (match: ISerializedFileMatch[]) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISearchEngineSuccess) => void): void {
		this.workers = this.workerProvider.getWorkers();
		this.initializeWorkers();

@@ -86,7 +85,6 @@ export class Engine implements ISearchEngine<ISerializedFileMatch[]> {
		if (!this.isDone && this.processedBytes === this.totalBytes && this.walkerIsDone) {
			this.isDone = true;
			done(this.walkerError, {
				type: 'success',
				limitHit: this.limitReached,
				stats: this.walker.getStats()
			});
@@ -98,7 +96,7 @@ export class Engine implements ISearchEngine<ISerializedFileMatch[]> {
		this.nextWorker = (this.nextWorker + 1) % this.workers.length;

		const maxResults = this.config.maxResults && (this.config.maxResults - this.numResults);
		const searchArgs = { absolutePaths: batch, maxResults, pattern: this.config.contentPattern, fileEncoding };
		const searchArgs: ISearchWorkerSearchArgs = { absolutePaths: batch, maxResults, pattern: this.config.contentPattern, fileEncoding, previewOptions: this.config.previewOptions };
		worker.search(searchArgs).then(result => {
			if (!result || this.limitReached || this.isCanceled) {
				return unwind(batchBytes);
@@ -127,7 +125,7 @@ export class Engine implements ISearchEngine<ISerializedFileMatch[]> {
		let nextBatch: string[] = [];
		let nextBatchBytes = 0;
		const batchFlushBytes = 2 ** 20; // 1MB
		this.walker.walk(this.config.folderQueries, this.config.extraFiles, result => {
		this.walker.walk(this.config2.folderQueries, this.config2.extraFileResources, result => {
			let bytes = result.size || 1;
			this.totalBytes += bytes;

@@ -167,3 +165,42 @@ export class Engine implements ISearchEngine<ISerializedFileMatch[]> {
		});
	}
}

/**
 * Exported for tests
 */
export function makeRawSearch(query: ITextQuery): IRawSearch {
	let rawSearch: IRawSearch = {
		folderQueries: [],
		extraFiles: [],
		excludePattern: query.excludePattern,
		includePattern: query.includePattern,
		maxResults: query.maxResults,
		useRipgrep: query.useRipgrep,
		disregardIgnoreFiles: query.folderQueries.some(fq => fq.disregardIgnoreFiles),
		disregardGlobalIgnoreFiles: query.folderQueries.some(fq => fq.disregardGlobalIgnoreFiles),
		ignoreSymlinks: query.folderQueries.some(fq => fq.ignoreSymlinks),
		previewOptions: query.previewOptions
	};

	for (const q of query.folderQueries) {
		rawSearch.folderQueries.push({
			excludePattern: q.excludePattern,
			includePattern: q.includePattern,
			fileEncoding: q.fileEncoding,
			disregardIgnoreFiles: q.disregardIgnoreFiles,
			disregardGlobalIgnoreFiles: q.disregardGlobalIgnoreFiles,
			folder: q.folder.fsPath
		});
	}

	if (query.extraFileResources) {
		for (const r of query.extraFileResources) {
			rawSearch.extraFiles.push(r.fsPath);
		}
	}

	rawSearch.contentPattern = query.contentPattern;

	return rawSearch;
}
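makeRawSearch is the bridge from the new URI-based ITextQuery to the legacy string-path IRawSearch: each folder collapses to its fsPath, and the per-folder ignore and symlink flags are OR-ed into flat query-level flags. Expected behavior, sketched (query values invented for illustration):

// query.folderQueries = [
//   { folder: URI.file('/a'), disregardIgnoreFiles: true },
//   { folder: URI.file('/b') }
// ]
// makeRawSearch(query).folderQueries.map(fq => fq.folder) -> ['/a', '/b']
// makeRawSearch(query).disregardIgnoreFiles               -> true (any one folder sets it)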
|
||||
@@ -3,15 +3,13 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as os from 'os';
|
||||
|
||||
import uri from 'vs/base/common/uri';
|
||||
import * as ipc from 'vs/base/parts/ipc/common/ipc';
|
||||
import * as ipc from 'vs/base/parts/ipc/node/ipc';
|
||||
import { Client } from 'vs/base/parts/ipc/node/ipc.cp';
|
||||
|
||||
import { ISearchWorker, ISearchWorkerChannel, SearchWorkerChannelClient } from './worker/searchWorkerIpc';
|
||||
import { ISearchWorker, SearchWorkerChannelClient } from './worker/searchWorkerIpc';
|
||||
import { getPathFromAmdModule } from 'vs/base/common/amd';
|
||||
|
||||
export interface ITextSearchWorkerProvider {
|
||||
getWorkers(): ISearchWorker[];
|
||||
@@ -31,22 +29,22 @@ export class TextSearchWorkerProvider implements ITextSearchWorkerProvider {
|
||||
|
||||
private createWorker(): void {
|
||||
let client = new Client(
|
||||
uri.parse(require.toUrl('bootstrap')).fsPath,
|
||||
getPathFromAmdModule(require, 'bootstrap-fork'),
|
||||
{
|
||||
serverName: 'Search Worker ' + this.workers.length,
|
||||
args: ['--type=searchWorker'],
|
||||
timeout: 30 * 1000,
|
||||
env: {
|
||||
AMD_ENTRYPOINT: 'vs/workbench/services/search/node/worker/searchWorkerApp',
|
||||
AMD_ENTRYPOINT: 'vs/workbench/services/search/node/legacy/worker/searchWorkerApp',
|
||||
PIPE_LOGGING: 'true',
|
||||
VERBOSE_LOGGING: process.env.VERBOSE_LOGGING
|
||||
},
|
||||
useQueue: true
|
||||
});
|
||||
|
||||
const channel = ipc.getNextTickChannel(client.getChannel<ISearchWorkerChannel>('searchWorker'));
|
||||
const channel = ipc.getNextTickChannel(client.getChannel('searchWorker'));
|
||||
const channelClient = new SearchWorkerChannelClient(channel);
|
||||
|
||||
this.workers.push(channelClient);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,19 +3,17 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as gracefulFs from 'graceful-fs';
|
||||
gracefulFs.gracefulify(fs);
|
||||
|
||||
import { onUnexpectedError } from 'vs/base/common/errors';
|
||||
import * as strings from 'vs/base/common/strings';
|
||||
import { TPromise } from 'vs/base/common/winjs.base';
|
||||
import { LineMatch, FileMatch } from '../search';
|
||||
import { UTF16le, UTF16be, UTF8, UTF8_with_bom, encodingExists, decode, bomLength, detectEncodingFromBuffer } from 'vs/base/node/encoding';
|
||||
|
||||
import { bomLength, decode, detectEncodingFromBuffer, encodingExists, UTF16be, UTF16le, UTF8, UTF8_with_bom } from 'vs/base/node/encoding';
|
||||
import { Range } from 'vs/editor/common/core/range';
|
||||
import { ITextSearchPreviewOptions, TextSearchMatch } from 'vs/platform/search/common/search';
|
||||
import { ISearchWorker, ISearchWorkerSearchArgs, ISearchWorkerSearchResult } from './searchWorkerIpc';
|
||||
import { FileMatch } from 'vs/workbench/services/search/node/search';
|
||||
|
||||
gracefulFs.gracefulify(fs);
|
||||
|
||||
interface ReadLinesOptions {
|
||||
bufferLength: number;
|
||||
@@ -33,21 +31,21 @@ function onError(error: any): void {
|
||||
export class SearchWorker implements ISearchWorker {
|
||||
private currentSearchEngine: SearchWorkerEngine;
|
||||
|
||||
initialize(): TPromise<void> {
|
||||
initialize(): Promise<void> {
|
||||
this.currentSearchEngine = new SearchWorkerEngine();
|
||||
return TPromise.wrap<void>(undefined);
|
||||
return Promise.resolve<void>(undefined);
|
||||
}
|
||||
|
||||
cancel(): TPromise<void> {
|
||||
cancel(): Promise<void> {
|
||||
// Cancel the current search. It will stop searching and close its open files.
|
||||
if (this.currentSearchEngine) {
|
||||
this.currentSearchEngine.cancel();
|
||||
}
|
||||
|
||||
return TPromise.wrap<void>(null);
|
||||
return Promise.resolve<void>(null);
|
||||
}
|
||||
|
||||
search(args: ISearchWorkerSearchArgs): TPromise<ISearchWorkerSearchResult> {
|
||||
search(args: ISearchWorkerSearchArgs): Promise<ISearchWorkerSearchResult> {
|
||||
if (!this.currentSearchEngine) {
|
||||
// Worker timed out during search
|
||||
this.initialize();
|
||||
@@ -67,13 +65,13 @@ const LF = 0x0a;
|
||||
const CR = 0x0d;
|
||||
|
||||
export class SearchWorkerEngine {
|
||||
private nextSearch = TPromise.wrap(null);
|
||||
private nextSearch = Promise.resolve(null);
|
||||
private isCanceled = false;
|
||||
|
||||
/**
|
||||
* Searches some number of the given paths concurrently, and starts searches in other paths when those complete.
|
||||
*/
|
||||
searchBatch(args: ISearchWorkerSearchArgs): TPromise<ISearchWorkerSearchResult> {
|
||||
searchBatch(args: ISearchWorkerSearchArgs): Promise<ISearchWorkerSearchResult> {
|
||||
const contentPattern = strings.createRegExp(args.pattern.pattern, args.pattern.isRegExp, { matchCase: args.pattern.isCaseSensitive, wholeWord: args.pattern.isWordMatch, multiline: false, global: true });
|
||||
const fileEncoding = encodingExists(args.fileEncoding) ? args.fileEncoding : UTF8;
|
||||
return this.nextSearch =
|
||||
@@ -81,12 +79,12 @@ export class SearchWorkerEngine {
|
||||
}
|
||||
|
||||
|
||||
private _searchBatch(args: ISearchWorkerSearchArgs, contentPattern: RegExp, fileEncoding: string): TPromise<ISearchWorkerSearchResult> {
|
||||
private _searchBatch(args: ISearchWorkerSearchArgs, contentPattern: RegExp, fileEncoding: string): Promise<ISearchWorkerSearchResult> {
|
||||
if (this.isCanceled) {
|
||||
return TPromise.wrap<ISearchWorkerSearchResult>(null);
|
||||
return Promise.resolve<ISearchWorkerSearchResult>(null);
|
||||
}
|
||||
|
||||
return new TPromise<ISearchWorkerSearchResult>(batchDone => {
|
||||
return new Promise<ISearchWorkerSearchResult>(batchDone => {
|
||||
const result: ISearchWorkerSearchResult = {
|
||||
matches: [],
|
||||
numMatches: 0,
|
||||
@@ -94,8 +92,8 @@ export class SearchWorkerEngine {
|
||||
};
|
||||
|
||||
// Search in the given path, and when it's finished, search in the next path in absolutePaths
|
||||
const startSearchInFile = (absolutePath: string): TPromise<void> => {
|
||||
return this.searchInFile(absolutePath, contentPattern, fileEncoding, args.maxResults && (args.maxResults - result.numMatches)).then(fileResult => {
|
||||
const startSearchInFile = (absolutePath: string): Promise<void> => {
|
||||
return this.searchInFile(absolutePath, contentPattern, fileEncoding, args.maxResults && (args.maxResults - result.numMatches), args.previewOptions).then(fileResult => {
|
||||
// Finish early if search is canceled
|
||||
if (this.isCanceled) {
|
||||
return;
|
||||
@@ -114,7 +112,7 @@ export class SearchWorkerEngine {
|
||||
}, onError);
|
||||
};
|
||||
|
||||
TPromise.join(args.absolutePaths.map(startSearchInFile)).then(() => {
|
||||
Promise.all(args.absolutePaths.map(startSearchInFile)).then(() => {
|
||||
batchDone(result);
|
||||
});
|
||||
});
|
||||
@@ -124,13 +122,12 @@ export class SearchWorkerEngine {
|
||||
this.isCanceled = true;
|
||||
}
|
||||
|
||||
private searchInFile(absolutePath: string, contentPattern: RegExp, fileEncoding: string, maxResults?: number): TPromise<IFileSearchResult> {
|
||||
let fileMatch: FileMatch = null;
|
||||
private searchInFile(absolutePath: string, contentPattern: RegExp, fileEncoding: string, maxResults?: number, previewOptions?: ITextSearchPreviewOptions): Promise<IFileSearchResult> {
|
||||
let fileMatch: FileMatch | null = null;
|
||||
let limitReached = false;
|
||||
let numMatches = 0;
|
||||
|
||||
const perLineCallback = (line: string, lineNumber: number) => {
|
||||
let lineMatch: LineMatch = null;
|
||||
let match = contentPattern.exec(line);
|
||||
|
||||
// Record all matches into file result
|
||||
@@ -139,12 +136,8 @@ export class SearchWorkerEngine {
|
||||
fileMatch = new FileMatch(absolutePath);
|
||||
}
|
||||
|
||||
if (lineMatch === null) {
|
||||
lineMatch = new LineMatch(line, lineNumber);
|
||||
fileMatch.addMatch(lineMatch);
|
||||
}
|
||||
|
||||
lineMatch.addMatch(match.index, match[0].length);
|
||||
const lineMatch = new TextSearchMatch(line, new Range(lineNumber, match.index, lineNumber, match.index + match[0].length), previewOptions);
|
||||
fileMatch.addMatch(lineMatch);
|
||||
|
||||
numMatches++;
|
||||
if (maxResults && numMatches >= maxResults) {
|
||||
@@ -160,8 +153,8 @@ export class SearchWorkerEngine {
|
||||
() => fileMatch ? { match: fileMatch, limitReached, numMatches } : null);
|
||||
}
|
||||
|
||||
private readlinesAsync(filename: string, perLineCallback: (line: string, lineNumber: number) => void, options: ReadLinesOptions): TPromise<void> {
|
||||
return new TPromise<void>((resolve, reject) => {
|
||||
private readlinesAsync(filename: string, perLineCallback: (line: string, lineNumber: number) => void, options: ReadLinesOptions): Promise<void> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
fs.open(filename, 'r', null, (error: Error, fd: number) => {
|
||||
if (error) {
|
||||
return resolve(null);
|
||||
@@ -177,8 +170,8 @@ export class SearchWorkerEngine {
|
||||
return clb(null); // return early if canceled or limit reached
|
||||
}
|
||||
|
||||
fs.read(fd, buffer, 0, buffer.length, null, (error: Error, bytesRead: number, buffer: NodeBuffer) => {
|
||||
const decodeBuffer = (buffer: NodeBuffer, start: number, end: number): string => {
|
||||
fs.read(fd, buffer, 0, buffer.length, null, (error: Error, bytesRead: number, buffer: Buffer) => {
|
||||
const decodeBuffer = (buffer: Buffer, start: number, end: number): string => {
|
||||
if (options.encoding === UTF8 || options.encoding === UTF8_with_bom) {
|
||||
return buffer.toString(undefined, start, end); // much faster to use built in toString() when encoding is default
|
||||
}

@@ -3,13 +3,11 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

import { Server } from 'vs/base/parts/ipc/node/ipc.cp';
import { SearchWorkerChannel } from './searchWorkerIpc';
import { SearchWorker } from './searchWorker';

const server = new Server();
const server = new Server('searchWorker');
const worker = new SearchWorker();
const channel = new SearchWorkerChannel(worker);
server.registerChannel('searchWorker', channel);
@@ -3,20 +3,18 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

import { TPromise } from 'vs/base/common/winjs.base';
import { IChannel } from 'vs/base/parts/ipc/common/ipc';
import { ISerializedFileMatch } from '../search';
import { IPatternInfo } from 'vs/platform/search/common/search';
import { IChannel, IServerChannel } from 'vs/base/parts/ipc/node/ipc';
import { IPatternInfo, ITextSearchPreviewOptions } from 'vs/platform/search/common/search';
import { SearchWorker } from './searchWorker';
import { Event } from 'vs/base/common/event';
import { ISerializedFileMatch } from 'vs/workbench/services/search/node/search';

export interface ISearchWorkerSearchArgs {
pattern: IPatternInfo;
fileEncoding: string;
absolutePaths: string[];
maxResults?: number;
previewOptions?: ITextSearchPreviewOptions;
}

export interface ISearchWorkerSearchResult {
@@ -26,48 +24,41 @@ export interface ISearchWorkerSearchResult {
}

export interface ISearchWorker {
initialize(): TPromise<void>;
search(args: ISearchWorkerSearchArgs): TPromise<ISearchWorkerSearchResult>;
cancel(): TPromise<void>;
initialize(): Thenable<void>;
search(args: ISearchWorkerSearchArgs): Thenable<ISearchWorkerSearchResult>;
cancel(): Thenable<void>;
}

export interface ISearchWorkerChannel extends IChannel {
call(command: 'initialize'): TPromise<void>;
call(command: 'search', args: ISearchWorkerSearchArgs): TPromise<ISearchWorkerSearchResult>;
call(command: 'cancel'): TPromise<void>;
call(command: string, arg?: any): TPromise<any>;
}

export class SearchWorkerChannel implements ISearchWorkerChannel {
export class SearchWorkerChannel implements IServerChannel {
constructor(private worker: SearchWorker) {
}

listen<T>(event: string, arg?: any): Event<T> {
listen<T>(): Event<T> {
throw new Error('No events');
}

call(command: string, arg?: any): TPromise<any> {
call(_, command: string, arg?: any): Promise<any> {
switch (command) {
case 'initialize': return this.worker.initialize();
case 'search': return this.worker.search(arg);
case 'cancel': return this.worker.cancel();
}
return undefined;
throw new Error(`Call not found: ${command}`);
}
}

export class SearchWorkerChannelClient implements ISearchWorker {
constructor(private channel: ISearchWorkerChannel) { }
constructor(private channel: IChannel) { }

initialize(): TPromise<void> {
initialize(): Thenable<void> {
return this.channel.call('initialize');
}

search(args: ISearchWorkerSearchArgs): TPromise<ISearchWorkerSearchResult> {
search(args: ISearchWorkerSearchArgs): Thenable<ISearchWorkerSearchResult> {
return this.channel.call('search', args);
}

cancel(): TPromise<void> {
cancel(): Thenable<void> {
return this.channel.call('cancel');
}
}
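
The pair above is a straightforward command-dispatch IPC pattern: the server channel switches on a command string, and a thin client proxies typed methods onto channel.call. A reduced, self-contained sketch of the same pattern (MiniChannel and the ping worker are hypothetical stand-ins for the real IChannel/SearchWorker types):

interface MiniChannel {
	call(command: string, arg?: any): Promise<any>;
}

// Server side: route command strings to worker methods.
class WorkerChannel implements MiniChannel {
	constructor(private worker: { ping(msg: string): Promise<string> }) { }

	call(command: string, arg?: any): Promise<any> {
		switch (command) {
			case 'ping': return this.worker.ping(arg);
		}
		throw new Error(`Call not found: ${command}`);
	}
}

// Client side: expose the same surface as ordinary typed methods.
class WorkerClient {
	constructor(private channel: MiniChannel) { }

	ping(msg: string): Promise<string> {
		return this.channel.call('ping', msg);
	}
}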

@@ -3,24 +3,25 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import * as gracefulFs from 'graceful-fs';
import { join, sep } from 'path';
import * as arrays from 'vs/base/common/arrays';
import { CancelablePromise, createCancelablePromise } from 'vs/base/common/async';
import { CancellationToken } from 'vs/base/common/cancellation';
import { canceled } from 'vs/base/common/errors';
import { Emitter, Event } from 'vs/base/common/event';
import * as objects from 'vs/base/common/objects';
import { StopWatch } from 'vs/base/common/stopwatch';
import * as strings from 'vs/base/common/strings';
import { TPromise } from 'vs/base/common/winjs.base';
import { URI, UriComponents } from 'vs/base/common/uri';
import { compareItemsByScore, IItemAccessor, prepareQuery, ScorerCache } from 'vs/base/parts/quickopen/common/quickOpenScorer';
import { MAX_FILE_SIZE } from 'vs/platform/files/node/files';
import { ICachedSearchStats, IProgress } from 'vs/platform/search/common/search';
import { Engine as FileSearchEngine, FileWalker } from 'vs/workbench/services/search/node/fileSearch';
import { RipgrepEngine } from 'vs/workbench/services/search/node/ripgrepTextSearch';
import { Engine as TextSearchEngine } from 'vs/workbench/services/search/node/textSearch';
import { TextSearchWorkerProvider } from 'vs/workbench/services/search/node/textSearchWorkerProvider';
import { IFileSearchProgressItem, IRawFileMatch, IRawSearch, IRawSearchService, ISearchEngine, ISerializedFileMatch, ISerializedSearchComplete, ISerializedSearchProgressItem, ITelemetryEvent, ISerializedSearchSuccess } from './search';
import { Event, Emitter } from 'vs/base/common/event';
import { ICachedSearchStats, IFileQuery, IFileSearchStats, IFolderQuery, IProgress, IRawFileQuery, IRawQuery, IRawTextQuery, ITextQuery } from 'vs/platform/search/common/search';
import { Engine as FileSearchEngine } from 'vs/workbench/services/search/node/fileSearch';
import { LegacyTextSearchService } from 'vs/workbench/services/search/node/legacy/rawLegacyTextSearchService';
import { TextSearchEngineAdapter } from 'vs/workbench/services/search/node/textSearchAdapter';
import { IFileSearchProgressItem, IRawFileMatch, IRawSearchService, ISearchEngine, ISearchEngineSuccess, ISerializedFileMatch, ISerializedSearchComplete, ISerializedSearchProgressItem, ISerializedSearchSuccess } from './search';

gracefulFs.gracefulify(fs);

@@ -31,20 +32,22 @@ export class SearchService implements IRawSearchService {

private static readonly BATCH_SIZE = 512;

private legacyTextSearchService = new LegacyTextSearchService();
private caches: { [cacheKey: string]: Cache; } = Object.create(null);

private textSearchWorkerProvider: TextSearchWorkerProvider;

private _onTelemetry = new Emitter<ITelemetryEvent>();
readonly onTelemetry: Event<ITelemetryEvent> = this._onTelemetry.event;

public fileSearch(config: IRawSearch, batchSize = SearchService.BATCH_SIZE): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
let promise: TPromise<ISerializedSearchSuccess>;
public fileSearch(config: IRawFileQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
let promise: CancelablePromise<ISerializedSearchSuccess>;

const query = reviveQuery(config);
const emitter = new Emitter<ISerializedSearchProgressItem | ISerializedSearchComplete>({
onFirstListenerDidAdd: () => {
promise = this.doFileSearch(FileSearchEngine, config, p => emitter.fire(p), batchSize)
.then(c => emitter.fire(c), err => emitter.fire({ type: 'error', error: { message: err.message, stack: err.stack } }));
promise = createCancelablePromise(token => {
return this.doFileSearchWithEngine(FileSearchEngine, query, p => emitter.fire(p), token);
});

promise.then(
c => emitter.fire(c),
err => emitter.fire({ type: 'error', error: { message: err.message, stack: err.stack } }));
},
onLastListenerRemove: () => {
promise.cancel();
@@ -54,13 +57,21 @@ export class SearchService implements IRawSearchService {
return emitter.event;
}
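
Both fileSearch and textSearch use the same lifecycle trick: the expensive work starts only when the first listener subscribes to the returned event, and is canceled when the last listener unsubscribes. A reduced sketch of that pattern, assuming an Emitter with the same lifecycle hooks as the one used above:

import { Emitter, Event } from 'vs/base/common/event';

// Start `start` lazily on first subscription; cancel it when nobody listens.
function lazyCancelableEvent<T>(start: (fire: (value: T) => void) => { cancel(): void }): Event<T> {
	let handle: { cancel(): void } | undefined;
	const emitter = new Emitter<T>({
		onFirstListenerDidAdd: () => {
			handle = start(value => emitter.fire(value)); // begin work lazily
		},
		onLastListenerRemove: () => {
			if (handle) {
				handle.cancel(); // no listeners left: stop the work
			}
		}
	});
	return emitter.event;
}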

public textSearch(config: IRawSearch): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
let promise: TPromise<ISerializedSearchSuccess>;
public textSearch(rawQuery: IRawTextQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
let promise: CancelablePromise<ISerializedSearchComplete>;

const query = reviveQuery(rawQuery);
const emitter = new Emitter<ISerializedSearchProgressItem | ISerializedSearchComplete>({
onFirstListenerDidAdd: () => {
promise = (config.useRipgrep ? this.ripgrepTextSearch(config, p => emitter.fire(p)) : this.legacyTextSearch(config, p => emitter.fire(p)))
.then(c => emitter.fire(c), err => emitter.fire({ type: 'error', error: { message: err.message, stack: err.stack } }));
promise = createCancelablePromise(token => {
return (rawQuery.useRipgrep ?
this.ripgrepTextSearch(query, p => emitter.fire(p), token) :
this.legacyTextSearchService.textSearch(query, p => emitter.fire(p), token));
});

promise.then(
c => emitter.fire(c),
err => emitter.fire({ type: 'error', error: { message: err.message, stack: err.stack } }));
},
onLastListenerRemove: () => {
promise.cancel();
@@ -70,57 +81,25 @@ export class SearchService implements IRawSearchService {
return emitter.event;
}

private ripgrepTextSearch(config: IRawSearch, progressCallback: IProgressCallback): TPromise<ISerializedSearchSuccess> {
config.maxFilesize = MAX_FILE_SIZE;
let engine = new RipgrepEngine(config);
private ripgrepTextSearch(config: ITextQuery, progressCallback: IProgressCallback, token: CancellationToken): Promise<ISerializedSearchSuccess> {
config.maxFileSize = MAX_FILE_SIZE;
const engine = new TextSearchEngineAdapter(config);

return new TPromise<ISerializedSearchSuccess>((c, e) => {
// Use BatchedCollector to get new results to the frontend every 2s at least, until 50 results have been returned
const collector = new BatchedCollector<ISerializedFileMatch>(SearchService.BATCH_SIZE, progressCallback);
engine.search((match) => {
collector.addItem(match, match.numMatches);
}, (message) => {
progressCallback(message);
}, (error, stats) => {
collector.flush();

if (error) {
e(error);
} else {
c(stats);
}
});
}, () => {
engine.cancel();
});
return engine.search(token, progressCallback, progressCallback);
}

private legacyTextSearch(config: IRawSearch, progressCallback: IProgressCallback): TPromise<ISerializedSearchComplete> {
if (!this.textSearchWorkerProvider) {
this.textSearchWorkerProvider = new TextSearchWorkerProvider();
}

let engine = new TextSearchEngine(
config,
new FileWalker({
folderQueries: config.folderQueries,
extraFiles: config.extraFiles,
includePattern: config.includePattern,
excludePattern: config.excludePattern,
filePattern: config.filePattern,
useRipgrep: false,
maxFilesize: MAX_FILE_SIZE
}),
this.textSearchWorkerProvider);

return this.doTextSearch(engine, progressCallback, SearchService.BATCH_SIZE);
doFileSearch(config: IFileQuery, progressCallback: IProgressCallback, token?: CancellationToken): Promise<ISerializedSearchSuccess> {
return this.doFileSearchWithEngine(FileSearchEngine, config, progressCallback, token);
}

doFileSearch(EngineClass: { new(config: IRawSearch): ISearchEngine<IRawFileMatch>; }, config: IRawSearch, progressCallback: IProgressCallback, batchSize?: number): TPromise<ISerializedSearchSuccess> {
doFileSearchWithEngine(EngineClass: { new(config: IFileQuery): ISearchEngine<IRawFileMatch>; }, config: IFileQuery, progressCallback: IProgressCallback, token?: CancellationToken, batchSize = SearchService.BATCH_SIZE): Promise<ISerializedSearchSuccess> {
let resultCount = 0;
const fileProgressCallback: IFileProgressCallback = progress => {
if (Array.isArray(progress)) {
resultCount += progress.length;
progressCallback(progress.map(m => this.rawMatchToSearchItem(m)));
} else if ((<IRawFileMatch>progress).relativePath) {
resultCount++;
progressCallback(this.rawMatchToSearchItem(<IRawFileMatch>progress));
} else {
progressCallback(<IProgress>progress);
@@ -128,40 +107,47 @@ export class SearchService implements IRawSearchService {
};

if (config.sortByScore) {
let sortedSearch = this.trySortedSearchFromCache(config, fileProgressCallback);
let sortedSearch = this.trySortedSearchFromCache(config, fileProgressCallback, token);
if (!sortedSearch) {
const walkerConfig = config.maxResults ? objects.assign({}, config, { maxResults: null }) : config;
const engine = new EngineClass(walkerConfig);
sortedSearch = this.doSortedSearch(engine, config, progressCallback, fileProgressCallback);
sortedSearch = this.doSortedSearch(engine, config, progressCallback, fileProgressCallback, token);
}

return new TPromise<ISerializedSearchSuccess>((c, e) => {
process.nextTick(() => { // allow caller to register progress callback first
sortedSearch.then(([result, rawMatches]) => {
const serializedMatches = rawMatches.map(rawMatch => this.rawMatchToSearchItem(rawMatch));
this.sendProgress(serializedMatches, progressCallback, batchSize);
c(result);
}, e);
});
}, () => {
sortedSearch.cancel();
return new Promise<ISerializedSearchSuccess>((c, e) => {
sortedSearch.then(([result, rawMatches]) => {
const serializedMatches = rawMatches.map(rawMatch => this.rawMatchToSearchItem(rawMatch));
this.sendProgress(serializedMatches, progressCallback, batchSize);
c(result);
}, e);
});
}

const engine = new EngineClass(config);

return this.doSearch(engine, fileProgressCallback, batchSize);
return this.doSearch(engine, fileProgressCallback, batchSize, token).then(complete => {
return <ISerializedSearchSuccess>{
limitHit: complete.limitHit,
type: 'success',
stats: {
detailStats: complete.stats,
type: 'searchProcess',
fromCache: false,
resultCount,
sortingTime: undefined
}
};
});
}
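
doFileSearchWithEngine funnels three progress shapes through a single callback and discriminates them structurally: an array is a batch of matches, an object with relativePath is a single match, anything else is a plain progress message. A hedged reduction with simplified stand-in types:

// Simplified stand-ins for the real IRawFileMatch / IProgress shapes.
interface RawFileMatch { relativePath: string; }
interface ProgressMessage { message: string; }
type ProgressItem = RawFileMatch | RawFileMatch[] | ProgressMessage;

function handleProgress(progress: ProgressItem, onMatch: (m: RawFileMatch) => void, onMessage: (p: ProgressMessage) => void): void {
	if (Array.isArray(progress)) {
		progress.forEach(onMatch); // a batch of file matches
	} else if ((<RawFileMatch>progress).relativePath) {
		onMatch(<RawFileMatch>progress); // a single file match
	} else {
		onMessage(<ProgressMessage>progress); // a plain progress/log message
	}
}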

private rawMatchToSearchItem(match: IRawFileMatch): ISerializedFileMatch {
return { path: match.base ? join(match.base, match.relativePath) : match.relativePath };
}

private doSortedSearch(engine: ISearchEngine<IRawFileMatch>, config: IRawSearch, progressCallback: IProgressCallback, fileProgressCallback: IFileProgressCallback): TPromise<[ISerializedSearchSuccess, IRawFileMatch[]]> {
let searchPromise: TPromise<void>;
private doSortedSearch(engine: ISearchEngine<IRawFileMatch>, config: IFileQuery, progressCallback: IProgressCallback, fileProgressCallback: IFileProgressCallback, token?: CancellationToken): Promise<[ISerializedSearchSuccess, IRawFileMatch[]]> {
const emitter = new Emitter<IFileSearchProgressItem>();

let allResultsPromise = new TPromise<[ISerializedSearchSuccess, IRawFileMatch[]]>((c, e) => {
let allResultsPromise = createCancelablePromise(token => {
let results: IRawFileMatch[] = [];

const innerProgressCallback: IFileProgressCallback = progress => {
@@ -173,53 +159,52 @@ export class SearchService implements IRawSearchService {
}
};

searchPromise = this.doSearch(engine, innerProgressCallback, -1)
.then(result => {
c([result, results]);
// __GDPR__TODO__ classify event
this._onTelemetry.fire({
eventName: 'fileSearch',
data: result.stats
});
}, e);
}, () => {
searchPromise.cancel();
return this.doSearch(engine, innerProgressCallback, -1, token)
.then<[ISearchEngineSuccess, IRawFileMatch[]]>(result => {
return [result, results];
});
});

let cache: Cache;
if (config.cacheKey) {
cache = this.getOrCreateCache(config.cacheKey);
cache.resultsToSearchCache[config.filePattern] = {
const cacheRow: ICacheRow = {
promise: allResultsPromise,
event: emitter.event
event: emitter.event,
resolved: false
};
allResultsPromise.then(null, err => {
cache.resultsToSearchCache[config.filePattern] = cacheRow;
allResultsPromise.then(() => {
cacheRow.resolved = true;
}, err => {
delete cache.resultsToSearchCache[config.filePattern];
});

allResultsPromise = this.preventCancellation(allResultsPromise);
}

let chained: TPromise<void>;
return new TPromise<[ISerializedSearchSuccess, IRawFileMatch[]]>((c, e) => {
chained = allResultsPromise.then(([result, results]) => {
const scorerCache: ScorerCache = cache ? cache.scorerCache : Object.create(null);
const unsortedResultTime = Date.now();
return this.sortResults(config, results, scorerCache)
.then(sortedResults => {
const sortedResultTime = Date.now();
return allResultsPromise.then(([result, results]) => {
const scorerCache: ScorerCache = cache ? cache.scorerCache : Object.create(null);
const sortSW = (typeof config.maxResults !== 'number' || config.maxResults > 0) && StopWatch.create(false);
return this.sortResults(config, results, scorerCache, token)
.then<[ISerializedSearchSuccess, IRawFileMatch[]]>(sortedResults => {
// sortingTime: -1 indicates a "sorted" search that was not sorted, i.e. populating the cache when quickopen is opened.
// Contrasting with findFiles which is not sorted and will have sortingTime: undefined
const sortingTime = sortSW ? sortSW.elapsed() : -1;

c([{
type: 'success',
stats: objects.assign({}, result.stats, {
unsortedResultTime,
sortedResultTime
}),
limitHit: result.limitHit || typeof config.maxResults === 'number' && results.length > config.maxResults
} as ISerializedSearchSuccess, sortedResults]);
});
}, e);
}, () => {
chained.cancel();
return [{
type: 'success',
stats: {
detailStats: result.stats,
sortingTime,
fromCache: false,
type: 'searchProcess',
workspaceFolderCount: config.folderQueries.length,
resultCount: sortedResults.length
},
limitHit: result.limitHit || typeof config.maxResults === 'number' && results.length > config.maxResults
} as ISerializedSearchSuccess, sortedResults];
});
});
}

@@ -231,56 +216,42 @@ export class SearchService implements IRawSearchService {
return this.caches[cacheKey] = new Cache();
}

private trySortedSearchFromCache(config: IRawSearch, progressCallback: IFileProgressCallback): TPromise<[ISerializedSearchSuccess, IRawFileMatch[]]> {
private trySortedSearchFromCache(config: IFileQuery, progressCallback: IFileProgressCallback, token?: CancellationToken): Promise<[ISerializedSearchSuccess, IRawFileMatch[]]> {
const cache = config.cacheKey && this.caches[config.cacheKey];
if (!cache) {
return undefined;
}

const cacheLookupStartTime = Date.now();
const cached = this.getResultsFromCache(cache, config.filePattern, progressCallback);
const cached = this.getResultsFromCache(cache, config.filePattern, progressCallback, token);
if (cached) {
let chained: TPromise<void>;
return new TPromise<[ISerializedSearchSuccess, IRawFileMatch[]]>((c, e) => {
chained = cached.then(([result, results, cacheStats]) => {
const cacheLookupResultTime = Date.now();
return this.sortResults(config, results, cache.scorerCache)
.then(sortedResults => {
const sortedResultTime = Date.now();
return cached.then(([result, results, cacheStats]) => {
const sortSW = StopWatch.create(false);
return this.sortResults(config, results, cache.scorerCache, token)
.then<[ISerializedSearchSuccess, IRawFileMatch[]]>(sortedResults => {
const sortingTime = sortSW.elapsed();
const stats: IFileSearchStats = {
fromCache: true,
detailStats: cacheStats,
type: 'searchProcess',
resultCount: results.length,
sortingTime
};

const stats: ICachedSearchStats = {
fromCache: true,
cacheLookupStartTime: cacheLookupStartTime,
cacheFilterStartTime: cacheStats.cacheFilterStartTime,
cacheLookupResultTime: cacheLookupResultTime,
cacheEntryCount: cacheStats.cacheFilterResultCount,
resultCount: results.length
};
if (config.sortByScore) {
stats.unsortedResultTime = cacheLookupResultTime;
stats.sortedResultTime = sortedResultTime;
}
if (!cacheStats.cacheWasResolved) {
stats.joined = result.stats;
}
c([
{
type: 'success',
limitHit: result.limitHit || typeof config.maxResults === 'number' && results.length > config.maxResults,
stats: stats
} as ISerializedSearchSuccess,
sortedResults
]);
});
}, e);
}, () => {
chained.cancel();
return [
{
type: 'success',
limitHit: result.limitHit || typeof config.maxResults === 'number' && results.length > config.maxResults,
stats
} as ISerializedSearchSuccess,
sortedResults
];
});
});
}
return undefined;
}

private sortResults(config: IRawSearch, results: IRawFileMatch[], scorerCache: ScorerCache): TPromise<IRawFileMatch[]> {
private sortResults(config: IFileQuery, results: IRawFileMatch[], scorerCache: ScorerCache, token?: CancellationToken): Promise<IRawFileMatch[]> {
// we use the same compare function that is used later when showing the results using fuzzy scoring
// this is very important because we are also limiting the number of results by config.maxResults
// and as such we want the top items to be included in this result set if the number of items
@@ -288,7 +259,7 @@ export class SearchService implements IRawSearchService {
const query = prepareQuery(config.filePattern);
const compare = (matchA: IRawFileMatch, matchB: IRawFileMatch) => compareItemsByScore(matchA, matchB, query, true, FileMatchItemAccessor, scorerCache);

return arrays.topAsync(results, compare, config.maxResults, 10000);
return arrays.topAsync(results, compare, config.maxResults, 10000, token);
}
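
sortResults hands off to arrays.topAsync, which keeps only the best maxResults items instead of fully sorting the whole result set. A hypothetical synchronous reduction of the idea (the real topAsync also chunks the work across turns and honors the cancellation token):

// Keep the best n items according to `compare` (negative = a ranks higher).
// O(len * n) in this naive form; enough to illustrate the technique.
function topN<T>(items: T[], compare: (a: T, b: T) => number, n: number): T[] {
	const result: T[] = [];
	for (const item of items) {
		// Binary-search the insert position within the current top-n window.
		let lo = 0, hi = result.length;
		while (lo < hi) {
			const mid = (lo + hi) >> 1;
			if (compare(item, result[mid]) < 0) { hi = mid; } else { lo = mid + 1; }
		}
		if (lo < n) {
			result.splice(lo, 0, item);
			if (result.length > n) {
				result.pop(); // evict the current worst
			}
		}
	}
	return result;
}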

private sendProgress(results: ISerializedFileMatch[], progressCb: IProgressCallback, batchSize: number) {
@@ -301,13 +272,13 @@ export class SearchService implements IRawSearchService {
}
}

private getResultsFromCache(cache: Cache, searchValue: string, progressCallback: IFileProgressCallback): TPromise<[ISerializedSearchSuccess, IRawFileMatch[], CacheStats]> {
private getResultsFromCache(cache: Cache, searchValue: string, progressCallback: IFileProgressCallback, token?: CancellationToken): Promise<[ISearchEngineSuccess, IRawFileMatch[], ICachedSearchStats]> {
const cacheLookupSW = StopWatch.create(false);

// Find cache entries by prefix of search value
const hasPathSep = searchValue.indexOf(sep) >= 0;
let cachedRow: CacheRow;
let wasResolved: boolean;
let cachedRow: ICacheRow;
for (let previousSearch in cache.resultsToSearchCache) {

// If we narrow down, we might be able to reuse the cached results
if (strings.startsWith(searchValue, previousSearch)) {
if (hasPathSep && previousSearch.indexOf(sep) < 0) {
@@ -315,11 +286,10 @@ export class SearchService implements IRawSearchService {
}

const row = cache.resultsToSearchCache[previousSearch];
row.promise.then(() => { wasResolved = false; });
wasResolved = true;
cachedRow = {
promise: this.preventCancellation(row.promise),
event: row.event
event: row.event,
resolved: row.resolved
};
break;
}
@@ -329,64 +299,53 @@ export class SearchService implements IRawSearchService {
return null;
}

const cacheLookupTime = cacheLookupSW.elapsed();
const cacheFilterSW = StopWatch.create(false);

const listener = cachedRow.event(progressCallback);

return new TPromise<[ISerializedSearchSuccess, IRawFileMatch[], CacheStats]>((c, e) => {
cachedRow.promise.then(([complete, cachedEntries]) => {
const cacheFilterStartTime = Date.now();

// Pattern match on results
let results: IRawFileMatch[] = [];
const normalizedSearchValueLowercase = strings.stripWildcards(searchValue).toLowerCase();
for (let i = 0; i < cachedEntries.length; i++) {
let entry = cachedEntries[i];

// Check if this entry is a match for the search value
if (!strings.fuzzyContains(entry.relativePath, normalizedSearchValueLowercase)) {
continue;
}

results.push(entry);
}

c([complete, results, {
cacheWasResolved: wasResolved,
cacheFilterStartTime: cacheFilterStartTime,
cacheFilterResultCount: cachedEntries.length
}]);
}, e);
}, () => {
cachedRow.promise.cancel();
listener.dispose();
});
}

private doTextSearch(engine: TextSearchEngine, progressCallback: IProgressCallback, batchSize: number): TPromise<ISerializedSearchSuccess> {
return new TPromise<ISerializedSearchSuccess>((c, e) => {
// Use BatchedCollector to get new results to the frontend every 2s at least, until 50 results have been returned
const collector = new BatchedCollector<ISerializedFileMatch>(batchSize, progressCallback);
engine.search((matches) => {
const totalMatches = matches.reduce((acc, m) => acc + m.numMatches, 0);
collector.addItems(matches, totalMatches);
}, (progress) => {
progressCallback(progress);
}, (error, stats) => {
collector.flush();

if (error) {
e(error);
} else {
c(stats);
}
if (token) {
token.onCancellationRequested(() => {
listener.dispose();
});
}, () => {
engine.cancel();
}

return cachedRow.promise.then<[ISearchEngineSuccess, IRawFileMatch[], ICachedSearchStats]>(([complete, cachedEntries]) => {
if (token && token.isCancellationRequested) {
throw canceled();
}

// Pattern match on results
let results: IRawFileMatch[] = [];
const normalizedSearchValueLowercase = strings.stripWildcards(searchValue).toLowerCase();
for (let i = 0; i < cachedEntries.length; i++) {
let entry = cachedEntries[i];

// Check if this entry is a match for the search value
if (!strings.fuzzyContains(entry.relativePath, normalizedSearchValueLowercase)) {
continue;
}

results.push(entry);
}

return [complete, results, {
cacheWasResolved: cachedRow.resolved,
cacheLookupTime,
cacheFilterTime: cacheFilterSW.elapsed(),
cacheEntryCount: cachedEntries.length
}];
});
}
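
getResultsFromCache reuses the results of a previous search whenever the new search value extends it as a prefix, then re-filters the cached entries with strings.fuzzyContains. An illustrative reduction of the narrowing step (this fuzzyContains is a crude stand-in for the real helper):

// Does `target` contain the characters of `query` in order? Rough stand-in
// for strings.fuzzyContains; the query is assumed to be lowercased already.
function fuzzyContains(target: string, query: string): boolean {
	let qi = 0;
	for (let ti = 0; ti < target.length && qi < query.length; ti++) {
		if (target[ti].toLowerCase() === query[qi]) { qi++; }
	}
	return qi === query.length;
}

// Narrowing: entries cached for 'doc' can be filtered down for 'docs' instead
// of walking the disk again.
function narrowCached(cached: Map<string, string[]>, searchValue: string): string[] | undefined {
	for (const [previousSearch, entries] of cached) {
		if (searchValue.startsWith(previousSearch)) {
			return entries.filter(e => fuzzyContains(e, searchValue.toLowerCase()));
		}
	}
	return undefined; // nothing reusable: run a fresh search
}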

private doSearch(engine: ISearchEngine<IRawFileMatch>, progressCallback: IFileProgressCallback, batchSize?: number): TPromise<ISerializedSearchSuccess> {
return new TPromise<ISerializedSearchSuccess>((c, e) => {

private doSearch(engine: ISearchEngine<IRawFileMatch>, progressCallback: IFileProgressCallback, batchSize: number, token?: CancellationToken): Promise<ISearchEngineSuccess> {
return new Promise<ISearchEngineSuccess>((c, e) => {
let batch: IRawFileMatch[] = [];
if (token) {
token.onCancellationRequested(() => engine.cancel());
}

engine.search((match) => {
if (match) {
if (batchSize) {
@@ -400,49 +359,55 @@ export class SearchService implements IRawSearchService {
}
}
}, (progress) => {
process.nextTick(() => {
progressCallback(progress);
});
}, (error, stats) => {
progressCallback(progress);
}, (error, complete) => {
if (batch.length) {
progressCallback(batch);
}

if (error) {
e(error);
} else {
c(stats);
c(complete);
}
});
}, () => {
engine.cancel();
});
}

public clearCache(cacheKey: string): TPromise<void> {
public clearCache(cacheKey: string): Promise<void> {
delete this.caches[cacheKey];
return TPromise.as(undefined);
return Promise.resolve(undefined);
}

private preventCancellation<C, P>(promise: TPromise<C>): TPromise<C> {
return new TPromise<C>((c, e) => {
// Allow for piled up cancellations to come through first.
process.nextTick(() => {
promise.then(c, e);
});
}, () => {
// Do not propagate.
});
/**
* Return a CancelablePromise which is not actually cancelable
* TODO@rob - Is this really needed?
*/
private preventCancellation<C>(promise: CancelablePromise<C>): CancelablePromise<C> {
return new class implements CancelablePromise<C> {
cancel() {
// Do nothing
}
then(resolve, reject) {
return promise.then(resolve, reject);
}
catch(reject?) {
return this.then(undefined, reject);
}
};
}
}
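
preventCancellation exists because a cached search promise is shared: one consumer canceling must not destroy a result other consumers (or the cache itself) still need. A hedged standalone sketch of the same wrapper over a generic cancelable promise type:

interface Cancelable<T> extends Promise<T> {
	cancel(): void;
}

// Shield a shared cancelable promise: consumers still get the value, but
// their cancel() calls become no-ops so the underlying work keeps running.
function shieldCancellation<T>(promise: Cancelable<T>): Cancelable<T> {
	const shielded = promise.then(v => v) as Cancelable<T>;
	shielded.cancel = () => { /* intentionally empty: others may still need the result */ };
	return shielded;
}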

interface CacheRow {
promise: TPromise<[ISerializedSearchSuccess, IRawFileMatch[]]>;
interface ICacheRow {
// TODO@roblou - never actually canceled
promise: CancelablePromise<[ISearchEngineSuccess, IRawFileMatch[]]>;
resolved: boolean;
event: Event<IFileSearchProgressItem>;
}

class Cache {

public resultsToSearchCache: { [searchValue: string]: CacheRow; } = Object.create(null);
public resultsToSearchCache: { [searchValue: string]: ICacheRow; } = Object.create(null);

public scorerCache: ScorerCache = Object.create(null);
}
@@ -462,94 +427,19 @@ const FileMatchItemAccessor = new class implements IItemAccessor<IRawFileMatch>
}
};

interface CacheStats {
cacheWasResolved: boolean;
cacheFilterStartTime: number;
cacheFilterResultCount: number;
function reviveQuery<U extends IRawQuery>(rawQuery: U): U extends IRawTextQuery ? ITextQuery : IFileQuery {
return {
...<any>rawQuery, // TODO
...{
folderQueries: rawQuery.folderQueries && rawQuery.folderQueries.map(reviveFolderQuery),
extraFileResources: rawQuery.extraFileResources && rawQuery.extraFileResources.map(components => URI.revive(components))
}
};
}

/**
* Collects items that have a size - before the cumulative size of collected items reaches START_BATCH_AFTER_COUNT, the callback is called for every
* set of items collected.
* But after that point, the callback is called with batches of maxBatchSize.
* If the batch isn't filled within some time, the callback is also called.
*/
class BatchedCollector<T> {
private static readonly TIMEOUT = 4000;

// After RUN_TIMEOUT_UNTIL_COUNT items have been collected, stop flushing on timeout
private static readonly START_BATCH_AFTER_COUNT = 50;

private totalNumberCompleted = 0;
private batch: T[] = [];
private batchSize = 0;
private timeoutHandle: number;

constructor(private maxBatchSize: number, private cb: (items: T | T[]) => void) {
}

addItem(item: T, size: number): void {
if (!item) {
return;
}

if (this.maxBatchSize > 0) {
this.addItemToBatch(item, size);
} else {
this.cb(item);
}
}

addItems(items: T[], size: number): void {
if (!items) {
return;
}

if (this.maxBatchSize > 0) {
this.addItemsToBatch(items, size);
} else {
this.cb(items);
}
}

private addItemToBatch(item: T, size: number): void {
this.batch.push(item);
this.batchSize += size;
this.onUpdate();
}

private addItemsToBatch(item: T[], size: number): void {
this.batch = this.batch.concat(item);
this.batchSize += size;
this.onUpdate();
}

private onUpdate(): void {
if (this.totalNumberCompleted < BatchedCollector.START_BATCH_AFTER_COUNT) {
// Flush because we aren't batching yet
this.flush();
} else if (this.batchSize >= this.maxBatchSize) {
// Flush because the batch is full
this.flush();
} else if (!this.timeoutHandle) {
// No timeout running, start a timeout to flush
this.timeoutHandle = setTimeout(() => {
this.flush();
}, BatchedCollector.TIMEOUT);
}
}

flush(): void {
if (this.batchSize) {
this.totalNumberCompleted += this.batchSize;
this.cb(this.batch);
this.batch = [];
this.batchSize = 0;

if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = 0;
}
}
}
}
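
A quick usage sketch of the BatchedCollector documented above: early results flow through immediately, later results are batched by size or by the 4-second timeout (send is a hypothetical placeholder for whatever delivers results to the frontend):

declare function send(items: string[]): void; // hypothetical delivery function

const collector = new BatchedCollector<string>(512, items => {
	send(Array.isArray(items) ? items : [items]);
});

collector.addItem('file1.ts', 1); // forwarded promptly while under the 50-item threshold
collector.addItems(['a.ts', 'b.ts'], 2); // likewise
// ...after ~50 items, results accumulate until 512 are buffered or 4s pass...
collector.flush(); // always flush once the search completes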

function reviveFolderQuery(rawFolderQuery: IFolderQuery<UriComponents>): IFolderQuery<URI> {
return {
...rawFolderQuery,
folder: URI.revive(rawFolderQuery.folder)
};
}

@@ -4,22 +4,23 @@
*--------------------------------------------------------------------------------------------*/

import * as cp from 'child_process';
import { rgPath } from 'vscode-ripgrep';

import { isMacintosh as isMac } from 'vs/base/common/platform';
import * as path from 'path';
import * as glob from 'vs/base/common/glob';
import { startsWith } from 'vs/base/common/strings';
import { normalizeNFD } from 'vs/base/common/normalization';

import { IFolderSearch, IRawSearch } from './search';
import { foldersToIncludeGlobs, foldersToRgExcludeGlobs } from './ripgrepTextSearch';
import * as objects from 'vs/base/common/objects';
import * as paths from 'vs/base/common/paths';
import { isMacintosh as isMac } from 'vs/base/common/platform';
import * as strings from 'vs/base/common/strings';
import { IFileQuery, IFolderQuery } from 'vs/platform/search/common/search';
import { anchorGlob } from 'vs/workbench/services/search/node/ripgrepSearchUtils';
import { rgPath } from 'vscode-ripgrep';

// If vscode-ripgrep is in an .asar file, then the binary is unpacked.
const rgDiskPath = rgPath.replace(/\bnode_modules\.asar\b/, 'node_modules.asar.unpacked');

export function spawnRipgrepCmd(config: IRawSearch, folderQuery: IFolderSearch, includePattern: glob.IExpression, excludePattern: glob.IExpression) {
export function spawnRipgrepCmd(config: IFileQuery, folderQuery: IFolderQuery, includePattern: glob.IExpression, excludePattern: glob.IExpression) {
const rgArgs = getRgArgs(config, folderQuery, includePattern, excludePattern);
const cwd = folderQuery.folder;
const cwd = folderQuery.folder.fsPath;
return {
cmd: cp.spawn(rgDiskPath, rgArgs.args, { cwd }),
siblingClauses: rgArgs.siblingClauses,
@@ -28,12 +29,12 @@ export function spawnRipgrepCmd(config: IRawSearch, folderQuery: IFolderSearch,
};
}

function getRgArgs(config: IRawSearch, folderQuery: IFolderSearch, includePattern: glob.IExpression, excludePattern: glob.IExpression) {
function getRgArgs(config: IFileQuery, folderQuery: IFolderQuery, includePattern: glob.IExpression, excludePattern: glob.IExpression) {
const args = ['--files', '--hidden', '--case-sensitive'];

// includePattern can't have siblingClauses
foldersToIncludeGlobs([folderQuery], includePattern, false).forEach(globArg => {
const inclusion = anchor(globArg);
const inclusion = anchorGlob(globArg);
args.push('-g', inclusion);
if (isMac) {
const normalized = normalizeNFD(inclusion);
@@ -43,11 +44,11 @@ function getRgArgs(config: IRawSearch, folderQuery: IFolderSearch, includePatter
}
});

let siblingClauses: glob.IExpression;
let siblingClauses: glob.IExpression | null;

const rgGlobs = foldersToRgExcludeGlobs([folderQuery], excludePattern, undefined, false);
rgGlobs.globArgs.forEach(globArg => {
const exclusion = `!${anchor(globArg)}`;
const exclusion = `!${anchorGlob(globArg)}`;
args.push('-g', exclusion);
if (isMac) {
const normalized = normalizeNFD(exclusion);
@@ -66,7 +67,7 @@ function getRgArgs(config: IRawSearch, folderQuery: IFolderSearch, includePatter
}

// Follow symlinks
if (!config.ignoreSymlinks) {
if (!folderQuery.ignoreSymlinks) {
args.push('--follow');
}

@@ -74,14 +75,108 @@ function getRgArgs(config: IRawSearch, folderQuery: IFolderSearch, includePatter
args.push('--quiet');
}

// Folder to search
args.push('--');

args.push('.');
args.push('--no-config');
if (folderQuery.disregardGlobalIgnoreFiles) {
args.push('--no-ignore-global');
}

return { args, siblingClauses };
}

function anchor(glob: string) {
return startsWith(glob, '**') || startsWith(glob, '/') ? glob : `/${glob}`;
export interface IRgGlobResult {
globArgs: string[];
siblingClauses: glob.IExpression | null;
}

export function foldersToRgExcludeGlobs(folderQueries: IFolderQuery[], globalExclude: glob.IExpression, excludesToSkip?: Set<string>, absoluteGlobs = true): IRgGlobResult {
const globArgs: string[] = [];
let siblingClauses: glob.IExpression = {};
folderQueries.forEach(folderQuery => {
const totalExcludePattern = objects.assign({}, folderQuery.excludePattern || {}, globalExclude || {});
const result = globExprsToRgGlobs(totalExcludePattern, absoluteGlobs ? folderQuery.folder.fsPath : undefined, excludesToSkip);
globArgs.push(...result.globArgs);
if (result.siblingClauses) {
siblingClauses = objects.assign(siblingClauses, result.siblingClauses);
}
});

return { globArgs, siblingClauses };
}

export function foldersToIncludeGlobs(folderQueries: IFolderQuery[], globalInclude: glob.IExpression, absoluteGlobs = true): string[] {
const globArgs: string[] = [];
folderQueries.forEach(folderQuery => {
const totalIncludePattern = objects.assign({}, globalInclude || {}, folderQuery.includePattern || {});
const result = globExprsToRgGlobs(totalIncludePattern, absoluteGlobs ? folderQuery.folder.fsPath : undefined);
globArgs.push(...result.globArgs);
});

return globArgs;
}

function globExprsToRgGlobs(patterns: glob.IExpression, folder?: string, excludesToSkip?: Set<string>): IRgGlobResult {
const globArgs: string[] = [];
let siblingClauses: glob.IExpression | null = null;
Object.keys(patterns)
.forEach(key => {
if (excludesToSkip && excludesToSkip.has(key)) {
return;
}

if (!key) {
return;
}

const value = patterns[key];
key = trimTrailingSlash(folder ? getAbsoluteGlob(folder, key) : key);

// glob.ts requires forward slashes, but a UNC path still must start with \\
// #38165 and #38151
if (strings.startsWith(key, '\\\\')) {
key = '\\\\' + key.substr(2).replace(/\\/g, '/');
} else {
key = key.replace(/\\/g, '/');
}

if (typeof value === 'boolean' && value) {
if (strings.startsWith(key, '\\\\')) {
// Absolute globs UNC paths don't work properly, see #58758
key += '**';
}

globArgs.push(fixDriveC(key));
} else if (value && value.when) {
if (!siblingClauses) {
siblingClauses = {};
}

siblingClauses[key] = value;
}
});

return { globArgs, siblingClauses };
}

/**
* Resolves a glob like "node_modules/**" in "/foo/bar" to "/foo/bar/node_modules/**".
* Special cases C:/foo paths to write the glob like /foo instead - see https://github.com/BurntSushi/ripgrep/issues/530.
*
* Exported for testing
*/
export function getAbsoluteGlob(folder: string, key: string): string {
return paths.isAbsolute(key) ?
key :
path.join(folder, key);
}

function trimTrailingSlash(str: string): string {
str = strings.rtrim(str, '\\');
return strings.rtrim(str, '/');
}

export function fixDriveC(path: string): string {
const root = paths.getRoot(path);
return root.toLowerCase() === 'c:/' ?
path.replace(/^c:[/\\]/i, '/') :
path;
}
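
Given the helpers above, the transformations are deterministic. A few worked examples (assuming POSIX-style paths; on Windows the separators are normalized afterwards by globExprsToRgGlobs):

getAbsoluteGlob('/foo/bar', 'node_modules/**'); // → '/foo/bar/node_modules/**'
getAbsoluteGlob('/foo/bar', '/abs/glob/**');    // already absolute → '/abs/glob/**'
fixDriveC('c:/foo/**');                         // → '/foo/**' (ripgrep issue #530 workaround)
fixDriveC('d:/foo/**');                         // other drive letters untouched → 'd:/foo/**'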

@@ -0,0 +1,42 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { CancellationTokenSource } from 'vs/base/common/cancellation';
import { OutputChannel } from 'vs/workbench/services/search/node/ripgrepSearchUtils';
import { RipgrepTextSearchEngine } from 'vs/workbench/services/search/node/ripgrepTextSearchEngine';
import * as vscode from 'vscode';

export class RipgrepSearchProvider implements vscode.TextSearchProvider {
private inProgress: Set<vscode.CancellationTokenSource> = new Set();

constructor(private outputChannel: OutputChannel) {
process.once('exit', () => this.dispose());
}

provideTextSearchResults(query: vscode.TextSearchQuery, options: vscode.TextSearchOptions, progress: vscode.Progress<vscode.TextSearchResult>, token: vscode.CancellationToken): Promise<vscode.TextSearchComplete> {
const engine = new RipgrepTextSearchEngine(this.outputChannel);
return this.withToken(token, token => engine.provideTextSearchResults(query, options, progress, token));
}

private async withToken<T>(token: vscode.CancellationToken, fn: (token: vscode.CancellationToken) => Thenable<T>): Promise<T> {
const merged = mergedTokenSource(token);
this.inProgress.add(merged);
const result = await fn(merged.token);
this.inProgress.delete(merged);

return result;
}

private dispose() {
this.inProgress.forEach(engine => engine.cancel());
}
}

function mergedTokenSource(token: vscode.CancellationToken): vscode.CancellationTokenSource {
const tokenSource = new CancellationTokenSource();
token.onCancellationRequested(() => tokenSource.cancel());

return tokenSource;
}
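
mergedTokenSource links the caller's token to a locally owned source, so a running search can be canceled either by the caller or in bulk via dispose(). The same linking in isolation:

import { CancellationTokenSource } from 'vs/base/common/cancellation';

// child cancels when EITHER the parent token fires or child.cancel() is called.
const parent = new CancellationTokenSource();
const child = new CancellationTokenSource();
parent.token.onCancellationRequested(() => child.cancel());

child.token.onCancellationRequested(() => {
	// reached on parent.cancel() as well as on a direct child.cancel()
});
parent.cancel(); // propagates downward and stops the child's work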

92 src/vs/workbench/services/search/node/ripgrepSearchUtils.ts Normal file
@@ -0,0 +1,92 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { startsWith } from 'vs/base/common/strings';
import { ILogService } from 'vs/platform/log/common/log';
import { SearchRange, TextSearchMatch } from 'vs/platform/search/common/search';
import * as vscode from 'vscode';
import { mapArrayOrNot } from 'vs/base/common/arrays';

export type Maybe<T> = T | null | undefined;

export function anchorGlob(glob: string): string {
return startsWith(glob, '**') || startsWith(glob, '/') ? glob : `/${glob}`;
}
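
anchorGlob pins a relative glob to the search root so ripgrep does not match it at arbitrary depths. Worked examples, following directly from the definition above:

anchorGlob('*.ts');      // → '/*.ts' (anchored to the folder root)
anchorGlob('**/*.ts');   // → '**/*.ts' (already recursive, left alone)
anchorGlob('/src/a.ts'); // → '/src/a.ts' (already anchored)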

/**
* Create a vscode.TextSearchResult by using our internal TextSearchResult type for its previewOptions logic.
*/
export function createTextSearchResult(uri: vscode.Uri, text: string, range: Range | Range[], previewOptions?: vscode.TextSearchPreviewOptions): vscode.TextSearchMatch {
const searchRange = mapArrayOrNot(range, rangeToSearchRange);

const internalResult = new TextSearchMatch(text, searchRange, previewOptions);
const internalPreviewRange = internalResult.preview.matches;
return {
ranges: mapArrayOrNot(searchRange, searchRangeToRange),
uri,
preview: {
text: internalResult.preview.text,
matches: mapArrayOrNot(internalPreviewRange, searchRangeToRange)
}
};
}

function rangeToSearchRange(range: Range): SearchRange {
return new SearchRange(range.start.line, range.start.character, range.end.line, range.end.character);
}

function searchRangeToRange(range: SearchRange): Range {
return new Range(range.startLineNumber, range.startColumn, range.endLineNumber, range.endColumn);
}

export class Position {
constructor(public readonly line, public readonly character) { }

isBefore(other: Position): boolean { return false; }
isBeforeOrEqual(other: Position): boolean { return false; }
isAfter(other: Position): boolean { return false; }
isAfterOrEqual(other: Position): boolean { return false; }
isEqual(other: Position): boolean { return false; }
compareTo(other: Position): number { return 0; }
translate(lineDelta?: number, characterDelta?: number): Position;
translate(change: { lineDelta?: number; characterDelta?: number; }): Position;
translate(_?: any, _2?: any): Position { return new Position(0, 0); }
with(line?: number, character?: number): Position;
with(change: { line?: number; character?: number; }): Position;
with(_: any): Position { return new Position(0, 0); }
}

export class Range {
readonly start: Position;
readonly end: Position;

constructor(startLine: number, startCol: number, endLine: number, endCol: number) {
this.start = new Position(startLine, startCol);
this.end = new Position(endLine, endCol);
}

isEmpty: boolean;
isSingleLine: boolean;
contains(positionOrRange: Position | Range): boolean { return false; }
isEqual(other: Range): boolean { return false; }
intersection(range: Range): Range | undefined { return undefined; }
union(other: Range): Range { return new Range(0, 0, 0, 0); }

with(start?: Position, end?: Position): Range;
with(change: { start?: Position, end?: Position }): Range;
with(_: any): Range { return new Range(0, 0, 0, 0); }
}

export interface IOutputChannel {
appendLine(msg: string): void;
}

export class OutputChannel implements IOutputChannel {
constructor(@ILogService private logService: ILogService) { }

appendLine(msg: string): void {
this.logService.debug('RipgrepSearchEH#search', msg);
}
}
@@ -1,560 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';

import * as cp from 'child_process';
import { EventEmitter } from 'events';
import * as path from 'path';
import { NodeStringDecoder, StringDecoder } from 'string_decoder';
import * as glob from 'vs/base/common/glob';
import * as objects from 'vs/base/common/objects';
import * as paths from 'vs/base/common/paths';
import * as platform from 'vs/base/common/platform';
import * as strings from 'vs/base/common/strings';
import { TPromise } from 'vs/base/common/winjs.base';
import * as encoding from 'vs/base/node/encoding';
import * as extfs from 'vs/base/node/extfs';
import { IProgress } from 'vs/platform/search/common/search';
import { rgPath } from 'vscode-ripgrep';
import { FileMatch, IFolderSearch, IRawSearch, ISerializedFileMatch, LineMatch, ISerializedSearchSuccess } from './search';

// If vscode-ripgrep is in an .asar file, then the binary is unpacked.
const rgDiskPath = rgPath.replace(/\bnode_modules\.asar\b/, 'node_modules.asar.unpacked');

export class RipgrepEngine {
private isDone = false;
private rgProc: cp.ChildProcess;
private killRgProcFn: (code?: number) => void;
private postProcessExclusions: glob.ParsedExpression;

private ripgrepParser: RipgrepParser;

private resultsHandledP: TPromise<any> = TPromise.wrap(null);

constructor(private config: IRawSearch) {
this.killRgProcFn = () => this.rgProc && this.rgProc.kill();
}

cancel(): void {
this.isDone = true;
this.ripgrepParser.cancel();
this.rgProc.kill();
}

// TODO@Rob - make promise-based once the old search is gone, and I don't need them to have matching interfaces anymore
search(onResult: (match: ISerializedFileMatch) => void, onMessage: (message: IProgress) => void, done: (error: Error, complete: ISerializedSearchSuccess) => void): void {
if (!this.config.folderQueries.length && !this.config.extraFiles.length) {
process.removeListener('exit', this.killRgProcFn);
done(null, {
type: 'success',
limitHit: false,
stats: null
});
return;
}

const rgArgs = getRgArgs(this.config);
if (rgArgs.siblingClauses) {
this.postProcessExclusions = glob.parseToAsync(rgArgs.siblingClauses, { trimForExclusions: true });
}

const cwd = platform.isWindows ? 'c:/' : '/';
process.nextTick(() => { // Allow caller to register progress callback
const escapedArgs = rgArgs.args
.map(arg => arg.match(/^-/) ? arg : `'${arg}'`)
.join(' ');

let rgCmd = `rg ${escapedArgs}\n - cwd: ${cwd}`;
if (rgArgs.siblingClauses) {
rgCmd += `\n - Sibling clauses: ${JSON.stringify(rgArgs.siblingClauses)}`;
}

onMessage({ message: rgCmd });
});
this.rgProc = cp.spawn(rgDiskPath, rgArgs.args, { cwd });
process.once('exit', this.killRgProcFn);

this.ripgrepParser = new RipgrepParser(this.config.maxResults, cwd, this.config.extraFiles);
this.ripgrepParser.on('result', (match: ISerializedFileMatch) => {
if (this.postProcessExclusions) {
const handleResultP = (<TPromise<string>>this.postProcessExclusions(match.path, undefined, glob.hasSiblingPromiseFn(() => getSiblings(match.path))))
.then(globMatch => {
if (!globMatch) {
onResult(match);
}
});

this.resultsHandledP = TPromise.join([this.resultsHandledP, handleResultP]);
} else {
onResult(match);
}
});
this.ripgrepParser.on('hitLimit', () => {
this.cancel();
process.removeListener('exit', this.killRgProcFn);
done(null, {
type: 'success',
limitHit: true,
stats: null
});
});

this.rgProc.stdout.on('data', data => {
this.ripgrepParser.handleData(data);
});

let gotData = false;
this.rgProc.stdout.once('data', () => gotData = true);

let stderr = '';
this.rgProc.stderr.on('data', data => {
const message = data.toString();
onMessage({ message });
stderr += message;
});

this.rgProc.on('close', code => {
// Trigger last result, then wait on async result handling
this.ripgrepParser.flush();
this.resultsHandledP.then(() => {
this.rgProc = null;
if (!this.isDone) {
this.isDone = true;
let displayMsg: string;
process.removeListener('exit', this.killRgProcFn);
if (stderr && !gotData && (displayMsg = rgErrorMsgForDisplay(stderr))) {
done(new Error(displayMsg), {
type: 'success',
limitHit: false,
stats: null
});
} else {
done(null, {
type: 'success',
limitHit: false,
stats: null
});
}
}
});
});
}
}

/**
* Read the first line of stderr and return an error for display or undefined, based on a whitelist.
* Ripgrep produces stderr output which is not from a fatal error, and we only want the search to be
* "failed" when a fatal error was produced.
*/
export function rgErrorMsgForDisplay(msg: string): string | undefined {
const firstLine = msg.split('\n')[0].trim();

if (strings.startsWith(firstLine, 'Error parsing regex')) {
return firstLine;
}

if (strings.startsWith(firstLine, 'error parsing glob') ||
strings.startsWith(firstLine, 'unsupported encoding')) {
// Uppercase first letter
return firstLine.charAt(0).toUpperCase() + firstLine.substr(1);
}

if (firstLine === `Literal '\\n' not allowed.`) {
// I won't localize this because none of the Ripgrep error messages are localized
return `Literal '\\n' currently not supported`;
}

if (strings.startsWith(firstLine, 'Literal ')) {
// Other unsupported chars
return firstLine;
}

return undefined;
}

export class RipgrepParser extends EventEmitter {
private static readonly RESULT_REGEX = /^\u001b\[0m(\d+)\u001b\[0m:(.*)(\r?)/;
private static readonly FILE_REGEX = /^\u001b\[0m(.+)\u001b\[0m$/;

public static readonly MATCH_START_MARKER = '\u001b[0m\u001b[31m';
public static readonly MATCH_END_MARKER = '\u001b[0m';

private fileMatch: FileMatch;
private remainder: string;
private isDone: boolean;
private stringDecoder: NodeStringDecoder;
private extraSearchFiles: string[];

private numResults = 0;

constructor(private maxResults: number, private rootFolder: string, extraFiles?: string[]) {
super();
this.stringDecoder = new StringDecoder();

this.extraSearchFiles = extraFiles || [];
}

public cancel(): void {
this.isDone = true;
}

public flush(): void {
this.handleDecodedData(this.stringDecoder.end());

if (this.fileMatch) {
this.onResult();
}
}

public handleData(data: Buffer | string): void {
const dataStr = typeof data === 'string' ? data : this.stringDecoder.write(data);
this.handleDecodedData(dataStr);
}

private handleDecodedData(decodedData: string): void {
// If the previous data chunk didn't end in a newline, prepend it to this chunk
const dataStr = this.remainder ?
this.remainder + decodedData :
decodedData;

const dataLines: string[] = dataStr.split(/\r\n|\n/);
this.remainder = dataLines[dataLines.length - 1] ? dataLines.pop() : null;

for (let l = 0; l < dataLines.length; l++) {
const outputLine = dataLines[l].trim();
if (this.isDone) {
break;
}

let r: RegExpMatchArray;
if (r = outputLine.match(RipgrepParser.RESULT_REGEX)) {
const lineNum = parseInt(r[1]) - 1;
let matchText = r[2];

// workaround https://github.com/BurntSushi/ripgrep/issues/416
// If the match line ended with \r, append a match end marker so the match isn't lost
if (r[3]) {
matchText += RipgrepParser.MATCH_END_MARKER;
}

// Line is a result - add to collected results for the current file path
this.handleMatchLine(outputLine, lineNum, matchText);
} else if (r = outputLine.match(RipgrepParser.FILE_REGEX)) {
// Line is a file path - send all collected results for the previous file path
if (this.fileMatch) {
this.onResult();
}

this.fileMatch = this.getFileMatch(r[1]);
} else {
// Line is empty (or malformed)
}
}
}
|
||||
|
||||
private getFileMatch(relativeOrAbsolutePath: string): FileMatch {
|
||||
const absPath = path.isAbsolute(relativeOrAbsolutePath) ?
|
||||
relativeOrAbsolutePath :
|
||||
path.join(this.rootFolder, relativeOrAbsolutePath);
|
||||
|
||||
return new FileMatch(absPath);
|
||||
}
|
||||
|
||||
private handleMatchLine(outputLine: string, lineNum: number, text: string): void {
|
||||
if (lineNum === 0) {
|
||||
text = strings.stripUTF8BOM(text);
|
||||
}
|
||||
|
||||
const lineMatch = new LineMatch(text, lineNum);
|
||||
if (!this.fileMatch) {
|
||||
// When searching a single file and no folderQueries, rg does not print the file line, so create it here
|
||||
const singleFile = this.extraSearchFiles[0];
|
||||
if (!singleFile) {
|
||||
throw new Error('Got match line for unknown file');
|
||||
}
|
||||
|
||||
this.fileMatch = this.getFileMatch(singleFile);
|
||||
}
|
||||
|
||||
this.fileMatch.addMatch(lineMatch);
|
||||
|
||||
let lastMatchEndPos = 0;
|
||||
let matchTextStartPos = -1;
|
||||
|
||||
// Track positions with color codes subtracted - offsets in the final text preview result
|
||||
let matchTextStartRealIdx = -1;
|
||||
let textRealIdx = 0;
|
||||
let hitLimit = false;
|
||||
|
||||
const realTextParts: string[] = [];
|
||||
|
||||
for (let i = 0; i < text.length - (RipgrepParser.MATCH_END_MARKER.length - 1);) {
|
||||
if (text.substr(i, RipgrepParser.MATCH_START_MARKER.length) === RipgrepParser.MATCH_START_MARKER) {
|
||||
// Match start
|
||||
const chunk = text.slice(lastMatchEndPos, i);
|
||||
realTextParts.push(chunk);
|
||||
i += RipgrepParser.MATCH_START_MARKER.length;
|
||||
matchTextStartPos = i;
|
||||
matchTextStartRealIdx = textRealIdx;
|
||||
} else if (text.substr(i, RipgrepParser.MATCH_END_MARKER.length) === RipgrepParser.MATCH_END_MARKER) {
|
||||
// Match end
|
||||
const chunk = text.slice(matchTextStartPos, i);
|
||||
realTextParts.push(chunk);
|
||||
if (!hitLimit) {
|
||||
lineMatch.addMatch(matchTextStartRealIdx, textRealIdx - matchTextStartRealIdx);
|
||||
}
|
||||
|
||||
matchTextStartPos = -1;
|
||||
matchTextStartRealIdx = -1;
|
||||
i += RipgrepParser.MATCH_END_MARKER.length;
|
||||
lastMatchEndPos = i;
|
||||
this.numResults++;
|
||||
|
||||
// Check hit maxResults limit
|
||||
if (this.numResults >= this.maxResults) {
|
||||
// Finish the line, then report the result below
|
||||
hitLimit = true;
|
||||
}
|
||||
} else {
|
||||
i++;
|
||||
textRealIdx++;
|
||||
}
|
||||
}
|
||||
|
||||
const chunk = text.slice(lastMatchEndPos);
|
||||
realTextParts.push(chunk);
|
||||
|
||||
// Replace preview with version without color codes
|
||||
const preview = realTextParts.join('');
|
||||
lineMatch.preview = preview;
|
||||
|
||||
if (hitLimit) {
|
||||
this.cancel();
|
||||
this.onResult();
|
||||
this.emit('hitLimit');
|
||||
}
|
||||
}
|
||||
|
||||
private onResult(): void {
|
||||
this.emit('result', this.fileMatch.serialize());
|
||||
this.fileMatch = null;
|
||||
}
|
||||
}
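
// Illustrative usage sketch (not part of the original change) - `rgProc` is an assumed
// ChildProcess running rg with the --color ansi arguments built by getRgArgs below:
const exampleParser = new RipgrepParser(10000, '/repo');
exampleParser.on('result', (m: ISerializedFileMatch) => console.log(m.path, m.numMatches));
exampleParser.on('hitLimit', () => rgProc.kill());
rgProc.stdout.on('data', (d: Buffer) => exampleParser.handleData(d));
rgProc.on('close', () => exampleParser.flush());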

export interface IRgGlobResult {
	globArgs: string[];
	siblingClauses: glob.IExpression;
}

export function foldersToRgExcludeGlobs(folderQueries: IFolderSearch[], globalExclude: glob.IExpression, excludesToSkip?: Set<string>, absoluteGlobs = true): IRgGlobResult {
	const globArgs: string[] = [];
	let siblingClauses: glob.IExpression = {};
	folderQueries.forEach(folderQuery => {
		const totalExcludePattern = objects.assign({}, folderQuery.excludePattern || {}, globalExclude || {});
		const result = globExprsToRgGlobs(totalExcludePattern, absoluteGlobs && folderQuery.folder, excludesToSkip);
		globArgs.push(...result.globArgs);
		if (result.siblingClauses) {
			siblingClauses = objects.assign(siblingClauses, result.siblingClauses);
		}
	});

	return { globArgs, siblingClauses };
}

export function foldersToIncludeGlobs(folderQueries: IFolderSearch[], globalInclude: glob.IExpression, absoluteGlobs = true): string[] {
	const globArgs: string[] = [];
	folderQueries.forEach(folderQuery => {
		const totalIncludePattern = objects.assign({}, globalInclude || {}, folderQuery.includePattern || {});
		const result = globExprsToRgGlobs(totalIncludePattern, absoluteGlobs && folderQuery.folder);
		globArgs.push(...result.globArgs);
	});

	return globArgs;
}

function globExprsToRgGlobs(patterns: glob.IExpression, folder?: string, excludesToSkip?: Set<string>): IRgGlobResult {
	const globArgs: string[] = [];
	let siblingClauses: glob.IExpression = null;
	Object.keys(patterns)
		.forEach(key => {
			if (excludesToSkip && excludesToSkip.has(key)) {
				return;
			}

			if (!key) {
				return;
			}

			const value = patterns[key];
			key = trimTrailingSlash(folder ? getAbsoluteGlob(folder, key) : key);

			// glob.ts requires forward slashes, but a UNC path still must start with \\
			// #38165 and #38151
			if (strings.startsWith(key, '\\\\')) {
				key = '\\\\' + key.substr(2).replace(/\\/g, '/');
			} else {
				key = key.replace(/\\/g, '/');
			}

			if (typeof value === 'boolean' && value) {
				globArgs.push(fixDriveC(key));
			} else if (value && value.when) {
				if (!siblingClauses) {
					siblingClauses = {};
				}

				siblingClauses[key] = value;
			}
		});

	return { globArgs, siblingClauses };
}

/**
 * Resolves a glob like "node_modules/**" in "/foo/bar" to "/foo/bar/node_modules/**".
 * Special cases C:/foo paths to write the glob like /foo instead - see https://github.com/BurntSushi/ripgrep/issues/530.
 *
 * Exported for testing
 */
export function getAbsoluteGlob(folder: string, key: string): string {
	return paths.isAbsolute(key) ?
		key :
		path.join(folder, key);
}

function trimTrailingSlash(str: string): string {
	str = strings.rtrim(str, '\\');
	return strings.rtrim(str, '/');
}

export function fixDriveC(path: string): string {
	const root = paths.getRoot(path);
	return root.toLowerCase() === 'c:/' ?
		path.replace(/^c:[/\\]/i, '/') :
		path;
}
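
// Illustrative check (not part of the original change) of how the two helpers compose;
// the literal inputs are invented for the example:
const exampleGlob = fixDriveC(getAbsoluteGlob('/foo/bar', 'node_modules/**'));
// -> '/foo/bar/node_modules/**'; with a 'C:/foo/bar' folder, the 'c:/' root would be
// rewritten to '/', per the ripgrep issue referenced above.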

function getRgArgs(config: IRawSearch) {
	const args = ['--hidden', '--heading', '--line-number', '--color', 'ansi', '--colors', 'path:none', '--colors', 'line:none', '--colors', 'match:fg:red', '--colors', 'match:style:nobold'];
	args.push(config.contentPattern.isCaseSensitive ? '--case-sensitive' : '--ignore-case');

	// includePattern can't have siblingClauses
	foldersToIncludeGlobs(config.folderQueries, config.includePattern).forEach(globArg => {
		args.push('-g', globArg);
	});

	let siblingClauses: glob.IExpression;

	// Find excludes that are exactly the same in all folderQueries - e.g. from user settings, and that start with `**`.
	// To make the command shorter, don't resolve these against every folderQuery path - see #33189.
	const universalExcludes = findUniversalExcludes(config.folderQueries);
	const rgGlobs = foldersToRgExcludeGlobs(config.folderQueries, config.excludePattern, universalExcludes);
	rgGlobs.globArgs
		.forEach(rgGlob => args.push('-g', `!${rgGlob}`));
	if (universalExcludes) {
		universalExcludes
			.forEach(exclude => args.push('-g', `!${trimTrailingSlash(exclude)}`));
	}
	siblingClauses = rgGlobs.siblingClauses;

	if (config.maxFilesize) {
		args.push('--max-filesize', config.maxFilesize + '');
	}

	if (config.disregardIgnoreFiles) {
		// Don't use .gitignore or .ignore
		args.push('--no-ignore');
	} else {
		args.push('--no-ignore-parent');
	}

	// Follow symlinks
	if (!config.ignoreSymlinks) {
		args.push('--follow');
	}

	if (config.folderQueries[0]) {
		const folder0Encoding = config.folderQueries[0].fileEncoding;
		if (folder0Encoding && folder0Encoding !== 'utf8' && config.folderQueries.every(fq => fq.fileEncoding === folder0Encoding)) {
			args.push('--encoding', encoding.toCanonicalName(folder0Encoding));
		}
	}

	// Ripgrep handles -- as a -- arg separator. Only --.
	// - is ok, --- is ok, --some-flag is handled as query text. Need to special case.
	if (config.contentPattern.pattern === '--') {
		config.contentPattern.isRegExp = true;
		config.contentPattern.pattern = '\\-\\-';
	}

	let searchPatternAfterDoubleDashes: string;
	if (config.contentPattern.isWordMatch) {
		const regexp = strings.createRegExp(config.contentPattern.pattern, config.contentPattern.isRegExp, { wholeWord: config.contentPattern.isWordMatch });
		const regexpStr = regexp.source.replace(/\\\//g, '/'); // RegExp.source arbitrarily returns escaped slashes. Search and destroy.
		args.push('--regexp', regexpStr);
	} else if (config.contentPattern.isRegExp) {
		args.push('--regexp', fixRegexEndingPattern(config.contentPattern.pattern));
	} else {
		searchPatternAfterDoubleDashes = config.contentPattern.pattern;
		args.push('--fixed-strings');
	}

	args.push('--no-config');

	// Folder to search
	args.push('--');

	if (searchPatternAfterDoubleDashes) {
		// Put the query after --, in case the query starts with a dash
		args.push(searchPatternAfterDoubleDashes);
	}

	args.push(...config.folderQueries.map(q => q.folder));
	args.push(...config.extraFiles);

	return { args, siblingClauses };
}

function getSiblings(file: string): TPromise<string[]> {
	return new TPromise<string[]>((resolve, reject) => {
		extfs.readdir(path.dirname(file), (error: Error, files: string[]) => {
			if (error) {
				reject(error);
				return; // don't also resolve after rejecting
			}

			resolve(files);
		});
	});
}

function findUniversalExcludes(folderQueries: IFolderSearch[]): Set<string> {
	if (folderQueries.length < 2) {
		// Nothing to simplify
		return null;
	}

	const firstFolder = folderQueries[0];
	if (!firstFolder.excludePattern) {
		return null;
	}

	const universalExcludes = new Set<string>();
	Object.keys(firstFolder.excludePattern).forEach(key => {
		if (strings.startsWith(key, '**') && folderQueries.every(q => q.excludePattern && q.excludePattern[key] === true)) {
			universalExcludes.add(key);
		}
	});

	return universalExcludes;
}

// Exported for testing
export function fixRegexEndingPattern(pattern: string): string {
	// Replace an unescaped $ at the end of the pattern with \r?$
	// Match $ preceded by none or an even number of literal \
	return pattern.match(/([^\\]|^)(\\\\)*\$$/) ?
		pattern.replace(/\$$/, '\\r?$') :
		pattern;
}
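
// Illustrative check (not part of the original change) of the ending-pattern rewrite above:
console.log(fixRegexEndingPattern('foo$'));   // 'foo\r?$' - unescaped $ is rewritten
console.log(fixRegexEndingPattern('foo\\$')); // 'foo\$'   - escaped $ is left alone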

487	src/vs/workbench/services/search/node/ripgrepTextSearchEngine.ts	Normal file
@@ -0,0 +1,487 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as cp from 'child_process';
import { EventEmitter } from 'events';
import * as path from 'path';
import { NodeStringDecoder, StringDecoder } from 'string_decoder';
import { createRegExp, startsWith, startsWithUTF8BOM, stripUTF8BOM, escapeRegExpCharacters } from 'vs/base/common/strings';
import { URI } from 'vs/base/common/uri';
import { IExtendedExtensionSearchOptions, SearchError, SearchErrorCode, serializeSearchError } from 'vs/platform/search/common/search';
import * as vscode from 'vscode';
import { rgPath } from 'vscode-ripgrep';
import { anchorGlob, createTextSearchResult, IOutputChannel, Maybe, Range } from './ripgrepSearchUtils';

// If vscode-ripgrep is in an .asar file, then the binary is unpacked.
const rgDiskPath = rgPath.replace(/\bnode_modules\.asar\b/, 'node_modules.asar.unpacked');

export class RipgrepTextSearchEngine {

	constructor(private outputChannel: IOutputChannel) { }

	provideTextSearchResults(query: vscode.TextSearchQuery, options: vscode.TextSearchOptions, progress: vscode.Progress<vscode.TextSearchResult>, token: vscode.CancellationToken): Thenable<vscode.TextSearchComplete> {
		this.outputChannel.appendLine(`provideTextSearchResults ${query.pattern}, ${JSON.stringify({
			...options,
			...{
				folder: options.folder.toString()
			}
		})}`);

		return new Promise((resolve, reject) => {
			token.onCancellationRequested(() => cancel());

			const rgArgs = getRgArgs(query, options);

			const cwd = options.folder.fsPath;

			const escapedArgs = rgArgs
				.map(arg => arg.match(/^-/) ? arg : `'${arg}'`)
				.join(' ');
			this.outputChannel.appendLine(`rg ${escapedArgs}\n - cwd: ${cwd}`);

			let rgProc: Maybe<cp.ChildProcess> = cp.spawn(rgDiskPath, rgArgs, { cwd });
			rgProc.on('error', e => {
				console.error(e);
				this.outputChannel.appendLine('Error: ' + (e && e.message));
				reject(serializeSearchError(new SearchError(e && e.message, SearchErrorCode.rgProcessError)));
			});

			let gotResult = false;
			const ripgrepParser = new RipgrepParser(options.maxResults, cwd, options.previewOptions);
			ripgrepParser.on('result', (match: vscode.TextSearchResult) => {
				gotResult = true;
				progress.report(match);
			});

			let isDone = false;
			const cancel = () => {
				isDone = true;

				if (rgProc) {
					rgProc.kill();
				}

				if (ripgrepParser) {
					ripgrepParser.cancel();
				}
			};

			let limitHit = false;
			ripgrepParser.on('hitLimit', () => {
				limitHit = true;
				cancel();
			});

			rgProc.stdout.on('data', data => {
				ripgrepParser.handleData(data);
			});

			let gotData = false;
			rgProc.stdout.once('data', () => gotData = true);

			let stderr = '';
			rgProc.stderr.on('data', data => {
				const message = data.toString();
				this.outputChannel.appendLine(message);
				stderr += message;
			});

			rgProc.on('close', () => {
				this.outputChannel.appendLine(gotData ? 'Got data from stdout' : 'No data from stdout');
				this.outputChannel.appendLine(gotResult ? 'Got result from parser' : 'No result from parser');
				this.outputChannel.appendLine('');
				if (isDone) {
					resolve({ limitHit });
				} else {
					// Trigger last result
					ripgrepParser.flush();
					rgProc = null;
					let searchError: Maybe<SearchError>;
					if (stderr && !gotData && (searchError = rgErrorMsgForDisplay(stderr))) {
						reject(serializeSearchError(new SearchError(searchError.message, searchError.code)));
					} else {
						resolve({ limitHit });
					}
				}
			});
		});
	}
}

/**
 * Read the first line of stderr and return an error for display or undefined, based on a whitelist.
 * Ripgrep produces stderr output which is not from a fatal error, and we only want the search to be
 * "failed" when a fatal error was produced.
 */
export function rgErrorMsgForDisplay(msg: string): Maybe<SearchError> {
	const firstLine = msg.split('\n')[0].trim();

	if (startsWith(firstLine, 'regex parse error')) {
		return new SearchError('Regex parse error', SearchErrorCode.regexParseError);
	}

	let match = firstLine.match(/grep config error: unknown encoding: (.*)/);
	if (match) {
		return new SearchError(`Unknown encoding: ${match[1]}`, SearchErrorCode.unknownEncoding);
	}

	if (startsWith(firstLine, 'error parsing glob')) {
		// Uppercase first letter
		return new SearchError(firstLine.charAt(0).toUpperCase() + firstLine.substr(1), SearchErrorCode.globParseError);
	}

	if (startsWith(firstLine, 'the literal')) {
		// Uppercase first letter
		return new SearchError(firstLine.charAt(0).toUpperCase() + firstLine.substr(1), SearchErrorCode.invalidLiteral);
	}

	return undefined;
}

export class RipgrepParser extends EventEmitter {
	private remainder = '';
	private isDone = false;
	private hitLimit = false;
	private stringDecoder: NodeStringDecoder;

	private numResults = 0;

	constructor(private maxResults: number, private rootFolder: string, private previewOptions?: vscode.TextSearchPreviewOptions) {
		super();
		this.stringDecoder = new StringDecoder();
	}

	public cancel(): void {
		this.isDone = true;
	}

	public flush(): void {
		this.handleDecodedData(this.stringDecoder.end());
	}

	on(event: 'result', listener: (result: vscode.TextSearchResult) => void);
	on(event: 'hitLimit', listener: () => void);
	on(event: string, listener: (...args: any[]) => void) {
		super.on(event, listener);
	}

	public handleData(data: Buffer | string): void {
		if (this.isDone) {
			return;
		}

		const dataStr = typeof data === 'string' ? data : this.stringDecoder.write(data);
		this.handleDecodedData(dataStr);
	}

	private handleDecodedData(decodedData: string): void {
		// check for newline before appending to remainder
		let newlineIdx = decodedData.indexOf('\n');

		// If the previous data chunk didn't end in a newline, prepend it to this chunk
		const dataStr = this.remainder + decodedData;

		if (newlineIdx >= 0) {
			newlineIdx += this.remainder.length;
		} else {
			// Shortcut
			this.remainder = dataStr;
			return;
		}

		let prevIdx = 0;
		while (newlineIdx >= 0) {
			this.handleLine(dataStr.substring(prevIdx, newlineIdx).trim());
			prevIdx = newlineIdx + 1;
			newlineIdx = dataStr.indexOf('\n', prevIdx);
		}

		this.remainder = dataStr.substring(prevIdx).trim();
	}

	private handleLine(outputLine: string): void {
		if (this.isDone || !outputLine) {
			return;
		}

		let parsedLine: IRgMessage;
		try {
			parsedLine = JSON.parse(outputLine);
		} catch (e) {
			throw new Error(`malformed line from rg: ${outputLine}`);
		}

		if (parsedLine.type === 'match') {
			const matchPath = bytesOrTextToString(parsedLine.data.path);
			const uri = URI.file(path.join(this.rootFolder, matchPath));
			const result = this.createTextSearchMatch(parsedLine.data, uri);
			this.onResult(result);

			if (this.hitLimit) {
				this.cancel();
				this.emit('hitLimit');
			}
		} else if (parsedLine.type === 'context') {
			const contextPath = bytesOrTextToString(parsedLine.data.path);
			const uri = URI.file(path.join(this.rootFolder, contextPath));
			const result = this.createTextSearchContext(parsedLine.data, uri);
			result.forEach(r => this.onResult(r));
		}
	}

	private createTextSearchMatch(data: IRgMatch, uri: vscode.Uri): vscode.TextSearchMatch {
		const lineNumber = data.line_number - 1;
		const fullText = bytesOrTextToString(data.lines);
		const fullTextBytes = Buffer.from(fullText);

		let prevMatchEnd = 0;
		let prevMatchEndCol = 0;
		let prevMatchEndLine = lineNumber;
		const ranges = data.submatches.map((match, i) => {
			if (this.hitLimit) {
				return null;
			}

			this.numResults++;
			if (this.numResults >= this.maxResults) {
				// Finish the line, then report the result below
				this.hitLimit = true;
			}

			let matchText = bytesOrTextToString(match.match);
			const inBetweenChars = fullTextBytes.slice(prevMatchEnd, match.start).toString().length;
			let startCol = prevMatchEndCol + inBetweenChars;

			const stats = getNumLinesAndLastNewlineLength(matchText);
			let startLineNumber = prevMatchEndLine;
			let endLineNumber = stats.numLines + startLineNumber;
			let endCol = stats.numLines > 0 ?
				stats.lastLineLength :
				stats.lastLineLength + startCol;

			if (lineNumber === 0 && i === 0 && startsWithUTF8BOM(matchText)) {
				matchText = stripUTF8BOM(matchText);
				startCol -= 3;
				endCol -= 3;
			}

			prevMatchEnd = match.end;
			prevMatchEndCol = endCol;
			prevMatchEndLine = endLineNumber;

			return new Range(startLineNumber, startCol, endLineNumber, endCol);
		})
			.filter(r => !!r);

		return createTextSearchResult(uri, fullText, <Range[]>ranges, this.previewOptions);
	}

	private createTextSearchContext(data: IRgMatch, uri: URI): vscode.TextSearchContext[] {
		const text = bytesOrTextToString(data.lines);
		const startLine = data.line_number;
		return text
			.replace(/\r?\n$/, '')
			.split('\n')
			.map((line, i) => {
				return {
					text: line,
					uri,
					lineNumber: startLine + i
				};
			});
	}

	private onResult(match: vscode.TextSearchResult): void {
		this.emit('result', match);
	}
}

function bytesOrTextToString(obj: any): string {
	return obj.bytes ?
		Buffer.from(obj.bytes, 'base64').toString() :
		obj.text;
}
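
// Illustrative check (not part of the original change): rg's --json output encodes non-UTF-8
// data as base64 "bytes" and plain UTF-8 as "text"; both shapes decode to the same string:
console.log(bytesOrTextToString({ text: 'hello' }));                                  // 'hello'
console.log(bytesOrTextToString({ bytes: Buffer.from('hello').toString('base64') })); // 'hello'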

function getNumLinesAndLastNewlineLength(text: string): { numLines: number, lastLineLength: number } {
	const re = /\n/g;
	let numLines = 0;
	let lastNewlineIdx = -1;
	let match: ReturnType<typeof re.exec>;
	while (match = re.exec(text)) {
		numLines++;
		lastNewlineIdx = match.index;
	}

	const lastLineLength = lastNewlineIdx >= 0 ?
		text.length - lastNewlineIdx - 1 :
		text.length;

	return { numLines, lastLineLength };
}
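
// Illustrative check (not part of the original change) of the line-counting helper above:
console.log(getNumLinesAndLastNewlineLength('ab'));        // { numLines: 0, lastLineLength: 2 }
console.log(getNumLinesAndLastNewlineLength('ab\ncd\ne')); // { numLines: 2, lastLineLength: 1 }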

function getRgArgs(query: vscode.TextSearchQuery, options: vscode.TextSearchOptions): string[] {
	const args = ['--hidden'];
	args.push(query.isCaseSensitive ? '--case-sensitive' : '--ignore-case');

	options.includes
		.map(anchorGlob)
		.forEach(globArg => args.push('-g', globArg));

	options.excludes
		.map(anchorGlob)
		.forEach(rgGlob => args.push('-g', `!${rgGlob}`));

	if (options.maxFileSize) {
		args.push('--max-filesize', options.maxFileSize + '');
	}

	if (options.useIgnoreFiles) {
		args.push('--no-ignore-parent');
	} else {
		// Don't use .gitignore or .ignore
		args.push('--no-ignore');
	}

	if (options.followSymlinks) {
		args.push('--follow');
	}

	if (options.encoding && options.encoding !== 'utf8') {
		args.push('--encoding', options.encoding);
	}

	let pattern = query.pattern;

	// Ripgrep handles -- as a -- arg separator. Only --.
	// - is ok, --- is ok, --some-flag is also ok. Need to special case.
	if (pattern === '--') {
		query.isRegExp = true;
		pattern = '\\-\\-';
	}

	if (query.isMultiline && !query.isRegExp) {
		query.pattern = escapeRegExpCharacters(query.pattern);
		query.isRegExp = true;
	}

	if ((<IExtendedExtensionSearchOptions>options).usePCRE2) {
		args.push('--pcre2');

		if (query.isRegExp) {
			pattern = unicodeEscapesToPCRE2(pattern);
		}
	}

	let searchPatternAfterDoubleDashes: Maybe<string>;
	if (query.isWordMatch) {
		const regexp = createRegExp(pattern, !!query.isRegExp, { wholeWord: query.isWordMatch });
		const regexpStr = regexp.source.replace(/\\\//g, '/'); // RegExp.source arbitrarily returns escaped slashes. Search and destroy.
		args.push('--regexp', regexpStr);
	} else if (query.isRegExp) {
		let fixedRegexpQuery = fixRegexEndingPattern(query.pattern);
		fixedRegexpQuery = fixRegexNewline(fixedRegexpQuery);
		fixedRegexpQuery = fixNewline(fixedRegexpQuery);
		fixedRegexpQuery = fixRegexCRMatchingNonWordClass(fixedRegexpQuery, !!query.isMultiline);
		fixedRegexpQuery = fixRegexCRMatchingWhitespaceClass(fixedRegexpQuery, !!query.isMultiline);
		args.push('--regexp', fixedRegexpQuery);
	} else {
		searchPatternAfterDoubleDashes = pattern;
		args.push('--fixed-strings');
	}

	args.push('--no-config');
	if (!options.useGlobalIgnoreFiles) {
		args.push('--no-ignore-global');
	}

	args.push('--json');

	if (query.isMultiline) {
		args.push('--multiline');
	}

	if (options.beforeContext) {
		args.push('--before-context', options.beforeContext + '');
	}

	if (options.afterContext) {
		args.push('--after-context', options.afterContext + '');
	}

	// Folder to search
	args.push('--');

	if (searchPatternAfterDoubleDashes) {
		// Put the query after --, in case the query starts with a dash
		args.push(searchPatternAfterDoubleDashes);
	}

	args.push('.');

	return args;
}

export function unicodeEscapesToPCRE2(pattern: string): string {
	const reg = /((?:[^\\]|^)(?:\\\\)*)\\u([a-z0-9]{4})(?!\d)/g;
	// Replace an unescaped \uXXXX escape with PCRE2's \x{XXXX} syntax.
	// Match \u preceded by none or an even number of literal \
	while (pattern.match(reg)) {
		pattern = pattern.replace(reg, `$1\\x{$2}`);
	}

	return pattern;
}
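
// Illustrative check (not part of the original change): JS-style unicode escapes become PCRE2 syntax:
console.log(unicodeEscapesToPCRE2('caf\\u00e9')); // 'caf\x{00e9}'
console.log(unicodeEscapesToPCRE2('\\\\u0041'));  // '\\u0041' - escaped backslash, left untouched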

export interface IRgMessage {
	type: 'match' | 'context' | string;
	data: IRgMatch;
}

export interface IRgMatch {
	path: IRgBytesOrText;
	lines: IRgBytesOrText;
	line_number: number;
	absolute_offset: number;
	submatches: IRgSubmatch[];
}

export interface IRgSubmatch {
	match: IRgBytesOrText;
	start: number;
	end: number;
}

export type IRgBytesOrText = { bytes: string } | { text: string };
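
// Illustrative example (not part of the original change): a typical `rg --json` match message
// that parses into the IRgMessage shape above (the values are invented for the example):
const exampleRgLine: IRgMessage = JSON.parse(
	'{"type":"match","data":{"path":{"text":"src/a.ts"},"lines":{"text":"const foo = 1;\\n"},' +
	'"line_number":3,"absolute_offset":42,"submatches":[{"match":{"text":"foo"},"start":6,"end":9}]}}'
);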

export function fixRegexEndingPattern(pattern: string): string {
	// Replace an unescaped $ at the end of the pattern with \r?$
	// Match $ preceded by none or an even number of literal \
	return pattern.match(/([^\\]|^)(\\\\)*\$$/) ?
		pattern.replace(/\$$/, '\\r?$') :
		pattern;
}

export function fixRegexNewline(pattern: string): string {
	// Replace an unescaped \n in the pattern with \r?\n
	// Match \n preceded by none or an even number of literal \
	return pattern.replace(/([^\\]|^)(\\\\)*\\n/g, '$1$2\\r?\\n');
}

export function fixRegexCRMatchingWhitespaceClass(pattern: string, isMultiline: boolean): string {
	return isMultiline ?
		pattern.replace(/([^\\]|^)((?:\\\\)*)\\s/g, '$1$2(\\r?\\n|[^\\S\\r])') :
		pattern.replace(/([^\\]|^)((?:\\\\)*)\\s/g, '$1$2[ \\t\\f]');
}

export function fixRegexCRMatchingNonWordClass(pattern: string, isMultiline: boolean): string {
	return isMultiline ?
		pattern.replace(/([^\\]|^)((?:\\\\)*)\\W/g, '$1$2(\\r?\\n|[^\\w\\r])') :
		pattern.replace(/([^\\]|^)((?:\\\\)*)\\W/g, '$1$2[^\\w\\r]');
}

export function fixNewline(pattern: string): string {
	return pattern.replace(/\n/g, '\\r?\\n');
}
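
// Illustrative checks (not part of the original change) of the \r-tolerance rewrites above:
console.log(fixRegexNewline('a\\nb'));                      // 'a\r?\nb' as a regex source string
console.log(fixNewline('a\nb'));                            // a literal newline also becomes \r?\n
console.log(fixRegexCRMatchingNonWordClass('\\W', false));  // '[^\w\r]'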
@@ -3,38 +3,10 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { TPromise } from 'vs/base/common/winjs.base';
import { IExpression } from 'vs/base/common/glob';
import { IProgress, ILineMatch, IPatternInfo, ISearchStats } from 'vs/platform/search/common/search';
import { ITelemetryData } from 'vs/platform/telemetry/common/telemetry';
import { Event } from 'vs/base/common/event';

export interface IFolderSearch {
	folder: string;
	excludePattern?: IExpression;
	includePattern?: IExpression;
	fileEncoding?: string;
	disregardIgnoreFiles?: boolean;
}

export interface IRawSearch {
	folderQueries: IFolderSearch[];
	ignoreSymlinks?: boolean;
	extraFiles?: string[];
	filePattern?: string;
	excludePattern?: IExpression;
	includePattern?: IExpression;
	contentPattern?: IPatternInfo;
	maxResults?: number;
	exists?: boolean;
	sortByScore?: boolean;
	cacheKey?: string;
	maxFilesize?: number;
	useRipgrep?: boolean;
	disregardIgnoreFiles?: boolean;
}
import * as glob from 'vs/base/common/glob';
import { IFileSearchStats, IFolderQuery, IProgress, IRawFileQuery, IRawTextQuery, ISearchEngineStats, ISearchQuery, ITextSearchMatch, ITextSearchStats, ITextSearchResult } from 'vs/platform/search/common/search';
import { ITelemetryData } from 'vs/platform/telemetry/common/telemetry';

export interface ITelemetryEvent {
	eventName: string;
@@ -42,10 +14,9 @@ export interface ITelemetryEvent {
}

export interface IRawSearchService {
	fileSearch(search: IRawSearch): Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
	textSearch(search: IRawSearch): Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
	clearCache(cacheKey: string): TPromise<void>;
	readonly onTelemetry: Event<ITelemetryEvent>;
	fileSearch(search: IRawFileQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
	textSearch(search: IRawTextQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
	clearCache(cacheKey: string): Thenable<void>;
}

export interface IRawFileMatch {
@@ -56,14 +27,19 @@ export interface IRawFileMatch {
}

export interface ISearchEngine<T> {
	search: (onResult: (matches: T) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISerializedSearchSuccess) => void) => void;
	search: (onResult: (matches: T) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISearchEngineSuccess) => void) => void;
	cancel: () => void;
}

export interface ISerializedSearchSuccess {
	type: 'success';
	limitHit: boolean;
	stats: ISearchStats;
	stats: IFileSearchStats | ITextSearchStats | null;
}

export interface ISearchEngineSuccess {
	limitHit: boolean;
	stats: ISearchEngineStats;
}

export interface ISerializedSearchError {
@@ -90,9 +66,13 @@ export function isSerializedSearchSuccess(arg: ISerializedSearchComplete): arg i
	return arg.type === 'success';
}

export function isSerializedFileMatch(arg: ISerializedSearchProgressItem): arg is ISerializedFileMatch {
	return !!(<ISerializedFileMatch>arg).path;
}

export interface ISerializedFileMatch {
	path: string;
	lineMatches?: ILineMatch[];
	path?: string;
	results?: ITextSearchResult[];
	numMatches?: number;
}

@@ -103,56 +83,121 @@ export type IFileSearchProgressItem = IRawFileMatch | IRawFileMatch[] | IProgres

export class FileMatch implements ISerializedFileMatch {
	path: string;
	lineMatches: LineMatch[];
	results: ITextSearchMatch[];

	constructor(path: string) {
		this.path = path;
		this.lineMatches = [];
		this.results = [];
	}

	addMatch(lineMatch: LineMatch): void {
		this.lineMatches.push(lineMatch);
	addMatch(match: ITextSearchMatch): void {
		this.results.push(match);
	}

	serialize(): ISerializedFileMatch {
		let lineMatches: ILineMatch[] = [];
		let numMatches = 0;

		for (let i = 0; i < this.lineMatches.length; i++) {
			numMatches += this.lineMatches[i].offsetAndLengths.length;
			lineMatches.push(this.lineMatches[i].serialize());
		}

		return {
			path: this.path,
			lineMatches,
			numMatches
			results: this.results,
			numMatches: this.results.length
		};
	}
}

export class LineMatch implements ILineMatch {
	preview: string;
	lineNumber: number;
	offsetAndLengths: number[][];
/**
 * Computes the patterns that the provider handles. Discards sibling clauses and 'false' patterns
 */
export function resolvePatternsForProvider(globalPattern: glob.IExpression | undefined, folderPattern: glob.IExpression | undefined): string[] {
	const merged = {
		...(globalPattern || {}),
		...(folderPattern || {})
	};

	constructor(preview: string, lineNumber: number) {
		this.preview = preview.replace(/(\r|\n)*$/, '');
		this.lineNumber = lineNumber;
		this.offsetAndLengths = [];
	}
	return Object.keys(merged)
		.filter(key => {
			const value = merged[key];
			return typeof value === 'boolean' && value;
		});
}
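
// Illustrative check (not part of the original change): 'false' and sibling-clause patterns drop out:
console.log(resolvePatternsForProvider(
	{ '**/node_modules': true, '**/*.log': false },
	{ '**/out': true, '**/*.ts': { when: '$(basename).js' } }
)); // ['**/node_modules', '**/out']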

	addMatch(offset: number, length: number): void {
		this.offsetAndLengths.push([offset, length]);
	}
export class QueryGlobTester {

	serialize(): ILineMatch {
		const result = {
			preview: this.preview,
			lineNumber: this.lineNumber,
			offsetAndLengths: this.offsetAndLengths
	private _excludeExpression: glob.IExpression;
	private _parsedExcludeExpression: glob.ParsedExpression;

	private _parsedIncludeExpression: glob.ParsedExpression;

	constructor(config: ISearchQuery, folderQuery: IFolderQuery) {
		this._excludeExpression = {
			...(config.excludePattern || {}),
			...(folderQuery.excludePattern || {})
		};
		this._parsedExcludeExpression = glob.parse(this._excludeExpression);

		return result;
		// Empty includeExpression means include nothing, so no {} shortcuts
		let includeExpression: glob.IExpression | undefined = config.includePattern;
		if (folderQuery.includePattern) {
			if (includeExpression) {
				includeExpression = {
					...includeExpression,
					...folderQuery.includePattern
				};
			} else {
				includeExpression = folderQuery.includePattern;
			}
		}

		if (includeExpression) {
			this._parsedIncludeExpression = glob.parse(includeExpression);
		}
	}

	/**
	 * Guaranteed sync - siblingsFn should not return a promise.
	 */
	public includedInQuerySync(testPath: string, basename?: string, hasSibling?: (name: string) => boolean): boolean {
		if (this._parsedExcludeExpression && this._parsedExcludeExpression(testPath, basename, hasSibling)) {
			return false;
		}

		if (this._parsedIncludeExpression && !this._parsedIncludeExpression(testPath, basename, hasSibling)) {
			return false;
		}

		return true;
	}

	/**
	 * Guaranteed async.
	 */
	public includedInQuery(testPath: string, basename?: string, hasSibling?: (name: string) => boolean | Promise<boolean>): Promise<boolean> {
		const excludeP = this._parsedExcludeExpression ?
			Promise.resolve(this._parsedExcludeExpression(testPath, basename, hasSibling)).then(result => !!result) :
			Promise.resolve(false);

		return excludeP.then(excluded => {
			if (excluded) {
				return false;
			}

			return this._parsedIncludeExpression ?
				Promise.resolve(this._parsedIncludeExpression(testPath, basename, hasSibling)).then(result => !!result) :
				Promise.resolve(true);
		}).then(included => {
			return included;
		});
	}

	public hasSiblingExcludeClauses(): boolean {
		return hasSiblingClauses(this._excludeExpression);
	}
}

function hasSiblingClauses(pattern: glob.IExpression): boolean {
	for (let key in pattern) {
		if (typeof pattern[key] !== 'boolean') {
			return true;
		}
	}

	return false;
}
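
// Illustrative usage sketch (not part of the original change); the query/folderQuery values
// are invented, and the glob semantics follow vs/base/common/glob:
const exampleTester = new QueryGlobTester(
	<ISearchQuery>{ excludePattern: { '**/*.min.js': true } },
	<IFolderQuery>{ folder: URI.file('/repo') }
);
console.log(exampleTester.includedInQuerySync('out/app.min.js', 'app.min.js')); // false - excluded
console.log(exampleTester.includedInQuerySync('src/app.ts', 'app.ts'));         // true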

@@ -3,13 +3,11 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { Server } from 'vs/base/parts/ipc/node/ipc.cp';
import { SearchChannel } from './searchIpc';
import { SearchService } from './rawSearchService';

const server = new Server();
const server = new Server('search');
const service = new SearchService();
const channel = new SearchChannel(service);
server.registerChannel('search', channel);
@@ -2,11 +2,11 @@
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';

import { Emitter, Event } from 'vs/base/common/event';
import { ISearchHistoryValues, ISearchHistoryService } from 'vs/platform/search/common/search';
import { IStorageService, StorageScope } from 'vs/platform/storage/common/storage';
import { isEmptyObject } from 'vs/base/common/types';

export class SearchHistoryService implements ISearchHistoryService {
	public _serviceBrand: any;
@@ -26,7 +26,7 @@ export class SearchHistoryService implements ISearchHistoryService {
	}

	public load(): ISearchHistoryValues {
		let result: ISearchHistoryValues;
		let result: ISearchHistoryValues | undefined;
		const raw = this.storageService.get(SearchHistoryService.SEARCH_HISTORY_KEY, StorageScope.WORKSPACE);

		if (raw) {
@@ -41,6 +41,10 @@ export class SearchHistoryService implements ISearchHistoryService {
	}

	public save(history: ISearchHistoryValues): void {
		this.storageService.store(SearchHistoryService.SEARCH_HISTORY_KEY, JSON.stringify(history), StorageScope.WORKSPACE);
		if (isEmptyObject(history)) {
			this.storageService.remove(SearchHistoryService.SEARCH_HISTORY_KEY, StorageScope.WORKSPACE);
		} else {
			this.storageService.store(SearchHistoryService.SEARCH_HISTORY_KEY, JSON.stringify(history), StorageScope.WORKSPACE);
		}
	}
}
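
// Illustrative check (not part of the original change) - `searchHistoryService` is an assumed
// instance: an empty history now clears the workspace storage key instead of persisting '{}':
searchHistoryService.save({});                  // removes SEARCH_HISTORY_KEY from workspace storage
searchHistoryService.save({ search: ['foo'] }); // stores the serialized history as before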
|
||||
@@ -3,35 +3,24 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import { TPromise } from 'vs/base/common/winjs.base';
|
||||
import { IChannel } from 'vs/base/parts/ipc/common/ipc';
|
||||
import { IRawSearchService, IRawSearch, ISerializedSearchComplete, ISerializedSearchProgressItem, ITelemetryEvent } from './search';
|
||||
import { Event } from 'vs/base/common/event';
|
||||
import { IChannel, IServerChannel } from 'vs/base/parts/ipc/node/ipc';
|
||||
import { IRawFileQuery, IRawTextQuery } from 'vs/platform/search/common/search';
|
||||
import { IRawSearchService, ISerializedSearchComplete, ISerializedSearchProgressItem } from './search';
|
||||
|
||||
export interface ISearchChannel extends IChannel {
|
||||
listen(event: 'telemetry'): Event<ITelemetryEvent>;
|
||||
listen(event: 'fileSearch', search: IRawSearch): Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
|
||||
listen(event: 'textSearch', search: IRawSearch): Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
|
||||
call(command: 'clearCache', cacheKey: string): TPromise<void>;
|
||||
call(command: string, arg: any): TPromise<any>;
|
||||
}
|
||||
|
||||
export class SearchChannel implements ISearchChannel {
|
||||
export class SearchChannel implements IServerChannel {
|
||||
|
||||
constructor(private service: IRawSearchService) { }
|
||||
|
||||
listen<T>(event: string, arg?: any): Event<any> {
|
||||
listen<T>(_, event: string, arg?: any): Event<any> {
|
||||
switch (event) {
|
||||
case 'telemetry': return this.service.onTelemetry;
|
||||
case 'fileSearch': return this.service.fileSearch(arg);
|
||||
case 'textSearch': return this.service.textSearch(arg);
|
||||
}
|
||||
throw new Error('Event not found');
|
||||
}
|
||||
|
||||
call(command: string, arg?: any): TPromise<any> {
|
||||
call(_, command: string, arg?: any): Thenable<any> {
|
||||
switch (command) {
|
||||
case 'clearCache': return this.service.clearCache(arg);
|
||||
}
|
||||
@@ -41,19 +30,17 @@ export class SearchChannel implements ISearchChannel {
|
||||
|
||||
export class SearchChannelClient implements IRawSearchService {
|
||||
|
||||
get onTelemetry(): Event<ITelemetryEvent> { return this.channel.listen('telemetry'); }
|
||||
constructor(private channel: IChannel) { }
|
||||
|
||||
constructor(private channel: ISearchChannel) { }
|
||||
|
||||
fileSearch(search: IRawSearch): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
|
||||
fileSearch(search: IRawFileQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
|
||||
return this.channel.listen('fileSearch', search);
|
||||
}
|
||||
|
||||
textSearch(search: IRawSearch): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
|
||||
textSearch(search: IRawTextQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
|
||||
return this.channel.listen('textSearch', search);
|
||||
}
|
||||
|
||||
clearCache(cacheKey: string): TPromise<void> {
|
||||
clearCache(cacheKey: string): Thenable<void> {
|
||||
return this.channel.call('clearCache', cacheKey);
|
||||
}
|
||||
}
|
||||
@@ -2,30 +2,34 @@
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
|
||||
import { getPathFromAmdModule } from 'vs/base/common/amd';
|
||||
import * as arrays from 'vs/base/common/arrays';
|
||||
import { CancellationToken } from 'vs/base/common/cancellation';
|
||||
import { canceled } from 'vs/base/common/errors';
|
||||
import { Event } from 'vs/base/common/event';
|
||||
import { Disposable, IDisposable, toDisposable } from 'vs/base/common/lifecycle';
|
||||
import { ResourceMap, values } from 'vs/base/common/map';
|
||||
import { keys, ResourceMap, values } from 'vs/base/common/map';
|
||||
import { Schemas } from 'vs/base/common/network';
|
||||
import * as objects from 'vs/base/common/objects';
|
||||
import * as strings from 'vs/base/common/strings';
|
||||
import uri from 'vs/base/common/uri';
|
||||
import { TPromise } from 'vs/base/common/winjs.base';
|
||||
import { StopWatch } from 'vs/base/common/stopwatch';
|
||||
import { URI as uri } from 'vs/base/common/uri';
|
||||
import * as pfs from 'vs/base/node/pfs';
|
||||
import { getNextTickChannel } from 'vs/base/parts/ipc/common/ipc';
|
||||
import { getNextTickChannel } from 'vs/base/parts/ipc/node/ipc';
|
||||
import { Client, IIPCOptions } from 'vs/base/parts/ipc/node/ipc.cp';
|
||||
import { IModelService } from 'vs/editor/common/services/modelService';
|
||||
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
|
||||
import { IDebugParams, IEnvironmentService } from 'vs/platform/environment/common/environment';
|
||||
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
|
||||
import { ILogService } from 'vs/platform/log/common/log';
|
||||
import { FileMatch, IFileMatch, IFolderQuery, IProgress, ISearchComplete, ISearchConfiguration, ISearchProgressItem, ISearchQuery, ISearchResultProvider, ISearchService, LineMatch, pathIncludedInQuery, QueryType, SearchProviderType } from 'vs/platform/search/common/search';
|
||||
import { deserializeSearchError, FileMatch, ICachedSearchStats, IFileMatch, IFileQuery, IFileSearchStats, IFolderQuery, IProgress, ISearchComplete, ISearchConfiguration, ISearchEngineStats, ISearchProgressItem, ISearchQuery, ISearchResultProvider, ISearchService, ITextQuery, pathIncludedInQuery, QueryType, SearchError, SearchErrorCode, SearchProviderType } from 'vs/platform/search/common/search';
|
||||
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
|
||||
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
|
||||
import { IExtensionService } from 'vs/workbench/services/extensions/common/extensions';
|
||||
import { addContextToEditorMatches, editorMatchesToTextSearchResults } from 'vs/workbench/services/search/common/searchHelpers';
|
||||
import { IUntitledEditorService } from 'vs/workbench/services/untitled/common/untitledEditorService';
|
||||
import { IRawSearch, IRawSearchService, ISerializedFileMatch, ISerializedSearchComplete, ISerializedSearchProgressItem, isSerializedSearchComplete, isSerializedSearchSuccess, ITelemetryEvent } from './search';
|
||||
import { ISearchChannel, SearchChannelClient } from './searchIpc';
|
||||
import { IRawSearchService, ISerializedFileMatch, ISerializedSearchComplete, ISerializedSearchProgressItem, isSerializedSearchComplete, isSerializedSearchSuccess } from './search';
|
||||
import { SearchChannelClient } from './searchIpc';
|
||||
|
||||
export class SearchService extends Disposable implements ISearchService {
|
||||
public _serviceBrand: any;
|
||||
@@ -36,8 +40,10 @@ export class SearchService extends Disposable implements ISearchService {
|
||||
private readonly fileIndexProviders = new Map<string, ISearchResultProvider>();
|
||||
|
||||
constructor(
|
||||
@IInstantiationService private instantiationService: IInstantiationService,
|
||||
@IModelService private modelService: IModelService,
|
||||
@IUntitledEditorService private untitledEditorService: IUntitledEditorService,
|
||||
@IEditorService private editorService: IEditorService,
|
||||
@IEnvironmentService environmentService: IEnvironmentService,
|
||||
@ITelemetryService private telemetryService: ITelemetryService,
|
||||
@IConfigurationService private configurationService: IConfigurationService,
|
||||
@@ -45,10 +51,7 @@ export class SearchService extends Disposable implements ISearchService {
|
||||
@IExtensionService private extensionService: IExtensionService
|
||||
) {
|
||||
super();
|
||||
this.diskSearch = new DiskSearch(!environmentService.isBuilt || environmentService.verbose, /*timeout=*/undefined, environmentService.debugSearch);
|
||||
this._register(this.diskSearch.onTelemetry(event => {
|
||||
this.telemetryService.publicLog(event.eventName, event.data);
|
||||
}));
|
||||
this.diskSearch = this.instantiationService.createInstance(DiskSearch, !environmentService.isBuilt || environmentService.verbose, /*timeout=*/undefined, environmentService.debugSearch);
|
||||
}
|
||||
|
||||
public registerSearchResultProvider(scheme: string, type: SearchProviderType, provider: ISearchResultProvider): IDisposable {
|
||||
@@ -68,15 +71,9 @@ export class SearchService extends Disposable implements ISearchService {
|
||||
});
|
||||
}
|
||||
|
||||
public extendQuery(query: ISearchQuery): void {
|
||||
public extendQuery(query: IFileQuery): void {
|
||||
const configuration = this.configurationService.getValue<ISearchConfiguration>();
|
||||
|
||||
// Configuration: Encoding
|
||||
if (!query.fileEncoding) {
|
||||
const fileEncoding = configuration && configuration.files && configuration.files.encoding;
|
||||
query.fileEncoding = fileEncoding;
|
||||
}
|
||||
|
||||
// Configuration: File Excludes
|
||||
if (!query.disregardExcludeSettings) {
|
||||
const fileExcludes = objects.deepClone(configuration && configuration.files && configuration.files.exclude);
|
||||
@@ -90,123 +87,138 @@ export class SearchService extends Disposable implements ISearchService {
|
||||
}
|
||||
}
|
||||
|
||||
public search(query: ISearchQuery, onProgress?: (item: ISearchProgressItem) => void): TPromise<ISearchComplete> {
|
||||
let combinedPromise: TPromise<void>;
|
||||
public textSearch(query: ITextQuery, token?: CancellationToken, onProgress?: (item: ISearchProgressItem) => void): Promise<ISearchComplete> {
|
||||
// Get local results from dirty/untitled
|
||||
const localResults = this.getLocalResults(query);
|
||||
|
||||
return new TPromise<ISearchComplete>((onComplete, onError) => {
|
||||
if (onProgress) {
|
||||
localResults.values().filter((res) => !!res).forEach(onProgress);
|
||||
}
|
||||
|
||||
// Get local results from dirty/untitled
|
||||
const localResults = this.getLocalResults(query);
|
||||
this.logService.trace('SearchService#search', JSON.stringify(query));
|
||||
|
||||
if (onProgress) {
|
||||
localResults.values().filter((res) => !!res).forEach(onProgress);
|
||||
const onProviderProgress = progress => {
|
||||
if (progress.resource) {
|
||||
// Match
|
||||
if (!localResults.has(progress.resource) && onProgress) { // don't override local results
|
||||
onProgress(progress);
|
||||
}
|
||||
} else if (onProgress) {
|
||||
// Progress
|
||||
onProgress(<IProgress>progress);
|
||||
}
|
||||
|
||||
this.logService.trace('SearchService#search', JSON.stringify(query));
|
||||
if (progress.message) {
|
||||
this.logService.debug('SearchService#search', progress.message);
|
||||
}
|
||||
};
|
||||
|
||||
const onProviderProgress = progress => {
|
||||
if (progress.resource) {
|
||||
// Match
|
||||
if (!localResults.has(progress.resource) && onProgress) { // don't override local results
|
||||
onProgress(progress);
|
||||
}
|
||||
} else if (onProgress) {
|
||||
// Progress
|
||||
onProgress(<IProgress>progress);
|
||||
}
|
||||
|
||||
if (progress.message) {
|
||||
this.logService.debug('SearchService#search', progress.message);
|
||||
}
|
||||
};
|
||||
|
||||
const startTime = Date.now();
|
||||
|
||||
const schemesInQuery = query.folderQueries.map(fq => fq.folder.scheme);
|
||||
const providerActivations = schemesInQuery.map(scheme => this.extensionService.activateByEvent(`onSearch:${scheme}`));
|
||||
|
||||
const providerPromise = TPromise.join(providerActivations)
|
||||
.then(() => this.searchWithProviders(query, onProviderProgress))
|
||||
.then(completes => {
|
||||
completes = completes.filter(c => !!c);
|
||||
if (!completes.length) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return <ISearchComplete>{
|
||||
limitHit: completes[0] && completes[0].limitHit,
|
||||
stats: completes[0].stats,
|
||||
results: arrays.flatten(completes.map(c => c.results))
|
||||
};
|
||||
}, errs => {
|
||||
if (!Array.isArray(errs)) {
|
||||
errs = [errs];
|
||||
}
|
||||
|
||||
errs = errs.filter(e => !!e);
|
||||
return TPromise.wrapError(errs[0]);
|
||||
});
|
||||
|
||||
combinedPromise = providerPromise.then(value => {
|
||||
this.logService.debug(`SearchService#search: ${Date.now() - startTime}ms`);
|
||||
const values = [value];
|
||||
|
||||
const result: ISearchComplete = {
|
||||
limitHit: false,
results: [],
stats: undefined
};

// TODO@joh
// sorting, disjunct results
for (const value of values) {
if (!value) {
continue;
}
// TODO@joh individual stats/limit
result.stats = value.stats || result.stats;
result.limitHit = value.limitHit || result.limitHit;

for (const match of value.results) {
if (!localResults.has(match.resource)) {
result.results.push(match);
}
}
}

return result;

}).then(onComplete, onError);

}, () => combinedPromise && combinedPromise.cancel());
return this.doSearch(query, token, onProviderProgress);
}

private searchWithProviders(query: ISearchQuery, onProviderProgress: (progress: ISearchProgressItem) => void) {
const diskSearchQueries: IFolderQuery[] = [];
const searchPs = [];
public fileSearch(query: IFileQuery, token?: CancellationToken): Promise<ISearchComplete> {
return this.doSearch(query, token);
}

query.folderQueries.forEach(fq => {
let provider = query.type === QueryType.File ?
this.fileSearchProviders.get(fq.folder.scheme) || this.fileIndexProviders.get(fq.folder.scheme) :
this.textSearchProviders.get(fq.folder.scheme);
private doSearch(query: ISearchQuery, token?: CancellationToken, onProgress?: (item: ISearchProgressItem) => void): Promise<ISearchComplete> {
const schemesInQuery = this.getSchemesInQuery(query);

if (!provider && fq.folder.scheme === 'file') {
diskSearchQueries.push(fq);
} else if (!provider) {
throw new Error('No search provider registered for scheme: ' + fq.folder.scheme);
} else {
const oneFolderQuery = {
...query,
...{
folderQueries: [fq]
const providerActivations: Thenable<any>[] = [Promise.resolve(null)];
schemesInQuery.forEach(scheme => providerActivations.push(this.extensionService.activateByEvent(`onSearch:${scheme}`)));
providerActivations.push(this.extensionService.activateByEvent('onSearch:file'));

const providerPromise = Promise.all(providerActivations)
.then(() => this.extensionService.whenInstalledExtensionsRegistered())
.then(() => {
// Cancel faster if search was canceled while waiting for extensions
if (token && token.isCancellationRequested) {
return Promise.reject(canceled());
}

const progressCallback = (item: ISearchProgressItem) => {
if (token && token.isCancellationRequested) {
return;
}

if (onProgress) {
onProgress(item);
}
};

searchPs.push(provider.search(oneFolderQuery, onProviderProgress));
return this.searchWithProviders(query, progressCallback, token);
})
.then(completes => {
completes = completes.filter(c => !!c);
if (!completes.length) {
return {
limitHit: false,
results: []
};
}

return <ISearchComplete>{
limitHit: completes[0] && completes[0].limitHit,
stats: completes[0].stats,
results: arrays.flatten(completes.map(c => c.results))
};
});

return new Promise((resolve, reject) => {
if (token) {
token.onCancellationRequested(() => {
reject(canceled());
});
}

providerPromise.then(resolve, reject);
});
}

private getSchemesInQuery(query: ISearchQuery): Set<string> {
const schemes = new Set<string>();
if (query.folderQueries) {
query.folderQueries.forEach(fq => schemes.add(fq.folder.scheme));
}

if (query.extraFileResources) {
query.extraFileResources.forEach(extraFile => schemes.add(extraFile.scheme));
}

return schemes;
}
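// Illustrative example (not from this change): folder queries over file:///w1 and a
// hypothetical memfs://w2 yield the set {'file', 'memfs'}, so doSearch above fires the
// 'onSearch:file' and 'onSearch:memfs' activation events before querying providers.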

private searchWithProviders(query: ISearchQuery, onProviderProgress: (progress: ISearchProgressItem) => void, token?: CancellationToken) {
const e2eSW = StopWatch.create(false);

const diskSearchQueries: IFolderQuery[] = [];
const searchPs: Thenable<ISearchComplete>[] = [];

const fqs = this.groupFolderQueriesByScheme(query);
keys(fqs).forEach(scheme => {
const schemeFQs = fqs.get(scheme);
let provider = query.type === QueryType.File ?
this.fileSearchProviders.get(scheme) || this.fileIndexProviders.get(scheme) :
this.textSearchProviders.get(scheme);

if (!provider && scheme === 'file') {
diskSearchQueries.push(...schemeFQs);
} else if (!provider) {
throw new Error('No search provider registered for scheme: ' + scheme);
} else {
const oneSchemeQuery: ISearchQuery = {
...query,
...{
folderQueries: schemeFQs
}
};

searchPs.push(query.type === QueryType.File ?
provider.fileSearch(<IFileQuery>oneSchemeQuery, token) :
provider.textSearch(<ITextQuery>oneSchemeQuery, onProviderProgress, token));
}
});

const diskSearchExtraFileResources = query.extraFileResources && query.extraFileResources.filter(res => res.scheme === 'file');
const diskSearchExtraFileResources = query.extraFileResources && query.extraFileResources.filter(res => res.scheme === Schemas.file);

if (diskSearchQueries.length || diskSearchExtraFileResources) {
const diskSearchQuery: ISearchQuery = {
@@ -217,13 +229,154 @@ export class SearchService extends Disposable implements ISearchService {
extraFileResources: diskSearchExtraFileResources
};

searchPs.push(this.diskSearch.search(diskSearchQuery, onProviderProgress));
searchPs.push(diskSearchQuery.type === QueryType.File ?
this.diskSearch.fileSearch(diskSearchQuery, token) :
this.diskSearch.textSearch(diskSearchQuery, onProviderProgress, token));
}

return TPromise.join(searchPs);
return Promise.all(searchPs).then(completes => {
const endToEndTime = e2eSW.elapsed();
this.logService.trace(`SearchService#search: ${endToEndTime}ms`);
completes.forEach(complete => {
this.sendTelemetry(query, endToEndTime, complete);
});
return completes;
}, err => {
const endToEndTime = e2eSW.elapsed();
this.logService.trace(`SearchService#search: ${endToEndTime}ms`);
const searchError = deserializeSearchError(err.message);
this.sendTelemetry(query, endToEndTime, null, searchError);

throw searchError;
});
}

private getLocalResults(query: ISearchQuery): ResourceMap<IFileMatch> {
private groupFolderQueriesByScheme(query: ISearchQuery): Map<string, IFolderQuery[]> {
const queries = new Map<string, IFolderQuery[]>();

query.folderQueries.forEach(fq => {
const schemeFQs = queries.get(fq.folder.scheme) || [];
schemeFQs.push(fq);

queries.set(fq.folder.scheme, schemeFQs);
});

return queries;
}
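// Illustrative example (not from this change): folderQueries [file:///a, memfs://b, file:///c]
// (memfs being a hypothetical scheme) group to Map { 'file' => [a, c], 'memfs' => [b] },
// so searchWithProviders issues one combined query per provider scheme.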

private sendTelemetry(query: ISearchQuery, endToEndTime: number, complete?: ISearchComplete, err?: SearchError): void {
const fileSchemeOnly = query.folderQueries.every(fq => fq.folder.scheme === 'file');
const otherSchemeOnly = query.folderQueries.every(fq => fq.folder.scheme !== 'file');
const scheme = fileSchemeOnly ? 'file' :
otherSchemeOnly ? 'other' :
'mixed';

if (query.type === QueryType.File && complete && complete.stats) {
const fileSearchStats = complete.stats as IFileSearchStats;
if (fileSearchStats.fromCache) {
const cacheStats: ICachedSearchStats = fileSearchStats.detailStats as ICachedSearchStats;

/* __GDPR__
"cachedSearchComplete" : {
"reason" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"resultCount" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"workspaceFolderCount" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"type" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"endToEndTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"sortingTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"cacheWasResolved" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"cacheLookupTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"cacheFilterTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"cacheEntryCount" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"scheme" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
}
*/
this.telemetryService.publicLog('cachedSearchComplete', {
reason: query._reason,
resultCount: fileSearchStats.resultCount,
workspaceFolderCount: query.folderQueries.length,
type: fileSearchStats.type,
endToEndTime: endToEndTime,
sortingTime: fileSearchStats.sortingTime,
cacheWasResolved: cacheStats.cacheWasResolved,
cacheLookupTime: cacheStats.cacheLookupTime,
cacheFilterTime: cacheStats.cacheFilterTime,
cacheEntryCount: cacheStats.cacheEntryCount,
scheme
});
} else {
const searchEngineStats: ISearchEngineStats = fileSearchStats.detailStats as ISearchEngineStats;

/* __GDPR__
"searchComplete" : {
"reason" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"resultCount" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"workspaceFolderCount" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"type" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"endToEndTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"sortingTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"traversal" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"fileWalkTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"directoriesWalked" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"filesWalked" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"cmdTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"cmdResultCount" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"scheme" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"useRipgrep" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
}
*/
this.telemetryService.publicLog('searchComplete', {
reason: query._reason,
resultCount: fileSearchStats.resultCount,
workspaceFolderCount: query.folderQueries.length,
type: fileSearchStats.type,
endToEndTime: endToEndTime,
sortingTime: fileSearchStats.sortingTime,
traversal: searchEngineStats.traversal,
fileWalkTime: searchEngineStats.fileWalkTime,
directoriesWalked: searchEngineStats.directoriesWalked,
filesWalked: searchEngineStats.filesWalked,
cmdTime: searchEngineStats.cmdTime,
cmdResultCount: searchEngineStats.cmdResultCount,
scheme,
useRipgrep: query.useRipgrep
});
}
} else if (query.type === QueryType.Text) {
let errorType: string;
if (err) {
errorType = err.code === SearchErrorCode.regexParseError ? 'regex' :
err.code === SearchErrorCode.unknownEncoding ? 'encoding' :
err.code === SearchErrorCode.globParseError ? 'glob' :
err.code === SearchErrorCode.invalidLiteral ? 'literal' :
err.code === SearchErrorCode.other ? 'other' :
'unknown';
}

/* __GDPR__
"textSearchComplete" : {
"reason" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"workspaceFolderCount" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"endToEndTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
"scheme" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"error" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"useRipgrep" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"usePCRE2" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
}
*/
this.telemetryService.publicLog('textSearchComplete', {
reason: query._reason,
workspaceFolderCount: query.folderQueries.length,
endToEndTime: endToEndTime,
scheme,
error: errorType,
useRipgrep: query.useRipgrep,
usePCRE2: !!query.usePCRE2
});
}
}

private getLocalResults(query: ITextQuery): ResourceMap<IFileMatch> {
const localResults = new ResourceMap<IFileMatch>();

if (query.type === QueryType.Text) {
@@ -234,6 +387,10 @@ export class SearchService extends Disposable implements ISearchService {
return;
}

if (!this.editorService.isOpen({ resource })) {
return;
}

// Support untitled files
if (resource.scheme === Schemas.untitled) {
if (!this.untitledEditorService.exists(resource)) {
@@ -259,9 +416,8 @@ export class SearchService extends Disposable implements ISearchService {
let fileMatch = new FileMatch(resource);
localResults.set(resource, fileMatch);

matches.forEach((match) => {
fileMatch.lineMatches.push(new LineMatch(model.getLineContent(match.range.startLineNumber), match.range.startLineNumber - 1, [[match.range.startColumn - 1, match.range.endColumn - match.range.startColumn]]));
});
const textSearchResults = editorMatchesToTextSearchResults(matches, model, query.previewOptions);
fileMatch.results = addContextToEditorMatches(textSearchResults, model, query);
} else {
localResults.set(resource, null);
}
@@ -271,18 +427,7 @@ export class SearchService extends Disposable implements ISearchService {
return localResults;
}

private matches(resource: uri, query: ISearchQuery): boolean {
// file pattern
if (query.filePattern) {
if (resource.scheme !== Schemas.file) {
return false; // if we match on file pattern, we have to ignore non file resources
}

if (!strings.fuzzyContains(resource.fsPath, strings.stripWildcards(query.filePattern).toLowerCase())) {
return false;
}
}

private matches(resource: uri, query: ITextQuery): boolean {
// includes
if (query.includePattern) {
if (resource.scheme !== Schemas.file) {
@@ -293,22 +438,29 @@ export class SearchService extends Disposable implements ISearchService {
return pathIncludedInQuery(query, resource.fsPath);
}

public clearCache(cacheKey: string): TPromise<void> {
public clearCache(cacheKey: string): Promise<void> {
const clearPs = [
this.diskSearch,
...values(this.fileIndexProviders)
...values(this.fileIndexProviders),
...values(this.fileSearchProviders)
].map(provider => provider && provider.clearCache(cacheKey));

return TPromise.join(clearPs)
return Promise.all(clearPs)
.then(() => { });
}
}

export class DiskSearch implements ISearchResultProvider {
public _serviceBrand: any;

private raw: IRawSearchService;

constructor(verboseLogging: boolean, timeout: number = 60 * 60 * 1000, searchDebug?: IDebugParams) {
constructor(
verboseLogging: boolean,
timeout: number = 60 * 60 * 1000,
searchDebug: IDebugParams | undefined,
@ILogService private readonly logService: ILogService
) {
const opts: IIPCOptions = {
serverName: 'Search',
timeout: timeout,
@@ -322,7 +474,8 @@ export class DiskSearch implements ISearchResultProvider {
AMD_ENTRYPOINT: 'vs/workbench/services/search/node/searchApp',
PIPE_LOGGING: 'true',
VERBOSE_LOGGING: verboseLogging
}
},
useQueue: true
};

if (searchDebug) {
@@ -334,81 +487,69 @@ export class DiskSearch implements ISearchResultProvider {
}

const client = new Client(
uri.parse(require.toUrl('bootstrap')).fsPath,
getPathFromAmdModule(require, 'bootstrap-fork'),
opts);

const channel = getNextTickChannel(client.getChannel<ISearchChannel>('search'));
const channel = getNextTickChannel(client.getChannel('search'));
this.raw = new SearchChannelClient(channel);
}

public get onTelemetry(): Event<ITelemetryEvent> {
return this.raw.onTelemetry;
}

public search(query: ISearchQuery, onProgress?: (p: ISearchProgressItem) => void): TPromise<ISearchComplete> {
textSearch(query: ITextQuery, onProgress?: (p: ISearchProgressItem) => void, token?: CancellationToken): Promise<ISearchComplete> {
const folderQueries = query.folderQueries || [];
return TPromise.join(folderQueries.map(q => q.folder.scheme === Schemas.file && pfs.exists(q.folder.fsPath)))
return Promise.all(folderQueries.map(q => q.folder.scheme === Schemas.file && pfs.exists(q.folder.fsPath)))
.then(exists => {
const existingFolders = folderQueries.filter((q, index) => exists[index]);
const rawSearch = this.rawSearchQuery(query, existingFolders);
if (token && token.isCancellationRequested) {
throw canceled();
}

query.folderQueries = folderQueries.filter((q, index) => exists[index]);
const event: Event<ISerializedSearchProgressItem | ISerializedSearchComplete> = this.raw.textSearch(query);

return DiskSearch.collectResultsFromEvent(event, onProgress, token);
});
}

fileSearch(query: IFileQuery, token?: CancellationToken): Promise<ISearchComplete> {
const folderQueries = query.folderQueries || [];
return Promise.all(folderQueries.map(q => q.folder.scheme === Schemas.file && pfs.exists(q.folder.fsPath)))
.then(exists => {
if (token && token.isCancellationRequested) {
throw canceled();
}

query.folderQueries = folderQueries.filter((q, index) => exists[index]);
let event: Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
if (query.type === QueryType.File) {
event = this.raw.fileSearch(rawSearch);
} else {
event = this.raw.textSearch(rawSearch);
}
event = this.raw.fileSearch(query);

return DiskSearch.collectResultsFromEvent(event, onProgress);
const onProgress = (p: ISearchProgressItem) => {
if (p.message) {
// Should only be for logs
this.logService.debug('SearchService#search', p.message);
}
};

return DiskSearch.collectResultsFromEvent(event, onProgress, token);
});
}

private rawSearchQuery(query: ISearchQuery, existingFolders: IFolderQuery[]) {
let rawSearch: IRawSearch = {
folderQueries: [],
extraFiles: [],
filePattern: query.filePattern,
excludePattern: query.excludePattern,
includePattern: query.includePattern,
maxResults: query.maxResults,
exists: query.exists,
sortByScore: query.sortByScore,
cacheKey: query.cacheKey,
useRipgrep: query.useRipgrep,
disregardIgnoreFiles: query.disregardIgnoreFiles,
ignoreSymlinks: query.ignoreSymlinks
};

for (const q of existingFolders) {
rawSearch.folderQueries.push({
excludePattern: q.excludePattern,
includePattern: q.includePattern,
fileEncoding: q.fileEncoding,
disregardIgnoreFiles: q.disregardIgnoreFiles,
folder: q.folder.fsPath
});
}

if (query.extraFileResources) {
for (const r of query.extraFileResources) {
if (r.scheme === Schemas.file) {
rawSearch.extraFiles.push(r.fsPath);
}
}
}

if (query.type === QueryType.Text) {
rawSearch.contentPattern = query.contentPattern;
}

return rawSearch;
}

public static collectResultsFromEvent(event: Event<ISerializedSearchProgressItem | ISerializedSearchComplete>, onProgress?: (p: ISearchProgressItem) => void): TPromise<ISearchComplete> {
/**
* Public for test
*/
public static collectResultsFromEvent(event: Event<ISerializedSearchProgressItem | ISerializedSearchComplete>, onProgress?: (p: ISearchProgressItem) => void, token?: CancellationToken): Promise<ISearchComplete> {
let result: IFileMatch[] = [];

let listener: IDisposable;
return new TPromise<ISearchComplete>((c, e) => {
return new Promise<ISearchComplete>((c, e) => {
if (token) {
token.onCancellationRequested(() => {
if (listener) {
listener.dispose();
}

e(canceled());
});
}

listener = event(ev => {
if (isSerializedSearchComplete(ev)) {
if (isSerializedSearchSuccess(ev)) {
@@ -448,21 +589,19 @@ export class DiskSearch implements ISearchResultProvider {
}
}
});
},
() => listener && listener.dispose());
});
}

private static createFileMatch(data: ISerializedFileMatch): FileMatch {
let fileMatch = new FileMatch(uri.file(data.path));
if (data.lineMatches) {
for (let j = 0; j < data.lineMatches.length; j++) {
fileMatch.lineMatches.push(new LineMatch(data.lineMatches[j].preview, data.lineMatches[j].lineNumber, data.lineMatches[j].offsetAndLengths));
}
const fileMatch = new FileMatch(uri.file(data.path));
if (data.results) {
// const matches = data.results.filter(resultIsMatch);
fileMatch.results.push(...data.results);
}
return fileMatch;
}

public clearCache(cacheKey: string): TPromise<void> {
public clearCache(cacheKey: string): Thenable<void> {
return this.raw.clearCache(cacheKey);
}
}

62 src/vs/workbench/services/search/node/textSearchAdapter.ts Normal file
@@ -0,0 +1,62 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { CancellationToken } from 'vs/base/common/cancellation';
import * as extfs from 'vs/base/node/extfs';
import { IFileMatch, IProgress, ITextQuery, ITextSearchStats, ITextSearchMatch } from 'vs/platform/search/common/search';
import { RipgrepTextSearchEngine } from 'vs/workbench/services/search/node/ripgrepTextSearchEngine';
import { TextSearchManager } from 'vs/workbench/services/search/node/textSearchManager';
import { ISerializedFileMatch, ISerializedSearchSuccess } from './search';

export class TextSearchEngineAdapter {

constructor(private query: ITextQuery) {
}

search(token: CancellationToken, onResult: (matches: ISerializedFileMatch[]) => void, onMessage: (message: IProgress) => void): Promise<ISerializedSearchSuccess> {
if ((!this.query.folderQueries || !this.query.folderQueries.length) && (!this.query.extraFileResources || !this.query.extraFileResources.length)) {
return Promise.resolve(<ISerializedSearchSuccess>{
type: 'success',
limitHit: false,
stats: <ITextSearchStats>{
type: 'searchProcess'
}
});
}

const pretendOutputChannel = {
appendLine(msg) {
onMessage({ message: msg });
}
};
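// An output-channel-shaped stand-in: RipgrepTextSearchEngine writes its log lines via
// appendLine, and routing them into onMessage surfaces them as IProgress messages.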
const textSearchManager = new TextSearchManager(this.query, new RipgrepTextSearchEngine(pretendOutputChannel), extfs);
return new Promise((resolve, reject) => {
return textSearchManager
.search(
matches => {
onResult(matches.map(fileMatchToSerialized));
},
token)
.then(
c => resolve({ limitHit: c.limitHit, stats: null, type: 'success' } as ISerializedSearchSuccess),
reject);
});
}
}

function fileMatchToSerialized(match: IFileMatch): ISerializedFileMatch {
return {
path: match.resource ? match.resource.fsPath : undefined,
results: match.results,
numMatches: (match.results || []).reduce((sum, r) => {
if (!!(<ITextSearchMatch>r).ranges) {
const m = <ITextSearchMatch>r;
return sum + (Array.isArray(m.ranges) ? m.ranges.length : 1);
} else {
return sum + 1;
}
}, 0)
};
}
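// Illustrative count (not from this change): one ITextSearchMatch whose ranges array has
// 3 entries plus one context result serializes with numMatches === 3 + 1 === 4.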
311 src/vs/workbench/services/search/node/textSearchManager.ts Normal file
@@ -0,0 +1,311 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as path from 'path';
import { mapArrayOrNot } from 'vs/base/common/arrays';
import { CancellationToken, CancellationTokenSource } from 'vs/base/common/cancellation';
import { toErrorMessage } from 'vs/base/common/errorMessage';
import * as glob from 'vs/base/common/glob';
import * as resources from 'vs/base/common/resources';
import { URI } from 'vs/base/common/uri';
import { toCanonicalName } from 'vs/base/node/encoding';
import * as extfs from 'vs/base/node/extfs';
import { IExtendedExtensionSearchOptions, IFileMatch, IFolderQuery, IPatternInfo, ISearchCompleteStats, ITextQuery, ITextSearchMatch, ITextSearchContext, ITextSearchResult } from 'vs/platform/search/common/search';
import { QueryGlobTester, resolvePatternsForProvider } from 'vs/workbench/services/search/node/search';
import * as vscode from 'vscode';

export class TextSearchManager {

private collector: TextSearchResultsCollector;

private isLimitHit: boolean;
private resultCount = 0;

constructor(private query: ITextQuery, private provider: vscode.TextSearchProvider, private _extfs: typeof extfs = extfs) {
}

public search(onProgress: (matches: IFileMatch[]) => void, token: CancellationToken): Promise<ISearchCompleteStats> {
const folderQueries = this.query.folderQueries || [];
const tokenSource = new CancellationTokenSource();
token.onCancellationRequested(() => tokenSource.cancel());

return new Promise<ISearchCompleteStats>((resolve, reject) => {
this.collector = new TextSearchResultsCollector(onProgress);

let isCanceled = false;
const onResult = (match: vscode.TextSearchResult, folderIdx: number) => {
if (isCanceled) {
return;
}

if (typeof this.query.maxResults === 'number' && this.resultCount >= this.query.maxResults) {
this.isLimitHit = true;
isCanceled = true;
tokenSource.cancel();
}

if (!this.isLimitHit) {
this.resultCount++;
this.collector.add(match, folderIdx);
}
};

// For each root folder
Promise.all(folderQueries.map((fq, i) => {
return this.searchInFolder(fq, r => onResult(r, i), tokenSource.token);
})).then(results => {
tokenSource.dispose();
this.collector.flush();

const someFolderHitLimit = results.some(result => !!result && !!result.limitHit);
resolve({
limitHit: this.isLimitHit || someFolderHitLimit,
stats: {
type: 'textSearchProvider'
}
});
}, (err: Error) => {
tokenSource.dispose();
const errMsg = toErrorMessage(err);
reject(new Error(errMsg));
});
});
}

private searchInFolder(folderQuery: IFolderQuery<URI>, onResult: (result: vscode.TextSearchResult) => void, token: CancellationToken): Promise<vscode.TextSearchComplete | null | undefined> {
const queryTester = new QueryGlobTester(this.query, folderQuery);
const testingPs: Promise<void>[] = [];
const progress = {
report: (result: vscode.TextSearchResult) => {
// TODO: validate result.ranges vs result.preview.matches

const hasSibling = folderQuery.folder.scheme === 'file' ?
glob.hasSiblingPromiseFn(() => {
return this.readdir(path.dirname(result.uri.fsPath));
}) :
undefined;

const relativePath = path.relative(folderQuery.folder.fsPath, result.uri.fsPath);
testingPs.push(
queryTester.includedInQuery(relativePath, path.basename(relativePath), hasSibling)
.then(included => {
if (included) {
onResult(result);
}
}));
}
};

const searchOptions = this.getSearchOptionsForFolder(folderQuery);
return new Promise(resolve => process.nextTick(resolve))
.then(() => this.provider.provideTextSearchResults(patternInfoToQuery(this.query.contentPattern), searchOptions, progress, token))
.then(result => {
return Promise.all(testingPs)
.then(() => result);
});
}

private readdir(dirname: string): Promise<string[]> {
return new Promise((resolve, reject) => {
this._extfs.readdir(dirname, (err, files) => {
if (err) {
return reject(err);
}

resolve(files);
});
});
}

private getSearchOptionsForFolder(fq: IFolderQuery<URI>): vscode.TextSearchOptions {
const includes = resolvePatternsForProvider(this.query.includePattern, fq.includePattern);
const excludes = resolvePatternsForProvider(this.query.excludePattern, fq.excludePattern);

const options = <vscode.TextSearchOptions>{
folder: URI.from(fq.folder),
excludes,
includes,
useIgnoreFiles: !fq.disregardIgnoreFiles,
useGlobalIgnoreFiles: !fq.disregardGlobalIgnoreFiles,
followSymlinks: !fq.ignoreSymlinks,
encoding: fq.fileEncoding && toCanonicalName(fq.fileEncoding),
maxFileSize: this.query.maxFileSize,
maxResults: this.query.maxResults,
previewOptions: this.query.previewOptions,
afterContext: this.query.afterContext,
beforeContext: this.query.beforeContext
};
(<IExtendedExtensionSearchOptions>options).usePCRE2 = this.query.usePCRE2;
return options;
}
}

function patternInfoToQuery(patternInfo: IPatternInfo): vscode.TextSearchQuery {
return <vscode.TextSearchQuery>{
isCaseSensitive: patternInfo.isCaseSensitive || false,
isRegExp: patternInfo.isRegExp || false,
isWordMatch: patternInfo.isWordMatch || false,
isMultiline: patternInfo.isMultiline || false,
pattern: patternInfo.pattern
};
}

export class TextSearchResultsCollector {
private _batchedCollector: BatchedCollector<IFileMatch>;

private _currentFolderIdx: number;
private _currentUri: URI;
private _currentFileMatch: IFileMatch | null = null;

constructor(private _onResult: (result: IFileMatch[]) => void) {
this._batchedCollector = new BatchedCollector<IFileMatch>(512, items => this.sendItems(items));
}

add(data: vscode.TextSearchResult, folderIdx: number): void {
// Collects TextSearchResults into IInternalFileMatches and collates using BatchedCollector.
// This is efficient for ripgrep which sends results back one file at a time. It wouldn't be efficient for other search
// providers that send results in random order. We could do this step afterwards instead.
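// For example (illustrative): ripgrep reporting matches for a.ts, a.ts, then b.ts pushes
// the accumulated a.ts IFileMatch exactly once, at the moment the uri flips to b.ts.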
if (this._currentFileMatch && (this._currentFolderIdx !== folderIdx || !resources.isEqual(this._currentUri, data.uri))) {
this.pushToCollector();
this._currentFileMatch = null;
}

if (!this._currentFileMatch) {
this._currentFolderIdx = folderIdx;
this._currentFileMatch = {
resource: data.uri,
results: []
};
}

this._currentFileMatch.results!.push(extensionResultToFrontendResult(data));
}

private pushToCollector(): void {
const size = this._currentFileMatch && this._currentFileMatch.results ?
this._currentFileMatch.results.length :
0;
this._batchedCollector.addItem(this._currentFileMatch!, size);
}

flush(): void {
this.pushToCollector();
this._batchedCollector.flush();
}

private sendItems(items: IFileMatch[]): void {
this._onResult(items);
}
}

function extensionResultToFrontendResult(data: vscode.TextSearchResult): ITextSearchResult {
// Warning: result from RipgrepTextSearchEH has fake vscode.Range. Don't depend on any other props beyond these...
if (extensionResultIsMatch(data)) {
return <ITextSearchMatch>{
preview: {
matches: mapArrayOrNot(data.preview.matches, m => ({
startLineNumber: m.start.line,
startColumn: m.start.character,
endLineNumber: m.end.line,
endColumn: m.end.character
})),
text: data.preview.text
},
ranges: mapArrayOrNot(data.ranges, r => ({
startLineNumber: r.start.line,
startColumn: r.start.character,
endLineNumber: r.end.line,
endColumn: r.end.character
}))
};
} else {
return <ITextSearchContext>{
text: data.text,
lineNumber: data.lineNumber
};
}
}

export function extensionResultIsMatch(data: vscode.TextSearchResult): data is vscode.TextSearchMatch {
return !!(<vscode.TextSearchMatch>data).preview;
}

/**
* Collects items that have a size - before the cumulative size of collected items reaches START_BATCH_AFTER_COUNT, the callback is called for every
* set of items collected.
* But after that point, the callback is called with batches of maxBatchSize.
* If the batch isn't filled within some time, the callback is also called.
*/
export class BatchedCollector<T> {
private static readonly TIMEOUT = 4000;

// After START_BATCH_AFTER_COUNT items have been collected, stop flushing on timeout
private static readonly START_BATCH_AFTER_COUNT = 50;

private totalNumberCompleted = 0;
private batch: T[] = [];
private batchSize = 0;
private timeoutHandle: any;

constructor(private maxBatchSize: number, private cb: (items: T[]) => void) {
}

addItem(item: T, size: number): void {
if (!item) {
return;
}

this.addItemToBatch(item, size);
}

addItems(items: T[], size: number): void {
if (!items) {
return;
}

this.addItemsToBatch(items, size);
}

private addItemToBatch(item: T, size: number): void {
this.batch.push(item);
this.batchSize += size;
this.onUpdate();
}

private addItemsToBatch(item: T[], size: number): void {
this.batch = this.batch.concat(item);
this.batchSize += size;
this.onUpdate();
}

private onUpdate(): void {
if (this.totalNumberCompleted < BatchedCollector.START_BATCH_AFTER_COUNT) {
// Flush because we aren't batching yet
this.flush();
} else if (this.batchSize >= this.maxBatchSize) {
// Flush because the batch is full
this.flush();
} else if (!this.timeoutHandle) {
// No timeout running, start a timeout to flush
this.timeoutHandle = setTimeout(() => {
this.flush();
}, BatchedCollector.TIMEOUT);
}
}

flush(): void {
if (this.batchSize) {
this.totalNumberCompleted += this.batchSize;
this.cb(this.batch);
this.batch = [];
this.batchSize = 0;

if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = 0;
}
}
}
}
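
// A minimal usage sketch (hypothetical caller, not part of this file): collect numbers and
// log batch sizes. The first 50 items flush immediately; afterwards items are batched up to
// maxBatchSize (512 here) or until the 4-second TIMEOUT fires.
//
//   const collector = new BatchedCollector<number>(512, batch => console.log(batch.length));
//   collector.addItem(1, 1);       // flushed right away while totalNumberCompleted < 50
//   collector.addItems([2, 3], 2); // also flushed immediately at this point
//   collector.flush();             // force out anything still pending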