Merge VS Code 1.31.1 (#4283)
@@ -25,13 +25,6 @@ import { IFileQuery, IFolderQuery, IProgress, ISearchEngineStats } from 'vs/plat
import { IRawFileMatch, ISearchEngine, ISearchEngineSuccess } from 'vs/workbench/services/search/node/search';
import { spawnRipgrepCmd } from './ripgrepFileSearch';

enum Traversal {
Node = 1,
MacFind,
LinuxFind,
Ripgrep
}

interface IDirectoryEntry {
base: string;
relativePath: string;
@@ -50,43 +43,38 @@ process.on('exit', () => {

export class FileWalker {
private config: IFileQuery;
private useRipgrep: boolean;
private filePattern: string;
private normalizedFilePatternLowercase: string;
private includePattern: glob.ParsedExpression;
private maxResults: number;
private includePattern: glob.ParsedExpression | undefined;
private maxResults: number | null;
private exists: boolean;
private maxFilesize: number;
private maxFilesize: number | null;
private isLimitHit: boolean;
private resultCount: number;
private isCanceled: boolean;
private fileWalkSW: StopWatch;
private directoriesWalked: number;
private filesWalked: number;
private traversal: Traversal;
private errors: string[];
private cmdSW: StopWatch;
private cmdResultCount: number;

private folderExcludePatterns: Map<string, AbsoluteAndRelativeParsedExpression>;
private globalExcludePattern: glob.ParsedExpression;
private globalExcludePattern: glob.ParsedExpression | undefined;

private walkedPaths: { [path: string]: boolean; };

constructor(config: IFileQuery, maxFileSize?: number) {
constructor(config: IFileQuery) {
this.config = config;
this.useRipgrep = config.useRipgrep !== false;
this.filePattern = config.filePattern;
this.filePattern = config.filePattern || '';
this.includePattern = config.includePattern && glob.parse(config.includePattern);
this.maxResults = config.maxResults || null;
this.exists = config.exists;
this.maxFilesize = maxFileSize || null;
this.exists = !!config.exists;
this.walkedPaths = Object.create(null);
this.resultCount = 0;
this.isLimitHit = false;
this.directoriesWalked = 0;
this.filesWalked = 0;
this.traversal = Traversal.Node;
this.errors = [];

if (this.filePattern) {
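Note: the constructor rewrite above follows the strictNullChecks migration pattern used throughout this merge: optional query fields become explicitly `| undefined` or `| null` on the class, and the constructor normalizes them once. A minimal sketch of that pattern, with illustrative names rather than the real vscode types:

interface Query {
	filePattern?: string;
	maxResults?: number;
	exists?: boolean;
}

class Walker {
	private filePattern: string;          // always a string after normalization
	private maxResults: number | null;    // explicit null sentinel instead of undefined
	private exists: boolean;

	constructor(config: Query) {
		this.filePattern = config.filePattern || '';  // default instead of possibly-undefined
		this.maxResults = config.maxResults || null;  // note: 0 also maps to null here
		this.exists = !!config.exists;                // coerce to a real boolean
	}
}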
@@ -115,11 +103,11 @@ export class FileWalker {
});
}

public cancel(): void {
cancel(): void {
this.isCanceled = true;
}

public walk(folderQueries: IFolderQuery[], extraFiles: URI[], onResult: (result: IRawFileMatch) => void, onMessage: (message: IProgress) => void, done: (error: Error, isLimitHit: boolean) => void): void {
walk(folderQueries: IFolderQuery[], extraFiles: URI[], onResult: (result: IRawFileMatch) => void, onMessage: (message: IProgress) => void, done: (error: Error | null, isLimitHit: boolean) => void): void {
this.fileWalkSW = StopWatch.create(false);

// Support that the file pattern is a full path to a file that exists
@@ -128,57 +116,38 @@ export class FileWalker {
}

// For each extra file
if (extraFiles) {
extraFiles.forEach(extraFilePath => {
const basename = path.basename(extraFilePath.fsPath);
if (this.globalExcludePattern && this.globalExcludePattern(extraFilePath.fsPath, basename)) {
return; // excluded
}

// File: Check for match on file pattern and include pattern
this.matchFile(onResult, { relativePath: extraFilePath.fsPath /* no workspace relative path */, basename });
});
}

let traverse = this.nodeJSTraversal;
if (!this.maxFilesize) {
if (this.useRipgrep) {
this.traversal = Traversal.Ripgrep;
traverse = this.cmdTraversal;
} else if (platform.isMacintosh) {
this.traversal = Traversal.MacFind;
traverse = this.cmdTraversal;
} else if (platform.isLinux) {
this.traversal = Traversal.LinuxFind;
traverse = this.cmdTraversal;
extraFiles.forEach(extraFilePath => {
const basename = path.basename(extraFilePath.fsPath);
if (this.globalExcludePattern && this.globalExcludePattern(extraFilePath.fsPath, basename)) {
return; // excluded
}
}

const isNodeTraversal = traverse === this.nodeJSTraversal;
if (!isNodeTraversal) {
this.cmdSW = StopWatch.create(false);
}
// File: Check for match on file pattern and include pattern
this.matchFile(onResult, { relativePath: extraFilePath.fsPath /* no workspace relative path */, basename });
});

this.cmdSW = StopWatch.create(false);

// For each root folder
flow.parallel<IFolderQuery, void>(folderQueries, (folderQuery: IFolderQuery, rootFolderDone: (err: Error, result: void) => void) => {
this.call(traverse, this, folderQuery, onResult, onMessage, (err?: Error) => {
flow.parallel<IFolderQuery, void>(folderQueries, (folderQuery: IFolderQuery, rootFolderDone: (err: Error | null, result: void) => void) => {
this.call(this.cmdTraversal, this, folderQuery, onResult, onMessage, (err?: Error) => {
if (err) {
const errorMessage = toErrorMessage(err);
console.error(errorMessage);
this.errors.push(errorMessage);
rootFolderDone(err, undefined);
} else {
rootFolderDone(undefined, undefined);
rootFolderDone(null, undefined);
}
});
}, (errors, result) => {
this.fileWalkSW.stop();
const err = errors ? errors.filter(e => !!e)[0] : null;
const err = errors ? arrays.coalesce(errors)[0] : null;
done(err, this.isLimitHit);
});
}
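Note: `arrays.coalesce(errors)[0]` replaces the ad-hoc `errors.filter(e => !!e)[0]` above. A hedged stand-in for the helper (the real one lives in vs/base/common/arrays and may differ in detail):

function coalesce<T>(array: ReadonlyArray<T | null | undefined>): T[] {
	// Drop null/undefined entries and narrow the element type accordingly.
	return array.filter((e): e is T => e !== null && e !== undefined);
}

const errors: (Error | null)[] = [null, new Error('boom')];
const first: Error | undefined = coalesce(errors)[0]; // Error('boom')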

private call(fun: Function, that: any, ...args: any[]): void {
private call<F extends Function>(fun: F, that: any, ...args: any[]): void {
try {
fun.apply(that, args);
} catch (e) {
@@ -199,31 +168,25 @@ export class FileWalker {
cb(err);
};
let leftover = '';
let first = true;
const tree = this.initDirectoryTree();

const useRipgrep = this.useRipgrep;
let noSiblingsClauses: boolean;
if (useRipgrep) {
const ripgrep = spawnRipgrepCmd(this.config, folderQuery, this.config.includePattern, this.folderExcludePatterns.get(folderQuery.folder.fsPath).expression);
cmd = ripgrep.cmd;
noSiblingsClauses = !Object.keys(ripgrep.siblingClauses).length;
const ripgrep = spawnRipgrepCmd(this.config, folderQuery, this.config.includePattern, this.folderExcludePatterns.get(folderQuery.folder.fsPath)!.expression);
cmd = ripgrep.cmd;
noSiblingsClauses = !Object.keys(ripgrep.siblingClauses).length;

const escapedArgs = ripgrep.rgArgs.args
.map(arg => arg.match(/^-/) ? arg : `'${arg}'`)
.join(' ');
const escapedArgs = ripgrep.rgArgs.args
.map(arg => arg.match(/^-/) ? arg : `'${arg}'`)
.join(' ');

let rgCmd = `rg ${escapedArgs}\n - cwd: ${ripgrep.cwd}`;
if (ripgrep.rgArgs.siblingClauses) {
rgCmd += `\n - Sibling clauses: ${JSON.stringify(ripgrep.rgArgs.siblingClauses)}`;
}
onMessage({ message: rgCmd });
} else {
cmd = this.spawnFindCmd(folderQuery);
let rgCmd = `rg ${escapedArgs}\n - cwd: ${ripgrep.cwd}`;
if (ripgrep.rgArgs.siblingClauses) {
rgCmd += `\n - Sibling clauses: ${JSON.stringify(ripgrep.rgArgs.siblingClauses)}`;
}
onMessage({ message: rgCmd });

this.cmdResultCount = 0;
this.collectStdout(cmd, 'utf8', useRipgrep, onMessage, (err: Error, stdout?: string, last?: boolean) => {
this.collectStdout(cmd, 'utf8', onMessage, (err: Error, stdout?: string, last?: boolean) => {
if (err) {
done(err);
return;
@@ -234,12 +197,8 @@ export class FileWalker {
}

// Mac: uses NFD unicode form on disk, but we want NFC
const normalized = leftover + (isMac ? normalization.normalizeNFC(stdout) : stdout);
const relativeFiles = normalized.split(useRipgrep ? '\n' : '\n./');
if (!useRipgrep && first && normalized.length >= 2) {
first = false;
relativeFiles[0] = relativeFiles[0].trim().substr(2);
}
const normalized = leftover + (isMac ? normalization.normalizeNFC(stdout || '') : stdout);
const relativeFiles = normalized.split('\n');

if (last) {
const n = relativeFiles.length;
@@ -248,7 +207,7 @@ export class FileWalker {
relativeFiles.pop();
}
} else {
leftover = relativeFiles.pop();
leftover = relativeFiles.pop() || '';
}

if (relativeFiles.length && relativeFiles[0].indexOf('\n') !== -1) {
@@ -258,7 +217,7 @@ export class FileWalker {

this.cmdResultCount += relativeFiles.length;

if (useRipgrep && noSiblingsClauses) {
if (noSiblingsClauses) {
for (const relativePath of relativeFiles) {
const basename = path.basename(relativePath);
this.matchFile(onResult, { base: rootFolder, relativePath, basename });
@@ -287,11 +246,11 @@ export class FileWalker {
/**
* Public for testing.
*/
public spawnFindCmd(folderQuery: IFolderQuery) {
const excludePattern = this.folderExcludePatterns.get(folderQuery.folder.fsPath);
spawnFindCmd(folderQuery: IFolderQuery) {
const excludePattern = this.folderExcludePatterns.get(folderQuery.folder.fsPath)!;
const basenames = excludePattern.getBasenameTerms();
const pathTerms = excludePattern.getPathTerms();
let args = ['-L', '.'];
const args = ['-L', '.'];
if (basenames.length || pathTerms.length) {
args.push('-not', '(', '(');
for (const basename of basenames) {
@@ -312,9 +271,9 @@ export class FileWalker {
/**
* Public for testing.
*/
public readStdout(cmd: childProcess.ChildProcess, encoding: string, isRipgrep: boolean, cb: (err: Error, stdout?: string) => void): void {
readStdout(cmd: childProcess.ChildProcess, encoding: string, cb: (err: Error | null, stdout?: string) => void): void {
let all = '';
this.collectStdout(cmd, encoding, isRipgrep, () => { }, (err: Error, stdout?: string, last?: boolean) => {
this.collectStdout(cmd, encoding, () => { }, (err: Error, stdout?: string, last?: boolean) => {
if (err) {
cb(err);
return;
@@ -327,8 +286,8 @@ export class FileWalker {
});
}

private collectStdout(cmd: childProcess.ChildProcess, encoding: string, isRipgrep: boolean, onMessage: (message: IProgress) => void, cb: (err: Error, stdout?: string, last?: boolean) => void): void {
let onData = (err: Error, stdout?: string, last?: boolean) => {
private collectStdout(cmd: childProcess.ChildProcess, encoding: string, onMessage: (message: IProgress) => void, cb: (err: Error | null, stdout?: string, last?: boolean) => void): void {
let onData = (err: Error | null, stdout?: string, last?: boolean) => {
if (err || last) {
onData = () => { };

@@ -363,10 +322,10 @@ export class FileWalker {
cmd.on('close', (code: number) => {
// ripgrep returns code=1 when no results are found
let stderrText: string;
if (isRipgrep ? (!gotData && (stderrText = this.decodeData(stderr, encoding)) && rgErrorMsgForDisplay(stderrText)) : code !== 0) {
if (!gotData && (stderrText = this.decodeData(stderr, encoding)) && rgErrorMsgForDisplay(stderrText)) {
onData(new Error(`command failed with error code ${code}: ${this.decodeData(stderr, encoding)}`));
} else {
if (isRipgrep && this.exists && code === 0) {
if (this.exists && code === 0) {
this.isLimitHit = true;
}
onData(null, '', true);
@@ -374,7 +333,7 @@ export class FileWalker {
});
}
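Note: dropping the `isRipgrep` branch works because ripgrep is the only command spawned here now, and rg exits with code 1 when it merely finds nothing, so `code !== 0` alone cannot signal failure. A sketch of the resulting decision, assuming `rgErrorMsgForDisplay` returns a message only for genuine errors:

// rg exit codes: 0 = matches found, 1 = no matches, 2 = error.
function classifyRgExit(code: number, gotData: boolean, stderrText: string,
	rgErrorMsgForDisplay: (msg: string) => string | undefined): Error | undefined {
	if (!gotData && stderrText && rgErrorMsgForDisplay(stderrText)) {
		return new Error(`command failed with error code ${code}: ${stderrText}`);
	}
	return undefined; // success, even for exit code 1 (no results)
}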
private forwardData(stream: Readable, encoding: string, cb: (err: Error, stdout?: string) => void): NodeStringDecoder {
private forwardData(stream: Readable, encoding: string, cb: (err: Error | null, stdout?: string) => void): NodeStringDecoder {
const decoder = new StringDecoder(encoding);
stream.on('data', (data: Buffer) => {
cb(null, decoder.write(data));
@@ -430,7 +389,7 @@ export class FileWalker {

private matchDirectoryTree({ rootEntries, pathToEntries }: IDirectoryTree, rootFolder: string, onResult: (result: IRawFileMatch) => void) {
const self = this;
const excludePattern = this.folderExcludePatterns.get(rootFolder);
const excludePattern = this.folderExcludePatterns.get(rootFolder)!;
const filePattern = this.filePattern;
function matchDirectory(entries: IDirectoryEntry[]) {
self.directoriesWalked++;
@@ -467,58 +426,42 @@ export class FileWalker {
matchDirectory(rootEntries);
}

private nodeJSTraversal(folderQuery: IFolderQuery, onResult: (result: IRawFileMatch) => void, onMessage: (message: IProgress) => void, done: (err?: Error) => void): void {
this.directoriesWalked++;
extfs.readdir(folderQuery.folder.fsPath, (error: Error, files: string[]) => {
if (error || this.isCanceled || this.isLimitHit) {
return done();
}

if (this.isCanceled || this.isLimitHit) {
return done();
}

return this.doWalk(folderQuery, '', files, onResult, done);
});
}

public getStats(): ISearchEngineStats {
getStats(): ISearchEngineStats {
return {
cmdTime: this.cmdSW && this.cmdSW.elapsed(),
fileWalkTime: this.fileWalkSW.elapsed(),
traversal: Traversal[this.traversal],
directoriesWalked: this.directoriesWalked,
filesWalked: this.filesWalked,
cmdResultCount: this.cmdResultCount
};
}

private doWalk(folderQuery: IFolderQuery, relativeParentPath: string, files: string[], onResult: (result: IRawFileMatch) => void, done: (error: Error) => void): void {
private doWalk(folderQuery: IFolderQuery, relativeParentPath: string, files: string[], onResult: (result: IRawFileMatch) => void, done: (error?: Error) => void): void {
const rootFolder = folderQuery.folder;

// Execute tasks on each file in parallel to optimize throughput
const hasSibling = glob.hasSiblingFn(() => files);
flow.parallel(files, (file: string, clb: (error: Error, result: {}) => void): void => {
flow.parallel(files, (file: string, clb: (error: Error | null, _?: any) => void): void => {

// Check canceled
if (this.isCanceled || this.isLimitHit) {
return clb(null, undefined);
return clb(null);
}

// Check exclude pattern
// If the user searches for the exact file name, we adjust the glob matching
// to ignore filtering by siblings because the user seems to know what she
// is searching for and we want to include the result in that case anyway
let currentRelativePath = relativeParentPath ? [relativeParentPath, file].join(path.sep) : file;
if (this.folderExcludePatterns.get(folderQuery.folder.fsPath).test(currentRelativePath, file, this.config.filePattern !== file ? hasSibling : undefined)) {
return clb(null, undefined);
const currentRelativePath = relativeParentPath ? [relativeParentPath, file].join(path.sep) : file;
if (this.folderExcludePatterns.get(folderQuery.folder.fsPath)!.test(currentRelativePath, file, this.config.filePattern !== file ? hasSibling : undefined)) {
return clb(null);
}

// Use lstat to detect links
let currentAbsolutePath = [rootFolder.fsPath, currentRelativePath].join(path.sep);
const currentAbsolutePath = [rootFolder.fsPath, currentRelativePath].join(path.sep);
fs.lstat(currentAbsolutePath, (error, lstat) => {
if (error || this.isCanceled || this.isLimitHit) {
return clb(null, undefined);
return clb(null);
}

// If the path is a link, we must instead use fs.stat() to find out if the
@@ -526,7 +469,7 @@ export class FileWalker {
// the link which is always a file.
this.statLinkIfNeeded(currentAbsolutePath, lstat, (error, stat) => {
if (error || this.isCanceled || this.isLimitHit) {
return clb(null, undefined);
return clb(null);
}

// Directory: Follow directories
@@ -536,11 +479,12 @@ export class FileWalker {
// to really prevent loops with links we need to resolve the real path of them
return this.realPathIfNeeded(currentAbsolutePath, lstat, (error, realpath) => {
if (error || this.isCanceled || this.isLimitHit) {
return clb(null, undefined);
return clb(null);
}

realpath = realpath || '';
if (this.walkedPaths[realpath]) {
return clb(null, undefined); // escape when there are cycles (can happen with symlinks)
return clb(null); // escape when there are cycles (can happen with symlinks)
}

this.walkedPaths[realpath] = true; // remember as walked
@@ -548,10 +492,10 @@ export class FileWalker {
// Continue walking
return extfs.readdir(currentAbsolutePath, (error: Error, children: string[]): void => {
if (error || this.isCanceled || this.isLimitHit) {
return clb(null, undefined);
return clb(null);
}

this.doWalk(folderQuery, currentRelativePath, children, onResult, err => clb(err, undefined));
this.doWalk(folderQuery, currentRelativePath, children, onResult, err => clb(err || null));
});
});
}
@@ -579,7 +523,7 @@ export class FileWalker {
error = arrays.coalesce(error); // find any error by removing null values first
}

return done(error && error.length > 0 ? error[0] : null);
return done(error && error.length > 0 ? error[0] : undefined);
});
}

@@ -612,7 +556,7 @@ export class FileWalker {
return true;
}

private statLinkIfNeeded(path: string, lstat: fs.Stats, clb: (error: Error, stat: fs.Stats) => void): void {
private statLinkIfNeeded(path: string, lstat: fs.Stats, clb: (error: Error | null, stat: fs.Stats) => void): void {
if (lstat.isSymbolicLink()) {
return fs.stat(path, clb); // stat the target the link points to
}
@@ -620,7 +564,7 @@ export class FileWalker {
return clb(null, lstat); // not a link, so the stat is already ok for us
}

private realPathIfNeeded(path: string, lstat: fs.Stats, clb: (error: Error, realpath?: string) => void): void {
private realPathIfNeeded(path: string, lstat: fs.Stats, clb: (error: Error | null, realpath?: string) => void): void {
if (lstat.isSymbolicLink()) {
return fs.realpath(path, (error, realpath) => {
if (error) {
@@ -642,12 +586,12 @@ export class Engine implements ISearchEngine<IRawFileMatch> {

constructor(config: IFileQuery) {
this.folderQueries = config.folderQueries;
this.extraFiles = config.extraFileResources;
this.extraFiles = config.extraFileResources || [];

this.walker = new FileWalker(config);
}

public search(onResult: (result: IRawFileMatch) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISearchEngineSuccess) => void): void {
search(onResult: (result: IRawFileMatch) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISearchEngineSuccess) => void): void {
this.walker.walk(this.folderQueries, this.extraFiles, onResult, onProgress, (err: Error, isLimitHit: boolean) => {
done(err, {
limitHit: isLimitHit,
@@ -656,7 +600,7 @@ export class Engine implements ISearchEngine<IRawFileMatch> {
});
}

public cancel(): void {
cancel(): void {
this.walker.cancel();
}
}
@@ -667,8 +611,8 @@ export class Engine implements ISearchEngine<IRawFileMatch> {
* file searched, it's only used for a text search with a searchPath
*/
class AbsoluteAndRelativeParsedExpression {
private absoluteParsedExpr: glob.ParsedExpression;
private relativeParsedExpr: glob.ParsedExpression;
private absoluteParsedExpr: glob.ParsedExpression | undefined;
private relativeParsedExpr: glob.ParsedExpression | undefined;

constructor(public expression: glob.IExpression, private root: string) {
this.init(expression);
@@ -678,8 +622,8 @@ class AbsoluteAndRelativeParsedExpression {
* Split the IExpression into its absolute and relative components, and glob.parse them separately.
*/
private init(expr: glob.IExpression): void {
let absoluteGlobExpr: glob.IExpression;
let relativeGlobExpr: glob.IExpression;
let absoluteGlobExpr: glob.IExpression | undefined;
let relativeGlobExpr: glob.IExpression | undefined;
Object.keys(expr)
.filter(key => expr[key])
.forEach(key => {
@@ -696,12 +640,12 @@ class AbsoluteAndRelativeParsedExpression {
this.relativeParsedExpr = relativeGlobExpr && glob.parse(relativeGlobExpr, { trimForExclusions: true });
}

public test(_path: string, basename?: string, hasSibling?: (name: string) => boolean | Promise<boolean>): string | Promise<string> {
test(_path: string, basename?: string, hasSibling?: (name: string) => boolean | Promise<boolean>): string | Promise<string | null> | undefined | null {
return (this.relativeParsedExpr && this.relativeParsedExpr(_path, basename, hasSibling)) ||
(this.absoluteParsedExpr && this.absoluteParsedExpr(path.join(this.root, _path), basename, hasSibling));
}

public getBasenameTerms(): string[] {
getBasenameTerms(): string[] {
const basenameTerms: string[] = [];
if (this.absoluteParsedExpr) {
basenameTerms.push(...glob.getBasenameTerms(this.absoluteParsedExpr));
@@ -714,7 +658,7 @@ class AbsoluteAndRelativeParsedExpression {
return basenameTerms;
}

public getPathTerms(): string[] {
getPathTerms(): string[] {
const pathTerms: string[] = [];
if (this.absoluteParsedExpr) {
pathTerms.push(...glob.getPathTerms(this.absoluteParsedExpr));
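Note: AbsoluteAndRelativeParsedExpression.init above partitions one glob IExpression into absolute and relative halves so that test() can try the cheap relative match first. A simplified sketch of that split (the real absolute-path detection in vscode's glob module is more thorough):

type IExpression = { [pattern: string]: boolean };

function splitExpression(expr: IExpression): { absolute?: IExpression; relative?: IExpression } {
	let absolute: IExpression | undefined;
	let relative: IExpression | undefined;
	for (const key of Object.keys(expr).filter(k => expr[k])) {
		// Treat POSIX-absolute and drive-letter patterns as absolute.
		if (key.startsWith('/') || /^[a-zA-Z]:[\/\\]/.test(key)) {
			(absolute = absolute || {})[key] = expr[key];
		} else {
			(relative = relative || {})[key] = expr[key];
		}
	}
	return { absolute, relative };
}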
@@ -56,13 +56,13 @@ class FileSearchEngine {
this.globalExcludePattern = config.excludePattern && glob.parse(config.excludePattern);
}

public cancel(): void {
cancel(): void {
this.isCanceled = true;
this.activeCancellationTokens.forEach(t => t.cancel());
this.activeCancellationTokens = new Set();
}

public search(_onResult: (match: IInternalFileMatch) => void): Promise<IInternalSearchComplete> {
search(_onResult: (match: IInternalFileMatch) => void): Promise<IInternalSearchComplete> {
const folderQueries = this.config.folderQueries || [];

return new Promise((resolve, reject) => {

@@ -1,74 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as fs from 'fs';
import * as gracefulFs from 'graceful-fs';
import { CancellationToken } from 'vs/base/common/cancellation';
import { MAX_FILE_SIZE } from 'vs/platform/files/node/files';
import { ITextQuery, QueryType } from 'vs/platform/search/common/search';
import { FileWalker } from 'vs/workbench/services/search/node/fileSearch';
import { Engine } from 'vs/workbench/services/search/node/legacy/textSearch';
import { TextSearchWorkerProvider } from 'vs/workbench/services/search/node/legacy/textSearchWorkerProvider';
import { BatchedCollector } from 'vs/workbench/services/search/node/textSearchManager';
import { ISerializedFileMatch, ISerializedSearchComplete, ISerializedSearchProgressItem, ISerializedSearchSuccess } from '../search';

gracefulFs.gracefulify(fs);

type IProgressCallback = (p: ISerializedSearchProgressItem) => void;

export class LegacyTextSearchService {
private static readonly BATCH_SIZE = 512;

private textSearchWorkerProvider: TextSearchWorkerProvider;

textSearch(config: ITextQuery, progressCallback: IProgressCallback, token?: CancellationToken): Promise<ISerializedSearchComplete> {
if (!this.textSearchWorkerProvider) {
this.textSearchWorkerProvider = new TextSearchWorkerProvider();
}

let engine = new Engine(
config,
new FileWalker({
type: QueryType.File,
folderQueries: config.folderQueries,
extraFileResources: config.extraFileResources,
includePattern: config.includePattern,
excludePattern: config.excludePattern,
useRipgrep: false
}, MAX_FILE_SIZE),
this.textSearchWorkerProvider);

return this.doTextSearch(engine, progressCallback, LegacyTextSearchService.BATCH_SIZE, token);
}

private doTextSearch(engine: Engine, progressCallback: IProgressCallback, batchSize: number, token?: CancellationToken): Promise<ISerializedSearchSuccess> {
if (token) {
token.onCancellationRequested(() => engine.cancel());
}

return new Promise<ISerializedSearchSuccess>((c, e) => {
// Use BatchedCollector to get new results to the frontend every 2s at least, until 50 results have been returned
const collector = new BatchedCollector<ISerializedFileMatch>(batchSize, progressCallback);
engine.search((matches) => {
const totalMatches = matches.reduce((acc, m) => acc + m.numMatches, 0);
collector.addItems(matches, totalMatches);
}, (progress) => {
progressCallback(progress);
}, (error, stats) => {
collector.flush();

if (error) {
e(error);
} else {
c({
type: 'success',
limitHit: stats.limitHit,
stats: null
});
}
});
});
}
}
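Note: the deleted LegacyTextSearchService above throttled IPC traffic through BatchedCollector; a simplified sketch of that batching idea (the real class also flushes on a timer):

class SimpleBatcher<T> {
	private batch: T[] = [];
	private size = 0;

	constructor(private maxSize: number, private cb: (items: T[]) => void) { }

	addItems(items: T[], count: number): void {
		this.batch.push(...items);
		this.size += count;
		if (this.size >= this.maxSize) {
			this.flush(); // forward a full batch downstream
		}
	}

	flush(): void {
		if (this.batch.length) {
			this.cb(this.batch);
			this.batch = [];
			this.size = 0;
		}
	}
}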
@@ -1,35 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as glob from 'vs/base/common/glob';
import { IPatternInfo, ITextSearchPreviewOptions } from 'vs/platform/search/common/search';

export interface IFolderSearch {
folder: string;
excludePattern?: glob.IExpression;
includePattern?: glob.IExpression;
fileEncoding?: string;
disregardIgnoreFiles?: boolean;
disregardGlobalIgnoreFiles?: boolean;
}

export interface IRawSearch {
folderQueries: IFolderSearch[];
ignoreSymlinks?: boolean;
extraFiles?: string[];
filePattern?: string;
excludePattern?: glob.IExpression;
includePattern?: glob.IExpression;
contentPattern?: IPatternInfo;
maxResults?: number;
exists?: boolean;
sortByScore?: boolean;
cacheKey?: string;
maxFilesize?: number;
useRipgrep?: boolean;
disregardIgnoreFiles?: boolean;
previewOptions?: ITextSearchPreviewOptions;
disregardGlobalIgnoreFiles?: boolean;
}
@@ -1,206 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as path from 'path';
import { onUnexpectedError } from 'vs/base/common/errors';
import { IProgress, ITextQuery } from 'vs/platform/search/common/search';
import { FileWalker } from 'vs/workbench/services/search/node/fileSearch';
import { ISearchEngine, ISearchEngineSuccess, ISerializedFileMatch } from '../search';
import { ITextSearchWorkerProvider } from './textSearchWorkerProvider';
import { ISearchWorker, ISearchWorkerSearchArgs } from './worker/searchWorkerIpc';
import { IRawSearch } from 'vs/workbench/services/search/node/legacy/search';

export class Engine implements ISearchEngine<ISerializedFileMatch[]> {

private static readonly PROGRESS_FLUSH_CHUNK_SIZE = 50; // optimization: number of files to process before emitting progress event

private config: IRawSearch;
private config2: ITextQuery;
private walker: FileWalker;
private walkerError: Error;

private isCanceled = false;
private isDone = false;
private totalBytes = 0;
private processedBytes = 0;
private progressed = 0;
private walkerIsDone = false;
private limitReached = false;
private numResults = 0;

private workerProvider: ITextSearchWorkerProvider;
private workers: ISearchWorker[];

private nextWorker = 0;

constructor(config: ITextQuery, walker: FileWalker, workerProvider: ITextSearchWorkerProvider) {
this.config = makeRawSearch(config);
this.config2 = config;
this.walker = walker;
this.workerProvider = workerProvider;
}

cancel(): void {
this.isCanceled = true;
this.walker.cancel();

this.workers.forEach(w => {
w.cancel()
.then(null, onUnexpectedError);
});
}

initializeWorkers(): void {
this.workers.forEach(w => {
w.initialize()
.then(null, onUnexpectedError);
});
}

search(onResult: (match: ISerializedFileMatch[]) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISearchEngineSuccess) => void): void {
this.workers = this.workerProvider.getWorkers();
this.initializeWorkers();

const fileEncoding = this.config.folderQueries.length === 1 ?
this.config.folderQueries[0].fileEncoding || 'utf8' :
'utf8';

const progress = () => {
if (++this.progressed % Engine.PROGRESS_FLUSH_CHUNK_SIZE === 0) {
onProgress({ total: this.totalBytes, worked: this.processedBytes }); // buffer progress in chunks to reduce pressure
}
};

const unwind = (processed: number) => {
this.processedBytes += processed;

// Emit progress() unless we got canceled or hit the limit
if (processed && !this.isDone && !this.isCanceled && !this.limitReached) {
progress();
}

// Emit done()
if (!this.isDone && this.processedBytes === this.totalBytes && this.walkerIsDone) {
this.isDone = true;
done(this.walkerError, {
limitHit: this.limitReached,
stats: this.walker.getStats()
});
}
};

const run = (batch: string[], batchBytes: number): void => {
const worker = this.workers[this.nextWorker];
this.nextWorker = (this.nextWorker + 1) % this.workers.length;

const maxResults = this.config.maxResults && (this.config.maxResults - this.numResults);
const searchArgs: ISearchWorkerSearchArgs = { absolutePaths: batch, maxResults, pattern: this.config.contentPattern, fileEncoding, previewOptions: this.config.previewOptions };
worker.search(searchArgs).then(result => {
if (!result || this.limitReached || this.isCanceled) {
return unwind(batchBytes);
}

const matches = result.matches;
onResult(matches);
this.numResults += result.numMatches;

if (this.config.maxResults && this.numResults >= this.config.maxResults) {
// It's possible to go over maxResults like this, but it's much simpler than trying to extract the exact number
// of file matches, line matches, and matches within a line to == maxResults.
this.limitReached = true;
}

unwind(batchBytes);
},
error => {
// An error on the worker's end, not in reading the file, but in processing the batch. Log and continue.
onUnexpectedError(error);
unwind(batchBytes);
});
};

// Walk over the file system
let nextBatch: string[] = [];
let nextBatchBytes = 0;
const batchFlushBytes = 2 ** 20; // 1MB
this.walker.walk(this.config2.folderQueries, this.config2.extraFileResources, result => {
let bytes = result.size || 1;
this.totalBytes += bytes;

// If we have reached the limit or we are canceled, ignore it
if (this.limitReached || this.isCanceled) {
return unwind(bytes);
}

// Indicate progress to the outside
progress();

const absolutePath = result.base ? [result.base, result.relativePath].join(path.sep) : result.relativePath;
nextBatch.push(absolutePath);
nextBatchBytes += bytes;

if (nextBatchBytes >= batchFlushBytes) {
run(nextBatch, nextBatchBytes);
nextBatch = [];
nextBatchBytes = 0;
}
},
onProgress,
(error, isLimitHit) => {
this.walkerIsDone = true;
this.walkerError = error;

// Send any remaining paths to a worker, or unwind if we're stopping
if (nextBatch.length) {
if (this.limitReached || this.isCanceled) {
unwind(nextBatchBytes);
} else {
run(nextBatch, nextBatchBytes);
}
} else {
unwind(0);
}
});
}
}

/**
 * Exported for tests
 */
export function makeRawSearch(query: ITextQuery): IRawSearch {
let rawSearch: IRawSearch = {
folderQueries: [],
extraFiles: [],
excludePattern: query.excludePattern,
includePattern: query.includePattern,
maxResults: query.maxResults,
useRipgrep: query.useRipgrep,
disregardIgnoreFiles: query.folderQueries.some(fq => fq.disregardIgnoreFiles),
disregardGlobalIgnoreFiles: query.folderQueries.some(fq => fq.disregardGlobalIgnoreFiles),
ignoreSymlinks: query.folderQueries.some(fq => fq.ignoreSymlinks),
previewOptions: query.previewOptions
};

for (const q of query.folderQueries) {
rawSearch.folderQueries.push({
excludePattern: q.excludePattern,
includePattern: q.includePattern,
fileEncoding: q.fileEncoding,
disregardIgnoreFiles: q.disregardIgnoreFiles,
disregardGlobalIgnoreFiles: q.disregardGlobalIgnoreFiles,
folder: q.folder.fsPath
});
}

if (query.extraFileResources) {
for (const r of query.extraFileResources) {
rawSearch.extraFiles.push(r.fsPath);
}
}

rawSearch.contentPattern = query.contentPattern;

return rawSearch;
}
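Note: the deleted engine above fans file batches out to workers once roughly 1MB of file bytes has accumulated (`batchFlushBytes = 2 ** 20`). The core of that scheme, reduced to a sketch:

const BATCH_FLUSH_BYTES = 2 ** 20; // 1MB, as in the deleted code

function makeBatcher(run: (paths: string[], bytes: number) => void) {
	let paths: string[] = [];
	let bytes = 0;
	return (path: string, size: number) => {
		paths.push(path);
		bytes += size;
		if (bytes >= BATCH_FLUSH_BYTES) {
			run(paths, bytes); // dispatch the full batch to a worker
			paths = [];
			bytes = 0;
		}
	};
}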
@@ -1,50 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as os from 'os';

import * as ipc from 'vs/base/parts/ipc/node/ipc';
import { Client } from 'vs/base/parts/ipc/node/ipc.cp';

import { ISearchWorker, SearchWorkerChannelClient } from './worker/searchWorkerIpc';
import { getPathFromAmdModule } from 'vs/base/common/amd';

export interface ITextSearchWorkerProvider {
getWorkers(): ISearchWorker[];
}

export class TextSearchWorkerProvider implements ITextSearchWorkerProvider {
private workers: ISearchWorker[] = [];

getWorkers(): ISearchWorker[] {
const numWorkers = os.cpus().length;
while (this.workers.length < numWorkers) {
this.createWorker();
}

return this.workers;
}

private createWorker(): void {
let client = new Client(
getPathFromAmdModule(require, 'bootstrap-fork'),
{
serverName: 'Search Worker ' + this.workers.length,
args: ['--type=searchWorker'],
timeout: 30 * 1000,
env: {
AMD_ENTRYPOINT: 'vs/workbench/services/search/node/legacy/worker/searchWorkerApp',
PIPE_LOGGING: 'true',
VERBOSE_LOGGING: process.env.VERBOSE_LOGGING
},
useQueue: true
});

const channel = ipc.getNextTickChannel(client.getChannel('searchWorker'));
const channelClient = new SearchWorkerChannelClient(channel);

this.workers.push(channelClient);
}
}
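Note: the deleted provider above grows a pool lazily to one worker per logical CPU; the shape of that pattern, generalized into a sketch:

import * as os from 'os';

class LazyPool<T> {
	private workers: T[] = [];

	constructor(private create: (index: number) => T) { }

	getWorkers(): T[] {
		// Grow on demand until there is one worker per logical CPU.
		while (this.workers.length < os.cpus().length) {
			this.workers.push(this.create(this.workers.length));
		}
		return this.workers;
	}
}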
@@ -1,291 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as fs from 'fs';
import * as gracefulFs from 'graceful-fs';
import { onUnexpectedError } from 'vs/base/common/errors';
import * as strings from 'vs/base/common/strings';
import { bomLength, decode, detectEncodingFromBuffer, encodingExists, UTF16be, UTF16le, UTF8, UTF8_with_bom } from 'vs/base/node/encoding';
import { Range } from 'vs/editor/common/core/range';
import { ITextSearchPreviewOptions, TextSearchMatch } from 'vs/platform/search/common/search';
import { ISearchWorker, ISearchWorkerSearchArgs, ISearchWorkerSearchResult } from './searchWorkerIpc';
import { FileMatch } from 'vs/workbench/services/search/node/search';

gracefulFs.gracefulify(fs);

interface ReadLinesOptions {
bufferLength: number;
encoding: string;
}

const MAX_FILE_ERRORS = 5; // Don't report more than this number of errors, 1 per file, to avoid flooding the log when there's a general issue
let numErrorsLogged = 0;
function onError(error: any): void {
if (numErrorsLogged++ < MAX_FILE_ERRORS) {
onUnexpectedError(error);
}
}

export class SearchWorker implements ISearchWorker {
private currentSearchEngine: SearchWorkerEngine;

initialize(): Promise<void> {
this.currentSearchEngine = new SearchWorkerEngine();
return Promise.resolve<void>(undefined);
}

cancel(): Promise<void> {
// Cancel the current search. It will stop searching and close its open files.
if (this.currentSearchEngine) {
this.currentSearchEngine.cancel();
}

return Promise.resolve<void>(null);
}

search(args: ISearchWorkerSearchArgs): Promise<ISearchWorkerSearchResult> {
if (!this.currentSearchEngine) {
// Worker timed out during search
this.initialize();
}

return this.currentSearchEngine.searchBatch(args);
}
}

interface IFileSearchResult {
match: FileMatch;
numMatches: number;
limitReached?: boolean;
}

const LF = 0x0a;
const CR = 0x0d;

export class SearchWorkerEngine {
private nextSearch = Promise.resolve(null);
private isCanceled = false;

/**
 * Searches some number of the given paths concurrently, and starts searches in other paths when those complete.
 */
searchBatch(args: ISearchWorkerSearchArgs): Promise<ISearchWorkerSearchResult> {
const contentPattern = strings.createRegExp(args.pattern.pattern, args.pattern.isRegExp, { matchCase: args.pattern.isCaseSensitive, wholeWord: args.pattern.isWordMatch, multiline: false, global: true });
const fileEncoding = encodingExists(args.fileEncoding) ? args.fileEncoding : UTF8;
return this.nextSearch =
this.nextSearch.then(() => this._searchBatch(args, contentPattern, fileEncoding));
}


private _searchBatch(args: ISearchWorkerSearchArgs, contentPattern: RegExp, fileEncoding: string): Promise<ISearchWorkerSearchResult> {
if (this.isCanceled) {
return Promise.resolve<ISearchWorkerSearchResult>(null);
}

return new Promise<ISearchWorkerSearchResult>(batchDone => {
const result: ISearchWorkerSearchResult = {
matches: [],
numMatches: 0,
limitReached: false
};

// Search in the given path, and when it's finished, search in the next path in absolutePaths
const startSearchInFile = (absolutePath: string): Promise<void> => {
return this.searchInFile(absolutePath, contentPattern, fileEncoding, args.maxResults && (args.maxResults - result.numMatches), args.previewOptions).then(fileResult => {
// Finish early if search is canceled
if (this.isCanceled) {
return;
}

if (fileResult) {
result.numMatches += fileResult.numMatches;
result.matches.push(fileResult.match.serialize());
if (fileResult.limitReached) {
// If the limit was reached, terminate early with the results so far and cancel in-progress searches.
this.cancel();
result.limitReached = true;
return batchDone(result);
}
}
}, onError);
};

Promise.all(args.absolutePaths.map(startSearchInFile)).then(() => {
batchDone(result);
});
});
}

cancel(): void {
this.isCanceled = true;
}

private searchInFile(absolutePath: string, contentPattern: RegExp, fileEncoding: string, maxResults?: number, previewOptions?: ITextSearchPreviewOptions): Promise<IFileSearchResult> {
let fileMatch: FileMatch | null = null;
let limitReached = false;
let numMatches = 0;

const perLineCallback = (line: string, lineNumber: number) => {
let match = contentPattern.exec(line);

// Record all matches into file result
while (match !== null && match[0].length > 0 && !this.isCanceled && !limitReached) {
if (fileMatch === null) {
fileMatch = new FileMatch(absolutePath);
}

const lineMatch = new TextSearchMatch(line, new Range(lineNumber, match.index, lineNumber, match.index + match[0].length), previewOptions);
fileMatch.addMatch(lineMatch);

numMatches++;
if (maxResults && numMatches >= maxResults) {
limitReached = true;
}

match = contentPattern.exec(line);
}
};

// Read lines buffered to support large files
return this.readlinesAsync(absolutePath, perLineCallback, { bufferLength: 8096, encoding: fileEncoding }).then(
() => fileMatch ? { match: fileMatch, limitReached, numMatches } : null);
}

private readlinesAsync(filename: string, perLineCallback: (line: string, lineNumber: number) => void, options: ReadLinesOptions): Promise<void> {
return new Promise<void>((resolve, reject) => {
fs.open(filename, 'r', null, (error: Error, fd: number) => {
if (error) {
return resolve(null);
}

const buffer = Buffer.allocUnsafe(options.bufferLength);
let line = '';
let lineNumber = 0;
let lastBufferHadTrailingCR = false;

const readFile = (isFirstRead: boolean, clb: (error: Error) => void): void => {
if (this.isCanceled) {
return clb(null); // return early if canceled or limit reached
}

fs.read(fd, buffer, 0, buffer.length, null, (error: Error, bytesRead: number, buffer: Buffer) => {
const decodeBuffer = (buffer: Buffer, start: number, end: number): string => {
if (options.encoding === UTF8 || options.encoding === UTF8_with_bom) {
return buffer.toString(undefined, start, end); // much faster to use built in toString() when encoding is default
}

return decode(buffer.slice(start, end), options.encoding);
};

const lineFinished = (offset: number): void => {
line += decodeBuffer(buffer, pos, i + offset);
perLineCallback(line, lineNumber);
line = '';
lineNumber++;
pos = i + offset;
};

if (error || bytesRead === 0 || this.isCanceled) {
return clb(error); // return early if canceled or limit reached or no more bytes to read
}

let crlfCharSize = 1;
let crBytes = [CR];
let lfBytes = [LF];
let pos = 0;
let i = 0;

// Detect encoding and mime when this is the beginning of the file
if (isFirstRead) {
const detected = detectEncodingFromBuffer({ buffer, bytesRead }, false);
if (detected.seemsBinary) {
return clb(null); // skip files that seem binary
}

// Check for BOM offset
switch (detected.encoding) {
case UTF8:
pos = i = bomLength(UTF8);
options.encoding = UTF8;
break;
case UTF16be:
pos = i = bomLength(UTF16be);
options.encoding = UTF16be;
break;
case UTF16le:
pos = i = bomLength(UTF16le);
options.encoding = UTF16le;
break;
}

// when we are running with UTF16le/be, LF and CR are encoded as
// two bytes, like 0A 00 (LF) / 0D 00 (CR) for LE or flipped around
// for BE. We need to account for this when splitting the buffer into
// newlines, and when detecting a CRLF combo.
if (options.encoding === UTF16le) {
crlfCharSize = 2;
crBytes = [CR, 0x00];
lfBytes = [LF, 0x00];
} else if (options.encoding === UTF16be) {
crlfCharSize = 2;
crBytes = [0x00, CR];
lfBytes = [0x00, LF];
}
}

if (lastBufferHadTrailingCR) {
if (buffer[i] === lfBytes[0] && (lfBytes.length === 1 || buffer[i + 1] === lfBytes[1])) {
lineFinished(1 * crlfCharSize);
i++;
} else {
lineFinished(0);
}

lastBufferHadTrailingCR = false;
}

/**
 * This loop executes for every byte of every file in the workspace - it is highly performance-sensitive!
 * Hence the duplication in reading the buffer to avoid a function call. Previously a function call was not
 * being inlined by V8.
 */
for (; i < bytesRead; ++i) {
if (buffer[i] === lfBytes[0] && (lfBytes.length === 1 || buffer[i + 1] === lfBytes[1])) {
lineFinished(1 * crlfCharSize);
} else if (buffer[i] === crBytes[0] && (crBytes.length === 1 || buffer[i + 1] === crBytes[1])) { // CR (Carriage Return)
if (i + crlfCharSize === bytesRead) {
lastBufferHadTrailingCR = true;
} else if (buffer[i + crlfCharSize] === lfBytes[0] && (lfBytes.length === 1 || buffer[i + crlfCharSize + 1] === lfBytes[1])) {
lineFinished(2 * crlfCharSize);
i += 2 * crlfCharSize - 1;
} else {
lineFinished(1 * crlfCharSize);
}
}
}

line += decodeBuffer(buffer, pos, bytesRead);

readFile(/*isFirstRead=*/false, clb); // Continue reading
});
};

readFile(/*isFirstRead=*/true, (error: Error) => {
if (error) {
return resolve(null);
}

if (line.length) {
perLineCallback(line, lineNumber); // handle last line
}

fs.close(fd, (error: Error) => {
resolve(null);
});
});
});
});
}
}
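Note: the deleted reader above scans raw buffers for newline byte sequences, widening CR/LF to two bytes under UTF-16 (see the comment in its inner loop). A compressed sketch of the lookup it builds:

const LF = 0x0a, CR = 0x0d;

function newlineBytes(encoding: 'utf8' | 'utf16le' | 'utf16be') {
	switch (encoding) {
		case 'utf16le': return { charSize: 2, cr: [CR, 0x00], lf: [LF, 0x00] };
		case 'utf16be': return { charSize: 2, cr: [0x00, CR], lf: [0x00, LF] };
		default: return { charSize: 1, cr: [CR], lf: [LF] };
	}
}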
@@ -1,13 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { Server } from 'vs/base/parts/ipc/node/ipc.cp';
import { SearchWorkerChannel } from './searchWorkerIpc';
import { SearchWorker } from './searchWorker';

const server = new Server('searchWorker');
const worker = new SearchWorker();
const channel = new SearchWorkerChannel(worker);
server.registerChannel('searchWorker', channel);
@@ -1,64 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { IChannel, IServerChannel } from 'vs/base/parts/ipc/node/ipc';
import { IPatternInfo, ITextSearchPreviewOptions } from 'vs/platform/search/common/search';
import { SearchWorker } from './searchWorker';
import { Event } from 'vs/base/common/event';
import { ISerializedFileMatch } from 'vs/workbench/services/search/node/search';

export interface ISearchWorkerSearchArgs {
pattern: IPatternInfo;
fileEncoding: string;
absolutePaths: string[];
maxResults?: number;
previewOptions?: ITextSearchPreviewOptions;
}

export interface ISearchWorkerSearchResult {
matches: ISerializedFileMatch[];
numMatches: number;
limitReached: boolean;
}

export interface ISearchWorker {
initialize(): Thenable<void>;
search(args: ISearchWorkerSearchArgs): Thenable<ISearchWorkerSearchResult>;
cancel(): Thenable<void>;
}

export class SearchWorkerChannel implements IServerChannel {
constructor(private worker: SearchWorker) {
}

listen<T>(): Event<T> {
throw new Error('No events');
}

call(_, command: string, arg?: any): Promise<any> {
switch (command) {
case 'initialize': return this.worker.initialize();
case 'search': return this.worker.search(arg);
case 'cancel': return this.worker.cancel();
}
throw new Error(`Call not found: ${command}`);
}
}

export class SearchWorkerChannelClient implements ISearchWorker {
constructor(private channel: IChannel) { }

initialize(): Thenable<void> {
return this.channel.call('initialize');
}

search(args: ISearchWorkerSearchArgs): Thenable<ISearchWorkerSearchResult> {
return this.channel.call('search', args);
}

cancel(): Thenable<void> {
return this.channel.call('cancel');
}
}
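Note: SearchWorkerChannel above routes string commands to worker methods over IPC, and the client mirrors each method as a channel.call. A minimal sketch of that dispatch pattern:

interface WorkerLike {
	initialize(): Promise<void>;
	search(arg: unknown): Promise<unknown>;
	cancel(): Promise<void>;
}

function dispatch(worker: WorkerLike, command: string, arg?: unknown): Promise<unknown> {
	switch (command) {
		case 'initialize': return worker.initialize();
		case 'search': return worker.search(arg);
		case 'cancel': return worker.cancel();
	}
	throw new Error(`Call not found: ${command}`);
}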
@@ -19,7 +19,6 @@ import { compareItemsByScore, IItemAccessor, prepareQuery, ScorerCache } from 'v
|
||||
import { MAX_FILE_SIZE } from 'vs/platform/files/node/files';
|
||||
import { ICachedSearchStats, IFileQuery, IFileSearchStats, IFolderQuery, IProgress, IRawFileQuery, IRawQuery, IRawTextQuery, ITextQuery } from 'vs/platform/search/common/search';
|
||||
import { Engine as FileSearchEngine } from 'vs/workbench/services/search/node/fileSearch';
|
||||
import { LegacyTextSearchService } from 'vs/workbench/services/search/node/legacy/rawLegacyTextSearchService';
|
||||
import { TextSearchEngineAdapter } from 'vs/workbench/services/search/node/textSearchAdapter';
|
||||
import { IFileSearchProgressItem, IRawFileMatch, IRawSearchService, ISearchEngine, ISearchEngineSuccess, ISerializedFileMatch, ISerializedSearchComplete, ISerializedSearchProgressItem, ISerializedSearchSuccess } from './search';
|
||||
|
||||
@@ -32,10 +31,9 @@ export class SearchService implements IRawSearchService {
|
||||
|
||||
private static readonly BATCH_SIZE = 512;
|
||||
|
||||
private legacyTextSearchService = new LegacyTextSearchService();
|
||||
private caches: { [cacheKey: string]: Cache; } = Object.create(null);
|
||||
|
||||
public fileSearch(config: IRawFileQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
|
||||
fileSearch(config: IRawFileQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
|
||||
let promise: CancelablePromise<ISerializedSearchSuccess>;
|
||||
|
||||
const query = reviveQuery(config);
|
||||
@@ -57,16 +55,14 @@ export class SearchService implements IRawSearchService {
|
||||
return emitter.event;
|
||||
}
|
||||
|
||||
public textSearch(rawQuery: IRawTextQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
|
||||
textSearch(rawQuery: IRawTextQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete> {
|
||||
let promise: CancelablePromise<ISerializedSearchComplete>;
|
||||
|
||||
const query = reviveQuery(rawQuery);
|
||||
const emitter = new Emitter<ISerializedSearchProgressItem | ISerializedSearchComplete>({
|
||||
onFirstListenerDidAdd: () => {
|
||||
promise = createCancelablePromise(token => {
|
||||
return (rawQuery.useRipgrep ?
|
||||
this.ripgrepTextSearch(query, p => emitter.fire(p), token) :
|
||||
this.legacyTextSearchService.textSearch(query, p => emitter.fire(p), token));
|
||||
return this.ripgrepTextSearch(query, p => emitter.fire(p), token);
|
||||
});
|
||||
|
||||
promise.then(
|
||||
@@ -115,7 +111,7 @@ export class SearchService implements IRawSearchService {
|
||||
}
|
||||
|
||||
return new Promise<ISerializedSearchSuccess>((c, e) => {
|
||||
sortedSearch.then(([result, rawMatches]) => {
|
||||
sortedSearch!.then(([result, rawMatches]) => {
|
||||
const serializedMatches = rawMatches.map(rawMatch => this.rawMatchToSearchItem(rawMatch));
|
||||
this.sendProgress(serializedMatches, progressCallback, batchSize);
|
||||
c(result);
|
||||
@@ -173,11 +169,11 @@ export class SearchService implements IRawSearchService {
|
||||
event: emitter.event,
|
||||
resolved: false
|
||||
};
|
||||
cache.resultsToSearchCache[config.filePattern] = cacheRow;
|
||||
cache.resultsToSearchCache[config.filePattern || ''] = cacheRow;
|
||||
allResultsPromise.then(() => {
|
||||
cacheRow.resolved = true;
|
||||
}, err => {
|
||||
delete cache.resultsToSearchCache[config.filePattern];
|
||||
delete cache.resultsToSearchCache[config.filePattern || ''];
|
||||
});
|
||||
|
||||
allResultsPromise = this.preventCancellation(allResultsPromise);
@@ -216,13 +212,13 @@ export class SearchService implements IRawSearchService {
		return this.caches[cacheKey] = new Cache();
	}

	private trySortedSearchFromCache(config: IFileQuery, progressCallback: IFileProgressCallback, token?: CancellationToken): Promise<[ISerializedSearchSuccess, IRawFileMatch[]]> {
	private trySortedSearchFromCache(config: IFileQuery, progressCallback: IFileProgressCallback, token?: CancellationToken): Promise<[ISerializedSearchSuccess, IRawFileMatch[]]> | undefined {
		const cache = config.cacheKey && this.caches[config.cacheKey];
		if (!cache) {
			return undefined;
		}

		const cached = this.getResultsFromCache(cache, config.filePattern, progressCallback, token);
		const cached = this.getResultsFromCache(cache, config.filePattern || '', progressCallback, token);
		if (cached) {
			return cached.then(([result, results, cacheStats]) => {
				const sortSW = StopWatch.create(false);
@@ -256,10 +252,11 @@ export class SearchService implements IRawSearchService {
		// this is very important because we are also limiting the number of results by config.maxResults
		// and as such we want the top items to be included in this result set if the number of items
		// exceeds config.maxResults.
		const query = prepareQuery(config.filePattern);
		const query = prepareQuery(config.filePattern || '');
		const compare = (matchA: IRawFileMatch, matchB: IRawFileMatch) => compareItemsByScore(matchA, matchB, query, true, FileMatchItemAccessor, scorerCache);

		return arrays.topAsync(results, compare, config.maxResults, 10000, token);
		const maxResults = config.maxResults || Number.MAX_VALUE;
		return arrays.topAsync(results, compare, maxResults, 10000, token);
	}
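
The `maxResults` fallback introduced above matters because `IFileQuery.maxResults` is now `number | null`, while the top-N helper needs a concrete limit. A minimal sketch of the same pattern (the `topAsync`-style helper is stubbed here for illustration, not quoted from `vs/base/common/arrays`):

// Sketch: pick the top N results under a nullable limit, as in trySortedSearchFromCache.
function topSorted<T>(items: T[], compare: (a: T, b: T) => number, maxResults: number | null): T[] {
	const limit = maxResults || Number.MAX_VALUE; // null means "no effective cap"
	return items.slice().sort(compare).slice(0, Math.min(limit, items.length));
}
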
	private sendProgress(results: ISerializedFileMatch[], progressCb: IProgressCallback, batchSize: number) {
@@ -272,13 +269,13 @@ export class SearchService implements IRawSearchService {
		}
	}

	private getResultsFromCache(cache: Cache, searchValue: string, progressCallback: IFileProgressCallback, token?: CancellationToken): Promise<[ISearchEngineSuccess, IRawFileMatch[], ICachedSearchStats]> {
	private getResultsFromCache(cache: Cache, searchValue: string, progressCallback: IFileProgressCallback, token?: CancellationToken): Promise<[ISearchEngineSuccess, IRawFileMatch[], ICachedSearchStats]> | null {
		const cacheLookupSW = StopWatch.create(false);

		// Find cache entries by prefix of search value
		const hasPathSep = searchValue.indexOf(sep) >= 0;
		let cachedRow: ICacheRow;
		for (let previousSearch in cache.resultsToSearchCache) {
		let cachedRow: ICacheRow | undefined;
		for (const previousSearch in cache.resultsToSearchCache) {
			// If we narrow down, we might be able to reuse the cached results
			if (strings.startsWith(searchValue, previousSearch)) {
				if (hasPathSep && previousSearch.indexOf(sep) < 0) {
@@ -315,10 +312,9 @@ export class SearchService implements IRawSearchService {
		}

		// Pattern match on results
		let results: IRawFileMatch[] = [];
		const results: IRawFileMatch[] = [];
		const normalizedSearchValueLowercase = strings.stripWildcards(searchValue).toLowerCase();
		for (let i = 0; i < cachedEntries.length; i++) {
			let entry = cachedEntries[i];
		for (const entry of cachedEntries) {

			// Check if this entry is a match for the search value
			if (!strings.fuzzyContains(entry.relativePath, normalizedSearchValueLowercase)) {
@@ -329,7 +325,7 @@ export class SearchService implements IRawSearchService {
		}

		return [complete, results, {
			cacheWasResolved: cachedRow.resolved,
			cacheWasResolved: cachedRow!.resolved,
			cacheLookupTime,
			cacheFilterTime: cacheFilterSW.elapsed(),
			cacheEntryCount: cachedEntries.length
@@ -374,7 +370,7 @@ export class SearchService implements IRawSearchService {
		});
	}
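
The lookup above reuses a previous search's results when the new pattern merely narrows it: if `searchValue` starts with a cached `previousSearch` key, the cached entries are re-filtered with `fuzzyContains` instead of walking the disk again. A minimal sketch of that idea (types simplified; this is not the actual `Cache` implementation):

// Sketch: narrow a cached result set instead of re-running the search.
interface CachedSearch { [previousPattern: string]: string[] /* relative paths */; }

function narrowFromCache(cache: CachedSearch, searchValue: string): string[] | undefined {
	for (const previous in cache) {
		if (searchValue.startsWith(previous)) { // e.g. typing "foob" after "foo" was searched
			const needle = searchValue.toLowerCase();
			return cache[previous].filter(path => fuzzyContains(path, needle));
		}
	}
	return undefined; // no reusable entry; fall back to a full search
}

// Stand-in for strings.fuzzyContains: all needle chars appear in order.
function fuzzyContains(target: string, needle: string): boolean {
	let i = 0;
	for (const c of target.toLowerCase()) {
		if (c === needle[i]) { i++; }
	}
	return i === needle.length;
}
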
	public clearCache(cacheKey: string): Promise<void> {
	clearCache(cacheKey: string): Promise<void> {
		delete this.caches[cacheKey];
		return Promise.resolve(undefined);
	}
@@ -394,6 +390,9 @@ export class SearchService implements IRawSearchService {
			catch(reject?) {
				return this.then(undefined, reject);
			}
			finally(onFinally) {
				return promise.finally(onFinally);
			}
		};
	}
}
@@ -407,22 +406,22 @@ interface ICacheRow {

class Cache {

	public resultsToSearchCache: { [searchValue: string]: ICacheRow; } = Object.create(null);
	resultsToSearchCache: { [searchValue: string]: ICacheRow; } = Object.create(null);

	public scorerCache: ScorerCache = Object.create(null);
	scorerCache: ScorerCache = Object.create(null);
}

const FileMatchItemAccessor = new class implements IItemAccessor<IRawFileMatch> {

	public getItemLabel(match: IRawFileMatch): string {
	getItemLabel(match: IRawFileMatch): string {
		return match.basename; // e.g. myFile.txt
	}

	public getItemDescription(match: IRawFileMatch): string {
	getItemDescription(match: IRawFileMatch): string {
		return match.relativePath.substr(0, match.relativePath.length - match.basename.length - 1); // e.g. some/path/to/file
	}

	public getItemPath(match: IRawFileMatch): string {
	getItemPath(match: IRawFileMatch): string {
		return match.relativePath; // e.g. some/path/to/file/myFile.txt
	}
};
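
`FileMatchItemAccessor` is what lets the generic fuzzy scorer rank raw file matches: the scorer only ever sees a label (basename), a description (parent path), and a full path. A hedged sketch of how such an accessor plugs into score-based comparison (the scoring itself is simplified; only the accessor shape comes from the code above, the real compareItemsByScore is much richer):

// Sketch: rank items by scoring the fields an accessor exposes.
interface ItemAccessor<T> {
	getItemLabel(item: T): string;
	getItemDescription(item: T): string;
	getItemPath(item: T): string;
}

// Toy scorer: label hits count more than path hits.
function scoreItem<T>(item: T, query: string, accessor: ItemAccessor<T>): number {
	const q = query.toLowerCase();
	let score = 0;
	if (accessor.getItemLabel(item).toLowerCase().includes(q)) { score += 100; }
	if (accessor.getItemPath(item).toLowerCase().includes(q)) { score += 10; }
	return score;
}
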
@@ -18,7 +18,7 @@ import { rgPath } from 'vscode-ripgrep';
// If vscode-ripgrep is in an .asar file, then the binary is unpacked.
const rgDiskPath = rgPath.replace(/\bnode_modules\.asar\b/, 'node_modules.asar.unpacked');

export function spawnRipgrepCmd(config: IFileQuery, folderQuery: IFolderQuery, includePattern: glob.IExpression, excludePattern: glob.IExpression) {
export function spawnRipgrepCmd(config: IFileQuery, folderQuery: IFolderQuery, includePattern?: glob.IExpression, excludePattern?: glob.IExpression) {
	const rgArgs = getRgArgs(config, folderQuery, includePattern, excludePattern);
	const cwd = folderQuery.folder.fsPath;
	return {
@@ -29,7 +29,7 @@ export function spawnRipgrepCmd(config: IFileQuery, folderQuery: IFolderQuery, i
	};
}

function getRgArgs(config: IFileQuery, folderQuery: IFolderQuery, includePattern: glob.IExpression, excludePattern: glob.IExpression) {
function getRgArgs(config: IFileQuery, folderQuery: IFolderQuery, includePattern?: glob.IExpression, excludePattern?: glob.IExpression) {
	const args = ['--files', '--hidden', '--case-sensitive'];

	// includePattern can't have siblingClauses
@@ -44,8 +44,6 @@ function getRgArgs(config: IFileQuery, folderQuery: IFolderQuery, includePattern
		}
	});

	let siblingClauses: glob.IExpression | null;

	const rgGlobs = foldersToRgExcludeGlobs([folderQuery], excludePattern, undefined, false);
	rgGlobs.globArgs.forEach(globArg => {
		const exclusion = `!${anchorGlob(globArg)}`;
@@ -57,8 +55,6 @@ function getRgArgs(config: IFileQuery, folderQuery: IFolderQuery, includePattern
			}
		}
	});
	siblingClauses = rgGlobs.siblingClauses;

	if (folderQuery.disregardIgnoreFiles !== false) {
		// Don't use .gitignore or .ignore
		args.push('--no-ignore');
@@ -80,15 +76,18 @@ function getRgArgs(config: IFileQuery, folderQuery: IFolderQuery, includePattern
		args.push('--no-ignore-global');
	}

	return { args, siblingClauses };
	return {
		args,
		siblingClauses: rgGlobs.siblingClauses
	};
}
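
`spawnRipgrepCmd` only assembles the argument list and a `cwd`; the caller launches the unpacked `rgDiskPath` binary itself. A hedged sketch of what consuming that return value can look like (the `cp.spawn` call and line parsing are assumptions based on this commit's imports, not a quote of the actual caller):

import * as cp from 'child_process';

// Sketch: launch ripgrep with the args built by getRgArgs and stream file paths back.
// `rgBinary` would be rgDiskPath from this file; the parsing below is illustrative.
function runFileSearch(rgBinary: string, args: string[], cwd: string, onPath: (relativePath: string) => void): cp.ChildProcess {
	const rg = cp.spawn(rgBinary, args, { cwd });
	let buffered = '';
	rg.stdout!.on('data', (chunk: Buffer) => {
		buffered += chunk.toString();
		const lines = buffered.split('\n');
		buffered = lines.pop()!; // keep a trailing partial line for the next chunk
		lines.filter(l => l.length > 0).forEach(onPath); // `rg --files` prints one path per line
	});
	return rg;
}
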
export interface IRgGlobResult {
	globArgs: string[];
	siblingClauses: glob.IExpression | null;
	siblingClauses: glob.IExpression;
}

export function foldersToRgExcludeGlobs(folderQueries: IFolderQuery[], globalExclude: glob.IExpression, excludesToSkip?: Set<string>, absoluteGlobs = true): IRgGlobResult {
export function foldersToRgExcludeGlobs(folderQueries: IFolderQuery[], globalExclude?: glob.IExpression, excludesToSkip?: Set<string>, absoluteGlobs = true): IRgGlobResult {
	const globArgs: string[] = [];
	let siblingClauses: glob.IExpression = {};
	folderQueries.forEach(folderQuery => {
@@ -103,7 +102,7 @@ export function foldersToRgExcludeGlobs(folderQueries: IFolderQuery[], globalExc
	return { globArgs, siblingClauses };
}

export function foldersToIncludeGlobs(folderQueries: IFolderQuery[], globalInclude: glob.IExpression, absoluteGlobs = true): string[] {
export function foldersToIncludeGlobs(folderQueries: IFolderQuery[], globalInclude?: glob.IExpression, absoluteGlobs = true): string[] {
	const globArgs: string[] = [];
	folderQueries.forEach(folderQuery => {
		const totalIncludePattern = objects.assign({}, globalInclude || {}, folderQuery.includePattern || {});
@@ -116,7 +115,7 @@ export function foldersToIncludeGlobs(folderQueries: IFolderQuery[], globalInclu

function globExprsToRgGlobs(patterns: glob.IExpression, folder?: string, excludesToSkip?: Set<string>): IRgGlobResult {
	const globArgs: string[] = [];
	let siblingClauses: glob.IExpression | null = null;
	const siblingClauses: glob.IExpression = {};
	Object.keys(patterns)
		.forEach(key => {
			if (excludesToSkip && excludesToSkip.has(key)) {
@@ -146,10 +145,6 @@ function globExprsToRgGlobs(patterns: glob.IExpression, folder?: string, exclude

				globArgs.push(fixDriveC(key));
			} else if (value && value.when) {
				if (!siblingClauses) {
					siblingClauses = {};
				}

				siblingClauses[key] = value;
			}
		});
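
The `when` branch above exists because ripgrep cannot evaluate sibling clauses: an exclude like `{ '**/*.js': { when: '$(basename).ts' } }` only applies when a sibling file exists, so it cannot become a `-g` glob and is handed back to the caller in `siblingClauses` instead. A hedged illustration of the split (the `when` syntax is VS Code's standard `files.exclude` form):

// Sketch: plain globs become rg arguments, `when` clauses are set aside.
const excludeExpression = {
	'**/node_modules': true,               // plain glob, usable as `-g '!**/node_modules'`
	'**/*.js': { when: '$(basename).ts' }  // sibling clause: rg can't test for the sibling .ts file
};

const globArgs: string[] = [];
const siblingClauses: { [pattern: string]: { when: string } } = {};
for (const [key, value] of Object.entries(excludeExpression)) {
	if (value === true) {
		globArgs.push(key);          // goes to ripgrep
	} else if (typeof value === 'object' && value.when) {
		siblingClauses[key] = value; // re-checked in process, per result
	}
}
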
@@ -20,7 +20,7 @@ export class RipgrepSearchProvider implements vscode.TextSearchProvider {
		return this.withToken(token, token => engine.provideTextSearchResults(query, options, progress, token));
	}

	private async withToken<T>(token: vscode.CancellationToken, fn: (token: vscode.CancellationToken) => Thenable<T>): Promise<T> {
	private async withToken<T>(token: vscode.CancellationToken, fn: (token: vscode.CancellationToken) => Promise<T>): Promise<T> {
		const merged = mergedTokenSource(token);
		this.inProgress.add(merged);
		const result = await fn(merged.token);

@@ -42,7 +42,7 @@ function searchRangeToRange(range: SearchRange): Range {
}

export class Position {
	constructor(public readonly line, public readonly character) { }
	constructor(readonly line, readonly character) { }

	isBefore(other: Position): boolean { return false; }
	isBeforeOrEqual(other: Position): boolean { return false; }
@@ -84,7 +84,7 @@ export interface IOutputChannel {
}

export class OutputChannel implements IOutputChannel {
	constructor(@ILogService private logService: ILogService) { }
	constructor(@ILogService private readonly logService: ILogService) { }

	appendLine(msg: string): void {
		this.logService.debug('RipgrepSearchEH#search', msg);

@@ -7,12 +7,15 @@ import * as cp from 'child_process';
import { EventEmitter } from 'events';
import * as path from 'path';
import { NodeStringDecoder, StringDecoder } from 'string_decoder';
import { createRegExp, startsWith, startsWithUTF8BOM, stripUTF8BOM, escapeRegExpCharacters } from 'vs/base/common/strings';
import { createRegExp, startsWith, startsWithUTF8BOM, stripUTF8BOM, escapeRegExpCharacters, endsWith } from 'vs/base/common/strings';
import { URI } from 'vs/base/common/uri';
import { IExtendedExtensionSearchOptions, SearchError, SearchErrorCode, serializeSearchError } from 'vs/platform/search/common/search';
import * as vscode from 'vscode';
import { rgPath } from 'vscode-ripgrep';
import { anchorGlob, createTextSearchResult, IOutputChannel, Maybe, Range } from './ripgrepSearchUtils';
import { coalesce } from 'vs/base/common/arrays';
import { splitGlobAware } from 'vs/base/common/glob';
import { groupBy } from 'vs/base/common/collections';

// If vscode-ripgrep is in an .asar file, then the binary is unpacked.
const rgDiskPath = rgPath.replace(/\bnode_modules\.asar\b/, 'node_modules.asar.unpacked');
@@ -21,7 +24,7 @@ export class RipgrepTextSearchEngine {

	constructor(private outputChannel: IOutputChannel) { }

	provideTextSearchResults(query: vscode.TextSearchQuery, options: vscode.TextSearchOptions, progress: vscode.Progress<vscode.TextSearchResult>, token: vscode.CancellationToken): Thenable<vscode.TextSearchComplete> {
	provideTextSearchResults(query: vscode.TextSearchQuery, options: vscode.TextSearchOptions, progress: vscode.Progress<vscode.TextSearchResult>, token: vscode.CancellationToken): Promise<vscode.TextSearchComplete> {
		this.outputChannel.appendLine(`provideTextSearchResults ${query.pattern}, ${JSON.stringify({
			...options,
			...{
@@ -122,7 +125,7 @@ export function rgErrorMsgForDisplay(msg: string): Maybe<SearchError> {
		return new SearchError('Regex parse error', SearchErrorCode.regexParseError);
	}

	let match = firstLine.match(/grep config error: unknown encoding: (.*)/);
	const match = firstLine.match(/grep config error: unknown encoding: (.*)/);
	if (match) {
		return new SearchError(`Unknown encoding: ${match[1]}`, SearchErrorCode.unknownEncoding);
	}
@@ -153,11 +156,11 @@ export class RipgrepParser extends EventEmitter {
		this.stringDecoder = new StringDecoder();
	}

	public cancel(): void {
	cancel(): void {
		this.isDone = true;
	}

	public flush(): void {
	flush(): void {
		this.handleDecodedData(this.stringDecoder.end());
	}

@@ -168,7 +171,7 @@ export class RipgrepParser extends EventEmitter {
		super.on(event, listener);
	}

	public handleData(data: Buffer | string): void {
	handleData(data: Buffer | string): void {
		if (this.isDone) {
			return;
		}
@@ -234,13 +237,18 @@ export class RipgrepParser extends EventEmitter {

	private createTextSearchMatch(data: IRgMatch, uri: vscode.Uri): vscode.TextSearchMatch {
		const lineNumber = data.line_number - 1;
		const fullText = bytesOrTextToString(data.lines);
		let isBOMStripped = false;
		let fullText = bytesOrTextToString(data.lines);
		if (lineNumber === 0 && startsWithUTF8BOM(fullText)) {
			isBOMStripped = true;
			fullText = stripUTF8BOM(fullText);
		}
		const fullTextBytes = Buffer.from(fullText);

		let prevMatchEnd = 0;
		let prevMatchEndCol = 0;
		let prevMatchEndLine = lineNumber;
		const ranges = data.submatches.map((match, i) => {
		const ranges = coalesce(data.submatches.map((match, i) => {
			if (this.hitLimit) {
				return null;
			}
@@ -252,29 +260,27 @@ export class RipgrepParser extends EventEmitter {
			}

			let matchText = bytesOrTextToString(match.match);
			if (lineNumber === 0 && i === 0 && isBOMStripped) {
				matchText = stripUTF8BOM(matchText);
				match.start = match.start <= 3 ? 0 : match.start - 3;
				match.end = match.end <= 3 ? 0 : match.end - 3;
			}
			const inBetweenChars = fullTextBytes.slice(prevMatchEnd, match.start).toString().length;
			let startCol = prevMatchEndCol + inBetweenChars;

			const stats = getNumLinesAndLastNewlineLength(matchText);
			let startLineNumber = prevMatchEndLine;
			let endLineNumber = stats.numLines + startLineNumber;
			const startLineNumber = prevMatchEndLine;
			const endLineNumber = stats.numLines + startLineNumber;
			let endCol = stats.numLines > 0 ?
				stats.lastLineLength :
				stats.lastLineLength + startCol;

			if (lineNumber === 0 && i === 0 && startsWithUTF8BOM(matchText)) {
				matchText = stripUTF8BOM(matchText);
				startCol -= 3;
				endCol -= 3;
			}

			prevMatchEnd = match.end;
			prevMatchEndCol = endCol;
			prevMatchEndLine = endLineNumber;

			return new Range(startLineNumber, startCol, endLineNumber, endCol);
		})
			.filter(r => !!r);
		}));

		return createTextSearchResult(uri, fullText, <Range[]>ranges, this.previewOptions);
	}
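
The BOM handling above works in byte offsets: ripgrep reports `match.start`/`match.end` as byte positions, and a UTF-8 BOM is exactly three bytes (0xEF 0xBB 0xBF), so a match on the first line shifts left by three once the BOM is stripped. A small self-contained illustration of that invariant:

// Sketch: why the code subtracts 3 for first-line matches after stripping a UTF-8 BOM.
const UTF8_BOM_BYTES = 3; // 0xEF 0xBB 0xBF

function adjustForStrippedBOM(start: number, end: number): [number, number] {
	// Offsets that pointed into the BOM itself clamp to 0, mirroring `match.start <= 3 ? 0 : ...`.
	return [
		start <= UTF8_BOM_BYTES ? 0 : start - UTF8_BOM_BYTES,
		end <= UTF8_BOM_BYTES ? 0 : end - UTF8_BOM_BYTES
	];
}

// A match at bytes [3, 8) in a BOM-prefixed line starts at byte 0 of the stripped text.
console.log(adjustForStrippedBOM(3, 8)); // [0, 5]
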
@@ -326,9 +332,36 @@ function getRgArgs(query: vscode.TextSearchQuery, options: vscode.TextSearchOpti
	const args = ['--hidden'];
	args.push(query.isCaseSensitive ? '--case-sensitive' : '--ignore-case');

	options.includes
		.map(anchorGlob)
		.forEach(globArg => args.push('-g', globArg));
	const { doubleStarIncludes, otherIncludes } = groupBy(
		options.includes,
		(include: string) => startsWith(include, '**') ? 'doubleStarIncludes' : 'otherIncludes');

	if (otherIncludes && otherIncludes.length) {
		const uniqueOthers = new Set<string>();
		otherIncludes.forEach(other => {
			if (!endsWith(other, '/**')) {
				other += '/**';
			}

			uniqueOthers.add(other);
		});

		args.push('-g', '!*');
		uniqueOthers
			.forEach(otherInclude => {
				spreadGlobComponents(otherInclude)
					.map(anchorGlob)
					.forEach(globArg => {
						args.push('-g', globArg);
					});
			});
	}

	if (doubleStarIncludes && doubleStarIncludes.length) {
		doubleStarIncludes.forEach(globArg => {
			args.push('-g', globArg);
		});
	}

	options.excludes
		.map(anchorGlob)
@@ -424,6 +457,18 @@ function getRgArgs(query: vscode.TextSearchQuery, options: vscode.TextSearchOpti
	return args;
}

/**
 * `"foo/*bar/something"` -> `["foo", "foo/*bar", "foo/*bar/something", "foo/*bar/something/**"]`
 */
export function spreadGlobComponents(globArg: string): string[] {
	const components = splitGlobAware(globArg, '/');
	if (components[components.length - 1] !== '**') {
		components.push('**');
	}

	return components.map((_, i) => components.slice(0, i + 1).join('/'));
}
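
The parent-component spreading matters because of the `-g '!*'` trick above: once everything is excluded, each directory level of an include has to be explicitly re-allowed or ripgrep would never descend into it. A worked example following the function's own doc comment (the leading `/` anchoring is assumed from `anchorGlob`'s use above):

// Sketch: include "src/node" -> normalized to "src/node/**" -> spread and anchored.
// After `-g '!*'` excludes everything, rg needs every ancestor whitelisted:
//   -g '/src'          allow the first directory level
//   -g '/src/node'     allow the second level
//   -g '/src/node/**'  allow everything beneath it
const spread = ['src', 'src/node', 'src/node/**']; // = spreadGlobComponents('src/node/**')
const globArgs = spread.map(g => `/${g}`);
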
export function unicodeEscapesToPCRE2(pattern: string): string {
	const reg = /((?:[^\\]|^)(?:\\\\)*)\\u([a-z0-9]{4})(?!\d)/g;
	// Replace an unescaped $ at the end of the pattern with \r?$
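
PCRE2 spells Unicode escapes as \x{hhhh} rather than JavaScript's \uhhhh, which is what the capture groups above exist for: group 1 preserves the non-escape prefix (handling runs of escaped backslashes), group 2 grabs the four hex digits. A hedged sketch of the rewrite step (the exact replacement string inside this function is not shown in the hunk, so the `.replace` call is an assumption about its body):

// Sketch (assumed): convert JS-style \uABCD escapes to PCRE2's \x{ABCD} form.
const reg = /((?:[^\\]|^)(?:\\\\)*)\\u([a-z0-9]{4})(?!\d)/g;
const toPCRE2 = (pattern: string) =>
	pattern.replace(reg, (_: string, prefix: string, code: string) => `${prefix}\\x{${code}}`);

toPCRE2('caf\\u00e9'); // -> 'caf\\x{00e9}'
toPCRE2('\\\\u0041');  // -> unchanged: the backslash before u is itself escaped
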
@@ -16,7 +16,7 @@ export interface ITelemetryEvent {
export interface IRawSearchService {
	fileSearch(search: IRawFileQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
	textSearch(search: IRawTextQuery): Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
	clearCache(cacheKey: string): Thenable<void>;
	clearCache(cacheKey: string): Promise<void>;
}

export interface IRawFileMatch {
@@ -27,7 +27,7 @@ export interface IRawFileMatch {
}

export interface ISearchEngine<T> {
	search: (onResult: (matches: T) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISearchEngineSuccess) => void) => void;
	search: (onResult: (matches: T) => void, onProgress: (progress: IProgress) => void, done: (error: Error | null, complete: ISearchEngineSuccess) => void) => void;
	cancel: () => void;
}

@@ -154,7 +154,7 @@ export class QueryGlobTester {
	/**
	 * Guaranteed sync - siblingsFn should not return a promise.
	 */
	public includedInQuerySync(testPath: string, basename?: string, hasSibling?: (name: string) => boolean): boolean {
	includedInQuerySync(testPath: string, basename?: string, hasSibling?: (name: string) => boolean): boolean {
		if (this._parsedExcludeExpression && this._parsedExcludeExpression(testPath, basename, hasSibling)) {
			return false;
		}
@@ -169,7 +169,7 @@ export class QueryGlobTester {
	/**
	 * Guaranteed async.
	 */
	public includedInQuery(testPath: string, basename?: string, hasSibling?: (name: string) => boolean | Promise<boolean>): Promise<boolean> {
	includedInQuery(testPath: string, basename?: string, hasSibling?: (name: string) => boolean | Promise<boolean>): Promise<boolean> {
		const excludeP = this._parsedExcludeExpression ?
			Promise.resolve(this._parsedExcludeExpression(testPath, basename, hasSibling)).then(result => !!result) :
			Promise.resolve(false);
@@ -187,13 +187,13 @@ export class QueryGlobTester {
		});
	}

	public hasSiblingExcludeClauses(): boolean {
	hasSiblingExcludeClauses(): boolean {
		return hasSiblingClauses(this._excludeExpression);
	}
}

function hasSiblingClauses(pattern: glob.IExpression): boolean {
	for (let key in pattern) {
	for (const key in pattern) {
		if (typeof pattern[key] !== 'boolean') {
			return true;
		}
	}
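
`hasSibling` is how the sibling clauses from the earlier glob-splitting actually get evaluated: the tester asks, per candidate file, whether a named sibling exists in the same directory. A hedged usage sketch (the directory listing would come from the caller; it is hard-coded here):

// Sketch: feed a sibling-existence callback into the sync exclude test.
const siblings = new Set(['main.ts', 'main.js', 'util.ts']);
const hasSibling = (name: string) => siblings.has(name);

// With an exclude like { '**/*.js': { when: '$(basename).ts' } }:
//   queryTester.includedInQuerySync('src/main.js', 'main.js', hasSibling) === false
// because 'main.ts' exists, while a lone 'src/only.js' would still be included.
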
@@ -9,23 +9,23 @@ import { IStorageService, StorageScope } from 'vs/platform/storage/common/storag
import { isEmptyObject } from 'vs/base/common/types';

export class SearchHistoryService implements ISearchHistoryService {
	public _serviceBrand: any;
	_serviceBrand: any;

	private static readonly SEARCH_HISTORY_KEY = 'workbench.search.history';

	private readonly _onDidClearHistory: Emitter<void> = new Emitter<void>();
	public readonly onDidClearHistory: Event<void> = this._onDidClearHistory.event;
	private readonly _onDidClearHistory = new Emitter<void>();
	readonly onDidClearHistory: Event<void> = this._onDidClearHistory.event;

	constructor(
		@IStorageService private storageService: IStorageService
		@IStorageService private readonly storageService: IStorageService
	) { }

	public clearHistory(): void {
	clearHistory(): void {
		this.storageService.remove(SearchHistoryService.SEARCH_HISTORY_KEY, StorageScope.WORKSPACE);
		this._onDidClearHistory.fire();
	}

	public load(): ISearchHistoryValues {
	load(): ISearchHistoryValues {
		let result: ISearchHistoryValues | undefined;
		const raw = this.storageService.get(SearchHistoryService.SEARCH_HISTORY_KEY, StorageScope.WORKSPACE);

@@ -40,7 +40,7 @@ export class SearchHistoryService implements ISearchHistoryService {
		return result || {};
	}
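
The middle of `load` is elided by this hunk; presumably it parses `raw` as JSON and keeps the result only if parsing succeeds, which is the usual pattern around `storageService.get`. A hedged reconstruction of that gap (an assumption for illustration, not the committed code):

// Sketch (assumed): the elided part of load() likely resembles this.
// A malformed history entry in storage should not break search, hence the
// try/catch and the `return result || {}` fallback visible above.
if (raw) {
	try {
		result = JSON.parse(raw);
	} catch (e) {
		// invalid data in storage; ignore and fall through to `result || {}`
	}
}
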
	public save(history: ISearchHistoryValues): void {
	save(history: ISearchHistoryValues): void {
		if (isEmptyObject(history)) {
			this.storageService.remove(SearchHistoryService.SEARCH_HISTORY_KEY, StorageScope.WORKSPACE);
		} else {

@@ -20,7 +20,7 @@ export class SearchChannel implements IServerChannel {
		throw new Error('Event not found');
	}

	call(_, command: string, arg?: any): Thenable<any> {
	call(_, command: string, arg?: any): Promise<any> {
		switch (command) {
			case 'clearCache': return this.service.clearCache(arg);
		}
@@ -40,7 +40,7 @@ export class SearchChannelClient implements IRawSearchService {
		return this.channel.listen('textSearch', search);
	}

	clearCache(cacheKey: string): Thenable<void> {
	clearCache(cacheKey: string): Promise<void> {
		return this.channel.call('clearCache', cacheKey);
	}
}
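
`SearchChannel` and `SearchChannelClient` are the two halves of the search process's IPC seam: the client proxies `IRawSearchService` calls onto a channel, and the server channel dispatches them back onto the real service. A minimal sketch of that symmetry (channel plumbing reduced to one interface; only the method and command names come from the diff):

// Sketch: the client/server pair around one command, 'clearCache'.
interface IChannelSketch {
	call(command: string, arg?: any): Promise<any>;
}

class RawSearchServiceStub {
	clearCache(cacheKey: string): Promise<void> { return Promise.resolve(); }
}

// Server side: route a command name to a service method.
class SearchChannelSketch {
	constructor(private service: RawSearchServiceStub) { }
	call(command: string, arg?: any): Promise<any> {
		switch (command) {
			case 'clearCache': return this.service.clearCache(arg);
		}
		throw new Error('Call not found'); // assumed error text
	}
}

// Client side: expose the same method shape, forwarding over the channel.
class SearchChannelClientSketch {
	constructor(private channel: IChannelSketch) { }
	clearCache(cacheKey: string): Promise<void> {
		return this.channel.call('clearCache', cacheKey);
	}
}
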
@@ -32,7 +32,7 @@ import { IRawSearchService, ISerializedFileMatch, ISerializedSearchComplete, ISe
import { SearchChannelClient } from './searchIpc';

export class SearchService extends Disposable implements ISearchService {
	public _serviceBrand: any;
	_serviceBrand: any;

	private diskSearch: DiskSearch;
	private readonly fileSearchProviders = new Map<string, ISearchResultProvider>();
@@ -40,21 +40,21 @@ export class SearchService extends Disposable implements ISearchService {
	private readonly fileIndexProviders = new Map<string, ISearchResultProvider>();

	constructor(
		@IInstantiationService private instantiationService: IInstantiationService,
		@IModelService private modelService: IModelService,
		@IUntitledEditorService private untitledEditorService: IUntitledEditorService,
		@IEditorService private editorService: IEditorService,
		@IInstantiationService private readonly instantiationService: IInstantiationService,
		@IModelService private readonly modelService: IModelService,
		@IUntitledEditorService private readonly untitledEditorService: IUntitledEditorService,
		@IEditorService private readonly editorService: IEditorService,
		@IEnvironmentService environmentService: IEnvironmentService,
		@ITelemetryService private telemetryService: ITelemetryService,
		@IConfigurationService private configurationService: IConfigurationService,
		@ILogService private logService: ILogService,
		@IExtensionService private extensionService: IExtensionService
		@ITelemetryService private readonly telemetryService: ITelemetryService,
		@IConfigurationService private readonly configurationService: IConfigurationService,
		@ILogService private readonly logService: ILogService,
		@IExtensionService private readonly extensionService: IExtensionService
	) {
		super();
		this.diskSearch = this.instantiationService.createInstance(DiskSearch, !environmentService.isBuilt || environmentService.verbose, /*timeout=*/undefined, environmentService.debugSearch);
	}

	public registerSearchResultProvider(scheme: string, type: SearchProviderType, provider: ISearchResultProvider): IDisposable {
	registerSearchResultProvider(scheme: string, type: SearchProviderType, provider: ISearchResultProvider): IDisposable {
		let list: Map<string, ISearchResultProvider>;
		if (type === SearchProviderType.file) {
			list = this.fileSearchProviders;
@@ -71,7 +71,7 @@ export class SearchService extends Disposable implements ISearchService {
		});
	}

	public extendQuery(query: IFileQuery): void {
	extendQuery(query: IFileQuery): void {
		const configuration = this.configurationService.getValue<ISearchConfiguration>();

		// Configuration: File Excludes
@@ -87,12 +87,12 @@ export class SearchService extends Disposable implements ISearchService {
		}
	}

	public textSearch(query: ITextQuery, token?: CancellationToken, onProgress?: (item: ISearchProgressItem) => void): Promise<ISearchComplete> {
	textSearch(query: ITextQuery, token?: CancellationToken, onProgress?: (item: ISearchProgressItem) => void): Promise<ISearchComplete> {
		// Get local results from dirty/untitled
		const localResults = this.getLocalResults(query);

		if (onProgress) {
			localResults.values().filter((res) => !!res).forEach(onProgress);
			arrays.coalesce(localResults.values()).forEach(onProgress);
		}

		this.logService.trace('SearchService#search', JSON.stringify(query));
@@ -116,14 +116,14 @@ export class SearchService extends Disposable implements ISearchService {
		return this.doSearch(query, token, onProviderProgress);
	}

	public fileSearch(query: IFileQuery, token?: CancellationToken): Promise<ISearchComplete> {
	fileSearch(query: IFileQuery, token?: CancellationToken): Promise<ISearchComplete> {
		return this.doSearch(query, token);
	}

	private doSearch(query: ISearchQuery, token?: CancellationToken, onProgress?: (item: ISearchProgressItem) => void): Promise<ISearchComplete> {
		const schemesInQuery = this.getSchemesInQuery(query);

		const providerActivations: Thenable<any>[] = [Promise.resolve(null)];
		const providerActivations: Promise<any>[] = [Promise.resolve(null)];
		schemesInQuery.forEach(scheme => providerActivations.push(this.extensionService.activateByEvent(`onSearch:${scheme}`)));
		providerActivations.push(this.extensionService.activateByEvent('onSearch:file'));
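
The two `activateByEvent` calls are the lazy-activation handshake: an extension contributing a search provider declares an `onSearch:<scheme>` activation event, and the workbench wakes it up only when a query actually touches that scheme. A minimal sketch of the gating (the `IExtensionService` shape is reduced to one method):

// Sketch: activate providers for every scheme in the query before searching.
interface ExtensionActivator {
	activateByEvent(event: string): Promise<void>;
}

async function activateForSchemes(schemes: Set<string>, extensions: ExtensionActivator): Promise<void> {
	const activations = [Promise.resolve()]; // never empty, as in doSearch
	schemes.forEach(scheme => activations.push(extensions.activateByEvent(`onSearch:${scheme}`)));
	activations.push(extensions.activateByEvent('onSearch:file')); // disk search is always a candidate
	await Promise.all(activations);
}
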
@@ -148,7 +148,7 @@ export class SearchService extends Disposable implements ISearchService {
				return this.searchWithProviders(query, progressCallback, token);
			})
			.then(completes => {
				completes = completes.filter(c => !!c);
				completes = arrays.coalesce(completes);
				if (!completes.length) {
					return {
						limitHit: false,
@@ -191,19 +191,19 @@ export class SearchService extends Disposable implements ISearchService {
		const e2eSW = StopWatch.create(false);

		const diskSearchQueries: IFolderQuery[] = [];
		const searchPs: Thenable<ISearchComplete>[] = [];
		const searchPs: Promise<ISearchComplete>[] = [];

		const fqs = this.groupFolderQueriesByScheme(query);
		keys(fqs).forEach(scheme => {
			const schemeFQs = fqs.get(scheme);
			let provider = query.type === QueryType.File ?
			const provider = query.type === QueryType.File ?
				this.fileSearchProviders.get(scheme) || this.fileIndexProviders.get(scheme) :
				this.textSearchProviders.get(scheme);

			if (!provider && scheme === 'file') {
				diskSearchQueries.push(...schemeFQs);
			} else if (!provider) {
				throw new Error('No search provider registered for scheme: ' + scheme);
				console.warn('No search provider registered for scheme: ' + scheme);
			} else {
				const oneSchemeQuery: ISearchQuery = {
					...query,
@@ -315,14 +315,12 @@ export class SearchService extends Disposable implements ISearchService {
				"type" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
				"endToEndTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
				"sortingTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
				"traversal" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
				"fileWalkTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
				"directoriesWalked" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
				"filesWalked" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
				"cmdTime" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
				"cmdResultCount" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth", "isMeasurement": true },
				"scheme" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
				"useRipgrep" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
				"scheme" : { "classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
			}
		*/
		this.telemetryService.publicLog('searchComplete', {
@@ -332,14 +330,12 @@ export class SearchService extends Disposable implements ISearchService {
			type: fileSearchStats.type,
			endToEndTime: endToEndTime,
			sortingTime: fileSearchStats.sortingTime,
			traversal: searchEngineStats.traversal,
			fileWalkTime: searchEngineStats.fileWalkTime,
			directoriesWalked: searchEngineStats.directoriesWalked,
			filesWalked: searchEngineStats.filesWalked,
			cmdTime: searchEngineStats.cmdTime,
			cmdResultCount: searchEngineStats.cmdResultCount,
			scheme,
			useRipgrep: query.useRipgrep
			scheme
		});
	}
	} else if (query.type === QueryType.Text) {
@@ -370,7 +366,6 @@ export class SearchService extends Disposable implements ISearchService {
				endToEndTime: endToEndTime,
				scheme,
				error: errorType,
				useRipgrep: query.useRipgrep,
				usePCRE2: !!query.usePCRE2
			});
		}
@@ -380,9 +375,9 @@ export class SearchService extends Disposable implements ISearchService {
		const localResults = new ResourceMap<IFileMatch>();

		if (query.type === QueryType.Text) {
			let models = this.modelService.getModels();
			const models = this.modelService.getModels();
			models.forEach((model) => {
				let resource = model.uri;
				const resource = model.uri;
				if (!resource) {
					return;
				}
@@ -411,9 +406,9 @@ export class SearchService extends Disposable implements ISearchService {
			}

			// Use editor API to find matches
			let matches = model.findMatches(query.contentPattern.pattern, false, query.contentPattern.isRegExp, query.contentPattern.isCaseSensitive, query.contentPattern.isWordMatch ? query.contentPattern.wordSeparators : null, false, query.maxResults);
			const matches = model.findMatches(query.contentPattern.pattern, false, query.contentPattern.isRegExp, query.contentPattern.isCaseSensitive, query.contentPattern.isWordMatch ? query.contentPattern.wordSeparators : null, false, query.maxResults);
			if (matches.length) {
				let fileMatch = new FileMatch(resource);
				const fileMatch = new FileMatch(resource);
				localResults.set(resource, fileMatch);

				const textSearchResults = editorMatchesToTextSearchResults(matches, model, query.previewOptions);
@@ -438,7 +433,7 @@ export class SearchService extends Disposable implements ISearchService {
		return pathIncludedInQuery(query, resource.fsPath);
	}
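
`getLocalResults` is why unsaved edits show up in search: open text models (dirty and untitled buffers) are scanned in-memory with the editor's own `findMatches` before any provider or disk search runs, and those resources are then overlaid on the provider results. A reduced sketch of the flow (model and query types collapsed to the essentials):

// Sketch: search open buffers first, keyed by resource.
interface OpenModel {
	uri: string;
	findMatches(pattern: string): { line: number; text: string }[];
}

function getLocalResultsSketch(models: OpenModel[], pattern: string): Map<string, { line: number; text: string }[]> {
	const localResults = new Map<string, { line: number; text: string }[]>();
	for (const model of models) {
		const matches = model.findMatches(pattern); // editor API, no disk I/O
		if (matches.length) {
			localResults.set(model.uri, matches);
		}
	}
	return localResults; // later merged over, and deduplicated against, provider results
}
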
	public clearCache(cacheKey: string): Promise<void> {
	clearCache(cacheKey: string): Promise<void> {
		const clearPs = [
			this.diskSearch,
			...values(this.fileIndexProviders),
@@ -451,7 +446,7 @@ export class SearchService extends Disposable implements ISearchService {
}

export class DiskSearch implements ISearchResultProvider {
	public _serviceBrand: any;
	_serviceBrand: any;

	private raw: IRawSearchService;

@@ -521,7 +516,7 @@ export class DiskSearch implements ISearchResultProvider {
		let event: Event<ISerializedSearchProgressItem | ISerializedSearchComplete>;
		event = this.raw.fileSearch(query);

		const onProgress = (p: ISearchProgressItem) => {
		const onProgress = (p: IProgress) => {
			if (p.message) {
				// Should only be for logs
				this.logService.debug('SearchService#search', p.message);

@@ -535,7 +530,7 @@ export class DiskSearch implements ISearchResultProvider {
	/**
	 * Public for test
	 */
	public static collectResultsFromEvent(event: Event<ISerializedSearchProgressItem | ISerializedSearchComplete>, onProgress?: (p: ISearchProgressItem) => void, token?: CancellationToken): Promise<ISearchComplete> {
	static collectResultsFromEvent(event: Event<ISerializedSearchProgressItem | ISerializedSearchComplete>, onProgress?: (p: ISearchProgressItem) => void, token?: CancellationToken): Promise<ISearchComplete> {
		let result: IFileMatch[] = [];

		let listener: IDisposable;
@@ -601,7 +596,7 @@ export class DiskSearch implements ISearchResultProvider {
		return fileMatch;
	}

	public clearCache(cacheKey: string): Thenable<void> {
	clearCache(cacheKey: string): Promise<void> {
		return this.raw.clearCache(cacheKey);
	}
}
@@ -12,7 +12,7 @@ import * as resources from 'vs/base/common/resources';
import { URI } from 'vs/base/common/uri';
import { toCanonicalName } from 'vs/base/node/encoding';
import * as extfs from 'vs/base/node/extfs';
import { IExtendedExtensionSearchOptions, IFileMatch, IFolderQuery, IPatternInfo, ISearchCompleteStats, ITextQuery, ITextSearchMatch, ITextSearchContext, ITextSearchResult } from 'vs/platform/search/common/search';
import { IExtendedExtensionSearchOptions, IFileMatch, IFolderQuery, IPatternInfo, ISearchCompleteStats, ITextQuery, ITextSearchContext, ITextSearchMatch, ITextSearchResult } from 'vs/platform/search/common/search';
import { QueryGlobTester, resolvePatternsForProvider } from 'vs/workbench/services/search/node/search';
import * as vscode from 'vscode';

@@ -26,7 +26,7 @@ export class TextSearchManager {
	constructor(private query: ITextQuery, private provider: vscode.TextSearchProvider, private _extfs: typeof extfs = extfs) {
	}

	public search(onProgress: (matches: IFileMatch[]) => void, token: CancellationToken): Promise<ISearchCompleteStats> {
	search(onProgress: (matches: IFileMatch[]) => void, token: CancellationToken): Promise<ISearchCompleteStats> {
		const folderQueries = this.query.folderQueries || [];
		const tokenSource = new CancellationTokenSource();
		token.onCancellationRequested(() => tokenSource.cancel());
@@ -35,20 +35,26 @@ export class TextSearchManager {
		this.collector = new TextSearchResultsCollector(onProgress);

		let isCanceled = false;
		const onResult = (match: vscode.TextSearchResult, folderIdx: number) => {
		const onResult = (result: vscode.TextSearchResult, folderIdx: number) => {
			if (isCanceled) {
				return;
			}

			if (typeof this.query.maxResults === 'number' && this.resultCount >= this.query.maxResults) {
				this.isLimitHit = true;
				isCanceled = true;
				tokenSource.cancel();
			}

			if (!this.isLimitHit) {
				this.resultCount++;
				this.collector.add(match, folderIdx);
				const resultSize = this.resultSize(result);
				if (extensionResultIsMatch(result) && typeof this.query.maxResults === 'number' && this.resultCount + resultSize > this.query.maxResults) {
					this.isLimitHit = true;
					isCanceled = true;
					tokenSource.cancel();

					result = this.trimResultToSize(result, this.query.maxResults - this.resultCount);
				}

				const newResultSize = this.resultSize(result);
				this.resultCount += newResultSize;
				if (newResultSize > 0) {
					this.collector.add(result, folderIdx);
				}
			}
		};
@@ -74,12 +80,35 @@ export class TextSearchManager {
		});
	}

	private resultSize(result: vscode.TextSearchResult): number {
		const match = <vscode.TextSearchMatch>result;
		return Array.isArray(match.ranges) ?
			match.ranges.length :
			1;
	}

	private trimResultToSize(result: vscode.TextSearchMatch, size: number): vscode.TextSearchMatch {
		const rangesArr = Array.isArray(result.ranges) ? result.ranges : [result.ranges];
		const matchesArr = Array.isArray(result.preview.matches) ? result.preview.matches : [result.preview.matches];

		return {
			ranges: rangesArr.slice(0, size),
			preview: {
				matches: matchesArr.slice(0, size),
				text: result.preview.text
			},
			uri: result.uri
		};
	}

	private searchInFolder(folderQuery: IFolderQuery<URI>, onResult: (result: vscode.TextSearchResult) => void, token: CancellationToken): Promise<vscode.TextSearchComplete | null | undefined> {
		const queryTester = new QueryGlobTester(this.query, folderQuery);
		const testingPs: Promise<void>[] = [];
		const progress = {
			report: (result: vscode.TextSearchResult) => {
				// TODO: validate result.ranges vs result.preview.matches
				if (!this.validateProviderResult(result)) {
					return;
				}

				const hasSibling = folderQuery.folder.scheme === 'file' ?
					glob.hasSiblingPromiseFn(() => {
@@ -107,6 +136,29 @@ export class TextSearchManager {
		});
	}

	private validateProviderResult(result: vscode.TextSearchResult): boolean {
		if (extensionResultIsMatch(result)) {
			if (Array.isArray(result.ranges)) {
				if (!Array.isArray(result.preview.matches)) {
					console.warn('INVALID - A text search provider match\'s `ranges` and `matches` properties must have the same type.');
					return false;
				}

				if ((<vscode.Range[]>result.preview.matches).length !== result.ranges.length) {
					console.warn('INVALID - A text search provider match\'s `ranges` and `matches` properties must have the same length.');
					return false;
				}
			} else {
				if (Array.isArray(result.preview.matches)) {
					console.warn('INVALID - A text search provider match\'s `ranges` and `matches` properties must have the same type.');
					return false;
				}
			}
		}

		return true;
	}

	private readdir(dirname: string): Promise<string[]> {
		return new Promise((resolve, reject) => {
			this._extfs.readdir(dirname, (err, files) => {