mirror of https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-06 09:35:41 -05:00
Fixes to apply recursive (#7714)
* Fix the recursive apply's Promise.all usage to correctly await its promises, and fix apply so it does not apply default ACLs to child files.
* PR comments
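The heart of the change is the Promise.all call in applyToChildrenRecursive: the old code handed Promise.all a two-element array whose elements were themselves arrays of promises, so it resolved immediately without waiting for any child operation. A minimal sketch of the difference, using illustrative names (ChildFile, apply, recurse) rather than the extension's actual types:

// Hypothetical stand-ins for the extension's types; only the Promise.all shape matters here.
interface ChildFile { path: string; isDirectory: boolean; }

async function applyToChildrenBuggy(files: ChildFile[], apply: (path: string) => Promise<void>, recurse: (path: string) => Promise<void>): Promise<void> {
	// Buggy shape: the outer array has two elements, each an array of promises.
	// Promise.all resolves non-promise values immediately, so it returns without
	// waiting for any apply()/recurse() call to finish.
	await Promise.all(
		[
			files.map(file => apply(file.path)),
			files.filter(f => f.isDirectory).map(d => recurse(d.path))
		]);
}

async function applyToChildrenFixed(files: ChildFile[], apply: (path: string) => Promise<void>, recurse: (path: string) => Promise<void>): Promise<void> {
	// Fixed shape: concat the two arrays into one flat Promise<void>[] so every
	// promise is actually awaited before the operation reports success.
	await Promise.all(
		files.map(file => apply(file.path)).concat(
			files.filter(f => f.isDirectory).map(d => recurse(d.path)))
	);
}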
@@ -202,8 +202,8 @@ export class AclEntry {
 	 * user::r-x
 	 * default:group::r--
 	 */
-	toAclStrings(): string[] {
-		return Array.from(this.permissions.entries()).map((entry: [AclEntryScope, AclEntryPermission]) => {
+	toAclStrings(includeDefaults: boolean = true): string[] {
+		return Array.from(this.permissions.entries()).filter((entry: [AclEntryScope, AclEntryPermission]) => includeDefaults || entry[0] !== AclEntryScope.default).map((entry: [AclEntryScope, AclEntryPermission]) => {
 			return `${entry[0] === AclEntryScope.default ? 'default:' : ''}${getAclEntryType(this.type)}:${this.name}:${entry[1].toString()}`;
 		});
 	}
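The new includeDefaults parameter lets callers drop default-scoped entries when building ACL strings for a plain file, since HDFS only allows default ACLs on directories. A self-contained sketch of the behaviour, with simplified stand-in types rather than the real AclEntry/AclEntryScope classes:

// Simplified model of the filtering: default-scoped entries are skipped when
// includeDefaults is false (e.g. when the target is a file, not a directory).
type Scope = 'access' | 'default';

function toAclStrings(entries: Array<[Scope, string]>, includeDefaults: boolean = true): string[] {
	return entries
		.filter(([scope]) => includeDefaults || scope !== 'default')
		.map(([scope, spec]) => `${scope === 'default' ? 'default:' : ''}${spec}`);
}

// toAclStrings([['access', 'user:bob:rwx'], ['default', 'group::r-x']])
//   -> ['user:bob:rwx', 'default:group::r-x']
// toAclStrings([['access', 'user:bob:rwx'], ['default', 'group::r-x']], false)
//   -> ['user:bob:rwx']   (defaults dropped for a plain file)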
@@ -3,12 +3,34 @@
  * Licensed under the Source EULA. See License.txt in the project root for license information.
  *--------------------------------------------------------------------------------------------*/
 
-export enum HdfsFileType {
+import { FileType } from '../objectExplorerNodeProvider/fileSources';
+
+export const enum HdfsFileType {
 	File = 'File',
 	Directory = 'Directory',
 	Symlink = 'Symlink'
 }
 
+/**
+ * Maps a @see HdfsFileType to its corresponding @see FileType. Will return undefined if
+ * passed in type is undefined.
+ * @param hdfsFileType The HdfsFileType to map from
+ */
+export function hdfsFileTypeToFileType(hdfsFileType: HdfsFileType | undefined): FileType | undefined {
+	switch (hdfsFileType) {
+		case HdfsFileType.Directory:
+			return FileType.Directory;
+		case HdfsFileType.File:
+			return FileType.File;
+		case HdfsFileType.Symlink:
+			return FileType.Symlink;
+		case undefined:
+			return undefined;
+		default:
+			throw new Error(`Unexpected file type ${hdfsFileType}`);
+	}
+}
+
 export class FileStatus {
 	/**
 	 *
@@ -5,9 +5,9 @@
 
 import * as azdata from 'azdata';
 import * as vscode from 'vscode';
-import { IFileSource } from '../objectExplorerNodeProvider/fileSources';
+import { IFileSource, FileType } from '../objectExplorerNodeProvider/fileSources';
 import { PermissionStatus, AclEntry, AclEntryScope, AclType, AclEntryPermission } from './aclEntry';
-import { FileStatus } from './fileStatus';
+import { FileStatus, hdfsFileTypeToFileType } from './fileStatus';
 import * as nls from 'vscode-nls';
 
 const localize = nls.loadMessageBundle();
@@ -84,7 +84,7 @@ export class HdfsModel {
 	 * @param recursive Whether to apply the changes recursively (to all sub-folders and files)
 	 */
 	public async apply(recursive: boolean = false): Promise<void> {
-		await this.applyAclChanges(this.path);
+		await this.applyAclChanges(this.path, hdfsFileTypeToFileType(this.fileStatus ? this.fileStatus.type : undefined));
 		if (recursive) {
 			azdata.tasks.startBackgroundOperation(
 				{
@@ -112,10 +112,9 @@ export class HdfsModel {
 			const files = await this.fileSource.enumerateFiles(path, true);
 			// Apply changes to all children of this path and then recursively apply to children of any directories
 			await Promise.all(
-				[
-					files.map(file => this.applyAclChanges(file.path)),
-					files.filter(f => f.isDirectory).map(d => this.applyToChildrenRecursive(op, d.path))
-				]);
+				files.map(file => this.applyAclChanges(file.path, file.fileType)).concat(
+					files.filter(f => f.fileType === FileType.Directory).map(d => this.applyToChildrenRecursive(op, d.path)))
+			);
 		} catch (error) {
 			const errMsg = localize('mssql.recursivePermissionOpError', "Error applying permission changes: {0}", (error instanceof Error ? error.message : error));
 			vscode.window.showErrorMessage(errMsg);
@@ -127,7 +126,7 @@ export class HdfsModel {
 	 * Applies the current set of Permissions/ACLs to the specified path
 	 * @param path The path to apply the changes to
 	 */
-	private async applyAclChanges(path: string): Promise<any> {
+	private async applyAclChanges(path: string, fileType: FileType | undefined): Promise<any> {
 		// HDFS won't remove existing default ACLs even if you call setAcl with no default ACLs specified. You
 		// need to call removeDefaultAcl specifically to remove them.
 		if (!this.permissionStatus.owner.getPermission(AclEntryScope.default) &&
@@ -136,7 +135,7 @@ export class HdfsModel {
 			await this.fileSource.removeDefaultAcl(path);
 		}
 		return Promise.all([
-			this.fileSource.setAcl(path, this.permissionStatus.owner, this.permissionStatus.group, this.permissionStatus.other, this.permissionStatus.aclEntries),
+			this.fileSource.setAcl(path, fileType, this.permissionStatus),
 			this.fileSource.setPermission(path, this.permissionStatus)]);
 	}
 }
@@ -10,7 +10,7 @@ import { Cookie } from 'tough-cookie';
 import * as through from 'through2';
 import * as nls from 'vscode-nls';
 import * as auth from '../util/auth';
-import { IHdfsOptions, IRequestParams } from '../objectExplorerNodeProvider/fileSources';
+import { IHdfsOptions, IRequestParams, FileType } from '../objectExplorerNodeProvider/fileSources';
 import { PermissionStatus, AclEntry, parseAclList, PermissionType, parseAclPermissionFromOctal, AclEntryScope, AclType } from './aclEntry';
 import { Mount } from './mount';
 import { everyoneName, ownerPostfix, owningGroupPostfix } from '../localizedConstants';
@@ -509,20 +509,15 @@ export class WebHDFS {
 	/**
 	 * Set ACL for the given path. The owner, group and other fields are required - other entries are optional.
 	 * @param path The path to the file/folder to set the ACL on
-	 * @param ownerEntry The entry corresponding to the path owner
-	 * @param groupEntry The entry corresponding to the path owning group
-	 * @param otherEntry The entry corresponding to default permissions for all other users
-	 * @param aclEntries The optional additional ACL entries to set
+	 * @param fileType The type of file we're setting to determine if defaults should be applied. Use undefined if type is unknown
+	 * @param ownerEntry The status containing the permissions to set
 	 * @param callback Callback to handle the response
 	 */
-	public setAcl(path: string, ownerEntry: AclEntry, groupEntry: AclEntry, otherEntry: AclEntry, aclEntries: AclEntry[], callback: (error: HdfsError) => void): void {
+	public setAcl(path: string, fileType: FileType | undefined, permissionStatus: PermissionStatus, callback: (error: HdfsError) => void): void {
 		this.checkArgDefined('path', path);
-		this.checkArgDefined('ownerEntry', ownerEntry);
-		this.checkArgDefined('groupEntry', groupEntry);
-		this.checkArgDefined('otherEntry', otherEntry);
-		this.checkArgDefined('aclEntries', aclEntries);
-		const concatEntries = [ownerEntry, groupEntry, otherEntry].concat(aclEntries);
-		const aclSpec = concatEntries.reduce((acc, entry) => acc.concat(entry.toAclStrings()), []).join(',');
+		this.checkArgDefined('permissionStatus', permissionStatus);
+		const concatEntries = [permissionStatus.owner, permissionStatus.group, permissionStatus.other].concat(permissionStatus.aclEntries);
+		const aclSpec = concatEntries.reduce((acc, entry: AclEntry) => acc.concat(entry.toAclStrings(fileType !== FileType.File)), []).join(',');
 		let endpoint = this.getOperationEndpoint('setacl', path, { aclspec: aclSpec });
 		this.sendRequest('PUT', endpoint, undefined, (error) => {
 			return callback && callback(error);
@@ -15,9 +15,9 @@ import * as nls from 'vscode-nls';
 
 import * as constants from '../constants';
 import { WebHDFS, HdfsError } from '../hdfs/webhdfs';
-import { AclEntry, PermissionStatus } from '../hdfs/aclEntry';
+import { PermissionStatus } from '../hdfs/aclEntry';
 import { Mount, MountStatus } from '../hdfs/mount';
-import { FileStatus, HdfsFileType } from '../hdfs/fileStatus';
+import { FileStatus, hdfsFileTypeToFileType } from '../hdfs/fileStatus';
 
 const localize = nls.loadMessageBundle();
@@ -28,15 +28,21 @@ export function joinHdfsPath(parent: string, child: string): string {
 	return `${parent}/${child}`;
 }
 
+export const enum FileType {
+	Directory = 'Directory',
+	File = 'File',
+	Symlink = 'Symlink'
+}
+
 export interface IFile {
 	path: string;
-	isDirectory: boolean;
+	fileType: FileType;
 	mountStatus?: MountStatus;
 }
 
 export class File implements IFile {
 	public mountStatus?: MountStatus;
-	constructor(public path: string, public isDirectory: boolean) {
+	constructor(public path: string, public fileType: FileType) {
 
 	}
@@ -44,16 +50,16 @@ export class File implements IFile {
 		return joinHdfsPath(path, fileName);
 	}
 
-	public static createChild(parent: IFile, fileName: string, isDirectory: boolean): IFile {
-		return new File(File.createPath(parent.path, fileName), isDirectory);
+	public static createChild(parent: IFile, fileName: string, fileType: FileType): IFile {
+		return new File(File.createPath(parent.path, fileName), fileType);
 	}
 
 	public static createFile(parent: IFile, fileName: string): File {
-		return File.createChild(parent, fileName, false);
+		return File.createChild(parent, fileName, FileType.File);
 	}
 
 	public static createDirectory(parent: IFile, fileName: string): IFile {
-		return File.createChild(parent, fileName, true);
+		return File.createChild(parent, fileName, FileType.Directory);
 	}
 
 	public static getBasename(file: IFile): string {
@@ -81,12 +87,10 @@ export interface IFileSource {
 	/**
 	 * Sets the ACL status for given path
	 * @param path The path to the file/folder to set the ACL on
-	 * @param ownerEntry The entry corresponding to the path owner
-	 * @param groupEntry The entry corresponding to the path owning group
-	 * @param otherEntry The entry corresponding to default permissions for all other users
-	 * @param aclEntries The ACL entries to set
+	 * @param fileType The type of file we're setting to determine if defaults should be applied. Use undefined if type is unknown
+	 * @param permissionStatus The status containing the permissions to set
 	 */
-	setAcl(path: string, ownerEntry: AclEntry, groupEntry: AclEntry, otherEntry: AclEntry, aclEntries: AclEntry[]): Promise<void>;
+	setAcl(path: string, fileType: FileType | undefined, permissionStatus: PermissionStatus): Promise<void>;
 	/**
 	 * Removes the default ACLs for the specified path
 	 * @param path The path to remove the default ACLs for
@@ -204,7 +208,7 @@ export class HdfsFileSource implements IFileSource {
 			}
 			else {
 				let hdfsFiles: IFile[] = fileStatuses.map(fileStatus => {
-					let file = new File(File.createPath(path, fileStatus.pathSuffix), fileStatus.type === HdfsFileType.Directory);
+					let file = new File(File.createPath(path, fileStatus.pathSuffix), hdfsFileTypeToFileType(fileStatus.type));
 					if (this.mounts && this.mounts.has(file.path)) {
 						file.mountStatus = MountStatus.Mount;
 					}
@@ -385,14 +389,13 @@ export class HdfsFileSource implements IFileSource {
 	/**
 	 * Sets the ACL status for given path
 	 * @param path The path to the file/folder to set the ACL on
-	 * @param ownerEntry The entry corresponding to the path owner
-	 * @param groupEntry The entry corresponding to the path owning group
-	 * @param otherEntry The entry corresponding to default permissions for all other users
+	 * @param fileType The type of file we're setting to determine if defaults should be applied. Use undefined if type is unknown
+	 * @param ownerEntry The status containing the permissions to set
 	 * @param aclEntries The ACL entries to set
 	 */
-	public setAcl(path: string, ownerEntry: AclEntry, groupEntry: AclEntry, otherEntry: AclEntry, aclEntries: AclEntry[]): Promise<void> {
+	public setAcl(path: string, fileType: FileType | undefined, permissionStatus: PermissionStatus): Promise<void> {
 		return new Promise((resolve, reject) => {
-			this.client.setAcl(path, ownerEntry, groupEntry, otherEntry, aclEntries, (error: HdfsError) => {
+			this.client.setAcl(path, fileType, permissionStatus, (error: HdfsError) => {
 				if (error) {
 					reject(error);
 				} else {
@@ -12,7 +12,7 @@ const localize = nls.loadMessageBundle();
 
 import { ApiWrapper } from '../apiWrapper';
 import { Command, ICommandViewContext, ProgressCommand, ICommandObjectExplorerContext } from './command';
-import { File, IFile, joinHdfsPath } from './fileSources';
+import { File, IFile, joinHdfsPath, FileType } from './fileSources';
 import { FolderNode, FileNode, HdfsFileSourceNode } from './hdfsProvider';
 import { IPrompter, IQuestion, QuestionTypes } from '../prompts/question';
 import * as constants from '../constants';
@@ -102,8 +102,15 @@ export class UploadFilesCommand extends ProgressCommand {
 
 	private mapPathsToFiles(): (value: string, index: number, array: string[]) => Promise<File> {
 		return async (path: string) => {
-			let isDir = (await fs.lstat(path)).isDirectory();
-			return new File(path, isDir);
+			const stats = (await fs.lstat(path));
+			if (stats.isDirectory()) {
+				return new File(path, FileType.Directory);
+			} else if (stats.isSymbolicLink()) {
+				return new File(path, FileType.Symlink);
+			} else {
+				return new File(path, FileType.File);
+			}
+
 		};
 	}
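For local uploads the command now inspects the lstat result to choose a FileType instead of collapsing everything into a directory/non-directory flag. Roughly equivalent logic using Node's built-in fs.promises API (the extension goes through its own fs wrapper and FileType enum; names below are illustrative):

// lstat (rather than stat) is used so a symlink is reported as a symlink
// instead of being resolved to its target before classification.
import { promises as fsp } from 'fs';

type UploadFileType = 'File' | 'Directory' | 'Symlink';

async function classifyLocalPath(path: string): Promise<UploadFileType> {
	const stats = await fsp.lstat(path);
	if (stats.isDirectory()) { return 'Directory'; }
	if (stats.isSymbolicLink()) { return 'Symlink'; }
	return 'File';
}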
@@ -113,7 +120,7 @@ export class UploadFilesCommand extends ProgressCommand {
 				// Throw here so that all recursion is ended
 				throw new Error('Upload canceled');
 			}
-			if (file.isDirectory) {
+			if (file.fileType === FileType.Directory) {
 				let dirName = fspath.basename(file.path);
 				let subFolder = await folderNode.mkdir(dirName);
 				let children: IFile[] = await Promise.all((await fs.readdir(file.path))
@@ -11,7 +11,7 @@ import * as nls from 'vscode-nls';
 const localize = nls.loadMessageBundle();
 
 import * as Constants from '../constants';
-import { IFileSource, IHdfsOptions, IFile, File, FileSourceFactory } from './fileSources';
+import { IFileSource, IHdfsOptions, IFile, File, FileSourceFactory, FileType } from './fileSources';
 import { CancelableStream } from './cancelableStream';
 import { TreeNode } from './treeNodes';
 import * as utils from '../utils';
@@ -132,8 +132,9 @@ export class FolderNode extends HdfsFileSourceNode {
 			if (files) {
 				// Note: for now, assuming HDFS-provided sorting is sufficient
 				this.children = files.map((file) => {
-					let node: TreeNode = file.isDirectory ? new FolderNode(this.context, file.path, this.fileSource, Constants.MssqlClusterItems.Folder, this.getChildMountStatus(file))
-						: new FileNode(this.context, file.path, this.fileSource, this.getChildMountStatus(file));
+					let node: TreeNode = file.fileType === FileType.File ?
+						new FileNode(this.context, file.path, this.fileSource, this.getChildMountStatus(file)) :
+						new FolderNode(this.context, file.path, this.fileSource, Constants.MssqlClusterItems.Folder, this.getChildMountStatus(file));
 					node.parent = this;
 					return node;
 				});
@@ -3,8 +3,6 @@
  * Licensed under the Source EULA. See License.txt in the project root for license information.
  *--------------------------------------------------------------------------------------------*/
 
-'use strict';
-
 import * as azdata from 'azdata';
 import * as nls from 'vscode-nls';
 const localize = nls.loadMessageBundle();
@@ -17,7 +15,7 @@ import * as LocalizedConstants from '../../../localizedConstants';
 import * as utils from '../../../utils';
 import { SparkJobSubmissionService, SparkJobSubmissionInput, LivyLogResponse } from './sparkJobSubmissionService';
 import { AppContext } from '../../../appContext';
-import { IFileSource, File, joinHdfsPath } from '../../../objectExplorerNodeProvider/fileSources';
+import { IFileSource, File, joinHdfsPath, FileType } from '../../../objectExplorerNodeProvider/fileSources';
 
 
 // Stores important state and service methods used by the Spark Job Submission Dialog.
@@ -146,8 +144,8 @@ export class SparkJobSubmissionModel {
 				return Promise.reject(LocalizedConstants.sparkJobSubmissionLocalFileNotExisted(localFilePath));
 			}
 
-			let fileSource: IFileSource = await this._sqlClusterConnection.createHdfsFileSource();
-			await fileSource.writeFile(new File(localFilePath, false), hdfsFolderPath);
+			const fileSource: IFileSource = await this._sqlClusterConnection.createHdfsFileSource();
+			await fileSource.writeFile(new File(localFilePath, FileType.File), hdfsFolderPath);
 		} catch (error) {
 			return Promise.reject(error);
 		}