mirror of https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-08 01:28:26 -05:00
Use cryptography 3.2.1 for sparkmagic dependency (#14154)
* Use cryptography 3.2.1 for sparkmagic dependency
* pip -> conda
* fix typing
* fix tests
* prefix with required
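In practice, the change pins sparkmagic's cryptography dependency to exactly 3.2.1 while the other kernel packages keep their minimum-version behavior. A minimal standalone sketch of the specifier logic this commit introduces (the PythonPkgDetails interface is trimmed to the fields the sketch needs, and buildPackagesStr is a helper name invented here; the real code builds the same string inline in installPipPackages and installCondaPackages, shown in the hunks below):

// Sketch only: mirrors the per-package specifier logic added by this commit.
interface PythonPkgDetails {
	name: string;
	version: string;
	installExactVersion?: boolean;
}

// The Spark kernel requirements added by this commit.
const requiredSparkPackages: PythonPkgDetails[] = [
	{ name: 'jupyter', version: '1.0.0' },
	{ name: 'cryptography', version: '3.2.1', installExactVersion: true },
	{ name: 'sparkmagic', version: '0.12.9' },
	{ name: 'pandas', version: '0.24.2' }
];

// '>=' (or '==') by default, but always '==' for packages flagged installExactVersion.
function buildPackagesStr(packages: PythonPkgDetails[], useMinVersionDefault: boolean): string {
	const versionSpecifierDefault = useMinVersionDefault ? '>=' : '==';
	return packages.map(pkg => {
		const pkgVersionSpecifier = pkg.installExactVersion ? '==' : versionSpecifierDefault;
		return `"${pkg.name}${pkgVersionSpecifier}${pkg.version}"`;
	}).join(' ');
}

// Prints: "jupyter>=1.0.0" "cryptography==3.2.1" "sparkmagic>=0.12.9" "pandas>=0.24.2"
console.log(buildPackagesStr(requiredSparkPackages, true));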
@@ -43,7 +43,12 @@ export interface PythonInstallSettings {
 	packageUpgradeOnly?: boolean;
 }
 export interface IJupyterServerInstallation {
-	installCondaPackages(packages: PythonPkgDetails[], useMinVersion: boolean): Promise<void>;
+	/**
+	 * Installs the specified packages using conda
+	 * @param packages The list of packages to install
+	 * @param useMinVersionDefault Whether we install each package as a min version (>=) or exact version (==) by default
+	 */
+	installCondaPackages(packages: PythonPkgDetails[], useMinVersionDefault: boolean): Promise<void>;
 	configurePackagePaths(): Promise<void>;
 	startInstallProcess(forceInstall: boolean, installSettings?: PythonInstallSettings): Promise<void>;
 	getInstalledPipPackages(): Promise<PythonPkgDetails[]>;
@@ -53,11 +58,43 @@ export interface IJupyterServerInstallation {
 	getCondaExePath(): string;
 	executeBufferedCommand(command: string): Promise<string>;
 	executeStreamedCommand(command: string): Promise<void>;
-	installPipPackages(packages: PythonPkgDetails[], useMinVersion: boolean): Promise<void>;
+	/**
+	 * Installs the specified packages using pip
+	 * @param packages The list of packages to install
+	 * @param useMinVersionDefault Whether we install each package as a min version (>=) or exact version (==) by default
+	 */
+	installPipPackages(packages: PythonPkgDetails[], useMinVersionDefault: boolean): Promise<void>;
 	uninstallPipPackages(packages: PythonPkgDetails[]): Promise<void>;
 	pythonExecutable: string;
 	pythonInstallationPath: string;
 }
+
+export const requiredJupyterPkg: PythonPkgDetails = {
+	name: 'jupyter',
+	version: '1.0.0'
+};
+
+export const requiredPowershellPkg: PythonPkgDetails = {
+	name: 'powershell-kernel',
+	version: '0.1.4'
+};
+
+export const requiredSparkPackages: PythonPkgDetails[] = [
+	requiredJupyterPkg,
+	{
+		name: 'cryptography',
+		version: '3.2.1',
+		installExactVersion: true
+	},
+	{
+		name: 'sparkmagic',
+		version: '0.12.9'
+	}, {
+		name: 'pandas',
+		version: '0.24.2'
+	}
+];
+
 export class JupyterServerInstallation implements IJupyterServerInstallation {
 	public extensionPath: string;
 	public pythonBinPath: string;
@@ -101,32 +138,13 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
 		this._kernelSetupCache = new Map<string, boolean>();
 		this._requiredKernelPackages = new Map<string, PythonPkgDetails[]>();
 
-		let jupyterPkg = {
-			name: 'jupyter',
-			version: '1.0.0'
-		};
-		this._requiredKernelPackages.set(constants.python3DisplayName, [jupyterPkg]);
+		this._requiredKernelPackages.set(constants.python3DisplayName, [requiredJupyterPkg]);
+		this._requiredKernelPackages.set(constants.powershellDisplayName, [requiredJupyterPkg, requiredPowershellPkg]);
+		this._requiredKernelPackages.set(constants.pysparkDisplayName, requiredSparkPackages);
+		this._requiredKernelPackages.set(constants.sparkScalaDisplayName, requiredSparkPackages);
+		this._requiredKernelPackages.set(constants.sparkRDisplayName, requiredSparkPackages);
 
-		let powershellPkg = {
-			name: 'powershell-kernel',
-			version: '0.1.4'
-		};
-		this._requiredKernelPackages.set(constants.powershellDisplayName, [jupyterPkg, powershellPkg]);
-
-		let sparkPackages = [
-			jupyterPkg,
-			{
-				name: 'sparkmagic',
-				version: '0.12.9'
-			}, {
-				name: 'pandas',
-				version: '0.24.2'
-			}];
-		this._requiredKernelPackages.set(constants.pysparkDisplayName, sparkPackages);
-		this._requiredKernelPackages.set(constants.sparkScalaDisplayName, sparkPackages);
-		this._requiredKernelPackages.set(constants.sparkRDisplayName, sparkPackages);
-
-		let allPackages = sparkPackages.concat(powershellPkg);
+		let allPackages = requiredSparkPackages.concat(requiredPowershellPkg);
 		this._requiredKernelPackages.set(constants.allKernelsName, allPackages);
 
 		this._requiredPackagesSet = new Set<string>();
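For orientation, a hypothetical caller of the interface above (not part of this commit): it installs the Spark kernel requirements registered by the constructor, relying on the per-package installExactVersion flag to keep cryptography pinned even though useMinVersionDefault is true. The conda-versus-pip selection here is only a stand-in; the real decision logic lives elsewhere in JupyterServerInstallation. Only IJupyterServerInstallation, requiredSparkPackages, and the two install methods come from the diff; installSparkKernelPackages is an invented name.

// Hypothetical usage sketch, assuming the imports from jupyterServerInstallation.
async function installSparkKernelPackages(installation: IJupyterServerInstallation): Promise<void> {
	// requiredSparkPackages is what the constructor registers for the PySpark,
	// Spark Scala, and Spark R kernels.
	const packages = requiredSparkPackages;
	// Stand-in check: treat a non-empty conda path as "conda is usable".
	if (installation.getCondaExePath()) {
		await installation.installCondaPackages(packages, true);
	} else {
		await installation.installPipPackages(packages, true);
	}
}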
@@ -517,13 +535,16 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
 		}
 	}
 
-	public installPipPackages(packages: PythonPkgDetails[], useMinVersion: boolean): Promise<void> {
+	public installPipPackages(packages: PythonPkgDetails[], useMinVersionDefault: boolean): Promise<void> {
 		if (!packages || packages.length === 0) {
 			return Promise.resolve();
 		}
 
-		let versionSpecifier = useMinVersion ? '>=' : '==';
-		let packagesStr = packages.map(pkg => `"${pkg.name}${versionSpecifier}${pkg.version}"`).join(' ');
+		let versionSpecifierDefault = useMinVersionDefault ? '>=' : '==';
+		let packagesStr = packages.map(pkg => {
+			const pkgVersionSpecifier = pkg.installExactVersion ? '==' : versionSpecifierDefault;
+			return `"${pkg.name}${pkgVersionSpecifier}${pkg.version}"`;
+		}).join(' ');
 		let cmd = `"${this.pythonExecutable}" -m pip install --user ${packagesStr}`;
 		return this.executeStreamedCommand(cmd);
 	}
@@ -566,13 +587,16 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
 		}
 	}
 
-	public installCondaPackages(packages: PythonPkgDetails[], useMinVersion: boolean): Promise<void> {
+	public installCondaPackages(packages: PythonPkgDetails[], useMinVersionDefault: boolean): Promise<void> {
 		if (!packages || packages.length === 0) {
 			return Promise.resolve();
 		}
 
-		let versionSpecifier = useMinVersion ? '>=' : '==';
-		let packagesStr = packages.map(pkg => `"${pkg.name}${versionSpecifier}${pkg.version}"`).join(' ');
+		let versionSpecifierDefault = useMinVersionDefault ? '>=' : '==';
+		let packagesStr = packages.map(pkg => {
+			const pkgVersionSpecifier = pkg.installExactVersion ? '==' : versionSpecifierDefault;
+			return `"${pkg.name}${pkgVersionSpecifier}${pkg.version}"`;
+		}).join(' ');
 		let condaExe = this.getCondaExePath();
 		let cmd = `"${condaExe}" install -c conda-forge -y ${packagesStr}`;
 		return this.executeStreamedCommand(cmd);
@@ -736,6 +760,10 @@ export interface PythonPkgDetails {
 	name: string;
 	version: string;
 	channel?: string;
+	/**
+	 * Whether to always install the exact version of the package (==)
+	 */
+	installExactVersion?: boolean
 }
 
 export interface PipPackageOverview {
@@ -11,7 +11,7 @@ import * as uuid from 'uuid';
 import * as fs from 'fs-extra';
 import * as request from 'request';
 import * as utils from '../../common/utils';
-import { JupyterServerInstallation, PythonInstallSettings, PythonPkgDetails } from '../../jupyter/jupyterServerInstallation';
+import { requiredJupyterPkg, JupyterServerInstallation, requiredPowershellPkg, PythonInstallSettings, PythonPkgDetails, requiredSparkPackages } from '../../jupyter/jupyterServerInstallation';
 import { powershellDisplayName, pysparkDisplayName, python3DisplayName, sparkRDisplayName, sparkScalaDisplayName, winPlatform } from '../../common/constants';
 
 describe('Jupyter Server Installation', function () {
@@ -216,45 +216,24 @@ describe('Jupyter Server Installation', function () {
 	});
 
 	it('Get required packages test - Python 3 kernel', async function() {
-		let expectedPackages: PythonPkgDetails[] = [{
-			name: 'jupyter',
-			version: '1.0.0'
-		}];
 		let packages = installation.getRequiredPackagesForKernel(python3DisplayName);
-		should(packages).be.deepEqual(expectedPackages);
+		should(packages).be.deepEqual([requiredJupyterPkg]);
 	});
 
 	it('Get required packages test - Powershell kernel', async function() {
-		let expectedPackages = [{
-			name: 'jupyter',
-			version: '1.0.0'
-		}, {
-			name: 'powershell-kernel',
-			version: '0.1.4'
-		}];
 		let packages = installation.getRequiredPackagesForKernel(powershellDisplayName);
-		should(packages).be.deepEqual(expectedPackages);
+		should(packages).be.deepEqual([requiredJupyterPkg, requiredPowershellPkg]);
 	});
 
 	it('Get required packages test - Spark kernels', async function() {
-		let expectedPackages = [{
-			name: 'jupyter',
-			version: '1.0.0'
-		}, {
-			name: 'sparkmagic',
-			version: '0.12.9'
-		}, {
-			name: 'pandas',
-			version: '0.24.2'
-		}];
 		let packages = installation.getRequiredPackagesForKernel(pysparkDisplayName);
-		should(packages).be.deepEqual(expectedPackages, "Unexpected packages for PySpark kernel.");
+		should(packages).be.deepEqual(requiredSparkPackages, 'Unexpected packages for PySpark kernel.');
 
 		packages = installation.getRequiredPackagesForKernel(sparkScalaDisplayName);
-		should(packages).be.deepEqual(expectedPackages, "Unexpected packages for Spark Scala kernel.");
+		should(packages).be.deepEqual(requiredSparkPackages, 'Unexpected packages for Spark Scala kernel.');
 
 		packages = installation.getRequiredPackagesForKernel(sparkRDisplayName);
-		should(packages).be.deepEqual(expectedPackages, "Unexpected packages for Spark R kernel.");
+		should(packages).be.deepEqual(requiredSparkPackages, 'Unexpected packages for Spark R kernel.');
 	});
 
 	it('Install python test - Run install while Python is already running', async function() {
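A hypothetical follow-on test (not part of this commit) could assert the exact pin directly. The installation variable, should assertions, pysparkDisplayName, and getRequiredPackagesForKernel are the same ones used by the existing tests above; the test itself is only a sketch.

	// Hypothetical test: verify the cryptography entry for the Spark kernels is pinned.
	it('Spark kernel pins cryptography exactly', async function() {
		let packages = installation.getRequiredPackagesForKernel(pysparkDisplayName);
		let cryptographyPkg = packages.find(pkg => pkg.name === 'cryptography');
		should(cryptographyPkg?.version).be.equal('3.2.1');
		should(cryptographyPkg?.installExactVersion).be.true();
	});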