Update python packages to use sparkmagic 0.12.9 (#7240)
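In summary: the PySpark3 kernel becomes PySpark ('pyspark3kernel' is renamed to 'pysparkkernel', display name 'PySpark3' to 'PySpark'), the bundled custom sparkmagic 0.12.6.1 wheel and its dedicated installSparkMagic step are removed, sparkmagic>=0.12.9 is now installed from pip together with jupyter and pandas, pinned '==' package versions are relaxed to '>=' ranges, and the Python bundle download URLs are refreshed.

For orientation, a minimal TypeScript sketch of the kernel metadata a PySpark notebook declares after this change; the names mirror the pySparkKernelMetadata export changed in the diff below:

// Sketch only, echoing the renamed kernel; not an additional change in this commit.
const pySparkKernelMetadata = {
	'kernelspec': {
		'name': 'pysparkkernel',	// previously 'pyspark3kernel'
		'display_name': 'PySpark'	// previously 'PySpark3'
	}
};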
@@ -10,7 +10,7 @@ import * as assert from 'assert';
 import * as azdata from 'azdata';
 import * as vscode from 'vscode';
 import { context } from './testContext';
-import { sqlNotebookContent, writeNotebookToFile, sqlKernelMetadata, getFileName, pySparkNotebookContent, pySpark3KernelMetadata, pythonKernelMetadata, sqlNotebookMultipleCellsContent, notebookContentForCellLanguageTest, sqlKernelSpec, pythonKernelSpec, pySpark3KernelSpec, CellTypes } from './notebook.util';
+import { sqlNotebookContent, writeNotebookToFile, sqlKernelMetadata, getFileName, pySparkNotebookContent, pySparkKernelMetadata, pythonKernelMetadata, sqlNotebookMultipleCellsContent, notebookContentForCellLanguageTest, sqlKernelSpec, pythonKernelSpec, pySparkKernelSpec, CellTypes } from './notebook.util';
 import { getBdcServer, getConfigValue, EnvironmentVariable_PYTHON_PATH } from './testConfig';
 import { connectToServer, sleep } from './utils';
 import * as fs from 'fs';
@@ -79,14 +79,14 @@ if (context.RunTest) {
 			await (new NotebookTester()).pythonChangeKernelDifferentProviderTest(this.test.title);
 		});
 
-		test('Change kernel same provider Python to PySpark3 to Python', async function () {
+		test('Change kernel same provider Python to PySpark to Python', async function () {
 			await (new NotebookTester()).pythonChangeKernelSameProviderTest(this.test.title);
 		});
 	}
 
 	if (process.env['RUN_PYSPARK_TEST'] === '1') {
-		test('PySpark3 notebook test', async function () {
-			await (new NotebookTester()).pySpark3NbTest(this.test.title);
+		test('PySpark notebook test', async function () {
+			await (new NotebookTester()).pySparkNbTest(this.test.title);
 		});
 	}
 
@@ -112,8 +112,8 @@ class NotebookTester {
 	invocationCount: number = 0;
 
 	@stressify({ dop: NotebookTester.ParallelCount })
-	async pySpark3NbTest(title: string): Promise<void> {
-		let notebook = await this.openNotebook(pySparkNotebookContent, pySpark3KernelMetadata, title + this.invocationCount++);
+	async pySparkNbTest(title: string): Promise<void> {
+		let notebook = await this.openNotebook(pySparkNotebookContent, pySparkKernelMetadata, title + this.invocationCount++);
 		await this.runCell(notebook);
 		let cellOutputs = notebook.document.cells[0].contents.outputs;
 		let sparkResult = (<azdata.nb.IStreamResult>cellOutputs[3]).text;
@@ -281,9 +281,9 @@ class NotebookTester {
 		assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
 		assert(notebook.document.kernelSpec.name === 'python3', `Expected first kernel name: python3, Actual: ${notebook.document.kernelSpec.name}`);
 
-		let kernelChanged = await notebook.changeKernel(pySpark3KernelSpec);
+		let kernelChanged = await notebook.changeKernel(pySparkKernelSpec);
 		assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
-		assert(kernelChanged && notebook.document.kernelSpec.name === 'pyspark3kernel', `Expected second kernel name: pyspark3kernel, Actual: ${notebook.document.kernelSpec.name}`);
+		assert(kernelChanged && notebook.document.kernelSpec.name === 'pysparkkernel', `Expected second kernel name: pysparkkernel, Actual: ${notebook.document.kernelSpec.name}`);
 
 		kernelChanged = await notebook.changeKernel(pythonKernelSpec);
 		assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
@@ -27,8 +27,8 @@ export const pySparkNotebookContent: azdata.nb.INotebookContents = {
 	}],
 	metadata: {
 		'kernelspec': {
-			'name': 'pyspark3kernel',
-			'display_name': 'PySpark3'
+			'name': 'pysparkkernel',
+			'display_name': 'PySpark'
 		}
 	},
 	nbformat: 4,
@@ -127,16 +127,16 @@ export const sqlNotebookMultipleCellsContent: azdata.nb.INotebookContents = {
 	nbformat_minor: 2
 };
 
-export const pySpark3KernelMetadata = {
+export const pySparkKernelMetadata = {
 	'kernelspec': {
-		'name': 'pyspark3kernel',
-		'display_name': 'PySpark3'
+		'name': 'pysparkkernel',
+		'display_name': 'PySpark'
 	}
 };
 
-export const pySpark3KernelSpec = {
-	name: 'pyspark3',
-	display_name: 'PySpark3'
+export const pySparkKernelSpec = {
+	name: 'pyspark',
+	display_name: 'PySpark'
 };
 
 export const sqlKernelMetadata = {
@@ -1,10 +0,0 @@
-{
-  "argv": [
-    "python",
-    "-m",
-    "sparkmagic.kernels.pyspark3kernel.pyspark3kernel",
-    "-f",
-    "{connection_file}"
-  ],
-  "display_name": "PySpark3"
-}
@@ -310,14 +310,6 @@
 				"IPYNB"
 			],
 			"standardKernels": [
-				{
-					"name": "pyspark3kernel",
-					"displayName": "PySpark3",
-					"connectionProviderIds": [
-						"HADOOP_KNOX",
-						"MSSQL"
-					]
-				},
 				{
 					"name": "pysparkkernel",
 					"displayName": "PySpark",
@@ -11,7 +11,6 @@ export const extensionOutputChannel = 'Notebooks';
 // JUPYTER CONFIG //////////////////////////////////////////////////////////
 export const pythonBundleVersion = '0.0.1';
 export const pythonVersion = '3.6.6';
-export const sparkMagicVersion = '0.12.6.1';
 export const pythonPathConfigKey = 'pythonPath';
 export const existingPythonConfigKey = 'useExistingPython';
 export const notebookConfigKey = 'notebook';
@@ -42,7 +41,6 @@ export enum PythonPkgType {
 	Anaconda = 'Anaconda'
 }
 
-export const pythonOfflinePipPackagesUrl = 'https://go.microsoft.com/fwlink/?linkid=2092867';
-export const pythonWindowsInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2092866';
-export const pythonMacInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2092865';
-export const pythonLinuxInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2092864';
+export const pythonWindowsInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2103837';
+export const pythonMacInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2103836';
+export const pythonLinuxInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2103835';
@@ -221,8 +221,8 @@ async function analyzeNotebook(oeContext?: azdata.ObjectExplorerContext): Promis
 		providerId: JUPYTER_NOTEBOOK_PROVIDER,
 		preview: false,
 		defaultKernel: {
-			name: 'pyspark3kernel',
-			display_name: 'PySpark3',
+			name: 'pysparkkernel',
+			display_name: 'PySpark',
 			language: 'python'
 		}
 	});
@@ -150,8 +150,8 @@ export class JupyterController implements vscode.Disposable {
 			providerId: constants.jupyterNotebookProviderId,
 			preview: false,
 			defaultKernel: {
-				name: 'pyspark3kernel',
-				display_name: 'PySpark3',
+				name: 'pysparkkernel',
+				display_name: 'PySpark',
 				language: 'python'
 			}
 		});
@@ -77,9 +77,6 @@ export class JupyterServerInstallation {
 		try {
 			await this.installPythonPackage(backgroundOperation);
 
-			this.outputChannel.appendLine(msgPythonDownloadComplete);
-			backgroundOperation.updateStatus(azdata.TaskStatus.InProgress, msgPythonDownloadComplete);
-
 			if (this._usingConda) {
 				await this.installCondaDependencies();
 			} else if (this._usingExistingPython) {
@@ -87,19 +84,11 @@ export class JupyterServerInstallation {
 			} else {
 				await this.installOfflinePipDependencies();
 			}
-			let doOnlineInstall = this._usingExistingPython;
-			await this.installSparkMagic(doOnlineInstall);
 		} catch (err) {
 			this.outputChannel.appendLine(msgDependenciesInstallationFailed(utils.getErrorMessage(err)));
 			throw err;
 		}
 
-		fs.remove(this._pythonPackageDir, (err: Error) => {
-			if (err) {
-				this.outputChannel.appendLine(err.message);
-			}
-		});
-
 		this.outputChannel.appendLine(msgInstallPkgFinish);
 		backgroundOperation.updateStatus(azdata.TaskStatus.Succeeded, msgInstallPkgFinish);
 		window.showInformationMessage(msgInstallPkgFinish);
@@ -107,15 +96,16 @@
 	}
 
 	private installPythonPackage(backgroundOperation: azdata.BackgroundOperation): Promise<void> {
+		if (this._usingExistingPython) {
+			return Promise.resolve();
+		}
+
 		let bundleVersion = constants.pythonBundleVersion;
 		let pythonVersion = constants.pythonVersion;
 		let platformId = utils.getOSPlatformId();
 		let packageName: string;
 		let pythonDownloadUrl: string;
-		if (this._usingExistingPython) {
-			packageName = `python-${pythonVersion}-${bundleVersion}-offlinePackages.zip`;
-			pythonDownloadUrl = constants.pythonOfflinePipPackagesUrl;
-		} else {
 		let extension = process.platform === constants.winPlatform ? 'zip' : 'tar.gz';
 		packageName = `python-${pythonVersion}-${platformId}-${bundleVersion}.${extension}`;
 
@@ -131,17 +121,9 @@
 				pythonDownloadUrl = constants.pythonLinuxInstallUrl;
 				break;
 		}
-		}
-
-		let installPath: string;
-		if (this._usingExistingPython) {
-			installPath = utils.getUserHome();
-		} else {
-			installPath = this._pythonInstallationPath;
-		}
-
-		let pythonPackagePathLocal = path.join(installPath, packageName);
 		return new Promise((resolve, reject) => {
+			let installPath = this._pythonInstallationPath;
 			backgroundOperation.updateStatus(azdata.TaskStatus.InProgress, msgDownloadPython(platformId, pythonDownloadUrl));
 			fs.mkdirs(installPath, (err) => {
 				if (err) {
@@ -179,12 +161,13 @@
 				}
 			});
 
+			let pythonPackagePathLocal = path.join(installPath, packageName);
 			downloadRequest.pipe(fs.createWriteStream(pythonPackagePathLocal))
 				.on('close', async () => {
 					//unpack python zip/tar file
 					this.outputChannel.appendLine(msgPythonUnpackPending);
 					let pythonSourcePath = path.join(installPath, constants.pythonBundleVersion);
-					if (!this._usingExistingPython && await utils.exists(pythonSourcePath)) {
+					if (await utils.exists(pythonSourcePath)) {
 						try {
 							fs.removeSync(pythonSourcePath);
 						} catch (err) {
@@ -201,6 +184,8 @@
 					}
 				});
 
+				this.outputChannel.appendLine(msgPythonDownloadComplete);
+				backgroundOperation.updateStatus(azdata.TaskStatus.InProgress, msgPythonDownloadComplete);
 				resolve();
 			}).catch(err => {
 				backgroundOperation.updateStatus(azdata.TaskStatus.InProgress, msgPythonUnpackError);
@@ -223,7 +208,7 @@
 			: path.join(this._pythonInstallationPath, constants.pythonBundleVersion);
 
 		if (this._usingExistingPython) {
-			this._pythonPackageDir = path.join(utils.getUserHome(), 'offlinePackages');
+			this._pythonPackageDir = undefined;
 		} else {
 			this._pythonPackageDir = path.join(pythonSourcePath, 'offlinePackages');
 		}
@@ -438,41 +423,27 @@
 			installJupyterCommand = `"${this._pythonExecutable}" -m pip install --force-reinstall --no-index pip --find-links "${this._pythonPackageDir}" --no-warn-script-location`;
 			await this.executeStreamedCommand(installJupyterCommand);
 
+			fs.remove(this._pythonPackageDir, (err: Error) => {
+				if (err) {
+					this.outputChannel.appendLine(err.message);
+				}
+			});
+
 			this.outputChannel.appendLine(localize('msgJupyterInstallDone', "... Jupyter installation complete."));
 		} else {
 			return Promise.resolve();
 		}
 	}
 
-	private async installSparkMagic(doOnlineInstall: boolean): Promise<void> {
-		let installSparkMagic: string;
-		if (process.platform === constants.winPlatform || this._usingExistingPython) {
-			// Overwrite existing install of sparkmagic, since we use a custom version
-			let cmdOptions = this._usingExistingPython ? '--user --force-reinstall' : '--force-reinstall';
-			let sparkWheel = path.join(this._pythonPackageDir, `sparkmagic-${constants.sparkMagicVersion}-py3-none-any.whl`);
-			if (doOnlineInstall) {
-				installSparkMagic = `"${this._pythonExecutable}" -m pip install ${cmdOptions} "${sparkWheel}" --no-warn-script-location`;
-			} else {
-				installSparkMagic = `"${this._pythonExecutable}" -m pip install ${cmdOptions} --no-index "${sparkWheel}" --find-links "${this._pythonPackageDir}" --no-warn-script-location`;
-			}
-		}
-
-		if (installSparkMagic) {
-			this.outputChannel.show(true);
-			this.outputChannel.appendLine(localize('msgInstallingSpark', "Installing SparkMagic..."));
-			await this.executeStreamedCommand(installSparkMagic);
-		}
-	}
-
 	private async installPipDependencies(): Promise<void> {
 		this.outputChannel.show(true);
 		this.outputChannel.appendLine(localize('msgInstallStart', "Installing required packages to run Notebooks..."));
 
 		let cmdOptions = this._usingExistingPython ? '--user' : '';
-		let installCommand = `"${this._pythonExecutable}" -m pip install ${cmdOptions} jupyter==1.0.0 pandas==0.24.2`;
+		let installCommand = `"${this._pythonExecutable}" -m pip install ${cmdOptions} jupyter>=1.0.0 pandas>=0.24.2 sparkmagic>=0.12.9`;
 		await this.executeStreamedCommand(installCommand);
 
-		installCommand = `"${this._pythonExecutable}" -m pip install ${cmdOptions} prose-codeaccelerator==1.3.0 --extra-index-url https://prose-python-packages.azurewebsites.net`;
+		installCommand = `"${this._pythonExecutable}" -m pip install ${cmdOptions} prose-codeaccelerator>=1.3.0 --extra-index-url https://prose-python-packages.azurewebsites.net`;
 		await this.executeStreamedCommand(installCommand);
 
 		this.outputChannel.appendLine(localize('msgJupyterInstallDone', "... Jupyter installation complete."));
@@ -482,14 +453,14 @@
 		this.outputChannel.show(true);
 		this.outputChannel.appendLine(localize('msgInstallStart', "Installing required packages to run Notebooks..."));
 
-		let installCommand = `"${this.getCondaExePath()}" install -y jupyter==1.0.0 pandas==0.24.2`;
+		let installCommand = `"${this.getCondaExePath()}" install -y jupyter>=1.0.0 pandas>=0.24.2`;
 		if (process.platform !== constants.winPlatform) {
-			installCommand = `${installCommand} pykerberos==1.2.1`;
+			installCommand = `${installCommand} pykerberos>=1.2.1`;
 		}
 		await this.executeStreamedCommand(installCommand);
 
 		let cmdOptions = this._usingExistingPython ? '--user' : '';
-		installCommand = `"${this._pythonExecutable}" -m pip install ${cmdOptions} prose-codeaccelerator==1.3.0 --extra-index-url https://prose-python-packages.azurewebsites.net`;
+		installCommand = `"${this._pythonExecutable}" -m pip install ${cmdOptions} sparkmagic>=0.12.9 prose-codeaccelerator>=1.3.0 --extra-index-url https://prose-python-packages.azurewebsites.net`;
 		await this.executeStreamedCommand(installCommand);
 
 		this.outputChannel.appendLine(localize('msgJupyterInstallDone', "... Jupyter installation complete."));
@@ -113,7 +113,7 @@ let notebookLanguageMagicType: IJSONSchema = {
 		type: 'string'
 	},
 	kernels: {
-		description: localize('carbon.extension.contributes.notebook.kernels', "Optional set of kernels this is valid for, e.g. python3, pyspark3, sql"),
+		description: localize('carbon.extension.contributes.notebook.kernels', "Optional set of kernels this is valid for, e.g. python3, pyspark, sql"),
 		oneOf: [
 			{ type: 'string' },
 			{