Update python packages to use sparkmagic 0.12.9 (#7240)
@@ -10,7 +10,7 @@ import * as assert from 'assert';
 import * as azdata from 'azdata';
 import * as vscode from 'vscode';
 import { context } from './testContext';
-import { sqlNotebookContent, writeNotebookToFile, sqlKernelMetadata, getFileName, pySparkNotebookContent, pySpark3KernelMetadata, pythonKernelMetadata, sqlNotebookMultipleCellsContent, notebookContentForCellLanguageTest, sqlKernelSpec, pythonKernelSpec, pySpark3KernelSpec, CellTypes } from './notebook.util';
+import { sqlNotebookContent, writeNotebookToFile, sqlKernelMetadata, getFileName, pySparkNotebookContent, pySparkKernelMetadata, pythonKernelMetadata, sqlNotebookMultipleCellsContent, notebookContentForCellLanguageTest, sqlKernelSpec, pythonKernelSpec, pySparkKernelSpec, CellTypes } from './notebook.util';
 import { getBdcServer, getConfigValue, EnvironmentVariable_PYTHON_PATH } from './testConfig';
 import { connectToServer, sleep } from './utils';
 import * as fs from 'fs';
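For reference, a minimal sketch of the renamed exports these imports now expect from notebook.util. The kernel name 'pysparkkernel' is taken from the assertion in the last hunk of this diff; the exact object shapes and the 'PySpark' display name are assumptions, since notebook.util itself is not shown in this change.

// Hypothetical sketch only; notebook.util is not part of this diff.
export const pySparkKernelSpec: azdata.nb.IKernelSpec = {
	name: 'pysparkkernel',      // matches the assertion in the last hunk below
	display_name: 'PySpark'     // display name assumed
};

export const pySparkKernelMetadata = {
	kernelspec: {               // standard Jupyter notebook metadata shape (assumed here)
		name: 'pysparkkernel',
		display_name: 'PySpark'
	}
};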
@@ -79,14 +79,14 @@ if (context.RunTest) {
 await (new NotebookTester()).pythonChangeKernelDifferentProviderTest(this.test.title);
 });

-test('Change kernel same provider Python to PySpark3 to Python', async function () {
+test('Change kernel same provider Python to PySpark to Python', async function () {
 await (new NotebookTester()).pythonChangeKernelSameProviderTest(this.test.title);
 });
 }

 if (process.env['RUN_PYSPARK_TEST'] === '1') {
-test('PySpark3 notebook test', async function () {
-await (new NotebookTester()).pySpark3NbTest(this.test.title);
+test('PySpark notebook test', async function () {
+await (new NotebookTester()).pySparkNbTest(this.test.title);
 });
 }

@@ -112,8 +112,8 @@ class NotebookTester {
 invocationCount: number = 0;

 @stressify({ dop: NotebookTester.ParallelCount })
-async pySpark3NbTest(title: string): Promise<void> {
-let notebook = await this.openNotebook(pySparkNotebookContent, pySpark3KernelMetadata, title + this.invocationCount++);
+async pySparkNbTest(title: string): Promise<void> {
+let notebook = await this.openNotebook(pySparkNotebookContent, pySparkKernelMetadata, title + this.invocationCount++);
 await this.runCell(notebook);
 let cellOutputs = notebook.document.cells[0].contents.outputs;
 let sparkResult = (<azdata.nb.IStreamResult>cellOutputs[3]).text;
@@ -281,9 +281,9 @@ class NotebookTester {
 assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
 assert(notebook.document.kernelSpec.name === 'python3', `Expected first kernel name: python3, Actual: ${notebook.document.kernelSpec.name}`);

-let kernelChanged = await notebook.changeKernel(pySpark3KernelSpec);
+let kernelChanged = await notebook.changeKernel(pySparkKernelSpec);
 assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
-assert(kernelChanged && notebook.document.kernelSpec.name === 'pyspark3kernel', `Expected second kernel name: pyspark3kernel, Actual: ${notebook.document.kernelSpec.name}`);
+assert(kernelChanged && notebook.document.kernelSpec.name === 'pysparkkernel', `Expected second kernel name: pysparkkernel, Actual: ${notebook.document.kernelSpec.name}`);

 kernelChanged = await notebook.changeKernel(pythonKernelSpec);
 assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
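Taken together, the same-provider kernel switch that this last hunk verifies reduces to roughly the following flow, condensed from the test body above. It relies on the tester's own openNotebook helper and the imports from notebook.util; the opening metadata and the final kernel-name check are assumed from context rather than shown in the hunk.

// Condensed sketch of the flow exercised by pythonChangeKernelSameProviderTest (not new code in this diff).
let notebook = await this.openNotebook(pySparkNotebookContent, pythonKernelMetadata, title);  // opening arguments assumed
assert(notebook.document.kernelSpec.name === 'python3', 'test should start on the Python kernel');

// Switch to the sparkmagic PySpark kernel on the same Jupyter provider...
let kernelChanged = await notebook.changeKernel(pySparkKernelSpec);
assert(kernelChanged && notebook.document.kernelSpec.name === 'pysparkkernel', 'kernel should now be pysparkkernel');

// ...and back to Python.
kernelChanged = await notebook.changeKernel(pythonKernelSpec);
assert(kernelChanged && notebook.document.kernelSpec.name === 'python3', 'final kernel-name check assumed from context');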