Add notebook integration tests (#4652)
* Add notebook integration tests
extensions/integration-tests/src/notebook.test.ts  (new file, 63 lines)
@@ -0,0 +1,63 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import 'mocha';
import assert = require('assert');

import * as azdata from 'azdata';
import * as vscode from 'vscode';
import { context } from './testContext';
import { sqlNotebookContent, writeNotebookToFile, pySpark3KernelMetadata, sqlKernelMetadata, pythonKernelMetadata, pySparkNotebookContent } from './notebook.util';
import { getBdcServer } from './testConfig';
import { connectToServer } from './utils';

if (context.RunTest) {
	suite('Notebook integration test suite', async () => {
		test('Sql NB test', async function () {
			this.timeout(6000);
			let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata);

			const expectedOutput0 = '(1 row affected)';
			let cellOutputs = notebook.document.cells[0].contents.outputs;
			assert(cellOutputs.length === 3, `Expected length: 3, Actual: '${cellOutputs.length}'`);
			let actualOutput0 = (<azdata.nb.IDisplayData>cellOutputs[0]).data['text/html'];
			assert(actualOutput0 === expectedOutput0, `Expected row count: '${expectedOutput0}', Actual: '${actualOutput0}'`);
			let actualOutput2 = (<azdata.nb.IExecuteResult>cellOutputs[2]).data['application/vnd.dataresource+json'].data[0];
			assert(actualOutput2[0] === '1', `Expected result: 1, Actual: '${actualOutput2[0]}'`);
		});

		test('Python3 notebook test', async function () {
			this.timeout(6000);
			let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata);
			let cellOutputs = notebook.document.cells[0].contents.outputs;
			let result = (<azdata.nb.IExecuteResult>cellOutputs[0]).data['text/plain'];
			assert(result === '2', `Expected: 2, Actual: '${result}'`);
		});

		test('PySpark3 notebook test', async function () {
			this.timeout(12000);
			let notebook = await openNotebook(pySparkNotebookContent, pySpark3KernelMetadata);
			let cellOutputs = notebook.document.cells[0].contents.outputs;
			let sparkResult = (<azdata.nb.IStreamResult>cellOutputs[3]).text;
			assert(sparkResult === '2', `Expected: 2, Actual: '${sparkResult}'`);
		});
	});
}

async function openNotebook(content: azdata.nb.INotebookContents, kernelMetadata: any): Promise<azdata.nb.NotebookEditor> {
	let notebookConfig = vscode.workspace.getConfiguration('notebook');
	notebookConfig.update('pythonPath', process.env.PYTHON_TEST_PATH, 1); // 1 === vscode.ConfigurationTarget.Global
	let server = await getBdcServer();
	await connectToServer(server, 6000);
	let pythonNotebook = Object.assign({}, content, { metadata: kernelMetadata });
	let uri = writeNotebookToFile(pythonNotebook);
	let notebook = await azdata.nb.showNotebookDocument(uri);
	assert(notebook.document.cells.length === 1, 'Notebook should have 1 cell');
	let ran = await notebook.runCell(notebook.document.cells[0]);
	assert(ran, 'Notebook runCell should succeed');
	return notebook;
}
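A test against a further kernel would follow the same pattern as the three above; as a rough sketch under the same imports (the fakeKernelMetadata object and its kernel name are hypothetical, shaped like sqlKernelMetadata):

test('New kernel NB test', async function () {
	this.timeout(6000);
	// Hypothetical kernel metadata, following the shape of sqlKernelMetadata.
	const fakeKernelMetadata = { kernelspec: { name: 'fakekernel', display_name: 'Fake' } };
	let notebook = await openNotebook(sqlNotebookContent, fakeKernelMetadata);
	let cellOutputs = notebook.document.cells[0].contents.outputs;
	// Kernel-specific assertions on the cell outputs would go here.
	assert(cellOutputs.length > 0, 'Expected at least one output');
});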
79
extensions/integration-tests/src/notebook.util.ts
Normal file
79
extensions/integration-tests/src/notebook.util.ts
Normal file
@@ -0,0 +1,79 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import 'mocha';
import * as azdata from 'azdata';
import * as vscode from 'vscode';
import * as tempWrite from 'temp-write';

export class CellTypes {
	public static readonly Code = 'code';
	public static readonly Markdown = 'markdown';
	public static readonly Raw = 'raw';
}

export const pySparkNotebookContent: azdata.nb.INotebookContents = {
	cells: [{
		cell_type: CellTypes.Code,
		source: '1+1',
		metadata: { language: 'python' },
		execution_count: 1
	}],
	metadata: {
		'kernelspec': {
			'name': 'pyspark3kernel',
			'display_name': 'PySpark3'
		}
	},
	nbformat: 4,
	nbformat_minor: 2
};

export const sqlNotebookContent: azdata.nb.INotebookContents = {
	cells: [{
		cell_type: CellTypes.Code,
		source: 'select 1',
		metadata: { language: 'sql' },
		execution_count: 1
	}],
	metadata: {
		'kernelspec': {
			'name': 'SQL',
			'display_name': 'SQL'
		}
	},
	nbformat: 4,
	nbformat_minor: 2
};

export const pySpark3KernelMetadata = {
	'kernelspec': {
		'name': 'pyspark3kernel',
		'display_name': 'PySpark3'
	}
};

export const sqlKernelMetadata = {
	'kernelspec': {
		'name': 'SQL',
		'display_name': 'SQL'
	}
};

export const pythonKernelMetadata = {
	'kernelspec': {
		'name': 'python3',
		'display_name': 'Python 3'
	}
};

export function writeNotebookToFile(pythonNotebook: azdata.nb.INotebookContents): vscode.Uri {
	let notebookContentString = JSON.stringify(pythonNotebook);
	let localFile = tempWrite.sync(notebookContentString, 'notebook.ipynb');
	let uri = vscode.Uri.file(localFile);
	return uri;
}
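As a usage sketch of the helper on its own (assuming the same imports as in notebook.test.ts above): writeNotebookToFile serializes the content object verbatim to a temp notebook.ipynb, which can then be opened with the azdata notebook API.

async function openSqlNotebookExample(): Promise<azdata.nb.NotebookEditor> {
	// Serialize the in-memory notebook to a temp .ipynb on disk...
	let uri = writeNotebookToFile(sqlNotebookContent);
	// ...then open it in a notebook editor, as openNotebook in notebook.test.ts does.
	return azdata.nb.showNotebookDocument(uri);
}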
@@ -8,6 +8,7 @@
 import 'mocha';
 import * as vscode from 'vscode';
 import { context } from './testContext';
+import assert = require('assert');
 
 if (!context.RunTest) {
 	suite('integration test setup', () => {
@@ -16,6 +17,10 @@ if (!context.RunTest) {
 			await vscode.commands.executeCommand('test.setupIntegrationTest');
 			// Reload the window; this is required for some of the changes made by 'test.setupIntegrationTest' to take effect.
 			await vscode.commands.executeCommand('workbench.action.reloadWindow');
+
+			assert(process.env.BDC_BACKEND_HOSTNAME !== undefined &&
+				process.env.BDC_BACKEND_USERNAME !== undefined &&
+				process.env.BDC_BACKEND_PWD !== undefined, 'BDC_BACKEND_HOSTNAME, BDC_BACKEND_USERNAME and BDC_BACKEND_PWD must be set using ./scripts/setbackendvariables.sh or .\\scripts\\setbackendvariables.bat');
 		});
 	});
 }
scripts/setbackendvariables.cmd
@@ -1,5 +1,7 @@
-@echo off pass in username password hostname of big data cluster
+@echo off pass in the username, password and hostname of the big data cluster; use "" around BDC_BACKEND_HOSTNAME to include a port.
+@echo For example: setbackendvariables.cmd sa pwd "23.101.143.196,31433" C:\Users\yuronhe\azuredatastudio-python
 set BDC_BACKEND_USERNAME=%~1
 set BDC_BACKEND_PWD=%~2
 set BDC_BACKEND_HOSTNAME=%~3
-@echo No problem reading %BDC_BACKEND_HOSTNAME%, password and %BDC_BACKEND_HOSTNAME%
+set PYTHON_TEST_PATH=%~4
+@echo No problem reading %BDC_BACKEND_USERNAME%, password, %BDC_BACKEND_HOSTNAME% and %PYTHON_TEST_PATH%
scripts/setbackendvariables.sh
@@ -2,5 +2,6 @@
 export BDC_BACKEND_USERNAME=$1
 export BDC_BACKEND_PWD=$2
 export BDC_BACKEND_HOSTNAME=$3
-echo No problem reading $BDC_BACKEND_USERNAME, password and $BDC_BACKEND_HOSTNAME
+export PYTHON_TEST_PATH=$4
+echo No problem reading $BDC_BACKEND_USERNAME, password, $BDC_BACKEND_HOSTNAME and $PYTHON_TEST_PATH
 set
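Note that because the bash version relies on export, it must be sourced rather than executed so the variables persist in the calling shell, e.g. source ./scripts/setbackendvariables.sh sa pwd "23.101.143.196,31433" /path/to/python (this invocation mirrors the batch example above; the last argument is whatever PYTHON_TEST_PATH should point to).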