diff --git a/build/gulpfile.hygiene.js b/build/gulpfile.hygiene.js index 1b1b520d99..88330fa9d9 100644 --- a/build/gulpfile.hygiene.js +++ b/build/gulpfile.hygiene.js @@ -96,7 +96,8 @@ const indentationFilter = [ '!extensions/mssql/sqltoolsservice/**', '!extensions/import/flatfileimportservice/**', '!extensions/admin-tool-ext-win/ssmsmin/**', - '!extensions/resource-deployment/notebooks/**' + '!extensions/resource-deployment/notebooks/**', + '!extensions/mssql/notebooks/**' ]; const copyrightFilter = [ diff --git a/extensions/mssql/notebooks/TSG/cluster-status.ipynb b/extensions/mssql/notebooks/TSG/cluster-status.ipynb new file mode 100644 index 0000000000..ed2a6da2ac --- /dev/null +++ b/extensions/mssql/notebooks/TSG/cluster-status.ipynb @@ -0,0 +1,159 @@ +{ + "metadata": { + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python", + "version": "3.6.6", + "mimetype": "text/x-python", + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "pygments_lexer": "ipython3", + "nbconvert_exporter": "python", + "file_extension": ".py" + } + }, + "nbformat_minor": 2, + "nbformat": 4, + "cells": [ + { + "cell_type": "markdown", + "source": "![11811317_10153406249401648_2787740058697948111_n](https://raw.githubusercontent.com/Microsoft/sqlworkshops/master/graphics/solutions-microsoft-logo-small.png)\n\n# View the status of your big data cluster\nThis notebook allows you to see the status of the controller, master instance, and pools in your SQL Server big data cluster.\n\n## Important Instructions\n### **Before you begin, you will need:**\n* Big data cluster name\n* Controller username\n* Controller password\n* Controller endpoint \n\nYou can find the controller endpoint from the big data cluster dashboard in the Service Endpoints table. The endpoint is listed as **Cluster Management Service.**\n\nIf you do not know the credentials, ask the admin who deployed your cluster.\n\n### **Instructions**\n* For the best experience, click **Run Cells** on the toolbar above. This will automatically execute all code cells below and show the cluster status in each table.\n* When you click **Run Cells** for this Notebook, you will be prompted at the *Log in to your big data cluster* code cell to provide your login credentials. Follow the prompts and press enter to proceed.\n* **You won't need to modify any of the code cell contents** in this Notebook. If you accidentally made a change, you can reopen this Notebook from the bdc dashboard.\n\n\n", + "metadata": {} + }, + { + "cell_type": "markdown", + "source": "## **Dependencies**\r\n\r\n> This Notebook will try to install these dependencies for you.\r\n\r\n----------------------------------------------------------------\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n
| Tool | Required | Description |\r\n
| --- | --- | --- |\r\n
| mssqlctl | Yes | Command-line tool for installing and managing a big data cluster. |\r\n
| pandas | Yes | Python library for formatting data (More info). |\r\n
", + "metadata": {} + }, + { + "cell_type": "markdown", + "source": "### **Install latest version of mssqlctl**", + "metadata": {} + }, + { + "cell_type": "code", + "source": "import sys, platform\r\n\r\nif platform.system()==\"Windows\":\r\n user = ' --user'\r\nelse:\r\n user = ''\r\n\r\ndef executeCommand(cmd, successMsgs, printMsg):\r\n print(printMsg)\r\n cmdOutput = !{cmd}\r\n cmdOutput = ''.join(cmdOutput)\r\n if any(msg in cmdOutput for msg in successMsgs):\r\n print(f\"\\nSuccess >> \" + cmd)\r\n else:\r\n raise SystemExit(f'\\nFailed during:\\n\\n\\t{cmd}\\n\\nreturned: \\n' + ''.join(cmdOutput) + '.\\n')\r\n\r\ninstallPath = 'https://private-repo.microsoft.com/python/ctp3.1/mssqlctl/requirements.txt'\r\ncmd = f'{sys.executable} -m pip uninstall --yes mssqlctl-cli-storage'\r\ncmdOutput = !{cmd}\r\n\r\ncmd = f'{sys.executable} -m pip uninstall -r {installPath} --yes'\r\nexecuteCommand(cmd, ['is not installed', 'Successfully uninstalled mssqlctl'], 'Uninstalling mssqlctl:')\r\n\r\ncmd = f'{sys.executable} -m pip install -r {installPath}{user} --trusted-host helsinki'\r\ncmdOutput = !{cmd}\r\nexecuteCommand(cmd, ['Requirement already satisfied', 'Successfully installed mssqlctl'], 'Installing the latest version of mssqlctl:')", + "metadata": {}, + "outputs": [], + "execution_count": 0 + }, + { + "cell_type": "markdown", + "source": "### **Install latest version of pandas**", + "metadata": {} + }, + { + "cell_type": "code", + "source": "#install pandas\r\ncmd = f'{sys.executable} -m pip show pandas'\r\ncmdOutput = !{cmd}\r\nif len(cmdOutput) > 0 and '0.24' in cmdOutput[1]:\r\n print('Pandas required version is already installed!')\r\nelse:\r\n pandasVersion = 'pandas==0.24.2'\r\n cmd = f'{sys.executable} -m pip install {pandasVersion}'\r\n cmdOutput = !{cmd}\r\n print(f'\\nSuccess: Upgraded pandas.')", + "metadata": {}, + "outputs": [], + "execution_count": 0 + }, + { + "cell_type": "markdown", + "source": "## **Log in to your big data cluster**\r\nTo view cluster status, you will need to connect to your big data cluster through mssqlctl. \r\n\r\nWhen you run this code cell, you will be prompted for:\r\n- Cluster name\r\n- Controller username\r\n- Controller password\r\n\r\nTo proceed:\r\n- **Click** on the input box\r\n- **Type** the login info\r\n- **Press** enter.\r\n\r\nIf your cluster is missing a configuration file, you will be asked to provide your controller endpoint. 
(Format: **https://00.00.00.000:00000**)", + "metadata": {} + }, + { + "cell_type": "code", + "source": "import os, getpass, json\nimport pandas as pd\nimport numpy as np\nfrom IPython.display import *\n\ndef PromptForInfo(promptMsg, isPassword, errorMsg):\n if isPassword:\n promptResponse = getpass.getpass(prompt=promptMsg)\n else:\n promptResponse = input(promptMsg)\n if promptResponse == \"\":\n raise SystemExit(errorMsg + '\\n')\n return promptResponse\n\n# Prompt user inputs:\ncluster_name = PromptForInfo('Please provide your Cluster Name: ', False, 'Cluster Name is required!')\n\ncontroller_username = PromptForInfo('Please provide your Controller Username for login: ', False, 'Controller Username is required!')\n\ncontroller_password = PromptForInfo('Controller Password: ', True, 'Password is required!')\nprint('***********')\n\n!mssqlctl logout\n# Login in to your big data cluster \ncmd = f'mssqlctl login -n {cluster_name} -u {controller_username} -a yes'\nprint(\"Start \" + cmd)\nos.environ['CONTROLLER_USERNAME'] = controller_username\nos.environ['CONTROLLER_PASSWORD'] = controller_password\nos.environ['ACCEPT_EULA'] = 'yes'\n\nloginResult = !{cmd}\nif 'ERROR: Please check your kube config or specify the correct controller endpoint with: --controller-endpoint https://:.' in loginResult[0] or 'ERROR' in loginResult[0]:\n controller_ip = input('Please provide your Controller endpoint: ')\n if controller_ip == \"\":\n raise SystemExit(f'Controller IP is required!' + '\\n')\n else:\n cmd = f'mssqlctl login -n {cluster_name} -e {controller_ip} -u {controller_username} -a yes'\n loginResult = !{cmd}\nprint(loginResult)\n", + "metadata": {}, + "outputs": [], + "execution_count": 0 + }, + { + "cell_type": "markdown", + "source": "## **Status of big data cluster**\r\nAfter you successfully login to your bdc, you can view the overall status of each container before drilling down into each component.", + "metadata": {} + }, + { + "cell_type": "code", + "source": "# Display status of big data cluster\ndef formatColumnNames(column):\n return ' '.join(word[0].upper() + word[1:] for word in column.split())\n\npd.set_option('display.max_colwidth', -1)\ndef show_results(input):\n input = ''.join(input)\n results = json.loads(input)\n df = pd.DataFrame(results)\n df.columns = [formatColumnNames(n) for n in results[0].keys()]\n mydata = HTML(df.to_html(render_links=True))\n display(mydata)\n\nresults = !mssqlctl bdc status show\nstrRes = ''.join(results)\njsonRes = json.loads(strRes)\ndtypes = '{'\nspark = [x for x in jsonRes if x['kind'] == 'Spark']\nif spark:\n spark_exists = True\nelse:\n spark_exists = False\nshow_results(results)", + "metadata": {}, + "outputs": [], + "execution_count": 0 + }, + { + "cell_type": "markdown", + "source": "## **Cluster Status**\r\nFor each cluster component below, running each code cell will generate a table. This table will include:\r\n\r\n----------------------------------------------------------------\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n
| Column Name | Description |\r\n
| --- | --- |\r\n
| Kind | Identifies whether the component is a pod or a set. |\r\n
| LogsURL | Link to the Kibana logs used for troubleshooting. |\r\n
| Name | Provides the specific name of the pod or set. |\r\n
| NodeMetricsURL | Link to the Grafana dashboard to view key metrics of the node. |\r\n
| SQLMetricsURL | Link to the Grafana dashboard to view key metrics of the SQL instance. |\r\n
| State | Indicates the state of the pod or set. |\r\n
", + "metadata": {} + }, + { + "cell_type": "markdown", + "source": "### **Controller status**\nTo learn more about the controller, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-controller?view=sql-server-ver15)", + "metadata": {} + }, + { + "cell_type": "code", + "source": "# Display status of controller\nresults = !mssqlctl bdc control status show\nshow_results(results)", + "metadata": {}, + "outputs": [], + "execution_count": 0 + }, + { + "cell_type": "markdown", + "source": "### **Master Instance status**\nTo learn more about the master instance, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-master-instance?view=sqlallproducts-allversions)", + "metadata": {} + }, + { + "cell_type": "code", + "source": "# Display status of master instance\nresults = !mssqlctl bdc pool status show -k master -n default\nshow_results(results)", + "metadata": {}, + "outputs": [], + "execution_count": 0 + }, + { + "cell_type": "markdown", + "source": "### **Compute Pool status**\nTo learn more about compute pool, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-compute-pool?view=sqlallproducts-allversions)", + "metadata": {} + }, + { + "cell_type": "code", + "source": "# Display status of compute pool\nresults = !mssqlctl bdc pool status show -k compute -n default\nshow_results(results)", + "metadata": {}, + "outputs": [], + "execution_count": 0 + }, + { + "cell_type": "markdown", + "source": "### **Storage Pool status**\nTo learn more about storage pool, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-storage-pool?view=sqlallproducts-allversions)", + "metadata": {} + }, + { + "cell_type": "code", + "source": "# Display status of storage pools\nresults = !mssqlctl bdc pool status show -k storage -n default\nshow_results(results)", + "metadata": {}, + "outputs": [], + "execution_count": 0 + }, + { + "cell_type": "markdown", + "source": "### **Data Pool status**\nTo learn more about data pool, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-data-pool?view=sqlallproducts-allversions)", + "metadata": {} + }, + { + "cell_type": "code", + "source": "# Display status of data pools\nresults = !mssqlctl bdc pool status show -k data -n default\nshow_results(results)", + "metadata": {}, + "outputs": [], + "execution_count": 0 + }, + { + "cell_type": "markdown", + "source": "### **Spark Pool status**\nDisplays status of spark pool if it exists. 
Otherwise, will show as \"No spark pool.\"", + "metadata": {} + }, + { + "cell_type": "code", + "source": "# Display status of spark pool\nif spark_exists:\n results = !mssqlctl bdc pool status show -k spark -n default\n show_results(results)\nelse:\n print('No spark pool.')", + "metadata": {}, + "outputs": [], + "execution_count": 0 + } + ] +} \ No newline at end of file diff --git a/extensions/mssql/package.json b/extensions/mssql/package.json index fd97dd1bdd..dfc5612f03 100644 --- a/extensions/mssql/package.json +++ b/extensions/mssql/package.json @@ -89,11 +89,11 @@ } }, { - "command": "mssqlCluster.livy.task.openSparkHistory", - "title": "%title.openSparkHistory%", + "command": "mssqlCluster.task.openClusterStatusNotebook", + "title": "%title.openClusterStatusNotebook%", "icon": { - "dark": "resources/dark/new_spark_job_inverse.svg", - "light": "resources/light/new_spark_job.svg" + "dark": "resources/dark/cluster_status_inverse.svg", + "light": "resources/light/cluster_status.svg" } }, { @@ -247,6 +247,10 @@ { "command": "mssqlCluster.livy.task.submitSparkJob", "when": "false" + }, + { + "command": "mssqlCluster.task.openClusterStatusNotebook", + "when": "false" } ], "objectExplorer/item/context": [ @@ -405,7 +409,8 @@ "tasks-widget": [ "mssqlCluster.task.newNotebook", "mssqlCluster.task.openNotebook", - "mssqlCluster.livy.task.submitSparkJob" + "mssqlCluster.livy.task.submitSparkJob", + "mssqlCluster.task.openClusterStatusNotebook" ] } }, diff --git a/extensions/mssql/package.nls.json b/extensions/mssql/package.nls.json index be3b10400b..4eeae5d413 100644 --- a/extensions/mssql/package.nls.json +++ b/extensions/mssql/package.nls.json @@ -25,6 +25,7 @@ "title.tasks": "Tasks", "title.installPackages": "Install Packages", "title.configurePython": "Configure Python for Notebooks", + "title.openClusterStatusNotebook": "Cluster Status", "title.searchServers": "Search: Servers", "title.clearSearchServerResult": "Search: Clear Search Server Results", diff --git a/extensions/mssql/resources/dark/cluster_status_inverse.svg b/extensions/mssql/resources/dark/cluster_status_inverse.svg new file mode 100644 index 0000000000..971d362195 --- /dev/null +++ b/extensions/mssql/resources/dark/cluster_status_inverse.svg @@ -0,0 +1,13 @@ + + + + +new_notebook_inverse + + + + diff --git a/extensions/mssql/resources/dark/copy_inverse.png b/extensions/mssql/resources/dark/copy_inverse.png new file mode 100644 index 0000000000..1770281129 Binary files /dev/null and b/extensions/mssql/resources/dark/copy_inverse.png differ diff --git a/extensions/mssql/resources/light/cluster_status.svg b/extensions/mssql/resources/light/cluster_status.svg new file mode 100644 index 0000000000..b2d4d4bc65 --- /dev/null +++ b/extensions/mssql/resources/light/cluster_status.svg @@ -0,0 +1,9 @@ + + + +new_notebook + + + + diff --git a/extensions/mssql/resources/light/copy.png b/extensions/mssql/resources/light/copy.png new file mode 100644 index 0000000000..776c796378 Binary files /dev/null and b/extensions/mssql/resources/light/copy.png differ diff --git a/extensions/mssql/src/constants.ts b/extensions/mssql/src/constants.ts index 305733b738..56b572130f 100644 --- a/extensions/mssql/src/constants.ts +++ b/extensions/mssql/src/constants.ts @@ -64,6 +64,7 @@ export enum MssqlClusterItemsSubType { // SPARK JOB SUBMISSION ////////////////////////////////////////////////////////// export const mssqlClusterNewNotebookTask = 'mssqlCluster.task.newNotebook'; export const mssqlClusterOpenNotebookTask = 'mssqlCluster.task.openNotebook'; 
+export const mssqlopenClusterStatusNotebook = 'mssqlCluster.task.openClusterStatusNotebook'; export const mssqlClusterLivySubmitSparkJobCommand = 'mssqlCluster.livy.cmd.submitSparkJob'; export const mssqlClusterLivySubmitSparkJobFromFileCommand = 'mssqlCluster.livy.cmd.submitFileToSparkJob'; export const mssqlClusterLivySubmitSparkJobTask = 'mssqlCluster.livy.task.submitSparkJob'; diff --git a/extensions/mssql/src/main.ts b/extensions/mssql/src/main.ts index 5c4797a347..e8f4b13900 100644 --- a/extensions/mssql/src/main.ts +++ b/extensions/mssql/src/main.ts @@ -137,36 +137,51 @@ export async function activate(context: vscode.ExtensionContext): Promise { const endpointsArray: Array = Object.assign([], view.serverInfo.options['clusterEndpoints']); + endpointsArray.forEach(endpointInfo => { + endpointInfo.isHyperlink = true; + endpointInfo.hyperlink = 'https://' + endpointInfo.ipAddress + ':' + endpointInfo.port; + + }); if (endpointsArray.length > 0) { - const managementProxyEp = endpointsArray.find(e => e.serviceName === 'management-proxy'); + const managementProxyEp = endpointsArray.find(e => e.serviceName === 'management-proxy' || e.serviceName === 'mgmtproxy'); if (managementProxyEp) { - endpointsArray.push(getCustomEndpoint(managementProxyEp, 'Grafana Dashboard', '/grafana')); - endpointsArray.push(getCustomEndpoint(managementProxyEp, 'Kibana Dashboard', '/kibana')); + endpointsArray.push(getCustomEndpoint(managementProxyEp, localize("grafana", "Metrics Dashboard"), '/grafana/d/wZx3OUdmz')); + endpointsArray.push(getCustomEndpoint(managementProxyEp, localize("kibana", "Log Search Dashboard"), '/kibana/app/kibana#/discover')); } const gatewayEp = endpointsArray.find(e => e.serviceName === 'gateway'); if (gatewayEp) { - endpointsArray.push(getCustomEndpoint(gatewayEp, 'Spark History', '/gateway/default/sparkhistory')); - endpointsArray.push(getCustomEndpoint(gatewayEp, 'Yarn History', '/gateway/default/yarn')); + endpointsArray.push(getCustomEndpoint(gatewayEp, localize("sparkHostory", "Spark Job Monitoring"), '/gateway/default/sparkhistory')); + endpointsArray.push(getCustomEndpoint(gatewayEp, localize("yarnHistory", "Spark Resource Management"), '/gateway/default/yarn')); } const container = view.modelBuilder.flexContainer().withLayout({ flexFlow: 'column', width: '100%', height: '100%', alignItems: 'left' }).component(); endpointsArray.forEach(endpointInfo => { const endPointRow = view.modelBuilder.flexContainer().withLayout({ flexFlow: 'row' }).component(); - const nameCell = view.modelBuilder.text().withProperties({ value: endpointInfo.serviceName }).component(); - endPointRow.addItem(nameCell, { CSSStyles: { 'width': '30%', 'font-weight': '600' } }); + const nameCell = view.modelBuilder.text().withProperties({ value: getFriendlyEndpointNames(endpointInfo.serviceName) }).component(); + endPointRow.addItem(nameCell, { CSSStyles: { 'width': '35%', 'font-weight': '600', 'user-select': 'text' } }); if (endpointInfo.isHyperlink) { - const linkCell = view.modelBuilder.hyperlink().withProperties({ label: endpointInfo.hyperlink, url: endpointInfo.hyperlink, position: '' }).component(); - endPointRow.addItem(linkCell, { CSSStyles: { 'width': '70%', 'color': 'blue', 'text-decoration': 'underline', 'padding-top': '10px' } }); + const linkCell = view.modelBuilder.hyperlink().withProperties({ label: endpointInfo.hyperlink, url: endpointInfo.hyperlink }).component(); + endPointRow.addItem(linkCell, { CSSStyles: { 'width': '62%', 'color': '#0078d4', 'text-decoration': 'underline', 'padding-top': 
'10px' } }); } else { const endpointCell = view.modelBuilder.text().withProperties({ value: endpointInfo.ipAddress + ':' + endpointInfo.port }).component(); - endPointRow.addItem(endpointCell, { CSSStyles: { 'width': '70%' } }); + endPointRow.addItem(endpointCell, { CSSStyles: { 'width': '62%', 'user-select': 'text' } }); } - container.addItem(endPointRow, { CSSStyles: { 'padding-left': '10px', 'border-top': 'solid 1px #ccc', 'box-sizing': 'border-box' } }); + const copyValueCell = view.modelBuilder.button().component(); + copyValueCell.iconPath = { light: context.asAbsolutePath('resources/light/copy.png'), dark: context.asAbsolutePath('resources/dark/copy_inverse.png') }; + copyValueCell.onDidClick(() => { + vscode.env.clipboard.writeText(endpointInfo.hyperlink); + }); + copyValueCell.title = localize("copyText", "Copy"); + copyValueCell.iconHeight = '14px'; + copyValueCell.iconWidth = '14px'; + endPointRow.addItem(copyValueCell, { CSSStyles: { 'width': '3%', 'padding-top': '10px' } }); + + container.addItem(endPointRow, { CSSStyles: { 'padding-left': '10px', 'border-top': 'solid 1px #ccc', 'box-sizing': 'border-box', 'user-select': 'text' } }); }); - const endpointsContainer = view.modelBuilder.flexContainer().withLayout({ flexFlow: 'column', width: '100%', height: '100%', alignItems: 'left' }).component(); - endpointsContainer.addItem(container, { CSSStyles: { 'padding-top': '25px' } }); + const endpointsContainer = view.modelBuilder.flexContainer().withLayout({ flexFlow: 'column', width: '540px', height: '100%', alignItems: 'left' }).component(); + endpointsContainer.addItem(container, { CSSStyles: { 'padding-top': '25px', 'padding-left': '5px' } }); await view.initializeModel(endpointsContainer); } @@ -187,6 +202,30 @@ export async function activate(context: vscode.ExtensionContext): Promise { return handleOpenNotebookTask(profile); }); + apiWrapper.registerTaskHandler(Constants.mssqlopenClusterStatusNotebook, (profile: azdata.IConnectionProfile) => { + return handleOpenClusterStatusNotebookTask(profile, appContext); + }); } function saveProfileAndCreateNotebook(profile: azdata.IConnectionProfile): Promise { @@ -297,6 +339,21 @@ async function handleOpenNotebookTask(profile: azdata.IConnectionProfile): Promi } } +async function handleOpenClusterStatusNotebookTask(profile: azdata.IConnectionProfile, appContext: AppContext): Promise { + const notebookRelativePath = 'notebooks/tsg/cluster-status.ipynb'; + const notebookFullPath = path.join(appContext.extensionContext.extensionPath, notebookRelativePath); + if (!Utils.fileExists(notebookFullPath)) { + vscode.window.showErrorMessage(localize("fileNotFound", "Unable to find the file specified")); + } else { + const targetFile = Utils.getTargetFileName(notebookFullPath); + Utils.copyFile(notebookFullPath, targetFile); + let fileUri = vscode.Uri.file(targetFile); + await azdata.nb.showNotebookDocument(fileUri, { + connectionProfile: profile, + preview: false + }); + } +} function generateServerOptions(executablePath: string): ServerOptions { let launchArgs = Utils.getCommonLaunchArgsAndCleanupOldLogFiles('sqltools', executablePath); return { command: executablePath, args: launchArgs, transport: TransportKind.stdio }; diff --git a/extensions/mssql/src/utils.ts b/extensions/mssql/src/utils.ts index 4eb20e0e2b..1e3fd6037b 100644 --- a/extensions/mssql/src/utils.ts +++ b/extensions/mssql/src/utils.ts @@ -10,6 +10,7 @@ import * as crypto from 'crypto'; import * as os from 'os'; import * as findRemoveSync from 'find-remove'; import * as constants 
from './constants'; +import * as fs from 'fs'; const configTracingLevel = 'tracingLevel'; const configLogRetentionMinutes = 'logRetentionMinutes'; @@ -29,6 +30,34 @@ export function getAppDataPath() { } } +/** + * Get a file name that is not already used in the target directory + * @param filePath source notebook file name + * @param fileExtension file type + */ +export function getTargetFileName(filePath: string): string { + const targetDirectory = os.homedir(); + const fileExtension = path.extname(filePath); + const baseName = path.basename(filePath, fileExtension); + let targetFileName; + let idx = 0; + do { + const suffix = idx === 0 ? '' : `-${idx}`; + targetFileName = path.join(targetDirectory, `${baseName}${suffix}${fileExtension}`); + idx++; + } while (fs.existsSync(targetFileName)); + + return targetFileName; +} + +export function fileExists(file: string): boolean { + return fs.existsSync(file); +} + +export function copyFile(source: string, target: string): void { + fs.copyFileSync(source, target); +} + export function removeOldLogFiles(prefix: string): JSON { return findRemoveSync(getDefaultLogDir(), { prefix: `${prefix}_`, age: { seconds: getConfigLogRetentionSeconds() }, limit: getConfigLogFilesRemovalLimit() }); } diff --git a/src/sql/workbench/api/node/extHostModelView.ts b/src/sql/workbench/api/node/extHostModelView.ts index 8e2bc52909..fb349b7f0e 100644 --- a/src/sql/workbench/api/node/extHostModelView.ts +++ b/src/sql/workbench/api/node/extHostModelView.ts @@ -1262,6 +1262,20 @@ class ButtonWrapper extends ComponentWrapper implements azdata.ButtonComponent { this.setProperty('iconPath', v); } + public get iconHeight(): string | number { + return this.properties['iconHeight']; + } + public set iconHeight(v: string | number) { + this.setProperty('iconHeight', v); + } + + public get iconWidth(): string | number { + return this.properties['iconWidth']; + } + public set iconWidth(v: string | number) { + this.setProperty('iconWidth', v); + } + public get title(): string { return this.properties['title']; } diff --git a/src/sql/workbench/electron-browser/modelComponents/componentWithIconBase.ts b/src/sql/workbench/electron-browser/modelComponents/componentWithIconBase.ts index db2420d70f..a70a29a149 100644 --- a/src/sql/workbench/electron-browser/modelComponents/componentWithIconBase.ts +++ b/src/sql/workbench/electron-browser/modelComponents/componentWithIconBase.ts @@ -47,8 +47,8 @@ export abstract class ComponentWithIconBase extends ComponentBase { removeCSSRulesContainingSelector(this._iconClass); const icon = this.getLightIconPath(this.iconPath); const iconDark = this.getDarkIconPath(this.iconPath) || icon; - createCSSRule(`.icon.${this._iconClass}`, `background-image: url("${icon}")`); - createCSSRule(`.vs-dark .icon.${this._iconClass}, .hc-black .icon.${this._iconClass}`, `background-image: url("${iconDark}")`); + createCSSRule(`.icon.${this._iconClass}`, `background-image: url("${icon}");width: ${this.iconWidth};height: ${this.iconHeight};`); + createCSSRule(`.vs-dark .icon.${this._iconClass}, .hc-black .icon.${this._iconClass}`, `background-image: url("${iconDark}");width: ${this.iconWidth};height: ${this.iconHeight};`); this._changeRef.detectChanges(); } }
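
As a usage sketch of the new `iconHeight`/`iconWidth` button properties exposed through `extHostModelView.ts`, the snippet below mirrors the copy-to-clipboard button added in `main.ts`. The `addCopyButton` helper, its parameters, and the literal values are hypothetical names for illustration; only `azdata`/`vscode` calls that already appear in this change are assumed.

```ts
import * as vscode from 'vscode';
import * as azdata from 'azdata';

// Sketch: build a small copy-to-clipboard button using the iconHeight/iconWidth
// properties added to ButtonWrapper in this change. `view`, `context`, `row`,
// and `valueToCopy` are placeholders supplied by the caller.
function addCopyButton(
	view: azdata.ModelView,
	context: vscode.ExtensionContext,
	row: azdata.FlexContainer,
	valueToCopy: string
): void {
	const copyButton = view.modelBuilder.button().component();
	copyButton.iconPath = {
		light: context.asAbsolutePath('resources/light/copy.png'),
		dark: context.asAbsolutePath('resources/dark/copy_inverse.png')
	};
	// These values flow through setProperty() into the CSS rule that
	// componentWithIconBase.ts now generates with explicit width/height.
	copyButton.iconHeight = '14px';
	copyButton.iconWidth = '14px';
	copyButton.title = 'Copy';
	copyButton.onDidClick(() => {
		vscode.env.clipboard.writeText(valueToCopy);
	});
	row.addItem(copyButton, { CSSStyles: { 'width': '3%', 'padding-top': '10px' } });
}
```

Setting the icon size through component properties lets `componentWithIconBase.ts` emit the width and height in the generated CSS rule, so individual components no longer need per-instance style overrides for icon sizing.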