Remove all Big Data Cluster features (#21369)
@@ -1,389 +0,0 @@
{
"metadata": {
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python",
"version": "3.6.6",
"mimetype": "text/x-python",
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"pygments_lexer": "ipython3",
"nbconvert_exporter": "python",
"file_extension": ".py"
}
},
"nbformat_minor": 2,
"nbformat": 4,
"cells": [
{
"cell_type": "markdown",
"source": [
"\n",
"\n",
"# View the status of your SQL Server Big Data Cluster\n",
"This notebook allows you to see the status of the controller, master instance, and pools in your SQL Server Big Data Cluster.\n",
"\n",
"> ## **Important Instructions**\n",
"> ### **Before you begin, you will need:**\n",
">* Big Data Cluster name\n",
">* Controller username\n",
">* Controller password\n",
">* Controller endpoint \n",
"\n",
"You can find the controller endpoint from the SQL Big Data Cluster dashboard in the Service Endpoints table. The endpoint is listed as **Cluster Management Service.**\n",
"\n",
"If you do not know the credentials, ask the admin who deployed your cluster.\n",
"\n",
"### **Prerequisites**\n",
"Ensure the following tools are installed and added to PATH before proceeding.\n",
"\n",
"|Tools|Description|Installation|\n",
"|---|---|---|\n",
"|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n",
"|azdata | Command-line tool for installing and managing a Big Data Cluster |[Installation](https://docs.microsoft.com/en-us/sql/big-data-cluster/deploy-install-azdata?view=sqlallproducts-allversions) |\n",
"|Pandas Package | Python package for data manipulation | Will be installed by the notebook if not present |\n",
"\n",
"\n",
"### **Instructions**\n",
"* For the best experience, click **Run Cells** on the toolbar above. This will automatically execute all code cells below and show the cluster status in each table.\n",
"* When you click **Run Cells** for this Notebook, you will be prompted at the *Log in to your Big Data Cluster* code cell to provide your login credentials. Follow the prompts and press enter to proceed.\n",
"* **You won't need to modify any of the code cell contents** in this Notebook. If you accidentally made a change, you can reopen this Notebook from the cluster dashboard.\n",
"\n",
"\n",
""
],
"metadata": {}
},
{
"cell_type": "markdown",
"source": "### **Check azdata version**",
"metadata": {}
},
{
"cell_type": "code",
"source": [
"import sys, os\r\n",
"cmd = f'azdata --version'\r\n",
"cmdOutput = !{cmd}\r\n",
"azdataStr = '\\'azdata\\''\r\n",
"if len(cmdOutput) > 0 and ('command not found' in cmdOutput[1] or f'{azdataStr} is not recognized as an internal or external command' in cmdOutput[0]):\r\n",
" raise SystemExit('azdata not found! Please make sure azdata is installed and added to path' + '.\\n')\r\n",
"if '15.0' in cmdOutput[0]:\r\n",
" print('azdata version: ' + cmdOutput[0])\r\n",
""
],
"metadata": {},
"outputs": [],
"execution_count": 0
},
{
"cell_type": "markdown",
"source": "### **Install latest version of pandas**",
"metadata": {}
},
{
"cell_type": "code",
"source": [
"#install pandas\r\n",
"import pandas\r\n",
"pandas_version = pandas.__version__.split('.')\r\n",
"pandas_major = int(pandas_version[0])\r\n",
"pandas_minor = int(pandas_version[1])\r\n",
"pandas_patch = int(pandas_version[2])\r\n",
"if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\r\n",
" pandasVersion = 'pandas==0.24.2'\r\n",
" cmd = f'{sys.executable} -m pip install {pandasVersion}'\r\n",
" cmdOutput = !{cmd}\r\n",
" print(f'\\nSuccess: Upgraded pandas to 0.24.2.')\r\n",
"else:\r\n",
" print('Pandas required version is already installed!') "
],
"metadata": {},
"outputs": [],
"execution_count": 0
},
{
"cell_type": "markdown",
"source": [
"## **Log in to your Big Data Cluster**\r\n",
"To view cluster status, you will need to connect to your Big Data Cluster through azdata. \r\n",
"\r\n",
"When you run this code cell, you will be prompted for:\r\n",
"- Cluster name\r\n",
"- Controller username\r\n",
"- Controller password\r\n",
"\r\n",
"To proceed:\r\n",
"- **Click** on the input box\r\n",
"- **Type** the login info\r\n",
"- **Press** enter.\r\n",
"\r\n",
"If your cluster is missing a configuration file, you will be asked to provide your controller endpoint. (Format: **https://00.00.00.000:00000**) You can find the controller endpoint from the Big Data Cluster dashboard in the Service Endpoints table. The endpoint is listed as **Cluster Management Service.**\r\n",
""
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"import os, getpass, json\n",
"import pandas as pd\n",
"import numpy as np\n",
"from IPython.display import *\n",
"\n",
"def PromptForInfo(promptMsg, isPassword, errorMsg):\n",
" if isPassword:\n",
" promptResponse = getpass.getpass(prompt=promptMsg)\n",
" else:\n",
" promptResponse = input(promptMsg)\n",
" if promptResponse == \"\":\n",
" raise SystemExit(errorMsg + '\\n')\n",
" return promptResponse\n",
"\n",
"# Prompt user inputs:\n",
"cluster_name = PromptForInfo('Please provide your Cluster Name: ', False, 'Cluster Name is required!')\n",
"\n",
"controller_username = PromptForInfo('Please provide your Controller Username for login: ', False, 'Controller Username is required!')\n",
"\n",
"controller_password = PromptForInfo('Controller Password: ', True, 'Password is required!')\n",
"print('***********')\n",
"\n",
"!azdata logout\n",
"# Login in to your Big Data Cluster \n",
"cmd = f'azdata login --namespace {cluster_name} -u {controller_username} -a yes'\n",
"print(\"Start \" + cmd)\n",
"os.environ['CONTROLLER_USERNAME'] = controller_username\n",
"os.environ['CONTROLLER_PASSWORD'] = controller_password\n",
"os.environ['ACCEPT_EULA'] = 'yes'\n",
"\n",
"loginResult = !{cmd}\n",
"if 'ERROR: Please check your kube config or specify the correct controller endpoint with: --controller-endpoint https://<ip>:<port>.' in loginResult[0] or 'ERROR' in loginResult[0]:\n",
" controller_ip = input('Please provide your Controller endpoint: ')\n",
" if controller_ip == \"\":\n",
" raise SystemExit(f'Controller IP is required!' + '\\n')\n",
" else:\n",
" cmd = f'azdata login --namespace {cluster_name} -e {controller_ip} -u {controller_username} -a yes'\n",
" loginResult = !{cmd}\n",
"print(loginResult)"
],
"metadata": {},
"outputs": [],
"execution_count": 0
},
{
"cell_type": "markdown",
"source": [
"## **Status of Big Data Cluster**\r\n",
"After you successfully login to your bdc, you can view the overall status of each container before drilling down into each component."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Helper methods for formatting\n",
"def formatColumnNames(column):\n",
" return ' '.join(word[0].upper() + word[1:] for word in column.split())\n",
"\n",
"pd.set_option('display.max_colwidth', -1)\n",
"def show_results(results):\n",
" strResult = ''.join(results)\n",
" jsonResults = json.loads(strResult)\n",
" results = jsonResults['result']\n",
" if isinstance(results, list):\n",
" for result in results:\n",
" if isinstance(result, list):\n",
" show_formattedArray(result)\n",
" else:\n",
" show_keys(result)\n",
" else:\n",
" show_keys(results)\n",
"\n",
"def show_keys(results):\n",
" listKeys = []\n",
" if isinstance(results, dict):\n",
" for key in results.keys():\n",
" if results[key] and not isinstance(results[key], list):\n",
" print('\\033[1m' + formatColumnNames(key) + ': \\033[0m' + results[key])\n",
" if results[key] and isinstance(results[key], list):\n",
" listKeys.append(key)\n",
" for key in listKeys:\n",
" show_formattedArray(results[key])\n",
" if isinstance(results, str):\n",
" print('\\033[1m' + results + ': \\033[0m')\n",
"\n",
"def show_formattedArray(results):\n",
" fomattedRow = []\n",
" if not isinstance(results, list):\n",
" show_formattedResults(results)\n",
" else:\n",
" for row in results:\n",
" if isinstance(row, str):\n",
" show_keys(row)\n",
" else:\n",
" fomattedRow.append({ k : v for k,v in row.items() if isinstance(v, str) or v is None})\n",
" df = pd.DataFrame(fomattedRow)\n",
" df.columns = [formatColumnNames(n) for n in fomattedRow[0].keys()]\n",
" mydata = HTML(df.to_html(render_links=True))\n",
" display(mydata)\n",
" nameKeys = [k for k in fomattedRow[0].keys() if 'Name' in k]\n",
" for key in results[0].keys():\n",
" if key not in fomattedRow[0].keys():\n",
" for result in results:\n",
" print('\\033[1m' + formatColumnNames(nameKeys[0]) + ': \\033[0m' + result[nameKeys[0]])\n",
" show_formattedArray(result[key])\n",
"\n",
"def show_formattedResults(input):\n",
" df = pd.DataFrame([input])\n",
" df.columns = [formatColumnNames(n) for n in [input][0].keys()]\n",
" mydata = HTML(df.to_html(render_links=True))\n",
" display(mydata)\n",
" \n",
"# Display status of Big Data Cluster\n",
"results = !azdata bdc status show -o json\n",
"show_results(results)"
],
"metadata": {},
"outputs": [],
"execution_count": 0
},
{
"cell_type": "markdown",
"source": [
"## **Cluster Status**\r\n",
"For each cluster component below, running each code cell will generate a table. This table will include:\r\n",
"\r\n",
"|Column Name|Description|\r\n",
"|---|---|\r\n",
"|**Kind** | Identifies if component is a pod or a set. |\r\n",
"|**LogsURL** | Link to [Kibana](https://www.elastic.co/guide/en/kibana/current/introduction.html) logs which is used for troubleshooting. |\r\n",
"|**Name** | Provides the specific name of the pod or set. |\r\n",
"|**NodeMetricsURL** | Link to [Grafana](https://grafana.com/docs/guides/basic_concepts/) dashboard to view key metrics of the node. |\r\n",
"|**SQLMetricsURL** | Link to [Grafana](https://grafana.com/docs/guides/basic_concepts/) dashboard to view key metrics of the SQL instance. |\r\n",
"|**State** | Indicates state of the pod or set. |\r\n",
"\r\n",
"----------------------------------------------------------------"
],
"metadata": {}
},
{
"cell_type": "markdown",
"source": [
"### **Controller status**\n",
"To learn more about the controller, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-controller?view=sql-server-ver15)"
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Display status of controller\n",
"results = !azdata bdc control status show --all -o json\n",
"show_results(results)"
],
"metadata": {},
"outputs": [],
"execution_count": 0
},
{
"cell_type": "markdown",
"source": [
"### **Master Instance status**\n",
"To learn more about the master instance, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-master-instance?view=sqlallproducts-allversions)"
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"results = !azdata bdc sql status show --resource master --all -o json\n",
"show_results(results)"
],
"metadata": {},
"outputs": [],
"execution_count": 0
},
{
"cell_type": "markdown",
"source": [
"### **Compute Pool status**\n",
"To learn more about compute pool, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-compute-pool?view=sqlallproducts-allversions)"
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Display status of compute pool\n",
"results = !azdata bdc sql status show --resource compute-0 --all -o json\n",
"show_results(results)"
],
"metadata": {},
"outputs": [],
"execution_count": 0
},
{
"cell_type": "markdown",
"source": [
"### **Storage Pool status**\n",
"To learn more about storage pool, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-storage-pool?view=sqlallproducts-allversions)"
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Display status of storage pools\n",
"results = !azdata bdc sql status show --resource storage-0 --all -o json\n",
"show_results(results)"
],
"metadata": {},
"outputs": [],
"execution_count": 0
},
{
"cell_type": "markdown",
"source": [
"### **Data Pool status**\n",
"To learn more about data pool, [read here.](https://docs.microsoft.com/sql/big-data-cluster/concept-data-pool?view=sqlallproducts-allversions)"
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Display status of data pools\n",
"results = !azdata bdc sql status show --resource data-0 --all -o json\n",
"show_results(results)"
],
"metadata": {},
"outputs": [],
"execution_count": 0
},
{
"cell_type": "markdown",
"source": [
"### **Spark Pool status**\n",
"Displays status of spark pool if it exists. Otherwise, will show as \"No spark pool.\""
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Display status of spark pool\n",
"results = !azdata bdc spark status show --all -o json\n",
"show_results(results)\n",
""
],
"metadata": {},
"outputs": [],
"execution_count": 0
}
]
}
@@ -44,82 +44,6 @@
"light": "resources/light/export_blue_light.svg"
}
},
{
"command": "mssqlCluster.uploadFiles",
"title": "%mssqlCluster.uploadFiles%"
},
{
"command": "mssqlCluster.mkdir",
"title": "%mssqlCluster.mkdir%"
},
{
"command": "mssqlCluster.deleteFiles",
"title": "%mssqlCluster.deleteFiles%"
},
{
"command": "mssqlCluster.previewFile",
"title": "%mssqlCluster.previewFile%"
},
{
"command": "mssqlCluster.saveFile",
"title": "%mssqlCluster.saveFile%"
},
{
"command": "mssqlCluster.copyPath",
"title": "%mssqlCluster.copyPath%"
},
{
"command": "mssqlCluster.manageAccess",
"title": "%mssqlCluster.manageAccess%"
},
{
"command": "mssqlCluster.task.newNotebook",
"title": "%notebook.command.new%",
"icon": {
"dark": "resources/dark/new_notebook.svg",
"light": "resources/light/new_notebook.svg"
}
},
{
"command": "mssqlCluster.task.openNotebook",
"title": "%notebook.command.open%",
"icon": {
"dark": "resources/dark/open_notebook_inverse.svg",
"light": "resources/light/open_notebook.svg"
}
},
{
"command": "mssqlCluster.livy.cmd.submitSparkJob",
"title": "%title.submitSparkJob%"
},
{
"command": "mssqlCluster.livy.task.submitSparkJob",
"title": "%title.newSparkJob%",
"icon": {
"dark": "resources/dark/new_spark_job_inverse.svg",
"light": "resources/light/new_spark_job.svg"
}
},
{
"command": "mssqlCluster.task.openClusterDashboard",
"title": "%title.openClusterDashboard%",
"icon": {
"dark": "resources/dark/cluster_status_inverse.svg",
"light": "resources/light/cluster_status.svg"
}
},
{
"command": "mssqlCluster.livy.task.openYarnHistory",
"title": "%title.openYarnHistory%",
"icon": {
"dark": "resources/light/hadoop.svg",
"light": "resources/light/hadoop.svg"
}
},
{
"command": "mssqlCluster.livy.cmd.submitFileToSparkJob",
"title": "%title.submitSparkJob%"
},
{
"command": "mssql.searchServers",
"title": "%title.searchServers%"
@@ -434,54 +358,6 @@
"command": "mssql.exportNotebookToSql",
"when": "false"
},
{
"command": "mssqlCluster.uploadFiles",
"when": "false"
},
{
"command": "mssqlCluster.mkdir",
"when": "false"
},
{
"command": "mssqlCluster.deleteFiles",
"when": "false"
},
{
"command": "mssqlCluster.previewFile",
"when": "false"
},
{
"command": "mssqlCluster.saveFile",
"when": "false"
},
{
"command": "mssqlCluster.copyPath",
"when": "false"
},
{
"command": "mssqlCluster.manageAccess",
"when": "false"
},
{
"command": "mssqlCluster.task.newNotebook",
"when": "false"
},
{
"command": "mssqlCluster.task.openNotebook",
"when": "false"
},
{
"command": "mssqlCluster.livy.cmd.submitFileToSparkJob",
"when": "false"
},
{
"command": "mssqlCluster.livy.task.submitSparkJob",
"when": "false"
},
{
"command": "mssqlCluster.task.openClusterDashboard",
"when": "false"
},
{
"command": "mssql.newTable",
"when": "false"
@@ -492,51 +368,6 @@
}
],
"objectExplorer/item/context": [
{
"command": "mssqlCluster.uploadFiles",
"when": "nodeType=~/^mssqlCluster/ && nodeType != mssqlCluster:message && nodeType != mssqlCluster:file && nodeSubType=~/^(?!:mount).*$/",
"group": "1mssqlCluster@1"
},
{
"command": "mssqlCluster.mkdir",
"when": "nodeType=~/^mssqlCluster/ && nodeType != mssqlCluster:message && nodeType != mssqlCluster:file && nodeSubType=~/^(?!:mount).*$/",
"group": "1mssqlCluster@1"
},
{
"command": "mssqlCluster.saveFile",
"when": "nodeType == mssqlCluster:file",
"group": "1mssqlCluster@1"
},
{
"command": "mssqlCluster.previewFile",
"when": "nodeType == mssqlCluster:file",
"group": "1mssqlCluster@2"
},
{
"command": "mssqlCluster.copyPath",
"when": "nodeType=~/^mssqlCluster/ && nodeType != mssqlCluster:connection && nodeType != mssqlCluster:message && nodeType != mssqlCluster:hdfs",
"group": "1mssqlCluster@3"
},
{
"command": "mssqlCluster.manageAccess",
"when": "nodeType=~/^mssqlCluster/ && nodeType != mssqlCluster:connection && nodeType != mssqlCluster:message",
"group": "1mssqlCluster@3"
},
{
"command": "mssqlCluster.deleteFiles",
"when": "nodeType=~/^mssqlCluster/ && nodeType != mssqlCluster:hdfs && nodeType != mssqlCluster:connection && viewItem != mssqlCluster:connection && nodeType != mssqlCluster:message && nodeSubType=~/^(?!:mount).*$/",
"group": "1mssqlCluster@4"
},
{
"command": "mssqlCluster.livy.cmd.submitSparkJob",
"when": "nodeType == mssqlCluster:hdfs",
"group": "1mssqlCluster@7"
},
{
"command": "mssqlCluster.livy.cmd.submitFileToSparkJob",
"when": "nodeType == mssqlCluster:file && nodeSubType =~/:spark:/",
"group": "1mssqlCluster@6"
},
{
"command": "mssql.designTable",
"when": "connectionProvider == MSSQL && nodeType == Table && nodeSubType != LedgerDropped",
@@ -743,57 +574,6 @@
}
]
},
"dashboard.tabs": [
{
"id": "mssql-big-data-cluster",
"description": "%tab.bigDataClusterDescription%",
"provider": "MSSQL",
"title": "%title.bigDataCluster%",
"group": "home",
"when": "connectionProvider == 'MSSQL' && mssql:iscluster && dashboardContext == 'server'",
"container": {
"grid-container": [
{
"name": "%title.tasks%",
"row": 0,
"col": 0,
"colspan": 1,
"widget": {
"tasks-widget": [
"mssqlCluster.task.newNotebook",
"mssqlCluster.task.openNotebook",
"mssqlCluster.livy.task.submitSparkJob",
"mssqlCluster.task.openClusterDashboard"
]
}
},
{
"name": "%title.endpoints%",
"row": 1,
"col": 0,
"rowspan": 2.5,
"colspan": 2,
"widget": {
"modelview": {
"id": "bdc-endpoints"
}
}
},
{
"name": "%title.books%",
"row": 0,
"col": 2,
"colspan": 1,
"widget": {
"modelview": {
"id": "books-widget"
}
}
}
]
}
}
],
"connectionProvider": {
"providerId": "MSSQL",
"displayName": "%mssql.provider.displayName%",
@@ -810,13 +590,6 @@
"light": "resources/light/azureDB.svg",
"dark": "resources/dark/azureDB_inverse.svg"
}
},
{
"id": "mssql:cluster",
"path": {
"light": "resources/light/sql_bigdata_cluster.svg",
"dark": "resources/dark/sql_bigdata_cluster_inverse.svg"
}
}
],
"connectionOptions": [
@@ -6,34 +6,9 @@
"json.schemas.schema.desc": "The schema definition for the given URL. The schema only needs to be provided to avoid accesses to the schema URL.",
"json.format.enable.desc": "Enable/disable default JSON formatter (requires restart)",

"mssqlCluster.uploadFiles": "Upload files",
"mssqlCluster.mkdir": "New directory",
"mssqlCluster.deleteFiles": "Delete",
"mssqlCluster.previewFile": "Preview",
"mssqlCluster.saveFile": "Save",
"mssqlCluster.copyPath": "Copy Path",
"mssqlCluster.manageAccess": "Manage Access",

"notebook.command.new": "New Notebook",
"notebook.command.open": "Open Notebook",

"tab.bigDataClusterDescription": "Tasks and information about your SQL Server Big Data Cluster",
"title.bigDataCluster": "SQL Server Big Data Cluster",
"title.submitSparkJob": "Submit Spark Job",
"title.newSparkJob": "New Spark Job",
"title.openSparkHistory": "View Spark History",
"title.openYarnHistory": "View Yarn History",
"title.tasks": "Tasks",
"title.installPackages": "Install Packages",
"title.configurePython": "Configure Python for Notebooks",
"title.openClusterDashboard": "Cluster\nDashboard",

"title.searchServers": "Search: Servers",
"title.clearSearchServerResult": "Search: Clear Search Server Results",

"title.endpoints": "Service Endpoints",
"title.books": "Notebooks",

"title.showLogFile": "Show Log File",

"mssql.disabled": "Disabled",
@@ -1 +0,0 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><defs><style>.cls-1{fill:#fff;}</style></defs><title>cluster_inverse</title><path class="cls-1" d="M14,7a1.94,1.94,0,0,1,.78.16,2,2,0,0,1,1.07,1.07,2,2,0,0,1,0,1.55,2,2,0,0,1-1.07,1.07,2,2,0,0,1-1.51,0,2.05,2.05,0,0,1-1.05-1,1.88,1.88,0,0,1-.2-.72L10.84,9a3,3,0,0,1-.56,1,3,3,0,0,1-.87.7L9.86,12H10a1.94,1.94,0,0,1,.78.16,2,2,0,0,1,1.07,1.07,2,2,0,0,1,0,1.55,2,2,0,0,1-1.07,1.07,2,2,0,0,1-1.55,0,2,2,0,0,1-1.07-1.07A2,2,0,0,1,8.25,13a2,2,0,0,1,.67-.72L8.46,11l-.23,0H8a3,3,0,0,1-1.36-.32,3,3,0,0,1-1.07-.9L4,10.58a2,2,0,0,1-.11,1.2,2,2,0,0,1-1.07,1.07A1.94,1.94,0,0,1,2,13a1.94,1.94,0,0,1-.78-.16A2,2,0,0,1,.16,11.78a2,2,0,0,1,0-1.55A2,2,0,0,1,1.22,9.16,1.94,1.94,0,0,1,2,9a2,2,0,0,1,.83.18,2,2,0,0,1,.68.51l1.63-.81A3,3,0,0,1,5.2,6.93,2.91,2.91,0,0,1,5.77,6L4.82,4.82A2,2,0,0,1,4,5a1.94,1.94,0,0,1-.78-.16A2,2,0,0,1,2.16,3.78a2,2,0,0,1,0-1.55A2,2,0,0,1,3.22,1.16a2,2,0,0,1,1.55,0A2,2,0,0,1,5.84,2.22,1.94,1.94,0,0,1,6,3a1.94,1.94,0,0,1-.4,1.2l.94,1.18a3.24,3.24,0,0,1,.71-.28A2.94,2.94,0,0,1,8,5a3,3,0,0,1,1.23.26l1.28-1.92a2,2,0,0,1-.37-.62A2,2,0,0,1,10,2a1.94,1.94,0,0,1,.16-.78A2,2,0,0,1,11.22.16a2,2,0,0,1,1.55,0,2,2,0,0,1,1.07,1.07A1.94,1.94,0,0,1,14,2a1.94,1.94,0,0,1-.16.78,2,2,0,0,1-1.07,1.07A1.94,1.94,0,0,1,12,4a2.06,2.06,0,0,1-.66-.11L10.05,5.82A3,3,0,0,1,11,8l1.17.2a2,2,0,0,1,.74-.86,2.14,2.14,0,0,1,.52-.24A1.92,1.92,0,0,1,14,7ZM2,12a1,1,0,0,0,.39-.08,1,1,0,0,0,.53-.53,1,1,0,0,0,0-.78,1,1,0,0,0-.53-.53,1,1,0,0,0-.78,0,1,1,0,0,0-.53.53,1,1,0,0,0,0,.78,1,1,0,0,0,.53.53A1,1,0,0,0,2,12ZM3,3a1,1,0,0,0,.08.39,1,1,0,0,0,.53.53,1,1,0,0,0,.78,0,1,1,0,0,0,.53-.53,1,1,0,0,0,0-.78,1,1,0,0,0-.53-.53,1,1,0,0,0-.78,0,1,1,0,0,0-.53.53A1,1,0,0,0,3,3Zm5,7a1.94,1.94,0,0,0,.78-.16A2,2,0,0,0,9.84,8.78a2,2,0,0,0,0-1.55A2,2,0,0,0,8.78,6.16a2,2,0,0,0-1.55,0A2,2,0,0,0,6.16,7.22a2,2,0,0,0,0,1.55A2,2,0,0,0,7.22,9.84,1.94,1.94,0,0,0,8,10Zm3,4a1,1,0,0,0-.08-.39,1,1,0,0,0-.53-.53,1,1,0,0,0-.78,0,1,1,0,0,0-.53.53,1,1,0,0,0,0,.78,1,1,0,0,0,.53.53,1,1,0,0,0,.78,0,1,1,0,0,0,.53-.53A1,1,0,0,0,11,14ZM12,1a1,1,0,0,0-.39.08,1,1,0,0,0-.53.53,1,1,0,0,0,0,.78,1,1,0,0,0,.53.53,1,1,0,0,0,.78,0,1,1,0,0,0,.53-.53,1,1,0,0,0,0-.78,1,1,0,0,0-.53-.53A1,1,0,0,0,12,1Zm2,9a1,1,0,0,0,.39-.08,1,1,0,0,0,.53-.53,1,1,0,0,0,0-.78,1,1,0,0,0-.53-.53,1,1,0,0,0-.78,0,1,1,0,0,0-.53.53,1,1,0,0,0,0,.78,1,1,0,0,0,.53.53A1,1,0,0,0,14,10Z"/></svg>
Before Width: | Height: | Size: 2.4 KiB
@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 23.0.4, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<title>new_notebook_inverse</title>
<g>
<path class="st0" d="M0,2h16v12H0V2z M15,3H1v5h1.7l1.8-3.6l2.5,5l2-4L10.3,8H15V3z M1,13h14V9H9.7L9,7.6l-2,4l-2.5-5L3.3,9H1V13z"
/>
</g>
</svg>
Before Width: | Height: | Size: 585 B
@@ -1,3 +0,0 @@
<svg width="14" height="16" viewBox="0 0 14 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11 3V6H5V3H11ZM10 5V4H6V5H10ZM1 0H14V16H1V13H0V12H1V10H0V9H1V7H0V6H1V4H0V3H1V0ZM13 15V1H2V3H3V4H2V6H3V7H2V9H3V10H2V12H3V13H2V15H13Z" fill="#0078D4"/>
</svg>
Before Width: | Height: | Size: 263 B
Before Width: | Height: | Size: 5.5 KiB
@@ -1 +0,0 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><defs><style>.cls-1{fill:#fff;}.cls-2{fill:#0095d7;}</style></defs><title>open_notebook_inverse</title><path class="cls-1" d="M12.55,4.21l-.08-.11h-.56l-.69.06a1.54,1.54,0,0,0-.23.29v8.69H9.18a3.32,3.32,0,0,0-.93.13,3.34,3.34,0,0,0-.87.34V4.76a2.88,2.88,0,0,1,.43-.31A5.58,5.58,0,0,1,8.29,3.3a2.63,2.63,0,0,0-.3.09A3.62,3.62,0,0,0,6.93,4a3.68,3.68,0,0,0-1.07-.57A3.58,3.58,0,0,0,4.67,3.2H2v.9H.15V15.85H13.72V5.48ZM2.86,4.1H4.67a2.61,2.61,0,0,1,1,.17,2.32,2.32,0,0,1,.86.49v8.85a3.27,3.27,0,0,0-.88-.34,3.22,3.22,0,0,0-.93-.13H2.86ZM1,15V5H2v9H4.67a3.94,3.94,0,0,1,.61.06,3.2,3.2,0,0,1,.52.18,4.19,4.19,0,0,1,.49.29,2.28,2.28,0,0,1,.45.39ZM12.8,15H7.11a2.7,2.7,0,0,1,.47-.39A2.83,2.83,0,0,1,8,14.28a3.42,3.42,0,0,1,.54-.18A3.81,3.81,0,0,1,9.18,14h2.73V5h.89Z"/><polygon class="cls-2" points="13.2 3.56 13.2 3.58 13.19 3.57 13.2 3.56"/><path class="cls-2" d="M13.19,3.57h0v0Z"/><polygon class="cls-2" points="13.2 3.56 13.2 3.58 13.19 3.57 13.2 3.56"/><polygon class="cls-2" points="14.21 1.65 14.19 1.65 14.19 1.63 14.21 1.65"/><path class="cls-2" d="M15.91,2.1,14.2,3.81l-.38.38-.62-.61v0l1-1H12.79a3.35,3.35,0,0,0-1.09.26h0a3.94,3.94,0,0,0-.86.52l-.24.21s0,0,0,0a3.3,3.3,0,0,0-.51.67,3.1,3.1,0,0,0-.26.47A3.41,3.41,0,0,0,9.5,6.11H8.6a4.68,4.68,0,0,1,.16-1.19A4.74,4.74,0,0,1,9,4.26a2.21,2.21,0,0,1,.2-.41,4.66,4.66,0,0,1,.36-.51c.1-.13.22-.26.34-.39a4.14,4.14,0,0,1,.66-.53,1.19,1.19,0,0,1,.23-.16,2.79,2.79,0,0,1,.34-.18l.31-.13.42-.14a4.32,4.32,0,0,1,1.19-.16h1.15l-1-1L13.82,0Z"/></svg>
Before Width: | Height: | Size: 1.5 KiB
@@ -1,46 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
version="1.1"
viewBox="0 0 16 16"
data-name="Layer 1"
id="Layer_1">
<metadata
id="metadata17">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title>sql_bigdata_cluster</dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<defs
id="defs4">
<style
id="style2">.cls-1{fill:#212121;}.cls-2{fill:#231f20;}</style>
</defs>
<title
id="title6">sql_bigdata_cluster</title>
<path
style="fill:#ffffff;stroke-width:1.00282443"
id="path8"
d="M 7.995,0 C 5.605,0 1.575,0.45254557 1.465,2.1319925 V 13.737272 C 1.465,15.517285 5.575,16 7.995,16 c 2.42,0 6.54,-0.482715 6.54,-2.262728 V 2.1319925 C 14.435,0.45254557 10.405,0 7.995,0 Z m 5.45,13.737272 c -0.14,0.392206 -2.18,1.166562 -5.45,1.166562 -3.27,0 -5.32,-0.784412 -5.43,-1.166562 V 3.5097423 a 14.67,14.752986 0 0 0 5.43,0.8749214 14.71,14.793212 0 0 0 5.45,-0.8749214 z m 0,-11.5549967 c -0.17,0.3922062 -2.19,1.1062225 -5.45,1.1062225 -3.26,0 -5.2,-0.6939032 -5.43,-1.0861094 0.23,-0.4022627 2.22,-1.1062225 5.43,-1.1062225 3.21,0 5.27,0.7240729 5.45,1.0659963 v 0 z"
class="cls-1" />
<polygon
style="fill:#ffffff"
transform="translate(0.075)"
id="polygon10"
points="13.57,2.35 13.58,2.36 13.57,2.37 "
class="cls-2" />
<path
style="fill:#ffffff"
id="path12"
d="m 9.6501562,5.2372858 c -0.1362374,0 -0.2728654,0.026375 -0.4003906,0.082031 -0.123585,0.050567 -0.2358691,0.1260731 -0.3300781,0.2207031 -0.094256,0.096634 -0.1724299,0.2082024 -0.2304688,0.3300781 -0.062701,0.1283175 -0.099426,0.2676857 -0.109375,0.4101562 -0.00186,0.1267925 0.022265,0.2517914 0.070312,0.3691407 0.045212,0.1164344 0.1088696,0.2248797 0.1894531,0.3203125 L 8.2107031,7.9384577 C 8.011051,7.8519995 7.7980699,7.8002026 7.5798437,7.7997858 7.2852043,7.7997877 7.0158159,7.8890317 6.7790625,8.0283014 L 6.3435156,7.4677545 C 6.4851678,7.2819801 6.5620085,7.0548883 6.5622656,6.8212702 6.5623837,6.2311827 6.0839937,5.7527927 5.4939062,5.7529108 4.9038187,5.7527927 4.4254288,6.2311827 4.4255469,6.8212702 4.4254288,7.4113576 4.9038188,7.8897476 5.4939062,7.8896295 5.646983,7.8892233 5.7981841,7.8559185 5.9372656,7.7919733 l 0.4628906,0.5351562 c -0.2593431,0.2844532 -0.4218723,0.6589599 -0.421875,1.0742188 1.1e-6,0.1550931 0.029186,0.301527 0.070312,0.4433594 L 5.2692969,10.19041 C 5.0668671,9.9352433 4.7590727,9.7863779 4.4333593,9.7861139 3.8432718,9.7859958 3.3648819,10.264386 3.365,10.854473 c -1.179e-4,0.590087 0.478272,1.068477 1.0683593,1.068359 0.5900874,1.18e-4 1.0684773,-0.478272 1.0683594,-1.068359 -2.425e-4,-0.05958 -0.00547,-0.119029 -0.015625,-0.177734 l 0.7675782,-0.376953 c 0.2881162,0.42403 0.7748778,0.703124 1.3261718,0.703124 0.087028,-9e-5 0.1739047,-0.0073 0.2597656,-0.02148 l 0.2011719,0.597656 c -0.2806104,0.199117 -0.4474678,0.523359 -0.4472656,0.869137 -8.57e-5,0.586839 0.4721644,1.062587 1.0546875,1.0625 0.5825231,8.7e-5 1.054773,-0.475661 1.054687,-1.0625 8.6e-5,-0.586839 -0.4721639,-1.062587 -1.054687,-1.0625 -0.043779,5.16e-4 -0.087483,0.0038 -0.1308594,0.0098 L 8.3220312,10.819317 C 8.6909643,10.625493 8.9698168,10.295494 9.099375,9.8993953 l 0.5449219,0.089844 h 0.00195 c 0.05025,0.5310507 0.4958731,0.9369327 1.0292971,0.9374997 0.571737,8.6e-5 1.035243,-0.46342 1.035156,-1.0351567 C 11.710786,9.3198482 11.247281,8.8563402 10.675544,8.8564264 10.264465,8.85697 9.8926723,9.100743 9.7282783,9.4775202 L 9.1814062,9.3798639 C 9.1740509,8.9410593 8.9869509,8.524497 8.6638281,8.2275202 L 9.3103125,7.2607233 c 0.1095989,0.036162 0.2244742,0.051906 0.3398437,0.048828 0.1376991,0.0043 0.2729851,-0.023148 0.3984378,-0.080078 0.126162,-0.045588 0.239468,-0.119827 0.330078,-0.21875 0.09823,-0.093286 0.176943,-0.2056351 0.230469,-0.3300781 0.05137,-0.1271794 0.07858,-0.2632358 0.08008,-0.4003907 -4.88e-4,-0.140498 -0.02772,-0.2797842 -0.08008,-0.4101562 C 10.551096,5.7482226 10.472932,5.6366542 10.378672,5.5400202 10.284463,5.44539 10.172179,5.369883 10.048594,5.3193171 9.9210683,5.2636605 9.7863933,5.2372858 9.6501562,5.2372858 Z m -0.00195,0.4746094 C 9.9659223,5.7112473 10.223947,5.9683972 10.224378,6.2861139 10.225028,6.6045936 9.9666863,6.8629356 9.6482062,6.8622858 9.3304864,6.8618548 9.0733369,6.6038302 9.0739843,6.2861139 9.0744163,5.9691601 9.3312493,5.7123255 9.6482031,5.7118952 Z m -4.1543,0.4941406 C 5.8337444,6.2059063 6.1092701,6.481432 6.1091406,6.8212702 6.1092701,7.1611084 5.8337444,7.4366342 5.4939062,7.4365045 5.1540681,7.436634 4.8785424,7.1611083 4.8786719,6.8212702 4.8785424,6.481432 5.154068,6.2059063 5.4939062,6.2060358 Z M 7.5817969,8.3700983 A 1.0403689,1.0403689 0 0 1 8.6228125,9.4111139 1.0403689,1.0403689 0 0 1 7.5817969,10.450176 1.0403689,1.0403689 0 0 1 6.5427343,9.4111139 1.0403689,1.0403689 0 0 1 7.5817969,8.3700983 Z m 3.0585941,0.9277344 h 0.002 c 0.01432,-5.13e-4 0.02865,-5.13e-4 0.04297,0 0.331066,2.151e-4 0.599395,0.2685422 
0.59961,0.5996096 -2.16e-4,0.3310657 -0.268544,0.5993937 -0.59961,0.5996087 -0.331828,8.64e-4 -0.601347,-0.26778 -0.601562,-0.5996087 -7.66e-4,-0.3150021 0.242463,-0.5768467 0.556641,-0.5996096 z M 4.4216406,10.260723 c 0.3398381,-1.3e-4 0.6153637,0.275396 0.6152344,0.615234 1.299e-4,0.339838 -0.2753959,0.615365 -0.6152344,0.615235 -0.3398385,1.3e-4 -0.6153643,-0.275397 -0.6152344,-0.615235 -1.293e-4,-0.339838 0.2753963,-0.615364 0.6152344,-0.615234 z m 4.2382813,1.589844 c 0.3452152,-8.4e-5 0.6250885,0.272792 0.625,0.609375 8.81e-5,0.336583 -0.2797848,0.609459 -0.625,0.609375 -0.3452157,8.4e-5 -0.6250889,-0.272792 -0.625,-0.609375 -8.86e-5,-0.336583 0.2797844,-0.609459 0.625,-0.609375 z" />
</svg>
Before Width: | Height: | Size: 5.9 KiB
@@ -1 +0,0 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><title>cluster</title><path d="M14,7a1.94,1.94,0,0,1,.78.16,2,2,0,0,1,1.07,1.07,2,2,0,0,1,0,1.55,2,2,0,0,1-1.07,1.07,2,2,0,0,1-1.51,0,2.05,2.05,0,0,1-1.05-1,1.88,1.88,0,0,1-.2-.72L10.84,9a3,3,0,0,1-.56,1,3,3,0,0,1-.87.7L9.86,12H10a1.94,1.94,0,0,1,.78.16,2,2,0,0,1,1.07,1.07,2,2,0,0,1,0,1.55,2,2,0,0,1-1.07,1.07,2,2,0,0,1-1.55,0,2,2,0,0,1-1.07-1.07A2,2,0,0,1,8.25,13a2,2,0,0,1,.67-.72L8.46,11l-.23,0H8a3,3,0,0,1-1.36-.32,3,3,0,0,1-1.07-.9L4,10.58a2,2,0,0,1-.11,1.2,2,2,0,0,1-1.07,1.07A1.94,1.94,0,0,1,2,13a1.94,1.94,0,0,1-.78-.16A2,2,0,0,1,.16,11.78a2,2,0,0,1,0-1.55A2,2,0,0,1,1.22,9.16,1.94,1.94,0,0,1,2,9a2,2,0,0,1,.83.18,2,2,0,0,1,.68.51l1.63-.81A3,3,0,0,1,5.2,6.93,2.91,2.91,0,0,1,5.77,6L4.82,4.82A2,2,0,0,1,4,5a1.94,1.94,0,0,1-.78-.16A2,2,0,0,1,2.16,3.78a2,2,0,0,1,0-1.55A2,2,0,0,1,3.22,1.16a2,2,0,0,1,1.55,0A2,2,0,0,1,5.84,2.22,1.94,1.94,0,0,1,6,3a1.94,1.94,0,0,1-.4,1.2l.94,1.18a3.24,3.24,0,0,1,.71-.28A2.94,2.94,0,0,1,8,5a3,3,0,0,1,1.23.26l1.28-1.92a2,2,0,0,1-.37-.62A2,2,0,0,1,10,2a1.94,1.94,0,0,1,.16-.78A2,2,0,0,1,11.22.16a2,2,0,0,1,1.55,0,2,2,0,0,1,1.07,1.07A1.94,1.94,0,0,1,14,2a1.94,1.94,0,0,1-.16.78,2,2,0,0,1-1.07,1.07A1.94,1.94,0,0,1,12,4a2.06,2.06,0,0,1-.66-.11L10.05,5.82A3,3,0,0,1,11,8l1.17.2a2,2,0,0,1,.74-.86,2.14,2.14,0,0,1,.52-.24A1.92,1.92,0,0,1,14,7ZM2,12a1,1,0,0,0,.39-.08,1,1,0,0,0,.53-.53,1,1,0,0,0,0-.78,1,1,0,0,0-.53-.53,1,1,0,0,0-.78,0,1,1,0,0,0-.53.53,1,1,0,0,0,0,.78,1,1,0,0,0,.53.53A1,1,0,0,0,2,12ZM3,3a1,1,0,0,0,.08.39,1,1,0,0,0,.53.53,1,1,0,0,0,.78,0,1,1,0,0,0,.53-.53,1,1,0,0,0,0-.78,1,1,0,0,0-.53-.53,1,1,0,0,0-.78,0,1,1,0,0,0-.53.53A1,1,0,0,0,3,3Zm5,7a1.94,1.94,0,0,0,.78-.16A2,2,0,0,0,9.84,8.78a2,2,0,0,0,0-1.55A2,2,0,0,0,8.78,6.16a2,2,0,0,0-1.55,0A2,2,0,0,0,6.16,7.22a2,2,0,0,0,0,1.55A2,2,0,0,0,7.22,9.84,1.94,1.94,0,0,0,8,10Zm3,4a1,1,0,0,0-.08-.39,1,1,0,0,0-.53-.53,1,1,0,0,0-.78,0,1,1,0,0,0-.53.53,1,1,0,0,0,0,.78,1,1,0,0,0,.53.53,1,1,0,0,0,.78,0,1,1,0,0,0,.53-.53A1,1,0,0,0,11,14ZM12,1a1,1,0,0,0-.39.08,1,1,0,0,0-.53.53,1,1,0,0,0,0,.78,1,1,0,0,0,.53.53,1,1,0,0,0,.78,0,1,1,0,0,0,.53-.53,1,1,0,0,0,0-.78,1,1,0,0,0-.53-.53A1,1,0,0,0,12,1Zm2,9a1,1,0,0,0,.39-.08,1,1,0,0,0,.53-.53,1,1,0,0,0,0-.78,1,1,0,0,0-.53-.53,1,1,0,0,0-.78,0,1,1,0,0,0-.53.53,1,1,0,0,0,0,.78,1,1,0,0,0,.53.53A1,1,0,0,0,14,10Z"/></svg>
Before Width: | Height: | Size: 2.3 KiB
@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 23.0.4, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
<title>new_notebook</title>
<g>
<path d="M0,2h16v12H0V2z M15,3H1v5h1.7l1.8-3.6l2.5,5l2-4L10.3,8H15V3z M1,13h14V9H9.7L9,7.6l-2,4l-2.5-5L3.3,9H1V13z"/>
</g>
</svg>
Before Width: | Height: | Size: 508 B
Before Width: | Height: | Size: 38 KiB
@@ -1,3 +0,0 @@
<svg width="14" height="16" viewBox="0 0 14 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11 3V6H5V3H11ZM10 5V4H6V5H10ZM1 0H14V16H1V13H0V12H1V10H0V9H1V7H0V6H1V4H0V3H1V0ZM13 15V1H2V3H3V4H2V6H3V7H2V9H3V10H2V12H3V13H2V15H13Z" fill="#0078D4"/>
</svg>
Before Width: | Height: | Size: 263 B
@@ -1 +0,0 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 16 16"><defs><style>.cls-1,.cls-2{fill:none;}.cls-1{clip-rule:evenodd;}.cls-3{clip-path:url(#clip-path);}.cls-4{fill:#e25a1c;}.cls-5{clip-path:url(#clip-path-2);}.cls-6{fill:#3c3a3e;}.cls-7{clip-path:url(#clip-path-3);}.cls-8{clip-path:url(#clip-path-4);}.cls-9{clip-path:url(#clip-path-5);}.cls-10{clip-path:url(#clip-path-6);}.cls-11{clip-path:url(#clip-path-7);}</style><clipPath id="clip-path"><path class="cls-1" d="M14.58,6.89l0-.06L14,5.7a.07.07,0,0,1,0-.09l.95-1.11a.1.1,0,0,0,0,0l-.28.07-1.15.3a.05.05,0,0,1-.07,0l-.65-1.09a.15.15,0,0,0,0-.05l-.05.29-.18,1s0,.07,0,.11,0,0-.05.06l-1.35.43-.06,0L12.14,6l0,0-.69.45a.07.07,0,0,1-.08,0l-.83-.37a.85.85,0,0,1-.32-.23.43.43,0,0,1,.1-.68,1.23,1.23,0,0,1,.28-.13l1.33-.42A.08.08,0,0,0,12,4.62l.18-1a1.78,1.78,0,0,1,.14-.54.9.9,0,0,1,.12-.18.39.39,0,0,1,.61,0,1.15,1.15,0,0,1,.16.21l.61,1a.07.07,0,0,0,.09,0l1.48-.39a.7.7,0,0,1,.31,0,.3.3,0,0,1,.25.44.84.84,0,0,1-.16.26l-1,1.21a.07.07,0,0,0,0,.09l.62,1.17a.65.65,0,0,1,.09.3.48.48,0,0,1-.42.48.87.87,0,0,1-.39,0l-.93-.28a.05.05,0,0,1,0-.05c0-.22-.07-.44-.11-.65a.14.14,0,0,1,0,0l1.07.29"/></clipPath><clipPath id="clip-path-2"><path class="cls-1" d="M14,10.07h-.84a.08.08,0,0,1-.08,0l-1-1.51,0-.06-.21,1.6h-.73l0-.21.21-1.63.21-1.56a.07.07,0,0,1,0,0l.76-.49h0l-.23,1.74h0l1.2-1.33,0,.18c0,.17.06.33.09.5a.08.08,0,0,1,0,.08l-.77.8,0,0,0,0L13.95,10l0,0h0"/></clipPath><clipPath id="clip-path-3"><path class="cls-1" d="M3.39,9.86l-.06.47-.08.61s0,0,0,0H2.59l0-.29.13-1c.05-.39.09-.77.16-1.16A1.81,1.81,0,0,1,4.29,7.1a1.42,1.42,0,0,1,1.11.18A1.24,1.24,0,0,1,6,8.22a1.66,1.66,0,0,1-.55,1.43,1.7,1.7,0,0,1-.95.47,1.4,1.4,0,0,1-1-.23Zm1.93-1.4a1.71,1.71,0,0,0,0-.22.75.75,0,0,0-.91-.49,1,1,0,0,0-.8.9A.73.73,0,0,0,4,9.42a.86.86,0,0,0,.76-.09A1,1,0,0,0,5.32,8.46Z"/></clipPath><clipPath id="clip-path-4"><path class="cls-1" d="M3.06,6.64l-.66.49L2.3,7a.51.51,0,0,0-.38-.24.43.43,0,0,0-.36.14.25.25,0,0,0,0,.33c.09.12.19.23.29.33l.5.53a1.12,1.12,0,0,1,.3.57,1.16,1.16,0,0,1-.13.75,1.43,1.43,0,0,1-1.08.76,1.42,1.42,0,0,1-.63,0,.93.93,0,0,1-.59-.52c0-.09-.08-.19-.12-.28l.72-.38L.81,9a2.14,2.14,0,0,0,.12.24.49.49,0,0,0,.64.18.7.7,0,0,0,.17-.11.37.37,0,0,0,.07-.51,2.49,2.49,0,0,0-.23-.28c-.2-.22-.4-.43-.59-.65a1,1,0,0,1-.25-.53.91.91,0,0,1,.13-.62A1.34,1.34,0,0,1,2.13,6a1,1,0,0,1,.76.4l.17.23"/></clipPath><clipPath id="clip-path-5"><path class="cls-1" d="M8.4,9.14l-.11.81a.05.05,0,0,1,0,0A1.45,1.45,0,0,1,6.56,9.7a1.31,1.31,0,0,1-.33-1A1.8,1.8,0,0,1,7.79,7.08,1.33,1.33,0,0,1,9,7.52a1.24,1.24,0,0,1,.31.9c0,.22,0,.44-.07.67s-.08.63-.12.94v0H8.48l0-.21c0-.36.1-.72.14-1.09a1.16,1.16,0,0,0-.09-.66A.64.64,0,0,0,8,7.74a1,1,0,0,0-1.09.79.75.75,0,0,0,.3.81A.82.82,0,0,0,8,9.4a1,1,0,0,0,.37-.26"/></clipPath><clipPath id="clip-path-6"><path class="cls-1" d="M11.15,7.14l-.09.68h-.41a.26.26,0,0,0-.24.17.71.71,0,0,0,0,.12l-.2,1.56-.05.39H9.45l0-.29.13-1c0-.29.07-.58.12-.87a.94.94,0,0,1,.84-.75h.57"/></clipPath><clipPath id="clip-path-7"><path class="cls-2" d="M14.28,9.68v.38h-.06V9.68h-.11V9.62h.27v.06Zm.5.38V9.69h0l-.11.37h0l-.11-.37h0v.37h-.06V9.62h.09l.1.34.1-.34h.09v.44Z"/></clipPath></defs><title>new_spark_job</title><g class="cls-3"><rect class="cls-4" x="5.65" y="-1.69" width="14.7" height="13.77"/></g><g class="cls-5"><rect class="cls-6" x="6.67" y="1.7" width="11.72" height="12.78"/></g><g class="cls-7"><rect class="cls-6" x="-1.83" y="2.64" width="12.23" height="12.75"/></g><g class="cls-8"><rect class="cls-6" 
x="-4.35" y="1.59" width="11.83" height="12.99"/></g><g class="cls-9"><rect class="cls-6" x="1.82" y="2.64" width="11.93" height="11.91"/></g><g class="cls-10"><rect class="cls-6" x="5.03" y="2.72" width="10.53" height="11.76"/></g><g class="cls-11"><rect class="cls-6" x="9.7" y="5.2" width="9.55" height="9.27"/></g></svg>
Before Width: | Height: | Size: 3.8 KiB
@@ -1 +0,0 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><defs><style>.cls-1{fill:#00539c;}</style></defs><title>open_notebook</title><path d="M12.4,4.21l-.08-.11h-.56l-.69.06a1.54,1.54,0,0,0-.23.29v8.69H9a3.32,3.32,0,0,0-.93.13,3.34,3.34,0,0,0-.87.34V4.76a2.88,2.88,0,0,1,.43-.31A5.58,5.58,0,0,1,8.14,3.3a2.63,2.63,0,0,0-.3.09A3.62,3.62,0,0,0,6.78,4a3.68,3.68,0,0,0-1.07-.57A3.58,3.58,0,0,0,4.52,3.2H1.81v.9H0V15.85H13.57V5.48ZM2.71,4.1H4.52a2.61,2.61,0,0,1,1,.17,2.32,2.32,0,0,1,.86.49v8.85a3.27,3.27,0,0,0-.88-.34,3.22,3.22,0,0,0-.93-.13H2.71ZM.9,15V5h.91v9H4.52a3.94,3.94,0,0,1,.61.06,3.2,3.2,0,0,1,.52.18,4.19,4.19,0,0,1,.49.29,2.28,2.28,0,0,1,.45.39Zm11.75,0H7a2.7,2.7,0,0,1,.47-.39,2.83,2.83,0,0,1,.47-.29,3.42,3.42,0,0,1,.54-.18A3.81,3.81,0,0,1,9,14h2.73V5h.89Z"/><polygon class="cls-1" points="13.05 3.56 13.05 3.58 13.04 3.57 13.05 3.56"/><path class="cls-1" d="M13,3.57h0v0Z"/><polygon class="cls-1" points="13.05 3.56 13.05 3.58 13.04 3.57 13.05 3.56"/><polygon class="cls-1" points="14.06 1.65 14.04 1.65 14.04 1.63 14.06 1.65"/><path class="cls-1" d="M15.76,2.1,14,3.81l-.38.38L13,3.58v0l1-1H12.64a3.35,3.35,0,0,0-1.09.26h0a3.94,3.94,0,0,0-.86.52l-.24.21s0,0,0,0a3.3,3.3,0,0,0-.51.67,3.1,3.1,0,0,0-.26.47,3.41,3.41,0,0,0-.27,1.39h-.9a4.68,4.68,0,0,1,.16-1.19,4.74,4.74,0,0,1,.25-.66,2.21,2.21,0,0,1,.2-.41,4.66,4.66,0,0,1,.36-.51c.1-.13.22-.26.34-.39a4.14,4.14,0,0,1,.66-.53,1.19,1.19,0,0,1,.23-.16A2.79,2.79,0,0,1,11,2.08l.31-.13.42-.14a4.32,4.32,0,0,1,1.19-.16h1.15l-1-1L13.67,0Z"/></svg>
Before Width: | Height: | Size: 1.5 KiB
@@ -1,45 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
version="1.1"
viewBox="0 0 16 16"
data-name="Layer 1"
id="Layer_1">
<metadata
id="metadata17">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title>sql_bigdata_cluster</dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<defs
id="defs4">
<style
id="style2">.cls-1{fill:#212121;}.cls-2{fill:#231f20;}</style>
</defs>
<title
id="title6">sql_bigdata_cluster</title>
<path
style="fill:#212121;stroke-width:1.00282443"
id="path8"
d="M 7.995,0 C 5.605,0 1.575,0.45254557 1.465,2.1319925 V 13.737272 C 1.465,15.517285 5.575,16 7.995,16 c 2.42,0 6.54,-0.482715 6.54,-2.262728 V 2.1319925 C 14.435,0.45254557 10.405,0 7.995,0 Z m 5.45,13.737272 c -0.14,0.392206 -2.18,1.166562 -5.45,1.166562 -3.27,0 -5.32,-0.784412 -5.43,-1.166562 V 3.5097423 a 14.67,14.752986 0 0 0 5.43,0.8749214 14.71,14.793212 0 0 0 5.45,-0.8749214 z m 0,-11.5549967 c -0.17,0.3922062 -2.19,1.1062225 -5.45,1.1062225 -3.26,0 -5.2,-0.6939032 -5.43,-1.0861094 0.23,-0.4022627 2.22,-1.1062225 5.43,-1.1062225 3.21,0 5.27,0.7240729 5.45,1.0659963 v 0 z"
class="cls-1" />
<polygon
style="fill:#231f20"
transform="translate(0.075)"
id="polygon10"
points="13.57,2.35 13.58,2.36 13.57,2.37 "
class="cls-2" />
<path
id="path12"
d="m 9.6501562,5.2372858 c -0.1362374,0 -0.2728654,0.026375 -0.4003906,0.082031 -0.123585,0.050567 -0.2358691,0.1260731 -0.3300781,0.2207031 -0.094256,0.096634 -0.1724299,0.2082024 -0.2304688,0.3300781 -0.062701,0.1283175 -0.099426,0.2676857 -0.109375,0.4101562 -0.00186,0.1267925 0.022265,0.2517914 0.070312,0.3691407 0.045212,0.1164344 0.1088696,0.2248797 0.1894531,0.3203125 L 8.2107031,7.9384577 C 8.011051,7.8519995 7.7980699,7.8002026 7.5798437,7.7997858 7.2852043,7.7997877 7.0158159,7.8890317 6.7790625,8.0283014 L 6.3435156,7.4677545 C 6.4851678,7.2819801 6.5620085,7.0548883 6.5622656,6.8212702 6.5623837,6.2311827 6.0839937,5.7527927 5.4939062,5.7529108 4.9038187,5.7527927 4.4254288,6.2311827 4.4255469,6.8212702 4.4254288,7.4113576 4.9038188,7.8897476 5.4939062,7.8896295 5.646983,7.8892233 5.7981841,7.8559185 5.9372656,7.7919733 l 0.4628906,0.5351562 c -0.2593431,0.2844532 -0.4218723,0.6589599 -0.421875,1.0742188 1.1e-6,0.1550931 0.029186,0.301527 0.070312,0.4433594 L 5.2692969,10.19041 C 5.0668671,9.9352433 4.7590727,9.7863779 4.4333593,9.7861139 3.8432718,9.7859958 3.3648819,10.264386 3.365,10.854473 c -1.179e-4,0.590087 0.478272,1.068477 1.0683593,1.068359 0.5900874,1.18e-4 1.0684773,-0.478272 1.0683594,-1.068359 -2.425e-4,-0.05958 -0.00547,-0.119029 -0.015625,-0.177734 l 0.7675782,-0.376953 c 0.2881162,0.42403 0.7748778,0.703124 1.3261718,0.703124 0.087028,-9e-5 0.1739047,-0.0073 0.2597656,-0.02148 l 0.2011719,0.597656 c -0.2806104,0.199117 -0.4474678,0.523359 -0.4472656,0.869137 -8.57e-5,0.586839 0.4721644,1.062587 1.0546875,1.0625 0.5825231,8.7e-5 1.054773,-0.475661 1.054687,-1.0625 8.6e-5,-0.586839 -0.4721639,-1.062587 -1.054687,-1.0625 -0.043779,5.16e-4 -0.087483,0.0038 -0.1308594,0.0098 L 8.3220312,10.819317 C 8.6909643,10.625493 8.9698168,10.295494 9.099375,9.8993953 l 0.5449219,0.089844 h 0.00195 c 0.05025,0.5310507 0.4958731,0.9369327 1.0292971,0.9374997 0.571737,8.6e-5 1.035243,-0.46342 1.035156,-1.0351567 C 11.710786,9.3198482 11.247281,8.8563402 10.675544,8.8564264 10.264465,8.85697 9.8926723,9.100743 9.7282783,9.4775202 L 9.1814062,9.3798639 C 9.1740509,8.9410593 8.9869509,8.524497 8.6638281,8.2275202 L 9.3103125,7.2607233 c 0.1095989,0.036162 0.2244742,0.051906 0.3398437,0.048828 0.1376991,0.0043 0.2729851,-0.023148 0.3984378,-0.080078 0.126162,-0.045588 0.239468,-0.119827 0.330078,-0.21875 0.09823,-0.093286 0.176943,-0.2056351 0.230469,-0.3300781 0.05137,-0.1271794 0.07858,-0.2632358 0.08008,-0.4003907 -4.88e-4,-0.140498 -0.02772,-0.2797842 -0.08008,-0.4101562 C 10.551096,5.7482226 10.472932,5.6366542 10.378672,5.5400202 10.284463,5.44539 10.172179,5.369883 10.048594,5.3193171 9.9210683,5.2636605 9.7863933,5.2372858 9.6501562,5.2372858 Z m -0.00195,0.4746094 C 9.9659223,5.7112473 10.223947,5.9683972 10.224378,6.2861139 10.225028,6.6045936 9.9666863,6.8629356 9.6482062,6.8622858 9.3304864,6.8618548 9.0733369,6.6038302 9.0739843,6.2861139 9.0744163,5.9691601 9.3312493,5.7123255 9.6482031,5.7118952 Z m -4.1543,0.4941406 C 5.8337444,6.2059063 6.1092701,6.481432 6.1091406,6.8212702 6.1092701,7.1611084 5.8337444,7.4366342 5.4939062,7.4365045 5.1540681,7.436634 4.8785424,7.1611083 4.8786719,6.8212702 4.8785424,6.481432 5.154068,6.2059063 5.4939062,6.2060358 Z M 7.5817969,8.3700983 A 1.0403689,1.0403689 0 0 1 8.6228125,9.4111139 1.0403689,1.0403689 0 0 1 7.5817969,10.450176 1.0403689,1.0403689 0 0 1 6.5427343,9.4111139 1.0403689,1.0403689 0 0 1 7.5817969,8.3700983 Z m 3.0585941,0.9277344 h 0.002 c 0.01432,-5.13e-4 0.02865,-5.13e-4 0.04297,0 0.331066,2.151e-4 0.599395,0.2685422 
0.59961,0.5996096 -2.16e-4,0.3310657 -0.268544,0.5993937 -0.59961,0.5996087 -0.331828,8.64e-4 -0.601347,-0.26778 -0.601562,-0.5996087 -7.66e-4,-0.3150021 0.242463,-0.5768467 0.556641,-0.5996096 z M 4.4216406,10.260723 c 0.3398381,-1.3e-4 0.6153637,0.275396 0.6152344,0.615234 1.299e-4,0.339838 -0.2753959,0.615365 -0.6152344,0.615235 -0.3398385,1.3e-4 -0.6153643,-0.275397 -0.6152344,-0.615235 -1.293e-4,-0.339838 0.2753963,-0.615364 0.6152344,-0.615234 z m 4.2382813,1.589844 c 0.3452152,-8.4e-5 0.6250885,0.272792 0.625,0.609375 8.81e-5,0.336583 -0.2797848,0.609459 -0.625,0.609375 -0.3452157,8.4e-5 -0.6250889,-0.272792 -0.625,-0.609375 -8.86e-5,-0.336583 0.2797844,-0.609459 0.625,-0.609375 z" />
</svg>
Before Width: | Height: | Size: 5.9 KiB
@@ -9,30 +9,8 @@ export const serviceCrashLink = 'https://github.com/Microsoft/vscode-mssql/wiki/
export const extensionConfigSectionName = 'mssql';

// DATA PROTOCOL VALUES ///////////////////////////////////////////////////////////
export const mssqlClusterProviderName = 'mssqlCluster';
export const hadoopEndpointNameGateway = 'gateway';
export const protocolVersion = '1.0';
export const authenticationTypePropName = 'authenticationType';
export const integratedAuth = 'integrated';
export const hostPropName = 'host';
export const userPropName = 'user';
export const knoxPortPropName = 'knoxport';
export const passwordPropName = 'password';
export const groupIdPropName = 'groupId';
export const defaultKnoxPort = 30443;
export const groupIdName = 'groupId';
export const sqlProviderName = 'MSSQL';

export const UNTITLED_SCHEMA = 'untitled';

export const hadoopConnectionTimeoutSeconds = 15;
export const hdfsRootPath = '/';

export const clusterEndpointsProperty = 'clusterEndpoints';
export const isBigDataClusterProperty = 'isBigDataCluster';

export const ViewType = 'view';

// SERVICE NAMES //////////////////////////////////////////////////////////
export const ObjectExplorerService = 'objectexplorer';
export const CmsService = 'cmsService';
@@ -44,39 +22,3 @@ export const SqlAssessmentService = 'sqlAssessmentService';
export const SqlMigrationService = 'sqlMigrationService';
export const NotebookConvertService = 'notebookConvertService';
export const AzureBlobService = 'azureBlobService';

export enum BuiltInCommands {
SetContext = 'setContext'
}

export enum CommandContext {
WizardServiceEnabled = 'wizardservice:enabled'
}

export enum MssqlClusterItems {
Connection = 'mssqlCluster:connection',
Folder = 'mssqlCluster:folder',
File = 'mssqlCluster:file',
Error = 'mssqlCluster:error'
}

export enum MssqlClusterItemsSubType {
Mount = ':mount:',
MountChild = ':mountChild:',
Spark = ':spark:'
}

// SPARK JOB SUBMISSION //////////////////////////////////////////////////////////
export const mssqlClusterNewNotebookTask = 'mssqlCluster.task.newNotebook';
export const mssqlClusterOpenNotebookTask = 'mssqlCluster.task.openNotebook';
export const mssqlOpenClusterDashboard = 'mssqlCluster.task.openClusterDashboard';
export const mssqlClusterLivySubmitSparkJobCommand = 'mssqlCluster.livy.cmd.submitSparkJob';
export const mssqlClusterLivySubmitSparkJobFromFileCommand = 'mssqlCluster.livy.cmd.submitFileToSparkJob';
export const mssqlClusterLivySubmitSparkJobTask = 'mssqlCluster.livy.task.submitSparkJob';
export const mssqlClusterLivyOpenSparkHistory = 'mssqlCluster.livy.task.openSparkHistory';
export const mssqlClusterLivyOpenYarnHistory = 'mssqlCluster.livy.task.openYarnHistory';
export const mssqlClusterLivySubmitPath = '/gateway/default/livy/v1/batches';
export const mssqlClusterLivyTimeInMSForCheckYarnApp = 1000;
export const mssqlClusterLivyRetryTimesForCheckYarnApp = 20;
export const mssqlClusterSparkJobFileSelectorButtonWidth = '30px';
export const mssqlClusterSparkJobFileSelectorButtonHeight = '30px';
@@ -7,7 +7,6 @@ import * as vscode from 'vscode';
import * as azdata from 'azdata';

import * as types from './types';
import * as Constants from './constants';

enum BuiltInCommands {
SetContext = 'setContext',
@@ -16,7 +15,6 @@ enum BuiltInCommands {
enum ContextKeys {
ISCLOUD = 'mssql:iscloud',
EDITIONID = 'mssql:engineedition',
ISCLUSTER = 'mssql:iscluster',
SERVERMAJORVERSION = 'mssql:servermajorversion'
}

@@ -41,7 +39,6 @@ export default class ContextProvider {
public onDashboardOpen(e: azdata.DashboardDocument): void {
let iscloud: boolean;
let edition: number;
let isCluster: boolean = false;
let serverMajorVersion: number;
if (e.profile.providerName.toLowerCase() === 'mssql' && !types.isUndefinedOrNull(e.serverInfo) && !types.isUndefinedOrNull(e.serverInfo.engineEditionId)) {
if (isCloudEditions.some(i => i === e.serverInfo.engineEditionId)) {
@@ -51,13 +48,6 @@ export default class ContextProvider {
}

edition = e.serverInfo.engineEditionId;

if (!types.isUndefinedOrNull(e.serverInfo.options)) {
let isBigDataCluster = e.serverInfo.options[Constants.isBigDataClusterProperty];
if (isBigDataCluster) {
isCluster = isBigDataCluster;
}
}
serverMajorVersion = e.serverInfo.serverMajorVersion;
}

@@ -69,10 +59,6 @@ export default class ContextProvider {
void setCommandContext(ContextKeys.EDITIONID, edition);
}

if (!types.isUndefinedOrNull(isCluster)) {
void setCommandContext(ContextKeys.ISCLUSTER, isCluster);
}

if (!types.isUndefinedOrNull(serverMajorVersion)) {
void setCommandContext(ContextKeys.SERVERMAJORVERSION, serverMajorVersion);
}

@@ -1,180 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as vscode from 'vscode';
|
||||
import * as azdata from 'azdata';
|
||||
import * as bdc from 'bdc';
|
||||
import * as nls from 'vscode-nls';
|
||||
const localize = nls.loadMessageBundle();
|
||||
|
||||
import * as utils from '../utils';
|
||||
|
||||
const mgmtProxyName = 'mgmtproxy';
|
||||
const grafanaEndpointName = 'metricsui';
|
||||
const grafanaDescription = localize('grafana', "Metrics Dashboard");
|
||||
const logsuiEndpointName = 'logsui';
|
||||
const logsuiDescription = localize('kibana', "Log Search Dashboard");
|
||||
const sparkHistoryEndpointName = 'spark-history';
|
||||
const sparkHistoryDescription = localize('sparkHistory', "Spark Jobs Management and Monitoring Dashboard");
|
||||
const yarnUiEndpointName = 'yarn-ui';
|
||||
const yarnHistoryDescription = localize('yarnHistory', "Spark Diagnostics and Monitoring Dashboard");
|
||||
const hyperlinkedEndpoints = [grafanaEndpointName, logsuiEndpointName, sparkHistoryEndpointName, yarnUiEndpointName];
|
||||
|
||||
export function registerServiceEndpoints(context: vscode.ExtensionContext): void {
|
||||
azdata.ui.registerModelViewProvider('bdc-endpoints', async (view) => {
|
||||
let endpointsArray: Array<bdc.IEndpointModel> = Object.assign([], utils.getClusterEndpoints(view.serverInfo));
|
||||
|
||||
if (endpointsArray.length > 0) {
|
||||
const grafanaEp = endpointsArray.find(e => e.name === grafanaEndpointName);
|
||||
if (grafanaEp && grafanaEp.endpoint && grafanaEp.endpoint.indexOf('/d/wZx3OUdmz') === -1) {
|
||||
// Update to have correct URL
|
||||
grafanaEp.endpoint += '/d/wZx3OUdmz';
|
||||
}
|
||||
const kibanaEp = endpointsArray.find(e => e.name === logsuiEndpointName);
|
||||
if (kibanaEp && kibanaEp.endpoint && kibanaEp.endpoint.indexOf('/app/kibana#/discover') === -1) {
|
||||
// Update to have correct URL
|
||||
kibanaEp.endpoint += '/app/kibana#/discover';
|
||||
}
|
||||
|
||||
if (!grafanaEp) {
|
||||
// We are on older CTP, need to manually add some endpoints.
|
||||
// TODO remove once CTP support goes away
|
||||
const managementProxyEp = endpointsArray.find(e => e.name === mgmtProxyName);
|
||||
if (managementProxyEp) {
|
||||
endpointsArray.push(getCustomEndpoint(managementProxyEp, grafanaEndpointName, grafanaDescription, '/grafana/d/wZx3OUdmz'));
|
||||
endpointsArray.push(getCustomEndpoint(managementProxyEp, logsuiEndpointName, logsuiDescription, '/kibana/app/kibana#/discover'));
|
||||
}
|
||||
|
||||
const gatewayEp = endpointsArray.find(e => e.name === 'gateway');
|
||||
if (gatewayEp) {
|
||||
endpointsArray.push(getCustomEndpoint(gatewayEp, sparkHistoryEndpointName, sparkHistoryDescription, '/gateway/default/sparkhistory'));
|
||||
endpointsArray.push(getCustomEndpoint(gatewayEp, yarnUiEndpointName, yarnHistoryDescription, '/gateway/default/yarn'));
|
||||
}
|
||||
}
|
||||
|
||||
endpointsArray = endpointsArray.map(e => {
|
||||
e.description = getEndpointDisplayText(e.name, e.description);
|
||||
return e;
|
||||
});
|
||||
|
||||
// Sort the endpoints. The sort method is that SQL Server Master is first - followed by all
|
||||
// others in alphabetical order by endpoint
|
||||
const sqlServerMasterEndpoints = endpointsArray.filter(e => e.name === Endpoint.sqlServerMaster);
|
||||
endpointsArray = endpointsArray.filter(e => e.name !== Endpoint.sqlServerMaster)
|
||||
.sort((e1, e2) => e1.endpoint.localeCompare(e2.endpoint));
|
||||
endpointsArray.unshift(...sqlServerMasterEndpoints);
|
||||
|
||||
const container = view.modelBuilder.flexContainer().withLayout({ flexFlow: 'column', width: '100%', height: '100%' }).component();
|
||||
endpointsArray.forEach(endpointInfo => {
|
||||
const endPointRow = view.modelBuilder.flexContainer().withLayout({ flexFlow: 'row' }).component();
|
||||
const nameCell = view.modelBuilder.text().withProps({ value: endpointInfo.description }).component();
|
||||
endPointRow.addItem(nameCell, { CSSStyles: { 'width': '35%', 'font-weight': '600', 'user-select': 'text' } });
|
||||
if (hyperlinkedEndpoints.findIndex(e => e === endpointInfo.name) >= 0) {
|
||||
const linkCell = view.modelBuilder.hyperlink()
|
||||
.withProps({
|
||||
label: endpointInfo.endpoint,
|
||||
title: endpointInfo.endpoint,
|
||||
url: endpointInfo.endpoint
|
||||
}).component();
|
||||
endPointRow.addItem(linkCell, { CSSStyles: { 'width': '62%', 'color': '#0078d4', 'text-decoration': 'underline', 'padding-top': '10px', 'overflow': 'hidden', 'text-overflow': 'ellipsis' } });
|
||||
}
|
||||
else {
|
||||
const endpointCell =
|
||||
view.modelBuilder.text()
|
||||
.withProps(
|
||||
{
|
||||
value: endpointInfo.endpoint,
|
||||
title: endpointInfo.endpoint,
|
||||
CSSStyles: { 'overflow': 'hidden', 'text-overflow': 'ellipsis' }
|
||||
})
|
||||
.component();
|
||||
endPointRow.addItem(endpointCell, { CSSStyles: { 'width': '62%', 'user-select': 'text' } });
|
||||
}
|
||||
const copyValueCell = view.modelBuilder.button().component();
|
||||
copyValueCell.iconPath = { light: context.asAbsolutePath('resources/light/copy.png'), dark: context.asAbsolutePath('resources/dark/copy_inverse.png') };
|
||||
copyValueCell.onDidClick(() => {
|
||||
void vscode.env.clipboard.writeText(endpointInfo.endpoint);
|
||||
});
|
||||
copyValueCell.title = localize("copyText", "Copy");
|
||||
copyValueCell.iconHeight = '14px';
|
||||
copyValueCell.iconWidth = '14px';
|
||||
endPointRow.addItem(copyValueCell, { CSSStyles: { 'width': '3%', 'padding-top': '10px' } });
|
||||
|
||||
container.addItem(endPointRow, { CSSStyles: { 'padding-left': '10px', 'border-top': 'solid 1px #ccc', 'box-sizing': 'border-box', 'user-select': 'text' } });
|
||||
});
|
||||
const endpointsContainer = view.modelBuilder.flexContainer().withLayout({ flexFlow: 'column', width: '540px', height: '100%', position: 'absolute' }).component();
|
||||
endpointsContainer.addItem(container, { CSSStyles: { 'padding-top': '25px', 'padding-left': '5px' } });
|
||||
|
||||
await view.initializeModel(endpointsContainer);
|
||||
}
|
||||
});
|
||||
}
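
// --- Illustrative sketch (added; not part of the original diff) ---
// A minimal example of how this provider would typically be wired up from the
// extension's activate() entry point (the surrounding activation code is assumed):
export function activateExample(context: vscode.ExtensionContext): void {
    // Registers the 'bdc-endpoints' model view shown on the cluster dashboard
    registerServiceEndpoints(context);
}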

function getCustomEndpoint(parentEndpoint: bdc.IEndpointModel, serviceName: string, description: string, serviceUrl?: string): bdc.IEndpointModel {
    if (parentEndpoint) {
        let endpoint: bdc.IEndpointModel = {
            name: serviceName,
            description: description,
            endpoint: parentEndpoint.endpoint + serviceUrl,
            protocol: 'https'
        };
        return endpoint;
    }
    return null;
}
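
// --- Illustrative sketch (added; not part of the original diff) ---
// Deriving a custom endpoint from the gateway, mirroring the older-CTP fallback in
// registerServiceEndpoints above. The gateway URL is a made-up placeholder.
function getCustomEndpointExample(): bdc.IEndpointModel {
    const gateway: bdc.IEndpointModel = {
        name: 'gateway',
        description: 'Gateway',
        endpoint: 'https://contoso-gateway:30443',
        protocol: 'https'
    };
    const sparkHistory = getCustomEndpoint(gateway, sparkHistoryEndpointName, sparkHistoryDescription, '/gateway/default/sparkhistory');
    // sparkHistory.endpoint === 'https://contoso-gateway:30443/gateway/default/sparkhistory'
    return sparkHistory;
}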

export enum Endpoint {
    gateway = 'gateway',
    sparkHistory = 'spark-history',
    yarnUi = 'yarn-ui',
    appProxy = 'app-proxy',
    mgmtproxy = 'mgmtproxy',
    managementProxy = 'management-proxy',
    logsui = 'logsui',
    metricsui = 'metricsui',
    controller = 'controller',
    sqlServerMaster = 'sql-server-master',
    webhdfs = 'webhdfs',
    livy = 'livy'
}

/**
 * Gets the localized text to display for a corresponding endpoint
 * @param endpointName The endpoint name to get the display text for
 * @param description The backup description to use if we don't have our own
 */
function getEndpointDisplayText(endpointName?: string, description?: string): string {
    endpointName = endpointName || '';
    switch (endpointName.toLowerCase()) {
        case Endpoint.appProxy:
            return localize('endpoint.appproxy', "Application Proxy");
        case Endpoint.controller:
            return localize('endpoint.controller', "Cluster Management Service");
        case Endpoint.gateway:
            return localize('endpoint.gateway', "Gateway to access HDFS files, Spark");
        case Endpoint.managementProxy:
            return localize('endpoint.managementproxy', "Management Proxy");
        case Endpoint.mgmtproxy:
            return localize('endpoint.mgmtproxy', "Management Proxy");
        case Endpoint.sqlServerMaster:
            return localize('endpoint.sqlServerEndpoint', "SQL Server Master Instance Front-End");
        case Endpoint.metricsui:
            return localize('endpoint.grafana', "Metrics Dashboard");
        case Endpoint.logsui:
            return localize('endpoint.kibana', "Log Search Dashboard");
        case Endpoint.yarnUi:
            return localize('endpoint.yarnHistory', "Spark Diagnostics and Monitoring Dashboard");
        case Endpoint.sparkHistory:
            return localize('endpoint.sparkHistory', "Spark Jobs Management and Monitoring Dashboard");
        case Endpoint.webhdfs:
            return localize('endpoint.webhdfs', "HDFS File System Proxy");
        case Endpoint.livy:
            return localize('endpoint.livy', "Proxy for running Spark statements, jobs, applications");
        default:
            // Default is to use the description if one was given; otherwise fall back
            // to the original endpoint name
            return description && description.length > 0 ? description : endpointName;
    }
}
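
// --- Illustrative sketch (added; not part of the original diff) ---
// How the display-text fallback behaves for known and unknown endpoint names:
function endpointDisplayTextExample(): void {
    // Known endpoints resolve to a localized label ("Gateway to access HDFS files, Spark"
    // in the default English bundle)
    console.log(getEndpointDisplayText(Endpoint.gateway));
    // Unknown endpoints fall back to the provided description, then to the raw name
    console.log(getEndpointDisplayText('custom-svc', 'My Service')); // "My Service"
    console.log(getEndpointDisplayText('custom-svc'));               // "custom-svc"
}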
@@ -1,384 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { IconPathHelper, IconPath } from '../iconHelper';
import { groupBy } from '../util/arrays';
import * as loc from '../localizedConstants';

/**
 * The permission status of an HDFS path - this consists of:
 * - The sticky bit for that path
 * - The permission bits for the owner, group and other
 * - (Optional) Set of additional ACL entries on this path
 */
export class PermissionStatus {
    /**
     *
     * @param owner The ACL entry object for the owner permissions
     * @param group The ACL entry object for the group permissions
     * @param other The ACL entry object for the other permissions
     * @param stickyBit The sticky bit status for the object. If true the owner/root are
     * the only ones who can delete the resource or its contents (if a folder)
     * @param aclEntries The ACL entries defined for the object
     */
    constructor(public owner: AclEntry, public group: AclEntry, public other: AclEntry, public stickyBit: boolean, public aclEntries: AclEntry[]) { }

    /**
     * The permission octal for the path in the form [#]### with each # mapping to:
     * 0 (optional) - The sticky bit (1 or 0)
     * 1 - The owner permission digit
     * 2 - The group permission digit
     * 3 - The other permission digit
     * @see AclEntryPermission for more information on the permission digits
     */
    public get permissionOctal(): string {
        // Always use the access scope for the permission octal - it doesn't have a concept of other scopes
        return `${this.stickyBit ? '1' : ''}${this.owner.getPermissionDigit(AclEntryScope.access)}${this.group.getPermissionDigit(AclEntryScope.access)}${this.other.getPermissionDigit(AclEntryScope.access)}`;
    }
}
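
// --- Illustrative sketch (added; not part of the original diff) ---
// Worked example of permissionOctal: owner rwx (7), group r-x (5), other r-- (4)
// with the sticky bit set yields '1754'. AclEntry and friends are defined later
// in this file, so the example is wrapped in a function to defer evaluation.
function permissionOctalExample(): string {
    const owner = new AclEntry(PermissionType.owner, '', 'owner');
    owner.addPermission(AclEntryScope.access, new AclEntryPermission(true, true, true));   // rwx = 7
    const group = new AclEntry(PermissionType.group, '', 'group');
    group.addPermission(AclEntryScope.access, new AclEntryPermission(true, false, true));  // r-x = 5
    const other = new AclEntry(PermissionType.other, '', 'other');
    other.addPermission(AclEntryScope.access, new AclEntryPermission(true, false, false)); // r-- = 4
    const status = new PermissionStatus(owner, group, other, true /*stickyBit*/, []);
    return status.permissionOctal; // '1754'
}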

/**
 * The type of an ACL entry. Corresponds to the first (or second if a scope is present) field of
 * an ACL entry - e.g. user:bob:rwx (user) or default:group::r-- (group)
 */
export enum AclType {
    /**
     * An ACL entry applied to a specific user.
     */
    user = 'user',
    /**
     * An ACL entry applied to a specific group.
     */
    group = 'group',
    /**
     * An ACL mask entry.
     */
    mask = 'mask',
    /**
     * An ACL entry that applies to all other users that were not covered by one of the more specific ACL entry types.
     */
    other = 'other'
}

/**
 * The type of permission on a file - this corresponds to the field in the file status used in commands such as chmod.
 * Typically this value is represented as a 3 digit octal - e.g. 740 - where the first digit is the owner, the second
 * the group and the third other. @see parseAclPermissionFromOctal
 */
export enum PermissionType {
    owner = 'owner',
    group = 'group',
    other = 'other'
}

export enum AclEntryScope {
    /**
     * An ACL entry that is inspected during permission checks to enforce permissions.
     */
    access = 'access',
    /**
     * An ACL entry to be applied to a directory's children that do not otherwise have their own ACL defined.
     */
    default = 'default'
}

/**
 * The read, write and execute permissions for an ACL
 */
export class AclEntryPermission {

    constructor(public read: boolean, public write: boolean, public execute: boolean) { }

    /**
     * Returns the string representation of the permissions in the form [r-][w-][x-].
     * e.g.
     * rwx
     * r--
     * ---
     */
    public toString() {
        return `${this.read ? 'r' : '-'}${this.write ? 'w' : '-'}${this.execute ? 'x' : '-'}`;
    }

    /**
     * Gets the digit for a permission octal for this permission. This digit is a value
     * between 0 and 7 inclusive, which is a bitwise OR of the permission flags (r/w/x).
     */
    public get permissionDigit(): number {
        return (this.read ? 4 : 0) + (this.write ? 2 : 0) + (this.execute ? 1 : 0);
    }
}
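
// --- Illustrative sketch (added; not part of the original diff) ---
// The string form and octal digit for a single permission:
function permissionDigitExample(): void {
    const p = new AclEntryPermission(true, false, true);
    console.log(p.toString());      // 'r-x'
    console.log(p.permissionDigit); // 5 (4 for read + 1 for execute)
}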

/**
 * Parses a string representation of a permission into an AclEntryPermission object. The string must consist
 * of 3 characters for the read, write and execute permissions, where each character is either r/w/x or
 * a -.
 * e.g. The following are all valid strings
 * rwx
 * ---
 * -w-
 * @param permissionString The string representation of the permission
 */
function parseAclPermission(permissionString: string): AclEntryPermission {
    permissionString = permissionString.toLowerCase();
    if (!/^[r\-][w\-][x\-]$/i.test(permissionString)) {
        throw new Error(`Invalid permission string ${permissionString} - must match /^[r\-][w\-][x\-]$/i`);
    }
    return new AclEntryPermission(permissionString[0] === 'r', permissionString[1] === 'w', permissionString[2] === 'x');
}
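
// --- Illustrative sketch (added; not part of the original diff) ---
// Round-tripping a permission string through the parser:
function parseAclPermissionExample(): void {
    const p = parseAclPermission('rw-');
    console.log(p.read, p.write, p.execute); // true true false
    console.log(p.toString());               // 'rw-'
}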

/**
 * A single ACL Permission entry
 * scope - The scope of the entry @see AclEntryScope
 * type - The type of the entry @see AclType
 * name - The name of the user/group the ACL is set for. Optional.
 * displayName - The name to display in the UI
 * permission - The permission set for this ACL. @see AclEntryPermission
 */
export class AclEntry {
    private readonly permissions = new Map<AclEntryScope, AclEntryPermission>();

    constructor(
        public readonly type: AclType | PermissionType,
        public readonly name: string,
        public readonly displayName: string,
    ) { }

    /**
     * Adds a new permission at the specified scope, overwriting the existing permission at that scope if it
     * exists
     * @param scope The scope to add the new permission at
     * @param permission The permission to set
     */
    public addPermission(scope: AclEntryScope, permission: AclEntryPermission): void {
        this.permissions.set(scope, permission);
    }

    /**
     * Deletes the permission at the specified scope.
     * @param scope The scope to delete the permission for
     * @returns True if the entry was successfully deleted, false if not (it didn't exist)
     */
    public removePermission(scope: AclEntryScope): boolean {
        return this.permissions.delete(scope);
    }

    /**
     * Gets the permission at the specified scope if one exists
     * @param scope The scope to retrieve the permission for
     */
    public getPermission(scope: AclEntryScope): AclEntryPermission | undefined {
        return this.permissions.get(scope);
    }

    /**
     * Gets the full list of permissions and their scopes for this entry
     */
    public getAllPermissions(): { scope: AclEntryScope, permission: AclEntryPermission }[] {
        return Array.from(this.permissions.entries()).map((entry: [AclEntryScope, AclEntryPermission]) => {
            return { scope: entry[0], permission: entry[1] };
        });
    }

    /**
     * Gets the octal number representing the permission for the specified scope of
     * this entry. This will either be a number between 0 and 7 inclusive (which is
     * a bitwise OR of the permission flags rwx) or undefined if the scope doesn't exist
     * for this entry.
     */
    public getPermissionDigit(scope: AclEntryScope): number | undefined {
        return this.permissions.has(scope) ? this.permissions.get(scope).permissionDigit : undefined;
    }

    /**
     * Returns the string representation of each ACL Entry in the form [SCOPE:]TYPE:NAME:PERMISSION.
     * Note that SCOPE is only displayed if it's default - access is implied if there is no scope
     * specified.
     * The name is optional and so may be empty.
     * Example strings:
     * user:bob:rwx
     * default:user:bob:rwx
     * user::r-x
     * default:group::r--
     */
    toAclStrings(includeDefaults: boolean = true): string[] {
        return Array.from(this.permissions.entries()).filter((entry: [AclEntryScope, AclEntryPermission]) => includeDefaults || entry[0] !== AclEntryScope.default).map((entry: [AclEntryScope, AclEntryPermission]) => {
            return `${entry[0] === AclEntryScope.default ? 'default:' : ''}${getAclEntryType(this.type)}:${this.name}:${entry[1].toString()}`;
        });
    }

    /**
     * Checks whether this and the specified AclEntry are equal. Two entries are considered equal
     * if their type and name are equal.
     * @param other The other entry to compare against
     */
    public isEqual(other: AclEntry): boolean {
        if (!other) {
            return false;
        }
        return AclEntry.compare(this, other) === 0;
    }

    /**
     * Compares two AclEntry objects for ordering
     * @param a The first AclEntry to compare
     * @param b The second AclEntry to compare
     */
    static compare(a: AclEntry, b: AclEntry): number {
        if (a.name === b.name) {
            if (a.type === b.type) {
                return 0;
            }
            return a.type.localeCompare(b.type);
        }
        return a.name.localeCompare(b.name);
    }
}
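
// --- Illustrative sketch (added; not part of the original diff) ---
// Building an entry with both an access- and a default-scope permission and
// rendering it as ACL strings:
function aclEntryExample(): string[] {
    const entry = new AclEntry(AclType.user, 'bob', 'bob');
    entry.addPermission(AclEntryScope.access, new AclEntryPermission(true, true, true));
    entry.addPermission(AclEntryScope.default, new AclEntryPermission(true, false, true));
    return entry.toAclStrings(); // ['user:bob:rwx', 'default:user:bob:r-x']
}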

/**
 * Maps the possible entry types into their corresponding values for use in an ACL string
 * @param type The type to convert
 */
function getAclEntryType(type: AclType | PermissionType): AclType {
    // We only need to map PermissionType - AclType already has the
    // correct values we're mapping to.
    if (type in PermissionType) {
        switch (type) {
            case PermissionType.owner:
                return AclType.user;
            case PermissionType.group:
                return AclType.group;
            case PermissionType.other:
                return AclType.other;
            default:
                throw new Error(`Unknown PermissionType : ${type}`);
        }
    }
    return <AclType>type;
}

/**
 * Parses a complete ACL string into separate AclEntry objects for each entry. A valid string consists of multiple entries
 * separated by a comma.
 *
 * A valid entry must match (default:)?(user|group|mask|other):([A-Za-z_][A-Za-z0-9._-]*)?:([rwx-]{3})?
 * e.g. the following are all valid entries
 * user:bob:rwx
 * user::rwx
 * default:user:bob:rwx
 * group::r-x
 * default:other::r--
 *
 * So a valid ACL string might look like this
 * user:bob:rwx,user::rwx,default:user:bob:rwx,group::r-x,default:other::r--
 * @param aclString The string representation of the ACL
 */
export function parseAclList(aclString: string): AclEntry[] {
    if (aclString === '') {
        return [];
    }

    if (!/^(default:)?(user|group|mask|other):([A-Za-z_][A-Za-z0-9._-]*)?:([rwx-]{3})?(,(default:)?(user|group|mask|other):([A-Za-z_][A-Za-z0-9._-]*)?:([rwx-]{3})?)*$/.test(aclString)) {
        throw new Error(`Invalid ACL string ${aclString}. Expected to match ^(default:)?(user|group|mask|other):([A-Za-z_][A-Za-z0-9._-]*)?:([rwx-]{3})?(,(default:)?(user|group|mask|other):([A-Za-z_][A-Za-z0-9._-]*)?:([rwx-]{3})?)*$`);
    }
    return mergeAclEntries(aclString.split(',').map(aclEntryString => parseAclEntry(aclEntryString)));
}
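
// --- Illustrative sketch (added; not part of the original diff) ---
// Parsing a full ACL string; the two 'bob' entries are merged into one AclEntry
// carrying both scopes (assuming groupBy preserves first-seen order):
function parseAclListExample(): void {
    const entries = parseAclList('user:bob:rwx,default:user:bob:r-x,group::r-x');
    console.log(entries.length); // 2
    const bob = entries.find(e => e.name === 'bob');
    console.log(bob.toAclStrings()); // ['user:bob:rwx', 'default:user:bob:r-x']
}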

/**
 * Parses a given string representation of an ACL Entry into an AclEntry object. This method
 * assumes the string has already been checked for validity.
 * @param aclString The string representation of the ACL entry
 */
function parseAclEntry(aclString: string): AclEntry {
    const parts: string[] = aclString.split(':');
    let i = 0;
    const scope: AclEntryScope = parts.length === 4 && parts[i++] === 'default' ? AclEntryScope.default : AclEntryScope.access;
    let type: AclType;
    switch (parts[i++]) {
        case 'user':
            type = AclType.user;
            break;
        case 'group':
            type = AclType.group;
            break;
        case 'mask':
            type = AclType.mask;
            break;
        case 'other':
            type = AclType.other;
            break;
        default:
            throw new Error(`Unknown ACL Entry type ${parts[i - 1]}`);
    }
    const name = parts[i++];
    const permission = parseAclPermission(parts[i++]);
    const entry = new AclEntry(type, name, name);
    entry.addPermission(scope, permission);
    return entry;
}

/**
 * Parses an octal in the form [#]### into a combination of an optional sticky bit and a set
 * of @see AclEntryPermission. Each digit in the octal corresponds to the sticky bit or a
 * particular user type - owner, group and other respectively.
 * If the sticky bit exists and its value is 1 then the sticky bit value is set to true.
 * Each permission digit is then expected to be a value between 0 and 7 inclusive, which is a bitwise OR of the permission flags
 * for the file.
 * 4 - Read
 * 2 - Write
 * 1 - Execute
 * So an octal of 1730 would map to:
 * - sticky === true
 * - The owner with rwx permissions
 * - The group with -wx permissions
 * - All others with --- permissions
 * @param octal The octal string to parse
 */
export function parseAclPermissionFromOctal(octal: string): { sticky: boolean, owner: AclEntryPermission, group: AclEntryPermission, other: AclEntryPermission } {
    if (!octal || (octal.length !== 3 && octal.length !== 4)) {
        throw new Error(`Invalid octal ${octal} - it must be a 3 or 4 digit string`);
    }

    const sticky = octal.length === 4 ? octal[0] === '1' : false;
    const ownerPermissionDigit = parseInt(octal[octal.length - 3]);
    const groupPermissionDigit = parseInt(octal[octal.length - 2]);
    const otherPermissionDigit = parseInt(octal[octal.length - 1]);

    return {
        sticky: sticky,
        owner: new AclEntryPermission((ownerPermissionDigit & 4) === 4, (ownerPermissionDigit & 2) === 2, (ownerPermissionDigit & 1) === 1),
        group: new AclEntryPermission((groupPermissionDigit & 4) === 4, (groupPermissionDigit & 2) === 2, (groupPermissionDigit & 1) === 1),
        other: new AclEntryPermission((otherPermissionDigit & 4) === 4, (otherPermissionDigit & 2) === 2, (otherPermissionDigit & 1) === 1)
    };
}
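
// --- Illustrative sketch (added; not part of the original diff) ---
// The worked example from the doc comment above, octal '1730':
function parseOctalExample(): void {
    const parsed = parseAclPermissionFromOctal('1730');
    console.log(parsed.sticky);           // true
    console.log(parsed.owner.toString()); // 'rwx' (7)
    console.log(parsed.group.toString()); // '-wx' (3)
    console.log(parsed.other.toString()); // '---' (0)
}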

export function getImageForType(type: AclType | PermissionType): { iconPath: IconPath, title: string } {
    switch (type) {
        case AclType.user:
        case PermissionType.owner:
            return { iconPath: IconPathHelper.user, title: loc.owner };
        case AclType.group:
        case PermissionType.group:
        case PermissionType.other:
            return { iconPath: IconPathHelper.group, title: loc.group };
    }
    return { iconPath: { dark: '', light: '' }, title: '' };
}

/**
 * Merges a list of AclEntry objects such that the resulting list contains only a single entry for each name/type pair with
 * a separate permission for each separate AclEntry
 * @param entries The set of AclEntries to merge
 */
function mergeAclEntries(entries: AclEntry[]): AclEntry[] {
    const groupedEntries = groupBy(entries, (a, b) => AclEntry.compare(a, b)); // First group the entries together
    return groupedEntries.map(entryGroup => { // Now make a single AclEntry for each group and add all the permissions from each group
        const entry = new AclEntry(entryGroup[0].type, entryGroup[0].name, entryGroup[0].displayName);
        entryGroup.forEach(e => {
            e.getAllPermissions().forEach(sp => entry.addPermission(sp.scope, sp.permission));
        });
        return entry;
    });
}
@@ -1,112 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { FileType } from '../objectExplorerNodeProvider/fileSources';

export const enum HdfsFileType {
    File = 'File',
    Directory = 'Directory',
    Symlink = 'Symlink'
}

/**
 * Maps a @see HdfsFileType to its corresponding @see FileType. Will return undefined if
 * the passed-in type is undefined.
 * @param hdfsFileType The HdfsFileType to map from
 */
export function hdfsFileTypeToFileType(hdfsFileType: HdfsFileType | undefined): FileType | undefined {
    switch (hdfsFileType) {
        case HdfsFileType.Directory:
            return FileType.Directory;
        case HdfsFileType.File:
            return FileType.File;
        case HdfsFileType.Symlink:
            return FileType.Symlink;
        case undefined:
            return undefined;
        default:
            throw new Error(`Unexpected file type ${hdfsFileType}`);
    }
}

export class FileStatus {
    /**
     * Creates a FileStatus describing a file or directory in HDFS.
     */
    constructor(
        /**
         * Access time for the file
         */
        public readonly accessTime: string,
        /**
         * The block size of a file.
         */
        public readonly blockSize: string,
        /**
         * The group owner.
         */
        public readonly group: string,
        /**
         * The number of bytes in a file. (0 for directories)
         */
        public readonly length: string,
        /**
         * The modification time.
         */
        public readonly modificationTime: string,
        /**
         * The user who is the owner.
         */
        public readonly owner: string,
        /**
         * The path suffix.
         */
        public readonly pathSuffix: string,
        /**
         * The permission represented as an octal string.
         */
        public readonly permission: string,
        /**
         * The replication factor of the file.
         */
        public readonly replication: string,
        /**
         * Whether a directory is snapshot enabled or not
         */
        public readonly snapshotEnabled: string,
        /**
         * The type of the path object.
         */
        public readonly type: HdfsFileType
    ) { }
}
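
// --- Illustrative sketch (added; not part of the original diff) ---
// A FileStatus can be built from a WebHDFS GETFILESTATUS JSON object; the field
// names follow the public WebHDFS REST API, but this helper is an assumption,
// not the extension's actual parsing code.
function fileStatusFromWebHdfsExample(raw: any): FileStatus {
    return new FileStatus(
        String(raw.accessTime),
        String(raw.blockSize),
        raw.group,
        String(raw.length),
        String(raw.modificationTime),
        raw.owner,
        raw.pathSuffix,
        raw.permission,
        String(raw.replication),
        String(raw.snapshotEnabled),
        parseHdfsFileType(raw.type)
    );
}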

/**
 * Parses a fileType string into the corresponding @see HdfsFileType
 * @param fileType The fileType string to parse
 */
export function parseHdfsFileType(fileType: string): HdfsFileType {
    switch (fileType.toLowerCase()) {
        case 'file':
            return HdfsFileType.File;
        case 'directory':
            return HdfsFileType.Directory;
        case 'symlink':
            return HdfsFileType.Symlink;
        default:
            throw new Error(`Unknown HdfsFileType '${fileType}'`);
    }
}
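
// --- Illustrative sketch (added; not part of the original diff) ---
// Parsing is case-insensitive, and the result maps onto the generic FileType:
function fileTypeExample(): void {
    const t = parseHdfsFileType('DIRECTORY'); // HdfsFileType.Directory
    console.log(hdfsFileTypeToFileType(t));   // FileType.Directory
}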
@@ -1,143 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as vscode from 'vscode';
import { IFileSource, FileType } from '../objectExplorerNodeProvider/fileSources';
import { PermissionStatus, AclEntry, AclEntryScope, AclType, AclEntryPermission } from './aclEntry';
import { FileStatus, hdfsFileTypeToFileType } from './fileStatus';
import * as nls from 'vscode-nls';

const localize = nls.loadMessageBundle();

/**
 * Model for storing the state of a specified file/folder in HDFS
 */
export class HdfsModel {

    private readonly _onPermissionStatusUpdated = new vscode.EventEmitter<PermissionStatus>();
    /**
     * Event that's fired anytime changes are made by the model to the @see PermissionStatus
     */
    public onPermissionStatusUpdated = this._onPermissionStatusUpdated.event;

    /**
     * The @see PermissionStatus of the file/folder
     */
    public permissionStatus: PermissionStatus;

    /**
     * The @see FileStatus of the file/folder
     */
    public fileStatus: FileStatus;

    constructor(private readonly fileSource: IFileSource, private readonly path: string) {
        this.refresh().catch(err => console.error('Error refreshing HDFS Model ', err));
    }

    /**
     * Refresh the ACL status with the current values on HDFS
     */
    public async refresh(): Promise<void> {
        [this.permissionStatus, this.fileStatus] = await Promise.all([
            this.fileSource.getAclStatus(this.path),
            this.fileSource.getFileStatus(this.path)]);
        this._onPermissionStatusUpdated.fire(this.permissionStatus);
    }

    /**
     * Creates a new ACL Entry and adds it to the list of current entries. Will do nothing
     * if a duplicate entry (@see AclEntry.isEqual) exists
     * @param name The name of the ACL Entry
     * @param type The type of ACL to create
     */
    public createAndAddAclEntry(name: string, type: AclType): void {
        if (!this.permissionStatus || !name || name.length < 1) {
            return;
        }
        const newEntry = new AclEntry(type, name, name);
        newEntry.addPermission(AclEntryScope.access, new AclEntryPermission(true, true, true));
        // Don't add duplicates. This also checks the owner, group and other items
        if ([this.permissionStatus.owner, this.permissionStatus.group, this.permissionStatus.other].concat(this.permissionStatus.aclEntries).find(entry => entry.isEqual(newEntry))) {
            return;
        }

        this.permissionStatus.aclEntries.push(newEntry);
        this._onPermissionStatusUpdated.fire(this.permissionStatus);
    }

    /**
     * Deletes the specified entry from the list of registered ACL entries
     * @param entryToDelete The entry to delete
     */
    public deleteAclEntry(entryToDelete: AclEntry): void {
        this.permissionStatus.aclEntries = this.permissionStatus.aclEntries.filter(entry => !entry.isEqual(entryToDelete));
        this._onPermissionStatusUpdated.fire(this.permissionStatus);
    }

    /**
     * Applies the changes made to this model to HDFS. Note that this will overwrite ALL permissions, so any
     * permissions that shouldn't change need to still exist and have the same values.
     * @param recursive Whether to apply the changes recursively (to all sub-folders and files)
     */
    public async apply(recursive: boolean = false): Promise<void> {
        await this.applyAclChanges(this.path, hdfsFileTypeToFileType(this.fileStatus ? this.fileStatus.type : undefined));
        if (recursive) {
            azdata.tasks.startBackgroundOperation(
                {
                    connection: undefined,
                    displayName: localize('mssql.recursivePermissionOpStarted', "Applying permission changes recursively under '{0}'", this.path),
                    description: '',
                    isCancelable: false,
                    operation: async op => {
                        await this.applyToChildrenRecursive(op, this.path);
                        op.updateStatus(azdata.TaskStatus.Succeeded, localize('mssql.recursivePermissionOpSucceeded', "Permission changes applied successfully."));
                    }
                }
            );
        }
    }

    /**
     * Recursive call to apply the current set of changes to all children of this path (if any)
     * @param op Background operation used to track status of the task
     * @param path The path
     */
    private async applyToChildrenRecursive(op: azdata.BackgroundOperation, path: string): Promise<void> {
        try {
            op.updateStatus(azdata.TaskStatus.InProgress, localize('mssql.recursivePermissionOpProgress', "Applying permission changes to '{0}'.", path));
            const files = await this.fileSource.enumerateFiles(path, true);
            // Apply changes to all children of this path and then recursively apply to children of any directories
            await Promise.all(
                files.map(file => this.applyAclChanges(file.path, file.fileType)).concat(
                    files.filter(f => f.fileType === FileType.Directory).map(d => this.applyToChildrenRecursive(op, d.path)))
            );
        } catch (error) {
            const errMsg = localize('mssql.recursivePermissionOpError', "Error applying permission changes: {0}", (error instanceof Error ? error.message : error));
            void vscode.window.showErrorMessage(errMsg);
            op.updateStatus(azdata.TaskStatus.Failed, errMsg);
        }
    }

    /**
     * Applies the current set of Permissions/ACLs to the specified path
     * @param path The path to apply the changes to
     * @param fileType The type of the file at the path, if known
     */
    private async applyAclChanges(path: string, fileType: FileType | undefined): Promise<any> {
        // HDFS won't remove existing default ACLs even if you call setAcl with no default ACLs specified. You
        // need to call removeDefaultAcl specifically to remove them.
        if (!this.permissionStatus.owner.getPermission(AclEntryScope.default) &&
            !this.permissionStatus.group.getPermission(AclEntryScope.default) &&
            !this.permissionStatus.other.getPermission(AclEntryScope.default)) {
            await this.fileSource.removeDefaultAcl(path);
        }
        return Promise.all([
            this.fileSource.setAcl(path, fileType, this.permissionStatus),
            this.fileSource.setPermission(path, this.permissionStatus)]);
    }
}
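
// --- Illustrative sketch (added; not part of the original diff) ---
// A rough outline of the model's lifecycle: construct it over a file source and path,
// react to permission updates, stage a change, then push everything back to HDFS.
// `fileSource` is an assumed IFileSource implementation (e.g. WebHDFS-backed).
async function hdfsModelUsageExample(fileSource: IFileSource): Promise<void> {
    const model = new HdfsModel(fileSource, '/data/example');
    // The constructor kicks off an async refresh; wait for the first status update
    // before staging changes, since permissionStatus is unset until then.
    const sub = model.onPermissionStatusUpdated(async status => {
        sub.dispose(); // one-shot: react only to the initial load
        console.log(`Current permission octal: ${status.permissionOctal}`);
        model.createAndAddAclEntry('bob', AclType.user); // rwx at the access scope
        await model.apply(false); // apply(true) would run the recursive background op
    });
}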

@@ -1,19 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

/**
 * Information about an HDFS mount to a remote directory
 */
export interface Mount {
    mountPath: string;
    mountStatus: string;
    remotePath: string;
}

export enum MountStatus {
    None = 0,
    Mount = 1,
    Mount_Child = 2
}

@@ -1,641 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as vscode from 'vscode';
import { HdfsModel } from '../hdfsModel';
import { IFileSource } from '../../objectExplorerNodeProvider/fileSources';
import { PermissionStatus, AclEntry, AclType, getImageForType, AclEntryScope, AclEntryPermission, PermissionType } from '../../hdfs/aclEntry';
import { cssStyles } from './uiConstants';
import * as loc from '../../localizedConstants';
import { HdfsError } from '../webhdfs';
import { IconPathHelper } from '../../iconHelper';
import { HdfsFileType } from '../fileStatus';

const permissionsTypeIconColumnWidth = 35;
const permissionsDeleteColumnWidth = 50;

const permissionsCheckboxColumnWidth = 50;

const permissionsRowHeight = 35;
const locationLabelHeight = 23; // Fits the text size without too much white space

const checkboxSize = 20;

type PermissionCheckboxesMapping = {
    model: AclEntry,
    access: { read: azdata.CheckBoxComponent, write: azdata.CheckBoxComponent, execute: azdata.CheckBoxComponent },
    default: { read: azdata.CheckBoxComponent, write: azdata.CheckBoxComponent, execute: azdata.CheckBoxComponent }
};

export class ManageAccessDialog {

    private hdfsModel: HdfsModel;
    private viewInitialized: boolean = false;
    private modelInitialized: boolean = false;
    private modelBuilder: azdata.ModelBuilder;
    private rootContainer: azdata.FlexContainer;
    private rootLoadingComponent: azdata.LoadingComponent;
    private stickyCheckbox: azdata.CheckBoxComponent;
    private inheritDefaultsCheckbox: azdata.CheckBoxComponent;
    private posixPermissionsContainer: azdata.FlexContainer;
    private namedUsersAndGroupsPermissionsContainer: azdata.FlexContainer;
    private addUserOrGroupInput: azdata.InputBoxComponent;
    private dialog: azdata.window.Dialog;
    private applyRecursivelyButton: azdata.window.Button;
    private posixPermissionCheckboxesMapping: PermissionCheckboxesMapping[] = [];
    private namedSectionInheritCheckboxes: azdata.CheckBoxComponent[] = [];
    private addUserOrGroupSelectedType: AclType;
    private onViewInitializedEvent: vscode.EventEmitter<void> = new vscode.EventEmitter();

    constructor(private hdfsPath: string, private fileSource: IFileSource) {
        this.hdfsModel = new HdfsModel(this.fileSource, this.hdfsPath);
        this.hdfsModel.onPermissionStatusUpdated(permissionStatus => this.handlePermissionStatusUpdated(permissionStatus));
    }

    public openDialog(): void {
        if (!this.dialog) {
            this.dialog = azdata.window.createModelViewDialog(loc.manageAccessTitle, 'HdfsManageAccess', true);
            this.dialog.okButton.label = loc.applyText;

            this.applyRecursivelyButton = azdata.window.createButton(loc.applyRecursivelyText);
            this.applyRecursivelyButton.onClick(async () => {
                try {
                    azdata.window.closeDialog(this.dialog);
                    await this.hdfsModel.apply(true);
                } catch (err) {
                    void vscode.window.showErrorMessage(loc.errorApplyingAclChanges(err instanceof HdfsError ? err.message : err));
                }
            });
            this.dialog.customButtons = [this.applyRecursivelyButton];
            this.dialog.registerCloseValidator(async (): Promise<boolean> => {
                try {
                    await this.hdfsModel.apply();
                    return true;
                } catch (err) {
                    void vscode.window.showErrorMessage(loc.errorApplyingAclChanges(err instanceof HdfsError ? err.message : err));
                }
                return false;
            });
            const tab = azdata.window.createTab(loc.manageAccessTitle);
            tab.registerContent(async (modelView: azdata.ModelView) => {
                this.modelBuilder = modelView.modelBuilder;

                this.rootContainer = modelView.modelBuilder.flexContainer()
                    .withLayout({ flexFlow: 'column', width: '100%', height: '100%' })
                    .component();

                this.rootLoadingComponent = modelView.modelBuilder.loadingComponent().withItem(this.rootContainer).component();

                await modelView.initializeModel(this.rootLoadingComponent);
                this.modelInitialized = true;
                this.handlePermissionStatusUpdated(this.hdfsModel.permissionStatus);
            });
            this.dialog.content = [tab];
        }

        this.applyRecursivelyButton.hidden = true; // Always hide the button until we get the status back saying whether this is a directory or not
        azdata.window.openDialog(this.dialog);
    }

    private initializeView(permissionStatus: PermissionStatus): void {
        // We nest the content inside another container for the margins - getting them on the root container isn't supported
        const contentContainer = this.modelBuilder.flexContainer()
            .withLayout({ flexFlow: 'column', width: '100%', height: '100%' })
            .component();
        this.rootContainer.addItem(contentContainer, { CSSStyles: { 'margin-left': '20px', 'margin-right': '20px' } });

        const locationContainer = this.modelBuilder.flexContainer().withLayout({ flexFlow: 'row', alignItems: 'center' }).component();

        const locationLabel = this.modelBuilder.text()
            .withProps({
                value: loc.locationTitle,
                CSSStyles: { ...cssStyles.titleCss }
            }).component();

        const pathLabel = this.modelBuilder.text()
            .withProps({
                value: this.hdfsPath,
                title: this.hdfsPath,
                height: locationLabelHeight,
                CSSStyles: { 'user-select': 'text', 'overflow': 'hidden', 'text-overflow': 'ellipsis', ...cssStyles.titleCss }
            }).component();

        locationContainer.addItem(locationLabel,
            {
                flex: '0 0 auto',
                CSSStyles: { 'margin-bottom': '5px' }
            });
        locationContainer.addItem(pathLabel,
            {
                flex: '1 1 auto',
                CSSStyles: { 'border': '1px solid #ccc', 'padding': '5px', 'margin-left': '10px', 'min-height': `${locationLabelHeight}px` }
            });

        contentContainer.addItem(locationContainer, { flex: '0 0 auto', CSSStyles: { 'margin-top': '20px' } });

        // =====================
        // = Permissions Title =
        // =====================
        const permissionsTitle = this.modelBuilder.text()
            .withProps({ value: loc.permissionsHeader })
            .component();
        contentContainer.addItem(permissionsTitle, { CSSStyles: { 'margin-top': '15px', ...cssStyles.titleCss } });

        // ====================
        // = Inherit Defaults =
        // ====================

        // Defaults are only settable for directories
        if (this.hdfsModel.fileStatus.type === HdfsFileType.Directory) {
            contentContainer.addItem(this.createInheritDefaultsCheckbox());
        }

        // ==========
        // = Sticky =
        // ==========
        this.stickyCheckbox = this.modelBuilder.checkBox()
            .withProps({
                width: checkboxSize,
                height: checkboxSize,
                checked: permissionStatus.stickyBit,
                label: loc.stickyLabel
            }).component();
        this.stickyCheckbox.onChanged(() => {
            this.hdfsModel.permissionStatus.stickyBit = this.stickyCheckbox.checked;
        });
        contentContainer.addItem(this.stickyCheckbox);

        // =============================
        // = POSIX permissions section =
        // =============================

        const posixPermissionsSectionHeaderRow = this.createPermissionsSectionHeaderRow(0, 0);
        contentContainer.addItem(posixPermissionsSectionHeaderRow, { CSSStyles: { ...cssStyles.tableHeaderLayoutCss } });

        this.posixPermissionsContainer = this.modelBuilder.flexContainer().withLayout({ flexFlow: 'column' }).component();
        contentContainer.addItem(this.posixPermissionsContainer, { flex: '0 0 auto', CSSStyles: { 'margin-bottom': '20px' } });

        // ===========================
        // = Add User Or Group Input =
        // ===========================

        const addUserOrGroupTitle = this.modelBuilder.text()
            .withProps({ value: loc.addUserOrGroupHeader, CSSStyles: { 'margin-block-start': '0px', 'margin-block-end': '10px' } })
            .component();
        contentContainer.addItem(addUserOrGroupTitle, { CSSStyles: { 'margin-top': '15px', ...cssStyles.titleCss } });

        const typeContainer = this.modelBuilder.flexContainer().component();
        const aclEntryTypeGroup = 'aclEntryType';
        const userTypeButton = this.createRadioButton(this.modelBuilder, loc.userLabel, aclEntryTypeGroup, AclType.user);
        const groupTypeButton = this.createRadioButton(this.modelBuilder, loc.groupLabel, aclEntryTypeGroup, AclType.group);
        userTypeButton.checked = true;
        this.addUserOrGroupSelectedType = AclType.user;

        typeContainer.addItems([userTypeButton, groupTypeButton], { flex: '0 0 auto' });
        contentContainer.addItem(typeContainer, { flex: '0 0 auto', CSSStyles: { 'margin-bottom': '5px' } });
        const addUserOrGroupInputRow = this.modelBuilder.flexContainer().component();

        this.addUserOrGroupInput = this.modelBuilder.inputBox()
            .withProps({
                inputType: 'text',
                placeHolder: loc.enterNamePlaceholder,
                width: 250,
                stopEnterPropagation: true
            })
            .component();
        this.addUserOrGroupInput.onEnterKeyPressed((value: string) => {
            this.hdfsModel.createAndAddAclEntry(value, this.addUserOrGroupSelectedType);
            this.addUserOrGroupInput.value = '';
        });
        const addUserOrGroupButton = this.modelBuilder.button().withProps({
            label: loc.addLabel,
            width: 75,
            secondary: true
        }).component();
        addUserOrGroupButton.onDidClick(() => {
            this.hdfsModel.createAndAddAclEntry(this.addUserOrGroupInput.value, this.addUserOrGroupSelectedType);
            this.addUserOrGroupInput.value = '';
        });
        addUserOrGroupButton.enabled = false; // Init to disabled since we don't have any name entered yet
        this.addUserOrGroupInput.onTextChanged(() => {
            addUserOrGroupButton.enabled = this.addUserOrGroupInput.value !== '';
        });

        addUserOrGroupInputRow.addItem(this.addUserOrGroupInput, { flex: '0 0 auto' });
        addUserOrGroupInputRow.addItem(addUserOrGroupButton, { flex: '0 0 auto', CSSStyles: { 'margin-left': '20px' } });

        contentContainer.addItem(addUserOrGroupInputRow, { flex: '0 0 auto', CSSStyles: { 'margin-bottom': '20px' } });

        // =================================================
        // = Named Users and Groups permissions header row =
        // =================================================

        const namedUsersAndGroupsSectionsHeaderRow = this.createPermissionsSectionHeaderRow(permissionsDeleteColumnWidth, permissionsCheckboxColumnWidth);
        contentContainer.addItem(namedUsersAndGroupsSectionsHeaderRow, { CSSStyles: { ...cssStyles.tableHeaderLayoutCss } });

        this.namedUsersAndGroupsPermissionsContainer = this.modelBuilder.flexContainer()
            .withLayout({ flexFlow: 'column' })
            .component();
        contentContainer.addItem(this.namedUsersAndGroupsPermissionsContainer, { flex: '1', CSSStyles: { 'overflow': 'scroll', 'min-height': '200px' } });
        this.viewInitialized = true;
        this.onViewInitializedEvent.fire();
    }

    private handlePermissionStatusUpdated(permissionStatus: PermissionStatus): void {
        if (!permissionStatus || !this.modelInitialized) {
            return;
        }

        // If this is the first time through, go ahead and create the UI components now that we have a model to use
        if (!this.viewInitialized) {
            this.initializeView(permissionStatus);
        }

        this.eventuallyRunOnInitialized(() => {
            this.stickyCheckbox.checked = permissionStatus.stickyBit;
            if (this.hdfsModel.fileStatus.type === HdfsFileType.Directory) {
                this.inheritDefaultsCheckbox.checked =
                    !permissionStatus.owner.getPermission(AclEntryScope.default) &&
                    !permissionStatus.group.getPermission(AclEntryScope.default) &&
                    !permissionStatus.other.getPermission(AclEntryScope.default);
            }

            this.applyRecursivelyButton.hidden = this.hdfsModel.fileStatus.type !== HdfsFileType.Directory;

            this.posixPermissionsContainer.clearItems();

            const posixPermissionData = [permissionStatus.owner, permissionStatus.group, permissionStatus.other].map(aclEntry => {
                return this.createPermissionsTableRow(aclEntry, false/*includeDelete*/, false/*includeInherit*/);
            });

            const posixPermissionsNamesColumnWidth = 800 + (this.hdfsModel.fileStatus.type === HdfsFileType.Directory ? 0 : permissionsCheckboxColumnWidth * 3);
            const namedUsersAndGroupsPermissionsNamesColumnWidth = 700 + (this.hdfsModel.fileStatus.type === HdfsFileType.Directory ? 0 : permissionsCheckboxColumnWidth * 3);

            // Default set of columns that are always shown
            let posixPermissionsColumns = [
                this.createTableColumn('', loc.userOrGroupIcon, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                this.createTableColumn('', loc.defaultUserAndGroups, posixPermissionsNamesColumnWidth, azdata.DeclarativeDataType.string),
                this.createTableColumn(loc.readHeader, `${loc.accessHeader} ${loc.readHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                this.createTableColumn(loc.writeHeader, `${loc.accessHeader} ${loc.writeHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                this.createTableColumn(loc.executeHeader, `${loc.accessHeader} ${loc.executeHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component)];
            let namedUsersAndGroupsColumns = [
                this.createTableColumn('', loc.userOrGroupIcon, 50, azdata.DeclarativeDataType.component),
                this.createTableColumn(loc.namedUsersAndGroupsHeader, loc.namedUsersAndGroupsHeader, namedUsersAndGroupsPermissionsNamesColumnWidth, azdata.DeclarativeDataType.string),
                this.createTableColumn(loc.readHeader, `${loc.accessHeader} ${loc.readHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                this.createTableColumn(loc.writeHeader, `${loc.accessHeader} ${loc.writeHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                this.createTableColumn(loc.executeHeader, `${loc.accessHeader} ${loc.executeHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component)];

            // Additional columns that are only shown for directories
            if (this.hdfsModel.fileStatus.type === HdfsFileType.Directory) {
                posixPermissionsColumns = posixPermissionsColumns.concat([
                    this.createTableColumn(loc.readHeader, `${loc.defaultHeader} ${loc.readHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                    this.createTableColumn(loc.writeHeader, `${loc.defaultHeader} ${loc.writeHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                    this.createTableColumn(loc.executeHeader, `${loc.defaultHeader} ${loc.executeHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component)
                ]);
                namedUsersAndGroupsColumns = namedUsersAndGroupsColumns.concat([
                    this.createTableColumn(loc.inheritDefaultsLabel, loc.inheritDefaultsLabel, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                    this.createTableColumn(loc.readHeader, `${loc.defaultHeader} ${loc.readHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                    this.createTableColumn(loc.writeHeader, `${loc.defaultHeader} ${loc.writeHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                    this.createTableColumn(loc.executeHeader, `${loc.defaultHeader} ${loc.executeHeader}`, permissionsCheckboxColumnWidth, azdata.DeclarativeDataType.component),
                ]);
            }
            namedUsersAndGroupsColumns.push(this.createTableColumn('', loc.deleteTitle, permissionsDeleteColumnWidth, azdata.DeclarativeDataType.component));

            const posixPermissionsTable = this.modelBuilder.declarativeTable()
                .withProps(
                    {
                        columns: posixPermissionsColumns,
                        data: posixPermissionData
                    }).component();

            this.posixPermissionsContainer.addItem(posixPermissionsTable, { CSSStyles: { 'margin-right': '12px' } });

            this.namedUsersAndGroupsPermissionsContainer.clearItems();

            const namedUsersAndGroupsData = permissionStatus.aclEntries.map(aclEntry => {
                return this.createPermissionsTableRow(aclEntry, true/*includeDelete*/, this.hdfsModel.fileStatus.type === HdfsFileType.Directory/*includeInherit*/);
            });

            const namedUsersAndGroupsTable = this.modelBuilder.declarativeTable()
                .withProps(
                    {
                        columns: namedUsersAndGroupsColumns,
                        data: namedUsersAndGroupsData
                    }).component();

            this.namedUsersAndGroupsPermissionsContainer.addItem(namedUsersAndGroupsTable);

            this.rootLoadingComponent.loading = false;

            void this.addUserOrGroupInput.focus();
        });
    }

    private createRadioButton(modelBuilder: azdata.ModelBuilder, label: string, name: string, aclEntryType: AclType): azdata.RadioButtonComponent {
        const button = modelBuilder.radioButton().withProps({ label: label, name: name }).component();
        button.onDidClick(() => {
            this.addUserOrGroupSelectedType = aclEntryType;
        });
        return button;
    }

    private createTableColumn(header: string, ariaLabel: string, width: number, type: azdata.DeclarativeDataType): azdata.DeclarativeTableColumn {
        return {
            displayName: header,
            ariaLabel: ariaLabel,
            valueType: type,
            isReadOnly: true,
            width: width,
            headerCssStyles: {
                'border': 'none',
                'padding': '0px',
                ...cssStyles.permissionsTableHeaderCss
            },
            rowCssStyles: {
                'border-top': 'solid 1px #ccc',
                'border-bottom': 'solid 1px #ccc',
                'border-left': 'none',
                'border-right': 'none',
                'padding': '0px'
            },
        };
    }

    private createImageComponent(type: AclType | PermissionType): azdata.ImageComponent {
        const imageProperties = getImageForType(type);
        return this.modelBuilder.image()
            .withProps({
                iconPath: imageProperties.iconPath,
                width: permissionsTypeIconColumnWidth,
                height: permissionsRowHeight,
                iconWidth: 20,
                iconHeight: 20,
                title: imageProperties.title
            }).component();
    }

    private createPermissionsTableRow(aclEntry: AclEntry, includeDelete: boolean, includeInherit: boolean): any[] {
        // Access Read
        const accessReadComponents = createCheckbox(this.modelBuilder, aclEntry.getPermission(AclEntryScope.access).read, true, permissionsCheckboxColumnWidth, permissionsRowHeight, `${loc.accessHeader} ${loc.readHeader}`);
        accessReadComponents.checkbox.onChanged(() => {
            aclEntry.getPermission(AclEntryScope.access).read = accessReadComponents.checkbox.checked;
        });

        // Access Write
        const accessWriteComponents = createCheckbox(this.modelBuilder, aclEntry.getPermission(AclEntryScope.access).write, true, permissionsCheckboxColumnWidth, permissionsRowHeight, `${loc.accessHeader} ${loc.writeHeader}`);
        accessWriteComponents.checkbox.onChanged(() => {
            aclEntry.getPermission(AclEntryScope.access).write = accessWriteComponents.checkbox.checked;
        });

        // Access Execute
        const accessExecuteComponents = createCheckbox(this.modelBuilder, aclEntry.getPermission(AclEntryScope.access).execute, true, permissionsCheckboxColumnWidth, permissionsRowHeight, `${loc.accessHeader} ${loc.executeHeader}`);
        accessExecuteComponents.checkbox.onChanged(() => {
            aclEntry.getPermission(AclEntryScope.access).execute = accessExecuteComponents.checkbox.checked;
        });

        const permissionsCheckboxesMapping: PermissionCheckboxesMapping = {
            model: aclEntry,
            access: { read: accessReadComponents.checkbox, write: accessWriteComponents.checkbox, execute: accessExecuteComponents.checkbox },
            default: { read: undefined, write: undefined, execute: undefined }
        };

        let row = [
            this.createImageComponent(aclEntry.type),
            aclEntry.displayName,
            accessReadComponents.container,
            accessWriteComponents.container,
            accessExecuteComponents.container
        ];

        // Default permissions can only be set on directories
        if (this.hdfsModel.fileStatus.type === HdfsFileType.Directory) {
            const defaultPermission = aclEntry.getPermission(AclEntryScope.default);

            // Default Read
            const defaultReadCheckboxComponents = createCheckbox(this.modelBuilder, defaultPermission && defaultPermission.read, !!defaultPermission, permissionsCheckboxColumnWidth, permissionsRowHeight, `${loc.defaultHeader} ${loc.readHeader}`);
            defaultReadCheckboxComponents.checkbox.onChanged(() => {
                aclEntry.getPermission(AclEntryScope.default).read = defaultReadCheckboxComponents.checkbox.checked;
            });

            // Default Write
            const defaultWriteCheckboxComponents = createCheckbox(this.modelBuilder, defaultPermission && defaultPermission.write, !!defaultPermission, permissionsCheckboxColumnWidth, permissionsRowHeight, `${loc.defaultHeader} ${loc.writeHeader}`);
            defaultWriteCheckboxComponents.checkbox.onChanged(() => {
                aclEntry.getPermission(AclEntryScope.default).write = defaultWriteCheckboxComponents.checkbox.checked;
            });

            // Default Execute
            const defaultExecuteCheckboxComponents = createCheckbox(this.modelBuilder, defaultPermission && defaultPermission.execute, !!defaultPermission, permissionsCheckboxColumnWidth, permissionsRowHeight, `${loc.defaultHeader} ${loc.executeHeader}`);
            defaultExecuteCheckboxComponents.checkbox.onChanged(() => {
                aclEntry.getPermission(AclEntryScope.default).execute = defaultExecuteCheckboxComponents.checkbox.checked;
            });

            permissionsCheckboxesMapping.default = { read: defaultReadCheckboxComponents.checkbox, write: defaultWriteCheckboxComponents.checkbox, execute: defaultExecuteCheckboxComponents.checkbox };

            if (includeInherit) {
                const inheritCheckboxComponents = createCheckbox(this.modelBuilder, !defaultPermission, !this.inheritDefaultsCheckbox.checked, permissionsCheckboxColumnWidth, permissionsRowHeight, loc.inheritDefaultsLabel);
                inheritCheckboxComponents.checkbox.onChanged(() => {
                    defaultReadCheckboxComponents.checkbox.enabled = !inheritCheckboxComponents.checkbox.checked;
                    defaultWriteCheckboxComponents.checkbox.enabled = !inheritCheckboxComponents.checkbox.checked;
                    defaultExecuteCheckboxComponents.checkbox.enabled = !inheritCheckboxComponents.checkbox.checked;
                    if (inheritCheckboxComponents.checkbox.checked) {
|
||||
aclEntry.removePermission(AclEntryScope.default);
|
||||
defaultReadCheckboxComponents.checkbox.checked = false;
|
||||
defaultWriteCheckboxComponents.checkbox.checked = false;
|
||||
defaultExecuteCheckboxComponents.checkbox.checked = false;
|
||||
} else {
|
||||
// Default to the access settings - this is what HDFS does if you don't
|
||||
// specify the complete set of default ACLs for owner, owning group and other
|
||||
const accessRead = accessReadComponents.checkbox.checked;
|
||||
const accessWrite = accessWriteComponents.checkbox.checked;
|
||||
const accessExecute = accessExecuteComponents.checkbox.checked;
|
||||
defaultReadCheckboxComponents.checkbox.checked = accessRead;
|
||||
defaultWriteCheckboxComponents.checkbox.checked = accessWrite;
|
||||
defaultExecuteCheckboxComponents.checkbox.checked = accessExecute;
|
||||
aclEntry.addPermission(AclEntryScope.default,
|
||||
new AclEntryPermission(accessRead, accessWrite, accessExecute));
|
||||
}
|
||||
});
|
||||
this.namedSectionInheritCheckboxes.push(inheritCheckboxComponents.checkbox);
|
||||
row.push(inheritCheckboxComponents.container);
|
||||
}
|
||||
|
||||
this.posixPermissionCheckboxesMapping.push(permissionsCheckboxesMapping);
|
||||
|
||||
row = row.concat([
|
||||
defaultReadCheckboxComponents.container,
|
||||
defaultWriteCheckboxComponents.container,
|
||||
defaultExecuteCheckboxComponents.container
|
||||
]);
|
||||
}
|
||||
|
||||
if (includeDelete) {
|
||||
const deleteButton = this.modelBuilder.button()
|
||||
.withProps(
|
||||
{
|
||||
label: '',
|
||||
title: loc.deleteTitle,
|
||||
iconPath: IconPathHelper.delete,
|
||||
width: 20,
|
||||
height: 20
|
||||
})
|
||||
.component();
|
||||
deleteButton.onDidClick(() => { this.hdfsModel.deleteAclEntry(aclEntry); });
|
||||
row.push(deleteButton);
|
||||
}
|
||||
|
||||
return row;
|
||||
}
|
||||
|
||||
private createInheritDefaultsCheckbox(): azdata.CheckBoxComponent {
|
||||
this.inheritDefaultsCheckbox = this.modelBuilder.checkBox()
|
||||
.withProps({
|
||||
width: checkboxSize,
|
||||
height: checkboxSize,
|
||||
checked: false, // Will be set when we get the model update
|
||||
label: loc.inheritDefaultsLabel
|
||||
})
|
||||
.component();
|
||||
|
||||
this.inheritDefaultsCheckbox.onChanged(() => {
|
||||
if (this.inheritDefaultsCheckbox.checked) {
|
||||
this.namedSectionInheritCheckboxes.forEach(c => {
|
||||
c.enabled = false;
|
||||
c.checked = true;
|
||||
});
|
||||
} else {
|
||||
this.namedSectionInheritCheckboxes.forEach(c => {
|
||||
c.enabled = true;
|
||||
c.checked = false;
|
||||
});
|
||||
}
|
||||
// Go through each of the rows for owner/owning group/other and update
|
||||
// their checkboxes based on the new value of the inherit checkbox
|
||||
this.posixPermissionCheckboxesMapping.forEach(m => {
|
||||
m.default.read.enabled = !this.inheritDefaultsCheckbox.checked;
|
||||
m.default.write.enabled = !this.inheritDefaultsCheckbox.checked;
|
||||
m.default.execute.enabled = !this.inheritDefaultsCheckbox.checked;
|
||||
if (this.inheritDefaultsCheckbox.checked) {
|
||||
m.model.removePermission(AclEntryScope.default);
|
||||
m.default.read.checked = false;
|
||||
m.default.write.checked = false;
|
||||
m.default.execute.checked = false;
|
||||
} else {
|
||||
// Default to the access settings - this is what HDFS does if you don't
|
||||
// specify the complete set of default ACLs for owner, owning group and other
|
||||
const accessRead = m.access.read.checked;
|
||||
const accessWrite = m.access.write.checked;
|
||||
const accessExecute = m.access.execute.checked;
|
||||
m.default.read.checked = accessRead;
|
||||
m.default.write.checked = accessWrite;
|
||||
m.default.execute.checked = accessExecute;
|
||||
m.model.addPermission(AclEntryScope.default, new AclEntryPermission(accessRead, accessWrite, accessExecute));
|
||||
}
|
||||
});
|
||||
});
|
||||
return this.inheritDefaultsCheckbox;
|
||||
}
|
||||
	/**
	 * Creates the header row for the permissions tables. This contains headers for the name and read/write/execute for the
	 * access section. If the path is for a directory then a default section is included for specifying default permissions.
	 * @param rightSpacerWidth The amount of space to include on the right to correctly align the headers with the table sections
	 * @param middleSpacerWidth The amount of space to include between the text to correctly align the headers with the table sections
	 */
	private createPermissionsSectionHeaderRow(rightSpacerWidth: number, middleSpacerWidth: number): azdata.FlexContainer {
		// Section Headers
		const sectionHeaderContainer = this.modelBuilder.flexContainer().withLayout({ flexFlow: 'row', justifyContent: 'flex-end' }).component();

		// Access
		const accessSectionHeader = this.modelBuilder.text()
			.withProps({
				value: loc.accessHeader,
				ariaHidden: true,
				CSSStyles: {
					// This covers 3 checkbox columns
					'width': `${permissionsCheckboxColumnWidth * 3}px`,
					'min-width': `${permissionsCheckboxColumnWidth * 3}px`,
					...cssStyles.permissionsTableHeaderCss
				}
			})
			.component();
		sectionHeaderContainer.addItem(accessSectionHeader, { flex: '0 0 auto' });

		// Only show default section for directories
		if (this.hdfsModel.fileStatus.type === HdfsFileType.Directory) {
			// Middle spacer
			const middleSpacer = this.modelBuilder.text().withProps({ CSSStyles: { 'width': `${middleSpacerWidth}px`, 'min-width': `${middleSpacerWidth}px` } }).component();
			sectionHeaderContainer.addItem(middleSpacer, { flex: '0 0 auto' });

			// Default
			const defaultSectionHeader = this.modelBuilder.text()
				.withProps({
					value: loc.defaultHeader,
					ariaHidden: true,
					CSSStyles: {
						// This covers 3 checkbox columns
						'width': `${permissionsCheckboxColumnWidth * 3}px`,
						'min-width': `${permissionsCheckboxColumnWidth * 3}px`,
						...cssStyles.permissionsTableHeaderCss
					}
				})
				.component();
			sectionHeaderContainer.addItem(defaultSectionHeader, { flex: '0 0 auto' });
		}

		// Right spacer
		const rightSpacer = this.modelBuilder.text().withProps({ CSSStyles: { 'width': `${rightSpacerWidth}px`, 'min-width': `${rightSpacerWidth}px` } }).component();
		sectionHeaderContainer.addItem(rightSpacer, { flex: '0 0 auto' });

		return sectionHeaderContainer;
	}

	/**
	 * Runs the specified action when the component is initialized. If already initialized just runs
	 * the action immediately.
	 * @param action The action to be run when the page is initialized
	 */
	protected eventuallyRunOnInitialized(action: () => void): void {
		if (!this.viewInitialized) {
			this.onViewInitializedEvent.event(() => {
				try {
					action();
				} catch (error) {
					console.error(`Unexpected error running onInitialized action for Manage Access dialog : ${error}`);
				}
			});
		} else {
			action();
		}
	}
}

/**
 * Creates a checkbox to be hosted inside of a table cell
 * @param builder The ModelBuilder used to create the components
 * @param checked Whether the checkbox is initially checked or not
 * @param enabled Whether the checkbox is initially enabled or not
 * @param containerWidth The width of the container holding the checkbox
 * @param containerHeight The height of the container holding the checkbox
 * @param ariaLabel The aria label to apply to the checkbox
 */
function createCheckbox(builder: azdata.ModelBuilder, checked: boolean, enabled: boolean, containerWidth: number, containerHeight: number, ariaLabel: string): { container: azdata.FlexContainer, checkbox: azdata.CheckBoxComponent } {
	const checkbox = builder.checkBox()
		.withProps({
			checked: checked,
			enabled: enabled,
			height: checkboxSize,
			width: checkboxSize,
			ariaLabel: ariaLabel
		}).component();
	const container = builder.flexContainer()
		.withLayout({ width: containerWidth, height: containerHeight })
		.component();
	container.addItem(checkbox, { CSSStyles: { ...cssStyles.permissionCheckboxCss } });
	return {
		container: container,
		checkbox: checkbox
	};
}
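Editor's note: a minimal usage sketch of the createCheckbox helper above, not part of the original source. It assumes an azdata.ModelBuilder named modelBuilder is in scope, and the width/height literals merely stand in for the permissionsCheckboxColumnWidth and permissionsRowHeight constants used elsewhere in this file.

// Hypothetical usage of createCheckbox (illustrative values only):
const readCell = createCheckbox(modelBuilder, false /*checked*/, true /*enabled*/, 50, 35, 'Access Read');
readCell.checkbox.onChanged(() => {
	// React to the user toggling the checkbox
	console.log(`Read permission is now ${readCell.checkbox.checked}`);
});
// readCell.container is the component that would be placed into a declarative table cell.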
@@ -1,13 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

export namespace cssStyles {
	export const tableBorderCss = '1px solid #ccc';
	export const titleCss = { 'font-size': '20px', 'font-weight': '600', 'margin-block-end': '0px', 'margin-block-start': '0px' };
	export const tableHeaderCss = { 'font-weight': 'bold', 'text-transform': 'uppercase', 'font-size': '10px', 'user-select': 'text' };
	export const permissionsTableHeaderCss = { ...tableHeaderCss, 'text-align': 'center' };
	export const permissionCheckboxCss = { 'margin-top': '5px', 'margin-left': '13px' };
	export const tableHeaderLayoutCss = { 'padding-left': '10px', 'box-sizing': 'border-box', 'user-select': 'text', 'margin-right': '12px' };
}
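Editor's note: a brief, hypothetical sketch of how these shared style objects are consumed, mirroring the spread pattern used in the dialog code earlier in this diff. The text value and width are illustrative only.

// Assumes an azdata.ModelBuilder named modelBuilder is in scope:
const header = modelBuilder.text()
	.withProps({
		value: 'Permissions',
		CSSStyles: {
			'width': '150px', // per-call override
			...cssStyles.permissionsTableHeaderCss // shared bold/uppercase/centered header rules
		}
	})
	.component();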
@@ -1,995 +0,0 @@
// This code is originally from https://github.com/harrisiirak/webhdfs
// License: https://github.com/harrisiirak/webhdfs/blob/master/LICENSE

import * as url from 'url';
import * as fs from 'fs';
import * as querystring from 'querystring';
import * as request from 'request';
import * as BufferStreamReader from 'buffer-stream-reader';
import { Cookie } from 'tough-cookie';
import * as through from 'through2';
import * as nls from 'vscode-nls';
import * as auth from '../util/auth';
import { IHdfsOptions, IRequestParams, FileType } from '../objectExplorerNodeProvider/fileSources';
import { PermissionStatus, AclEntry, parseAclList, PermissionType, parseAclPermissionFromOctal, AclEntryScope, AclType } from './aclEntry';
import { Mount } from './mount';
import { everyoneName, ownerPostfix, owningGroupPostfix } from '../localizedConstants';
import { FileStatus, parseHdfsFileType } from './fileStatus';
import { Readable, Transform } from 'stream';

const localize = nls.loadMessageBundle();
const ErrorMessageInvalidDataStructure = localize('webhdfs.invalidDataStructure', "Invalid Data Structure");

const emitError = (instance: request.Request | Transform, err: any) => {
	const isErrorEmitted = (instance as any).errorEmitted;

	if (!isErrorEmitted) {
		instance.emit('error', err);
		instance.emit('finish');
	}

	(instance as any).errorEmitted = true;
};

export class WebHDFS {
	private _requestParams: IRequestParams;
	private _opts: IHdfsOptions;
	private _url: any;
	private _authCookie: Cookie;
	constructor(opts: IHdfsOptions, requestParams: IRequestParams) {
		if (!(this instanceof WebHDFS)) {
			return new WebHDFS(opts, requestParams);
		}

		let missingProps = ['host', 'port', 'path']
			.filter((p: keyof IHdfsOptions) => !opts.hasOwnProperty(p) || !opts[p]);
		if (missingProps && missingProps.length > 0) {
			throw new Error(localize('webhdfs.missingProperties',
				"Unable to create WebHDFS client due to missing options: {0}", missingProps.join(', ')));
		}

		this._requestParams = requestParams || {};
		this._requestParams.timeout = this._requestParams.timeout || 10000;

		this._opts = opts;
		this._url = {
			protocol: opts.protocol || 'http',
			hostname: opts.host.trim(),
			port: opts.port || 80,
			pathname: opts.path
		};
	}

	private checkArgDefined(argName: string, argValue: any): void {
		if (!argValue) {
			throw new Error(localize('webhdfs.undefinedArgument', "'{0}' is undefined.", argName));
		}
	}

	/**
	 * Generate WebHDFS REST API endpoint URL for given operation
	 *
	 * @param operation WebHDFS operation name
	 * @returns WebHDFS REST API endpoint URL
	 */
	private getOperationEndpoint(operation: string, path: string, params?: object): string {
		let endpoint = this._url;
		endpoint.pathname = encodeURI(this._opts.path + path);
		let searchOpts = Object.assign(
			{ 'op': operation },
			this._opts.user ? { 'user.name': this._opts.user } : {},
			params || {}
		);
		endpoint.search = querystring.stringify(searchOpts);
		return url.format(endpoint);
	}
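
	// Editor's note (illustrative, not part of the original source): for a client configured
	// with host 'namenode', port 50070, path '/webhdfs/v1' and user 'hadoop', a call such as
	// getOperationEndpoint('liststatus', '/tmp/data') would produce a URL of roughly this shape:
	//   http://namenode:50070/webhdfs/v1/tmp/data?op=liststatus&user.name=hadoop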

	/**
	 * Gets localized status message for given status code
	 *
	 * @param statusCode Http status code
	 * @returns status message
	 */
	private toStatusMessage(statusCode: number): string {
		let statusMessage: string = undefined;
		switch (statusCode) {
			case 400: statusMessage = localize('webhdfs.httpError400', "Bad Request"); break;
			case 401: statusMessage = localize('webhdfs.httpError401', "Unauthorized"); break;
			case 403: statusMessage = localize('webhdfs.httpError403', "Forbidden"); break;
			case 404: statusMessage = localize('webhdfs.httpError404', "Not Found"); break;
			case 500: statusMessage = localize('webhdfs.httpError500', "Internal Server Error"); break;
			// TODO: define more messages here
			default: break;
		}
		return statusMessage;
	}

	/**
	 * Gets status message from response
	 *
	 * @param response response object
	 * @returns Error message interpreted by status code
	 */
	private getStatusMessage(response: request.Response): string {
		if (!response) { return undefined; }
		let statusMessage: string = this.toStatusMessage(response.statusCode)
			|| (response && response.statusMessage);
		return statusMessage;
	}

	/**
	 * Gets remote exception message from response body
	 *
	 * @param responseBody response body
	 * @returns Error message interpreted by status code
	 */
	private getRemoteExceptionMessage(responseBody: any): string {
		if (!responseBody) { return undefined; }
		if (typeof responseBody === 'string') {
			try {
				responseBody = JSON.parse(responseBody);
			} catch { }
		}
		let remoteExceptionMessage: string = undefined;
		if (responseBody.hasOwnProperty('RemoteException')
			&& responseBody.RemoteException.hasOwnProperty('message')) {
			remoteExceptionMessage = responseBody.RemoteException.message;
		}
		return remoteExceptionMessage;
	}

	/**
	 * Generates an error message that is as descriptive as possible
	 *
	 * @param statusMessage status message
	 * @param [remoteExceptionMessage] remote exception message
	 * @param [error] error
	 * @returns error message
	 */
	private getErrorMessage(statusMessage: string, remoteExceptionMessage?: string, error?: any): string {
		statusMessage = statusMessage === '' ? undefined : statusMessage;
		remoteExceptionMessage = remoteExceptionMessage === '' ? undefined : remoteExceptionMessage;
		let messageFromError: string = error ? (error['message'] || error.toString()) : undefined;
		return statusMessage && remoteExceptionMessage ?
			`${statusMessage} (${remoteExceptionMessage})` :
			statusMessage || remoteExceptionMessage || messageFromError ||
			localize('webhdfs.unknownError', "Unknown Error");
	}

	/**
	 * Parse error state from response and return valid Error object
	 *
	 * @param response response object
	 * @param [responseBody] response body
	 * @param [error] error
	 * @returns HdfsError object
	 */
	private parseError(response: request.Response, responseBody?: any, error?: any): HdfsError {
		let statusMessage: string = this.getStatusMessage(response);
		if (!responseBody && response) {
			responseBody = response.body;
		}
		let remoteExceptionMessage: string = this.getRemoteExceptionMessage(responseBody);
		let errorMessage: string = this.getErrorMessage(statusMessage, remoteExceptionMessage, error);
		return new HdfsError(errorMessage, response && response.statusCode,
			response && response.statusMessage, remoteExceptionMessage, error);
	}

	/**
	 * Check if response is redirect
	 *
	 * @param response response object
	 * @returns if response is redirect
	 */
	private isRedirect(response: request.Response): boolean {
		return [301, 307].indexOf(response.statusCode) !== -1 &&
			response.headers.hasOwnProperty('location');
	}

	/**
	 * Check if response is successful
	 *
	 * @param response response object
	 * @returns if response is successful
	 */
	private isSuccess(response: request.Response): boolean {
		return [200, 201].indexOf(response.statusCode) !== -1;
	}

	/**
	 * Check if response is error
	 *
	 * @param response response object
	 * @returns if response is error
	 */
	private isError(response: request.Response): boolean {
		return [400, 401, 402, 403, 404, 500].indexOf(response.statusCode) !== -1;
	}

	/**
	 * Send a request to WebHDFS REST API
	 *
	 * @param method HTTP method
	 * @param urlValue
	 * @param opts Options for request
	 * @returns void
	 */
	private sendRequest(method: string, urlValue: string, opts: object, callback: (error: HdfsError, response: request.Response) => void): void {
		if (!callback) {
			return;
		}
		let requestParams = Object.assign(
			{ method: method, url: urlValue, json: true },
			this._requestParams,
			opts || {}
		);
		this.ensureCookie(requestParams);
		// Add a wrapper to handle unauthorized requests by adding kerberos auth steps
		let handler = (error: any, response: request.Response) => {
			if (error && error.statusCode === 401 && this._requestParams.isKerberos) {
				this.requestWithKerberosSync(requestParams, callback);
			} else {
				callback(error, response);
			}
		};
		this.doSendRequest(requestParams, handler);
	}

	private ensureCookie(requestParams: { headers?: { [key: string]: string } }) {
		if (this._authCookie && this._authCookie.expiryTime() > Date.now()) {
			requestParams.headers = requestParams.headers || {};
			requestParams.headers['cookie'] = `${this._authCookie.key}=${this._authCookie.value}`;
		}
	}

	private doSendRequest(requestParams: any, callback: (error: HdfsError, response: any) => void): void {
		request(requestParams, (error: any, response: request.Response, body: any) => {
			if (error || this.isError(response)) {
				let hdfsError = this.parseError(response, body, error);
				callback(hdfsError, response);
			}
			else if (this.isSuccess(response)) {
				callback(undefined, response);
			}
			else {
				let hdfsError = new HdfsError(localize('webhdfs.unexpectedRedirect', "Unexpected Redirect"), response && response.statusCode, response && response.statusMessage, this.getRemoteExceptionMessage(body || response.body), error);
				callback(hdfsError, response);
			}
		});
	}

	/**
	 * Authenticates using kerberos as part of a request, and saves cookie if successful.
	 * Ideally would use request's built-in cookie functionality but this isn't working with non-public domains.
	 * Instead, save the cookie in this module and reuse if not expired
	 */
	private requestWithKerberosSync(requestParams: any, callback: (error: HdfsError, response: request.Response) => void) {
		this.setKerberosAuthOnParams(requestParams).then(() => {
			this.doSendRequest(requestParams, (error, response) => {
				if (error) {
					// Pass on the callback
					callback(error, response);
				}
				else {
					// Capture cookie for future requests
					this.setAuthCookie(response);
					callback(error, response);
				}
			});
		}).catch((err) => {
			callback(err, undefined);
		});
	}
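
	// Editor's note (assumption, not taken from this diff): on a Kerberos-secured cluster the
	// 'set-cookie' captured by setAuthCookie below is typically Hadoop's hadoop.auth cookie,
	// e.g. hadoop.auth="u=user&p=user@REALM&t=kerberos&e=<expiry>&s=<signature>", which
	// ensureCookie above then replays on later requests until it expires.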

	private async setKerberosAuthOnParams(requestParams: any): Promise<void> {
		let kerberosToken = await auth.authenticateKerberos(this._opts.host);
		requestParams.headers = { Authorization: `Negotiate ${kerberosToken}` };
		return requestParams;
	}

	private setAuthCookie(response: request.Response) {
		try {
			if (response && response.headers && response.headers['set-cookie']) {
				let cookies: Cookie[];
				if (response.headers['set-cookie'] instanceof Array) {
					cookies = response.headers['set-cookie'].map(c => Cookie.parse(c));
				}
				else {
					cookies = [Cookie.parse(response.headers['set-cookie'])];
				}
				this._authCookie = cookies[0];
			}
		} catch { }
	}

	/**
	 * Change file permissions
	 * @returns void
	 */
	public chmod(path: string, mode: string, callback: (error: HdfsError) => void): void {
		this.checkArgDefined('path', path);
		this.checkArgDefined('mode', mode);

		let endpoint = this.getOperationEndpoint('setpermission', path, { permission: mode });
		this.sendRequest('PUT', endpoint, undefined, (error) => {
			return callback && callback(error);
		});
	}

	/**
	 * Change file owner
	 *
	 * @param path
	 * @param userId User name
	 * @param groupId Group name
	 * @param callback
	 * @returns void
	 */
	public chown(path: string, userId: string, groupId: string, callback: (error: HdfsError) => void): void {
		this.checkArgDefined('path', path);
		this.checkArgDefined('userId', userId);
		this.checkArgDefined('groupId', groupId);

		let endpoint = this.getOperationEndpoint('setowner', path, {
			owner: userId,
			group: groupId
		});

		this.sendRequest('PUT', endpoint, undefined, (error) => {
			if (callback) {
				callback(error);
			}
		});
	}

	/**
	 * List the status of a path
	 *
	 * @returns void
	 */
	public listStatus(path: string, callback: (error: HdfsError, files: FileStatus[]) => void): void {
		this.checkArgDefined('path', path);

		let endpoint = this.getOperationEndpoint('liststatus', path);
		this.sendRequest('GET', endpoint, undefined, (error, response) => {
			if (!callback) { return; }

			let files: any[] = [];
			if (error) {
				callback(error, undefined);
			} else if (response.body.hasOwnProperty('FileStatuses')
				&& response.body.FileStatuses.hasOwnProperty('FileStatus')) {
				files = (<any[]>response.body.FileStatuses.FileStatus).map(fs => {
					return new FileStatus(
						fs.accessTime || '',
						fs.blockSize || '',
						fs.group || '',
						fs.length || '',
						fs.modificationTime || '',
						fs.owner || '',
						fs.pathSuffix || '',
						fs.permission || '',
						fs.replication || '',
						fs.snapshotEnabled || '',
						parseHdfsFileType(fs.type)
					);
				});
				callback(undefined, files);
			} else {
				callback(new HdfsError(ErrorMessageInvalidDataStructure), undefined);
			}
		});
	}

	/**
	 * Make new directory
	 * @returns void
	 */
	public mkdir(path: string, permission: string = '0755', callback: (error: HdfsError) => void): void {
		this.checkArgDefined('path', path);

		let endpoint = this.getOperationEndpoint('mkdirs', path, {
			permission: permission
		});

		this.sendRequest('PUT', endpoint, undefined, (error) => {
			if (callback) {
				callback(error);
			}
		});
	}

	/**
	 * Rename path
	 * @returns void
	 */
	public rename(path: string, destination: string, callback: (error: HdfsError) => void): void {
		this.checkArgDefined('path', path);
		this.checkArgDefined('destination', destination);

		let endpoint = this.getOperationEndpoint('rename', path, {
			destination: destination
		});

		this.sendRequest('PUT', endpoint, undefined, (error) => {
			if (callback) {
				callback(error);
			}
		});
	}

	public getFileStatus(path: string, callback: (error: HdfsError, fileStatus: FileStatus) => void): void {
		this.checkArgDefined('path', path);

		let endpoint = this.getOperationEndpoint('getfilestatus', path);
		this.sendRequest('GET', endpoint, undefined, (error, response) => {
			if (!callback) { return; }
			if (error) {
				callback(error, undefined);
			} else if (response.body.hasOwnProperty('FileStatus')) {
				const fileStatus = new FileStatus(
					response.body.FileStatus.accessTime || '',
					response.body.FileStatus.blockSize || '',
					response.body.FileStatus.group || '',
					response.body.FileStatus.length || '',
					response.body.FileStatus.modificationTime || '',
					response.body.FileStatus.owner || '',
					response.body.FileStatus.pathSuffix || '',
					response.body.FileStatus.permission || '',
					response.body.FileStatus.replication || '',
					response.body.FileStatus.snapshotEnabled || '',
					parseHdfsFileType(response.body.FileStatus.type || 'undefined')
				);
				callback(undefined, fileStatus);
			} else {
				callback(new HdfsError(ErrorMessageInvalidDataStructure), undefined);
			}
		});
	}

	/**
	 * Get ACL status for given path
	 * @param path The path to the file/folder to get the status of
	 * @param callback Callback to handle the response
	 * @returns void
	 */
	public getAclStatus(path: string, callback: (error: HdfsError, permissionStatus: PermissionStatus) => void): void {
		this.checkArgDefined('path', path);

		let endpoint = this.getOperationEndpoint('getaclstatus', path);
		this.sendRequest('GET', endpoint, undefined, (error, response) => {
			if (!callback) { return; }
			if (error) {
				callback(error, undefined);
			} else if (response.body.hasOwnProperty('AclStatus')) {
				const permissions = parseAclPermissionFromOctal(response.body.AclStatus.permission);
				const ownerEntry = new AclEntry(PermissionType.owner, '', `${response.body.AclStatus.owner || ''}${ownerPostfix}`);
				ownerEntry.addPermission(AclEntryScope.access, permissions.owner);
				const groupEntry = new AclEntry(PermissionType.group, '', `${response.body.AclStatus.group || ''}${owningGroupPostfix}`);
				groupEntry.addPermission(AclEntryScope.access, permissions.group);
				const otherEntry = new AclEntry(PermissionType.other, '', everyoneName);
				otherEntry.addPermission(AclEntryScope.access, permissions.other);
				const parsedEntries = parseAclList((<any[]>response.body.AclStatus.entries).join(','));

				// First go through and apply any ACLs for the unnamed entries (which correspond to the permissions in
				// the permission octal)
				parsedEntries.filter(e => e.name === '').forEach(e => {
					let targetEntry: AclEntry;
					switch (e.type) {
						case AclType.user:
							targetEntry = ownerEntry;
							break;
						case AclType.group:
							targetEntry = groupEntry;
							break;
						case AclType.other:
							targetEntry = otherEntry;
							break;
						default:
							// Unknown type - just ignore since we don't currently support the other types
							return;
					}
					e.getAllPermissions().forEach(sp => {
						targetEntry.addPermission(sp.scope, sp.permission);
					});
				});

				const permissionStatus = new PermissionStatus(
					ownerEntry,
					groupEntry,
					otherEntry,
					!!response.body.AclStatus.stickyBit,
					// We filter out empty names here since those have already been merged into the
					// owner/owning group/other entries
					parsedEntries.filter(e => e.name !== ''));
				callback(undefined, permissionStatus);
			} else {
				callback(new HdfsError(ErrorMessageInvalidDataStructure), undefined);
			}
		});
	}
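
	// Editor's note (illustrative): the AclStatus body parsed above follows the WebHDFS
	// GETACLSTATUS response shape, roughly:
	//   {"AclStatus":{"entries":["user:carla:rw-","group::r-x"],"group":"supergroup",
	//    "owner":"hadoop","permission":"775","stickyBit":false}}
	// Unnamed entries (empty name) are folded into the owner/owning group/other rows;
	// named entries surface in the aclEntries list of the PermissionStatus.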

	/**
	 * Set ACL for the given path. The owner, group and other fields are required - other entries are optional.
	 * @param path The path to the file/folder to set the ACL on
	 * @param fileType The type of file we're setting to determine if defaults should be applied. Use undefined if type is unknown
	 * @param permissionStatus The status containing the permissions to set
	 * @param callback Callback to handle the response
	 */
	public setAcl(path: string, fileType: FileType | undefined, permissionStatus: PermissionStatus, callback: (error: HdfsError) => void): void {
		this.checkArgDefined('path', path);
		this.checkArgDefined('permissionStatus', permissionStatus);
		const concatEntries = [permissionStatus.owner, permissionStatus.group, permissionStatus.other].concat(permissionStatus.aclEntries);
		const aclSpec = concatEntries.reduce((acc, entry: AclEntry) => acc.concat(entry.toAclStrings(fileType !== FileType.File)), []).join(',');
		let endpoint = this.getOperationEndpoint('setacl', path, { aclspec: aclSpec });
		this.sendRequest('PUT', endpoint, undefined, (error) => {
			return callback && callback(error);
		});
	}
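
	// Editor's note (illustrative): the aclspec assembled above is the comma-separated form
	// the SETACL operation expects, e.g. 'user::rwx,user:carla:rw-,group::r-x,other::r--',
	// with 'default:'-prefixed entries appended when defaults apply (directories).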

	/**
	 * Sets the permission octal (sticky, owner, group & other) for a file/folder
	 * @param path The path to the file/folder to set the permission of
	 * @param permissionStatus The status containing the permission to set
	 * @param callback Callback to handle the response
	 */
	public setPermission(path: string, permissionStatus: PermissionStatus, callback: (error: HdfsError) => void): void {
		this.checkArgDefined('path', path);
		this.checkArgDefined('permissionStatus', permissionStatus);
		let endpoint = this.getOperationEndpoint('setpermission', path, { permission: permissionStatus.permissionOctal });
		this.sendRequest('PUT', endpoint, undefined, (error) => {
			return callback && callback(error);
		});
	}

	/**
	 * Removes the default ACLs for the specified path
	 * @param path The path to remove the default ACLs for
	 * @param callback Callback to handle the response
	 */
	public removeDefaultAcl(path: string, callback: (error: HdfsError) => void): void {
		this.checkArgDefined('path', path);
		let endpoint = this.getOperationEndpoint('removedefaultacl', path);
		this.sendRequest('PUT', endpoint, undefined, (error) => {
			return callback && callback(error);
		});
	}

	/**
	 * Get all mounts for a HDFS connection
	 * @param callback Callback to handle the response
	 * @returns void
	 */
	public getMounts(callback: (error: HdfsError, mounts: Mount[]) => void): void {
		let endpoint = this.getOperationEndpoint('listmounts', '');
		this.sendRequest('GET', endpoint, undefined, (error, response) => {
			if (!callback) { return; }
			if (error) {
				callback(error, undefined);
			} else if (response.body.hasOwnProperty('Mounts')) {
				const mounts = response.body.Mounts;
				callback(undefined, mounts);
			} else {
				callback(new HdfsError(ErrorMessageInvalidDataStructure), undefined);
			}
		});
	}

	/**
	 * Check file existence
	 * Wraps the listStatus method
	 *
	 * @see WebHDFS.listStatus
	 * @returns void
	 */
	public exists(path: string, callback: (error: HdfsError, exists: boolean) => void): void {
		this.checkArgDefined('path', path);

		this.listStatus(path, (error, fileStatus) => {
			const exists = !!fileStatus;
			callback(error, exists);
		});
	}

	/**
	 * Write data to the file
	 *
	 * @param path
	 * @param data
	 * @param append If set to true then append data to the file
	 * @param opts
	 * @param callback
	 */
	public writeFile(path: string, data: string | Buffer, append: boolean, opts: object,
		callback: (error: HdfsError) => void): fs.WriteStream {
		this.checkArgDefined('path', path);
		this.checkArgDefined('data', data);

		let error: HdfsError = null;
		let localStream = new BufferStreamReader(data);
		let remoteStream: fs.WriteStream = this.createWriteStream(path, !!append, opts || {});

		// Handle events
		remoteStream.once('error', (err) => {
			error = <HdfsError>err;
		});

		remoteStream.once('finish', () => {
			if (callback && error) {
				callback(error);
			}
		});

		localStream.pipe(remoteStream); // Pipe data
		return remoteStream;
	}

	/**
	 * Append data to the file
	 *
	 * @see writeFile
	 */
	public appendFile(path: string, data: string | Buffer, opts: object, callback: (error: HdfsError) => void): fs.WriteStream {
		return this.writeFile(path, data, true, opts, callback);
	}

	/**
	 * Read data from the file
	 *
	 * @fires Request#data
	 * @fires WebHDFS#finish
	 * @returns void
	 */
	public readFile(path: string, callback: (error: HdfsError, buffer: Buffer) => void): void {
		this.checkArgDefined('path', path);

		let remoteFileStream = this.createReadStream(path);
		let data: any[] = [];
		let error: HdfsError = undefined;

		remoteFileStream.once('error', (err) => {
			error = <HdfsError>err;
		});

		remoteFileStream.on('data', (dataChunk) => {
			data.push(dataChunk);
		});

		remoteFileStream.once('finish', () => {
			if (!callback) { return; }
			if (!error) {
				callback(undefined, Buffer.concat(data));
			} else {
				callback(error, undefined);
			}
		});
	}

	/**
	 * Create writable stream for given path
	 *
	 * @fires WebHDFS#finish
	 * @param path
	 * @param append If set to true then append data to the file
	 * @param opts
	 * @example
	 * let hdfs = WebHDFS.createClient();
	 *
	 * let localFileStream = fs.createReadStream('/path/to/local/file');
	 * let remoteFileStream = hdfs.createWriteStream('/path/to/remote/file');
	 *
	 * localFileStream.pipe(remoteFileStream);
	 *
	 * remoteFileStream.on('error', (err) => {
	 *   // Do something with the error
	 * });
	 *
	 * remoteFileStream.on('finish', () => {
	 *   // Upload is done
	 * });
	 */
	public createWriteStream(path: string, append?: boolean, opts?: object): fs.WriteStream {
		this.checkArgDefined('path', path);

		let endpoint = this.getOperationEndpoint(
			append ? 'append' : 'create',
			path,
			Object.assign(
				{
					overwrite: true,
					permission: '0755'
				},
				opts || {}
			)
		);

		let params: any = Object.assign(
			{
				method: append ? 'POST' : 'PUT',
				url: endpoint,
				json: true,
			},
			this._requestParams
		);
		params.headers = params.headers || {};
		params.headers['content-type'] = 'application/octet-stream';

		if (!this._requestParams.isKerberos) {
			return this.doCreateWriteStream(params);
		}
		// Else, must add kerberos token and handle redirects
		return this.createKerberosWriteStream(params);
	}

	private createKerberosWriteStream(params: any): fs.WriteStream {
		params.followRedirect = false;
		// Create an intermediate stream that pauses until we get a positive
		// response from the server
		let isWaiting = true;
		let firstCb: Function = undefined;
		let replyStream = through(function (chunk, enc, cb) {
			this.push(chunk, enc);
			if (isWaiting) {
				firstCb = cb;
			} else {
				cb();
			}
		});
		let handleErr = (err: any) => {
			replyStream.emit('error', err);
			replyStream.end();
		};
		let initRedirectedStream = () => {
			// After redirect, create valid stream to correct location
			// and pipe the intermediate stream to it, unblocking the data flow
			params.headers['content-type'] = 'application/octet-stream';
			let upload = request(params, (err: any, res: request.Response, bo: any) => {
				if (err || this.isError(res)) {
					emitError(replyStream, this.parseError(res, bo, err));
					replyStream.end();
				}
				else if (res.headers.hasOwnProperty('location')) {
					replyStream.emit('finish', res.headers.location);
				}
				else {
					replyStream.emit('finish');
				}
			});
			isWaiting = false;
			replyStream.pipe(upload);
			if (firstCb) {
				firstCb();
			}
		};
		this.requestWithRedirectAndAuth(params, initRedirectedStream, handleErr);
		return <fs.WriteStream><any>replyStream;
	}

	private doCreateWriteStream(params: any): fs.WriteStream {

		let canResume: boolean = true;
		let stream: Readable;
		let req = request(params, (error: any, response: request.Response, body: any) => {
			// Handle redirect only if there was not an error (e.g. res is defined)
			if (response && this.isRedirect(response)) {
				let upload = request(Object.assign(params, { url: response.headers.location }), (err: any, res: request.Response, bo: any) => {
					if (err || this.isError(res)) {
						emitError(req, this.parseError(res, bo, err));
						req.end();
					}
					else if (res.headers.hasOwnProperty('location')) {
						req.emit('finish', res.headers.location);
					}
					else {
						req.emit('finish');
					}
				});
				canResume = true; // Enable resume
				stream.pipe(upload);
				stream.resume();
			}
			if (error || this.isError(response)) {
				emitError(req, this.parseError(response, body, error));
			}
		});
		req.on('pipe', (src: Readable) => {
			// Pause read stream
			stream = src;
			stream.pause();
			// This is not an elegant solution but here we go
			// Basically we don't allow pipe() method to resume reading input
			// and set internal _readableState.flowing to false
			canResume = false;
			stream.on('resume', () => {
				if (!canResume) {
					(stream as any)._readableState.flowing = false; // i guess we are unsafely accessing this
				}
			});
			// Unpipe initial request
			src.unpipe(req);
			req.end();
		});
		return <fs.WriteStream><any>req;
	}

	/**
	 * Create readable stream for given path
	 *
	 * @fires Request#data
	 * @fires WebHDFS#finish
	 *
	 * @example
	 * let hdfs = WebHDFS.createClient();
	 *
	 * let remoteFileStream = hdfs.createReadStream('/path/to/remote/file');
	 *
	 * remoteFileStream.on('error', (err) => {
	 *   // Do something with the error
	 * });
	 *
	 * remoteFileStream.on('data', (dataChunk) => {
	 *   // Do something with the data chunk
	 * });
	 *
	 * remoteFileStream.on('finish', () => {
	 *   // Read is done
	 * });
	 */
	public createReadStream(path: string, opts?: object): fs.ReadStream {
		this.checkArgDefined('path', path);

		let endpoint = this.getOperationEndpoint('open', path, opts);
		let params: request.OptionsWithUrl = Object.assign(
			{
				method: 'GET',
				url: endpoint,
				json: true
			},
			this._requestParams
		);
		if (!this._requestParams.isKerberos) {
			return <fs.ReadStream><any>this.doCreateReadStream(params);
		}
		// Else, must add kerberos token and handle redirects
		params.followRedirect = false;
		let replyStream = through();
		let handleErr = (err: any) => {
			replyStream.emit('error', err);
			replyStream.end();
		};
		let initRedirectedStream = () => {
			let redirectedStream = this.doCreateReadStream(params);
			redirectedStream.pipe(replyStream);
		};
		this.requestWithRedirectAndAuth(params, initRedirectedStream, handleErr);

		return <fs.ReadStream><any>replyStream;
	}

	private requestWithRedirectAndAuth(params: request.OptionsWithUrl, onRedirected: () => void, handleErr: (err: any) => void) {
		this.requestWithKerberosSync(params, (err, response: request.Response) => {
			if (err && err.statusCode === 307 && response.headers['location']) {
				// It's a redirect
				params.url = response.headers['location'];
				this.setKerberosAuthOnParams(params)
					.then(onRedirected)
					.catch(handleErr);
			} else {
				handleErr(err);
			}
		});
	}

	private doCreateReadStream(params: request.OptionsWithUrl): fs.ReadStream {

		let req: request.Request = request(params);
		req.on('complete', (response) => {
			req.emit('finish');
		});
		req.on('response', (response) => {
			// Handle remote exceptions
			// Remove all data handlers and parse error data
			if (this.isError(response)) {
				req.removeAllListeners('data');
				req.on('data', (data) => {
					req.emit('error', this.parseError(response, data.toString()));
					req.end();
				});
			}
			else if (this.isRedirect(response)) {
				let download = request(params);
				download.on('complete', (response) => {
					req.emit('finish');
				});
				// Proxy data to original data handler
				// Not the nicest way but hey
				download.on('data', (dataChunk) => {
					req.emit('data', dataChunk);
				});
				// Handle subrequest
				download.on('response', (response) => {
					if (this.isError(response)) {
						download.removeAllListeners('data');
						download.on('data', (data) => {
							req.emit('error', this.parseError(response, data.toString()));
							req.end();
						});
					}
				});
			}
			// No need to interrupt the request
			// data will be automatically sent to the data handler
		});
		return <fs.ReadStream><any>req;
	}

	/**
	 * Create symbolic link to the destination path
	 *
	 * @returns void
	 */
	public symlink(src: string, destination: string, createParent: boolean = false, callback: (error: HdfsError) => void): void {
		this.checkArgDefined('src', src);
		this.checkArgDefined('destination', destination);

		let endpoint = this.getOperationEndpoint('createsymlink', src, {
			createParent: createParent,
			destination: destination
		});

		this.sendRequest('PUT', endpoint, undefined, (error) => {
			if (callback) {
				callback(error);
			}
		});
	}

	/**
	 * Unlink path
	 *
	 * @returns void
	 */
	public unlink(path: string, recursive: boolean = false, callback: (error: HdfsError) => void): void {
		this.checkArgDefined('path', path);

		let endpoint = this.getOperationEndpoint('delete', path, { recursive: recursive });
		this.sendRequest('DELETE', endpoint, undefined, (error) => {
			if (callback) {
				callback(error);
			}
		});
	}

	/**
	 * @alias WebHDFS.unlink
	 * @returns void
	 */
	public rmdir(path: string, recursive: boolean = false, callback: (error: HdfsError) => void): void {
		this.unlink(path, recursive, callback);
	}

	public static createClient(opts: IHdfsOptions): WebHDFS {
		return new WebHDFS(
			Object.assign(
				{
					host: 'localhost',
					port: '50070',
					path: '/webhdfs/v1'
				},
				opts || {}
			),
			opts.requestParams ?? {}
		);
	}
}

export class HdfsError extends Error {
	constructor(
		errorMessage: string,
		public statusCode?: number,
		public statusMessage?: string,
		public remoteExceptionMessage?: string,
		public internalError?: any) {
		super(errorMessage);
	}
}
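Editor's note: a hedged, end-to-end sketch of the client surface deleted above, using only members defined in this file (createClient, mkdir, chmod, getAclStatus). The host, port and paths are placeholders, not values taken from this diff.

// Hypothetical usage of the deleted WebHDFS client:
const client = WebHDFS.createClient({ host: 'namenode', port: '50070', path: '/webhdfs/v1' });
client.mkdir('/tmp/demo', '0755', (err) => {
	if (err) { console.error(err.message); return; }
	client.chmod('/tmp/demo', '0750', (chmodErr) => {
		if (chmodErr) { console.error(chmodErr.message); return; }
		client.getAclStatus('/tmp/demo', (aclErr, status) => {
			// permissionOctal comes from the PermissionStatus class referenced above
			console.log(aclErr ? aclErr.message : status.permissionOctal);
		});
	});
});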
@@ -7,7 +7,6 @@ import * as azdata from 'azdata';
import * as constants from './constants';

const cloudIcon = 'mssql:cloud';
const clusterIcon = 'mssql:cluster';

export class MssqlIconProvider implements azdata.IconProvider {
	public readonly providerId: string = constants.sqlProviderName;
@@ -17,8 +16,6 @@ export class MssqlIconProvider implements azdata.IconProvider {
		if (connection.providerName === 'MSSQL') {
			if (serverInfo.isCloud) {
				iconName = cloudIcon;
			} else if (serverInfo.options['isBigDataCluster']) {
				iconName = clusterIcon;
			}
		}
		return Promise.resolve(iconName);

@@ -6,59 +6,7 @@
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

// HDFS Constants //////////////////////////////////////////////////////////
export const msgMissingNodeContext = localize('msgMissingNodeContext', "Node Command called without any node passed");

// HDFS Manage Access Dialog Constants ////////////////////////////////////

export const manageAccessTitle = localize('mssql.manageAccessTitle', "Manage Access");
export const locationTitle = localize('mssql.locationTitle', "Location : ");
export const permissionsHeader = localize('mssql.permissionsTitle', "Permissions");
export const ownerPostfix = localize('mssql.ownerPostfix', " - Owner");
export const owner = localize('mssql.owner', "Owner");
export const group = localize('mssql.group', "Group");
export const owningGroupPostfix = localize('mssql.owningGroupPostfix', " - Owning Group");
export const everyoneName = localize('mssql.everyone', "Everyone else");
export const userLabel = localize('mssql.userLabel', "User");
export const groupLabel = localize('mssql.groupLabel', "Group");
export const accessHeader = localize('mssql.accessHeader', "Access");
export const defaultHeader = localize('mssql.defaultHeader', "Default");
export const deleteTitle = localize('mssql.delete', "Delete");
export const stickyLabel = localize('mssql.stickyHeader', "Sticky Bit");
export const inheritDefaultsLabel = localize('mssql.inheritDefaultsLabel', "Inherit Defaults");
export const readHeader = localize('mssql.readHeader', "Read");
export const writeHeader = localize('mssql.writeHeader', "Write");
export const executeHeader = localize('mssql.executeHeader', "Execute");
export const addUserOrGroupHeader = localize('mssql.addUserOrGroup', "Add User or Group");
export const enterNamePlaceholder = localize('mssql.enterNamePlaceholder', "Enter name");
export const addLabel = localize('mssql.addLabel', "Add");
export const namedUsersAndGroupsHeader = localize('mssql.namedUsersAndGroups', "Named Users and Groups");
export const defaultUserAndGroups = localize('mssql.defaultUserAndGroups', "Default User and Groups");
export const userOrGroupIcon = localize('mssql.userOrGroupIcon', "User or Group Icon");
export const applyText = localize('mssql.apply', "Apply");
export const applyRecursivelyText = localize('mssql.applyRecursively', "Apply Recursively");

export function errorApplyingAclChanges(errMsg: string): string { return localize('mssql.errorApplyingAclChanges', "Unexpected error occurred while applying changes : {0}", errMsg); }

// Spark Job Submission Constants //////////////////////////////////////////
export const sparkLocalFileDestinationHint = localize('sparkJobSubmission.LocalFileDestinationHint', "Local file will be uploaded to HDFS. ");
export const sparkJobSubmissionEndMessage = localize('sparkJobSubmission.SubmissionEndMessage', ".......................... Submit Spark Job End ............................");
export function sparkJobSubmissionPrepareUploadingFile(localPath: string, clusterFolder: string): string { return localize('sparkJobSubmission.PrepareUploadingFile', "Uploading file from local {0} to HDFS folder: {1}", localPath, clusterFolder); }
export const sparkJobSubmissionUploadingFileSucceeded = localize('sparkJobSubmission.UploadingFileSucceeded', "Upload file to cluster Succeeded!");
export function sparkJobSubmissionUploadingFileFailed(err: string): string { return localize('sparkJobSubmission.UploadingFileFailed', "Upload file to cluster Failed. {0}", err); }
export function sparkJobSubmissionPrepareSubmitJob(jobName: string): string { return localize('sparkJobSubmission.PrepareSubmitJob', "Submitting job {0} ... ", jobName); }
export const sparkJobSubmissionSparkJobHasBeenSubmitted = localize('sparkJobSubmission.SubmitJobFinished', "The Spark Job has been submitted.");
export function sparkJobSubmissionSubmitJobFailed(err: string): string { return localize('sparkJobSubmission.SubmitJobFailed', "Spark Job Submission Failed. {0} ", err); }
export function sparkJobSubmissionYarnUIMessage(yarnUIURL: string): string { return localize('sparkJobSubmission.YarnUIMessage', "YarnUI Url: {0} ", yarnUIURL); }
export function sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryLink: string): string { return localize('sparkJobSubmission.SparkHistoryLinkMessage', "Spark History Url: {0} ", sparkHistoryLink); }
export function sparkJobSubmissionGetApplicationIdFailed(err: string): string { return localize('sparkJobSubmission.GetApplicationIdFailed', "Get Application Id Failed. {0}", err); }
export function sparkJobSubmissionLocalFileNotExisted(path: string): string { return localize('sparkJobSubmission.LocalFileNotExisted', "Local file {0} does not exist. ", path); }
export const sparkJobSubmissionNoSqlBigDataClusterFound = localize('sparkJobSubmission.NoSqlBigDataClusterFound', "No SQL Server Big Data Cluster found.");
export function sparkConnectionRequired(name: string): string { return localize('sparkConnectionRequired', "Please connect to the Spark cluster before viewing {0} History.", name); }


export function failedToFindTenants(tenantId: string, accountName: string): string { return localize('mssql.failedToFindTenants', "Failed to find tenant '{0}' in account '{1}' when refreshing security token", tenantId, accountName); }
export function tokenRefreshFailed(name: string): string { return localize('mssql.tokenRefreshFailed', "{0} AAD token refresh failed, please reconnect to enable {0}", name); }
export const tokenRefreshFailedNoSecurityToken = localize('mssql.tokenRefreshFailedNoSecurityToken', "Editor token refresh failed, autocompletion will be disabled until the editor is disconnected and reconnected");
export function failedToFindAccount(accountName: string) { return localize('mssql.failedToFindAccount', "Failed to find azure account {0} when executing token refresh", accountName); }

@@ -6,26 +6,16 @@
import * as vscode from 'vscode';
import * as azdata from 'azdata';
import * as path from 'path';
import * as os from 'os';

import * as Constants from './constants';
import ContextProvider from './contextProvider';
import * as Utils from './utils';
import { AppContext } from './appContext';
import { UploadFilesCommand, MkDirCommand, SaveFileCommand, PreviewFileCommand, CopyPathCommand, DeleteFilesCommand, ManageAccessCommand } from './objectExplorerNodeProvider/hdfsCommands';
import { IPrompter } from './prompts/question';
import CodeAdapter from './prompts/adapter';
import { IExtension } from 'mssql';
import { OpenSparkJobSubmissionDialogCommand, OpenSparkJobSubmissionDialogFromFileCommand, OpenSparkJobSubmissionDialogTask } from './sparkFeature/dialog/dialogCommands';
import { OpenSparkYarnHistoryTask } from './sparkFeature/historyTask';
import { MssqlObjectExplorerNodeProvider, mssqlOutputChannel } from './objectExplorerNodeProvider/objectExplorerNodeProvider';
import { registerSearchServerCommand } from './objectExplorerNodeProvider/command';
import { MssqlIconProvider } from './iconProvider';
import { registerServiceEndpoints, Endpoint } from './dashboard/serviceEndpoints';
import { getBookExtensionContributions } from './dashboard/bookExtensions';
import { registerBooksWidget } from './dashboard/bookWidget';
import { createMssqlApi } from './mssqlApiFactory';
import { AuthType } from './util/auth';
import { SqlToolsServer } from './sqlToolsServer';
import { promises as fs } from 'fs';
import { IconPathHelper } from './iconHelper';
@@ -34,7 +24,6 @@ import { INotebookConvertService } from './notebookConvert/notebookConvertServic
import { registerTableDesignerCommands } from './tableDesigner/tableDesigner';

const localize = nls.loadMessageBundle();
const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', "This sample code loads the file into a data frame and shows the first 10 results.");

export async function activate(context: vscode.ExtensionContext): Promise<IExtension> {
// lets make sure we support this platform first
|
||||
@@ -52,24 +41,16 @@ export async function activate(context: vscode.ExtensionContext): Promise<IExten
|
||||
|
||||
IconPathHelper.setExtensionContext(context);
|
||||
|
||||
let prompter: IPrompter = new CodeAdapter();
|
||||
let appContext = new AppContext(context);
|
||||
|
||||
let nodeProvider = new MssqlObjectExplorerNodeProvider(prompter, appContext);
|
||||
azdata.dataprotocol.registerObjectExplorerNodeProvider(nodeProvider);
|
||||
let iconProvider = new MssqlIconProvider();
|
||||
azdata.dataprotocol.registerIconProvider(iconProvider);
|
||||
|
||||
activateSparkFeatures(appContext);
|
||||
activateNotebookTask(appContext);
|
||||
|
||||
registerSearchServerCommand(appContext);
|
||||
registerSearchServerCommand();
|
||||
context.subscriptions.push(new ContextProvider());
|
||||
registerHdfsCommands(context, prompter, appContext);
|
||||
|
||||
registerLogCommand(context);
|
||||
|
||||
registerServiceEndpoints(context);
|
||||
// Get book contributions - in the future this will be integrated with the Books/Notebook widget to show as a dashboard widget
|
||||
const bookContributionProvider = getBookExtensionContributions(context);
|
||||
context.subscriptions.push(bookContributionProvider);
|
||||
@@ -123,46 +104,19 @@ function registerLogCommand(context: vscode.ExtensionContext) {
|
||||
}));
|
||||
}
|
||||
|
||||
function registerHdfsCommands(context: vscode.ExtensionContext, prompter: IPrompter, appContext: AppContext) {
|
||||
context.subscriptions.push(new UploadFilesCommand(prompter, appContext));
|
||||
context.subscriptions.push(new MkDirCommand(prompter, appContext));
|
||||
context.subscriptions.push(new SaveFileCommand(prompter, appContext));
|
||||
context.subscriptions.push(new PreviewFileCommand(prompter, appContext));
|
||||
context.subscriptions.push(new CopyPathCommand(appContext));
|
||||
context.subscriptions.push(new DeleteFilesCommand(prompter, appContext));
|
||||
context.subscriptions.push(new ManageAccessCommand(appContext));
|
||||
}
|
||||
|
||||
function activateSparkFeatures(appContext: AppContext): void {
|
||||
let extensionContext = appContext.extensionContext;
|
||||
let outputChannel: vscode.OutputChannel = mssqlOutputChannel;
|
||||
extensionContext.subscriptions.push(new OpenSparkJobSubmissionDialogCommand(appContext, outputChannel));
|
||||
extensionContext.subscriptions.push(new OpenSparkJobSubmissionDialogFromFileCommand(appContext, outputChannel));
|
||||
azdata.tasks.registerTask(Constants.mssqlClusterLivySubmitSparkJobTask, async (profile: azdata.IConnectionProfile) => {
|
||||
await new OpenSparkJobSubmissionDialogTask(appContext, outputChannel).execute(profile);
|
||||
function registerSearchServerCommand(): void {
|
||||
vscode.commands.registerCommand('mssql.searchServers', () => {
|
||||
void vscode.window.showInputBox({
|
||||
placeHolder: localize('mssql.searchServers', "Search Server Names")
|
||||
}).then((stringSearch) => {
|
||||
if (stringSearch) {
|
||||
void vscode.commands.executeCommand('registeredServers.searchServer', (stringSearch));
|
||||
}
|
||||
});
|
||||
});
|
||||
azdata.tasks.registerTask(Constants.mssqlClusterLivyOpenSparkHistory, async (profile: azdata.IConnectionProfile) => {
|
||||
await new OpenSparkYarnHistoryTask(appContext).execute(profile, true);
|
||||
vscode.commands.registerCommand('mssql.clearSearchServerResult', () => {
|
||||
void vscode.commands.executeCommand('registeredServers.clearSearchServerResult');
|
||||
});
|
||||
azdata.tasks.registerTask(Constants.mssqlClusterLivyOpenYarnHistory, async (profile: azdata.IConnectionProfile) => {
|
||||
await new OpenSparkYarnHistoryTask(appContext).execute(profile, false);
|
||||
});
|
||||
}
|
||||
|
||||
function activateNotebookTask(appContext: AppContext): void {
|
||||
azdata.tasks.registerTask(Constants.mssqlClusterNewNotebookTask, (profile: azdata.IConnectionProfile) => {
|
||||
return saveProfileAndCreateNotebook(profile);
|
||||
});
|
||||
azdata.tasks.registerTask(Constants.mssqlClusterOpenNotebookTask, (profile: azdata.IConnectionProfile) => {
|
||||
return handleOpenNotebookTask(profile);
|
||||
});
|
||||
azdata.tasks.registerTask(Constants.mssqlOpenClusterDashboard, (profile: azdata.IConnectionProfile) => {
|
||||
return handleOpenClusterDashboardTask(profile, appContext);
|
||||
});
|
||||
}
|
||||
|
||||
function saveProfileAndCreateNotebook(profile: azdata.IConnectionProfile): Promise<void> {
|
||||
return handleNewNotebookTask(undefined, profile);
|
||||
}
|
||||
|
||||
function findNextUntitledEditorName(): string {
|
||||
@@ -178,74 +132,6 @@ function findNextUntitledEditorName(): string {
|
||||
}
|
||||
}
|
||||
|
||||
async function handleNewNotebookTask(oeContext?: azdata.ObjectExplorerContext, profile?: azdata.IConnectionProfile): Promise<void> {
|
||||
// Ensure we get a unique ID for the notebook. For now we're using a different prefix to the built-in untitled files
|
||||
// to handle this. We should look into improving this in the future
|
||||
let title = findNextUntitledEditorName();
|
||||
let untitledUri = vscode.Uri.parse(`untitled:${title}`);
|
||||
let editor = await azdata.nb.showNotebookDocument(untitledUri, {
|
||||
connectionProfile: profile,
|
||||
preview: false
|
||||
});
|
||||
if (oeContext && oeContext.nodeInfo && oeContext.nodeInfo.nodePath) {
|
||||
// Get the file path after '/HDFS'
|
||||
let hdfsPath: string = oeContext.nodeInfo.nodePath.substring(oeContext.nodeInfo.nodePath.indexOf('/HDFS') + '/HDFS'.length);
|
||||
if (hdfsPath.length > 0) {
|
||||
let analyzeCommand = '#' + msgSampleCodeDataFrame + os.EOL + 'df = (spark.read.option("inferSchema", "true")'
|
||||
+ os.EOL + '.option("header", "true")' + os.EOL + '.csv("{0}"))' + os.EOL + 'df.show(10)';
|
||||
await editor.edit(editBuilder => {
|
||||
editBuilder.replace(0, {
|
||||
cell_type: 'code',
|
||||
source: analyzeCommand.replace('{0}', hdfsPath)
|
||||
});
|
||||
});
|
||||
|
||||
}
|
||||
}
|
||||
}
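
// Illustrative sketch (not part of the original change): how the template above
// expands. The HDFS path used here is a hypothetical example value.
function exampleAnalyzeCommand(): string {
    const template = '#' + msgSampleCodeDataFrame + os.EOL + 'df = (spark.read.option("inferSchema", "true")'
        + os.EOL + '.option("header", "true")' + os.EOL + '.csv("{0}"))' + os.EOL + 'df.show(10)';
    // For hdfsPath '/data/airports.csv' this yields a PySpark cell that reads the
    // CSV with inferred schema and headers, then prints the first 10 rows.
    return template.replace('{0}', '/data/airports.csv');
}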

async function handleOpenNotebookTask(profile: azdata.IConnectionProfile): Promise<void> {
    let notebookFileTypeName = localize('notebookFileType', "Notebooks");
    let filter: { [key: string]: string[] } = {};
    filter[notebookFileTypeName] = ['ipynb'];
    let uris = await vscode.window.showOpenDialog({
        filters: filter,
        canSelectFiles: true,
        canSelectMany: false
    });
    if (uris && uris.length > 0) {
        let fileUri = uris[0];
        // Verify this is a .ipynb file since this isn't actually filtered on Mac/Linux
        if (path.extname(fileUri.fsPath) !== '.ipynb') {
            // in the future might want additional supported types
            void vscode.window.showErrorMessage(localize('unsupportedFileType', "Only .ipynb Notebooks are supported"));
        } else {
            await azdata.nb.showNotebookDocument(fileUri, {
                connectionProfile: profile,
                preview: false
            });
        }
    }
}

async function handleOpenClusterDashboardTask(profile: azdata.IConnectionProfile, appContext: AppContext): Promise<void> {
    const serverInfo = await azdata.connection.getServerInfo(profile.id);
    const controller = Utils.getClusterEndpoints(serverInfo).find(e => e.name === Endpoint.controller);
    if (!controller) {
        void vscode.window.showErrorMessage(localize('noController', "Could not find the controller endpoint for this instance"));
        return;
    }

    void vscode.commands.executeCommand('bigDataClusters.command.manageController',
        {
            url: controller.endpoint,
            auth: profile.authenticationType === 'Integrated' ? AuthType.Integrated : AuthType.Basic,
            username: 'admin', // Default to admin as a best-guess, we'll prompt for re-entering credentials if that fails
            password: profile.password,
            rememberPassword: true
        }, /*addOrUpdateController*/true);
}

// this method is called when your extension is deactivated
export function deactivate(): void {
}

extensions/mssql/src/mssql.d.ts
@@ -27,11 +27,6 @@ declare module 'mssql' {
     * Path to the root of the SQL Tools Service folder
     */
    readonly sqlToolsServicePath: string;
    /**
     * Gets the object explorer API that supports querying over the connections supported by this extension
     *
     */
    getMssqlObjectExplorerBrowser(): MssqlObjectExplorerBrowser;

    /**
     * Get the Cms Service APIs to communicate with CMS connections supported by this extension

@@ -4,10 +4,8 @@
 *--------------------------------------------------------------------------------------------*/

import { AppContext } from './appContext';
import { IExtension, ICmsService, IDacFxService, ISchemaCompareService, MssqlObjectExplorerBrowser, ILanguageExtensionService, ISqlAssessmentService, ISqlMigrationService, IAzureBlobService } from 'mssql';
import { IExtension, ICmsService, IDacFxService, ISchemaCompareService, ILanguageExtensionService, ISqlAssessmentService, ISqlMigrationService, IAzureBlobService } from 'mssql';
import * as constants from './constants';
import { MssqlObjectExplorerNodeProvider } from './objectExplorerNodeProvider/objectExplorerNodeProvider';
import * as azdata from 'azdata';
import { SqlToolsServer } from './sqlToolsServer';

export function createMssqlApi(context: AppContext, sqlToolsServer: SqlToolsServer): IExtension {
@@ -27,14 +25,6 @@ export function createMssqlApi(context: AppContext, sqlToolsServer: SqlToolsServ
        get languageExtension() {
            return context.getService<ILanguageExtensionService>(constants.LanguageExtensionService);
        },
        getMssqlObjectExplorerBrowser(): MssqlObjectExplorerBrowser {
            return {
                getNode: (explorerContext: azdata.ObjectExplorerContext) => {
                    let oeProvider = context.getService<MssqlObjectExplorerNodeProvider>(constants.ObjectExplorerService);
                    return <any>oeProvider.findSqlClusterNodeByContext(explorerContext);
                }
            };
        },
        get sqlAssessment() {
            return context.getService<ISqlAssessmentService>(constants.SqlAssessmentService);
        },

@@ -1,25 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { Transform } from 'stream';
import * as vscode from 'vscode';
import * as nls from 'vscode-nls';

const localize = nls.loadMessageBundle();

export class CancelableStream extends Transform {
    constructor(private cancellationToken: vscode.CancellationTokenSource) {
        super();
    }

    public override _transform(chunk: any, encoding: string, callback: Function): void {
        if (this.cancellationToken && this.cancellationToken.token.isCancellationRequested) {
            callback(new Error(localize('streamCanceled', "Stream operation canceled by the user")));
        } else {
            this.push(chunk);
            callback();
        }
    }
}
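
// Minimal usage sketch (illustrative, not part of the original file; assumes
// `import * as fs from 'fs'`): cancel an in-flight file copy from the UI.
function copyWithCancel(src: string, dest: string, tokenSource: vscode.CancellationTokenSource): void {
    const cancelable = new CancelableStream(tokenSource);
    // Once tokenSource.cancel() is called, _transform fails the pipeline with the
    // localized 'streamCanceled' error instead of pushing more chunks.
    fs.createReadStream(src).pipe(cancelable).pipe(fs.createWriteStream(dest));
}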

@@ -1,187 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as vscode from 'vscode';
import * as azdata from 'azdata';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

import { TreeNode } from './treeNodes';
import { QuestionTypes, IPrompter, IQuestion } from '../prompts/question';
import * as utils from '../utils';
import * as constants from '../constants';
import { AppContext } from '../appContext';

interface ICommandContextParsingOptions {
    editor: boolean;
    uri: boolean;
}

interface ICommandBaseContext {
    command: string;
    editor?: vscode.TextEditor;
    uri?: vscode.Uri;
}

export interface ICommandUnknownContext extends ICommandBaseContext {
    type: 'unknown';
}

interface ICommandUriContext extends ICommandBaseContext {
    type: 'uri';
}

export interface ICommandViewContext extends ICommandBaseContext {
    type: 'view';
    node: TreeNode;
}

export interface ICommandObjectExplorerContext extends ICommandBaseContext {
    type: 'objectexplorer';
    explorerContext: azdata.ObjectExplorerContext;
}

type CommandContext = ICommandObjectExplorerContext | ICommandViewContext | ICommandUriContext | ICommandUnknownContext;

function isTextEditor(editor: any): editor is vscode.TextEditor {
    if (editor === undefined) { return false; }

    return editor.id !== undefined && ((editor as vscode.TextEditor).edit !== undefined || (editor as vscode.TextEditor).document !== undefined);
}

export abstract class Command extends vscode.Disposable {

    protected readonly contextParsingOptions: ICommandContextParsingOptions = { editor: false, uri: false };

    private disposable: vscode.Disposable;

    constructor(command: string | string[], protected appContext: AppContext) {
        super(() => this.dispose());

        if (typeof command === 'string') {
            this.disposable = vscode.commands.registerCommand(command, (...args: any[]) => this._execute(command, ...args), this);

            return;
        }

        const subscriptions = command.map(cmd => vscode.commands.registerCommand(cmd, (...args: any[]) => this._execute(cmd, ...args), this));
        this.disposable = vscode.Disposable.from(...subscriptions);
    }

    override dispose(): void {
        if (this.disposable) {
            this.disposable.dispose();
        }
    }

    protected async preExecute(...args: any[]): Promise<any> {
        return this.execute(...args);
    }

    abstract execute(...args: any[]): any;

    protected _execute(command: string, ...args: any[]): any {
        // TODO consider using Telemetry.trackEvent(command);

        const [context, rest] = Command.parseContext(command, this.contextParsingOptions, ...args);
        return this.preExecute(context, ...rest);
    }

    private static parseContext(command: string, options: ICommandContextParsingOptions, ...args: any[]): [CommandContext, any[]] {
        let editor: vscode.TextEditor | undefined = undefined;

        let firstArg = args[0];
        if (options.editor && (firstArg === undefined || isTextEditor(firstArg))) {
            editor = firstArg;
            args = args.slice(1);
            firstArg = args[0];
        }

        if (options.uri && (firstArg === undefined || firstArg instanceof vscode.Uri)) {
            const [uri, ...rest] = args as [vscode.Uri, any];
            return [{ command: command, type: 'uri', editor: editor, uri: uri }, rest];
        }

        if (firstArg instanceof TreeNode) {
            const [node, ...rest] = args as [TreeNode, any];
            return [{ command: command, type: constants.ViewType, node: node }, rest];
        }

        if (firstArg && utils.isObjectExplorerContext(firstArg)) {
            const [explorerContext, ...rest] = args as [azdata.ObjectExplorerContext, any];
            return [{ command: command, type: constants.ObjectExplorerService, explorerContext: explorerContext }, rest];
        }

        return [{ command: command, type: 'unknown', editor: editor }, args];
    }
}
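
// Illustrative sketch (not part of the original file): a concrete Command that
// opts in to URI parsing, so invocations from an explorer context menu arrive as
// a context with `uri` populated. The command id is hypothetical.
class ExampleEchoPathCommand extends Command {
    protected override readonly contextParsingOptions: ICommandContextParsingOptions = { editor: false, uri: true };

    constructor(appContext: AppContext) {
        super('mssqlCluster.example.echoPath', appContext);
    }

    execute(context: CommandContext): void {
        if (context.type === 'uri' && context.uri) {
            void vscode.window.showInformationMessage(context.uri.fsPath);
        }
    }
}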

export abstract class ProgressCommand extends Command {
    static progressId = 0;
    constructor(command: string, protected prompter: IPrompter, appContext: AppContext) {
        super(command, appContext);
    }

    protected async executeWithProgress(
        execution: (cancelToken: vscode.CancellationTokenSource) => Promise<void>,
        label: string,
        isCancelable: boolean = false,
        onCanceled?: () => void
    ): Promise<void> {
        let disposables: vscode.Disposable[] = [];
        const tokenSource = new vscode.CancellationTokenSource();
        const statusBarItem = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
        disposables.push(vscode.Disposable.from(statusBarItem));
        statusBarItem.text = localize('progress', "$(sync~spin) {0}...", label);
        if (isCancelable) {
            const cancelCommandId = `cancelProgress${ProgressCommand.progressId++}`;
            disposables.push(vscode.commands.registerCommand(cancelCommandId, async () => {
                if (await this.confirmCancel()) {
                    tokenSource.cancel();
                }
            }));
            statusBarItem.tooltip = localize('cancelTooltip', "Cancel");
            statusBarItem.command = cancelCommandId;
        }
        statusBarItem.show();

        try {
            await execution(tokenSource);
        } catch (error) {
            if (isCancelable && onCanceled && tokenSource.token.isCancellationRequested) {
                // The error can be assumed to be due to cancellation. Run the callback
                onCanceled();
            } else {
                throw error;
            }
        } finally {
            disposables.forEach(d => d.dispose());
        }
    }

    private async confirmCancel(): Promise<boolean> {
        return await this.prompter.promptSingle<boolean>(<IQuestion>{
            type: QuestionTypes.confirm,
            message: localize('cancel', "Cancel operation?"),
            default: true
        });
    }
}
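
// Illustrative sketch (not part of the original file): how a subclass would drive
// executeWithProgress. The command id and the long-running work are hypothetical.
class ExampleLongTaskCommand extends ProgressCommand {
    constructor(prompter: IPrompter, appContext: AppContext) {
        super('mssqlCluster.example.longTask', prompter, appContext);
    }

    async execute(): Promise<void> {
        await this.executeWithProgress(
            async (cancelToken) => {
                for (let step = 0; step < 10; step++) {
                    if (cancelToken.token.isCancellationRequested) {
                        // Throwing after cancellation routes control to the onCanceled callback.
                        throw new Error('canceled');
                    }
                    await new Promise(resolve => setTimeout(resolve, 500));
                }
            },
            localize('exampleWorking', "Working"),
            true, // cancelable: adds a status bar cancel action with confirmation
            () => void vscode.window.showInformationMessage(localize('exampleCanceled', "Canceled")));
    }
}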

export function registerSearchServerCommand(appContext: AppContext): void {
    vscode.commands.registerCommand('mssql.searchServers', () => {
        void vscode.window.showInputBox({
            placeHolder: localize('mssql.searchServers', "Search Server Names")
        }).then((stringSearch) => {
            if (stringSearch) {
                void vscode.commands.executeCommand('registeredServers.searchServer', (stringSearch));
            }
        });
    });
    vscode.commands.registerCommand('mssql.clearSearchServerResult', () => {
        void vscode.commands.executeCommand('registeredServers.clearSearchServerResult');
    });
}
@@ -1,123 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

import * as constants from '../constants';
import { getIgnoreSslVerificationConfigSetting } from '../util/auth';
import { IFileSource, IHdfsOptions, FileSourceFactory } from './fileSources';

export class SqlClusterConnection {
    private _connection: azdata.connection.Connection;
    private _profile: azdata.IConnectionProfile;
    private _host: string;
    private _port: string;
    private _user: string;
    private _password: string;

    constructor(connectionInfo: azdata.connection.Connection | azdata.IConnectionProfile) {
        this.validate(connectionInfo);

        if ('id' in connectionInfo) {
            this._profile = connectionInfo;
            this._connection = this.toConnection(this._profile);
        } else {
            this._connection = connectionInfo;
        }
        this._host = this._connection.options[constants.hostPropName];
        this._port = this._connection.options[constants.knoxPortPropName];
        this._user = this._connection.options[constants.userPropName];
        this._password = this._connection.options[constants.passwordPropName];
    }

    public get connection(): azdata.connection.Connection { return this._connection; }
    public get host(): string { return this._host; }
    public get port(): number { return this._port ? Number.parseInt(this._port) : constants.defaultKnoxPort; }
    public get user(): string { return this._user; }
    public get password(): string { return this._password; }

    public isMatch(connection: SqlClusterConnection | azdata.ConnectionInfo): boolean {
        if (!connection) { return false; }
        let options1 = connection instanceof SqlClusterConnection ?
            connection._connection.options : connection.options;
        let options2 = this._connection.options;
        return [constants.hostPropName, constants.knoxPortPropName, constants.userPropName]
            .every(e => options1[e] === options2[e]);
    }

    public async createHdfsFileSource(): Promise<IFileSource> {
        let options: IHdfsOptions = {
            protocol: 'https',
            host: this.host,
            port: this.port,
            user: this.user,
            path: 'gateway/default/webhdfs/v1',
            requestParams: {
                rejectUnauthorized: !getIgnoreSslVerificationConfigSetting()
            }
        };
        if (this.isIntegratedAuth()) {
            options.requestParams.isKerberos = this.isIntegratedAuth();
            options.requestParams.auth = undefined;
        } else {
            options.requestParams.auth = {
                user: this.user,
                pass: this.password
            };
        }
        let fileSource = await FileSourceFactory.instance.createHdfsFileSource(options);
        return fileSource;
    }

    public isIntegratedAuth(): boolean {
        let authType: string = this._connection.options[constants.authenticationTypePropName];
        return authType && authType.toLowerCase() === constants.integratedAuth;
    }

    public updateUsername(username: string): void {
        if (username) {
            this._user = username;
        }
    }

    public updatePassword(password: string): void {
        if (password) {
            this._password = password;
        }
    }

    private validate(connectionInfo: azdata.ConnectionInfo): void {
        if (!connectionInfo) {
            throw new Error(localize('connectionInfoUndefined', "ConnectionInfo is undefined."));
        }
        if (!connectionInfo.options) {
            throw new Error(localize('connectionInfoOptionsUndefined', "ConnectionInfo.options is undefined."));
        }
        let missingProperties: string[] = this.getMissingProperties(connectionInfo);
        if (missingProperties && missingProperties.length > 0) {
            throw new Error(localize('connectionInfoOptionsMissingProperties',
                "Missing properties in connectionInfo.options: {0}",
                missingProperties.join(', ')));
        }
    }

    private getMissingProperties(connectionInfo: azdata.ConnectionInfo): string[] {
        if (!connectionInfo || !connectionInfo.options) { return undefined; }
        let requiredProps = [constants.hostPropName, constants.knoxPortPropName];
        let authType = connectionInfo.options[constants.authenticationTypePropName] && connectionInfo.options[constants.authenticationTypePropName].toLowerCase();
        if (authType !== constants.integratedAuth) {
            requiredProps.push(constants.userPropName, constants.passwordPropName);
        }
        return requiredProps.filter(e => connectionInfo.options[e] === undefined);
    }

    private toConnection(connProfile: azdata.IConnectionProfile): azdata.connection.Connection {
        let connection: azdata.connection.Connection = Object.assign(connProfile,
            { connectionId: this._profile.id });
        return connection;
    }
}
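
// Usage sketch (illustrative, not part of the original file): building a WebHDFS
// file source from an active connection profile. The profile value is assumed to
// come from an azdata connection event.
async function exampleListRoot(profile: azdata.IConnectionProfile): Promise<void> {
    const clusterConnection = new SqlClusterConnection(profile); // throws if required options are missing
    const fileSource = await clusterConnection.createHdfsFileSource();
    // Enumerate the HDFS root through the Knox gateway endpoint configured above.
    const files = await fileSource.enumerateFiles('/');
    console.log(files.map(f => f.path));
}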
@@ -1,428 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as vscode from 'vscode';
import * as fspath from 'path';
import * as fs from 'fs';
import * as meter from 'stream-meter';
import * as bytes from 'bytes';
import * as https from 'https';
import * as readline from 'readline';
import * as os from 'os';
import * as nls from 'vscode-nls';

import * as constants from '../constants';
import { WebHDFS, HdfsError } from '../hdfs/webhdfs';
import { PermissionStatus } from '../hdfs/aclEntry';
import { Mount, MountStatus } from '../hdfs/mount';
import { FileStatus, hdfsFileTypeToFileType } from '../hdfs/fileStatus';

const localize = nls.loadMessageBundle();

export function joinHdfsPath(parent: string, child: string): string {
    if (parent === constants.hdfsRootPath) {
        return `/${child}`;
    }
    return `${parent}/${child}`;
}

export const enum FileType {
    Directory = 'Directory',
    File = 'File',
    Symlink = 'Symlink'
}

export interface IFile {
    path: string;
    fileType: FileType;
    mountStatus?: MountStatus;
}

export class File implements IFile {
    public mountStatus?: MountStatus;
    constructor(public path: string, public fileType: FileType) {

    }

    public static createPath(path: string, fileName: string): string {
        return joinHdfsPath(path, fileName);
    }

    public static createChild(parent: IFile, fileName: string, fileType: FileType): IFile {
        return new File(File.createPath(parent.path, fileName), fileType);
    }

    public static createFile(parent: IFile, fileName: string): File {
        return File.createChild(parent, fileName, FileType.File);
    }

    public static createDirectory(parent: IFile, fileName: string): IFile {
        return File.createChild(parent, fileName, FileType.Directory);
    }

    public static getBasename(file: IFile): string {
        return fspath.basename(file.path);
    }
}

export interface IFileSource {
    enumerateFiles(path: string, refresh?: boolean): Promise<IFile[]>;
    mkdir(dirName: string, remoteBasePath: string): Promise<void>;
    createReadStream(path: string): fs.ReadStream;
    readFile(path: string, maxBytes?: number): Promise<Buffer>;
    readFileLines(path: string, maxLines: number): Promise<Buffer>;
    writeFile(localFile: IFile, remoteDir: string): Promise<string>;
    delete(path: string, recursive?: boolean): Promise<void>;
    /**
     * Retrieves the file status for the specified path (may be a file or directory)
     */
    getFileStatus(path: string): Promise<FileStatus>;
    /**
     * Get ACL status for given path
     * @param path The path to the file/folder to get the status of
     */
    getAclStatus(path: string): Promise<PermissionStatus>;
    /**
     * Sets the ACL status for given path
     * @param path The path to the file/folder to set the ACL on
     * @param fileType The type of file being set, used to determine whether defaults should be applied. Use undefined if the type is unknown
     * @param permissionStatus The status containing the permissions to set
     */
    setAcl(path: string, fileType: FileType | undefined, permissionStatus: PermissionStatus): Promise<void>;
    /**
     * Removes the default ACLs for the specified path
     * @param path The path to remove the default ACLs for
     */
    removeDefaultAcl(path: string): Promise<void>;
    /**
     * Sets the permission octal (sticky, owner, group & other) for a file/folder
     * @param path The path to the file/folder to set the permission of
     * @param aclStatus The status containing the permission to set
     */
    setPermission(path: string, aclStatus: PermissionStatus): Promise<void>;
    exists(path: string): Promise<boolean>;
}
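
// Consumer sketch (illustrative, not part of the original file): mirror a remote
// HDFS directory listing into local paths, using only the interface above.
async function exampleMirrorListing(fileSource: IFileSource, remoteDir: string, localDir: string): Promise<string[]> {
    const entries = await fileSource.enumerateFiles(remoteDir);
    // Map each remote entry to where it would land locally, keeping its basename.
    return entries.map(entry => fspath.join(localDir, File.getBasename(entry)));
}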

interface IHttpAuthentication {
    user: string;
    pass: string;
}

export interface IHdfsOptions {
    host?: string;
    port?: number;
    protocol?: string;
    user?: string;
    path?: string;
    requestParams?: IRequestParams;
}

export interface IRequestParams {
    auth?: IHttpAuthentication;
    isKerberos?: boolean;
    /**
     * Timeout in milliseconds to wait for response
     */
    timeout?: number;
    agent?: https.Agent;
    headers?: {};
    rejectUnauthorized?: boolean;
}

export class FileSourceFactory {
    private static _instance: FileSourceFactory;

    public static get instance(): FileSourceFactory {
        if (!FileSourceFactory._instance) {
            FileSourceFactory._instance = new FileSourceFactory();
        }
        return FileSourceFactory._instance;
    }

    public async createHdfsFileSource(options: IHdfsOptions): Promise<IFileSource> {
        options = options && options.host ? FileSourceFactory.removePortFromHost(options) : options;
        return new HdfsFileSource(WebHDFS.createClient(options));
    }

    // remove port from host when port is specified after a comma or colon
    private static removePortFromHost(options: IHdfsOptions): IHdfsOptions {
        // determine whether the host has either a ',' or ':' in it
        options = this.setHostAndPort(options, ',');
        options = this.setHostAndPort(options, ':');
        return options;
    }

    // set port and host correctly after we've identified that a delimiter exists in the host name
    private static setHostAndPort(options: IHdfsOptions, delimiter: string): IHdfsOptions {
        let optionsHost: string = options.host;
        if (options.host.indexOf(delimiter) > -1) {
            options.host = options.host.slice(0, options.host.indexOf(delimiter));
            options.port = Number.parseInt(optionsHost.replace(options.host + delimiter, ''));
        }
        return options;
    }
}
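
// Behavior sketch (illustrative, not part of the original file): host values that
// embed a port are split before the WebHDFS client is created, so the hypothetical
// endpoints 'myhost:30443' and 'myhost,30443' both become host 'myhost', port 30443.
async function exampleCreateFromHostPort(): Promise<IFileSource> {
    return FileSourceFactory.instance.createHdfsFileSource({
        protocol: 'https',
        host: 'myhost:30443', // port is extracted by removePortFromHost above
        user: 'admin'
    });
}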

class HdfsFileSource implements IFileSource {
    private mounts: Map<string, Mount>;
    constructor(private client: WebHDFS) {
    }

    public async enumerateFiles(path: string, refresh?: boolean): Promise<IFile[]> {
        if (!this.mounts || refresh) {
            await this.loadMounts();
        }
        return this.listStatus(path);
    }

    private loadMounts(): Promise<void> {
        return new Promise((resolve, reject) => {
            this.client.getMounts((error, mounts) => {
                this.mounts = new Map();
                if (!error && mounts) {
                    mounts.forEach(m => this.mounts.set(m.mountPath, m));
                }
                resolve();
            });
        });
    }

    private listStatus(path: string): Promise<IFile[]> {
        return new Promise((resolve, reject) => {
            this.client.listStatus(path, (error, fileStatuses) => {
                if (error) {
                    reject(error);
                }
                else {
                    let hdfsFiles: IFile[] = fileStatuses.map(fileStatus => {
                        let file = new File(File.createPath(path, fileStatus.pathSuffix), hdfsFileTypeToFileType(fileStatus.type));
                        if (this.mounts && this.mounts.has(file.path)) {
                            file.mountStatus = MountStatus.Mount;
                        }
                        return file;
                    });
                    resolve(hdfsFiles);
                }
            });
        });
    }

    public mkdir(dirName: string, remoteBasePath: string): Promise<void> {
        return new Promise((resolve, reject) => {
            let remotePath = joinHdfsPath(remoteBasePath, dirName);
            this.client.mkdir(remotePath, undefined, (err) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(undefined);
                }
            });
        });
    }

    public createReadStream(path: string): fs.ReadStream {
        return this.client.createReadStream(path);
    }

    public readFile(path: string, maxBytes?: number): Promise<Buffer> {
        return new Promise((resolve, reject) => {
            let error: HdfsError = undefined;
            let remoteFileStream: fs.ReadStream | meter.StreamMeter = this.client.createReadStream(path);
            remoteFileStream.on('error', (err) => {
                error = <HdfsError>err;
                reject(error);
            });

            let data: any[] = [];
            if (maxBytes) {
                remoteFileStream = remoteFileStream.pipe(meter(maxBytes));
                remoteFileStream.on('error', (err) => {
                    error = <HdfsError>err;
                    if (error.message.includes('Stream exceeded specified max')) {
                        // We have data > maxBytes, so show that we're truncating
                        let previewNote: string = '#################################################################################################################### \r\n' +
                            '########################### ' + localize('maxSizeNotice', "NOTICE: This file has been truncated at {0} for preview. ", bytes(maxBytes)) + '############################### \r\n' +
                            '#################################################################################################################### \r\n';
                        data.splice(0, 0, Buffer.from(previewNote, 'utf-8'));
                        void vscode.window.showWarningMessage(localize('maxSizeReached', "The file has been truncated at {0} for preview.", bytes(maxBytes)));
                        resolve(Buffer.concat(data));
                    } else {
                        reject(error);
                    }
                });
            }

            remoteFileStream.on('data', (chunk) => {
                data.push(chunk);
            });

            remoteFileStream.once('finish', () => {
                if (!error) {
                    resolve(Buffer.concat(data));
                }
            });
        });
    }

    public readFileLines(path: string, maxLines: number): Promise<Buffer> {
        return new Promise((resolve, reject) => {
            let lineReader = readline.createInterface({
                input: this.client.createReadStream(path)
            });

            let lineCount = 0;
            let lineData: string[] = [];
            let error: HdfsError = undefined;
            lineReader.on('line', (line: string) => {
                lineCount++;
                lineData.push(line);
                if (lineCount >= maxLines) {
                    resolve(Buffer.from(lineData.join(os.EOL)));
                    lineReader.close();
                }
            })
                .on('error', (err) => {
                    error = <HdfsError>err;
                    reject(error);
                })
                .on('close', () => {
                    if (!error) {
                        resolve(Buffer.from(lineData.join(os.EOL)));
                    }
                });
        });
    }

    public writeFile(localFile: IFile, remoteDirPath: string): Promise<string> {
        return new Promise((resolve, reject) => {
            let fileName = fspath.basename(localFile.path);
            let remotePath = joinHdfsPath(remoteDirPath, fileName);

            let error: HdfsError = undefined;
            let writeStream = this.client.createWriteStream(remotePath);
            // API always calls finish, so catch error then handle exit in the finish event
            writeStream.on('error', (err) => {
                error = <HdfsError>err;
                reject(error);
            });
            writeStream.on('finish', (location: string) => {
                if (!error) {
                    resolve(location);
                }
            });

            let readStream = fs.createReadStream(localFile.path);
            readStream.on('error', (err) => {
                error = err;
                reject(error);
            });

            readStream.pipe(writeStream);
        });
    }

    public delete(path: string, recursive: boolean = false): Promise<void> {
        return new Promise((resolve, reject) => {
            this.client.rmdir(path, recursive, (error) => {
                if (error) {
                    reject(error);
                } else {
                    resolve(undefined);
                }
            });
        });
    }

    public exists(path: string): Promise<boolean> {
        return new Promise((resolve, reject) => {
            this.client.exists(path, (error, exists) => {
                if (error) {
                    reject(error);
                } else {
                    resolve(exists);
                }
            });
        });
    }

    public getFileStatus(path: string): Promise<FileStatus> {
        return new Promise((resolve, reject) => {
            this.client.getFileStatus(path, (error: HdfsError, fileStatus: FileStatus) => {
                if (error) {
                    reject(error);
                } else {
                    resolve(fileStatus);
                }
            });
        });
    }

    /**
     * Get ACL status for given path
     * @param path The path to the file/folder to get the status of
     */
    public getAclStatus(path: string): Promise<PermissionStatus> {
        return new Promise((resolve, reject) => {
            this.client.getAclStatus(path, (error: HdfsError, permissionStatus: PermissionStatus) => {
                if (error) {
                    reject(error);
                } else {
                    resolve(permissionStatus);
                }
            });
        });
    }

    /**
     * Sets the ACL status for given path
     * @param path The path to the file/folder to set the ACL on
     * @param fileType The type of file being set, used to determine whether defaults should be applied. Use undefined if the type is unknown
     * @param permissionStatus The permissions to set
     */
    public setAcl(path: string, fileType: FileType | undefined, permissionStatus: PermissionStatus): Promise<void> {
        return new Promise((resolve, reject) => {
            this.client.setAcl(path, fileType, permissionStatus, (error: HdfsError) => {
                if (error) {
                    reject(error);
                } else {
                    resolve();
                }
            });
        });
    }

    /**
     * Removes the default ACLs for the specified path
     * @param path The path to remove the default ACLs for
     */
    public removeDefaultAcl(path: string): Promise<void> {
        return new Promise((resolve, reject) => {
            this.client.removeDefaultAcl(path, (error: HdfsError) => {
                if (error) {
                    reject(error);
                } else {
                    resolve();
                }
            });
        });
    }

    /**
     * Sets the permission octal (sticky, owner, group & other) for a file/folder
     * @param path The path to the file/folder to set the permission of
     * @param aclStatus The status containing the permission to set
     */
    public setPermission(path: string, aclStatus: PermissionStatus): Promise<void> {
        return new Promise((resolve, reject) => {
            this.client.setPermission(path, aclStatus, (error: HdfsError) => {
                if (error) {
                    reject(error);
                } else {
                    resolve();
                }
            });
        });
    }
}
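
// Usage sketch (illustrative, not part of the original file): capped preview reads.
// readFile resolves with a truncation banner prepended once the stream-meter limit
// trips, so callers can render partial content instead of failing outright.
async function examplePreview(fileSource: IFileSource, hdfsPath: string): Promise<string> {
    const capped = await fileSource.readFile(hdfsPath, 1024 * 1024); // at most ~1 MB
    return capped.toString('utf8');
}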
@@ -1,418 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as vscode from 'vscode';
import * as azdata from 'azdata';
import { promises as fs } from 'fs';
import * as fspath from 'path';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

import { Command, ICommandViewContext, ProgressCommand, ICommandObjectExplorerContext } from './command';
import { File, IFile, joinHdfsPath, FileType } from './fileSources';
import { FolderNode, FileNode, HdfsFileSourceNode } from './hdfsProvider';
import { IPrompter, IQuestion, QuestionTypes } from '../prompts/question';
import * as constants from '../constants';
import * as LocalizedConstants from '../localizedConstants';
import * as utils from '../utils';
import { AppContext } from '../appContext';
import { TreeNode } from './treeNodes';
import { MssqlObjectExplorerNodeProvider } from './objectExplorerNodeProvider';
import { ManageAccessDialog } from '../hdfs/ui/hdfsManageAccessDialog';

async function getSaveableUri(fileName: string, isPreview?: boolean): Promise<vscode.Uri> {
    let root = utils.getUserHome();
    let workspaceFolders = vscode.workspace.workspaceFolders;
    if (workspaceFolders && workspaceFolders.length > 0) {
        root = workspaceFolders[0].uri.fsPath;
    }
    // Cannot preview with a file path that already exists, so keep looking for a valid path that does not exist
    if (isPreview) {
        let fileNum = 1;
        let fileNameWithoutExtension = fspath.parse(fileName).name;
        let fileExtension = fspath.parse(fileName).ext;
        while (await utils.exists(fspath.join(root, fileName))) {
            fileName = `${fileNameWithoutExtension}-${fileNum}${fileExtension}`;
            fileNum++;
        }
    }
    return vscode.Uri.file(fspath.join(root, fileName));
}
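
// Behavior sketch (illustrative, not part of the original file): preview names are
// uniqued by suffixing a counter, so for an existing 'data.csv' the candidates are
// 'data-1.csv', 'data-2.csv', ... until an unused name is found. The file name is
// a hypothetical example.
async function exampleUniquePreviewUri(): Promise<vscode.Uri> {
    return getSaveableUri('data.csv', true /* isPreview */);
}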

export async function getNode<T extends TreeNode>(context: ICommandViewContext | ICommandObjectExplorerContext, appContext: AppContext): Promise<T> {
    let node: T = undefined;
    if (context && context.type === constants.ViewType && context.node) {
        node = context.node as T;
    } else if (context && context.type === constants.ObjectExplorerService) {
        let oeNodeProvider = appContext.getService<MssqlObjectExplorerNodeProvider>(constants.ObjectExplorerService);
        if (oeNodeProvider) {
            node = await oeNodeProvider.findSqlClusterNodeByContext<T>(context);
        }
    } else {
        throw new Error(LocalizedConstants.msgMissingNodeContext);
    }
    return node;
}
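
// Typed-resolution sketch (illustrative, not part of the original file): commands
// below call getNode with the node type they expect, regardless of whether the
// invocation came from the tree view or from Object Explorer.
async function exampleResolveFolder(context: ICommandViewContext | ICommandObjectExplorerContext, appContext: AppContext): Promise<void> {
    const folder = await getNode<FolderNode>(context, appContext);
    if (folder) {
        // FolderNode-specific operations (e.g. mkdir) are now type-safe.
        await folder.mkdir('example-dir'); // hypothetical directory name
    }
}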

export class UploadFilesCommand extends ProgressCommand {

    constructor(prompter: IPrompter, appContext: AppContext) {
        super('mssqlCluster.uploadFiles', prompter, appContext);
    }

    protected override async preExecute(context: ICommandViewContext | ICommandObjectExplorerContext, args: object = {}): Promise<any> {
        return this.execute(context, args);
    }

    async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
        try {
            let folderNode = await getNode<FolderNode>(context, this.appContext);
            const allFilesFilter = localize('allFiles', "All Files");
            let filter: { [key: string]: string[] } = {};
            filter[allFilesFilter] = ['*'];
            if (folderNode) {
                let options: vscode.OpenDialogOptions = {
                    canSelectFiles: true,
                    canSelectFolders: false,
                    canSelectMany: true,
                    openLabel: localize('lblUploadFiles', "Upload"),
                    filters: filter
                };
                let fileUris: vscode.Uri[] = await vscode.window.showOpenDialog(options);
                if (fileUris) {
                    let files: IFile[] = await Promise.all(fileUris.map(uri => uri.fsPath).map(this.mapPathsToFiles()));
                    await this.executeWithProgress(
                        (cancelToken: vscode.CancellationTokenSource) => this.writeFiles(files, folderNode, cancelToken),
                        localize('uploading', "Uploading files to HDFS"), true,
                        () => vscode.window.showInformationMessage(localize('uploadCanceled', "Upload operation was canceled")));
                    if (context.type === constants.ObjectExplorerService) {
                        let objectExplorerNode = await azdata.objectexplorer.getNode(context.explorerContext.connectionProfile.id, folderNode.getNodeInfo().nodePath);
                        await objectExplorerNode.refresh();
                    }
                }
            }
        } catch (err) {
            void vscode.window.showErrorMessage(
                localize('uploadError', "Error uploading files: {0}", utils.getErrorMessage(err, true)));
        }
    }

    private mapPathsToFiles(): (value: string, index: number, array: string[]) => Promise<File> {
        return async (path: string) => {
            const stats = (await fs.lstat(path));
            if (stats.isDirectory()) {
                return new File(path, FileType.Directory);
            } else if (stats.isSymbolicLink()) {
                return new File(path, FileType.Symlink);
            } else {
                return new File(path, FileType.File);
            }
        };
    }

    private async writeFiles(files: IFile[], folderNode: FolderNode, cancelToken: vscode.CancellationTokenSource): Promise<void> {
        for (let file of files) {
            if (cancelToken.token.isCancellationRequested) {
                // Throw here so that all recursion ends
                throw new Error('Upload canceled');
            }
            if (file.fileType === FileType.Directory) {
                let dirName = fspath.basename(file.path);
                let subFolder = await folderNode.mkdir(dirName);
                let children: IFile[] = await Promise.all((await fs.readdir(file.path))
                    .map(childFileName => joinHdfsPath(file.path, childFileName))
                    .map(this.mapPathsToFiles()));
                await this.writeFiles(children, subFolder, cancelToken);
            } else {
                await folderNode.writeFile(file);
            }
        }
    }
}

export class MkDirCommand extends ProgressCommand {

    constructor(prompter: IPrompter, appContext: AppContext) {
        super('mssqlCluster.mkdir', prompter, appContext);
    }

    protected override async preExecute(context: ICommandViewContext | ICommandObjectExplorerContext, args: object = {}): Promise<any> {
        return this.execute(context, args);
    }

    async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
        try {
            let folderNode = await getNode<FolderNode>(context, this.appContext);

            if (folderNode) {
                let fileName: string = await this.getDirName();
                if (fileName && fileName.length > 0) {
                    await this.executeWithProgress(
                        async (cancelToken: vscode.CancellationTokenSource) => this.mkDir(fileName, folderNode, cancelToken),
                        localize('makingDir', "Creating directory"), true,
                        () => vscode.window.showInformationMessage(localize('mkdirCanceled', "Operation was canceled")));
                    if (context.type === constants.ObjectExplorerService) {
                        let objectExplorerNode = await azdata.objectexplorer.getNode(context.explorerContext.connectionProfile.id, folderNode.getNodeInfo().nodePath);
                        await objectExplorerNode.refresh();
                    }
                }
            }
        } catch (err) {
            void vscode.window.showErrorMessage(
                localize('mkDirError', "Error creating directory: {0}", utils.getErrorMessage(err, true)));
        }
    }

    private async getDirName(): Promise<string> {
        return await this.prompter.promptSingle(<IQuestion>{
            type: QuestionTypes.input,
            name: 'enterDirName',
            message: localize('enterDirName', "Enter directory name"),
            default: ''
        }).then(confirmed => <string>confirmed);
    }

    private async mkDir(fileName: string, folderNode: FolderNode, cancelToken: vscode.CancellationTokenSource): Promise<void> {
        await folderNode.mkdir(fileName);
    }
}

export class DeleteFilesCommand extends Command {

    constructor(private prompter: IPrompter, appContext: AppContext) {
        super('mssqlCluster.deleteFiles', appContext);
    }

    protected override async preExecute(context: ICommandViewContext | ICommandObjectExplorerContext, args: object = {}): Promise<any> {
        return this.execute(context, args);
    }

    async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
        try {
            let node = await getNode<TreeNode>(context, this.appContext);
            if (node) {
                // TODO ideally would let node define if it's deletable
                // TODO also, would like to change this to getNodeInfo as OE is the primary use case now
                let treeItem = await node.getTreeItem();
                let oeNodeToRefresh: azdata.objectexplorer.ObjectExplorerNode = undefined;
                if (context.type === constants.ObjectExplorerService) {
                    let oeNodeToDelete = await azdata.objectexplorer.getNode(context.explorerContext.connectionProfile.id, node.getNodeInfo().nodePath);
                    oeNodeToRefresh = await oeNodeToDelete.getParent();
                }
                switch (treeItem.contextValue) {
                    case constants.MssqlClusterItems.Folder:
                        await this.deleteFolder(<FolderNode>node);
                        break;
                    case constants.MssqlClusterItems.File:
                        await this.deleteFile(<FileNode>node);
                        break;
                    default:
                        return;
                }
                if (oeNodeToRefresh) {
                    await oeNodeToRefresh.refresh();
                }
            } else {
                void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
            }
        } catch (err) {
            void vscode.window.showErrorMessage(
                localize('deleteError', "Error deleting files: {0}", utils.getErrorMessage(err, true)));
        }
    }

    private async confirmDelete(deleteMsg: string): Promise<boolean> {
        return await this.prompter.promptSingle(<IQuestion>{
            type: QuestionTypes.confirm,
            message: deleteMsg,
            default: false
        }).then(confirmed => <boolean>confirmed);
    }

    private async deleteFolder(node: FolderNode): Promise<void> {
        if (node) {
            let confirmed = await this.confirmDelete(localize('msgDeleteFolder', "Are you sure you want to delete this folder and its contents?"));
            if (confirmed) {
                // TODO prompt for recursive delete if non-empty?
                await node.delete(true);
            }
        }
    }

    private async deleteFile(node: FileNode): Promise<void> {
        if (node) {
            let confirmed = await this.confirmDelete(localize('msgDeleteFile', "Are you sure you want to delete this file?"));
            if (confirmed) {
                await node.delete();
            }
        }
    }
}

export class SaveFileCommand extends ProgressCommand {

    constructor(prompter: IPrompter, appContext: AppContext) {
        super('mssqlCluster.saveFile', prompter, appContext);
    }

    protected override async preExecute(context: ICommandViewContext | ICommandObjectExplorerContext, args: object = {}): Promise<any> {
        return this.execute(context, args);
    }

    async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
        try {
            let fileNode = await getNode<FileNode>(context, this.appContext);
            if (fileNode) {
                let defaultUri = await getSaveableUri(fspath.basename(fileNode.hdfsPath));
                let fileUri: vscode.Uri = await vscode.window.showSaveDialog({
                    defaultUri: defaultUri
                });
                if (fileUri) {
                    await this.executeWithProgress(
                        (cancelToken: vscode.CancellationTokenSource) => this.doSaveAndOpen(fileUri, fileNode, cancelToken),
                        localize('saving', "Saving HDFS Files"), true,
                        () => vscode.window.showInformationMessage(localize('saveCanceled', "Save operation was canceled")));
                }
            } else {
                void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
            }
        } catch (err) {
            void vscode.window.showErrorMessage(
                localize('saveError', "Error saving file: {0}", utils.getErrorMessage(err, true)));
        }
    }

    private async doSaveAndOpen(fileUri: vscode.Uri, fileNode: FileNode, cancelToken: vscode.CancellationTokenSource): Promise<void> {
        await fileNode.writeFileContentsToDisk(fileUri.fsPath, cancelToken);
        await vscode.commands.executeCommand('vscode.open', fileUri);
    }
}

export class PreviewFileCommand extends ProgressCommand {
    public static readonly DefaultMaxSize = 30 * 1024 * 1024;

    constructor(prompter: IPrompter, appContext: AppContext) {
        super('mssqlCluster.previewFile', prompter, appContext);
    }

    protected override async preExecute(context: ICommandViewContext | ICommandObjectExplorerContext, args: object = {}): Promise<any> {
        return this.execute(context, args);
    }

    async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
        try {
            let fileNode = await getNode<FileNode>(context, this.appContext);
            if (fileNode) {
                await this.executeWithProgress(
                    async (cancelToken: vscode.CancellationTokenSource) => {
                        let contents = await fileNode.getFileContentsAsString(PreviewFileCommand.DefaultMaxSize);
                        let fileName: string = fspath.basename(fileNode.hdfsPath);
                        if (fspath.extname(fileName) !== '.ipynb') {
                            const doc = await this.openTextDocument(fileName);
                            const options: vscode.TextDocumentShowOptions = {
                                viewColumn: vscode.ViewColumn.Active,
                                preserveFocus: false
                            };
                            const editor = await vscode.window.showTextDocument(doc, options);
                            await editor.edit(edit => {
                                edit.insert(new vscode.Position(0, 0), contents);
                            });
                        } else {
                            let connectionProfile: azdata.IConnectionProfile = undefined;
                            if (context.type === constants.ObjectExplorerService) {
                                connectionProfile = context.explorerContext.connectionProfile;
                            }
                            await this.showNotebookDocument(fileName, connectionProfile, contents);
                        }
                    },
                    localize('previewing', "Generating preview"),
                    false);
            } else {
                void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
            }
        } catch (err) {
            void vscode.window.showErrorMessage(
                localize('previewError', "Error previewing file: {0}", utils.getErrorMessage(err, true)));
        }
    }

    private async showNotebookDocument(fileName: string, connectionProfile?: azdata.IConnectionProfile,
        initialContent?: string
    ): Promise<azdata.nb.NotebookEditor> {
        let docUri: vscode.Uri = (await getSaveableUri(fileName, true))
            .with({ scheme: constants.UNTITLED_SCHEMA });
        return await azdata.nb.showNotebookDocument(docUri, {
            connectionProfile: connectionProfile,
            preview: false,
            initialContent: initialContent
        });
    }

    private async openTextDocument(fileName: string): Promise<vscode.TextDocument> {
        let docUri: vscode.Uri = await getSaveableUri(fileName, true);
        if (docUri) {
            docUri = docUri.with({ scheme: constants.UNTITLED_SCHEMA });
            return await vscode.workspace.openTextDocument(docUri);
        } else {
            // Can't reliably create a filename to save as so just use untitled
            let language = fspath.extname(fileName);
            if (language && language.length > 0) {
                // trim the '.'
                language = language.substring(1);
            }
            return await vscode.workspace.openTextDocument({
                language: language
            });
        }
    }
}
|
||||
|
||||
export class CopyPathCommand extends Command {
|
||||
public static readonly DefaultMaxSize = 30 * 1024 * 1024;
|
||||
|
||||
constructor(appContext: AppContext) {
|
||||
super('mssqlCluster.copyPath', appContext);
|
||||
}
|
||||
|
||||
protected override async preExecute(context: ICommandViewContext | ICommandObjectExplorerContext, args: object = {}): Promise<any> {
|
||||
return this.execute(context, args);
|
||||
}
|
||||
|
||||
async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
|
||||
try {
|
||||
let node = await getNode<HdfsFileSourceNode>(context, this.appContext);
|
||||
if (node) {
|
||||
let path = node.hdfsPath;
|
||||
void vscode.env.clipboard.writeText(path);
|
||||
} else {
|
||||
void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
|
||||
}
|
||||
} catch (err) {
|
||||
void vscode.window.showErrorMessage(
|
||||
localize('copyPathError', "Error on copying path: {0}", utils.getErrorMessage(err, true)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class ManageAccessCommand extends Command {
|
||||
|
||||
constructor(appContext: AppContext) {
|
||||
super('mssqlCluster.manageAccess', appContext);
|
||||
}
|
||||
|
||||
async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
|
||||
try {
|
||||
let node = await getNode<HdfsFileSourceNode>(context, this.appContext);
|
||||
if (node) {
|
||||
new ManageAccessDialog(node.hdfsPath, await node.getFileSource()).openDialog();
|
||||
} else {
|
||||
void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
|
||||
}
|
||||
} catch (err) {
|
||||
void vscode.window.showErrorMessage(
|
||||
localize('manageAccessError', "An unexpected error occurred while opening the Manage Access dialog: {0}", utils.getErrorMessage(err, true)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,382 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as vscode from 'vscode';
import * as fspath from 'path';
import * as fs from 'fs';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

import * as Constants from '../constants';
import { IFileSource, IFile, File, FileType } from './fileSources';
import { CancelableStream } from './cancelableStream';
import { TreeNode } from './treeNodes';
import * as utils from '../utils';
import { IFileNode } from './types';
import { MountStatus } from '../hdfs/mount';
import { SqlClusterSession } from './objectExplorerNodeProvider';

export interface ITreeChangeHandler {
	notifyNodeChanged(node: TreeNode): void;
}

export class TreeDataContext {
	constructor(public extensionContext: vscode.ExtensionContext, public changeHandler: ITreeChangeHandler) {
	}
}

export abstract class HdfsFileSourceNode extends TreeNode {
	constructor(protected context: TreeDataContext, protected _path: string, fileSource: IFileSource | undefined, protected mountStatus?: MountStatus) {
		super(fileSource);
	}

	public get hdfsPath(): string {
		return this._path;
	}

	public get nodePathValue(): string {
		return this.getDisplayName();
	}

	protected isMounted(): boolean {
		return this.mountStatus === MountStatus.Mount || this.mountStatus === MountStatus.Mount_Child;
	}

	getDisplayName(): string {
		return fspath.basename(this._path);
	}

	public async delete(recursive: boolean = false): Promise<void> {
		const fileSource = await this.getFileSource();
		await fileSource.delete(this.hdfsPath, recursive);
		// Notify the parent that it should be updated. At the top of the tree the parent is undefined, which refreshes the whole tree
		(<HdfsFileSourceNode>this.parent).onChildRemoved();
		this.context.changeHandler.notifyNodeChanged(this.parent);
	}

	public abstract onChildRemoved(): void;
}

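/**
 * Node representing an HDFS directory. Children are enumerated from the file source and cached
 * until a refresh is requested; write and mkdir operations re-query the server so the returned
 * child node reflects the actual server state.
 */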
export class FolderNode extends HdfsFileSourceNode {
	private children: TreeNode[] = [];
	protected _nodeType: string;

	constructor(context: TreeDataContext, path: string, fileSource: IFileSource | undefined, nodeType?: string, mountStatus?: MountStatus) {
		super(context, path, fileSource, mountStatus);
		this._nodeType = nodeType ? nodeType : Constants.MssqlClusterItems.Folder;
	}

	public onChildRemoved(): void {
		this.children = undefined;
	}

	async getChildren(refreshChildren: boolean): Promise<TreeNode[]> {
		if (refreshChildren || !this.children) {
			try {
				const fileSource = await this.getFileSource();
				let files: IFile[] = await fileSource.enumerateFiles(this._path);
				if (files) {
					// Note: for now, assuming HDFS-provided sorting is sufficient
					this.children = files.map((file) => {
						let node: TreeNode = file.fileType === FileType.File ?
							new FileNode(this.context, file.path, fileSource, this.getChildMountStatus(file)) :
							new FolderNode(this.context, file.path, fileSource, Constants.MssqlClusterItems.Folder, this.getChildMountStatus(file));
						node.parent = this;
						return node;
					});
				}
			} catch (error) {
				this.children = [ErrorNode.create(localize('errorExpanding', "Error: {0}", utils.getErrorMessage(error)), this, error.statusCode)];
			}
		}
		return this.children;
	}

	private getChildMountStatus(file: IFile): MountStatus {
		if (file.mountStatus !== undefined && file.mountStatus !== MountStatus.None) {
			return file.mountStatus;
		} else if (this.mountStatus !== undefined && this.mountStatus !== MountStatus.None) {
			// Any child node of a mount (or subtree) must be a mount child
			return MountStatus.Mount_Child;
		}
		return MountStatus.None;
	}

	getTreeItem(): vscode.TreeItem | Promise<vscode.TreeItem> {
		let item = new vscode.TreeItem(this.getDisplayName(), vscode.TreeItemCollapsibleState.Collapsed);
		// For now, a folder always looks the same. We're using SQL icons to differentiate remote vs local files
		item.iconPath = {
			dark: this.context.extensionContext.asAbsolutePath('resources/light/Folder.svg'),
			light: this.context.extensionContext.asAbsolutePath('resources/light/Folder.svg')
		};
		item.contextValue = this._nodeType;
		return item;
	}

	getNodeInfo(): azdata.NodeInfo {
		// TODO handle error message case by returning it in the OE API
		// TODO support better mapping of node type
		let nodeInfo: azdata.NodeInfo = {
			label: this.getDisplayName(),
			isLeaf: false,
			errorMessage: undefined,
			metadata: undefined,
			nodePath: this.generateNodePath(),
			nodeStatus: undefined,
			nodeType: this._nodeType,
			nodeSubType: this.getSubType(),
			iconType: this.isMounted() ? 'Folder_mounted' : 'Folder'
		};
		return nodeInfo;
	}

	private getSubType(): string | undefined {
		if (this.mountStatus === MountStatus.Mount) {
			return Constants.MssqlClusterItemsSubType.Mount;
		} else if (this.mountStatus === MountStatus.Mount_Child) {
			return Constants.MssqlClusterItemsSubType.MountChild;
		}

		return undefined;
	}

	public async writeFile(localFile: IFile): Promise<FileNode> {
		return this.runChildAddAction<FileNode>(() => this.writeFileAsync(localFile));
	}

	private async writeFileAsync(localFile: IFile): Promise<FileNode> {
		const fileSource = await this.getFileSource();
		await fileSource.writeFile(localFile, this._path);
		let fileNode = new FileNode(this.context, File.createPath(this._path, File.getBasename(localFile)), fileSource);
		return fileNode;
	}

	public async mkdir(name: string): Promise<FolderNode> {
		return this.runChildAddAction<FolderNode>(() => this.mkdirAsync(name));
	}

	private async mkdirAsync(name: string): Promise<FolderNode> {
		const fileSource = await this.getFileSource();
		await fileSource.mkdir(name, this._path);
		let subDir = new FolderNode(this.context, File.createPath(this._path, name), fileSource);
		return subDir;
	}

	private async runChildAddAction<T extends TreeNode>(action: () => Promise<T>): Promise<T> {
		let node = await action();
		await this.getChildren(true);
		if (this.children) {
			// Find the child matching the node. This is necessary
			// since writing can add duplicates.
			node = this.children.find(n => n.nodePathValue === node.nodePathValue) as T;
			this.context.changeHandler.notifyNodeChanged(this);
		} else {
			// Failed to retrieve children from the server, so something went wrong
			node = undefined;
		}
		return node;
	}
}

export class ConnectionNode extends FolderNode {

	constructor(context: TreeDataContext, private displayName: string, private clusterSession: SqlClusterSession) {
		super(context, '/', undefined, Constants.MssqlClusterItems.Connection);
	}

	override getDisplayName(): string {
		return this.displayName;
	}

	public override async delete(): Promise<void> {
		throw new Error(localize('errDeleteConnectionNode', "Cannot delete a connection. Only subfolders and files can be deleted."));
	}

	override async getTreeItem(): Promise<vscode.TreeItem> {
		let item = await super.getTreeItem();
		item.contextValue = this._nodeType;
		return item;
	}

	public override async getFileSource(): Promise<IFileSource | undefined> {
		// The node is initially created without a file source; one is created only once an action
		// that requires a connection is taken
		const fileSource = await super.getFileSource();
		if (!fileSource) {
			await this.updateFileSource(await this.clusterSession.getSqlClusterConnection());
		}
		return super.getFileSource();
	}

	override getNodeInfo(): azdata.NodeInfo {
		// TODO handle error message case by returning it in the OE API
		// TODO support better mapping of node type
		let nodeInfo: azdata.NodeInfo = {
			label: this.getDisplayName(),
			isLeaf: false,
			errorMessage: undefined,
			metadata: undefined,
			nodePath: this.generateNodePath(),
			nodeStatus: undefined,
			nodeType: 'mssqlCluster:hdfs',
			nodeSubType: undefined,
			iconType: 'Folder'
		};
		return nodeInfo;
	}
}

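/**
 * Leaf node representing a single HDFS file. Supports reading the contents as a string (with
 * optional byte or line limits) and streaming them to a local file with cancellation support.
 */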
export class FileNode extends HdfsFileSourceNode implements IFileNode {

	constructor(context: TreeDataContext, path: string, fileSource: IFileSource, mountStatus?: MountStatus) {
		super(context, path, fileSource, mountStatus);
	}

	public onChildRemoved(): void {
		// do nothing
	}

	getChildren(refreshChildren: boolean): TreeNode[] | Promise<TreeNode[]> {
		return [];
	}

	getTreeItem(): vscode.TreeItem | Promise<vscode.TreeItem> {
		let item = new vscode.TreeItem(this.getDisplayName(), vscode.TreeItemCollapsibleState.None);
		item.iconPath = {
			dark: this.context.extensionContext.asAbsolutePath('resources/dark/file_inverse.svg'),
			light: this.context.extensionContext.asAbsolutePath('resources/light/file.svg')
		};
		item.contextValue = Constants.MssqlClusterItems.File;
		return item;
	}

	getNodeInfo(): azdata.NodeInfo {
		// TODO improve node type handling so it's not tied to SQL Server types
		let nodeInfo: azdata.NodeInfo = {
			label: this.getDisplayName(),
			isLeaf: true,
			errorMessage: undefined,
			metadata: undefined,
			nodePath: this.generateNodePath(),
			nodeStatus: undefined,
			nodeType: Constants.MssqlClusterItems.File,
			nodeSubType: this.getSubType(),
			iconType: this.isMounted() ? 'FileGroupFile_mounted' : 'FileGroupFile'
		};
		return nodeInfo;
	}

	public async getFileContentsAsString(maxBytes?: number): Promise<string> {
		const fileSource = await this.getFileSource();
		let contents: Buffer = await fileSource.readFile(this.hdfsPath, maxBytes);
		return contents ? contents.toString('utf8') : '';
	}

	public async getFileLinesAsString(maxLines: number): Promise<string> {
		const fileSource = await this.getFileSource();
		let contents: Buffer = await fileSource.readFileLines(this.hdfsPath, maxLines);
		return contents ? contents.toString('utf8') : '';
	}

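	/**
	 * Streams the file from HDFS to the given local path. The read stream is piped through a
	 * CancelableStream so the copy can be aborted via the cancellation token; the promise
	 * resolves with the local file URI once the write stream finishes.
	 */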
	public async writeFileContentsToDisk(localPath: string, cancelToken?: vscode.CancellationTokenSource): Promise<vscode.Uri> {
		const fileSource = await this.getFileSource();
		return new Promise((resolve, reject) => {
			let readStream: fs.ReadStream = fileSource.createReadStream(this.hdfsPath);
			readStream.on('error', (err) => {
				reject(err);
			});

			let error: string | Error = undefined;
			let writeStream = fs.createWriteStream(localPath, {
				encoding: 'utf8'
			});
			writeStream.on('error', (err) => {
				error = err;
				reject(error);
			});
			writeStream.on('finish', () => {
				if (!error) {
					resolve(vscode.Uri.file(localPath));
				}
			});

			let cancelable = new CancelableStream(cancelToken);
			cancelable.on('error', (err) => {
				reject(err);
			});

			readStream.pipe(cancelable).pipe(writeStream);
		});
	}

	private getSubType(): string | undefined {
		let subType = '';
		if (this.getDisplayName().toLowerCase().endsWith('.jar') || this.getDisplayName().toLowerCase().endsWith('.py')) {
			subType += Constants.MssqlClusterItemsSubType.Spark;
		} else if (this.mountStatus === MountStatus.Mount_Child) {
			subType += Constants.MssqlClusterItemsSubType.MountChild;
		}

		return subType.length > 0 ? subType : undefined;
	}
}

class ErrorNode extends TreeNode {
	static messageNum: number = 0;

	private _nodePathValue: string;

	constructor(private message: string) {
		super(undefined);
	}

	public static create(message: string, parent: TreeNode, errorCode?: number): ErrorNode {
		let node = new ErrorNode(message);
		node.parent = parent;
		if (errorCode) {
			node.errorStatusCode = errorCode;
		}
		return node;
	}

	private ensureNodePathValue(): void {
		if (!this._nodePathValue) {
			this._nodePathValue = `message_${ErrorNode.messageNum++}`;
		}
	}

	public get nodePathValue(): string {
		this.ensureNodePathValue();
		return this._nodePathValue;
	}

	public getChildren(refreshChildren: boolean): TreeNode[] | Promise<TreeNode[]> {
		return [];
	}

	public getTreeItem(): vscode.TreeItem | Promise<vscode.TreeItem> {
		let item = new vscode.TreeItem(this.message, vscode.TreeItemCollapsibleState.None);
		item.contextValue = Constants.MssqlClusterItems.Error;
		return item;
	}

	getNodeInfo(): azdata.NodeInfo {
		let nodeInfo: azdata.NodeInfo = {
			label: this.message,
			isLeaf: false,
			errorMessage: undefined,
			metadata: undefined,
			nodePath: this.generateNodePath(),
			nodeStatus: undefined,
			nodeType: Constants.MssqlClusterItems.Error,
			nodeSubType: undefined,
			iconType: 'MessageType'
		};
		return nodeInfo;
	}
}
@@ -1,328 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as vscode from 'vscode';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

import { ProviderBase } from './providerBase';
import { SqlClusterConnection } from './connection';
import * as utils from '../utils';
import { TreeNode } from './treeNodes';
import { ConnectionNode, TreeDataContext, ITreeChangeHandler } from './hdfsProvider';
import { AppContext } from '../appContext';
import * as constants from '../constants';
import { ICommandObjectExplorerContext } from './command';
import { IPrompter, IQuestion, QuestionTypes } from '../prompts/question';
import { getSqlClusterConnectionParams } from '../sqlClusterLookUp';

export const mssqlOutputChannel = vscode.window.createOutputChannel(constants.providerId);

export class MssqlObjectExplorerNodeProvider extends ProviderBase implements azdata.ObjectExplorerNodeProvider, ITreeChangeHandler {
	public readonly supportedProviderId: string = constants.providerId;
	private clusterSessionMap: Map<string, SqlClusterSession>;
	private expandCompleteEmitter = new vscode.EventEmitter<azdata.ObjectExplorerExpandInfo>();

	constructor(private prompter: IPrompter, private appContext: AppContext) {
		super();
		this.clusterSessionMap = new Map<string, SqlClusterSession>();
		this.appContext.registerService<MssqlObjectExplorerNodeProvider>(constants.ObjectExplorerService, this);
	}

	handleSessionOpen(session: azdata.ObjectExplorerSession): Thenable<boolean> {
		return new Promise((resolve, reject) => {
			if (!session) {
				reject('handleSessionOpen requires a session object to be passed');
			} else {
				resolve(this.doSessionOpen(session));
			}
		});
	}

	private async doSessionOpen(session: azdata.ObjectExplorerSession): Promise<boolean> {
		if (!session || !session.sessionId) { return false; }

		let sqlConnProfile = await azdata.objectexplorer.getSessionConnectionProfile(session.sessionId);
		if (!sqlConnProfile) { return false; }

		const isBigDataCluster = await utils.isBigDataCluster(sqlConnProfile.id);
		if (!isBigDataCluster) { return false; }

		let clusterSession = new SqlClusterSession(session, sqlConnProfile, this.appContext, this);
		this.clusterSessionMap.set(session.sessionId, clusterSession);
		return true;
	}

	expandNode(nodeInfo: azdata.ExpandNodeInfo, isRefresh: boolean = false): Thenable<boolean> {
		return new Promise((resolve, reject) => {
			if (!nodeInfo) {
				reject('expandNode requires a nodeInfo object to be passed');
			} else {
				resolve(this.doExpandNode(nodeInfo, isRefresh));
			}
		});
	}

	private async doExpandNode(nodeInfo: azdata.ExpandNodeInfo, isRefresh: boolean = false): Promise<boolean> {
		let session = this.clusterSessionMap.get(nodeInfo.sessionId);
		let response: azdata.ObjectExplorerExpandInfo = {
			sessionId: nodeInfo.sessionId,
			nodePath: nodeInfo.nodePath,
			errorMessage: undefined,
			nodes: []
		};

		if (!session) {
			// This is not an error case. Just fire the response with empty nodes, for example for a request from a standalone SQL instance
			this.expandCompleteEmitter.fire(response);
			return false;
		} else {
			setTimeout(() => {
				// Running after promise resolution as we need the ADS-side map to have been updated.
				// Intentionally not awaiting or catching errors:
				// any failure in startExpansion should be emitted in the expand complete result.
				// We want this to be async and ideally return true before it completes
				this.startExpansion(session, nodeInfo, isRefresh).catch(err => console.log('Error expanding Object Explorer Node ', err));
			}, 10);
		}
		return true;
	}

	private hasExpansionError(children: TreeNode[]): boolean {
		if (children.find(c => c.errorStatusCode > 0)) {
			return true;
		}
		return false;
	}

	private async startExpansion(session: SqlClusterSession, nodeInfo: azdata.ExpandNodeInfo, isRefresh: boolean = false): Promise<void> {
		let expandResult: azdata.ObjectExplorerExpandInfo = {
			sessionId: session.sessionId,
			nodePath: nodeInfo.nodePath,
			errorMessage: undefined,
			nodes: []
		};
		try {
			let node = await session.rootNode.findNodeByPath(nodeInfo.nodePath, true);
			if (node) {
				expandResult.errorMessage = node.getNodeInfo().errorMessage;
				let children = await node.getChildren(true);
				if (children && children.length > 0) {
					// Only a single child is returned when a failure happens. If the failure was an 'Unauthorized' error, prompt for credentials.
					if (children.length === 1 && this.hasExpansionError(children)) {
						if (children[0].errorStatusCode === 401) {
							const sqlClusterConnection = await session.getSqlClusterConnection();
							// First prompt for the username (defaulting to the existing username)
							let username = await this.prompter.promptSingle<string>(<IQuestion>{
								type: QuestionTypes.input,
								name: 'inputPrompt',
								message: localize('promptUsername', "Please provide the username to connect to HDFS:"),
								default: sqlClusterConnection.user
							});
							// Only update the username if it's different than the original (the update functions ignore falsy values)
							if (username === sqlClusterConnection.user) {
								username = '';
							}
							sqlClusterConnection.updateUsername(username);

							// And then prompt for the password
							const password = await this.prompter.promptSingle<string>(<IQuestion>{
								type: QuestionTypes.password,
								name: 'passwordPrompt',
								message: localize('prmptPwd', "Please provide the password to connect to HDFS:"),
								default: ''
							});
							sqlClusterConnection.updatePassword(password);

							if (username || password) {
								await node.updateFileSource(sqlClusterConnection);
								children = await node.getChildren(true);
							}
						}
					}

					expandResult.nodes = children.map(c => c.getNodeInfo());
					if (children.length === 1 && this.hasExpansionError(children)) {
						let child = children[0].getNodeInfo();
						expandResult.errorMessage = child ? child.label : 'Unknown Error';
						expandResult.nodes = [];
					}
				}
			}
		} catch (error) {
			expandResult.errorMessage = utils.getErrorMessage(error);
		}
		this.expandCompleteEmitter.fire(expandResult);
	}

	refreshNode(nodeInfo: azdata.ExpandNodeInfo): Thenable<boolean> {
		// TODO #3815 implement properly
		return this.expandNode(nodeInfo, true);
	}

	handleSessionClose(closeSessionInfo: azdata.ObjectExplorerCloseSessionInfo): void {
		this.clusterSessionMap.delete(closeSessionInfo.sessionId);
	}

	findNodes(findNodesInfo: azdata.FindNodesInfo): Thenable<azdata.ObjectExplorerFindNodesResponse> {
		// TODO #3814 implement
		let response: azdata.ObjectExplorerFindNodesResponse = {
			nodes: []
		};
		return Promise.resolve(response);
	}

	registerOnExpandCompleted(handler: (response: azdata.ObjectExplorerExpandInfo) => any): void {
		this.expandCompleteEmitter.event(handler);
	}

	notifyNodeChanged(node: TreeNode): void {
		void this.notifyNodeChangesAsync(node);
	}

	private async notifyNodeChangesAsync(node: TreeNode): Promise<void> {
		try {
			let session = this.getSqlClusterSessionForNode(node);
			if (!session) {
				void vscode.window.showErrorMessage(localize('sessionNotFound', "Session for node {0} does not exist", node.nodePathValue));
			} else {
				let nodeInfo = node.getNodeInfo();
				let expandInfo: azdata.ExpandNodeInfo = {
					nodePath: nodeInfo.nodePath,
					sessionId: session.sessionId
				};
				await this.refreshNode(expandInfo);
			}
		} catch (err) {
			mssqlOutputChannel.appendLine(localize('notifyError', "Error notifying of node change: {0}", err));
		}
	}

	private getSqlClusterSessionForNode(node: TreeNode): SqlClusterSession {
		let sqlClusterSession: SqlClusterSession = undefined;
		while (node !== undefined) {
			if (node instanceof SqlClusterRootNode) {
				sqlClusterSession = node.session;
				break;
			} else {
				node = node.parent;
			}
		}
		return sqlClusterSession;
	}

	async findSqlClusterNodeByContext<T extends TreeNode>(context: ICommandObjectExplorerContext | azdata.ObjectExplorerContext): Promise<T> {
		let node: T = undefined;
		let explorerContext = 'explorerContext' in context ? context.explorerContext : context;
		let sqlConnProfile = explorerContext.connectionProfile;
		let session = this.findSqlClusterSessionBySqlConnProfile(sqlConnProfile);
		if (session) {
			if (explorerContext.isConnectionNode) {
				// Note: ideally fix so we verify T matches RootNode and go from there
				node = <T><any>session.rootNode;
			} else {
				// Find the node under the session
				node = <T><any>await session.rootNode.findNodeByPath(explorerContext.nodeInfo.nodePath, true);
			}
		}
		return node;
	}

	public findSqlClusterSessionBySqlConnProfile(connectionProfile: azdata.IConnectionProfile): SqlClusterSession | undefined {
		for (let session of this.clusterSessionMap.values()) {
			if (session.isMatchedSqlConnection(connectionProfile)) {
				return session;
			}
		}
		return undefined;
	}
}

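/**
 * Tracks the Object Explorer session opened for a Big Data Cluster connection. The underlying
 * SqlClusterConnection is created lazily on first use via getSqlClusterConnection().
 */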
export class SqlClusterSession {
	private _rootNode: SqlClusterRootNode;
	private _sqlClusterConnection: SqlClusterConnection | undefined = undefined;

	constructor(
		private _sqlSession: azdata.ObjectExplorerSession,
		private _sqlConnectionProfile: azdata.IConnectionProfile,
		private _appContext: AppContext,
		private _changeHandler: ITreeChangeHandler
	) {
		this._rootNode = new SqlClusterRootNode(this,
			new TreeDataContext(this._appContext.extensionContext, this._changeHandler),
			this._sqlSession.rootNode.nodePath);
	}

	public async getSqlClusterConnection(): Promise<SqlClusterConnection> {
		if (!this._sqlClusterConnection) {
			const sqlClusterConnectionParams = await getSqlClusterConnectionParams(this._sqlConnectionProfile, this._appContext);
			this._sqlClusterConnection = new SqlClusterConnection(sqlClusterConnectionParams);
		}
		return this._sqlClusterConnection;
	}

	public get sqlSession(): azdata.ObjectExplorerSession { return this._sqlSession; }
	public get sqlConnectionProfile(): azdata.IConnectionProfile { return this._sqlConnectionProfile; }
	public get sessionId(): string { return this._sqlSession.sessionId; }
	public get rootNode(): SqlClusterRootNode { return this._rootNode; }

	public isMatchedSqlConnection(sqlConnProfile: azdata.IConnectionProfile): boolean {
		return this._sqlConnectionProfile.id === sqlConnProfile.id;
	}
}

class SqlClusterRootNode extends TreeNode {
	private _children: TreeNode[];

	constructor(
		private _session: SqlClusterSession,
		private _treeDataContext: TreeDataContext,
		private _nodePathValue: string
	) {
		super(undefined);
	}

	public get session(): SqlClusterSession {
		return this._session;
	}

	public get nodePathValue(): string {
		return this._nodePathValue;
	}

	public getChildren(refreshChildren: boolean): TreeNode[] | Promise<TreeNode[]> {
		if (refreshChildren || !this._children) {
			return this.refreshChildren();
		}
		return this._children;
	}

	private async refreshChildren(): Promise<TreeNode[]> {
		this._children = [];

		let hdfsNode = new ConnectionNode(this._treeDataContext, localize('hdfsFolder', "HDFS"), this.session);
		hdfsNode.parent = this;
		this._children.push(hdfsNode);
		return this._children;
	}

	getTreeItem(): vscode.TreeItem | Promise<vscode.TreeItem> {
		throw new Error('Not intended for use in a file explorer view.');
	}

	getNodeInfo(): azdata.NodeInfo {
		let nodeInfo: azdata.NodeInfo = {
			label: localize('rootLabel', "Root"),
			isLeaf: false,
			errorMessage: undefined,
			metadata: undefined,
			nodePath: this.generateNodePath(),
			nodeStatus: undefined,
			nodeType: 'sqlCluster:root',
			nodeSubType: undefined,
			iconType: 'folder'
		};
		return nodeInfo;
	}
}
@@ -1,11 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as constants from '../constants';

export abstract class ProviderBase {
	public readonly providerId: string = constants.mssqlClusterProviderName;
	public handle: number;
}
@@ -1,97 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as vscode from 'vscode';
import { ITreeNode } from './types';
import { IFileSource } from './fileSources';
import { SqlClusterConnection } from './connection';

type TreeNodePredicate = (node: TreeNode) => boolean;

export abstract class TreeNode implements ITreeNode {
	private _parent: TreeNode = undefined;
	private _errorStatusCode: number;

	constructor(private _fileSource: IFileSource | undefined) { }

	public get parent(): TreeNode {
		return this._parent;
	}

	public set parent(node: TreeNode) {
		this._parent = node;
	}

	public get errorStatusCode(): number {
		return this._errorStatusCode;
	}

	public set errorStatusCode(error: number) {
		this._errorStatusCode = error;
	}

	public generateNodePath(): string {
		let path = undefined;
		if (this.parent) {
			path = this.parent.generateNodePath();
		}
		path = path ? `${path}/${this.nodePathValue}` : this.nodePathValue;
		return path;
	}

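	/**
	 * Finds the node whose node path matches the given path, using a depth-first search that
	 * only descends into children whose node path is a prefix of the target path.
	 */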
	public findNodeByPath(path: string, expandIfNeeded: boolean = false): Promise<TreeNode> {
		let condition: TreeNodePredicate = (node: TreeNode) => node.getNodeInfo().nodePath === path || node.getNodeInfo().nodePath.startsWith(path);
		let filter: TreeNodePredicate = (node: TreeNode) => path.startsWith(node.getNodeInfo().nodePath);
		return TreeNode.findNode(this, condition, filter, true);
	}

	public static async findNode(node: TreeNode, condition: TreeNodePredicate, filter: TreeNodePredicate, expandIfNeeded: boolean): Promise<TreeNode> {
		if (!node) {
			return undefined;
		}

		if (condition(node)) {
			return node;
		}

		let nodeInfo = node.getNodeInfo();
		if (nodeInfo.isLeaf) {
			return undefined;
		}

		// TODO #3813 support filtering by already expanded / not yet expanded
		let children = await node.getChildren(false);
		if (children) {
			for (let child of children) {
				if (filter && filter(child)) {
					let childNode = await this.findNode(child, condition, filter, expandIfNeeded);
					if (childNode) {
						return childNode;
					}
				}
			}
		}
		return undefined;
	}

	public async updateFileSource(connection: SqlClusterConnection): Promise<void> {
		this._fileSource = await connection.createHdfsFileSource();
	}

	public async getFileSource(): Promise<IFileSource | undefined> {
		return this._fileSource;
	}

	/**
	 * The value to use for this node in the node path
	 */
	public abstract get nodePathValue(): string;

	abstract getChildren(refreshChildren: boolean): TreeNode[] | Promise<TreeNode[]>;
	abstract getTreeItem(): vscode.TreeItem | Promise<vscode.TreeItem>;

	abstract getNodeInfo(): azdata.NodeInfo;
}
@@ -1,28 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as vscode from 'vscode';
import * as azdata from 'azdata';

/**
 * A tree node in the object explorer tree
 *
 * @export
 */
export interface ITreeNode {
	getNodeInfo(): azdata.NodeInfo;
	getChildren(refreshChildren: boolean): ITreeNode[] | Promise<ITreeNode[]>;
}

/**
 * An HDFS file node. This is a leaf node in the object explorer tree, and its contents
 * can be queried
 *
 * @export
 * @extends {ITreeNode}
 */
export interface IFileNode extends ITreeNode {
	getFileContentsAsString(maxBytes?: number): Promise<string>;
}
@@ -1,70 +0,0 @@
// This code is originally from https://github.com/DonJayamanne/bowerVSCode
// License: https://github.com/DonJayamanne/bowerVSCode/blob/master/LICENSE

import { window } from 'vscode';
import PromptFactory from './factory';
import EscapeException from '../escapeException';
import { IQuestion, IPrompter } from './question';

// Supports a simple pattern for prompting for user input and acting on it
export default class CodeAdapter implements IPrompter {

	// TODO define question interface
	private fixQuestion(question: IQuestion): any {
		if (question.type === 'checkbox' && Array.isArray(question.choices)) {
			// For some reason when there's a choice of checkboxes, they aren't formatted properly.
			// Not sure where the issue is
			question.choices = question.choices.map(item => {
				if (typeof (item) === 'string') {
					return { checked: false, name: item, value: item };
				} else {
					return item;
				}
			});
		}
	}

	public promptSingle<T>(question: IQuestion, ignoreFocusOut?: boolean): Promise<T> {
		let questions: IQuestion[] = [question];
		return this.prompt(questions, ignoreFocusOut).then((answers: { [key: string]: T }) => {
			if (answers) {
				let response: T = answers[question.name];
				return response || undefined;
			}
			return undefined;
		});
	}

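	// Chains the questions into sequential prompt steps via a promise reduce, accumulating
	// answers keyed by question name. An escape (or TypeError) cancels the whole session
	// and resolves to undefined.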
	public prompt<T>(questions: IQuestion[], ignoreFocusOut?: boolean): Promise<{ [key: string]: T }> {
		let answers: { [key: string]: T } = {};

		// Collapse multiple questions into a set of prompt steps
		let promptResult: Promise<{ [key: string]: T }> = questions.reduce((promise: Promise<{ [key: string]: T }>, question: IQuestion) => {
			this.fixQuestion(question);

			return promise.then(() => {
				return PromptFactory.createPrompt(question, ignoreFocusOut);
			}).then(prompt => {
				if (!question.shouldPrompt || question.shouldPrompt(answers) === true) {
					return prompt.render().then((result: T) => {
						answers[question.name] = result;

						if (question.onAnswered) {
							question.onAnswered(result);
						}
						return answers;
					});
				}
				return answers;
			});
		}, Promise.resolve());

		return promptResult.catch(err => {
			if (err instanceof EscapeException || err instanceof TypeError) {
				return undefined;
			}

			void window.showErrorMessage(err.message);
		});
	}
}
@@ -1,34 +0,0 @@
// This code is originally from https://github.com/DonJayamanne/bowerVSCode
// License: https://github.com/DonJayamanne/bowerVSCode/blob/master/LICENSE

import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

import { window } from 'vscode';
import Prompt from './prompt';
import EscapeException from '../escapeException';

export default class ConfirmPrompt extends Prompt {

	constructor(question: any, ignoreFocusOut?: boolean) {
		super(question, ignoreFocusOut);
	}

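	// Renders a Yes/No quick pick and maps the selection to a boolean; dismissing the
	// quick pick throws an EscapeException to cancel the prompt session.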
	public render(): any {
		let choices: { [id: string]: boolean } = {};
		choices[localize('msgYes', 'Yes')] = true;
		choices[localize('msgNo', 'No')] = false;

		let options = this.defaultQuickPickOptions;
		options.placeHolder = this._question.message;

		return window.showQuickPick(Object.keys(choices), options)
			.then(result => {
				if (result === undefined) {
					throw new EscapeException();
				}

				return choices[result] || false;
			});
	}
}
@@ -1,24 +0,0 @@
// This code is originally from https://github.com/DonJayamanne/bowerVSCode
// License: https://github.com/DonJayamanne/bowerVSCode/blob/master/LICENSE

import Prompt from './prompt';
import InputPrompt from './input';
import PasswordPrompt from './password';
import ConfirmPrompt from './confirm';
import { IQuestion } from './question';

export default class PromptFactory {

	public static createPrompt(question: IQuestion, ignoreFocusOut?: boolean): Prompt {
		switch (question.type) {
			case 'input':
				return new InputPrompt(question, ignoreFocusOut);
			case 'password':
				return new PasswordPrompt(question, ignoreFocusOut);
			case 'confirm':
				return new ConfirmPrompt(question, ignoreFocusOut);
			default:
				throw new Error(`Could not find a prompt for question type ${question.type}`);
		}
	}
}
@@ -1,57 +0,0 @@
// This code is originally from https://github.com/DonJayamanne/bowerVSCode
// License: https://github.com/DonJayamanne/bowerVSCode/blob/master/LICENSE

import { window, InputBoxOptions } from 'vscode';
import Prompt from './prompt';
import EscapeException from '../escapeException';

const figures = require('figures');

export default class InputPrompt extends Prompt {

	protected _options: InputBoxOptions;

	constructor(question: any, ignoreFocusOut?: boolean) {
		super(question, ignoreFocusOut);

		this._options = this.defaultInputBoxOptions;
		this._options.prompt = this._question.message;
	}

	// Helper for callers to know the right type to get from the type factory
	public static get promptType(): string { return 'input'; }

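	// Shows the input box; if validation fails, the warning is surfaced as the next
	// placeholder (via a temporary Error default) and the prompt re-renders until the
	// input passes or the user escapes.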
	public render(): any {
		// Prefer default over the placeHolder, if specified
		let placeHolder = this._question.default ? this._question.default : this._question.placeHolder;

		if (this._question.default instanceof Error) {
			placeHolder = this._question.default.message;
			this._question.default = undefined;
		}

		this._options.placeHolder = placeHolder;

		return window.showInputBox(this._options)
			.then(result => {
				if (result === undefined) {
					throw new EscapeException();
				}

				if (result === '') {
					// Use the default value, if defined
					result = this._question.default || '';
				}

				const validationError = this._question.validate ? this._question.validate(result || '') : undefined;

				if (validationError) {
					this._question.default = new Error(`${figures.warning} ${validationError}`);

					return this.render();
				}

				return result;
			});
	}
}
@@ -1,13 +0,0 @@
// This code is originally from https://github.com/DonJayamanne/bowerVSCode
// License: https://github.com/DonJayamanne/bowerVSCode/blob/master/LICENSE

import InputPrompt from './input';

export default class PasswordPrompt extends InputPrompt {

	constructor(question: any, ignoreFocusOut?: boolean) {
		super(question, ignoreFocusOut);

		this._options.password = true;
	}
}
@@ -1,32 +0,0 @@
// This code is originally from https://github.com/DonJayamanne/bowerVSCode
// License: https://github.com/DonJayamanne/bowerVSCode/blob/master/LICENSE

import { InputBoxOptions, QuickPickOptions } from 'vscode';
import { IQuestion } from './question';

abstract class Prompt {

	protected _question: IQuestion;
	protected _ignoreFocusOut?: boolean;

	constructor(question: IQuestion, ignoreFocusOut?: boolean) {
		this._question = question;
		this._ignoreFocusOut = ignoreFocusOut ? ignoreFocusOut : false;
	}

	public abstract render(): any;

	protected get defaultQuickPickOptions(): QuickPickOptions {
		return {
			ignoreFocusOut: this._ignoreFocusOut
		};
	}

	protected get defaultInputBoxOptions(): InputBoxOptions {
		return {
			ignoreFocusOut: this._ignoreFocusOut
		};
	}
}

export default Prompt;
@@ -1,54 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as vscode from 'vscode';

export class QuestionTypes {
	public static get input(): string { return 'input'; }
	public static get password(): string { return 'password'; }
	public static get confirm(): string { return 'confirm'; }
}

// Question interface to clarify how to use the prompt feature,
// based on the Bower question format: https://github.com/bower/bower/blob/89069784bb46bfd6639b4a75e98a0d7399a8c2cb/packages/bower-logger/README.md
export interface IQuestion {
	// Type of question (see QuestionTypes)
	type: string;
	// Name of the question for disambiguation
	name: string;
	// Message to display to the user
	message: string;
	// Optional placeHolder to give more detailed information to the user
	placeHolder?: any;
	// Optional default value - this will be used instead of placeHolder
	default?: any;
	// Optional set of choices to be used. Can be QuickPickItems or a simple name-value pair
	choices?: Array<vscode.QuickPickItem | INameValueChoice>;
	// Optional validation function that returns an error string if validation fails
	validate?: (value: any) => string;
	// Optional pre-prompt function. Takes in the set of answers so far, and returns true if the prompt should occur
	shouldPrompt?: (answers: { [id: string]: any }) => boolean;
	// Optional action to take on the question being answered
	onAnswered?: (value: any) => void;
	// Optional set of options to support matching choices.
	matchOptions?: vscode.QuickPickOptions;
}

// Pair used to display simple choices to the user
interface INameValueChoice {
	name: string;
	value: any;
}

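// Example usage, mirroring how the object explorer provider prompts for HDFS credentials:
//
//   const username = await prompter.promptSingle<string>(<IQuestion>{
//       type: QuestionTypes.input,
//       name: 'inputPrompt',
//       message: 'Please provide the username to connect to HDFS:',
//       default: connection.user
//   });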
export interface IPrompter {
	promptSingle<T>(question: IQuestion, ignoreFocusOut?: boolean): Promise<T>;
	/**
	 * Prompts for multiple questions
	 *
	 * @returns Map of question IDs to results, or undefined if
	 * the user canceled the question session
	 */
	prompt(questions: IQuestion[], ignoreFocusOut?: boolean): Promise<{ [questionId: string]: any }>;
}
@@ -1,172 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as nls from 'vscode-nls';
import * as vscode from 'vscode';
const localize = nls.loadMessageBundle();

import { ICommandViewContext, Command, ICommandObjectExplorerContext, ICommandUnknownContext } from '../../objectExplorerNodeProvider/command';
import { SparkJobSubmissionDialog } from './sparkJobSubmission/sparkJobSubmissionDialog';
import { AppContext } from '../../appContext';
import { getErrorMessage } from '../../utils';
import * as constants from '../../constants';
import { HdfsFileSourceNode } from '../../objectExplorerNodeProvider/hdfsProvider';
import { getNode } from '../../objectExplorerNodeProvider/hdfsCommands';
import * as LocalizedConstants from '../../localizedConstants';
import * as SqlClusterLookUp from '../../sqlClusterLookUp';
import { SqlClusterConnection } from '../../objectExplorerNodeProvider/connection';

interface MssqlOptions {
	server: string;
}

const timeout = (millis: number) => new Promise(c => setTimeout(c, millis));

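/**
 * Command that opens the Spark job submission dialog. When invoked from Object Explorer the
 * existing cluster connection is reused; otherwise the user picks from the active SQL
 * connections (or the connection dialog) and the choice is resolved to a SqlClusterConnection.
 */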
export class OpenSparkJobSubmissionDialogCommand extends Command {
	constructor(appContext: AppContext, private outputChannel: vscode.OutputChannel) {
		super(constants.mssqlClusterLivySubmitSparkJobCommand, appContext);
	}

	protected override async preExecute(context: ICommandUnknownContext | ICommandObjectExplorerContext, args: object = {}): Promise<any> {
		return this.execute(context, args);
	}

	async execute(context: ICommandUnknownContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
		try {
			let sqlClusterConnection: SqlClusterConnection = undefined;
			if (context.type === constants.ObjectExplorerService) {
				sqlClusterConnection = await SqlClusterLookUp.findSqlClusterConnection(context, this.appContext);
			}
			if (!sqlClusterConnection) {
				sqlClusterConnection = await this.selectConnection();
			}

			let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
			await dialog.openDialog();
		} catch (error) {
			void vscode.window.showErrorMessage(getErrorMessage(error));
		}
	}

	private async selectConnection(): Promise<SqlClusterConnection> {
		let connectionList: azdata.connection.Connection[] = await azdata.connection.getActiveConnections();
		let connectionMap: Map<string, azdata.connection.Connection> = new Map();
		let selectedHost: string = undefined;
		let showConnectionDialog = false;

		// Filter out invalid connections
		if (connectionList && connectionList.length > 0) {
			connectionList = connectionList.filter(conn => conn.providerName === constants.sqlProviderName && (<MssqlOptions><any>conn.options).server);
		}
		// Prompt for a choice if we have active connections
		if (connectionList && connectionList.length > 0) {
			let selectConnectionMsg = localize('selectOtherServer', "Select other SQL Server");
			let displayList: string[] = [];
			connectionList.forEach(conn => {
				let options: MssqlOptions = <any>conn.options;
				displayList.push(options.server);
				connectionMap.set(options.server, conn);
			});
			displayList.push(selectConnectionMsg);

			selectedHost = await vscode.window.showQuickPick(displayList, {
				placeHolder:
					localize('sparkJobSubmission.PleaseSelectSqlWithCluster',
						"Please select SQL Server with Big Data Cluster.")
			});
			if (selectedHost === selectConnectionMsg) {
				showConnectionDialog = true;
				selectedHost = undefined;
			}
		} else {
			showConnectionDialog = true;
		}

		// Show the connection dialog if we still don't have a server
		if (showConnectionDialog) {
			let connection = await azdata.connection.openConnectionDialog([constants.sqlProviderName]);
			if (connection) {
				let options: MssqlOptions = <any>connection.options;
				connectionMap.set(options.server, connection);
				selectedHost = options.server;
				// Wait an appropriate timeout so that the serverInfo object can populate...
				await timeout(150);
			}
		}

		let errorMsg = localize('sparkJobSubmission.NoSqlSelected', "No SQL Server is selected.");
		if (!selectedHost) { throw new Error(errorMsg); }

		let sqlConnection = connectionMap.get(selectedHost);
		if (!sqlConnection) { throw new Error(errorMsg); }

		let sqlClusterConnection = await SqlClusterLookUp.getSqlClusterConnectionParams(sqlConnection, this.appContext);
		if (!sqlClusterConnection) {
			throw new Error(localize('errorNotSqlBigDataCluster', "The selected server does not belong to a SQL Server Big Data Cluster"));
		}

		return new SqlClusterConnection(sqlClusterConnection);
	}
}

// Open the submission dialog for a specific file path.
export class OpenSparkJobSubmissionDialogFromFileCommand extends Command {
	constructor(appContext: AppContext, private outputChannel: vscode.OutputChannel) {
		super(constants.mssqlClusterLivySubmitSparkJobFromFileCommand, appContext);
	}

	protected override async preExecute(context: ICommandViewContext | ICommandObjectExplorerContext, args: object = {}): Promise<any> {
		return this.execute(context, args);
	}

	async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
		let path: string = undefined;
		try {
			let node = await getNode<HdfsFileSourceNode>(context, this.appContext);
			if (node && node.hdfsPath) {
				path = node.hdfsPath;
			} else {
				void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
				return;
			}
		} catch (err) {
			void vscode.window.showErrorMessage(localize('sparkJobSubmission.GetFilePathFromSelectedNodeFailed', "Error getting file path: {0}", err));
			return;
		}

		try {
			let sqlClusterConnection: SqlClusterConnection = undefined;
			if (context.type === constants.ObjectExplorerService) {
				sqlClusterConnection = await SqlClusterLookUp.findSqlClusterConnection(context, this.appContext);
			}
			if (!sqlClusterConnection) {
				throw new Error(LocalizedConstants.sparkJobSubmissionNoSqlBigDataClusterFound);
			}
			let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
			await dialog.openDialog(path);
		} catch (error) {
			void vscode.window.showErrorMessage(getErrorMessage(error));
		}
	}
}

export class OpenSparkJobSubmissionDialogTask {
	constructor(private appContext: AppContext, private outputChannel: vscode.OutputChannel) {
	}

	async execute(profile: azdata.IConnectionProfile, ...args: any[]): Promise<void> {
		try {
			let sqlClusterConnection = await SqlClusterLookUp.findSqlClusterConnection(profile, this.appContext);
			if (!sqlClusterConnection) {
				throw new Error(LocalizedConstants.sparkJobSubmissionNoSqlBigDataClusterFound);
			}
			let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
			await dialog.openDialog();
		} catch (error) {
			void vscode.window.showErrorMessage(getErrorMessage(error));
		}
	}
}
@@ -1,192 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

/**
 * Configuration values for the advanced tab of the Spark job submission dialog.
 * See https://livy.incubator.apache.org/docs/latest/rest-api.html for more information
 * on the specific values
 */
export interface SparkAdvancedConfigModel {
	jarFiles?: string,
	pyFiles?: string,
	otherFiles?: string,
	driverMemory?: string,
	driverCores?: number,
	executorMemory?: string,
	executeCores?: number,
	executorCount?: number,
	queueName?: string,
	configValues?: string
}

const baseFormItemLayout: azdata.FormItemLayout = {
	horizontal: false,
	componentWidth: '400px'
};

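/**
 * The ADVANCED tab of the Spark job submission dialog. Builds a form of input boxes for the
 * optional Livy batch parameters: reference files, driver/executor sizing, queue name, and
 * raw configuration values.
 */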
export class SparkAdvancedTab {
	private _tab: azdata.window.DialogTab;
	public get tab(): azdata.window.DialogTab { return this._tab; }

	private _referenceFilesInputBox: azdata.InputBoxComponent;
	private _referenceJARFilesInputBox: azdata.InputBoxComponent;
	private _referencePyFilesInputBox: azdata.InputBoxComponent;
	private _driverMemoryInputBox: azdata.InputBoxComponent;
	private _driverCoresInputBox: azdata.InputBoxComponent;
	private _executorMemoryInputBox: azdata.InputBoxComponent;
	private _executorCoresInputBox: azdata.InputBoxComponent;
	private _executorCountInputBox: azdata.InputBoxComponent;
	private _queueInputBox: azdata.InputBoxComponent;
	private _configValuesInputBox: azdata.InputBoxComponent;

	constructor() {
		this._tab = azdata.window.createTab(localize('sparkJobSubmission.AdvancedTabName', "ADVANCED"));

		this._tab.registerContent(async (modelView) => {
			let builder = modelView.modelBuilder;

			let formContainer = builder.formContainer();

			this._referenceJARFilesInputBox = builder.inputBox().component();
			formContainer.addFormItem(
				{
					component: this._referenceJARFilesInputBox,
					title: localize('sparkJobSubmission.ReferenceJarList', "Reference Jars")
				},
				{
					...baseFormItemLayout,
					info: localize('sparkJobSubmission.ReferenceJarListToolTip',
						"Jars to be placed in the executor working directory. The jar path needs to be an HDFS path. Multiple paths should be separated by semicolons (;)")
				});

			this._referencePyFilesInputBox = builder.inputBox().component();
			formContainer.addFormItem(
				{
					component: this._referencePyFilesInputBox,
					title: localize('sparkJobSubmission.ReferencePyList', "Reference py Files")
				},
				{
					...baseFormItemLayout,
					info: localize('sparkJobSubmission.ReferencePyListTooltip',
						"Py files to be placed in the executor working directory. The file path needs to be an HDFS path. Multiple paths should be separated by semicolons (;)")
				});

			this._referenceFilesInputBox = builder.inputBox().component();
			formContainer.addFormItem(
				{
					component: this._referenceFilesInputBox,
					title: localize('sparkJobSubmission.ReferenceFilesList', "Reference Files")
				},
				{
					...baseFormItemLayout,
					info: localize('sparkJobSubmission.ReferenceFilesListTooltip',
						"Files to be placed in the executor working directory. The file path needs to be an HDFS path. Multiple paths should be separated by semicolons (;)")
				});

			this._driverMemoryInputBox = builder.inputBox().component();
			formContainer.addFormItem(
				{
					component: this._driverMemoryInputBox,
					title: localize('sparkJobSubmission.driverMemory', "Driver Memory")
				},
				{
					...baseFormItemLayout,
					info: localize('sparkJobSubmission.driverMemoryTooltip', "Amount of memory to allocate to the driver. Specify units as part of the value, for example 512M or 2G.")
				});

			this._driverCoresInputBox = builder.inputBox()
				.withProps({ inputType: 'number', min: 1 })
				.component();
			formContainer.addFormItem(
				{
					component: this._driverCoresInputBox,
					title: localize('sparkJobSubmission.driverCores', "Driver Cores")
				},
				{
					...baseFormItemLayout,
					info: localize('sparkJobSubmission.driverCoresTooltip', "Number of CPU cores to allocate to the driver.")
				});

			this._executorMemoryInputBox = builder.inputBox().component();
			formContainer.addFormItem(
				{
					component: this._executorMemoryInputBox,
					title: localize('sparkJobSubmission.executorMemory', "Executor Memory")
				},
				{
					...baseFormItemLayout,
					info: localize('sparkJobSubmission.executorMemoryTooltip', "Amount of memory to allocate to the executor. Specify units as part of the value, for example 512M or 2G.")
				});

			this._executorCoresInputBox = builder.inputBox()
				.withProps({ inputType: 'number', min: 1 })
				.component();
			formContainer.addFormItem(
				{
					component: this._executorCoresInputBox,
					title: localize('sparkJobSubmission.executorCores', "Executor Cores")
				},
				{
					...baseFormItemLayout,
					info: localize('sparkJobSubmission.executorCoresTooltip', "Number of CPU cores to allocate to the executor.")
				});

			this._executorCountInputBox = builder.inputBox()
				.withProps({ inputType: 'number', min: 1 })
				.component();
			formContainer.addFormItem(
				{
					component: this._executorCountInputBox,
					title: localize('sparkJobSubmission.executorCount', "Executor Count")
|
||||
},
|
||||
{
|
||||
...baseFormItemLayout,
|
||||
info: localize('sparkJobSubmission.executorCountTooltip', "Number of instances of the executor to run.")
|
||||
});
|
||||
|
||||
this._queueInputBox = builder.inputBox().component();
|
||||
formContainer.addFormItem(
|
||||
{
|
||||
component: this._queueInputBox,
|
||||
title: localize('sparkJobSubmission.queueName', "Queue Name")
|
||||
},
|
||||
{
|
||||
...baseFormItemLayout,
|
||||
info: localize('sparkJobSubmission.queueNameTooltip', "Name of the Spark queue to execute the session in.")
|
||||
});
|
||||
|
||||
this._configValuesInputBox = builder.inputBox().component();
|
||||
formContainer.addFormItem(
|
||||
{
|
||||
component: this._configValuesInputBox,
|
||||
title: localize('sparkJobSubmission.configValues', "Configuration Values")
|
||||
},
|
||||
{
|
||||
...baseFormItemLayout,
|
||||
info: localize('sparkJobSubmission.configValuesTooltip', "List of name value pairs containing Spark configuration values. Encoded as JSON dictionary. Example: '{\"name\":\"value\", \"name2\":\"value2\"}'.")
|
||||
});
|
||||
|
||||
await modelView.initializeModel(formContainer.component());
|
||||
});
|
||||
}
|
||||
|
||||
public getAdvancedConfigValues(): SparkAdvancedConfigModel {
|
||||
return {
|
||||
jarFiles: this._referenceJARFilesInputBox.value,
|
||||
pyFiles: this._referencePyFilesInputBox.value,
|
||||
otherFiles: this._referenceFilesInputBox.value,
|
||||
driverMemory: this._driverMemoryInputBox.value,
|
||||
driverCores: +this._driverCoresInputBox.value,
|
||||
executorMemory: this._executorMemoryInputBox.value,
|
||||
executeCores: +this._executorCoresInputBox.value,
|
||||
executorCount: +this._executorCountInputBox.value,
|
||||
queueName: this._queueInputBox.value,
|
||||
configValues: this._configValuesInputBox.value
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,288 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as nls from 'vscode-nls';
import * as fspath from 'path';
import * as vscode from 'vscode';
import * as utils from '../../../utils';
import * as LocalizedConstants from '../../../localizedConstants';
import * as constants from '../../../constants';

import { SparkJobSubmissionModel } from './sparkJobSubmissionModel';
import { SparkFileSource } from './sparkJobSubmissionService';

const localize = nls.loadMessageBundle();

/**
 * Configuration values for the general tab of the spark job submission dialog.
 * See https://livy.incubator.apache.org/docs/latest/rest-api.html for more information
 * on the specific values.
 */
export interface SparkConfigModel {
	jobName: string,
	mainClass: string,
	arguments: string
}
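
// Illustrative only, not part of the original file: what this model might look like
// for a jar-based job (all values are hypothetical).
// const exampleConfig: SparkConfigModel = {
// 	jobName: 'mySampleJob.jar',
// 	mainClass: 'com.contoso.SampleApp',
// 	arguments: '--input /data/in --output /data/out'
// };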

const baseFormItemLayout: azdata.FormItemLayout = {
	horizontal: false,
	componentWidth: '400px'
};

export class SparkConfigurationTab {
	private _tab: azdata.window.DialogTab;
	public get tab(): azdata.window.DialogTab { return this._tab; }

	private _jobNameInputBox: azdata.InputBoxComponent;
	private _sparkContextLabel: azdata.TextComponent;
	private _fileSourceDropDown: azdata.DropDownComponent;
	private _sparkSourceFileInputBox: azdata.InputBoxComponent;
	private _filePickerButton: azdata.ButtonComponent;
	private _sourceFlexContainer: azdata.FlexContainer;
	private _sourceFlexContainerWithHint: azdata.FlexContainer;
	private _localUploadDestinationLabel: azdata.TextComponent;
	private _mainClassInputBox: azdata.InputBoxComponent;
	private _argumentsInputBox: azdata.InputBoxComponent;

	// If a path is specified, the default source setting for this tab is an HDFS file; otherwise it is a local file.
	constructor(private _dataModel: SparkJobSubmissionModel, private _path?: string) {
		this._tab = azdata.window.createTab(localize('sparkJobSubmission.GeneralTabName', "GENERAL"));

		this._tab.registerContent(async (modelView) => {
			let builder = modelView.modelBuilder;

			let formContainer = builder.formContainer();

			this._jobNameInputBox = builder.inputBox().withProps({
				placeHolder: localize('sparkJobSubmission.JobNamePlaceHolder', "Enter a name ..."),
				value: (this._path) ? fspath.basename(this._path) : ''
			}).component();

			formContainer.addFormItem({
				component: this._jobNameInputBox,
				title: localize('sparkJobSubmission.JobName', "Job Name"),
				required: true
			}, baseFormItemLayout);

			this._sparkContextLabel = builder.text().withProps({
				value: this._dataModel.getSparkClusterUrl()
			}).component();
			formContainer.addFormItem({
				component: this._sparkContextLabel,
				title: localize('sparkJobSubmission.SparkCluster', "Spark Cluster")
			}, baseFormItemLayout);

			this._fileSourceDropDown = builder.dropDown().withProps({
				values: [SparkFileSource.Local.toString(), SparkFileSource.HDFS.toString()],
				value: (this._path) ? SparkFileSource.HDFS.toString() : SparkFileSource.Local.toString()
			}).component();

			this._fileSourceDropDown.onValueChanged(async selection => {
				let isLocal = selection.selected === SparkFileSource.Local.toString();
				// Disable the browse button for the HDFS source.
				if (this._filePickerButton) {
					await this._filePickerButton.updateProperties({
						enabled: isLocal,
						required: isLocal
					});
				}

				// Clear the path when switching sources.
				if (this._sparkSourceFileInputBox) {
					this._sparkSourceFileInputBox.value = '';
				}

				if (this._localUploadDestinationLabel) {
					if (isLocal) {
						this._localUploadDestinationLabel.value = LocalizedConstants.sparkLocalFileDestinationHint;
					} else {
						this._localUploadDestinationLabel.value = '';
					}
				}
			});

			this._sparkSourceFileInputBox = builder.inputBox().withProps({
				required: true,
				placeHolder: localize('sparkJobSubmission.FilePathPlaceHolder', "Path to a .jar or .py file"),
				value: (this._path) ? this._path : ''
			}).component();
			this._sparkSourceFileInputBox.onTextChanged(async text => {
				if (this._fileSourceDropDown.value === SparkFileSource.Local.toString()) {
					this._dataModel.updateModelByLocalPath(text);
					if (this._localUploadDestinationLabel) {
						if (text) {
							this._localUploadDestinationLabel.value = localize('sparkJobSubmission.LocalFileDestinationHintWithPath',
								"The selected local file will be uploaded to HDFS: {0}", this._dataModel.hdfsSubmitFilePath);
						} else {
							this._localUploadDestinationLabel.value = LocalizedConstants.sparkLocalFileDestinationHint;
						}
					}
				} else {
					this._dataModel.hdfsSubmitFilePath = text;
				}

				// Enable/disable the main class input depending on whether the file is a jar.
				let isJarFile = this._dataModel.isJarFile();
				await this._mainClassInputBox.updateProperties({ enabled: isJarFile, required: isJarFile });
				if (!isJarFile) {
					// Clear the main class for a py file.
					this._mainClassInputBox.value = '';
				}
			});

			this._filePickerButton = builder.button().withProps({
				enabled: !this._path,
				label: '•••',
				width: constants.mssqlClusterSparkJobFileSelectorButtonWidth,
				height: constants.mssqlClusterSparkJobFileSelectorButtonHeight,
				secondary: true
			}).component();
			this._filePickerButton.onDidClick(() => this.onSelectFile());

			this._sourceFlexContainer = builder.flexContainer().component();
			this._sourceFlexContainer.addItem(this._fileSourceDropDown, { flex: '0 0 auto', CSSStyles: { 'minWidth': '75px', 'marginBottom': '5px', 'paddingRight': '3px' } });
			this._sourceFlexContainer.addItem(this._sparkSourceFileInputBox, { flex: '1 1 auto', CSSStyles: { 'marginBottom': '5px', 'paddingRight': '3px' } });
			// Do not add a margin for the file picker button as the label forces it to have a 5px margin.
			this._sourceFlexContainer.addItem(this._filePickerButton, { flex: '0 0 auto' });
			this._sourceFlexContainer.setLayout({
				flexFlow: 'row',
				height: '100%',
				justifyContent: 'center',
				alignItems: 'center',
				alignContent: 'stretch'
			});

			this._localUploadDestinationLabel = builder.text().withProps({
				value: (this._path) ? '' : LocalizedConstants.sparkLocalFileDestinationHint
			}).component();
			this._sourceFlexContainerWithHint = builder.flexContainer().component();
			this._sourceFlexContainerWithHint.addItem(this._sourceFlexContainer, { flex: '0 0 auto' });
			this._sourceFlexContainerWithHint.addItem(this._localUploadDestinationLabel, { flex: '1 1 auto' });
			this._sourceFlexContainerWithHint.setLayout({
				flexFlow: 'column',
				width: '100%',
				justifyContent: 'center',
				alignItems: 'stretch',
				alignContent: 'stretch'
			});

			formContainer.addFormItem({
				component: this._sourceFlexContainerWithHint,
				title: localize('sparkJobSubmission.MainFilePath', "JAR/py File"),
				required: true
			}, baseFormItemLayout);

			this._mainClassInputBox = builder.inputBox().component();
			formContainer.addFormItem({
				component: this._mainClassInputBox,
				title: localize('sparkJobSubmission.MainClass', "Main Class"),
				required: true
			}, baseFormItemLayout);

			this._argumentsInputBox = builder.inputBox().component();
			formContainer.addFormItem({
				component: this._argumentsInputBox,
				title: localize('sparkJobSubmission.Arguments', "Arguments")
			},
				{
					...baseFormItemLayout,
					info: localize('sparkJobSubmission.ArgumentsTooltip', "Command line arguments used in your main class; multiple arguments should be split by space.")
				});

			await modelView.initializeModel(formContainer.component());
		});
	}

	public async validate(): Promise<boolean> {
		if (!this._jobNameInputBox.value) {
			this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyJobName', "Property Job Name is not specified."));
			return false;
		}

		if (this._fileSourceDropDown.value === SparkFileSource.Local.toString()) {
			if (this._sparkSourceFileInputBox.value) {
				this._dataModel.isMainSourceFromLocal = true;
				this._dataModel.updateModelByLocalPath(this._sparkSourceFileInputBox.value);
			} else {
				this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyJARPYPath', "Property JAR/py File is not specified."));
				return false;
			}
		} else {
			if (this._sparkSourceFileInputBox.value) {
				this._dataModel.isMainSourceFromLocal = false;
				this._dataModel.hdfsSubmitFilePath = this._sparkSourceFileInputBox.value;
			} else {
				this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyJARPYPath', "Property JAR/py File is not specified."));
				return false;
			}
		}

		if (this._dataModel.isJarFile() && !this._mainClassInputBox.value) {
			this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyMainClass', "Property Main Class is not specified."));
			return false;
		}

		// 1. For a local file source, check whether the file exists.
		if (this._dataModel.isMainSourceFromLocal) {
			if (!(await utils.exists(this._dataModel.localFileSourcePath))) {
				this._dataModel.showDialogError(LocalizedConstants.sparkJobSubmissionLocalFileNotExisted(this._dataModel.localFileSourcePath));
				return false;
			}
		} else {
			// 2. For an HDFS source, check that the file exists in the cluster.
			try {
				let fileExists = await this._dataModel.isClusterFileExisted(this._dataModel.hdfsSubmitFilePath);
				if (!fileExists) {
					this._dataModel.showDialogError(localize('sparkJobSubmission.HDFSFileNotExistedWithPath', "{0} does not exist in the cluster, or an exception was thrown.", this._dataModel.hdfsSubmitFilePath));
					return false;
				}
			} catch (error) {
				this._dataModel.showDialogError(localize('sparkJobSubmission.HDFSFileNotExisted', "The specified HDFS file does not exist."));
				return false;
			}
		}

		return true;
	}

	private async onSelectFile(): Promise<void> {
		let filePath = await this.pickFile();
		if (filePath) {
			this._sparkSourceFileInputBox.value = filePath;
		}
	}

	public getSparkConfigValues(): SparkConfigModel {
		return {
			jobName: this._jobNameInputBox.value ?? '',
			mainClass: this._mainClassInputBox.value ?? '',
			arguments: this._argumentsInputBox.value ?? ''
		};
	}

	public async pickFile(): Promise<string> {
		try {
			let filter = { 'JAR/py files': ['jar', 'py'] };
			let options: vscode.OpenDialogOptions = {
				canSelectFiles: true,
				canSelectFolders: false,
				canSelectMany: false,
				openLabel: localize('sparkSelectLocalFile', "Select"),
				filters: filter
			};

			let fileUris: vscode.Uri[] | undefined = await vscode.window.showOpenDialog(options);
			if (fileUris && fileUris[0]) {
				return fileUris[0].fsPath;
			}

			return undefined;
		} catch (err) {
			void vscode.window.showErrorMessage(localize('sparkJobSubmission.SelectFileError', "Error in locating the file due to Error: {0}", utils.getErrorMessage(err)));
			return undefined;
		}
	}
}
@@ -1,166 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as vscode from 'vscode';
import * as nls from 'vscode-nls';
import * as utils from '../../../utils';
import * as LocalizedConstants from '../../../localizedConstants';

import { AppContext } from '../../../appContext';
import { SparkJobSubmissionModel } from './sparkJobSubmissionModel';
import { SparkConfigurationTab } from './sparkConfigurationTab';
import { SparkJobSubmissionInput } from './sparkJobSubmissionService';
import { SparkAdvancedTab } from './sparkAdvancedTab';
import { SqlClusterConnection } from '../../../objectExplorerNodeProvider/connection';

const localize = nls.loadMessageBundle();

export class SparkJobSubmissionDialog {
	private _dialog: azdata.window.Dialog;
	private _dataModel: SparkJobSubmissionModel;
	private _sparkConfigTab: SparkConfigurationTab;
	private _sparkAdvancedTab: SparkAdvancedTab;

	constructor(
		private sqlClusterConnection: SqlClusterConnection,
		private appContext: AppContext,
		private outputChannel: vscode.OutputChannel) {
		if (!this.sqlClusterConnection || !this.appContext || !this.outputChannel) {
			throw new Error(localize('sparkJobSubmission.SparkJobSubmissionDialogInitializeError',
				"Parameters for SparkJobSubmissionDialog are invalid"));
		}
	}

	public async openDialog(path?: string): Promise<void> {
		this._dialog = azdata.window.createModelViewDialog(localize('sparkJobSubmission.DialogTitleNewJob', "New Job"));

		this._dataModel = new SparkJobSubmissionModel(this.sqlClusterConnection, this._dialog, this.appContext);

		this._sparkConfigTab = new SparkConfigurationTab(this._dataModel, path);
		this._sparkAdvancedTab = new SparkAdvancedTab();

		this._dialog.content = [this._sparkConfigTab.tab, this._sparkAdvancedTab.tab];

		this._dialog.cancelButton.label = localize('sparkJobSubmission.DialogCancelButton', "Cancel");

		this._dialog.okButton.label = localize('sparkJobSubmission.DialogSubmitButton', "Submit");
		this._dialog.okButton.onClick(() => this.onClickOk());

		this._dialog.registerCloseValidator(() => this.handleValidate());

		azdata.window.openDialog(this._dialog);
	}

	private onClickOk(): void {
		let jobName = localize('sparkJobSubmission.SubmitSparkJob', "{0} Spark Job Submission:",
			this._sparkConfigTab.getSparkConfigValues().jobName);
		azdata.tasks.startBackgroundOperation(
			{
				connection: this.sqlClusterConnection.connection,
				displayName: jobName,
				description: jobName,
				isCancelable: false,
				operation: op => {
					void this.onSubmit(op);
				}
			}
		);
	}

	private async onSubmit(op: azdata.BackgroundOperation): Promise<void> {
		try {
			this.outputChannel.show();
			let msg = localize('sparkJobSubmission.SubmissionStartMessage',
				".......................... Submit Spark Job Start ..........................");
			this.outputChannel.appendLine(msg);
			// 1. Upload the local file to HDFS for a local source.
			if (this._dataModel.isMainSourceFromLocal) {
				try {
					this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionPrepareUploadingFile(this._dataModel.localFileSourcePath, this._dataModel.hdfsFolderDestinationPath)));
					op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionPrepareUploadingFile(this._dataModel.localFileSourcePath, this._dataModel.hdfsFolderDestinationPath));
					await this._dataModel.uploadFile(this._dataModel.localFileSourcePath, this._dataModel.hdfsFolderDestinationPath);
					void vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded);
					this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded));
					op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded);
				} catch (error) {
					void vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error)));
					this.outputChannel.appendLine(this.addErrorTag(LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error))));
					op.updateStatus(azdata.TaskStatus.Failed, LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error)));
					this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
					return;
				}
			}

			// 2. Submit the job to the cluster.
			let submissionSettings: SparkJobSubmissionInput = this.getSubmissionInput();
			this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.config.jobName)));
			op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.config.jobName));
			let livyBatchId = await this._dataModel.submitBatchJobByLivy(submissionSettings);
			void vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted);
			this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted));
			op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted);

			// 3. Get the Spark History and Yarn UI URLs.
			try {
				let appId = await this._dataModel.getApplicationID(submissionSettings, livyBatchId);

				let sparkHistoryUrl = this._dataModel.generateSparkHistoryUIUrl(submissionSettings, appId);
				void vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl));
				this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl)));
				op.updateStatus(azdata.TaskStatus.Succeeded, LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl));

				/*
				// The Spark tracking URL is not working currently.
				let sparkTrackingUrl = this._dataModel.generateSparkTrackingUIUrl(submissionSettings, appId);
				vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionTrackingLinkMessage(sparkTrackingUrl));
				this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionTrackingLinkMessage(sparkTrackingUrl)));
				op.updateStatus(azdata.TaskStatus.Succeeded, LocalizedConstants.sparkJobSubmissionTrackingLinkMessage(sparkTrackingUrl));
				*/

				let yarnUIUrl = this._dataModel.generateYarnUIUrl(submissionSettings, appId);
				void vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl));
				this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl)));
				op.updateStatus(azdata.TaskStatus.Succeeded, LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl));
			} catch (error) {
				void vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error)));
				this.outputChannel.appendLine(this.addErrorTag(LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error))));
				op.updateStatus(azdata.TaskStatus.Failed, LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error)));
				this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
				return;
			}

			this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
		} catch (error) {
			void vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error)));
			this.outputChannel.appendLine(this.addErrorTag(LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error))));
			op.updateStatus(azdata.TaskStatus.Failed, LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error)));
			this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
		}
	}

	private async handleValidate(): Promise<boolean> {
		return this._sparkConfigTab.validate();
	}

	private getSubmissionInput(): SparkJobSubmissionInput {
		const generalConfig = this._sparkConfigTab.getSparkConfigValues();
		const advancedConfig = this._sparkAdvancedTab.getAdvancedConfigValues();
		return new SparkJobSubmissionInput(
			{
				sparkFile: this._dataModel.hdfsSubmitFilePath,
				...generalConfig,
				...advancedConfig
			});
	}

	private addInfoTag(info: string): string {
		return `[Info] ${info}`;
	}

	private addErrorTag(error: string): string {
		return `[Error] ${error}`;
	}
}
@@ -1,202 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();
import * as fspath from 'path';
import * as os from 'os';

import * as constants from '../../../constants';
import { SqlClusterConnection } from '../../../objectExplorerNodeProvider/connection';
import * as LocalizedConstants from '../../../localizedConstants';
import * as utils from '../../../utils';
import { SparkJobSubmissionService, SparkJobSubmissionInput, LivyLogResponse } from './sparkJobSubmissionService';
import { AppContext } from '../../../appContext';
import { IFileSource, File, joinHdfsPath, FileType } from '../../../objectExplorerNodeProvider/fileSources';

// Stores important state and service methods used by the Spark Job Submission Dialog.
export class SparkJobSubmissionModel {
	private _dialogService: SparkJobSubmissionService;
	private _guidForClusterFolder: string;
	public get guidForClusterFolder(): string { return this._guidForClusterFolder; }

	// Whether the file comes from the local machine or from HDFS.
	public isMainSourceFromLocal: boolean;

	// The final HDFS path to be submitted.
	public hdfsSubmitFilePath: string;

	// Paths used when uploading a local file: the source and the destination folder.
	public localFileSourcePath: string;
	public hdfsFolderDestinationPath: string;

	constructor(
		private readonly _sqlClusterConnection: SqlClusterConnection,
		private readonly _dialog: azdata.window.Dialog,
		private readonly _appContext: AppContext) {

		if (!this._sqlClusterConnection || !this._dialog || !this._appContext) {
			throw new Error(localize('sparkJobSubmission.SparkJobSubmissionModelInitializeError',
				"Parameters for SparkJobSubmissionModel are invalid"));
		}

		this._dialogService = new SparkJobSubmissionService();
		this._guidForClusterFolder = utils.generateGuid();
	}

	public get connection(): SqlClusterConnection { return this._sqlClusterConnection; }
	public get dialogService(): SparkJobSubmissionService { return this._dialogService; }
	public get dialog(): azdata.window.Dialog { return this._dialog; }

	public isJarFile(): boolean {
		if (this.hdfsSubmitFilePath) {
			return this.hdfsSubmitFilePath.toLowerCase().endsWith('.jar');
		}

		return false;
	}

	public showDialogError(message: string): void {
		let errorLevel = azdata.window.MessageLevel ? azdata.window.MessageLevel.Error : 0;
		this._dialog.message = {
			text: message,
			level: <azdata.window.MessageLevel>errorLevel
		};
	}

	public showDialogInfo(message: string): void {
		let infoLevel = azdata.window.MessageLevel ? azdata.window.MessageLevel.Information : 2;
		this._dialog.message = {
			text: message,
			level: infoLevel
		};
	}

	public getSparkClusterUrl(): string {
		if (this._sqlClusterConnection && this._sqlClusterConnection.host && this._sqlClusterConnection.port) {
			return `https://${this._sqlClusterConnection.host}:${this._sqlClusterConnection.port}`;
		}

		// Safety check only; won't happen when the model is initialized correctly.
		return '';
	}

	public async submitBatchJobByLivy(submissionArgs: SparkJobSubmissionInput): Promise<string> {
		try {
			if (!submissionArgs) {
				return Promise.reject(localize('sparkJobSubmission.submissionArgsIsInvalid', "submissionArgs is invalid. "));
			}

			submissionArgs.setSparkClusterInfo(this._sqlClusterConnection);
			let livyBatchId = await this._dialogService.submitBatchJob(submissionArgs);
			return livyBatchId;
		} catch (error) {
			return Promise.reject(error);
		}
	}

	public async getApplicationID(submissionArgs: SparkJobSubmissionInput, livyBatchId: string, retryTime?: number): Promise<string> {
		// TODO: consider whether to cap the total wait with a timeout (e.g. 15000ms).
		try {
			if (!submissionArgs) {
				return Promise.reject(localize('sparkJobSubmission.submissionArgsIsInvalid', "submissionArgs is invalid. "));
			}

			if (!utils.isValidNumber(livyBatchId)) {
				return Promise.reject(new Error(localize('sparkJobSubmission.LivyBatchIdIsInvalid', "livyBatchId is invalid. ")));
			}

			if (!retryTime) {
				retryTime = constants.mssqlClusterLivyRetryTimesForCheckYarnApp;
			}

			submissionArgs.setSparkClusterInfo(this._sqlClusterConnection);
			let response: LivyLogResponse = undefined;
			let timeOutCount: number = 0;
			do {
				timeOutCount++;
				await this.sleep(constants.mssqlClusterLivyTimeInMSForCheckYarnApp);
				response = await this._dialogService.getYarnAppId(submissionArgs, livyBatchId);
			} while (response.appId === '' && timeOutCount < retryTime);

			if (response.appId === '') {
				return Promise.reject(localize('sparkJobSubmission.GetApplicationIdTimeOut', "Getting the application id timed out. {0}[Log] {1}", os.EOL, response.log));
			} else {
				return response.appId;
			}
		} catch (error) {
			return Promise.reject(error);
		}
	}

	public async uploadFile(localFilePath: string, hdfsFolderPath: string): Promise<void> {
		try {
			if (!localFilePath || !hdfsFolderPath) {
				return Promise.reject(localize('sparkJobSubmission.localFileOrFolderNotSpecified.', "Property localFilePath or hdfsFolderPath is not specified. "));
			}

			if (!(await utils.exists(localFilePath))) {
				return Promise.reject(LocalizedConstants.sparkJobSubmissionLocalFileNotExisted(localFilePath));
			}

			const fileSource: IFileSource = await this._sqlClusterConnection.createHdfsFileSource();
			await fileSource.writeFile(new File(localFilePath, FileType.File), hdfsFolderPath);
		} catch (error) {
			return Promise.reject(error);
		}
	}

	public async isClusterFileExisted(path: string): Promise<boolean> {
		try {
			if (!path) {
				return Promise.reject(localize('sparkJobSubmission.PathNotSpecified.', "Property Path is not specified. "));
			}

			let fileSource: IFileSource = await this._sqlClusterConnection.createHdfsFileSource();
			return await fileSource.exists(path);
		} catch (error) {
			return Promise.reject(error);
		}
	}

	public updateModelByLocalPath(localPath: string): void {
		if (localPath) {
			this.localFileSourcePath = localPath;
			this.hdfsFolderDestinationPath = this.generateDestinationFolder();
			let fileName = fspath.basename(localPath);
			this.hdfsSubmitFilePath = joinHdfsPath(this.hdfsFolderDestinationPath, fileName);
		} else {
			this.hdfsSubmitFilePath = '';
		}
	}

	// Example path: /SparkSubmission/2018/08/21/b682a6c4-1954-401e-8542-9c573d69d9c0/default_artifact.jar
	private generateDestinationFolder(): string {
		let day = new Date();
		return `/SparkSubmission/${day.getUTCFullYear()}/${day.getUTCMonth() + 1}/${day.getUTCDate()}/${this._guidForClusterFolder}`;
	}

	// Example: https://host:30443/gateway/default/yarn/cluster/app/application_1532646201938_0057
	public generateYarnUIUrl(submissionArgs: SparkJobSubmissionInput, appId: string): string {
		return `https://${submissionArgs.host}:${submissionArgs.port}/gateway/default/yarn/cluster/app/${appId}`;
	}

	// Example: https://host:30443/gateway/default/yarn/proxy/application_1532646201938_0411
	public generateSparkTrackingUIUrl(submissionArgs: SparkJobSubmissionInput, appId: string): string {
		return `https://${submissionArgs.host}:${submissionArgs.port}/gateway/default/yarn/proxy/${appId}`;
	}

	// Example: https://host:30443/gateway/default/sparkhistory/history/application_1532646201938_0057/1
	public generateSparkHistoryUIUrl(submissionArgs: SparkJobSubmissionInput, appId: string): string {
		return `https://${submissionArgs.host}:${submissionArgs.port}/gateway/default/sparkhistory/history/${appId}/1`;
	}

	private async sleep(ms: number): Promise<void> {
		// tslint:disable-next-line no-string-based-set-timeout
		return new Promise<void>(resolve => setTimeout(resolve, ms));
	}
}
@@ -1,240 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as os from 'os';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();
import * as constants from '../../../constants';
import { SqlClusterConnection } from '../../../objectExplorerNodeProvider/connection';
import * as utils from '../../../utils';
import * as auth from '../../../util/auth';
import * as request from 'request-light';

export class SparkJobSubmissionService {
	public async submitBatchJob(submissionArgs: SparkJobSubmissionInput): Promise<string> {
		let livyUrl: string = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/`;

		// Get the correct authentication headers
		let headers = await this.getAuthenticationHeaders(submissionArgs);

		let options: request.XHROptions = {
			url: livyUrl,
			type: 'POST',
			strictSSL: !auth.getIgnoreSslVerificationConfigSetting(),
			data: {
				file: submissionArgs.config.sparkFile,
				proxyUser: submissionArgs.user,
				className: submissionArgs.config.mainClass,
				name: submissionArgs.config.jobName
			},
			// authentication headers
			headers: headers
		};

		// Now set the other parameters based on the user configuration - see
		// https://livy.incubator.apache.org/docs/latest/rest-api.html for more detailed information

		// Set arguments
		const args = submissionArgs.config.arguments?.trim();
		if (args) {
			const argsList = args.split(' ');
			if (argsList.length > 0) {
				options.data['args'] = argsList;
			}
		}

		// Set jar files
		const jarFiles = submissionArgs.config.jarFiles?.trim();
		if (jarFiles) {
			const jarList = jarFiles.split(';');
			if (jarList.length > 0) {
				options.data['jars'] = jarList;
			}
		}

		// Set py files
		if (submissionArgs.config.pyFiles?.trim()) {
			const pyList = submissionArgs.config.pyFiles.split(';');
			if (pyList.length > 0) {
				options.data['pyFiles'] = pyList;
			}
		}

		// Set other files
		const otherFiles = submissionArgs.config.otherFiles?.trim();
		if (otherFiles) {
			const otherList = otherFiles.split(';');
			if (otherList.length > 0) {
				options.data['files'] = otherList;
			}
		}

		// Set driver memory
		const driverMemory = submissionArgs.config.driverMemory?.trim();
		if (driverMemory) {
			options.data['driverMemory'] = driverMemory;
		}

		// Set driver cores
		if (submissionArgs.config.driverCores) {
			options.data['driverCores'] = submissionArgs.config.driverCores;
		}

		// Set executor memory
		const executorMemory = submissionArgs.config.executorMemory?.trim();
		if (executorMemory) {
			options.data['executorMemory'] = executorMemory;
		}

		// Set executor cores
		if (submissionArgs.config.executorCores) {
			options.data['executorCores'] = submissionArgs.config.executorCores;
		}

		// Set executor count
		if (submissionArgs.config.executorCount) {
			options.data['numExecutors'] = submissionArgs.config.executorCount;
		}

		// Set queue name
		if (submissionArgs.config.queueName) {
			options.data['queue'] = submissionArgs.config.queueName;
		}

		// Set configuration values
		const configurationValues = submissionArgs.config.configValues?.trim();
		if (configurationValues) {
			options.data['conf'] = configurationValues;
		}

		options.data = JSON.stringify(options.data);

		// Note this is currently required to be called each time since request-light is overwriting
		// the setting passed in through the options. If/when that gets fixed this can be removed
		request.configure(null, !auth.getIgnoreSslVerificationConfigSetting());

		const response = JSON.parse((await request.xhr(options)).responseText);
		if (response && utils.isValidNumber(response.id)) {
			return response.id;
		}

		throw new Error(localize('sparkJobSubmission.LivyNoBatchIdReturned',
			"No Spark job batch id is returned from response.{0}[Error] {1}", os.EOL, JSON.stringify(response)));
	}
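
	// Illustrative only, not part of the original file: assuming hypothetical form values,
	// the serialized Livy batch request body built above would look roughly like:
	// {
	// 	"file": "/SparkSubmission/2018/08/21/<guid>/mySampleJob.jar",
	// 	"proxyUser": "admin",
	// 	"className": "com.contoso.SampleApp",
	// 	"name": "mySampleJob.jar",
	// 	"args": ["--input", "/data/in"],
	// 	"driverMemory": "2G",
	// 	"numExecutors": 3
	// }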

	private async getAuthenticationHeaders(submissionArgs: SparkJobSubmissionInput) {
		let headers = {};
		if (submissionArgs.isIntegratedAuth) {
			let kerberosToken = await auth.authenticateKerberos(submissionArgs.host);
			headers = { Authorization: `Negotiate ${kerberosToken}` };
		} else {
			headers = { Authorization: 'Basic ' + Buffer.from(submissionArgs.user + ':' + submissionArgs.password).toString('base64') };
		}
		return headers;
	}

	public async getYarnAppId(submissionArgs: SparkJobSubmissionInput, livyBatchId: string): Promise<LivyLogResponse> {
		let livyUrl = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/${livyBatchId}/log`;
		let headers = await this.getAuthenticationHeaders(submissionArgs);

		let options: request.XHROptions = {
			url: livyUrl,
			type: 'GET',
			strictSSL: !auth.getIgnoreSslVerificationConfigSetting(),
			// authentication headers
			headers: headers
		};

		// Note this is currently required to be called each time since request-light is overwriting
		// the setting passed in through the options. If/when that gets fixed this can be removed
		request.configure(null, !auth.getIgnoreSslVerificationConfigSetting());

		const response = JSON.parse((await request.xhr(options)).responseText);
		if (response && response.log) {
			return this.extractYarnAppIdFromLog(response.log);
		}

		throw new Error(localize('sparkJobSubmission.LivyNoLogReturned',
			"No log is returned within response.{0}[Error] {1}", os.EOL, JSON.stringify(response)));
	}

	private extractYarnAppIdFromLog(log: any): LivyLogResponse {
		// Normalize to an array of lines so a plain string is never iterated character by character.
		const entries: string[] = Array.isArray(log) ? log : [log];
		const logForPrint = entries.join(os.EOL);

		// e.g. '18/08/23 11:02:50 INFO yarn.Client: Application report for application_1532646201938_0182 (state: ACCEPTED)'
		for (let entry of entries) {
			if (entry.indexOf('Application report for') >= 0 && entry.indexOf('(state: ACCEPTED)') >= 0) {
				let tokens = entry.split(' ');
				for (let token of tokens) {
					if (token.startsWith('application_')) {
						return new LivyLogResponse(logForPrint, token);
					}
				}
			}
		}

		return new LivyLogResponse(logForPrint, '');
	}
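
	// Illustrative only: given the example log line above, split(' ') yields tokens such as
	// ['18/08/23', '11:02:50', 'INFO', 'yarn.Client:', 'Application', 'report', 'for',
	//  'application_1532646201938_0182', '(state:', 'ACCEPTED)'], and the first token
	// starting with 'application_' becomes the extracted Yarn application id.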
}

/**
 * The configuration values for the spark job submission. See https://livy.incubator.apache.org/docs/latest/rest-api.html
 * for more detailed information.
 */
export interface SparkJobSubmissionConfig {
	readonly jobName: string,
	readonly sparkFile: string,
	readonly mainClass: string,
	readonly arguments?: string,
	readonly jarFiles?: string,
	readonly pyFiles?: string,
	readonly otherFiles?: string,
	readonly driverMemory?: string,
	readonly driverCores?: number,
	readonly executorMemory?: string,
	readonly executorCores?: number,
	readonly executorCount?: number,
	readonly queueName?: string,
	readonly configValues?: string
}

export class SparkJobSubmissionInput {
	public setSparkClusterInfo(sqlClusterConnection: SqlClusterConnection): void {
		this._host = sqlClusterConnection.host;
		this._port = sqlClusterConnection.port;
		this._livyPath = constants.mssqlClusterLivySubmitPath;
		this._user = sqlClusterConnection.user;
		this._password = sqlClusterConnection.password;
		this._isIntegratedAuth = sqlClusterConnection.isIntegratedAuth();
	}

	constructor(
		public readonly config: SparkJobSubmissionConfig,
		private _host?: string,
		private _port?: number,
		private _livyPath?: string,
		private _user?: string,
		private _password?: string,
		private _isIntegratedAuth?: boolean) { }

	public get host(): string { return this._host; }
	public get port(): number { return this._port; }
	public get livyPath(): string { return this._livyPath; }
	public get user(): string { return this._user; }
	public get password(): string { return this._password; }
	public get isIntegratedAuth(): boolean { return this._isIntegratedAuth; }
}

export enum SparkFileSource {
	HDFS = 'HDFS',
	Local = 'Local'
}

export class LivyLogResponse {
	constructor(public log: string, public appId: string) { }
}
@@ -1,43 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as vscode from 'vscode';
import { AppContext } from '../appContext';
import { getErrorMessage } from '../utils';
import * as SqlClusterLookUp from '../sqlClusterLookUp';
import * as loc from '../localizedConstants';

export class OpenSparkYarnHistoryTask {
	constructor(private appContext: AppContext) {
	}

	async execute(sqlConnProfile: azdata.IConnectionProfile, isSpark: boolean): Promise<void> {
		try {
			let sqlClusterConnection = await SqlClusterLookUp.findSqlClusterConnection(sqlConnProfile, this.appContext);
			if (!sqlClusterConnection) {
				let name = isSpark ? 'Spark' : 'Yarn';
				void vscode.window.showErrorMessage(loc.sparkConnectionRequired(name));
				return;
			}
			if (isSpark) {
				void vscode.commands.executeCommand('vscode.open', vscode.Uri.parse(this.generateSparkHistoryUrl(sqlClusterConnection.host, sqlClusterConnection.port)));
			} else {
				void vscode.commands.executeCommand('vscode.open', vscode.Uri.parse(this.generateYarnHistoryUrl(sqlClusterConnection.host, sqlClusterConnection.port)));
			}
		} catch (error) {
			void vscode.window.showErrorMessage(getErrorMessage(error));
		}
	}

	private generateSparkHistoryUrl(host: string, port: number): string {
		return `https://${host}:${port}/gateway/default/sparkhistory/`;
	}

	private generateYarnHistoryUrl(host: string, port: number): string {
		return `https://${host}:${port}/gateway/default/yarn/cluster/apps`;
	}
}
@@ -1,249 +0,0 @@
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as azdata from 'azdata';
|
||||
import * as bdc from 'bdc';
|
||||
import * as vscode from 'vscode';
|
||||
import * as constants from './constants';
|
||||
import * as UUID from 'vscode-languageclient/lib/utils/uuid';
|
||||
import { AppContext } from './appContext';
|
||||
import { SqlClusterConnection } from './objectExplorerNodeProvider/connection';
|
||||
import { ICommandObjectExplorerContext } from './objectExplorerNodeProvider/command';
|
||||
import { getClusterEndpoints, getHostAndPortFromEndpoint } from './utils';
|
||||
import { MssqlObjectExplorerNodeProvider } from './objectExplorerNodeProvider/objectExplorerNodeProvider';
|
||||
import CodeAdapter from './prompts/adapter';
|
||||
import { IQuestion, QuestionTypes } from './prompts/question';
|
||||
import * as nls from 'vscode-nls';
|
||||
import { AuthType } from './util/auth';
|
||||
const localize = nls.loadMessageBundle();
|
||||
|
||||
export async function findSqlClusterConnection(
|
||||
obj: ICommandObjectExplorerContext | azdata.IConnectionProfile,
|
||||
appContext: AppContext): Promise<SqlClusterConnection | undefined> {
|
||||
|
||||
if (!obj || !appContext) {
|
||||
console.error('SqlClusterLookup::findSqlClusterConnection - No context available');
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let sqlConnProfile: azdata.IConnectionProfile;
|
||||
if ('type' in obj && obj.type === constants.ObjectExplorerService
|
||||
&& 'explorerContext' in obj && obj.explorerContext && obj.explorerContext.connectionProfile) {
|
||||
sqlConnProfile = obj.explorerContext.connectionProfile;
|
||||
} else if ('options' in obj) {
|
||||
sqlConnProfile = obj;
|
||||
}
|
||||
|
||||
let sqlClusterConnection: SqlClusterConnection = undefined;
|
||||
if (sqlConnProfile) {
|
||||
sqlClusterConnection = await findSqlClusterConnectionBySqlConnProfile(sqlConnProfile, appContext);
|
||||
} else {
|
||||
console.error('SqlClusterLookup::findSqlClusterConnection - No connection profile');
|
||||
}
|
||||
return sqlClusterConnection;
|
||||
}
|
||||
|
||||
async function findSqlClusterConnectionBySqlConnProfile(sqlConnProfile: azdata.IConnectionProfile, appContext: AppContext): Promise<SqlClusterConnection | undefined> {
|
||||
if (!sqlConnProfile || !appContext) {
|
||||
console.error('SqlClusterLookup::findSqlClusterConnectionBySqlConnProfile - No context available');
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let sqlOeNodeProvider = appContext.getService<MssqlObjectExplorerNodeProvider>(constants.ObjectExplorerService);
|
||||
if (!sqlOeNodeProvider) {
|
||||
console.error('SqlClusterLookup::findSqlClusterConnectionBySqlConnProfile - No OE Node Provider available');
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let sqlClusterSession = sqlOeNodeProvider.findSqlClusterSessionBySqlConnProfile(sqlConnProfile);
|
||||
if (!sqlClusterSession) {
|
||||
console.error('SqlClusterLookup::findSqlClusterConnectionBySqlConnProfile - No SQL Cluster Session found');
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return sqlClusterSession.getSqlClusterConnection();
|
||||
}
|
||||
|
||||
export async function getSqlClusterConnectionParams(
|
||||
obj: azdata.IConnectionProfile | azdata.connection.Connection | ICommandObjectExplorerContext,
|
||||
appContext: AppContext): Promise<ConnectionParam> {
|
||||
|
||||
if (!obj) { return undefined; }
|
||||
|
||||
let sqlClusterConnInfo: ConnectionParam = undefined;
|
||||
if ('providerName' in obj) {
|
||||
if (obj.providerName === constants.mssqlClusterProviderName) {
|
||||
sqlClusterConnInfo = 'id' in obj ? connProfileToConnectionParam(obj) : connToConnectionParam(obj);
|
||||
} else {
|
||||
sqlClusterConnInfo = await createSqlClusterConnInfo(obj, appContext);
|
||||
}
|
||||
} else {
|
||||
sqlClusterConnInfo = await createSqlClusterConnInfo(obj.explorerContext.connectionProfile, appContext);
|
||||
}
|
||||
|
||||
return sqlClusterConnInfo;
|
||||
}
|
||||
|
||||
async function createSqlClusterConnInfo(sqlConnInfo: azdata.IConnectionProfile | azdata.connection.Connection, appContext: AppContext): Promise<ConnectionParam> {
|
||||
if (!sqlConnInfo) { return undefined; }
|
||||
|
||||
let connectionId: string = 'id' in sqlConnInfo ? sqlConnInfo.id : sqlConnInfo.connectionId;
|
||||
if (!connectionId) { return undefined; }
|
||||
|
||||
let serverInfo = await azdata.connection.getServerInfo(connectionId);
|
||||
if (!serverInfo || !serverInfo.options) { return undefined; }
|
||||
|
||||
let endpoints: bdc.IEndpointModel[] = getClusterEndpoints(serverInfo);
|
||||
if (!endpoints || endpoints.length === 0) { return undefined; }
|
||||
|
||||
let credentials = await azdata.connection.getCredentials(connectionId);
|
||||
if (!credentials) { return undefined; }
|
||||
|
||||
let clusterConnInfo = <ConnectionParam>{
|
||||
providerName: constants.mssqlClusterProviderName,
|
||||
connectionId: UUID.generateUuid(),
|
||||
options: {}
|
||||
};
|
||||
|
||||
// We need to populate some extra information here in order to be able to browse the HDFS nodes.
|
||||
// First - if the auth type isn't integrated auth then we need to try and find the username to connect
|
||||
// to the knox endpoint with.
|
||||
// Next we need the knox endpoint - if we didn't get that from the SQL instance (because the user didn't have permissions
|
||||
// to see the full DMV usually) then we need to connect to the controller to fetch the full list of endpoints and get it
|
||||
// that way.
|
||||
let clusterController: bdc.IClusterController | undefined = undefined;
|
||||
let authType = clusterConnInfo.options[constants.authenticationTypePropName] = sqlConnInfo.options[constants.authenticationTypePropName];
|
||||
const controllerEndpoint = endpoints.find(ep => ep.name.toLowerCase() === 'controller');
|
||||
if (authType && authType.toLowerCase() !== constants.integratedAuth) {
|
||||
const usernameKey = `bdc.username::${connectionId}`;
|
||||
const savedUsername = appContext.extensionContext.globalState.get(usernameKey);
|
||||
const credentialProvider = await azdata.credentials.getProvider('mssql.bdc.password');
|
||||
const savedPassword = (await credentialProvider.readCredential(connectionId)).password;
|
||||
// If we don't have a previously saved username/password then use the SQL connection credentials as a best guess,
|
||||
// if those don't work then we'll prompt the user for the info
|
||||
clusterConnInfo.options[constants.userPropName] = savedUsername ?? sqlConnInfo.options[constants.userPropName];
|
||||
clusterConnInfo.options[constants.passwordPropName] = savedPassword ?? credentials.password;
|
||||
try {
|
||||
clusterController = await getClusterController(controllerEndpoint.endpoint, clusterConnInfo);
|
||||
// We've successfully connected so now store the username/password for future connections
|
||||
await appContext.extensionContext.globalState.update(usernameKey, clusterConnInfo.options[constants.userPropName]);
|
||||
await credentialProvider.saveCredential(connectionId, clusterConnInfo.options[constants.passwordPropName]);
|
||||
clusterConnInfo.options[constants.userPropName] = await clusterController.getKnoxUsername(clusterConnInfo.options[constants.userPropName]);
|
||||
} catch (err) {
|
||||
console.log(`Unexpected error getting Knox username for SQL Cluster connection: ${err}`);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
let hadoopEndpointIndex = endpoints.findIndex(ep => ep.name.toLowerCase() === constants.hadoopEndpointNameGateway.toLowerCase());
|
||||
if (hadoopEndpointIndex < 0) {
|
||||
clusterController = await getClusterController(controllerEndpoint.endpoint, clusterConnInfo);
|
||||
endpoints = (await clusterController.getEndPoints()).endPoints;
|
||||
hadoopEndpointIndex = endpoints.findIndex(ep => ep.name.toLowerCase() === constants.hadoopEndpointNameGateway.toLowerCase());
|
||||
}
|
||||
const hostAndIp = getHostAndPortFromEndpoint(endpoints[hadoopEndpointIndex].endpoint);
|
||||
clusterConnInfo.options[constants.hostPropName] = hostAndIp.host;
|
||||
// TODO should we default the port? Or just ignore later?
|
||||
clusterConnInfo.options[constants.knoxPortPropName] = hostAndIp.port || constants.defaultKnoxPort;
|
||||
clusterConnInfo = connToConnectionParam(clusterConnInfo);
|
||||
|
||||
return clusterConnInfo;
|
||||
}
|
||||
|
||||
async function getClusterController(controllerEndpoint: string, connInfo: ConnectionParam): Promise<bdc.IClusterController | undefined> {
    const bdcApi = <bdc.IExtension>await vscode.extensions.getExtension(bdc.constants.extensionName).activate();
    const authType: bdc.AuthType = connInfo.options[constants.authenticationTypePropName].toLowerCase() === AuthType.Integrated ? 'integrated' : 'basic';
    const controller = bdcApi.getClusterController(
        controllerEndpoint,
        authType,
        connInfo.options[constants.userPropName],
        connInfo.options[constants.passwordPropName]);
    try {
        // We just want to test the connection - so use getEndPoints since that is available to all users (not just admins)
        await controller.getEndPoints();
        return controller;
    } catch (err) {
        // The initial username/password failed, so prompt the user for new ones until they either
        // cancel out or we successfully connect
        console.log(`Error connecting to cluster controller: ${err}`);
        let errorMessage = '';
        const prompter = new CodeAdapter();
        while (true) {
            let username = await prompter.promptSingle<string>(<IQuestion>{
                type: QuestionTypes.input,
                name: 'inputPrompt',
                message: localize('promptBDCUsername', "{0}Please provide the username to connect to the BDC Controller:", errorMessage),
                default: connInfo.options[constants.userPropName]
            });
            if (!username) {
                console.log(`User cancelled out of username prompt for BDC Controller`);
                break;
            }
            const password = await prompter.promptSingle<string>(<IQuestion>{
                type: QuestionTypes.password,
                name: 'passwordPrompt',
                message: localize('promptBDCPassword', "Please provide the password to connect to the BDC Controller"),
                default: ''
            });
            if (!password) {
                console.log(`User cancelled out of password prompt for BDC Controller`);
                break;
            }
            const controller = bdcApi.getClusterController(controllerEndpoint, authType, username, password);
            try {
                // We just want to test the connection - so use getEndPoints since that is available to all users (not just admins)
                await controller.getEndPoints();
                // Update our connection with the new info
                connInfo.options[constants.userPropName] = username;
                connInfo.options[constants.passwordPropName] = password;
                return controller;
            } catch (err) {
                errorMessage = localize('bdcConnectError', "Error: {0}. ", err.message ?? err);
            }
        }
        throw new Error(localize('usernameAndPasswordRequired', "Username and password are required"));
    }
}
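// The prompt loop in the catch block above boils down to a reusable retry-until-cancelled
// pattern; a minimal sketch under the same assumptions (not from the original change; an
// empty answer means the user cancelled).
async function retryUntilCancelled<T>(
    promptForInput: () => Promise<string | undefined>,
    attempt: (input: string) => Promise<T>
): Promise<T | undefined> {
    while (true) {
        const input = await promptForInput();
        if (!input) {
            return undefined; // user cancelled out of the prompt
        }
        try {
            return await attempt(input);
        } catch {
            // Attempt failed; loop back and prompt again, as getClusterController does.
        }
    }
}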
function connProfileToConnectionParam(connectionProfile: azdata.IConnectionProfile): ConnectionParam {
    let result = Object.assign(connectionProfile, { connectionId: connectionProfile.id });
    return <ConnectionParam>result;
}

function connToConnectionParam(connection: azdata.connection.Connection): ConnectionParam {
    let connectionId = connection.connectionId;
    let options = connection.options;
    let result = Object.assign(connection,
        {
            serverName: `${options[constants.hostPropName]},${options[constants.knoxPortPropName]}`,
            userName: options[constants.userPropName],
            password: options[constants.passwordPropName],
            id: connectionId,
            authenticationType: options[constants.authenticationTypePropName]
        }
    );
    return <ConnectionParam>result;
}
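// A worked example of connToConnectionParam (not from the original change; the literal option
// keys below are assumptions standing in for the real constants.*PropName values). The point is
// that serverName comes back in the comma-separated "host,port" form SQL Server tooling expects.
const exampleConnection = <azdata.connection.Connection>{
    connectionId: 'abc-123',
    providerName: 'mssql',
    options: {
        host: 'mycluster.example.com',   // constants.hostPropName (key assumed)
        knoxport: '30443',               // constants.knoxPortPropName (key assumed)
        user: 'admin',                   // constants.userPropName (key assumed)
        password: '***',
        authenticationType: 'basic'
    }
};
// Under those assumptions:
// connToConnectionParam(exampleConnection).serverName === 'mycluster.example.com,30443'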
class ConnectionParam implements azdata.connection.Connection, azdata.IConnectionProfile, azdata.ConnectionInfo {
    public connectionName: string;
    public serverName: string;
    public databaseName: string;
    public userName: string;
    public password: string;
    public authenticationType: string;
    public savePassword: boolean;
    public groupFullName: string;
    public groupId: string;
    public saveProfile: boolean;
    public id: string;
    public azureTenantId?: string;
    public azureAccount?: string;

    public providerName: string;
    public connectionId: string;

    public options: { [name: string]: any; };
}
extensions/mssql/src/typings/refs.d.ts
@@ -7,4 +7,3 @@
/// <reference path='../../../../src/sql/azdata.proposed.d.ts'/>
/// <reference path='../../../../src/vscode-dts/vscode.d.ts'/>
/// <reference path='../../../azurecore/src/azurecore.d.ts' />
/// <reference path='../../../big-data-cluster/src/bdc.d.ts'/>

@@ -1,37 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as kerberos from '@microsoft/ads-kerberos';
import * as vscode from 'vscode';

export enum AuthType {
    Integrated = 'integrated',
    Basic = 'basic'
}

export async function authenticateKerberos(hostname: string): Promise<string> {
    const service = 'HTTP' + (process.platform === 'win32' ? '/' : '@') + hostname;
    const mechOID = kerberos.GSS_MECH_OID_KRB5;
    let client = await kerberos.initializeClient(service, { mechOID });
    let response = await client.step('');
    return response;
}

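// A usage sketch (not part of the original file; the helper name is hypothetical). The string
// returned by authenticateKerberos is a base64-encoded SPNEGO token from the first GSSAPI step,
// which an HTTP client would typically send in a Negotiate authorization header.
async function buildNegotiateHeader(hostname: string): Promise<{ [name: string]: string }> {
    const token = await authenticateKerberos(hostname);
    return { Authorization: `Negotiate ${token}` };
}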
const bdcConfigSectionName = 'bigDataCluster';
const ignoreSslConfigName = 'ignoreSslVerification';

/**
 * Retrieves the current setting for whether to ignore SSL verification errors
 */
export function getIgnoreSslVerificationConfigSetting(): boolean {
    try {
        const config = vscode.workspace.getConfiguration(bdcConfigSectionName);
        return config.get<boolean>(ignoreSslConfigName, true);
    } catch (error) {
        console.error(`Unexpected error retrieving ${bdcConfigSectionName}.${ignoreSslConfigName} setting: ${error}`);
    }
    return true;
}

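// The getter above resolves to the bigDataCluster.ignoreSslVerification setting and defaults to
// true both when the value is unset and on an unexpected error. A sketch (hypothetical helper,
// standard VS Code configuration API) of turning SSL verification back on from extension code:
async function enforceSslVerification(): Promise<void> {
    const config = vscode.workspace.getConfiguration('bigDataCluster');
    await config.update('ignoreSslVerification', false, vscode.ConfigurationTarget.Global);
}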
@@ -5,12 +5,10 @@

import * as azdata from 'azdata';
import * as vscode from 'vscode';
import * as bdc from 'bdc';
import * as path from 'path';
import * as crypto from 'crypto';
import * as os from 'os';
import * as findRemoveSync from 'find-remove';
import * as constants from './constants';
import { promises as fs } from 'fs';
import { IConfig, ServerProvider } from '@microsoft/ads-service-downloader';
import { env } from 'process';

@@ -273,56 +271,6 @@ export function getUserHome(): string {
    return process.env.HOME || process.env.USERPROFILE;
}

export function getClusterEndpoints(serverInfo: azdata.ServerInfo): bdc.IEndpointModel[] | undefined {
    let endpoints: RawEndpoint[] = serverInfo.options[constants.clusterEndpointsProperty];
    if (!endpoints || endpoints.length === 0) { return []; }

    return endpoints.map(e => {
        // If endpoint is missing, we're on CTP bits. All endpoints from the CTP serverInfo should be treated as HTTPS
        let endpoint = e.endpoint ? e.endpoint : `https://${e.ipAddress}:${e.port}`;
        let updatedEndpoint: bdc.IEndpointModel = {
            name: e.serviceName,
            description: e.description,
            endpoint: endpoint,
            protocol: e.protocol
        };
        return updatedEndpoint;
    });
}
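// A worked example of the CTP fallback in getClusterEndpoints (field values are illustrative):
// an endpoint that arrives without an `endpoint` string is rebuilt as an HTTPS URL from its
// ipAddress and port.
const ctpGateway: RawEndpoint = {
    serviceName: 'gateway',
    protocol: 'https',
    ipAddress: '10.0.0.4',
    port: 30443
};
// getClusterEndpoints maps this entry to:
// { name: 'gateway', description: undefined, endpoint: 'https://10.0.0.4:30443', protocol: 'https' }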
export async function isBigDataCluster(connectionId: string): Promise<boolean> {
    const serverInfo = await azdata.connection.getServerInfo(connectionId);

    return !!serverInfo?.options?.[constants.isBigDataClusterProperty];
}

// port is optional since getHostAndPortFromEndpoint returns undefined for it
// when the endpoint contains no explicit port
export type HostAndIp = { host: string, port?: string };

export function getHostAndPortFromEndpoint(endpoint: string): HostAndIp {
    let authority = vscode.Uri.parse(endpoint).authority;
    let hostAndPortRegex = /^(.*)([,:](\d+))/g;
    let match = hostAndPortRegex.exec(authority);
    if (match) {
        return {
            host: match[1],
            port: match[3]
        };
    }
    return {
        host: authority,
        port: undefined
    };
}
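// Worked examples for getHostAndPortFromEndpoint (hostnames are illustrative): the regex accepts
// either ':' or ',' between host and port, and when no port is present the whole authority is
// returned as the host.
getHostAndPortFromEndpoint('https://mycluster.example.com:30443'); // { host: 'mycluster.example.com', port: '30443' }
getHostAndPortFromEndpoint('https://mycluster.example.com,31433'); // { host: 'mycluster.example.com', port: '31433' }
getHostAndPortFromEndpoint('https://mycluster.example.com');       // { host: 'mycluster.example.com', port: undefined }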
interface RawEndpoint {
    serviceName: string;
    description?: string;
    endpoint?: string;
    protocol?: string;
    ipAddress?: string;
    port?: number;
}

export function isValidNumber(maybeNumber: any) {
    return maybeNumber !== undefined
        && maybeNumber !== null