@@ -65,7 +65,7 @@
 "pandas_minor = int(pandas_version[1])\r\n",
 "pandas_patch = int(pandas_version[2])\r\n",
 "if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\r\n",
-" sys.exit('Please upgrade the Notebook dependencies before you proceed. You can do so by running \"Reinstall Notebook dependencies\" from the command palette in Azure Data Studio.')\r\n",
+" sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\r\n",
 "\r\n",
 "def run_command():\r\n",
 " print(\"Executing: \" + cmd)\r\n",
@@ -103,7 +103,7 @@
 " confirm_password = getpass.getpass(prompt = 'Confirm password')\n",
 " if mssql_password != confirm_password:\n",
 " sys.exit(f'Passwords do not match.')\n",
-"print('Password accepted, you can also use the same password to access Knox and SQL Server.')"
+"print('You can also use the same password to access Knox and SQL Server.')"
 ],
 "metadata": {},
 "outputs": [],
@@ -67,8 +67,7 @@
 "pandas_minor = int(pandas_version[1])\r\n",
 "pandas_patch = int(pandas_version[2])\r\n",
 "if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\r\n",
-" sys.exit('Please upgrade the Notebook dependencies before you proceed. You can do so by running \"Reinstall Notebook dependencies\" from the command palette in Azure Data Studio.')\r\n",
-"\r\n",
+" sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\r\n",
 "def run_command():\r\n",
 " print(\"Executing: \" + cmd)\r\n",
 " !{cmd}\r\n",
@@ -118,7 +117,7 @@
 " confirm_password = getpass.getpass(prompt = 'Confirm password')\n",
 " if mssql_password != confirm_password:\n",
 " sys.exit(f'Passwords do not match.')\n",
-"print('Password accepted, you can also use the same password to access Knox and SQL Server.')"
+"print('You can also use the same password to access Knox and SQL Server.')"
 ],
 "metadata": {},
 "outputs": [],
@@ -0,0 +1,270 @@
+{
+"metadata": {
+"kernelspec": {
+"name": "python3",
+"display_name": "Python 3"
+},
+"language_info": {
+"name": "python",
+"version": "3.7.3",
+"mimetype": "text/x-python",
+"codemirror_mode": {
+"name": "ipython",
+"version": 3
+},
+"pygments_lexer": "ipython3",
+"nbconvert_exporter": "python",
+"file_extension": ".py"
+}
+},
+"nbformat_minor": 2,
+"nbformat": 4,
+"cells": [
+{
+"cell_type": "markdown",
+"source": [
+"\n",
+" \n",
+"## Deploy SQL Server 2019 CTP 3.2 big data cluster on an existing cluster deployed using kubeadm\n",
+" \n",
+"This notebook walks through the process of deploying a <a href=\"https://docs.microsoft.com/sql/big-data-cluster/big-data-cluster-overview?view=sqlallproducts-allversions\">SQL Server 2019 CTP 3.2 big data cluster</a> on an existing kubeadm cluster.\n",
+" \n",
+"* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n",
+"* Make sure you have the target cluster set as the current context in your kubectl config file.\n",
+" The config file would typically be under C:\Users\(userid)\.kube on Windows, and under ~/.kube/ for macOS and Linux for a default installation.\n",
" In the kubectl config file, look for \"current-context\" and ensure it is set to the AKS cluster that the SQL Server 2019 CTP 3.2 big data cluster will be deployed to.\n",
|
||||
"* The **Required information** cell will prompt you for password that will be used to access the cluster controller, SQL Server, and Knox.\n",
|
||||
"* The values in the **Default settings** cell can be changed as appropriate.\n",
|
||||
"\n",
|
||||
"<span style=\"color:red\"><font size=\"3\">Please press the \"Run Cells\" button to run the notebook</font></span>"
|
||||
],
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Prerequisites** \n",
|
||||
"Ensure the following tools are installed and added to PATH before proceeding.\n",
|
||||
" \n",
|
||||
"|Tools|Description|Installation|\n",
|
||||
"|---|---|---|\n",
|
||||
"|kubectl | Command-line tool for monitoring the underlying Kuberentes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n",
|
||||
"|azdata | Command-line tool for installing and managing a big data cluster |[Installation](https://docs.microsoft.com/en-us/sql/big-data-cluster/deploy-install-azdata?view=sqlallproducts-allversions) |"
|
||||
],
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": "### **Check dependencies**",
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"import pandas,sys,os,json,html,getpass,time\r\n",
|
||||
"pandas_version = pandas.__version__.split('.')\r\n",
|
||||
"pandas_major = int(pandas_version[0])\r\n",
|
||||
"pandas_minor = int(pandas_version[1])\r\n",
|
||||
"pandas_patch = int(pandas_version[2])\r\n",
|
||||
"if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\r\n",
|
||||
" sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\r\n",
|
||||
"\r\n",
|
||||
"def run_command():\r\n",
|
||||
" print(\"Executing: \" + cmd)\r\n",
|
||||
" !{cmd}\r\n",
|
||||
" if _exit_code != 0:\r\n",
|
||||
" sys.exit(f'Command execution failed with exit code: {str(_exit_code)}.\\n\\t{cmd}\\n')\r\n",
|
||||
" print(f'Successfully executed: {cmd}')\r\n",
|
||||
"\r\n",
|
||||
"cmd = 'kubectl version --client=true'\r\n",
|
||||
"run_command()\r\n",
|
||||
"cmd = 'azdata --version'\r\n",
|
||||
"run_command()"
|
||||
],
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"execution_count": 1
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": "### **Show current context**",
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"cmd = ' kubectl config current-context'\r\n",
|
||||
"run_command()"
|
||||
],
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"execution_count": 2
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": "### **Required information**",
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"env_var_flag = \"AZDATA_NB_VAR_BDC_CONTROLLER_PASSWORD\" in os.environ\n",
|
||||
"if env_var_flag:\n",
|
||||
" mssql_password = os.environ[\"AZDATA_NB_VAR_BDC_CONTROLLER_PASSWORD\"]\n",
|
||||
" mssql_storage_class = os.environ[\"AZDATA_NB_VAR_BDC_STORAGE_CLASS\"]\n",
|
||||
" mssql_data_size = os.environ[\"AZDATA_NB_VAR_BDC_DATA_SIZE\"]\n",
|
||||
" mssql_log_size = os.environ[\"AZDATA_NB_VAR_BDC_LOG_SIZE\"]\n",
|
||||
"else: \n",
|
||||
" mssql_password = getpass.getpass(prompt = 'SQL Server 2019 big data cluster controller password')\n",
|
||||
" if mssql_password == \"\":\n",
|
||||
" sys.exit(f'Password is required.')\n",
|
||||
" confirm_password = getpass.getpass(prompt = 'Confirm password')\n",
|
||||
" if mssql_password != confirm_password:\n",
|
||||
" sys.exit(f'Passwords do not match.')\n",
|
||||
" mssql_storage_class = input('Storage class name')\n",
|
||||
" mssql_data_size = input('Capacity for data in GB, default is 100GB')\n",
|
||||
" if mssql_data_size == \"\":\n",
|
||||
" mssql_data_size = \"100\"\n",
|
||||
" mssql_log_size = input('Capacity for logs in GB, default is 50GB')\n",
|
||||
" if mssql_log_size == \"\":\n",
|
||||
" mssql_log_size = \"50\"\n",
|
||||
"print('You can also use the same password to access Knox and SQL Server.')"
|
||||
],
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"execution_count": 3
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": "### **Default settings**",
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"if env_var_flag:\n",
|
||||
" mssql_cluster_name = os.environ[\"AZDATA_NB_VAR_BDC_NAME\"]\n",
|
||||
" mssql_controller_username = os.environ[\"AZDATA_NB_VAR_BDC_CONTROLLER_USERNAME\"]\n",
|
||||
"else:\n",
|
||||
" mssql_cluster_name = 'mssql-cluster'\n",
|
||||
" mssql_controller_username = 'admin'\n",
|
||||
"configuration_profile = 'kubeadm-dev-test'\n",
|
||||
"configuration_folder = 'mssql-bdc-configuration'\n",
|
||||
"print(f'SQL Server big data cluster name: {mssql_cluster_name}')\n",
|
||||
"print(f'SQL Server big data cluster controller user name: {mssql_controller_username}')\n",
|
||||
"print(f'Storage class name: {mssql_storage_class}')\n",
|
||||
"print(f'Capacity for data (GB): {mssql_data_size}')\n",
|
||||
"print(f'Capacity for logs (GB): {mssql_log_size}')\n",
|
||||
"print(f'Deployment configuration profile: {configuration_profile}')\n",
|
||||
"print(f'Deployment configuration: {configuration_folder}')"
|
||||
],
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"execution_count": 4
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": "### **Create a deployment configuration file**",
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"os.environ[\"ACCEPT_EULA\"] = 'yes'\n",
|
||||
"cmd = f'azdata bdc config init --source {configuration_profile} --target {configuration_folder} --force'\n",
|
||||
"run_command()\n",
|
||||
"cmd = f'azdata bdc config replace -c {configuration_folder}/cluster.json -j metadata.name={mssql_cluster_name}'\n",
|
||||
"run_command()\n",
|
||||
"cmd = f'azdata bdc config replace -c {configuration_folder}/control.json -j $.spec.storage.data.className={mssql_storage_class}'\n",
|
||||
"run_command()\n",
|
||||
"cmd = f'azdata bdc config replace -c {configuration_folder}/control.json -j $.spec.storage.data.size={mssql_data_size}Gi'\n",
|
||||
"run_command()\n",
|
||||
"cmd = f'azdata bdc config replace -c {configuration_folder}/control.json -j $.spec.storage.logs.className={mssql_storage_class}'\n",
|
||||
"run_command()\n",
|
||||
"cmd = f'azdata bdc config replace -c {configuration_folder}/control.json -j $.spec.storage.logs.size={mssql_log_size}Gi'\n",
|
||||
"run_command()"
|
||||
],
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"execution_count": 6
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": "### **Create SQL Server 2019 big data cluster**",
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"print (f'Creating SQL Server 2019 big data cluster: {mssql_cluster_name} using configuration {configuration_folder}')\n",
|
||||
"os.environ[\"CONTROLLER_USERNAME\"] = mssql_controller_username\n",
|
||||
"os.environ[\"CONTROLLER_PASSWORD\"] = mssql_password\n",
|
||||
"os.environ[\"MSSQL_SA_PASSWORD\"] = mssql_password\n",
|
||||
"os.environ[\"KNOX_PASSWORD\"] = mssql_password\n",
|
||||
"cmd = f'azdata bdc create -c {configuration_folder}'\n",
|
||||
"run_command()"
|
||||
],
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"execution_count": 7
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": "### **Login to SQL Server 2019 big data cluster**",
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"cmd = f'azdata login --cluster-name {mssql_cluster_name}'\n",
|
||||
"run_command()"
|
||||
],
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"execution_count": 8
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": "### **Show SQL Server 2019 big data cluster endpoints**",
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"from IPython.display import *\n",
|
||||
"pandas.set_option('display.max_colwidth', -1)\n",
|
||||
"cmd = f'azdata bdc endpoint list'\n",
|
||||
"cmdOutput = !{cmd}\n",
|
||||
"endpoints = json.loads(''.join(cmdOutput))\n",
|
||||
"endpointsDataFrame = pandas.DataFrame(endpoints)\n",
|
||||
"endpointsDataFrame.columns = [' '.join(word[0].upper() + word[1:] for word in columnName.split()) for columnName in endpoints[0].keys()]\n",
|
||||
"display(HTML(endpointsDataFrame.to_html(index=False, render_links=True)))"
|
||||
],
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"execution_count": 9
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Connect to master SQL Server instance in Azure Data Studio**\r\n",
|
||||
"Click the link below to connect to the master SQL Server instance of the SQL Server 2019 big data cluster."
|
||||
],
|
||||
"metadata": {}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"sqlEndpoints = [x for x in endpoints if x['name'] == 'sql-server-master']\r\n",
|
||||
"if sqlEndpoints and len(sqlEndpoints) == 1:\r\n",
|
||||
" connectionParameter = '{\"serverName\":\"' + sqlEndpoints[0]['endpoint'] + '\",\"providerName\":\"MSSQL\",\"authenticationType\":\"SqlLogin\",\"userName\":\"sa\",\"password\":' + json.dumps(mssql_password) + '}'\r\n",
|
||||
" display(HTML('<br/><a href=\"command:azdata.connect?' + html.escape(connectionParameter)+'\"><font size=\"3\">Click here to connect to master SQL Server instance</font></a><br/>'))\r\n",
|
||||
"else:\r\n",
|
||||
" sys.exit('Could not find the master SQL Server instance endpoint')"
|
||||
],
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"execution_count": 10
|
||||
}
|
||||
]
|
||||
}
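The "Check dependencies" cell in the notebook above gates on pandas 0.24.2 or newer with a chain of major/minor/patch comparisons, and funnels every shell call through a run_command() helper that aborts on a non-zero exit code via IPython's !{cmd} and _exit_code. A minimal standalone sketch of the same pattern, with subprocess standing in for the IPython shell escape (the subprocess variant is an illustration, not part of this commit):

import subprocess
import sys

import pandas

# The notebook's version gate, expressed as a tuple comparison:
# require pandas >= 0.24.2.
version = tuple(int(part) for part in pandas.__version__.split('.')[:3])
if version < (0, 24, 2):
    sys.exit('Please upgrade the Notebook dependencies before you proceed.')

def run_command(cmd):
    # Echo the command, run it through the shell, and abort on failure,
    # mirroring the notebook's use of !{cmd} and _exit_code.
    print('Executing: ' + cmd)
    exit_code = subprocess.call(cmd, shell=True)
    if exit_code != 0:
        sys.exit(f'Command execution failed with exit code: {exit_code}.\n\t{cmd}\n')
    print(f'Successfully executed: {cmd}')

run_command('kubectl version --client=true')
run_command('azdata --version')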
@@ -48,7 +48,7 @@
 "pandas_minor = int(pandas_version[1])\r\n",
 "pandas_patch = int(pandas_version[2])\r\n",
 "if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\r\n",
-" sys.exit('Please upgrade the Notebook dependencies before you proceed. You can do so by running \"Reinstall Notebook dependencies\" from the command palette in Azure Data Studio.')\r\n",
+" sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\r\n",
 "\r\n",
 "def run_command():\r\n",
 " print(\"Executing: \" + cmd)\r\n",
@@ -48,7 +48,7 @@
 "pandas_minor = int(pandas_version[1])\r\n",
 "pandas_patch = int(pandas_version[2])\r\n",
 "if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\r\n",
-" sys.exit('Please upgrade the Notebook dependencies before you proceed. You can do so by running \"Reinstall Notebook dependencies\" from the command palette in Azure Data Studio.')\r\n",
+" sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\r\n",
 "\r\n",
 "def run_command():\r\n",
 " print(\"Executing: \" + cmd)\r\n",
@@ -227,8 +227,12 @@
 "displayName": "%bdc-deployment-target-aks%"
 },
 {
-"name": "existingAks",
+"name": "existing-aks",
 "displayName": "%bdc-deployment-target-existing-aks%"
 },
+{
+"name": "existing-kubeadm",
+"displayName": "%bdc-deployment-target-existing-kubeadm%"
+}
 ]
 }
@@ -387,7 +391,81 @@
 "name": "azdata"
 }
 ],
-"when": "target=existingAks&&version=bdc2019"
+"when": "target=existing-aks&&version=bdc2019"
 },
+{
+"dialog": {
+"title": "%bdc-existing-kubeadm-dialog-title%",
+"name": "bdc-existing-kubeadm-dialog",
+"tabs": [
+{
+"title": "",
+"sections": [
+{
+"title": "%bdc-cluster-settings-section-title%",
+"fields": [
+{
+"label": "%bdc-cluster-name-field%",
+"variableName": "AZDATA_NB_VAR_BDC_NAME",
+"type": "text",
+"defaultValue": "mssql-cluster",
+"required": true
+},
+{
+"label": "%bdc-controller-username-field%",
+"variableName": "AZDATA_NB_VAR_BDC_CONTROLLER_USERNAME",
+"type": "text",
+"defaultValue": "admin",
+"required": true
+},
+{
+"label": "%bdc-password-field%",
+"variableName": "AZDATA_NB_VAR_BDC_CONTROLLER_PASSWORD",
+"type": "password",
+"confirmationRequired": true,
+"confirmationLabel": "%bdc-confirm-password-field%",
+"defaultValue": "",
+"required": true
+},
+{
+"label": "%bdc-storage-class-field%",
+"variableName": "AZDATA_NB_VAR_BDC_STORAGE_CLASS",
+"type": "text",
+"defaultValue": "",
+"required": true
+},
+{
+"label": "%bdc-data-size-field%",
+"variableName": "AZDATA_NB_VAR_BDC_DATA_SIZE",
+"type": "number",
+"defaultValue": "100",
+"min": 1,
+"required": true
+},
+{
+"label": "%bdc-log-size-field%",
+"variableName": "AZDATA_NB_VAR_BDC_LOG_SIZE",
+"type": "number",
+"defaultValue": "50",
+"min": 1,
+"required": true
+}
+]
+}
+]
+}
+]
+},
+"notebook": "%bdc-2019-existing-kubeadm-notebook%",
+"requiredTools": [
+{
+"name": "kubectl"
+},
+{
+"name": "azdata"
+}
+],
+"when": "target=existing-kubeadm&&version=bdc2019"
+}
 ]
 }
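Each field in the dialog definition above reaches the notebook as an environment variable named by its variableName, and the notebook's "Required information" and "Default settings" cells branch on whether those variables are present. A condensed sketch of that handshake, assuming only the variable names visible in this diff (not a verbatim excerpt from the extension):

import getpass
import os
import sys

# When launched from the deployment wizard, each dialog field arrives as
# an AZDATA_NB_VAR_* environment variable; otherwise fall back to prompts.
if "AZDATA_NB_VAR_BDC_CONTROLLER_PASSWORD" in os.environ:
    password = os.environ["AZDATA_NB_VAR_BDC_CONTROLLER_PASSWORD"]
    storage_class = os.environ["AZDATA_NB_VAR_BDC_STORAGE_CLASS"]
    data_size = os.environ["AZDATA_NB_VAR_BDC_DATA_SIZE"]
    log_size = os.environ["AZDATA_NB_VAR_BDC_LOG_SIZE"]
else:
    password = getpass.getpass(prompt='Controller password')
    if not password:
        sys.exit('Password is required.')
    storage_class = input('Storage class name')
    data_size = input('Capacity for data in GB, default is 100GB') or '100'
    log_size = input('Capacity for logs in GB, default is 50GB') or '50'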
@@ -18,16 +18,18 @@
 "bdc-deployment-target": "Deployment target",
 "bdc-deployment-target-aks": "New Azure Kubernetes Service Cluster",
 "bdc-deployment-target-existing-aks": "Existing Azure Kubernetes Service Cluster",
+"bdc-deployment-target-existing-kubeadm": "Existing Kubernetes Cluster (kubeadm)",
 "bdc-2019-aks-notebook": "./notebooks/bdc/2019/deploy-bdc-aks.ipynb",
 "bdc-2019-existing-aks-notebook": "./notebooks/bdc/2019/deploy-bdc-existing-aks.ipynb",
+"bdc-2019-existing-kubeadm-notebook": "./notebooks/bdc/2019/deploy-bdc-existing-kubeadm.ipynb",
 "docker-sql-2017-title": "Deploy SQL Server 2017 container images with Docker",
 "docker-sql-2019-title": "Deploy SQL Server 2019 container images with Docker",
 "docker-container-name-field": "Container name",
 "docker-sql-password-field": "SQL Server password",
 "docker-confirm-sql-password-field": "Confirm password",
 "docker-sql-port-field": "Port",
-"bdc-new-aks-dialog-title": "Deploy SQL Server big data cluster on new AKS cluster",
-"bdc-existing-aks-dialog-title": "Deploy SQL Server big data cluster on existing AKS cluster",
+"bdc-new-aks-dialog-title": "Deployment target: new AKS cluster",
+"bdc-existing-aks-dialog-title": "Deployment target: existing AKS cluster",
 "bdc-cluster-settings-section-title": "SQL Server big data cluster settings",
 "bdc-cluster-name-field": "Cluster name",
 "bdc-controller-username-field": "Controller username",
@@ -40,5 +42,9 @@
 "bdc-azure-region-field": "Region",
 "bdc-azure-aks-name-field": "AKS cluster name",
 "bdc-azure-vm-size-field": "VM size",
-"bdc-azure-vm-count-field": "VM count"
+"bdc-azure-vm-count-field": "VM count",
+"bdc-existing-kubeadm-dialog-title": "Deployment target: existing Kubernetes cluster (kubeadm)",
+"bdc-storage-class-field": "Storage class name",
+"bdc-data-size-field": "Capacity for data (GB)",
+"bdc-log-size-field": "Capacity for logs (GB)"
 }
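The %key% placeholders used throughout the dialog definition (for example %bdc-storage-class-field%) resolve against this string table, in the style of VS Code package.nls.json localization. Conceptually the lookup is a plain substitution; a hypothetical sketch (the loader below is illustrative, not the extension's actual code):

import json
import re

# Resolve "%key%" placeholders against the package.nls.json string table.
with open('package.nls.json', encoding='utf-8') as f:
    strings = json.load(f)

def localize(value):
    # Swap each %key% for its localized string; leave unknown keys untouched.
    return re.sub(r'%([^%]+)%', lambda m: strings.get(m.group(1), m.group(0)), value)

print(localize('%bdc-storage-class-field%'))  # -> Storage class name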