diff --git a/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-aks.ipynb b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-aks.ipynb index ddbd72c0b7..d95b6e2b43 100644 --- a/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-aks.ipynb +++ b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-aks.ipynb @@ -23,7 +23,7 @@ { "cell_type": "markdown", "source": [ - "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/src/sql/media/microsoft-small-logo.png)\n", + "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/extensions/resource-deployment/images/microsoft-small-logo.png)\n", " \n", "## Create Azure Kubernetes Service cluster and deploy SQL Server 2019 Big Data Cluster\n", " \n", @@ -32,7 +32,7 @@ "* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n", "* The **Required information** will check and prompt you for password if it is not set in the environment variable. The password will be used to access the cluster controller, SQL Server, and Knox.\n", "\n", - "Please press the \"Run Cells\" button to run the notebook" + "Please press the \"Run all\" button to run the notebook" ], "metadata": { "azdata_cell_guid": "4f6bc3bc-3592-420a-b534-384011189005" @@ -47,7 +47,7 @@ "|Tools|Description|Installation|\n", "|---|---|---|\n", "|Azure CLI |Command-line tool for managing Azure services. Used to create AKS cluster | [Installation](https://docs.microsoft.com/cli/azure/install-azure-cli?view=azure-cli-latest) |\n", - "|kubectl | Command-line tool for monitoring the underlying Kuberentes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n", + "|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n", "|azdata | Command-line tool for installing and managing a Big Data Cluster |[Installation](https://docs.microsoft.com/en-us/sql/big-data-cluster/deploy-install-azdata?view=sqlallproducts-allversions) |" ], "metadata": { @@ -87,7 +87,7 @@ ] }, "outputs": [], - "execution_count": 1 + "execution_count": null }, { "cell_type": "markdown", @@ -122,7 +122,7 @@ ] }, "outputs": [], - "execution_count": 0 + "execution_count": null }, { "cell_type": "markdown", @@ -155,7 +155,7 @@ ] }, "outputs": [], - "execution_count": 2 + "execution_count": null }, { "cell_type": "markdown", @@ -181,7 +181,7 @@ ] }, "outputs": [], - "execution_count": 5 + "execution_count": null }, { "cell_type": "markdown", @@ -209,7 +209,7 @@ ] }, "outputs": [], - "execution_count": 6 + "execution_count": null }, { "cell_type": "markdown", @@ -232,7 +232,7 @@ ] }, "outputs": [], - "execution_count": 7 + "execution_count": null }, { "cell_type": "markdown", @@ -255,7 +255,7 @@ ] }, "outputs": [], - "execution_count": 8 + "execution_count": null }, { "cell_type": "markdown", @@ -278,7 +278,7 @@ ] }, "outputs": [], - "execution_count": 9 + "execution_count": null }, { "cell_type": "markdown", @@ -312,7 +312,7 @@ ] }, "outputs": [], - "execution_count": 10 + "execution_count": null }, { "cell_type": "markdown", @@ -341,7 +341,7 @@ ] }, "outputs": [], - "execution_count": 11 + "execution_count": null }, { "cell_type": "markdown", @@ -364,7 +364,7 @@ ] }, "outputs": [], - "execution_count": 12 + "execution_count": null }, { "cell_type": "markdown", @@ -394,7 
+394,7 @@ ] }, "outputs": [], - "execution_count": 13 + "execution_count": null }, { "cell_type": "markdown", @@ -423,7 +423,7 @@ ] }, "outputs": [], - "execution_count": 14 + "execution_count": null } ] -} +} \ No newline at end of file diff --git a/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-aks.ipynb b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-aks.ipynb index 2090f9119c..fd1087f876 100644 --- a/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-aks.ipynb +++ b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-aks.ipynb @@ -23,7 +23,7 @@ { "cell_type": "markdown", "source": [ - "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/src/sql/media/microsoft-small-logo.png)\n", + "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/extensions/resource-deployment/images/microsoft-small-logo.png)\n", " \n", "## Deploy SQL Server 2019 Big Data Cluster on an existing Azure Kubernetes Service (AKS) cluster\n", " \n", @@ -32,7 +32,7 @@ "* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n", "* The **Required information** will check and prompt you for password if it is not set in the environment variable. The password can be used to access the cluster controller, SQL Server, and Knox.\n", "\n", - "Please press the \"Run Cells\" button to run the notebook" + "Please press the \"Run all\" button to run the notebook" ], "metadata": { "azdata_cell_guid": "82e60c1a-7acf-47ee-877f-9e85e92e11da" @@ -46,7 +46,7 @@ " \n", "|Tools|Description|Installation|\n", "|---|---|---|\n", - "|kubectl | Command-line tool for monitoring the underlying Kuberentes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n", + "|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n", "|azdata | Command-line tool for installing and managing a Big Data Cluster |[Installation](https://docs.microsoft.com/en-us/sql/big-data-cluster/deploy-install-azdata?view=sqlallproducts-allversions) |" ], "metadata": { @@ -86,7 +86,7 @@ ] }, "outputs": [], - "execution_count": 1 + "execution_count": null }, { "cell_type": "markdown", @@ -120,7 +120,7 @@ ] }, "outputs": [], - "execution_count": 0 + "execution_count": null }, { "cell_type": "markdown", @@ -153,7 +153,7 @@ ] }, "outputs": [], - "execution_count": 3 + "execution_count": null }, { "cell_type": "markdown", @@ -177,7 +177,7 @@ ] }, "outputs": [], - "execution_count": 0 + "execution_count": null }, { "cell_type": "markdown", @@ -211,7 +211,7 @@ ] }, "outputs": [], - "execution_count": 6 + "execution_count": null }, { "cell_type": "markdown", @@ -240,7 +240,7 @@ ] }, "outputs": [], - "execution_count": 7 + "execution_count": null }, { "cell_type": "markdown", @@ -263,7 +263,7 @@ ] }, "outputs": [], - "execution_count": 8 + "execution_count": null }, { "cell_type": "markdown", @@ -293,7 +293,7 @@ ] }, "outputs": [], - "execution_count": 9 + "execution_count": null }, { "cell_type": "markdown", @@ -322,7 +322,7 @@ ] }, "outputs": [], - "execution_count": 10 + "execution_count": null } ] -} +} \ No newline at end of file diff --git a/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-aro.ipynb 
b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-aro.ipynb new file mode 100644 index 0000000000..0934c2bea7 --- /dev/null +++ b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-aro.ipynb @@ -0,0 +1,350 @@ +{ + "metadata": { + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python", + "version": "3.6.6", + "mimetype": "text/x-python", + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "pygments_lexer": "ipython3", + "nbconvert_exporter": "python", + "file_extension": ".py" + } + }, + "nbformat_minor": 2, + "nbformat": 4, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/extensions/resource-deployment/images/microsoft-small-logo.png)\n", + " \n", + "## Deploy SQL Server 2019 Big Data Cluster on an existing Azure Red Hat OpenShift cluster\n", + " \n", + "This notebook walks through the process of deploying a SQL Server 2019 Big Data Cluster on an existing Azure Red Hat OpenShift cluster.\n", + " \n", + "* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n", + "* The **Required information** will check and prompt you for password if it is not set in the environment variable. The password can be used to access the cluster controller, SQL Server, and Knox.\n", + "\n", + "Please press the \"Run all\" button to run the notebook" + ], + "metadata": { + "azdata_cell_guid": "23954d96-3932-4a8e-ab73-da605f99b1a4" + } + }, + { + "cell_type": "markdown", + "source": [ + "### **Prerequisites** \n", + "Ensure the following tools are installed and added to PATH before proceeding.\n", + " \n", + "|Tools|Description|Installation|\n", + "|---|---|---|\n", + "|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n", + "|azdata | Command-line tool for installing and managing a Big Data Cluster |[Installation](https://docs.microsoft.com/en-us/sql/big-data-cluster/deploy-install-azdata?view=sqlallproducts-allversions) |" + ], + "metadata": { + "azdata_cell_guid": "1d7f4c6a-0cb8-4ecc-81c8-544712253a3f" + } + }, + { + "cell_type": "markdown", + "source": [ + "### **Setup**" + ], + "metadata": { + "azdata_cell_guid": "a31f9894-903f-4e19-a5a8-6fd888ff013b" + } + }, + { + "cell_type": "code", + "source": [ + "import pandas,sys,os,json,html,getpass,time\n", + "pandas_version = pandas.__version__.split('.')\n", + "pandas_major = int(pandas_version[0])\n", + "pandas_minor = int(pandas_version[1])\n", + "pandas_patch = int(pandas_version[2])\n", + "if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\n", + " sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\n", + "def run_command(command):\n", + " print(\"Executing: \" + command)\n", + " !{command}\n", + " if _exit_code != 0:\n", + " sys.exit(f'Command execution failed with exit code: {str(_exit_code)}.\\n\\t{command}\\n')\n", + " print(f'Successfully executed: {command}')" + ], + "metadata": { + "azdata_cell_guid": "26fa8bc4-4b8e-4c31-ae11-50484821cea8", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + 
}, + { + "cell_type": "markdown", + "source": [ + "### **Set variables**\n", + "Generated by Azure Data Studio using the values collected in the Deploy Big Data Cluster wizard" + ], + "metadata": { + "azdata_cell_guid": "e70640d0-6059-4cab-939e-e985a978c0da" + } + }, + { + "cell_type": "markdown", + "source": [ + "### **Check dependencies**" + ], + "metadata": { + "azdata_cell_guid": "869d0397-a280-4dc4-be76-d652189b5131" + } + }, + { + "cell_type": "code", + "source": [ + "run_command('kubectl version --client=true')\n", + "run_command('azdata --version')" + ], + "metadata": { + "azdata_cell_guid": "c38afb67-1132-495e-9af1-35bf067acbeb", + "tags": [] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Required information**" + ], + "metadata": { + "azdata_cell_guid": "7b383b0d-5687-45b3-a16f-ba3b170c796e" + } + }, + { + "cell_type": "code", + "source": [ + "invoked_by_wizard = \"AZDATA_NB_VAR_BDC_ADMIN_PASSWORD\" in os.environ\n", + "if invoked_by_wizard:\n", + " mssql_password = os.environ[\"AZDATA_NB_VAR_BDC_ADMIN_PASSWORD\"]\n", + " if mssql_auth_mode == \"ad\":\n", + " mssql_domain_service_account_password = os.environ[\"AZDATA_NB_VAR_BDC_AD_DOMAIN_SVC_PASSWORD\"]\n", + "else:\n", + " mssql_password = getpass.getpass(prompt = 'SQL Server 2019 Big Data Cluster controller password')\n", + " if mssql_password == \"\":\n", + " sys.exit(f'Password is required.')\n", + " confirm_password = getpass.getpass(prompt = 'Confirm password')\n", + " if mssql_password != confirm_password:\n", + " sys.exit(f'Passwords do not match.')\n", + " if mssql_auth_mode == \"ad\":\n", + " mssql_domain_service_account_password = getpass.getpass(prompt = 'Domain service account password')\n", + " if mssql_domain_service_account_password == \"\":\n", + " sys.exit(f'Domain service account password is required.')\n", + "print('You can also use the controller password to access Knox and SQL Server.')" + ], + "metadata": { + "azdata_cell_guid": "b5970f2b-cf13-41af-b0a2-5133d840325e", + "tags": [] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Set and show current context**" + ], + "metadata": { + "azdata_cell_guid": "6456bd0c-5b64-4d76-be59-e3a5b32697f5" + } + }, + { + "cell_type": "code", + "source": [ + "run_command(f'kubectl config use-context {mssql_cluster_context}')\n", + "run_command('kubectl config current-context')" + ], + "metadata": { + "azdata_cell_guid": "a38f8b3a-f93a-484c-b9e2-4eba3ed99cc2" + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Make sure the target namespace already exists**" + ], + "metadata": { + "azdata_cell_guid": "3bf1d902-2217-4c99-b2d6-38e45de8e308" + } + }, + { + "cell_type": "code", + "source": [ + "run_command(f'kubectl get namespace {mssql_cluster_name}')" + ], + "metadata": { + "azdata_cell_guid": "6ca9bf71-049a-458e-8000-311d4c15b1ca" + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Create deployment configuration files**" + ], + "metadata": { + "azdata_cell_guid": "6d78da36-6af5-4309-baad-bc81bb2cdb7f" + } + }, + { + "cell_type": "code", + "source": [ + "mssql_target_profile = 'ads-bdc-custom-profile'\n", + "if not os.path.exists(mssql_target_profile):\n", + " os.mkdir(mssql_target_profile)\n", + "bdcJsonObj = json.loads(bdc_json)\n", + "controlJsonObj = json.loads(control_json)\n", + "bdcJsonFile = open(f'{mssql_target_profile}/bdc.json', 'w')\n", + 
"bdcJsonFile.write(json.dumps(bdcJsonObj, indent = 4))\n", + "bdcJsonFile.close()\n", + "controlJsonFile = open(f'{mssql_target_profile}/control.json', 'w')\n", + "controlJsonFile.write(json.dumps(controlJsonObj, indent = 4))\n", + "controlJsonFile.close()\n", + "print(f'Created deployment configuration folder: {mssql_target_profile}')" + ], + "metadata": { + "azdata_cell_guid": "3110ab23-ecfc-4e36-a1c5-28536b7edebf", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Create SQL Server 2019 Big Data Cluster**" + ], + "metadata": { + "azdata_cell_guid": "7d56d262-8cd5-49e4-b745-332c6e7a3cb2" + } + }, + { + "cell_type": "code", + "source": [ + "print (f'Creating SQL Server 2019 Big Data Cluster: {mssql_cluster_name} using configuration {mssql_target_profile}')\n", + "os.environ[\"ACCEPT_EULA\"] = 'yes'\n", + "os.environ[\"AZDATA_USERNAME\"] = mssql_username\n", + "os.environ[\"AZDATA_PASSWORD\"] = mssql_password\n", + "if mssql_auth_mode == \"ad\":\n", + " os.environ[\"DOMAIN_SERVICE_ACCOUNT_USERNAME\"] = mssql_domain_service_account_username\n", + " os.environ[\"DOMAIN_SERVICE_ACCOUNT_PASSWORD\"] = mssql_domain_service_account_password\n", + "if os.name == 'nt':\n", + " print(f'If you don\\'t see output produced by azdata, you can run the following command in a terminal window to check the deployment status:\\n\\t{os.environ[\"AZDATA_NB_VAR_KUBECTL\"]} get pods -n {mssql_cluster_name} ')\n", + "run_command(f'azdata bdc create -c {mssql_target_profile}')" + ], + "metadata": { + "azdata_cell_guid": "0a743e88-e7d0-4b41-b8a3-e43985d15f2b", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Login to SQL Server 2019 Big Data Cluster**" + ], + "metadata": { + "azdata_cell_guid": "7929fd90-324d-482a-a101-ae29cb183691" + } + }, + { + "cell_type": "code", + "source": [ + "run_command(f'azdata login -n {mssql_cluster_name}')" + ], + "metadata": { + "azdata_cell_guid": "3a49909b-e09e-4e62-a825-c39de2cffc94", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Show SQL Server 2019 Big Data Cluster endpoints**" + ], + "metadata": { + "azdata_cell_guid": "038e801a-a393-4f8d-8e2d-97bc3b740b0c" + } + }, + { + "cell_type": "code", + "source": [ + "from IPython.display import *\n", + "pandas.set_option('display.max_colwidth', -1)\n", + "cmd = f'azdata bdc endpoint list'\n", + "cmdOutput = !{cmd}\n", + "endpoints = json.loads(''.join(cmdOutput))\n", + "endpointsDataFrame = pandas.DataFrame(endpoints)\n", + "endpointsDataFrame.columns = [' '.join(word[0].upper() + word[1:] for word in columnName.split()) for columnName in endpoints[0].keys()]\n", + "display(HTML(endpointsDataFrame.to_html(index=False, render_links=True)))" + ], + "metadata": { + "azdata_cell_guid": "2a8c8d5d-862c-4672-9309-38aa03afc4e6", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Connect to SQL Server Master instance in Azure Data Studio**\n", + "Click the link below to connect to the SQL Server Master instance of the SQL Server 2019 Big Data Cluster." 
+ ], + "metadata": { + "azdata_cell_guid": "0bd809fa-8225-4954-a50c-da57ea167896" + } + }, + { + "cell_type": "code", + "source": [ + "sqlEndpoints = [x for x in endpoints if x['name'] == 'sql-server-master']\n", + "if sqlEndpoints and len(sqlEndpoints) == 1:\n", + " connectionParameter = '{\"serverName\":\"' + sqlEndpoints[0]['endpoint'] + '\",\"providerName\":\"MSSQL\",\"authenticationType\":\"SqlLogin\",\"userName\":' + json.dumps(mssql_username) + ',\"password\":' + json.dumps(mssql_password) + '}'\n", + " display(HTML('
<br/><a href=\"command:azdata.connect?' + html.escape(connectionParameter) + '\"><font size=\"3\">Click here to connect to SQL Server Master instance</font></a><br/>
'))\n", + "else:\n", + " sys.exit('Could not find the SQL Server Master instance endpoint.')" + ], + "metadata": { + "azdata_cell_guid": "d591785d-71aa-4c5d-9cbb-a7da79bca503", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + } + ] +} \ No newline at end of file diff --git a/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-kubeadm.ipynb b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-kubeadm.ipynb index 207ed76cca..c328327abe 100644 --- a/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-kubeadm.ipynb +++ b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-kubeadm.ipynb @@ -23,7 +23,7 @@ { "cell_type": "markdown", "source": [ - "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/src/sql/media/microsoft-small-logo.png)\n", + "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/extensions/resource-deployment/images/microsoft-small-logo.png)\n", " \n", "## Deploy SQL Server 2019 Big Data Cluster on an existing cluster deployed using kubeadm\n", " \n", @@ -32,7 +32,7 @@ "* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n", "* The **Required information** will check and prompt you for password if it is not set in the environment variable. The password can be used to access the cluster controller, SQL Server, and Knox.\n", "\n", - "Please press the \"Run Cells\" button to run the notebook" + "Please press the \"Run all\" button to run the notebook" ], "metadata": { "azdata_cell_guid": "23954d96-3932-4a8e-ab73-da605f99b1a4" @@ -46,7 +46,7 @@ " \n", "|Tools|Description|Installation|\n", "|---|---|---|\n", - "|kubectl | Command-line tool for monitoring the underlying Kuberentes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n", + "|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n", "|azdata | Command-line tool for installing and managing a Big Data Cluster |[Installation](https://docs.microsoft.com/en-us/sql/big-data-cluster/deploy-install-azdata?view=sqlallproducts-allversions) |" ], "metadata": { @@ -82,11 +82,11 @@ "metadata": { "azdata_cell_guid": "26fa8bc4-4b8e-4c31-ae11-50484821cea8", "tags": [ - "hide_input" - ] + "hide_input" + ] }, "outputs": [], - "execution_count": 1 + "execution_count": null }, { "cell_type": "markdown", @@ -120,7 +120,7 @@ ] }, "outputs": [], - "execution_count": 0 + "execution_count": null }, { "cell_type": "markdown", @@ -159,7 +159,7 @@ ] }, "outputs": [], - "execution_count": 3 + "execution_count": null }, { "cell_type": "markdown", @@ -180,7 +180,7 @@ "azdata_cell_guid": "a38f8b3a-f93a-484c-b9e2-4eba3ed99cc2" }, "outputs": [], - "execution_count": 0 + "execution_count": null }, { "cell_type": "markdown", @@ -214,7 +214,7 @@ ] }, "outputs": [], - "execution_count": 6 + "execution_count": null }, { "cell_type": "markdown", @@ -246,7 +246,7 @@ ] }, "outputs": [], - "execution_count": 7 + "execution_count": null }, { "cell_type": "markdown", @@ -269,7 +269,7 @@ ] }, "outputs": [], - "execution_count": 8 + "execution_count": null }, { "cell_type": "markdown", @@ -299,7 +299,7 @@ ] }, "outputs": [], - "execution_count": 9 + "execution_count": null }, { "cell_type": "markdown", @@ -328,7 +328,7 @@ ] }, 
"outputs": [], - "execution_count": 10 + "execution_count": null } ] -} +} \ No newline at end of file diff --git a/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-openshift.ipynb b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-openshift.ipynb new file mode 100644 index 0000000000..257d70b19e --- /dev/null +++ b/extensions/resource-deployment/notebooks/bdc/2019/deploy-bdc-existing-openshift.ipynb @@ -0,0 +1,352 @@ +{ + "metadata": { + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python", + "version": "3.6.6", + "mimetype": "text/x-python", + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "pygments_lexer": "ipython3", + "nbconvert_exporter": "python", + "file_extension": ".py" + } + }, + "nbformat_minor": 2, + "nbformat": 4, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/extensions/resource-deployment/images/microsoft-small-logo.png)\n", + " \n", + "## Deploy SQL Server 2019 Big Data Cluster on an existing OpenShift cluster\n", + " \n", + "This notebook walks through the process of deploying a SQL Server 2019 Big Data Cluster on an existing OpenShift cluster.\n", + " \n", + "* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n", + "* The **Required information** will check and prompt you for password if it is not set in the environment variable. The password can be used to access the cluster controller, SQL Server, and Knox.\n", + "\n", + "Please press the \"Run all\" button to run the notebook" + ], + "metadata": { + "azdata_cell_guid": "23954d96-3932-4a8e-ab73-da605f99b1a4" + } + }, + { + "cell_type": "markdown", + "source": [ + "### **Prerequisites** \n", + "Ensure the following tools are installed and added to PATH before proceeding.\n", + " \n", + "|Tools|Description|Installation|\n", + "|---|---|---|\n", + "|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n", + "|azdata | Command-line tool for installing and managing a Big Data Cluster |[Installation](https://docs.microsoft.com/en-us/sql/big-data-cluster/deploy-install-azdata?view=sqlallproducts-allversions) |" + ], + "metadata": { + "azdata_cell_guid": "1d7f4c6a-0cb8-4ecc-81c8-544712253a3f" + } + }, + { + "cell_type": "markdown", + "source": [ + "### **Setup**" + ], + "metadata": { + "azdata_cell_guid": "a31f9894-903f-4e19-a5a8-6fd888ff013b" + } + }, + { + "cell_type": "code", + "source": [ + "import pandas,sys,os,json,html,getpass,time\n", + "pandas_version = pandas.__version__.split('.')\n", + "pandas_major = int(pandas_version[0])\n", + "pandas_minor = int(pandas_version[1])\n", + "pandas_patch = int(pandas_version[2])\n", + "if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\n", + " sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\n", + "def run_command(command):\n", + " print(\"Executing: \" + command)\n", + " !{command}\n", + " if _exit_code != 0:\n", + " sys.exit(f'Command execution failed with exit code: {str(_exit_code)}.\\n\\t{command}\\n')\n", + " print(f'Successfully 
executed: {command}')" + ], + "metadata": { + "azdata_cell_guid": "26fa8bc4-4b8e-4c31-ae11-50484821cea8", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Set variables**\n", + "Generated by Azure Data Studio using the values collected in the Deploy Big Data Cluster wizard" + ], + "metadata": { + "azdata_cell_guid": "e70640d0-6059-4cab-939e-e985a978c0da" + } + }, + { + "cell_type": "markdown", + "source": [ + "### **Check dependencies**" + ], + "metadata": { + "azdata_cell_guid": "869d0397-a280-4dc4-be76-d652189b5131" + } + }, + { + "cell_type": "code", + "source": [ + "run_command('kubectl version --client=true')\n", + "run_command('azdata --version')" + ], + "metadata": { + "azdata_cell_guid": "c38afb67-1132-495e-9af1-35bf067acbeb", + "tags": [] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Required information**" + ], + "metadata": { + "azdata_cell_guid": "7b383b0d-5687-45b3-a16f-ba3b170c796e" + } + }, + { + "cell_type": "code", + "source": [ + "invoked_by_wizard = \"AZDATA_NB_VAR_BDC_ADMIN_PASSWORD\" in os.environ\n", + "if invoked_by_wizard:\n", + " mssql_password = os.environ[\"AZDATA_NB_VAR_BDC_ADMIN_PASSWORD\"]\n", + " if mssql_auth_mode == \"ad\":\n", + " mssql_domain_service_account_password = os.environ[\"AZDATA_NB_VAR_BDC_AD_DOMAIN_SVC_PASSWORD\"]\n", + "else:\n", + " mssql_password = getpass.getpass(prompt = 'SQL Server 2019 Big Data Cluster controller password')\n", + " if mssql_password == \"\":\n", + " sys.exit(f'Password is required.')\n", + " confirm_password = getpass.getpass(prompt = 'Confirm password')\n", + " if mssql_password != confirm_password:\n", + " sys.exit(f'Passwords do not match.')\n", + " if mssql_auth_mode == \"ad\":\n", + " mssql_domain_service_account_password = getpass.getpass(prompt = 'Domain service account password')\n", + " if mssql_domain_service_account_password == \"\":\n", + " sys.exit(f'Domain service account password is required.')\n", + "print('You can also use the controller password to access Knox and SQL Server.')" + ], + "metadata": { + "azdata_cell_guid": "b5970f2b-cf13-41af-b0a2-5133d840325e", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Set and show current context**" + ], + "metadata": { + "azdata_cell_guid": "6456bd0c-5b64-4d76-be59-e3a5b32697f5" + } + }, + { + "cell_type": "code", + "source": [ + "run_command(f'kubectl config use-context {mssql_cluster_context}')\n", + "run_command('kubectl config current-context')" + ], + "metadata": { + "azdata_cell_guid": "a38f8b3a-f93a-484c-b9e2-4eba3ed99cc2" + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Make sure the target namespace already exists**" + ], + "metadata": { + "azdata_cell_guid": "b903f09b-0eeb-45c0-8173-1741cce3790c" + } + }, + { + "cell_type": "code", + "source": [ + "run_command(f'kubectl get namespace {mssql_cluster_name}')" + ], + "metadata": { + "azdata_cell_guid": "174c02ea-8876-43be-bd93-3a39223e25ec" + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Create deployment configuration files**" + ], + "metadata": { + "azdata_cell_guid": "6d78da36-6af5-4309-baad-bc81bb2cdb7f" + } + }, + { + "cell_type": "code", + "source": [ + "mssql_target_profile = 'ads-bdc-custom-profile'\n", + "if not 
os.path.exists(mssql_target_profile):\n", + " os.mkdir(mssql_target_profile)\n", + "bdcJsonObj = json.loads(bdc_json)\n", + "controlJsonObj = json.loads(control_json)\n", + "bdcJsonFile = open(f'{mssql_target_profile}/bdc.json', 'w')\n", + "bdcJsonFile.write(json.dumps(bdcJsonObj, indent = 4))\n", + "bdcJsonFile.close()\n", + "controlJsonFile = open(f'{mssql_target_profile}/control.json', 'w')\n", + "controlJsonFile.write(json.dumps(controlJsonObj, indent = 4))\n", + "controlJsonFile.close()\n", + "print(f'Created deployment configuration folder: {mssql_target_profile}')" + ], + "metadata": { + "azdata_cell_guid": "3110ab23-ecfc-4e36-a1c5-28536b7edebf", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Create SQL Server 2019 Big Data Cluster**" + ], + "metadata": { + "azdata_cell_guid": "7d56d262-8cd5-49e4-b745-332c6e7a3cb2" + } + }, + { + "cell_type": "code", + "source": [ + "print (f'Creating SQL Server 2019 Big Data Cluster: {mssql_cluster_name} using configuration {mssql_target_profile}')\n", + "os.environ[\"ACCEPT_EULA\"] = 'yes'\n", + "os.environ[\"AZDATA_USERNAME\"] = mssql_username\n", + "os.environ[\"AZDATA_PASSWORD\"] = mssql_password\n", + "if mssql_auth_mode == \"ad\":\n", + " os.environ[\"DOMAIN_SERVICE_ACCOUNT_USERNAME\"] = mssql_domain_service_account_username\n", + " os.environ[\"DOMAIN_SERVICE_ACCOUNT_PASSWORD\"] = mssql_domain_service_account_password\n", + "if os.name == 'nt':\n", + " print(f'If you don\\'t see output produced by azdata, you can run the following command in a terminal window to check the deployment status:\\n\\t{os.environ[\"AZDATA_NB_VAR_KUBECTL\"]} get pods -n {mssql_cluster_name} ')\n", + "run_command(f'azdata bdc create -c {mssql_target_profile}')" + ], + "metadata": { + "azdata_cell_guid": "0a743e88-e7d0-4b41-b8a3-e43985d15f2b", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Login to SQL Server 2019 Big Data Cluster**" + ], + "metadata": { + "azdata_cell_guid": "7929fd90-324d-482a-a101-ae29cb183691" + } + }, + { + "cell_type": "code", + "source": [ + "run_command(f'azdata login -n {mssql_cluster_name}')" + ], + "metadata": { + "azdata_cell_guid": "3a49909b-e09e-4e62-a825-c39de2cffc94", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Show SQL Server 2019 Big Data Cluster endpoints**" + ], + "metadata": { + "azdata_cell_guid": "038e801a-a393-4f8d-8e2d-97bc3b740b0c" + } + }, + { + "cell_type": "code", + "source": [ + "from IPython.display import *\n", + "pandas.set_option('display.max_colwidth', -1)\n", + "cmd = f'azdata bdc endpoint list'\n", + "cmdOutput = !{cmd}\n", + "endpoints = json.loads(''.join(cmdOutput))\n", + "endpointsDataFrame = pandas.DataFrame(endpoints)\n", + "endpointsDataFrame.columns = [' '.join(word[0].upper() + word[1:] for word in columnName.split()) for columnName in endpoints[0].keys()]\n", + "display(HTML(endpointsDataFrame.to_html(index=False, render_links=True)))" + ], + "metadata": { + "azdata_cell_guid": "2a8c8d5d-862c-4672-9309-38aa03afc4e6", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + }, + { + "cell_type": "markdown", + "source": [ + "### **Connect to SQL Server Master instance in Azure Data Studio**\n", + "Click the link below to connect to the SQL Server Master instance of the SQL Server 2019 Big Data Cluster." 
+ ], + "metadata": { + "azdata_cell_guid": "0bd809fa-8225-4954-a50c-da57ea167896" + } + }, + { + "cell_type": "code", + "source": [ + "sqlEndpoints = [x for x in endpoints if x['name'] == 'sql-server-master']\n", + "if sqlEndpoints and len(sqlEndpoints) == 1:\n", + " connectionParameter = '{\"serverName\":\"' + sqlEndpoints[0]['endpoint'] + '\",\"providerName\":\"MSSQL\",\"authenticationType\":\"SqlLogin\",\"userName\":' + json.dumps(mssql_username) + ',\"password\":' + json.dumps(mssql_password) + '}'\n", + " display(HTML('
<br/><a href=\"command:azdata.connect?' + html.escape(connectionParameter) + '\"><font size=\"3\">Click here to connect to SQL Server Master instance</font></a><br/>
'))\n", + "else:\n", + " sys.exit('Could not find the SQL Server Master instance endpoint.')" + ], + "metadata": { + "azdata_cell_guid": "d591785d-71aa-4c5d-9cbb-a7da79bca503", + "tags": [ + "hide_input" + ] + }, + "outputs": [], + "execution_count": null + } + ] +} \ No newline at end of file diff --git a/extensions/resource-deployment/notebooks/docker/2017/deploy-sql2017-image.ipynb b/extensions/resource-deployment/notebooks/docker/2017/deploy-sql2017-image.ipynb index 6eb3f7fd1d..cb29f223e5 100644 --- a/extensions/resource-deployment/notebooks/docker/2017/deploy-sql2017-image.ipynb +++ b/extensions/resource-deployment/notebooks/docker/2017/deploy-sql2017-image.ipynb @@ -6,7 +6,7 @@ }, "language_info": { "name": "python", - "version": "3.7.3", + "version": "3.6.6", "mimetype": "text/x-python", "codemirror_mode": { "name": "ipython", @@ -23,21 +23,27 @@ { "cell_type": "markdown", "source": [ - "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/src/sql/media/microsoft-small-logo.png)\n", + "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/extensions/resource-deployment/images/microsoft-small-logo.png)\n", "## Run SQL Server 2017 container images with Docker\n", "This notebook will use Docker to pull and run the SQL Server 2017 container image and connect to it in Azure Data Studio\n", "\n", "### Dependencies\n", "- Docker Engine. For more information, see [Install Docker](https://docs.docker.com/engine/installation/).\n", "\n", - "Please press the \"Run Cells\" button to run the notebook" + "Please press the \"Run all\" button to run the notebook" ], - "metadata": {} + "metadata": { + "azdata_cell_guid": "1d10b817-e9f4-4532-b69f-4d47504d590b" + } }, { "cell_type": "markdown", - "source": "### Check dependencies", - "metadata": {} + "source": [ + "### Check dependencies" + ], + "metadata": { + "azdata_cell_guid": "630e76af-01a6-4905-90a7-2d643126cdb4" + } }, { "cell_type": "code", @@ -60,9 +66,11 @@ "cmd = 'docker version'\n", "run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "64f3e119-408a-4b61-bce4-68114e2a727c" + }, "outputs": [], - "execution_count": 1 + "execution_count": null }, { "cell_type": "markdown", @@ -70,7 +78,9 @@ "### List existing containers\n", "You can view the ports that have been used by existing containers" ], - "metadata": {} + "metadata": { + "azdata_cell_guid": "b671aaac-226f-4bbd-9839-552ec676c027" + } }, { "cell_type": "code", @@ -78,14 +88,20 @@ "cmd = f'docker ps -a'\n", "run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "dd665751-efbc-4089-af1b-0fa456a4fb58" + }, "outputs": [], - "execution_count": 2 + "execution_count": null }, { "cell_type": "markdown", - "source": "### Required information", - "metadata": {} + "source": [ + "### Required information" + ], + "metadata": { + "azdata_cell_guid": "9927c86e-e929-497e-bced-ced60e6a5a7c" + } }, { "cell_type": "code", @@ -106,14 +122,20 @@ "print(f'{password_name}: ******')\n", "print(f'Port: {sql_port}')" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "a2e06bb2-23f3-4fc7-81bc-d38e250b24c0" + }, "outputs": [], - "execution_count": 3 + "execution_count": null }, { "cell_type": "markdown", - "source": "### Pull the container image", - "metadata": {} + "source": [ + "### Pull the container image" + ], + "metadata": { + "azdata_cell_guid": "ca95b65d-e6f9-43a3-b06e-aa9cfc917725" + } }, { "cell_type": "code", @@ -121,14 +143,20 @@ "cmd = f'docker pull mcr.microsoft.com/mssql/server:2017-latest'\n", 
"run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "d75ae87c-2e59-4cf5-b61e-43f4bf8ac680" + }, "outputs": [], - "execution_count": 4 + "execution_count": null }, { "cell_type": "markdown", - "source": "### Start a new container", - "metadata": {} + "source": [ + "### Start a new container" + ], + "metadata": { + "azdata_cell_guid": "5bbde6b1-4933-417d-90cc-0c853e002552" + } }, { "cell_type": "code", @@ -141,14 +169,20 @@ "cmd = f'docker run -e ACCEPT_EULA=Y -e \"SA_PASSWORD={sql_password}\" -p {sql_port}:1433 --name {container_name} -d mcr.microsoft.com/mssql/server:2017-latest'\n", "run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "73585d5d-0c8c-4786-92f2-7e0fd2653f9e" + }, "outputs": [], - "execution_count": 5 + "execution_count": null }, { "cell_type": "markdown", - "source": "### List all the containers", - "metadata": {} + "source": [ + "### List all the containers" + ], + "metadata": { + "azdata_cell_guid": "e338290a-0e1d-4780-aad5-3416447be0a1" + } }, { "cell_type": "code", @@ -156,9 +190,11 @@ "cmd = f'docker ps -a'\n", "run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "5e30c110-a631-4c9b-8485-cd64d34c54d5" + }, "outputs": [], - "execution_count": 6 + "execution_count": null }, { "cell_type": "markdown", @@ -166,7 +202,9 @@ "### Connect to SQL Server in Azure Data Studio\n", "It might take a couple minutes for SQL Server to launch" ], - "metadata": {} + "metadata": { + "azdata_cell_guid": "11add202-2f03-4d20-8411-d4f274910107" + } }, { "cell_type": "code", @@ -175,14 +213,20 @@ "connectionParameter = '{\"serverName\":\"localhost,' + sql_port + '\",\"providerName\":\"MSSQL\",\"authenticationType\":\"SqlLogin\",\"userName\":\"sa\",\"password\":' + json.dumps(sql_password) + '}'\n", "display(HTML('
<br/><a href=\"command:azdata.connect?' + html.escape(connectionParameter) + '\"><font size=\"3\">Click here to connect to SQL Server</font></a><br/>
'))" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "182517d3-f6d6-4ecc-b2cb-4c21a0f23b4e" + }, "outputs": [], - "execution_count": 7 + "execution_count": null }, { "cell_type": "markdown", - "source": "### Stop and remove the container", - "metadata": {} + "source": [ + "### Stop and remove the container" + ], + "metadata": { + "azdata_cell_guid": "416a7e11-0ea7-487d-804f-b8449f0ce5fc" + } }, { "cell_type": "code", @@ -193,9 +237,11 @@ "display(HTML(\"Stop the container: \" + stop_container_command + \"\"))\n", "display(HTML(\"Remove the container: \" + remove_container_command + \"\"))" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "1593a0a6-7009-4d0b-9f3f-ac7ed6d2bb1e" + }, "outputs": [], - "execution_count": 8 + "execution_count": null } ] -} +} \ No newline at end of file diff --git a/extensions/resource-deployment/notebooks/docker/2019/deploy-sql2019-image.ipynb b/extensions/resource-deployment/notebooks/docker/2019/deploy-sql2019-image.ipynb index f9b80f320b..9f806fef11 100644 --- a/extensions/resource-deployment/notebooks/docker/2019/deploy-sql2019-image.ipynb +++ b/extensions/resource-deployment/notebooks/docker/2019/deploy-sql2019-image.ipynb @@ -6,7 +6,7 @@ }, "language_info": { "name": "python", - "version": "3.7.3", + "version": "3.6.6", "mimetype": "text/x-python", "codemirror_mode": { "name": "ipython", @@ -23,21 +23,27 @@ { "cell_type": "markdown", "source": [ - "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/src/sql/media/microsoft-small-logo.png)\n", + "![Microsoft](https://raw.githubusercontent.com/microsoft/azuredatastudio/main/extensions/resource-deployment/images/microsoft-small-logo.png)\n", "## Run SQL Server 2019 container image with Docker\n", "This notebook will use Docker to pull and run the SQL Server 2019 container image and connect to it in Azure Data Studio\n", "\n", "### Dependencies\n", "- Docker Engine. 
For more information, see [Install Docker](https://docs.docker.com/engine/installation/).\n", "\n", - "Please press the \"Run Cells\" button to run the notebook" + "Please press the \"Run all\" button to run the notebook" ], - "metadata": {} + "metadata": { + "azdata_cell_guid": "e5fb2be9-e904-4821-8473-b69b90760c6a" + } }, { "cell_type": "markdown", - "source": "### Check dependencies", - "metadata": {} + "source": [ + "### Check dependencies" + ], + "metadata": { + "azdata_cell_guid": "76c571ab-358a-4b07-810c-53020ee1745a" + } }, { "cell_type": "code", @@ -60,9 +66,11 @@ "cmd = 'docker version'\n", "run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "6196300e-f896-489b-8dca-b2c42eda2d6d" + }, "outputs": [], - "execution_count": 1 + "execution_count": null }, { "cell_type": "markdown", @@ -70,7 +78,9 @@ "### List existing containers\n", "You can view the ports that have been used by existing containers" ], - "metadata": {} + "metadata": { + "azdata_cell_guid": "87b07614-d57d-4731-ac3e-a8b324d231f2" + } }, { "cell_type": "code", @@ -78,14 +88,20 @@ "cmd = f'docker ps -a'\n", "run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "26170d1b-4332-4383-bcc4-1d97030daffc" + }, "outputs": [], - "execution_count": 2 + "execution_count": null }, { "cell_type": "markdown", - "source": "### Required information", - "metadata": {} + "source": [ + "### Required information" + ], + "metadata": { + "azdata_cell_guid": "52b1faf2-d7c7-446b-ba0b-4f8b744da0bb" + } }, { "cell_type": "code", @@ -106,14 +122,20 @@ "print(f'{password_name}: ******')\n", "print(f'Port: {sql_port}')" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "93cb0147-7bf6-4630-b796-3811dfd1354b" + }, "outputs": [], - "execution_count": 3 + "execution_count": null }, { "cell_type": "markdown", - "source": "### Pull the container image", - "metadata": {} + "source": [ + "### Pull the container image" + ], + "metadata": { + "azdata_cell_guid": "643ccaca-fd1d-4482-b81e-aee29b627e34" + } }, { "cell_type": "code", @@ -121,14 +143,20 @@ "cmd = f'docker pull mcr.microsoft.com/mssql/server:2019-latest'\n", "run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "7b102447-3198-488f-a995-982ae1fc8555" + }, "outputs": [], - "execution_count": 4 + "execution_count": null }, { "cell_type": "markdown", - "source": "### Start a new container", - "metadata": {} + "source": [ + "### Start a new container" + ], + "metadata": { + "azdata_cell_guid": "a4527a5f-c2c5-4f60-bfd1-b119576178c5" + } }, { "cell_type": "code", @@ -141,14 +169,20 @@ "cmd = f'docker run -e ACCEPT_EULA=Y -e \"SA_PASSWORD={sql_password}\" -p {sql_port}:1433 --name {container_name} -d mcr.microsoft.com/mssql/server:2019-latest'\n", "run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "82f27460-88eb-4484-92ee-40305e650d70" + }, "outputs": [], - "execution_count": 5 + "execution_count": null }, { "cell_type": "markdown", - "source": "### List all the containers", - "metadata": {} + "source": [ + "### List all the containers" + ], + "metadata": { + "azdata_cell_guid": "e267aa7d-dd22-43ac-9b03-cf282ef15f67" + } }, { "cell_type": "code", @@ -156,9 +190,11 @@ "cmd = f'docker ps -a'\n", "run_command()" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "211ee198-f1d1-4781-9daa-8497c2665de6" + }, "outputs": [], - "execution_count": 6 + "execution_count": null }, { "cell_type": "markdown", @@ -166,7 +202,9 @@ "### Connect to SQL Server in Azure Data Studio\n", "It might take a couple minutes for 
SQL Server to launch" ], - "metadata": {} + "metadata": { + "azdata_cell_guid": "5f5860c4-7962-439e-a15b-7f24f504dc18" + } }, { "cell_type": "code", @@ -175,14 +213,20 @@ "connectionParameter = '{\"serverName\":\"localhost,' + sql_port + '\",\"providerName\":\"MSSQL\",\"authenticationType\":\"SqlLogin\",\"userName\":\"sa\",\"password\":' + json.dumps(sql_password) + '}'\n", "display(HTML('
<br/><a href=\"command:azdata.connect?' + html.escape(connectionParameter) + '\"><font size=\"3\">Click here to connect to SQL Server</font></a><br/>
'))" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "4bc64915-c5ae-4507-8fb0-9e413ccc2fd0" + }, "outputs": [], - "execution_count": 7 + "execution_count": null }, { "cell_type": "markdown", - "source": "### Stop and remove the container", - "metadata": {} + "source": [ + "### Stop and remove the container" + ], + "metadata": { + "azdata_cell_guid": "9a1039fa-fdd3-408b-b649-8fde0fcee660" + } }, { "cell_type": "code", @@ -193,9 +237,11 @@ "display(HTML(\"Stop the container: \" + stop_container_command + \"\"))\n", "display(HTML(\"Remove the container: \" + remove_container_command + \"\"))" ], - "metadata": {}, + "metadata": { + "azdata_cell_guid": "f9e0f1ad-ba6e-4c17-84ea-cc5dceb1289b" + }, "outputs": [], - "execution_count": 8 + "execution_count": null } ] -} +} \ No newline at end of file diff --git a/extensions/resource-deployment/package.json b/extensions/resource-deployment/package.json index f1b1e66ec8..94cb098877 100644 --- a/extensions/resource-deployment/package.json +++ b/extensions/resource-deployment/package.json @@ -231,6 +231,14 @@ { "name": "existing-kubeadm", "displayName": "%bdc-deployment-target-existing-kubeadm%" + }, + { + "name": "existing-aro", + "displayName": "%bdc-deployment-target-existing-aro%" + }, + { + "name": "existing-openshift", + "displayName": "%bdc-deployment-target-existing-openshift%" } ] } @@ -251,7 +259,7 @@ }, { "name": "azdata", - "version": "15.0.4013" + "version": "20.0.0" } ], "when": "target=new-aks&&version=bdc2019" @@ -263,10 +271,12 @@ }, "requiredTools": [ { - "name": "kubectl" + "name": "kubectl", + "version": "1.13.0" }, { - "name": "azdata" + "name": "azdata", + "version": "20.0.0" } ], "when": "target=existing-aks&&version=bdc2019" @@ -278,13 +288,49 @@ }, "requiredTools": [ { - "name": "kubectl" + "name": "kubectl", + "version": "1.13.0" }, { - "name": "azdata" + "name": "azdata", + "version": "20.0.0" } ], "when": "target=existing-kubeadm&&version=bdc2019" + }, + { + "bdcWizard": { + "type": "existing-aro", + "notebook": "%bdc-2019-existing-aro-notebook%" + }, + "requiredTools": [ + { + "name": "kubectl", + "version": "1.13.0" + }, + { + "name": "azdata", + "version": "20.0.0" + } + ], + "when": "target=existing-aro&&version=bdc2019" + }, + { + "bdcWizard": { + "type": "existing-openshift", + "notebook": "%bdc-2019-existing-openshift-notebook%" + }, + "requiredTools": [ + { + "name": "kubectl", + "version": "1.13.0" + }, + { + "name": "azdata", + "version": "20.0.0" + } + ], + "when": "target=existing-openshift&&version=bdc2019" } ], "agreement": { diff --git a/extensions/resource-deployment/package.nls.json b/extensions/resource-deployment/package.nls.json index 44e2438a22..2de5240544 100644 --- a/extensions/resource-deployment/package.nls.json +++ b/extensions/resource-deployment/package.nls.json @@ -17,9 +17,13 @@ "bdc-deployment-target-new-aks": "New Azure Kubernetes Service Cluster", "bdc-deployment-target-existing-aks": "Existing Azure Kubernetes Service Cluster", "bdc-deployment-target-existing-kubeadm": "Existing Kubernetes Cluster (kubeadm)", + "bdc-deployment-target-existing-aro": "Existing Azure Red Hat OpenShift cluster", + "bdc-deployment-target-existing-openshift": "Existing OpenShift cluster", "bdc-2019-aks-notebook": "./notebooks/bdc/2019/deploy-bdc-aks.ipynb", "bdc-2019-existing-aks-notebook": "./notebooks/bdc/2019/deploy-bdc-existing-aks.ipynb", "bdc-2019-existing-kubeadm-notebook": "./notebooks/bdc/2019/deploy-bdc-existing-kubeadm.ipynb", + "bdc-2019-existing-aro-notebook": 
"./notebooks/bdc/2019/deploy-bdc-existing-aro.ipynb", + "bdc-2019-existing-openshift-notebook": "./notebooks/bdc/2019/deploy-bdc-existing-openshift.ipynb", "docker-sql-2017-title": "Deploy SQL Server 2017 container images", "docker-sql-2019-title": "Deploy SQL Server 2019 container images", "docker-container-name-field": "Container name", diff --git a/extensions/resource-deployment/src/interfaces.ts b/extensions/resource-deployment/src/interfaces.ts index 55ab646151..d157ae73e9 100644 --- a/extensions/resource-deployment/src/interfaces.ts +++ b/extensions/resource-deployment/src/interfaces.ts @@ -347,7 +347,9 @@ export interface ITool { export const enum BdcDeploymentType { NewAKS = 'new-aks', ExistingAKS = 'existing-aks', - ExistingKubeAdm = 'existing-kubeadm' + ExistingKubeAdm = 'existing-kubeadm', + ExistingARO = 'existing-aro', + ExistingOpenShift = 'existing-openshift' } export const enum ArcDeploymentType { diff --git a/extensions/resource-deployment/src/services/azdataService.ts b/extensions/resource-deployment/src/services/azdataService.ts index eeb11794d3..5e80572d64 100644 --- a/extensions/resource-deployment/src/services/azdataService.ts +++ b/extensions/resource-deployment/src/services/azdataService.ts @@ -35,6 +35,12 @@ export class AzdataService implements IAzdataService { case BdcDeploymentType.ExistingKubeAdm: profilePrefix = 'kubeadm'; break; + case BdcDeploymentType.ExistingARO: + profilePrefix = 'aro'; + break; + case BdcDeploymentType.ExistingOpenShift: + profilePrefix = 'openshift'; + break; default: throw new Error(`Unknown deployment type: ${deploymentType}`); } diff --git a/extensions/resource-deployment/src/services/bigDataClusterDeploymentProfile.ts b/extensions/resource-deployment/src/services/bigDataClusterDeploymentProfile.ts index 21c50e78e0..563589ffce 100644 --- a/extensions/resource-deployment/src/services/bigDataClusterDeploymentProfile.ts +++ b/extensions/resource-deployment/src/services/bigDataClusterDeploymentProfile.ts @@ -30,6 +30,8 @@ export interface ActiveDirectorySettings { clusterAdmins: string; appReaders?: string; appOwners?: string; + subdomain?: string; + accountPrefix?: string; } export class BigDataClusterDeploymentProfile { @@ -280,6 +282,8 @@ export class BigDataClusterDeploymentProfile { activeDirectoryObject.dnsIpAddresses = this.splitByComma(adSettings.dnsIPAddresses); activeDirectoryObject.domainControllerFullyQualifiedDns = this.splitByComma(adSettings.domainControllerFQDNs.toLowerCase()); activeDirectoryObject.domainDnsName = adSettings.domainDNSName; + activeDirectoryObject.subdomain = adSettings.subdomain; + activeDirectoryObject.accountPrefix = adSettings.accountPrefix; activeDirectoryObject.realm = adSettings.domainDNSName.toUpperCase(); activeDirectoryObject.clusterAdmins = this.splitByComma(adSettings.clusterAdmins); activeDirectoryObject.clusterUsers = this.splitByComma(adSettings.clusterUsers); diff --git a/extensions/resource-deployment/src/ui/deployClusterWizard/constants.ts b/extensions/resource-deployment/src/ui/deployClusterWizard/constants.ts index b759766b04..790506b546 100644 --- a/extensions/resource-deployment/src/ui/deployClusterWizard/constants.ts +++ b/extensions/resource-deployment/src/ui/deployClusterWizard/constants.ts @@ -17,8 +17,10 @@ export const DomainDNSName_VariableName = 'AZDATA_NB_VAR_BDC_AD_DOMAIN_DNS_NAME' export const Realm_VariableName = 'AZDATA_NB_VAR_BDC_AD_REALM'; export const DomainServiceAccountUserName_VariableName = 'AZDATA_NB_VAR_BDC_AD_DOMAIN_SVC_USERNAME'; export const 
DomainServiceAccountPassword_VariableName = 'AZDATA_NB_VAR_BDC_AD_DOMAIN_SVC_PASSWORD'; -export const AppOwners_VariableName = 'AZDATA_NB_VAR_AD_BDC_APP_OWNERS'; -export const AppReaders_VariableName = 'AZDATA_NB_VAR_AD_BDC_APP_READERS'; +export const AppOwners_VariableName = 'AZDATA_NB_VAR_BDC_AD_APP_OWNERS'; +export const AppReaders_VariableName = 'AZDATA_NB_VAR_AD_BDC_AD_APP_READERS'; +export const Subdomain_VariableName = 'AZDATA_NB_VAR_BDC_AD_SUBDOMAIN'; +export const AccountPrefix_VariableName = 'AZDATA_NB_VAR_BDC_AD_ACCOUNTPREFIX'; export const SubscriptionId_VariableName = 'AZDATA_NB_VAR_BDC_AZURE_SUBSCRIPTION'; export const ResourceGroup_VariableName = 'AZDATA_NB_VAR_BDC_RESOURCEGROUP_NAME'; export const Location_VariableName = 'AZDATA_NB_VAR_BDC_AZURE_REGION'; diff --git a/extensions/resource-deployment/src/ui/deployClusterWizard/deployClusterWizard.ts b/extensions/resource-deployment/src/ui/deployClusterWizard/deployClusterWizard.ts index 2f047a83ca..f6ae1ee681 100644 --- a/extensions/resource-deployment/src/ui/deployClusterWizard/deployClusterWizard.ts +++ b/extensions/resource-deployment/src/ui/deployClusterWizard/deployClusterWizard.ts @@ -94,14 +94,9 @@ export class DeployClusterWizard extends WizardBase { variableName: VariableNames.AppReaders_VariableName, placeHolder: localize('deployCluster.AppReadersPlaceHolder', "Use comma to separate the values."), description: localize('deployCluster.AppReadersDescription', "The Active Directory users or groups of app readers. Use comma as separator them if there are multiple users/groups.") + }, { + type: FieldType.Text, + label: localize('deployCluster.Subdomain', "Subdomain"), + required: false, + variableName: VariableNames.Subdomain_VariableName, + description: localize('deployCluster.SubdomainDescription', "A unique DNS subdomain to use for this SQL Server Big Data Cluster. If not provided, the cluster name will be used as the default value.") + }, { + type: FieldType.Text, + label: localize('deployCluster.AccountPrefix', "Account prefix"), + required: false, + variableName: VariableNames.AccountPrefix_VariableName, + description: localize('deployCluster.AccountPrefixDescription', "A unique prefix for AD accounts SQL Server Big Data Cluster will generate. If not provided, the subdomain name will be used as the default value. 
If a subdomain is not provided, the cluster name will be used as the default value.") } ] }; @@ -279,10 +291,9 @@ export class ClusterSettingsPage extends WizardPageBase { variableDNSPrefixMapping[VariableNames.SQLServerDNSName_VariableName] = 'bdc-sql'; variableDNSPrefixMapping[VariableNames.ServiceProxyDNSName_VariableName] = 'bdc-proxy'; + const subdomain = this.wizard.model.getStringValue(VariableNames.Subdomain_VariableName) || this.wizard.model.getStringValue(VariableNames.ClusterName_VariableName); Object.keys(variableDNSPrefixMapping).forEach((variableName: string) => { - if (!this.wizard.model.getStringValue(variableName)) { - this.wizard.model.setPropertyValue(variableName, `${variableDNSPrefixMapping[variableName]}.${this.wizard.model.getStringValue(VariableNames.DomainDNSName_VariableName)}`); - } + this.wizard.model.setPropertyValue(variableName, `${variableDNSPrefixMapping[variableName]}.${subdomain}.${this.wizard.model.getStringValue(VariableNames.DomainDNSName_VariableName)}`); }); } this.wizard.wizardObject.registerNavigationValidator((pcInfo) => { diff --git a/extensions/resource-deployment/src/ui/deployClusterWizard/pages/deploymentProfilePage.ts b/extensions/resource-deployment/src/ui/deployClusterWizard/pages/deploymentProfilePage.ts index 3b9e6ae223..4206869c2c 100644 --- a/extensions/resource-deployment/src/ui/deployClusterWizard/pages/deploymentProfilePage.ts +++ b/extensions/resource-deployment/src/ui/deployClusterWizard/pages/deploymentProfilePage.ts @@ -246,6 +246,10 @@ export class DeploymentProfilePage extends WizardPageBase { return 'aks-dev-test'; case BdcDeploymentType.ExistingKubeAdm: return 'kubeadm-dev-test'; + case BdcDeploymentType.ExistingARO: + return 'aro-dev-test'; + case BdcDeploymentType.ExistingOpenShift: + return 'openshift-dev-test'; default: throw new Error(`Unknown deployment type: ${this.wizard.deploymentType}`); } diff --git a/extensions/resource-deployment/src/ui/deployClusterWizard/pages/summaryPage.ts b/extensions/resource-deployment/src/ui/deployClusterWizard/pages/summaryPage.ts index b2699ff145..768184baad 100644 --- a/extensions/resource-deployment/src/ui/deployClusterWizard/pages/summaryPage.ts +++ b/extensions/resource-deployment/src/ui/deployClusterWizard/pages/summaryPage.ts @@ -161,6 +161,21 @@ export class SummaryPage extends WizardPageBase { labelCSSStyles: { fontWeight: FontWeight.Bold } }] }); + clusterSectionInfo.rows!.push({ + items: [ + { + type: FieldType.ReadonlyText, + label: localize('deployCluster.Subdomain', "Subdomain"), + defaultValue: this.wizard.model.getStringValue(VariableNames.Subdomain_VariableName), + labelCSSStyles: { fontWeight: FontWeight.Bold } + }, + { + type: FieldType.ReadonlyText, + label: localize('deployCluster.AccountPrefix', "Account prefix"), + defaultValue: this.wizard.model.getStringValue(VariableNames.AccountPrefix_VariableName), + labelCSSStyles: { fontWeight: FontWeight.Bold } + }] + }); clusterSectionInfo.rows!.push({ items: [ {