Arc bug fix: ADS Create DC wizard should prompt for Log Analytics workspace ID and access token similar to portal (#18742)

* Added monitor log-analytics workspace list to the az API

* Made resource group and subscription optional for log analytics workspace list

* Added dynamic fields for workspace name, ID, and primary key based on the value of the auto-upload logs checkbox

* Hooked up the newly created source provider for Log Analytics workspaces. The dropdown now populates with all workspace names.

* Added workspaceUtils.ts as a value provider, so the selected workspace name now maps to its ID automatically (see the sketch after this list)

* Replaced Promise.all with Promise.resolve

* Added workspace ID and primary key as environment variables in the notebook

* Removed extra space in package.json

* Made getOptions more concise and moved the azApi definition into the function

* Changed the notebook to handle the new Azure CLI command with the --cluster-name parameter
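
For context, here is a minimal sketch of the az CLI calls behind the new dropdown and value provider, assuming the az CLI is installed and signed in. The extension implements this in TypeScript (azApi and workspaceUtils.ts); the helper names, the get-shared-keys lookup, and the JSON field access below are illustrative only:

```python
import json
import subprocess

def run_az(args):
    # Run an az CLI command and return its parsed JSON output.
    result = subprocess.run(['az'] + args + ['--output', 'json'],
                            check=True, capture_output=True, text=True)
    return json.loads(result.stdout)

def list_workspace_names():
    # Resource group and subscription are optional for 'workspace list', so this
    # returns every Log Analytics workspace visible to the signed-in account.
    return [ws['name'] for ws in run_az(['monitor', 'log-analytics', 'workspace', 'list'])]

def resolve_workspace(selected_name):
    # Map the selected workspace name to its customer ID and primary shared key,
    # the two values the notebook exports as LOG_WORKSPACE_ID and LOG_SHARED_KEY.
    workspaces = run_az(['monitor', 'log-analytics', 'workspace', 'list'])
    ws = next(w for w in workspaces if w['name'] == selected_name)
    keys = run_az(['monitor', 'log-analytics', 'workspace', 'get-shared-keys',
                   '--resource-group', ws['resourceGroup'],
                   '--workspace-name', ws['name']])
    return ws['customerId'], keys['primarySharedKey']
```
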
Authored by Candice Ye on 2022-03-17 20:57:16 -07:00, committed by GitHub
commit a786e63445, parent d6abcb892d
10 changed files with 200 additions and 3 deletions

@@ -185,21 +185,24 @@
"print (f'Creating Azure Arc Data Controller: {arc_data_controller_name} using configuration {arc_cluster_context}')\n",
"os.environ[\"AZDATA_USERNAME\"] = arc_admin_username\n",
"os.environ[\"AZDATA_PASSWORD\"] = arc_admin_password\n",
"os.environ[\"LOG_WORKSPACE_ID\"] = log_analytics_workspace_id\n",
"os.environ[\"LOG_SHARED_KEY\"] = log_analytics_primary_key\n",
"\n",
"# If connection mode is indirect\n",
"namespace = f' --k8s-namespace {arc_data_controller_namespace}' if is_indirect else ''\n",
"use_k8s = ' --use-k8s' if is_indirect else ''\n",
"\n",
"# If connection mode is direct\n",
"custom_location = f' --custom-location {arc_data_controller_custom_location}' if not is_indirect else ''\n",
"\n",
"cluster_name = f' --cluster-name {arc_cluster_context}' if not is_indirect else ''\n",
"auto_upload_metrics_value = 'true' if arc_data_controller_auto_upload_metrics == 'true' else 'false'\n",
"auto_upload_logs_value = 'true' if arc_data_controller_auto_upload_logs == 'true' else 'false'\n",
"\n",
"auto_upload_metrics = f' --auto-upload-metrics {auto_upload_metrics_value}' if not is_indirect else ''\n",
"auto_upload_logs = f' --auto-upload-logs {auto_upload_logs_value}' if not is_indirect else ''\n",
"\n",
"if os.name == 'nt':\n",
" print(f'If you don\\'t see output produced by az, you can run the following command in a terminal window to check the deployment status:\\n\\t {os.environ[\"AZDATA_NB_VAR_KUBECTL\"]} get pods -n {arc_data_controller_namespace}')\n",
"run_command(f'az arcdata dc create --connectivity-mode {arc_data_controller_connectivity_mode} --name {arc_data_controller_name}{namespace} --subscription {arc_subscription} --resource-group {arc_resource_group} --location {arc_data_controller_location} --storage-class {arc_data_controller_storage_class} --profile-name {arc_profile} --infrastructure {arc_infrastructure}{custom_location}{auto_upload_metrics}{auto_upload_logs}{use_k8s}')\n",
"run_command(f'az arcdata dc create --connectivity-mode {arc_data_controller_connectivity_mode} --name {arc_data_controller_name}{namespace} --subscription {arc_subscription} --resource-group {arc_resource_group} --location {arc_data_controller_location} --storage-class {arc_data_controller_storage_class} --profile-name {arc_profile} --infrastructure {arc_infrastructure}{custom_location}{cluster_name}{auto_upload_metrics}{auto_upload_logs}{use_k8s}')\n",
"print(f'Azure Arc Data Controller: {arc_data_controller_name} created.') "
]
},
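
For reference, the direct-mode command that the updated run_command line assembles looks roughly like the following; every value here is a placeholder for illustration:

```python
# Placeholder values; in indirect mode the cell passes --k8s-namespace and --use-k8s
# instead of --custom-location, --cluster-name, and the --auto-upload-* flags.
cmd = (
    'az arcdata dc create'
    ' --connectivity-mode direct'
    ' --name arc-dc'
    ' --subscription 00000000-0000-0000-0000-000000000000'
    ' --resource-group my-resource-group'
    ' --location eastus'
    ' --storage-class managed-premium'
    ' --profile-name azure-arc-aks-premium-storage'
    ' --infrastructure azure'
    ' --custom-location my-custom-location'
    ' --cluster-name my-connected-cluster'  # newly appended by this change
    ' --auto-upload-metrics true'
    ' --auto-upload-logs true'
)
print(cmd)
```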