Mirror of https://github.com/ckaczor/azuredatastudio.git, synced 2026-02-16 10:58:30 -05:00
Replacing all azdata with az (#16502)
* Changed azdata to az in azcli extension and resource-deployment, and some of the arc extension. Removed user, pass, url from controller connect blade. Commented out tests. Ported over work from old branch.
* Changed unit tests, all unit tests passing. Changed parameters to new ones, fixed some Controller Connect issues.
* Connect data controller and create dc working.
* Changed az back to azdata in necessary places in resource-deployment.
* Changed notebook values and added namespace to some params.
* Added some changes from PR to this branch.
* Changed azdata.ts to az.ts and changed subscription parameter.
* Brought over changes from azcli PR into this branch.
* Added endpoint, username, password to getIsPassword.
* Changed notebooks to use proper az params, hard coded in some values to verify it is working, removed some variableNames from package.json.
* Changed -sc to --storage-class in notebook.
* Added namespace to SQL deploy, deleted dc create in api.
* Deleted more dc create code and uncommented findAz() with unfinished work on Do Not Ask Again.
* Removed (preview) from extensions/arc and extensions/azcli excluding preview:true in package.json.
* Commented out install/update prompts until DoNotAskAgain is implemented.
* Fixed bugs: JSON Output errors are now being caught, --infrastructure now has a required UI component with dropdown options, config page loads properly, SQL create flags use full names instead of shortnames.
* Adds validation to pg extensions and bug fixes (#16486)
* Extensions
* Server parameters
* Change location of postgres extensions, PR fixes
* Change location of list
* List spacing
* Commented out Don't Ask Again prompt implementation.
* Uncommented header of a test file.
* Added Azure CLI arcdata extension to Prerequisites.
* Reverted package.json and yarn.lock.
* Took away casting of stderr and stdout in executeCommand.
* Deleted override function for initializeFields in connectControllerDialog.ts.
* Removed fakeAzApi for testing and added back in (Preview).
* Removed en-us from python notebook links.
* Deleted azdata tool from tool tests in resource-deployment.
* Deleted another instance of azdata in tool test.
* Added back in azdata tooltype.
* Removed en-us.
* Replaced AzdataTool in typings.
* Reverted adding azdata tool back in.
* Changed Azdata to AzdataToolOld.
* Added back azdata tool type.
* Added AzdataToolOld to tool types.
* Fixed test.

Co-authored-by: Candice Ye <canye@microsoft.com>
Co-authored-by: nasc17 <nasc@microsoft.com>
Co-authored-by: nasc17 <69922333+nasc17@users.noreply.github.com>
Co-authored-by: chgagnon <chgagnon@microsoft.com>
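In effect, the notebooks stop shelling out to azdata with short flags and instead build the equivalent az command with full flag names, run through the same subprocess helper. A minimal sketch of the new pattern, using the az command that appears in the diff below; the resource name and namespace are placeholder values, not taken from a real deployment:

```python
import subprocess

# Placeholder values for illustration only.
postgres_server_group_name = 'pg1'
arc_data_controller_namespace = 'arc'

# The az form used by the updated notebook (full flag names instead of -n/-scd/...).
cmd = (f'az postgres arc-server create --name {postgres_server_group_name} '
       f'--k8s-namespace {arc_data_controller_namespace} --use-k8s')
out = subprocess.run(cmd, shell=True, capture_output=True)
print(out.stdout.decode('utf-8'))
```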
@@ -47,7 +47,8 @@
" \n",
"|Tools|Description|Installation|\n",
"|---|---|---|\n",
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
"|Azure CLI (az) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/cli/azure/install-azure-cli-windows?tabs=azure-cli) |\n",
"|Azure CLI arcdata extension | Commands for using Azure Arc for Azure data services. | [Installation](https://docs.microsoft.com/azure/azure-arc/data/install-arcdata-extension)"
],
"metadata": {
"azdata_cell_guid": "20fe3985-a01e-461c-bce0-235f7606cc3c"
@@ -64,6 +65,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"source": [
"import sys,os,json,subprocess\n",
"def run_command():\n",
@@ -76,16 +78,14 @@
" print(f'Successfully executed: {cmd}')\n",
" print(f'\\t>>>Output: {output.stdout.decode(\"utf-8\")}\\n')\n",
" return output.stdout.decode(\"utf-8\")\n",
"cmd = 'azdata --version'\n",
"out = run_command()\n",
""
"cmd = 'az --version'\n",
"out = run_command()\n"
],
"outputs": [],
"metadata": {
"azdata_cell_guid": "749d8dba-3da8-46e9-ae48-2b38056ab7a2",
"tags": []
},
"outputs": [],
"execution_count": null
}
},
{
"cell_type": "markdown",
@@ -111,48 +111,17 @@
},
{
"cell_type": "code",
"source": [
"# Login to the data controller.\n",
"#\n",
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
"os.environ[\"KUBECONFIG\"] = controller_kubeconfig\n",
"os.environ[\"KUBECTL_CONTEXT\"] = controller_kubectl_context\n",
"endpoint_option = f' -e {controller_endpoint}' if controller_endpoint else \"\"\n",
"cmd = f'azdata login --namespace {arc_data_controller_namespace} -u {controller_username}{endpoint_option}'\n",
"out=run_command()"
],
"metadata": {
"azdata_cell_guid": "71366399-5963-4e24-b2f2-6bb5bffba4ec"
},
"outputs": [],
"execution_count": null
},
{
"cell_type": "code",
"execution_count": null,
"source": [
"print (f'Creating the PostgreSQL Hyperscale - Azure Arc instance')\n",
"\n",
"workers_option = f' -w {postgres_server_group_workers}' if postgres_server_group_workers else \"\"\n",
"port_option = f' --port \"{postgres_server_group_port}\"' if postgres_server_group_port else \"\"\n",
"engine_version_option = f' -ev {postgres_server_group_engine_version}' if postgres_server_group_engine_version else \"\"\n",
"extensions_option = f' --extensions \"{postgres_server_group_extensions}\"' if postgres_server_group_extensions else \"\"\n",
"volume_size_data_option = f' -vsd {postgres_server_group_volume_size_data}Gi' if postgres_server_group_volume_size_data else \"\"\n",
"volume_size_logs_option = f' -vsl {postgres_server_group_volume_size_logs}Gi' if postgres_server_group_volume_size_logs else \"\"\n",
"volume_size_backups_option = f' -vsb {postgres_server_group_volume_size_backups}Gi' if postgres_server_group_volume_size_backups else \"\"\n",
"cores_request_option = f' -cr \"c={postgres_server_group_coordinator_cores_request},w={postgres_server_group_workers_cores_request}\"' if postgres_server_group_coordinator_cores_request and postgres_server_group_workers_cores_request else f' -cr \"c={postgres_server_group_coordinator_cores_request}\"' if postgres_server_group_coordinator_cores_request else f' -cr \"w={postgres_server_group_workers_cores_request}\"' if postgres_server_group_workers_cores_request else \"\"\n",
"cores_limit_option = f' -cl \"c={postgres_server_group_coordinator_cores_limit},w={postgres_server_group_workers_cores_limit}\"' if postgres_server_group_coordinator_cores_limit and postgres_server_group_workers_cores_limit else f' -cl \"c={postgres_server_group_coordinator_cores_limit}\"' if postgres_server_group_coordinator_cores_limit else f' -cl \"w={postgres_server_group_workers_cores_limit}\"' if postgres_server_group_workers_cores_limit else \"\"\n",
"memory_request_option = f' -mr \"c={postgres_server_group_coordinator_memory_request}Gi,w={postgres_server_group_workers_memory_request}Gi\"' if postgres_server_group_coordinator_memory_request and postgres_server_group_workers_memory_request else f' -mr \"c={postgres_server_group_coordinator_memory_request}Gi\"' if postgres_server_group_coordinator_memory_request else f' -mr \"w={postgres_server_group_workers_memory_request}Gi\"' if postgres_server_group_workers_memory_request else \"\"\n",
"memory_limit_option = f' -ml \"c={postgres_server_group_coordinator_memory_limit}Gi,w={postgres_server_group_workers_memory_limit}Gi\"' if postgres_server_group_coordinator_memory_limit and postgres_server_group_workers_memory_limit else f' -ml \"c={postgres_server_group_coordinator_memory_limit}Gi\"' if postgres_server_group_coordinator_memory_limit else f' -ml \"w={postgres_server_group_workers_memory_limit}Gi\"' if postgres_server_group_workers_memory_limit else \"\"\n",
"\n",
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\"]\n",
"cmd = f'azdata arc postgres server create -n {postgres_server_group_name} -scd {postgres_storage_class_data} -scl {postgres_storage_class_logs} -scb {postgres_storage_class_backups}{workers_option}{port_option}{engine_version_option}{extensions_option}{volume_size_data_option}{volume_size_logs_option}{volume_size_backups_option}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
"cmd = f'az postgres arc-server create --name {postgres_server_group_name} --k8s-namespace {arc_data_controller_namespace} --use-k8s'\n",
"out=run_command()"
],
"outputs": [],
"metadata": {
"azdata_cell_guid": "4fbaf071-55a1-40bc-be7e-7b9b5547b886"
},
"outputs": [],
"execution_count": null
}
}
]
}
}
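Note that the hunks above only show the tail of the notebook's run_command helper. A self-contained reconstruction of what such a helper looks like, matching the success-path prints visible in the diff; the failure handling is an assumption, not copied from the notebook:

```python
import subprocess

cmd = 'az --version'

def run_command():
    # Reconstruction for illustration: run the module-level `cmd` string and
    # return its decoded stdout. Only the success path below is visible in the
    # hunk above; the nonzero-exit handling is assumed.
    print(f'Executing: {cmd}')
    output = subprocess.run(cmd, shell=True, capture_output=True)
    if output.returncode != 0:
        print(f'\t>>>Error: {output.stderr.decode("utf-8")}\n')
        raise SystemExit(f'Command failed: {cmd}')
    print(f'Successfully executed: {cmd}')
    print(f'\t>>>Output: {output.stdout.decode("utf-8")}\n')
    return output.stdout.decode("utf-8")

out = run_command()
```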