Mirror of https://github.com/ckaczor/azuredatastudio.git (synced 2026-02-18 11:01:36 -05:00)

Compare commits: 1.26.0_rel ... 1.22.0 (37 commits)
Commit SHAs (author and date columns not captured):

77b9a708df
a4ee871b88
3f4e19fc08
571fca6de5
5a2fdc4034
cc6d84e7f6
99e11d2e22
9a85123e21
56669db6b6
8782eeb32f
7f3d5bac0a
7a1e0a7d2e
681ecbd946
e7798a8e32
b158180ef4
7ad9da7fda
94e2016a16
21bb577da8
5e8325ba28
25b7ccade3
57940c581c
82f9e4e24b
3e22fcfd2d
0bc81e1078
7b6328dccf
05124273ea
b1d4444522
4ee2d369cf
fb28b69bb0
f2709c7100
3476f5ae38
b937fdee7a
dd9ac2e362
403ff6cfec
4a6226974e
6a2c47f511
3d9a316f4b
@@ -12,10 +12,6 @@
    {
      "file": "build\\actions\\AutoMerge\\dist\\index.js",
      "_justification": "False positive from webpacked code"
    },
    {
      "file": ".devcontainer\\devcontainer.json",
      "_justification": "Local development environment - not used in production"
    }
  ]
}
@@ -73,7 +73,6 @@ RUN apt-get update \
    libnss3 \
    libxss1 \
    libasound2 \
    libgbm1 \
    xfonts-base \
    xfonts-terminus \
    fonts-noto \
16 .github/CODEOWNERS (vendored)
@@ -1,16 +0,0 @@
# Lines starting with '#' are comments.
# Each line is a file pattern followed by one or more owners.
# Syntax can be found here: https://docs.github.com/free-pro-team@latest/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax

/extensions/admin-tool-ext-win @Charles-Gagnon
/extensions/arc/ @Charles-Gagnon
/extensions/azdata/ @Charles-Gagnon
/extensions/big-data-cluster/ @Charles-Gagnon
/extensions/dacpac/ @kisantia
/extensions/query-history/ @Charles-Gagnon
/extensions/resource-deployment/ @Charles-Gagnon
/extensions/schema-compare/ @kisantia
/extensions/sql-database-projects/ @Benjin @kisantia
/extensions/mssql/config.json @Charles-Gagnon @alanrenmsft @kburtram

/src/sql/*.d.ts @alanrenmsft @Charles-Gagnon
27 .github/label-actions.yml (vendored)
@@ -1,27 +0,0 @@
Needs Logs:
  comment: "We need more info to debug your particular issue. If you could attach your logs to the issue (ensure no private data is in them), it would help us fix the issue much faster.


  There are two types of logs to collect:


  **Console Logs**


  - Open Developer Tools (Help -> Toggle Developer Tools)

  - Click the **Console** tab

  - Click in the log area and select all text (CTRL+A)

  - Save this text into a file named console.log and attach it to this issue.


  **Application Logs**


  - Open command palette (Click **View** -> **Command Palette**)

  - Run the command: **`Developer: Open Logs Folder`**

  - This will open the log folder locally. Please zip up this folder and attach it to the issue."
7 .github/subscribers.json (vendored)
@@ -1,7 +0,0 @@
{
  "label-to-subscribe-to": [
    "list of usernames to subscribe",
    "such as:",
    "JacksonKearl"
  ]
}
15 .github/workflows/ci.yml (vendored)
@@ -31,10 +31,7 @@ jobs:
        with:
          node-version: 10
      # TODO: cache node modules
      # Increase timeout to get around latency issues when fetching certain packages
      - run: |
          yarn config set network-timeout 300000
          yarn --frozen-lockfile
      - run: yarn --frozen-lockfile
        name: Install Dependencies
      - run: yarn electron x64
        name: Download Electron
@@ -82,10 +79,7 @@ jobs:
      - uses: actions/setup-python@v1
        with:
          python-version: '2.x'
      # Increase timeout to get around latency issues when fetching certain packages
      - run: |
          yarn config set network-timeout 300000
          yarn --frozen-lockfile
      - run: yarn --frozen-lockfile
        name: Install Dependencies
      - run: yarn electron
        name: Download Electron
@@ -118,10 +112,7 @@
      - uses: actions/setup-node@v1
        with:
          node-version: 10
      # Increase timeout to get around latency issues when fetching certain packages
      - run: |
          yarn config set network-timeout 300000
          yarn --frozen-lockfile
      - run: yarn --frozen-lockfile
        name: Install Dependencies
      - run: yarn electron x64
        name: Download Electron
50 .github/workflows/deep-classifier-runner.yml (vendored)
@@ -1,50 +0,0 @@
name: "Deep Classifier: Runner"
on:
  schedule:
    - cron: 0 * * * *
  repository_dispatch:
    types: [trigger-deep-classifier-runner]

jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Actions
        uses: actions/checkout@v2
        with:
          repository: 'microsoft/vscode-github-triage-actions'
          ref: v35
          path: ./actions
      - name: Install Actions
        run: npm install --production --prefix ./actions
      - name: Install Additional Dependencies
        # Pulls in a bunch of other packages that arent needed for the rest of the actions
        run: npm install @azure/storage-blob@12.1.1
      - name: "Run Classifier: Scraper"
        uses: ./actions/classifier-deep/apply/fetch-sources
        with:
          # slightly overlapping to protect against issues slipping through the cracks if a run is delayed
          from: 80
          until: 5
          configPath: classifier
          blobContainerName: vscode-issue-classifier
          blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
          token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
          appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
      - name: Set up Python 3.7
        uses: actions/setup-python@v1
        with:
          python-version: 3.7
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade numpy scipy scikit-learn joblib nltk simpletransformers torch torchvision
      - name: "Run Classifier: Generator"
        run: python ./actions/classifier-deep/apply/generate-labels/main.py
      - name: "Run Classifier: Labeler"
        uses: ./actions/classifier-deep/apply/apply-labels
        with:
          configPath: classifier
          allowLabels: "needs more info|new release"
          appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
          token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
27 .github/workflows/deep-classifier-scraper.yml (vendored)
@@ -1,27 +0,0 @@
name: "Deep Classifier: Scraper"
on:
  repository_dispatch:
    types: [trigger-deep-classifier-scraper]

jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Actions
        uses: actions/checkout@v2
        with:
          repository: 'microsoft/vscode-github-triage-actions'
          ref: v35
          path: ./actions
      - name: Install Actions
        run: npm install --production --prefix ./actions
      - name: Install Additional Dependencies
        # Pulls in a bunch of other packages that arent needed for the rest of the actions
        run: npm install @azure/storage-blob@12.1.1
      - name: "Run Classifier: Scraper"
        uses: ./actions/classifier-deep/train/fetch-issues
        with:
          blobContainerName: vscode-issue-classifier
          blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
          token: ${{secrets.ISSUE_SCRAPER_TOKEN}}
          appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
27 .github/workflows/latest-release-monitor.yml (vendored)
@@ -1,27 +0,0 @@
name: Latest Release Monitor
on:
  schedule:
    - cron: 0/5 * * * *
  repository_dispatch:
    types: [trigger-latest-release-monitor]

jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Actions
        uses: actions/checkout@v2
        with:
          repository: 'microsoft/vscode-github-triage-actions'
          path: ./actions
          ref: v35
      - name: Install Actions
        run: npm install --production --prefix ./actions
      - name: Install Storage Module
        run: npm install @azure/storage-blob@12.1.1
      - name: Run Latest Release Monitor
        uses: ./actions/latest-release-monitor
        with:
          storageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
          appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
          token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
15 .github/workflows/on-label.yml (vendored)
@@ -1,15 +0,0 @@
name: On Label
on:
  issues:
    types: [labeled]

jobs:
  processLabelAction:
    name: Process Label Action
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Process Label Action
        uses: hramos/label-actions@v1
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
2 .vscode/notebooks/api.github-issues (vendored)
@@ -8,7 +8,7 @@
  {
    "kind": 2,
    "language": "github-issues",
    "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"September 2020\"",
    "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"August 2020\"",
    "editable": true
  },
  {
2 .vscode/notebooks/my-work.github-issues (vendored)
@@ -8,7 +8,7 @@
  {
    "kind": 2,
    "language": "github-issues",
    "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks\n\n// current milestone name\n$milestone=milestone:\"September 2020\"",
    "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks\n\n// current milestone name\n$milestone=milestone:\"August 2020\"",
    "editable": true
  },
  {
2 .vscode/notebooks/verification.github-issues (vendored)
@@ -14,7 +14,7 @@
  {
    "kind": 2,
    "language": "github-issues",
    "value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"September 2020\"",
    "value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"July 2020\"",
    "editable": true
  },
  {
194 .vscode/searches/TrustedTypes.code-search (vendored)
@@ -1,194 +0,0 @@
# Query: .innerHTML =
# Flags: CaseSensitive WordMatch
# Including: src/vs/**/*.{t,j}s
# Excluding: *.test.ts
# ContextLines: 3

22 results - 14 files

src/vs/base/browser/markdownRenderer.ts:
  161   const strValue = values[0];
  162   const span = element.querySelector(`div[data-code="${id}"]`);
  163   if (span) {
  164:      span.innerHTML = strValue;
  165   }
  166   }).catch(err => {
  167   // ignore

  243   return true;
  244   }
  245
  246:  element.innerHTML = insane(renderedMarkdown, {
  247   allowedSchemes,
  248   // allowedTags should included everything that markdown renders to.
  249   // Since we have our own sanitize function for marked, it's possible we missed some tag so let insane make sure.

src/vs/base/browser/ui/contextview/contextview.ts:
  157   this.shadowRootHostElement = DOM.$('.shadow-root-host');
  158   this.container.appendChild(this.shadowRootHostElement);
  159   this.shadowRoot = this.shadowRootHostElement.attachShadow({ mode: 'open' });
  160:  this.shadowRoot.innerHTML = `
  161   <style>
  162   ${SHADOW_ROOT_CSS}
  163   </style>

src/vs/code/electron-sandbox/issue/issueReporterMain.ts:
  57    const platformClass = platform.isWindows ? 'windows' : platform.isLinux ? 'linux' : 'mac';
  58    addClass(document.body, platformClass); // used by our fonts
  59
  60:   document.body.innerHTML = BaseHtml();
  61    const issueReporter = new IssueReporter(configuration);
  62    issueReporter.render();
  63    document.body.style.display = 'block';

src/vs/code/electron-sandbox/processExplorer/processExplorerMain.ts:
  320   content.push(`.highest { color: ${styles.highlightForeground}; }`);
  321   }
  322
  323:  styleTag.innerHTML = content.join('\n');
  324   if (document.head) {
  325   document.head.appendChild(styleTag);
  326   }

src/vs/editor/browser/view/domLineBreaksComputer.ts:
  107   allCharOffsets[i] = tmp[0];
  108   allVisibleColumns[i] = tmp[1];
  109   }
  110:  containerDomNode.innerHTML = sb.build();
  111
  112   containerDomNode.style.position = 'absolute';
  113   containerDomNode.style.top = '10000';

src/vs/editor/browser/view/viewLayer.ts:
  507   private _finishRenderingNewLines(ctx: IRendererContext<T>, domNodeIsEmpty: boolean, newLinesHTML: string, wasNew: boolean[]): void {
  508   const lastChild = <HTMLElement>this.domNode.lastChild;
  509   if (domNodeIsEmpty || !lastChild) {
  510:      this.domNode.innerHTML = newLinesHTML;
  511   } else {
  512   lastChild.insertAdjacentHTML('afterend', newLinesHTML);
  513   }

  525   private _finishRenderingInvalidLines(ctx: IRendererContext<T>, invalidLinesHTML: string, wasInvalid: boolean[]): void {
  526   const hugeDomNode = document.createElement('div');
  527
  528:  hugeDomNode.innerHTML = invalidLinesHTML;
  529
  530   for (let i = 0; i < ctx.linesLength; i++) {
  531   const line = ctx.lines[i];

src/vs/editor/browser/widget/diffEditorWidget.ts:
  2157
  2158  let domNode = document.createElement('div');
  2159  domNode.className = `view-lines line-delete ${MOUSE_CURSOR_TEXT_CSS_CLASS_NAME}`;
  2160: domNode.innerHTML = sb.build();
  2161  Configuration.applyFontInfoSlow(domNode, fontInfo);
  2162
  2163  let marginDomNode = document.createElement('div');
  2164  marginDomNode.className = 'inline-deleted-margin-view-zone';
  2165: marginDomNode.innerHTML = marginHTML.join('');
  2166  Configuration.applyFontInfoSlow(marginDomNode, fontInfo);
  2167
  2168  return {

src/vs/editor/standalone/browser/colorizer.ts:
  40    let text = domNode.firstChild ? domNode.firstChild.nodeValue : '';
  41    domNode.className += ' ' + theme;
  42    let render = (str: string) => {
  43:       domNode.innerHTML = str;
  44    };
  45    return this.colorize(modeService, text || '', mimeType, options).then(render, (err) => console.error(err));
  46    }

src/vs/editor/standalone/browser/standaloneThemeServiceImpl.ts:
  212   if (!this._globalStyleElement) {
  213   this._globalStyleElement = dom.createStyleSheet();
  214   this._globalStyleElement.className = 'monaco-colors';
  215:      this._globalStyleElement.innerHTML = this._css;
  216   this._styleElements.push(this._globalStyleElement);
  217   }
  218   return Disposable.None;

  221   private _registerShadowDomContainer(domNode: HTMLElement): IDisposable {
  222   const styleElement = dom.createStyleSheet(domNode);
  223   styleElement.className = 'monaco-colors';
  224:  styleElement.innerHTML = this._css;
  225   this._styleElements.push(styleElement);
  226   return {
  227   dispose: () => {

  291   ruleCollector.addRule(generateTokensCSSForColorMap(colorMap));
  292
  293   this._css = cssRules.join('\n');
  294:  this._styleElements.forEach(styleElement => styleElement.innerHTML = this._css);
  295
  296   TokenizationRegistry.setColorMap(colorMap);
  297   this._onColorThemeChange.fire(theme);

src/vs/editor/test/browser/controller/imeTester.ts:
  55    let content = this._model.getModelLineContent(i);
  56    r += content + '<br/>';
  57    }
  58:   output.innerHTML = r;
  59    }
  60    }
  61

  69    let title = document.createElement('div');
  70    title.className = 'title';
  71
  72:   title.innerHTML = description + '. Type <strong>' + inputStr + '</strong>';
  73    container.appendChild(title);
  74
  75    let startBtn = document.createElement('button');

src/vs/workbench/contrib/notebook/browser/view/renderers/cellRenderer.ts:
  454
  455   private getMarkdownDragImage(templateData: MarkdownCellRenderTemplate): HTMLElement {
  456   const dragImageContainer = DOM.$('.cell-drag-image.monaco-list-row.focused.markdown-cell-row');
  457:  dragImageContainer.innerHTML = templateData.container.outerHTML;
  458
  459   // Remove all rendered content nodes after the
  460   const markdownContent = dragImageContainer.querySelector('.cell.markdown')!;

  611   return null;
  612   }
  613
  614:  editorContainer.innerHTML = richEditorText;
  615
  616   return dragImageContainer;
  617   }

src/vs/workbench/contrib/notebook/browser/view/renderers/webviewPreloads.ts:
  375   addMouseoverListeners(outputNode, outputId);
  376   const content = data.content;
  377   if (content.type === RenderOutputType.Html) {
  378:      outputNode.innerHTML = content.htmlContent;
  379   cellOutputContainer.appendChild(outputNode);
  380   domEval(outputNode);
  381   } else {

src/vs/workbench/contrib/webview/browser/pre/main.js:
  386   // apply default styles
  387   const defaultStyles = newDocument.createElement('style');
  388   defaultStyles.id = '_defaultStyles';
  389:  defaultStyles.innerHTML = defaultCssRules;
  390   newDocument.head.prepend(defaultStyles);
  391
  392   applyStyles(newDocument, newDocument.body);

src/vs/workbench/contrib/welcome/walkThrough/browser/walkThroughPart.ts:
  281
  282   const content = model.main.textEditorModel.getValue(EndOfLinePreference.LF);
  283   if (!strings.endsWith(input.resource.path, '.md')) {
  284:      this.content.innerHTML = content;
  285   this.updateSizeClasses();
  286   this.decorateContent();
  287   this.contentDisposables.push(this.keybindingService.onDidUpdateKeybindings(() => this.decorateContent()));

  303   const innerContent = document.createElement('div');
  304   innerContent.classList.add('walkThroughContent'); // only for markdown files
  305   const markdown = this.expandMacros(content);
  306:  innerContent.innerHTML = marked(markdown, { renderer });
  307   this.content.appendChild(innerContent);
  308
  309   model.snippets.forEach((snippet, i) => {
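The search above inventories raw `.innerHTML` assignments in `src/vs`, which are exactly the sinks that Trusted Types hardening targets. As a hedged, standalone sketch (not code from this repository; the helper name and sanitizing rules are assumptions), the usual remediation is to funnel such writes through one vetted helper:

```ts
// Minimal sketch: route innerHTML-style writes through a single helper
// that strips script elements and inline event handlers before adoption.
// All names here are illustrative assumptions, not repository code.
function setSanitizedHtml(el: HTMLElement, raw: string): void {
	const template = document.createElement('template');
	template.innerHTML = raw; // parsed inert; nothing executes inside <template>
	template.content.querySelectorAll('script').forEach(s => s.remove());
	template.content.querySelectorAll('*').forEach(node => {
		for (const attr of Array.from(node.attributes)) {
			if (attr.name.toLowerCase().startsWith('on')) {
				node.removeAttribute(attr.name);
			}
		}
	});
	el.textContent = '';
	el.appendChild(template.content);
}
```

With all writes concentrated in one place, a Trusted Types policy (or a stricter sanitizer such as the `insane` call visible above) can be enforced at a single choke point.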
48 .vscode/searches/es6.code-search (vendored)
@@ -2,31 +2,43 @@
# Flags: CaseSensitive WordMatch
# ContextLines: 2

12 results - 4 files
14 results - 4 files

src/vs/base/browser/dom.ts:
  83  };
  84
  81  };
  82
  83: /** @deprecated ES6 - use classList*/
  84  export const hasClass: (node: HTMLElement | SVGElement, className: string) => boolean = _classList.hasClass.bind(_classList);
  85: /** @deprecated ES6 - use classList*/
  86  export const hasClass: (node: HTMLElement | SVGElement, className: string) => boolean = _classList.hasClass.bind(_classList);
  86  export const addClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.addClass.bind(_classList);
  87: /** @deprecated ES6 - use classList*/
  88  export const addClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.addClass.bind(_classList);
  88  export const addClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.addClasses.bind(_classList);
  89: /** @deprecated ES6 - use classList*/
  90  export const addClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.addClasses.bind(_classList);
  90  export const removeClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.removeClass.bind(_classList);
  91: /** @deprecated ES6 - use classList*/
  92  export const removeClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.removeClass.bind(_classList);
  92  export const removeClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.removeClasses.bind(_classList);
  93: /** @deprecated ES6 - use classList*/
  94  export const removeClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.removeClasses.bind(_classList);
  95: /** @deprecated ES6 - use classList*/
  96  export const toggleClass: (node: HTMLElement | SVGElement, className: string, shouldHaveIt?: boolean) => void = _classList.toggleClass.bind(_classList);
  97
  94  export const toggleClass: (node: HTMLElement | SVGElement, className: string, shouldHaveIt?: boolean) => void = _classList.toggleClass.bind(_classList);
  95

src/vs/base/common/arrays.ts:
  401
  402  /**
  403:  * @deprecated ES6: use `Array.find`
  403:  * @deprecated ES6: use `Array.findIndex`
  404   */
  405  export function first<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean, notFoundValue: T): T;
  405  export function firstIndex<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean): number {

  417
  418  /**
  419:  * @deprecated ES6: use `Array.find`
  420   */
  421  export function first<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean, notFoundValue: T): T;

  568
  569  /**
  570:  * @deprecated ES6: use `Array.find`
  571   */
  572  export function find<T>(arr: ArrayLike<T>, predicate: (value: T, index: number, arr: ArrayLike<T>) => any): T | undefined {

src/vs/base/common/objects.ts:
  115
@@ -54,8 +66,8 @@ src/vs/base/common/strings.ts:
  170   */
  171  export function endsWith(haystack: string, needle: string): boolean {

  857
  858  /**
  859:  * @deprecated ES6
  860   */
  861  export function repeat(s: string, count: number): string {
  861
  862  /**
  863:  * @deprecated ES6
  864   */
  865  export function repeat(s: string, count: number): string {
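The hunks above track wrappers that became redundant once native ES6 DOM and array APIs could be assumed; migrating a call site is mechanical. A small sketch under that assumption (the variables are hypothetical, not repository code):

```ts
// Before: deprecated helpers from src/vs/base/browser/dom.ts and arrays.ts
//   addClass(node, 'active');
//   const item = first(items, i => i.length > 3, 'fallback');

// After: native ES6 equivalents
declare const node: HTMLElement;
declare const items: string[];
node.classList.add('active');                       // replaces addClass
const item = items.find(i => i.length > 3) ?? 'fallback'; // replaces first
```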
58 .vscode/searches/ts36031.code-search (vendored)
@@ -2,52 +2,18 @@
# Flags: RegExp
# ContextLines: 2

8 results - 4 files
2 results - 2 files

src/vs/base/browser/ui/tree/asyncDataTree.ts:
  241  } : () => 'treeitem',
  242  isChecked: options.accessibilityProvider!.isChecked ? (e) => {
  243:     return !!(options.accessibilityProvider?.isChecked!(e.element as T));
  244  } : undefined,
  245  getAriaLabel(e) {
  243  } : () => 'treeitem',
  244  isChecked: options.accessibilityProvider!.isChecked ? (e) => {
  245:     return !!(options.accessibilityProvider?.isChecked!(e.element as T));
  246  } : undefined,
  247  getAriaLabel(e) {

src/vs/platform/list/browser/listService.ts:
  463
  464  if (typeof options?.openOnSingleClick !== 'boolean' && options?.configurationService) {
  465:     this.openOnSingleClick = options?.configurationService!.getValue(openModeSettingKey) !== 'doubleClick';
  466  this._register(options?.configurationService.onDidChangeConfiguration(() => {
  467:     this.openOnSingleClick = options?.configurationService!.getValue(openModeSettingKey) !== 'doubleClick';
  468  }));
  469  } else {

src/vs/workbench/contrib/notebook/browser/notebookEditorWidget.ts:
  1526
  1527  await this._ensureActiveKernel();
  1528: await this._activeKernel?.cancelNotebookCell!(this._notebookViewModel!.uri, undefined);
  1529  }
  1530

  1535
  1536  await this._ensureActiveKernel();
  1537: await this._activeKernel?.executeNotebookCell!(this._notebookViewModel!.uri, undefined);
  1538  }
  1539

  1553
  1554  await this._ensureActiveKernel();
  1555: await this._activeKernel?.cancelNotebookCell!(this._notebookViewModel!.uri, cell.handle);
  1556  }
  1557

  1567
  1568  await this._ensureActiveKernel();
  1569: await this._activeKernel?.executeNotebookCell!(this._notebookViewModel!.uri, cell.handle);
  1570  }
  1571

src/vs/workbench/contrib/webview/electron-browser/iframeWebviewElement.ts:
  89   .then(() => this._resourceRequestManager.ensureReady())
  90   .then(() => {
  91:      this.element?.contentWindow!.postMessage({ channel, args: data }, '*');
  92   });
  93   }
src/vs/workbench/contrib/debug/browser/debugConfigurationManager.ts:
  254
  255  return debugDynamicExtensions.map(e => {
  256:     const type = e.contributes?.debuggers![0].type!;
  257  return {
  258  label: this.getDebuggerLabel(type)!,
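This search file collects the `obj?.member!(args)` shape (the file name references a TypeScript issue number). The pattern reads as guarded, but the `!` asserts the optional member is present instead of testing it. A standalone sketch of the safer rewrite (the interface is illustrative, not repository code):

```ts
// Illustrative type modeled on the flagged notebook kernel calls above.
interface Kernel {
	cancelNotebookCell?: (uri: string, handle: number | undefined) => Promise<void>;
}

async function cancel(kernel: Kernel | undefined, uri: string): Promise<void> {
	// Flagged shape: await kernel?.cancelNotebookCell!(uri, undefined);
	// Safer: check the optional member explicitly, then call it.
	if (kernel?.cancelNotebookCell) {
		await kernel.cancelNotebookCell(uri, undefined);
	}
}
```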
4 .yarnrc
@@ -1,3 +1,3 @@
disturl "https://electronjs.org/headers"
target "9.4.3"
disturl "https://atom.io/download/electron"
target "9.2.1"
runtime "electron"
80 CHANGELOG.md
@@ -1,85 +1,5 @@
# Change Log

## Version 1.25.2
* Release date: January 22, 2021
* Release status: General Availability
* Fixes https://github.com/microsoft/azuredatastudio/issues/13899

## Version 1.25.1
* Release date: December 10, 2020
* Release status: General Availability
* Fixes https://github.com/microsoft/azuredatastudio/issues/13751

## Version 1.25.0
* Release date: December 8, 2020
* Release status: General Availability
* Kusto extension improvements
* SQL Project extension improvements
* Notebook improvements
* Azure Browse Connections Preview performance improvements
* Bug Fixes

## Version 1.24.0
* Release date: November 12, 2020
* Release status: General Availability
* SQL Project improvements
* Notebook improvements, including WYSIWYG editor enhancements
* Azure Arc improvements
* Azure SQL Deployment UX improvements
* Azure Browse Connections Preview
* Bug Fixes

## Version 1.23.0
* Release date: October 14, 2020
* Release status: General Availability
* Added deployments of Azure SQL DB and VM
* Added PowerShell kernel results streaming support
* Added improvements to SQL Database Projects extension
* Bug Fixes
* Extension Updates:
    * SQL Server Import
    * Machine Learning
    * Schema Compare
    * Kusto
    * SQL Assessment
    * SQL Database Projects
    * Azure Arc
    * azdata

## Version 1.22.1
* Release date: September 30, 2020
* Release status: General Availability
* Fix bug #12615 Active connection filter doesn't untoggle | [#12615](https://github.com/microsoft/azuredatastudio/issues/12615)
* Fix bug #12572 Edit Data grid doesn't escape special characters | [#12572](https://github.com/microsoft/azuredatastudio/issues/12572)
* Fix bug #12570 Dashboard Explorer table doesn't escape special characters | [#12570](https://github.com/microsoft/azuredatastudio/issues/12570)
* Fix bug #12582 Delete row on Edit Data fails | [#12582](https://github.com/microsoft/azuredatastudio/issues/12582)
* Fix bug #12646 SQL Notebooks: Cells being treated isolated | [#12646](https://github.com/microsoft/azuredatastudio/issues/12646)

## Version 1.22.0
* Release date: September 22, 2020
* Release status: General Availability
* New Notebook Features
    * Supports a brand new text cell editing experience based on rich text formatting and seamless conversion to markdown, also known as the WYSIWYG toolbar (What You See Is What You Get)
    * Supports Kusto kernel
    * Supports pinning of notebooks
    * Added support for the new version of Jupyter Books
    * Improved Jupyter Shortcuts
    * Introduced perf loading improvements
* Added Azure Arc extension - Users can try out the Azure Arc public preview through Azure Data Studio. This includes:
    * Deploy data controller
    * Deploy Postgres
    * Deploy Managed Instance for Azure Arc
    * Connect to data controller
    * Access data service dashboards
    * Azure Arc Jupyter Book
* Added new deployment options
    * Azure SQL Database Edge
    * (Edge will require the Azure SQL Edge Deployment Extension)
* Added SQL Database Projects extension - The SQL Database Projects extension brings project-based database development to Azure Data Studio. In this preview release, SQL projects can be created and published from Azure Data Studio.
* Added Kusto (KQL) extension - Brings native Kusto experiences in Azure Data Studio for data exploration and data analytics against massive amounts of real-time streaming data stored in Azure Data Explorer. This preview release supports connecting to and browsing Azure Data Explorer clusters, writing KQL queries, and authoring notebooks with the Kusto kernel.
* SQL Server Import extension GA - Announcing the GA of the SQL Server Import extension; its features are no longer in preview. This extension facilitates importing csv/txt files. Learn more about the extension in [this article](sql-server-import-extension.md).
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22September+2020+Release%22+is%3Aclosed).

## Version 1.21.0
* Release date: August 12, 2020
* Release status: General Availability
18 README.md
@@ -19,7 +19,7 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
| [Linux DEB][linux-deb] |

Go to our [download page](https://aka.ms/getazuredatastudio) for more specific instructions.
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.

## Try out the latest insiders build from `main`:
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
@@ -29,8 +29,6 @@ Go to our [download page](https://aka.ms/getazuredatastudio) for more specific i
- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)

See the [change log](https://github.com/Microsoft/azuredatastudio/blob/main/CHANGELOG.md) for additional details of what's in this release.
Go to our [download page](https://aka.ms/getazuredatastudio) for more specific instructions.


## **Feature Highlights**

@@ -131,10 +129,10 @@ Copyright (c) Microsoft Corporation. All rights reserved.

Licensed under the [Source EULA](LICENSE.txt).

[win-user]: https://go.microsoft.com/fwlink/?linkid=2150927
[win-system]: https://go.microsoft.com/fwlink/?linkid=2150928
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2151312
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2151311
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2151508
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2151407
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2151506
[win-user]: https://go.microsoft.com/fwlink/?linkid=2138608
[win-system]: https://go.microsoft.com/fwlink/?linkid=2138704
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2138705
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2138609
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2138706
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2138507
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2138508
1 build/.gitattributes (vendored)
@@ -1 +0,0 @@
* text eol=lf
@@ -15,9 +15,9 @@
  "keywords": [],
  "author": "",
  "dependencies": {
    "@actions/core": "^1.2.6",
    "@actions/core": "^1.2.3",
    "@actions/github": "^2.1.1",
    "axios": "^0.21.1",
    "axios": "^0.19.2",
    "ts-node": "^8.6.2",
    "typescript": "^3.8.3"
  }
@@ -2,10 +2,10 @@
# yarn lockfile v1


"@actions/core@^1.2.6":
  version "1.2.6"
  resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.6.tgz#a78d49f41a4def18e88ce47c2cac615d5694bf09"
  integrity sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA==
"@actions/core@^1.2.3":
  version "1.2.3"
  resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.3.tgz#e844b4fa0820e206075445079130868f95bfca95"
  integrity sha512-Wp4xnyokakM45Uuj4WLUxdsa8fJjKVl1fDTsPbTEcTcuu0Nb26IPQbOtjmnfaCPGcaoPOOqId8H9NapZ8gii4w==

"@actions/github@^2.1.1":
  version "2.1.1"
@@ -144,12 +144,12 @@ atob-lite@^2.0.0:
  resolved "https://registry.yarnpkg.com/atob-lite/-/atob-lite-2.0.0.tgz#0fef5ad46f1bd7a8502c65727f0367d5ee43d696"
  integrity sha1-D+9a1G8b16hQLGVyfwNn1e5D1pY=

axios@^0.21.1:
  version "0.21.1"
  resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.1.tgz#22563481962f4d6bde9a76d516ef0e5d3c09b2b8"
  integrity sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==
axios@^0.19.2:
  version "0.19.2"
  resolved "https://registry.yarnpkg.com/axios/-/axios-0.19.2.tgz#3ea36c5d8818d0d5f8a8a97a6d36b86cdc00cb27"
  integrity sha512-fjgm5MvRHLhx+osE2xoekY70AhARk3a6hkN+3Io1jc00jtquGvxYlKlsFUhmUET0V5te6CcZI7lcv2Ym61mjHA==
  dependencies:
    follow-redirects "^1.10.0"
    follow-redirects "1.5.10"

before-after-hook@^2.0.0:
  version "2.1.0"
@@ -177,6 +177,13 @@ cross-spawn@^6.0.0:
    shebang-command "^1.2.0"
    which "^1.2.9"

debug@=3.1.0:
  version "3.1.0"
  resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
  integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==
  dependencies:
    ms "2.0.0"

deprecation@^2.0.0, deprecation@^2.3.1:
  version "2.3.1"
  resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919"
@@ -207,10 +214,12 @@ execa@^1.0.0:
    signal-exit "^3.0.0"
    strip-eof "^1.0.0"

follow-redirects@^1.10.0:
  version "1.13.1"
  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.1.tgz#5f69b813376cee4fd0474a3aba835df04ab763b7"
  integrity sha512-SSG5xmZh1mkPGyKzjZP8zLjltIfpW32Y5QpdNJyjcfGxK3qo3NDDkZOZSFiGn1A6SclQxY9GzEwAHQ3dmYRWpg==
follow-redirects@1.5.10:
  version "1.5.10"
  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.5.10.tgz#7b7a9f9aea2fdff36786a94ff643ed07f4ff5e2a"
  integrity sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==
  dependencies:
    debug "=3.1.0"

get-stream@^4.0.0:
  version "4.1.0"
@@ -266,6 +275,11 @@ make-error@^1.1.1:
  resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
  integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==

ms@2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
  integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=

nice-try@^1.0.4:
  version "1.0.5"
  resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
@@ -53,7 +53,7 @@ async function uploadBlob(blobService: azure.BlobService, quality: string, blobN
	}
};

await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
}

function getEnv(name: string): string {
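Several hunks above and below swap `new Promise((c, e) => ...)` for `new Promise<void>((c, e) => ...)`. Under newer TypeScript strictness, calling the resolver with no argument only type-checks when the promise's type parameter is `void`, so the annotation is needed when wrapping callback APIs. A self-contained sketch (the callback API is a stand-in, not the real Azure SDK call):

```ts
// Stand-in for a Node-style callback API such as createBlockBlobFromLocalFile.
function uploadFile(path: string, done: (err?: Error) => void): void {
	done(); // pretend the upload succeeded
}

async function upload(path: string): Promise<void> {
	// Without <void>, calling c() with no argument is rejected by strict TypeScript.
	await new Promise<void>((c, e) => uploadFile(path, err => err ? e(err) : c()));
}
```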
@@ -17,7 +17,7 @@ const fileNames = [
];

async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
	await new Promise<void>((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
	await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}

async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
@@ -33,7 +33,7 @@ async function uploadBlob(blobService: azure.BlobService, container: string, blo
	}
};

	await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
	await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}

async function publish(commit: string, files: readonly string[]): Promise<void> {
@@ -43,7 +43,6 @@ function createDefaultConfig(quality: string): Config {
}

function getConfig(quality: string): Promise<Config> {
	console.log(`Getting config for quality ${quality}`);
	const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/config';
	const query = {
@@ -53,13 +52,13 @@ function getConfig(quality: string): Promise<Config> {
		]
	};

	return retry(() => new Promise<Config>((c, e) => {
	return new Promise<Config>((c, e) => {
		client.queryDocuments(collection, query, { enableCrossPartitionQuery: true }).toArray((err, results) => {
			if (err && err.code !== 409) { return e(err); }

			c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
		});
	}));
	});
}

interface Asset {
@@ -87,7 +86,6 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
		updateTries++;

		return new Promise<void>((c, e) => {
			console.log(`Querying existing documents to update...`);
			client.queryDocuments(collection, updateQuery, { enableCrossPartitionQuery: true }).toArray((err, results) => {
				if (err) { return e(err); }
				if (results.length !== 1) { return e(new Error('No documents')); }
@@ -103,7 +101,6 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
					release.updates[platform] = type;
				}

				console.log(`Replacing existing document with updated version`);
				client.replaceDocument(release._self, release, err => {
					if (err && err.code === 409 && updateTries < 5) { return c(update()); }
					if (err) { return e(err); }
@@ -115,8 +112,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
		});
	}

	return retry(() => new Promise<void>((c, e) => {
		console.log(`Attempting to create document`);
	return new Promise<void>((c, e) => {
		client.createDocument(collection, release, err => {
			if (err && err.code === 409) { return c(update()); }
			if (err) { return e(err); }
@@ -124,7 +120,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
			console.log('Build successfully published.');
			c();
		});
	}));
	});
}

async function assertContainer(blobService: azure.BlobService, quality: string): Promise<void> {
@@ -192,6 +188,7 @@ async function publish(commit: string, quality: string, platform: string, type:
		console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
		return;
	}

	console.log('Uploading blobs to Azure storage...');

	await uploadBlob(blobService, quality, blobName, file);
@@ -250,22 +247,6 @@ async function publish(commit: string, quality: string, platform: string, type:
	await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
}

const RETRY_TIMES = 10;
async function retry<T>(fn: () => Promise<T>): Promise<T> {
	for (let run = 1; run <= RETRY_TIMES; run++) {
		try {
			return await fn();
		} catch (err) {
			if (!/ECONNRESET/.test(err.message)) {
				throw err;
			}
			console.log(`Caught error ${err} - ${run}/${RETRY_TIMES}`);
		}
	}

	throw new Error('Retried too many times');
}

function main(): void {
	const commit = process.env['BUILD_SOURCEVERSION'];
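The last hunk above removes the `retry` helper together with its call sites in `getConfig` and `createOrUpdate`. For reference, a standalone sketch of the pattern and how such a wrapper is applied (the call site at the end is hypothetical, not from these scripts):

```ts
// Self-contained restatement of the retry pattern shown in the hunk above:
// re-run the factory only for transient errors (here: ECONNRESET).
async function retry<T>(fn: () => Promise<T>, times = 10): Promise<T> {
	for (let run = 1; run <= times; run++) {
		try {
			return await fn();
		} catch (err: any) {
			// Anything other than a transient connection reset rethrows immediately.
			if (!/ECONNRESET/.test(err.message)) { throw err; }
			console.log(`Caught error ${err} - ${run}/${times}`);
		}
	}
	throw new Error('Retried too many times');
}

// Hypothetical usage: any promise-returning factory can be wrapped.
const config = retry(() => Promise.resolve({ quality: 'stable' }));
```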
@@ -8,7 +8,6 @@ steps:
    versionSpec: "1.x"

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -20,7 +19,6 @@ steps:
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -87,6 +87,10 @@ steps:
    set -e
    VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
      yarn gulp vscode-darwin-min-ci
    VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
      yarn gulp vscode-reh-darwin-min-ci
    VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
      yarn gulp vscode-reh-web-darwin-min-ci
  displayName: Build

- script: |
@@ -12,7 +12,6 @@ steps:
  displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  displayName: Restore Cache - Compiled Files
  inputs:
    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
@@ -50,7 +49,7 @@ steps:
    password $(github-distro-mixin-password)
    EOF

    git config user.email "sqltools@service.microsoft.com"
    git config user.email "andresse@microsoft.com"
    git config user.name "AzureDataStudio"
  displayName: Prepare tooling

@@ -62,7 +61,6 @@ steps:
  displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  displayName: Restore Cache - Node Modules
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -77,7 +75,6 @@ steps:
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  displayName: Save Cache - Node Modules
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -99,6 +96,8 @@ steps:
    set -e
    yarn gulp package-rebuild-extensions
    yarn gulp vscode-darwin-min-ci
    yarn gulp vscode-reh-darwin-min-ci
    yarn gulp vscode-reh-web-darwin-min-ci
  displayName: Build
  env:
    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
@@ -126,19 +125,19 @@ steps:
    set -e
    APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin
    APP_NAME="`ls $APP_ROOT | head -n 1`"
    yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log"
    yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots"
  displayName: Run smoke tests (Electron)
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

# - script: |
#     set -e
#     node ./node_modules/playwright/install.js
#     VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-web-darwin" \
#     yarn smoketest --web --headless --screenshots "$(build.artifactstagingdirectory)/smokeshots"
#   displayName: Run smoke tests (Browser)
#   continueOnError: true
#   condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
    set -e
    node ./node_modules/playwright/install.js
    VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-web-darwin" \
    yarn smoketest --web --headless --screenshots "$(build.artifactstagingdirectory)/smokeshots"
  displayName: Run smoke tests (Browser)
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
    set -e
@@ -202,7 +201,7 @@ steps:
    testResultsFiles: 'test-results.xml'
    searchFolder: '$(Build.SourcesDirectory)'
  continueOnError: true
  condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishCodeCoverageResults@1
  displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
@@ -28,7 +28,7 @@ steps:
    password $(github-distro-mixin-password)
    EOF

    git config user.email "sqltools@service.microsoft.com"
    git config user.email "andresse@microsoft.com"
    git config user.name "AzureDataStudio"

    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
@@ -22,7 +22,7 @@ steps:
    password $(github-distro-mixin-password)
    EOF

    git config user.email "sqltools@service.microsoft.com"
    git config user.email "andresse@microsoft.com"
    git config user.name "AzureDataStudio"
  displayName: Prepare tooling

@@ -34,7 +34,6 @@ steps:
  displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  displayName: Restore Cache - Node Modules
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -49,7 +48,6 @@ steps:
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  displayName: Save Cache - Node Modules
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'

@@ -31,10 +31,10 @@ steps:
    git config user.email "vscode@microsoft.com"
    git config user.name "VSCode"

    git checkout origin/electron-11.x.y
    git checkout origin/electron-x.y.z
    git merge origin/master

    # Push master branch into exploration branch
    git push origin HEAD:electron-11.x.y
    git push origin HEAD:electron-x.y.z

  displayName: Sync & Merge Exploration
@@ -17,7 +17,6 @@ steps:
    versionSpec: "1.x"

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -29,7 +28,6 @@ steps:
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -52,25 +52,21 @@ steps:
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
echo -n $VSCODE_ARCH > .build/arch
|
||||
displayName: Prepare arch cache flag
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
vstsFeed: 'npm-vscode'
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
CHILD_CONCURRENCY=1 npm_config_arch=$(NPM_ARCH) yarn --frozen-lockfile
|
||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
vstsFeed: 'npm-vscode'
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
@@ -89,64 +85,64 @@ steps:
|
||||
- script: |
|
||||
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci
yarn gulp vscode-linux-x64-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
yarn gulp vscode-reh-linux-x64-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
yarn gulp vscode-reh-web-linux-x64-min-ci
displayName: Build

- script: |
set -e
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-x64" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- task: PublishPipelineArtifact@0
inputs:
artifactName: 'crash-dump-linux-$(VSCODE_ARCH)'
artifactName: crash-dump-linux
targetPath: .build/crashes
displayName: 'Publish Crash Reports'
continueOnError: true
@@ -161,26 +157,15 @@ steps:

- script: |
set -e
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb"
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm"
displayName: Build deb, rpm packages

- script: |
set -e
yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
displayName: Prepare snap package
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))

# needed for code signing
- task: UseDotNet@2
displayName: 'Install .NET Core SDK 2.x'
inputs:
version: 2.x
yarn gulp "vscode-linux-x64-build-deb"
yarn gulp "vscode-linux-x64-build-rpm"
yarn gulp "vscode-linux-x64-prepare-snap"
displayName: Build packages

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '.build/linux/rpm'
FolderPath: '.build/linux/rpm/x86_64'
Pattern: '*.rpm'
signConfigType: inlineSignParams
inlineOperation: |
@@ -201,16 +186,14 @@ steps:
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/linux/publish.sh
displayName: Publish

- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: 'snap-$(VSCODE_ARCH)'
artifactName: snap-x64
targetPath: .build/linux/snap-tarball
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
@@ -4,10 +4,11 @@ REPO="$(pwd)"
ROOT="$REPO/.."

# Publish tarball
PLATFORM_LINUX="linux-$VSCODE_ARCH"
PLATFORM_LINUX="linux-x64"
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$VSCODE_ARCH-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$VSCODE_ARCH-$BUILD_VERSION.tar.gz"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"

rm -rf $ROOT/code-*.tar.*
@@ -27,36 +28,24 @@ rm -rf $ROOT/vscode-server-*.tar.*
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"

# Publish DEB
case $VSCODE_ARCH in
x64) DEB_ARCH="amd64" ;;
*) DEB_ARCH="$VSCODE_ARCH" ;;
esac

PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
PLATFORM_DEB="linux-deb-x64"
DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"

node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"

# Publish RPM
case $VSCODE_ARCH in
x64) RPM_ARCH="x86_64" ;;
armhf) RPM_ARCH="armv7hl" ;;
arm64) RPM_ARCH="aarch64" ;;
*) RPM_ARCH="$VSCODE_ARCH" ;;
esac

PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
PLATFORM_RPM="linux-rpm-x64"
RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"

node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"

if [ "$VSCODE_ARCH" == "x64" ]; then
# Publish Snap
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
fi
# Publish Snap
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
@@ -9,7 +9,6 @@ steps:
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Compiled Files
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
@@ -46,7 +45,7 @@ steps:
password $(github-distro-mixin-password)
EOF

git config user.email "sqltools@service.microsoft.com"
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling

@@ -58,7 +57,6 @@ steps:
displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -73,7 +71,6 @@ steps:
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -94,7 +91,8 @@ steps:
- script: |
set -e
yarn gulp vscode-linux-x64-min-ci
yarn gulp vscode-web-min-ci
yarn gulp vscode-reh-linux-x64-min-ci
yarn gulp vscode-reh-web-linux-x64-min-ci
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
@@ -136,8 +134,7 @@ steps:
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
export INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
export NO_CLEANUP=1
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
DISPLAY=:10 node ./scripts/test-extensions-unit.js ${{ extension }}
displayName: 'Run ${{ extension }} Stable Extension Unit Tests'
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
@@ -152,21 +149,6 @@ steps:
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))

- bash: |
set -e
mkdir -p $(Build.ArtifactStagingDirectory)/logs/linux-x64
cd /tmp
for folder in adsuser*/
do
folder=${folder%/}
# Only archive directories we want for debugging purposes
tar -czvf $(Build.ArtifactStagingDirectory)/logs/linux-x64/$folder.tar.gz $folder/User $folder/logs
done

displayName: Archive Logs
continueOnError: true
condition: succeededOrFailed()

- script: |
set -e
yarn gulp vscode-linux-x64-build-deb
@@ -235,11 +217,10 @@ steps:
testResultsFiles: '*.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
condition: succeededOrFailed()

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
@@ -13,12 +13,6 @@ resources:
- container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
endpoint: VSCodeHub
- container: vscode-arm64
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
endpoint: VSCodeHub
- container: vscode-armhf
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-armhf
endpoint: VSCodeHub
- container: snapcraft
image: snapcore/snapcraft:stable

@@ -70,9 +64,6 @@ stages:
- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: vscode-x64
variables:
VSCODE_ARCH: x64
NPM_ARCH: x64
steps:
- template: linux/product-build-linux.yml

@@ -81,28 +72,22 @@ stages:
- Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: x64
steps:
- template: linux/snap-build-linux.yml

- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
container: vscode-armhf
variables:
VSCODE_ARCH: armhf
NPM_ARCH: armv7l
steps:
- template: linux/product-build-linux.yml
- template: linux/product-build-linux-multiarch.yml

- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
container: vscode-arm64
variables:
VSCODE_ARCH: arm64
NPM_ARCH: arm64
steps:
- template: linux/product-build-linux.yml
- template: linux/product-build-linux-multiarch.yml

- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
@@ -52,13 +52,9 @@ steps:
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- script: |
echo -n $VSCODE_ARCH > .build/arch
displayName: Prepare arch cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
@@ -71,7 +67,7 @@ steps:

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
@@ -116,8 +112,8 @@ steps:
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp vscode-reh-linux-x64-min
yarn gulp vscode-reh-web-linux-x64-min
yarn gulp minify-vscode-reh
yarn gulp minify-vscode-reh-web
displayName: Compile
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
@@ -17,7 +17,7 @@ jobs:
- template: sql-product-compile.yml

- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
pool:
vmImage: macOS-latest
dependsOn:
@@ -27,7 +27,7 @@ jobs:
timeoutInMinutes: 180

- job: macOS_Signing
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), eq(variables['signed'], true), ne(variables['VSCODE_QUALITY'], 'saw'))
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), eq(variables['signed'], true))
pool:
vmImage: macOS-latest
dependsOn:
@@ -50,7 +50,7 @@ jobs:
timeoutInMinutes: 70

- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
container: linux-x64
@@ -61,15 +61,15 @@ jobs:
steps:
- template: web/sql-product-build-web.yml

# - job: Docker
# condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
# pool:
# vmImage: 'Ubuntu-16.04'
# container: linux-x64
# dependsOn:
# - Linux
# steps:
# - template: docker/sql-product-build-docker.yml
- job: Docker
condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
container: linux-x64
dependsOn:
- Linux
steps:
- template: docker/sql-product-build-docker.yml

- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
@@ -98,7 +98,7 @@ jobs:
dependsOn:
- macOS
- Linux
# - Docker
- Docker
- Windows
- Windows_Test
- LinuxWeb
@@ -6,7 +6,6 @@ steps:
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Compiled Files
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
@@ -37,7 +36,7 @@ steps:
password $(github-distro-mixin-password)
EOF

git config user.email "sqltools@service.microsoft.com"
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
@@ -51,7 +50,6 @@ steps:
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -64,7 +62,6 @@ steps:
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -99,8 +96,8 @@ steps:
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp vscode-reh-linux-x64-min
yarn gulp vscode-reh-web-linux-x64-min
yarn gulp minify-vscode-reh
yarn gulp minify-vscode-reh-web
displayName: Compile
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

@@ -126,7 +123,6 @@ steps:
displayName: 'Publish Artifact: drop'

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Compiled Files
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
@@ -6,7 +6,6 @@ steps:
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Compiled Files
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
@@ -43,7 +42,7 @@ steps:
password $(github-distro-mixin-password)
EOF

git config user.email "sqltools@service.microsoft.com"
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling

@@ -55,7 +54,6 @@ steps:
displayName: Merge distro

# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# displayName: Restore Cache - Node Modules
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
@@ -68,7 +66,6 @@ steps:
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# displayName: Save Cache - Node Modules
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'

@@ -1,7 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<clear />
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
</configuration>
</configuration>
@@ -13,7 +13,6 @@ steps:
addToPath: true

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -27,7 +26,6 @@ steps:
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'

@@ -12,9 +12,9 @@ $ServerZipLocation = "$Repo\.build\win32-$Arch\server"
$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"

# Create server archive
# New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
$global:LASTEXITCODE = 0
# exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
# exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"
exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"

exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"
@@ -6,7 +6,6 @@ steps:
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Compiled Files
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
@@ -45,7 +44,7 @@ steps:
$ErrorActionPreference = "Stop"
"machine github.com`nlogin azuredatastudio`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII

exec { git config user.email "sqltools@service.microsoft.com" }
exec { git config user.email "andresse@microsoft.com" }
exec { git config user.name "AzureDataStudio" }
displayName: Prepare tooling

@@ -56,7 +55,6 @@ steps:
displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -73,7 +71,6 @@ steps:
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
@@ -98,8 +95,8 @@ steps:
$ErrorActionPreference = "Stop"
exec { yarn gulp "package-rebuild-extensions" }
exec { yarn gulp "vscode-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-win32-x64-min" }
exec { yarn gulp "vscode-reh-web-win32-x64-min" }
exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
exec { yarn gulp "vscode-win32-x64-code-helper" }
exec { yarn gulp "vscode-win32-x64-inno-updater" }
displayName: Build
@@ -134,17 +131,17 @@ steps:
$AppRoot = "$(agent.builddirectory)\azuredatastudio-win32-x64"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
# exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\azuredatastudio-reh-win32-x64"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\azuredatastudio-reh-win32-x64"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

# - powershell: |
# . build/azure-pipelines/win32/exec.ps1
# $ErrorActionPreference = "Stop"
# exec { .\scripts\test-unstable.bat --build --tfs }
# continueOnError: true
# condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
# displayName: Run unstable tests
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { .\scripts\test-unstable.bat --build --tfs }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
displayName: Run unstable tests

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'Sign out code'
@@ -293,7 +290,7 @@ steps:
searchFolder: '$(Build.SourcesDirectory)'
failTaskOnFailedTests: true
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'

@@ -99,4 +99,4 @@ steps:
mergeTestResults: true
failTaskOnFailedTests: true
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
@@ -98,13 +98,12 @@ const indentationFilter = [
// {{SQL CARBON EDIT}}
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,lcl,docx,sql,vsix,bacpac,ipynb,jpg}',
'!**/*.{xlf,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
@@ -179,9 +178,7 @@ const copyrightFilter = [
'!extensions/mssql/src/prompts/**',
'!extensions/kusto/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/query-history/images/**',
'!extensions/sql/build/update-grammar.js',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',

@@ -261,7 +261,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(fileLengthFilter.restore)
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions())
.pipe(electron(_.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, ffmpegChromium: true })))
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));

if (platform === 'linux') {
@@ -345,7 +345,7 @@ const BUILD_TARGETS = [
{ platform: 'darwin', arch: null, opts: { stats: true } },
{ platform: 'linux', arch: 'ia32' },
{ platform: 'linux', arch: 'x64' },
{ platform: 'linux', arch: 'armhf' },
{ platform: 'linux', arch: 'arm' },
{ platform: 'linux', arch: 'arm64' },
];
BUILD_TARGETS.forEach(buildTarget => {
@@ -23,7 +23,7 @@ const commit = util.getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);

function getDebPackageArch(arch) {
return { x64: 'amd64', armhf: 'armhf', arm64: 'arm64' }[arch];
return { x64: 'amd64', arm: 'armhf', arm64: 'arm64' }[arch];
}

function prepareDebPackage(arch) {
@@ -53,11 +53,6 @@ function prepareDebPackage(arch) {
.pipe(replace('@@LICENSE@@', product.licenseName))
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));

const workspaceMime = gulp.src('resources/linux/code-workspace.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('usr/share/mime/packages/' + product.applicationName + '-workspace.xml'));

const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));

@@ -101,7 +96,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst'));

const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, workspaceMime, icon, bash_completion, zsh_completion, code);
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, bash_completion, zsh_completion, code);

return all.pipe(vfs.dest(destination));
};
@@ -121,7 +116,7 @@ function getRpmBuildPath(rpmArch) {
}

function getRpmPackageArch(arch) {
return { x64: 'x86_64', armhf: 'armv7hl', arm64: 'aarch64' }[arch];
return { x64: 'x86_64', arm: 'armhf', arm64: 'arm64' }[arch];
}

function prepareRpmPackage(arch) {
@@ -150,11 +145,6 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@LICENSE@@', product.licenseName))
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));

const workspaceMime = gulp.src('resources/linux/code-workspace.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('BUILD/usr/share/mime/packages/' + product.applicationName + '-workspace.xml'));

const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));

@@ -185,7 +175,7 @@ function prepareRpmPackage(arch) {
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));

const all = es.merge(code, desktops, appdata, workspaceMime, icon, bash_completion, zsh_completion, spec, specIcon);
const all = es.merge(code, desktops, appdata, icon, bash_completion, zsh_completion, spec, specIcon);

return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
};
@@ -259,23 +249,33 @@ function buildSnapPackage(arch) {

const BUILD_TARGETS = [
{ arch: 'x64' },
{ arch: 'armhf' },
{ arch: 'arm' },
{ arch: 'arm64' },
];

BUILD_TARGETS.forEach(({ arch }) => {
const debArch = getDebPackageArch(arch);
const prepareDebTask = task.define(`vscode-linux-${arch}-prepare-deb`, task.series(util.rimraf(`.build/linux/deb/${debArch}`), prepareDebPackage(arch)));
const buildDebTask = task.define(`vscode-linux-${arch}-build-deb`, task.series(prepareDebTask, buildDebPackage(arch)));
gulp.task(buildDebTask);
BUILD_TARGETS.forEach((buildTarget) => {
const arch = buildTarget.arch;

const rpmArch = getRpmPackageArch(arch);
const prepareRpmTask = task.define(`vscode-linux-${arch}-prepare-rpm`, task.series(util.rimraf(`.build/linux/rpm/${rpmArch}`), prepareRpmPackage(arch)));
const buildRpmTask = task.define(`vscode-linux-${arch}-build-rpm`, task.series(prepareRpmTask, buildRpmPackage(arch)));
gulp.task(buildRpmTask);
{
const debArch = getDebPackageArch(arch);
const prepareDebTask = task.define(`vscode-linux-${arch}-prepare-deb`, task.series(util.rimraf(`.build/linux/deb/${debArch}`), prepareDebPackage(arch)));
// gulp.task(prepareDebTask);
const buildDebTask = task.define(`vscode-linux-${arch}-build-deb`, task.series(prepareDebTask, buildDebPackage(arch)));
gulp.task(buildDebTask);
}

const prepareSnapTask = task.define(`vscode-linux-${arch}-prepare-snap`, task.series(util.rimraf(`.build/linux/snap/${arch}`), prepareSnapPackage(arch)));
gulp.task(prepareSnapTask);
const buildSnapTask = task.define(`vscode-linux-${arch}-build-snap`, task.series(prepareSnapTask, buildSnapPackage(arch)));
gulp.task(buildSnapTask);
{
const rpmArch = getRpmPackageArch(arch);
const prepareRpmTask = task.define(`vscode-linux-${arch}-prepare-rpm`, task.series(util.rimraf(`.build/linux/rpm/${rpmArch}`), prepareRpmPackage(arch)));
// gulp.task(prepareRpmTask);
const buildRpmTask = task.define(`vscode-linux-${arch}-build-rpm`, task.series(prepareRpmTask, buildRpmPackage(arch)));
gulp.task(buildRpmTask);
}

{
const prepareSnapTask = task.define(`vscode-linux-${arch}-prepare-snap`, task.series(util.rimraf(`.build/linux/snap/${arch}`), prepareSnapPackage(arch)));
gulp.task(prepareSnapTask);
const buildSnapTask = task.define(`vscode-linux-${arch}-build-snap`, task.series(prepareSnapTask, buildSnapPackage(arch)));
gulp.task(buildSnapTask);
}
});
@@ -55,7 +55,7 @@ function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, exports.config, {
platform: process.platform,
arch: arch === 'armhf' ? 'arm' : arch,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});

@@ -61,7 +61,7 @@ function getElectron(arch: string): () => NodeJS.ReadWriteStream {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch: arch === 'armhf' ? 'arm' : arch,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});

@@ -207,26 +207,25 @@ const externalExtensions = [
// they get packaged separately. Adding extension name here, will make the build to create
// a separate vsix package for the extension and the extension will be excluded from the main package.
// Any extension not included here will be installed by default.
'admin-pack',
'admin-tool-ext-win',
'agent',
'arc',
'asde-deployment',
'azdata',
'azurehybridtoolkit',
'cms',
'dacpac',
'import',
'profiler',
'admin-pack',
'dacpac',
'schema-compare',
'cms',
'query-history',
'kusto',
'liveshare',
'machine-learning',
'profiler',
'query-history',
'schema-compare',
'server-report',
'sql-assessment',
'sql-database-projects',
'sql-migration'
'machine-learning',
'sql-assessment',
'asde-deployment',
'sql-migration',
'data-workspace'
];
// extensions that require a rebuild since they have native parts
const rebuildExtensions = [

@@ -241,26 +241,25 @@ const externalExtensions = [
// they get packaged separately. Adding extension name here, will make the build to create
// a separate vsix package for the extension and the extension will be excluded from the main package.
// Any extension not included here will be installed by default.
'admin-pack',
'admin-tool-ext-win',
'agent',
'arc',
'asde-deployment',
'azdata',
'azurehybridtoolkit',
'cms',
'dacpac',
'import',
'profiler',
'admin-pack',
'dacpac',
'schema-compare',
'cms',
'query-history',
'kusto',
'liveshare',
'machine-learning',
'profiler',
'query-history',
'schema-compare',
'server-report',
'sql-assessment',
'sql-database-projects',
'sql-migration'
'machine-learning',
'sql-assessment',
'asde-deployment',
'sql-migration',
'data-workspace'
];

// extensions that require a rebuild since they have native parts
@@ -206,10 +206,6 @@
"name": "vs/workbench/contrib/webview",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/webviewPanel",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/customEditor",
"project": "vscode-workbench"

@@ -1004,7 +1004,7 @@ function createResource(project: string, slug: string, xlfFile: File, apiHostnam
* https://dev.befoolish.co/tx-docs/public/projects/updating-content#what-happens-when-you-update-files
*/
function updateResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: string): Promise<any> {
return new Promise<void>((resolve, reject) => {
return new Promise((resolve, reject) => {
const data = JSON.stringify({ content: xlfFile.contents.toString() });
const options = {
hostname: apiHostname,
@@ -53,13 +53,6 @@ const CORE_TYPES = [
'trimLeft',
'trimRight'
];
// Types that are defined in a common layer but are known to be only
// available in native environments should not be allowed in browser
const NATIVE_TYPES = [
'NativeParsedArgs',
'INativeEnvironmentService',
'INativeWindowConfiguration'
];
const RULES = [
// Tests: skip
{
@@ -75,37 +68,6 @@ const RULES = [
'MessageEvent',
'data'
],
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts',
'@types/node' // no node.js
]
},
// Common: vs/platform/environment/common/argv.ts
{
target: '**/{vs,sql}/platform/environment/common/argv.ts',
disallowedTypes: [ /* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts',
'@types/node' // no node.js
]
},
// Common: vs/platform/environment/common/environment.ts
{
target: '**/{vs,sql}/platform/environment/common/environment.ts',
disallowedTypes: [ /* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts',
'@types/node' // no node.js
]
},
// Common: vs/platform/windows/common/windows.ts
{
target: '**/{vs,sql}/platform/windows/common/windows.ts',
disallowedTypes: [ /* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts',
'@types/node' // no node.js
@@ -119,7 +81,6 @@ const RULES = [
// Safe access to global
'global'
],
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts',
'@types/node' // no node.js
@@ -129,7 +90,6 @@ const RULES = [
{
target: '**/{vs,sql}/**/common/**',
allowedTypes: CORE_TYPES,
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts',
'@types/node' // no node.js
@@ -139,7 +99,6 @@ const RULES = [
{
target: '**/{vs,sql}/**/browser/**',
allowedTypes: CORE_TYPES,
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'@types/node' // no node.js
]
@@ -148,7 +107,6 @@ const RULES = [
{
target: '**/src/{vs,sql}/editor/contrib/**',
allowedTypes: CORE_TYPES,
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'@types/node' // no node.js
]
@@ -174,7 +132,7 @@ const RULES = [
},
// Electron (sandbox)
{
target: '**/{vs,sql}/**/electron-sandbox/**',
target: '**/vs/**/electron-sandbox/**',
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'@types/node' // no node.js
@@ -204,7 +162,7 @@ let hasErrors = false;
function checkFile(program, sourceFile, rule) {
checkNode(sourceFile);
function checkNode(node) {
var _a, _b;
var _a;
if (node.kind !== ts.SyntaxKind.Identifier) {
return ts.forEachChild(node, checkNode); // recurse down
}
@@ -212,12 +170,6 @@ function checkFile(program, sourceFile, rule) {
if ((_a = rule.allowedTypes) === null || _a === void 0 ? void 0 : _a.some(allowed => allowed === text)) {
return; // override
}
if ((_b = rule.disallowedTypes) === null || _b === void 0 ? void 0 : _b.some(disallowed => disallowed === text)) {
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
hasErrors = true;
return;
}
const checker = program.getTypeChecker();
const symbol = checker.getSymbolAtLocation(node);
if (symbol) {
@@ -55,14 +55,6 @@ const CORE_TYPES = [
'trimRight'
];

// Types that are defined in a common layer but are known to be only
// available in native environments should not be allowed in browser
const NATIVE_TYPES = [
'NativeParsedArgs',
'INativeEnvironmentService',
'INativeWindowConfiguration'
];

const RULES = [

// Tests: skip
@@ -81,40 +73,6 @@ const RULES = [
'MessageEvent',
'data'
],
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts', // no DOM
'@types/node' // no node.js
]
},

// Common: vs/platform/environment/common/argv.ts
{
target: '**/{vs,sql}/platform/environment/common/argv.ts',
disallowedTypes: [/* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts', // no DOM
'@types/node' // no node.js
]
},

// Common: vs/platform/environment/common/environment.ts
{
target: '**/{vs,sql}/platform/environment/common/environment.ts',
disallowedTypes: [/* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts', // no DOM
'@types/node' // no node.js
]
},

// Common: vs/platform/windows/common/windows.ts
{
target: '**/{vs,sql}/platform/windows/common/windows.ts',
disallowedTypes: [/* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts', // no DOM
'@types/node' // no node.js
@@ -130,7 +88,6 @@ const RULES = [
// Safe access to global
'global'
],
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts', // no DOM
'@types/node' // no node.js
@@ -141,7 +98,6 @@ const RULES = [
{
target: '**/{vs,sql}/**/common/**',
allowedTypes: CORE_TYPES,
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts', // no DOM
'@types/node' // no node.js
@@ -152,7 +108,6 @@ const RULES = [
{
target: '**/{vs,sql}/**/browser/**',
allowedTypes: CORE_TYPES,
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'@types/node' // no node.js
]
@@ -162,7 +117,6 @@ const RULES = [
{
target: '**/src/{vs,sql}/editor/contrib/**',
allowedTypes: CORE_TYPES,
disallowedTypes: NATIVE_TYPES,
disallowedDefinitions: [
'@types/node' // no node.js
]
@@ -191,7 +145,7 @@ const RULES = [

// Electron (sandbox)
{
target: '**/{vs,sql}/**/electron-sandbox/**',
target: '**/vs/**/electron-sandbox/**',
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'@types/node' // no node.js
@@ -227,7 +181,6 @@ interface IRule {
skip?: boolean;
allowedTypes?: string[];
disallowedDefinitions?: string[];
disallowedTypes?: string[];
}

let hasErrors = false;
@@ -246,14 +199,6 @@ function checkFile(program: ts.Program, sourceFile: ts.SourceFile, rule: IRule)
return; // override
}

if (rule.disallowedTypes?.some(disallowed => disallowed === text)) {
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);

hasErrors = true;
return;
}

const checker = program.getTypeChecker();
const symbol = checker.getSymbolAtLocation(node);
if (symbol) {
@@ -15,7 +15,7 @@ const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
const rootDir = path.resolve(__dirname, '..', '..');

function runProcess(command: string, args: ReadonlyArray<string> = []) {
return new Promise<void>((resolve, reject) => {
return new Promise((resolve, reject) => {
const child = spawn(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
child.on('exit', err => !err ? resolve() : process.exit(err ?? 1));
child.on('error', reject);

@@ -1860,9 +1860,9 @@ inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.1, inherits@~2.0.3:
integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=

ini@~1.3.0:
version "1.3.7"
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84"
integrity sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ==
version "1.3.5"
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927"
integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==

is-absolute@^1.0.0:
version "1.0.0"

@@ -60,12 +60,12 @@
"git": {
"name": "electron",
"repositoryUrl": "https://github.com/electron/electron",
"commitHash": "ca82414364002efa665ffa7427e267adf76ed1f3"
"commitHash": "03c7a54dc534ce1867d4393b9b1a6989d4a7e005"
}
},
"isOnlyProductionDependency": true,
"license": "MIT",
"version": "9.4.3"
"version": "9.2.1"
},
{
"component": {
@@ -91,7 +91,7 @@
]
},
"dependencies": {
"@microsoft/ads-extension-telemetry": "^1.1.3",
"ads-extension-telemetry": "^1.0.0",
"service-downloader": "0.2.1",
"vscode-nls": "^3.2.1"
},

@@ -3,7 +3,7 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import AdsTelemetryReporter from '@microsoft/ads-extension-telemetry';
import AdsTelemetryReporter from 'ads-extension-telemetry';

import * as Utils from './utils';

@@ -182,13 +182,6 @@
resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.2.tgz#26520bf09abe4a5644cd5414e37125a8954241dd"
integrity sha512-tsAQNx32a8CoFhjhijUIhI4kccIAgmGhy8LZMZgGfmXcpMbPRUqn5LWmgRttILi6yeGmBJd2xsPkFMs0PzgPCw==

"@microsoft/ads-extension-telemetry@^1.1.3":
version "1.1.3"
resolved "https://registry.yarnpkg.com/@microsoft/ads-extension-telemetry/-/ads-extension-telemetry-1.1.3.tgz#54160eefa21f2a536622b0617f3c3f2018cf9c87"
integrity sha512-+h6hM9oOA6Zj/N0nCGPzRgydR0YHiHpNJoNlv6a/ziWXO3RYSbQX+3U/PpT3gEA6+8RwByf6RVICo7uIGBy1LQ==
dependencies:
vscode-extension-telemetry "^0.1.6"

"@types/mocha@^5.2.5":
version "5.2.7"
resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-5.2.7.tgz#315d570ccb56c53452ff8638738df60726d5b6ea"
@@ -199,6 +192,13 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.7.tgz#01e4ea724d9e3bd50d90c11fd5980ba317d8fa11"
integrity sha512-E6Zn0rffhgd130zbCbAr/JdXfXkoOUFAKNs/rF8qnafSJ8KYaA/j3oz7dcwal+lYjLA7xvdd5J4wdYpCTlP8+w==

ads-extension-telemetry@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/ads-extension-telemetry/-/ads-extension-telemetry-1.0.0.tgz#840b363a6ad958447819b9bc59fdad3e49de31a9"
integrity sha512-ouxZVECe4tsO0ek0dLdnAZEz1Lrytv1uLbbGZhRbZsHITsUYNjnkKnA471uWh0Dj80s+orvv49/j3/tNBDP/SQ==
dependencies:
vscode-extension-telemetry "0.1.2"

agent-base@4, agent-base@^4.3.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.3.0.tgz#8165f01c436009bccad0b1d122f05ed770efc6ee"
@@ -225,15 +225,15 @@ append-transform@^2.0.0:
dependencies:
default-require-extensions "^3.0.0"

applicationinsights@1.7.4:
version "1.7.4"
resolved "https://registry.yarnpkg.com/applicationinsights/-/applicationinsights-1.7.4.tgz#e7d96435594d893b00cf49f70a5927105dbb8749"
integrity sha512-XFLsNlcanpjFhHNvVWEfcm6hr7lu9znnb6Le1Lk5RE03YUV9X2B2n2MfM4kJZRrUdV+C0hdHxvWyv+vWoLfY7A==
applicationinsights@1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/applicationinsights/-/applicationinsights-1.4.0.tgz#e17e436427b6e273291055181e29832cca978644"
integrity sha512-TV8MYb0Kw9uE2cdu4V/UvTKdOABkX2+Fga9iDz0zqV7FLrNXfmAugWZmmdTx4JoynYkln3d5CUHY3oVSUEbfFw==
dependencies:
cls-hooked "^4.2.2"
continuation-local-storage "^3.2.1"
diagnostic-channel "0.2.0"
diagnostic-channel-publishers "^0.3.3"
diagnostic-channel-publishers "^0.3.2"

async-hook-jl@^1.7.6:
version "1.7.6"
@@ -397,7 +397,7 @@ default-require-extensions@^3.0.0:
dependencies:
strip-bom "^4.0.0"

diagnostic-channel-publishers@^0.3.3:
diagnostic-channel-publishers@^0.3.2:
version "0.3.5"
resolved "https://registry.yarnpkg.com/diagnostic-channel-publishers/-/diagnostic-channel-publishers-0.3.5.tgz#a84a05fd6cc1d7619fdd17791c17e540119a7536"
integrity sha512-AOIjw4T7Nxl0G2BoBPhkQ6i7T4bUd9+xvdYizwvG7vVAM1dvr+SDrcUudlmzwH0kbEwdR2V1EcnKT0wAeYLQNQ==
@@ -974,12 +974,12 @@ to-fast-properties@^2.0.0:
resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=

vscode-extension-telemetry@^0.1.6:
version "0.1.6"
resolved "https://registry.yarnpkg.com/vscode-extension-telemetry/-/vscode-extension-telemetry-0.1.6.tgz#048b70c93243413036a8315cda493b8e7342980c"
integrity sha512-rbzSg7k4NnsCdF4Lz0gI4jl3JLXR0hnlmfFgsY8CSDYhXgdoIxcre8jw5rjkobY0xhSDhbG7xCjP8zxskySJ/g==
vscode-extension-telemetry@0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/vscode-extension-telemetry/-/vscode-extension-telemetry-0.1.2.tgz#049207f5453930888ff68ca925b07bab08f2c955"
integrity sha512-FSbaZKlIH3VKvBJsKw7v5bESWHXzltji2rtjaJeJglpQH4tfClzwHMzlMXUZGiblV++djEzb1gW8mb5E+wxFsg==
dependencies:
applicationinsights "1.7.4"
applicationinsights "1.4.0"

vscode-nls@^3.2.1:
version "3.2.5"
@@ -2,7 +2,7 @@
"name": "agent",
"displayName": "SQL Server Agent",
"description": "Manage and troubleshoot SQL Server Agent jobs",
"version": "0.49.0",
"version": "0.48.0",
"publisher": "Microsoft",
"preview": true,
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/main/LICENSE.txt",

@@ -30,9 +30,9 @@ export abstract class AgentDialog<T extends IAgentDialogData> {
return this.model.dialogMode;
}

protected abstract updateModel(): Promise<void>;
protected abstract async updateModel(): Promise<void>;

protected abstract initializeDialog(dialog: azdata.window.Dialog): Promise<void>;
protected abstract async initializeDialog(dialog: azdata.window.Dialog): Promise<void>;

public async openDialog(dialogName?: string) {
if (!this._isOpen) {

@@ -2,7 +2,7 @@

Welcome to Microsoft Azure Arc Extension for Azure Data Studio!

**This extension is only applicable to customers in the Azure Arc data services public preview.**
**This extension is only applicable to customers in the Azure Arc data services private preview.**

## Overview
@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.7 7.9L15.8 15L15 15.8L7.9 8.7L0.8 15.8L0 15L7.1 7.9L0 0.8L0.8 0L7.9 7.1L15 0L15.8 0.8L8.7 7.9Z" fill="#0078D4"/>
</svg>

(deleted image: 228 B)

@@ -1,10 +0,0 @@
<svg id="a2f0dd32-c564-48d6-97d7-86323bfba35b" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 18">
<defs>
<linearGradient id="b863127b-2eb8-42a1-a46b-989a6a8d258c" x1="9" y1="18" x2="9" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#32bedd" />
<stop offset="0.576" stop-color="#32ceef" />
<stop offset="1" stop-color="#32d4f5" />
</linearGradient>
</defs>
<path d="M18,9.972V7.92l-.288-.108-2.2-.72-.576-1.4,1.116-2.376-1.44-1.44-.288.144L12.276,3.06l-1.4-.576L9.972,0H7.92L7.812.288l-.72,2.2-1.4.576L3.348,1.944l-1.44,1.44.144.288L3.1,5.724l-.576,1.4L0,8.028V10.08l.288.108,2.2.72.576,1.4L1.944,14.688l1.44,1.44.288-.144L5.724,14.94l1.4.576L8.028,18H10.08l.108-.288.72-2.2,1.4-.576,2.376,1.116,1.44-1.44-.144-.288L14.94,12.276l.576-1.4ZM9,12.95A3.95,3.95,0,1,1,12.95,9,3.947,3.947,0,0,1,9,12.95Z" fill="url(#b863127b-2eb8-42a1-a46b-989a6a8d258c)" />
</svg>

(deleted image: 909 B)

@@ -1,3 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 2048 2048" width="16" height="16">
<path d="M960 1920q-133 0-255-34t-230-96-194-150-150-195-97-229T0 960q0-133 34-255t96-230 150-194 195-150 229-97T960 0q133 0 255 34t230 96 194 150 150 195 97 229 34 256q0 133-34 255t-96 230-150 194-195 150-229 97-256 34zm0-1792q-115 0-221 30t-198 84-169 130-130 168-84 199-30 221q0 114 30 220t84 199 130 169 168 130 199 84 221 30q114 0 220-30t199-84 169-130 130-168 84-199 30-221q0-114-30-220t-84-199-130-169-168-130-199-84-221-30zm-64 640h128v640H896V768zm0-256h128v128H896V512z" />
</svg>

(deleted image: 581 B)

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7 5H2V0H3V3.3L3.8 2.4L4.6 1.7L5.4 1L6.3 0.5C6.59362 0.297586 6.94345 0.192635 7.3 0.2L8.5 0L9.9 0.2L11.3 0.8L12.4 1.6C12.7233 1.95924 12.9927 2.36344 13.2 2.8C13.459 3.20359 13.6609 3.64107 13.8 4.1C13.9226 4.55696 13.9897 5.027 14 5.5C13.9637 6.21504 13.8291 6.92168 13.6 7.6C13.2924 8.22655 12.8874 8.80035 12.4 9.3L5.9 15.9L5.1 15.1L11.7 8.6C12.0904 8.19804 12.3964 7.72203 12.6 7.2C12.8748 6.67624 13.0125 6.09136 13 5.5C13.0218 4.90769 12.8836 4.32046 12.6 3.8C12.4219 3.23995 12.072 2.75016 11.6 2.4C11.2498 1.928 10.76 1.57815 10.2 1.4C9.67954 1.11642 9.09231 0.978247 8.5 1C7.9834 0.981133 7.4696 1.08389 7 1.3L5.8 2L4.8 2.9L3.7 4H7V5Z" fill="#0078D4"/>
</svg>

(deleted image: 775 B)

@@ -1,3 +0,0 @@
<svg width="16" height="14" viewBox="0 0 16 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M14 0H14.4L14.7 0.2L14.9 0.5C14.9524 0.570883 14.9885 0.652432 15.0058 0.738849C15.023 0.825265 15.0211 0.914429 15 1V14H2.8L0.999997 12.2V1C0.985033 0.85904 1.02046 0.717335 1.1 0.6L1.3 0.3L1.6 0.1H14V0ZM14 1H13V7H3V1H2V11.8L3.2 13H4V9H11V13H14V1ZM4 6H12V1H4V6ZM10 10H5V13H6V11H7V13H10V10Z" fill="#0078D4"/>
</svg>

(deleted image: 421 B)
@@ -8,5 +8,5 @@
not_numbered: true
expand_sections: true
sections:
- title: TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter
- title: TSG100 - The Azure Arc Postgres troubleshooter
  url: postgres/tsg100-troubleshoot-postgres

@@ -3,5 +3,5 @@
- This chapter contains notebooks for troubleshooting Postgres on Azure Arc

## Notebooks in this Chapter
- [TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter](tsg100-troubleshoot-postgres.ipynb)
- [TSG100 - The Azure Arc Postgres troubleshooter](tsg100-troubleshoot-postgres.ipynb)

@@ -3,5 +3,5 @@
not_numbered: true
expand_sections: true
sections:
- title: TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter
- title: TSG100 - The Azure Arc Postgres troubleshooter
  url: postgres/tsg100-troubleshoot-postgres
@@ -4,14 +4,13 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter\n",
"===================================================================\n",
"TSG100 - The Azure Arc Postgres troubleshooter\n",
"==============================================\n",
"\n",
"Description\n",
"-----------\n",
"\n",
"Follow these steps to troubleshoot an Azure Arc enabled PostgreSQL\n",
"Hyperscale Server.\n",
"Follow these steps to troubleshoot an Azure Arc Postgres Server.\n",
"\n",
"Steps\n",
"-----\n",
@@ -35,7 +34,6 @@
"# the user will be prompted to select a server.\n",
"namespace = os.environ.get('POSTGRES_SERVER_NAMESPACE')\n",
"name = os.environ.get('POSTGRES_SERVER_NAME')\n",
"version = os.environ.get('POSTGRES_SERVER_VERSION')\n",
"\n",
"tail_lines = 50"
]
@@ -145,7 +143,7 @@
" if cmd.startswith(\"kubectl \") and \"AZDATA_OPENSHIFT\" in os.environ:\n",
" cmd_actual[0] = cmd_actual[0].replace(\"kubectl\", \"oc\")\n",
"\n",
" # To aid supportability, determine which binary file will actually be executed on the machine\n",
" # To aid supportabilty, determine which binary file will actually be executed on the machine\n",
" #\n",
" which_binary = None\n",
"\n",
@@ -402,11 +400,11 @@
"import math\n",
"\n",
"# If a server was provided, get it\n",
"if namespace and name and version:\n",
" server = json.loads(run(f'kubectl get postgresql-{version} -n {namespace} {name} -o json', return_output=True))\n",
"if namespace and name:\n",
" server = json.loads(run(f'kubectl get dbs -n {namespace} {name} -o json', return_output=True))\n",
"else:\n",
" # Otherwise prompt the user to select a server\n",
" servers = json.loads(run(f'kubectl get postgresqls --all-namespaces -o json', return_output=True))['items']\n",
" servers = json.loads(run(f'kubectl get dbs --all-namespaces -o json', return_output=True))['items']\n",
" if not servers:\n",
" raise Exception('No Postgres servers found')\n",
"\n",
@@ -427,7 +425,6 @@
" server = servers[i-1]\n",
" namespace = server['metadata']['namespace']\n",
" name = server['metadata']['name']\n",
" version = server['kind'][len('postgresql-'):]\n",
" break\n",
"\n",
"display(Markdown(f'#### Got server {namespace}.{name}'))"
@@ -449,10 +446,10 @@
"uid = server['metadata']['uid']\n",
"\n",
"display(Markdown(f'#### Server summary'))\n",
"run(f'kubectl get postgresql-{version} -n {namespace} {name}')\n",
"run(f'kubectl get dbs -n {namespace} {name}')\n",
"\n",
"display(Markdown(f'#### Resource summary'))\n",
"run(f'kubectl get sts,pods,pvc,svc,ep -n {namespace} -l postgresqls.arcdata.microsoft.com/cluster-id={uid}')"
"run(f'kubectl get pods,pvc,svc,ep -n {namespace} -l dusky.microsoft.com/serviceId={uid}')"
]
},
{
@@ -469,7 +466,7 @@
"outputs": [],
"source": [
"display(Markdown(f'#### Troubleshooting server {namespace}.{name}'))\n",
"run(f'kubectl describe postgresql-{version} -n {namespace} {name}')"
"run(f'kubectl describe dbs -n {namespace} {name}')"
]
},
{
@@ -485,7 +482,7 @@
"metadata": {},
"outputs": [],
"source": [
"pods = json.loads(run(f'kubectl get pods -n {namespace} -l postgresqls.arcdata.microsoft.com/cluster-id={uid} -o json', return_output=True))['items']\n",
"pods = json.loads(run(f'kubectl get pods -n {namespace} -l dusky.microsoft.com/serviceId={uid} -o json', return_output=True))['items']\n",
"\n",
"# Summarize and describe each pod\n",
"for pod in pods:\n",
@@ -532,7 +529,8 @@
" con_restarts = con_status.get('restartCount', 0)\n",
"\n",
" display(Markdown(f'#### Troubleshooting container {namespace}.{pod_name}/{con_name} ({i+1}/{len(cons)})\\n'\n",
" f'#### {\"R\" if con_ready else \"Not r\"}eady with {con_restarts} restarts'))\n",
" f'#### {\"S\" if con_started else \"Not s\"}tarted and '\n",
" f'{\"\" if con_ready else \"not \"}ready with {con_restarts} restarts'))\n",
"\n",
" run(f'kubectl logs -n {namespace} {pod_name} {con_name} --tail {tail_lines}')\n",
"\n",
@@ -556,7 +554,7 @@
"outputs": [],
"source": [
"display(Markdown(f'#### Troubleshooting PersistentVolumeClaims'))\n",
"run(f'kubectl describe pvc -n {namespace} -l postgresqls.arcdata.microsoft.com/cluster-id={uid}')"
"run(f'kubectl describe pvc -n {namespace} -l dusky.microsoft.com/serviceId={uid}')"
]
},
{
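The troubleshooter cells above all follow one pattern: shell out to `kubectl ... -o json`, parse the result, and feed the parsed fields into the next command. A sketch of the same loop in TypeScript (the `dusky.microsoft.com/serviceId` label and `--tail` value are taken from the notebook; the namespace and uid values are placeholders, and `kubectl` is assumed to be on PATH):

```typescript
import { execFileSync } from 'child_process';

// Mirror of the notebook's run(..., return_output=True): run kubectl, parse its JSON output.
function kubectlJson(args: string[]): any {
	return JSON.parse(execFileSync('kubectl', [...args, '-o', 'json'], { encoding: 'utf8' }));
}

const namespace = 'arc'; // placeholder
const uid = '00000000-0000-0000-0000-000000000000'; // placeholder server uid

// Find the pods that belong to one server via its label, then tail each pod's logs.
const pods = kubectlJson(['get', 'pods', '-n', namespace, '-l', `dusky.microsoft.com/serviceId=${uid}`]).items;
for (const pod of pods) {
	execFileSync('kubectl', ['logs', '-n', namespace, pod.metadata.name, '--tail', '50'], { stdio: 'inherit' });
}
```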
@@ -47,7 +47,7 @@
"|Tools|Description|Installation|\n",
"|---|---|---|\n",
"|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n",
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
"|azdata | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://github.com/microsoft/Azure-data-services-on-Azure-Arc/blob/master/scenarios/001-install-client-tools.md) |"
],
"metadata": {
"azdata_cell_guid": "714582b9-10ee-409e-ab12-15a4825c9471"
@@ -65,7 +65,13 @@
{
"cell_type": "code",
"source": [
"import sys,os,json,html,getpass,time, tempfile\n",
"import pandas,sys,os,json,html,getpass,time, tempfile\n",
"pandas_version = pandas.__version__.split('.')\n",
"pandas_major = int(pandas_version[0])\n",
"pandas_minor = int(pandas_version[1])\n",
"pandas_patch = int(pandas_version[2])\n",
"if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\n",
" sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\n",
"def run_command(command):\n",
" print(\"Executing: \" + command)\n",
" !{command}\n",
@@ -84,7 +90,7 @@
"cell_type": "markdown",
"source": [
"### **Set variables**\n",
"Generated by Azure Data Studio using the values collected in the 'Create Azure Arc data controller' wizard."
"Generated by Azure Data Studio using the values collected in the Azure Arc Data controller create wizard"
],
"metadata": {
"azdata_cell_guid": "4b266b2d-bd1b-4565-92c9-3fc146cdce6d"
@@ -123,22 +129,18 @@
{
"cell_type": "code",
"source": [
"if \"AZDATA_NB_VAR_ARC_DOCKER_PASSWORD\" in os.environ:\n",
" arc_docker_password = os.environ[\"AZDATA_NB_VAR_ARC_DOCKER_PASSWORD\"]\n",
"if \"AZDATA_NB_VAR_ARC_ADMIN_PASSWORD\" in os.environ:\n",
" arc_admin_password = os.environ[\"AZDATA_NB_VAR_ARC_ADMIN_PASSWORD\"]\n",
"else:\n",
" if arc_admin_password == \"\":\n",
" arc_admin_password = getpass.getpass(prompt = 'Azure Arc Data Controller password')\n",
" arc_admin_password = getpass.getpass(prompt = 'Azure Arc Data controller password')\n",
" if arc_admin_password == \"\":\n",
" sys.exit(f'Password is required.')\n",
" confirm_password = getpass.getpass(prompt = 'Confirm password')\n",
" if arc_admin_password != confirm_password:\n",
" sys.exit(f'Passwords do not match.')\n",
"\n",
"os.environ[\"SPN_CLIENT_ID\"] = sp_client_id\n",
"os.environ[\"SPN_TENANT_ID\"] = sp_tenant_id\n",
"if \"AZDATA_NB_VAR_SP_CLIENT_SECRET\" in os.environ:\n",
" os.environ[\"SPN_CLIENT_SECRET\"] = os.environ[\"AZDATA_NB_VAR_SP_CLIENT_SECRET\"]\n",
"os.environ[\"SPN_AUTHORITY\"] = \"https://login.microsoftonline.com\""
" sys.exit(f'Passwords do not match.')"
],
"metadata": {
"azdata_cell_guid": "e7e10828-6cae-45af-8c2f-1484b6d4f9ac",
@@ -173,7 +175,7 @@
{
"cell_type": "markdown",
"source": [
"### **Create Azure Arc Data Controller**"
"### **Create Azure Arc Data controller**"
],
"metadata": {
"azdata_cell_guid": "efe78cd3-ed73-4c9b-b586-fdd6c07dd37f"
@@ -182,14 +184,16 @@
{
"cell_type": "code",
"source": [
"print (f'Creating Azure Arc Data Controller: {arc_data_controller_name} using configuration {arc_cluster_context}')\n",
"print (f'Creating Azure Arc controller: {arc_data_controller_name} using configuration {arc_cluster_context}')\n",
"os.environ[\"ACCEPT_EULA\"] = 'yes'\n",
"os.environ[\"AZDATA_USERNAME\"] = arc_admin_username\n",
"os.environ[\"AZDATA_PASSWORD\"] = arc_admin_password\n",
"os.environ[\"DOCKER_USERNAME\"] = arc_docker_username\n",
"os.environ[\"DOCKER_PASSWORD\"] = arc_docker_password\n",
"if os.name == 'nt':\n",
" print(f'If you don\\'t see output produced by azdata, you can run the following command in a terminal window to check the deployment status:\\n\\t {os.environ[\"AZDATA_NB_VAR_KUBECTL\"]} get pods -n {arc_data_controller_namespace}')\n",
"run_command(f'azdata arc dc create --connectivity-mode {arc_data_controller_connectivity_mode} -n {arc_data_controller_name} -ns {arc_data_controller_namespace} -s {arc_subscription} -g {arc_resource_group} -l {arc_data_controller_location} -sc {arc_data_controller_storage_class} --profile-name {arc_profile}')\n",
"print(f'Azure Arc Data Controller: {arc_data_controller_name} created.') "
"print(f'Azure Arc Data controller cluster: {arc_data_controller_name} created.') "
],
"metadata": {
"azdata_cell_guid": "373947a1-90b9-49ee-86f4-17a4c7d4ca76",
@@ -201,7 +205,7 @@
{
"cell_type": "markdown",
"source": [
"### **Setting context to created Azure Arc Data Controller**"
"### **Setting context to created Azure Arc Data controller**"
],
"metadata": {
"azdata_cell_guid": "a3ddc701-811d-4058-b3fb-b7295fcf50ae"
@@ -210,7 +214,7 @@
{
"cell_type": "code",
"source": [
"# Setting context to Data Controller.\n",
"# Setting context to data controller.\n",
"#\n",
"run_command(f'kubectl config set-context --current --namespace {arc_data_controller_namespace}')"
],
@@ -223,7 +227,7 @@
{
"cell_type": "markdown",
"source": [
"### **Login to the Data Controller.**\n"
"### **Login to the data controller.**\n"
],
"metadata": {
"azdata_cell_guid": "9376b2ab-0edf-478f-9e3c-5ff46ae3501a"
@@ -232,9 +236,9 @@
{
"cell_type": "code",
"source": [
"# Login to the Data Controller.\n",
"# Login to the data controller.\n",
"#\n",
"run_command(f'azdata login --namespace {arc_data_controller_namespace}')"
"run_command(f'azdata login -n {arc_data_controller_namespace}')"
],
"metadata": {
"azdata_cell_guid": "9aed0c5a-2c8a-4ad7-becb-60281923a196"
@@ -243,4 +247,4 @@
"execution_count": null
}
]
}
}
@@ -2,8 +2,7 @@
"metadata": {
"kernelspec": {
"name": "python3",
"display_name": "Python 3",
"language": "python"
"display_name": "Python 3"
},
"language_info": {
"name": "python",
@@ -26,12 +25,12 @@
"source": [
"\n",
" \n",
"## Create a PostgreSQL Hyperscale - Azure Arc on an existing Azure Arc Data Controller\n",
"## Deploy a PostgreSQL server group on an existing Azure Arc data cluster\n",
" \n",
"This notebook walks through the process of creating a PostgreSQL Hyperscale - Azure Arc on an existing Azure Arc Data Controller.\n",
"This notebook walks through the process of deploying a PostgreSQL server group on an existing Azure Arc data cluster.\n",
" \n",
"* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n",
"* Make sure you have the target Azure Arc Data Controller already created.\n",
"* Make sure you have the target Azure Arc data cluster already created.\n",
"\n",
"<span style=\"color:red\"><font size=\"3\">Please press the \"Run All\" button to run the notebook</font></span>"
],
@@ -42,21 +41,7 @@
{
"cell_type": "markdown",
"source": [
"### **Prerequisites** \n",
"Ensure the following tools are installed and added to PATH before proceeding.\n",
" \n",
"|Tools|Description|Installation|\n",
"|---|---|---|\n",
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
],
"metadata": {
"azdata_cell_guid": "20fe3985-a01e-461c-bce0-235f7606cc3c"
}
},
{
"cell_type": "markdown",
"source": [
"### **Setup and Check Prerequisites**"
"### **Check prerequisites**"
],
"metadata": {
"azdata_cell_guid": "68531b91-ddce-47d7-a1d8-2ddc3d17f3e7"
@@ -90,20 +75,100 @@
{
"cell_type": "markdown",
"source": [
"### **Set variables**\n",
"\n",
"#### \n",
"\n",
"Generated by Azure Data Studio using the values collected in the 'Deploy PostgreSQL Hyperscale - Azure Arc instance' wizard"
"#### **Ensure Postgres Server Group name and password exist**"
],
"metadata": {
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
}
},
{
"cell_type": "code",
"source": [
"# Required Values\n",
"env_var = \"AZDATA_NB_VAR_CONTROLLER_ENDPOINT\" in os.environ\n",
"if env_var:\n",
" controller_endpoint = os.environ[\"AZDATA_NB_VAR_CONTROLLER_ENDPOINT\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_ENDPOINT was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_CONTROLLER_USERNAME\" in os.environ\n",
"if env_var:\n",
" controller_username = os.environ[\"AZDATA_NB_VAR_CONTROLLER_USERNAME\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_USERNAME was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_CONTROLLER_PASSWORD\" in os.environ\n",
"if env_var:\n",
" controller_password = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_PASSWORD was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME\" in os.environ\n",
"if env_var:\n",
" server_group_name = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\" in os.environ\n",
"if env_var:\n",
" postgres_password = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD was not defined. Exiting\\n') \n",
"\n",
"env_var = \"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA\" in os.environ\n",
"if env_var:\n",
" postgres_storage_class_data = os.environ[\"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA was not defined. Exiting\\n') \n",
"env_var = \"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS\" in os.environ\n",
"if env_var:\n",
" postgres_storage_class_logs = os.environ[\"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS was not defined. Exiting\\n') \n",
"env_var = \"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS\" in os.environ\n",
"if env_var:\n",
" postgres_storage_class_backups = os.environ[\"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS was not defined. Exiting\\n') \n",
""
],
"metadata": {
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
"tags": []
},
"outputs": [],
"execution_count": null
},
{
"cell_type": "markdown",
"source": [
"### **Creating the PostgreSQL Hyperscale - Azure Arc instance**"
"#### **Get optional parameters for the PostgreSQL server group**"
],
"metadata": {
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
}
},
{
"cell_type": "code",
"source": [
"server_group_workers = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_WORKERS\"]\n",
"server_group_port = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PORT\")\n",
"server_group_cores_request = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_REQUEST\")\n",
"server_group_cores_limit = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_LIMIT\")\n",
"server_group_memory_request = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_REQUEST\")\n",
"server_group_memory_limit = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_LIMIT\")"
],
"metadata": {
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
"tags": []
},
"outputs": [],
"execution_count": null
},
{
"cell_type": "markdown",
"source": [
"### **Installing PostgreSQL server group**"
],
"metadata": {
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
@@ -114,9 +179,7 @@
"source": [
"# Login to the data controller.\n",
"#\n",
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
"os.environ[\"KUBECONFIG\"] = controller_kubeconfig\n",
"os.environ[\"KUBECTL_CONTEXT\"] = controller_kubectl_context\n",
"os.environ[\"AZDATA_PASSWORD\"] = controller_password\n",
"cmd = f'azdata login -e {controller_endpoint} -u {controller_username}'\n",
"out=run_command()"
],
@@ -129,22 +192,17 @@
{
"cell_type": "code",
"source": [
"print (f'Creating the PostgreSQL Hyperscale - Azure Arc instance')\n",
"print (f'Creating a PostgreSQL server group on Azure Arc')\n",
"\n",
"workers_option = f' -w {postgres_server_group_workers}' if postgres_server_group_workers else \"\"\n",
"port_option = f' --port \"{postgres_server_group_port}\"' if postgres_server_group_port else \"\"\n",
"engine_version_option = f' -ev {postgres_server_group_engine_version}' if postgres_server_group_engine_version else \"\"\n",
"extensions_option = f' --extensions \"{postgres_server_group_extensions}\"' if postgres_server_group_extensions else \"\"\n",
"volume_size_data_option = f' -vsd {postgres_server_group_volume_size_data}Gi' if postgres_server_group_volume_size_data else \"\"\n",
"volume_size_logs_option = f' -vsl {postgres_server_group_volume_size_logs}Gi' if postgres_server_group_volume_size_logs else \"\"\n",
"volume_size_backups_option = f' -vsb {postgres_server_group_volume_size_backups}Gi' if postgres_server_group_volume_size_backups else \"\"\n",
"cores_request_option = f' -cr \"{postgres_server_group_cores_request}\"' if postgres_server_group_cores_request else \"\"\n",
"cores_limit_option = f' -cl \"{postgres_server_group_cores_limit}\"' if postgres_server_group_cores_limit else \"\"\n",
"memory_request_option = f' -mr \"{postgres_server_group_memory_request}Gi\"' if postgres_server_group_memory_request else \"\"\n",
"memory_limit_option = f' -ml \"{postgres_server_group_memory_limit}Gi\"' if postgres_server_group_memory_limit else \"\"\n",
"workers_option = f' -w {server_group_workers}' if server_group_workers else \"\"\n",
"port_option = f' --port \"{server_group_port}\"' if server_group_port else \"\"\n",
"cores_request_option = f' -cr \"{server_group_cores_request}\"' if server_group_cores_request else \"\"\n",
"cores_limit_option = f' -cl \"{server_group_cores_limit}\"' if server_group_cores_limit else \"\"\n",
"memory_request_option = f' -mr \"{server_group_memory_request}Mi\"' if server_group_memory_request else \"\"\n",
"memory_limit_option = f' -ml \"{server_group_memory_limit}Mi\"' if server_group_memory_limit else \"\"\n",
"\n",
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\"]\n",
"cmd = f'azdata arc postgres server create -n {postgres_server_group_name} -scd {postgres_storage_class_data} -scl {postgres_storage_class_logs} -scb {postgres_storage_class_backups}{workers_option}{port_option}{engine_version_option}{extensions_option}{volume_size_data_option}{volume_size_logs_option}{volume_size_backups_option}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
"os.environ[\"AZDATA_PASSWORD\"] = postgres_password\n",
"cmd = f'azdata arc postgres server create -n {server_group_name} -scd {postgres_storage_class_data} -scl {postgres_storage_class_logs} -scb {postgres_storage_class_backups}{workers_option}{port_option}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
"out=run_command()"
],
"metadata": {
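The create cell above builds the `azdata arc postgres server create` command by concatenating one flag fragment per optional value, so unset parameters simply disappear from the command line. The same technique as a TypeScript sketch (the flag names and environment variable names are the notebook's; the helper itself is hypothetical):

```typescript
// Emit ' <flag> <value><unit>' only when the option was actually provided.
function flag(name: string, value: string | undefined, unit = ''): string {
	return value ? ` ${name} ${value}${unit}` : '';
}

const env = process.env;
const cmd = 'azdata arc postgres server create'
	+ ` -n ${env.AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME}`
	+ flag('-w', env.AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_WORKERS)
	+ flag('--port', env.AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PORT)
	+ flag('-cr', env.AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_REQUEST)
	+ flag('-cl', env.AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_LIMIT)
	+ flag('-mr', env.AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_REQUEST, 'Mi')
	+ flag('-ml', env.AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_LIMIT, 'Mi');
```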
@@ -2,8 +2,7 @@
"metadata": {
"kernelspec": {
"name": "python3",
"display_name": "Python 3",
"language": "python"
"display_name": "Python 3"
},
"language_info": {
"name": "python",
@@ -26,12 +25,12 @@
"source": [
"\n",
" \n",
"## Create SQL managed instance - Azure Arc on an existing Azure Arc Data Controller\n",
"## Deploy Azure SQL managed instance on an existing Azure Arc data cluster\n",
" \n",
"This notebook walks through the process of creating a <a href=\"https://docs.microsoft.com/azure/sql-database/sql-database-managed-instance\">SQL managed instance - Azure Arc</a> on an existing Azure Arc Data Controller.\n",
"This notebook walks through the process of deploying a <a href=\"https://docs.microsoft.com/azure/sql-database/sql-database-managed-instance\">Azure SQL managed instance</a> on an existing Azure Arc data cluster.\n",
" \n",
"* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n",
"* Make sure you have the target Azure Arc Data Controller already created.\n",
"* Make sure you have the target Azure Arc data cluster already created.\n",
"\n",
"<span style=\"color:red\"><font size=\"3\">Please press the \"Run All\" button to run the notebook</font></span>"
],
@@ -42,21 +41,7 @@
{
"cell_type": "markdown",
"source": [
"### **Prerequisites** \n",
"Ensure the following tools are installed and added to PATH before proceeding.\n",
" \n",
"|Tools|Description|Installation|\n",
"|---|---|---|\n",
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
],
"metadata": {
"azdata_cell_guid": "d1c8258e-9efd-4380-a48c-cd675423ed2f"
}
},
{
"cell_type": "markdown",
"source": [
"### **Setup and Check Prerequisites**"
"### **Check prerequisites**"
],
"metadata": {
"azdata_cell_guid": "68531b91-ddce-47d7-a1d8-2ddc3d17f3e7"
@@ -90,20 +75,70 @@
{
"cell_type": "markdown",
"source": [
"### **Set variables**\n",
"\n",
"#### \n",
"\n",
"Generated by Azure Data Studio using the values collected in the 'Deploy Azure SQL managed instance - Azure Arc' wizard"
"#### **Ensure SQL instance name, username and password exist**"
],
"metadata": {
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
}
},
{
"cell_type": "code",
"source": [
"# Required Values\n",
"env_var = \"AZDATA_NB_VAR_CONTROLLER_ENDPOINT\" in os.environ\n",
"if env_var:\n",
" controller_endpoint = os.environ[\"AZDATA_NB_VAR_CONTROLLER_ENDPOINT\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_ENDPOINT was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_CONTROLLER_USERNAME\" in os.environ\n",
"if env_var:\n",
" controller_username = os.environ[\"AZDATA_NB_VAR_CONTROLLER_USERNAME\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_USERNAME was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_CONTROLLER_PASSWORD\" in os.environ\n",
"if env_var:\n",
" controller_password = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_PASSWORD was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_SQL_INSTANCE_NAME\" in os.environ\n",
"if env_var:\n",
" mssql_instance_name = os.environ[\"AZDATA_NB_VAR_SQL_INSTANCE_NAME\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_INSTANCE_NAME was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_SQL_PASSWORD\" in os.environ\n",
"if env_var:\n",
" mssql_password = os.environ[\"AZDATA_NB_VAR_SQL_PASSWORD\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_PASSWORD was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA\" in os.environ\n",
"if env_var:\n",
" mssql_storage_class_data = os.environ[\"AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA was not defined. Exiting\\n')\n",
"\n",
"env_var = \"AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS\" in os.environ\n",
"if env_var:\n",
" mssql_storage_class_logs = os.environ[\"AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS\"]\n",
"else:\n",
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS was not defined. Exiting\\n') \n",
""
],
"metadata": {
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
"tags": []
},
"outputs": [],
"execution_count": null
},
{
"cell_type": "markdown",
"source": [
"### **Creating the SQL managed instance - Azure Arc instance**"
"### **Installing Managed SQL Instance**"
],
"metadata": {
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
@@ -114,9 +149,7 @@
"source": [
"# Login to the data controller.\n",
"#\n",
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
"os.environ[\"KUBECONFIG\"] = controller_kubeconfig\n",
"os.environ[\"KUBECTL_CONTEXT\"] = controller_kubectl_context\n",
"os.environ[\"AZDATA_PASSWORD\"] = controller_password\n",
"cmd = f'azdata login -e {controller_endpoint} -u {controller_username}'\n",
"out=run_command()"
],
@@ -129,16 +162,10 @@
{
"cell_type": "code",
"source": [
"print (f'Creating the SQL managed instance - Azure Arc instance')\n",
"print (f'Creating Managed SQL Server instance on Azure Arc')\n",
"\n",
"cores_request_option = f' -cr \"{sql_cores_request}\"' if sql_cores_request else \"\"\n",
"cores_limit_option = f' -cl \"{sql_cores_limit}\"' if sql_cores_limit else \"\"\n",
"memory_request_option = f' -mr \"{sql_memory_request}Gi\"' if sql_memory_request else \"\"\n",
"memory_limit_option = f' -ml \"{sql_memory_limit}Gi\"' if sql_memory_limit else \"\"\n",
"\n",
"os.environ[\"AZDATA_USERNAME\"] = sql_username\n",
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_SQL_PASSWORD\"]\n",
"cmd = f'azdata arc sql mi create -n {sql_instance_name} -scd {sql_storage_class_data} -scl {sql_storage_class_logs}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
"os.environ[\"AZDATA_PASSWORD\"] = mssql_password\n",
"cmd = f'azdata arc sql mi create -n {mssql_instance_name} -scd {mssql_storage_class_data} -scl {mssql_storage_class_logs}'\n",
"out=run_command()"
],
"metadata": {
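Both deployment notebooks repeat the same five-line existence check for every required environment variable. The fail-fast idea factors naturally into a helper; a TypeScript sketch (the variable names are the notebooks', the helper itself is hypothetical):

```typescript
function requireEnv(name: string): string {
	const value = process.env[name];
	if (value === undefined) {
		// Same behavior as the notebooks: stop immediately with a pointed message.
		throw new Error(`environment variable: ${name} was not defined. Exiting`);
	}
	return value;
}

const controllerEndpoint = requireEnv('AZDATA_NB_VAR_CONTROLLER_ENDPOINT');
const controllerUsername = requireEnv('AZDATA_NB_VAR_CONTROLLER_USERNAME');
const mssqlInstanceName = requireEnv('AZDATA_NB_VAR_SQL_INSTANCE_NAME');
```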
File diff suppressed because it is too large
@@ -12,89 +12,74 @@
"command.editConnection.title": "Edit Connection",
"arc.openDashboard": "Manage",

"resource.type.azure.arc.display.name": "Azure Arc data controller (preview)",
"resource.type.azure.arc.display.name": "Azure Arc data controller",
"resource.type.azure.arc.description": "Creates an Azure Arc data controller",
"arc.data.controller.new.wizard.title": "Create Azure Arc data controller",
"arc.data.controller.cluster.environment.title": "What is your target existing Kubernetes cluster environment?",
"arc.data.controller.select.cluster.title": "Select from existing Kubernetes clusters",
"arc.data.controller.kube.cluster.context": "Cluster context",
"arc.data.controller.cluster.config.profile.title": "Choose the config profile",
"arc.data.controller.cluster.config.profile": "Config profile",
"arc.data.controller.cluster.config.profile.loading": "Loading config profiles",
"arc.data.controller.cluster.config.profile.loadingcompleted": "Loading config profiles complete",
"arc.data.controller.create.azureconfig.title": "Azure and Connectivity Configuration",
"arc.data.controller.connectivitymode.description": "Select the connectivity mode for the controller.",
"arc.data.controller.create.controllerconfig.title": "Controller Configuration",
"arc.data.controller.project.details.title": "Azure details",
"arc.data.controller.project.details.description": "Select the subscription to manage deployed resources and costs. Use resource groups like folders to organize and manage all your resources.",
"arc.data.controller.details.title": "Data controller details",
"arc.data.controller.details.description": "Provide a namespace, name and storage class for your Azure Arc data controller. This name will be used to identify your Arc instance for remote management and monitoring.",
"arc.data.controller.namespace": "Data controller namespace",
"arc.data.controller.namespace.validation.description": "Namespace must consist of lower case alphanumeric characters or '-', start/end with an alphanumeric character, and be 63 characters or fewer in length.",
"arc.data.controller.name": "Data controller name",
"arc.data.controller.name.validation.description": "Name must consist of lower case alphanumeric characters, '-' or '.', start/end with an alphanumeric character and be 253 characters or less in length.",
"arc.data.controller.location": "Location",
"arc.data.controller.admin.account.title": "Administrator account",
"arc.data.controller.admin.account.name": "Data controller login",
"arc.data.controller.admin.account.password": "Password",
"arc.data.controller.admin.account.confirm.password": "Confirm password",
"arc.data.controller.connectivitymode": "Connectivity Mode",
"arc.data.controller.direct": "Direct",
"arc.data.controller.indirect": "Indirect",
"arc.data.controller.serviceprincipal.description": "When deploying a controller in direct connected mode a Service Principal is required for connecting to Azure. {0} about how to create this Service Principal and assign it the correct roles.",
"arc.data.controller.spclientid": "Service Principal Client ID",
"arc.data.controller.spclientid.description": "The Client (application) ID of the created Service Principal",
"arc.data.controller.spclientid.validation.description": "The Client ID must be a GUID in the format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
"arc.data.controller.spclientsecret": "Service Principal Client Secret",
"arc.data.controller.spclientsecret.description": "The secret (password) of the Service Principal",
"arc.data.controller.sptenantid": "Service Principal Tenant ID",
"arc.data.controller.sptenantid.description": "The Tenant ID of the Service Principal. This must be the same as the Tenant ID of the subscription selected to create this controller for.",
"arc.data.controller.sptenantid.validation.description": "The tenant ID must be a GUID in the format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
"arc.data.controller.create.summary.title": "Review your configuration",
"arc.data.controller.summary.arc.data.controller": "Azure Arc data controller",
"arc.data.controller.summary.estimated.cost.per.month": "Estimated cost per month",
"arc.data.controller.summary.arc.by.microsoft" : "by Microsoft",
"arc.data.controller.summary.free" : "Free",
"arc.data.controller.summary.arc.terms.of.use" : "Terms of use",
"arc.data.controller.summary.arc.terms.separator" : "|",
"arc.data.controller.summary.arc.terms.privacy.policy" : "Privacy policy",
"arc.data.controller.summary.terms" : "Terms",
"arc.data.controller.summary.terms.description": "By clicking 'Script to notebook', I (a) agree to the legal terms and privacy statement(s) associated with the Marketplace offering(s) listed above; (b) authorize Microsoft to bill my current payment method for the fees associated with the offering(s), with the same billing frequency as my Azure subscription; and (c) agree that Microsoft may share my contact, usage and transactional information with the provider(s) of the offering(s) for support, billing and other transactional activities. Microsoft does not provide rights for third-party offerings. For additional details see {0}.",
"arc.data.controller.summary.terms.link.text": "Azure Marketplace Terms",
"arc.data.controller.summary.kubernetes": "Kubernetes",
"arc.data.controller.summary.kube.config.file.path": "Kube config file path",
"arc.data.controller.summary.cluster.context": "Cluster context",
"arc.data.controller.summary.profile": "Config profile",
"arc.data.controller.summary.username": "Username",
"arc.data.controller.summary.azure": "Azure",
"arc.data.controller.summary.subscription": "Subscription",
"arc.data.controller.summary.resource.group": "Resource group",
"arc.data.controller.summary.data.controller.name": "Data controller name",
"arc.data.controller.summary.data.controller.namespace": "Data controller namespace",
"arc.data.controller.summary.controller": "Controller",
"arc.data.controller.summary.location": "Location",
"arc.data.controller.agreement": "I accept {0} and {1}.",
"arc.data.controller.readmore": "Read more",

"arc.control.plane.new.wizard.title": "Create Azure Arc data controller",
"arc.control.plane.cluster.environment.title": "What is your target existing Kubernetes cluster environment?",
"arc.control.plane.select.cluster.title": "Select from existing Kubernetes clusters",
"arc.control.plane.kube.cluster.context": "Cluster context",
"arc.control.plane.container.registry.title": "Container registry details",
"arc.control.plane.container.registry.name": "Container registry login",
"arc.control.plane.container.registry.password": "Container registry password",
"arc.control.plane.cluster.config.profile.title": "Choose the config profile",
"arc.control.plane.cluster.config.profile": "Config profile",
"arc.control.plane.data.controller.create.title": "Provide details to create Azure Arc data controller",
"arc.control.plane.project.details.title": "Project details",
"arc.control.plane.project.details.description": "Select the subscription to manage deployed resources and costs. Use resource groups like folders to organize and manage all your resources.",
"arc.control.plane.data.controller.details.title": "Data controller details",
"arc.control.plane.data.controller.details.description": "Provide an Azure region and a name for your Azure Arc data controller. This name will be used to identify your Arc location for remote management and monitoring.",
"arc.control.plane.arc.data.controller.connectivity.mode": "Data controller connectivity mode",
"arc.control.plane.arc.data.controller.namespace": "Data controller namespace",
"arc.control.plane.arc.data.controller.namespace.validation.description": "Data controller namespace (lower case letters, digits and - only)",
"arc.control.plane.arc.data.controller.name": "Data controller name",
"arc.control.plane.arc.data.controller.name.validation.description": "Data controller name (lower case letters, digits and - only)",
"arc.control.plane.arc.data.controller.location": "Location",
"arc.control.plane.admin.account.title": "Administrator account",
"arc.control.plane.admin.account.name": "Data controller login",
"arc.control.plane.admin.account.password": "Password",
"arc.control.plane.admin.account.confirm.password": "Confirm password",
"arc.control.plane.data.controller.create.summary.title": "Review your configuration",
"arc.control.plane.summary.arc.data.controller": "Azure Arc data controller",
"arc.control.plane.summary.estimated.cost.per.month": "Estimated cost per month",
"arc.control.plane.summary.arc.by.microsoft" : "by Microsoft",
"arc.control.plane.summary.free" : "Free",
"arc.control.plane.summary.arc.terms.of.use" : "Terms of use",
"arc.control.plane.summary.arc.terms.separator" : "|",
"arc.control.plane.summary.arc.terms.privacy.policy" : "Privacy policy",
"arc.control.plane.summary.terms" : "Terms",
"arc.control.plane.summary.terms.description": "By clicking 'Script to notebook', I (a) agree to the legal terms and privacy statement(s) associated with the Marketplace offering(s) listed above; (b) authorize Microsoft to bill my current payment method for the fees associated with the offering(s), with the same billing frequency as my Azure subscription; and (c) agree that Microsoft may share my contact, usage and transactional information with the provider(s) of the offering(s) for support, billing and other transactional activities. Microsoft does not provide rights for third-party offerings. For additional details see {0}.",
"arc.control.plane.summary.terms.link.text": "Azure Marketplace Terms",
"arc.control.plane.summary.kubernetes": "Kubernetes",
"arc.control.plane.summary.kube.config.file.path": "Kube config file path",
"arc.control.plane.summary.cluster.context": "Cluster context",
"arc.control.plane.summary.profile": "Config profile",
"arc.control.plane.summary.username": "Username",
"arc.control.plane.summary.docker.username": "Docker username",
"arc.control.plane.summary.azure": "Azure",
"arc.control.plane.summary.subscription": "Subscription",
"arc.control.plane.summary.resource.group": "Resource group",
"arc.control.plane.summary.data.controller.connectivity.mode": "Data controller connectivity mode",
"arc.control.plane.summary.data.controller.name": "Data controller name",
"arc.control.plane.summary.data.controller.namespace": "Data controller namespace",
"arc.control.plane.summary.location": "Location",
"arc.control.plane.arc.data.controller.agreement": "I accept {0} and {1}.",
"microsoft.agreement.privacy.statement":"Microsoft Privacy Statement",
"deploy.script.action":"Script to notebook",
"deploy.done.action":"Deploy",
"deploy.arc.control.plane.action":"Script to notebook",

"resource.type.arc.sql.display.name": "Azure SQL managed instance - Azure Arc (preview)",
"resource.type.arc.postgres.display.name": "PostgreSQL Hyperscale server groups - Azure Arc (preview)",
"resource.type.arc.postgres.display.name": "PostgreSQL server groups - Azure Arc (preview)",
"resource.type.arc.sql.description": "Managed SQL Instance service for app developers in a customer-managed environment",
"resource.type.arc.postgres.description": "Deploy PostgreSQL Hyperscale server groups into an Azure Arc environment",
"resource.type.arc.postgres.description": "Deploy PostgreSQL server groups into an Azure Arc environment",
"arc.controller": "Target Azure Arc Controller",

"arc.sql.wizard.title": "Deploy Azure SQL managed instance - Azure Arc (preview)",
"arc.sql.wizard.page1.title": "Provide Azure SQL managed instance parameters",
"arc.sql.connection.settings.section.title": "SQL Connection information",
"arc.sql.instance.settings.section.title": "SQL Instance settings",
"arc.sql.new.dialog.title": "Deploy Azure SQL managed instance - Azure Arc (preview)",
"arc.sql.settings.section.title": "SQL Connection information",
"arc.azure.section.title": "Azure information",
"arc.sql.instance.name": "Instance name",
"arc.sql.instance.name": "Instance name (lower case letters and digits only)",
"arc.sql.username": "Username",
"arc.sql.invalid.username": "sa username is disabled, please choose another username",
"arc.sql.invalid.instance.name": "Instance name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 13 characters or fewer in length.",
"arc.storage-class.dc.label": "Storage Class",
"arc.sql.storage-class.dc.description": "The storage class to be used for all data and logs persistent volumes for all data controller pods that require them.",
"arc.storage-class.data.label": "Storage Class (Data)",
@@ -104,14 +89,6 @@
"arc.sql.storage-class.logs.description": "The storage class to be used for logs (/var/log)",
"arc.postgres.storage-class.logs.description": "The storage class to be used for logs persistent volumes",
"arc.storage-class.backups.label": "Storage Class (Backups)",
"arc.cores-limit.label": "Cores Limit",
"arc.sql.cores-limit.description": "The cores limit of the managed instance as an integer.",
"arc.cores-request.label": "Cores Request",
"arc.sql.cores-request.description": "The request for cores of the managed instance as an integer.",
"arc.memory-limit.label": "Memory Limit",
"arc.sql.memory-limit.description": "The limit of the capacity of the managed instance as an integer.",
"arc.memory-request.label": "Memory Request",
"arc.sql.memory-request.description": "The request for the capacity of the managed instance as an integer amount of memory in GBs.",
"arc.postgres.storage-class.backups.description": "The storage class to be used for backup persistent volumes",
"arc.password": "Password",
"arc.confirm.password": "Confirm password",
@@ -119,41 +96,19 @@
"arc.azure.subscription": "Azure subscription",
"arc.azure.resource.group": "Azure resource group",
"arc.azure.location": "Azure location",
"arc.postgres.wizard.title": "Deploy an Azure Arc enabled PostgreSQL Hyperscale server group (Preview)",
"arc.postgres.wizard.page1.title": "Provide Azure enabled PostgreSQL Hyperscale server group parameters",
"arc.postgres.settings.section.title": "General settings",
"arc.postgres.settings.resource.title": "Resource settings",
"arc.postgres.settings.storage.title": "Storage settings",
"arc.postgres.new.dialog.title": "Deploy a PostgreSQL server group on Azure Arc (preview)",
"arc.postgres.settings.section.title": "PostgreSQL server group settings",
"arc.postgres.settings.resource.title": "PostgreSQL server group resource settings",
"arc.postgres.server.group.name": "Server group name",
"arc.postgres.server.group.name.validation.description": "Server group name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 11 characters or fewer in length.",
"arc.postgres.server.group.workers.label": "Number of workers",
"arc.postgres.server.group.workers.description": "The number of worker nodes to provision in a sharded cluster, or zero (the default) for single-node Postgres.",
"arc.postgres.server.group.name.validation.description": "Server group name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 10 characters or fewer in length.",
"arc.postgres.server.group.workers": "Number of workers",
"arc.postgres.server.group.port": "Port",
"arc.postgres.server.group.engine.version": "Engine Version",
"arc.postgres.server.group.extensions.label": "Extensions",
"arc.postgres.server.group.extensions.description": "A comma-separated list of the Postgres extensions that should be loaded on startup. Please refer to the postgres documentation for supported values.",
"arc.postgres.server.group.volume.size.data.label": "Volume Size GB (Data)",
"arc.postgres.server.group.volume.size.data.description": "The size of the storage volume to be used for data in GB.",
"arc.postgres.server.group.volume.size.logs.label": "Volume Size GB (Logs)",
"arc.postgres.server.group.volume.size.logs.description": "The size of the storage volume to be used for logs in GB.",
"arc.postgres.server.group.volume.size.backups.label": "Volume Size GB (Backups)",
"arc.postgres.server.group.volume.size.backups.description": "The size of the storage volume to be used for backups in GB.",
"arc.postgres.server.group.cores.request.label": "CPU request (cores per node)",
"arc.postgres.server.group.cores.request.description": "The minimum number of CPU cores that must be available per node to schedule the service. Fractional cores are supported.",
"arc.postgres.server.group.cores.limit.label": "CPU limit (cores per node)",
"arc.postgres.server.group.cores.limit.description": "The maximum number of CPU cores for the Postgres instance that can be used per node. Fractional cores are supported.",
"arc.postgres.server.group.memory.request.label": "Memory request (GB per node)",
"arc.postgres.server.group.memory.request.description": "The memory request of the Postgres instance per node in GB.",
"arc.postgres.server.group.memory.limit.label": "Memory limit (GB per node)",
"arc.postgres.server.group.memory.limit.description": "The memory limit of the Postgres instance per node in GB.",
"arc.postgres.server.group.cores.request": "Min CPU cores (per node) to reserve",
"arc.postgres.server.group.cores.limit": "Max CPU cores (per node) to allow",
"arc.postgres.server.group.memory.request": "Min memory MB (per node) to reserve",
"arc.postgres.server.group.memory.limit": "Max memory MB (per node) to allow",
"arc.agreement": "I accept {0} and {1}.",
"arc.agreement.sql.terms.conditions": "Azure SQL managed instance - Azure Arc terms and conditions",
"arc.agreement.postgres.terms.conditions": "Azure Arc enabled PostgreSQL Hyperscale terms and conditions",
"should.be.integer": "Value must be an integer",
"requested.cores.less.than.or.equal.to.cores.limit": "Requested cores must be less than or equal to cores limit",
"cores.limit.greater.than.or.equal.to.requested.cores": "Cores limit must be greater than or equal to requested cores",
"requested.memory.less.than.or.equal.to.memory.limit": "Requested memory must be less than or equal to memory limit",
"memory.limit.greater.than.or.equal.to.requested.memory": "Memory limit must be greater than or equal to requested memory",
"arc.agreement.sql.help.text": "Azure Arc enabled Managed Instance provides SQL Server access and feature compatibility that can be deployed on the infrastructure of your choice. While this service is in preview, it has some feature limitations compared to SQL Managed Instance on Azure. {0}",
"arc.agreement.sql.help.text.learn.more": "Learn more"
"arc.agreement.sql.terms.conditions":"Azure SQL managed instance - Azure Arc terms and conditions",
"arc.agreement.postgres.terms.conditions":"PostgreSQL server groups - Azure Arc terms and conditions",
"arc.deploy.action":"Deploy"
}
@@ -1,48 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as arc from 'arc';
import * as rd from 'resource-deployment';
import * as loc from '../localizedConstants';
import { PasswordToControllerDialog } from '../ui/dialogs/connectControllerDialog';
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';
import { ControllerTreeNode } from '../ui/tree/controllerTreeNode';

export class UserCancelledError extends Error implements rd.ErrorWithType {
	public get type(): rd.ErrorType {
		return rd.ErrorType.userCancelled;
	}
}
export function arcApi(treeDataProvider: AzureArcTreeDataProvider): arc.IExtension {
	return {
		getRegisteredDataControllers: () => getRegisteredDataControllers(treeDataProvider),
		getControllerPassword: (controllerInfo: arc.ControllerInfo) => getControllerPassword(treeDataProvider, controllerInfo),
		reacquireControllerPassword: (controllerInfo: arc.ControllerInfo) => reacquireControllerPassword(treeDataProvider, controllerInfo)
	};
}

export async function reacquireControllerPassword(treeDataProvider: AzureArcTreeDataProvider, controllerInfo: arc.ControllerInfo): Promise<string> {
	const dialog = new PasswordToControllerDialog(treeDataProvider);
	dialog.showDialog(controllerInfo);
	const model = await dialog.waitForClose();
	if (!model) {
		throw new UserCancelledError(loc.userCancelledError);
	}
	return model.password;
}

export async function getControllerPassword(treeDataProvider: AzureArcTreeDataProvider, controllerInfo: arc.ControllerInfo): Promise<string> {
	return await treeDataProvider.getPassword(controllerInfo);
}

export async function getRegisteredDataControllers(treeDataProvider: AzureArcTreeDataProvider): Promise<arc.DataController[]> {
	return (await treeDataProvider.getChildren())
		.filter(node => node instanceof ControllerTreeNode)
		.map(node => ({
			label: (node as ControllerTreeNode).model.label,
			info: (node as ControllerTreeNode).model.info
		}));
}
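This deleted module is the arc extension's public API surface: a consumer obtains `arc.IExtension` and calls `getRegisteredDataControllers` or the password getters. A hedged consumer sketch (the extension ID string below is an assumption; check the arc extension's package.json for the real one):

```typescript
import * as vscode from 'vscode';
import * as arc from 'arc';

async function logControllers(): Promise<void> {
	// 'Microsoft.arc' is a guessed extension ID, not confirmed by this diff.
	const arcExtension = vscode.extensions.getExtension<arc.IExtension>('Microsoft.arc');
	if (!arcExtension) {
		return;
	}
	const api = await arcExtension.activate();
	for (const controller of await api.getRegisteredDataControllers()) {
		const password = await api.getControllerPassword(controller.info);
		console.log(`${controller.label}: password ${password ? 'cached' : 'not cached'}`);
	}
}
```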
@@ -1,76 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import * as yamljs from 'yamljs';
|
||||
import * as loc from '../localizedConstants';
|
||||
import { throwUnless } from './utils';
|
||||
export interface KubeClusterContext {
|
||||
name: string;
|
||||
isCurrentContext: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* returns the cluster context defined in the {@see configFile}
|
||||
*
|
||||
* @param configFile
|
||||
*/
|
||||
export function getKubeConfigClusterContexts(configFile: string): Promise<KubeClusterContext[]> {
|
||||
const config: any = yamljs.load(configFile);
|
||||
const rawContexts = <any[]>config['contexts'];
|
||||
throwUnless(rawContexts && rawContexts.length, loc.noContextFound(configFile));
|
||||
const currentContext = <string>config['current-context'];
|
||||
throwUnless(currentContext, loc.noCurrentContextFound(configFile));
|
||||
const contexts: KubeClusterContext[] = [];
|
||||
rawContexts.forEach(rawContext => {
|
||||
const name = <string>rawContext['name'];
|
||||
throwUnless(name, loc.noNameInContext(configFile));
|
||||
if (name) {
|
||||
contexts.push({
|
||||
name: name,
|
||||
isCurrentContext: name === currentContext
|
||||
});
|
||||
}
|
||||
});
|
||||
return Promise.resolve(contexts);
|
||||
}
|
||||
|
||||
/**
|
||||
* searches for {@see previousClusterContext} in the array of {@see clusterContexts}.
|
||||
* if {@see previousClusterContext} was truthy and it was found in {@see clusterContexts}
|
||||
* then it returns {@see previousClusterContext}
|
||||
* else it returns the current cluster context from {@see clusterContexts} unless throwIfNotFound was set on input in which case an error is thrown instead.
|
||||
* else it returns the current cluster context from {@see clusterContexts}
|
||||
*
|
||||
*
|
||||
* @param clusterContexts
|
||||
* @param previousClusterContext
|
||||
* @param throwIfNotFound
|
||||
*/
|
||||
export function getCurrentClusterContext(clusterContexts: KubeClusterContext[], previousClusterContext?: string, throwIfNotFound: boolean = false): string {
|
||||
if (previousClusterContext) {
|
||||
if (clusterContexts.find(c => c.name === previousClusterContext)) { // if previous cluster context value is found in clusters then return that value
|
||||
return previousClusterContext;
|
||||
} else {
|
||||
if (throwIfNotFound) {
|
||||
throw new Error(loc.clusterContextNotFound(previousClusterContext));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// if not previousClusterContext or throwIfNotFound was false when previousCLusterContext was not found in the clusterContexts
|
||||
const currentClusterContext = clusterContexts.find(c => c.isCurrentContext)?.name;
|
||||
throwUnless(currentClusterContext !== undefined, loc.noCurrentClusterContext);
|
||||
return currentClusterContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* returns the default kube config file path
|
||||
*/
|
||||
export function getDefaultKubeConfigPath(): string {
|
||||
return path.join(os.homedir(), '.kube', 'config');
|
||||
}
|
||||
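
// Usage sketch (assumed call site, not part of this file):
//   const contexts = await getKubeConfigClusterContexts(getDefaultKubeConfigPath());
//   const current = getCurrentClusterContext(contexts);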

@@ -8,7 +8,7 @@
 */
export class Deferred<T> {
	promise: Promise<T>;
	resolve!: (value: T | PromiseLike<T>) => void;
	resolve!: (value?: T | PromiseLike<T>) => void;
	reject!: (reason?: any) => void;
	constructor() {
		this.promise = new Promise<T>((resolve, reject) => {

@@ -9,6 +9,8 @@ import * as vscode from 'vscode';
import { ConnectionMode, IconPath, IconPathHelper } from '../constants';
import * as loc from '../localizedConstants';

export class UserCancelledError extends Error { }

/**
 * Converts the resource type name into the localized Display Name for that type.
 * @param resourceType The resource type name to convert
@@ -65,7 +67,7 @@ export function getResourceTypeIcon(resourceType: string | undefined): IconPath

/**
 * Returns the text to display for known connection modes
 * @param connectionMode The string representing the connection mode
 * @param connectionMode The string repsenting the connection mode
 */
export function getConnectionModeDisplayText(connectionMode: string | undefined): string {
	connectionMode = connectionMode ?? '';
@@ -109,7 +111,7 @@ export function getDatabaseStateDisplayText(state: string): string {
 * @returns Promise resolving to the user's input if it passed validation,
 * or undefined if the input box was closed for any other reason
 */
async function promptInputBox(title: string, options: vscode.InputBoxOptions): Promise<string | undefined> {
async function promptInputBox(title: string, options: vscode.InputBoxOptions): Promise<string> {
	const inputBox = vscode.window.createInputBox();
	inputBox.title = title;
	inputBox.prompt = options.prompt;
@@ -146,15 +148,15 @@ async function promptInputBox(title: string, options: vscode.InputBoxOptions): P
}

/**
 * Opens an input box prompting the user to enter in the name of an instance to delete
 * @param name The name of the instance to delete
 * Opens an input box prompting the user to enter in the name of a resource to delete
 * @param name The name of the resource to delete
 * @returns Promise resolving to true if the user confirmed the name, false if the input box was closed for any other reason
 */
export async function promptForInstanceDeletion(name: string): Promise<boolean> {
	const title = loc.instanceDeletionWarning(name);
export async function promptForResourceDeletion(name: string): Promise<boolean> {
	const title = loc.resourceDeletionWarning(name);
	const options: vscode.InputBoxOptions = {
		placeHolder: name,
		validateInput: input => input !== name ? loc.invalidInstanceDeletionName(name) : ''
		validateInput: input => input !== name ? loc.invalidResourceDeletionName(name) : ''
	};

	return await promptInputBox(title, options) !== undefined;
@@ -187,147 +189,39 @@ export async function promptAndConfirmPassword(validate: (input: string) => stri
/**
 * Gets the message to display for a given error object that may be a variety of types.
 * @param error The error object
 * @param useMessageWithLink Whether to use the messageWithLink - if available
 */
export function getErrorMessage(error: any, useMessageWithLink: boolean = false): string {
	if (useMessageWithLink && error.messageWithLink) {
		return error.messageWithLink;
	}
export function getErrorMessage(error: any): string {
	return error.message ?? error;
}

/**
 * Parses an instance name from the controller. An instance name will either be just its name
 * e.g. myinstance or namespace_name e.g. mynamespace_my-instance.
 * @param instanceName The instance name in one of the formats described
 */
export function parseInstanceName(instanceName: string | undefined): string {
	instanceName = instanceName ?? '';
	const parts: string[] = instanceName.split('_');
	if (parts.length === 2) {
		instanceName = parts[1];
	}
	else if (parts.length > 2) {
		throw new Error(`Cannot parse resource '${instanceName}'. Acceptable formats are 'namespace_name' or 'name'.`);
	}
	return instanceName;
}

/**
 * Parses an address into its separate ip and port values. Address must be in the form <ip>:<port>
 * or <ip>,<port>
 * @param address The address to parse
 */
export function parseIpAndPort(address: string): { ip: string, port: string } {
	let sections = address.split(':');
	const sections = address.split(':');
	if (sections.length !== 2) {
		sections = address.split(',');
		if (sections.length !== 2) {
			throw new Error(`Invalid address format for ${address}. Address must be in the form <ip>:<port> or <ip>,<port>`);
		}
		throw new Error(`Invalid address format for ${address}. Address must be in the form <ip>:<port>`);
	}
	return {
		ip: sections[0],
		port: sections[1]
	};
}
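// e.g. parseIpAndPort('10.0.0.1:1433') returns { ip: '10.0.0.1', port: '1433' }
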
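/**
 * Builds the key used to store an instance's credentials, in the form
 * controllerId::resourceType::instanceName.
 */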
export function createCredentialId(controllerId: string, resourceType: string, instanceName: string): string {
	return `${controllerId}::${resourceType}::${instanceName}`;
}

/**
 * Calculates the gibibyte (GiB) conversion of a quantity that could currently be represented by a range
 * of SI suffixes (E, P, T, G, M, K, m) or their power-of-two equivalents (Ei, Pi, Ti, Gi, Mi, Ki)
 * @param value The string of a quantity to be converted
 * @returns String of GiB conversion
 */
export function convertToGibibyteString(value: string): string {
	if (!value) {
		throw new Error(`Value provided is not a valid Kubernetes resource quantity`);
	}

	let base10ToBase2Multiplier;
	let floatValue = parseFloat(value);
	let splitValue = value.split(String(floatValue));
	let unit = splitValue[1];

	if (unit === 'K') {
		base10ToBase2Multiplier = 1000 / 1024;
		floatValue = (floatValue * base10ToBase2Multiplier) / Math.pow(1024, 2);
	} else if (unit === 'M') {
		base10ToBase2Multiplier = Math.pow(1000, 2) / Math.pow(1024, 2);
		floatValue = (floatValue * base10ToBase2Multiplier) / 1024;
	} else if (unit === 'G') {
		base10ToBase2Multiplier = Math.pow(1000, 3) / Math.pow(1024, 3);
		floatValue = floatValue * base10ToBase2Multiplier;
	} else if (unit === 'T') {
		base10ToBase2Multiplier = Math.pow(1000, 4) / Math.pow(1024, 4);
		floatValue = (floatValue * base10ToBase2Multiplier) * 1024;
	} else if (unit === 'P') {
		base10ToBase2Multiplier = Math.pow(1000, 5) / Math.pow(1024, 5);
		floatValue = (floatValue * base10ToBase2Multiplier) * Math.pow(1024, 2);
	} else if (unit === 'E') {
		base10ToBase2Multiplier = Math.pow(1000, 6) / Math.pow(1024, 6);
		floatValue = (floatValue * base10ToBase2Multiplier) * Math.pow(1024, 3);
	} else if (unit === 'm') {
		floatValue = (floatValue / 1000) / Math.pow(1024, 3);
	} else if (unit === '') {
		floatValue = floatValue / Math.pow(1024, 3);
	} else if (unit === 'Ki') {
		floatValue = floatValue / Math.pow(1024, 2);
	} else if (unit === 'Mi') {
		floatValue = floatValue / 1024;
	} else if (unit === 'Gi') {
		floatValue = floatValue;
	} else if (unit === 'Ti') {
		floatValue = floatValue * 1024;
	} else if (unit === 'Pi') {
		floatValue = floatValue * Math.pow(1024, 2);
	} else if (unit === 'Ei') {
		floatValue = floatValue * Math.pow(1024, 3);
	} else {
		throw new Error(`${value} is not a valid Kubernetes resource quantity`);
	}

	return String(floatValue);
}
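
// Example conversions (the exact strings follow from the arithmetic above):
//   convertToGibibyteString('1Gi')    -> '1'
//   convertToGibibyteString('1024Mi') -> '1'
//   convertToGibibyteString('1G')     -> '0.9313225746154785' (10^9 bytes expressed in GiB)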

/*
 * Throws an Error with given {@link message} unless {@link condition} is true.
 * This also tells the typescript compiler that the condition is 'truthy' in the remainder of the scope
 * where this function was called.
 *
 * @param condition
 * @param message
 */
export function throwUnless(condition: any, message?: string): asserts condition {
	if (!condition) {
		throw new Error(message);
	}
}

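/**
 * Runs the given action, returning both its result and any error it threw so
 * callers can handle failures without wrapping every call site in try/catch.
 */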
export async function tryExecuteAction<T>(action: () => T | PromiseLike<T>): Promise<{ result: T | undefined, error: any }> {
	let error: any, result: T | undefined;
	try {
		result = await action();
	} catch (e) {
		error = e;
	}
	return { result, error };
}

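// Wraps a method (or getter) property descriptor with the given decorator
// function; used by the debounce decorator below.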
function decorate(decorator: (fn: Function, key: string) => Function): Function {
	return (_target: any, key: string, descriptor: any) => {
		let fnKey: string | null = null;
		let fn: Function | null = null;

		if (typeof descriptor.value === 'function') {
			fnKey = 'value';
			fn = descriptor.value;
		} else if (typeof descriptor.get === 'function') {
			fnKey = 'get';
			fn = descriptor.get;
		}

		if (!fn || !fnKey) {
			throw new Error('not supported');
		}

		descriptor[fnKey] = decorator(fn, key);
	};
}

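/**
 * Method decorator that delays invocation until {@link delay} milliseconds have
 * elapsed since the most recent call, coalescing bursts of calls into one.
 * Hypothetical usage: class Page { @debounce(250) onInput() { ... } }
 */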
export function debounce(delay: number): Function {
	return decorate((fn, key) => {
		const timerKey = `$debounce$${key}`;

		return function (this: any, ...args: any[]) {
			clearTimeout(this[timerKey]);
			this[timerKey] = setTimeout(() => fn.apply(this, args), delay);
		};
	});
}

@@ -7,11 +7,6 @@ import * as vscode from 'vscode';

export const refreshActionId = 'arc.refresh';

export const credentialNamespace = 'arcCredentials';

export const controllerTroubleshootDocsUrl = 'https://aka.ms/arc-data-tsg';
export const miaaTroubleshootDocsUrl = 'https://aka.ms/miaa-tsg';

export interface IconPath {
	dark: string;
	light: string;
@@ -34,18 +29,13 @@ export class IconPathHelper {
	public static properties: IconPath;
	public static networking: IconPath;
	public static refresh: IconPath;
	public static reset: IconPath;
	public static support: IconPath;
	public static wrench: IconPath;
	public static miaa: IconPath;
	public static controller: IconPath;
	public static health: IconPath;
	public static success: IconPath;
	public static save: IconPath;
	public static discard: IconPath;
	public static fail: IconPath;
	public static information: IconPath;
	public static gear: IconPath;

	public static setExtensionContext(context: vscode.ExtensionContext) {
		IconPathHelper.context = context;
@@ -97,10 +87,6 @@ export class IconPathHelper {
			light: context.asAbsolutePath('images/refresh.svg'),
			dark: context.asAbsolutePath('images/refresh.svg')
		};
		IconPathHelper.reset = {
			light: context.asAbsolutePath('images/reset.svg'),
			dark: context.asAbsolutePath('images/reset.svg')
		};
		IconPathHelper.support = {
			light: context.asAbsolutePath('images/support.svg'),
			dark: context.asAbsolutePath('images/support.svg')
@@ -125,26 +111,10 @@ export class IconPathHelper {
			light: context.asAbsolutePath('images/success.svg'),
			dark: context.asAbsolutePath('images/success.svg'),
		};
		IconPathHelper.save = {
			light: context.asAbsolutePath('images/save.svg'),
			dark: context.asAbsolutePath('images/save.svg'),
		};
		IconPathHelper.discard = {
			light: context.asAbsolutePath('images/discard.svg'),
			dark: context.asAbsolutePath('images/discard.svg'),
		};
		IconPathHelper.fail = {
			light: context.asAbsolutePath('images/fail.svg'),
			dark: context.asAbsolutePath('images/fail.svg'),
		};
		IconPathHelper.information = {
			light: context.asAbsolutePath('images/information.svg'),
			dark: context.asAbsolutePath('images/information.svg'),
		};
		IconPathHelper.gear = {
			light: context.asAbsolutePath('images/gear.svg'),
			dark: context.asAbsolutePath('images/gear.svg'),
		};
	}
}

@@ -4,13 +4,11 @@
 *--------------------------------------------------------------------------------------------*/

import * as arc from 'arc';
import * as rd from 'resource-deployment';
import * as vscode from 'vscode';
import { arcApi } from './common/api';
import { UserCancelledError } from './common/utils';
import { IconPathHelper, refreshActionId } from './constants';
import * as loc from './localizedConstants';
import { ArcControllersOptionsSourceProvider } from './providers/arcControllersOptionsSourceProvider';
import { ConnectToControllerDialog } from './ui/dialogs/connectControllerDialog';
import { ConnectToControllerDialog, PasswordToControllerDialog } from './ui/dialogs/connectControllerDialog';
import { AzureArcTreeDataProvider } from './ui/tree/azureArcTreeDataProvider';
import { ControllerTreeNode } from './ui/tree/controllerTreeNode';
import { TreeNode } from './ui/tree/treeNode';
@@ -57,12 +55,41 @@ export async function activate(context: vscode.ExtensionContext): Promise<arc.IE
		}
	});

	// register option sources
	const rdApi = <rd.IExtension>vscode.extensions.getExtension(rd.extension.name)?.exports;
	context.subscriptions.push(rdApi.registerOptionsSourceProvider(new ArcControllersOptionsSourceProvider(treeDataProvider)));
	await checkArcDeploymentExtension();

	return arcApi(treeDataProvider);
	return {
		getRegisteredDataControllers: async () => (await treeDataProvider.getChildren())
			.filter(node => node instanceof ControllerTreeNode)
			.map(node => ({
				label: (node as ControllerTreeNode).model.label,
				info: (node as ControllerTreeNode).model.info
			})),
		getControllerPassword: async (controllerInfo: arc.ControllerInfo) => {
			return await treeDataProvider.getPassword(controllerInfo);
		},
		reacquireControllerPassword: async (controllerInfo: arc.ControllerInfo) => {
			let model;
			const dialog = new PasswordToControllerDialog(treeDataProvider);
			dialog.showDialog(controllerInfo);
			model = await dialog.waitForClose();
			if (!model) {
				throw new UserCancelledError();
			}
			return model.password;
		}
	};
}

export function deactivate(): void {
}

async function checkArcDeploymentExtension(): Promise<void> {
	const version = vscode.extensions.getExtension('Microsoft.arcdeployment')?.packageJSON.version;
	if (version && version !== '0.3.2') {
		// If we have an older version of the deployment extension installed then uninstall it now since it's replaced
		// by this extension. (the latest version of the Arc Deployment extension will uninstall itself so don't do
		// anything here if that's already updated)
		await vscode.commands.executeCommand('workbench.extensions.uninstallExtension', 'Microsoft.arcdeployment');
		vscode.window.showInformationMessage(loc.arcDeploymentDeprecation);
	}
}

@@ -8,13 +8,13 @@ import { getErrorMessage } from './common/utils';
const localize = nls.loadMessageBundle();

export const arcDeploymentDeprecation = localize('arc.arcDeploymentDeprecation', "The Arc Deployment extension has been replaced by the Arc extension and has been uninstalled.");
export function arcControllerDashboard(name: string): string { return localize('arc.controllerDashboard', "Azure Arc Data Controller Dashboard (Preview) - {0}", name); }
export function miaaDashboard(name: string): string { return localize('arc.miaaDashboard', "SQL managed instance - Azure Arc Dashboard (Preview) - {0}", name); }
export function postgresDashboard(name: string): string { return localize('arc.postgresDashboard', "PostgreSQL Hyperscale - Azure Arc Dashboard (Preview) - {0}", name); }
export function arcControllerDashboard(name: string): string { return localize('arc.controllerDashboard', "Azure Arc Controller Dashboard (Preview) - {0}", name); }
export function miaaDashboard(name: string): string { return localize('arc.miaaDashboard', "Managed Instance Dashboard (Preview) - {0}", name); }
export function postgresDashboard(name: string): string { return localize('arc.postgresDashboard', "Postgres Dashboard (Preview) - {0}", name); }

export const dataControllersType = localize('arc.dataControllersType', "Azure Arc Data Controller");
export const pgSqlType = localize('arc.pgSqlType', "PostgreSQL Hyperscale - Azure Arc");
export const miaaType = localize('arc.miaaType', "SQL managed instance - Azure Arc");
export const pgSqlType = localize('arc.pgSqlType', "PostgreSQL Server group - Azure Arc");
export const miaaType = localize('arc.miaaType', "SQL instance - Azure Arc");

export const overview = localize('arc.overview', "Overview");
export const connectionStrings = localize('arc.connectionStrings', "Connection Strings");
@@ -23,20 +23,15 @@ export const properties = localize('arc.properties', "Properties");
export const settings = localize('arc.settings', "Settings");
export const security = localize('arc.security', "Security");
export const computeAndStorage = localize('arc.computeAndStorage', "Compute + Storage");
export const nodeParameters = localize('arc.nodeParameters', "Node Parameters");
export const compute = localize('arc.compute', "Compute");
export const backup = localize('arc.backup', "Backup");
export const newSupportRequest = localize('arc.newSupportRequest', "New support request");
export const diagnoseAndSolveProblems = localize('arc.diagnoseAndSolveProblems', "Diagnose and solve problems");
export const supportAndTroubleshooting = localize('arc.supportAndTroubleshooting', "Support + troubleshooting");
export const resourceHealth = localize('arc.resourceHealth', "Resource health");
export const parameterName = localize('arc.parameterName', "Parameter Name");
export const value = localize('arc.value', "Value");

export const newInstance = localize('arc.createNew', "New Instance");
export const deleteText = localize('arc.delete', "Delete");
export const saveText = localize('arc.save', "Save");
export const discardText = localize('arc.discard', "Discard");
export const resetPassword = localize('arc.resetPassword', "Reset Password");
export const openInAzurePortal = localize('arc.openInAzurePortal', "Open in Azure Portal");
export const resourceGroup = localize('arc.resourceGroup', "Resource Group");
@@ -64,20 +59,12 @@ export const yes = localize('arc.yes', "Yes");
export const no = localize('arc.no', "No");
export const feedback = localize('arc.feedback', "Feedback");
export const selectConnectionString = localize('arc.selectConnectionString', "Select from available client connection strings below.");
export const addingWorkerNodes = localize('arc.addingWorkerNodes', "adding worker nodes");
export const workerNodesDescription = localize('arc.workerNodesDescription', "Expand your server group and scale your database by adding worker nodes.");
export const postgresConfigurationInformation = localize('arc.postgres.configurationInformation', "You can configure the number of CPU cores and storage size that will apply to both worker nodes and coordinator node. Each worker node will have the same configuration. Adjust the number of CPU cores and memory settings for your server group.");
export const workerNodesInformation = localize('arc.workerNodeInformation', "In preview it is not possible to reduce the number of worker nodes. Please refer to documentation linked above for more information.");
export const vCores = localize('arc.vCores', "vCores");
export const ram = localize('arc.ram', "RAM");
export const refresh = localize('arc.refresh', "Refresh");
export const resetAllToDefault = localize('arc.resetAllToDefault', "Reset all to default");
export const resetToDefault = localize('arc.resetToDefault', "Reset to default");
export const troubleshoot = localize('arc.troubleshoot', "Troubleshoot");
export const clickTheNewSupportRequestButton = localize('arc.clickTheNewSupportRequestButton', "Click the new support request button to file a support request in the Azure Portal.");
export const running = localize('arc.running', "Running");
export const ready = localize('arc.ready', "Ready");
export const notReady = localize('arc.notReady', "Not Ready");
export const pending = localize('arc.pending', "Pending");
export const failed = localize('arc.failed', "Failed");
export const unknown = localize('arc.unknown', "Unknown");
@@ -85,28 +72,17 @@ export const direct = localize('arc.direct', "Direct");
export const indirect = localize('arc.indirect', "Indirect");
export const loading = localize('arc.loading', "Loading...");
export const refreshToEnterCredentials = localize('arc.refreshToEnterCredentials', "Refresh node to enter credentials");
export const noInstancesAvailable = localize('arc.noInstancesAvailable', "No instances available");
export const connectToServer = localize('arc.connecToServer', "Connect to Server");
export const connectToController = localize('arc.connectToController', "Connect to Existing Controller");
export function connectToMSSql(name: string): string { return localize('arc.connectToMSSql', "Connect to SQL managed instance - Azure Arc ({0})", name); }
export function connectToPGSql(name: string): string { return localize('arc.connectToPGSql', "Connect to PostgreSQL Hyperscale - Azure Arc ({0})", name); }
export const passwordToController = localize('arc.passwordToController', "Provide Password to Controller");
export const controllerUrl = localize('arc.controllerUrl', "Controller URL");
export const serverEndpoint = localize('arc.serverEndpoint', "Server Endpoint");
export const controllerName = localize('arc.controllerName', "Name");
export const controllerKubeConfig = localize('arc.controllerKubeConfig', "Kube Config File Path");
export const controllerClusterContext = localize('arc.controllerClusterContext', "Cluster Context");
export const defaultControllerName = localize('arc.defaultControllerName', "arc-dc");
export const postgresProviderName = localize('arc.postgresProviderName', "PGSQL");
export const miaaProviderName = localize('arc.miaaProviderName', "MSSQL");
export const username = localize('arc.username', "Username");
export const password = localize('arc.password', "Password");
export const rememberPassword = localize('arc.rememberPassword', "Remember Password");
export const connect = localize('arc.connect', "Connect");
export const cancel = localize('arc.cancel', "Cancel");
export const ok = localize('arc.ok', "Ok");
export const on = localize('arc.on', "On");
export const off = localize('arc.off', "Off");
export const notConfigured = localize('arc.notConfigured', "Not Configured");

// Database States - see https://docs.microsoft.com/sql/relational-databases/databases/database-states
@@ -135,31 +111,9 @@ export const databaseName = localize('arc.databaseName', "Database name");
export const enterNewPassword = localize('arc.enterNewPassword', "Enter a new password");
export const confirmNewPassword = localize('arc.confirmNewPassword', "Confirm the new password");
export const learnAboutPostgresClients = localize('arc.learnAboutPostgresClients', "Learn more about Azure PostgreSQL Hyperscale client interfaces");
export const nodeParametersDescription = localize('arc.nodeParametersDescription', " These server parameters of the Coordinator node and the Worker nodes can be set to custom (non-default) values. Search to find parameters.");
export const learnAboutNodeParameters = localize('arc.learnAboutNodeParameters', "Learn more about database engine settings for Azure Arc enabled PostgreSQL Hyperscale");
export const noNodeParametersFound = localize('arc.noNodeParametersFound', "No worker server parameters found...");
export const searchToFilter = localize('arc.searchToFilter', "Search to filter items...");
export const scalingCompute = localize('arc.scalingCompute', "scaling compute vCores and memory.");
export const postgresComputeAndStorageDescriptionPartOne = localize('arc.postgresComputeAndStorageDescriptionPartOne', "You can scale your Azure Arc enabled");
export const miaaComputeAndStorageDescriptionPartOne = localize('arc.miaaComputeAndStorageDescriptionPartOne', "You can scale your Azure SQL managed instance - Azure Arc by");
export const postgresComputeAndStorageDescriptionPartTwo = localize('arc.postgres.computeAndStorageDescriptionPartTwo', "PostgreSQL Hyperscale server group by");
export const computeAndStorageDescriptionPartThree = localize('arc.computeAndStorageDescriptionPartThree', "without downtime and by");
export const computeAndStorageDescriptionPartFour = localize('arc.computeAndStorageDescriptionPartFour', "Before doing so, you need to ensure");
export const computeAndStorageDescriptionPartFive = localize('arc.computeAndStorageDescriptionPartFive', "there are sufficient resources available");
export const computeAndStorageDescriptionPartSix = localize('arc.computeAndStorageDescriptionPartSix', "in your Kubernetes cluster to honor this configuration.");
export const node = localize('arc.node', "node");
export const nodes = localize('arc.nodes', "nodes");
export const workerNodes = localize('arc.workerNodes', "Worker Nodes");
export const storagePerNode = localize('arc.storagePerNode', "storage per node");
export const workerNodeCount = localize('arc.workerNodeCount', "Worker node count:");
export const configurationPerNode = localize('arc.configurationPerNode', "Configuration (per node)");
export const coresLimit = localize('arc.coresLimit', "CPU limit:");
export const coresRequest = localize('arc.coresRequest', "CPU request:");
export const memoryLimit = localize('arc.memoryLimit', "Memory limit (in GB):");
export const memoryRequest = localize('arc.memoryRequest', "Memory request (in GB):");
export const workerValidationErrorMessage = localize('arc.workerValidationErrorMessage', "The number of workers cannot be decreased.");
export const memoryRequestValidationErrorMessage = localize('arc.memoryRequestValidationErrorMessage', "Memory request must be at least 0.25Gib");
export const memoryLimitValidationErrorMessage = localize('arc.memoryLimitValidationErrorMessage', "Memory limit must be at least 0.25Gib");
export const arcResources = localize('arc.arcResources', "Azure Arc Resources");
export const enterANonEmptyPassword = localize('arc.enterANonEmptyPassword', "Enter a non empty password or press escape to exit.");
export const thePasswordsDoNotMatch = localize('arc.thePasswordsDoNotMatch', "The passwords do not match. Confirm the password or press escape to exit.");
@@ -169,19 +123,9 @@ export const condition = localize('arc.condition', "Condition");
export const details = localize('arc.details', "Details");
export const lastUpdated = localize('arc.lastUpdated', "Last updated");
export const noExternalEndpoint = localize('arc.noExternalEndpoint', "No External Endpoint has been configured so this information isn't available.");
export const podsReady = localize('arc.podsReady', "pods ready");
export const connectToPostgresDescription = localize('arc.connectToPostgresDescription', "A connection to the server is required to show and set database engine settings, which will require the PostgreSQL Extension to be installed.");
export const postgresExtension = localize('arc.postgresExtension', "microsoft.azuredatastudio-postgresql");

export function rangeSetting(min: string, max: string): string { return localize('arc.rangeSetting', "Value is expected to be in the range {0} - {1}", min, max); }
export function allowedValue(value: string): string { return localize('arc.allowedValue', "Value is expected to be {0}", value); }
export function databaseCreated(name: string): string { return localize('arc.databaseCreated', "Database {0} created", name); }
export function deletingInstance(name: string): string { return localize('arc.deletingInstance', "Deleting instance '{0}'...", name); }
export function installingExtension(name: string): string { return localize('arc.installingExtension', "Installing extension '{0}'...", name); }
export function extensionInstalled(name: string): string { return localize('arc.extensionInstalled', "Extension '{0}' has been installed.", name); }
export function updatingInstance(name: string): string { return localize('arc.updatingInstance', "Updating instance '{0}'...", name); }
export function instanceDeleted(name: string): string { return localize('arc.instanceDeleted', "Instance '{0}' deleted", name); }
export function instanceUpdated(name: string): string { return localize('arc.instanceUpdated', "Instance '{0}' updated", name); }
export function resourceDeleted(name: string): string { return localize('arc.resourceDeleted', "Resource '{0}' deleted", name); }
export function copiedToClipboard(name: string): string { return localize('arc.copiedToClipboard', "{0} copied to clipboard", name); }
export function clickTheTroubleshootButton(resourceType: string): string { return localize('arc.clickTheTroubleshootButton', "Click the troubleshoot button to open the Azure Arc {0} troubleshooting notebook.", resourceType); }
export function numVCores(vCores: string | undefined): string {
@@ -196,49 +140,24 @@ export function numVCores(vCores: string | undefined): string {
	}
}
export function updated(when: string): string { return localize('arc.updated', "Updated {0}", when); }
export function validationMin(min: number): string { return localize('arc.validationMin', "Value must be greater than or equal to {0}.", min); }

// Errors
export const pgConnectionRequired = localize('arc.pgConnectionRequired', "A connection is required to show and set database engine settings.");
export const miaaConnectionRequired = localize('arc.miaaConnectionRequired', "A connection is required to list the databases on this instance.");
export const connectionRequired = localize('arc.connectionRequired', "A connection is required to show all properties. Click refresh to re-enter connection information");
export const couldNotFindControllerRegistration = localize('arc.couldNotFindControllerRegistration', "Could not find controller registration.");
export function outOfRange(min: string, max: string): string { return localize('arc.outOfRange', "The number must be in range {0} - {1}", min, max); }
export function refreshFailed(error: any): string { return localize('arc.refreshFailed', "Refresh failed. {0}", getErrorMessage(error)); }
export function resetFailed(error: any): string { return localize('arc.resetFailed', "Reset failed. {0}", getErrorMessage(error)); }
export function openDashboardFailed(error: any): string { return localize('arc.openDashboardFailed', "Error opening dashboard. {0}", getErrorMessage(error)); }
export function instanceDeletionFailed(name: string, error: any): string { return localize('arc.instanceDeletionFailed', "Failed to delete instance {0}. {1}", name, getErrorMessage(error)); }
export function instanceUpdateFailed(name: string, error: any): string { return localize('arc.instanceUpdateFailed', "Failed to update instance {0}. {1}", name, getErrorMessage(error)); }
export function pageDiscardFailed(error: any): string { return localize('arc.pageDiscardFailed', "Failed to discard user input. {0}", getErrorMessage(error)); }
export function resourceDeletionFailed(name: string, error: any): string { return localize('arc.resourceDeletionFailed', "Failed to delete resource {0}. {1}", name, getErrorMessage(error)); }
export function databaseCreationFailed(name: string, error: any): string { return localize('arc.databaseCreationFailed', "Failed to create database {0}. {1}", name, getErrorMessage(error)); }
export function connectToControllerFailed(url: string, error: any): string { return localize('arc.connectToControllerFailed', "Could not connect to controller {0}. {1}", url, getErrorMessage(error)); }
export function connectToMSSqlFailed(serverName: string, error: any): string { return localize('arc.connectToMSSqlFailed', "Could not connect to SQL managed instance - Azure Arc Instance {0}. {1}", serverName, getErrorMessage(error)); }
export function connectToPGSqlFailed(serverName: string, error: any): string { return localize('arc.connectToPGSqlFailed', "Could not connect to PostgreSQL Hyperscale - Azure Arc Instance {0}. {1}", serverName, getErrorMessage(error)); }
export function missingExtension(extensionName: string): string { return localize('arc.missingExtension', "The {0} extension is required to view engine settings. Do you wish to install it now?", extensionName); }
export function extensionInstallationFailed(extensionName: string): string { return localize('arc.extensionInstallationFailed', "Failed to install extension {0}.", extensionName); }
export function fetchConfigFailed(name: string, error: any): string { return localize('arc.fetchConfigFailed', "An unexpected error occurred retrieving the config for '{0}'. {1}", name, getErrorMessage(error)); }
export function fetchEndpointsFailed(name: string, error: any): string { return localize('arc.fetchEndpointsFailed', "An unexpected error occurred retrieving the endpoints for '{0}'. {1}", name, getErrorMessage(error)); }
export function fetchRegistrationsFailed(name: string, error: any): string { return localize('arc.fetchRegistrationsFailed', "An unexpected error occurred retrieving the registrations for '{0}'. {1}", name, getErrorMessage(error)); }
export function fetchDatabasesFailed(name: string, error: any): string { return localize('arc.fetchDatabasesFailed', "An unexpected error occurred retrieving the databases for '{0}'. {1}", name, getErrorMessage(error)); }
export function fetchEngineSettingsFailed(name: string, error: any): string { return localize('arc.fetchEngineSettingsFailed', "An unexpected error occurred retrieving the engine settings for '{0}'. {1}", name, getErrorMessage(error)); }
export function instanceDeletionWarning(name: string): string { return localize('arc.instanceDeletionWarning', "Warning! Deleting an instance is permanent and cannot be undone. To delete the instance '{0}' type the name '{0}' below to proceed.", name); }
export function invalidInstanceDeletionName(name: string): string { return localize('arc.invalidInstanceDeletionName', "The value '{0}' does not match the instance name. Try again or press escape to exit", name); }
export function resourceDeletionWarning(name: string): string { return localize('arc.resourceDeletionWarning', "Warning! Deleting a resource is permanent and cannot be undone. To delete the resource '{0}' type the name '{0}' below to proceed.", name); }
export function invalidResourceDeletionName(name: string): string { return localize('arc.invalidResourceDeletionName', "The value '{0}' does not match the instance name. Try again or press escape to exit", name); }
export function couldNotFindAzureResource(name: string): string { return localize('arc.couldNotFindAzureResource', "Could not find Azure resource for {0}", name); }
export function passwordResetFailed(error: any): string { return localize('arc.passwordResetFailed', "Failed to reset password. {0}", getErrorMessage(error)); }
export function errorConnectingToController(error: any): string { return localize('arc.errorConnectingToController', "Error connecting to controller. {0}", getErrorMessage(error, true)); }
export function errorConnectingToController(error: any): string { return localize('arc.errorConnectingToController', "Error connecting to controller. {0}", getErrorMessage(error)); }
export function passwordAcquisitionFailed(error: any): string { return localize('arc.passwordAcquisitionFailed', "Failed to acquire password. {0}", getErrorMessage(error)); }
export const loginFailed = localize('arc.loginFailed', "Error logging into controller - wrong username or password");
export const invalidPassword = localize('arc.invalidPassword', "The password did not work, try again.");
export function errorVerifyingPassword(error: any): string { return localize('arc.errorVerifyingPassword', "Error encountered while verifying password. {0}", getErrorMessage(error)); }
export const noControllersConnected = localize('noControllersConnected', "No Azure Arc controllers are currently connected. Please run the command: 'Connect to Existing Azure Arc Controller' and then try again");
export const variableValueFetchForUnsupportedVariable = (variableName: string) => localize('getVariableValue.unknownVariableName', "Attempt to get variable value for unknown variable:{0}", variableName);
export const isPasswordFetchForUnsupportedVariable = (variableName: string) => localize('getIsPassword.unknownVariableName', "Attempt to get isPassword for unknown variable:{0}", variableName);
export const noControllerInfoFound = (name: string) => localize('noControllerInfoFound', "Controller Info could not be found with name: {0}", name);
export const noPasswordFound = (controllerName: string) => localize('noPasswordFound', "Password could not be retrieved for controller: {0} and user did not provide a password. Please retry later.", controllerName);
export const clusterContextNotFound = (clusterContext: string) => localize('clusterContextNotFound', "Cluster Context with name: {0} not found in the Kube config file", clusterContext);
export const noCurrentClusterContext = localize('noCurrentClusterContext', "No current cluster context was found in the kube config file");
export const browse = localize('filePicker.browse', "Browse");
export const select = localize('button.label', "Select");
export const noContextFound = (configFile: string) => localize('noContextFound', "No 'contexts' found in the config file: {0}", configFile);
export const noCurrentContextFound = (configFile: string) => localize('noCurrentContextFound', "No context is marked as 'current-context' in the config file: {0}", configFile);
export const noNameInContext = (configFile: string) => localize('noNameInContext', "No name field was found in a cluster context in the config file: {0}", configFile);
export const userCancelledError = localize('arc.userCancelledError', "User cancelled the dialog");
export const clusterContextConfigNoLongerValid = (configFile: string, clusterContext: string, error: any) => localize('clusterContextConfigNoLongerValid', "The cluster context information specified by config file: {0} and cluster context: {1} is no longer valid. Error is:\n\t{2}\n Do you want to update this information?", configFile, clusterContext, getErrorMessage(error));

@@ -6,8 +6,7 @@
import { ControllerInfo, ResourceType } from 'arc';
import * as azdataExt from 'azdata-ext';
import * as vscode from 'vscode';
import { UserCancelledError } from '../common/api';
import { getCurrentClusterContext, getKubeConfigClusterContexts } from '../common/kubeUtils';
import { parseInstanceName, UserCancelledError } from '../common/utils';
import * as loc from '../localizedConstants';
import { ConnectToControllerDialog } from '../ui/dialogs/connectControllerDialog';
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';
@@ -21,6 +20,7 @@ export type Registration = {
export class ControllerModel {
	private readonly _azdataApi: azdataExt.IExtension;
	private _endpoints: azdataExt.DcEndpointListResult[] = [];
	private _namespace: string = '';
	private _registrations: Registration[] = [];
	private _controllerConfig: azdataExt.DcConfigShowResult | undefined = undefined;

@@ -51,42 +51,19 @@ export class ControllerModel {
		this._onInfoUpdated.fire(this._info);
	}

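	/**
	 * Environment variables passed to azdata so its commands target this
	 * controller's kube config file and cluster context.
	 */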
	public get azdataAdditionalEnvVars(): azdataExt.AdditionalEnvVars {
		return {
			'KUBECONFIG': this.info.kubeConfigFilePath,
			'KUBECTL_CONTEXT': this.info.kubeClusterContext
		};
	}

	/**
	 * Calls azdata login to set the context to this controller and acquires a login session to prevent other
	 * calls from changing the context while commands for this session are being executed.
	 * Calls azdata login to set the context to this controller
	 * @param promptReconnect
	 */
	public async acquireAzdataSession(promptReconnect: boolean = false): Promise<azdataExt.AzdataSession> {
		let promptForValidClusterContext: boolean = false;
		try {
			const contexts = await getKubeConfigClusterContexts(this.info.kubeConfigFilePath);
			getCurrentClusterContext(contexts, this.info.kubeClusterContext, true); // this throws if this.info.kubeClusterContext is not found in 'contexts'
		} catch (error) {
			const response = await vscode.window.showErrorMessage(loc.clusterContextConfigNoLongerValid(this.info.kubeConfigFilePath, this.info.kubeClusterContext, error), loc.yes, loc.no);
			if (response === loc.yes) {
				promptForValidClusterContext = true;
			} else {
				if (!promptReconnect) { //throw unless we are required to prompt for reconnect anyways
					throw error;
				}
			}
		}

		// We haven't gotten our password yet or we want to prompt for a reconnect or we want to prompt to reacquire valid cluster context or any and all of these.
		if (!this._password || promptReconnect || promptForValidClusterContext) {
	public async azdataLogin(promptReconnect: boolean = false): Promise<void> {
		// We haven't gotten our password yet or we want to prompt for a reconnect
		if (!this._password || promptReconnect) {
			this._password = '';
			if (this.info.rememberPassword) {
				// It should be in the credentials store, get it from there
				this._password = await this.treeDataProvider.getPassword(this.info);
			}
			if (promptReconnect || !this._password || promptForValidClusterContext) {
			if (promptReconnect || !this._password) {
				// No password yet or we want to re-prompt for credentials so prompt for it from the user
				const dialog = new ConnectToControllerDialog(this.treeDataProvider);
				dialog.showDialog(this.info, this._password);
@@ -94,14 +71,13 @@
				if (model) {
					await this.treeDataProvider.addOrUpdateController(model.controllerModel, model.password, false);
					this._password = model.password;
					this._info = model.controllerModel.info;
				} else {
					throw new UserCancelledError(loc.userCancelledError);
					throw new UserCancelledError();
				}
			}
		}

		return this._azdataApi.azdata.acquireSession(this.info.url, this.info.username, this._password, this.azdataAdditionalEnvVars);
		await this._azdataApi.azdata.login(this.info.url, this.info.username, this._password);
	}

	/**
@@ -116,66 +92,61 @@
		}
	}
	public async refresh(showErrors: boolean = true, promptReconnect: boolean = false): Promise<void> {
		const session = await this.acquireAzdataSession(promptReconnect);
		const newRegistrations: Registration[] = [];
		try {
			await Promise.all([
				this._azdataApi.azdata.arc.dc.config.show(this.azdataAdditionalEnvVars, session).then(result => {
					this._controllerConfig = result.result;
					this.configLastUpdated = new Date();
					this._onConfigUpdated.fire(this._controllerConfig);
				}).catch(err => {
					// If an error occurs show a message so the user knows something failed but still
					// fire the event so callers hooking into this can handle the error (e.g. so dashboards don't show the
					// loading icon forever)
					if (showErrors) {
						vscode.window.showErrorMessage(loc.fetchConfigFailed(this.info.name, err));
					}
					this._onConfigUpdated.fire(this._controllerConfig);
					throw err;
		await this.azdataLogin(promptReconnect);
		this._registrations = [];
		await Promise.all([
			this._azdataApi.azdata.arc.dc.config.show().then(result => {
				this._controllerConfig = result.result;
				this.configLastUpdated = new Date();
				this._onConfigUpdated.fire(this._controllerConfig);
			}).catch(err => {
				// If an error occurs show a message so the user knows something failed but still
				// fire the event so callers can know to update (e.g. so dashboards don't show the
				// loading icon forever)
				if (showErrors) {
					vscode.window.showErrorMessage(loc.fetchConfigFailed(this.info.name, err));
				}
				this._onConfigUpdated.fire(this._controllerConfig);
				throw err;
			}),
			this._azdataApi.azdata.arc.dc.endpoint.list().then(result => {
				this._endpoints = result.result;
				this.endpointsLastUpdated = new Date();
				this._onEndpointsUpdated.fire(this._endpoints);
			}).catch(err => {
				// If an error occurs show a message so the user knows something failed but still
				// fire the event so callers can know to update (e.g. so dashboards don't show the
				// loading icon forever)
				if (showErrors) {
					vscode.window.showErrorMessage(loc.fetchEndpointsFailed(this.info.name, err));
				}
				this._onEndpointsUpdated.fire(this._endpoints);
				throw err;
			}),
			Promise.all([
				this._azdataApi.azdata.arc.postgres.server.list().then(result => {
					this._registrations.push(...result.result.map(r => {
						return {
							instanceName: r.name,
							state: r.state,
							instanceType: ResourceType.postgresInstances
						};
					}));
				}),
				this._azdataApi.azdata.arc.dc.endpoint.list(this.azdataAdditionalEnvVars, session).then(result => {
					this._endpoints = result.result;
					this.endpointsLastUpdated = new Date();
					this._onEndpointsUpdated.fire(this._endpoints);
				}).catch(err => {
					// If an error occurs show a message so the user knows something failed but still
					// fire the event so callers can know to update (e.g. so dashboards don't show the
					// loading icon forever)
					if (showErrors) {
						vscode.window.showErrorMessage(loc.fetchEndpointsFailed(this.info.name, err));
					}
					this._onEndpointsUpdated.fire(this._endpoints);
					throw err;
				}),
				Promise.all([
					this._azdataApi.azdata.arc.postgres.server.list(this.azdataAdditionalEnvVars, session).then(result => {
						newRegistrations.push(...result.result.map(r => {
							return {
								instanceName: r.name,
								state: r.state,
								instanceType: ResourceType.postgresInstances
							};
						}));
					}),
					this._azdataApi.azdata.arc.sql.mi.list(this.azdataAdditionalEnvVars, session).then(result => {
						newRegistrations.push(...result.result.map(r => {
							return {
								instanceName: r.name,
								state: r.state,
								instanceType: ResourceType.sqlManagedInstances
							};
						}));
					})
				]).then(() => {
					this._registrations = newRegistrations;
					this.registrationsLastUpdated = new Date();
					this._onRegistrationsUpdated.fire(this._registrations);
				this._azdataApi.azdata.arc.sql.mi.list().then(result => {
					this._registrations.push(...result.result.map(r => {
						return {
							instanceName: r.name,
							state: r.state,
							instanceType: ResourceType.sqlManagedInstances
						};
					}));
				})
			]);
		} finally {
			session.dispose();
		}
			]).then(() => {
				this.registrationsLastUpdated = new Date();
				this._onRegistrationsUpdated.fire(this._registrations);
			})
		]);
	}

	public get endpoints(): azdataExt.DcEndpointListResult[] {
@@ -186,6 +157,10 @@
		return this._endpoints.find(e => e.name === name);
	}

	public get namespace(): string {
		return this._namespace;
	}

	public get registrations(): Registration[] {
		return this._registrations;
	}
@@ -196,10 +171,19 @@

	public getRegistration(type: ResourceType, name: string): Registration | undefined {
		return this._registrations.find(r => {
			return r.instanceType === type && r.instanceName === name;
			return r.instanceType === type && parseInstanceName(r.instanceName) === name;
		});
	}

	public async deleteRegistration(_type: ResourceType, _name: string) {
		/* TODO chgagnon
		if (r && !r.isDeleted && r.customObjectName) {
			const r = this.getRegistration(type, name);
			await this._registrationRouter.apiV1RegistrationNsNameIsDeletedDelete(this._namespace, r.customObjectName, true);
		}
		*/
	}

	/**
	 * property to use as a display label for this controller
	 */

@@ -3,15 +3,13 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { MiaaResourceInfo } from 'arc';
import { ResourceInfo } from 'arc';
import * as azdata from 'azdata';
import * as azdataExt from 'azdata-ext';
import * as vscode from 'vscode';
import { UserCancelledError } from '../common/api';
import { Deferred } from '../common/promise';
import { parseIpAndPort } from '../common/utils';
import { UserCancelledError } from '../common/utils';
import * as loc from '../localizedConstants';
import { ConnectToMiaaSqlDialog } from '../ui/dialogs/connectMiaaDialog';
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';
import { ControllerModel, Registration } from './controllerModel';
import { ResourceModel } from './resourceModel';
@@ -22,19 +20,23 @@ export class MiaaModel extends ResourceModel {

	private _config: azdataExt.SqlMiShowResult | undefined;
	private _databases: DatabaseModel[] = [];
	// The saved connection information
	private _connectionProfile: azdata.IConnectionProfile | undefined = undefined;
	// The ID of the active connection used to query the server
	private _activeConnectionId: string | undefined = undefined;

	private readonly _onConfigUpdated = new vscode.EventEmitter<azdataExt.SqlMiShowResult | undefined>();
	private readonly _onDatabasesUpdated = new vscode.EventEmitter<DatabaseModel[]>();
	private readonly _azdataApi: azdataExt.IExtension;
	public onConfigUpdated = this._onConfigUpdated.event;
	public onDatabasesUpdated = this._onDatabasesUpdated.event;
	public configLastUpdated: Date | undefined;
	public databasesLastUpdated: Date | undefined;
	public configLastUpdated?: Date;
	public databasesLastUpdated?: Date;

	private _refreshPromise: Deferred<void> | undefined = undefined;

	constructor(_controllerModel: ControllerModel, private _miaaInfo: MiaaResourceInfo, registration: Registration, private _treeDataProvider: AzureArcTreeDataProvider) {
		super(_controllerModel, _miaaInfo, registration);
	constructor(private _controllerModel: ControllerModel, info: ResourceInfo, registration: Registration, private _treeDataProvider: AzureArcTreeDataProvider) {
		super(info, registration);
		this._azdataApi = <azdataExt.IExtension>vscode.extensions.getExtension(azdataExt.extension.name)?.exports;
	}

@@ -71,11 +73,10 @@ export class MiaaModel extends ResourceModel {
			return this._refreshPromise.promise;
		}
		this._refreshPromise = new Deferred();
		let session: azdataExt.AzdataSession | undefined = undefined;
		try {
			session = await this.controllerModel.acquireAzdataSession();
			await this._controllerModel.azdataLogin();
			try {
				const result = await this._azdataApi.azdata.arc.sql.mi.show(this.info.name, this.controllerModel.azdataAdditionalEnvVars, session);
				const result = await this._azdataApi.azdata.arc.sql.mi.show(this.info.name);
				this._config = result.result;
				this.configLastUpdated = new Date();
				this._onConfigUpdated.fire(this._config);
@@ -91,16 +92,22 @@

			// If we have an external endpoint configured then fetch the databases now
			if (this._config.status.externalEndpoint) {
				this.getDatabases(false).catch(_err => {
					// If an error occurs still fire the event so callers can know to
					// update (e.g. so dashboards don't show the loading icon forever)

					this.databasesLastUpdated = undefined;
				this.getDatabases().catch(err => {
					// If an error occurs show a message so the user knows something failed but still
					// fire the event so callers can know to update (e.g. so dashboards don't show the
					// loading icon forever)
					if (err instanceof UserCancelledError) {
						vscode.window.showWarningMessage(loc.connectionRequired);
					} else {
						vscode.window.showErrorMessage(loc.fetchDatabasesFailed(this.info.name, err));
					}
					this.databasesLastUpdated = new Date();
					this._onDatabasesUpdated.fire(this._databases);
					throw err;
				});
			} else {
				// Otherwise just fire the event so dashboards can update appropriately
				this.databasesLastUpdated = undefined;
				this.databasesLastUpdated = new Date();
				this._onDatabasesUpdated.fire(this._databases);
			}

@@ -109,76 +116,130 @@
			this._refreshPromise.reject(err);
			throw err;
		} finally {
			session?.dispose();
			this._refreshPromise = undefined;
		}
	}

	public async getDatabases(promptForConnection: boolean = true): Promise<void> {
		if (!this._connectionProfile) {
			await this.getConnectionProfile(promptForConnection);
		}

		// We haven't connected yet so do so now and then store the ID for the active connection
		if (!this._activeConnectionId) {
			const result = await azdata.connection.connect(this._connectionProfile!, false, false);
			if (!result.connected) {
				throw new Error(result.errorMessage);
	private async getDatabases(): Promise<void> {
		await this.getConnectionProfile();
		if (this._connectionProfile) {
			// We haven't connected yet so do so now and then store the ID for the active connection
			if (!this._activeConnectionId) {
				const result = await azdata.connection.connect(this._connectionProfile, false, false);
				if (!result.connected) {
					throw new Error(result.errorMessage);
				}
				this._activeConnectionId = result.connectionId;
			}
			this._activeConnectionId = result.connectionId;
		}

		const provider = azdata.dataprotocol.getProvider<azdata.MetadataProvider>(this._connectionProfile!.providerName, azdata.DataProviderType.MetadataProvider);
		const ownerUri = await azdata.connection.getUriForConnection(this._activeConnectionId);
		const databases = await provider.getDatabases(ownerUri);
		if (!databases) {
			throw new Error('Could not fetch databases');
			const provider = azdata.dataprotocol.getProvider<azdata.MetadataProvider>(this._connectionProfile.providerName, azdata.DataProviderType.MetadataProvider);
			const ownerUri = await azdata.connection.getUriForConnection(this._activeConnectionId);
			const databases = await provider.getDatabases(ownerUri);
			if (!databases) {
				throw new Error('Could not fetch databases');
			}
			if (databases.length > 0 && typeof (databases[0]) === 'object') {
				this._databases = (<azdata.DatabaseInfo[]>databases).map(db => { return { name: db.options['name'], status: db.options['state'] }; });
			} else {
				this._databases = (<string[]>databases).map(db => { return { name: db, status: '-' }; });
			}
			this.databasesLastUpdated = new Date();
			this._onDatabasesUpdated.fire(this._databases);
		}
		if (databases.length > 0 && typeof (databases[0]) === 'object') {
			this._databases = (<azdata.DatabaseInfo[]>databases).map(db => { return { name: db.options['name'], status: db.options['state'] }; });
		} else {
			this._databases = (<string[]>databases).map(db => { return { name: db, status: '-' }; });
		}
		this.databasesLastUpdated = new Date();
		this._onDatabasesUpdated.fire(this._databases);
	}
	/**
	 * Loads the saved connection profile associated with this model. Will prompt for one if
	 * we don't have one or can't find it (it was deleted)
	 */
	private async getConnectionProfile(): Promise<void> {
		if (this._connectionProfile) {
			return;
		}
		let connection: azdata.connection.ConnectionProfile | azdata.connection.Connection | undefined;

	protected createConnectionProfile(): azdata.IConnectionProfile {
		const ipAndPort = parseIpAndPort(this.config?.status.externalEndpoint || '');
		return {
			serverName: `${ipAndPort.ip},${ipAndPort.port}`,
			databaseName: '',
|
||||
authenticationType: 'SqlLogin',
|
||||
providerName: loc.miaaProviderName,
|
||||
connectionName: '',
|
||||
userName: this._miaaInfo.userName || '',
|
||||
password: '',
|
||||
savePassword: true,
|
||||
groupFullName: undefined,
|
||||
saveProfile: true,
|
||||
id: '',
|
||||
groupId: undefined,
|
||||
options: {}
|
||||
};
|
||||
}
|
||||
if (this.info.connectionId) {
|
||||
try {
|
||||
const connections = await azdata.connection.getConnections();
|
||||
const existingConnection = connections.find(conn => conn.connectionId === this.info.connectionId);
|
||||
if (existingConnection) {
|
||||
const credentials = await azdata.connection.getCredentials(this.info.connectionId);
|
||||
if (credentials) {
|
||||
existingConnection.options['password'] = credentials.password;
|
||||
connection = existingConnection;
|
||||
} else {
|
||||
// We need the password so prompt the user for it
|
||||
const connectionProfile: azdata.IConnectionProfile = {
|
||||
serverName: existingConnection.options['serverName'],
|
||||
databaseName: existingConnection.options['databaseName'],
|
||||
authenticationType: existingConnection.options['authenticationType'],
|
||||
providerName: 'MSSQL',
|
||||
connectionName: '',
|
||||
userName: existingConnection.options['user'],
|
||||
password: '',
|
||||
savePassword: false,
|
||||
groupFullName: undefined,
|
||||
saveProfile: true,
|
||||
id: '',
|
||||
groupId: undefined,
|
||||
options: existingConnection.options
|
||||
};
|
||||
connection = await azdata.connection.openConnectionDialog(['MSSQL'], connectionProfile);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// ignore - the connection may not necessarily exist anymore and in that case we'll just reprompt for a connection
|
||||
}
|
||||
}
|
||||
|
||||
protected async promptForConnection(connectionProfile: azdata.IConnectionProfile): Promise<void> {
|
||||
const connectToSqlDialog = new ConnectToMiaaSqlDialog(this.controllerModel, this);
|
||||
connectToSqlDialog.showDialog(loc.connectToMSSql(this.info.name), connectionProfile);
|
||||
let profileFromDialog = await connectToSqlDialog.waitForClose();
|
||||
if (!connection) {
|
||||
// We need the password so prompt the user for it
|
||||
const connectionProfile: azdata.IConnectionProfile = {
|
||||
// TODO chgagnon fill in external IP and port
|
||||
// serverName: (this.registration.externalIp && this.registration.externalPort) ? `${this.registration.externalIp},${this.registration.externalPort}` : '',
|
||||
serverName: '',
|
||||
databaseName: '',
|
||||
authenticationType: 'SqlLogin',
|
||||
providerName: 'MSSQL',
|
||||
connectionName: '',
|
||||
userName: 'sa',
|
||||
password: '',
|
||||
savePassword: true,
|
||||
groupFullName: undefined,
|
||||
saveProfile: true,
|
||||
id: '',
|
||||
groupId: undefined,
|
||||
options: {}
|
||||
};
|
||||
// Weren't able to load the existing connection so prompt user for new one
|
||||
connection = await azdata.connection.openConnectionDialog(['MSSQL'], connectionProfile);
|
||||
}
|
||||
|
||||
if (profileFromDialog) {
|
||||
this.updateConnectionProfile(profileFromDialog);
|
||||
if (connection) {
|
||||
const profile = {
|
||||
// The option name might be different here based on where it came from
|
||||
serverName: connection.options['serverName'] || connection.options['server'],
|
||||
databaseName: connection.options['databaseName'] || connection.options['database'],
|
||||
authenticationType: connection.options['authenticationType'],
|
||||
providerName: 'MSSQL',
|
||||
connectionName: '',
|
||||
userName: connection.options['user'],
|
||||
password: connection.options['password'],
|
||||
savePassword: false,
|
||||
groupFullName: undefined,
|
||||
saveProfile: true,
|
||||
id: connection.connectionId,
|
||||
groupId: undefined,
|
||||
options: connection.options
|
||||
};
|
||||
this.updateConnectionProfile(profile);
|
||||
} else {
|
||||
throw new UserCancelledError();
|
||||
}
|
||||
}
|
||||
|
||||
protected async updateConnectionProfile(connectionProfile: azdata.IConnectionProfile): Promise<void> {
|
||||
private async updateConnectionProfile(connectionProfile: azdata.IConnectionProfile): Promise<void> {
|
||||
this._connectionProfile = connectionProfile;
|
||||
this._activeConnectionId = connectionProfile.id;
|
||||
this.info.connectionId = connectionProfile.id;
|
||||
this._miaaInfo.userName = connectionProfile.userName;
|
||||
await this._treeDataProvider.saveControllers();
|
||||
}
|
||||
}
|
||||
|
||||
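The refresh() changes above hinge on the `Deferred` guard: concurrent callers share one in-flight refresh instead of issuing duplicate `azdata` calls. A minimal self-contained sketch of that pattern follows; apart from Deferred's observed `promise`/`resolve`/`reject` surface, the class and method names here are illustrative, not the extension's actual source:

```typescript
// Minimal sketch of the single-flight refresh pattern used by MiaaModel.
// `doRefresh` is a stand-in for the real fetch-and-fire-events work.
class Deferred<T = void> {
	public promise: Promise<T>;
	public resolve!: (value: T | PromiseLike<T>) => void;
	public reject!: (reason?: unknown) => void;
	constructor() {
		this.promise = new Promise<T>((resolve, reject) => {
			this.resolve = resolve;
			this.reject = reject;
		});
	}
}

class SingleFlightModel {
	private _refreshPromise: Deferred<void> | undefined = undefined;

	public async refresh(): Promise<void> {
		// Only allow one refresh to be happening at a time; later callers
		// await the same promise as the caller that started the work.
		if (this._refreshPromise) {
			return this._refreshPromise.promise;
		}
		this._refreshPromise = new Deferred();
		try {
			await this.doRefresh();
			this._refreshPromise.resolve();
		} catch (err) {
			this._refreshPromise.reject(err);
			throw err;
		} finally {
			// Clear the guard so the next refresh() starts fresh work
			this._refreshPromise = undefined;
		}
	}

	private async doRefresh(): Promise<void> { /* fetch config, fire events */ }
}
```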
@@ -3,215 +3,279 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { PGResourceInfo } from 'arc';
import * as azdata from 'azdata';
import * as azdataExt from 'azdata-ext';
import { ResourceInfo } from 'arc';
import * as vscode from 'vscode';
import * as loc from '../localizedConstants';
import { ConnectToPGSqlDialog } from '../ui/dialogs/connectPGDialog';
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';
import { ControllerModel, Registration } from './controllerModel';
import { parseIpAndPort } from '../common/utils';
import { UserCancelledError } from '../common/api';
import { Registration } from './controllerModel';
import { ResourceModel } from './resourceModel';
import { Deferred } from '../common/promise';

export type EngineSettingsModel = {
parameterName: string | undefined,
value: string | undefined,
description: string | undefined,
min: string | undefined,
max: string | undefined,
options: string | undefined,
type: string | undefined
};
export enum PodRole {
Monitor,
Router,
Shard
}

export interface V1Pod {
'apiVersion'?: string;
'kind'?: string;
'metadata'?: any; // V1ObjectMeta;
'spec'?: any; // V1PodSpec;
'status'?: V1PodStatus;
}

export interface V1PodStatus {
'conditions'?: any[]; // Array<V1PodCondition>;
'containerStatuses'?: Array<V1ContainerStatus>;
'ephemeralContainerStatuses'?: any[]; // Array<V1ContainerStatus>;
'hostIP'?: string;
'initContainerStatuses'?: any[]; // Array<V1ContainerStatus>;
'message'?: string;
'nominatedNodeName'?: string;
'phase'?: string;
'podIP'?: string;
'podIPs'?: any[]; // Array<V1PodIP>;
'qosClass'?: string;
'reason'?: string;
'startTime'?: Date | null;
}

export interface V1ContainerStatus {
'containerID'?: string;
'image'?: string;
'imageID'?: string;
'lastState'?: any; // V1ContainerState;
'name'?: string;
'ready'?: boolean;
'restartCount'?: number;
'started'?: boolean | null;
'state'?: any; // V1ContainerState;
}

export interface DuskyObjectModelsDatabaseService {
'apiVersion'?: string;
'kind'?: string;
'metadata'?: any; // V1ObjectMeta;
'spec'?: any; // DuskyObjectModelsDatabaseServiceSpec;
'status'?: any; // DuskyObjectModelsDatabaseServiceStatus;
'arc'?: any; // DuskyObjectModelsDatabaseServiceArcPayload;
}

export interface V1Status {
'apiVersion'?: string;
'code'?: number | null;
'details'?: any; // V1StatusDetails;
'kind'?: string;
'message'?: string;
'metadata'?: any; // V1ListMeta;
'reason'?: string;
'status'?: string;
'hasObject'?: boolean;
}

export interface DuskyObjectModelsDatabase {
'name'?: string;
'owner'?: string;
'sharded'?: boolean | null;
}

export class PostgresModel extends ResourceModel {
private _config?: azdataExt.PostgresServerShowResult;
public _engineSettings: EngineSettingsModel[] = [];
private readonly _azdataApi: azdataExt.IExtension;
private _service?: DuskyObjectModelsDatabaseService;
private _pods?: V1Pod[];
private readonly _onServiceUpdated = new vscode.EventEmitter<DuskyObjectModelsDatabaseService>();
private readonly _onPodsUpdated = new vscode.EventEmitter<V1Pod[]>();
public onServiceUpdated = this._onServiceUpdated.event;
public onPodsUpdated = this._onPodsUpdated.event;
public serviceLastUpdated?: Date;
public podsLastUpdated?: Date;

private readonly _onConfigUpdated = new vscode.EventEmitter<azdataExt.PostgresServerShowResult>();
public readonly _onEngineSettingsUpdated = new vscode.EventEmitter<EngineSettingsModel[]>();
public onConfigUpdated = this._onConfigUpdated.event;
public onEngineSettingsUpdated = this._onEngineSettingsUpdated.event;
public configLastUpdated?: Date;
public engineSettingsLastUpdated?: Date;

private _refreshPromise?: Deferred<void>;

constructor(_controllerModel: ControllerModel, private _pgInfo: PGResourceInfo, registration: Registration, private _treeDataProvider: AzureArcTreeDataProvider) {
super(_controllerModel, _pgInfo, registration);
this._azdataApi = <azdataExt.IExtension>vscode.extensions.getExtension(azdataExt.extension.name)?.exports;
constructor(info: ResourceInfo, registration: Registration) {
super(info, registration);
}

/** Returns the configuration of Postgres */
public get config(): azdataExt.PostgresServerShowResult | undefined {
return this._config;
/** Returns the service's Kubernetes namespace */
public get namespace(): string | undefined {
return ''; // TODO chgagnon return this.info.namespace;
}

/** Returns the major version of Postgres */
public get engineVersion(): string | undefined {
const kind = this._config?.kind;
return kind
? kind.substring(kind.lastIndexOf('-') + 1)
: undefined;
/** Returns the service's name */
public get name(): string {
return this.info.name;
}

/** Returns the IP address and port of Postgres */
public get endpoint(): { ip: string, port: string } | undefined {
return this._config?.status.externalEndpoint
? parseIpAndPort(this._config.status.externalEndpoint)
: undefined;
/** Returns the service's fully qualified name in the format namespace.name */
public get fullName(): string {
return `${this.namespace}.${this.name}`;
}

/** Returns the scale configuration of Postgres e.g. '3 nodes, 1.5 vCores, 1Gi RAM, 2Gi storage per node' */
public get scaleConfiguration(): string | undefined {
if (!this._config) {
return undefined;
}
/** Returns the service's spec */
public get service(): DuskyObjectModelsDatabaseService | undefined {
return this._service;
}

const cpuLimit = this._config.spec.scheduling?.default?.resources?.limits?.cpu;
const ramLimit = this._config.spec.scheduling?.default?.resources?.limits?.memory;
const cpuRequest = this._config.spec.scheduling?.default?.resources?.requests?.cpu;
const ramRequest = this._config.spec.scheduling?.default?.resources?.requests?.memory;
const storage = this._config.spec.storage?.data?.size;
/** Returns the service's pods */
public get pods(): V1Pod[] | undefined {
return this._pods;
}

// scale.shards was renamed to scale.workers. Check both for backwards compatibility.
const scale = this._config.spec.scale;
const nodes = (scale?.workers ?? scale?.shards ?? 0) + 1; // An extra node for the coordinator
/** Refreshes the model */
public async refresh() {
await Promise.all([
/* TODO enable
this._databaseRouter.getDuskyDatabaseService(this.info.namespace || 'test', this.info.name).then(response => {
this._service = response.body;
this.serviceLastUpdated = new Date();
this._onServiceUpdated.fire(this._service);
}),
this._databaseRouter.getDuskyPods(this.info.namespace || 'test', this.info.name).then(response => {
this._pods = response.body;
this.podsLastUpdated = new Date();
this._onPodsUpdated.fire(this._pods!);
})
*/
]);
}

/**
* Updates the service
* @param func A function of modifications to apply to the service
*/
public async update(_func: (service: DuskyObjectModelsDatabaseService) => void): Promise<DuskyObjectModelsDatabaseService> {
return <any>undefined;
/*
// Get the latest spec of the service in case it has changed
const service = (await this._databaseRouter.getDuskyDatabaseService(this.info.namespace || 'test', this.info.name)).body;
service.status = undefined; // can't update the status
func(service);

return await this._databaseRouter.updateDuskyDatabaseService(this.namespace || 'test', this.name, service).then(r => {
this._service = r.body;
return this._service;
});
*/
}

/** Deletes the service */
public async delete(): Promise<V1Status> {
return <any>undefined;
// return (await this._databaseRouter.deleteDuskyDatabaseService(this.info.namespace || 'test', this.info.name)).body;
}

/** Creates a SQL database in the service */
public async createDatabase(_db: DuskyObjectModelsDatabase): Promise<DuskyObjectModelsDatabase> {
return <any>undefined;
// return (await this._databaseRouter.createDuskyDatabase(this.namespace || 'test', this.name, db)).body;
}

/**
* Returns the IP address and port of the service, preferring external IP over
* internal IP. If either field is not available it will be set to undefined.
*/
public get endpoint(): { ip?: string, port?: number } {
const externalIp = this._service?.status?.externalIP;
const internalIp = this._service?.status?.internalIP;
const externalPort = this._service?.status?.externalPort;
const internalPort = this._service?.status?.internalPort;

return externalIp ? { ip: externalIp, port: externalPort ?? undefined }
: internalIp ? { ip: internalIp, port: internalPort ?? undefined }
: { ip: undefined, port: undefined };
}

/** Returns the service's configuration e.g. '3 nodes, 1.5 vCores, 1GiB RAM, 2GiB storage per node' */
public get configuration(): string {

// TODO: Resource requests and limits can be configured per role. Figure out how
// to display that in the UI. For now, only show the default configuration.
const cpuLimit = this._service?.spec?.scheduling?._default?.resources?.limits?.['cpu'];
const ramLimit = this._service?.spec?.scheduling?._default?.resources?.limits?.['memory'];
const cpuRequest = this._service?.spec?.scheduling?._default?.resources?.requests?.['cpu'];
const ramRequest = this._service?.spec?.scheduling?._default?.resources?.requests?.['memory'];
const storage = this._service?.spec?.storage?.volumeSize;
const nodes = this.pods?.length;

let configuration: string[] = [];
configuration.push(`${nodes} ${nodes > 1 ? loc.nodes : loc.node}`);

if (nodes) {
configuration.push(`${nodes} ${nodes > 1 ? loc.nodes : loc.node}`);
}

// Prefer limits if they're provided, otherwise use requests if they're provided
if (cpuLimit || cpuRequest) {
configuration.push(`${cpuLimit ?? cpuRequest!} ${loc.vCores}`);
configuration.push(`${this.formatCores(cpuLimit ?? cpuRequest!)} ${loc.vCores}`);
}

if (ramLimit || ramRequest) {
configuration.push(`${ramLimit ?? ramRequest!} ${loc.ram}`);
configuration.push(`${this.formatMemory(ramLimit ?? ramRequest!)} ${loc.ram}`);
}

if (storage) {
configuration.push(`${storage} ${loc.storagePerNode}`);
configuration.push(`${this.formatMemory(storage)} ${loc.storagePerNode}`);
}

return configuration.join(', ');
}

/** Refreshes the model */
public async refresh() {
// Only allow one refresh to be happening at a time
if (this._refreshPromise) {
return this._refreshPromise.promise;
}
this._refreshPromise = new Deferred();
let session: azdataExt.AzdataSession | undefined = undefined;
try {
session = await this.controllerModel.acquireAzdataSession();
this._config = (await this._azdataApi.azdata.arc.postgres.server.show(this.info.name, this.controllerModel.azdataAdditionalEnvVars, session)).result;
this.configLastUpdated = new Date();
this._onConfigUpdated.fire(this._config);
this._refreshPromise.resolve();
} catch (err) {
this._refreshPromise.reject(err);
throw err;
} finally {
session?.dispose();
this._refreshPromise = undefined;
/** Given a V1Pod, returns its PodRole or undefined if the role isn't known */
public static getPodRole(pod: V1Pod): PodRole | undefined {
const name = pod.metadata?.name;
const role = name?.substring(name.lastIndexOf('-'))[1];
switch (role) {
case 'm': return PodRole.Monitor;
case 'r': return PodRole.Router;
case 's': return PodRole.Shard;
default: return undefined;
}
}

public async getEngineSettings(): Promise<void> {
if (!this._connectionProfile) {
await this.getConnectionProfile();
}

// We haven't connected yet so do so now and then store the ID for the active connection
if (!this._activeConnectionId) {
const result = await azdata.connection.connect(this._connectionProfile!, false, false);
if (!result.connected) {
throw new Error(result.errorMessage);
}
this._activeConnectionId = result.connectionId;
}

const provider = azdata.dataprotocol.getProvider<azdata.QueryProvider>(this._connectionProfile!.providerName, azdata.DataProviderType.QueryProvider);
const ownerUri = await azdata.connection.getUriForConnection(this._activeConnectionId);

const engineSettings = await provider.runQueryAndReturn(ownerUri, 'select name, setting, short_desc,min_val, max_val, enumvals, vartype from pg_settings');
if (!engineSettings) {
throw new Error('Could not fetch engine settings');
}

const skippedEngineSettings: String[] = [
'archive_command', 'archive_timeout', 'log_directory', 'log_file_mode', 'log_filename', 'restore_command',
'shared_preload_libraries', 'synchronous_commit', 'ssl', 'unix_socket_permissions', 'wal_level'
];

this._engineSettings = [];

engineSettings.rows.forEach(row => {
let rowValues = row.map(c => c.displayValue);
let name = rowValues.shift();
if (!skippedEngineSettings.includes(name!)) {
let result: EngineSettingsModel = {
parameterName: name,
value: rowValues.shift(),
description: rowValues.shift(),
min: rowValues.shift(),
max: rowValues.shift(),
options: rowValues.shift(),
type: rowValues.shift()
};

this._engineSettings.push(result);
}
});

this.engineSettingsLastUpdated = new Date();
this._onEngineSettingsUpdated.fire(this._engineSettings);
}

protected createConnectionProfile(): azdata.IConnectionProfile {
const ipAndPort = parseIpAndPort(this.config?.status.externalEndpoint || '');
return {
serverName: `${ipAndPort.ip},${ipAndPort.port}`,
databaseName: '',
authenticationType: 'SqlLogin',
providerName: loc.postgresProviderName,
connectionName: '',
userName: this._pgInfo.userName || '',
password: '',
savePassword: true,
groupFullName: undefined,
saveProfile: true,
id: '',
groupId: undefined,
options: {
host: `${ipAndPort.ip}`,
port: `${ipAndPort.port}`,
}
};
}

protected async promptForConnection(connectionProfile: azdata.IConnectionProfile): Promise<void> {
const connectToSqlDialog = new ConnectToPGSqlDialog(this.controllerModel, this);
connectToSqlDialog.showDialog(loc.connectToPGSql(this.info.name), connectionProfile);
let profileFromDialog = await connectToSqlDialog.waitForClose();

if (profileFromDialog) {
this.updateConnectionProfile(profileFromDialog);
} else {
throw new UserCancelledError();
/** Given a PodRole, returns its localized name */
public static getPodRoleName(role?: PodRole): string {
switch (role) {
case PodRole.Monitor: return loc.monitor;
case PodRole.Router: return loc.coordinator;
case PodRole.Shard: return loc.worker;
default: return '';
}
}

protected async updateConnectionProfile(connectionProfile: azdata.IConnectionProfile): Promise<void> {
this._connectionProfile = connectionProfile;
this.info.connectionId = connectionProfile.id;
this._pgInfo.userName = connectionProfile.userName;
await this._treeDataProvider.saveControllers();
/** Given a V1Pod returns its status */
public static getPodStatus(pod: V1Pod): string {
const phase = pod.status?.phase;
if (phase !== 'Running') {
return phase ?? '';
}

// Pods can be in the running phase while some
// containers are crashing, so check those too.
for (let c of pod.status?.containerStatuses?.filter(c => !c.ready) ?? []) {
const wReason = c.state?.waiting?.reason;
const tReason = c.state?.terminated?.reason;
if (wReason) { return wReason; }
if (tReason) { return tReason; }
}

return loc.running;
}

/**
* Converts millicores to cores (600m -> 0.6 cores)
* https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#meaning-of-cpu
* @param cores The millicores to format e.g. 600m
*/
private formatCores(cores: string): number {
return cores?.endsWith('m') ? +cores.slice(0, -1) / 1000 : +cores;
}

/**
* Formats the memory to end with 'B' e.g:
* 1 -> 1B
* 1K -> 1KB, 1Ki -> 1KiB
* 1M -> 1MB, 1Mi -> 1MiB
* 1G -> 1GB, 1Gi -> 1GiB
* 1T -> 1TB, 1Ti -> 1TiB
* https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#meaning-of-memory
* @param memory The amount + unit of memory to format e.g. 1K
*/
private formatMemory(memory: string): string {
return memory && !memory.endsWith('B') ? `${memory}B` : memory;
}
}
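The two formatters at the end of PostgresModel normalize Kubernetes resource quantities for display. Extracted here as free functions (same one-line logic as the methods above; only the free-function form is an assumption), their behavior is easy to check standalone:

```typescript
// Free-function forms of PostgresModel's formatters, for illustration.
// formatCores: millicore suffix 'm' divides by 1000; plain numbers pass through.
function formatCores(cores: string): number {
	return cores?.endsWith('m') ? +cores.slice(0, -1) / 1000 : +cores;
}

// formatMemory: append 'B' so Kubernetes suffixes read as byte units.
function formatMemory(memory: string): string {
	return memory && !memory.endsWith('B') ? `${memory}B` : memory;
}

console.log(formatCores('600m')); // 0.6
console.log(formatCores('2'));    // 2
console.log(formatMemory('1Gi')); // "1GiB"
console.log(formatMemory('1GB')); // "1GB" (already ends with 'B')
```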
@@ -4,23 +4,15 @@
*--------------------------------------------------------------------------------------------*/

import { ResourceInfo } from 'arc';
import * as azdata from 'azdata';
import * as vscode from 'vscode';
import { ControllerModel, Registration } from './controllerModel';
import { createCredentialId } from '../common/utils';
import { credentialNamespace } from '../constants';
import { Registration } from './controllerModel';

export abstract class ResourceModel {

private readonly _onRegistrationUpdated = new vscode.EventEmitter<Registration>();
public onRegistrationUpdated = this._onRegistrationUpdated.event;

// The saved connection information
protected _connectionProfile: azdata.IConnectionProfile | undefined = undefined;
// The ID of the active connection used to query the server
protected _activeConnectionId: string | undefined = undefined;

constructor(public readonly controllerModel: ControllerModel, public info: ResourceInfo, private _registration: Registration) { }
constructor(public info: ResourceInfo, private _registration: Registration) { }

public get registration(): Registration {
return this._registration;
@@ -31,57 +23,5 @@ export abstract class ResourceModel {
this._onRegistrationUpdated.fire(this._registration);
}

/**
* Loads the saved connection profile associated with this model. Will prompt for one if
* we don't have one or can't find it (it was deleted)
*/
protected async getConnectionProfile(promptForConnection: boolean = true): Promise<void> {
let connectionProfile: azdata.IConnectionProfile | undefined = this.createConnectionProfile();

// If we have the ID stored then try to retrieve the password from previous connections
if (this.info.connectionId) {
try {
const credentialProvider = await azdata.credentials.getProvider(credentialNamespace);
const credentials = await credentialProvider.readCredential(createCredentialId(this.controllerModel.info.id, this.info.resourceType, this.info.name));
if (credentials.password) {
// Try to connect to verify credentials are still valid
connectionProfile.password = credentials.password;
// If we don't have a username for some reason then just continue on and we'll prompt for the username below
if (connectionProfile.userName) {
const result = await azdata.connection.connect(connectionProfile, false, false);
if (!result.connected) {
if (promptForConnection) {
await this.promptForConnection(connectionProfile);
} else {
throw new Error(result.errorMessage);
}
} else {
this.updateConnectionProfile(connectionProfile);
}
}
}
} catch (err) {
console.warn(`Unexpected error fetching password for instance ${err}`);
// ignore - something happened fetching the password so just reprompt
}
}

if (!connectionProfile?.userName || !connectionProfile?.password) {
if (promptForConnection) {
// Need to prompt user for password since we don't have one stored
await this.promptForConnection(connectionProfile);
} else {
throw new Error('Missing username/password for connection profile');
}

}
}

public abstract refresh(): Promise<void>;

protected abstract createConnectionProfile(): azdata.IConnectionProfile;

protected abstract promptForConnection(connectionProfile: azdata.IConnectionProfile): Promise<void>;

protected abstract updateConnectionProfile(connectionProfile: azdata.IConnectionProfile): Promise<void>;
}
@@ -1,61 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as arc from 'arc';
import * as azdata from 'azdata';
import * as rd from 'resource-deployment';
import { getControllerPassword, getRegisteredDataControllers, reacquireControllerPassword } from '../common/api';
import { throwUnless } from '../common/utils';
import * as loc from '../localizedConstants';
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';

/**
* Class that provides options sources for an Arc Data Controller
*/
export class ArcControllersOptionsSourceProvider implements rd.IOptionsSourceProvider {
readonly id = 'arc.controllers';
constructor(private _treeProvider: AzureArcTreeDataProvider) { }

public async getOptions(): Promise<string[] | azdata.CategoryValue[]> {
const controllers = await getRegisteredDataControllers(this._treeProvider);
throwUnless(controllers !== undefined && controllers.length !== 0, loc.noControllersConnected);
return controllers.map(ci => {
return ci.label;
});
}

public async getVariableValue(variableName: string, controllerLabel: string): Promise<string> {
const controller = (await getRegisteredDataControllers(this._treeProvider)).find(ci => ci.label === controllerLabel);
throwUnless(controller !== undefined, loc.noControllerInfoFound(controllerLabel));
switch (variableName) {
case 'endpoint': return controller.info.url;
case 'username': return controller.info.username;
case 'kubeConfig': return controller.info.kubeConfigFilePath;
case 'clusterContext': return controller.info.kubeClusterContext;
case 'password': return this.getPassword(controller);
default: throw new Error(loc.variableValueFetchForUnsupportedVariable(variableName));
}
}

private async getPassword(controller: arc.DataController): Promise<string> {
let password = await getControllerPassword(this._treeProvider, controller.info);
if (!password) {
password = await reacquireControllerPassword(this._treeProvider, controller.info);
}
throwUnless(password !== undefined, loc.noPasswordFound(controller.label));
return password;
}

public getIsPassword(variableName: string): boolean {
switch (variableName) {
case 'endpoint': return false;
case 'username': return false;
case 'kubeConfig': return false;
case 'clusterContext': return false;
case 'password': return true;
default: throw new Error(loc.isPasswordFetchForUnsupportedVariable(variableName));
}
}
}
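For context, a consumer of this options source first resolves the controller labels and then individual variables per label. A hypothetical wiring, using only the methods shown above (`treeProvider` and the surrounding helper function are illustrative, not part of the extension):

```typescript
// Hypothetical usage of ArcControllersOptionsSourceProvider; `treeProvider`
// is assumed to be an already-initialized AzureArcTreeDataProvider.
async function pickFirstControllerEndpoint(treeProvider: AzureArcTreeDataProvider): Promise<string> {
	const source = new ArcControllersOptionsSourceProvider(treeProvider);
	const options = await source.getOptions();
	// getOptions may return plain strings or azdata.CategoryValue entries
	const label = typeof options[0] === 'string' ? options[0] : options[0].displayName;
	// getIsPassword('endpoint') is false, so this value is safe to display
	return source.getVariableValue('endpoint', label);
}
```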
extensions/arc/src/test/.gitignore (vendored, new file, +2)
@@ -0,0 +1,2 @@
/env
/__pycache__
extensions/arc/src/test/README.md (new file, +21)
@@ -0,0 +1,21 @@
# Tests for deploying Arc resources via Jupyter notebook

## Prerequisites
- Python >= 3.6
- Pip package manager
- Azdata CLI installed and logged into an Arc controller

## Running the tests
### 1. (Optional, recommended) Create and activate a Python virtual environment
- `python -m venv env`
- `source env/bin/activate` (Linux)
- `env\Scripts\activate.bat` (Windows)

### 2. Upgrade pip
- `pip install --upgrade pip`

### 3. Install the dependencies
- `pip install -r requirements.txt`

### 4. Run the tests
- `pytest`
@@ -1,62 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import 'mocha';
import * as path from 'path';
import * as sinon from 'sinon';
import * as yamljs from 'yamljs';
import { getDefaultKubeConfigPath, getKubeConfigClusterContexts, KubeClusterContext } from '../../common/kubeUtils';
import { tryExecuteAction } from '../../common/utils';

const kubeConfig =
{
'contexts': [
{
'context': {
'cluster': 'docker-desktop',
'user': 'docker-desktop'
},
'name': 'docker-for-desktop'
},
{
'context': {
'cluster': 'kubernetes',
'user': 'kubernetes-admin'
},
'name': 'kubernetes-admin@kubernetes'
}
],
'current-context': 'docker-for-desktop'
};
describe('KubeUtils', function (): void {
const configFile = 'kubeConfig';

afterEach('KubeUtils cleanup', () => {
sinon.restore();
});

it('getDefaultKubeConfigPath', async () => {
getDefaultKubeConfigPath().should.endWith(path.join('.kube', 'config'));
});

describe('get Kube Config Cluster Contexts', () => {
it('success', async () => {
sinon.stub(yamljs, 'load').returns(<any>kubeConfig);
const verifyContexts = (contexts: KubeClusterContext[], testName: string) => {
contexts.length.should.equal(2, `test: ${testName} failed`);
contexts[0].name.should.equal('docker-for-desktop', `test: ${testName} failed`);
contexts[0].isCurrentContext.should.be.true(`test: ${testName} failed`);
contexts[1].name.should.equal('kubernetes-admin@kubernetes', `test: ${testName} failed`);
contexts[1].isCurrentContext.should.be.false(`test: ${testName} failed`);
};
verifyContexts(await getKubeConfigClusterContexts(configFile), 'getKubeConfigClusterContexts');
});
it('throws error when unable to load config file', async () => {
const error = new Error('unknown error accessing file');
sinon.stub(yamljs, 'load').throws(error); // simulate an error thrown from config file load
((await tryExecuteAction(() => getKubeConfigClusterContexts(configFile))).error).should.equal(error, `test: getKubeConfigClusterContexts failed`);
});
});
});
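The deleted test above pins down the contract of `getKubeConfigClusterContexts`: parse the kubeconfig with yamljs and mark the entry matching `current-context`. A sketch consistent with those assertions (the `KubeClusterContext` shape is inferred from the test; the real implementation lived in common/kubeUtils and may differ):

```typescript
// Sketch of the kubeconfig parsing the test exercises; not the extension's
// exact source. yamljs.load(path) reads and parses the YAML file, and the
// test simulates both its success and failure paths.
import * as os from 'os';
import * as path from 'path';
import * as yamljs from 'yamljs';

interface KubeClusterContext {
	name: string;
	isCurrentContext: boolean;
}

function getDefaultKubeConfigPath(): string {
	return path.join(os.homedir(), '.kube', 'config');
}

async function getKubeConfigClusterContexts(configFile: string): Promise<KubeClusterContext[]> {
	const config = yamljs.load(configFile); // throws if the file can't be read
	const current = config['current-context'];
	return (config['contexts'] ?? []).map((c: any) => ({
		name: c.name,
		isCurrentContext: c.name === current
	}));
}
```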
@@ -7,7 +7,7 @@ import { Deferred } from '../../common/promise';

describe('Deferred', () => {
it('Then should be called upon resolution', function (done): void {
const deferred = new Deferred<void>();
const deferred = new Deferred();
deferred.then(() => {
done();
});

@@ -7,7 +7,7 @@ import { ResourceType } from 'arc';
import 'mocha';
import * as should from 'should';
import * as vscode from 'vscode';
import { getAzurecoreApi, getConnectionModeDisplayText, getDatabaseStateDisplayText, getErrorMessage, getResourceTypeIcon, parseEndpoint, parseIpAndPort, promptAndConfirmPassword, promptForInstanceDeletion, resourceTypeToDisplayName, convertToGibibyteString } from '../../common/utils';
import { getAzurecoreApi, getConnectionModeDisplayText, getDatabaseStateDisplayText, getErrorMessage, getResourceTypeIcon, parseEndpoint, parseInstanceName, parseIpAndPort, promptAndConfirmPassword, promptForResourceDeletion, resourceTypeToDisplayName } from '../../common/utils';
import { ConnectionMode as ConnectionMode, IconPathHelper } from '../../constants';
import * as loc from '../../localizedConstants';
import { MockInputBox } from '../stubs';
@@ -47,6 +47,24 @@ describe('parseEndpoint Method Tests', function (): void {
});
});

describe('parseInstanceName Method Tests', () => {
it('Should parse valid instanceName with namespace correctly', function (): void {
should(parseInstanceName('mynamespace_myinstance')).equal('myinstance');
});

it('Should parse valid instanceName without namespace correctly', function (): void {
should(parseInstanceName('myinstance')).equal('myinstance');
});

it('Should return empty string when undefined value passed in', function (): void {
should(parseInstanceName(undefined)).equal('');
});

it('Should return empty string when empty string value passed in', function (): void {
should(parseInstanceName('')).equal('');
});
});

describe('getAzurecoreApi Method Tests', function () {
it('Should get azurecore API correctly', function (): void {
should(getAzurecoreApi()).not.be.undefined();
@@ -122,7 +140,7 @@ describe('promptForResourceDeletion Method Tests', function (): void {
});

it('Resolves as true when value entered is correct', function (done): void {
promptForInstanceDeletion('myname').then((value: boolean) => {
promptForResourceDeletion('myname').then((value: boolean) => {
value ? done() : done(new Error('Expected return value to be true'));
});
mockInputBox.value = 'myname';
@@ -130,14 +148,14 @@ describe('promptForResourceDeletion Method Tests', function (): void {
});

it('Resolves as false when input box is closed early', function (done): void {
promptForInstanceDeletion('myname').then((value: boolean) => {
promptForResourceDeletion('myname').then((value: boolean) => {
!value ? done() : done(new Error('Expected return value to be false'));
});
mockInputBox.hide();
});

it('Validation message is set when value entered is incorrect', async function (): Promise<void> {
promptForInstanceDeletion('myname');
promptForResourceDeletion('myname');
mockInputBox.value = 'wrong value';
await mockInputBox.triggerAccept();
should(mockInputBox.validationMessage).not.be.equal('', 'Validation message should not be empty after incorrect value entered');
@@ -242,6 +260,22 @@ describe('getErrorMessage Method Tests', function () {
});
});

describe('parseInstanceName Method Tests', function () {
it('2 part name', function (): void {
const name = 'MyName';
should(parseInstanceName(`MyNamespace_${name}`)).equal(name);
});

it('1 part name', function (): void {
const name = 'MyName';
should(parseInstanceName(name)).equal(name);
});

it('Invalid name', function (): void {
should(() => parseInstanceName('Some_Invalid_Name')).throwError();
});
});

describe('parseIpAndPort', function (): void {
it('Valid address', function (): void {
const ip = '127.0.0.1';
@@ -254,116 +288,3 @@ describe('parseIpAndPort', function (): void {
should(() => parseIpAndPort(ip)).throwError();
});
});

describe('convertToGibibyteString Method Tests', function () {
const tolerance = 0.001;
it('Value is in KB', function (): void {
const value = '44000K';
const conversion = 0.04097819;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in MB', function (): void {
const value = '1100M';
const conversion = 1.02445483;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in GB', function (): void {
const value = '1G';
const conversion = 0.931322575;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in TB', function (): void {
const value = '1T';
const conversion = 931.32257;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in PB', function (): void {
const value = '0.1P';
const conversion = 93132.25746;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in EB', function (): void {
const value = '1E';
const conversion = 931322574.6154;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in mB', function (): void {
const value = '1073741824000m';
const conversion = 1;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in B', function (): void {
const value = '1073741824';
const conversion = 1;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in KiB', function (): void {
const value = '1048576Ki';
const conversion = 1;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in MiB', function (): void {
const value = '256Mi';
const conversion = 0.25;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in GiB', function (): void {
const value = '1000Gi';
const conversion = 1000;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in TiB', function (): void {
const value = '1Ti';
const conversion = 1024;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in PiB', function (): void {
const value = '1Pi';
const conversion = 1048576;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is in EiB', function (): void {
const value = '1Ei';
const conversion = 1073741824;
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
should(check).lessThanOrEqual(tolerance);
});

it('Value is empty', function (): void {
const value = '';
const error = new Error(`Value provided is not a valid Kubernetes resource quantity`);
should(() => convertToGibibyteString(value)).throwError(error);
});

it('Value is not a valid Kubernetes resource quantity', function (): void {
const value = '1J';
const error = new Error(`${value} is not a valid Kubernetes resource quantity`);
should(() => convertToGibibyteString(value)).throwError(error);
});
});
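Those tests fully specify `convertToGibibyteString`: decimal suffixes (K, M, G, ...) are powers of 1000, binary suffixes (Ki, Mi, Gi, ...) are powers of 1024, 'm' is millibytes, and the result is expressed in gibibytes (2^30 bytes). A sketch that passes the expectations above (the real implementation lives in common/utils and may be structured differently):

```typescript
// Sketch consistent with the convertToGibibyteString tests above.
function convertToGibibyteString(value: string): string {
	// Two-letter binary suffixes first so 'Ki' isn't consumed as 'K'.
	const match = /^(\d*\.?\d+)(Ki|Mi|Gi|Ti|Pi|Ei|m|K|M|G|T|P|E)?$/.exec(value);
	if (!match) {
		throw new Error(value
			? `${value} is not a valid Kubernetes resource quantity`
			: 'Value provided is not a valid Kubernetes resource quantity');
	}
	const multipliers: { [suffix: string]: number } = {
		'm': 1e-3, '': 1, 'K': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12, 'P': 1e15, 'E': 1e18,
		'Ki': 2 ** 10, 'Mi': 2 ** 20, 'Gi': 2 ** 30, 'Ti': 2 ** 40, 'Pi': 2 ** 50, 'Ei': 2 ** 60
	};
	const bytes = parseFloat(match[1]) * multipliers[match[2] ?? ''];
	return (bytes / 2 ** 30).toString();
}

console.log(convertToGibibyteString('1100M')); // ~1.02445483
console.log(convertToGibibyteString('256Mi')); // 0.25
```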
@@ -1,91 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as azdataExt from 'azdata-ext';

/**
* Simple fake Azdata Api used to mock the API during tests
*/
export class FakeAzdataApi implements azdataExt.IAzdataApi {

public postgresInstances: azdataExt.PostgresServerListResult[] = [];
public miaaInstances: azdataExt.SqlMiListResult[] = [];

//
// API Implementation
//
public get arc() {
const self = this;
return {
dc: {
create(_namespace: string, _name: string, _connectivityMode: string, _resourceGroup: string, _location: string, _subscription: string, _profileName?: string, _storageClass?: string): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); },
endpoint: {
async list(): Promise<azdataExt.AzdataOutput<azdataExt.DcEndpointListResult[]>> { return <any>{ result: [] }; }
},
config: {
list(): Promise<azdataExt.AzdataOutput<azdataExt.DcConfigListResult[]>> { throw new Error('Method not implemented.'); },
async show(): Promise<azdataExt.AzdataOutput<azdataExt.DcConfigShowResult>> { return <any>{ result: undefined! }; }
}
},
postgres: {
server: {
delete(_name: string): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); },
async list(): Promise<azdataExt.AzdataOutput<azdataExt.PostgresServerListResult[]>> { return <any>{ result: self.postgresInstances }; },
show(_name: string): Promise<azdataExt.AzdataOutput<azdataExt.PostgresServerShowResult>> { throw new Error('Method not implemented.'); },
edit(
_name: string,
_args: {
adminPassword?: boolean,
coresLimit?: string,
coresRequest?: string,
engineSettings?: string,
extensions?: string,
memoryLimit?: string,
memoryRequest?: string,
noWait?: boolean,
port?: number,
replaceEngineSettings?: boolean,
workers?: number
},
_engineVersion?: string,
_additionalEnvVars?: azdataExt.AdditionalEnvVars
): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); }
}
},
sql: {
mi: {
delete(_name: string): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); },
async list(): Promise<azdataExt.AzdataOutput<azdataExt.SqlMiListResult[]>> { return <any>{ result: self.miaaInstances }; },
show(_name: string): Promise<azdataExt.AzdataOutput<azdataExt.SqlMiShowResult>> { throw new Error('Method not implemented.'); },
edit(
_name: string,
_args: {
coresLimit?: string,
coresRequest?: string,
memoryLimit?: string,
memoryRequest?: string,
noWait?: boolean
}): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); }
}
}
};
}
getPath(): Promise<string> {
throw new Error('Method not implemented.');
}
login(_endpoint: string, _username: string, _password: string): Promise<azdataExt.AzdataOutput<void>> {
return <any>undefined;
}
acquireSession(_endpoint: string, _username: string, _password: string): Promise<azdataExt.AzdataSession> {
return Promise.resolve({ dispose: () => { } });
}
version(): Promise<azdataExt.AzdataOutput<string>> {
throw new Error('Method not implemented.');
}
getSemVersion(): any {
throw new Error('Method not implemented.');
}

}
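A fake like this is typically swapped in through the same `vscode.extensions.getExtension` seam the models read the API from, mirroring the sinon stubs in the controllerModel tests below. A hypothetical test setup (the instance data shown is illustrative, not from the real suite):

```typescript
// Hypothetical test wiring for FakeAzdataApi via the getExtension seam.
// `sinon` and `vscode` are assumed to be imported as in the tests below.
const fakeApi = new FakeAzdataApi();
fakeApi.miaaInstances = [<azdataExt.SqlMiListResult>{ name: 'miaa-1' }]; // partial, cast for the test
sinon.stub(vscode.extensions, 'getExtension').returns(<any>{
	// IExtension exposes the API under its `azdata` property
	exports: { azdata: fakeApi }
});
```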
@@ -11,7 +11,7 @@ import { AzureArcTreeDataProvider } from '../../ui/tree/azureArcTreeDataProvider
export class FakeControllerModel extends ControllerModel {

constructor(treeDataProvider?: AzureArcTreeDataProvider, info?: Partial<ControllerInfo>, password?: string) {
const _info: ControllerInfo = Object.assign({ id: uuid(), url: '', kubeConfigFilePath: '', kubeClusterContext: '', name: '', username: '', rememberPassword: false, resources: [] }, info);
const _info: ControllerInfo = Object.assign({ id: uuid(), url: '', name: '', username: '', rememberPassword: false, resources: [] }, info);
super(treeDataProvider!, _info, password);
}

@@ -11,9 +11,7 @@ import * as sinon from 'sinon';
|
||||
import * as TypeMoq from 'typemoq';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import * as vscode from 'vscode';
|
||||
import * as loc from '../../localizedConstants';
|
||||
import * as kubeUtils from '../../common/kubeUtils';
|
||||
import { UserCancelledError } from '../../common/api';
|
||||
import { UserCancelledError } from '../../common/utils';
|
||||
import { ControllerModel } from '../../models/controllerModel';
|
||||
import { ConnectToControllerDialog } from '../../ui/dialogs/connectControllerDialog';
|
||||
import { AzureArcTreeDataProvider } from '../../ui/tree/azureArcTreeDataProvider';
|
||||
@@ -35,19 +33,17 @@ describe('ControllerModel', function (): void {
|
||||
|
||||
beforeEach(function (): void {
|
||||
sinon.stub(ConnectToControllerDialog.prototype, 'showDialog');
|
||||
sinon.stub(kubeUtils, 'getKubeConfigClusterContexts').resolves([{ name: 'currentCluster', isCurrentContext: true }]);
|
||||
sinon.stub(vscode.window, 'showErrorMessage').resolves(<any>loc.yes);
|
||||
});
|
||||
|
||||
it('Rejected with expected error when user cancels', async function (): Promise<void> {
|
||||
// Returning an undefined model here indicates that the dialog closed without clicking "Ok" - usually through the user clicking "Cancel"
|
||||
sinon.stub(ConnectToControllerDialog.prototype, 'waitForClose').returns(Promise.resolve(undefined));
|
||||
const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', kubeConfigFilePath: '/path/to/.kube/config', kubeClusterContext: 'currentCluster', username: 'admin', name: 'arc', rememberPassword: true, resources: [] });
|
||||
await should(model.acquireAzdataSession()).be.rejectedWith(new UserCancelledError(loc.userCancelledError));
|
||||
const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', username: 'admin', name: 'arc', rememberPassword: true, resources: [] });
|
||||
await should(model.azdataLogin()).be.rejectedWith(new UserCancelledError());
|
||||
});
|
||||
|
||||
it('Reads password from cred store', async function (): Promise<void> {
|
||||
const password = 'password123'; // [SuppressMessage("Microsoft.Security", "CS001:SecretInline", Justification="Test password, not actually used")]
|
||||
const password = 'password123';
|
||||
|
||||
// Set up cred store to return our password
|
||||
const credProviderMock = TypeMoq.Mock.ofType<azdata.CredentialProvider>();
|
||||
@@ -58,17 +54,17 @@ describe('ControllerModel', function (): void {
|
||||
|
||||
const azdataExtApiMock = TypeMoq.Mock.ofType<azdataExt.IExtension>();
|
||||
const azdataMock = TypeMoq.Mock.ofType<azdataExt.IAzdataApi>();
|
||||
azdataMock.setup(x => x.acquireSession(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
|
||||
azdataMock.setup(x => x.login(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
|
||||
azdataExtApiMock.setup(x => x.azdata).returns(() => azdataMock.object);
|
||||
sinon.stub(vscode.extensions, 'getExtension').returns(<any>{ exports: azdataExtApiMock.object });
|
||||
const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', kubeConfigFilePath: '/path/to/.kube/config', kubeClusterContext: 'currentCluster', username: 'admin', name: 'arc', rememberPassword: true, resources: [] });
|
||||
const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', username: 'admin', name: 'arc', rememberPassword: true, resources: [] });
|
||||
|
||||
await model.acquireAzdataSession();
|
||||
azdataMock.verify(x => x.acquireSession(TypeMoq.It.isAny(), TypeMoq.It.isAny(), password, TypeMoq.It.isAny()), TypeMoq.Times.once());
|
||||
await model.azdataLogin();
|
||||
azdataMock.verify(x => x.login(TypeMoq.It.isAny(), TypeMoq.It.isAny(), password), TypeMoq.Times.once());
|
||||
});
|
||||
|
||||
it('Prompt for password when not in cred store', async function (): Promise<void> {
const password = 'password123'; // [SuppressMessage("Microsoft.Security", "CS001:SecretInline", Justification="Stub value for testing")]
const password = 'password123';

// Set up cred store to return empty password
const credProviderMock = TypeMoq.Mock.ofType<azdata.CredentialProvider>();
@@ -79,22 +75,22 @@ describe('ControllerModel', function (): void {

const azdataExtApiMock = TypeMoq.Mock.ofType<azdataExt.IExtension>();
const azdataMock = TypeMoq.Mock.ofType<azdataExt.IAzdataApi>();
azdataMock.setup(x => x.acquireSession(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
azdataMock.setup(x => x.login(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
azdataExtApiMock.setup(x => x.azdata).returns(() => azdataMock.object);
sinon.stub(vscode.extensions, 'getExtension').returns(<any>{ exports: azdataExtApiMock.object });

// Set up dialog to return new model with our password
const newModel = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', kubeConfigFilePath: '/path/to/.kube/config', kubeClusterContext: 'currentCluster', username: 'admin', name: 'arc', rememberPassword: true, resources: [] }, password);
const newModel = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', username: 'admin', name: 'arc', rememberPassword: true, resources: [] }, password);
sinon.stub(ConnectToControllerDialog.prototype, 'waitForClose').returns(Promise.resolve({ controllerModel: newModel, password: password }));

const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', kubeConfigFilePath: '/path/to/.kube/config', kubeClusterContext: 'currentCluster', username: 'admin', name: 'arc', rememberPassword: true, resources: [] });
const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', username: 'admin', name: 'arc', rememberPassword: true, resources: [] });

await model.acquireAzdataSession();
azdataMock.verify(x => x.acquireSession(TypeMoq.It.isAny(), TypeMoq.It.isAny(), password, TypeMoq.It.isAny()), TypeMoq.Times.once());
await model.azdataLogin();
azdataMock.verify(x => x.login(TypeMoq.It.isAny(), TypeMoq.It.isAny(), password), TypeMoq.Times.once());
});
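sinon.stub on a prototype, as done with ConnectToControllerDialog.prototype.waitForClose above, swaps the method for every instance until restored, which lets the test supply a canned dialog result instead of showing real UI. A small sketch with a hypothetical Dialog class:

import * as sinon from 'sinon';

class Dialog {
	public async waitForClose(): Promise<string | undefined> {
		return undefined; // the real dialog would show UI and wait for user input
	}
}

async function demo(): Promise<void> {
	// Every instance created after this point returns the canned value.
	const stub = sinon.stub(Dialog.prototype, 'waitForClose').resolves('password123');

	const result = await new Dialog().waitForClose();
	console.log(result, stub.calledOnce); // 'password123' true

	sinon.restore(); // undo the stub so later tests see the real method again
}

demo().catch(console.error);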
it('Prompt for password when rememberPassword is true but prompt reconnect is true', async function (): Promise<void> {
const password = 'password123'; // [SuppressMessage("Microsoft.Security", "CS001:SecretInline", Justification="Stub value for testing")]
const password = 'password123';
// Set up cred store to return a password to start with
const credProviderMock = TypeMoq.Mock.ofType<azdata.CredentialProvider>();
credProviderMock.setup(x => x.readCredential(TypeMoq.It.isAny())).returns(() => Promise.resolve({ credentialId: 'id', password: 'originalPassword' }));
@@ -104,23 +100,23 @@ describe('ControllerModel', function (): void {

const azdataExtApiMock = TypeMoq.Mock.ofType<azdataExt.IExtension>();
const azdataMock = TypeMoq.Mock.ofType<azdataExt.IAzdataApi>();
azdataMock.setup(x => x.acquireSession(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
azdataMock.setup(x => x.login(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
azdataExtApiMock.setup(x => x.azdata).returns(() => azdataMock.object);
sinon.stub(vscode.extensions, 'getExtension').returns(<any>{ exports: azdataExtApiMock.object });

// Set up dialog to return new model with our new password from the reprompt
const newModel = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', kubeConfigFilePath: '/path/to/.kube/config', kubeClusterContext: 'currentCluster', username: 'admin', name: 'arc', rememberPassword: true, resources: [] }, password);
const newModel = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', username: 'admin', name: 'arc', rememberPassword: true, resources: [] }, password);
const waitForCloseStub = sinon.stub(ConnectToControllerDialog.prototype, 'waitForClose').returns(Promise.resolve({ controllerModel: newModel, password: password }));

const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', kubeConfigFilePath: '/path/to/.kube/config', kubeClusterContext: 'currentCluster', username: 'admin', name: 'arc', rememberPassword: true, resources: [] });
const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', username: 'admin', name: 'arc', rememberPassword: true, resources: [] });

await model.acquireAzdataSession(true);
await model.azdataLogin(true);
should(waitForCloseStub.called).be.true('waitForClose should have been called');
azdataMock.verify(x => x.acquireSession(TypeMoq.It.isAny(), TypeMoq.It.isAny(), password, TypeMoq.It.isAny()), TypeMoq.Times.once());
azdataMock.verify(x => x.login(TypeMoq.It.isAny(), TypeMoq.It.isAny(), password), TypeMoq.Times.once());
});
it('Prompt for password when we already have a password but prompt reconnect is true', async function (): Promise<void> {
const password = 'password123'; // [SuppressMessage("Microsoft.Security", "CS001:SecretInline", Justification="Stub value for testing")]
const password = 'password123';
// Set up cred store to return a password to start with
const credProviderMock = TypeMoq.Mock.ofType<azdata.CredentialProvider>();
credProviderMock.setup(x => x.readCredential(TypeMoq.It.isAny())).returns(() => Promise.resolve({ credentialId: 'id', password: 'originalPassword' }));
@@ -130,20 +126,20 @@ describe('ControllerModel', function (): void {

const azdataExtApiMock = TypeMoq.Mock.ofType<azdataExt.IExtension>();
const azdataMock = TypeMoq.Mock.ofType<azdataExt.IAzdataApi>();
azdataMock.setup(x => x.acquireSession(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
azdataMock.setup(x => x.login(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
azdataExtApiMock.setup(x => x.azdata).returns(() => azdataMock.object);
sinon.stub(vscode.extensions, 'getExtension').returns(<any>{ exports: azdataExtApiMock.object });

// Set up dialog to return new model with our new password from the reprompt
const newModel = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', kubeConfigFilePath: '/path/to/.kube/config', kubeClusterContext: 'currentCluster', username: 'admin', name: 'arc', rememberPassword: true, resources: [] }, password);
const newModel = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', username: 'admin', name: 'arc', rememberPassword: true, resources: [] }, password);
const waitForCloseStub = sinon.stub(ConnectToControllerDialog.prototype, 'waitForClose').returns(Promise.resolve({ controllerModel: newModel, password: password }));

// Set up original model with a password
const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', kubeConfigFilePath: '/path/to/.kube/config', kubeClusterContext: 'currentCluster', username: 'admin', name: 'arc', rememberPassword: true, resources: [] }, 'originalPassword');
const model = new ControllerModel(new AzureArcTreeDataProvider(mockExtensionContext.object), { id: uuid(), url: '127.0.0.1', username: 'admin', name: 'arc', rememberPassword: true, resources: [] }, 'originalPassword');

await model.acquireAzdataSession(true);
await model.azdataLogin(true);
should(waitForCloseStub.called).be.true('waitForClose should have been called');
azdataMock.verify(x => x.acquireSession(TypeMoq.It.isAny(), TypeMoq.It.isAny(), password, TypeMoq.It.isAny()), TypeMoq.Times.once());
azdataMock.verify(x => x.login(TypeMoq.It.isAny(), TypeMoq.It.isAny(), password), TypeMoq.Times.once());
});
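The two reprompt tests above pin the same behavior from different starting states: whenever the reconnect flag is passed, the cached or remembered password is ignored and the dialog result wins. A hedged sketch of that decision logic; the names are illustrative, not the extension's actual code:

async function resolvePassword(
	cachedPassword: string | undefined,
	promptReconnect: boolean,
	promptUser: () => Promise<string>): Promise<string> {
	if (!promptReconnect && cachedPassword) {
		return cachedPassword; // reuse the remembered password on a normal connect
	}
	// Forced reconnect, or nothing cached: go back to the dialog.
	return promptUser();
}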
it('Model values are updated correctly when modified during reconnect', async function (): Promise<void> {
@@ -158,7 +154,7 @@ describe('ControllerModel', function (): void {

const azdataExtApiMock = TypeMoq.Mock.ofType<azdataExt.IExtension>();
const azdataMock = TypeMoq.Mock.ofType<azdataExt.IAzdataApi>();
azdataMock.setup(x => x.acquireSession(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
azdataMock.setup(x => x.login(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => <any>Promise.resolve(undefined));
azdataExtApiMock.setup(x => x.azdata).returns(() => azdataMock.object);
sinon.stub(vscode.extensions, 'getExtension').returns(<any>{ exports: azdataExtApiMock.object });

@@ -169,8 +165,6 @@ describe('ControllerModel', function (): void {
{
id: uuid(),
url: '127.0.0.1',
kubeConfigFilePath: '/path/to/.kube/config',
kubeClusterContext: 'currentCluster',
username: 'admin',
name: 'arc',
rememberPassword: false,
@@ -183,8 +177,6 @@ describe('ControllerModel', function (): void {
const newInfo: ControllerInfo = {
id: model.info.id, // The ID stays the same since we're just re-entering information for the same model
url: 'newUrl',
kubeConfigFilePath: '/path/to/.kube/config',
kubeClusterContext: 'currentCluster',
username: 'newUser',
name: 'newName',
rememberPassword: true,
@@ -199,11 +191,10 @@ describe('ControllerModel', function (): void {
const waitForCloseStub = sinon.stub(ConnectToControllerDialog.prototype, 'waitForClose').returns(Promise.resolve(
{ controllerModel: newModel, password: newPassword }));

await model.acquireAzdataSession(true);
await model.azdataLogin(true);
should(waitForCloseStub.called).be.true('waitForClose should have been called');
should((await treeDataProvider.getChildren()).length).equal(1, 'Tree Data provider should still only have 1 node');
should(model.info).deepEqual(newInfo, 'Model info should have been updated');

});
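This last test asserts the reconnect invariant: the tree keeps a single node and the model takes every edited field while the id stays stable. One way that merge could look, sketched over a ControllerInfo trimmed to the fields visible in this diff (an assumption, not the real type):

interface ControllerInfo {
	id: string;
	url: string;
	username: string;
	name: string;
	rememberPassword: boolean;
	resources: string[];
}

function applyReconnectEdit(current: ControllerInfo, edited: ControllerInfo): ControllerInfo {
	// Keep the id stable so the tree provider updates the existing node
	// instead of inserting a second one.
	return { ...edited, id: current.id };
}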
});
2
extensions/arc/src/test/requirements.txt
Normal file
@@ -0,0 +1,2 @@
pytest==5.3.5
notebook==6.0.3
Some files were not shown because too many files have changed in this diff