mirror of https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-17 18:46:43 -05:00
Compare commits
17 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 295004c42e | |
| | 45333e8852 | |
| | e1280022d6 | |
| | c90d630703 | |
| | bfcdf28cbc | |
| | 1c694cbd3b | |
| | 2b0aba119d | |
| | 59cc8e5d7f | |
| | f8da5bd1c7 | |
| | 26667eb1dc | |
| | 5e6a9ca9fa | |
| | 4ecf5fae84 | |
| | f76500d5f0 | |
| | 2a212edf1e | |
| | 7bcd19987b | |
| | 0ce5f02dd2 | |
| | 0831e9e161 | |
@@ -1,9 +0,0 @@
{
  "tool": "Credential Scanner",
  "suppressions": [
    {
      "file": "src\\vs\\base\\test\\common\\uri.test.ts",
      "_justification": "External code"
    }
  ]
}
@@ -1,16 +0,0 @@
**/vs/nls.build.js
**/vs/nls.js
**/vs/css.build.js
**/vs/css.js
**/vs/loader.js
**/promise-polyfill/**
**/insane/**
**/marked/**
**/test/**/*.js
**/node_modules/**
**/vscode-api-tests/testWorkspace/**
**/vscode-api-tests/testWorkspace2/**
**/extensions/**/out/**
**/extensions/**/build/**
**/extensions/markdown-language-features/media/**
**/extensions/typescript-basics/test/colorize-fixtures/**
768 .eslintrc.json
@@ -1,752 +1,20 @@
|
||||
{
|
||||
"root": true,
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 6,
|
||||
"sourceType": "module"
|
||||
},
|
||||
"plugins": [
|
||||
"@typescript-eslint",
|
||||
"jsdoc"
|
||||
],
|
||||
"rules": {
|
||||
"constructor-super": "warn",
|
||||
"curly": "warn",
|
||||
"eqeqeq": "warn",
|
||||
"no-buffer-constructor": "warn",
|
||||
"no-caller": "warn",
|
||||
"no-debugger": "warn",
|
||||
"no-duplicate-case": "warn",
|
||||
"no-duplicate-imports": "warn",
|
||||
"no-eval": "warn",
|
||||
"no-extra-semi": "warn",
|
||||
"no-new-wrappers": "warn",
|
||||
"no-redeclare": "off",
|
||||
"no-sparse-arrays": "warn",
|
||||
"no-throw-literal": "warn",
|
||||
"no-unsafe-finally": "warn",
|
||||
"no-unused-labels": "warn",
|
||||
"no-restricted-globals": [
|
||||
"warn",
|
||||
"name",
|
||||
"length",
|
||||
"event",
|
||||
"closed",
|
||||
"external",
|
||||
"status",
|
||||
"origin",
|
||||
"orientation"
|
||||
], // non-complete list of globals that are easy to access unintentionally
|
||||
"no-var": "warn",
|
||||
"jsdoc/no-types": "warn",
|
||||
"semi": "off",
|
||||
"@typescript-eslint/semi": "warn",
|
||||
"@typescript-eslint/class-name-casing": "warn",
|
||||
"code-no-unused-expressions": [
|
||||
"warn",
|
||||
{
|
||||
"allowTernary": true
|
||||
}
|
||||
],
|
||||
"code-translation-remind": "warn",
|
||||
"code-no-nls-in-standalone-editor": "warn",
|
||||
"code-no-standalone-editor": "warn",
|
||||
"code-no-unexternalized-strings": "warn",
|
||||
"code-layering": [
|
||||
"warn",
|
||||
{
|
||||
"common": [],
|
||||
"node": [
|
||||
"common"
|
||||
],
|
||||
"browser": [
|
||||
"common"
|
||||
],
|
||||
"electron-main": [
|
||||
"common",
|
||||
"node"
|
||||
],
|
||||
"electron-browser": [
|
||||
"common",
|
||||
"browser",
|
||||
"node"
|
||||
]
|
||||
}
|
||||
],
|
||||
"code-import-patterns": [
|
||||
"warn",
|
||||
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
// !!! Do not relax these rules !!!
|
||||
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
{
|
||||
"target": "**/{vs,sql}/base/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/test/common/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/test/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/node/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,node}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
// vs/base/test/browser contains tests for vs/base/browser
|
||||
"target": "**/{vs,sql}/base/test/browser/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/test/{common,browser}/**",
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/parts/*/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser}/**",
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/node/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,node}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,node}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/electron-browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-browser}/**",
|
||||
"!path", // node modules (except path where we have our own impl)
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/electron-main/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
|
||||
"!path", // node modules (except path where we have our own impl)
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/parts/*/common/**",
|
||||
"**/{vs,sql}/platform/*/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/test/common/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"typemoq",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/test/common/**",
|
||||
"**/{vs,sql}/base/parts/*/common/**",
|
||||
"**/{vs,sql}/platform/*/common/**",
|
||||
"**/{vs,sql}/platform/*/test/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/node/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"**/{vs,sql}/base/{common,node}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,node}/**",
|
||||
"**/{vs,sql}/platform/*/{common,node}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/electron-browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser,node}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser,node,electron-browser}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/electron-main/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"**/{vs,sql}/base/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/platform/*/{common,node,electron-main}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/test/browser/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"typemoq",
|
||||
"sinon",
|
||||
"azdata",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/test/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/test/{common,browser}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/worker/**",
|
||||
"**/{vs,sql}/platform/*/common/**",
|
||||
"**/{vs,sql}/editor/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/test/common/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/platform/*/common/**",
|
||||
"**/{vs,sql}/platform/*/test/common/**",
|
||||
"**/{vs,sql}/editor/common/**",
|
||||
"**/{vs,sql}/editor/test/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/{common,browser}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/test/browser/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/test/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/test/{common,browser}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/standalone/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/platform/*/common/**",
|
||||
"**/{vs,sql}/editor/common/**",
|
||||
"**/{vs,sql}/editor/standalone/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/standalone/test/common/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/platform/*/common/**",
|
||||
"**/{vs,sql}/platform/*/test/common/**",
|
||||
"**/{vs,sql}/editor/common/**",
|
||||
"**/{vs,sql}/editor/test/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/standalone/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/contrib/**",
|
||||
"**/{vs,sql}/editor/standalone/{common,browser}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/standalone/test/browser/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/test/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/standalone/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/test/{common,browser}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/contrib/*/test/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/test/{common,browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/test/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/test/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/contrib/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/editor/contrib/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/contrib/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/parts/*/common/**",
|
||||
"**/{vs,sql}/platform/*/common/**",
|
||||
"**/{vs,sql}/editor/common/**",
|
||||
"**/{vs,sql}/editor/contrib/*/common/**",
|
||||
"**/{vs,sql}/workbench/common/**",
|
||||
"**/{vs,sql}/workbench/services/*/common/**",
|
||||
"assert"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"@angular/*",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/contrib/**", // editor/contrib is equivalent to /browser/ by convention
|
||||
"**/{vs,sql}/workbench/workbench.web.api",
|
||||
"**/{vs,sql}/workbench/{common,browser}/**",
|
||||
"**/{vs,sql}/workbench/services/*/{common,browser}/**",
|
||||
"assert"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/workbench.desktop.main.ts",
|
||||
"restrictions": [
|
||||
"**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/api/common/**",
|
||||
"restrictions": [
|
||||
"vscode",
|
||||
"azdata",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/platform/*/common/**",
|
||||
"**/{vs,sql}/editor/common/**",
|
||||
"**/{vs,sql}/editor/contrib/*/common/**",
|
||||
"**/{vs,sql}/workbench/api/common/**",
|
||||
"**/{vs,sql}/workbench/common/**",
|
||||
"**/{vs,sql}/workbench/services/*/common/**",
|
||||
"**/{vs,sql}/workbench/contrib/*/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/api/worker/**",
|
||||
"restrictions": [
|
||||
"vscode",
|
||||
"azdata",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/**/{common,worker}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/electron-browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/platform/*/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/editor/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/editor/contrib/**", // editor/contrib is equivalent to /browser/ by convention
|
||||
"**/{vs,sql}/workbench/{common,browser,node,electron-browser,api}/**",
|
||||
"**/{vs,sql}/workbench/services/*/{common,browser,node,electron-browser}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/services/**/test/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/**",
|
||||
"**/{vs,sql}/platform/**",
|
||||
"**/{vs,sql}/editor/**",
|
||||
"**/{vs,sql}/workbench/{common,browser,node,electron-browser}/**",
|
||||
"vs/workbench/contrib/files/common/editors/fileEditorInput",
|
||||
"**/{vs,sql}/workbench/services/**",
|
||||
"**/{vs,sql}/workbench/test/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/services/**/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"azdata",
|
||||
"**/{vs,sql}/base/**/common/**",
|
||||
"**/{vs,sql}/platform/**/common/**",
|
||||
"**/{vs,sql}/editor/common/**",
|
||||
"**/{vs,sql}/workbench/workbench.web.api",
|
||||
"**/{vs,sql}/workbench/common/**",
|
||||
"**/{vs,sql}/workbench/services/**/common/**",
|
||||
"**/{vs,sql}/workbench/api/**/common/**",
|
||||
"vs/workbench/contrib/files/common/editors/fileEditorInput", // this should be fine, it only accesses constants from contrib
|
||||
"vscode-textmate"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/services/**/worker/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/**/common/**",
|
||||
"**/{vs,sql}/platform/**/common/**",
|
||||
"**/{vs,sql}/editor/common/**",
|
||||
"**/{vs,sql}/workbench/**/common/**",
|
||||
"**/{vs,sql}/workbench/**/worker/**",
|
||||
"**/{vs,sql}/workbench/services/**/common/**",
|
||||
"vscode"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/services/**/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"azdata",
|
||||
"vscode",
|
||||
"**/{vs,sql}/base/**/{common,browser,worker}/**",
|
||||
"**/{vs,sql}/platform/**/{common,browser}/**",
|
||||
"**/{vs,sql}/editor/{common,browser}/**",
|
||||
"**/{vs,sql}/workbench/workbench.web.api",
|
||||
"**/{vs,sql}/workbench/{common,browser}/**",
|
||||
"**/{vs,sql}/workbench/api/{common,browser}/**",
|
||||
"**/{vs,sql}/workbench/services/**/{common,browser}/**",
|
||||
"vscode-textmate",
|
||||
"onigasm-umd",
|
||||
"sanitize-html",
|
||||
"@angular/*",
|
||||
"rxjs/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/services/**/node/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/**/{common,node}/**",
|
||||
"**/{vs,sql}/platform/**/{common,node}/**",
|
||||
"**/{vs,sql}/editor/{common,node}/**",
|
||||
"**/{vs,sql}/workbench/{common,node}/**",
|
||||
"**/{vs,sql}/workbench/api/{common,node}/**",
|
||||
"**/{vs,sql}/workbench/services/**/{common,node}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/services/**/electron-browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/**/{common,browser,worker,node,electron-browser}/**",
|
||||
"**/{vs,sql}/platform/**/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/editor/**",
|
||||
"**/{vs,sql}/workbench/{common,browser,node,electron-browser,api}/**",
|
||||
"**/{vs,sql}/workbench/services/**/{common,browser,node,electron-browser}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/contrib/terminal/browser/**",
|
||||
"restrictions": [
|
||||
// xterm and its addons are strictly browser-only components
|
||||
"xterm",
|
||||
"xterm-addon-*",
|
||||
"**/{vs,sql}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/contrib/extensions/browser/**",
|
||||
"restrictions": [
|
||||
"semver-umd",
|
||||
"**/{vs,sql}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/workbench/contrib/update/browser/update.ts",
|
||||
"restrictions": [
|
||||
"semver-umd",
|
||||
"**/{vs,sql}/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/code/node/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/**/{common,node}/**",
|
||||
"**/{vs,sql}/base/parts/**/{common,node}/**",
|
||||
"**/{vs,sql}/platform/**/{common,node}/**",
|
||||
"**/{vs,sql}/code/**/{common,node}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/code/electron-browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/**/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/base/parts/**/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/platform/**/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/code/**/{common,browser,node,electron-browser}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/code/electron-main/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/**/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/base/parts/**/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/platform/**/{common,node,electron-main}/**",
|
||||
"**/{vs,sql}/code/**/{common,node,electron-main}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/server/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/**/{common,node}/**",
|
||||
"**/{vs,sql}/base/parts/**/{common,node}/**",
|
||||
"**/{vs,sql}/platform/**/{common,node}/**",
|
||||
"**/{vs,sql}/workbench/**/{common,node}/**",
|
||||
"**/{vs,sql}/server/**",
|
||||
"**/{vs,sql}/code/**/{common,node}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{node,electron-browser,electron-main}/**",
|
||||
"restrictions": "**/*"
|
||||
},
|
||||
{
|
||||
"target": "**/extensions/**",
|
||||
"restrictions": "**/*"
|
||||
},
|
||||
{
|
||||
"target": "**/test/smoke/**",
|
||||
"restrictions": [
|
||||
"**/test/smoke/**",
|
||||
"*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/test/automation/**",
|
||||
"restrictions": [
|
||||
"**/test/automation/**",
|
||||
"*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/test/integration/**",
|
||||
"restrictions": [
|
||||
"**/test/integration/**",
|
||||
"*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "{**/api/**.test.ts,}",
|
||||
"restrictions": "{**/{vs,sql}/**,assert,sinon,crypto,vscode,typemoq,azdata}"
|
||||
},
|
||||
{
|
||||
"target": "{**/**.test.ts,**/test/**}",
|
||||
"restrictions": "{**/{vs,sql}/**,assert,sinon,crypto,xterm*,vscode,typemoq,azdata}"
|
||||
},
|
||||
{
|
||||
"target": "**/browser/**",
|
||||
"restrictions": [
|
||||
"**/{vs,sql}/**",
|
||||
"azdata",
|
||||
"vscode",
|
||||
"@angular/*",
|
||||
"rxjs/**",
|
||||
"chart.js",
|
||||
"ng2-charts",
|
||||
"angular2-grid",
|
||||
"html-query-plan",
|
||||
"plotly.js-dist"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{common,browser,workbench}/**",
|
||||
"restrictions": ["**/{vs,sql}/**","azdata","vscode"]
|
||||
}
|
||||
]
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"files": [
|
||||
"*.js"
|
||||
],
|
||||
"rules": {
|
||||
"jsdoc/no-types": "off"
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": [
|
||||
"**/sql/**"
|
||||
],
|
||||
"rules": {
|
||||
"no-sync": "warn",
|
||||
"strict": ["warn", "never"],
|
||||
"no-console": "warn"
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": [
|
||||
"**/vscode.d.ts",
|
||||
"**/vscode.proposed.d.ts"
|
||||
],
|
||||
"rules": {
|
||||
"vscode-dts-create-func": "warn",
|
||||
"vscode-dts-literal-or-types": "warn",
|
||||
"vscode-dts-interface-naming": "warn",
|
||||
"vscode-dts-event-naming": [
|
||||
"warn",
|
||||
{
|
||||
"allowed": [
|
||||
"onCancellationRequested",
|
||||
"event"
|
||||
],
|
||||
"verbs": [
|
||||
"accept",
|
||||
"change",
|
||||
"close",
|
||||
"collapse",
|
||||
"create",
|
||||
"delete",
|
||||
"dispose",
|
||||
"edit",
|
||||
"end",
|
||||
"expand",
|
||||
"hide",
|
||||
"open",
|
||||
"override",
|
||||
"receive",
|
||||
"register",
|
||||
"rename",
|
||||
"save",
|
||||
"send",
|
||||
"start",
|
||||
"terminate",
|
||||
"trigger",
|
||||
"unregister",
|
||||
"write"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
"root": true,
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true
|
||||
},
|
||||
"rules": {
|
||||
"no-console": 0,
|
||||
"no-cond-assign": 0,
|
||||
"no-unused-vars": 1,
|
||||
"no-extra-semi": "warn",
|
||||
"semi": "warn"
|
||||
},
|
||||
"extends": "eslint:recommended",
|
||||
"parserOptions": {
|
||||
"ecmaFeatures": {
|
||||
"experimentalObjectRestSpread": true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,20 +0,0 @@
{
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
    "ecmaVersion": 6,
    "sourceType": "module",
    "project": "./tsconfig.sql.json"
  },
  "plugins": [
    "@typescript-eslint",
    "jsdoc"
  ],
  "rules": {
    "@typescript-eslint/no-floating-promises": [
      "error",
      {
        "ignoreVoid": true
      }
    ]
  }
}
4 .github/ISSUE_TEMPLATE/bug_report.md (vendored)
@@ -2,11 +2,11 @@
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
labels: Bug
assignees: ''

---
<!-- Please read our Rules of Conduct: https://opensource.microsoft.com/codeofconduct/ -->

<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Also please test using the latest insiders build to make sure your issue has not already been fixed. -->
1 .github/ISSUE_TEMPLATE/config.yml (vendored)
@@ -1 +0,0 @@
blank_issues_enabled: false
2 .github/ISSUE_TEMPLATE/feature_request.md (vendored)
@@ -2,7 +2,7 @@
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
labels: Enhancement
assignees: ''

---
73 .github/classifier.yml (vendored)
@@ -1,36 +1,49 @@
|
||||
{
|
||||
perform: true,
|
||||
perform: false,
|
||||
alwaysRequireAssignee: false,
|
||||
labelsRequiringAssignee: [],
|
||||
defaultLabel: 'Triage: Needed',
|
||||
defaultAssignee: '',
|
||||
autoAssignees: {
|
||||
Area - Acquisition: [],
|
||||
Area - Azure: [],
|
||||
Area - Backup\Restore: [],
|
||||
Area - Charting\Insights: [],
|
||||
Area - Connection: [ charles-gagnon ],
|
||||
Area - DacFX: [],
|
||||
Area - Dashboard: [],
|
||||
Area - Data Explorer: [],
|
||||
Area - Edit Data: [],
|
||||
Area - Extensibility: [],
|
||||
Area - External Table: [],
|
||||
Area - Fundamentals: [],
|
||||
Area - Language Service: [ charles-gagnon ],
|
||||
Area - Localization: [],
|
||||
Area - Notebooks: [ chlafreniere ],
|
||||
Area - Performance: [],
|
||||
Area - Query Editor: [ anthonydresser ],
|
||||
Area - Query Plan: [],
|
||||
Area - Reliability: [],
|
||||
Area - Resource Deployment: [],
|
||||
Area - Schema Compare: [],
|
||||
Area - Shell: [],
|
||||
Area - SQL Agent: [],
|
||||
Area - SQL Import: [],
|
||||
Area - SQL Profiler: [],
|
||||
Area - SQL 2019: [],
|
||||
Area - SSMS Integration: []
|
||||
accessibility: [],
|
||||
acquisition: [],
|
||||
agent: [],
|
||||
azure: [],
|
||||
backup: [],
|
||||
bcdr: [],
|
||||
'chart viewer': [],
|
||||
connection: [],
|
||||
dacfx: [],
|
||||
dashboard: [],
|
||||
'data explorer': [],
|
||||
documentation: [],
|
||||
'edit data': [],
|
||||
export: [],
|
||||
extensibility: [],
|
||||
extensionManager: [],
|
||||
globalization: [],
|
||||
grid: [],
|
||||
import: [],
|
||||
insights: [],
|
||||
intellisense: [],
|
||||
localization: [],
|
||||
'managed instance': [],
|
||||
notebooks: [],
|
||||
'object explorer': [],
|
||||
performance: [],
|
||||
profiler: [],
|
||||
'query editor': [],
|
||||
'query execution': [],
|
||||
reliability: [],
|
||||
restore: [],
|
||||
scripting: [],
|
||||
'server group': [],
|
||||
settings: [],
|
||||
setup: [],
|
||||
shell: [],
|
||||
showplan: [],
|
||||
snippet: [],
|
||||
sql2019Preview: [],
|
||||
sqldw: [],
|
||||
supportability: [],
|
||||
ux: []
|
||||
}
|
||||
}
|
||||
|
||||
34 .github/feature-requests.yml (vendored)
@@ -1,34 +0,0 @@
{
  typeLabel: {
    name: 'feature-request'
  },
  candidateMilestone: {
    number: 107,
    name: 'Backlog Candidates'
  },
  approvedMilestone: {
    number: 8,
    name: 'Backlog'
  },
  onLabeled: {
    delay: 60,
    perform: true
  },
  onCandidateMilestoned: {
    candidatesComment: "This feature request is now a candidate for our backlog. The community has 60 days to upvote the issue. If it receives 20 upvotes we will move it to our backlog. If not, we will close it. To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding!",
    perform: true
  },
  onMonitorUpvotes: {
    upvoteThreshold: 20,
    acceptanceComment: ":slightly_smiling_face: This feature request received a sufficient number of community upvotes and we moved it to our backlog. To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding!",
    perform: true
  },
  onMonitorDaysOnCandidateMilestone: {
    daysOnMilestone: 60,
    warningPeriod: 10,
    numberOfCommentsToPreventAutomaticRejection: 20,
    rejectionComment: ":slightly_frowning_face: In the last 60 days, this feature request has received less than 20 community upvotes and we closed it. Still a big Thank You to you for taking the time to create this issue! To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding!",
    warningComment: "This feature request has not yet received the 20 community upvotes it takes to make to our backlog. 10 days to go. To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding",
    perform: true
  }
}
2 .github/locker.yml (vendored)
@@ -1,6 +1,6 @@
{
  daysAfterClose: 45,
  daysSinceLastUpdate: 3,
  ignoredLabels: ['A11y_ADS_OctTestPass', 'A11y_ADS_Schema_Dacpac_Backup', 'A11y_AzureDataStudio', 'A11yExclusion', 'A11yMAS', 'A11yResolved: Will Not Fix', 'A11yTCS'],
  ignoredLabels: [],
  perform: true
}
7 .github/mergers.json (vendored)
@@ -1,7 +0,0 @@
[
  "kenvanhyning",
  "kburtram",
  "udeeshagautam",
  "qifahs",
  "chlafreniere"
]
9 .github/pull_request_template.md (vendored)
@@ -1,9 +0,0 @@
<!-- Thank you for submitting a Pull Request. Please:
* Read our Pull Request guidelines:
https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests.
* Associate an issue with the Pull Request.
* Ensure that the code is up-to-date with the `master` branch.
* Include a description of the proposed changes and how to test them.
-->

This PR fixes #
6 .github/stale.yml (vendored)
@@ -1,6 +0,0 @@
{
  perform: true,
  label: 'Stale PR',
  daysSinceLastUpdate: 7,
  ignoredLabels: ['Do Not Merge']
}
151 .github/workflows/ci.yml (vendored)
@@ -1,151 +0,0 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- release/*
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- release/*
|
||||
|
||||
jobs:
|
||||
linux:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CHILD_CONCURRENCY: "1"
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
|
||||
- run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
|
||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||
sudo chmod +x /etc/init.d/xvfb
|
||||
sudo update-rc.d xvfb defaults
|
||||
sudo service xvfb start
|
||||
name: Setup Build Environment
|
||||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 10
|
||||
# TODO: cache node modules
|
||||
- run: yarn --frozen-lockfile
|
||||
name: Install Dependencies
|
||||
- run: yarn electron x64
|
||||
name: Download Electron
|
||||
- run: yarn gulp hygiene
|
||||
name: Run Hygiene Checks
|
||||
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
|
||||
name: Run Strict Compile Options
|
||||
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
|
||||
# name: Run Monaco Editor Checks
|
||||
- run: yarn valid-layers-check
|
||||
name: Run Valid Layers Checks
|
||||
- run: yarn compile
|
||||
name: Compile Sources
|
||||
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
|
||||
# name: Download Built-in Extensions
|
||||
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
|
||||
name: Run Unit Tests (Electron)
|
||||
# Fails with cryptic error (e.g. https://github.com/microsoft/vscode/pull/90292/checks?check_run_id=433681926#step:13:9)
|
||||
# - run: DISPLAY=:10 yarn test-browser --browser chromium
|
||||
# name: Run Unit Tests (Browser)
|
||||
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
|
||||
# name: Run Integration Tests (Electron)
|
||||
|
||||
windows:
|
||||
runs-on: windows-2016
|
||||
env:
|
||||
CHILD_CONCURRENCY: "1"
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 10
|
||||
- uses: actions/setup-python@v1
|
||||
with:
|
||||
python-version: '2.x'
|
||||
- run: yarn --frozen-lockfile
|
||||
name: Install Dependencies
|
||||
- run: yarn electron
|
||||
name: Download Electron
|
||||
- run: yarn gulp hygiene
|
||||
name: Run Hygiene Checks
|
||||
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
|
||||
name: Run Strict Compile Options
|
||||
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
|
||||
# name: Run Monaco Editor Checks
|
||||
- run: yarn valid-layers-check
|
||||
name: Run Valid Layers Checks
|
||||
- run: yarn compile
|
||||
name: Compile Sources
|
||||
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
|
||||
# name: Download Built-in Extensions
|
||||
- run: .\scripts\test.bat --tfs "Unit Tests"
|
||||
name: Run Unit Tests (Electron)
|
||||
# - run: yarn test-browser --browser chromium {{SQL CARBON EDIT}} disable for now @TODO @anthonydresser
|
||||
# name: Run Unit Tests (Browser)
|
||||
# - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
|
||||
# name: Run Integration Tests (Electron)
|
||||
|
||||
darwin:
|
||||
runs-on: macos-latest
|
||||
env:
|
||||
CHILD_CONCURRENCY: "1"
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 10
|
||||
- run: yarn --frozen-lockfile
|
||||
name: Install Dependencies
|
||||
- run: yarn electron x64
|
||||
name: Download Electron
|
||||
- run: yarn gulp hygiene
|
||||
name: Run Hygiene Checks
|
||||
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
|
||||
name: Run Strict Compile Options
|
||||
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
|
||||
# name: Run Monaco Editor Checks
|
||||
- run: yarn valid-layers-check
|
||||
name: Run Valid Layers Checks
|
||||
- run: yarn compile
|
||||
name: Compile Sources
|
||||
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
|
||||
# name: Download Built-in Extensions
|
||||
- run: ./scripts/test.sh --tfs "Unit Tests"
|
||||
name: Run Unit Tests (Electron)
|
||||
# - run: yarn test-browser --browser chromium --browser webkit
|
||||
# name: Run Unit Tests (Browser)
|
||||
# - run: ./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
# name: Run Integration Tests (Electron)
|
||||
|
||||
# monaco:
|
||||
# runs-on: ubuntu-latest
|
||||
# env:
|
||||
# CHILD_CONCURRENCY: "1"
|
||||
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# steps:
|
||||
# - uses: actions/checkout@v1
|
||||
# # TODO: rename azure-pipelines/linux/xvfb.init to github-actions
|
||||
# - run: |
|
||||
# sudo apt-get update
|
||||
# sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libgbm1
|
||||
# sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||
# sudo chmod +x /etc/init.d/xvfb
|
||||
# sudo update-rc.d xvfb defaults
|
||||
# sudo service xvfb start
|
||||
# name: Setup Build Environment
|
||||
# - uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: 10
|
||||
# - run: yarn --frozen-lockfile
|
||||
# name: Install Dependencies
|
||||
# - run: yarn monaco-compile-check
|
||||
# name: Run Monaco Editor Checks
|
||||
# - run: yarn gulp editor-esm-bundle
|
||||
# name: Editor Distro & ESM Bundle
|
||||
16 .github/workflows/merger.yml (vendored)
@@ -1,16 +0,0 @@
name: AutoMerger

on:
  issue_comment:
    types: [created]

jobs:
  release_labeler:
    name: Auto Merger
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - uses: ./build/actions/AutoMerge
        env:
          GITHUB_TOKEN: "${{ secrets.GithubMerger }}"
19 .github/workflows/portlabel.yml (vendored)
@@ -1,19 +0,0 @@
name: Port Request Labeler

on:
  pull_request:
    branches:
      - release/**

jobs:
  release_labeler:
    name: Release labeler
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - uses: ./build/actions/AutoLabel
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
        with:
          label: "Port Request"
13 .gitignore (vendored)
@@ -1,5 +1,4 @@
.DS_Store
.cache
npm-debug.log
Thumbs.db
node_modules/
@@ -11,24 +10,12 @@ out-editor/
out-editor-src/
out-editor-build/
out-editor-esm/
out-editor-esm-bundle/
out-editor-min/
out-monaco-editor-core/
out-vscode/
out-vscode-min/
out-vscode-reh/
out-vscode-reh-min/
out-vscode-reh-pkg/
out-vscode-reh-web/
out-vscode-reh-web-min/
out-vscode-reh-web-pkg/
out-vscode-web/
out-vscode-web-min/
src/vs/server
resources/server
build/node_modules
coverage/
test_data/
test-results/
yarn-error.log
*.vsix
@@ -1,33 +0,0 @@
/**
 * @name No floating promises
 * @kind problem
 * @problem.severity error
 * @id js/experimental/floating-promise
 */
import javascript

private predicate isEscapingPromise(PromiseDefinition promise) {
  exists (DataFlow::Node escape | promise.flowsTo(escape) |
    escape = any(DataFlow::InvokeNode invk).getAnArgument()
    or
    escape = any(DataFlow::FunctionNode fun).getAReturn()
    or
    escape = any(ThrowStmt t).getExpr().flow()
    or
    escape = any(GlobalVariable v).getAnAssignedExpr().flow()
    or
    escape = any(DataFlow::PropWrite write).getRhs()
    or
    exists(WithStmt with, Assignment assign |
      with.mayAffect(assign.getLhs()) and
      assign.getRhs().flow() = escape
    )
  )
}

from PromiseDefinition promise
where
  not exists(promise.getAMethodCall(any(string m | m = "then" or m = "catch" or m = "finally"))) and
  not exists (AwaitExpr e | promise.flowsTo(e.getOperand().flow())) and
  not isEscapingPromise(promise)
select promise, "This promise appears to be a floating promise"
@@ -1,6 +0,0 @@
{
  "useTabs": true,
  "printWidth": 120,
  "semi": true,
  "singleQuote": true
}
70 .vscode/cglicenses.schema.json (vendored)
@@ -1,61 +1,23 @@
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name",
|
||||
"prependLicenseText"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "The name of the dependency"
|
||||
},
|
||||
"fullLicenseText": {
|
||||
"type": "array",
|
||||
"description": "The complete license text of the dependency",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"prependLicenseText": {
|
||||
"type": "array",
|
||||
"description": "A piece of text to prepend to the auto-detected license text of the dependency",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name",
|
||||
"licenseDetail"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "The name of the dependency"
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name",
|
||||
"fullLicenseText"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "The name of the dependency"
|
||||
},
|
||||
"fullLicenseText": {
|
||||
"type": "array",
|
||||
"description": "The complete license text of the dependency",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"prependLicenseText": {
|
||||
"type": "array",
|
||||
"description": "A piece of text to prepend to the auto-detected license text of the dependency",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
"licenseDetail": {
|
||||
"type": "array",
|
||||
"description": "The complete license text of the dependency",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
2 .vscode/extensions.json (vendored)
@@ -2,8 +2,8 @@
  // See https://go.microsoft.com/fwlink/?LinkId=827846
  // for the documentation about the extensions.json format
  "recommendations": [
    "ms-vscode.vscode-typescript-tslint-plugin",
    "dbaeumer.vscode-eslint",
    "EditorConfig.EditorConfig",
    "msjsdiag.debugger-for-chrome"
  ]
}
148 .vscode/launch.json (vendored)
@@ -16,6 +16,7 @@
|
||||
"request": "attach",
|
||||
"name": "Attach to Extension Host",
|
||||
"port": 5870,
|
||||
"restart": true,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
]
|
||||
@@ -34,10 +35,7 @@
|
||||
"port": 5876,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"hidden": true,
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
@@ -55,10 +53,7 @@
|
||||
"port": 5875,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"hidden": true,
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
@@ -71,16 +66,17 @@
|
||||
"request": "launch",
|
||||
"name": "Launch azuredatastudio",
|
||||
"windows": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
|
||||
"timeout": 20000
|
||||
},
|
||||
"osx": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
||||
"timeout": 20000
|
||||
},
|
||||
"linux": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
||||
"timeout": 20000
|
||||
},
|
||||
"port": 9222,
|
||||
"timeout": 20000,
|
||||
"env": {
|
||||
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
|
||||
},
|
||||
@@ -88,33 +84,21 @@
|
||||
"urlFilter": "*workbench.html*",
|
||||
"runtimeArgs": [
|
||||
"--inspect=5875",
|
||||
"--no-cached-data",
|
||||
],
|
||||
"webRoot": "${workspaceFolder}",
|
||||
// Settings for js-debug:
|
||||
"pauseForSourceMap": false,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
"--no-cached-data"
|
||||
],
|
||||
"webRoot": "${workspaceFolder}"
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Launch ADS (Main Process)",
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
||||
"windows": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
|
||||
},
|
||||
"runtimeArgs": [
|
||||
"--no-cached-data"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "2_launch",
|
||||
"order": 1
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
@@ -141,43 +125,13 @@
|
||||
"timeout": 45000
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
"name": "Launch Built-in Extension",
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"name": "Launch ADS (Web) (TBD)",
|
||||
"runtimeExecutable": "yarn",
|
||||
"runtimeArgs": [
|
||||
"web"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "2_launch",
|
||||
"order": 2
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
"request": "launch",
|
||||
"name": "Launch ADS (Web, Chrome) (TBD)",
|
||||
"url": "http://localhost:8080",
|
||||
"preLaunchTask": "Run web",
|
||||
"presentation": {
|
||||
"group": "2_launch",
|
||||
"order": 3
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Git Unit Tests",
|
||||
"program": "${workspaceFolder}/extensions/git/node_modules/mocha/bin/_mocha",
|
||||
"stopOnEntry": false,
|
||||
"cwd": "${workspaceFolder}/extensions/git",
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/extensions/git/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "5_tests",
|
||||
"order": 10
|
||||
}
|
||||
"runtimeExecutable": "${execPath}",
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
@@ -187,17 +141,13 @@
|
||||
"cwd": "${workspaceFolder}/test/smoke",
|
||||
"env": {
|
||||
"BUILD_ARTIFACTSTAGINGDIRECTORY": "${workspaceFolder}"
|
||||
},
|
||||
"presentation": {
|
||||
"group": "5_tests",
|
||||
"order": 8
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Run Unit Tests",
|
||||
"program": "${workspaceFolder}/test/unit/electron/index.js",
|
||||
"program": "${workspaceFolder}/test/electron/index.js",
|
||||
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
|
||||
"windows": {
|
||||
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
|
||||
@@ -212,13 +162,7 @@
|
||||
"cwd": "${workspaceFolder}",
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
],
|
||||
"env": {
|
||||
"MOCHA_COLORS": "true"
|
||||
},
|
||||
"presentation": {
|
||||
"hidden": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
@@ -236,22 +180,6 @@
|
||||
"webRoot": "${workspaceFolder}",
|
||||
"timeout": 45000
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
"request": "launch",
|
||||
"name": "Run Extension Integration Tests",
|
||||
"windows": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.bat"
|
||||
},
|
||||
"osx": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
|
||||
},
|
||||
"linux": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
|
||||
},
|
||||
"webRoot": "${workspaceFolder}",
|
||||
"timeout": 45000
|
||||
},
|
||||
],
|
||||
"compounds": [
|
||||
{
|
||||
@@ -273,45 +201,21 @@
|
||||
"configurations": [
|
||||
"Launch azuredatastudio",
|
||||
"Attach to Main Process"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "1_vscode",
|
||||
"order": 1
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Debug azuredatastudio Main, Renderer & Extension Host",
|
||||
"name": "Search and Renderer processes",
|
||||
"configurations": [
|
||||
"Launch azuredatastudio",
|
||||
"Attach to Main Process",
|
||||
"Attach to Extension Host"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "1_vscode",
|
||||
"order": 3
|
||||
}
|
||||
"Attach to Search Process"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Debug Renderer and Extension Host processes",
|
||||
"name": "Renderer and Extension Host processes",
|
||||
"configurations": [
|
||||
"Launch azuredatastudio",
|
||||
"Attach to Extension Host"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "1_vscode",
|
||||
"order": 2
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Attach Renderer and Extension Host",
|
||||
"configurations": [
|
||||
"Attach to azuredatastudio",
|
||||
"Attach to Extension Host"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "1_vscode",
|
||||
"order": 2
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
9 .vscode/searches/ts36031.code-search (vendored)
@@ -1,9 +0,0 @@
# Query: \\w+\\?\\..+![(.[]
# Flags: RegExp
# ContextLines: 2
src/vs/base/browser/ui/tree/asyncDataTree.ts:
270 } : undefined,
271 isChecked: options.ariaProvider!.isChecked ? (e) => {
272: return options.ariaProvider?.isChecked!(e.element as T);
273 } : undefined
274 },
19 .vscode/settings.json (vendored)
@@ -1,5 +1,6 @@
{
  "editor.insertSpaces": false,
  "files.eol": "\n",
  "files.trimTrailingWhitespace": true,
  "files.exclude": {
    ".git": true,
@@ -22,8 +23,6 @@
    "i18n/**": true,
    "extensions/**/out/**": true,
    "test/smoke/out/**": true,
    "test/automation/out/**": true,
    "test/integration/browser/out/**": true,
    "src/vs/base/test/node/uri.test.data.txt": true
  },
  "lcov.path": [
@@ -39,14 +38,9 @@
      }
    }
  ],
  "eslint.options": {
    "rulePaths": [
      "./build/lib/eslint"
    ]
  },
  "typescript.tsdk": "node_modules/typescript/lib",
  "npm.exclude": "**/extensions/**",
  "npm.packageManager": "yarn",
  "git.ignoreLimitWarning": true,
  "emmet.excludeLanguages": [],
  "typescript.preferences.importModuleSpecifier": "non-relative",
  "typescript.preferences.quoteStyle": "single",
@@ -64,10 +58,5 @@
      "url": "./.vscode/cglicenses.schema.json"
    }
  ],
  "git.ignoreLimitWarning": true,
  "remote.extensionKind": {
    "msjsdiag.debugger-for-chrome": "workspace"
  },
  "gulp.autoDetect": "off",
  "files.insertFinalNewline": true
}
  "git.ignoreLimitWarning": true
}
51 .vscode/tasks.json (vendored)
@@ -5,10 +5,7 @@
|
||||
"type": "npm",
|
||||
"script": "watch",
|
||||
"label": "Build VS Code",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"group": "build",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
@@ -33,18 +30,26 @@
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "strict-vscode-watch",
|
||||
"label": "TS - Strict VSCode",
|
||||
"script": "strict-initialization-watch",
|
||||
"label": "TS - Strict Initialization",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
},
|
||||
"problemMatcher": {
|
||||
"base": "$tsc-watch",
|
||||
"owner": "typescript-vscode",
|
||||
"owner": "typescript-strict-initialization",
|
||||
"applyTo": "allDocuments"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "gulp",
|
||||
"task": "tslint",
|
||||
"label": "Run tslint",
|
||||
"problemMatcher": [
|
||||
"$tslint5"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Run tests",
|
||||
"type": "shell",
|
||||
@@ -68,40 +73,14 @@
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "electron",
|
||||
"type": "gulp",
|
||||
"task": "electron",
|
||||
"label": "Download electron"
|
||||
},
|
||||
{
|
||||
"type": "gulp",
|
||||
"task": "hygiene",
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"type": "shell",
|
||||
"command": "yarn web -- --no-launch",
|
||||
"label": "Run web",
|
||||
"isBackground": true,
|
||||
"problemMatcher": {
|
||||
"pattern": {
|
||||
"regexp": ""
|
||||
},
|
||||
"background": {
|
||||
"beginsPattern": ".*node .*",
|
||||
"endsPattern": "Web UI available at .*"
|
||||
}
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "eslint",
|
||||
"problemMatcher": {
|
||||
"source": "eslint",
|
||||
"base": "$eslint-stylish"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
2 .yarnrc
@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "7.1.11"
target "3.1.8"
runtime "electron"
153
CHANGELOG.md
153
CHANGELOG.md
@@ -1,156 +1,5 @@
|
||||
# Change Log
|
||||
|
||||
## Version 1.15.1
|
||||
* Release date: February 19, 2020
|
||||
* Release status: General Availability
|
||||
* Resolved [#9145 Edit Data render the result grid incorrectly when using custom query](https://github.com/microsoft/azuredatastudio/issues/9145).
|
||||
* Resolved [#9149 Show Active Connections](https://github.com/microsoft/azuredatastudio/issues/9149).
|
||||
|
||||
## Version 1.15.0
|
||||
* Release date: February 13, 2020
|
||||
* Release status: General Availability
|
||||
* New Azure Sign-in improvement - Added improved Azure Sign-in experience, including removal of copy/paste of device code to make a more seamless connected experience.
|
||||
* Find in Notebook support - Users can now use Ctrl+F inside of a notebook. Find in Notebook support searches line by line through both code and text cells.
|
||||
* VS Code merge from 1.38 to 1.42 - This release includes updates to VS Code from the 3 previous VS Code releases. Read their [release notes](https://code.visualstudio.com/updates/v1_42) to learn more.
|
||||
* Fix for the ["white/blank screen"](https://github.com/microsoft/azuredatastudio/issues/8775) issue reported by many users.
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+label%3ABug+milestone%3A%22February+2020%22+is%3Aclosed).
|
||||
|
||||
## Version 1.14.1
|
||||
* Release date: December 26, 2019
|
||||
* Release status: General Availability
|
||||
* Hotfix for bug https://github.com/microsoft/azuredatastudio/issues/8768
|
||||
|
||||
## Version 1.14.0
|
||||
* Release date: December 19, 2019
|
||||
* Release status: General Availability
|
||||
* Added bigdatacluster.ignoreSslVerification setting to allow ignoring SSL verification errors when connecting to a BDC [#8129](https://github.com/microsoft/azuredatastudio/issues/8129)
|
||||
* Changed attach to connection dropdown in Notebooks to only list the currently active connection [#8582](https://github.com/microsoft/azuredatastudio/pull/8582)
|
||||
* Allow changing default language flavor for offline query editors [#8419](https://github.com/microsoft/azuredatastudio/pull/8419)
|
||||
* GA status for Big Data Cluster/SQL 2019 features [#8269](https://github.com/microsoft/azuredatastudio/issues/8269)
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/44?closed=1).
|
||||
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
## Version 1.13.1
|
||||
* Release date: November 15, 2019
|
||||
* Release status: General Availability
|
||||
* Resolved [#8210 Copy/Paste results are out of order](https://github.com/microsoft/azuredatastudio/issues/8210).
|
||||
|
||||
## Version 1.13.0
|
||||
* Release date: November 4, 2019
|
||||
* Release status: General Availability
|
||||
* General Availability release for Schema Compare and DACPAC extensions
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/43?closed=1).
|
||||
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
* aspnerd for `Use selected DB for import wizard schema list` [#7878](https://github.com/microsoft/azuredatastudio/pull/7878)
|
||||
|
||||
## Version 1.12.2
|
||||
* Release date: October 11, 2019
|
||||
* Release status: General Availability
|
||||
* Hotfix release (1.12.2): `Disable automatically starting the EH in inspect mode` https://github.com/microsoft/azuredatastudio/commit/c9bef82ace6c67190d0e83820011a2bbd1f793c1
|
||||
|
||||
## Version 1.12.1
|
||||
* Release date: October 7, 2019
|
||||
* Release status: General Availability
|
||||
* Hotfix release: `Notebooks: Ensure quotes and backslashes are escaped properly in text editor model` https://github.com/microsoft/azuredatastudio/pull/7540
|
||||
|
||||
## Version 1.12.0
|
||||
* Release date: October 2, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
## What's new in this version
|
||||
* Announcing the Query History panel
|
||||
* Improved Query Results Grid copy selection support
|
||||
* TempDB page added to Server Reports extension
|
||||
* PowerShell extension update
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/42?closed=1).
|
||||
|
||||
## Version 1.11.0
|
||||
* Release date: September 10, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
## What's new in this version
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/41?closed=1).
|
||||
|
||||
## Version 1.10.0
|
||||
* Release date: August 14, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
## What's new in this version
|
||||
* [SandDance](https://github.com/microsoft/SandDance) integration — A new way to interact with data. Download the extension [here](https://docs.microsoft.com/sql/azure-data-studio/sanddance-extension)
|
||||
* Notebook improvements
|
||||
* Better loading performance
|
||||
* Ability to right click SQL results grid to save your results as CSV, JSON, etc.
|
||||
* Buttons to add code or text cells in-line
|
||||
* [Other fixes and improvements](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+label%3A%22Area%3A+Notebooks%22+milestone%3A%22August+2019+Release%22+is%3Aclosed)
|
||||
* SQL Server Dacpac extension can support Azure Active Directory authentication
|
||||
* Updated SQL Server 2019 extension
|
||||
* Visual Studio Code May Release Merge 1.37 - this includes changes from [1.36](https://code.visualstudio.com/updates/v1_36) and [1.37](https://code.visualstudio.com/updates/v1_37)
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/39?closed=1).
|
||||
|
||||
## Version 1.9.0
|
||||
* Release date: July 11, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
## What's new in this version
|
||||
* Release of [SentryOne Plan Explorer Extension](https://www.sentryone.com/products/sentryone-plan-explorer-extension-azure-data-studio)
|
||||
* **Schema Compare**
|
||||
* Schema Compare File Support (.SCMP)
|
||||
* Cancel support
|
||||
* [Other fixes and improvements](https://github.com/Microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22July+2019+Release%22+is%3Aclosed+label%3A%22Area%3A+Schema+Compare%22)
|
||||
* **Notebooks**
|
||||
* Plotly Support
|
||||
* Open Notebook from Browser
|
||||
* Python Package Management
|
||||
* Performance & Markdown Enhancements
|
||||
* Improved Keyboard Shortcuts
|
||||
* [Other fixes and improvements](https://github.com/Microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22July+2019+Release%22+is%3Aclosed+label%3A%22Area%3A+Notebooks%22)
|
||||
* **SQL Server Profiler**
|
||||
* Filtering by Database Name
|
||||
* Copy & Paste Support
|
||||
* Save/Load Filter
|
||||
* SQL Server 2019 Support
|
||||
* New Language Packs Available
|
||||
* Visual Studio Code May Release Merge 1.35 - the latest improvements can be found [here](https://code.visualstudio.com/updates/v1_35)
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/35?closed=1).
|
||||
|
||||
## Version 1.8.0
|
||||
* Release date: June 6, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
## What's new in this version
|
||||
* Initial release of the Database Admin Tool Extensions for Windows *Preview* extension
|
||||
* Initial release of the Central Management Servers extension
|
||||
* **Schema Compare**
|
||||
* Added Exclude/Include Options
|
||||
* Generate Script opens script after being generated
|
||||
* Removed double scroll bars
|
||||
* Formatting and layout improvements
|
||||
* Complete changes can be found [here](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22June+2019+Release%22+label%3A%22Area%3A+Schema+Compare%22+is%3Aclosed)
|
||||
* Messages panel moved into results panel - when users ran SQL queries, results and messages were in stacked panels. Now they are in separate tabs in a single panel similar to SSMS.
|
||||
* **Notebook**
|
||||
* Users can now choose to use their own Python 3 or Anaconda installs in notebooks
|
||||
* Multiple Stability + fit/finish fixes
|
||||
* View the full list of improvements and fixes [here](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22June+2019+Release%22+is%3Aclosed+label%3A%22Area%3A+Notebooks%22)
|
||||
* Visual Studio Code May Release Merge 1.34 - the latest improvements can be found [here](https://code.visualstudio.com/updates/v1_34)
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/32?closed=1).
|
||||
|
||||
## Version 1.7.0
|
||||
* Release date: May 8, 2019
|
||||
* Release status: General Availability
|
||||
|
||||
## What's new in this version
|
||||
* Announcing Schema Compare *Preview* extension
|
||||
* Tasks Panel UX improvement
|
||||
* Announcing new Welcome page
|
||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/31?closed=1).
|
||||
|
||||
## Contributions and "thank you"
|
||||
We would like to thank all our users who raised issues.
|
||||
|
||||
## Version 1.6.0
|
||||
* Release date: April 18, 2019
|
||||
* Release status: General Availability
|
||||
@@ -272,7 +121,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
||||
|
||||
## What's new in this version
|
||||
* Announcing the SQL Server 2019 Preview extension.
|
||||
* Support for SQL Server 2019 preview features including Big Data Cluster support.
|
||||
* Support for SQL Server 2019 preview features including big data cluster support.
|
||||
* Azure Data Studio Notebooks
|
||||
* The Azure Resource Explorer viewlet lets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release, Azure SQL databases and servers are supported.
|
||||
* SQL Server Polybase Create External Table Wizard
|
||||
|
||||
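The 1.14.0 notes above mention the `bigdatacluster.ignoreSslVerification` setting. As a minimal sketch only (not part of the diff above): such a flag is typically enabled in the user `settings.json`; the boolean shape shown here is an assumption, and only the setting name comes from the release notes.

```jsonc
{
	// Hypothetical settings.json entry — the setting name is taken from the 1.14.0
	// release notes; enabling it skips SSL certificate verification when connecting
	// to a Big Data Cluster, so it should only be used against trusted endpoints.
	"bigdatacluster.ignoreSslVerification": true
}
```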
1
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1 @@
|
||||
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
|
||||
32
README.md
@@ -1,26 +1,25 @@
|
||||
# Azure Data Studio
|
||||
|
||||
[](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=master)
|
||||
[](https://twitter.com/azuredatastudio)
|
||||
[](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=4)
|
||||
|
||||
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
|
||||
|
||||
## **Download the latest Azure Data Studio release**
|
||||
**Download the latest Azure Data Studio release**
|
||||
|
||||
Platform | Link
|
||||
-- | --
|
||||
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2116782
|
||||
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2116781
|
||||
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2116691
|
||||
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2116692
|
||||
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2116783
|
||||
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2116779
|
||||
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2116780
|
||||
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2087316
|
||||
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2087317
|
||||
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2087318
|
||||
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2087170
|
||||
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2087414
|
||||
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2087171
|
||||
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2087415
|
||||
|
||||
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
|
||||
|
||||
## Try out the latest insiders build from `master`:
|
||||
Try out the latest insiders build from `master`:
|
||||
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
|
||||
- [Windows System Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider)
|
||||
- [Windows ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider)
|
||||
@@ -29,7 +28,7 @@ Go to our [download page](https://aka.ms/azuredatastudio) for more specific inst
|
||||
|
||||
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
|
||||
|
||||
## **Feature Highlights**
|
||||
**Feature Highlights**
|
||||
|
||||
- Cross-Platform DB management for Windows, macOS and Linux with simple XCopy deployment
|
||||
- SQL Server Connection Management with Connection Dialog, Server Groups, Azure Integration and Registered Servers
|
||||
@@ -69,14 +68,7 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
|
||||
## Contributions and "Thank You"
|
||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||
|
||||
* eulercamposbarros for `Prevent connections from moving on click (#7528)`
|
||||
* AlexFsmn for `Fixed issue where task icons got hidden if text was too long`
|
||||
* jamesrod817 for `Tempdb (#7022)`
|
||||
* dzsquared for `fix(snippets): ads parenthesis to sqlcreateindex snippet #7020`
|
||||
* devmattrick for `Update row count as updates are received #6642`
|
||||
* mottykohn for `In Message panel onclick scroll to line #6417`
|
||||
* Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480`
|
||||
* yamatoya for `fix the format #4899`
|
||||
* yamatoya for `fix the format (#4899)`
|
||||
* GeoffYoung for `Fix sqlDropColumn description #4422`
|
||||
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
|
||||
* sadedil for `Missing feature request: Save as XML #3729`
|
||||
|
||||
@@ -36,7 +36,6 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
||||
jquery-ui: https://github.com/jquery/jquery-ui
|
||||
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
|
||||
jschardet: https://github.com/aadsm/jschardet
|
||||
jupyter-powershell: https://github.com/vors/jupyter-powershell
|
||||
JupyterLab: https://github.com/jupyterlab/jupyterlab
|
||||
make-error: https://github.com/JsCommunity/make-error
|
||||
minimist: https://github.com/substack/minimist
|
||||
@@ -47,6 +46,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
||||
node-fetch: https://github.com/bitinn/node-fetch
|
||||
node-pty: https://github.com/Tyriar/node-pty
|
||||
nsfw: https://github.com/Axosoft/nsfw
|
||||
pretty-data: https://github.com/vkiryukhin/pretty-data
|
||||
primeng: https://github.com/primefaces/primeng
|
||||
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
|
||||
pty.js: https://github.com/chjj/pty.js
|
||||
@@ -1176,35 +1176,7 @@ That's all there is to it!
|
||||
=========================================
|
||||
END OF jschardet NOTICES AND INFORMATION
|
||||
|
||||
%% jupyter-powershell NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Sergei Vorobev
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
=========================================
|
||||
END OF jupyter-powershell NOTICES AND INFORMATION
|
||||
|
||||
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
Copyright (c) 2015 Project Jupyter Contributors
|
||||
All rights reserved.
|
||||
|
||||
@@ -1448,6 +1420,16 @@ SOFTWARE.
|
||||
=========================================
|
||||
END OF nsfw NOTICES AND INFORMATION
|
||||
|
||||
%% pretty-data NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
License: Dual licensed under the MIT and GPL licenses:
|
||||
|
||||
http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
http://www.gnu.org/licenses/gpl.html
|
||||
=========================================
|
||||
END OF pretty-data NOTICES AND INFORMATION
|
||||
|
||||
%% primeng NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
The MIT License (MIT)
|
||||
|
||||
69
azure-pipelines-linux-mac.yml
Normal file
@@ -0,0 +1,69 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: '8.x'
|
||||
displayName: 'Install Node.js'
|
||||
|
||||
- script: |
|
||||
git submodule update --init --recursive
|
||||
nvm install 10.15.1
|
||||
nvm use 10.15.1
|
||||
npm i -g yarn
|
||||
displayName: 'preinstall'
|
||||
|
||||
- script: |
|
||||
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
|
||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||
sudo chmod +x /etc/init.d/xvfb
|
||||
sudo update-rc.d xvfb defaults
|
||||
sudo service xvfb start
|
||||
# sh -e /etc/init.d/xvfb start
|
||||
# sleep 3
|
||||
displayName: 'Linux preinstall'
|
||||
condition: eq(variables['Agent.OS'], 'Linux')
|
||||
|
||||
- script: |
|
||||
yarn
|
||||
displayName: 'Install'
|
||||
|
||||
- script: |
|
||||
yarn gulp electron-x64
|
||||
displayName: Download Electron
|
||||
|
||||
- script: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
|
||||
- script: |
|
||||
yarn tslint
|
||||
displayName: 'Run TSLint'
|
||||
|
||||
- script: |
|
||||
yarn strict-null-check
|
||||
displayName: 'Run Strict Null Check'
|
||||
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: 'Compile'
|
||||
|
||||
- script: |
|
||||
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
|
||||
displayName: 'Tests'
|
||||
condition: eq(variables['Agent.OS'], 'Linux')
|
||||
|
||||
- script: |
|
||||
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
|
||||
displayName: 'Tests'
|
||||
condition: ne(variables['Agent.OS'], 'Linux')
|
||||
|
||||
- task: PublishTestResults@2
|
||||
inputs:
|
||||
testResultsFiles: '**/test-results.xml'
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- task: PublishCodeCoverageResults@1
|
||||
inputs:
|
||||
codeCoverageTool: 'cobertura'
|
||||
summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
|
||||
reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
|
||||
condition: ne(variables['Agent.OS'], 'Linux')
|
||||
44
azure-pipelines-windows.yml
Normal file
@@ -0,0 +1,44 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: '10.15.1'
|
||||
displayName: 'Install Node.js'
|
||||
|
||||
- script: |
|
||||
yarn
|
||||
displayName: 'Yarn Install'
|
||||
|
||||
- script: |
|
||||
yarn gulp electron-x64
|
||||
displayName: 'Electron'
|
||||
|
||||
- script: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
|
||||
- script: |
|
||||
yarn tslint
|
||||
displayName: 'Run TSLint'
|
||||
|
||||
- script: |
|
||||
yarn strict-null-check
|
||||
displayName: 'Run Strict Null Check'
|
||||
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: 'Compile'
|
||||
|
||||
- script: |
|
||||
.\scripts\test.bat --reporter mocha-junit-reporter --coverage
|
||||
displayName: 'Test'
|
||||
|
||||
- task: PublishTestResults@2
|
||||
inputs:
|
||||
testResultsFiles: 'test-results.xml'
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- task: PublishCodeCoverageResults@1
|
||||
inputs:
|
||||
codeCoverageTool: 'cobertura'
|
||||
summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
|
||||
reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report
|
||||
@@ -1,22 +1,29 @@
|
||||
trigger:
|
||||
- master
|
||||
- release/*
|
||||
- master
|
||||
- releases/*
|
||||
|
||||
jobs:
|
||||
- job: Windows
|
||||
pool:
|
||||
vmImage: VS2017-Win2016
|
||||
steps:
|
||||
- template: build/azure-pipelines/win32/continuous-build-win32.yml
|
||||
|
||||
- job: Linux
|
||||
# All tasks on Windows
|
||||
- job: build_all_windows
|
||||
displayName: Build all tasks (Windows)
|
||||
pool:
|
||||
vmImage: 'Ubuntu-16.04'
|
||||
vmImage: vs2017-win2016
|
||||
steps:
|
||||
- template: build/azure-pipelines/linux/continuous-build-linux.yml
|
||||
- template: azure-pipelines-windows.yml
|
||||
|
||||
- job: macOS
|
||||
# All tasks on Linux
|
||||
- job: build_all_linux
|
||||
displayName: Build all tasks (Linux)
|
||||
pool:
|
||||
vmImage: macOS-latest
|
||||
vmImage: 'Ubuntu 16.04'
|
||||
steps:
|
||||
- template: build/azure-pipelines/darwin/continuous-build-darwin.yml
|
||||
- template: azure-pipelines-linux-mac.yml
|
||||
|
||||
# All tasks on macOS
|
||||
- job: build_all_darwin
|
||||
displayName: Build all tasks (macOS)
|
||||
pool:
|
||||
vmImage: macos-10.13
|
||||
steps:
|
||||
- template: azure-pipelines-linux-mac.yml
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
2019-12-01T02:20:58.491Z
|
||||
@@ -1,136 +0,0 @@
|
||||
# cleanup rules for native node modules, .gitignore style
|
||||
|
||||
nan/**
|
||||
*/node_modules/nan/**
|
||||
|
||||
fsevents/binding.gyp
|
||||
fsevents/fsevents.cc
|
||||
fsevents/build/**
|
||||
fsevents/src/**
|
||||
fsevents/test/**
|
||||
!fsevents/**/*.node
|
||||
|
||||
vscode-sqlite3/binding.gyp
|
||||
vscode-sqlite3/benchmark/**
|
||||
vscode-sqlite3/cloudformation/**
|
||||
vscode-sqlite3/deps/**
|
||||
vscode-sqlite3/test/**
|
||||
vscode-sqlite3/build/**
|
||||
vscode-sqlite3/src/**
|
||||
!vscode-sqlite3/build/Release/*.node
|
||||
|
||||
oniguruma/binding.gyp
|
||||
oniguruma/build/**
|
||||
oniguruma/src/**
|
||||
oniguruma/deps/**
|
||||
!oniguruma/build/Release/*.node
|
||||
!oniguruma/src/*.js
|
||||
|
||||
windows-mutex/binding.gyp
|
||||
windows-mutex/build/**
|
||||
windows-mutex/src/**
|
||||
!windows-mutex/**/*.node
|
||||
|
||||
native-keymap/binding.gyp
|
||||
native-keymap/build/**
|
||||
native-keymap/src/**
|
||||
native-keymap/deps/**
|
||||
!native-keymap/build/Release/*.node
|
||||
|
||||
native-is-elevated/binding.gyp
|
||||
native-is-elevated/build/**
|
||||
native-is-elevated/src/**
|
||||
native-is-elevated/deps/**
|
||||
!native-is-elevated/build/Release/*.node
|
||||
|
||||
native-watchdog/binding.gyp
|
||||
native-watchdog/build/**
|
||||
native-watchdog/src/**
|
||||
!native-watchdog/build/Release/*.node
|
||||
|
||||
spdlog/binding.gyp
|
||||
spdlog/build/**
|
||||
spdlog/deps/**
|
||||
spdlog/src/**
|
||||
spdlog/test/**
|
||||
!spdlog/build/Release/*.node
|
||||
|
||||
jschardet/dist/**
|
||||
|
||||
windows-foreground-love/binding.gyp
|
||||
windows-foreground-love/build/**
|
||||
windows-foreground-love/src/**
|
||||
!windows-foreground-love/**/*.node
|
||||
|
||||
windows-process-tree/binding.gyp
|
||||
windows-process-tree/build/**
|
||||
windows-process-tree/src/**
|
||||
!windows-process-tree/**/*.node
|
||||
|
||||
keytar/binding.gyp
|
||||
keytar/build/**
|
||||
keytar/src/**
|
||||
keytar/script/**
|
||||
keytar/node_modules/**
|
||||
!keytar/**/*.node
|
||||
|
||||
node-pty/binding.gyp
|
||||
node-pty/build/**
|
||||
node-pty/src/**
|
||||
node-pty/tools/**
|
||||
node-pty/deps/**
|
||||
!node-pty/build/Release/*.exe
|
||||
!node-pty/build/Release/*.dll
|
||||
!node-pty/build/Release/*.node
|
||||
|
||||
emmet/node_modules/**
|
||||
|
||||
pty.js/build/**
|
||||
!pty.js/build/Release/**
|
||||
|
||||
# START SQL Modules
|
||||
|
||||
@angular/**/src/**
|
||||
@angular/**/testing/**
|
||||
|
||||
angular2-grid/components/**
|
||||
angular2-grid/directives/**
|
||||
angular2-grid/interfaces/**
|
||||
angular2-grid/modules/**
|
||||
|
||||
angular2-slickgrid/.vscode/**
|
||||
angular2-slickgrid/components/**
|
||||
angular2-slickgrid/examples/**
|
||||
|
||||
jquery-ui/external/**
|
||||
jquery-ui/demos/**
|
||||
|
||||
slickgrid/node_modules/**
|
||||
slickgrid/examples/**
|
||||
|
||||
kerberos/build/**
|
||||
|
||||
# END SQL Modules
|
||||
|
||||
nsfw/binding.gyp
|
||||
nsfw/build/**
|
||||
nsfw/src/**
|
||||
nsfw/openpa/**
|
||||
nsfw/includes/**
|
||||
!nsfw/build/Release/*.node
|
||||
!nsfw/**/*.a
|
||||
|
||||
vsda/build/**
|
||||
vsda/ci/**
|
||||
vsda/src/**
|
||||
vsda/.gitignore
|
||||
vsda/binding.gyp
|
||||
vsda/README.md
|
||||
vsda/targets
|
||||
!vsda/build/Release/vsda.node
|
||||
|
||||
vscode-windows-ca-certs/**/*
|
||||
!vscode-windows-ca-certs/package.json
|
||||
!vscode-windows-ca-certs/**/*.node
|
||||
|
||||
node-addon-api/**/*
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"env": {
|
||||
"commonjs": true,
|
||||
"es6": true,
|
||||
"node": true
|
||||
},
|
||||
"extends": "eslint:recommended",
|
||||
"globals": {
|
||||
"Atomics": "readonly",
|
||||
"SharedArrayBuffer": "readonly"
|
||||
},
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 2018
|
||||
},
|
||||
"rules": {
|
||||
}
|
||||
}
|
||||
98
build/actions/AutoLabel/.gitignore
vendored
@@ -1,98 +0,0 @@
|
||||
|
||||
# Created by https://www.gitignore.io/api/node
|
||||
# Edit at https://www.gitignore.io/?templates=node
|
||||
|
||||
### Node ###
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# TypeScript v1 declaration files
|
||||
typings/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variables file
|
||||
.env
|
||||
.env.test
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
|
||||
# next.js build output
|
||||
.next
|
||||
|
||||
# nuxt.js build output
|
||||
.nuxt
|
||||
|
||||
# react / gatsby
|
||||
public/
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# End of https://www.gitignore.io/api/node
|
||||
@@ -1,9 +0,0 @@
|
||||
name: 'Auto Labeler'
|
||||
description: 'Automatically add a label to a PR'
|
||||
author: 'aaomidi'
|
||||
inputs:
|
||||
token:
|
||||
description: 'The GITHUB_TOKEN secret'
|
||||
runs:
|
||||
using: 'node12'
|
||||
main: 'dist/index.js'
|
||||
1
build/actions/AutoLabel/dist/index.d.ts
vendored
@@ -1 +0,0 @@
|
||||
export {};
|
||||
35714
build/actions/AutoLabel/dist/index.js
vendored
File diff suppressed because it is too large
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"name": "auto-merge",
|
||||
"version": "1.0.0",
|
||||
"main": "src/index.js",
|
||||
"repository": "git@github.com:aaomidi/AutoLabel.git",
|
||||
"author": "Amir Omidi <amir@aaomidi.com>",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"package": "ncc build src/index.ts -o dist",
|
||||
"lint": "eslint src/index.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^12.11.1",
|
||||
"@zeit/ncc": "^0.21.1",
|
||||
"eslint": "^6.8.0",
|
||||
"typescript": "^3.8.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.1.3",
|
||||
"@actions/github": "^1.1.0",
|
||||
"@octokit/rest": "^16.33.1",
|
||||
"actions-toolkit": "^3.0.1"
|
||||
}
|
||||
}
|
||||
@@ -1,37 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { Toolkit } from 'actions-toolkit';
|
||||
|
||||
const tools = new Toolkit({
|
||||
event: 'pull_request',
|
||||
secrets: ['GITHUB_TOKEN']
|
||||
});
|
||||
|
||||
const label = tools.inputs.label || 'Port Request';
|
||||
|
||||
async function run() {
|
||||
try {
|
||||
let prNumber = tools.context.payload?.pull_request?.number;
|
||||
|
||||
if (prNumber === undefined) {
|
||||
console.error('PR Number was undefined');
|
||||
tools.log.error('PR Number was undefined');
|
||||
return;
|
||||
}
|
||||
|
||||
tools.github.issues.addLabels({
|
||||
...tools.context.repo,
|
||||
labels: [label],
|
||||
issue_number: prNumber
|
||||
});
|
||||
|
||||
} catch (ex) {
|
||||
console.error(ex);
|
||||
tools.log.error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
run();
|
||||
@@ -1,76 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
/* Basic Options */
|
||||
"target": "es6",
|
||||
/* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */
|
||||
"module": "commonjs",
|
||||
/* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
|
||||
// "lib": [], /* Specify library files to be included in the compilation. */
|
||||
// "allowJs": true, /* Allow javascript files to be compiled. */
|
||||
// "checkJs": true, /* Report errors in .js files. */
|
||||
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
|
||||
"declaration": true,
|
||||
/* Generates corresponding '.d.ts' file. */
|
||||
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
|
||||
"sourceMap": true,
|
||||
/* Generates corresponding '.map' file. */
|
||||
// "outFile": "./", /* Concatenate and emit output to single file. */
|
||||
"outDir": "out",
|
||||
/* Redirect output structure to the directory. */
|
||||
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
|
||||
// "composite": true, /* Enable project compilation */
|
||||
// "removeComments": true, /* Do not emit comments to output. */
|
||||
// "noEmit": true, /* Do not emit outputs. */
|
||||
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
|
||||
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
|
||||
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
|
||||
|
||||
/* Strict Type-Checking Options */
|
||||
"strict": true,
|
||||
/* Enable all strict type-checking options. */
|
||||
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
|
||||
"strictNullChecks": true,
|
||||
/* Enable strict null checks. */
|
||||
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
|
||||
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
|
||||
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
|
||||
"alwaysStrict": true,
|
||||
/* Parse in strict mode and emit "use strict" for each source file. */
|
||||
|
||||
/* Additional Checks */
|
||||
// "noUnusedLocals": true, /* Report errors on unused locals. */
|
||||
// "noUnusedParameters": true, /* Report errors on unused parameters. */
|
||||
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
|
||||
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
|
||||
|
||||
/* Module Resolution Options */
|
||||
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
|
||||
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
|
||||
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
|
||||
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||
"types": ["node"],
|
||||
"skipLibCheck": true,
|
||||
/* Type declaration files to be included in compilation. */
|
||||
"allowSyntheticDefaultImports": true,
|
||||
/* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
|
||||
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||
|
||||
/* Source Map Options */
|
||||
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
|
||||
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
|
||||
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
|
||||
|
||||
/* Experimental Options */
|
||||
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
|
||||
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
|
||||
},
|
||||
"include": [
|
||||
"./src/**/*.ts"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"env": {
|
||||
"commonjs": true,
|
||||
"es6": true,
|
||||
"node": true
|
||||
},
|
||||
"extends": "eslint:recommended",
|
||||
"globals": {
|
||||
"Atomics": "readonly",
|
||||
"SharedArrayBuffer": "readonly"
|
||||
},
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 2018
|
||||
},
|
||||
"rules": {
|
||||
}
|
||||
}
|
||||
100
build/actions/AutoMerge/.gitignore
vendored
@@ -1,100 +0,0 @@
|
||||
|
||||
# Created by https://www.gitignore.io/api/node
|
||||
# Edit at https://www.gitignore.io/?templates=node
|
||||
|
||||
### Node ###
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# TypeScript v1 declaration files
|
||||
typings/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variables file
|
||||
.env
|
||||
.env.test
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
|
||||
# next.js build output
|
||||
.next
|
||||
|
||||
# nuxt.js build output
|
||||
.nuxt
|
||||
|
||||
# react / gatsby
|
||||
public/
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# End of https://www.gitignore.io/api/node
|
||||
|
||||
out/
|
||||
@@ -1,6 +0,0 @@
|
||||
name: 'Auto Merge'
|
||||
description: 'Automatically merges a PR based on command'
|
||||
author: 'aaomidi'
|
||||
runs:
|
||||
using: 'node12'
|
||||
main: 'dist/index.js'
|
||||
1
build/actions/AutoMerge/dist/index.d.ts
vendored
@@ -1 +0,0 @@
|
||||
export {};
|
||||
35769
build/actions/AutoMerge/dist/index.js
vendored
File diff suppressed because it is too large
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"name": "auto-merge",
|
||||
"version": "1.0.0",
|
||||
"main": "src/index.js",
|
||||
"repository": "git@github.com:aaomidi/AutoMerge.git",
|
||||
"author": "Amir Omidi <amir@aaomidi.com>",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"package": "ncc build src/index.ts -o dist",
|
||||
"lint": "eslint src/index.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^12.11.1",
|
||||
"@zeit/ncc": "^0.21.1",
|
||||
"eslint": "^6.8.0",
|
||||
"typescript": "^3.8.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.1.3",
|
||||
"@actions/github": "^1.1.0",
|
||||
"@octokit/rest": "^16.33.1",
|
||||
"actions-toolkit": "^3.0.1"
|
||||
}
|
||||
}
|
||||
@@ -1,134 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { Toolkit } from 'actions-toolkit';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
|
||||
interface LabelDefinition {
|
||||
id: number,
|
||||
name: string
|
||||
}
|
||||
|
||||
interface RepoContext {
|
||||
repo: string,
|
||||
owner: string
|
||||
}
|
||||
|
||||
const tools = new Toolkit({
|
||||
event: 'issue_comment',
|
||||
secrets: ['GITHUB_TOKEN']
|
||||
});
|
||||
|
||||
const labelToCheckFor = tools.inputs.label || 'Approved';
|
||||
|
||||
const fileToCheckFor = tools.inputs.filePath || './.github/mergers.json';
|
||||
|
||||
const checkMerged = async (repoContext: RepoContext, pullNumber: number): Promise<boolean> => {
|
||||
let isMerged: boolean;
|
||||
try {
|
||||
const result = await tools.github.pulls.checkIfMerged({
|
||||
...repoContext,
|
||||
pull_number: pullNumber
|
||||
});
|
||||
isMerged = result.status === 204;
|
||||
} catch (ex) {
|
||||
isMerged = false;
|
||||
}
|
||||
return isMerged;
|
||||
};
|
||||
|
||||
const checkCollabrator = async (repoContext: RepoContext, username: string): Promise<boolean> => {
|
||||
let isCollabrator: boolean;
|
||||
try {
|
||||
const result = await tools.github.repos.checkCollaborator({
|
||||
...repoContext,
|
||||
username,
|
||||
});
|
||||
isCollabrator = result.status === 204;
|
||||
} catch (ex) {
|
||||
isCollabrator = false;
|
||||
}
|
||||
return isCollabrator;
|
||||
};
|
||||
|
||||
tools.command('merge', async () => {
|
||||
try {
|
||||
const issue = tools.context.payload.issue;
|
||||
|
||||
if (issue?.pull_request === undefined) {
|
||||
console.log('This command only works on pull requests');
|
||||
return;
|
||||
}
|
||||
|
||||
const sender = tools.context.payload.sender;
|
||||
|
||||
const senderName = sender?.login ?? ' Unknown Sender';
|
||||
const issueNumber = issue?.number;
|
||||
|
||||
if (!issueNumber) {
|
||||
return tools.log.error('Issue number not defined.');
|
||||
}
|
||||
|
||||
const isMerged = await checkMerged(tools.context.repo, issueNumber);
|
||||
|
||||
if (isMerged === true) {
|
||||
console.log('PR is already merged');
|
||||
return;
|
||||
}
|
||||
|
||||
const mergers: string[] = JSON.parse(tools.getFile(fileToCheckFor));
|
||||
|
||||
if (!mergers.includes(senderName)) {
|
||||
console.log('Unrecognized user tried to merge!', senderName);
|
||||
return;
|
||||
}
|
||||
|
||||
const isCollabrator = await checkCollabrator(tools.context.repo, senderName);
|
||||
|
||||
if (isCollabrator !== true) {
|
||||
console.log('User is not a collabrator');
|
||||
return;
|
||||
}
|
||||
|
||||
const labels: LabelDefinition[] = issue.labels || [];
|
||||
|
||||
const foundLabel = labels.find(l => l.name === labelToCheckFor);
|
||||
|
||||
if (foundLabel === undefined) {
|
||||
console.log(`Label ${labelToCheckFor} must be applied`);
|
||||
const createCommentParams: Octokit.IssuesCreateCommentParams = {
|
||||
...tools.context.repo,
|
||||
issue_number: issueNumber,
|
||||
body: `The label ${labelToCheckFor} is required for using this command.`
|
||||
};
|
||||
await tools.github.issues.createComment(createCommentParams);
|
||||
return;
|
||||
}
|
||||
|
||||
const createCommentParams: Octokit.IssuesCreateCommentParams = {
|
||||
...tools.context.repo,
|
||||
issue_number: issueNumber,
|
||||
body: `Merging PR based on approval from @${senderName}`
|
||||
};
|
||||
|
||||
const commentResult = await tools.github.issues.createComment(createCommentParams);
|
||||
|
||||
if (commentResult.status !== 201) {
|
||||
console.log('Comment not created');
|
||||
return;
|
||||
}
|
||||
|
||||
const mergeResult = await tools.github.pulls.merge({
|
||||
...tools.context.repo,
|
||||
pull_number: issueNumber,
|
||||
merge_method: 'squash'
|
||||
});
|
||||
console.log(mergeResult);
|
||||
} catch (ex) {
|
||||
console.error(ex);
|
||||
}
|
||||
});
|
||||
|
||||
console.log('Running...');
|
||||
@@ -1,76 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
/* Basic Options */
|
||||
"target": "es6",
|
||||
/* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */
|
||||
"module": "commonjs",
|
||||
/* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
|
||||
// "lib": [], /* Specify library files to be included in the compilation. */
|
||||
// "allowJs": true, /* Allow javascript files to be compiled. */
|
||||
// "checkJs": true, /* Report errors in .js files. */
|
||||
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
|
||||
"declaration": true,
|
||||
/* Generates corresponding '.d.ts' file. */
|
||||
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
|
||||
"sourceMap": true,
|
||||
/* Generates corresponding '.map' file. */
|
||||
// "outFile": "./", /* Concatenate and emit output to single file. */
|
||||
"outDir": "out",
|
||||
/* Redirect output structure to the directory. */
|
||||
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
|
||||
// "composite": true, /* Enable project compilation */
|
||||
// "removeComments": true, /* Do not emit comments to output. */
|
||||
// "noEmit": true, /* Do not emit outputs. */
|
||||
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
|
||||
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
|
||||
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
|
||||
|
||||
/* Strict Type-Checking Options */
|
||||
"strict": true,
|
||||
/* Enable all strict type-checking options. */
|
||||
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
|
||||
"strictNullChecks": true,
|
||||
/* Enable strict null checks. */
|
||||
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
|
||||
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
|
||||
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
|
||||
"alwaysStrict": true,
|
||||
/* Parse in strict mode and emit "use strict" for each source file. */
|
||||
|
||||
/* Additional Checks */
|
||||
// "noUnusedLocals": true, /* Report errors on unused locals. */
|
||||
// "noUnusedParameters": true, /* Report errors on unused parameters. */
|
||||
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
|
||||
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
|
||||
|
||||
/* Module Resolution Options */
|
||||
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
|
||||
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
|
||||
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
|
||||
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||
"types": ["node"],
|
||||
"skipLibCheck": true,
|
||||
/* Type declaration files to be included in compilation. */
|
||||
"allowSyntheticDefaultImports": true,
|
||||
/* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
|
||||
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||
|
||||
/* Source Map Options */
|
||||
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
|
||||
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
|
||||
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
|
||||
|
||||
/* Experimental Options */
|
||||
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
|
||||
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
|
||||
},
|
||||
"include": [
|
||||
"./src/**/*.ts"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,36 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as vfs from 'vinyl-fs';
|
||||
|
||||
const files = [
|
||||
'.build/extensions/**/*.vsix', // external extensions
|
||||
'.build/win32-x64/**/*.{exe,zip}', // windows binaries
|
||||
'.build/linux/sha256hashes.txt', // linux hashes
|
||||
'.build/linux/deb/amd64/deb/*', // linux debs
|
||||
'.build/linux/rpm/x86_64/*', // linux rpms
|
||||
'.build/linux/server/*', // linux server
|
||||
'.build/linux/archive/*', // linux archive
|
||||
'.build/docker/**', // docker images
|
||||
'.build/darwin/**', // darwin binaries
|
||||
'.build/version.json' // version information
|
||||
];
|
||||
|
||||
async function main() {
|
||||
return new Promise((resolve, reject) => {
|
||||
const stream = vfs.src(files, { base: '.build', allowEmpty: true })
|
||||
.pipe(vfs.dest(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!));
|
||||
|
||||
stream.on('end', () => resolve());
|
||||
stream.on('error', e => reject(e));
|
||||
});
|
||||
}
|
||||
|
||||
main().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,133 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import { Readable } from 'stream';
|
||||
import * as crypto from 'crypto';
|
||||
import * as azure from 'azure-storage';
|
||||
import * as mime from 'mime';
|
||||
import { CosmosClient } from '@azure/cosmos';
|
||||
|
||||
interface Asset {
|
||||
platform: string;
|
||||
type: string;
|
||||
url: string;
|
||||
mooncakeUrl?: string;
|
||||
hash: string;
|
||||
sha256hash: string;
|
||||
size: number;
|
||||
supportsFastUpdate?: boolean;
|
||||
}
|
||||
|
||||
if (process.argv.length !== 6) {
|
||||
console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
function hashStream(hashName: string, stream: Readable): Promise<string> {
|
||||
return new Promise<string>((c, e) => {
|
||||
const shasum = crypto.createHash(hashName);
|
||||
|
||||
stream
|
||||
.on('data', shasum.update.bind(shasum))
|
||||
.on('error', e)
|
||||
.on('close', () => c(shasum.digest('hex')));
|
||||
});
|
||||
}
|
||||
|
||||
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
|
||||
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
|
||||
return existsResult.exists;
|
||||
}
|
||||
|
||||
async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, filePath: string, fileName: string): Promise<void> {
|
||||
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
|
||||
contentSettings: {
|
||||
contentType: mime.lookup(filePath),
|
||||
contentDisposition: `attachment; filename="${fileName}"`,
|
||||
cacheControl: 'max-age=31536000, public'
|
||||
}
|
||||
};
|
||||
|
||||
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
function getEnv(name: string): string {
|
||||
const result = process.env[name];
|
||||
|
||||
if (typeof result === 'undefined') {
|
||||
throw new Error('Missing env: ' + name);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const [, , platform, type, fileName, filePath] = process.argv;
|
||||
const quality = getEnv('VSCODE_QUALITY');
|
||||
const commit = getEnv('BUILD_SOURCEVERSION');
|
||||
|
||||
console.log('Creating asset...');
|
||||
|
||||
const stat = await new Promise<fs.Stats>((c, e) => fs.stat(filePath, (err, stat) => err ? e(err) : c(stat)));
|
||||
const size = stat.size;
|
||||
|
||||
console.log('Size:', size);
|
||||
|
||||
const stream = fs.createReadStream(filePath);
|
||||
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
|
||||
|
||||
console.log('SHA1:', sha1hash);
|
||||
console.log('SHA256:', sha256hash);
|
||||
|
||||
const blobName = commit + '/' + fileName;
|
||||
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
|
||||
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
const blobExists = await doesAssetExist(blobService, quality, blobName);
|
||||
|
||||
if (blobExists) {
|
||||
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Uploading blobs to Azure storage...');
|
||||
|
||||
await uploadBlob(blobService, quality, blobName, filePath, fileName);
|
||||
|
||||
console.log('Blobs successfully uploaded.');
|
||||
|
||||
const asset: Asset = {
|
||||
platform,
|
||||
type,
|
||||
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
|
||||
hash: sha1hash,
|
||||
sha256hash,
|
||||
size
|
||||
};
|
||||
|
||||
// Remove this if we ever need to rollback fast updates for windows
|
||||
if (/win32/.test(platform)) {
|
||||
asset.supportsFastUpdate = true;
|
||||
}
|
||||
|
||||
console.log('Asset:', JSON.stringify(asset, null, ' '));
|
||||
|
||||
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const scripts = client.database('builds').container(quality).scripts;
|
||||
await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);
|
||||
}
|
||||
|
||||
main().then(() => {
|
||||
console.log('Asset successfully created');
|
||||
process.exit(0);
|
||||
}, err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,60 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import { CosmosClient } from '@azure/cosmos';
|
||||
|
||||
if (process.argv.length !== 3) {
|
||||
console.error('Usage: node createBuild.js VERSION');
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
function getEnv(name: string): string {
|
||||
const result = process.env[name];
|
||||
|
||||
if (typeof result === 'undefined') {
|
||||
throw new Error('Missing env: ' + name);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const [, , _version] = process.argv;
|
||||
const quality = getEnv('VSCODE_QUALITY');
|
||||
const commit = getEnv('BUILD_SOURCEVERSION');
|
||||
const queuedBy = getEnv('BUILD_QUEUEDBY');
|
||||
const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
|
||||
const version = _version + (quality === 'stable' ? '' : `-${quality}`);
|
||||
|
||||
console.log('Creating build...');
|
||||
console.log('Quality:', quality);
|
||||
console.log('Version:', version);
|
||||
console.log('Commit:', commit);
|
||||
|
||||
const build = {
|
||||
id: commit,
|
||||
timestamp: (new Date()).getTime(),
|
||||
version,
|
||||
isReleased: false,
|
||||
sourceBranch,
|
||||
queuedBy,
|
||||
assets: [],
|
||||
updates: {}
|
||||
};
|
||||
|
||||
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const scripts = client.database('builds').container(quality).scripts;
|
||||
await scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]);
|
||||
}
|
||||
|
||||
main().then(() => {
|
||||
console.log('Build successfully created');
|
||||
process.exit(0);
|
||||
}, err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,19 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
cd $BUILD_STAGINGDIRECTORY
|
||||
mkdir extraction
|
||||
cd extraction
|
||||
git clone --depth 1 https://github.com/Microsoft/vscode-extension-telemetry.git
|
||||
git clone --depth 1 https://github.com/Microsoft/vscode-chrome-debug-core.git
|
||||
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug2.git
|
||||
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug.git
|
||||
git clone --depth 1 https://github.com/Microsoft/vscode-html-languageservice.git
|
||||
git clone --depth 1 https://github.com/Microsoft/vscode-json-languageservice.git
|
||||
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
|
||||
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
|
||||
mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
|
||||
mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json
|
||||
mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json
|
||||
cd ..
|
||||
rm -rf extraction
|
||||
18
build/azure-pipelines/common/installDistro.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as cp from 'child_process';
|
||||
|
||||
function yarnInstall(packageName: string): void {
|
||||
cp.execSync(`yarn add --no-lockfile ${packageName}`);
|
||||
}
|
||||
|
||||
const product = require('../../../product.json');
|
||||
const dependencies = product.dependencies || {} as { [name: string]: string; };
|
||||
|
||||
Object.keys(dependencies).forEach(name => {
|
||||
const url = dependencies[name];
|
||||
yarnInstall(url);
|
||||
});
|
||||
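Note: the compiled form of this script is invoked later in this diff as node build/azure-pipelines/common/installDistro.js. For reference, a minimal sketch of the product.json shape it consumes; the package name and URL below are placeholders, not real entries from the repository:

// Sketch only, not part of the diff: installDistro.ts expects product.json
// to expose an optional "dependencies" map of package name -> install URL.
const exampleProduct = {
	dependencies: {
		'some-internal-package': 'https://example.com/some-internal-package-1.0.0.tgz'
	}
};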
@@ -1,9 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"

# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"
@@ -1,87 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';

const fileNames = [
	'fake.html',
	'host.js',
	'index.html',
	'main.js',
	'service-worker.js'
];

async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
	await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}

async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
	const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
	return existsResult.exists;
}

async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
	const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
		contentSettings: {
			contentType: mime.lookup(file),
			cacheControl: 'max-age=31536000, public'
		}
	};

	await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}

async function publish(commit: string, files: readonly string[]): Promise<void> {

	console.log('Publishing...');
	console.log('Commit:', commit);
	const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;

	const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
		.withFilter(new azure.ExponentialRetryPolicyFilter(20));

	await assertContainer(blobService, commit);

	for (const file of files) {
		const blobName = basename(file);
		const blobExists = await doesBlobExist(blobService, commit, blobName);
		if (blobExists) {
			console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
			continue;
		}
		console.log('Uploading blob to Azure storage...');
		await uploadBlob(blobService, commit, blobName, file);
	}

	console.log('Blobs successfully uploaded.');
}

function main(): void {
	const commit = process.env['BUILD_SOURCEVERSION'];

	if (!commit) {
		console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
		return;
	}

	const opts = minimist(process.argv.slice(2));
	const [directory] = opts._;

	const files = fileNames.map(fileName => join(directory, fileName));

	publish(commit, files).catch(err => {
		console.error(err);
		process.exit(1);
	});
}

if (process.argv.length < 3) {
	console.error('Usage: node publish.js <directory>');
	process.exit(-1);
}
main();
@@ -152,6 +152,13 @@ async function publish(commit: string, quality: string, platform: string, type:

	const queuedBy = process.env['BUILD_QUEUEDBY']!;
	const sourceBranch = process.env['BUILD_SOURCEBRANCH']!;
	const isReleased = (
		// Insiders: nightly build from master
		(quality === 'insider' && /^master$|^refs\/heads\/master$/.test(sourceBranch) && /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy)) ||

		// Exploration: any build from electron-4.0.x branch
		(quality === 'exploration' && /^electron-4.0.x$|^refs\/heads\/electron-4.0.x$/.test(sourceBranch))
	);

	console.log('Publishing...');
	console.log('Quality:', quality);
@@ -161,6 +168,7 @@ async function publish(commit: string, quality: string, platform: string, type:
	console.log('Version:', version);
	console.log('Commit:', commit);
	console.log('Is Update:', isUpdate);
	console.log('Is Released:', isReleased);
	console.log('File:', file);

	const stat = await new Promise<fs.Stats>((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
@@ -215,15 +223,11 @@ async function publish(commit: string, quality: string, platform: string, type:

	console.log('Asset:', JSON.stringify(asset, null, '  '));

	// {{SQL CARBON EDIT}}
	// Insiders: nightly build from master
	const isReleased = (quality === 'insider' && /^master$|^refs\/heads\/master$/.test(sourceBranch) && /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy));

	const release = {
		id: commit,
		timestamp: (new Date()).getTime(),
		version,
		isReleased: isReleased,
		isReleased: config.frozen ? false : isReleased,
		sourceBranch,
		queuedBy,
		assets: [] as Array<Asset>,
@@ -242,6 +246,11 @@ async function publish(commit: string, quality: string, platform: string, type:
}

function main(): void {
	if (process.env['VSCODE_BUILD_SKIP_PUBLISH']) {
		console.warn('Skipping publish due to VSCODE_BUILD_SKIP_PUBLISH');
		return;
	}

	const commit = process.env['BUILD_SOURCEVERSION'];

	if (!commit) {
@@ -1,109 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { DocumentClient } from 'documentdb';

interface Config {
	id: string;
	frozen: boolean;
}

function createDefaultConfig(quality: string): Config {
	return {
		id: quality,
		frozen: false
	};
}

function getConfig(quality: string): Promise<Config> {
	const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/config';
	const query = {
		query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
		parameters: [
			{ name: '@quality', value: quality }
		]
	};

	return new Promise<Config>((c, e) => {
		client.queryDocuments(collection, query).toArray((err, results) => {
			if (err && err.code !== 409) { return e(err); }

			c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
		});
	});
}

function doRelease(commit: string, quality: string): Promise<void> {
	const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/' + quality;
	const query = {
		query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
		parameters: [{ name: '@id', value: commit }]
	};

	let updateTries = 0;

	function update(): Promise<void> {
		updateTries++;

		return new Promise<void>((c, e) => {
			client.queryDocuments(collection, query).toArray((err, results) => {
				if (err) { return e(err); }
				if (results.length !== 1) { return e(new Error('No documents')); }

				const release = results[0];
				release.isReleased = true;

				client.replaceDocument(release._self, release, err => {
					if (err && err.code === 409 && updateTries < 5) { return c(update()); }
					if (err) { return e(err); }

					console.log('Build successfully updated.');
					c();
				});
			});
		});
	}

	return update();
}

async function release(commit: string, quality: string): Promise<void> {
	const config = await getConfig(quality);

	console.log('Quality config:', config);

	if (config.frozen) {
		console.log(`Skipping release because quality ${quality} is frozen.`);
		return;
	}

	await doRelease(commit, quality);
}

function env(name: string): string {
	const result = process.env[name];

	if (!result) {
		throw new Error(`Skipping release due to missing env: ${name}`);
	}

	return result;
}

async function main(): Promise<void> {
	const commit = env('BUILD_SOURCEVERSION');
	const quality = env('VSCODE_QUALITY');

	await release(commit, quality);
}

main().catch(err => {
	console.error(err);
	process.exit(1);
});
@@ -1,70 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { CosmosClient } from '@azure/cosmos';

function getEnv(name: string): string {
	const result = process.env[name];

	if (typeof result === 'undefined') {
		throw new Error('Missing env: ' + name);
	}

	return result;
}

interface Config {
	id: string;
	frozen: boolean;
}

function createDefaultConfig(quality: string): Config {
	return {
		id: quality,
		frozen: false
	};
}

async function getConfig(client: CosmosClient, quality: string): Promise<Config> {
	const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;

	const res = await client.database('builds').container('config').items.query(query).fetchAll();

	if (res.resources.length === 0) {
		return createDefaultConfig(quality);
	}

	return res.resources[0] as Config;
}

async function main(): Promise<void> {
	const commit = getEnv('BUILD_SOURCEVERSION');
	const quality = getEnv('VSCODE_QUALITY');

	const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const config = await getConfig(client, quality);

	console.log('Quality config:', config);

	if (config.frozen) {
		console.log(`Skipping release because quality ${quality} is frozen.`);
		return;
	}

	console.log(`Releasing build ${commit}...`);

	const scripts = client.database('builds').container(quality).scripts;
	await scripts.storedProcedure('releaseBuild').execute('', [commit]);
}

main().then(() => {
	console.log('Build successfully released');
	process.exit(0);
}, err => {
	console.error(err);
	process.exit(1);
});
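Aside, a small sketch (assuming @azure/cosmos v3, and mirroring the parameterized form already used by the deleted documentdb-based script earlier in this diff) of the same config lookup expressed with a SqlQuerySpec instead of an interpolated string:

// Sketch only; not part of the diff. Same lookup as getConfig above,
// but passing query parameters rather than interpolating the quality value.
const querySpec = {
	query: 'SELECT TOP 1 * FROM c WHERE c.id = @quality',
	parameters: [{ name: '@quality', value: quality }]
};
const res = await client.database('builds').container('config').items.query<Config>(querySpec).fetchAll();
const config = res.resources.length === 0 ? createDefaultConfig(quality) : res.resources[0] as Config;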
@@ -36,6 +36,7 @@ export interface IVersionAccessor extends IApplicationAccessor {
enum Platform {
	WIN_32 = 'win32-ia32',
	WIN_64 = 'win32-x64',
	LINUX_32 = 'linux-ia32',
	LINUX_64 = 'linux-x64',
	MAC_OS = 'darwin-x64'
}
@@ -146,10 +147,6 @@ async function ensureVersionAndSymbols(options: IOptions) {
	// Check version does not exist
	console.log(`HockeyApp: checking for existing version ${options.versions.code} (${options.platform})`);
	const versions = await getVersions({ accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId });
	if (!Array.isArray(versions.app_versions)) {
		throw new Error(`Unexpected response: ${JSON.stringify(versions)}`);
	}

	if (versions.app_versions.some(v => v.version === options.versions.code)) {
		console.log(`HockeyApp: Returning without uploading symbols because version ${options.versions.code} (${options.platform}) was already found`);
		return;
@@ -188,17 +185,13 @@ const hockeyAppToken = process.argv[3];
const is64 = process.argv[4] === 'x64';
const hockeyAppId = process.argv[5];

if (process.argv.length !== 6) {
	throw new Error(`HockeyApp: Unexpected number of arguments. Got ${process.argv}`);
}

let platform: Platform;
if (process.platform === 'darwin') {
	platform = Platform.MAC_OS;
} else if (process.platform === 'win32') {
	platform = is64 ? Platform.WIN_64 : Platform.WIN_32;
} else {
	platform = Platform.LINUX_64;
	platform = is64 ? Platform.LINUX_64 : Platform.LINUX_32;
}

// Create version and upload symbols in HockeyApp
@@ -219,9 +212,7 @@ if (repository && codeVersion && electronVersion && (product.quality === 'stable
}).then(() => {
	console.log('HockeyApp: done');
}).catch(error => {
	console.error(`HockeyApp: error ${error} (AppID: ${hockeyAppId})`);

	return process.exit(1);
	console.error(`HockeyApp: error (${error})`);
});
} else {
	console.log(`HockeyApp: skipping due to unexpected context (repository: ${repository}, codeVersion: ${codeVersion}, electronVersion: ${electronVersion}, quality: ${product.quality})`);
@@ -8,7 +8,7 @@
import * as url from 'url';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';
import { DocumentClient, RetrievedDocument } from 'documentdb';

function log(...args: any[]) {
	console.log(...[`[${new Date().toISOString()}]`, ...args]);
@@ -23,7 +23,7 @@ if (process.argv.length < 3) {
	process.exit(-1);
}

interface Build {
interface Build extends RetrievedDocument {
	assets: Asset[];
}

@@ -38,20 +38,62 @@ interface Asset {
	supportsFastUpdate?: boolean;
}

function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
	const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/' + quality;
	const updateQuery = {
		query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
		parameters: [{ name: '@id', value: commit }]
	};

	let updateTries = 0;

	function _update(): Promise<void> {
		updateTries++;

		return new Promise<void>((c, e) => {
			client.queryDocuments(collection, updateQuery).toArray((err, results) => {
				if (err) { return e(err); }
				if (results.length !== 1) { return e(new Error('No documents')); }

				const release = results[0];

				release.assets = [
					...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
					asset
				];

				client.replaceDocument(release._self, release, err => {
					if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
					if (err) { return e(err); }

					log('Build successfully updated.');
					c();
				});
			});
		});
	}

	return _update();
}

async function sync(commit: string, quality: string): Promise<void> {
	log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);

	const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const container = client.database('builds').container(quality);
	const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = `dbs/builds/colls/${quality}`;
	const query = {
		query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
		parameters: [{ name: '@id', value: commit }]
	};

	const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
	const res = await container.items.query<Build>(query, {}).fetchAll();

	if (res.resources.length !== 1) {
		throw new Error(`No builds found for ${commit}`);
	}

	const build = res.resources[0];
	const build = await new Promise<Build>((c, e) => {
		cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
			if (err) { return e(err); }
			if (results.length !== 1) { return e(new Error('No documents')); }
			c(results[0] as Build);
		});
	});

	log(`Found build for ${commit}, with ${build.assets.length} assets`);

@@ -98,9 +140,8 @@ async function sync(commit: string, quality: string): Promise<void> {
	await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));

	log(`  Updating build in DB...`);
	const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
	await container.scripts.storedProcedure('setAssetMooncakeUrl')
		.execute('', [commit, asset.platform, asset.type, mooncakeUrl]);
	asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
	await updateBuild(commit, quality, asset.platform, asset.type, asset);

	log(`  Done ✔️`);
} catch (err) {
@@ -112,6 +153,11 @@ async function sync(commit: string, quality: string): Promise<void> {
}

function main(): void {
	if (process.env['VSCODE_BUILD_SKIP_PUBLISH']) {
		error('Skipping publish due to VSCODE_BUILD_SKIP_PUBLISH');
		return;
	}

	const commit = process.env['BUILD_SOURCEVERSION'];

	if (!commit) {
@@ -1,72 +0,0 @@
[
	{
		"eventPrefix": "typescript-language-features/",
		"sourceDirs": [
			"../../s/extensions/typescript-language-features"
		],
		"excludedDirs": [],
		"applyEndpoints": true
	},
	{
		"eventPrefix": "git/",
		"sourceDirs": [
			"../../s/extensions/git"
		],
		"excludedDirs": [],
		"applyEndpoints": true
	},
	{
		"eventPrefix": "extension-telemetry/",
		"sourceDirs": [
			"vscode-extension-telemetry"
		],
		"excludedDirs": [],
		"applyEndpoints": true
	},
	{
		"eventPrefix": "vscode-markdown/",
		"sourceDirs": [
			"../../s/extensions/markdown-language-features"
		],
		"excludedDirs": [],
		"applyEndpoints": true
	},
	{
		"eventPrefix": "html-language-features/",
		"sourceDirs": [
			"../../s/extensions/html-language-features",
			"vscode-html-languageservice"
		],
		"excludedDirs": [],
		"applyEndpoints": true
	},
	{
		"eventPrefix": "json-language-features/",
		"sourceDirs": [
			"../../s/extensions/json-language-features",
			"vscode-json-languageservice"
		],
		"excludedDirs": [],
		"applyEndpoints": true
	},
	{
		"eventPrefix": "ms-vscode.node2/",
		"sourceDirs": [
			"vscode-chrome-debug-core",
			"vscode-node-debug2"
		],
		"excludedDirs": [],
		"applyEndpoints": true,
		"patchDebugEvents": true
	},
	{
		"eventPrefix": "ms-vscode.node/",
		"sourceDirs": [
			"vscode-chrome-debug-core",
			"vscode-node-debug"
		],
		"excludedDirs": [],
		"applyEndpoints": true,
		"patchDebugEvents": true
	}
]
build/azure-pipelines/darwin/build.sh (new executable file, 4 lines)
@@ -0,0 +1,4 @@
#!/usr/bin/env bash
set -e
yarn gulp vscode-darwin-min
yarn gulp upload-vscode-sourcemaps
@@ -1,55 +1,47 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
    versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
    versionSpec: "1.10.1"
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
#   inputs:
#     keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
#     targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
#     vstsFeed: '$(ArtifactFeed)'
#     condition: eq(variables['System.PullRequest.PullRequestId'], '')
- script: |
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
    yarn
  displayName: Install Dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
#   inputs:
#     keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
#     targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
#     vstsFeed: '$(ArtifactFeed)'
#     condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
- script: |
    yarn electron x64
    yarn gulp electron-x64
  displayName: Download Electron
- script: |
    yarn gulp hygiene
  displayName: Run Hygiene Checks
- script: | # {{SQL CARBON EDIT}} add step
    yarn strict-vscode
  displayName: Run Strict Null Check.
# - script: | {{SQL CARBON EDIT}} remove step
#     yarn monaco-compile-check
#   displayName: Run Monaco Editor Checks
- script: |
    yarn valid-layers-check
  displayName: Run Valid Layers Checks
    yarn monaco-compile-check
  displayName: Run Monaco Editor Checks
- script: |
    yarn compile
  displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
#     yarn download-builtin-extensions
#   displayName: Download Built-in Extensions
- script: |
    yarn download-builtin-extensions
  displayName: Download Built-in Extensions
- script: |
    ./scripts/test.sh --tfs "Unit Tests"
  displayName: Run Unit Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable for now @TODO @anthonydresser
#     yarn test-browser --browser chromium --browser webkit
#   displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} remove step
#     ./scripts/test-integration.sh --tfs "Integration Tests"
#   displayName: Run Integration Tests (Electron)
  displayName: Run Unit Tests
- script: |
    ./scripts/test-integration.sh --tfs "Integration Tests"
  displayName: Run Integration Tests
- task: PublishTestResults@2
  displayName: Publish Tests Results
  inputs:

@@ -1,11 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"

# ensure drop directories exist
mkdir -p $REPO/.build/darwin/{archive,server}

# package Remote Extension Host
pushd .. && mv azuredatastudio-reh-darwin azuredatastudio-server-darwin && zip -Xry $REPO/.build/darwin/server/azuredatastudio-server-darwin.zip azuredatastudio-server-darwin && popd

node build/azure-pipelines/common/copyArtifacts.js
@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
	<dict>
		<key>com.apple.security.cs.allow-jit</key>
		<true/>
		<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
		<true/>
		<key>com.apple.security.cs.disable-library-validation</key>
		<true/>
		<key>com.apple.security.cs.allow-dyld-environment-variables</key>
		<true/>
	</dict>
</plist>
@@ -1,22 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
	<dict>
		<key>com.apple.security.cs.allow-jit</key>
		<true/>
		<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
		<true/>
		<key>com.apple.security.cs.disable-executable-page-protection</key>
		<true/>
		<key>com.apple.security.cs.disable-library-validation</key>
		<true/>
		<key>com.apple.security.network.client</key>
		<true/>
		<key>com.apple.security.network.server</key>
		<true/>
		<key>com.apple.security.app-sandbox</key>
		<false/>
		<key>com.apple.security.automation.apple-events</key>
		<true/>
	</dict>
</plist>
@@ -1,154 +1,59 @@
steps:
- script: |
    mkdir -p .build
    echo -n $BUILD_SOURCEVERSION > .build/commit
    echo -n $VSCODE_QUALITY > .build/quality
  displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
    vstsFeed: 'npm-vscode'
    platformIndependent: true
    alias: 'Compilation'

- script: |
    set -e
    exit 1
  displayName: Check RestoreCache
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
  inputs:
    versionSpec: "12.13.0"
    versionSpec: "10.15.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.x"

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'vscode-builds-subscription'
    KeyVaultName: vscode
    versionSpec: "1.10.1"

- script: |
    set -e

    cat << EOF > ~/.netrc
    machine monacotools.visualstudio.com
    password $(VSO_PAT)
    machine github.com
    login vscode
    password $(github-distro-mixin-password)
    password $(VSCODE_MIXIN_PASSWORD)
    EOF

    git config user.email "vscode@microsoft.com"
    git config user.name "VSCode"
  displayName: Prepare tooling

- script: |
    set -e
    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
    git fetch distro
    git merge $(node -p "require('./package.json').distro")
  displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
    vstsFeed: 'npm-vscode'
    yarn
    yarn gulp mixin
    yarn gulp hygiene
    yarn monaco-compile-check
    node build/azure-pipelines/common/installDistro.js
    node build/lib/builtInExtensions.js
  displayName: Prepare build

- script: |
    set -e
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
  displayName: Install dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
    vstsFeed: 'npm-vscode'
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    set -e
    yarn postinstall
  displayName: Run postinstall scripts
  condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
    set -e
    node build/azure-pipelines/mixin
  displayName: Mix in quality

- script: |
    set -e
    VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
      yarn gulp vscode-darwin-min-ci
    VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
      yarn gulp vscode-reh-darwin-min-ci
    VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
      yarn gulp vscode-reh-web-darwin-min-ci
    VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
    AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
    ./build/azure-pipelines/darwin/build.sh
  displayName: Build

- script: |
    set -e
    ./scripts/test.sh --build --tfs "Unit Tests"
  displayName: Run unit tests (Electron)
  condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
    # APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
    # yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
  displayName: Run unit tests

- script: |
    set -e
    yarn test-browser --build --browser chromium --browser webkit
  displayName: Run unit tests (Browser)
  condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
    # Figure out the full absolute path of the product we just built
    # including the remote server and configure the integration tests
    # to run with these builds instead of running out of sources.
    set -e
    APP_ROOT=$(agent.builddirectory)/VSCode-darwin
    APP_NAME="`ls $APP_ROOT | head -n 1`"
    INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
    VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
    ./scripts/test-integration.sh --build --tfs "Integration Tests"
  displayName: Run integration tests (Electron)
  condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
  displayName: Run integration tests

- script: |
    set -e
    APP_ROOT=$(agent.builddirectory)/VSCode-darwin
    APP_NAME="`ls $APP_ROOT | head -n 1`"
    INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
    VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
    ./resources/server/test/test-remote-integration.sh
  displayName: Run remote integration tests (Electron)
  condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
    set -e
    VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
    ./resources/server/test/test-web-integration.sh --browser webkit
  displayName: Run integration tests (Browser)
  condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
    set -e
    security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
    security default-keychain -s $(agent.tempdirectory)/buildagent.keychain
    security unlock-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
    echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
    security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
    security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
    codesign -s 99FM488X57 --deep --force --options runtime --entitlements build/azure-pipelines/darwin/entitlements.plist $(agent.builddirectory)/VSCode-darwin/*.app
  displayName: Set Hardened Entitlements

- script: |
    set -e
    pushd $(agent.builddirectory)/VSCode-darwin && zip -r -X -y $(agent.builddirectory)/VSCode-darwin.zip * && popd
    pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd
  displayName: Archive build

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
@@ -162,61 +67,21 @@ steps:
        {
          "keyCode": "CP-401337-Apple",
          "operationSetCode": "MacAppDeveloperSign",
          "parameters": [
            {
              "parameterName": "Hardening",
              "parameterValue": "--options=runtime"
            }
          ],
          "parameters": [ ],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 60
    SessionTimeout: 120
  displayName: Codesign

- script: |
    zip -d $(agent.builddirectory)/VSCode-darwin.zip "*.pkg"
  displayName: Clean Archive

- script: |
    APP_ROOT=$(agent.builddirectory)/VSCode-darwin
    APP_NAME="`ls $APP_ROOT | head -n 1`"
    BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
    echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
  displayName: Export bundle identifier

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  inputs:
    ConnectedServiceName: 'ESRP CodeSign'
    FolderPath: '$(agent.builddirectory)'
    Pattern: 'VSCode-darwin.zip'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-401337-Apple",
          "operationSetCode": "MacAppNotarize",
          "parameters": [
            {
              "parameterName": "BundleId",
              "parameterValue": "$(BundleIdentifier)"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 60
  displayName: Notarization

- script: |
    set -e
    VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
    AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
    AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
    AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
    VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
    VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
    AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
    AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
    AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
    VSCODE_HOCKEYAPP_TOKEN="$(VSCODE_HOCKEYAPP_TOKEN)" \
    ./build/azure-pipelines/darwin/publish.sh
  displayName: Publish

@@ -1,27 +1,22 @@
#!/usr/bin/env bash
set -e

# remove pkg from archive
zip -d ../VSCode-darwin.zip "*.pkg"

# publish the build
node build/azure-pipelines/common/createAsset.js \
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
node build/azure-pipelines/common/publish.js \
	"$VSCODE_QUALITY" \
	darwin \
	archive \
	"VSCode-darwin-$VSCODE_QUALITY.zip" \
	$VERSION \
	true \
	../VSCode-darwin.zip

# package Remote Extension Host
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd

# publish Remote Extension Host
node build/azure-pipelines/common/createAsset.js \
	server-darwin \
	archive-unsigned \
	"vscode-server-darwin.zip" \
	../vscode-server-darwin.zip

# publish hockeyapp symbols
# node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" x64 "$VSCODE_HOCKEYAPP_ID_MACOS"
# Skip hockey app because build failure.
# https://github.com/microsoft/vscode/issues/90491
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_MACOS"

# upload configuration
yarn gulp upload-vscode-configuration

@@ -1,250 +0,0 @@
steps:
- task: InstallAppleCertificate@2
  displayName: 'Install developer certificate'
  inputs:
    certSecureFile: 'osx_signing_key.p12'
  condition: eq(variables['signed'], true)

- script: |
    mkdir -p .build
    echo -n $BUILD_SOURCEVERSION > .build/commit
    echo -n $VSCODE_QUALITY > .build/quality
  displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
    vstsFeed: 'BuildCache'
    platformIndependent: true
    alias: 'Compilation'

- script: |
    set -e
    exit 1
  displayName: Check RestoreCache
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
  inputs:
    versionSpec: '10.15.3'

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: '1.x'

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
    KeyVaultName: ado-secrets
    SecretsFilter: 'github-distro-mixin-password'

- script: |
    set -e

    cat << EOF > ~/.netrc
    machine github.com
    login azuredatastudio
    password $(github-distro-mixin-password)
    EOF

    git config user.email "andresse@microsoft.com"
    git config user.name "AzureDataStudio"
  displayName: Prepare tooling

- script: |
    set -e
    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
    git fetch distro
    git merge $(node -p "require('./package.json').distro")
  displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'

- script: |
    set -e
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
  displayName: Install dependencies
  env:
    GITHUB_TOKEN: $(github-distro-mixin-password)
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    set -e
    yarn postinstall
  displayName: Run postinstall scripts
  condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
    set -e
    node build/azure-pipelines/mixin
  displayName: Mix in quality

- script: |
    set -e
    yarn gulp install-sqltoolsservice
  displayName: Install sqltoolsservice

- script: |
    set -e
    yarn gulp package-rebuild-extensions
    yarn gulp vscode-darwin-min-ci
    yarn gulp vscode-reh-darwin-min-ci
    yarn gulp vscode-reh-web-darwin-min-ci
  displayName: Build
  env:
    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

- script: |
    set -e
    ./scripts/test.sh --build --coverage --reporter mocha-junit-reporter
  displayName: Run unit tests
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
    set -e
    pushd ../azuredatastudio-darwin
    ls

    echo "Cleaning the application"
    xattr -cr *.app
    cd *.app
    find . -name '._*' -print0 | xargs -0 rm -rf --
    cd ..

    echo "Signing the application with deep"
    codesign --deep --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS *.app

    cd *.app
    ls
    echo "Signing specific components"
    find . -type f -print0 | xargs -0 file | grep ': *Mach-O' | sed 's/: *Mach-O.*//' | while read -r file; do codesign --options runtime --timestamp --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS --force "$file" || break; done

    echo "Signing Electron again..."
    codesign --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS Contents/Frameworks/Electron\ Framework.framework
    cd ..

    echo "Signing the entire application one more time"
    codesign --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS *.app
    popd
  displayName: 'Manual codesign'
  condition: and(succeeded(), eq(variables['signed'], true))

- script: |
    set -e
    mkdir -p .build/darwin/archive
    pushd ../azuredatastudio-darwin
    ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
    popd
  displayName: 'Archive'

- task: PublishPipelineArtifact@0
  displayName: 'Publish SelfSigned'
  inputs:
    artifactName: darwin-selfsigned
    targetPath: $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  displayName: 'ESRP CodeSigning'
  inputs:
    ConnectedServiceName: 'Code Signing'
    FolderPath: '$(Build.SourcesDirectory)/.build/darwin/archive'
    Pattern: 'azuredatastudio-darwin.zip'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-401337-Apple",
          "operationCode": "MacAppDeveloperSign",
          "parameters": {
            "Hardening": "Enable"
          },
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 90
  condition: and(succeeded(), eq(variables['signed'], true))

- script: |
    zip -d $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip "*.pkg"
  displayName: Clean Archive
  condition: and(succeeded(), eq(variables['signed'], true))

- task: PublishPipelineArtifact@0
  displayName: 'Publish Signed'
  inputs:
    artifactName: darwin-signed
    targetPath: $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
  condition: and(succeeded(), eq(variables['signed'], true))

- task: EsrpCodeSigning@1
  displayName: 'ESRP Notarization'
  inputs:
    ConnectedServiceName: 'Code Signing'
    FolderPath: '$(Build.SourcesDirectory)/.build/darwin/archive'
    Pattern: 'azuredatastudio-darwin.zip'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "KeyCode": "CP-401337-Apple",
          "OperationCode": "MacAppNotarize",
          "Parameters": {
            "BundleId": "com.microsoft.azuredatastudio-$(VSCODE_QUALITY)"
          },
          "ToolName": "sign",
          "ToolVersion": "1.0"
        }
      ]
    SessionTimeout: 120
  condition: and(succeeded(), eq(variables['signed'], true))

- task: PublishPipelineArtifact@0
  displayName: 'Publish Notarized'
  inputs:
    artifactName: darwin-notarized
    targetPath: $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
  condition: and(succeeded(), eq(variables['signed'], true))

- script: |
    set -e
    ./build/azure-pipelines/darwin/createDrop.sh
  displayName: Create Drop

- task: PublishBuildArtifacts@1
  displayName: 'Publish Artifact: drop'

- task: PublishTestResults@2
  displayName: 'Publish Test Results test-results.xml'
  inputs:
    testResultsFiles: 'test-results.xml'
    searchFolder: '$(Build.SourcesDirectory)'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishCodeCoverageResults@1
  displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
  inputs:
    codeCoverageTool: Cobertura
    summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
    reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
  displayName: 'Component Detection'
  inputs:
    failOnAlert: true
@@ -1,19 +0,0 @@
Param(
	[string]$sourcesDir,
	[string]$artifactsDir,
	[string]$storageKey,
	[string]$documentDbKey
)

$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit

$ZipName = "azuredatastudio-darwin.zip"
$Zip = "$artifactsDir\darwin\archive\$ZipName"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive $ZipName $Version true $Zip $CommitId
@@ -1,6 +1,3 @@
pool:
  vmImage: 'Ubuntu-16.04'

trigger:
  branches:
    include: ['master', 'release/*']
@@ -11,35 +8,23 @@ pr:
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "12.13.0"

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'azuredatastudio-adointegration'
    KeyVaultName: ado-secrets
    versionSpec: "10.15.1"

- script: |
    set -e

    cat << EOF > ~/.netrc
    machine github.com
    login azuredatastudio
    password $(github-distro-mixin-password)
    login vscode
    password $(VSCODE_MIXIN_PASSWORD)
    EOF

    git config user.email "andresse@microsoft.com"
    git config user.name "AzureDataStudio"
    git config user.email "vscode@microsoft.com"
    git config user.name "VSCode"

    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
    git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
    git fetch distro

    # Push master branch into oss/master
    git push distro origin/master:refs/heads/oss/master

    # Push every release branch into oss/release
    git for-each-ref --format="%(refname:short)" refs/remotes/origin/release/* | sed 's/^origin\/\(.*\)$/\0:refs\/heads\/oss\/\1/' | xargs git push distro

    git push distro origin/master:refs/heads/master
    git merge $(node -p "require('./package.json').distro")

  displayName: Sync & Merge Distro
  displayName: Sync & Merge Distro
@@ -1,16 +0,0 @@
#Download base image ubuntu 16.04
FROM ubuntu:16.04

# Update Software repository
RUN apt-get update

RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0

ADD ./ /opt/ads-server

RUN chmod +x /opt/ads-server/server.sh && chmod +x /opt/ads-server/node

CMD ["/opt/ads-server/server.sh"]

EXPOSE 8000:8000
EXPOSE 8001:8001
@@ -1,36 +0,0 @@
pool:
  vmImage: 'Ubuntu-16.04'

trigger: none
pr: none

steps:
- task: NodeTool@0
  inputs:
    versionSpec: "12.13.0"

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'vscode-builds-subscription'
    KeyVaultName: vscode

- script: |
    set -e

    cat << EOF > ~/.netrc
    machine github.com
    login vscode
    password $(github-distro-mixin-password)
    EOF

    git config user.email "vscode@microsoft.com"
    git config user.name "VSCode"

    git checkout origin/electron-6.0.x
    git merge origin/master

    # Push master branch into exploration branch
    git push origin HEAD:electron-6.0.x

  displayName: Sync & Merge Exploration
@@ -1,39 +0,0 @@
trigger:
  branches:
    include: ['master']
pr: none

jobs:
- job: ExplorationMerge
  pool:
    vmImage: Ubuntu-16.04
  steps:
  - task: NodeTool@0
    inputs:
      versionSpec: "10.15.1"

  - script: |
      set -e

      cat << EOF > ~/.netrc
      machine mssqltools.visualstudio.com
      login azuredatastudio
      password $(DEVOPS_PASSWORD)
      EOF

      git config user.email "andresse@microsoft.com"
      git config user.name "AzureDataStudio"

      git remote add explore "$ADS_EXPLORE_REPO"
      git fetch explore

      git checkout -b merge-branch explore/master

      git merge origin/master

      git push explore HEAD:master

    displayName: Sync & Merge Explore
    env:
      ADS_EXPLORE_REPO: $(ADS_EXPLORE_REPO)
      DEVOPS_PASSWORD: $(DEVOPS_PASSWORD)
@@ -1,27 +0,0 @@
#Download base image ubuntu 16.04
FROM ubuntu:16.04

# Update Software repository
RUN apt-get update && apt-get upgrade -y

RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
	libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
	libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.8

RUN rm /usr/bin/gcc
RUN rm /usr/bin/g++
RUN ln -s /usr/bin/gcc-4.8 /usr/bin/gcc
RUN ln -s /usr/bin/g++-4.8 /usr/bin/g++

#docker
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN apt-key fingerprint 0EBFCD88
RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
RUN apt-get update
RUN apt-get -y install docker-ce docker-ce-cli containerd.io

# This image needs to be built on a linux host; some weird stuff happens and the xvfb service won't start
# if built on a windows host.
ADD ./xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults
build/azure-pipelines/linux/build.sh (new executable file, 3 lines)
@@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
yarn gulp "vscode-linux-$VSCODE_ARCH-min"
@@ -2,62 +2,51 @@ steps:
- script: |
    set -e
    sudo apt-get update
    sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
    sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
    sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
    sudo chmod +x /etc/init.d/xvfb
    sudo update-rc.d xvfb defaults
    sudo service xvfb start
- task: NodeTool@0
  inputs:
    versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
    versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
    versionSpec: "1.10.1"
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
#   inputs:
#     keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
#     targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
#     vstsFeed: '$(ArtifactFeed)'
#     condition: eq(variables['System.PullRequest.PullRequestId'], '')
- script: |
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
    yarn
  displayName: Install Dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
#   inputs:
#     keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
#     targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
#     vstsFeed: '$(ArtifactFeed)'
#     condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
- script: |
    yarn electron x64
    yarn gulp electron-x64
  displayName: Download Electron
- script: |
    yarn gulp hygiene
  displayName: Run Hygiene Checks
- script: | # {{SQL CARBON EDIT}} add strict null check
    yarn strict-vscode
  displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
#     yarn monaco-compile-check
#   displayName: Run Monaco Editor Checks
- script: |
    yarn valid-layers-check
  displayName: Run Valid Layers Checks
    yarn monaco-compile-check
  displayName: Run Monaco Editor Checks
- script: |
    yarn compile
  displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
#     yarn download-builtin-extensions
#   displayName: Download Built-in Extensions
- script: |
    yarn download-builtin-extensions
  displayName: Download Built-in Extensions
- script: |
    DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
  displayName: Run Unit Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable for now @TODO @anthonydresser
#     DISPLAY=:10 yarn test-browser --browser chromium
#   displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} remove step
#     DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
#   displayName: Run Integration Tests (Electron)
  displayName: Run Unit Tests
- task: PublishTestResults@2
  displayName: Publish Tests Results
  inputs:

@@ -1,37 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."

# Publish tarball
mkdir -p $REPO/.build/linux/{archive,server}
PLATFORM_LINUX="linux-x64"
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"

# create version
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
COMMIT_ID=$(git rev-parse HEAD)
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$REPO/.build/version.json"

rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"

rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

# create docker
mkdir -p $REPO/.build/docker
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME
docker save azuredatastudio-server | gzip > $REPO/.build/docker/azuredatastudio-server-docker.tar.gz

node build/azure-pipelines/common/copyArtifacts.js
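A short consumer-side sketch for the docker artifact produced by the removed script above; the image name comes from the script itself, while the load step is only an assumption about how such an artifact would be used downstream:
# Re-import the saved server image on another machine
gunzip -c .build/docker/azuredatastudio-server-docker.tar.gz | docker load
docker image ls azuredatastudio-server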
build/azure-pipelines/linux/frozen-check.js (new file, 40 lines)
@@ -0,0 +1,40 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const documentdb_1 = require("documentdb");
function createDefaultConfig(quality) {
    return {
        id: quality,
        frozen: false
    };
}
function getConfig(quality) {
    const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
    const collection = 'dbs/builds/colls/config';
    const query = {
        query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
        parameters: [
            { name: '@quality', value: quality }
        ]
    };
    return new Promise((c, e) => {
        client.queryDocuments(collection, query).toArray((err, results) => {
            if (err && err.code !== 409) {
                return e(err);
            }
            c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
        });
    });
}
getConfig(process.argv[2])
    .then(config => {
        console.log(config.frozen);
        process.exit(0);
    })
    .catch(err => {
        console.error(err);
        process.exit(1);
    });
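A usage sketch for frozen-check.js, assuming the two environment variables it reads are available; the endpoint, key, and quality name below are placeholder example values:
export AZURE_DOCUMENTDB_ENDPOINT="https://<account>.documents.azure.com:443/"   # placeholder
export AZURE_DOCUMENTDB_MASTERKEY="<key>"                                        # placeholder
node build/azure-pipelines/linux/frozen-check.js insider    # prints "true" if that quality is frozen, otherwise "false"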
@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'
@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'
@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'
@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'
@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'
@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'
@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'
@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'
@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'
@@ -1,116 +0,0 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'

- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode

- task: Docker@1
displayName: 'Pull image'
inputs:
azureSubscriptionEndpoint: 'vscode-builds-subscription'
azureContainerRegistry: vscodehub.azurecr.io
command: 'Run an image'
imageName: 'vscode-linux-build-agent:$(VSCODE_ARCH)'
containerCommand: uname

- script: |
set -e

cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF

git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling

- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'

- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality

- script: |
set -e
CHILD_CONCURRENCY=1 ./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/prebuild.sh
displayName: Prebuild

- script: |
set -e
./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/build.sh
displayName: Build

- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/publish.sh
displayName: Publish

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
@@ -1,186 +1,73 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'

- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "10.15.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
versionSpec: "1.10.1"

- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi

cat << EOF > ~/.netrc
machine monacotools.visualstudio.com
password $(VSO_PAT)
machine github.com
login vscode
password $(github-distro-mixin-password)
password $(VSCODE_MIXIN_PASSWORD)
EOF

git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling

- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
CHILD_CONCURRENCY=1 yarn
yarn gulp mixin
yarn gulp hygiene
yarn monaco-compile-check
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
displayName: Prepare build

- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality

- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-linux-x64-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-x64-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-x64-min-ci
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
./build/azure-pipelines/linux/build.sh
displayName: Build

- script: |
set -e
yarn gulp "electron-$(VSCODE_ARCH)"

# xvfb seems to be crashing often, let's make sure it's always up
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
displayName: Run unit tests

- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-x64" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
yarn gulp "vscode-linux-x64-build-deb"
yarn gulp "vscode-linux-x64-build-rpm"
yarn gulp "vscode-linux-x64-prepare-snap"
displayName: Build packages

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '.build/linux/rpm/x86_64'
Pattern: '*.rpm'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-450779-Pgp",
"operationSetCode": "LinuxSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: Codesign rpm

- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
VSCODE_HOCKEYAPP_TOKEN="$(VSCODE_HOCKEYAPP_TOKEN)" \
./build/azure-pipelines/linux/publish.sh
displayName: Publish

- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: snap-x64
targetPath: .build/linux/snap-tarball

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball
@@ -4,53 +4,48 @@ REPO="$(pwd)"
ROOT="$REPO/.."

# Publish tarball
PLATFORM_LINUX="linux-x64"
PLATFORM_LINUX="linux-$VSCODE_ARCH"
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

node build/azure-pipelines/common/createAsset.js "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$TARBALL_PATH"

# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"

rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"

# Publish hockeyapp symbols
# node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"
# Skip hockey app because build failure.
# https://github.com/microsoft/vscode/issues/90491
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_LINUX64"

# Publish DEB
PLATFORM_DEB="linux-deb-x64"
DEB_ARCH="amd64"
yarn gulp "vscode-linux-$VSCODE_ARCH-build-deb"
PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"

node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"

# Publish RPM
PLATFORM_RPM="linux-rpm-x64"
RPM_ARCH="x86_64"
yarn gulp "vscode-linux-$VSCODE_ARCH-build-rpm"
PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"

node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"

# Publish Snap
yarn gulp "vscode-linux-$VSCODE_ARCH-prepare-snap"

# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
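A small sketch of why the snap tree is round-tripped through tar rather than uploaded as loose files: listing the archive shows the mode and owner bits the packing step is meant to preserve (the path below assumes the x64 case above):
tar -tvzf .build/linux/snap-tarball/snap-x64.tar.gz | head -n 5   # mode/owner columns should match the original snap tree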
@@ -1,22 +1,16 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "10.15.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
versionSpec: "1.10.1"

- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact'
inputs:
artifactName: snap-x64
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball

- script: |
@@ -31,22 +25,25 @@ steps:

# Define variables
REPO="$(pwd)"
SNAP_ROOT="$REPO/.build/linux/snap/x64"
ARCH="$(VSCODE_ARCH)"
SNAP_ROOT="$REPO/.build/linux/snap/$ARCH"

# Install build dependencies
(cd build && yarn)

# Unpack snap tarball artifact, in order to preserve file perms
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$ARCH.tar.gz"
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)

# Create snap package
BUILD_VERSION="$(date +%s)"
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")
(cd $SNAP_ROOT/code-* && sudo snapcraft snap --output "$SNAP_PATH")

# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/createAsset.js "linux-snap-x64" package "$SNAP_FILENAME" "$SNAP_PATH"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-$ARCH" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
@@ -1,171 +0,0 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'

- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
inputs:
versionSpec: '10.15.1'

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'

- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login azuredatastudio
password $(github-distro-mixin-password)
EOF

git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling

- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'

- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality

- script: |
set -e
yarn gulp install-sqltoolsservice
yarn gulp install-ssmsmin
displayName: Install extension binaries

- script: |
set -e
yarn gulp vscode-linux-x64-min-ci
yarn gulp vscode-reh-linux-x64-min-ci
yarn gulp vscode-reh-web-linux-x64-min-ci
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

- script: |
set -e
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
yarn gulp package-rebuild-extensions
yarn gulp compile-extensions
yarn gulp package-external-extensions
displayName: Package External extensions

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
DISPLAY=:10 ./scripts/test-extensions-unit.sh
displayName: 'Run Stable Extension Unit Tests'
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
displayName: 'Run Unstable Extension Unit Tests'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))

- script: |
set -e
yarn gulp vscode-linux-x64-build-deb
displayName: Build Deb

- script: |
set -e
yarn gulp vscode-linux-x64-build-rpm
displayName: Build Rpm

- script: |
set -e
./build/azure-pipelines/linux/createDrop.sh
displayName: Create Drop

- task: CopyFiles@2
displayName: 'Copy Extension Unit Test Coverage Files to: $(Build.ArtifactStagingDirectory)'
inputs:
SourceFolder: '$(Build.SourcesDirectory)/extensions'
Contents: '*/coverage/**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/test-results/coverage'

- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: '*.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true
@@ -1,36 +0,0 @@
Param(
[string]$sourcesDir,
[string]$artifactsDir,
[string]$storageKey,
[string]$documentDbKey
)

$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit
$Arch = "x64"

# Publish tarball
$PlatformLinux = "linux-$Arch"
$TarballFilename = "azuredatastudio-linux-$Arch.tar.gz"
$TarballPath = "$artifactsDir\linux\archive\$TarballFilename"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned $TarballFilename $Version true $TarballPath $CommitId

# Publish DEB
$PlatformDeb = "linux-deb-$Arch"
$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
$DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package $DebFilename $Version true $DebPath $CommitId

# Publish RPM
$PlatformRpm = "linux-rpm-$Arch"
$RpmFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\rpm\x86_64\*.rpm)"
$RpmPath = "$artifactsDir\linux\rpm\x86_64\$RpmFilename"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package $RpmFilename $Version true $RpmPath $CommitId
@@ -19,7 +19,7 @@
[ "${NETWORKING}" = "no" ] && exit 0

PROG="/usr/bin/Xvfb"
PROG_OPTIONS=":10 -ac -screen 0 1024x768x24"
PROG_OPTIONS=":10 -ac"
PROG_OUTPUT="/tmp/Xvfb.out"

case "$1" in
@@ -50,4 +50,4 @@ case "$1" in
exit 1
esac

exit $RETVAL
exit $RETVAL
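A quick verification sketch for the xvfb init script above, assuming xdpyinfo (from the x11-utils package) is available on the agent:
sudo service xvfb start
DISPLAY=:10 xdpyinfo | head -n 3    # exits non-zero if Xvfb is not listening on :10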
Some files were not shown because too many files have changed in this diff.