mirror of
https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-17 02:51:36 -05:00
SQL Operations Studio Public Preview 1 (0.23) release source code
This commit is contained in:
2
build/tfs/common/.gitignore
vendored
Normal file
2
build/tfs/common/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
node_modules/
|
||||
*.js
|
||||
40
build/tfs/common/common.sh
Normal file
40
build/tfs/common/common.sh
Normal file
@@ -0,0 +1,40 @@
|
||||
#!/bin/bash
# common.sh - shared helpers for the TFS/VSTS build scripts.
# Provides `step` (run a named task with banner output and timing) and
# prints a duration summary of all steps when the script exits.
set -e

# Use an agent-specific npm cache so concurrent agents do not share one.
if [ -n "$AGENT_WORKFOLDER" ]
then
	export npm_config_cache="$AGENT_WORKFOLDER/npm-cache"
	echo "Using npm cache: $npm_config_cache"
fi

# Accumulated "name;duration" rows, rendered as a table by done_steps.
SUMMARY="Task;Duration"$'\n'

# step NAME CMD [ARGS...] - run CMD with banner output and record its
# wall-clock duration (via bash's $SECONDS counter and bc) into SUMMARY.
step() {
	START=$SECONDS
	TASK=$1; shift
	echo ""
	echo "*****************************************************************************"
	echo "Start: $TASK"
	echo "*****************************************************************************"
	"$@"

	# Calculate total duration
	TOTAL=$(echo "$SECONDS - $START" | bc)
	M=$(echo "$TOTAL / 60" | bc)
	S=$(echo "$TOTAL % 60" | bc)
	DURATION="$(printf "%02d" $M):$(printf "%02d" $S)"

	echo "*****************************************************************************"
	echo "End: $TASK, Total: $DURATION"
	echo "*****************************************************************************"
	SUMMARY="$SUMMARY$TASK;$DURATION"$'\n'
}

# Print the collected step summary as an aligned two-column table.
done_steps() {
	echo ""
	echo "Build Summary"
	echo "============="
	echo "${SUMMARY}" | column -t -s';'
}

# Always print the summary on exit, even when `set -e` aborts mid-script.
trap done_steps EXIT
|
||||
85
build/tfs/common/enqueue.ts
Normal file
85
build/tfs/common/enqueue.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
import { DocumentClient } from 'documentdb';
|
||||
import * as azure from 'azure-storage';
|
||||
|
||||
/**
 * Shape of one build asset entry on a release document in the builds
 * DocumentDB database (as read back by isBuildSigned below).
 */
interface Asset {
	platform: string;
	type: string;
	url: string;
	mooncakeUrl: string;
	hash: string;
}
|
||||
|
||||
function queueSigningRequest(quality: string, commit: string): Promise<void> {
|
||||
const retryOperations = new azure.ExponentialRetryPolicyFilter();
|
||||
const queueSvc = azure
|
||||
.createQueueService(process.env['AZURE_STORAGE_ACCOUNT_2'], process.env['AZURE_STORAGE_ACCESS_KEY_2'])
|
||||
.withFilter(retryOperations);
|
||||
|
||||
queueSvc.messageEncoder = new azure.QueueMessageEncoder.TextBase64QueueMessageEncoder();
|
||||
|
||||
const message = `${quality}/${commit}`;
|
||||
|
||||
return new Promise<void>((c, e) => queueSvc.createMessage('sign-darwin', message, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
function isBuildSigned(quality: string, commit: string): Promise<boolean> {
|
||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const collection = 'dbs/builds/colls/' + quality;
|
||||
const updateQuery = {
|
||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
||||
parameters: [{ name: '@id', value: commit }]
|
||||
};
|
||||
|
||||
return new Promise<boolean>((c, e) => {
|
||||
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
|
||||
if (err) { return e(err); }
|
||||
if (results.length !== 1) { return c(false); }
|
||||
|
||||
const [release] = results;
|
||||
const assets: Asset[] = release.assets;
|
||||
const isSigned = assets.some(a => a.platform === 'darwin' && a.type === 'archive');
|
||||
|
||||
c(isSigned);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function waitForSignedBuild(quality: string, commit: string): Promise<void> {
|
||||
let retries = 0;
|
||||
|
||||
while (retries < 180) {
|
||||
if (await isBuildSigned(quality, commit)) {
|
||||
return;
|
||||
}
|
||||
|
||||
await new Promise<void>(c => setTimeout(c, 10000));
|
||||
retries++;
|
||||
}
|
||||
|
||||
throw new Error('Timed out waiting for signed build');
|
||||
}
|
||||
|
||||
async function main(quality: string): Promise<void> {
|
||||
const commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
|
||||
|
||||
console.log(`Queueing signing request for '${quality}/${commit}'...`);
|
||||
await queueSigningRequest(quality, commit);
|
||||
|
||||
console.log('Waiting on signed build...');
|
||||
await waitForSignedBuild(quality, commit);
|
||||
|
||||
console.log('Found signed build!');
|
||||
}
|
||||
|
||||
main(process.argv[2]).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
26
build/tfs/common/installDistro.ts
Normal file
26
build/tfs/common/installDistro.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

const cp = require('child_process');
// npm is a batch-file shim on Windows, a plain executable elsewhere.
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
|
||||
|
||||
function npmInstall(package: string, args: string[]): void {
|
||||
const result = cp.spawnSync(npm, ['install', package, ...args], {
|
||||
stdio: 'inherit'
|
||||
});
|
||||
|
||||
if (result.error || result.status !== 0) {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
const product = require('../../../product.json');
|
||||
const dependencies = product.dependencies || {} as { [name: string]: string; };
|
||||
const [, , ...args] = process.argv;
|
||||
|
||||
Object.keys(dependencies).forEach(name => {
|
||||
const url = dependencies[name];
|
||||
npmInstall(url, args);
|
||||
});
|
||||
15
build/tfs/common/node.sh
Normal file
15
build/tfs/common/node.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/bin/bash
# node.sh - install and activate the pinned Node.js version via nvm.
set -e

# setup nvm: Homebrew location on macOS, $NVM_DIR elsewhere (Linux agents).
if [[ "$OSTYPE" == "darwin"* ]]; then
	export NVM_DIR=~/.nvm
	source $(brew --prefix nvm)/nvm.sh
else
	source $NVM_DIR/nvm.sh
fi

# install node (pinned so every build agent uses the same toolchain)
NODE_VERSION=7.10.0
nvm install $NODE_VERSION
nvm use $NODE_VERSION
|
||||
266
build/tfs/common/publish.ts
Normal file
266
build/tfs/common/publish.ts
Normal file
@@ -0,0 +1,266 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import { execSync } from 'child_process';
|
||||
import { Readable } from 'stream';
|
||||
import * as crypto from 'crypto';
|
||||
import * as azure from 'azure-storage';
|
||||
import * as mime from 'mime';
|
||||
import * as minimist from 'minimist';
|
||||
import { DocumentClient, NewDocument } from 'documentdb';
|
||||
|
||||
// NOTE(review): the usage string advertises 8 positional arguments, but this
// guard only requires process.argv.length >= 6 (i.e. 4 arguments), and main()
// below actually parses 7 positionals and derives the commit itself — the
// usage text appears stale; confirm the intended contract.
if (process.argv.length < 6) {
	console.error('Usage: node publish.js <product> <platform> <type> <name> <version> <commit> <is_update> <file>');
	process.exit(-1);
}
|
||||
|
||||
function hashStream(hashName: string, stream: Readable): Promise<string> {
|
||||
return new Promise<string>((c, e) => {
|
||||
const shasum = crypto.createHash(hashName);
|
||||
|
||||
stream
|
||||
.on('data', shasum.update.bind(shasum))
|
||||
.on('error', e)
|
||||
.on('close', () => c(shasum.digest('hex')));
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Per-quality configuration document (collection 'dbs/builds/colls/config').
 * When `frozen` is true, publish() forces isReleased to false on new builds.
 */
interface Config {
	id: string;
	frozen: boolean;
}
|
||||
|
||||
function createDefaultConfig(quality: string): Config {
|
||||
return {
|
||||
id: quality,
|
||||
frozen: false
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Fetch the Config document for `quality` from the config collection,
 * falling back to a default (unfrozen) config when none exists.
 */
function getConfig(quality: string): Promise<Config> {
	const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/config';
	const query = {
		query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
		parameters: [
			{ name: '@quality', value: quality }
		]
	};

	return new Promise<Config>((c, e) => {
		client.queryDocuments(collection, query).toArray((err, results) => {
			// NOTE(review): a 409 (conflict) error is tolerated and treated the
			// same as "no results" — presumably a leftover from a
			// create-if-missing flow; confirm it is still needed for a read.
			if (err && err.code !== 409) { return e(err); }

			// No document found -> synthesize the default config.
			c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
		});
	});
}
|
||||
|
||||
/**
 * Asset entry attached to a release document: one downloadable artifact
 * (unique per platform/type) with its CDN locations and content hashes.
 */
interface Asset {
	platform: string;
	type: string;
	url: string;
	mooncakeUrl: string;
	hash: string;       // SHA-1 hex digest of the file
	sha256hash: string; // SHA-256 hex digest of the file
}
|
||||
|
||||
/**
 * Publish a release document for `commit`, or merge the asset into the
 * existing document when one is already present.
 *
 * Create path: createDocument; a 409 (document exists) falls through to
 * the update path.
 * Update path: re-read the document, replace any asset with the same
 * platform/type, optionally record the update channel, then
 * replaceDocument; a 409 there (concurrent writer) retries up to 5 times.
 */
function createOrUpdate(commit: string, quality: string, platform: string, type: string, release: NewDocument, asset: Asset, isUpdate: boolean): Promise<void> {
	const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/' + quality;
	const updateQuery = {
		query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
		parameters: [{ name: '@id', value: commit }]
	};

	let updateTries = 0;

	function update(): Promise<void> {
		updateTries++;

		return new Promise<void>((c, e) => {
			client.queryDocuments(collection, updateQuery).toArray((err, results) => {
				if (err) { return e(err); }
				if (results.length !== 1) { return e(new Error('No documents')); }

				const release = results[0];

				// Drop any previous asset for this platform/type, then append ours.
				release.assets = [
					...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
					asset
				];

				if (isUpdate) {
					release.updates[platform] = type;
				}

				client.replaceDocument(release._self, release, err => {
					// 409 here means someone replaced the document since we read
					// it; re-read and retry (bounded).
					if (err && err.code === 409 && updateTries < 5) { return c(update()); }
					if (err) { return e(err); }

					console.log('Build successfully updated.');
					c();
				});
			});
		});
	}

	return new Promise<void>((c, e) => {
		client.createDocument(collection, release, err => {
			// 409: a release document for this commit already exists -> merge.
			if (err && err.code === 409) { return c(update()); }
			if (err) { return e(err); }

			console.log('Build successfully published.');
			c();
		});
	});
}
|
||||
|
||||
async function assertContainer(blobService: azure.BlobService, quality: string): Promise<void> {
|
||||
await new Promise((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean> {
|
||||
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
|
||||
return existsResult.exists;
|
||||
}
|
||||
|
||||
async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
|
||||
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
|
||||
contentSettings: {
|
||||
contentType: mime.lookup(file),
|
||||
cacheControl: 'max-age=31536000, public'
|
||||
}
|
||||
};
|
||||
|
||||
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
/**
 * CLI flags parsed by minimist. 'upload-only' uploads the blobs but leaves
 * the release document's asset/update lists untouched.
 */
interface PublishOptions {
	'upload-only': boolean;
}
|
||||
|
||||
async function publish(commit: string, quality: string, platform: string, type: string, name: string, version: string, _isUpdate: string, file: string, opts: PublishOptions): Promise<void> {
|
||||
const isUpdate = _isUpdate === 'true';
|
||||
|
||||
const queuedBy = process.env['BUILD_QUEUEDBY'];
|
||||
const sourceBranch = process.env['BUILD_SOURCEBRANCH'];
|
||||
const isReleased = quality === 'insider'
|
||||
&& /^master$|^refs\/heads\/master$/.test(sourceBranch)
|
||||
&& /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy);
|
||||
|
||||
console.log('Publishing...');
|
||||
console.log('Quality:', quality);
|
||||
console.log('Platforn:', platform);
|
||||
console.log('Type:', type);
|
||||
console.log('Name:', name);
|
||||
console.log('Version:', version);
|
||||
console.log('Commit:', commit);
|
||||
console.log('Is Update:', isUpdate);
|
||||
console.log('Is Released:', isReleased);
|
||||
console.log('File:', file);
|
||||
|
||||
const stream = fs.createReadStream(file);
|
||||
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
|
||||
|
||||
console.log('SHA1:', sha1hash);
|
||||
console.log('SHA256:', sha256hash);
|
||||
|
||||
const blobName = commit + '/' + name;
|
||||
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
|
||||
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
// mooncake is fussy and far away, this is needed!
|
||||
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
||||
|
||||
await Promise.all([
|
||||
assertContainer(blobService, quality),
|
||||
assertContainer(mooncakeBlobService, quality)
|
||||
]);
|
||||
|
||||
const [blobExists, moooncakeBlobExists] = await Promise.all([
|
||||
doesAssetExist(blobService, quality, blobName),
|
||||
doesAssetExist(mooncakeBlobService, quality, blobName)
|
||||
]);
|
||||
|
||||
const promises = [];
|
||||
|
||||
if (!blobExists) {
|
||||
promises.push(uploadBlob(blobService, quality, blobName, file));
|
||||
}
|
||||
|
||||
if (!moooncakeBlobExists) {
|
||||
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
|
||||
}
|
||||
|
||||
if (promises.length === 0) {
|
||||
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Uploading blobs to Azure storage...');
|
||||
|
||||
await Promise.all(promises);
|
||||
|
||||
console.log('Blobs successfully uploaded.');
|
||||
|
||||
const config = await getConfig(quality);
|
||||
|
||||
console.log('Quality config:', config);
|
||||
|
||||
const asset: Asset = {
|
||||
platform: platform,
|
||||
type: type,
|
||||
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
|
||||
mooncakeUrl: `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}`,
|
||||
hash: sha1hash,
|
||||
sha256hash
|
||||
};
|
||||
|
||||
const release = {
|
||||
id: commit,
|
||||
timestamp: (new Date()).getTime(),
|
||||
version,
|
||||
isReleased: config.frozen ? false : isReleased,
|
||||
sourceBranch,
|
||||
queuedBy,
|
||||
assets: [],
|
||||
updates: {} as any
|
||||
};
|
||||
|
||||
if (!opts['upload-only']) {
|
||||
release.assets.push(asset);
|
||||
|
||||
if (isUpdate) {
|
||||
release.updates[platform] = type;
|
||||
}
|
||||
}
|
||||
|
||||
await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
|
||||
}
|
||||
|
||||
function main(): void {
|
||||
const opts = minimist<PublishOptions>(process.argv.slice(2), {
|
||||
boolean: ['upload-only']
|
||||
});
|
||||
|
||||
const [quality, platform, type, name, version, _isUpdate, file] = opts._;
|
||||
const commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
|
||||
|
||||
publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
main();
|
||||
41
build/tfs/darwin/build.sh
Normal file
41
build/tfs/darwin/build.sh
Normal file
@@ -0,0 +1,41 @@
|
||||
#!/bin/sh
# darwin/build.sh - full macOS CI build: dependencies, hygiene, mixin,
# minified build, tests, then release. Secrets arrive as positional args.

. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh

# Positional secrets passed in by the build definition.
export VSCODE_MIXIN_PASSWORD="$1"
export AZURE_STORAGE_ACCESS_KEY="$2"
export AZURE_STORAGE_ACCESS_KEY_2="$3"
export MOONCAKE_STORAGE_ACCESS_KEY="$4"
export AZURE_DOCUMENTDB_MASTERKEY="$5"
VSO_PAT="$6"

# Let git authenticate against the internal VSTS remotes.
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc

step "Install dependencies" \
	npm install

step "Hygiene" \
	npm run gulp -- hygiene

step "Mix in repository from vscode-distro" \
	npm run gulp -- mixin

step "Install distro dependencies" \
	node build/tfs/common/installDistro.js

step "Build minified & upload source maps" \
	npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps

# step "Create loader snapshot"
# 	node build/lib/snapshotLoader.js

step "Run unit tests" \
	./scripts/test.sh --build --reporter dot

step "Run integration tests" \
	./scripts/test-integration.sh

step "Publish release" \
	./build/tfs/darwin/release.sh
|
||||
26
build/tfs/darwin/release.sh
Normal file
26
build/tfs/darwin/release.sh
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/bin/sh
# darwin/release.sh - package the built app, upload the unsigned archive,
# and enqueue it for code signing.

. ./scripts/env.sh
. ./build/tfs/common/common.sh

(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
	step "Install build dependencies" \
		npm i)

REPO=`pwd`
ZIP=$REPO/../VSCode-darwin-selfsigned.zip
UNSIGNEDZIP=$REPO/../VSCode-darwin-unsigned.zip
BUILD=$REPO/../VSCode-darwin
# Read the app version out of the built bundle's package.json.
PACKAGEJSON=`ls $BUILD/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`

rm -rf $UNSIGNEDZIP
# zip: -X strips extra file attributes, -y preserves symlinks in the .app.
(cd $BUILD && \
	step "Create unsigned archive" \
		zip -r -X -y $UNSIGNEDZIP *)

step "Upload unsigned archive" \
	node build/tfs/common/publish.js --upload-only $VSCODE_QUALITY darwin archive-unsigned VSCode-darwin-$VSCODE_QUALITY-unsigned.zip $VERSION false $UNSIGNEDZIP

step "Sign build" \
	node build/tfs/common/enqueue.js $VSCODE_QUALITY
|
||||
28
build/tfs/darwin/smoketest.sh
Normal file
28
build/tfs/darwin/smoketest.sh
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/bin/sh
# darwin/smoketest.sh - build the minified app and run the UI smoke test.

. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh

export VSCODE_MIXIN_PASSWORD="$1"
VSO_PAT="$2"

# Let git authenticate against the internal VSTS remotes.
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc

step "Install dependencies" \
	npm install

step "Mix in repository from vscode-distro" \
	npm run gulp -- mixin

step "Install distro dependencies" \
	node build/tfs/common/installDistro.js

step "Build minified & upload source maps" \
	npm run gulp -- vscode-darwin-min

# NOTE(review): `step` only wraps the pushd below; the npm install/test and
# popd run outside the step banner/timing. pushd/popd are also bashisms
# under a #!/bin/sh shebang — confirm the agent shell is bash.
step "Run smoke test" \
	pushd test/smoke
npm install
npm test -- --latest "$AGENT_BUILDDIRECTORY/VSCode-darwin/Visual Studio Code - Insiders.app/Contents/MacOS/Electron"
popd
|
||||
1
build/tfs/linux/.gitignore
vendored
Normal file
1
build/tfs/linux/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
pat
|
||||
3
build/tfs/linux/build-ia32.sh
Normal file
3
build/tfs/linux/build-ia32.sh
Normal file
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash
set -e
# Thin wrapper: run the shared Linux build for the 32-bit architecture.
./build/tfs/linux/build.sh ia32 "$@"
|
||||
3
build/tfs/linux/build-x64.sh
Normal file
3
build/tfs/linux/build-x64.sh
Normal file
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash
set -e
# Thin wrapper: run the shared Linux build for the 64-bit architecture.
./build/tfs/linux/build.sh x64 "$@"
|
||||
43
build/tfs/linux/build.sh
Normal file
43
build/tfs/linux/build.sh
Normal file
@@ -0,0 +1,43 @@
|
||||
#!/bin/bash
# linux/build.sh - full Linux CI build for one architecture (ia32 or x64,
# passed as $1). Secrets follow as positional arguments.
# NOTE(review): unlike the arch wrapper scripts, this file does not
# `set -e`; confirm whether a failing step should abort the build.

. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh

export ARCH="$1"
export VSCODE_MIXIN_PASSWORD="$2"
export AZURE_STORAGE_ACCESS_KEY="$3"
export AZURE_STORAGE_ACCESS_KEY_2="$4"
export MOONCAKE_STORAGE_ACCESS_KEY="$5"
export AZURE_DOCUMENTDB_MASTERKEY="$6"
export LINUX_REPO_PASSWORD="$7"
VSO_PAT="$8"

# Let git authenticate against the internal VSTS remotes.
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc

step "Install dependencies" \
	npm install --arch=$ARCH --unsafe-perm

step "Hygiene" \
	npm run gulp -- hygiene

step "Mix in repository from vscode-distro" \
	npm run gulp -- mixin

step "Get Electron" \
	npm run gulp -- "electron-$ARCH"

step "Install distro dependencies" \
	node build/tfs/common/installDistro.js --arch=$ARCH

step "Build minified" \
	npm run gulp -- "vscode-linux-$ARCH-min"

# step "Create loader snapshot"
# 	node build/lib/snapshotLoader.js --arch=$ARCH

step "Run unit tests" \
	./scripts/test.sh --build --reporter dot

step "Publish release" \
	./build/tfs/linux/release.sh
|
||||
50
build/tfs/linux/ia32/Dockerfile
Normal file
50
build/tfs/linux/ia32/Dockerfile
Normal file
@@ -0,0 +1,50 @@
|
||||
# Build agent image for 32-bit Linux builds: the VSTS agent base image plus
# the multilib toolchain and the i386 runtime libraries Electron needs.
FROM microsoft/vsts-agent:ubuntu-14.04-standard
MAINTAINER Joao Moreno <joao.moreno@microsoft.com>

ARG DEBIAN_FRONTEND=noninteractive
# Allow :i386 packages to be installed alongside the amd64 base.
RUN dpkg --add-architecture i386
RUN apt-get update

# Dependencies
RUN apt-get install -y build-essential
RUN apt-get install -y gcc-multilib g++-multilib
RUN apt-get install -y git
RUN apt-get install -y zip
RUN apt-get install -y rpm
RUN apt-get install -y createrepo
RUN apt-get install -y python-gtk2
RUN apt-get install -y jq
RUN apt-get install -y xvfb
RUN apt-get install -y fakeroot
RUN apt-get install -y libgtk2.0-0:i386
RUN apt-get install -y libgconf-2-4:i386
RUN apt-get install -y libnss3:i386
RUN apt-get install -y libasound2:i386
RUN apt-get install -y libxtst6:i386
RUN apt-get install -y libfuse2
RUN apt-get install -y libnotify-bin
RUN apt-get install -y libnotify4:i386
RUN apt-get install -y libx11-dev:i386
RUN apt-get install -y libxkbfile-dev:i386
RUN apt-get install -y libxss1:i386
RUN apt-get install -y libx11-xcb-dev:i386
RUN apt-get install -y libgl1-mesa-glx:i386 libgl1-mesa-dri:i386
RUN apt-get install -y libxkbfile-dev
RUN apt-get install -y bc bsdmainutils
RUN apt-get install -y libgirepository-1.0-1:i386 gir1.2-glib-2.0:i386 gir1.2-secret-1:i386 libsecret-1-dev:i386
RUN apt-get install -y dpkg-dev:i386

# Xvfb
# Thanks https://medium.com/@griggheo/running-headless-selenium-webdriver-tests-in-docker-containers-342fdbabf756
ADD xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults

# nvm
ENV NVM_DIR /usr/local/nvm
RUN curl https://raw.githubusercontent.com/creationix/nvm/v0.33.2/install.sh | bash

# for libsecret
ENV PKG_CONFIG_PATH /usr/lib/i386-linux-gnu/pkgconfig

# Start Xvfb for headless UI tests, then hand control to the agent.
CMD (service xvfb start; export DISPLAY=:10; ./start.sh)
|
||||
15
build/tfs/linux/ia32/run-agent.sh
Normal file
15
build/tfs/linux/ia32/run-agent.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/bin/bash
# run-agent.sh - start the dockerized VSTS build agent for linux-ia32.
# Requires a `pat` file next to this script containing a VSTS personal
# access token (the file is git-ignored).

if [ ! -f pat ]; then
	echo "Error: file pat not found"
	exit 1
fi

docker run \
	-e VSTS_ACCOUNT="monacotools" \
	-e VSTS_TOKEN="$(cat pat)" \
	-e VSTS_AGENT="tb-lnx-ia32-local" \
	-e VSTS_POOL="linux-ia32" \
	-e VSTS_WORK="/var/vsts/work" \
	--name "tb-lnx-ia32-local" \
	-it joaomoreno/vscode-vso-agent-ia32:latest
|
||||
53
build/tfs/linux/ia32/xvfb.init
Normal file
53
build/tfs/linux/ia32/xvfb.init
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
#
# /etc/rc.d/init.d/xvfbd
#
# chkconfig: 345 95 28
# description: Starts/Stops X Virtual Framebuffer server
# processname: Xvfb
#
### BEGIN INIT INFO
# Provides: xvfb
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start xvfb at boot time
# Description: Enable xvfb provided by daemon.
### END INIT INFO

[ "${NETWORKING}" = "no" ] && exit 0

PROG="/usr/bin/Xvfb"
PROG_OPTIONS=":10 -ac"
PROG_OUTPUT="/tmp/Xvfb.out"

case "$1" in
	start)
		echo "Starting : X Virtual Frame Buffer "
		$PROG $PROG_OPTIONS>>$PROG_OUTPUT 2>&1 &
		disown -ar
		;;
	stop)
		echo "Shutting down : X Virtual Frame Buffer"
		killproc $PROG
		RETVAL=$?
		# Fix: remove both the lock file and the pid file in one command.
		# Previously "/var/run/Xvfb.pid" sat on its own line, so the shell
		# tried to execute the pid file as a command instead of deleting it.
		[ $RETVAL -eq 0 ] && /bin/rm -f /var/lock/subsys/Xvfb /var/run/Xvfb.pid
		echo
		;;
	restart|reload)
		$0 stop
		$0 start
		RETVAL=$?
		;;
	status)
		status Xvfb
		RETVAL=$?
		;;
	*)
		echo $"Usage: $0 (start|stop|restart|reload|status)"
		exit 1
esac

exit $RETVAL
|
||||
86
build/tfs/linux/release.sh
Normal file
86
build/tfs/linux/release.sh
Normal file
@@ -0,0 +1,86 @@
|
||||
#!/bin/bash
# linux/release.sh - package (deb/rpm/tar.gz), publish the artifacts to
# blob storage, and optionally submit the deb to the apt repository
# service. Expects ARCH, VSCODE_QUALITY and secrets in the environment.

. ./scripts/env.sh
. ./build/tfs/common/common.sh

step "Build Debian package" \
	npm run gulp -- "vscode-linux-$ARCH-build-deb"

step "Build RPM package" \
	npm run gulp -- "vscode-linux-$ARCH-build-rpm"

(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
	step "Install build dependencies" \
		npm install --unsafe-perm)

# Variables
PLATFORM_LINUX="linux-$ARCH"
PLATFORM_DEB="linux-deb-$ARCH"
PLATFORM_RPM="linux-rpm-$ARCH"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
REPO="`pwd`"
ROOT="$REPO/.."
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
# Derive the build version from the generated .deb filename.
BUILD_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\.deb$//g')"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

rm -rf $ROOT/code-*.tar.*
(cd $ROOT && \
	step "Create tar.gz archive" \
		tar -czf $TARBALL_PATH $BUILDNAME)

step "Publish tar.gz archive" \
	node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_LINUX archive-unsigned $TARBALL_FILENAME $VERSION true $TARBALL_PATH

DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"

step "Publish Debian package" \
	node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_DEB package $DEB_FILENAME $VERSION true $DEB_PATH

RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"

step "Publish RPM package" \
	node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_RPM package $RPM_FILENAME $VERSION true $RPM_PATH

if [ -z "$VSCODE_QUALITY" ]; then
	echo "VSCODE_QUALITY is not set, skipping repo package publish"
else
	# Repo submission only happens for CI-queued builds of master.
	if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ]; then
		if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then
			# Get necessary information
			pushd $REPO && COMMIT_HASH=$(git rev-parse HEAD) && popd
			PACKAGE_NAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/_.*//g')"
			DEB_URL="https://az764295.vo.msecnd.net/$VSCODE_QUALITY/$COMMIT_HASH/$DEB_FILENAME"
			RPM_URL="https://az764295.vo.msecnd.net/$VSCODE_QUALITY/$COMMIT_HASH/$RPM_FILENAME"
			PACKAGE_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\_.*$//g')"
			# Write config files needed by API, use eval to force environment variable expansion
			DIRNAME=$(dirname $(readlink -f $0))
			pushd $DIRNAME
			# Submit to apt repo
			if [ "$DEB_ARCH" = "amd64" ]; then
				eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > apt-config.json
				eval echo '{ \"name\": \"$PACKAGE_NAME\", \"version\": \"$PACKAGE_VERSION\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"sourceUrl\": \"$DEB_URL\" }' > apt-addpkg.json
				echo "Submitting apt-addpkg.json:"
				cat apt-addpkg.json

				step "Publish to repositories" \
					./repoapi_client.sh -config apt-config.json -addpkg apt-addpkg.json
			fi
			# Submit to yum repo (disabled as it's manual until signing is automated)
			# eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > yum-config.json
			# eval echo '{ \"name\": \"$PACKAGE_NAME\", \"version\": \"$PACKAGE_VERSION\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"sourceUrl\": \"$RPM_URL\" }' > yum-addpkg.json
			# echo "Submitting yum-addpkg.json:"
			# cat yum-addpkg.json
			# ./repoapi_client.sh -config yum-config.json -addpkg yum-addpkg.json
			popd
			echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>"
		fi
	fi
fi
|
||||
262
build/tfs/linux/repoapi_client.sh
Normal file
262
build/tfs/linux/repoapi_client.sh
Normal file
@@ -0,0 +1,262 @@
|
||||
#!/bin/bash -e
|
||||
# This is a VERY basic script for Create/Delete operations on repos and packages
|
||||
#
|
||||
cmd=$1
|
||||
urls=urls.txt
|
||||
defaultPackageFile=new_package.json
|
||||
defaultRepoFile=new_repo.json
|
||||
|
||||
function Bail
|
||||
{
|
||||
echo "ERROR: $@"
|
||||
exit 1
|
||||
}
|
||||
|
||||
function BailIfFileMissing {
|
||||
file="$1"
|
||||
if [ ! -f "$file" ]; then
|
||||
Bail "File $file does not exist"
|
||||
fi
|
||||
}
|
||||
|
||||
# Print the command-line reference for this tool and exit non-zero.
# Called both for explicit help and for any unrecognized flag.
function Usage {
	echo "USAGE: Manage repos and packages in an apt repository"
	echo "$0 -config FILENAME -listrepos | -listpkgs | -addrepo FILENAME | -addpkg FILENAME |"
	echo "-addpkgs FILENAME | -check ID | -delrepo REPOID | -delpkg PKGID"
	echo -e "\t-config FILENAME : JSON file containing API server name and creds"
	echo -e "\t-listrepos : List repositories"
	echo -e "\t-listpkgs [REGEX] : List packages, optionally filter by REGEX"
	echo -e "\t-addrepo FILENAME : Create a new repo using the specified JSON file"
	echo -e "\t-addpkg FILENAME : Add package to repo using the specified JSON file"
	echo -e "\t-addpkgs FILENAME : Add packages to repo using urls contained in FILENAME"
	echo -e "\t-check ID : Check upload operation by ID"
	echo -e "\t-delrepo REPOID : Delete the specified repo by ID"
	echo -e "\t-delpkg PKGID : Delete the specified package by ID"
	exit 1
}
|
||||
|
||||
# Extract the value selected by jq filter $1 from the global $secretContents
# (loaded by ParseConfigFile) and write it to stdout with the surrounding
# JSON quotes stripped.
function ParseFromJson {
	[ -n "$secretContents" ] || Bail "Unable to parse value because no JSON contents were specified"
	[ -n "$1" ] || Bail "Unable to parse value from JSON because no key was specified"
	# Write value directly to stdout to be used by caller
	echo $secretContents | jq "$1" | tr -d '"'
}
|
||||
|
||||
# Load the JSON config file named by $1 and populate the globals used by
# every API call below: server, protocol, port, repositoryId, user, pass,
# and the derived $baseurl ("protocol://user:pass@server:port").
# Note: credentials end up embedded in $baseurl and therefore in curl's
# command line (visible in process listings) — inherited design.
function ParseConfigFile {
	configFile="$1"
	if [ -z "$configFile" ]; then
		echo "Must specify -config option"
		Usage
	fi
	BailIfFileMissing "$configFile"
	# Cached globally so ParseFromJson can re-query individual fields.
	secretContents=$(cat "$configFile")

	server=$(ParseFromJson .server)
	protocol=$(ParseFromJson .protocol)
	port=$(ParseFromJson .port)
	repositoryId=$(ParseFromJson .repositoryId)
	user=$(ParseFromJson .username)
	pass=$(ParseFromJson .password)
	baseurl="$protocol://$user:$pass@$server:$port"
}
|
||||
|
||||
# Fetch and roughly pretty-print every repository defined on $server.
ListRepositories() {
	echo "Fetching repo list from $server..."
	curl -k "$baseurl/v1/repositories" | sed 's/,/,\n/g' | sed 's/^"/\t"/g'
	echo ""
}
|
||||
|
||||
# Fetch the package list from $server; $1 is an optional egrep pattern used
# to filter the (one-object-per-line) results before pretty-printing.
ListPackages() {
	echo "Fetching package list from $server"
	curl -k "$baseurl/v1/packages" | sed 's/{/\n{/g' | egrep "$1" | sed 's/,/,\n/g' | sed 's/^"/\t"/g'
	echo ""
}
|
||||
|
||||
# Create a new repository on the server.
#   $1 - JSON description file (see $defaultRepoFile.template)
# Prints the HTTP response from the repository API.
function AddRepo
{
	repoFile="$1"
	if [ -z "$repoFile" ]; then
		Bail "Error: Must specify a JSON-formatted file. Reference $defaultRepoFile.template"
	fi
	# FIX: $repoFile quoted throughout — the unquoted original broke on
	# filenames containing spaces.
	if [ ! -f "$repoFile" ]; then
		Bail "Error: Cannot create repo - $repoFile does not exist"
	fi
	# Best-effort extraction of the repo URL purely for the log line below.
	packageUrl=$(grep "url" "$repoFile" | head -n 1 | awk '{print $2}' | tr -d ',')
	echo "Creating new repo on $server [$packageUrl]"
	curl -i -k "$baseurl/v1/repositories" --data "@./$repoFile" -H "Content-Type: application/json"
	echo ""
}
|
||||
|
||||
# Upload a single package to the repository.
#   $1 - JSON description file (see $defaultPackageFile.template)
# Prints the HTTP response; the returned queue ID can be checked with -check.
function AddPackage
{
	packageFile="$1"
	if [ -z "$packageFile" ]; then
		Bail "Error: Must specify a JSON-formatted file. Reference $defaultPackageFile.template"
	fi
	# FIX: $packageFile quoted throughout — the unquoted original broke on
	# filenames containing spaces.
	if [ ! -f "$packageFile" ]; then
		Bail "Error: Cannot add package - $packageFile does not exist"
	fi
	# Best-effort extraction of the source URL purely for the log line below.
	packageUrl=$(grep "sourceUrl" "$packageFile" | head -n 1 | awk '{print $2}')
	echo "Adding package to $server [$packageUrl]"
	curl -i -k "$baseurl/v1/packages" --data "@./$packageFile" -H "Content-Type: application/json"
	echo ""
}
|
||||
|
||||
# Publish a single package given only its download URL ($1):
# download it, detect deb vs rpm, read the package name/version from its
# metadata, instantiate $defaultPackageFile from its .template, and upload
# it via AddPackage. Uses the global $repositoryId from ParseConfigFile.
function AddPackageByUrl
{
	url="$1"
	if [ -z "$url" ]; then
		Bail "Unable to publish package because no URL was specified"
	fi
	tmpFile=$(mktemp)   # downloaded package
	tmpOut=$(mktemp)    # captured metadata output
	if ! wget -q "$url" -O "$tmpFile"; then
		# BUG FIX: the original ran `rm -f $tmpFile $tmpFile`, deleting the
		# download twice and leaking $tmpOut on every failed download.
		rm -f "$tmpFile" "$tmpOut"
		Bail "Unable to download URL $url"
	elif dpkg -I "$tmpFile" > "$tmpOut" 2> /dev/null; then
		echo "File is deb format"
		pkgName=$(grep "^\s*Package:" "$tmpOut" | awk '{print $2}')
		pkgVer=$(grep "^\s*Version:" "$tmpOut" | awk '{print $2}')
	elif rpm -qpi "$tmpFile" > "$tmpOut" 2> /dev/null; then
		echo "File is rpm format"
		pkgName=$(egrep "^Name" "$tmpOut" | tr -d ':' | awk '{print $2}')
		pkgVer=$(egrep "^Version" "$tmpOut" | tr -d ':' | awk '{print $2}')
	else
		rm -f "$tmpFile" "$tmpOut"
		Bail "File is not a valid deb/rpm package $url"
	fi

	rm -f "$tmpFile" "$tmpOut"
	if [ -z "$pkgName" ]; then
		Bail "Unable to parse package name for $url"
	elif [ -z "$pkgVer" ]; then
		Bail "Unable to parse package version number for $url"
	fi

	# Create the package .json by substituting into the template; the URL is
	# escaped because it is spliced into a sed replacement string.
	escapedUrl=$(echo "$url" | sed 's/\//\\\//g' | sed 's/\&/\\\&/g')
	cp "$defaultPackageFile.template" "$defaultPackageFile"
	sed -i "s/PACKAGENAME/$pkgName/g" "$defaultPackageFile"
	sed -i "s/PACKAGEVERSION/$pkgVer/g" "$defaultPackageFile"
	sed -i "s/PACKAGEURL/$escapedUrl/g" "$defaultPackageFile"
	sed -i "s/REPOSITORYID/$repositoryId/g" "$defaultPackageFile"
	# Perform Upload
	AddPackage "$defaultPackageFile"
	# Cleanup
	rm -f "$defaultPackageFile"
}
|
||||
|
||||
# Upload multiple packages: $1 is a flat text file of package URLs
# (whitespace-separated; in practice one per line), each handed to
# AddPackageByUrl with a 5s pause between submissions to avoid flooding
# the upload queue.
function AddPackages
{
	urlFile="$1"
	# FIX: $urlFile quoted in the tests below — unquoted broke on paths
	# containing spaces.
	if [ -z "$urlFile" ]; then
		Bail "Must specify a flat text file containing one or more URLs"
	fi
	if [ ! -f "$urlFile" ]; then
		Bail "Cannot add packages. File $urlFile does not exist"
	fi
	for url in $(cat "$urlFile"); do
		if [ -n "$url" ]; then
			AddPackageByUrl "$url"
		fi
		sleep 5
	done
}
|
||||
|
||||
# Query the status of a previously submitted upload by its queue ID ($1).
function CheckUpload {
	id=$1
	[ -n "$id" ] || Bail "Must specify an ID"
	curl -k $baseurl/v1/packages/queue/$id
	echo ""
}
|
||||
|
||||
# Delete repository $1 from the server (IDs come from -listrepos).
DeleteRepo() {
	repoId=$1
	if [ -z $repoId ]; then
		Bail "Please specify repository ID. Run -listrepos for a list of IDs"
	fi
	curl -I -k -X DELETE "$baseurl/v1/repositories/$repoId"
}
|
||||
|
||||
# Delete package $1 from the current repository (IDs come from -listpkgs).
DeletePackage() {
	packageId=$1
	if [ -z $packageId ]; then
		Bail "Please specify package ID. Run -listpkgs for a list of IDs"
	fi
	echo Removing pkgId $packageId from repo $repositoryId
	curl -I -k -X DELETE "$baseurl/v1/packages/$packageId"
}
|
||||
|
||||
# ---------------------------------------------------------------------------
# Command-line parsing and dispatch.
# Not using getopts because this uses multi-char flags.
# ---------------------------------------------------------------------------
operation=
while (( "$#" )); do
	case "$1" in
		-config)
			shift
			configFile="$1"
			;;
		-listrepos)
			operation=ListRepositories
			;;
		-listpkgs)
			operation=ListPackages
			# Optional regex operand.
			if [ -n "$2" ]; then
				shift
				operand="$1"
			fi
			;;
		-addrepo)  operation=AddRepo;       shift; operand="$1" ;;
		-addpkg)   operation=AddPackage;    shift; operand="$1" ;;
		-addpkgs)  operation=AddPackages;   shift; operand="$1" ;;
		-check)    operation=CheckUpload;   shift; operand="$1" ;;
		-delrepo)  operation=DeleteRepo;    shift; operand="$1" ;;
		-delpkg)   operation=DeletePackage; shift; operand="$1" ;;
		*)
			Usage
			;;
	esac
	shift
done

echo "Performing $operation $operand"
# Parse config file
ParseConfigFile "$configFile"

# Exit if no operation was specified.
# BUG FIX: the original tested the literal string ([ -z "operation" ]),
# which is never empty, so Usage was never shown when no operation was given.
if [ -z "$operation" ]; then
	Usage
fi

$operation "$operand"
|
||||
46
build/tfs/linux/smoketest.sh
Normal file
46
build/tfs/linux/smoketest.sh
Normal file
@@ -0,0 +1,46 @@
|
||||
#!/bin/bash
# Linux smoke-test pipeline: build a minified VS Code for $ARCH, then run the
# smoke suite headlessly (xvfb) as an unprivileged user.
set -e

. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh   # provides step()

export ARCH="$1"                  # e.g. "ia32" or "x64"
export VSCODE_MIXIN_PASSWORD="$2" # credential for the distro mixin
VSO_PAT="$3"                      # VSO personal access token for git auth

echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc

step "Install dependencies" \
	npm install --arch=$ARCH --unsafe-perm

step "Mix in repository from vscode-distro" \
	npm run gulp -- mixin

step "Get Electron" \
	npm run gulp -- "electron-$ARCH"

step "Install distro dependencies" \
	node build/tfs/common/installDistro.js --arch=$ARCH

step "Build minified" \
	npm run gulp -- "vscode-linux-$ARCH-min"

# Create the unprivileged test user (idempotent) and hand it the build tree.
function configureEnvironment {
	id -u testuser &>/dev/null || (useradd -m testuser; chpasswd <<< testuser:testpassword)
	sudo -i -u testuser git config --global user.name "VS Code Agent"
	sudo -i -u testuser git config --global user.email "monacotools@microsoft.com"
	chown -R testuser $AGENT_BUILDDIRECTORY
}

function runTest {
	pushd test/smoke
	npm install
	# BUG FIX: the binary path was hard-coded to VSCode-linux-ia32 even though
	# $ARCH is parameterized and the build above produces VSCode-linux-$ARCH.
	sudo -u testuser -H xvfb-run -a -s "-screen 0 1024x768x8" npm test -- --latest "$AGENT_BUILDDIRECTORY/VSCode-linux-$ARCH/code-insiders"
	popd
}

step "Configure environment" configureEnvironment

step "Run smoke test" runTest
|
||||
|
||||
43
build/tfs/linux/x64/Dockerfile
Normal file
43
build/tfs/linux/x64/Dockerfile
Normal file
@@ -0,0 +1,43 @@
|
||||
FROM microsoft/vsts-agent:ubuntu-14.04-standard
MAINTAINER Joao Moreno <joao.moreno@microsoft.com>

ARG DEBIAN_FRONTEND=noninteractive

# Dependencies.
# FIX: update + install combined into one RUN (Dockerfile best practice):
# the original ran `apt-get update` in its own layer, so a cached update
# layer could be paired with later installs against a stale package index;
# one layer per package also bloated the image.
RUN apt-get update && apt-get install -y \
	build-essential \
	gcc-multilib g++-multilib \
	git \
	dpkg-dev \
	zip \
	rpm \
	createrepo \
	python-gtk2 \
	jq \
	xvfb \
	fakeroot \
	libgtk2.0-0 \
	libgconf-2-4 \
	libnss3 \
	libasound2 \
	libxtst6 \
	libfuse2 \
	libnotify-bin \
	libx11-dev \
	libxss1 \
	libx11-xcb-dev \
	libxkbfile-dev \
	bc bsdmainutils \
	libsecret-1-dev

# Xvfb
# Thanks https://medium.com/@griggheo/running-headless-selenium-webdriver-tests-in-docker-containers-342fdbabf756
ADD xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults

# nvm
ENV NVM_DIR /usr/local/nvm
RUN curl https://raw.githubusercontent.com/creationix/nvm/v0.33.2/install.sh | bash

CMD (service xvfb start; export DISPLAY=:10; ./start.sh)
|
||||
15
build/tfs/linux/x64/run-agent.sh
Normal file
15
build/tfs/linux/x64/run-agent.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/bin/bash
# Launch the local VSTS build-agent container. Requires the personal access
# token in a file named "pat" next to this script.

if [ ! -f pat ]; then
	echo "Error: file pat not found"
	exit 1
fi

# Interactive so the agent's log streams to the terminal; the token is read
# from the pat file at launch time.
docker run \
	-e VSTS_ACCOUNT="monacotools" \
	-e VSTS_TOKEN="$(cat pat)" \
	-e VSTS_AGENT="tb-lnx-x64-local" \
	-e VSTS_POOL="linux-x64" \
	-e VSTS_WORK="/var/vsts/work" \
	--name "tb-lnx-x64-local" \
	-it joaomoreno/vscode-vso-agent-x64:latest
|
||||
53
build/tfs/linux/x64/xvfb.init
Normal file
53
build/tfs/linux/x64/xvfb.init
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
#
# /etc/rc.d/init.d/xvfbd
#
# chkconfig: 345 95 28
# description: Starts/Stops X Virtual Framebuffer server
# processname: Xvfb
#
### BEGIN INIT INFO
# Provides: xvfb
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start xvfb at boot time
# Description: Enable xvfb provided by daemon.
### END INIT INFO

[ "${NETWORKING}" = "no" ] && exit 0

PROG="/usr/bin/Xvfb"
PROG_OPTIONS=":10 -ac"
PROG_OUTPUT="/tmp/Xvfb.out"

case "$1" in
	start)
		echo "Starting : X Virtual Frame Buffer "
		$PROG $PROG_OPTIONS>>$PROG_OUTPUT 2>&1 &
		# FIX: RETVAL was never set on the start path, leaving the final
		# 'exit $RETVAL' with an empty argument.
		RETVAL=$?
		disown -ar
		;;
	stop)
		echo "Shutting down : X Virtual Frame Buffer"
		# NOTE(review): killproc/status come from the RH init-functions
		# library, which is not sourced here — confirm availability on the
		# target distro (the Dockerfile uses Ubuntu).
		killproc $PROG
		RETVAL=$?
		# BUG FIX: /var/run/Xvfb.pid was on its own line in the original, so
		# the shell tried to EXECUTE the pid file instead of removing it.
		[ $RETVAL -eq 0 ] && /bin/rm -f /var/lock/subsys/Xvfb /var/run/Xvfb.pid
		echo
		;;
	restart|reload)
		$0 stop
		$0 start
		RETVAL=$?
		;;
	status)
		status Xvfb
		RETVAL=$?
		;;
	*)
		echo $"Usage: $0 (start|stop|restart|reload|status)"
		exit 1
esac

exit $RETVAL
|
||||
55
build/tfs/win32/1_build.ps1
Normal file
55
build/tfs/win32/1_build.ps1
Normal file
@@ -0,0 +1,55 @@
|
||||
# Win32 build pipeline: install deps, hygiene check, mix in the distro,
# fetch Electron, build minified, run unit tests. Timing/summary come from
# step/exec/done in lib.ps1.
Param(
	[string]$arch,          # target architecture, e.g. "ia32" or "x64"
	[string]$mixinPassword, # credential for the vscode-distro mixin step
	[string]$vsoPAT         # VSO personal access token for _netrc auth
)

. .\build\tfs\win32\node.ps1
. .\scripts\env.ps1
. .\build\tfs\win32\lib.ps1

# Create a _netrc file to download distro dependencies
# In order to get _netrc to work, we need a HOME variable setup
"machine monacotools.visualstudio.com password ${vsoPAT}" | Out-File "$env:HOME\_netrc" -Encoding ASCII

# Set the right architecture
$env:npm_config_arch="$arch"

step "Install dependencies" {
	exec { & npm install }
}

step "Hygiene" {
	exec { & npm run gulp -- hygiene }
}

$env:VSCODE_MIXIN_PASSWORD = $mixinPassword
step "Mix in repository from vscode-distro" {
	exec { & npm run gulp -- mixin }
}

# NOTE(review): the step blocks read $global:arch while the parameter is the
# script-scoped $arch — this only resolves if the agent runs the script so
# that its scope is global; confirm before reusing standalone.
step "Get Electron" {
	exec { & npm run gulp -- "electron-$global:arch" }
}

step "Install distro dependencies" {
	exec { & node build\tfs\common\installDistro.js }
}

step "Build minified" {
	exec { & npm run gulp -- "vscode-win32-$global:arch-min" }
}

# step "Create loader snapshot" {
# 	exec { & node build\lib\snapshotLoader.js --arch=$global:arch }
# }

step "Run unit tests" {
	exec { & .\scripts\test.bat --build --reporter dot }
}

# step "Run integration tests" {
# 	exec { & .\scripts\test-integration.bat }
# }

done
|
||||
12
build/tfs/win32/2_package.ps1
Normal file
12
build/tfs/win32/2_package.ps1
Normal file
@@ -0,0 +1,12 @@
|
||||
# Packaging stage: wraps the archive + setup gulp tasks with step timing.
# Assumes 1_build.ps1 already produced the minified build.
Param(
	[string]$arch  # target architecture, e.g. "ia32" or "x64"
)

. .\build\tfs\win32\node.ps1
. .\build\tfs\win32\lib.ps1

# NOTE(review): reads $global:arch rather than the script-scoped parameter —
# same scoping caveat as 1_build.ps1.
step "Create archive and setup package" {
	exec { & npm run gulp -- "vscode-win32-$global:arch-archive" "vscode-win32-$global:arch-setup" }
}

done
|
||||
35
build/tfs/win32/3_upload.ps1
Normal file
35
build/tfs/win32/3_upload.ps1
Normal file
@@ -0,0 +1,35 @@
|
||||
# Upload stage: publish the zip archive and the setup exe produced by
# 2_package.ps1 via build/tfs/common/publish.js.
Param(
	[string]$arch,               # target architecture, e.g. "ia32" or "x64"
	[string]$storageKey,         # Azure storage access key
	[string]$mooncakeStorageKey, # Azure China (Mooncake) storage access key
	[string]$documentDbKey       # DocumentDB master key for build metadata
)

. .\build\tfs\win32\node.ps1
. .\build\tfs\win32\lib.ps1

# Paths of the packaging outputs relative to the repo checkout.
$Repo = "$(pwd)"
$Root = "$Repo\.."
$Exe = "$Repo\.build\win32-$arch\setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$arch\archive\VSCode-win32-$arch.zip"
$Build = "$Root\VSCode-win32-$arch"

# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
# publish.js reads its credentials from these environment variables.
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:MOONCAKE_STORAGE_ACCESS_KEY = $mooncakeStorageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

# Asset platform name used by the download service ("win32" for ia32).
$assetPlatform = if ($arch -eq "ia32") { "win32" } else { "win32-x64" }

step "Publish archive" {
	exec { & node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$global:arch-$Version.zip" $Version true $Zip }
}

step "Publish setup package" {
	exec { & node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$global:arch-$Version.exe" $Version true $Exe }
}

done
|
||||
47
build/tfs/win32/lib.ps1
Normal file
47
build/tfs/win32/lib.ps1
Normal file
@@ -0,0 +1,47 @@
|
||||
# Shared helpers for the win32 build scripts: per-agent npm cache setup,
# exec (fail on non-zero exit code), step (timed, banner-wrapped task) and
# done (summary table).

# stop when there's an error
$ErrorActionPreference = 'Stop'

$env:HOME=$env:USERPROFILE

# When running under a build agent, relocate HOME and the npm caches into the
# agent's work folder so they are isolated per agent.
if (Test-Path env:AGENT_WORKFOLDER) {
	$env:HOME="${env:AGENT_WORKFOLDER}\home"
	$env:npm_config_cache="${env:HOME}\npm-cache"
	$env:npm_config_devdir="${env:HOME}\npm-devdir"
	New-Item -Path "$env:HOME" -Type directory -Force | out-null
	New-Item -Path "$env:npm_config_cache" -Type directory -Force | out-null
}

# throw when a process exits with something other than 0
function exec([scriptblock]$cmd, [string]$errorMessage = "Error executing command: " + $cmd) {
	& $cmd
	if ($LastExitCode -ne 0) {
		throw $errorMessage
	}
}

# FIX: step/done read and append to $global:Summary, but the accumulator was
# initialized as the script-scoped $Summary — initialize it in the scope it
# is actually used so the summary is populated even when this file is not
# dot-sourced at global scope.
$global:Summary = @()

# Run scriptblock $Step with start/end banners and record its duration.
function step($Task, $Step) {
	echo ""
	echo "*****************************************************************************"
	echo "Start: $Task"
	echo "*****************************************************************************"
	echo ""

	$Stopwatch = [Diagnostics.Stopwatch]::StartNew()
	Invoke-Command $Step
	$Stopwatch.Stop()
	$Formatted = "{0:g}" -f $Stopwatch.Elapsed

	echo "*****************************************************************************"
	echo "End: $Task, Total: $Formatted"
	echo "*****************************************************************************"

	$global:Summary += @{ "$Task" = $Formatted }
}

# Print the accumulated task/duration table; call at the end of a pipeline.
function done() {
	echo ""
	echo "Build Summary"
	echo "============="
	$global:Summary | Format-Table @{L="Task";E={$_.Name}}, @{L="Duration";E={$_.Value}}
}
|
||||
6
build/tfs/win32/node.ps1
Normal file
6
build/tfs/win32/node.ps1
Normal file
@@ -0,0 +1,6 @@
|
||||
# install node
# Put the NVM-for-Windows directories on PATH, install and select the pinned
# Node version, then prepend the versioned directory so child processes
# resolve the right node.exe even before the symlink updates.
$env:Path = $env:NVM_HOME + ";" + $env:NVM_SYMLINK + ";" + $env:Path
$NodeVersion = "7.10.0"
nvm install $NodeVersion
nvm use $NodeVersion
$env:Path = $env:NVM_HOME + "\v" + $NodeVersion + ";" + $env:Path
|
||||
47
build/tfs/win32/smoketest.ps1
Normal file
47
build/tfs/win32/smoketest.ps1
Normal file
@@ -0,0 +1,47 @@
|
||||
# Win32 smoke-test pipeline: build a minified VS Code for $arch, then run
# the smoke suite against the produced binary.
Param(
	[string]$arch,          # target architecture, e.g. "ia32" or "x64"
	[string]$mixinPassword, # credential for the vscode-distro mixin step
	[string]$vsoPAT         # VSO personal access token for _netrc auth
)

. .\build\tfs\win32\node.ps1
. .\scripts\env.ps1
. .\build\tfs\win32\lib.ps1

# Create a _netrc file to download distro dependencies
# In order to get _netrc to work, we need a HOME variable setup
$env:HOME = $env:USERPROFILE
"machine monacotools.visualstudio.com password ${vsoPAT}" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII

# Set the right architecture
$env:npm_config_arch = "$arch"

step "Install dependencies" {
	exec { & npm install }
}

$env:VSCODE_MIXIN_PASSWORD = $mixinPassword
step "Mix in repository from vscode-distro" {
	exec { & npm run gulp -- mixin }
}

# NOTE(review): the step blocks read $global:arch while the parameter is the
# script-scoped $arch — same scoping caveat as 1_build.ps1.
step "Get Electron" {
	exec { & npm run gulp -- "electron-$global:arch" }
}

step "Install distro dependencies" {
	exec { & node build\tfs\common\installDistro.js }
}

step "Build minified" {
	exec { & npm run gulp -- "vscode-win32-$global:arch-min" }
}

# Push-Location/Pop-Location persist across the exec scriptblocks because
# they run in the same session.
step "Run smoke test" {
	exec { & Push-Location test\smoke }
	exec { & npm install }
	exec { & npm test -- --latest "$env:AGENT_BUILDDIRECTORY\VSCode-win32-$global:arch\Code - Insiders.exe" }
	exec { & Pop-Location }
}

done
|
||||
Reference in New Issue
Block a user