azuredatastudio/build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js
Aasim Khan a11beb71da Updating build pipelines to Cache@2 and switching to artifacts for compiled file sharing in pipeline jobs. (#14989)
* switching product compile node cache task to newer version

* moving new changes from product-compile to sql-product-compile

* changing to yarn.lock as cache key

* Adding compilation cache

* changing keypath to key

* letting the find command do the heavy lifting

* removing old save cache task

* reverting compilation cache to old task

* Creating a JS file to list compiled files
Switching to Cache@2 for compiled files
Creating a JS file to compute the yarn cache key

* removed unused input targetFolder from pipeline cache task

* removed save cache

* Fixing compute node modules file

* Adding compiled computeNodeModules

* Fixing checked variables on product compile
Updating all pipeline jobs to Cache@2
Using tar for the Windows pipeline, hoping it works

* Fixing indentation in web job

* Fixing different indentation in web job

* Generating sha keys for the compilation cache to be cross-platform

* trying deterministic key for compilation cache

* Fixing md5 command

* Trying another method of generating compilation cache

* testing with a hardcoded string

* Changing to a better hardcoded string

* Remove redundant make dir

* Fixing mkdir command on Windows and trying a new string key

* fixing $$ in sql product compile

* Removing redundant mkdir

* Trying source version var

* Fixing compilation key

* changing script to PowerShell

* Adding artifacts to store compiled files
switching to 7zip for the Windows node cache

* Adding missing step key in web build

* Creating missing directories

* Making correct directory

* Switching to vscode's computeNodeModuleCache

* Fixing formatting and making it look more like vscode's pipeline

* Adding back compiled compute cache key

* Fixing cache file

* Fixing copyright message
Adding sql header to custom node cache generator
Updating cache salt to force a cache miss

* Using glob instead of a custom method to find all yarn.lock files
Fixing some other pipeline errors.

* Removing unnecessary variable checks.

* Added back VSCODE_STEP_ON_IT check
Moving drop artifacts before compiled files to keep the drop folder free from compiled files

* Changing task name from cache flags to cache key

* Removing glob from compute node module cache
Fixing copyright message

* checking in updated js
2021-04-09 08:06:39 -07:00

48 lines · 1.8 KiB · JavaScript

/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
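// This script lives in build/azure-pipelines/common, so '../../../' resolves ROOT to the repository root.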
const ROOT = path.join(__dirname, '../../../');
function findFiles(location, pattern, result) {
    const entries = fs.readdirSync(path.join(ROOT, location));
    for (const entry of entries) {
        const entryPath = `${location}/${entry}`;
        let stat;
        try {
            stat = fs.statSync(path.join(ROOT, entryPath));
        }
        catch (err) {
            continue;
        }
        if (stat.isDirectory()) {
            findFiles(entryPath, pattern, result);
        }
        else {
            if (stat.isFile() && entry.endsWith(pattern)) {
                result.push(path.join(ROOT, entryPath));
            }
        }
    }
}
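// findFiles walks the tree under ROOT and appends the absolute path of every file whose name
// ends with the given pattern (e.g. every yarn.lock, including remote/ and extension lockfiles);
// entries that cannot be stat'ed are skipped by the catch above.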
const shasum = crypto.createHash('sha1');
/**
 * Creating a sha hash of all the files that can cause packages to change/redownload.
 */
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
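// Changing build/.cachesalt forces a deliberate cache miss ("Updating cache salt to force a
// cache miss" in the change notes above).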
// Adding all yarn.lock files into the sha sum.
const result = [];
findFiles('', 'yarn.lock', result);
result.forEach(f => shasum.update(fs.readFileSync(f)));
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
    shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));
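
A minimal sketch, not taken from the repository, of how a pipeline step might consume the digest this script prints; the .build/yarnlockhash path, the extra arch argument, and running from the repository root are illustrative assumptions:

'use strict';
// Hypothetical consumer: run the key script, capture the hex digest it prints, and persist it
// to a file that a Cache@2 task could hash as its key. Paths and arguments are illustrative.
const fs = require('fs');
const { execFileSync } = require('child_process');

const key = execFileSync(process.execPath, [
    'build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js',
    process.arch // extra arguments are folded into the digest by the argv loop above
], { encoding: 'utf8' }).trim();

fs.mkdirSync('.build', { recursive: true });
fs.writeFileSync('.build/yarnlockhash', key);
console.log(`node_modules cache key: ${key}`);

The Cache@2 task referenced in the change notes above can then hash that file as part of its cache key; the exact YAML wiring from #14989 is not reproduced here.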