Use the new lossy cred compression strategy (#1834)

This commit puts the lossy cred compression strategy from #1832 into
production.

When run on the MakerDAO forums, this drops the output size from 41MB
(close to the point where GitHub starts warning about file sizes) to
14MB (plenty of room to grow).

Test plan: I ran it on a local sc2 instance. It's unlikely that this
introduced any subtle bugs.
Dandelion Mané 2020-06-01 18:37:58 -07:00 committed by GitHub
parent d556776cca
commit 2fd32dd785
1 changed file with 12 additions and 2 deletions

@@ -9,13 +9,22 @@ import {loadInstanceConfig} from "./common";
 import {fromJSON as weightedGraphFromJSON} from "../core/weightedGraph";
 import {defaultParams} from "../analysis/timeline/params";
 import {LoggingTaskReporter} from "../util/taskReporter";
-import {compute, toJSON as credResultToJSON} from "../analysis/credResult";
+import {
+  compute,
+  toJSON as credResultToJSON,
+  compressByThreshold,
+} from "../analysis/credResult";
 
 function die(std, message) {
   std.err("fatal: " + message);
   return 1;
 }
 
+// Any cred flow that sums to less than this threshold will be filtered
+// from the time-level cred data (though we will still have a summary).
+// TODO: Make this a configurable parameter.
+const CRED_THRESHOLD = 10;
+
 const scoreCommand: Command = async (args, std) => {
   if (args.length !== 0) {
     return die(std, "usage: sourcecred score");
@@ -36,7 +45,8 @@ const scoreCommand: Command = async (args, std) => {
 
   const params = defaultParams();
   const credResult = await compute(graph, params, declarations);
-  const credJSON = stringify(credResultToJSON(credResult));
+  const compressed = compressByThreshold(credResult, CRED_THRESHOLD);
+  const credJSON = stringify(credResultToJSON(compressed));
   const outputPath = pathJoin(baseDir, "output", "credResult.json");
   await fs.writeFile(outputPath, credJSON);
   taskReporter.finish("score");
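
For readers who don't want to dig into #1832: the sketch below shows roughly what a threshold-based compression pass like this can look like. It is an illustration only, not the actual compressByThreshold imported from ../analysis/credResult above; the credData shape (nodeSummaries carrying a total cred, nodeOverTime carrying per-interval cred) and the helper name are assumptions made for the example.

// Hypothetical sketch, not the real implementation: keep the per-interval
// (time-level) cred only for nodes whose total cred meets the threshold,
// and null it out for everything else, so only the summary survives.
function compressByThresholdSketch(credResult, threshold) {
  const {nodeSummaries, nodeOverTime} = credResult.credData; // assumed shape
  const compressedOverTime = nodeOverTime.map((overTime, i) =>
    nodeSummaries[i].cred < threshold ? null : overTime
  );
  return {
    ...credResult,
    credData: {...credResult.credData, nodeOverTime: compressedOverTime},
  };
}

Since the per-interval arrays dominate the serialized output, dropping them for low-cred nodes is presumably where most of the 41MB to 14MB savings comes from, while every node keeps its summary for display.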