mirror of
https://github.com/status-im/sourcecred.git
synced 2025-02-19 16:04:15 +00:00
Commit 19c64fbeaa: [update diffbase]
wchargin-branch: mirror-eav-extract
wchargin-source: a1da645abba540a2c8c2d405f10bfc2f2206554e
@@ -33,6 +33,11 @@ commands:
           paths:
             - node_modules
           key: v1-dependencies-{{ checksum "package.json" }}
+  pull_cache:
+    description: Pulls docker images usable for our cache
+    steps:
+      - run: docker pull sourcecred/sourcecred:dev
+      - run: docker pull node:12
 
 jobs:
   test:
@@ -69,6 +74,9 @@ workflows:
           branches:
             ignore:
               - master
+          extra_build_args: --cache-from=node:12,sourcecred/sourcecred:dev
+          before_build:
+            - pull_cache
           after_build:
             - run:
                 name: Preview Docker Tag for Build
@@ -86,6 +94,9 @@ workflows:
             ignore: /.*/
           tags:
             only: /^v.*/
+          extra_build_args: --cache-from=node:12,sourcecred/sourcecred:dev
+          before_build:
+            - pull_cache
           after_build:
             - run:
                 name: Publish Docker Tag with Sourcecred Version
@@ -111,6 +122,9 @@ workflows:
 
       - docker/publish:
           image: sourcecred/sourcecred
+          extra_build_args: --cache-from=node:12,sourcecred/sourcecred:dev
+          before_build:
+            - pull_cache
          requires:
            - test
            - test_full
@@ -3,6 +3,8 @@
 ## [Unreleased]
 
 <!-- Please add new entries just beneath this line. -->
+- Breaking: Change output format for the scores command (#1372)
+- Include top nodes for every type in Timeline Cred (#1358)
 
 ## [0.4.0]
 
@@ -4,6 +4,7 @@ set -eu
 usage() {
     printf 'usage: build_static_site.sh --target TARGET\n'
    printf '                            [--project PROJECT [...]]\n'
+    printf '                            [--project-file PROJECT_FILE [...]]\n'
    printf '                            [--weights WEIGHTS_FILE]\n'
    printf '                            [--cname DOMAIN]\n'
    printf '                            [--no-backend]\n'
@@ -15,6 +16,8 @@ usage() {
     printf '\t%s\n' 'an empty directory into which to build the site'
     printf '%s\n' '--project PROJECT'
     printf '\t%s\n' 'a project spec; see help for cli/load.js for details'
+    printf '%s\n' '--project-file PROJECT_FILE'
+    printf '\t%s\n' 'the path to a file containing a project config'
     printf '%s\n' '--weights WEIGHTS_FILE'
     printf '\t%s\n' 'path to a json file which contains a weights configuration.'
     printf '\t%s\n' 'This will be used instead of the default weights and persisted.'
@@ -55,6 +58,7 @@ parse_args() {
     weights=
     repos=( )
     projects=( )
+    project_files=( )
     while [ $# -gt 0 ]; do
         case "$1" in
             --target)
@@ -78,6 +82,11 @@ parse_args() {
                 if [ $# -eq 0 ]; then die 'missing value for --project'; fi
                 projects+=( "$1" )
                 ;;
+            --project-file)
+                shift
+                if [ $# -eq 0 ]; then die 'missing value for --project-file'; fi
+                project_files+=( "$1" )
+                ;;
             --cname)
                 shift
                 if [ $# -eq 0 ]; then die 'missing value for --cname'; fi
@@ -151,6 +160,17 @@ build() {
         done
     fi
 
+    if [ "${#project_files[@]}" -ne 0 ]; then
+        local weightsStr=""
+        if [ -n "${weights}" ]; then
+            weightsStr="--weights ${weights}"
+        fi
+        for project_file in "${project_files[@]}"; do
+            NODE_PATH="./node_modules${NODE_PATH:+:${NODE_PATH}}" \
+                node "${SOURCECRED_BIN:-./bin}/sourcecred.js" load --project "${project_file}" $weightsStr
+        done
+    fi
+
     yarn -s build --output-path "${target}"
 
     # Copy the SourceCred data into the appropriate API route. Using
@@ -34,7 +34,7 @@ echo "Updating github/fetchGithubOrgTest.sh"
 echo "Updating github/fetchGithubRepoTest.sh"
 ./src/plugins/github/fetchGithubRepoTest.sh -u --no-build
 
-if [ -z "${DISCOURSE_TEST_API_KEY:-}" ]; then
+if [ -n "${DISCOURSE_TEST_API_KEY:-}" ]; then
   echo "Updating Discourse API snapshots"
   ./src/plugins/discourse/update_discourse_api_snapshots.sh
 else
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 [
   {
     "type": "sourcecred/cli/scores",
-    "version": "0.1.0"
+    "version": "0.2.0"
   },
   {
     "intervals": [
@@ -200,7 +200,13 @@
     ],
     "users": [
       {
-        "id": "decentralion",
+        "address": [
+          "sourcecred",
+          "github",
+          "USERLIKE",
+          "USER",
+          "decentralion"
+        ],
         "intervalCred": [
           9.383869639663432,
           4.691404861944296,
@@ -254,7 +260,13 @@
         "totalCred": 41.343602084119254
       },
       {
-        "id": "wchargin",
+        "address": [
+          "sourcecred",
+          "github",
+          "USERLIKE",
+          "USER",
+          "wchargin"
+        ],
         "intervalCred": [
           3.6161303603365673,
           1.808595138055704,
@@ -11,6 +11,11 @@ export type PluginDeclaration = {|
   +edgePrefix: EdgeAddressT,
   +nodeTypes: $ReadOnlyArray<NodeType>,
   +edgeTypes: $ReadOnlyArray<EdgeType>,
+  // Which node types represent user identities.
+  // Important for computing score and for display in the frontend.
+  // It's expected that the userTypes will also be included in the array of
+  // nodeTypes.
+  +userTypes: $ReadOnlyArray<NodeType>,
 |};
 
 export function combineTypes(
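Note: for orientation, a minimal declaration exercising the new userTypes field might look like the sketch below. All names here are invented for illustration, not taken from this commit; the test file later in this diff uses a very similar shape.

  // A hypothetical NodeType representing users.
  const demoUserType: NodeType = {
    name: "user",
    pluralName: "users",
    prefix: NodeAddress.fromParts(["demo", "user"]),
    defaultWeight: 1,
    description: "a demo user",
  };
  // userTypes is expected to be a subset of nodeTypes.
  const demoDeclaration: PluginDeclaration = {
    name: "demo",
    nodePrefix: NodeAddress.fromParts(["demo"]),
    edgePrefix: EdgeAddress.fromParts(["demo"]),
    nodeTypes: [demoUserType],
    edgeTypes: [],
    userTypes: [demoUserType],
  };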
@@ -34,7 +34,7 @@ export type FullTimelineCred = $ReadOnlyArray<{|
 export function distributionToCred(
   ds: TimelineDistributions,
   nodeOrder: $ReadOnlyArray<NodeAddressT>,
-  scoringNodePrefix: NodeAddressT
+  scoringNodePrefixes: $ReadOnlyArray<NodeAddressT>
 ): FullTimelineCred {
   if (ds.length === 0) {
     return [];
@@ -43,7 +43,8 @@ export function distributionToCred(
   const scoringNodeIndices = [];
   const cred = new Array(nodeOrder.length);
   for (let i = 0; i < nodeOrder.length; i++) {
-    if (NodeAddress.hasPrefix(nodeOrder[i], scoringNodePrefix)) {
+    const addr = nodeOrder[i];
+    if (scoringNodePrefixes.some((x) => NodeAddress.hasPrefix(addr, x))) {
       scoringNodeIndices.push(i);
     }
     cred[i] = new Array(intervals.length);
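Note: call sites migrate by wrapping their single prefix in an array; passing several prefixes scores the union of all matching nodes. A sketch under the assumption that ds, nodeOrder, and userPrefix already exist (botPrefix is hypothetical):

  // Before this change: distributionToCred(ds, nodeOrder, userPrefix)
  // After: an array of prefixes, unioned via .some(...) above.
  const fullCred = distributionToCred(ds, nodeOrder, [userPrefix, botPrefix]);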
@@ -20,7 +20,34 @@ describe("src/analysis/timeline/distributionToCred", () => {
       },
     ];
     const nodeOrder = [na("foo"), na("bar")];
-    const actual = distributionToCred(ds, nodeOrder, NodeAddress.empty);
+    const actual = distributionToCred(ds, nodeOrder, [NodeAddress.empty]);
     const expected = [
       {
         interval: {startTimeMs: 0, endTimeMs: 10},
         cred: new Float64Array([1, 1]),
       },
       {
         interval: {startTimeMs: 10, endTimeMs: 20},
         cred: new Float64Array([9, 1]),
       },
     ];
     expect(expected).toEqual(actual);
   });
+  it("correctly handles multiple scoring prefixes", () => {
+    const ds = [
+      {
+        interval: {startTimeMs: 0, endTimeMs: 10},
+        intervalWeight: 2,
+        distribution: new Float64Array([0.5, 0.5]),
+      },
+      {
+        interval: {startTimeMs: 10, endTimeMs: 20},
+        intervalWeight: 10,
+        distribution: new Float64Array([0.9, 0.1]),
+      },
+    ];
+    const nodeOrder = [na("foo"), na("bar")];
+    const actual = distributionToCred(ds, nodeOrder, [na("foo"), na("bar")]);
+    const expected = [
+      {
+        interval: {startTimeMs: 0, endTimeMs: 10},
@@ -47,7 +74,7 @@ describe("src/analysis/timeline/distributionToCred", () => {
       },
     ];
     const nodeOrder = [na("foo"), na("bar")];
-    const actual = distributionToCred(ds, nodeOrder, na("bar"));
+    const actual = distributionToCred(ds, nodeOrder, [na("bar")]);
     const expected = [
       {
         interval: {startTimeMs: 0, endTimeMs: 10},
@@ -69,11 +96,11 @@ describe("src/analysis/timeline/distributionToCred", () => {
       },
     ];
     const nodeOrder = [na("foo"), na("bar")];
-    const fail = () => distributionToCred(ds, nodeOrder, na("zod"));
+    const fail = () => distributionToCred(ds, nodeOrder, []);
     expect(fail).toThrowError("no nodes matched scoringNodePrefix");
   });
   it("returns empty array if no intervals are present", () => {
-    expect(distributionToCred([], [], NodeAddress.empty)).toEqual([]);
+    expect(distributionToCred([], [], [])).toEqual([]);
   });
 });
 });
@@ -1,66 +0,0 @@
-// @flow
-
-import {toObject, fromObject} from "../../util/map";
-import {type Interval} from "./interval";
-import {NodeAddress, type NodeAddressT} from "../../core/graph";
-import {type FullTimelineCred} from "./distributionToCred";
-
-export type FilteredTimelineCred = {|
-  +intervals: $ReadOnlyArray<Interval>,
-  +addressToCred: Map<NodeAddressT, $ReadOnlyArray<number>>,
-|};
-
-/**
- * Compress FullTimelineCred by discarding most nodes' cred.
- *
- * FullTimelineCred contains the cred at every interval for every node in the
- * graph. This could be tens of thousands of nodes and hundreds of intervals;
- * it's ungainly to store. To avoid this issue, we compress the cred down by
- * removing cred for most nodes. (We care a lot about users' cred; not so much
- * about the cred for every individual comment ever.)
- *
- * Right now, we do this by filtering out every node that doesn't match an
- * inclusion address prefix. In the future, we may have more sophisticated
- * logic, like keeping the top k nodes for each type.
- */
-export function filterTimelineCred(
-  fullCred: FullTimelineCred,
-  nodeOrder: $ReadOnlyArray<NodeAddressT>,
-  inclusionPrefixes: $ReadOnlyArray<NodeAddressT>
-): FilteredTimelineCred {
-  const intervals = fullCred.map((x) => x.interval);
-  const addressToCred = new Map();
-  function hasMatch(x: NodeAddressT): boolean {
-    for (const prefix of inclusionPrefixes) {
-      if (NodeAddress.hasPrefix(x, prefix)) {
-        return true;
-      }
-    }
-    return false;
-  }
-  for (let i = 0; i < nodeOrder.length; i++) {
-    const addr = nodeOrder[i];
-    if (hasMatch(addr)) {
-      const addrCred = fullCred.map(({cred}) => cred[i]);
-      addressToCred.set(addr, addrCred);
-    }
-  }
-  return {intervals, addressToCred};
-}
-
-export type FilteredTimelineCredJSON = {|
-  +intervals: $ReadOnlyArray<Interval>,
-  +addressToCred: {[NodeAddressT]: $ReadOnlyArray<number>},
-|};
-
-export function filteredTimelineCredToJSON(
-  x: FilteredTimelineCred
-): FilteredTimelineCredJSON {
-  return {intervals: x.intervals, addressToCred: toObject(x.addressToCred)};
-}
-
-export function filteredTimelineCredFromJSON(
-  x: FilteredTimelineCredJSON
-): FilteredTimelineCred {
-  return {intervals: x.intervals, addressToCred: fromObject(x.addressToCred)};
-}
@@ -1,54 +0,0 @@
-// @flow
-
-import {NodeAddress} from "../../core/graph";
-import {
-  filterTimelineCred,
-  filteredTimelineCredToJSON,
-  filteredTimelineCredFromJSON,
-} from "./filterTimelineCred";
-
-describe("src/analysis/timeline/filterTimelineCred", () => {
-  const na = (...parts) => NodeAddress.fromParts(parts);
-  describe("filterTimelineCred", () => {
-    it("returns an empty object for empty cred", () => {
-      expect(filterTimelineCred([], [], [])).toEqual({
-        intervals: [],
-        addressToCred: new Map(),
-      });
-    });
-    it("appropriately filters a simple example", () => {
-      const fullCred = [
-        {
-          interval: {startTimeMs: 0, endTimeMs: 10},
-          cred: new Float64Array([1, 2, 3]),
-        },
-        {
-          interval: {startTimeMs: 10, endTimeMs: 20},
-          cred: new Float64Array([4, 5, 6]),
-        },
-      ];
-      const nodeOrder = [na("foo"), na("bar"), na("zod")];
-      const prefixes = [na("foo"), na("bar")];
-      const expected = {
-        intervals: fullCred.map((x) => x.interval),
-        addressToCred: new Map().set(na("foo"), [1, 4]).set(na("bar"), [2, 5]),
-      };
-      expect(filterTimelineCred(fullCred, nodeOrder, prefixes)).toEqual(
-        expected
-      );
-    });
-  });
-
-  it("JSON serialization", () => {
-    const i0 = {startTimeMs: 0, endTimeMs: 10};
-    const i1 = {startTimeMs: 10, endTimeMs: 20};
-    const intervals = [i0, i1];
-    const fc = {
-      intervals,
-      addressToCred: new Map().set(na("foo"), [1, 4]).set(na("bar"), [2, 5]),
-    };
-    const json = filteredTimelineCredToJSON(fc);
-    const fc_ = filteredTimelineCredFromJSON(json);
-    expect(fc).toEqual(fc_);
-  });
-});
src/analysis/timeline/params.js (new file, 102 lines)
@@ -0,0 +1,102 @@
+// @flow
+
+import {
+  type Weights,
+  type WeightsJSON,
+  toJSON as weightsToJSON,
+  fromJSON as weightsFromJSON,
+  defaultWeights,
+} from "../weights";
+
+/**
+ * Parameters for computing TimelineCred
+ *
+ * The parameters are intended to be user-configurable.
+ */
+export type TimelineCredParameters = {|
+  // Determines how quickly cred returns to the PageRank seed vector. If alpha
+  // is high, then cred will tend to "stick" to nodes that are seeded, e.g.
+  // issues and pull requests. Alpha should be between 0 and 1.
+  +alpha: number,
+  // Determines how quickly cred decays. If the decay is 1, then cred never
+  // decays, and old nodes and edges will retain full weight forever. (This
+  // would result in cred that is highly biased towards old contributions, as
+  // they would continue earning cred in every timeslice, forever.) If the
+  // decay is 0, then weights go to zero the first week after their node/edge
+  // was created. Should be between 0 and 1.
+  +intervalDecay: number,
+  // The weights. This determines how much cred is assigned based on different
+  // node types, how cred flows across various edge types, and can specify
+  // manual weights directly on individual nodes. See the docs in
+  // `analysis/weights` for details.
+  +weights: Weights,
+|};
+
+export const DEFAULT_ALPHA = 0.05;
+export const DEFAULT_INTERVAL_DECAY = 0.5;
+
+/**
+ * The PartialTimelineCredParameters are a version of TimelineCredParameters
+ * where every field has been marked optional, to make it convenient for API
+ * clients to override just the parameters they want to.
+ */
+export type PartialTimelineCredParameters = {|
+  +alpha?: number,
+  +intervalDecay?: number,
+  +weights?: Weights,
+|};
+
+export type TimelineCredParametersJSON = {|
+  +alpha: number,
+  +intervalDecay: number,
+  +weights: WeightsJSON,
+|};
+
+export function paramsToJSON(
+  p: TimelineCredParameters
+): TimelineCredParametersJSON {
+  return {
+    alpha: p.alpha,
+    intervalDecay: p.intervalDecay,
+    weights: weightsToJSON(p.weights),
+  };
+}
+
+export function paramsFromJSON(
+  p: TimelineCredParametersJSON
+): TimelineCredParameters {
+  return {
+    alpha: p.alpha,
+    intervalDecay: p.intervalDecay,
+    weights: weightsFromJSON(p.weights),
+  };
+}
+
+/**
+ * Exports the default TimelineCredParameters.
+ *
+ * End consumers of SourceCred will not need to depend on this; it's
+ * provided for implementation of SourceCred's APIs.
+ */
+export function defaultParams(): TimelineCredParameters {
+  return {
+    alpha: DEFAULT_ALPHA,
+    intervalDecay: DEFAULT_INTERVAL_DECAY,
+    weights: defaultWeights(),
+  };
+}
+
+/**
+ * Promote PartialTimelineCredParameters to TimelineCredParameters.
+ *
+ * This takes PartialTimelineCredParameters and mixes them with the
+ * default parameters to provide a full TimelineCredParameters.
+ *
+ * End consumers of SourceCred will not need to depend on this; it's
+ * provided for implementation of SourceCred's APIs.
+ */
+export function partialParams(
+  partial: PartialTimelineCredParameters
+): TimelineCredParameters {
+  return {...defaultParams(), ...partial};
+}
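Note: partialParams relies on object-spread semantics, so only the fields present in the partial object override the defaults. A quick sketch (override values arbitrary):

  // {...defaultParams(), ...partial}: fields in `partial` win; the rest
  // fall back to DEFAULT_ALPHA, DEFAULT_INTERVAL_DECAY, defaultWeights().
  const params = partialParams({alpha: 0.2});
  // params.alpha === 0.2
  // params.intervalDecay === DEFAULT_INTERVAL_DECAY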
src/analysis/timeline/params.test.js (new file, 67 lines)
@@ -0,0 +1,67 @@
+// @flow
+
+import {
+  paramsToJSON,
+  paramsFromJSON,
+  defaultParams,
+  partialParams,
+  type TimelineCredParameters,
+  DEFAULT_ALPHA,
+  DEFAULT_INTERVAL_DECAY,
+} from "./params";
+import {defaultWeights} from "../weights";
+import {NodeAddress} from "../../core/graph";
+
+describe("analysis/timeline/params", () => {
+  const customWeights = () => {
+    const weights = defaultWeights();
+    // Ensure it works with non-default weights
+    weights.nodeManualWeights.set(NodeAddress.empty, 33);
+    return weights;
+  };
+  it("JSON round trip", () => {
+    const p: TimelineCredParameters = {
+      alpha: 0.1337,
+      intervalDecay: 0.31337,
+      weights: customWeights(),
+    };
+    const j = paramsToJSON(p);
+    const p_ = paramsFromJSON(j);
+    const j_ = paramsToJSON(p_);
+    expect(j).toEqual(j_);
+    expect(p).toEqual(p_);
+  });
+  it("defaultParams", () => {
+    const expected: TimelineCredParameters = {
+      alpha: DEFAULT_ALPHA,
+      intervalDecay: DEFAULT_INTERVAL_DECAY,
+      weights: defaultWeights(),
+    };
+    expect(defaultParams()).toEqual(expected);
+  });
+  describe("partialParams", () => {
+    it("uses default values if no overrides provided", () => {
+      const params = partialParams({});
+      expect(params).toEqual(defaultParams());
+    });
+    it("accepts an alpha override", () => {
+      const params = partialParams({alpha: 0.99});
+      expect(params.weights).toEqual(defaultWeights());
+      expect(params.alpha).toEqual(0.99);
+      expect(params.intervalDecay).toEqual(DEFAULT_INTERVAL_DECAY);
+    });
+    it("accepts weights override", () => {
+      const weights = customWeights();
+      const params = partialParams({weights});
+      expect(params.weights).toEqual(weights);
+      expect(params.alpha).toEqual(DEFAULT_ALPHA);
+      expect(params.intervalDecay).toEqual(DEFAULT_INTERVAL_DECAY);
+    });
+    it("accepts intervalDecay override", () => {
+      const params = partialParams({intervalDecay: 0.1});
+      expect(params.weights).toEqual(defaultWeights());
+      expect(params.alpha).toEqual(DEFAULT_ALPHA);
+      expect(params.intervalDecay).toEqual(0.1);
+    });
+  });
+});
@@ -3,10 +3,12 @@
 import {sum} from "d3-array";
 import sortBy from "lodash.sortby";
 import * as NullUtil from "../../util/null";
+import * as MapUtil from "../../util/map";
 import {toCompat, fromCompat, type Compatible} from "../../util/compat";
 import {type Interval} from "./interval";
 import {timelinePagerank} from "./timelinePagerank";
 import {distributionToCred} from "./distributionToCred";
+import {type PluginDeclaration, combineTypes} from "../pluginDeclaration";
 import {
   Graph,
   type GraphJSON,
@@ -15,19 +17,14 @@ import {
   type Node,
 } from "../../core/graph";
 import {
-  type Weights,
-  type WeightsJSON,
-  toJSON as weightsToJSON,
-  fromJSON as weightsFromJSON,
-} from "../weights";
-import {type NodeAndEdgeTypes} from "../types";
-import {
-  filterTimelineCred,
-  type FilteredTimelineCred,
-  filteredTimelineCredToJSON,
-  filteredTimelineCredFromJSON,
-  type FilteredTimelineCredJSON,
-} from "./filterTimelineCred";
+  type TimelineCredParameters,
+  paramsToJSON,
+  paramsFromJSON,
+  type TimelineCredParametersJSON,
+  type PartialTimelineCredParameters,
+  partialParams,
+  defaultParams,
+} from "./params";
 
 export type {Interval} from "./interval";
 
@@ -43,53 +40,6 @@ export type CredNode = {|
   +cred: $ReadOnlyArray<number>,
 |};
 
-/**
- * Parameters for computing TimelineCred
- *
- * The parameters are intended to be user-configurable.
- */
-export type TimelineCredParameters = {|
-  // Determines how quickly cred returns to the PageRank seed vector. If alpha
-  // is high, then cred will tend to "stick" to nodes that are seeded, e.g.
-  // issues and pull requests. Alpha should be between 0 and 1.
-  +alpha: number,
-  // Determines how quickly cred decays. If the decay is 1, then cred never
-  // decays, and old nodes and edges will retain full weight forever. (This
-  // would result in cred that is highly biased towards old contributions, as
-  // they would continue earning cred in every timeslice, forever.) If the
-  // decay is 0, then weights go to zero the first week after their node/edge
-  // was created. Should be between 0 and 1.
-  +intervalDecay: number,
-  // The weights. This determines how much cred is assigned based on different
-  // node types, how cred flows across various edge types, and can specify
-  // manual weights directly on individual nodes. See the docs in
-  // `analysis/weights` for details.
-  +weights: Weights,
-|};
-
-/**
- * Configuration for computing TimelineCred
- *
- * Unlike the parameters, the config is expected to be static.
- * It's code-level config that isolates the TimelineCred algorithms from
- * specific plugin-level details about which node addresses are used for
- * scoring, etc.
- *
- * A default config is available in `src/plugins/defaultCredConfig`
- */
-export type TimelineCredConfig = {|
-  // Cred is normalized so that for a given interval, the total score of all
-  // nodes matching this prefix will be equal to the total weight of nodes in
-  // the interval.
-  +scoreNodePrefix: NodeAddressT,
-  // To save on space, we keep cred only for nodes matching one of these
-  // NodeAddresses.
-  +filterNodePrefixes: $ReadOnlyArray<NodeAddressT>,
-  // The types are used to assign base cred to nodes based on their type. Note
-  // that the weight for each type may be overridden in the params.
-  +types: NodeAndEdgeTypes,
-|};
-
 /**
  * Represents the timeline cred of a graph. This class wraps all the data
  * needed to analyze and interpret cred (i.e. it has the Graph and the cred
@@ -100,20 +50,23 @@ export type TimelineCredConfig = {|
  */
 export class TimelineCred {
   _graph: Graph;
-  _cred: FilteredTimelineCred;
+  _intervals: $ReadOnlyArray<Interval>;
+  _addressToCred: Map<NodeAddressT, $ReadOnlyArray<number>>;
   _params: TimelineCredParameters;
-  _config: TimelineCredConfig;
+  _plugins: $ReadOnlyArray<PluginDeclaration>;
 
   constructor(
     graph: Graph,
-    cred: FilteredTimelineCred,
+    intervals: $ReadOnlyArray<Interval>,
+    addressToCred: Map<NodeAddressT, $ReadOnlyArray<number>>,
     params: TimelineCredParameters,
-    config: TimelineCredConfig
+    plugins: $ReadOnlyArray<PluginDeclaration>
   ) {
     this._graph = graph;
-    this._cred = cred;
+    this._intervals = intervals;
+    this._addressToCred = addressToCred;
     this._params = params;
-    this._config = config;
+    this._plugins = plugins;
   }
 
   graph(): Graph {
@@ -124,8 +77,8 @@ export class TimelineCred {
     return this._params;
   }
 
-  config(): TimelineCredConfig {
-    return this._config;
+  plugins(): $ReadOnlyArray<PluginDeclaration> {
+    return this._plugins;
   }
 
   /**
@@ -134,15 +87,21 @@ export class TimelineCred {
    *
    * This returns a new TimelineCred; it does not modify the existing one.
    */
-  async reanalyze(newParams: TimelineCredParameters): Promise<TimelineCred> {
-    return await TimelineCred.compute(this._graph, newParams, this._config);
+  async reanalyze(
+    newParams: PartialTimelineCredParameters
+  ): Promise<TimelineCred> {
+    return await TimelineCred.compute({
+      graph: this._graph,
+      params: newParams,
+      plugins: this._plugins,
+    });
   }
 
   /**
    * Return all the intervals in the timeline.
    */
   intervals(): $ReadOnlyArray<Interval> {
-    return this._cred.intervals;
+    return this._intervals;
   }
 
   /**
@@ -154,7 +113,7 @@ export class TimelineCred {
    * filtered results; if so, it will return undefined.
    */
   credNode(a: NodeAddressT): ?CredNode {
-    const cred = this._cred.addressToCred.get(a);
+    const cred = this._addressToCred.get(a);
     if (cred === undefined) {
       return undefined;
     }
@@ -164,99 +123,159 @@ export class TimelineCred {
   }
 
   /**
-   * Return all the nodes matching the prefix, along with their cred,
-   * sorted by total cred (descending).
+   * Returns nodes sorted by their total cred (descending).
+   *
+   * If prefixes is provided, then only nodes matching at least one of the provided
+   * address prefixes will be included.
    */
-  credSortedNodes(prefix: NodeAddressT): $ReadOnlyArray<CredNode> {
-    const match = (a) => NodeAddress.hasPrefix(a, prefix);
-    const addresses = Array.from(this._cred.addressToCred.keys()).filter(match);
+  credSortedNodes(
+    prefixes?: $ReadOnlyArray<NodeAddressT>
+  ): $ReadOnlyArray<CredNode> {
+    let addresses = Array.from(this._addressToCred.keys());
+
+    if (prefixes != null) {
+      const match = (a) => prefixes.some((p) => NodeAddress.hasPrefix(a, p));
+      addresses = addresses.filter(match);
+    }
     const credNodes = addresses.map((a) => this.credNode(a));
     return sortBy(credNodes, (x: CredNode) => -x.total);
   }
 
+  /**
+   * Returns all user-typed nodes, sorted by their total cred (descending).
+   *
+   * A node is considered a user-type node if its address has a prefix match
+   * with a type specified as a user type by one of the plugin declarations.
+   */
+  userNodes(): $ReadOnlyArray<CredNode> {
+    const userTypes = [].concat(...this.plugins().map((p) => p.userTypes));
+    return this.credSortedNodes(userTypes.map((x) => x.prefix));
+  }
+
+  /**
+   * Create a new, filtered TimelineCred, by removing low-scored nodes.
+   *
+   * Cred Graphs may have a huge number of small contributions, like comments,
+   * in which end users are not particularly interested. However, the size of
+   * the TimelineCred offered to the frontend matters quite a bit. Therefore,
+   * we can use this method to discard almost all nodes in the graph.
+   *
+   * Specifically, `reduceSize` takes in an array of inclusion prefixes: for
+   * each inclusion prefix, we will take the top `k` nodes matching that prefix
+   * (by total score across all intervals).
+   *
+   * It also takes `fullInclusion` prefixes: for these prefixes, every matching
+   * node will be included. This allows us to ensure that e.g. every user will
+   * be included in the `cli scores` output, even if they are not in the top
+   * `k` users.
+   */
+  reduceSize(opts: {|
+    +typePrefixes: $ReadOnlyArray<NodeAddressT>,
+    +nodesPerType: number,
+    +fullInclusionPrefixes: $ReadOnlyArray<NodeAddressT>,
+  |}): TimelineCred {
+    const {typePrefixes, nodesPerType, fullInclusionPrefixes} = opts;
+    const selectedNodes: Set<NodeAddressT> = new Set();
+    for (const prefix of typePrefixes) {
+      const matchingNodes = this.credSortedNodes([prefix]).slice(
+        0,
+        nodesPerType
+      );
+      for (const {node} of matchingNodes) {
+        selectedNodes.add(node.address);
+      }
+    }
+    // For the fullInclusionPrefixes, we won't slice -- we just take every match.
+    const matchingNodes = this.credSortedNodes(fullInclusionPrefixes);
+    for (const {node} of matchingNodes) {
+      selectedNodes.add(node.address);
+    }
+
+    const filteredAddressToCred = new Map();
+    for (const address of selectedNodes) {
+      const cred = NullUtil.get(this._addressToCred.get(address));
+      filteredAddressToCred.set(address, cred);
+    }
+    return new TimelineCred(
+      this._graph,
+      this._intervals,
+      filteredAddressToCred,
+      this._params,
+      this._plugins
+    );
+  }
   toJSON(): TimelineCredJSON {
     const rawJSON = {
       graphJSON: this._graph.toJSON(),
-      credJSON: filteredTimelineCredToJSON(this._cred),
+      intervalsJSON: this._intervals,
+      credJSON: MapUtil.toObject(this._addressToCred),
       paramsJSON: paramsToJSON(this._params),
+      pluginsJSON: this._plugins,
     };
     return toCompat(COMPAT_INFO, rawJSON);
   }
 
-  static fromJSON(
-    j: TimelineCredJSON,
-    config: TimelineCredConfig
-  ): TimelineCred {
+  static fromJSON(j: TimelineCredJSON): TimelineCred {
     const json = fromCompat(COMPAT_INFO, j);
-    const {graphJSON, credJSON, paramsJSON} = json;
+    const {graphJSON, intervalsJSON, credJSON, paramsJSON, pluginsJSON} = json;
+    const cred = MapUtil.fromObject(credJSON);
     const graph = Graph.fromJSON(graphJSON);
-    const cred = filteredTimelineCredFromJSON(credJSON);
     const params = paramsFromJSON(paramsJSON);
-    return new TimelineCred(graph, cred, params, config);
+    return new TimelineCred(graph, intervalsJSON, cred, params, pluginsJSON);
   }
 
-  static async compute(
-    graph: Graph,
-    params: TimelineCredParameters,
-    config: TimelineCredConfig
-  ): Promise<TimelineCred> {
-    const ftc = await _computeTimelineCred(graph, params, config);
-    return new TimelineCred(graph, ftc, params, config);
+  static async compute(opts: {|
+    graph: Graph,
+    params?: PartialTimelineCredParameters,
+    plugins: $ReadOnlyArray<PluginDeclaration>,
+  |}): Promise<TimelineCred> {
+    const {graph, params, plugins} = opts;
+    const fullParams = params == null ? defaultParams() : partialParams(params);
+    const nodeOrder = Array.from(graph.nodes()).map((x) => x.address);
+    const types = combineTypes(plugins);
+    const userTypes = [].concat(...plugins.map((x) => x.userTypes));
+    const scorePrefixes = userTypes.map((x) => x.prefix);
+    const distribution = await timelinePagerank(
+      graph,
+      types,
+      fullParams.weights,
+      fullParams.intervalDecay,
+      fullParams.alpha
+    );
+    const cred = distributionToCred(
+      distribution,
+      nodeOrder,
+      userTypes.map((x) => x.prefix)
+    );
+    const addressToCred = new Map();
+    for (let i = 0; i < nodeOrder.length; i++) {
+      const addr = nodeOrder[i];
+      const addrCred = cred.map(({cred}) => cred[i]);
+      addressToCred.set(addr, addrCred);
+    }
+    const intervals = cred.map((x) => x.interval);
+    const preliminaryCred = new TimelineCred(
+      graph,
+      intervals,
+      addressToCred,
+      fullParams,
+      plugins
+    );
+    return preliminaryCred.reduceSize({
+      typePrefixes: types.nodeTypes.map((x) => x.prefix),
+      nodesPerType: 100,
+      fullInclusionPrefixes: scorePrefixes,
+    });
   }
 }
 
-async function _computeTimelineCred(
-  graph: Graph,
-  params: TimelineCredParameters,
-  config: TimelineCredConfig
-): Promise<FilteredTimelineCred> {
-  const nodeOrder = Array.from(graph.nodes()).map((x) => x.address);
-  const distribution = await timelinePagerank(
-    graph,
-    config.types,
-    params.weights,
-    params.intervalDecay,
-    params.alpha
-  );
-  const cred = distributionToCred(
-    distribution,
-    nodeOrder,
-    config.scoreNodePrefix
-  );
-  const filtered = filterTimelineCred(
-    cred,
-    nodeOrder,
-    config.filterNodePrefixes
-  );
-  return filtered;
-}
-
-const COMPAT_INFO = {type: "sourcecred/timelineCred", version: "0.1.0"};
+const COMPAT_INFO = {type: "sourcecred/timelineCred", version: "0.5.0"};
 
 export opaque type TimelineCredJSON = Compatible<{|
   +graphJSON: GraphJSON,
-  +paramsJSON: ParamsJSON,
-  +credJSON: FilteredTimelineCredJSON,
+  +paramsJSON: TimelineCredParametersJSON,
+  +pluginsJSON: $ReadOnlyArray<PluginDeclaration>,
+  +credJSON: {[string]: $ReadOnlyArray<number>},
+  +intervalsJSON: $ReadOnlyArray<Interval>,
 |}>;
-
-type ParamsJSON = {|
-  +alpha: number,
-  +intervalDecay: number,
-  +weights: WeightsJSON,
-|};
-
-function paramsToJSON(p: TimelineCredParameters): ParamsJSON {
-  return {
-    alpha: p.alpha,
-    intervalDecay: p.intervalDecay,
-    weights: weightsToJSON(p.weights),
-  };
-}
-
-function paramsFromJSON(p: ParamsJSON): TimelineCredParameters {
-  return {
-    alpha: p.alpha,
-    intervalDecay: p.intervalDecay,
-    weights: weightsFromJSON(p.weights),
-  };
-}
@@ -1,19 +1,54 @@
 // @flow
 
+import deepFreeze from "deep-freeze";
 import {sum} from "d3-array";
 import sortBy from "lodash.sortby";
 import {utcWeek} from "d3-time";
-import {NodeAddress, Graph} from "../../core/graph";
-import {TimelineCred, type TimelineCredConfig} from "./timelineCred";
-import {type FilteredTimelineCred} from "./filterTimelineCred";
-import {defaultWeights} from "../weights";
+import {
+  NodeAddress,
+  Graph,
+  type NodeAddressT,
+  EdgeAddress,
+} from "../../core/graph";
+import {TimelineCred} from "./timelineCred";
+import {defaultParams} from "./params";
+import {type PluginDeclaration} from "../pluginDeclaration";
+import {type NodeType} from "../types";
 
 describe("src/analysis/timeline/timelineCred", () => {
-  const credConfig: () => TimelineCredConfig = () => ({
-    scoreNodePrefix: NodeAddress.fromParts(["foo"]),
-    filterNodePrefixes: [NodeAddress.fromParts(["foo"])],
-    types: {nodeTypes: [], edgeTypes: []},
-  });
+  const userType: NodeType = {
+    name: "user",
+    pluralName: "users",
+    prefix: NodeAddress.fromParts(["user"]),
+    defaultWeight: 0,
+    description: "a user",
+  };
+  const userPrefix = userType.prefix;
+  const fooType: NodeType = {
+    name: "foo",
+    pluralName: "foos",
+    prefix: NodeAddress.fromParts(["foo"]),
+    defaultWeight: 0,
+    description: "a foo",
+  };
+  const fooPrefix = fooType.prefix;
+  const plugin: PluginDeclaration = deepFreeze({
+    name: "foo",
+    nodePrefix: NodeAddress.empty,
+    edgePrefix: EdgeAddress.empty,
+    nodeTypes: [userType, fooType],
+    edgeTypes: [],
+    userTypes: [userType],
+  });
+  const users = [
+    ["starter", (x) => Math.max(0, 20 - x)],
+    ["steady", (_) => 4],
+    ["finisher", (x) => (x * x) / 20],
+    ["latecomer", (x) => Math.max(0, x - 20)],
+  ];
+
+  // Ensure tests can't contaminate shared state.
+  deepFreeze([userType, fooType, users]);
 
   function exampleTimelineCred(): TimelineCred {
     const startTimeMs = +new Date(2017, 0);
@@ -26,17 +61,11 @@ describe("src/analysis/timeline/timelineCred", () => {
         endTimeMs: +boundaries[i + 1],
       });
     }
-    const users = [
-      ["starter", (x) => Math.max(0, 20 - x)],
-      ["steady", (_) => 4],
-      ["finisher", (x) => (x * x) / 20],
-      ["latecomer", (x) => Math.max(0, x - 20)],
-    ];
-
     const graph = new Graph();
     const addressToCred = new Map();
     for (const [name, generator] of users) {
-      const address = NodeAddress.fromParts(["foo", name]);
+      const address = NodeAddress.append(userPrefix, name);
       graph.addNode({
         address,
         description: `[@${name}](https://github.com/${name})`,
@@ -45,41 +74,130 @@ describe("src/analysis/timeline/timelineCred", () => {
       const scores = intervals.map((_unused, i) => generator(i));
       addressToCred.set(address, scores);
     }
-    const filteredTimelineCred: FilteredTimelineCred = {
-      intervals,
-      addressToCred,
-    };
-    const params = {alpha: 0.05, intervalDecay: 0.5, weights: defaultWeights()};
-    return new TimelineCred(graph, filteredTimelineCred, params, credConfig());
+    for (let i = 0; i < 100; i++) {
+      const address = NodeAddress.append(fooPrefix, String(i));
+      graph.addNode({
+        address,
+        timestampMs: null,
+        description: `foo ${i}`,
+      });
+      const scores = intervals.map((_) => i);
+      addressToCred.set(address, scores);
+    }
+    return new TimelineCred(graph, intervals, addressToCred, defaultParams(), [
+      plugin,
+    ]);
   }
 
   it("JSON serialization works", () => {
     const tc = exampleTimelineCred();
     const json = exampleTimelineCred().toJSON();
-    const tc_ = TimelineCred.fromJSON(json, credConfig());
+    const tc_ = TimelineCred.fromJSON(json);
     expect(tc.graph()).toEqual(tc_.graph());
     expect(tc.params()).toEqual(tc_.params());
-    expect(tc.config()).toEqual(tc_.config());
     expect(tc.credSortedNodes(NodeAddress.empty)).toEqual(
       tc.credSortedNodes(NodeAddress.empty)
     );
   });
 
   it("cred sorting works", () => {
     const tc = exampleTimelineCred();
-    const sorted = tc.credSortedNodes(NodeAddress.empty);
+    const sorted = tc.credSortedNodes();
     const expected = sortBy(sorted, (x) => -x.total);
     expect(sorted).toEqual(expected);
   });
 
+  it("prefix filtering works", () => {
+    const tc = exampleTimelineCred();
+    const filtered = tc.credSortedNodes([userPrefix]);
+    for (const {node} of filtered) {
+      const isUser = NodeAddress.hasPrefix(node.address, userPrefix);
+      expect(isUser).toBe(true);
+    }
+    expect(filtered).toHaveLength(users.length);
+  });
+
+  it("prefix filtering can combine disjoint prefixes", () => {
+    const tc = exampleTimelineCred();
+    const filtered = tc.credSortedNodes([userPrefix, fooPrefix]);
+    const all = tc.credSortedNodes();
+    expect(filtered).toEqual(all);
+  });
+
+  it("prefix filtering will not result in node double-inclusion", () => {
+    const tc = exampleTimelineCred();
+    const filtered = tc.credSortedNodes([userPrefix, NodeAddress.empty]);
+    const all = tc.credSortedNodes();
+    expect(filtered).toEqual(all);
+  });
+
+  it("an empty list of prefixes results in an empty array", () => {
+    const tc = exampleTimelineCred();
+    const filtered = tc.credSortedNodes([]);
+    expect(filtered).toHaveLength(0);
+  });
+
   it("cred aggregation works", () => {
     const tc = exampleTimelineCred();
-    const nodes = tc.credSortedNodes(NodeAddress.empty);
+    const nodes = tc.credSortedNodes();
     for (const node of nodes) {
       expect(node.total).toEqual(sum(node.cred));
     }
   });
 
+  describe("reduceSize", () => {
+    it("chooses top nodes for each type prefix", () => {
+      const nodesPerType = 3;
+      const tc = exampleTimelineCred();
+      const filtered = tc.reduceSize({
+        typePrefixes: [userPrefix, fooPrefix],
+        nodesPerType,
+        fullInclusionPrefixes: [],
+      });
+
+      const checkPrefix = (p: NodeAddressT) => {
+        const fullNodes = tc.credSortedNodes([p]);
+        const truncatedNodes = filtered.credSortedNodes([p]);
+        expect(truncatedNodes).toHaveLength(nodesPerType);
+        expect(fullNodes.slice(0, nodesPerType)).toEqual(truncatedNodes);
+      };
+      checkPrefix(userPrefix);
+      checkPrefix(fooPrefix);
+    });
+
+    it("can keep only scoring nodes", () => {
+      const nodesPerType = 3;
+      const tc = exampleTimelineCred();
+      const filtered = tc.reduceSize({
+        typePrefixes: [],
+        nodesPerType,
+        fullInclusionPrefixes: [userPrefix],
+      });
+      const fullUserNodes = tc.credSortedNodes([userPrefix]);
+      const truncatedUserNodes = filtered.credSortedNodes([userPrefix]);
+      expect(fullUserNodes).toEqual(truncatedUserNodes);
+      const truncatedFoo = filtered.credSortedNodes([fooPrefix]);
+      expect(truncatedFoo).toHaveLength(0);
+    });
+
+    it("keeps all scoring nodes (with multiple scoring types)", () => {
+      const nodesPerType = 3;
+      const tc = exampleTimelineCred();
+      const filtered = tc.reduceSize({
+        typePrefixes: [userPrefix, NodeAddress.fromParts(["nope"])],
+        nodesPerType,
+        fullInclusionPrefixes: [userPrefix, fooPrefix],
+      });
+      const fullUserNodes = tc.credSortedNodes([userPrefix]);
+      const truncatedUserNodes = filtered.credSortedNodes([userPrefix]);
+      expect(fullUserNodes).toEqual(truncatedUserNodes);
+      const fullFoo = tc.credSortedNodes([fooPrefix]);
+      const truncatedFoo = filtered.credSortedNodes([fooPrefix]);
+      expect(fullFoo).toEqual(truncatedFoo);
+    });
+  });
+
+  it("userNodes returns the credSortedNodes for user types", () => {
+    const tc = exampleTimelineCred();
+    expect(tc.userNodes()).toEqual(tc.credSortedNodes([userPrefix]));
+  });
+
   it("credNode returns undefined for absent nodes", () => {
     const tc = exampleTimelineCred();
     expect(tc.credNode(NodeAddress.fromParts(["baz"]))).toBe(undefined);
@@ -6,21 +6,20 @@ import path from "path";
 import {TaskReporter} from "../util/taskReporter";
 import {Graph} from "../core/graph";
 import {loadGraph} from "../plugins/github/loadGraph";
-import {
-  type TimelineCredParameters,
-  TimelineCred,
-} from "../analysis/timeline/timelineCred";
-
-import {DEFAULT_CRED_CONFIG} from "../plugins/defaultCredConfig";
+import {TimelineCred} from "../analysis/timeline/timelineCred";
+import {defaultParams, partialParams} from "../analysis/timeline/params";
+import {type PartialTimelineCredParameters} from "../analysis/timeline/params";
 
 import {type Project} from "../core/project";
 import {setupProjectDirectory} from "../core/project_io";
 import {loadDiscourse} from "../plugins/discourse/loadDiscourse";
+import {type PluginDeclaration} from "../analysis/pluginDeclaration";
 import * as NullUtil from "../util/null";
 
 export type LoadOptions = {|
   +project: Project,
-  +params: TimelineCredParameters,
+  +params: ?PartialTimelineCredParameters,
+  +plugins: $ReadOnlyArray<PluginDeclaration>,
   +sourcecredDirectory: string,
   +githubToken: string | null,
   +discourseKey: string | null,
@@ -44,7 +43,8 @@ export async function load(
   options: LoadOptions,
   taskReporter: TaskReporter
 ): Promise<void> {
-  const {project, params, sourcecredDirectory, githubToken} = options;
+  const {project, params, plugins, sourcecredDirectory, githubToken} = options;
+  const fullParams = params == null ? defaultParams() : partialParams(params);
   const loadTask = `load-${options.project.id}`;
   taskReporter.start(loadTask);
   const cacheDirectory = path.join(sourcecredDirectory, "cache");
@@ -101,7 +101,7 @@ export async function load(
   await fs.writeFile(graphFile, JSON.stringify(graph.toJSON()));
 
   taskReporter.start("compute-cred");
-  const cred = await TimelineCred.compute(graph, params, DEFAULT_CRED_CONFIG);
+  const cred = await TimelineCred.compute({graph, params: fullParams, plugins});
   const credJSON = cred.toJSON();
   const credFile = path.join(projectDirectory, "cred.json");
   await fs.writeFile(credFile, JSON.stringify(credJSON));
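Note: a sketch of a caller under the new LoadOptions shape (the project value, plugin declarations, and task reporter are assumed to exist; the directory path is illustrative):

  const options: LoadOptions = {
    project,
    params: null, // null now means "use defaultParams()"
    plugins: [githubDeclaration, discourseDeclaration],
    sourcecredDirectory: "/tmp/sourcecred",
    githubToken: process.env.SOURCECRED_GITHUB_TOKEN || null,
    discourseKey: null,
  };
  await load(options, taskReporter);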
@@ -19,7 +19,10 @@ import {NodeAddress, Graph} from "../core/graph";
 import {node} from "../core/graphTestUtil";
 import {TestTaskReporter} from "../util/taskReporter";
 import {load, type LoadOptions} from "./load";
-import {DEFAULT_CRED_CONFIG} from "../plugins/defaultCredConfig";
+import {
+  type PartialTimelineCredParameters,
+  partialParams,
+} from "../analysis/timeline/params";
 
 type JestMockFn = $Call<typeof jest.fn>;
 jest.mock("../plugins/github/loadGraph", () => ({
@@ -71,7 +74,8 @@ describe("api/load", () => {
   // Tweaks the weights so that we can ensure we aren't overriding with default weights
   weights.nodeManualWeights.set(NodeAddress.empty, 33);
   // Deep freeze will freeze the weights, too
-  const params = deepFreeze({alpha: 0.05, intervalDecay: 0.5, weights});
+  const params: PartialTimelineCredParameters = {weights};
+  const plugins = deepFreeze([]);
   const example = () => {
     const sourcecredDirectory = tmp.dirSync().name;
     const taskReporter = new TestTaskReporter();
@@ -79,6 +83,7 @@ describe("api/load", () => {
       sourcecredDirectory,
       githubToken,
       params,
+      plugins,
      project,
      discourseKey,
    };
@@ -138,14 +143,10 @@ describe("api/load", () => {
   it("calls TimelineCred.compute with the right graph and options", async () => {
     const {options, taskReporter} = example();
     await load(options, taskReporter);
-    expect(timelineCredCompute).toHaveBeenCalledWith(
-      expect.anything(),
-      params,
-      DEFAULT_CRED_CONFIG
-    );
-    expect(timelineCredCompute.mock.calls[0][0].equals(combinedGraph())).toBe(
-      true
-    );
+    const args = timelineCredCompute.mock.calls[0][0];
+    expect(args.graph.equals(combinedGraph())).toBe(true);
+    expect(args.params).toEqual(partialParams(params));
+    expect(args.plugins).toEqual(plugins);
   });
 
   it("saves the resultant cred.json to disk", async () => {
@@ -4,6 +4,8 @@
 import os from "os";
 import path from "path";
 import deepFreeze from "deep-freeze";
+import fs from "fs-extra";
+import {type Weights, fromJSON as weightsFromJSON} from "../analysis/weights";
 
 import * as NullUtil from "../util/null";
 
@@ -27,3 +29,16 @@ export function githubToken(): string | null {
 export function discourseKey(): string | null {
   return NullUtil.orElse(process.env.SOURCECRED_DISCOURSE_KEY, null);
 }
+
+export async function loadWeights(path: string): Promise<Weights> {
+  if (!(await fs.exists(path))) {
+    throw new Error("Could not find the weights file");
+  }
+  const raw = await fs.readFile(path, "utf-8");
+  const weightsJSON = JSON.parse(raw);
+  try {
+    return weightsFromJSON(weightsJSON);
+  } catch (e) {
+    throw new Error(`provided weights file is invalid:\n${e}`);
+  }
+}
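Note: a minimal usage sketch (the path is illustrative):

  // Rejects with "Could not find the weights file" for a missing path, and
  // with "provided weights file is invalid" when the JSON does not parse
  // as a weights configuration.
  const weights = await loadWeights("/path/to/weights.json");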
@@ -1,6 +1,10 @@
 // @flow
 
 import path from "path";
+import tmp from "tmp";
+import fs from "fs-extra";
+import {defaultWeights, toJSON as weightsToJSON} from "../analysis/weights";
+import {NodeAddress} from "../core/graph";
 
 import {
   defaultPlugins,
@@ -8,6 +12,7 @@ import {
   sourcecredDirectory,
   githubToken,
   discourseKey,
+  loadWeights,
 } from "./common";
 
 describe("cli/common", () => {
@@ -66,4 +71,36 @@ describe("cli/common", () => {
       expect(discourseKey()).toBe(null);
     });
   });
+
+  describe("loadWeights", () => {
+    function tmpWithContents(contents: mixed) {
+      const name = tmp.tmpNameSync();
+      fs.writeFileSync(name, JSON.stringify(contents));
+      return name;
+    }
+    it("works in a simple success case", async () => {
+      const weights = defaultWeights();
+      // Make a modification, just to be sure we aren't always loading the
+      // default weights.
+      weights.nodeManualWeights.set(NodeAddress.empty, 3);
+      const weightsJSON = weightsToJSON(weights);
+      const file = tmpWithContents(weightsJSON);
+      const weights_ = await loadWeights(file);
+      expect(weights).toEqual(weights_);
+    });
+    it("rejects if the file is not a valid weights file", () => {
+      const file = tmpWithContents(1234);
+      expect.assertions(1);
+      return loadWeights(file).catch((e) =>
+        expect(e.message).toMatch("provided weights file is invalid:")
+      );
+    });
+    it("rejects if the file does not exist", () => {
+      const file = tmp.tmpNameSync();
+      expect.assertions(1);
+      return loadWeights(file).catch((e) =>
+        expect(e.message).toMatch("Could not find the weights file")
+      );
+    });
+  });
 });
src/cli/genProject.js (new file, 187 lines)
@@ -0,0 +1,187 @@
+// @flow
+// Implementation of `sourcecred gen-project`.
+// This method is intended as a placeholder for generating a project definition,
+// before we build a more intentional declarative json config approach, as discussed
+// here: https://github.com/sourcecred/sourcecred/issues/1232#issuecomment-519538494
+// This method is untested; please take care when modifying it!
+
+import dedent from "../util/dedent";
+import type {Command} from "./command";
+import * as Common from "./common";
+import stringify from "json-stable-stringify";
+import {type Project, projectToJSON} from "../core/project";
+import {type RepoId} from "../core/repoId";
+import {specToProject} from "../plugins/github/specToProject";
+import * as NullUtil from "../util/null";
+
+function usage(print: (string) => void): void {
+  print(
+    dedent`\
+    usage: sourcecred gen-project PROJECT_ID
+                                  [--github GITHUB_SPEC [...]]
+                                  [--discourse-url DISCOURSE_URL]
+                                  [--discourse-username DISCOURSE_USERNAME]
+           sourcecred gen-project --help
+
+    Generates a SourceCred project configuration based on the provided specs.
+
+    A PROJECT_ID must be provided, and will be the name of the project.
+
+    Zero or more github specs may be provided; each GitHub spec can be of the
+    form OWNER/NAME (as in 'torvalds/linux') for loading a single repository,
+    or @owner (as in '@torvalds') for loading all repositories owned by a given
+    account.
+
+    A discourse url and discourse username may be provided. If one is provided,
+    then both must be. The discourse url is a url to a valid Discourse server,
+    as in 'https://discourse.sourcecred.io', and the username must be a valid
+    user on that server, as in 'credbot'. The user in question should not have
+    any special or admin permissions, so that it won't encounter hidden
+    messages.
+
+    All of the GitHub specs, and the Discourse specification (if it exists)
+    will be combined into a single project. The serialized project
+    configuration will be printed to stdout.
+
+    Arguments:
+        PROJECT_ID
+            Locally unique identifier for the project.
+
+        --github GITHUB_SPEC
+            A specification (in form 'OWNER/NAME' or '@OWNER') of GitHub
+            repositories to load.
+
+        --discourse-url DISCOURSE_URL
+            The url of a Discourse server to load.
+
+        --discourse-username DISCOURSE_USERNAME
+            The username of a Discourse account to scan from. It's recommended
+            to make an account called "credbot".
+
+        --help
+            Show this help message and exit, as 'sourcecred help gen-project'.
+
+    Environment Variables:
+        SOURCECRED_GITHUB_TOKEN
+            API token for GitHub. This should be a 40-character hex
+            string. Required if using GitHub specs.
+
+            To generate a token, create a "Personal access token" at
+            <https://github.com/settings/tokens>. When loading data for
+            public repositories, no special permissions are required.
+            For private repositories, the 'repo' scope is required.
+    `.trimRight()
+  );
+}
+
+function die(std, message) {
+  std.err("fatal: " + message);
+  std.err("fatal: run 'sourcecred help gen-project' for help");
+  return 1;
+}
+
+export const genProject: Command = async (args, std) => {
+  let projectId: string | null = null;
+  let discourseUrl: string | null = null;
+  let discourseUsername: string | null = null;
+  const githubSpecs: string[] = [];
+  for (let i = 0; i < args.length; i++) {
+    switch (args[i]) {
+      case "--help": {
+        usage(std.out);
+        return 0;
+      }
+      case "--github": {
+        if (++i >= args.length)
+          return die(std, "'--github' given without value");
+        githubSpecs.push(args[i]);
+        break;
+      }
+      case "--discourse-url": {
+        if (discourseUrl != undefined)
+          return die(std, "'--discourse-url' given multiple times");
+        if (++i >= args.length)
+          return die(std, "'--discourse-url' given without value");
+        discourseUrl = args[i];
+        break;
+      }
+      case "--discourse-username": {
+        if (discourseUsername != undefined)
+          return die(std, "'--discourse-username' given multiple times");
+        if (++i >= args.length)
+          return die(std, "'--discourse-username' given without value");
+        discourseUsername = args[i];
+        break;
+      }
+      default: {
+        if (projectId != null) return die(std, "multiple project IDs provided");
+        projectId = args[i];
+        break;
+      }
+    }
+  }
+
+  if (projectId == null) {
+    return die(std, "no project ID provided");
+  }
+
+  const githubToken = Common.githubToken();
+  const project: Project = await createProject({
+    projectId,
+    githubSpecs,
+    discourseUsername,
+    discourseUrl,
+    githubToken,
+  });
+  const projectJSON = projectToJSON(project);
+  console.log(stringify(projectJSON));
+  return 0;
+};
+
+export async function createProject(opts: {|
+  +projectId: string,
+  +githubSpecs: $ReadOnlyArray<string>,
+  +discourseUsername: string | null,
+  +discourseUrl: string | null,
+  +githubToken: string | null,
+|}): Promise<Project> {
+  const {
+    projectId,
+    githubSpecs,
+    discourseUsername,
+    discourseUrl,
+    githubToken,
+  } = opts;
+  let repoIds: RepoId[] = [];
+  let discourseServer = null;
+  if (discourseUrl && discourseUsername) {
+    discourseServer = {serverUrl: discourseUrl, apiUsername: discourseUsername};
+  } else if (
+    (!discourseUrl && discourseUsername) ||
+    (discourseUrl && !discourseUsername)
+  ) {
+    throw new Error(
+      "If either of discourseUrl and discourseUsername are provided, then both must be."
+    );
+  }
+  if (githubSpecs.length && githubToken == null) {
+    throw new Error("Provided GitHub specs without GitHub token.");
+  }
+  for (const spec of githubSpecs) {
+    const subproject = await specToProject(spec, NullUtil.get(githubToken));
+    repoIds = repoIds.concat(subproject.repoIds);
+  }
+  return {id: projectId, repoIds, discourseServer};
+}
+
+export default genProject;
+
+export const help: Command = async (args, std) => {
+  if (args.length === 0) {
+    usage(std.out);
+    return 0;
+  } else {
+    usage(std.err);
+    return 1;
+  }
+};
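Note: an illustrative call to createProject (the spec and server values are examples only, not taken from this commit):

  // Combines two GitHub specs and a Discourse server into one project.
  const project = await createProject({
    projectId: "my-combined-project",
    githubSpecs: ["sourcecred/sourcecred", "@decentralion"],
    discourseUsername: "credbot",
    discourseUrl: "https://discourse.sourcecred.io",
    githubToken: process.env.SOURCECRED_GITHUB_TOKEN || null,
  });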
@@ -7,6 +7,7 @@ import dedent from "../util/dedent";
 import {help as loadHelp} from "./load";
 import {help as scoresHelp} from "./scores";
 import {help as clearHelp} from "./clear";
+import {help as genProjectHelp} from "./genProject";
 
 const help: Command = async (args, std) => {
   if (args.length === 0) {
@@ -19,6 +20,7 @@ const help: Command = async (args, std) => {
     load: loadHelp,
     scores: scoresHelp,
     clear: clearHelp,
+    "gen-project": genProjectHelp,
   };
   if (subHelps[command] !== undefined) {
     return subHelps[command](args.slice(1), std);
@@ -39,6 +41,8 @@ function usage(print: (string) => void): void {
     Commands:
       load          load repository data into SourceCred
       clear         clear SourceCred data
       scores        print SourceCred scores to stdout
+      gen-project   print a SourceCred project config to stdout
       help          show this help message
 
     Use 'sourcecred help COMMAND' for help about an individual command.
@ -6,15 +6,21 @@ import {LoggingTaskReporter} from "../util/taskReporter";
import type {Command} from "./command";
import * as Common from "./common";
import {defaultWeights, fromJSON as weightsFromJSON} from "../analysis/weights";
import {projectFromJSON} from "../core/project";
import {load} from "../api/load";
import {specToProject} from "../plugins/github/specToProject";
import fs from "fs-extra";
import {partialParams} from "../analysis/timeline/params";
import {type PluginDeclaration} from "../analysis/pluginDeclaration";
import {declaration as discourseDeclaration} from "../plugins/discourse/declaration";
import {declaration as githubDeclaration} from "../plugins/github/declaration";

function usage(print: (string) => void): void {
  print(
    dedent`\
      usage: sourcecred load [PROJECT_SPEC...]
                             [--weights WEIGHTS_FILE]
                             [--project PROJECT_FILE]
             sourcecred load --help

      Load a target project, generating a cred attribution for it.
@ -28,6 +34,10 @@ function usage(print: (string) => void): void {
      PROJECT_SPEC:
        Identifier of a project to load.

      --project PROJECT_FILE
        Path to a json file which contains a project configuration.
        That project will be loaded.

      --weights WEIGHTS_FILE
        Path to a json file which contains a weights configuration.
        This will be used instead of the default weights and persisted.
@ -64,6 +74,7 @@ function die(std, message) {

const loadCommand: Command = async (args, std) => {
  const projectSpecs: string[] = [];
  const projectPaths: string[] = [];
  let weightsPath: ?string;
  for (let i = 0; i < args.length; i++) {
    switch (args[i]) {
@ -79,13 +90,19 @@ const loadCommand: Command = async (args, std) => {
        weightsPath = args[i];
        break;
      }
      case "--project": {
        if (++i >= args.length)
          return die(std, "'--project' given without value");
        projectPaths.push(args[i]);
        break;
      }
      default: {
        projectSpecs.push(args[i]);
        break;
      }
    }
  }
  if (projectSpecs.length == 0) {
  if (projectSpecs.length === 0 && projectPaths.length === 0) {
    return die(std, "projects not specified");
  }

@ -101,17 +118,29 @@ const loadCommand: Command = async (args, std) => {

  const taskReporter = new LoggingTaskReporter();

  const projects = await Promise.all(
  const specProjects = await Promise.all(
    projectSpecs.map((s) => specToProject(s, githubToken))
  );
  const params = {alpha: 0.05, intervalDecay: 0.5, weights};
  const optionses = projects.map((project) => ({
    project,
    params,
    sourcecredDirectory: Common.sourcecredDirectory(),
    githubToken,
    discourseKey: Common.discourseKey(),
  }));
  const params = partialParams({weights});
  const manualProjects = await Promise.all(projectPaths.map(loadProject));
  const projects = specProjects.concat(manualProjects);
  const optionses = projects.map((project) => {
    const plugins: PluginDeclaration[] = [];
    if (project.discourseServer != null) {
      plugins.push(discourseDeclaration);
    }
    if (project.repoIds.length) {
      plugins.push(githubDeclaration);
    }
    return {
      project,
      params,
      plugins,
      sourcecredDirectory: Common.sourcecredDirectory(),
      githubToken,
      discourseKey: Common.discourseKey(),
    };
  });
  // Deliberately load in serial because GitHub requests that their API not
  // be called concurrently
  for (const options of optionses) {
@ -134,6 +163,20 @@ const loadWeightOverrides = async (path: string) => {
  }
};

const loadProject = async (path: string) => {
  if (!(await fs.exists(path))) {
    throw new Error(`Project path ${path} does not exist`);
  }

  const raw = await fs.readFile(path, "utf-8");
  const json = JSON.parse(raw);
  try {
    return projectFromJSON(json);
  } catch (e) {
    throw new Error(`project at path ${path} is invalid:\n${e}`);
  }
};
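As a sketch of where `--project` files come from: loadProject reads back the JSON that projectToJSON (used by gen-project above) produces, so a valid file could be generated like this. The id and filename are illustrative:

    // Sketch only; assumes projectToJSON from ../core/project round-trips
    // with projectFromJSON, as the compat pattern suggests.
    async function writeExampleProjectFile() {
      const project = {id: "my-project", repoIds: [], discourseServer: null};
      await fs.writeFile("project.json", JSON.stringify(projectToJSON(project)));
    }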

export const help: Command = async (args, std) => {
  if (args.length === 0) {
    usage(std.out);
@ -10,6 +10,8 @@ import loadCommand, {help} from "./load";
import type {LoadOptions} from "../api/load";
import {defaultWeights, toJSON as weightsToJSON} from "../analysis/weights";
import * as Common from "./common";
import {defaultParams, partialParams} from "../analysis/timeline/params";
import {declaration as githubDeclaration} from "../plugins/github/declaration";

import {makeRepoId, stringToRepoId} from "../core/repoId";

@ -76,7 +78,8 @@ describe("cli/load", () => {
          repoIds: [makeRepoId("foo", "bar")],
          discourseServer: null,
        },
        params: {alpha: 0.05, intervalDecay: 0.5, weights: defaultWeights()},
        params: defaultParams(),
        plugins: [githubDeclaration],
        sourcecredDirectory: Common.sourcecredDirectory(),
        githubToken: fakeGithubToken,
        discourseKey: fakeDiscourseKey,
@ -100,7 +103,8 @@ describe("cli/load", () => {
          repoIds: [stringToRepoId(projectId)],
          discourseServer: null,
        },
        params: {alpha: 0.05, intervalDecay: 0.5, weights: defaultWeights()},
        params: defaultParams(),
        plugins: [githubDeclaration],
        sourcecredDirectory: Common.sourcecredDirectory(),
        githubToken: fakeGithubToken,
        discourseKey: fakeDiscourseKey,
@ -137,7 +141,8 @@ describe("cli/load", () => {
          repoIds: [makeRepoId("foo", "bar")],
          discourseServer: null,
        },
        params: {alpha: 0.05, intervalDecay: 0.5, weights},
        params: partialParams({weights}),
        plugins: [githubDeclaration],
        sourcecredDirectory: Common.sourcecredDirectory(),
        githubToken: fakeGithubToken,
        discourseKey: fakeDiscourseKey,
@ -13,12 +13,10 @@ import {
  type Interval,
  type CredNode,
} from "../analysis/timeline/timelineCred";
import {DEFAULT_CRED_CONFIG} from "../plugins/defaultCredConfig";
import {userNodeType} from "../plugins/github/declaration";
import * as GN from "../plugins/github/nodes";
import {directoryForProjectId} from "../core/project_io";
import {NodeAddress} from "../core/graph";

const COMPAT_INFO = {type: "sourcecred/cli/scores", version: "0.1.0"};
const COMPAT_INFO = {type: "sourcecred/cli/scores", version: "0.2.0"};

function usage(print: (string) => void): void {
  print(
@ -57,7 +55,11 @@ function die(std, message) {
}

export type NodeOutput = {|
  +id: string,
  // The components of the SourceCred address for the node
  // Conventionally, the first two components designate what plugin
  // generated the node, as in [PLUGIN_AUTHOR, PLUGIN_NAME, ...]
  // Subsequent components are created according to plugin-specific logic.
  +address: $ReadOnlyArray<string>,
  +totalCred: number,
  +intervalCred: $ReadOnlyArray<number>,
|};
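To make the shape concrete, a hypothetical NodeOutput value following the address convention described above (every value below is illustrative, not real data):

    // {
    //   id: "alice",
    //   address: ["sourcecred", "example-plugin", "USER", "alice"],
    //   totalCred: 42.0,
    //   intervalCred: [1.5, 0.25, 3.75],
    // }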
@ -100,17 +102,13 @@ export const scores: Command = async (args, std) => {

  const credBlob = await fs.readFile(credFile);
  const credJSON = JSON.parse(credBlob.toString());
  const timelineCred = TimelineCred.fromJSON(credJSON, DEFAULT_CRED_CONFIG);
  const timelineCred = TimelineCred.fromJSON(credJSON);
  const userOutput: NodeOutput[] = timelineCred
    .credSortedNodes(userNodeType.prefix)
    .userNodes()
    .map((n: CredNode) => {
      const address = n.node.address;
      const structuredAddress = GN.fromRaw((address: any));
      if (structuredAddress.type !== GN.USERLIKE_TYPE) {
        throw new Error("invariant violation");
      }
      const address = NodeAddress.toParts(n.node.address);
      return {
        id: structuredAddress.login,
        address,
        intervalCred: n.cred,
        totalCred: n.total,
      };
@ -9,6 +9,7 @@ import help from "./help";
import load from "./load";
import scores from "./scores";
import clear from "./clear";
import genProject from "./genProject";

const sourcecred: Command = async (args, std) => {
  if (args.length === 0) {
@ -28,6 +29,8 @@ const sourcecred: Command = async (args, std) => {
      return clear(args.slice(1), std);
    case "scores":
      return scores(args.slice(1), std);
    case "gen-project":
      return genProject(args.slice(1), std);
    default:
      std.err("fatal: unknown command: " + JSON.stringify(args[0]));
      std.err("fatal: run 'sourcecred help' for commands and usage");
@ -4,12 +4,6 @@ import React from "react";
import type {Assets} from "../webutil/assets";
import {TimelineExplorer} from "./TimelineExplorer";
import {TimelineCred} from "../analysis/timeline/timelineCred";
import {
  declaration as githubDeclaration,
  userNodeType,
  repoNodeType,
} from "../plugins/github/declaration";
import {DEFAULT_CRED_CONFIG} from "../plugins/defaultCredConfig";
import {encodeProjectId, type ProjectId} from "../core/project";

export type Props = {|
@ -73,9 +67,6 @@ export class TimelineApp extends React.Component<Props, State> {
          <TimelineExplorer
            initialTimelineCred={timelineCred}
            projectId={this.props.projectId}
            declarations={[githubDeclaration]}
            defaultNodeType={userNodeType}
            filterableNodeTypes={[userNodeType, repoNodeType]}
          />
        );
      }
@ -96,7 +87,7 @@ export async function defaultLoader(
    if (!response.ok) {
      return Promise.reject(response);
    }
    return TimelineCred.fromJSON(await response.json(), DEFAULT_CRED_CONFIG);
    return TimelineCred.fromJSON(await response.json());
  }

  try {
@ -10,7 +10,7 @@ import {TimelineCred} from "../analysis/timeline/timelineCred";

export type Props = {|
  +timelineCred: TimelineCred,
  +selectedNodeFilter: NodeAddressT,
  +selectedNodeFilter: NodeAddressT | null,
|};

const MAX_ENTRIES_PER_LIST = 100;
@ -33,7 +33,13 @@ const DEFAULT_ENTRIES_PER_CHART = 6;
export class TimelineCredView extends React.Component<Props> {
  render() {
    const {selectedNodeFilter, timelineCred} = this.props;
    const nodes = timelineCred.credSortedNodes(selectedNodeFilter);
    const nodes = (() => {
      if (selectedNodeFilter == null) {
        return timelineCred.userNodes();
      } else {
        return timelineCred.credSortedNodes([selectedNodeFilter]);
      }
    })();
    const tableNodes = nodes.slice(0, MAX_ENTRIES_PER_LIST);
    const chartNodes = nodes
      .slice(0, DEFAULT_ENTRIES_PER_CHART)
@ -6,9 +6,7 @@ import type {Assets} from "../webutil/assets";
import {TimelineCredView} from "./TimelineCredView";
import {Graph, NodeAddress} from "../core/graph";
import {type Interval, TimelineCred} from "../analysis/timeline/timelineCred";
import {type FilteredTimelineCred} from "../analysis/timeline/filterTimelineCred";
import {defaultWeights} from "../analysis/weights";
import {DEFAULT_CRED_CONFIG} from "../plugins/defaultCredConfig";
import {defaultParams} from "../analysis/timeline/params";

export default class TimelineCredViewInspectiontest extends React.Component<{|
  +assets: Assets,
@ -48,17 +46,8 @@ export default class TimelineCredViewInspectiontest extends React.Component<{|
      const scores = intervals.map((_unused, i) => generator(i));
      addressToCred.set(address, scores);
    }
    const filteredTimelineCred: FilteredTimelineCred = {
      intervals,
      addressToCred,
    };
    const params = {alpha: 0.05, intervalDecay: 0.5, weights: defaultWeights()};
    return new TimelineCred(
      graph,
      filteredTimelineCred,
      params,
      DEFAULT_CRED_CONFIG
    );
    const params = defaultParams();
    return new TimelineCred(graph, intervals, addressToCred, params, []);
  }

  render() {
@ -2,26 +2,19 @@

import React from "react";
import deepEqual from "lodash.isequal";
import {type PluginDeclaration} from "../analysis/pluginDeclaration";
import {type Weights, copy as weightsCopy} from "../analysis/weights";
import {type NodeAddressT} from "../core/graph";
import {
  TimelineCred,
  type TimelineCredParameters,
} from "../analysis/timeline/timelineCred";
import {TimelineCred} from "../analysis/timeline/timelineCred";
import {type TimelineCredParameters} from "../analysis/timeline/params";
import {TimelineCredView} from "./TimelineCredView";
import Link from "../webutil/Link";
import {WeightConfig} from "./weights/WeightConfig";
import {WeightsFileManager} from "./weights/WeightsFileManager";
import {type NodeType} from "../analysis/types";
import {type PluginDeclaration} from "../analysis/pluginDeclaration";

export type Props = {
  projectId: string,
  initialTimelineCred: TimelineCred,
  // TODO: Get this info from the TimelineCred
  declarations: $ReadOnlyArray<PluginDeclaration>,
  +defaultNodeType: NodeType,
  +filterableNodeTypes: $ReadOnlyArray<NodeType>,
};

export type State = {
@ -31,7 +24,7 @@ export type State = {
  intervalDecay: number,
  loading: boolean,
  showWeightConfig: boolean,
  selectedNodeTypePrefix: NodeAddressT,
  selectedNodeTypePrefix: NodeAddressT | null,
};

/**
@ -46,9 +39,7 @@ export class TimelineExplorer extends React.Component<Props, State> {
  constructor(props: Props) {
    super(props);
    const timelineCred = props.initialTimelineCred;
    const {defaultNodeType} = props;
    const {alpha, intervalDecay, weights} = timelineCred.params();
    const selectedNodeTypePrefix = defaultNodeType.prefix;
    this.state = {
      timelineCred,
      alpha,
@ -59,7 +50,7 @@ export class TimelineExplorer extends React.Component<Props, State> {
      weights: weightsCopy(weights),
      loading: false,
      showWeightConfig: false,
      selectedNodeTypePrefix,
      selectedNodeTypePrefix: null,
    };
  }

@ -88,7 +79,7 @@ export class TimelineExplorer extends React.Component<Props, State> {
    );
    const weightConfig = (
      <WeightConfig
        declarations={this.props.declarations}
        declarations={this.state.timelineCred.plugins()}
        nodeTypeWeights={this.state.weights.nodeTypeWeights}
        edgeTypeWeights={this.state.weights.edgeTypeWeights}
        onNodeWeightChange={(prefix, weight) => {
@ -152,6 +143,23 @@ export class TimelineExplorer extends React.Component<Props, State> {
  }

  renderFilterSelect() {
    const optionGroup = (declaration: PluginDeclaration) => {
      const header = (
        <option
          key={declaration.nodePrefix}
          value={declaration.nodePrefix}
          style={{fontWeight: "bold"}}
        >
          {declaration.name}
        </option>
      );
      const entries = declaration.nodeTypes.map((type) => (
        <option key={type.prefix} value={type.prefix}>
          {"\u2003" + type.name}
        </option>
      ));
      return [header, ...entries];
    };
    return (
      <label>
        <span style={{marginLeft: "5px"}}>Showing: </span>
@ -161,11 +169,10 @@ export class TimelineExplorer extends React.Component<Props, State> {
            this.setState({selectedNodeTypePrefix: e.target.value})
          }
        >
          {this.props.filterableNodeTypes.map(({prefix, pluralName}) => (
            <option key={prefix} value={prefix}>
              {pluralName}
            </option>
          ))}
          <option key={null} value={null}>
            All users
          </option>
          {this.state.timelineCred.plugins().map(optionGroup)}
        </select>
      </label>
    );
@ -1,14 +0,0 @@
// @flow

import deepFreeze from "deep-freeze";
import {userNodeType, repoNodeType, declaration} from "./github/declaration";
import type {TimelineCredConfig} from "../analysis/timeline/timelineCred";

export const DEFAULT_CRED_CONFIG: TimelineCredConfig = deepFreeze({
  scoreNodePrefix: userNodeType.prefix,
  filterNodePrefixes: [userNodeType.prefix, repoNodeType.prefix],
  types: {
    nodeTypes: declaration.nodeTypes.slice(),
    edgeTypes: declaration.edgeTypes.slice(),
  },
});
@ -44,4 +44,5 @@ export const declaration: PluginDeclaration = deepFreeze({
  nodeTypes: [inserterNodeType, machineNodeType],
  edgePrefix: EdgeAddress.fromParts(["factorio"]),
  edgeTypes: [assemblesEdgeType, transportsEdgeType],
  userTypes: [],
});
@ -74,7 +74,7 @@ export const likesEdgeType: EdgeType = deepFreeze({
});

export const declaration: PluginDeclaration = deepFreeze({
  name: "discourse",
  name: "Discourse",
  nodePrefix,
  edgePrefix,
  nodeTypes: [userNodeType, topicNodeType, postNodeType],
@ -85,4 +85,5 @@ export const declaration: PluginDeclaration = deepFreeze({
    topicContainsPostEdgeType,
    likesEdgeType,
  ],
  userTypes: [userNodeType],
});
@ -150,8 +150,7 @@ export class Fetcher implements Discourse {

  async latestTopicId(): Promise<TopicId> {
    const response = await this._fetch("/latest.json?order=created");
    failFor404(response);
    failFor403(response);
    failIfMissing(response);
    failForNotOk(response);
    const json = await response.json();
    if (json.topic_list.topics.length === 0) {
@ -162,8 +161,7 @@ export class Fetcher implements Discourse {

  async latestPosts(): Promise<Post[]> {
    const response = await this._fetch("/posts.json");
    failFor404(response);
    failFor403(response);
    failIfMissing(response);
    failForNotOk(response);
    const json = await response.json();
    return json.latest_posts.map(parsePost);
@ -171,18 +169,10 @@ export class Fetcher implements Discourse {

  async topicWithPosts(id: TopicId): Promise<TopicWithPosts | null> {
    const response = await this._fetch(`/t/${id}.json`);
    if (response.status === 404) {
      // Not sure why this happens, but a topic can sometimes 404.
      // We should just consider it unreachable.
      // Here is an example: https://discourse.sourcecred.io/t/116
      return null;
    }
    if (response.status === 403) {
      // Probably this topic is hidden or deleted.
      // Just consider it unreachable.
      // If the issue is that the user provided bad keys, then
      // they will get a more helpful error when they try to get the latest
      // topic id.
    const {status} = response;
    if (status === 403 || status === 404 || status === 410) {
      // The topic is hidden, deleted, or otherwise missing.
      // Example of a 404 topic: https://discourse.sourcecred.io/t/116
      return null;
    }
    failForNotOk(response);
@ -199,12 +189,9 @@ export class Fetcher implements Discourse {

  async post(id: PostId): Promise<Post | null> {
    const response = await this._fetch(`/posts/${id}.json`);
    if (response.status === 404) {
      // Since topics can 404, I assume posts can too.
      return null;
    }
    if (response.status === 403) {
      // Probably this post is hidden or deleted.
    const {status} = response;
    if (status === 403 || status === 404 || status === 410) {
      // The post is hidden, deleted, or otherwise missing.
      return null;
    }
    failForNotOk(response);
@ -216,25 +203,25 @@ export class Fetcher implements Discourse {
    const response = await this._fetch(
      `/user_actions.json?username=${username}&filter=1&offset=${offset}`
    );
    failFor404(response);
    failFor403(response);
    failIfMissing(response);
    failForNotOk(response);
    const json = await response.json();
    return json.user_actions.map(parseLike);
  }
}

function failFor404(response: Response) {
function failIfMissing(response: Response) {
  if (response.status === 404) {
    throw new Error(`404 Not Found on: ${response.url}; maybe bad serverUrl?`);
  }
}

function failFor403(response: Response) {
  if (response.status === 403) {
    throw new Error(`403 Forbidden: bad API username or key?`);
  }
  if (response.status === 410) {
    throw new Error(`410 Gone`);
  }
}
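A minimal sketch of the check ordering the Fetcher methods above rely on; the standalone function and endpoint wiring are illustrative (the real methods go through this._fetch):

    async function fetchPostsJson(serverUrl: string): Promise<Object> {
      const response = await fetch(serverUrl + "/posts.json");
      failIfMissing(response); // a 404 here suggests a bad serverUrl
      failForNotOk(response); // any other non-OK status (403, 410, 5xx, ...)
      return await response.json();
    }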

function failForNotOk(response: Response) {
  if (!response.ok) {
    throw new Error(`not OK status ${response.status} on ${response.url}`);
@ -31,4 +31,5 @@ export const declaration: PluginDeclaration = deepFreeze({
  edgePrefix: E.Prefix.base,
  nodeTypes,
  edgeTypes,
  userTypes: [],
});
@ -10,14 +10,17 @@ export const BLACKLISTED_IDS: $ReadOnlyArray<ObjectId> = deepFreeze([
  "MDEyOk9yZ2FuaXphdGlvbjE3OTUyOTI1",
  "MDEyOk9yZ2FuaXphdGlvbjI5MTkzOTQ=",
  "MDEyOk9yZ2FuaXphdGlvbjEyNDE3MDI0",
  // In this case, the bot used to be a user (@greenkeeper)
  "MDM6Qm90MjMwNDAwNzY=",
  // @dependabot also gives inconsistent results (user vs bot)
  "MDM6Qm90NDk2OTkzMzM=",
  "MDEyOk9yZ2FuaXphdGlvbjQzMDkzODIw",
  // These are `Bot` nodes that are sometimes referenced in a `User`
  // context: in particular, as the author of a commit.
  "MDM6Qm90MjMwNDAwNzY=", // greenkeeper
  "MDM6Qm90NDk2OTkzMzM=", // dependabot
  "MDM6Qm90NDY0NDczMjE=", // allcontributors
  // These are the offending reactions.
  "MDg6UmVhY3Rpb24yMTY3ODkyNQ==",
  "MDg6UmVhY3Rpb240NDMwMzQ1",
  "MDg6UmVhY3Rpb24xMDI4MzQxOA==",
  "MDg6UmVhY3Rpb24zNDUxNjA2MQ==",
  // This org used to be a user (@nueko)
  "MDEyOk9yZ2FuaXphdGlvbjIxMzQ5NTM=",
  // Problematic interactions they did as a user: Thumbs up reactions.
@ -199,4 +199,5 @@ export const declaration: PluginDeclaration = deepFreeze({
  edgePrefix: E.Prefix.base,
  nodeTypes: nodeTypes,
  edgeTypes: edgeTypes,
  userTypes: [userNodeType],
});