Refactor TimelineCredScores data type (#1804)
This commit refactors the TimelineCredScores data type so that it is an array-of-objects rather than an object-of-arrays. I want to add several more fields (for forward cred flow, backward cred flow, seed flow, and synthetic loop flow), and I feel it will be a lot cleaner with an array-of-objects. This is a refactor of a local data type, and there's test coverage, so the likelihood of regression is very low.

Test plan: updated tests; `yarn test` passes.
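For context, here is a sketch of the shape change, based on the type definitions and test values in the diff below. The `Old`/`New` prefixes, the `example` binding, and the commented-out `forwardFlow` field are illustrative only (the last hints at the kind of field this refactor is meant to make easy to add); none of them are part of the commit.

```js
// Before: object-of-arrays. Two parallel arrays that must stay the same
// length and in the same interval order.
type OldTimelineCredScores = {|
  +intervals: $ReadOnlyArray<Interval>,
  +intervalCredScores: $ReadOnlyArray<NodeOrderedCredScores>,
|};

// After: array-of-objects. One record per interval, so the "same length"
// invariant disappears and the type no longer needs to be opaque.
type IntervalCred = {|
  +interval: Interval,
  +cred: NodeOrderedCredScores,
  // A future field (e.g. forward cred flow) becomes one more property here,
  // rather than yet another parallel array to keep in sync:
  // +forwardFlow: NodeOrderedCredScores,
|};
type NewTimelineCredScores = $ReadOnlyArray<IntervalCred>;

// A concrete value in the new shape (interval/cred values taken from the
// updated tests):
const example: NewTimelineCredScores = [
  {interval: {startTimeMs: 0, endTimeMs: 10}, cred: new Float64Array([1, 1])},
  {interval: {startTimeMs: 10, endTimeMs: 20}, cred: new Float64Array([9, 1])},
];
```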
parent 0449c9ea37
commit 0f6a765569
@@ -209,12 +209,12 @@ export class TimelineCred {
     const addressToCred = new Map();
     for (let i = 0; i < nodeOrder.length; i++) {
       const addr = nodeOrder[i];
-      const addrCred = credScores.intervalCredScores.map((cred) => cred[i]);
+      const addrCred = credScores.map(({cred}) => cred[i]);
       addressToCred.set(addr, addrCred);
     }
     return new TimelineCred(
       weightedGraph,
-      credScores.intervals,
+      credScores.map((x) => x.interval),
       addressToCred,
       fullParams,
       plugins
@@ -15,25 +15,17 @@ export opaque type NodeOrderedCredScores: Float64Array = Float64Array;
 /**
  * Represents cred scores over time.
  *
- * It contains an array of intervals, which give timing information, and an
- * array of CredTimeSlices, which are Float64Arrays. Each CredTimeSlice
- * contains cred scores for an interval. The cred scores are included in
- * node-address-sorted order, and as such the CredScores can only be
- * interpreted in the context of an associated Graph.
- *
- * As invariants, it is guaranteed that:
- * - intervals and intervalCredScores will always have the same length
- * - all of the intervalCredScores will have a consistent implicit node ordering
- *
- * The type is marked opaque so that no-one else can construct instances that
- * don't conform to these invariants.
+ * The TimelineCredScores consists of a time-ordered array of IntervalCreds.
+ * Each IntervalCred contains the interval information, as well as the raw
+ * cred score for every node in the graph. The cred is stored as a Float64Array,
+ * with scores corresponding to nodes by the node's index in the Graph's
+ * canonical address-sorted node ordering.
  */
-export opaque type TimelineCredScores: {|
-  +intervals: $ReadOnlyArray<Interval>,
-  +intervalCredScores: $ReadOnlyArray<NodeOrderedCredScores>,
-|} = {|
-  +intervals: $ReadOnlyArray<Interval>,
-  +intervalCredScores: $ReadOnlyArray<NodeOrderedCredScores>,
+export type TimelineCredScores = $ReadOnlyArray<IntervalCred>;
+
+export type IntervalCred = {|
+  +interval: Interval,
+  +cred: NodeOrderedCredScores,
 |};

 /**
@@ -59,9 +51,6 @@ export function distributionToCred(
   nodeOrder: $ReadOnlyArray<NodeAddressT>,
   scoringNodePrefixes: $ReadOnlyArray<NodeAddressT>
 ): TimelineCredScores {
-  if (ds.length === 0) {
-    return {intervals: [], intervalCredScores: []};
-  }
   const scoringNodeIndices = [];
   for (let i = 0; i < nodeOrder.length; i++) {
     const addr = nodeOrder[i];
@@ -69,8 +58,7 @@ export function distributionToCred(
       scoringNodeIndices.push(i);
     }
   }
-  const intervals = ds.map((x) => x.interval);
-  const intervalCredScores = ds.map(({distribution, intervalWeight}) => {
+  return ds.map(({interval, distribution, intervalWeight}) => {
     const intervalTotalScore = sum(
       scoringNodeIndices.map((x) => distribution[x])
     );
@@ -78,32 +66,33 @@ export function distributionToCred(
     const intervalNormalizer =
       intervalTotalScore === 0 ? 0 : intervalWeight / intervalTotalScore;
     const cred = distribution.map((x) => x * intervalNormalizer);
-    return cred;
+    return {interval, cred};
   });
-  return {intervalCredScores, intervals};
 }

-const COMPAT_INFO = {type: "sourcecred/timelineCredScores", version: "0.1.0"};
+const COMPAT_INFO = {type: "sourcecred/timelineCredScores", version: "0.2.0"};

-export type TimelineCredScoresJSON = Compatible<{|
-  +intervals: $ReadOnlyArray<Interval>,
-  // TODO: Serializing floats as strings is space-inefficient. We can likely
-  // get space savings if we base64 encode a byte representation of the
-  // floats.
-  +intervalCredScores: $ReadOnlyArray<$ReadOnlyArray<number>>,
-|}>;
+export type TimelineCredScoresJSON = Compatible<
+  $ReadOnlyArray<{|
+    +interval: Interval,
+    // TODO: Serializing floats as strings is space-inefficient. We can likely
+    // get space savings if we base64 encode a byte representation of the
+    // floats.
+    +cred: $ReadOnlyArray<number>,
+  |}>
+>;

 export function toJSON(s: TimelineCredScores): TimelineCredScoresJSON {
-  return toCompat(COMPAT_INFO, {
-    intervals: s.intervals,
-    intervalCredScores: s.intervalCredScores.map((x) => Array.from(x)),
-  });
+  return toCompat(
+    COMPAT_INFO,
+    s.map(({interval, cred}) => ({interval, cred: Array.from(cred)}))
+  );
 }

 export function fromJSON(j: TimelineCredScoresJSON): TimelineCredScores {
-  const {intervals, intervalCredScores} = fromCompat(COMPAT_INFO, j);
-  return {
-    intervals,
-    intervalCredScores: intervalCredScores.map((x) => new Float64Array(x)),
-  };
+  const scoreArray = fromCompat(COMPAT_INFO, j);
+  return scoreArray.map(({cred, interval}) => ({
+    cred: new Float64Array(cred),
+    interval,
+  }));
 }
@@ -21,16 +21,16 @@ describe("src/core/algorithm/distributionToCred", () => {
       ];
       const nodeOrder = [na("foo"), na("bar")];
       const actual = distributionToCred(ds, nodeOrder, [NodeAddress.empty]);
-      const expected = {
-        intervals: [
-          {startTimeMs: 0, endTimeMs: 10},
-          {startTimeMs: 10, endTimeMs: 20},
-        ],
-        intervalCredScores: [
-          new Float64Array([1, 1]),
-          new Float64Array([9, 1]),
-        ],
-      };
+      const expected = [
+        {
+          interval: {startTimeMs: 0, endTimeMs: 10},
+          cred: new Float64Array([1, 1]),
+        },
+        {
+          interval: {startTimeMs: 10, endTimeMs: 20},
+          cred: new Float64Array([9, 1]),
+        },
+      ];
       expect(expected).toEqual(actual);
     });
     it("correctly handles multiple scoring prefixes", () => {
@@ -48,16 +48,16 @@ describe("src/core/algorithm/distributionToCred", () => {
       ];
       const nodeOrder = [na("foo"), na("bar")];
       const actual = distributionToCred(ds, nodeOrder, [na("foo"), na("bar")]);
-      const expected = {
-        intervals: [
-          {startTimeMs: 0, endTimeMs: 10},
-          {startTimeMs: 10, endTimeMs: 20},
-        ],
-        intervalCredScores: [
-          new Float64Array([1, 1]),
-          new Float64Array([9, 1]),
-        ],
-      };
+      const expected = [
+        {
+          interval: {startTimeMs: 0, endTimeMs: 10},
+          cred: new Float64Array([1, 1]),
+        },
+        {
+          interval: {startTimeMs: 10, endTimeMs: 20},
+          cred: new Float64Array([9, 1]),
+        },
+      ];
       expect(expected).toEqual(actual);
     });
     it("works in a case where some nodes are scoring", () => {
@@ -75,16 +75,16 @@ describe("src/core/algorithm/distributionToCred", () => {
       ];
       const nodeOrder = [na("foo"), na("bar")];
       const actual = distributionToCred(ds, nodeOrder, [na("bar")]);
-      const expected = {
-        intervals: [
-          {startTimeMs: 0, endTimeMs: 10},
-          {startTimeMs: 10, endTimeMs: 20},
-        ],
-        intervalCredScores: [
-          new Float64Array([2, 2]),
-          new Float64Array([90, 10]),
-        ],
-      };
+      const expected = [
+        {
+          interval: {startTimeMs: 0, endTimeMs: 10},
+          cred: new Float64Array([2, 2]),
+        },
+        {
+          interval: {startTimeMs: 10, endTimeMs: 20},
+          cred: new Float64Array([90, 10]),
+        },
+      ];
       expect(expected).toEqual(actual);
     });
     it("handles the case where no nodes are scoring", () => {
@@ -97,10 +97,12 @@ describe("src/core/algorithm/distributionToCred", () => {
       ];
       const nodeOrder = [na("foo"), na("bar")];
       const actual = distributionToCred(ds, nodeOrder, []);
-      const expected = {
-        intervals: [{startTimeMs: 0, endTimeMs: 10}],
-        intervalCredScores: [new Float64Array([0, 0])],
-      };
+      const expected = [
+        {
+          interval: {startTimeMs: 0, endTimeMs: 10},
+          cred: new Float64Array([0, 0]),
+        },
+      ];
       expect(actual).toEqual(expected);
     });

@@ -114,18 +116,17 @@ describe("src/core/algorithm/distributionToCred", () => {
       ];
       const nodeOrder = [na("foo"), na("bar")];
       const actual = distributionToCred(ds, nodeOrder, [na("bar")]);
-      const expected = {
-        intervals: [{startTimeMs: 0, endTimeMs: 10}],
-        intervalCredScores: [new Float64Array([0, 0])],
-      };
+      const expected = [
+        {
+          interval: {startTimeMs: 0, endTimeMs: 10},
+          cred: new Float64Array([0, 0]),
+        },
+      ];
       expect(actual).toEqual(expected);
     });

     it("returns empty CredScores if no intervals are present", () => {
-      expect(distributionToCred([], [], [])).toEqual({
-        intervals: [],
-        intervalCredScores: [],
-      });
+      expect(distributionToCred([], [], [])).toEqual([]);
     });
   });
   describe("to/from JSON", () => {
@@ -155,30 +156,30 @@ describe("src/core/algorithm/distributionToCred", () => {
         Array [
           Object {
             "type": "sourcecred/timelineCredScores",
-            "version": "0.1.0",
+            "version": "0.2.0",
           },
-          Object {
-            "intervalCredScores": Array [
-              Array [
+          Array [
+            Object {
+              "cred": Array [
                 2,
                 2,
               ],
-              Array [
-                90,
-                10,
-              ],
-            ],
-            "intervals": Array [
-              Object {
+              "interval": Object {
                 "endTimeMs": 10,
                 "startTimeMs": 0,
               },
-              Object {
+            },
+            Object {
+              "cred": Array [
+                90,
+                10,
+              ],
+              "interval": Object {
                 "endTimeMs": 20,
                 "startTimeMs": 10,
               },
-            ],
-          },
+            },
+          ],
         ]
       `);
     });