refactor: describe edge weights consistently
This commit resolves an inconsistency where edge weights were called `toWeight` and `froWeight` in the core/attribution module, but `forwards` and `backwards` in the analysis module. Everything now uses `forwards` and `backwards`. Because this changes field names in the PagerankGraph JSON, its compat version is bumped.

Test plan: `yarn test --full` passes.
parent e459a82fae
commit 302850202a
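For orientation, here is a minimal sketch of the rename from a caller's point of view (illustrative only, not part of the diff; the evaluator shape and field meanings are taken from the `EdgeWeight` changes below, and the `oldEvaluator`/`newEvaluator` names are hypothetical):

    // Before this commit: the core/attribution spelling of an EdgeWeight.
    const oldEvaluator = (_unused_edge) => ({toWeight: 1, froWeight: 2});
    // After this commit: the analysis-module spelling, used consistently everywhere.
    const newEvaluator = (_unused_edge) => ({forwards: 1, backwards: 2});
    // `forwards` is the weight from src to dst; `backwards` is the weight from dst back to src.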
@@ -132,7 +132,7 @@ describe("analysis/pagerankNodeDecomposition", () => {
 .addEdge(e2)
 .addEdge(e3)
 .addEdge(e4);
-const edgeWeight = () => ({toWeight: 6.0, froWeight: 3.0});
+const edgeWeight = () => ({forwards: 6.0, backwards: 3.0});
 const connections = createConnections(g, edgeWeight, 1.0);
 const osmc = createOrderedSparseMarkovChain(connections);
 const params: PagerankParams = {
@@ -158,7 +158,7 @@ describe("analysis/pagerankNodeDecomposition", () => {

 it("is valid on the example graph", async () => {
 const g = advancedGraph().graph1();
-const edgeWeight = () => ({toWeight: 6.0, froWeight: 3.0});
+const edgeWeight = () => ({forwards: 6.0, backwards: 3.0});
 const connections = createConnections(g, edgeWeight, 1.0);
 const osmc = createOrderedSparseMarkovChain(connections);
 const params: PagerankParams = {

@@ -11,10 +11,10 @@ import {NodeTrie, EdgeTrie} from "../core/trie";
 * Given the weight choices and the node and edge types, produces an edge
 * evaluator.
 *
-* The edge evaluator will give a toWeight and froWeight to every edge in the
-* graph according to the provided weights. When multiple weights apply (e.g. a
-* nodeType weight, an edgeType weight, and a manual nodeWeight all affecting
-* the same edge), they are composed multiplicatively.
+* The edge evaluator will give a forwards and backwards weight to every edge
+* in the graph according to the provided weights. When multiple weights apply
+* (e.g. a nodeType weight, an edgeType weight, and a manual nodeWeight all
+* affecting the same edge), they are composed multiplicatively.
 *
 * When multiple node or edge types may match a given node or edge, only
 * weights for the most specific type are considered (i.e. the type with the
@@ -54,8 +54,8 @@ export function weightsToEdgeEvaluator(
 });
 const {forwards, backwards} = edgeWeight;
 return {
-toWeight: dstWeight * forwards,
-froWeight: srcWeight * backwards,
+forwards: dstWeight * forwards,
+backwards: srcWeight * backwards,
 };
 };
 }

@@ -47,13 +47,13 @@ describe("analysis/weightsToEdgeEvaluator", () => {
 }

 it("applies default weights when none are specified", () => {
-expect(evaluateEdge(defaultWeights())).toEqual({toWeight: 1, froWeight: 2});
+expect(evaluateEdge(defaultWeights())).toEqual({forwards: 1, backwards: 2});
 });

 it("only matches the most specific node types", () => {
 const weights = defaultWeights();
 weights.nodeTypeWeights.set(NodeAddress.empty, 99);
-expect(evaluateEdge(weights)).toEqual({toWeight: 99, froWeight: 2});
+expect(evaluateEdge(weights)).toEqual({forwards: 99, backwards: 2});
 });

 it("takes manually specified edge type weights into account", () => {
@@ -64,13 +64,13 @@ describe("analysis/weightsToEdgeEvaluator", () => {
 forwards: 6,
 backwards: 12,
 });
-expect(evaluateEdge(weights)).toEqual({toWeight: 6, froWeight: 24});
+expect(evaluateEdge(weights)).toEqual({forwards: 6, backwards: 24});
 });

 it("takes manually specified per-node weights into account", () => {
 const weights = defaultWeights();
 weights.nodeManualWeights.set(src, 10);
-expect(evaluateEdge(weights)).toEqual({toWeight: 1, froWeight: 20});
+expect(evaluateEdge(weights)).toEqual({forwards: 1, backwards: 20});
 });

 it("uses 1 as a default weight for unmatched nodes and edges", () => {
@@ -78,7 +78,7 @@ describe("analysis/weightsToEdgeEvaluator", () => {
 nodeTypes: [],
 edgeTypes: [],
 });
-expect(evaluator(edge)).toEqual({toWeight: 1, froWeight: 1});
+expect(evaluator(edge)).toEqual({forwards: 1, backwards: 1});
 });

 it("ignores extra weights if they do not apply", () => {

@@ -131,7 +131,7 @@ describe("cli/pagerank", () => {
 const graphResult = () => ({status: "SUCCESS", graph: graph()});
 const loader = (_unused_repoId) =>
 new Promise((resolve) => resolve(graphResult()));
-const evaluator = (_unused_edge) => ({toWeight: 1, froWeight: 1});
+const evaluator = (_unused_edge) => ({forwards: 1, backwards: 1});
 const pagerankGraph = () => new PagerankGraph(graph(), evaluator, 0.001);
 const mockPagerankRunner = (_unused_graph) =>
 new Promise((resolve) => resolve(pagerankGraph()));
@@ -170,7 +170,7 @@ describe("cli/pagerank", () => {
 describe("savePagerankGraph", () => {
 it("saves the PagerankGraphJSON to the right filepath", async () => {
 const graph = new Graph().addNode(node("n"));
-const evaluator = (_unused_edge) => ({toWeight: 1, froWeight: 2});
+const evaluator = (_unused_edge) => ({forwards: 1, backwards: 2});
 const prg = new PagerankGraph(graph, evaluator);
 const dirname = tmp.dirSync().name;
 const repoId = makeRepoId("foo", "bar");
@@ -242,8 +242,8 @@ describe("cli/pagerank", () => {
 const prg = new PagerankGraph(
 new Graph().addNode(node("n")),
 (_unused_edge) => ({
-toWeight: 1,
-froWeight: 2,
+forwards: 1,
+backwards: 2,
 })
 );
 await defaultSaver(repoId, prg);

@@ -4,14 +4,19 @@ exports[`core/pagerankGraph to/from JSON matches expected snapshot 1`] = `
 Array [
 Object {
 "type": "sourcecred/pagerankGraph",
-"version": "0.1.0",
+"version": "0.2.0",
 },
 Object {
-"froWeights": Array [
+"backwardsWeights": Array [
 0,
 0,
 0,
 ],
+"forwardsWeights": Array [
+1,
+1,
+1,
+],
 "graphJSON": Array [
 Object {
 "type": "sourcecred/graph",
@@ -119,11 +124,6 @@ Array [
 0.2,
 ],
 "syntheticLoopWeight": 0.001,
-"toWeights": Array [
-1,
-1,
-1,
-],
 },
 ]
 `;

@@ -44,11 +44,9 @@ export type OrderedSparseMarkovChain = {|
 +chain: SparseMarkovChain,
 |};

-// TODO(@decentralion): Rename these fields to `forwards` and `backwards` to
-// deduplicate with the EdgeWeight type defined by analysis/weights
 export type EdgeWeight = {|
-+toWeight: number, // weight from src to dst
-+froWeight: number, // weight from dst to src
++forwards: number, // weight from src to dst
++backwards: number, // weight from dst to src
 |};

 export function createConnections(
@@ -83,15 +81,15 @@ export function createConnections(

 // Process edges.
 for (const edge of graph.edges({showDangling: false})) {
-const {toWeight, froWeight} = edgeWeight(edge);
+const {forwards, backwards} = edgeWeight(edge);
 const {src, dst} = edge;
 processConnection(dst, {
 adjacency: {type: "IN_EDGE", edge},
-weight: toWeight,
+weight: forwards,
 });
 processConnection(src, {
 adjacency: {type: "OUT_EDGE", edge},
-weight: froWeight,
+weight: backwards,
 });
 }

@@ -94,7 +94,7 @@ describe("core/attribution/graphToMarkovChain", () => {
 .addEdge(e2)
 .addEdge(e3)
 .addEdge(e4);
-const edgeWeight = () => ({toWeight: 6.0, froWeight: 3.0});
+const edgeWeight = () => ({forwards: 6.0, backwards: 3.0});
 const actual = createConnections(g, edgeWeight, 1.0);
 // Total out-weights (for normalization factors):
 // - for `n1`: 2 out, 0 in, 1 synthetic: 12 + 0 + 1 = 13
@@ -179,7 +179,7 @@ describe("core/attribution/graphToMarkovChain", () => {
 .addEdge(e2)
 .addEdge(e3)
 .addEdge(e4);
-const edgeWeight = () => ({toWeight: 1, froWeight: 0});
+const edgeWeight = () => ({forwards: 1, backwards: 0});
 const osmc = createOrderedSparseMarkovChain(
 createConnections(g, edgeWeight, 0.0)
 );
@@ -214,7 +214,7 @@ describe("core/attribution/graphToMarkovChain", () => {
 .addEdge(e1)
 .addEdge(e2)
 .addEdge(e3);
-const edgeWeight = () => ({toWeight: 1, froWeight: 1});
+const edgeWeight = () => ({forwards: 1, backwards: 1});
 const osmc = createOrderedSparseMarkovChain(
 createConnections(g, edgeWeight, 0.0)
 );
@@ -245,7 +245,7 @@ describe("core/attribution/graphToMarkovChain", () => {
 function edgeWeight() {
 // These values are technically arbitrary, but make the
 // arithmetic simple.
-return {toWeight: 4 - epsilon / 2, froWeight: 1 - epsilon / 2};
+return {forwards: 4 - epsilon / 2, backwards: 1 - epsilon / 2};
 }
 const osmc = createOrderedSparseMarkovChain(
 createConnections(g, edgeWeight, epsilon)
@@ -299,7 +299,7 @@ describe("core/attribution/graphToMarkovChain", () => {
 function graphToOrder(g) {
 const connections = createConnections(
 g,
-(_) => ({toWeight: 1, froWeight: 1}),
+(_) => ({forwards: 1, backwards: 1}),
 0.01
 );
 const osmc = createOrderedSparseMarkovChain(connections);

@@ -65,12 +65,12 @@ export opaque type PagerankGraphJSON = Compatible<{|
 // Score for every node, ordered by the sorted node address.
 +scores: $ReadOnlyArray<number>,
 // Weights for every edge, ordered by sorted edge address.
-// We could save the EdgeWeights directly rather than having separate
-// arrays for toWeights and froWeights, but this would lead to an inflated
-// JSON representation because we would be needlessly duplicating the keys
-// "toWeight" and "froWeight" themselves.
-+toWeights: $ReadOnlyArray<number>,
-+froWeights: $ReadOnlyArray<number>,
+// We could save the EdgeWeights directly rather than having separate arrays
+// for forwardsWeights and backwardsWeights, but this would lead to an
+// inflated JSON representation because we would be needlessly duplicating
+// the keys "forwards" and "backwards" themselves.
++forwardsWeights: $ReadOnlyArray<number>,
++backwardsWeights: $ReadOnlyArray<number>,
 +syntheticLoopWeight: number,
 |}>;

@@ -132,7 +132,7 @@ function defaultOptions(): PagerankOptions {
 };
 }

-const COMPAT_INFO = {type: "sourcecred/pagerankGraph", version: "0.1.0"};
+const COMPAT_INFO = {type: "sourcecred/pagerankGraph", version: "0.2.0"};

 /**
 * PagerankGraph is a wrapper over the Graph class, which adds
@@ -149,9 +149,9 @@ const COMPAT_INFO = {type: "sourcecred/pagerankGraph", version: "0.1.0"};
 * [PageRank]: https://en.wikipedia.org/wiki/PageRank
 *
 * Every edge in the Graph is assigned an `EdgeWeight`, which includes a
-* `toWeight` (weight from the `src` to the `dst`) and a `froWeight`
-* (weight from the `dst` back to the `src`). Both `toWeight` and
-* `froWeight` must be nonnegative numbers. The weights influence how
+* `forwards` (weight from the `src` to the `dst`) and a `backwards`
+* (weight from the `dst` back to the `src`). Both `forwards` and
+* `backwards` must be nonnegative numbers. The weights influence how
 * score flows from node to node. For example, if the node `root` is
 * connected to `a` with a weight of `1` and to `b` with a weight of `2`,
 * then `b` will recieve twice as much score from `root` as `a` does.
@@ -242,8 +242,8 @@ export class PagerankGraph {
 for (const edge of this._graph.edges({showDangling: false})) {
 const weights = edgeEvaluator(edge);
 this._edgeWeights.set(edge.address, weights);
-addOutWeight(edge.src, weights.toWeight);
-addOutWeight(edge.dst, weights.froWeight);
+addOutWeight(edge.src, weights.forwards);
+addOutWeight(edge.dst, weights.backwards);
 }
 return this;
 }
@@ -422,10 +422,10 @@ export class PagerankGraph {
 // based on whether it was an IN-edge or OUT-edge or loop.
 let relevantEdgeWeight = 0;
 if (edge.src === target) {
-relevantEdgeWeight += weightedEdge.weight.froWeight;
+relevantEdgeWeight += weightedEdge.weight.backwards;
 }
 if (edge.dst === target) {
-relevantEdgeWeight += weightedEdge.weight.toWeight;
+relevantEdgeWeight += weightedEdge.weight.forwards;
 }
 // We normalize this edge weight by the total outWeight for `node`.
 const normalizedEdgeWeight =
@@ -568,14 +568,14 @@ export class PagerankGraph {
 const scores = Array.from(this.nodes()).map((x) => x.score);

 const edgeWeights = Array.from(this.edges()).map((x) => x.weight);
-const toWeights: number[] = edgeWeights.map((x) => x.toWeight);
-const froWeights: number[] = edgeWeights.map((x) => x.froWeight);
+const forwardsWeights: number[] = edgeWeights.map((x) => x.forwards);
+const backwardsWeights: number[] = edgeWeights.map((x) => x.backwards);

 const rawJSON = {
 graphJSON,
 scores,
-toWeights,
-froWeights,
+forwardsWeights,
+backwardsWeights,
 syntheticLoopWeight: this.syntheticLoopWeight(),
 };

@@ -584,8 +584,8 @@ export class PagerankGraph {

 static fromJSON(json: PagerankGraphJSON): PagerankGraph {
 const {
-toWeights,
-froWeights,
+forwardsWeights,
+backwardsWeights,
 scores,
 graphJSON,
 syntheticLoopWeight,
@@ -603,9 +603,9 @@ export class PagerankGraph {
 );
 const edgeWeights: Map<EdgeAddressT, EdgeWeight> = new Map();
 for (let i = 0; i < edgeAddresses.length; i++) {
-const toWeight = toWeights[i];
-const froWeight = froWeights[i];
-edgeWeights.set(edgeAddresses[i], {toWeight, froWeight});
+const forwards = forwardsWeights[i];
+const backwards = backwardsWeights[i];
+edgeWeights.set(edgeAddresses[i], {forwards, backwards});
 }

 function evaluator(e: Edge): EdgeWeight {

@@ -22,7 +22,7 @@ import {advancedGraph, node, partsNode, partsEdge} from "./graphTestUtil";
 import * as NullUtil from "../util/null";

 describe("core/pagerankGraph", () => {
-const defaultEvaluator = (_unused_edge) => ({toWeight: 1, froWeight: 0});
+const defaultEvaluator = (_unused_edge) => ({forwards: 1, backwards: 0});
 const nonEmptyGraph = () => new Graph().addNode(node("hi"));

 function examplePagerankGraph(
@@ -58,7 +58,7 @@ describe("core/pagerankGraph", () => {
 it("graphs with changed edge weights are not equal", () => {
 const e1 = examplePagerankGraph();
 const e2 = examplePagerankGraph();
-e2.setEdgeEvaluator(() => ({toWeight: 3, froWeight: 9}));
+e2.setEdgeEvaluator(() => ({forwards: 3, backwards: 9}));
 expect(e1.equals(e2)).toBe(false);
 });
 it("graphs are distinct but with identical scores if evaluators are the same modulo multiplication", async () => {
@@ -68,8 +68,8 @@ describe("core/pagerankGraph", () => {
 // are different by a scalar multiple of 3.
 // So we know the scores should all turn out the same, but the graphs will be different,
 // because the edge weights are nominally distinct.
-const e1 = examplePagerankGraph(() => ({toWeight: 3, froWeight: 6}));
-const e2 = examplePagerankGraph(() => ({toWeight: 1, froWeight: 2}));
+const e1 = examplePagerankGraph(() => ({forwards: 3, backwards: 6}));
+const e2 = examplePagerankGraph(() => ({forwards: 1, backwards: 2}));
 expect(e1.equals(e2)).toBe(false);
 await e1.runPagerank();
 await e2.runPagerank();
@@ -178,8 +178,8 @@ describe("core/pagerankGraph", () => {
 it("edge/edges both correctly return the edge weights", () => {
 const edgeEvaluator = ({address, src, dst}) => {
 return {
-toWeight: address.length + src.length,
-froWeight: address.length + dst.length,
+forwards: address.length + src.length,
+backwards: address.length + dst.length,
 };
 };
 const g = advancedGraph().graph1();
@@ -239,8 +239,8 @@ describe("core/pagerankGraph", () => {
 outWeight.set(node, newWeight);
 };
 for (const {edge, weight} of pg.edges()) {
-addOutWeight(edge.src, weight.toWeight);
-addOutWeight(edge.dst, weight.froWeight);
+addOutWeight(edge.src, weight.forwards);
+addOutWeight(edge.dst, weight.backwards);
 }
 for (const node of pg.graph().nodes()) {
 expect(pg.totalOutWeight(node.address)).toEqual(
@@ -249,12 +249,12 @@ describe("core/pagerankGraph", () => {
 }
 }
 it("computes outWeight correctly on the example graph", () => {
-const edgeEvaluator = (_unused_edge) => ({toWeight: 1, froWeight: 2});
+const edgeEvaluator = (_unused_edge) => ({forwards: 1, backwards: 2});
 const eg = examplePagerankGraph(edgeEvaluator);
 verifyOutWeights(eg);
 });
 it("outWeight is always the syntheticLoopWeight when edges have no weight", () => {
-const zeroEvaluator = (_unused_edge) => ({toWeight: 0, froWeight: 0});
+const zeroEvaluator = (_unused_edge) => ({forwards: 0, backwards: 0});
 const syntheticLoopWeight = 0.1337;
 const pg = new PagerankGraph(
 advancedGraph().graph1(),
@@ -296,8 +296,8 @@ describe("core/pagerankGraph", () => {
 function expectConsistentEdges(options: PagerankGraphEdgesOptions | void) {
 const pagerankGraphEdges = Array.from(pagerankGraph().edges(options));
 pagerankGraphEdges.forEach((e) => {
-expect(e.weight.froWeight).toBe(0);
-expect(e.weight.toWeight).toBe(1);
+expect(e.weight.backwards).toBe(0);
+expect(e.weight.forwards).toBe(1);
 });
 const graphOptions: EdgesOptions =
 options == null
@@ -424,10 +424,10 @@ describe("core/pagerankGraph", () => {
 } of pg.neighbors(target, allNeighbors())) {
 let rawWeight = 0;
 if (weightedEdge.edge.dst === target) {
-rawWeight += weightedEdge.weight.toWeight;
+rawWeight += weightedEdge.weight.forwards;
 }
 if (weightedEdge.edge.src === target) {
-rawWeight += weightedEdge.weight.froWeight;
+rawWeight += weightedEdge.weight.backwards;
 }
 const normalizedWeight =
 rawWeight / pg.totalOutWeight(scoredNode.node.address);
@@ -448,7 +448,7 @@ describe("core/pagerankGraph", () => {
 });
 it("neighbors score contributions + synthetic score contribution == node score", async () => {
 // Note: I've verified that test fails if we don't properly handle loop
-// neighbors (need to add the edge toWeight and froWeight if the neighbor
+// neighbors (need to add the edge forwards and backwards if the neighbor
 // is a loop).
 const pg = await convergedPagerankGraph();
 for (const {node, score} of pg.nodes()) {
@@ -621,8 +621,8 @@ describe("core/pagerankGraph", () => {
 expect(pg1.equals(pg2)).toBe(false);
 });
 it("unequal edge weights => unequal", () => {
-const evaluator1 = (_unused_edge) => ({toWeight: 1, froWeight: 1});
-const evaluator2 = (_unused_edge) => ({toWeight: 0, froWeight: 1});
+const evaluator1 = (_unused_edge) => ({forwards: 1, backwards: 1});
+const evaluator2 = (_unused_edge) => ({forwards: 0, backwards: 1});
 const pg1 = new PagerankGraph(advancedGraph().graph1(), evaluator1);
 const pg2 = new PagerankGraph(advancedGraph().graph1(), evaluator2);
 expect(pg1.equals(pg2)).toBe(false);

@@ -13,8 +13,8 @@ export async function example() {
 const adapters = await dynamicExplorerAdapterSet();
 const graph = adapters.graph();
 const pnd = await pagerank(graph, (_unused_Edge) => ({
-toWeight: 1,
-froWeight: 1,
+forwards: 1,
+backwards: 1,
 }));
 const maxEntriesPerList = 123;
 const manualWeights: Map<NodeAddressT, number> = new Map();