From e01247a64264b1e55ce8112f6990e31aa16ded1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dandelion=20Man=C3=A9?= Date: Tue, 28 May 2019 18:03:44 +0300 Subject: [PATCH] Expose `createdAt` in `AnalysisAdapter` (#1157) * Refactor Loader from AnalysisAdapter At present, the only data the AnalysisAdapter provides is the Graph, so the AnalysisAdapter has a `load` method which directly returns the graph. I'm planning to add a `createdAt` getter to the adapter as well, which also will depend on loading the data. To make this change convenient, I'm starting by refactoring an AdapterLoader out, which manages loading data from disk, so that once we have an AnalysisAdapter, it already has all relevant data loaded. Then, it will be much easier to add a `createdAt` method. Test plan: Tests updated, flow passes. * Add `createdAt` to the analysis adapter A big step forward for time-varying cred. This will make `createdAt` timestamps available for PageRank analysis. Test plan: Added new unit tests. Inspect the snapshots. Run `yarn test`. --- src/analysis/analysisAdapter.js | 56 +++++++++++++- src/analysis/loadGraph.js | 9 ++- src/analysis/loadGraph.test.js | 71 +++++++++++++----- src/cli/exportGraph.js | 8 +- src/cli/pagerank.js | 14 ++-- src/plugins/demo/analysisAdapter.js | 32 +++++--- src/plugins/git/analysisAdapter.js | 73 +++++++++++++++--- src/plugins/git/analysisAdapter.test.js | 87 +++++++++++++++------- src/plugins/github/analysisAdapter.js | 41 ++++++++-- src/plugins/github/analysisAdapter.test.js | 87 ++++++++++++++++------ src/plugins/github/createdAt.js | 17 +++++ src/plugins/github/createdAt.test.js | 26 +++++++ src/plugins/odyssey/analysisAdapter.js | 32 ++++++-- 13 files changed, 434 insertions(+), 119 deletions(-) create mode 100644 src/plugins/github/createdAt.js create mode 100644 src/plugins/github/createdAt.test.js diff --git a/src/analysis/analysisAdapter.js b/src/analysis/analysisAdapter.js index 7639476..8edfcc3 100644 --- a/src/analysis/analysisAdapter.js +++ b/src/analysis/analysisAdapter.js @@ -1,10 +1,62 @@ // @flow +/** + * This module contains declarations for the AnalysisAdapter. + * + * In general, "Adapters" are general interfaces for plugins to provide + * information about SourceCred graphs. Adapters are scoped to a particular + * purpose. The AnalysisAdapter exists for the purpose of analyzing cred + * in a project. As such, the AnalysisAdapter provides the cred graph, + * a declaration of the types, and any other information needed to compute + * PageRank scores. + * + * In general, every plugin will provide an AnalysisAdapter, and the analysis + * data pipeline will aggregate results across all plugins' adapters. + * + * TODO(@decentralion): As the AnalysisAdapter evolves, consider whether it + * would make sense to simply move the data the AnalysisAdapter provides + * directly into the core Graph. Note that doing so would require considerable + * changes to the Graph APIs, including having Node be a rich data type rather + * than just an address, and allowing edges to Nodes which do not exist in the + * graph. Due to the complexity, such a refactor should not be undertaken + * lightly. + */ -import {Graph} from "../core/graph"; +import {Graph, type NodeAddressT} from "../core/graph"; import type {RepoId} from "../core/repoId"; import type {PluginDeclaration} from "./pluginDeclaration"; +/** + * Enables loading a plugin's AnalysisAdapter on the backend. + * + * Takes a RepoId and the path to the SourceCred directory, and provides an + * AnalysisAdapter for that plugin. 
Also provides the declaration for the + * plugin. + */ +export interface IBackendAdapterLoader { + declaration(): PluginDeclaration; + load(sourcecredDirectory: string, repoId: RepoId): Promise; +} + +export type MsSinceEpoch = number; +/** + * Provides data needed for cred analysis for an individual plugin. + * + * It's scoped to a particular RepoId (and plugin). + */ export interface IAnalysisAdapter { declaration(): PluginDeclaration; - load(sourcecredDirectory: string, repoId: RepoId): Promise; + graph(): Graph; + /** + * Provides a timestamp of when the node was created. + * + * The creation time is for the object the node represents, rather than the + * time the node was added to the graph. E.g. a commit authored in 2001 has a + * createdAt timestamp for a date in 2001. + * + * createdAt may be null if the node doesn't have a creation time available, + * or is "timeless". A "timeless" node is one that we want to treat as + * always existing for purposes of cred analysis. (E.g. we may want to + * consider user identities timeless.) + */ + createdAt(n: NodeAddressT): MsSinceEpoch | null; } diff --git a/src/analysis/loadGraph.js b/src/analysis/loadGraph.js index 730527c..38c0f16 100644 --- a/src/analysis/loadGraph.js +++ b/src/analysis/loadGraph.js @@ -5,7 +5,7 @@ import * as NullUtil from "../util/null"; import * as RepoIdRegistry from "../core/repoIdRegistry"; import {type RepoId} from "../core/repoId"; -import type {IAnalysisAdapter} from "./analysisAdapter"; +import type {IBackendAdapterLoader} from "./analysisAdapter"; /** * Module for loading a graph from a SOURCECRED_DIRECTORY. @@ -30,7 +30,7 @@ type GraphOrError = */ export async function loadGraph( sourcecredDirectory: string, - adapters: $ReadOnlyArray, + adapters: $ReadOnlyArray, repoId: RepoId ): Promise { const registry = RepoIdRegistry.getRegistry(sourcecredDirectory); @@ -38,13 +38,14 @@ export async function loadGraph( return {status: "REPO_NOT_LOADED"}; } async function graphForAdapter( - adapter: IAnalysisAdapter + adapter: IBackendAdapterLoader ): Promise { try { - const graph = await adapter.load( + const dynamicAdapter = await adapter.load( sourcecredDirectory, NullUtil.get(repoId) ); + const graph = dynamicAdapter.graph(); return {type: "GRAPH", graph}; } catch (e) { return {type: "ERROR", pluginName: adapter.declaration().name, error: e}; diff --git a/src/analysis/loadGraph.test.js b/src/analysis/loadGraph.test.js index dc8ea78..ec7dd9f 100644 --- a/src/analysis/loadGraph.test.js +++ b/src/analysis/loadGraph.test.js @@ -3,13 +3,29 @@ import tmp from "tmp"; import path from "path"; -import {Graph, NodeAddress, EdgeAddress} from "../core/graph"; -import type {IAnalysisAdapter} from "../analysis/analysisAdapter"; +import { + Graph, + type NodeAddressT, + NodeAddress, + EdgeAddress, +} from "../core/graph"; +import type { + IBackendAdapterLoader, + IAnalysisAdapter, +} from "../analysis/analysisAdapter"; import * as RepoIdRegistry from "../core/repoIdRegistry"; import {makeRepoId, type RepoId} from "../core/repoId"; import {loadGraph} from "./loadGraph"; -class MockAnalysisAdapter implements IAnalysisAdapter { +const declaration = (name) => ({ + name, + nodePrefix: NodeAddress.empty, + edgePrefix: EdgeAddress.empty, + nodeTypes: Object.freeze([]), + edgeTypes: Object.freeze([]), +}); + +class MockStaticAdapter implements IBackendAdapterLoader { _resolutionGraph: ?Graph; _name: string; @@ -24,27 +40,42 @@ class MockAnalysisAdapter implements IAnalysisAdapter { } declaration() { - return { - name: this._name, - 
nodePrefix: NodeAddress.empty, - edgePrefix: EdgeAddress.empty, - nodeTypes: [], - edgeTypes: [], - }; + return declaration(this._name); } async load( _unused_sourcecredDirectory: string, _unused_repoId: RepoId - ): Promise { + ): Promise { if (this._resolutionGraph != null) { - return this._resolutionGraph; + return new MockAdapter(this._name, this._resolutionGraph); } else { - throw new Error("MockAnalysisAdapterRejects"); + throw new Error("MockStaticAdapterRejects"); } } } +class MockAdapter implements IAnalysisAdapter { + _name: string; + _resolutionGraph: Graph; + constructor(name: string, resolutionGraph: Graph) { + this._name = name; + this._resolutionGraph = resolutionGraph; + } + repoId() { + return makeRepoId("foo", "bar"); + } + createdAt(_unused_node: NodeAddressT): number | null { + return null; + } + declaration() { + return declaration(this._name); + } + graph() { + return this._resolutionGraph; + } +} + describe("analysis/loadGraph", () => { function setUpRegistryWithId(repoId: RepoId) { const dirname = tmp.dirSync().name; @@ -60,7 +91,7 @@ describe("analysis/loadGraph", () => { const dirname = tmp.dirSync().name; const result = await loadGraph( dirname, - [new MockAnalysisAdapter("foo")], + [new MockStaticAdapter("foo")], makeRepoId("foo", "bar") ); expect(result).toEqual({status: "REPO_NOT_LOADED"}); @@ -69,7 +100,7 @@ describe("analysis/loadGraph", () => { const dirname = path.join(tmp.dirSync().name, "nonexistent"); const result = await loadGraph( dirname, - [new MockAnalysisAdapter("foo")], + [new MockStaticAdapter("foo")], makeRepoId("foo", "bar") ); expect(result).toEqual({status: "REPO_NOT_LOADED"}); @@ -78,7 +109,7 @@ describe("analysis/loadGraph", () => { const dirname = setUpRegistryWithId(makeRepoId("zod", "zoink")); const result = await loadGraph( dirname, - [new MockAnalysisAdapter("foo")], + [new MockStaticAdapter("foo")], makeRepoId("foo", "bar") ); expect(result).toEqual({status: "REPO_NOT_LOADED"}); @@ -86,8 +117,8 @@ describe("analysis/loadGraph", () => { it("returns status:SUCCESS with merged graph on success", async () => { const g1 = new Graph().addNode(NodeAddress.fromParts(["g1"])); const g2 = new Graph().addNode(NodeAddress.fromParts(["g2"])); - const m1 = new MockAnalysisAdapter("foo", g1); - const m2 = new MockAnalysisAdapter("bar", g2); + const m1 = new MockStaticAdapter("foo", g1); + const m2 = new MockStaticAdapter("bar", g2); const mergedGraph = Graph.merge([g1, g2]); const dir = setUpRegistryWithId(makeRepoId("foo", "bar")); const result = await loadGraph(dir, [m1, m2], makeRepoId("foo", "bar")); @@ -107,14 +138,14 @@ describe("analysis/loadGraph", () => { expect(result.graph.equals(new Graph())).toBe(true); }); it("returns a status:PLUGIN_FAILURE if the plugin errors", async () => { - const mockAdapter = new MockAnalysisAdapter("bar"); + const mockAdapter = new MockStaticAdapter("bar"); const repoId = makeRepoId("foo", "bar"); const dir = setUpRegistryWithId(repoId); const result = await loadGraph(dir, [mockAdapter], repoId); expect(result).toEqual({ status: "PLUGIN_FAILURE", pluginName: "bar", - error: new Error("MockAnalysisAdapterRejects"), + error: new Error("MockStaticAdapterRejects"), }); }); }); diff --git a/src/cli/exportGraph.js b/src/cli/exportGraph.js index dfbf828..b332564 100644 --- a/src/cli/exportGraph.js +++ b/src/cli/exportGraph.js @@ -8,8 +8,8 @@ import * as Common from "./common"; import stringify from "json-stable-stringify"; import {loadGraph, type LoadGraphResult} from "../analysis/loadGraph"; -import {AnalysisAdapter 
as GithubAnalysisAdapter} from "../plugins/github/analysisAdapter"; -import {AnalysisAdapter as GitAnalysisAdapter} from "../plugins/git/analysisAdapter"; +import {BackendAdapterLoader as GithubAdapterLoader} from "../plugins/github/analysisAdapter"; +import {BackendAdapterLoader as GitAdapterLoader} from "../plugins/git/analysisAdapter"; function usage(print: (string) => void): void { print( @@ -103,9 +103,9 @@ export function makeExportGraph( }; } -const defaultAdapters = [new GithubAnalysisAdapter(), new GitAnalysisAdapter()]; +const defaultLoaders = [new GithubAdapterLoader(), new GitAdapterLoader()]; const defaultLoadGraph = (r: RepoId) => - loadGraph(Common.sourcecredDirectory(), defaultAdapters, r); + loadGraph(Common.sourcecredDirectory(), defaultLoaders, r); export const exportGraph = makeExportGraph(defaultLoadGraph); export const help: Command = async (args, std) => { diff --git a/src/cli/pagerank.js b/src/cli/pagerank.js index b1b6932..64ecfd0 100644 --- a/src/cli/pagerank.js +++ b/src/cli/pagerank.js @@ -22,8 +22,8 @@ import {type Weights, defaultWeights} from "../analysis/weights"; import {type NodeAndEdgeTypes} from "../analysis/types"; import {combineTypes} from "../analysis/pluginDeclaration"; import {weightsToEdgeEvaluator} from "../analysis/weightsToEdgeEvaluator"; -import {AnalysisAdapter as GithubAnalysisAdapter} from "../plugins/github/analysisAdapter"; -import {AnalysisAdapter as GitAnalysisAdapter} from "../plugins/git/analysisAdapter"; +import {BackendAdapterLoader as GithubAdapterLoader} from "../plugins/github/analysisAdapter"; +import {BackendAdapterLoader as GitAdapterLoader} from "../plugins/git/analysisAdapter"; function usage(print: (string) => void): void { print( @@ -169,15 +169,15 @@ export async function savePagerankGraph( await fs.writeFile(pgFile, stringify(pgJSON)); } -export const defaultAdapters = () => [ - new GithubAnalysisAdapter(), - new GitAnalysisAdapter(), +export const defaultAdapterLoaders = () => [ + new GithubAdapterLoader(), + new GitAdapterLoader(), ]; -const declarations = () => defaultAdapters().map((x) => x.declaration()); +const declarations = () => defaultAdapterLoaders().map((x) => x.declaration()); const defaultLoader = (r: RepoId) => - loadGraph(Common.sourcecredDirectory(), defaultAdapters(), r); + loadGraph(Common.sourcecredDirectory(), defaultAdapterLoaders(), r); export const defaultPagerank = (g: Graph) => runPagerank(defaultWeights(), g, combineTypes(declarations())); export const defaultSaver = (r: RepoId, pg: PagerankGraph) => diff --git a/src/plugins/demo/analysisAdapter.js b/src/plugins/demo/analysisAdapter.js index d2930f9..13251e6 100644 --- a/src/plugins/demo/analysisAdapter.js +++ b/src/plugins/demo/analysisAdapter.js @@ -1,23 +1,37 @@ // @flow -import {Graph} from "../../core/graph"; +import {Graph, type NodeAddressT} from "../../core/graph"; import type {RepoId} from "../../core/repoId"; import type {PluginDeclaration} from "../../analysis/pluginDeclaration"; -import type {IAnalysisAdapter} from "../../analysis/analysisAdapter"; +import type { + IAnalysisAdapter, + IBackendAdapterLoader, +} from "../../analysis/analysisAdapter"; import {declaration} from "./declaration"; import {graph} from "./graph"; -export class AnalysisAdapter implements IAnalysisAdapter { - loadingMock: (sourcecredDirectory: string, repoId: RepoId) => Promise; +export class BackendAdapterLoader implements IBackendAdapterLoader { declaration(): PluginDeclaration { return declaration; } - load(sourcecredDirectory: string, repoId: RepoId): 
Promise { - if (this.loadingMock) { - return this.loadingMock(sourcecredDirectory, repoId).then(() => graph()); - } - return Promise.resolve(graph()); + load( + _unused_sourcecredDirectory: string, + _unused_repoId: RepoId + ): Promise { + return Promise.resolve(new AnalysisAdapter()); + } +} + +export class AnalysisAdapter implements IAnalysisAdapter { + declaration(): PluginDeclaration { + return declaration; + } + createdAt(_unused_node: NodeAddressT): number | null { + return null; + } + graph(): Graph { + return graph(); } } diff --git a/src/plugins/git/analysisAdapter.js b/src/plugins/git/analysisAdapter.js index da63f75..1464588 100644 --- a/src/plugins/git/analysisAdapter.js +++ b/src/plugins/git/analysisAdapter.js @@ -2,25 +2,78 @@ import fs from "fs-extra"; import path from "path"; -import {Graph} from "../../core/graph"; -import type {IAnalysisAdapter} from "../../analysis/analysisAdapter"; +import {Graph, type NodeAddressT} from "../../core/graph"; +import type { + IAnalysisAdapter, + IBackendAdapterLoader, + MsSinceEpoch, +} from "../../analysis/analysisAdapter"; import {type RepoId, repoIdToString} from "../../core/repoId"; import {declaration} from "./declaration"; +import {type Repository} from "./types"; +import {type StructuredAddress, fromRaw} from "./nodes"; -export class AnalysisAdapter implements IAnalysisAdapter { +export class BackendAdapterLoader implements IBackendAdapterLoader { declaration() { return declaration; } - async load(sourcecredDirectory: string, repoId: RepoId): Promise { - const file = path.join( + + async load( + sourcecredDirectory: string, + repoId: RepoId + ): Promise { + const dataDirectory = path.join( sourcecredDirectory, "data", repoIdToString(repoId), - "git", - "graph.json" + "git" ); - const rawData = await fs.readFile(file); - const json = JSON.parse(rawData.toString()); - return Graph.fromJSON(json); + async function loadJson(filename) { + const filepath = path.join(dataDirectory, filename); + const rawData = await fs.readFile(filepath); + return JSON.parse(rawData.toString()); + } + const [graphJson, repository] = await Promise.all([ + loadJson("graph.json"), + loadJson("repository.json"), + ]); + const graph = Graph.fromJSON(graphJson); + return new AnalysisAdapter(graph, repository); + } +} + +export class AnalysisAdapter implements IAnalysisAdapter { + _graph: Graph; + _repository: Repository; + constructor(graph: Graph, repository: Repository) { + this._graph = graph; + this._repository = repository; + } + declaration() { + return declaration; + } + graph(): Graph { + // Copy for safety, as the AnalysisAdapter is storing the graph + // directly in memory. + // TODO(perf): Consider removing this copy if this becomes a perf + // hotspot. If so, implement a do-not-modify flag and set it (for safety) + return this._graph.copy(); + } + createdAt(n: NodeAddressT): MsSinceEpoch { + // Coerce the NodeAddressT into a Git plugin 'RawAddress'. + // If this coercion is false (i.e. the AnalysisAdapter was passed a non-Git NodeAddress) + // then this will throw a runtime error. 
+ const addr: StructuredAddress = fromRaw((n: any)); + switch (addr.type) { + case "COMMIT": + const hash = addr.hash; + const commit = this._repository.commits[hash]; + if (commit == null) { + throw new Error(`Can't find commit for hash: ${hash}`); + } + return commit.createdAt; + default: + throw new Error(`Unexpected type: ${(addr.type: empty)}`); + } } } diff --git a/src/plugins/git/analysisAdapter.test.js b/src/plugins/git/analysisAdapter.test.js index 4d95773..c4fa06c 100644 --- a/src/plugins/git/analysisAdapter.test.js +++ b/src/plugins/git/analysisAdapter.test.js @@ -2,38 +2,69 @@ import fs from "fs-extra"; import path from "path"; -import {AnalysisAdapter} from "./analysisAdapter"; +import {BackendAdapterLoader} from "./analysisAdapter"; import {stringToRepoId} from "../../core/repoId"; import {declaration} from "./declaration"; -import {Graph} from "../../core/graph"; +import {Graph, NodeAddress} from "../../core/graph"; +import {toRaw} from "./nodes"; describe("plugins/git/analysisAdapter", () => { - it("provides the declaration", () => { - const aa = new AnalysisAdapter(); - expect(aa.declaration()).toEqual(declaration); + const sourcecredDirectory = path.join( + "sharness", + "__snapshots__", + "example-github-load" + ); + it("the loader provides the declaration", () => { + const loader = new BackendAdapterLoader(); + expect(loader.declaration()).toEqual(declaration); }); - it("loads the Git graph", async () => { - const sourcecredDirectory = path.join( - "sharness", - "__snapshots__", - "example-github-load" - ); - const expectedPath = path.join( - sourcecredDirectory, - "data", - "sourcecred", - "example-github", - "git", - "graph.json" - ); - const expectedGraphBuffer: Buffer = await fs.readFile(expectedPath); - const expectedGraphJSON = JSON.parse(expectedGraphBuffer.toString()); - const expectedGraph = Graph.fromJSON(expectedGraphJSON); - const aa = new AnalysisAdapter(); - const actualGraph = await aa.load( - sourcecredDirectory, - stringToRepoId("sourcecred/example-github") - ); - expect(actualGraph.equals(expectedGraph)).toBe(true); + describe("can load an AnalysisAdapter which", () => { + const loadAnalysisAdapter = () => + new BackendAdapterLoader().load( + sourcecredDirectory, + stringToRepoId("sourcecred/example-github") + ); + it("loads the Git graph", async () => { + const graphPath = path.join( + sourcecredDirectory, + "data", + "sourcecred", + "example-github", + "git", + "graph.json" + ); + const expectedGraphBuffer: Buffer = await fs.readFile(graphPath); + const expectedGraphJSON = JSON.parse(expectedGraphBuffer.toString()); + const expectedGraph = Graph.fromJSON(expectedGraphJSON); + const aa = await loadAnalysisAdapter(); + const actualGraph = aa.graph(); + expect(actualGraph.equals(expectedGraph)).toBe(true); + }); + it("provides the declaration", async () => { + const aa = await loadAnalysisAdapter(); + expect(aa.declaration()).toEqual(declaration); + }); + describe("has a createdAt method which", () => { + it("provides createdAt times", async () => { + const aa = await loadAnalysisAdapter(); + const hash = "0a223346b4e6dec0127b1e6aa892c4ee0424b66a"; + const commitAddr = toRaw({type: "COMMIT", hash}); + const actualCreatedAt = aa.createdAt(commitAddr); + expect(actualCreatedAt).toEqual(1519807427000); + }); + it("throws an error for an absent commit hash", async () => { + const aa = await loadAnalysisAdapter(); + const commitAddr = toRaw({type: "COMMIT", hash: "1234"}); + expect(() => aa.createdAt(commitAddr)).toThrowError( + "Can't find commit" + ); + 
}); + it("throws an error for an invalid NodeAddress", async () => { + const aa = await loadAnalysisAdapter(); + expect(() => aa.createdAt(NodeAddress.empty)).toThrowError( + "Bad address" + ); + }); + }); }); }); diff --git a/src/plugins/github/analysisAdapter.js b/src/plugins/github/analysisAdapter.js index 5a43c58..f795e4a 100644 --- a/src/plugins/github/analysisAdapter.js +++ b/src/plugins/github/analysisAdapter.js @@ -3,18 +3,28 @@ import fs from "fs-extra"; import path from "path"; import pako from "pako"; +import stringify from "json-stable-stringify"; import {type RepoId, repoIdToString} from "../../core/repoId"; -import {Graph} from "../../core/graph"; -import type {IAnalysisAdapter} from "../../analysis/analysisAdapter"; +import type { + IAnalysisAdapter, + IBackendAdapterLoader, + MsSinceEpoch, +} from "../../analysis/analysisAdapter"; import {declaration} from "./declaration"; import {RelationalView} from "./relationalView"; import {createGraph} from "./createGraph"; +import {createdAt} from "./createdAt"; +import {fromRaw} from "./nodes"; +import {type NodeAddressT} from "../../core/graph"; -export class AnalysisAdapter implements IAnalysisAdapter { +export class BackendAdapterLoader implements IBackendAdapterLoader { declaration() { return declaration; } - async load(sourcecredDirectory: string, repoId: RepoId): Promise { + async load( + sourcecredDirectory: string, + repoId: RepoId + ): Promise { const file = path.join( sourcecredDirectory, "data", @@ -25,6 +35,27 @@ export class AnalysisAdapter implements IAnalysisAdapter { const compressedData = await fs.readFile(file); const json = JSON.parse(pako.ungzip(compressedData, {to: "string"})); const view = RelationalView.fromJSON(json); - return createGraph(view); + return new AnalysisAdapter(view); + } +} + +export class AnalysisAdapter implements IAnalysisAdapter { + _view: RelationalView; + constructor(view: RelationalView) { + this._view = view; + } + declaration() { + return declaration; + } + createdAt(n: NodeAddressT): MsSinceEpoch | null { + const addr = fromRaw((n: any)); + const entity = this._view.entity(addr); + if (entity == null) { + throw new Error(`No entity matching ${stringify(addr)}`); + } + return createdAt(entity); + } + graph() { + return createGraph(this._view); } } diff --git a/src/plugins/github/analysisAdapter.test.js b/src/plugins/github/analysisAdapter.test.js index c78121b..b43cb67 100644 --- a/src/plugins/github/analysisAdapter.test.js +++ b/src/plugins/github/analysisAdapter.test.js @@ -3,40 +3,81 @@ import fs from "fs-extra"; import path from "path"; import pako from "pako"; -import {AnalysisAdapter} from "./analysisAdapter"; +import {BackendAdapterLoader} from "./analysisAdapter"; import {stringToRepoId} from "../../core/repoId"; import {declaration} from "./declaration"; import {RelationalView} from "./relationalView"; import {createGraph} from "./createGraph"; +import {NodeAddress} from "../../core/graph"; +import {toRaw} from "./nodes"; describe("plugins/github/analysisAdapter", () => { - it("provides the declaration", () => { - const aa = new AnalysisAdapter(); - expect(aa.declaration()).toEqual(declaration); + it("the loader provides the declaration", () => { + const loader = new BackendAdapterLoader(); + expect(loader.declaration()).toEqual(declaration); }); - it("loads the GitHub graph", async () => { + describe("can load an AnalysisAdapter which", () => { const sourcecredDirectory = path.join( "sharness", "__snapshots__", "example-github-load" ); - const expectedPath = path.join( - 
sourcecredDirectory, - "data", - "sourcecred", - "example-github", - "github", - "view.json.gz" - ); - const blob = await fs.readFile(expectedPath); - const json = JSON.parse(pako.ungzip(blob, {to: "string"})); - const view = RelationalView.fromJSON(json); - const graph = createGraph(view); - const aa = new AnalysisAdapter(); - const actualGraph = await aa.load( - sourcecredDirectory, - stringToRepoId("sourcecred/example-github") - ); - expect(actualGraph.equals(graph)).toBe(true); + async function loadView() { + const expectedPath = path.join( + sourcecredDirectory, + "data", + "sourcecred", + "example-github", + "github", + "view.json.gz" + ); + const blob = await fs.readFile(expectedPath); + const json = JSON.parse(pako.ungzip(blob, {to: "string"})); + const view = RelationalView.fromJSON(json); + return view; + } + const loadAnalysisAdapter = () => + new BackendAdapterLoader().load( + sourcecredDirectory, + stringToRepoId("sourcecred/example-github") + ); + it("loads the GitHub graph", async () => { + const view = await loadView(); + const expectedGraph = createGraph(view); + const aa = await loadAnalysisAdapter(); + const actualGraph = aa.graph(); + expect(actualGraph.equals(expectedGraph)).toBe(true); + }); + it("provides the declaration", async () => { + const aa = await loadAnalysisAdapter(); + expect(aa.declaration()).toEqual(declaration); + }); + describe("has a createdAt method which", () => { + it("provides createdAt times", async () => { + const aa = await loadAnalysisAdapter(); + const addr = toRaw({ + type: "ISSUE", + repo: {type: "REPO", owner: "sourcecred", name: "example-github"}, + number: "1", + }); + const actualCreatedAt = aa.createdAt(addr); + expect(actualCreatedAt).toMatchInlineSnapshot(`1519807088000`); + }); + it("throws an error for an absent entity", async () => { + const aa = await loadAnalysisAdapter(); + const addr = toRaw({ + type: "ISSUE", + repo: {type: "REPO", owner: "sourcecred", name: "example-github"}, + number: "1001", + }); + expect(() => aa.createdAt(addr)).toThrowError("No entity matching"); + }); + it("throws an error for an invalid NodeAddress", async () => { + const aa = await loadAnalysisAdapter(); + expect(() => aa.createdAt(NodeAddress.empty)).toThrowError( + "Bad address" + ); + }); + }); }); }); diff --git a/src/plugins/github/createdAt.js b/src/plugins/github/createdAt.js new file mode 100644 index 0000000..985dd97 --- /dev/null +++ b/src/plugins/github/createdAt.js @@ -0,0 +1,17 @@ +// @flow + +import * as R from "./relationalView"; +export type MsSinceEpoch = number; + +export function createdAt(e: R.Entity): MsSinceEpoch | null { + const handlers = { + repo: () => null, + issue: (x) => x.createdAt(), + pull: (x) => x.createdAt(), + review: (x) => x.createdAt(), + comment: (x) => x.createdAt(), + commit: () => null, + userlike: () => null, + }; + return R.match(handlers, e); +} diff --git a/src/plugins/github/createdAt.test.js b/src/plugins/github/createdAt.test.js new file mode 100644 index 0000000..19cd8a7 --- /dev/null +++ b/src/plugins/github/createdAt.test.js @@ -0,0 +1,26 @@ +// @flow + +import {exampleEntities} from "./example/example"; +import {createdAt} from "./createdAt"; + +describe("plugins/github/createdAt", () => { + it("provides timestamps", () => { + const results = {}; + const examples = exampleEntities(); + for (const name of Object.keys(examples)) { + const entity = examples[name]; + results[name] = createdAt(entity); + } + expect(results).toMatchInlineSnapshot(` +Object { + "comment": 1519878210000, + "commit": 
null,
+  "issue": 1519807129000,
+  "pull": 1519807636000,
+  "repo": null,
+  "review": 1519878210000,
+  "userlike": null,
+}
+`);
+  });
+});
diff --git a/src/plugins/odyssey/analysisAdapter.js b/src/plugins/odyssey/analysisAdapter.js
index 65efa0b..b4a2e0a 100644
--- a/src/plugins/odyssey/analysisAdapter.js
+++ b/src/plugins/odyssey/analysisAdapter.js
@@ -1,20 +1,38 @@
 // @flow
 
-import {Graph} from "../../core/graph";
 import type {RepoId} from "../../core/repoId";
-import type {IAnalysisAdapter} from "../../analysis/analysisAdapter";
+import {type NodeAddressT} from "../../core/graph";
+import type {
+  IBackendAdapterLoader,
+  IAnalysisAdapter,
+} from "../../analysis/analysisAdapter";
 import {hackathonExample} from "./example";
 import {declaration} from "./declaration";
 
+export class BackendAdapterLoader implements IBackendAdapterLoader {
+  declaration() {
+    return declaration;
+  }
+  // TODO(@decentralion): Enable loading graphs other than the hackathon example.
+  load(
+    _unused_sourcecredDirectory: string,
+    _unused_repoId: RepoId
+  ): Promise<IAnalysisAdapter> {
+    const aa: AnalysisAdapter = new AnalysisAdapter();
+    // HACK: This any-coercion should be unnecessary. Sad flow.
+    return Promise.resolve((aa: any));
+  }
+}
+
 export class AnalysisAdapter implements IAnalysisAdapter {
   declaration() {
     return declaration;
   }
-  // TODO(@decentralion): Enable loading graphs other than the hackathon example.
-  async load(
-    _unused_sourcecredDirectory: string,
-    _unused_repoId: RepoId
-  ): Promise<Graph> {
+  // TODO(@decentralion): Add real creation times to the data model
+  createdAt(_unused_node: NodeAddressT): null {
+    return null;
+  }
+  graph() {
    return hackathonExample().graph();
  }
 }
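
For orientation, here is a minimal consumer-side sketch of the API this patch introduces: an IBackendAdapterLoader is instantiated per plugin, its load() resolves to an IAnalysisAdapter with its data already in memory, and graph() / createdAt() are then queried per node. This sketch is not part of the patch; the helper name collectCreatedAt, the relative import paths, and the assumption that Graph.nodes() iterates over node addresses at this point in the codebase are illustrative assumptions.

// @flow
// Illustrative sketch only (not from this commit): aggregating createdAt
// timestamps across all plugins' AnalysisAdapters.
import {type NodeAddressT} from "../core/graph";
import type {
  IBackendAdapterLoader,
  MsSinceEpoch,
} from "../analysis/analysisAdapter";
import type {RepoId} from "../core/repoId";

async function collectCreatedAt(
  loaders: $ReadOnlyArray<IBackendAdapterLoader>,
  sourcecredDirectory: string,
  repoId: RepoId
): Promise<Map<NodeAddressT, MsSinceEpoch | null>> {
  const timestamps: Map<NodeAddressT, MsSinceEpoch | null> = new Map();
  for (const loader of loaders) {
    // Each loader yields an IAnalysisAdapter whose data has already been
    // read from the SourceCred directory for this RepoId.
    const adapter = await loader.load(sourcecredDirectory, repoId);
    // Only ask each adapter about nodes from its own graph; e.g. the Git
    // adapter throws if handed a non-Git node address.
    for (const node of adapter.graph().nodes()) {
      // createdAt is null for "timeless" nodes (e.g. user identities).
      timestamps.set(node, adapter.createdAt(node));
    }
  }
  return timestamps;
}

In line with the commit message, the analysis pipeline aggregates results across every plugin's adapter, so a helper like this would typically receive the same loader array used by the CLI (e.g. defaultAdapterLoaders() in src/cli/pagerank.js).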