Remove vestigial load-related logic (#1898)

This commit removes old logic related to loading projects in the context
of "data directories". In particular, almost all of the `src/backend`
directory is removed (and I suspect the remaining files should come out
too; definitely the compat_io module is superseded by the new Parser
based approach). The old style loaders are also removed, as they are
rendered redundant by the new CliPlugin interface.

Test plan: `yarn flow` passes, and none of the load-bearing (ha ha)
parts of the current setup are impacted, i.e. there are no changes to
the CLI. Thus, we may be confident that everything is still working.
This commit is contained in:
Dandelion Mané 2020-06-27 20:30:44 -07:00 committed by GitHub
parent f6dbbc951f
commit 46c8e83d28
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 0 additions and 1806 deletions

View File

@ -1,54 +0,0 @@
// @flow
import {type Project} from "../core/project";
import {type Weights as WeightsT} from "../core/weights";
import {type PluginDeclaration} from "../analysis/pluginDeclaration";
import {type TimelineCredParameters} from "../analysis/timeline/params";
import {type DiscordToken} from "../plugins/experimental-discord/config";
import {type GithubToken} from "../plugins/github/token";
import {type CacheProvider} from "../backend/cache";
import {DataDirectory} from "../backend/dataDirectory";
import {TaskReporter} from "../util/taskReporter";
import {LoadContext} from "../backend/loadContext";
// Options controlling a full load-and-compute run; consumed by `load` below.
export type LoadOptions = {|
  // The project to load data for.
  +project: Project,
  // Optional overrides for the timeline cred computation parameters.
  +params: ?$Shape<TimelineCredParameters>,
  // Weights that take precedence over plugin-supplied defaults.
  +weightsOverrides: WeightsT,
  // NOTE(review): this field is never read by `load` below — presumably
  // vestigial; confirm before relying on it.
  +plugins: $ReadOnlyArray<PluginDeclaration>,
  // Root data directory where the cache and output artifacts are stored.
  +sourcecredDirectory: string,
  // API tokens; only required when the project enables the matching plugin.
  +githubToken: ?GithubToken,
  +discordToken: ?DiscordToken,
  // Local directory holding initiative files, if the project uses them.
  +initiativesDirectory: ?string,
|};
/**
* Loads and computes cred for a Project, storing the result in a DataDirectory.
*/
export async function load(
options: LoadOptions,
reporter: TaskReporter
): Promise<void> {
const {
sourcecredDirectory,
githubToken,
discordToken,
project,
params,
weightsOverrides,
initiativesDirectory,
} = options;
const data = new DataDirectory(sourcecredDirectory);
const context = new LoadContext({
cache: (data: CacheProvider),
githubToken,
discordToken,
reporter,
initiativesDirectory,
});
const result = await context.load(project, {
params: params || {},
weightsOverrides,
});
data.storeProject(project, result);
}

View File

@ -1,35 +0,0 @@
//@flow
import {type WeightedGraph} from "../core/weightedGraph";
import {TaskReporter} from "../util/taskReporter";
import {type TimelineCredParameters} from "../analysis/timeline/params";
import {type PluginDeclaration} from "../analysis/pluginDeclaration";
import {TimelineCred} from "../analysis/timeline/timelineCred";
/**
 * An abstract handle for TimelineCred.compute-like functions.
 */
export type ComputeFunction = (opts: ComputeOpts) => Promise<TimelineCred>;

// The environment `computeTask` needs: just a TaskReporter.
// Note: type should allow extra properties, it's expected to be a subset.
type ComputeEnv = {
  +reporter: TaskReporter,
};

// Arguments forwarded verbatim to the ComputeFunction.
type ComputeOpts = {|
  weightedGraph: WeightedGraph,
  params?: $Shape<TimelineCredParameters>,
  // TODO(@decentralion, #1557): remove plugins arg
  plugins: $ReadOnlyArray<PluginDeclaration>,
|};
export async function computeTask(
compute: ComputeFunction,
{reporter}: ComputeEnv,
opts: ComputeOpts
): Promise<TimelineCred> {
reporter.start("compute-cred");
const cred = await compute(opts);
reporter.finish("compute-cred");
return cred;
}

View File

@ -1,70 +0,0 @@
// @flow
import {TestTaskReporter} from "../util/taskReporter";
import {TimelineCred} from "../analysis/timeline/timelineCred";
import {type ComputeFunction, computeTask} from "./computeFunction";
// Test fixtures: a fresh jest mock per test, plus opaque stand-ins for the
// weighted graph and cred values (only identity/serialization matters here).
const mockCompute = () => jest.fn();
const fakeWeightedGraph = ({is: "fake-weighted-graph"}: any);
const fakeCred = ({
  toJSON: () => ({is: "fake-cred"}),
}: any);

describe("src/backend/computeFunction", () => {
  describe("ComputeFunction", () => {
    // Compile-time check: TimelineCred.compute must be assignable to the
    // ComputeFunction type — the assignment itself is the assertion.
    it("should match the TimelineCred.compute signature", () => {
      const _: ComputeFunction = TimelineCred.compute;
    });
  });
  describe("computeTask", () => {
    it("should defer to the provided compute function", async () => {
      // Given
      const plugins = [];
      const reporter = new TestTaskReporter();
      const params = {alpha: 0.456};
      const compute = mockCompute();
      compute.mockResolvedValueOnce(fakeCred);

      // When
      const cred = await computeTask(
        compute,
        {reporter},
        {weightedGraph: fakeWeightedGraph, plugins, params}
      );

      // Then: the result and the options must pass through unchanged.
      expect(cred).toEqual(fakeCred);
      expect(compute).toBeCalledTimes(1);
      expect(compute).toBeCalledWith({
        weightedGraph: fakeWeightedGraph,
        plugins,
        params,
      });
    });
    it("should give the right tasks to the TaskReporter", async () => {
      // Given
      const plugins = [];
      const reporter = new TestTaskReporter();
      const params = {alpha: 0.456};
      const compute = mockCompute();
      compute.mockResolvedValueOnce(fakeCred);

      // When
      await computeTask(
        compute,
        {reporter},
        {weightedGraph: fakeWeightedGraph, plugins, params}
      );

      // Then: exactly one start/finish pair, and no task left running.
      expect(reporter.activeTasks()).toEqual([]);
      expect(reporter.entries()).toEqual([
        {type: "START", taskId: "compute-cred"},
        {type: "FINISH", taskId: "compute-cred"},
      ]);
    });
  });
});

View File

@ -1,65 +0,0 @@
// @flow
import path from "path";
import fs from "fs-extra";
import Database from "better-sqlite3";
import stringify from "json-stable-stringify";
import {type Project, projectToJSON} from "../core/project";
import {directoryForProjectId} from "../core/project_io";
import * as WeightedGraph from "../core/weightedGraph";
import {type CacheProvider} from "./cache";
import type {
ProjectStorageProvider,
ProjectStorageExtras,
} from "./projectStorage";
import {toJSON as pluginsToJSON} from "../analysis/pluginDeclaration";
/**
* Represents a SourceCred data directory.
*/
export class DataDirectory implements CacheProvider, ProjectStorageProvider {
+_sourcecredDirectory: string;
+_cacheDirectory: string;
constructor(sourcecredDirectory: string) {
this._sourcecredDirectory = sourcecredDirectory;
this._cacheDirectory = path.join(sourcecredDirectory, "cache");
}
async database(id: string): Promise<Database> {
await fs.mkdirp(this._cacheDirectory);
const file = path.join(this._cacheDirectory, `${id}.db`);
return new Database(file);
}
async storeProject(
project: Project,
{weightedGraph, cred, pluginDeclarations}: ProjectStorageExtras
): Promise<void> {
const projectDirectory = directoryForProjectId(
project.id,
this._sourcecredDirectory
);
await fs.mkdirp(projectDirectory);
const writeFile = async (name: string, data: string) => {
const fileName = path.join(projectDirectory, name);
await fs.writeFile(fileName, data);
};
await writeFile("project.json", stringify(projectToJSON(project)));
if (weightedGraph) {
await writeFile(
"weightedGraph.json",
stringify(WeightedGraph.toJSON(weightedGraph))
);
}
if (cred) {
await writeFile("cred.json", stringify(cred.toJSON()));
}
if (pluginDeclarations) {
await writeFile(
"pluginDeclarations.json",
stringify(pluginsToJSON(pluginDeclarations))
);
}
}
}

View File

@ -1,174 +0,0 @@
// @flow
import deepFreeze from "deep-freeze";
import tmp from "tmp";
import path from "path";
import fs from "fs-extra";
import Database from "better-sqlite3";
import {createProject, projectToJSON, encodeProjectId} from "../core/project";
import {type CacheProvider} from "./cache";
import {type ProjectStorageProvider} from "./projectStorage";
import {DataDirectory} from "./dataDirectory";
import * as WeightedGraph from "../core/weightedGraph";
import {toJSON as pluginsToJSON} from "../analysis/pluginDeclaration";
// Shared fixtures: a minimal project plus opaque stand-ins for the values
// DataDirectory persists (only their JSON serializations are asserted).
const project = createProject({id: "testing-project"});
const fakeWeightedGraph = deepFreeze(WeightedGraph.empty());
const fakeDeclarations = deepFreeze([]);
const fakeCred = ({
  toJSON: () => ({is: "fake-cred"}),
}: any);
const fakeExtras = {
  weightedGraph: fakeWeightedGraph,
  cred: fakeCred,
  pluginDeclarations: fakeDeclarations,
};

describe("src/backend/dataDirectory", () => {
  describe("DataDirectory", () => {
    // Compile-time interface checks — the assignments are the assertions.
    it("should be a CacheProvider", () => {
      const _ = (x: DataDirectory): CacheProvider => x;
    });
    it("should be a ProjectStorageProvider", () => {
      const _ = (x: DataDirectory): ProjectStorageProvider => x;
    });
    describe("DataDirectory.database", () => {
      it("should create SQLite DB in the cache directory", async () => {
        // Given
        const sourcecredDirectory = tmp.dirSync().name;
        const id = "test-db-id";

        // When
        const data = new DataDirectory(sourcecredDirectory);
        const db = await data.database(id);

        // Then — fs.stat throws if the DB file wasn't created on disk.
        expect(db).toBeInstanceOf(Database);
        const dbFile = path.join(sourcecredDirectory, "cache", `${id}.db`);
        await fs.stat(dbFile);
      });
      it("should work when sourcecredDirectory doesn't exist", async () => {
        // Given: a directory path that has not been created yet.
        const sourcecredDirectory = path.join(
          tmp.dirSync().name,
          "sourcecred_data_test"
        );
        const id = "test-db-id";

        // When
        const data = new DataDirectory(sourcecredDirectory);
        const db = await data.database(id);

        // Then
        expect(db).toBeInstanceOf(Database);
        const dbFile = path.join(sourcecredDirectory, "cache", `${id}.db`);
        await fs.stat(dbFile);
      });
      it("should fail when sourcecredDirectory is a file", async () => {
        // Given: a regular file blocking the directory path.
        const sourcecredDirectory = path.join(
          tmp.dirSync().name,
          "sourcecred_data_test"
        );
        await fs.writeFile(sourcecredDirectory, "blocking file");
        const id = "test-db-id";

        // When
        const data = new DataDirectory(sourcecredDirectory);
        const p = data.database(id);

        // Then
        await expect(p).rejects.toThrow("ENOTDIR:");
      });
    });
    describe("DataDirectory.storeProject", () => {
      it("should populate a project directory", async () => {
        // Given
        const sourcecredDirectory = tmp.dirSync().name;

        // When
        const data = new DataDirectory(sourcecredDirectory);
        await data.storeProject(project, fakeExtras);

        // Then: each artifact appears as JSON under the project directory.
        const expectedProjectDirectory = path.join(
          sourcecredDirectory,
          "projects",
          encodeProjectId(project.id)
        );
        const expectJSONFile = async (name: string, expected: any) => {
          const filePath = path.join(expectedProjectDirectory, name);
          const actual = JSON.parse(await fs.readFile(filePath));
          expect(actual).toEqual(expected);
        };
        await expectJSONFile("project.json", projectToJSON(project));
        await expectJSONFile(
          "weightedGraph.json",
          WeightedGraph.toJSON(fakeWeightedGraph)
        );
        await expectJSONFile("cred.json", fakeCred.toJSON());
        await expectJSONFile(
          "pluginDeclarations.json",
          pluginsToJSON(fakeDeclarations)
        );
      });
      it("should work when sourcecredDirectory doesn't exist", async () => {
        // Given: a directory path that has not been created yet.
        const sourcecredDirectory = path.join(
          tmp.dirSync().name,
          "sourcecred_data_test"
        );

        // When
        const data = new DataDirectory(sourcecredDirectory);
        await data.storeProject(project, fakeExtras);

        // Then
        const expectedProjectDirectory = path.join(
          sourcecredDirectory,
          "projects",
          encodeProjectId(project.id)
        );
        const expectJSONFile = async (name: string, expected: any) => {
          const filePath = path.join(expectedProjectDirectory, name);
          const actual = JSON.parse(await fs.readFile(filePath));
          expect(actual).toEqual(expected);
        };
        await expectJSONFile("project.json", projectToJSON(project));
        await expectJSONFile(
          "weightedGraph.json",
          WeightedGraph.toJSON(fakeWeightedGraph)
        );
        await expectJSONFile("cred.json", fakeCred.toJSON());
        await expectJSONFile(
          "pluginDeclarations.json",
          pluginsToJSON(fakeDeclarations)
        );
      });
      it("should fail when sourcecredDirectory is a file", async () => {
        // Given: a regular file blocking the directory path.
        const sourcecredDirectory = path.join(
          tmp.dirSync().name,
          "sourcecred_data_test"
        );
        await fs.writeFile(sourcecredDirectory, "blocking file");

        // When
        const data = new DataDirectory(sourcecredDirectory);
        const p = data.storeProject(project, fakeExtras);

        // Then
        await expect(p).rejects.toThrow("ENOTDIR:");
      });
    });
  });
});

View File

@ -1,139 +0,0 @@
//@flow
import {type Project} from "../core/project";
import {type Weights as WeightsT} from "../core/weights";
import {type WeightedGraph as WeightedGraphT} from "../core/weightedGraph";
import * as WeightedGraph from "../core/weightedGraph";
import {type TimelineCredParameters} from "../analysis/timeline/params";
import {type GithubToken} from "../plugins/github/token";
import {type DiscordToken} from "../plugins/experimental-discord/config";
import {type CacheProvider} from "./cache";
import {TaskReporter} from "../util/taskReporter";
import {TimelineCred} from "../analysis/timeline/timelineCred";
import {type ComputeFunction as ComputeFunctionT} from "./computeFunction";
import {type PluginLoaders as PluginLoadersT} from "./pluginLoaders";
import * as ComputeFunction from "./computeFunction";
import * as PluginLoaders from "./pluginLoaders";
import {default as githubLoader} from "../plugins/github/loader";
import {default as discordLoader} from "../plugins/experimental-discord/loader";
import {default as discourseLoader} from "../plugins/discourse/loader";
import {default as identityLoader} from "../plugins/identity/loader";
import {default as initiativesLoader} from "../plugins/initiatives/loader";
import {type PluginDeclarations} from "../analysis/pluginDeclaration";
// The artifacts produced by a successful load.
export type LoadResult = {|
  +pluginDeclarations: PluginDeclarations,
  +weightedGraph: WeightedGraphT,
  +cred: TimelineCred,
|};

// The environment a LoadContext is constructed with. Tokens and the
// initiatives directory are nullable: they're only needed when the project
// enables the corresponding plugin.
export type LoadContextOptions = {|
  +cache: CacheProvider,
  +reporter: TaskReporter,
  +githubToken: ?GithubToken,
  +discordToken: ?DiscordToken,
  +initiativesDirectory: ?string,
|};

// Per-load arguments; both fields may be omitted.
type OptionalLoadArguments = {|
  +weightsOverrides?: WeightsT,
  +params?: $Shape<TimelineCredParameters>,
|};
/**
* This class is responsible composing all the variables and concrete functions
* of the loading process.
*
* Specifically it composes:
* - The loading environment (through the constructor).
* - Concrete functions of the loading process (internally).
* - Parameters for a load (Project and TimelineCredParameters).
*
* You can think of LoadContext as an instance where the environment and
* functions have been composed so it's ready to perform a load with.
*/
export class LoadContext {
+_options: LoadContextOptions;
constructor(opts: LoadContextOptions) {
this._options = opts;
}
/**
* Here we're exposing multiple "proxy functions".
* They're just aliases of functions from another module. But by aliasing them
* as private properties we allow test code to spyOn these per LoadContext
* instance, and without needing to know details of the external modules.
*
* Of course this would break if the external module changes, but that would
* also occur if we directly depended on them.
*/
+_declarations = PluginLoaders.declarations;
+_updateMirror = PluginLoaders.updateMirror;
+_createPluginGraphs = PluginLoaders.createPluginGraphs;
+_createReferenceDetector = PluginLoaders.createReferenceDetector;
+_contractPluginGraphs = PluginLoaders.contractPluginGraphs;
+_overrideWeights = WeightedGraph.overrideWeights;
+_computeTask = ComputeFunction.computeTask;
/**
* The above proxy functions we're deferring to, accept interfaces so they
* could easily be mocked. This class takes the role of composing the concrete
* implementations though. So we're exposing them as aliases here, similar to
* the functions. As we'll need to test if these have been correctly passed on.
*/
+_compute: ComputeFunctionT = TimelineCred.compute;
+_pluginLoaders: PluginLoadersT = {
github: githubLoader,
discord: discordLoader,
discourse: discourseLoader,
identity: identityLoader,
initiatives: initiativesLoader,
};
/**
* Performs a load in this context.
*/
async load(
project: Project,
{params, weightsOverrides}: OptionalLoadArguments
): Promise<LoadResult> {
const cachedProject = await this._updateMirror(
this._pluginLoaders,
this._options,
project
);
const referenceDetector = await this._createReferenceDetector(
this._pluginLoaders,
this._options,
cachedProject
);
const pluginGraphs = await this._createPluginGraphs(
this._pluginLoaders,
this._options,
cachedProject,
referenceDetector
);
const contractedGraph = await this._contractPluginGraphs(
this._pluginLoaders,
pluginGraphs
);
let weightedGraph = contractedGraph;
if (weightsOverrides) {
weightedGraph = this._overrideWeights(contractedGraph, weightsOverrides);
}
const plugins = this._declarations(this._pluginLoaders, project);
const cred = await this._computeTask(this._compute, this._options, {
params,
plugins,
weightedGraph,
});
return {
pluginDeclarations: plugins,
weightedGraph,
cred,
};
}
}

View File

@ -1,249 +0,0 @@
// @flow
import {type CacheProvider} from "./cache";
import {type Project, createProject} from "../core/project";
import * as Weights from "../core/weights";
import {validateToken} from "../plugins/github/token";
import {TestTaskReporter} from "../util/taskReporter";
import {LoadContext} from "./loadContext";
// Opaque stand-ins for each intermediate value of the load pipeline; tests
// only assert referential identity.
const fakes = {
  declarations: ({fake: "declarations"}: any),
  referenceDetector: ({fake: "referenceDetector"}: any),
  pluginGraphs: ({fake: "pluginGraphs"}: any),
  contractedGraph: ({fake: "contractedGraph"}: any),
  weightedGraph: ({fake: "weightedGraph"}: any),
  timelineCred: ({fake: "timelineCred"}: any),
  initiativesDirectory: ({fake: "initiativesDirectory"}: any),
};

const mockCacheProvider = (): CacheProvider => ({
  database: jest.fn(),
});

// Builds jest spies over a target's `_`-prefixed proxy methods. The default
// implementation throws, so any call that isn't explicitly mocked fails loudly.
const spyBuilderFor = (target) => ({
  proxyMethod: (on: string) => {
    return jest.spyOn(target, `_${on}`).mockImplementation(() => {
      throw new Error(`Unexpected call of _${on}`);
    });
  },
});

// Mocks every proxy method of a LoadContext for exactly one call each,
// wiring the fake pipeline values as the mocked results.
const mockProxyMethods = (
  loadContext: LoadContext,
  project: Project,
  cache: CacheProvider
) => {
  const spyBuilder = spyBuilderFor(loadContext);
  return {
    declarations: spyBuilder
      .proxyMethod("declarations")
      .mockReturnValueOnce(fakes.declarations),
    updateMirror: spyBuilder
      .proxyMethod("updateMirror")
      .mockResolvedValueOnce({project, cache}),
    createReferenceDetector: spyBuilder
      .proxyMethod("createReferenceDetector")
      .mockResolvedValueOnce(fakes.referenceDetector),
    createPluginGraphs: spyBuilder
      .proxyMethod("createPluginGraphs")
      .mockResolvedValueOnce(fakes.pluginGraphs),
    contractPluginGraphs: spyBuilder
      .proxyMethod("contractPluginGraphs")
      .mockReturnValueOnce(fakes.contractedGraph),
    overrideWeights: spyBuilder
      .proxyMethod("overrideWeights")
      .mockReturnValueOnce(fakes.weightedGraph),
    computeTask: spyBuilder
      .proxyMethod("computeTask")
      .mockResolvedValueOnce(fakes.timelineCred),
  };
};
describe("src/backend/loadContext", () => {
  describe("LoadContext", () => {
    // Shared fixture values for every test below.
    const githubToken = validateToken("0".repeat(40));
    const discordToken = "fakeBotToken";
    const project = createProject({id: "testing-project"});
    const params = {alpha: 0.123};
    const initiativesDirectory = fakes.initiativesDirectory;
    describe("constructor", () => {
      /**
       * Note: we're not testing the proxy properties are the "correct" ones.
       * This would be too constraining. Instead we should use an integration
       * test to see if the results are as expected. However they should be
       * exposed, in order to validate they are correctly called during `load`.
       */
      it("should expose proxy properties", () => {
        // Given
        const cache = mockCacheProvider();
        const reporter = new TestTaskReporter();

        // When
        const loadContext = new LoadContext({
          cache,
          githubToken,
          discordToken,
          reporter,
          initiativesDirectory,
        });

        // Then
        expect(loadContext).toMatchObject({
          // Properties
          _compute: expect.anything(),
          _pluginLoaders: expect.anything(),
          // Methods
          _declarations: expect.anything(),
          _updateMirror: expect.anything(),
          _createReferenceDetector: expect.anything(),
          _createPluginGraphs: expect.anything(),
          _contractPluginGraphs: expect.anything(),
          _overrideWeights: expect.anything(),
          _computeTask: expect.anything(),
        });
      });
    });
    describe("load", () => {
      it("should call proxy methods with correct arguments", async () => {
        // Given
        const cache = mockCacheProvider();
        const reporter = new TestTaskReporter();
        const weightsOverrides = Weights.empty();
        const loadContext = new LoadContext({
          cache,
          githubToken,
          discordToken,
          reporter,
          initiativesDirectory,
        });
        const spies = mockProxyMethods(loadContext, project, cache);

        // When
        await loadContext.load(project, {weightsOverrides, params});

        // Then: each pipeline stage receives the plugin loaders, the
        // environment, and the previous stage's (fake) output.
        const cachedProject = {project, cache};
        const expectedEnv = {
          initiativesDirectory,
          githubToken,
          discordToken,
          reporter,
          cache,
        };
        expect(spies.declarations).toBeCalledWith(
          loadContext._pluginLoaders,
          project
        );
        expect(spies.updateMirror).toBeCalledWith(
          loadContext._pluginLoaders,
          expectedEnv,
          project
        );
        expect(spies.createReferenceDetector).toBeCalledWith(
          loadContext._pluginLoaders,
          expectedEnv,
          cachedProject
        );
        expect(spies.createPluginGraphs).toBeCalledWith(
          loadContext._pluginLoaders,
          expectedEnv,
          cachedProject,
          fakes.referenceDetector
        );
        expect(spies.contractPluginGraphs).toBeCalledWith(
          loadContext._pluginLoaders,
          fakes.pluginGraphs
        );
        expect(spies.overrideWeights).toBeCalledWith(
          fakes.contractedGraph,
          weightsOverrides
        );
        expect(spies.computeTask).toBeCalledWith(
          loadContext._compute,
          expectedEnv,
          {
            weightedGraph: fakes.weightedGraph,
            plugins: fakes.declarations,
            params,
          }
        );
      });
      it("should support omitting optional arguments", async () => {
        // Given
        const cache = mockCacheProvider();
        const reporter = new TestTaskReporter();
        const loadContext = new LoadContext({
          cache,
          githubToken,
          discordToken,
          reporter,
          initiativesDirectory,
        });
        const spies = mockProxyMethods(loadContext, project, cache);

        // When
        await loadContext.load(project, {});

        // Then
        const expectedEnv = {
          initiativesDirectory,
          githubToken,
          discordToken,
          reporter,
          cache,
        };
        // Omitting weight overrides option, should not call this function.
        expect(spies.overrideWeights).toBeCalledTimes(0);
        // We have a different input graph, because weight overrides wasn't called.
        // We're omitting the `params` argument from the options.
        expect(spies.computeTask).toBeCalledWith(
          loadContext._compute,
          expectedEnv,
          {weightedGraph: fakes.contractedGraph, plugins: fakes.declarations}
        );
      });
      it("should return a LoadResult", async () => {
        // Given
        const cache = mockCacheProvider();
        const reporter = new TestTaskReporter();
        const weightsOverrides = Weights.empty();
        const loadContext = new LoadContext({
          cache,
          githubToken,
          discordToken,
          reporter,
          initiativesDirectory,
        });
        mockProxyMethods(loadContext, project, cache);

        // When
        const result = await loadContext.load(project, {
          weightsOverrides,
          params,
        });

        // Then: the fake pipeline outputs come back in the LoadResult shape.
        expect(result).toEqual({
          pluginDeclarations: fakes.declarations,
          weightedGraph: fakes.weightedGraph,
          cred: fakes.timelineCred,
        });
      });
    });
  });
});

View File

@ -1,242 +0,0 @@
//@flow
import {TaskReporter} from "../util/taskReporter";
import {type Project} from "../core/project";
import {type WeightedGraph as WeightedGraphT} from "../core/weightedGraph";
import * as WeightedGraph from "../core/weightedGraph";
import {type PluginDeclaration} from "../analysis/pluginDeclaration";
import {type CacheProvider} from "./cache";
import {type GithubToken} from "../plugins/github/token";
import {type DiscordToken} from "../plugins/experimental-discord/config";
import {type Loader as GithubLoader} from "../plugins/github/loader";
import {type Loader as DiscordLoader} from "../plugins/experimental-discord/loader";
import {type Loader as DiscourseLoader} from "../plugins/discourse/loader";
import {type Loader as IdentityLoader} from "../plugins/identity/loader";
import {type Loader as InitiativesLoader} from "../plugins/initiatives/loader";
import {type LoadedInitiativesDirectory} from "../plugins/initiatives/initiativesDirectory";
import {
type ReferenceDetector,
CascadingReferenceDetector,
} from "../core/references";
/**
 * A type combining all known plugin Loader interfaces.
 *
 * Using this allows us to define "for all plugins" semantics, while keeping
 * each underlying plugin's interface flexible.
 */
export type PluginLoaders = {|
  +github: GithubLoader,
  +discord: DiscordLoader,
  +discourse: DiscourseLoader,
  +identity: IdentityLoader,
  +initiatives: InitiativesLoader,
|};

/**
 * Represents a Project which has been mirrored into the CacheProvider.
 *
 * Note: no guarantees about the cache are made; its state is best-effort.
 */
opaque type CachedProject = {|
  +loadedInitiativesDirectory: ?LoadedInitiativesDirectory,
  +cache: CacheProvider,
  +project: Project,
|};

/**
 * Represents all disjoint WeightedGraphs for a CachedProject.
 */
opaque type PluginGraphs = {|
  +graphs: $ReadOnlyArray<WeightedGraphT>,
  +cachedProject: CachedProject,
|};

// Environment needed to update plugin mirrors. Inexact: callers may pass a
// superset (e.g. LoadContextOptions).
type MirrorEnv = {
  +initiativesDirectory: ?string,
  +githubToken: ?GithubToken,
  +discordToken: ?DiscordToken,
  +reporter: TaskReporter,
  +cache: CacheProvider,
};

// Environment needed to create graphs/reference detectors. Inexact, as above.
type GraphEnv = {
  +githubToken: ?GithubToken,
  +discordToken: ?DiscordToken,
};
/**
* Gets all relevant PluginDeclarations for a given Project.
*/
export function declarations(
{github, discourse, discord, identity, initiatives}: PluginLoaders,
project: Project
): $ReadOnlyArray<PluginDeclaration> {
const plugins: PluginDeclaration[] = [];
if (project.repoIds.length) {
plugins.push(github.declaration());
}
if (project.discourseServer != null) {
plugins.push(discourse.declaration());
}
if (project.discord != null) {
plugins.push(discord.declaration());
}
if (project.identities.length) {
plugins.push(identity.declaration());
}
if (project.initiatives) {
plugins.push(initiatives.declaration());
}
return plugins;
}
/**
* Updates all mirrors into cache as requested by the Project.
*/
export async function updateMirror(
{github, discourse, discord, initiatives}: PluginLoaders,
{githubToken, discordToken, cache, reporter, initiativesDirectory}: MirrorEnv,
project: Project
): Promise<CachedProject> {
const tasks: Promise<void>[] = [];
if (project.discourseServer) {
tasks.push(
discourse.updateMirror(project.discourseServer, cache, reporter)
);
}
if (project.repoIds.length) {
if (!githubToken) {
throw new Error("Tried to load GitHub, but no GitHub token set");
}
tasks.push(
github.updateMirror(project.repoIds, githubToken, cache, reporter)
);
}
if (project.discord) {
if (!discordToken) {
throw new Error("Tried to load Discord, but no Discord bot token set");
}
tasks.push(
discord.updateMirror(project.discord, discordToken, cache, reporter)
);
}
let loadedInitiativesDirectory: ?LoadedInitiativesDirectory;
if (project.initiatives) {
if (!initiativesDirectory) {
throw new Error(
"Tried to load Initiatives, but no Initiatives directory set"
);
}
loadedInitiativesDirectory = await initiatives.loadDirectory(
{
localPath: initiativesDirectory,
remoteUrl: project.initiatives.remoteUrl,
},
reporter
);
}
await Promise.all(tasks);
return {project, cache, loadedInitiativesDirectory};
}
/**
* Creates a ReferenceDetector composing all plugin reference detectors
* requested by the project.
*/
export async function createReferenceDetector(
{github, discourse}: $Shape<PluginLoaders>,
{githubToken}: GraphEnv,
{cache, project, loadedInitiativesDirectory}: CachedProject
): Promise<ReferenceDetector> {
const refs: ReferenceDetector[] = [];
if (project.repoIds.length) {
// TODO: similar to create graph, rather not depend on the token (#1580).
if (!githubToken) {
throw new Error("Tried to load GitHub, but no GitHub token set");
}
refs.push(
await github.referenceDetector(project.repoIds, githubToken, cache)
);
}
if (project.discourseServer) {
refs.push(
await discourse.referenceDetector(project.discourseServer, cache)
);
}
if (loadedInitiativesDirectory) {
refs.push(loadedInitiativesDirectory.referenceDetector);
}
return new CascadingReferenceDetector(refs);
}
/**
* Creates PluginGraphs containing all plugins requested by the Project.
*/
export async function createPluginGraphs(
{github, discourse, discord, initiatives}: PluginLoaders,
{githubToken, discordToken}: GraphEnv,
{cache, project, loadedInitiativesDirectory}: CachedProject,
referenceDetector: ReferenceDetector
): Promise<PluginGraphs> {
const tasks: Promise<WeightedGraphT>[] = [];
if (project.discourseServer) {
tasks.push(discourse.createGraph(project.discourseServer, cache));
}
if (project.repoIds.length) {
if (!githubToken) {
throw new Error("Tried to load GitHub, but no GitHub token set");
}
tasks.push(github.createGraph(project.repoIds, githubToken, cache));
}
if (project.discord) {
if (!discordToken) {
throw new Error("Tried to load Discord, but no Discord bot token set");
}
tasks.push(discord.createGraph(project.discord, cache));
}
if (loadedInitiativesDirectory) {
tasks.push(
initiatives.createGraph(
loadedInitiativesDirectory.initiatives,
referenceDetector
)
);
}
// It's important to use Promise.all so that we can load the plugins in
// parallel -- since loading is often IO-bound, this can be a big performance
// improvement.
return {
graphs: await Promise.all(tasks),
cachedProject: {cache, project, loadedInitiativesDirectory},
};
}
/**
* Takes PluginGraphs and merges it into a WeightedGraph with identities contracted.
*/
export async function contractPluginGraphs(
{identity}: PluginLoaders,
{graphs, cachedProject}: PluginGraphs
): Promise<WeightedGraphT> {
const {project} = cachedProject;
const mergedGraph = WeightedGraph.merge(graphs);
// Don't contract when there's no identities. This will prevent unnecessary copying.
if (!project.identities.length) {
return mergedGraph;
}
const discourseServer = project.discourseServer || {serverUrl: null};
const identitySpec = {
identities: project.identities,
discourseServerUrl: discourseServer.serverUrl,
};
return identity.contractIdentities(mergedGraph, identitySpec);
}

View File

@ -1,529 +0,0 @@
// @flow
import {type CacheProvider} from "./cache";
import {
type ReferenceDetector,
CascadingReferenceDetector,
} from "../core/references";
import * as WeightedGraph from "../core/weightedGraph";
import {node as graphNode} from "../core/graphTestUtil";
import {createProject} from "../core/project";
import {TestTaskReporter} from "../util/taskReporter";
import {validateToken} from "../plugins/github/token";
import {makeRepoId} from "../plugins/github/repoId";
import * as PluginLoaders from "./pluginLoaders";
import {type LoadedInitiativesDirectory} from "../plugins/initiatives/initiativesDirectory";
export function createWG(name: string) {
const weightedGraph = WeightedGraph.empty();
weightedGraph.graph.addNode(graphNode(`${name}-sentinel`));
return weightedGraph;
}
// Distinguishable mock graphs, one per plugin, plus the identity-contracted
// result.
const mockGraphs = {
  github: createWG("github"),
  discord: createWG("discord"),
  discourse: createWG("discourse"),
  initiatives: createWG("initiatives"),
  contracted: createWG("identity-contracted"),
};

// Opaque stand-ins for declarations, reference detectors, and the
// initiatives repository; tests only assert referential identity.
const fakes = {
  githubDeclaration: ({fake: "githubDeclaration"}: any),
  discordDeclaration: ({fake: "discordDeclaration"}: any),
  githubReferences: ({fake: "githubReferences"}: any),
  discourseDeclaration: ({fake: "discourseDeclaration"}: any),
  discourseReferences: ({fake: "discourseReferences"}: any),
  identityDeclaration: ({fake: "identityDeclaration"}: any),
  initiativesDeclaration: ({fake: "initiativesDeclaration"}: any),
  initiativesReferences: ({fake: "initiativesReferences"}: any),
  initiativesRepository: ({fake: "initiativesRepository"}: any),
};

const mockLoadedDirectory = (): LoadedInitiativesDirectory =>
  ({
    referenceDetector: fakes.initiativesReferences,
    initiatives: fakes.initiativesRepository,
  }: any);

const mockCacheProvider = (): CacheProvider => ({
  database: jest.fn(),
});

const mockReferenceDetector = (): ReferenceDetector => ({
  addressFromUrl: jest.fn(),
});

// A full PluginLoaders mock wired to the fake declarations and mock graphs.
const mockPluginLoaders = () => ({
  github: {
    declaration: jest.fn().mockReturnValue(fakes.githubDeclaration),
    updateMirror: jest.fn(),
    referenceDetector: jest.fn().mockResolvedValue(fakes.githubReferences),
    createGraph: jest.fn().mockResolvedValue(mockGraphs.github),
  },
  discord: {
    declaration: jest.fn().mockReturnValue(fakes.discordDeclaration),
    updateMirror: jest.fn(),
    createGraph: jest.fn().mockResolvedValue(mockGraphs.discord),
  },
  discourse: {
    declaration: jest.fn().mockReturnValue(fakes.discourseDeclaration),
    updateMirror: jest.fn(),
    referenceDetector: jest.fn().mockResolvedValue(fakes.discourseReferences),
    createGraph: jest.fn().mockResolvedValue(mockGraphs.discourse),
  },
  identity: {
    declaration: jest.fn().mockReturnValue(fakes.identityDeclaration),
    contractIdentities: jest.fn().mockReturnValue(mockGraphs.contracted),
  },
  initiatives: {
    declaration: jest.fn().mockReturnValue(fakes.initiativesDeclaration),
    loadDirectory: jest.fn().mockResolvedValue(mockLoadedDirectory()),
    createGraph: jest.fn().mockResolvedValue(mockGraphs.initiatives),
  },
});
// Test suite for the PluginLoaders facade. Covers: which plugin declarations
// a project implies, mirror updating (including token/directory
// preconditions), per-plugin graph creation, reference-detector assembly,
// and identity contraction of the merged graph.
describe("src/backend/pluginLoaders", () => {
  const exampleGithubToken = validateToken("0".repeat(40));
  const exampleRepoId = makeRepoId("sourcecred-test", "example-github");

  describe("declarations", () => {
    it("should include discourse declaration", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const project = createProject({
        id: "has-discourse",
        discourseServer: {serverUrl: "http://foo.bar"},
      });

      // When
      const decs = PluginLoaders.declarations(loaders, project);

      // Then
      expect(decs).toEqual([fakes.discourseDeclaration]);
    });

    it("should include github declaration", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const project = createProject({
        id: "has-github",
        repoIds: [exampleRepoId],
      });

      // When
      const decs = PluginLoaders.declarations(loaders, project);

      // Then
      expect(decs).toEqual([fakes.githubDeclaration]);
    });

    it("should include initiatives declaration", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const project = createProject({
        id: "has-initiatives",
        initiatives: {remoteUrl: "http://example.com/initiatives"},
      });

      // When
      const decs = PluginLoaders.declarations(loaders, project);

      // Then
      expect(decs).toEqual([fakes.initiativesDeclaration]);
    });

    it("should include identity declaration", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const project = createProject({
        id: "has-identity",
        identities: [{username: "foo", aliases: ["github/foo"]}],
      });

      // When
      const decs = PluginLoaders.declarations(loaders, project);

      // Then
      expect(decs).toEqual([fakes.identityDeclaration]);
    });
  });

  describe("updateMirror", () => {
    it("should update discourse mirror", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const reporter = new TestTaskReporter();
      const githubToken = null;
      const discordToken = null;
      const initiativesDirectory = null;
      const project = createProject({
        id: "has-discourse",
        discourseServer: {serverUrl: "http://foo.bar"},
      });

      // When
      await PluginLoaders.updateMirror(
        loaders,
        {githubToken, discordToken, cache, reporter, initiativesDirectory},
        project
      );

      // Then
      const {discourse} = loaders;
      expect(discourse.updateMirror).toBeCalledTimes(1);
      expect(discourse.updateMirror).toBeCalledWith(
        project.discourseServer,
        cache,
        reporter
      );
    });

    // Initiatives require a local directory; without one, the update must
    // reject rather than silently skip the plugin.
    it("should fail when missing initiativesDirectory", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const githubToken = null;
      const discordToken = null;
      const initiativesDirectory = null;
      const reporter = new TestTaskReporter();
      const project = createProject({
        id: "has-initiatives",
        initiatives: {remoteUrl: "http://example.com/initiatives"},
      });

      // When
      const p = PluginLoaders.updateMirror(
        loaders,
        {githubToken, discordToken, cache, reporter, initiativesDirectory},
        project
      );

      // Then
      await expect(p).rejects.toThrow(
        "Tried to load Initiatives, but no Initiatives directory set"
      );
    });

    it("should load initiatives directory", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const reporter = new TestTaskReporter();
      const githubToken = null;
      const discordToken = null;
      // Any existing directory works here; __dirname is always present.
      const initiativesDirectory = __dirname;
      const project = createProject({
        id: "has-initiatives",
        initiatives: {remoteUrl: "http://example.com/initiatives"},
      });

      // When
      await PluginLoaders.updateMirror(
        loaders,
        {githubToken, discordToken, cache, reporter, initiativesDirectory},
        project
      );

      // Then
      const {initiatives} = loaders;
      expect(initiatives.loadDirectory).toBeCalledTimes(1);
      expect(initiatives.loadDirectory).toBeCalledWith(
        {
          localPath: initiativesDirectory,
          remoteUrl: "http://example.com/initiatives",
        },
        reporter
      );
    });

    it("should fail when missing GithubToken", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const githubToken = null;
      const discordToken = null;
      const initiativesDirectory = null;
      const reporter = new TestTaskReporter();
      const project = createProject({
        id: "has-github",
        repoIds: [exampleRepoId],
      });

      // When
      const p = PluginLoaders.updateMirror(
        loaders,
        {githubToken, discordToken, cache, reporter, initiativesDirectory},
        project
      );

      // Then
      await expect(p).rejects.toThrow(
        "Tried to load GitHub, but no GitHub token set"
      );
    });

    it("should update github mirror", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const githubToken = exampleGithubToken;
      const discordToken = null;
      const reporter = new TestTaskReporter();
      const initiativesDirectory = null;
      const project = createProject({
        id: "has-github",
        repoIds: [exampleRepoId],
      });

      // When
      await PluginLoaders.updateMirror(
        loaders,
        {githubToken, discordToken, cache, reporter, initiativesDirectory},
        project
      );

      // Then
      const {github} = loaders;
      expect(github.updateMirror).toBeCalledTimes(1);
      expect(github.updateMirror).toBeCalledWith(
        project.repoIds,
        githubToken,
        cache,
        reporter
      );
    });
  });

  describe("createPluginGraphs", () => {
    it("should create discourse graph", async () => {
      // Given
      const references = mockReferenceDetector();
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const githubToken = null;
      const discordToken = null;
      const project = createProject({
        id: "has-discourse",
        discourseServer: {serverUrl: "http://foo.bar"},
      });
      const cachedProject = ({project, cache}: any);

      // When
      const pluginGraphs = await PluginLoaders.createPluginGraphs(
        loaders,
        {githubToken, discordToken},
        cachedProject,
        references
      );

      // Then
      const {discourse} = loaders;
      expect(pluginGraphs).toEqual({
        graphs: [mockGraphs.discourse],
        cachedProject,
      });
      expect(discourse.createGraph).toBeCalledTimes(1);
      expect(discourse.createGraph).toBeCalledWith(
        project.discourseServer,
        cache
      );
    });

    it("should create initiatives graph", async () => {
      // Given
      const references = mockReferenceDetector();
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const loadedInitiativesDirectory = mockLoadedDirectory();
      const githubToken = null;
      const discordToken = null;
      const project = createProject({
        id: "has-initiatives",
        initiatives: {remoteUrl: "http://example.com/initiatives"},
      });
      const cachedProject = ({project, cache, loadedInitiativesDirectory}: any);

      // When
      const pluginGraphs = await PluginLoaders.createPluginGraphs(
        loaders,
        {githubToken, discordToken},
        cachedProject,
        references
      );

      // Then
      const {initiatives} = loaders;
      expect(pluginGraphs).toEqual({
        graphs: [mockGraphs.initiatives],
        cachedProject,
      });
      expect(initiatives.createGraph).toBeCalledTimes(1);
      expect(initiatives.createGraph).toBeCalledWith(
        loadedInitiativesDirectory.initiatives,
        references
      );
    });

    it("fail when missing GithubToken", async () => {
      // Given
      const references = mockReferenceDetector();
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const githubToken = null;
      const discordToken = null;
      const project = createProject({
        id: "has-github",
        repoIds: [exampleRepoId],
      });
      const cachedProject = ({project, cache}: any);

      // When
      const p = PluginLoaders.createPluginGraphs(
        loaders,
        {githubToken, discordToken},
        cachedProject,
        references
      );

      // Then
      await expect(p).rejects.toThrow(
        "Tried to load GitHub, but no GitHub token set"
      );
    });

    it("should create github graph", async () => {
      // Given
      const references = mockReferenceDetector();
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const githubToken = exampleGithubToken;
      const discordToken = null;
      const project = createProject({
        id: "has-github",
        repoIds: [exampleRepoId],
      });
      const cachedProject = ({project, cache}: any);

      // When
      const pluginGraphs = await PluginLoaders.createPluginGraphs(
        loaders,
        {githubToken, discordToken},
        cachedProject,
        references
      );

      // Then
      const {github} = loaders;
      expect(pluginGraphs).toEqual({
        graphs: [mockGraphs.github],
        cachedProject,
      });
      expect(github.createGraph).toBeCalledTimes(1);
      expect(github.createGraph).toBeCalledWith(
        project.repoIds,
        githubToken,
        cache
      );
    });
  });

  describe("createReferenceDetector", () => {
    it("should create a CascadingReferenceDetector", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const githubToken = exampleGithubToken;
      const discordToken = null;
      const loadedInitiativesDirectory = mockLoadedDirectory();
      const project = createProject({
        id: "has-github-discourse-initiatives",
        discourseServer: {serverUrl: "http://foo.bar"},
        initiatives: {remoteUrl: "http://example.com/initiatives"},
        repoIds: [exampleRepoId],
      });
      const cachedProject = ({project, cache, loadedInitiativesDirectory}: any);

      // When
      const references = await PluginLoaders.createReferenceDetector(
        loaders,
        {githubToken, discordToken},
        cachedProject
      );

      // Then
      // The cascade order (github, discourse, initiatives) is part of the
      // contract: earlier detectors win.
      expect(references).toBeInstanceOf(CascadingReferenceDetector);
      expect(((references: any): CascadingReferenceDetector).refs).toEqual([
        fakes.githubReferences,
        fakes.discourseReferences,
        fakes.initiativesReferences,
      ]);
    });
  });

  describe("contractPluginGraphs", () => {
    it("should only merge graphs when no identities are defined", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const project = createProject({
        id: "has-github-and-discourse",
        discourseServer: {serverUrl: "http://foo.bar"},
        repoIds: [exampleRepoId],
      });
      const pluginGraphs = ({
        graphs: [mockGraphs.github, mockGraphs.discourse],
        cachedProject: {project, cache},
      }: any);

      // When
      const graph = await PluginLoaders.contractPluginGraphs(
        loaders,
        pluginGraphs
      );

      // Then
      const expectedGraph = WeightedGraph.merge([
        mockGraphs.github,
        mockGraphs.discourse,
      ]);
      expect(graph).toEqual(expectedGraph);
    });

    it("should contract identities when they are defined", async () => {
      // Given
      const loaders = mockPluginLoaders();
      const cache = mockCacheProvider();
      const project = createProject({
        id: "has-github-and-discourse-and-identity",
        identities: [{username: "foo", aliases: ["github/foo"]}],
        discourseServer: {serverUrl: "http://foo.bar"},
        repoIds: [exampleRepoId],
      });
      const pluginGraphs = ({
        graphs: [mockGraphs.github, mockGraphs.discourse],
        cachedProject: {project, cache},
      }: any);

      // When
      const graph = await PluginLoaders.contractPluginGraphs(
        loaders,
        pluginGraphs
      );

      // Then
      const {identity} = loaders;
      const expectedGraph = WeightedGraph.merge([
        mockGraphs.github,
        mockGraphs.discourse,
      ]);
      // The merged graph is fed to contractIdentities, whose (mocked) output
      // is what contractPluginGraphs returns.
      expect(graph).toEqual(mockGraphs.contracted);
      expect(identity.contractIdentities).toBeCalledTimes(1);
      expect(identity.contractIdentities).toBeCalledWith(expectedGraph, {
        identities: project.identities,
        discourseServerUrl: (project.discourseServer: any).serverUrl,
      });
    });
  });
});

View File

@ -1,16 +0,0 @@
//@flow
import {type WeightedGraph} from "../core/weightedGraph";
import {TimelineCred} from "../analysis/timeline/timelineCred";
import {type Project} from "../core/project";
import {type PluginDeclarations} from "../analysis/pluginDeclaration";
// Optional computed artifacts that may be stored alongside a project.
export type ProjectStorageExtras = {
  +weightedGraph?: WeightedGraph,
  +cred?: TimelineCred,
  +pluginDeclarations?: PluginDeclarations,
};

// Abstraction over any destination capable of persisting a project together
// with its optional extras.
export interface ProjectStorageProvider {
  storeProject(project: Project, extras: ProjectStorageExtras): Promise<void>;
}

View File

@ -1,79 +0,0 @@
// @flow
import base64url from "base64url";
import {TaskReporter} from "../../util/taskReporter";
import {type CacheProvider} from "../../backend/cache";
import {type WeightedGraph} from "../../core/weightedGraph";
import {type ReferenceDetector} from "../../core/references";
import {type PluginDeclaration} from "../../analysis/pluginDeclaration";
import {type DiscourseServer} from "./server";
import {Mirror} from "./mirror";
import {SqliteMirrorRepository} from "./mirrorRepository";
import {weightsForDeclaration} from "../../analysis/pluginDeclaration";
import {DiscourseReferenceDetector} from "./referenceDetector";
import {createGraph as _createGraph} from "./createGraph";
import {declaration} from "./declaration";
import {Fetcher} from "./fetch";
// Loader interface for the Discourse plugin: exposes the plugin declaration,
// mirror updating, reference detection, and weighted-graph creation.
export interface Loader {
  declaration(): PluginDeclaration;
  updateMirror(
    server: DiscourseServer,
    cache: CacheProvider,
    reporter: TaskReporter
  ): Promise<void>;
  referenceDetector(
    server: DiscourseServer,
    cache: CacheProvider
  ): Promise<ReferenceDetector>;
  createGraph(
    server: DiscourseServer,
    cache: CacheProvider
  ): Promise<WeightedGraph>;
}

// Default Loader implementation, backed by the module-level functions below.
export default ({
  declaration: () => declaration,
  updateMirror,
  referenceDetector,
  createGraph,
}: Loader);
/**
 * Refresh the local Discourse mirror for `server`, reporting progress
 * through `reporter`.
 */
export async function updateMirror(
  server: DiscourseServer,
  cache: CacheProvider,
  reporter: TaskReporter
): Promise<void> {
  const serverUrl = server.serverUrl;
  const fetcher = new Fetcher({serverUrl});
  const repo = await repository(cache, serverUrl);
  const mirror = new Mirror(repo, fetcher, serverUrl, server.mirrorOptions);
  await mirror.update(reporter);
}
/**
 * Construct a ReferenceDetector over the mirrored Discourse data for the
 * given server.
 */
export async function referenceDetector(
  server: DiscourseServer,
  cache: CacheProvider
): Promise<ReferenceDetector> {
  const mirrorRepo = await repository(cache, server.serverUrl);
  return new DiscourseReferenceDetector(mirrorRepo);
}
/**
 * Build the Discourse contribution graph from the mirrored data, pairing it
 * with the declaration's default weights.
 */
export async function createGraph(
  server: DiscourseServer,
  cache: CacheProvider
): Promise<WeightedGraph> {
  const {serverUrl} = server;
  const weights = weightsForDeclaration(declaration);
  const repo = await repository(cache, serverUrl);
  return {graph: _createGraph(serverUrl, repo), weights};
}
// Open (or create) the SQLite mirror repository for `serverUrl`. The cache
// database name is the base64url-encoded server URL, which keeps it
// filesystem-safe.
async function repository(
  cache: CacheProvider,
  serverUrl: string
): Promise<SqliteMirrorRepository> {
  // TODO: should replace base64url with hex, to be case insensitive.
  const db = await cache.database(base64url.encode(serverUrl));
  return new SqliteMirrorRepository(db, serverUrl);
}

View File

@ -1,56 +0,0 @@
// @flow
import {type PluginDeclaration} from "../../analysis/pluginDeclaration";
import {type WeightedGraph} from "../../core/weightedGraph";
import {TaskReporter} from "../../util/taskReporter";
import {type CacheProvider} from "../../backend/cache";
import {SqliteMirrorRepository} from "./mirrorRepository";
import {weightsForDeclaration} from "../../analysis/pluginDeclaration";
import {createGraph as _createGraph} from "./createGraph";
import {type DiscordConfig} from "./config";
import {declaration} from "./declaration";
import * as Model from "./models";
import {Fetcher} from "./fetcher";
import {Mirror} from "./mirror";
// Loader interface for the Discord plugin. Unlike Discourse/GitHub, Discord
// does not provide a reference detector.
export interface Loader {
  declaration(): PluginDeclaration;
  updateMirror: typeof updateMirror;
  createGraph: typeof createGraph;
}

// Default Loader implementation, backed by the module-level functions below.
export default ({
  declaration: () => declaration,
  updateMirror,
  createGraph,
}: Loader);
/**
 * Refresh the local Discord mirror for the configured guild, reporting
 * progress through `reporter`.
 */
export async function updateMirror(
  config: DiscordConfig,
  token: Model.BotToken,
  cache: CacheProvider,
  reporter: TaskReporter
): Promise<void> {
  const {guildId} = config;
  const fetcher = new Fetcher({token});
  const repo = await repository(cache, guildId);
  await new Mirror(repo, fetcher, guildId).update(reporter);
}
/**
 * Build the Discord contribution graph from the mirrored guild data, using
 * the declaration's default weights plus any configured reaction weights.
 */
export async function createGraph(
  config: DiscordConfig,
  cache: CacheProvider
): Promise<WeightedGraph> {
  const {guildId, reactionWeights} = config;
  const declarationWeights = weightsForDeclaration(declaration);
  const repo = await repository(cache, guildId);
  return _createGraph(guildId, repo, declarationWeights, reactionWeights);
}
// Open (or create) the SQLite mirror repository for `guild`. The cache
// database name embeds the guild snowflake directly; no encoding is applied.
// NOTE(review): the previous TODO about base64url was copy-pasted from the
// Discourse loader — this function never uses base64url.
async function repository(
  cache: CacheProvider,
  guild: Model.Snowflake
): Promise<SqliteMirrorRepository> {
  const db = await cache.database(`discord_${guild}`);
  return new SqliteMirrorRepository(db, guild);
}

View File

@ -1,98 +0,0 @@
// @flow
import {TaskReporter} from "../../util/taskReporter";
import {type CacheProvider} from "../../backend/cache";
import {type WeightedGraph} from "../../core/weightedGraph";
import {type ReferenceDetector} from "../../core/references";
import {type PluginDeclaration} from "../../analysis/pluginDeclaration";
import {type GithubToken} from "./token";
import {Graph} from "../../core/graph";
import {declaration} from "./declaration";
import {type RepoId, repoIdToString} from "./repoId";
import {createGraph as _createGraph} from "./createGraph";
import {RelationalView} from "./relationalView";
import {weightsForDeclaration} from "../../analysis/pluginDeclaration";
import {
default as fetchGithubRepo,
fetchGithubRepoFromCache,
} from "./fetchGithubRepo";
import {fromRelationalViews as referenceDetectorFromRelationalViews} from "./referenceDetector";
// Loader interface for the GitHub plugin: exposes the plugin declaration,
// mirror updating, reference detection, and weighted-graph creation over a
// set of repositories.
export interface Loader {
  declaration(): PluginDeclaration;
  updateMirror(
    repoIds: $ReadOnlyArray<RepoId>,
    token: GithubToken,
    cache: CacheProvider,
    reporter: TaskReporter
  ): Promise<void>;
  referenceDetector(
    repoIds: $ReadOnlyArray<RepoId>,
    token: GithubToken,
    cache: CacheProvider
  ): Promise<ReferenceDetector>;
  createGraph(
    repoIds: $ReadOnlyArray<RepoId>,
    token: GithubToken,
    cache: CacheProvider
  ): Promise<WeightedGraph>;
}

// Default Loader implementation, backed by the module-level functions below.
export default ({
  declaration: () => declaration,
  referenceDetector,
  updateMirror,
  createGraph,
}: Loader);
/**
 * Fetch (or refresh) the cached GitHub data for each repository in turn,
 * reporting a `github/<owner/name>` task per repo.
 */
export async function updateMirror(
  repoIds: $ReadOnlyArray<RepoId>,
  token: GithubToken,
  cache: CacheProvider,
  reporter: TaskReporter
): Promise<void> {
  // Repositories are processed strictly one at a time.
  for (let i = 0; i < repoIds.length; i++) {
    const repoId = repoIds[i];
    const task = `github/${repoIdToString(repoId)}`;
    reporter.start(task);
    await fetchGithubRepo(repoId, {token: token, cache});
    reporter.finish(task);
  }
}
/**
 * Build a ReferenceDetector over the cached data of every repository: one
 * RelationalView per repo, combined into a single detector.
 */
export async function referenceDetector(
  repoIds: $ReadOnlyArray<RepoId>,
  token: GithubToken,
  cache: CacheProvider
): Promise<ReferenceDetector> {
  const views = [];
  for (const repoId of repoIds) {
    const repoData = await fetchGithubRepoFromCache(repoId, {token, cache});
    const view = new RelationalView();
    view.addRepository(repoData);
    views.push(view);
  }
  return referenceDetectorFromRelationalViews(views);
}
/**
 * Build the GitHub contribution graph from cached repository data: one
 * sub-graph per repository, merged, and paired with the declaration's
 * default weights.
 */
export async function createGraph(
  repoIds: $ReadOnlyArray<RepoId>,
  token: GithubToken,
  cache: CacheProvider
): Promise<WeightedGraph> {
  // First pull every repository's data out of the cache (sequentially).
  const repositories = [];
  for (const repoId of repoIds) {
    const repoData = await fetchGithubRepoFromCache(repoId, {token, cache});
    repositories.push(repoData);
  }
  // Then turn each repository into its own graph and merge them all.
  const toGraph = (repoData) => {
    const view = new RelationalView();
    view.addRepository(repoData);
    return _createGraph(view);
  };
  const graph = Graph.merge(repositories.map(toGraph));
  return {graph, weights: weightsForDeclaration(declaration)};
}