cli2: add sketch of plugin loading (#1810)
Summary:
This adds a `CliPlugin` interface and a basic implementation for the
GitHub plugin. Paired with @decentralion.

Test Plan:
Create a new directory `/tmp/test-instance`, with:

```
// sourcecred.json
{"bundledPlugins": ["sourcecred/github"]}

// config/sourcecred/github/config.json
{"repositories": ["sourcecred/example-github"]}
```

Then, run

```
yarn backend &&
    (cd /tmp/test-instance && node "$OLDPWD/bin/sc2.js" load)
```

and observe that the new instance has a cache directory containing a
GitHub database.

wchargin-branch: cli2-load
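For reference, a sketch of the on-disk layout the test plan expects after a successful `load`. The plugin subdirectories follow from `makePluginDir` in this commit, but the exact name of the cache database file depends on the cache id chosen by `fetchGithubRepo`, so `<id>.db` below is a placeholder:

```
/tmp/test-instance/
├── sourcecred.json
├── config/
│   └── sourcecred/github/config.json
└── cache/
    └── sourcecred/github/<id>.db    # created by `sc2.js load`
```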
parent 0f6a765569
commit 80c3c38282
@@ -0,0 +1,12 @@
```js
// @flow

import type {CliPlugin} from "./cliPlugin";
import {GithubCliPlugin} from "../plugins/github/cliPlugin";

/**
 * Returns an object mapping owner-name pairs to CLI plugin
 * instances; keys are like `sourcecred/github`.
 */
export function bundledPlugins(): {[pluginId: string]: CliPlugin} {
  return {"sourcecred/github": new GithubCliPlugin()};
}
```
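As a usage sketch (not part of the diff), a consumer looks a bundled plugin up by its `owner/name` id; the only assumption beyond this commit is that `PluginDeclaration` exposes a `name` field:

```js
import {bundledPlugins} from "./bundledPlugins";

const plugins = bundledPlugins();
const github = plugins["sourcecred/github"];
if (github == null) {
  throw new Error("no such bundled plugin");
}
// `github` is a CliPlugin; e.g., inspect its declaration:
console.log(github.declaration().name);
```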
@@ -0,0 +1,17 @@
```js
// @flow

import type {PluginDeclaration} from "../analysis/pluginDeclaration";
import type {WeightedGraph} from "../core/weightedGraph";
import type {ReferenceDetector} from "../core/references/referenceDetector";

export interface CliPlugin {
  declaration(): PluginDeclaration;
  load(PluginDirectoryContext): Promise<void>;
  graph(PluginDirectoryContext, ReferenceDetector): Promise<WeightedGraph>;
  referenceDetector(PluginDirectoryContext): Promise<ReferenceDetector>;
}

export interface PluginDirectoryContext {
  configDirectory(): string;
  cacheDirectory(): string;
}
```
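To make the intended contract concrete, here is a sketch (not part of the diff) of how a caller would drive a plugin through its lifecycle. The reference detector passed to `graph` is unused by the GitHub implementation for now (see #1808), but the sequencing below is the shape the interface implies:

```js
import type {CliPlugin, PluginDirectoryContext} from "./cliPlugin";
import type {WeightedGraph} from "../core/weightedGraph";

async function runPlugin(
  plugin: CliPlugin,
  ctx: PluginDirectoryContext
): Promise<WeightedGraph> {
  // Fetch remote data into ctx.cacheDirectory().
  await plugin.load(ctx);
  // Build a reference detector from the cached data...
  const rd = await plugin.referenceDetector(ctx);
  // ...and then a weighted graph (the detector is plumbed through even
  // though the GitHub plugin ignores it for the moment).
  return plugin.graph(ctx, rd);
}
```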
@@ -0,0 +1,61 @@
```js
// @flow

import {join as pathJoin} from "path";
import fs from "fs-extra";

import type {PluginDirectoryContext} from "./cliPlugin";
import {parse as parseConfig, type InstanceConfig} from "./instanceConfig";

export async function loadInstanceConfig(
  baseDir: string
): Promise<InstanceConfig> {
  const projectFilePath = pathJoin(baseDir, "sourcecred.json");
  const contents = await fs.readFile(projectFilePath);
  return Promise.resolve(parseConfig(JSON.parse(contents)));
}

// Make a directory, if it doesn't exist.
function mkdirx(path: string) {
  try {
    fs.mkdirSync(path);
  } catch (e) {
    if (e.code !== "EEXIST") {
      throw e;
    }
  }
}

export function makePluginDir(
  baseDir: string,
  prefix: $ReadOnlyArray<string>,
  pluginId: string
): string {
  const idParts = pluginId.split("/");
  if (idParts.length !== 2) {
    throw new Error(`Bad plugin name: ${pluginId}`);
  }
  const [pluginOwner, pluginName] = idParts;
  const pathComponents = [...prefix, pluginOwner, pluginName];
  let path = baseDir;
  for (const pc of pathComponents) {
    path = pathJoin(path, pc);
    mkdirx(path);
  }
  return path;
}

export function pluginDirectoryContext(
  baseDir: string,
  pluginName: string
): PluginDirectoryContext {
  const cacheDir = makePluginDir(baseDir, ["cache"], pluginName);
  const configDir = makePluginDir(baseDir, ["config"], pluginName);
  return {
    configDirectory() {
      return configDir;
    },
    cacheDirectory() {
      return cacheDir;
    },
  };
}
```
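A quick sketch of what these helpers produce for the test-plan instance (the paths follow directly from the code above; directories are created as needed):

```js
import {makePluginDir, pluginDirectoryContext} from "./common";

const base = "/tmp/test-instance";

makePluginDir(base, ["cache"], "sourcecred/github");
// => "/tmp/test-instance/cache/sourcecred/github"

const ctx = pluginDirectoryContext(base, "sourcecred/github");
ctx.configDirectory(); // => "/tmp/test-instance/config/sourcecred/github"
ctx.cacheDirectory();  // => "/tmp/test-instance/cache/sourcecred/github"

// A plugin id that is not exactly "owner/name" throws:
// makePluginDir(base, ["cache"], "github");  // Error: Bad plugin name: github
```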
@@ -0,0 +1,55 @@
```js
// @flow

import type {CliPlugin} from "./cliPlugin";
import {bundledPlugins as getAllBundledPlugins} from "./bundledPlugins";

type PluginName = string;

export type InstanceConfig = {|
  +bundledPlugins: Map<PluginName, CliPlugin>,
|};

export type RawInstanceConfig = {|
  +bundledPlugins: $ReadOnlyArray<BundledPluginSpec>,
|};

// Plugin identifier, like `sourcecred/identity`. Version number is
// implicit from the SourceCred version. This is a stopgap until we have
// a plugin system that admits external, dynamically loaded
// dependencies.
export type BundledPluginSpec = string;

type JsonObject =
  | string
  | number
  | boolean
  | null
  | JsonObject[]
  | {[string]: JsonObject};

export function parse(raw: JsonObject): InstanceConfig {
  if (raw == null || typeof raw !== "object" || Array.isArray(raw)) {
    throw new Error("bad config: " + JSON.stringify(raw));
  }
  const {bundledPlugins: rawBundledPlugins} = raw;
  if (!Array.isArray(rawBundledPlugins)) {
    console.warn(JSON.stringify(raw));
    throw new Error(
      "bad bundled plugins: " + JSON.stringify(rawBundledPlugins)
    );
  }
  const allBundledPlugins = getAllBundledPlugins();
  const bundledPlugins = new Map();
  for (const name of rawBundledPlugins) {
    if (typeof name !== "string") {
      throw new Error("bad bundled plugin: " + JSON.stringify(name));
    }
    const plugin = allBundledPlugins[name];
    if (plugin == null) {
      throw new Error("bad bundled plugin: " + JSON.stringify(name));
    }
    bundledPlugins.set(name, plugin);
  }
  const result = {bundledPlugins};
  return result;
}
```
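For example, parsing the `sourcecred.json` from the test plan yields a one-entry map (a sketch; the value is the `GithubCliPlugin` registered in `bundledPlugins.js`):

```js
import {parse} from "./instanceConfig";

const config = parse({bundledPlugins: ["sourcecred/github"]});
config.bundledPlugins.size; // 1
config.bundledPlugins.get("sourcecred/github"); // the bundled GithubCliPlugin

// Unknown plugin ids are rejected:
// parse({bundledPlugins: ["sourcecred/nope"]});
// => Error: bad bundled plugin: "sourcecred/nope"
```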
@@ -0,0 +1,24 @@
```js
// @flow

import type {Command} from "./command";
import {loadInstanceConfig, pluginDirectoryContext} from "./common";

function die(std, message) {
  std.err("fatal: " + message);
  return 1;
}

const loadCommand: Command = async (args, std) => {
  if (args.length !== 0) {
    return die(std, "usage: sourcecred load");
  }
  const baseDir = process.cwd();
  const config = await loadInstanceConfig(baseDir);
  for (const [name, plugin] of config.bundledPlugins) {
    const dirContext = pluginDirectoryContext(baseDir, name);
    await plugin.load(dirContext);
  }
  return 0;
};

export default loadCommand;
```
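A sketch of driving the command programmatically. The shape of `std` here is an assumption (only `err` is exercised by this command); in practice the command is reached through the `sc2.js` entry point shown in the test plan:

```js
import loadCommand from "./load";

// Hypothetical stdio shim; the real one comes from "./command".
const std = {
  out: (line) => console.log(line),
  err: (line) => console.error(line),
};

// The command reads sourcecred.json from process.cwd().
process.chdir("/tmp/test-instance");
loadCommand([], std).then((exitCode) => {
  process.exitCode = exitCode;
});
```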
@@ -2,9 +2,20 @@
```diff
 
 import type {Command} from "./command";
 
+import load from "./load";
+
 const sourcecred: Command = async (args, std) => {
-  std.err("SourceCred CLI v2 not yet implemented");
-  return 1;
+  if (args.length === 0) {
+    std.err("fatal: specify a command");
+    return 1;
+  }
+  switch (args[0]) {
+    case "load":
+      return load(args.slice(1), std);
+    default:
+      std.err("fatal: unknown command: " + JSON.stringify(args[0]));
+      return 1;
+  }
 };
 
 export default sourcecred;
```
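The resulting dispatch behavior, sketched with the same hypothetical `std` shim as above (inside an async context):

```js
await sourcecred(["load"], std);  // delegates to the load command; 0 on success
await sourcecred([], std);        // "fatal: specify a command"; returns 1
await sourcecred(["x"], std);     // "fatal: unknown command: \"x\""; returns 1
```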
@@ -0,0 +1,108 @@
```js
// @flow

import Database from "better-sqlite3";
import fs from "fs-extra";
import {join as pathJoin} from "path";

import fetchGithubRepo, {fetchGithubRepoFromCache} from "./fetchGithubRepo";
import type {CacheProvider} from "../../backend/cache";
import type {CliPlugin, PluginDirectoryContext} from "../../cli2/cliPlugin";
import type {PluginDeclaration} from "../../analysis/pluginDeclaration";
import type {ReferenceDetector} from "../../core/references/referenceDetector";
import type {WeightedGraph} from "../../core/weightedGraph";
import {Graph} from "../../core/graph";
import {RelationalView} from "./relationalView";
import {createGraph} from "./createGraph";
import {declaration} from "./declaration";
import {fromRelationalViews as referenceDetectorFromRelationalViews} from "./referenceDetector";
import {parse as parseConfig, type GithubConfig} from "./config";
import {validateToken, type GithubToken} from "./token";
import {weightsForDeclaration} from "../../analysis/pluginDeclaration";

const TOKEN_ENV_VAR_NAME = "SOURCECRED_GITHUB_TOKEN";

async function loadConfig(
  dirContext: PluginDirectoryContext
): Promise<GithubConfig> {
  const dirname = dirContext.configDirectory();
  const path = pathJoin(dirname, "config.json");
  const contents = await fs.readFile(path);
  return Promise.resolve(parseConfig(JSON.parse(contents)));
}

// Shim to interface with `fetchGithubRepo`; TODO: refactor that to just
// take a directory.
class CacheProviderImpl implements CacheProvider {
  _dirContext: PluginDirectoryContext;
  constructor(dirContext: PluginDirectoryContext) {
    this._dirContext = dirContext;
  }
  database(id: string): Promise<Database> {
    const path = pathJoin(this._dirContext.cacheDirectory(), `${id}.db`);
    return Promise.resolve(new Database(path));
  }
}

function getTokenFromEnv(): GithubToken {
  const rawToken = process.env[TOKEN_ENV_VAR_NAME];
  if (rawToken == null) {
    throw new Error(`No GitHub token provided: set ${TOKEN_ENV_VAR_NAME}`);
  }
  return validateToken(rawToken);
}

export class GithubCliPlugin implements CliPlugin {
  declaration(): PluginDeclaration {
    return declaration;
  }

  async load(ctx: PluginDirectoryContext): Promise<void> {
    const cache = new CacheProviderImpl(ctx);
    const token = getTokenFromEnv();
    const config = await loadConfig(ctx);
    for (const repoId of config.repoIds) {
      await fetchGithubRepo(repoId, {token, cache});
    }
  }

  async graph(
    ctx: PluginDirectoryContext,
    rd: ReferenceDetector
  ): Promise<WeightedGraph> {
    const _ = rd; // TODO(#1808): not yet used
    const cache = new CacheProviderImpl(ctx);
    const token = getTokenFromEnv();
    const config = await loadConfig(ctx);

    const repositories = [];
    for (const repoId of config.repoIds) {
      repositories.push(await fetchGithubRepoFromCache(repoId, {token, cache}));
    }
    const graph = Graph.merge(
      repositories.map((r) => {
        const rv = new RelationalView();
        rv.addRepository(r);
        return createGraph(rv);
      })
    );
    const weights = weightsForDeclaration(declaration);
    return {graph, weights};
  }

  async referenceDetector(
    ctx: PluginDirectoryContext
  ): Promise<ReferenceDetector> {
    const cache = new CacheProviderImpl(ctx);
    const token = getTokenFromEnv();
    const config = await loadConfig(ctx);

    const rvs = [];
    for (const repoId of config.repoIds) {
      const repo = await fetchGithubRepoFromCache(repoId, {token, cache});
      const rv = new RelationalView();
      rv.addRepository(repo);
      rvs.push(rv);
    }
    return referenceDetectorFromRelationalViews(rvs);
  }
}
```
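Putting the pieces together, a sketch of using the plugin directly (outside the CLI); it assumes `SOURCECRED_GITHUB_TOKEN` is set and that `/tmp/test-instance` has the config from the test plan:

```js
import {GithubCliPlugin} from "./cliPlugin";
import {pluginDirectoryContext} from "../../cli2/common";

async function main() {
  const plugin = new GithubCliPlugin();
  const ctx = pluginDirectoryContext("/tmp/test-instance", "sourcecred/github");

  // Mirrors each configured repository into a SQLite database under
  // cache/sourcecred/github/.
  await plugin.load(ctx);

  // Subsequent steps read from the cache rather than the network.
  const rd = await plugin.referenceDetector(ctx);
  return plugin.graph(ctx, rd); // a WeightedGraph: {graph, weights}
}

main();
```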
@@ -0,0 +1,38 @@
```js
// @flow

import {type RepoId, stringToRepoId} from "./repoId";

export type GithubConfig = {|
  +repoIds: $ReadOnlyArray<RepoId>,
|};

// eslint-disable-next-line no-unused-vars
type SerializedGithubConfig = {|
  +repositories: $ReadOnlyArray<string>,
|};
// (^ for documentation purposes)

type JsonObject =
  | string
  | number
  | boolean
  | null
  | JsonObject[]
  | {[string]: JsonObject};

export function parse(raw: JsonObject): GithubConfig {
  if (raw == null || typeof raw !== "object" || Array.isArray(raw)) {
    throw new Error("bad config: " + JSON.stringify(raw));
  }
  const {repositories} = raw;
  if (!Array.isArray(repositories)) {
    throw new Error("bad repositories: " + JSON.stringify(repositories));
  }
  const repoIds = repositories.map((x) => {
    if (typeof x !== "string") {
      throw new Error("bad repository: " + JSON.stringify(x));
    }
    return stringToRepoId(x);
  });
  return {repoIds};
}
```
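For the test-plan configuration, `parse` turns the serialized form into typed repo ids (a sketch; the exact `RepoId` shape comes from `./repoId`, which this diff does not show):

```js
import {parse} from "./config";

const config = parse({repositories: ["sourcecred/example-github"]});
// config.repoIds has one entry, produced by
// stringToRepoId("sourcecred/example-github"), i.e. the repository owned by
// "sourcecred" and named "example-github".

// Anything other than an array of strings is rejected:
// parse({repositories: "sourcecred/example-github"});
// => Error: bad repositories: "sourcecred/example-github"
```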