Create pagination continuations for GitHub query (#121)
Summary:
Per #117, this is a first step toward writing a pagination API that
specifically targets our current GitHub query. For design details, see
the new module docs in `src/plugins/github/graphql.js`.

This commit modifies the core GitHub query and thus the
`example-repo.json` snapshot: we now request `endCursor` fields for all
pagination info, and we request the `id` of the root `repository`
field. The former is obviously necessary. The latter is necessary for
the repository to be consistent with other nodes that offer connections
as fields: we require an ID on the node containing the connection so
that we can have random access to it in a continuation selector.

Test Plan:
Unit tests added. You can also try out the generated continuation
queries for yourself: apply the patch below, run `yarn backend`, and
then run the `fetchGithubRepo.js` script on `sourcecred/sourcecred`.
This will output a nicely formatted query that you can paste directly
into GitHub’s API explorer and execute. (Note that, because this patch
is not fully polished, the query must be run against a repository that
has a continuation for every node type: more pages of issues, PRs,
comments, reviews, and review comments. This is due to an
easy-but-annoying-to-fix bug in the patch, not in the code included in
this commit.)

<details>
<summary>Patch for generating a continuations query</summary>

```diff
diff --git a/src/plugins/github/fetchGithubRepo.js b/src/plugins/github/fetchGithubRepo.js
index 789a20e..418c736 100644
--- a/src/plugins/github/fetchGithubRepo.js
+++ b/src/plugins/github/fetchGithubRepo.js
@@ -6,8 +6,13 @@
 import fetch from "isomorphic-fetch";
 
-import {stringify, inlineLayout} from "../../graphql/queries";
-import {createQuery, createVariables} from "./graphql";
+import {stringify, inlineLayout, multilineLayout} from "../../graphql/queries";
+import {
+  continuationsFromQuery,
+  continuationQuery,
+  createQuery,
+  createVariables,
+} from "./graphql";
 
 /**
  * Scrape data from a GitHub repo using the GitHub API.
@@ -66,8 +71,13 @@ function postQuery(payload, token) {
       if (x.errors) {
         return Promise.reject(x);
       }
-      ensureNoMorePages(x);
-      return Promise.resolve(x);
+      console.log(
+        stringify.body(
+          continuationQuery(Array.from(continuationsFromQuery(x.data))),
+          multilineLayout(" ")
+        )
+      );
+      throw new Error("STOPSHIP");
     });
 }
diff --git a/src/plugins/github/graphql.js b/src/plugins/github/graphql.js
index 9ea2592..9ead42b 100644
--- a/src/plugins/github/graphql.js
+++ b/src/plugins/github/graphql.js
@@ -39,11 +39,11 @@ import {build} from "../../graphql/queries";
  *
  * [1]: https://developer.github.com/v4/guides/resource-limitations/#node-limit
  */
-export const PAGE_LIMIT = 100;
-const PAGE_SIZE_ISSUES = 100;
-const PAGE_SIZE_PRS = 100;
-const PAGE_SIZE_COMMENTS = 20;
-const PAGE_SIZE_REVIEWS = 10;
+export const PAGE_LIMIT = 10;
+const PAGE_SIZE_ISSUES = 10;
+const PAGE_SIZE_PRS = 10;
+const PAGE_SIZE_COMMENTS = 3;
+const PAGE_SIZE_REVIEWS = 1;
 const PAGE_SIZE_REVIEW_COMMENTS = 10;
 
 /**
@@ -340,6 +340,36 @@ function* continuationsFromReview(
   }
 }
 
+/**
+ * Combine continuations into a query.
+ */
+export function continuationQuery(
+  continuations: $ReadOnlyArray<Continuation>
+): Body {
+  const nonces: string[] = continuations.map((_, i) => `_n${String(i)}`);
+  const nonceToIndex = {};
+  nonces.forEach((n, i) => {
+    nonceToIndex[n] = i;
+  });
+  const b = build;
+  const query = b.query(
+    "Continuations",
+    [],
+    continuations.map((continuation, i) =>
+      b.alias(
+        nonces[i],
+        b.field(
+          "node",
+          {id: b.literal(continuation.enclosingNodeId)},
+          continuation.selections.slice()
+        )
+      )
+    )
+  );
+  const body = [query, ...createFragments()];
+  return body;
+}
+
 /**
  * These fragments are used to construct the root query, and also to
  * fetch more pages of specific entity types.
```

</details>

wchargin-branch: ad-hoc-pagination-continuations
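To make the flow described above concrete, here is a rough sketch of a driver loop built on these pieces. It is not part of this commit: `executeQuery` (POST a query body to GitHub and return the response's `data`) and `mergeResults` (splice a continuation's results back into the original results) are hypothetical placeholders, while `createQuery`, `continuationsFromQuery`, `continuationQuery`, and `continuationsFromContinuation` are the functions this commit introduces or targets.

```js
// Sketch only (not in this commit): a driver loop for the continuation flow.
// `executeQuery` and `mergeResults` are hypothetical placeholders injected by
// the caller; the imported functions live in src/plugins/github/graphql.js.
import {
  createQuery,
  continuationQuery,
  continuationsFromQuery,
  continuationsFromContinuation,
} from "./graphql";

async function fetchAllPages(executeQuery, mergeResults) {
  // Initial query; in practice this also needs createVariables(...) for
  // repoOwner/repoName.
  let results = await executeQuery(createQuery());
  let continuations = Array.from(continuationsFromQuery(results));
  while (continuations.length > 0) {
    // Embed every pending continuation into one "Continuations" query,
    // aliased as _n0, _n1, ... by continuationQuery.
    const data = await executeQuery(continuationQuery(continuations));
    const next = [];
    continuations.forEach((continuation, i) => {
      const nodeResult = data[`_n${String(i)}`];
      // Hypothetical: splice the new page into the *original* results at
      // continuation.destinationPath (see the merge sketch further down).
      mergeResults(results, continuation.destinationPath, nodeResult);
      next.push(...continuationsFromContinuation(nodeResult, continuation));
    });
    continuations = next;
  }
  return results;
}
```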
Parent: 7711f01b84
Commit: 751172ea77
Snapshot update to `example-repo.json`: the root `repository` object now records its `id` (`"MDEwOlJlcG9zaXRvcnkxMjMyNTUwMDY="`), and every `pageInfo` block gains an `endCursor` field alongside `hasNextPage` (a cursor string such as `"Y3Vyc29yOnYyOpHOFkdCkg=="` for non-empty connections, `null` for empty ones); `hasNextPage` remains `false` throughout the example data.
Changes to `src/plugins/github/graphql.js`. The regions touched by the diff are reproduced below as they read after this commit; `…` marks unchanged code elided by the diff view.

```js
// @flow

import type {Body, FragmentDefinition, Selection} from "../../graphql/queries";
import {build} from "../../graphql/queries";

/**
 * This module defines the GraphQL query that we use to access the
 * GitHub API, and defines functions to facilitate exhaustively
 * requesting all pages of results for this query.
 *
 * The key type is the `Continuation`, which represents a selection set
 * that fetches the next page of results for a particular connection.
 * The flow is as follows:
 *
 *   - A Query is executed and fetches some Results in standard form.
 *   - The Results are analyzed to form Continuations.
 *   - These continuations are embedded into a new Query.
 *
 * This process repeats, and each time that Results are fetched, they
 * are merged into the previous Results so that the Results get
 * progressively more complete. The process terminates when the second
 * step does not yield any more Continuations.
 *
 * Of particular import is the function `continuationsFromContinuation`;
 * see more docs on that function.
 */

/*
 * GitHub enforces a hard limit of no more than 100 entities per page,
 * in any single connection. GitHub also has a more global restriction
 * …
 * tune the page sizes of various entities to keep them comfortably
 * within the global capacity.
 *
 * We use the `PAGE_LIMIT` field for the top-level page size in
 * continuations.
 *
 * [1]: https://developer.github.com/v4/guides/resource-limitations/#node-limit
 */
export const PAGE_LIMIT = 100;
const PAGE_SIZE_ISSUES = 100;
const PAGE_SIZE_PRS = 100;
const PAGE_SIZE_COMMENTS = 20;
const PAGE_SIZE_REVIEWS = 10;
const PAGE_SIZE_REVIEW_COMMENTS = 10;

/**
 * What's in a continuation? If we want to fetch more comments for the
 * 22nd issue in the results list, we fire off the following query:
 *
 *     _n0: node(id: "<opaque-id-for-issue>") {
 *       ... on Issue {
 *         comments(first: PAGE_LIMIT, after: "<some-cursor>") {
 *           ...comments
 *         }
 *       }
 *
 * This would be represented as:
 *
 *     {
 *       enclosingNodeType: "ISSUE",
 *       enclosingNodeId: "<opaque-id-for-issue>",
 *       selections: [b.inlineFragment("Issue", ...)],
 *       destinationPath: ["repository", "issues", 21],
 *     }
 *
 * The `enclosingNodeId` and `selections` are used to construct the
 * query. The `destinationPath` is used to merge the continued results
 * back into the original results. The `enclosingNodeType` is required
 * so that we know how to check for further continuations on the result.
 * See function `continuationsFromContinuation` for more details on the
 * last one.
 *
 * The nonce (`_n0`) is deliberately not included in the continuation
 * type, because the nonce is a property of a particular embedding of
 * the continuation into a query, and not of the continuation itself.
 */
export type Continuation = {|
  +enclosingNodeType:
    | "REPOSITORY"
    | "ISSUE"
    | "PULL_REQUEST"
    | "PULL_REQUEST_REVIEW",
  +enclosingNodeId: string,
  +selections: $ReadOnlyArray<Selection>,
  +destinationPath: $ReadOnlyArray<string | number>,
|};

/**
 * The top-level GitHub query to request data about a repository.
 * Callers will also be interested in `createVariables`.
 */
export function createQuery(): Body {
  const b = build;
  const body: Body = [
    // …
        "repository",
        {owner: b.variable("repoOwner"), name: b.variable("repoName")},
        [
          b.field("id"),
          b.field("issues", {first: b.literal(PAGE_SIZE_ISSUES)}, [
            b.fragmentSpread("issues"),
          ]),
          // …
  return body;
}

/**
 * Find continuations for the top-level result ("data" field) of a
 * query.
 */
export function continuationsFromQuery(result: any): Iterator<Continuation> {
  return continuationsFromRepository(result.repository, result.repository.id, [
    "repository",
  ]);
}

/**
 * Find continuations for a result of a query that was itself generated
 * from a continuation. If an original query Q1 returns results R1 that
 * yield continuations C1, and the query Q2 is an embedding of
 * continuations C1 and returns results R2, then this function, when
 * called with (R2, C1), generates the continuations C2 that should be
 * used to continue the chain.
 *
 * Note that these continuations' results should be merged into the
 * _original_ data structure, not subsequent results. Continuing with
 * the above terminology: results R2 should be merged into R1 to form
 * R2', and then continuations C2 should be embedded into a query Q3
 * whose results R3 should be merged into R2' (as opposed to being
 * merged into R2, and then this result being merged into R1). This is
 * somewhat less efficient in terms of client-side CPU usage, but is
 * also somewhat easier to implement.
 *
 * This function is a critical piece of plumbing: it enables us to
 * iterate through pages, using a continuation to fetch a further
 * continuation on the same entity. The fact that this function is
 * implementable is an indication that the `Continuation` type is
 * defined appropriately. This is non-trivial, as there are a lot of
 * choices as to where the boundaries should be. (For instance, should
 * we include the type of the node that we want to fetch more of, or the
 * type of the enclosing node? What sort of path information should we
 * retain?)
 */
export function continuationsFromContinuation(
  result: any,
  source: Continuation
): Iterator<Continuation> {
  const continuationsFromEnclosingType = {
    REPOSITORY: continuationsFromRepository,
    ISSUE: continuationsFromIssue,
    PULL_REQUEST: continuationsFromPR,
    PULL_REQUEST_REVIEW: continuationsFromReview,
  }[source.enclosingNodeType];
  return continuationsFromEnclosingType(
    result,
    source.enclosingNodeId,
    source.destinationPath
  );
}

function* continuationsFromRepository(
  result: any,
  nodeId: string,
  path: $ReadOnlyArray<string | number>
): Iterator<Continuation> {
  const b = build;
  if (result.issues && result.issues.pageInfo.hasNextPage) {
    yield {
      enclosingNodeType: "REPOSITORY",
      enclosingNodeId: nodeId,
      selections: [
        b.inlineFragment("Repository", [
          b.field(
            "issues",
            {
              first: b.literal(PAGE_LIMIT),
              after: b.literal(result.issues.pageInfo.endCursor),
            },
            [b.fragmentSpread("issues")]
          ),
        ]),
      ],
      destinationPath: path,
    };
  }
  if (result.pullRequests && result.pullRequests.pageInfo.hasNextPage) {
    yield {
      enclosingNodeType: "REPOSITORY",
      enclosingNodeId: nodeId,
      selections: [
        b.inlineFragment("Repository", [
          b.field(
            "pullRequests",
            {
              first: b.literal(PAGE_LIMIT),
              after: b.literal(result.pullRequests.pageInfo.endCursor),
            },
            [b.fragmentSpread("prs")]
          ),
        ]),
      ],
      destinationPath: path,
    };
  }
  if (result.issues) {
    for (let i = 0; i < result.issues.nodes.length; i++) {
      const issue = result.issues.nodes[i];
      const subpath = [...path, "issues", "nodes", i];
      yield* continuationsFromIssue(issue, issue.id, subpath);
    }
  }
  if (result.pullRequests) {
    for (let i = 0; i < result.pullRequests.nodes.length; i++) {
      const pr = result.pullRequests.nodes[i];
      const subpath = [...path, "pullRequests", "nodes", i];
      yield* continuationsFromPR(pr, pr.id, subpath);
    }
  }
}

function* continuationsFromIssue(
  result: any,
  nodeId: string,
  path: $ReadOnlyArray<string | number>
): Iterator<Continuation> {
  const b = build;
  if (result.comments.pageInfo.hasNextPage) {
    yield {
      enclosingNodeType: "ISSUE",
      enclosingNodeId: nodeId,
      selections: [
        b.inlineFragment("Issue", [
          b.field(
            "comments",
            {
              first: b.literal(PAGE_LIMIT),
              after: b.literal(result.comments.pageInfo.endCursor),
            },
            [b.fragmentSpread("comments")]
          ),
        ]),
      ],
      destinationPath: path,
    };
  }
}

function* continuationsFromPR(
  result: any,
  nodeId: string,
  path: $ReadOnlyArray<string | number>
): Iterator<Continuation> {
  const b = build;
  if (result.comments && result.comments.pageInfo.hasNextPage) {
    yield {
      enclosingNodeType: "PULL_REQUEST",
      enclosingNodeId: nodeId,
      selections: [
        b.inlineFragment("PullRequest", [
          b.field(
            "comments",
            {
              first: b.literal(PAGE_LIMIT),
              after: b.literal(result.comments.pageInfo.endCursor),
            },
            [b.fragmentSpread("comments")]
          ),
        ]),
      ],
      destinationPath: path,
    };
  }
  if (result.reviews && result.reviews.pageInfo.hasNextPage) {
    yield {
      enclosingNodeType: "PULL_REQUEST",
      enclosingNodeId: nodeId,
      selections: [
        b.inlineFragment("PullRequest", [
          b.field(
            "reviews",
            {
              first: b.literal(PAGE_LIMIT),
              after: b.literal(result.reviews.pageInfo.endCursor),
            },
            [b.fragmentSpread("reviews")]
          ),
        ]),
      ],
      destinationPath: path,
    };
  }
  if (result.reviews) {
    for (let i = 0; i < result.reviews.nodes.length; i++) {
      const issue = result.reviews.nodes[i];
      const subpath = [...path, "reviews", "nodes", i];
      yield* continuationsFromReview(issue, issue.id, subpath);
    }
  }
}

function* continuationsFromReview(
  result: any,
  nodeId: string,
  path: $ReadOnlyArray<string | number>
): Iterator<Continuation> {
  const b = build;
  if (result.comments.pageInfo.hasNextPage) {
    yield {
      enclosingNodeType: "PULL_REQUEST_REVIEW",
      enclosingNodeId: nodeId,
      selections: [
        b.inlineFragment("PullRequestReview", [
          b.field(
            "comments",
            {
              first: b.literal(PAGE_LIMIT),
              after: b.literal(result.comments.pageInfo.endCursor),
            },
            [b.fragmentSpread("reviewComments")]
          ),
        ]),
      ],
      destinationPath: path,
    };
  }
}

/**
 * These fragments are used to construct the root query, and also to
 * fetch more pages of specific entity types.
 */
function createFragments(): FragmentDefinition[] {
  const b = build;
  const makePageInfo = () =>
    b.field("pageInfo", {}, [b.field("hasNextPage"), b.field("endCursor")]);
  const makeAuthor = () => b.field("author", {}, [b.fragmentSpread("whoami")]);
  return [
    b.fragment("whoami", "Actor", [
      // …
```
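The merge step described in the module docs (splicing a continuation's results back into the original results at `destinationPath`) is deliberately left out of this commit. A minimal sketch of what the hypothetical `mergeResults` helper used in the driver sketch near the top of this page could look like, assuming each node result contains only the connection being continued:

```js
// Sketch only (not in this commit): merge one continuation's node result back
// into the original results at its destinationPath.
function mergeResults(results, destinationPath, nodeResult) {
  // e.g. destinationPath = ["repository", "issues", "nodes", 3]
  let target = results;
  for (const step of destinationPath) {
    target = target[step];
  }
  // nodeResult looks like {comments: {nodes: [...], pageInfo: {...}}}: append
  // the new page's nodes and adopt its pageInfo so that later passes see the
  // updated cursor.
  for (const connectionName of Object.keys(nodeResult)) {
    const connection = nodeResult[connectionName];
    target[connectionName].nodes.push(...connection.nodes);
    target[connectionName].pageInfo = connection.pageInfo;
  }
  return results;
}
```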
New unit tests for the continuation logic (a new 381-line test file in the diff):

```js
// @flow

import type {Continuation} from "./graphql";
import {build} from "../../graphql/queries";
import {
  PAGE_LIMIT,
  continuationsFromQuery,
  continuationsFromContinuation,
} from "./graphql";

describe("graphql", () => {
  describe("creates continuations", () => {
    const makeAuthor = (name) => ({
      __typename: "User",
      login: name,
      id: `opaque-user-${name}`,
    });
    function makeData(hasNextPageFor: {
      issues: boolean,
      prs: boolean,
      issueComments: boolean,
      prComments: boolean,
      reviews: boolean,
      reviewComments: boolean,
    }) {
      return {
        repository: {
          id: "opaque-repo",
          issues: {
            pageInfo: {
              hasNextPage: hasNextPageFor.issues,
              endCursor: "opaque-cursor-issues",
            },
            nodes: [
              {
                id: "opaque-issue1",
                title: "A pressing issue",
                body: "<button>A</button>",
                number: 1,
                author: makeAuthor("decentralion"),
                comments: {
                  pageInfo: {
                    hasNextPage: hasNextPageFor.issueComments,
                    endCursor: "opaque-cursor-issue1comments",
                  },
                  nodes: [
                    {
                      id: "opaque-issue1comment1",
                      author: makeAuthor("wchargin"),
                      body: "I wish pancakes were still in vogue.",
                      url: "opaque://issue/1/comment/1",
                    },
                  ],
                },
              },
            ],
          },
          pullRequests: {
            pageInfo: {
              hasNextPage: hasNextPageFor.prs,
              endCursor: "opaque-cursor-prs",
            },
            nodes: [
              {
                id: "opaque-pr2",
                title: "texdoc exam",
                body: "What is air?",
                number: 2,
                author: makeAuthor("wchargin"),
                comments: {
                  pageInfo: {
                    hasNextPage: hasNextPageFor.prComments,
                    endCursor: "opaque-cursor-pr2comments",
                  },
                  nodes: [
                    {
                      id: "opaque-pr2comment1",
                      author: makeAuthor("decentralion"),
                      body: "Why is there air?",
                      url: "opaque://pr/2/comment/1",
                    },
                  ],
                },
                reviews: {
                  pageInfo: {
                    hasNextPage: hasNextPageFor.reviews,
                    endCursor: "opaque-cursor-pr2reviews",
                  },
                  nodes: [
                    {
                      id: "opaque-pr2review1",
                      body: "Hmmm...",
                      author: makeAuthor("decentralion"),
                      state: "CHANGES_REQUESTED",
                      comments: {
                        pageInfo: {
                          hasNextPage: hasNextPageFor.reviewComments,
                          endCursor: "opaque-cursor-pr2review1comments",
                        },
                        nodes: [
                          {
                            id: "opaque-pr2review1comment1",
                            body: "What if there were no air?",
                            url: "opaque://pr/2/review/1/comment/1",
                            author: makeAuthor("decentralion"),
                          },
                        ],
                      },
                    },
                  ],
                },
              },
            ],
          },
        },
      };
    }
    function makeContinuations(): {[string]: Continuation} {
      const b = build;
      return {
        issues: {
          enclosingNodeType: "REPOSITORY",
          enclosingNodeId: "opaque-repo",
          selections: [
            b.inlineFragment("Repository", [
              b.field(
                "issues",
                {
                  first: b.literal(PAGE_LIMIT),
                  after: b.literal("opaque-cursor-issues"),
                },
                [b.fragmentSpread("issues")]
              ),
            ]),
          ],
          destinationPath: ["repository"],
        },
        prs: {
          enclosingNodeType: "REPOSITORY",
          enclosingNodeId: "opaque-repo",
          selections: [
            b.inlineFragment("Repository", [
              b.field(
                "pullRequests",
                {
                  first: b.literal(PAGE_LIMIT),
                  after: b.literal("opaque-cursor-prs"),
                },
                [b.fragmentSpread("prs")]
              ),
            ]),
          ],
          destinationPath: ["repository"],
        },
        issueComments: {
          enclosingNodeType: "ISSUE",
          enclosingNodeId: "opaque-issue1",
          selections: [
            b.inlineFragment("Issue", [
              b.field(
                "comments",
                {
                  first: b.literal(PAGE_LIMIT),
                  after: b.literal("opaque-cursor-issue1comments"),
                },
                [b.fragmentSpread("comments")]
              ),
            ]),
          ],
          destinationPath: ["repository", "issues", "nodes", 0],
        },
        prComments: {
          enclosingNodeType: "PULL_REQUEST",
          enclosingNodeId: "opaque-pr2",
          selections: [
            b.inlineFragment("PullRequest", [
              b.field(
                "comments",
                {
                  first: b.literal(PAGE_LIMIT),
                  after: b.literal("opaque-cursor-pr2comments"),
                },
                [b.fragmentSpread("comments")]
              ),
            ]),
          ],
          destinationPath: ["repository", "pullRequests", "nodes", 0],
        },
        reviews: {
          enclosingNodeType: "PULL_REQUEST",
          enclosingNodeId: "opaque-pr2",
          selections: [
            b.inlineFragment("PullRequest", [
              b.field(
                "reviews",
                {
                  first: b.literal(PAGE_LIMIT),
                  after: b.literal("opaque-cursor-pr2reviews"),
                },
                [b.fragmentSpread("reviews")]
              ),
            ]),
          ],
          destinationPath: ["repository", "pullRequests", "nodes", 0],
        },
        reviewComments: {
          enclosingNodeType: "PULL_REQUEST_REVIEW",
          enclosingNodeId: "opaque-pr2review1",
          selections: [
            b.inlineFragment("PullRequestReview", [
              b.field(
                "comments",
                {
                  first: b.literal(PAGE_LIMIT),
                  after: b.literal("opaque-cursor-pr2review1comments"),
                },
                [b.fragmentSpread("reviewComments")]
              ),
            ]),
          ],
          destinationPath: [
            "repository",
            "pullRequests",
            "nodes",
            0,
            "reviews",
            "nodes",
            0,
          ],
        },
      };
    }

    test("from a top-level result with lots of continuations", () => {
      const data = makeData({
        issues: true,
        prs: true,
        issueComments: true,
        prComments: true,
        reviews: true,
        reviewComments: true,
      });
      const result = Array.from(continuationsFromQuery(data));
      const expectedContinuations: Continuation[] = (() => {
        const continuations = makeContinuations();
        return [
          continuations.issues,
          continuations.prs,
          continuations.issueComments,
          continuations.prComments,
          continuations.reviews,
          continuations.reviewComments,
        ];
      })();
      expectedContinuations.forEach((x) => {
        expect(result).toContainEqual(x);
      });
      expect(result).toHaveLength(expectedContinuations.length);
    });

    test("from a top-level result with sparse continuations", () => {
      // Here, some elements have continuations, but are children of
      // elements without continuations. This tests that we always recur
      // through the whole structure.
      const data = makeData({
        issues: true,
        prs: false,
        issueComments: false,
        prComments: true,
        reviews: false,
        reviewComments: true,
      });
      const result = Array.from(continuationsFromQuery(data));
      const expectedContinuations: Continuation[] = (() => {
        const continuations = makeContinuations();
        return [
          continuations.issues,
          continuations.prComments,
          continuations.reviewComments,
        ];
      })();
      expectedContinuations.forEach((x) => {
        expect(result).toContainEqual(x);
      });
      expect(result).toHaveLength(expectedContinuations.length);
    });

    describe("from another continuation", () => {
      function makeContinuationResult(hasNextPages: boolean) {
        return {
          issues: {
            pageInfo: {
              hasNextPage: hasNextPages,
              endCursor: "opaque-cursor-moreissues",
            },
            nodes: [
              {
                id: "opaque-issue3",
                title: "todo",
                body: "it means everything",
                number: 3,
                author: makeAuthor("wchargin"),
                comments: {
                  pageInfo: {
                    hasNextPage: hasNextPages,
                    endCursor: "opaque-cursor-issue3comments",
                  },
                  nodes: [
                    {
                      id: "opaque-issue3comment1",
                      author: makeAuthor("decentralion"),
                      body:
                        "if it means everything, does it really mean anything?",
                      url: "opaque://issue/3/comment/1",
                    },
                  ],
                },
              },
            ],
          },
        };
      }
      test("when there are more pages at multiple levels of nesting", () => {
        const continuation = makeContinuations().issues;
        const continuationResult = makeContinuationResult(true);
        const result = Array.from(
          continuationsFromContinuation(continuationResult, continuation)
        );
        const b = build;
        const expectedContinuations = [
          {
            enclosingNodeType: "REPOSITORY",
            enclosingNodeId: "opaque-repo",
            selections: [
              b.inlineFragment("Repository", [
                b.field(
                  "issues",
                  {
                    first: b.literal(PAGE_LIMIT),
                    after: b.literal("opaque-cursor-moreissues"),
                  },
                  [b.fragmentSpread("issues")]
                ),
              ]),
            ],
            destinationPath: ["repository"],
          },
          {
            enclosingNodeType: "ISSUE",
            enclosingNodeId: "opaque-issue3",
            selections: [
              b.inlineFragment("Issue", [
                b.field(
                  "comments",
                  {
                    first: b.literal(PAGE_LIMIT),
                    after: b.literal("opaque-cursor-issue3comments"),
                  },
                  [b.fragmentSpread("comments")]
                ),
              ]),
            ],
            destinationPath: ["repository", "issues", "nodes", 0],
          },
        ];
        expectedContinuations.forEach((x) => {
          expect(result).toContainEqual(x);
        });
        expect(result).toHaveLength(expectedContinuations.length);
      });
      test("when there are no more pages", () => {
        const continuation = makeContinuations().issues;
        const continuationResult = makeContinuationResult(false);
        const result = Array.from(
          continuationsFromContinuation(continuationResult, continuation)
        );
        expect(result).toHaveLength(0);
      });
    });
  });
});
```