From 7711f01b844529208ea88f7af8de7ae29a478aac Mon Sep 17 00:00:00 2001
From: William Chargin
Date: Thu, 5 Apr 2018 02:19:29 -0700
Subject: [PATCH] Extract paginatable fragments of GitHub query (#120)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Summary:
Any time that we pull fields off a connection object, we may need to
repeat the query for subsequent pages. Therefore, the corresponding
query fragments will be shared across multiple queries, and also shared
within a query if we need to fetch, say, more issue comments on two or
more distinct issues. This is a perfect use case for fragments. This
commit refactors the GitHub query to be organized in terms of
fragments, without changing the format of the results. (We also take
this opportunity to factor the page limits into constants.)

Test Plan:
After running `yarn backend`, the `fetchGithubRepoTest.sh` test passes.

wchargin-branch: extract-github-query-fragments
---
 src/plugins/github/graphql.js | 150 +++++++++++++++++++++-------
 1 file changed, 92 insertions(+), 58 deletions(-)

diff --git a/src/plugins/github/graphql.js b/src/plugins/github/graphql.js
index 11a622a..4a06896 100644
--- a/src/plugins/github/graphql.js
+++ b/src/plugins/github/graphql.js
@@ -1,12 +1,27 @@
 // @flow
 
-import type {Body} from "../../graphql/queries";
+import type {Body, FragmentDefinition} from "../../graphql/queries";
 import {build} from "../../graphql/queries";
 
+/*
+ * GitHub enforces a hard limit of no more than 100 entities per page,
+ * in any single connection. GitHub also has a more global restriction
+ * on the worst-case number of nodes that could be requested by a query,
+ * which scales as the product of the page limits in any given sequence
+ * of nested connections. (For more information, see [1].) Therefore, we
+ * tune the page sizes of various entities to keep them comfortably
+ * within the global capacity.
+ *
+ * [1]: https://developer.github.com/v4/guides/resource-limitations/#node-limit
+ */
+const PAGE_SIZE_ISSUES = 100;
+const PAGE_SIZE_PRS = 100;
+const PAGE_SIZE_COMMENTS = 20;
+const PAGE_SIZE_REVIEWS = 10;
+const PAGE_SIZE_REVIEW_COMMENTS = 10;
+
 export function createQuery(): Body {
   const b = build;
-  const makePageInfo = () => b.field("pageInfo", {}, [b.field("hasNextPage")]);
-  const makeAuthor = () => b.field("author", {}, [b.fragmentSpread("whoami")]);
   const body: Body = [
     b.query(
       "FetchData",
@@ -16,66 +31,26 @@
         "repository",
         {owner: b.variable("repoOwner"), name: b.variable("repoName")},
         [
-          b.field("issues", {first: b.literal(100)}, [
-            makePageInfo(),
-            b.field("nodes", {}, [
-              b.field("id"),
-              b.field("title"),
-              b.field("body"),
-              b.field("number"),
-              makeAuthor(),
-              b.field("comments", {first: b.literal(20)}, [
-                makePageInfo(),
-                b.field("nodes", {}, [
-                  b.field("id"),
-                  makeAuthor(),
-                  b.field("body"),
-                  b.field("url"),
-                ]),
-              ]),
-            ]),
+          b.field("issues", {first: b.literal(PAGE_SIZE_ISSUES)}, [
+            b.fragmentSpread("issues"),
           ]),
-          b.field("pullRequests", {first: b.literal(100)}, [
-            makePageInfo(),
-            b.field("nodes", {}, [
-              b.field("id"),
-              b.field("title"),
-              b.field("body"),
-              b.field("number"),
-              makeAuthor(),
-              b.field("comments", {first: b.literal(20)}, [
-                makePageInfo(),
-                b.field("nodes", {}, [
-                  b.field("id"),
-                  makeAuthor(),
-                  b.field("body"),
-                  b.field("url"),
-                ]),
-              ]),
-              b.field("reviews", {first: b.literal(10)}, [
-                makePageInfo(),
-                b.field("nodes", {}, [
-                  b.field("id"),
-                  b.field("body"),
-                  makeAuthor(),
-                  b.field("state"),
-                  b.field("comments", {first: b.literal(10)}, [
-                    makePageInfo(),
-                    b.field("nodes", {}, [
-                      b.field("id"),
-                      b.field("body"),
-                      b.field("url"),
-                      makeAuthor(),
-                    ]),
-                  ]),
-                ]),
-              ]),
-            ]),
+          b.field("pullRequests", {first: b.literal(PAGE_SIZE_PRS)}, [
+            b.fragmentSpread("prs"),
           ]),
         ]
       ),
     ]
   ),
+    ...createFragments(),
+  ];
+  return body;
+}
+
+function createFragments(): FragmentDefinition[] {
+  const b = build;
+  const makePageInfo = () => b.field("pageInfo", {}, [b.field("hasNextPage")]);
+  const makeAuthor = () => b.field("author", {}, [b.fragmentSpread("whoami")]);
+  return [
     b.fragment("whoami", "Actor", [
       b.field("__typename"),
       b.field("login"),
@@ -83,8 +58,67 @@
       b.inlineFragment("Organization", [b.field("id")]),
       b.inlineFragment("Bot", [b.field("id")]),
     ]),
+    b.fragment("issues", "IssueConnection", [
+      makePageInfo(),
+      b.field("nodes", {}, [
+        b.field("id"),
+        b.field("title"),
+        b.field("body"),
+        b.field("number"),
+        makeAuthor(),
+        b.field("comments", {first: b.literal(PAGE_SIZE_COMMENTS)}, [
+          b.fragmentSpread("comments"),
+        ]),
+      ]),
+    ]),
+    b.fragment("prs", "PullRequestConnection", [
+      makePageInfo(),
+      b.field("nodes", {}, [
+        b.field("id"),
+        b.field("title"),
+        b.field("body"),
+        b.field("number"),
+        makeAuthor(),
+        b.field("comments", {first: b.literal(PAGE_SIZE_COMMENTS)}, [
+          b.fragmentSpread("comments"),
+        ]),
+        b.field("reviews", {first: b.literal(PAGE_SIZE_REVIEWS)}, [
+          b.fragmentSpread("reviews"),
+        ]),
+      ]),
+    ]),
+    // (Note: issue comments and PR comments use the same connection type.)
+    b.fragment("comments", "IssueCommentConnection", [
+      makePageInfo(),
+      b.field("nodes", {}, [
+        b.field("id"),
+        makeAuthor(),
+        b.field("body"),
+        b.field("url"),
+      ]),
+    ]),
+    b.fragment("reviews", "PullRequestReviewConnection", [
+      makePageInfo(),
+      b.field("nodes", {}, [
+        b.field("id"),
+        b.field("body"),
+        makeAuthor(),
+        b.field("state"),
+        b.field("comments", {first: b.literal(PAGE_SIZE_REVIEW_COMMENTS)}, [
+          b.fragmentSpread("reviewComments"),
+        ]),
+      ]),
+    ]),
+    b.fragment("reviewComments", "PullRequestReviewCommentConnection", [
+      makePageInfo(),
+      b.field("nodes", {}, [
+        b.field("id"),
+        b.field("body"),
+        b.field("url"),
+        makeAuthor(),
+      ]),
+    ]),
   ];
-  return body;
 }
 
 export function createVariables(repoOwner: string, repoName: string) {
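
Note on the page-size constants: as a rough, editorial sanity check
(the precise accounting in [1] is more detailed), the worst-case node
count implied by these limits follows the product rule for nested
connections:

    // Sketch only, not part of the patch: estimate the worst-case
    // number of nodes the query above can request, using the patch's
    // page-size constants.
    const issueNodes = PAGE_SIZE_ISSUES * (1 + PAGE_SIZE_COMMENTS);
    // 100 * (1 + 20) = 2,100
    const prNodes =
      PAGE_SIZE_PRS *
      (1 +
        PAGE_SIZE_COMMENTS +
        PAGE_SIZE_REVIEWS * (1 + PAGE_SIZE_REVIEW_COMMENTS));
    // 100 * (1 + 20 + 10 * (1 + 10)) = 13,100
    const worstCaseNodes = issueNodes + prNodes;
    // 2,100 + 13,100 = 15,200 nodes, comfortably below the
    // 500,000-node cap documented in [1].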
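
The payoff of the extracted fragments comes when paginating. Below is a
hypothetical sketch (not part of this patch) of a continuation query
that reuses the "comments" fragment to fetch the next page of comments
on a single issue. It assumes a `b.param` helper matching the
repoOwner/repoName parameters of `createQuery`, that FragmentDefinition
exposes a `name` field, and that pageInfo is eventually extended with
an endCursor (this patch requests only hasNextPage, so no cursor is
available yet):

    // Sketch only: a continuation query for one issue's comments.
    function createCommentsContinuation(): Body {
      const b = build;
      return [
        b.query(
          "MoreComments",
          // Hypothetical parameters: the issue being paginated and the
          // cursor returned by the previous page.
          [b.param("issueId", "ID!"), b.param("cursor", "String!")],
          [
            b.field("node", {id: b.variable("issueId")}, [
              b.inlineFragment("Issue", [
                b.field(
                  "comments",
                  {
                    first: b.literal(PAGE_SIZE_COMMENTS),
                    after: b.variable("cursor"),
                  },
                  // The same selections as the top-level query, so the
                  // next page parses identically.
                  [b.fragmentSpread("comments")]
                ),
              ]),
            ]),
          ]
        ),
        // GraphQL rejects unused fragment definitions, so include only
        // the fragments this query actually reaches ("comments" spreads
        // "whoami" for authors).
        ...createFragments().filter(
          (fd) => fd.name === "comments" || fd.name === "whoami"
        ),
      ];
    }

Because the fragment is shared, any later change to the comment
selections applies to the initial query and to every continuation query
alike.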