github: remove legacy continuations code (#964)

Summary:
It is time. (Replaced with #622.)

Test Plan:
Running `yarn flow` suffices. Running `yarn test --full` also passes.

wchargin-branch: remove-legacy-graphql

This commit is contained in: commit 6b789d61d6 (parent 2e0b17cef7)
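Editor's note: the deleted code below implemented manual pagination over GitHub's GraphQL API. It issued an initial query, scanned the response for connections whose pageInfo reported hasNextPage, turned each of those into a follow-up "continuation" query resuming at endCursor, and merged every new page back into the accumulated result until nothing was left to fetch (see postQueryExhaustive, continuationsFromQuery, continuationsFromContinuation, and merge in the diff). What follows is only an editorial sketch of that general cursor-following pattern, not the removed implementation; the function and parameter names are illustrative.

async function fetchAllPages(fetchPage) {
  // `fetchPage(cursor)` is assumed to resolve to one connection page of the
  // shape {nodes: [...], pageInfo: {hasNextPage: boolean, endCursor: string | null}}.
  const nodes = [];
  let cursor = null;
  for (;;) {
    const page = await fetchPage(cursor);
    nodes.push(...page.nodes); // analogous to `merge` concatenating node arrays
    if (!page.pageInfo.hasNextPage) {
      return nodes; // no continuation left to follow
    }
    cursor = page.pageInfo.endCursor; // the continuation: resume after this cursor
  }
}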
@@ -1,408 +0,0 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`plugins/github/graphql #postQueryExhaustive resolves a representative query 1`] = `
|
||||
Object {
|
||||
"repository": Object {
|
||||
"id": "opaque-repo",
|
||||
"issues": Object {
|
||||
"nodes": Array [
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-decentralion",
|
||||
"login": "decentralion",
|
||||
},
|
||||
"body": "Like it says, please comment!",
|
||||
"comments": Object {
|
||||
"nodes": Array [
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-decentralion",
|
||||
"login": "decentralion",
|
||||
},
|
||||
"body": "Here: I'll start.",
|
||||
"id": "opaque-issue1comment1",
|
||||
"url": "opaque://issue/1/comment/1",
|
||||
},
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-wchargin",
|
||||
"login": "wchargin",
|
||||
},
|
||||
"body": "Closing due to no fun allowed.",
|
||||
"id": "opaque-issue1comment2",
|
||||
"url": "opaque://issue/1/comment/2",
|
||||
},
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-decentralion",
|
||||
"login": "decentralion",
|
||||
},
|
||||
"body": "That is not very nice.",
|
||||
"id": "opaque-issue1comment3",
|
||||
"url": "opaque://issue/1/comment/3",
|
||||
},
|
||||
],
|
||||
"pageInfo": Object {
|
||||
"endCursor": "opaque-cursor-issue1comments-v2",
|
||||
"hasNextPage": false,
|
||||
},
|
||||
},
|
||||
"id": "opaque-issue1",
|
||||
"number": 1,
|
||||
"title": "Request for comments",
|
||||
},
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-wchargin",
|
||||
"login": "wchargin",
|
||||
},
|
||||
"body": "You can comment here, too.",
|
||||
"comments": Object {
|
||||
"nodes": Array [
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-decentralion",
|
||||
"login": "decentralion",
|
||||
},
|
||||
"body": "What fun!",
|
||||
"id": "opaque-issue3comment1",
|
||||
"url": "opaque://issue/3/comment/1",
|
||||
},
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-decentralion",
|
||||
"login": "decentralion",
|
||||
},
|
||||
"body": "I will comment on this issue for a second time.",
|
||||
"id": "opaque-issue3comment2",
|
||||
"url": "opaque://issue/1/comment/3",
|
||||
},
|
||||
],
|
||||
"pageInfo": Object {
|
||||
"endCursor": "opaque-cursor-issue3comments-v2",
|
||||
"hasNextPage": false,
|
||||
},
|
||||
},
|
||||
"id": "opaque-issue3",
|
||||
"number": 2,
|
||||
"title": "Another",
|
||||
},
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-wchargin",
|
||||
"login": "wchargin",
|
||||
},
|
||||
"body": "My mailbox is out of space",
|
||||
"comments": Object {
|
||||
"nodes": Array [
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-decentralion",
|
||||
"login": "decentralion",
|
||||
},
|
||||
"body": "But you posted the last issue",
|
||||
"id": "opaque-issue4comment1",
|
||||
"url": "opaque://issue/4/comment/1",
|
||||
},
|
||||
],
|
||||
"pageInfo": Object {
|
||||
"endCursor": "opaque-cursor-issue4comments-v2",
|
||||
"hasNextPage": false,
|
||||
},
|
||||
},
|
||||
"id": "opaque-issue4",
|
||||
"number": 4,
|
||||
"title": "Please stop making issues",
|
||||
},
|
||||
],
|
||||
"pageInfo": Object {
|
||||
"endCursor": "opaque-cursor-issues-v2",
|
||||
"hasNextPage": false,
|
||||
},
|
||||
},
|
||||
"pulls": Object {
|
||||
"nodes": Array [
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-wchargin",
|
||||
"login": "wchargin",
|
||||
},
|
||||
"body": "Surely this deserves much cred.",
|
||||
"comments": Object {
|
||||
"nodes": Array [],
|
||||
"pageInfo": Object {
|
||||
"endCursor": null,
|
||||
"hasNextPage": false,
|
||||
},
|
||||
},
|
||||
"id": "opaque-pull2",
|
||||
"number": 2,
|
||||
"reviews": Object {
|
||||
"nodes": Array [
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-decentralion",
|
||||
"login": "decentralion",
|
||||
},
|
||||
"body": "You actually introduced a new typo instead.",
|
||||
"comments": Object {
|
||||
"nodes": Array [],
|
||||
"pageInfo": Object {
|
||||
"endCursor": null,
|
||||
"hasNextPage": false,
|
||||
},
|
||||
},
|
||||
"id": "opaque-pull2review1",
|
||||
"state": "CHANGES_REQUESTED",
|
||||
},
|
||||
Object {
|
||||
"author": Object {
|
||||
"__typename": "User",
|
||||
"id": "opaque-user-decentralion",
|
||||
"login": "decentralion",
|
||||
},
|
||||
"body": "Looks godo to me.",
|
||||
"comments": Object {
|
||||
"nodes": Array [],
|
||||
"pageInfo": Object {
|
||||
"endCursor": null,
|
||||
"hasNextPage": false,
|
||||
},
|
||||
},
|
||||
"id": "opaque-pull2review2",
|
||||
"state": "APPROVED",
|
||||
},
|
||||
],
|
||||
"pageInfo": Object {
|
||||
"endCursor": "opaque-cursor-pull2reviews-v1",
|
||||
"hasNextPage": false,
|
||||
},
|
||||
},
|
||||
"title": "Fix typo in README",
|
||||
},
|
||||
],
|
||||
"pageInfo": Object {
|
||||
"endCursor": "opaque-cursor-pulls-v0",
|
||||
"hasNextPage": false,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`plugins/github/graphql creates a query 1`] = `
|
||||
"query FetchData($owner: String! $name: String!) {
|
||||
repository(owner: $owner name: $name) {
|
||||
url
|
||||
name
|
||||
owner {
|
||||
...whoami
|
||||
}
|
||||
id
|
||||
issues(first: 50) {
|
||||
...issues
|
||||
}
|
||||
pulls: pullRequests(first: 50) {
|
||||
...pulls
|
||||
}
|
||||
defaultBranchRef {
|
||||
id
|
||||
target {
|
||||
__typename
|
||||
... on Commit {
|
||||
history(first: 100) {
|
||||
...commitHistory
|
||||
}
|
||||
}
|
||||
... on Blob {
|
||||
id
|
||||
oid
|
||||
}
|
||||
... on Tag {
|
||||
id
|
||||
oid
|
||||
}
|
||||
... on Tree {
|
||||
id
|
||||
oid
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
fragment whoami on Actor {
|
||||
__typename
|
||||
login
|
||||
url
|
||||
... on User {
|
||||
id
|
||||
}
|
||||
... on Organization {
|
||||
id
|
||||
}
|
||||
... on Bot {
|
||||
id
|
||||
}
|
||||
}
|
||||
fragment issues on IssueConnection {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
id
|
||||
url
|
||||
title
|
||||
body
|
||||
number
|
||||
author {
|
||||
...whoami
|
||||
}
|
||||
comments(first: 20) {
|
||||
...comments
|
||||
}
|
||||
reactions(first: 5) {
|
||||
...reactions
|
||||
}
|
||||
}
|
||||
}
|
||||
fragment pulls on PullRequestConnection {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
id
|
||||
url
|
||||
title
|
||||
body
|
||||
number
|
||||
mergeCommit {
|
||||
...commit
|
||||
}
|
||||
additions
|
||||
deletions
|
||||
author {
|
||||
...whoami
|
||||
}
|
||||
comments(first: 20) {
|
||||
...comments
|
||||
}
|
||||
reviews(first: 5) {
|
||||
...reviews
|
||||
}
|
||||
reactions(first: 5) {
|
||||
...reactions
|
||||
}
|
||||
}
|
||||
}
|
||||
fragment comments on IssueCommentConnection {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
id
|
||||
url
|
||||
author {
|
||||
...whoami
|
||||
}
|
||||
body
|
||||
reactions(first: 5) {
|
||||
...reactions
|
||||
}
|
||||
}
|
||||
}
|
||||
fragment reviews on PullRequestReviewConnection {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
id
|
||||
url
|
||||
body
|
||||
author {
|
||||
...whoami
|
||||
}
|
||||
state
|
||||
comments(first: 10) {
|
||||
...reviewComments
|
||||
}
|
||||
}
|
||||
}
|
||||
fragment reviewComments on PullRequestReviewCommentConnection {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
id
|
||||
url
|
||||
body
|
||||
author {
|
||||
...whoami
|
||||
}
|
||||
reactions(first: 5) {
|
||||
...reactions
|
||||
}
|
||||
}
|
||||
}
|
||||
fragment commitHistory on CommitHistoryConnection {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
...commit
|
||||
}
|
||||
}
|
||||
fragment commitParents on CommitConnection {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
oid
|
||||
}
|
||||
}
|
||||
fragment commit on Commit {
|
||||
id
|
||||
url
|
||||
oid
|
||||
message
|
||||
author {
|
||||
date
|
||||
user {
|
||||
...whoami
|
||||
}
|
||||
}
|
||||
parents(first: 5) {
|
||||
...commitParents
|
||||
}
|
||||
}
|
||||
fragment reactions on ReactionConnection {
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
id
|
||||
content
|
||||
user {
|
||||
...whoami
|
||||
}
|
||||
}
|
||||
}"
|
||||
`;
|
File diff suppressed because it is too large
|
@@ -1,964 +0,0 @@
|
|||
// @flow
|
||||
|
||||
import type {Continuation} from "./graphql";
|
||||
import {build, stringify, multilineLayout} from "../../graphql/queries";
|
||||
import {
|
||||
PAGE_LIMIT,
|
||||
createQuery,
|
||||
createVariables,
|
||||
continuationsFromQuery,
|
||||
continuationsFromContinuation,
|
||||
createFragments,
|
||||
merge,
|
||||
postQueryExhaustive,
|
||||
requiredFragments,
|
||||
} from "./graphql";
|
||||
import {makeRepoId} from "../../core/repoId";
|
||||
|
||||
describe("plugins/github/graphql", () => {
|
||||
describe("creates continuations", () => {
|
||||
const makeAuthor = (name) => ({
|
||||
__typename: "User",
|
||||
login: name,
|
||||
id: `opaque-user-${name}`,
|
||||
});
|
||||
function makeData(hasNextPageFor: {
|
||||
issues: boolean,
|
||||
pulls: boolean,
|
||||
issueComments: boolean,
|
||||
pullComments: boolean,
|
||||
reviews: boolean,
|
||||
reviewComments: boolean,
|
||||
}) {
|
||||
return {
|
||||
repository: {
|
||||
id: "opaque-repo",
|
||||
issues: {
|
||||
pageInfo: {
|
||||
hasNextPage: hasNextPageFor.issues,
|
||||
endCursor: "opaque-cursor-issues",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue1",
|
||||
title: "A pressing issue",
|
||||
body: "<button>A</button>",
|
||||
number: 1,
|
||||
author: makeAuthor("decentralion"),
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: hasNextPageFor.issueComments,
|
||||
endCursor: "opaque-cursor-issue1comments",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue1comment1",
|
||||
author: makeAuthor("wchargin"),
|
||||
body: "I wish pancakes were still in vogue.",
|
||||
url: "opaque://issue/1/comment/1",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
pulls: {
|
||||
pageInfo: {
|
||||
hasNextPage: hasNextPageFor.pulls,
|
||||
endCursor: "opaque-cursor-pulls",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-pull2",
|
||||
title: "texdoc exam",
|
||||
body: "What is air?",
|
||||
number: 2,
|
||||
author: makeAuthor("wchargin"),
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: hasNextPageFor.pullComments,
|
||||
endCursor: "opaque-cursor-pull2comments",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-pull2comment1",
|
||||
author: makeAuthor("decentralion"),
|
||||
body: "Why is there air?",
|
||||
url: "opaque://pull/2/comment/1",
|
||||
},
|
||||
],
|
||||
},
|
||||
reviews: {
|
||||
pageInfo: {
|
||||
hasNextPage: hasNextPageFor.reviews,
|
||||
endCursor: "opaque-cursor-pull2reviews",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-pull2review1",
|
||||
body: "Hmmm...",
|
||||
author: makeAuthor("decentralion"),
|
||||
state: "CHANGES_REQUESTED",
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: hasNextPageFor.reviewComments,
|
||||
endCursor: "opaque-cursor-pull2review1comments",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-pull2review1comment1",
|
||||
body: "What if there were no air?",
|
||||
url: "opaque://pull/2/review/1/comment/1",
|
||||
author: makeAuthor("decentralion"),
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
function makeContinuations(): {[string]: Continuation} {
|
||||
const b = build;
|
||||
return {
|
||||
issues: {
|
||||
enclosingNodeType: "REPOSITORY",
|
||||
enclosingNodeId: "opaque-repo",
|
||||
selections: [
|
||||
b.inlineFragment("Repository", [
|
||||
b.field(
|
||||
"issues",
|
||||
{
|
||||
first: b.literal(PAGE_LIMIT),
|
||||
after: b.literal("opaque-cursor-issues"),
|
||||
},
|
||||
[b.fragmentSpread("issues")]
|
||||
),
|
||||
]),
|
||||
],
|
||||
destinationPath: ["repository"],
|
||||
},
|
||||
pulls: {
|
||||
enclosingNodeType: "REPOSITORY",
|
||||
enclosingNodeId: "opaque-repo",
|
||||
selections: [
|
||||
b.inlineFragment("Repository", [
|
||||
b.alias(
|
||||
"pulls",
|
||||
b.field(
|
||||
"pullRequests",
|
||||
{
|
||||
first: b.literal(PAGE_LIMIT),
|
||||
after: b.literal("opaque-cursor-pulls"),
|
||||
},
|
||||
[b.fragmentSpread("pulls")]
|
||||
)
|
||||
),
|
||||
]),
|
||||
],
|
||||
destinationPath: ["repository"],
|
||||
},
|
||||
issueComments: {
|
||||
enclosingNodeType: "ISSUE",
|
||||
enclosingNodeId: "opaque-issue1",
|
||||
selections: [
|
||||
b.inlineFragment("Issue", [
|
||||
b.field(
|
||||
"comments",
|
||||
{
|
||||
first: b.literal(PAGE_LIMIT),
|
||||
after: b.literal("opaque-cursor-issue1comments"),
|
||||
},
|
||||
[b.fragmentSpread("comments")]
|
||||
),
|
||||
]),
|
||||
],
|
||||
destinationPath: ["repository", "issues", "nodes", 0],
|
||||
},
|
||||
pullComments: {
|
||||
enclosingNodeType: "PULL",
|
||||
enclosingNodeId: "opaque-pull2",
|
||||
selections: [
|
||||
b.inlineFragment("PullRequest", [
|
||||
b.field(
|
||||
"comments",
|
||||
{
|
||||
first: b.literal(PAGE_LIMIT),
|
||||
after: b.literal("opaque-cursor-pull2comments"),
|
||||
},
|
||||
[b.fragmentSpread("comments")]
|
||||
),
|
||||
]),
|
||||
],
|
||||
destinationPath: ["repository", "pulls", "nodes", 0],
|
||||
},
|
||||
reviews: {
|
||||
enclosingNodeType: "PULL",
|
||||
enclosingNodeId: "opaque-pull2",
|
||||
selections: [
|
||||
b.inlineFragment("PullRequest", [
|
||||
b.field(
|
||||
"reviews",
|
||||
{
|
||||
first: b.literal(PAGE_LIMIT),
|
||||
after: b.literal("opaque-cursor-pull2reviews"),
|
||||
},
|
||||
[b.fragmentSpread("reviews")]
|
||||
),
|
||||
]),
|
||||
],
|
||||
destinationPath: ["repository", "pulls", "nodes", 0],
|
||||
},
|
||||
reviewComments: {
|
||||
enclosingNodeType: "REVIEW",
|
||||
enclosingNodeId: "opaque-pull2review1",
|
||||
selections: [
|
||||
b.inlineFragment("PullRequestReview", [
|
||||
b.field(
|
||||
"comments",
|
||||
{
|
||||
first: b.literal(PAGE_LIMIT),
|
||||
after: b.literal("opaque-cursor-pull2review1comments"),
|
||||
},
|
||||
[b.fragmentSpread("reviewComments")]
|
||||
),
|
||||
]),
|
||||
],
|
||||
destinationPath: [
|
||||
"repository",
|
||||
"pulls",
|
||||
"nodes",
|
||||
0,
|
||||
"reviews",
|
||||
"nodes",
|
||||
0,
|
||||
],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
test("from a top-level result with lots of continuations", () => {
|
||||
const data = makeData({
|
||||
issues: true,
|
||||
pulls: true,
|
||||
issueComments: true,
|
||||
pullComments: true,
|
||||
reviews: true,
|
||||
reviewComments: true,
|
||||
});
|
||||
const result = Array.from(continuationsFromQuery(data));
|
||||
const expectedContinuations: Continuation[] = (() => {
|
||||
const continuations = makeContinuations();
|
||||
return [
|
||||
continuations.issues,
|
||||
continuations.pulls,
|
||||
continuations.issueComments,
|
||||
continuations.pullComments,
|
||||
continuations.reviews,
|
||||
continuations.reviewComments,
|
||||
];
|
||||
})();
|
||||
expectedContinuations.forEach((x) => {
|
||||
expect(result).toContainEqual(x);
|
||||
});
|
||||
expect(result).toHaveLength(expectedContinuations.length);
|
||||
});
|
||||
|
||||
test("from a top-level result with sparse continuations", () => {
|
||||
// Here, some elements have continuations, but are children of
|
||||
// elements without continuations. This tests that we always recur
|
||||
// through the whole structure.
|
||||
const data = makeData({
|
||||
issues: true,
|
||||
pulls: false,
|
||||
issueComments: false,
|
||||
pullComments: true,
|
||||
reviews: false,
|
||||
reviewComments: true,
|
||||
});
|
||||
const result = Array.from(continuationsFromQuery(data));
|
||||
const expectedContinuations: Continuation[] = (() => {
|
||||
const continuations = makeContinuations();
|
||||
return [
|
||||
continuations.issues,
|
||||
continuations.pullComments,
|
||||
continuations.reviewComments,
|
||||
];
|
||||
})();
|
||||
expectedContinuations.forEach((x) => {
|
||||
expect(result).toContainEqual(x);
|
||||
});
|
||||
expect(result).toHaveLength(expectedContinuations.length);
|
||||
});
|
||||
|
||||
describe("from another continuation", () => {
|
||||
function makeContinuationResult(hasNextPages: boolean) {
|
||||
return {
|
||||
issues: {
|
||||
pageInfo: {
|
||||
hasNextPage: hasNextPages,
|
||||
endCursor: "opaque-cursor-moreissues",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue3",
|
||||
title: "todo",
|
||||
body: "it means everything",
|
||||
number: 3,
|
||||
author: makeAuthor("wchargin"),
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: hasNextPages,
|
||||
endCursor: "opaque-cursor-issue3comments",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue3comment1",
|
||||
author: makeAuthor("decentralion"),
|
||||
body:
|
||||
"if it means everything, does it really mean anything?",
|
||||
url: "opaque://issue/3/comment/1",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
}
|
||||
test("when there are more pages at multiple levels of nesting", () => {
|
||||
const continuation = makeContinuations().issues;
|
||||
const continuationResult = makeContinuationResult(true);
|
||||
const result = Array.from(
|
||||
continuationsFromContinuation(continuationResult, continuation)
|
||||
);
|
||||
const b = build;
|
||||
const expectedContinuations = [
|
||||
{
|
||||
enclosingNodeType: "REPOSITORY",
|
||||
enclosingNodeId: "opaque-repo",
|
||||
selections: [
|
||||
b.inlineFragment("Repository", [
|
||||
b.field(
|
||||
"issues",
|
||||
{
|
||||
first: b.literal(PAGE_LIMIT),
|
||||
after: b.literal("opaque-cursor-moreissues"),
|
||||
},
|
||||
[b.fragmentSpread("issues")]
|
||||
),
|
||||
]),
|
||||
],
|
||||
destinationPath: ["repository"],
|
||||
},
|
||||
{
|
||||
enclosingNodeType: "ISSUE",
|
||||
enclosingNodeId: "opaque-issue3",
|
||||
selections: [
|
||||
b.inlineFragment("Issue", [
|
||||
b.field(
|
||||
"comments",
|
||||
{
|
||||
first: b.literal(PAGE_LIMIT),
|
||||
after: b.literal("opaque-cursor-issue3comments"),
|
||||
},
|
||||
[b.fragmentSpread("comments")]
|
||||
),
|
||||
]),
|
||||
],
|
||||
destinationPath: ["repository", "issues", "nodes", 0],
|
||||
},
|
||||
];
|
||||
expectedContinuations.forEach((x) => {
|
||||
expect(result).toContainEqual(x);
|
||||
});
|
||||
expect(result).toHaveLength(expectedContinuations.length);
|
||||
});
|
||||
test("when there are no more pages", () => {
|
||||
const continuation = makeContinuations().issues;
|
||||
const continuationResult = makeContinuationResult(false);
|
||||
const result = Array.from(
|
||||
continuationsFromContinuation(continuationResult, continuation)
|
||||
);
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("#merge", () => {
|
||||
describe("merges at the root", () => {
|
||||
it("replacing primitive numbers", () => {
|
||||
expect(merge(3, 5, [])).toEqual(5);
|
||||
});
|
||||
|
||||
it("replacing primitive strings", () => {
|
||||
expect(merge("three", "five", [])).toEqual("five");
|
||||
});
|
||||
|
||||
it("replacing a primitive string with null", () => {
|
||||
expect(merge("three", null, [])).toEqual(null);
|
||||
});
|
||||
|
||||
it("replacing null with a number", () => {
|
||||
expect(merge(null, 3, [])).toEqual(3);
|
||||
});
|
||||
|
||||
it("concatenating arrays", () => {
|
||||
expect(merge([1, 2], [3, 4], [])).toEqual([1, 2, 3, 4]);
|
||||
});
|
||||
|
||||
it("merging objects", () => {
|
||||
const destination = {a: 1, b: 2};
|
||||
const source = {c: 3, d: 4};
|
||||
const expected = {a: 1, b: 2, c: 3, d: 4};
|
||||
expect(merge(destination, source, [])).toEqual(expected);
|
||||
});
|
||||
|
||||
it("overwriting primitives in an object", () => {
|
||||
const destination = {hasNextPage: true, endCursor: "cursor-aaa"};
|
||||
const source = {hasNextPage: false, endCursor: "cursor-bbb"};
|
||||
expect(merge(destination, source, [])).toEqual(source);
|
||||
});
|
||||
|
||||
it("merging complex structures recursively", () => {
|
||||
const destination = {
|
||||
fst: {a: 1, b: 2},
|
||||
snd: {e: 5, f: 6},
|
||||
fruits: ["apple", "banana"],
|
||||
letters: ["whiskey", "x-ray"],
|
||||
};
|
||||
const source = {
|
||||
fst: {c: 3, d: 4},
|
||||
snd: {g: 7, h: 8},
|
||||
fruits: ["cherry", "durian"],
|
||||
letters: ["yankee", "zulu"],
|
||||
};
|
||||
const expected = {
|
||||
fst: {a: 1, b: 2, c: 3, d: 4},
|
||||
snd: {e: 5, f: 6, g: 7, h: 8},
|
||||
fruits: ["apple", "banana", "cherry", "durian"],
|
||||
letters: ["whiskey", "x-ray", "yankee", "zulu"],
|
||||
};
|
||||
expect(merge(destination, source, [])).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("traverses", () => {
|
||||
it("down an object path", () => {
|
||||
const destination = {
|
||||
child: {
|
||||
grandchild: {
|
||||
one: 1,
|
||||
two: 2,
|
||||
},
|
||||
otherGrandchild: "world",
|
||||
},
|
||||
otherChild: "hello",
|
||||
};
|
||||
const source = {
|
||||
three: 3,
|
||||
four: 4,
|
||||
};
|
||||
const expected = {
|
||||
child: {
|
||||
grandchild: {
|
||||
one: 1,
|
||||
two: 2,
|
||||
three: 3,
|
||||
four: 4,
|
||||
},
|
||||
otherGrandchild: "world",
|
||||
},
|
||||
otherChild: "hello",
|
||||
};
|
||||
expect(merge(destination, source, ["child", "grandchild"])).toEqual(
|
||||
expected
|
||||
);
|
||||
});
|
||||
|
||||
it("down an array path", () => {
|
||||
const destination = [["change me", [1, 2]], ["ignore me", [5, 6]]];
|
||||
const source = [3, 4];
|
||||
const expected = [["change me", [1, 2, 3, 4]], ["ignore me", [5, 6]]];
|
||||
expect(merge(destination, source, [0, 1])).toEqual(expected);
|
||||
});
|
||||
|
||||
it("down a path of mixed objects and arrays", () => {
|
||||
const destination = {
|
||||
families: [
|
||||
{
|
||||
childCount: 3,
|
||||
children: [
|
||||
{name: "Alice", hobbies: ["acupuncture"]},
|
||||
{name: "Bob", hobbies: ["billiards"]},
|
||||
{name: "Cheryl", hobbies: ["chess"]},
|
||||
],
|
||||
},
|
||||
{
|
||||
childCount: 0,
|
||||
children: [],
|
||||
},
|
||||
],
|
||||
};
|
||||
const path = ["families", 0, "children", 2, "hobbies"];
|
||||
const source = ["charades", "cheese-rolling"];
|
||||
const expected = {
|
||||
families: [
|
||||
{
|
||||
childCount: 3,
|
||||
children: [
|
||||
{name: "Alice", hobbies: ["acupuncture"]},
|
||||
{name: "Bob", hobbies: ["billiards"]},
|
||||
{
|
||||
name: "Cheryl",
|
||||
hobbies: ["chess", "charades", "cheese-rolling"],
|
||||
},
|
||||
],
|
||||
},
|
||||
{childCount: 0, children: []},
|
||||
],
|
||||
};
|
||||
expect(merge(destination, source, path)).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("doesn't mutate its inputs", () => {
|
||||
it("when merging arrays", () => {
|
||||
const destination = [1, 2];
|
||||
const source = [3, 4];
|
||||
merge(destination, source, []);
|
||||
expect(destination).toEqual([1, 2]);
|
||||
expect(source).toEqual([3, 4]);
|
||||
});
|
||||
|
||||
it("when merging objects", () => {
|
||||
const destination = {a: 1, b: 2};
|
||||
const source = {c: 3, d: 4};
|
||||
merge(destination, source, []);
|
||||
expect(destination).toEqual({a: 1, b: 2});
|
||||
expect(source).toEqual({c: 3, d: 4});
|
||||
});
|
||||
|
||||
test("along an object path", () => {
|
||||
const makeDestination = () => ({
|
||||
child: {
|
||||
grandchild: {
|
||||
one: 1,
|
||||
two: 2,
|
||||
},
|
||||
otherGrandchild: "world",
|
||||
},
|
||||
otherChild: "hello",
|
||||
});
|
||||
const makeSource = () => ({
|
||||
three: 3,
|
||||
four: 4,
|
||||
});
|
||||
const destination = makeDestination();
|
||||
const source = makeSource();
|
||||
merge(destination, source, ["child", "grandchild"]);
|
||||
expect(destination).toEqual(makeDestination());
|
||||
expect(source).toEqual(makeSource());
|
||||
});
|
||||
|
||||
test("along an array path", () => {
|
||||
const makeDestination = () => [
|
||||
["change me", [1, 2]],
|
||||
["ignore me", [5, 6]],
|
||||
];
|
||||
const makeSource = () => [3, 4];
|
||||
const destination = makeDestination();
|
||||
const source = makeSource();
|
||||
merge(destination, source, [0, 1]);
|
||||
expect(destination).toEqual(makeDestination());
|
||||
expect(source).toEqual(makeSource());
|
||||
});
|
||||
});
|
||||
|
||||
describe("complains", () => {
|
||||
describe("about bad keys", () => {
|
||||
it("when given a numeric key into a primitive", () => {
|
||||
expect(() => merge(123, 234, [0])).toThrow(/non-array/);
|
||||
});
|
||||
it("when given a numeric key into null", () => {
|
||||
expect(() => merge(null, null, [0])).toThrow(/non-array/);
|
||||
});
|
||||
describe("when given a numeric key into an object", () => {
|
||||
test("for the usual case of an object with string keys", () => {
|
||||
expect(() => merge({a: 1}, {b: 2}, [0])).toThrow(/non-array/);
|
||||
});
|
||||
test("even when the object has the stringifed version of the key", () => {
|
||||
expect(() =>
|
||||
merge({"0": "zero", "1": "one"}, {"2": "two"}, [0])
|
||||
).toThrow(/non-array/);
|
||||
});
|
||||
});
|
||||
|
||||
it("when given a string key into a primitive", () => {
|
||||
expect(() => merge(123, 234, ["k"])).toThrow(/non-object/);
|
||||
});
|
||||
it("when given a string key into null", () => {
|
||||
expect(() => merge(null, null, ["k"])).toThrow(/non-object/);
|
||||
});
|
||||
it("when given a string key into an array", () => {
|
||||
expect(() => merge([1, 2], [1, 2], ["k"])).toThrow(/non-object/);
|
||||
});
|
||||
|
||||
it("when given a non-string, non-numeric key", () => {
|
||||
const badKey: any = false;
|
||||
expect(() => merge({a: 1}, {b: 2}, [badKey])).toThrow(/key.*false/);
|
||||
});
|
||||
|
||||
it("when given a non-existent string key", () => {
|
||||
expect(() => merge({a: 1}, {b: 2}, ["c"])).toThrow(/"c" not found/);
|
||||
});
|
||||
it("when given a non-existent numeric key", () => {
|
||||
expect(() => merge([1], [2], [3])).toThrow(/3 not found/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("about source/destination mismatch", () => {
|
||||
it("when merging an array into a non-array", () => {
|
||||
const re = () => /array into non-array/;
|
||||
expect(() => merge({a: 1}, [2], [])).toThrow(re());
|
||||
expect(() => merge(true, [2], [])).toThrow(re());
|
||||
});
|
||||
it("when merging an object into a non-object", () => {
|
||||
const re = () => /object into non-object/;
|
||||
expect(() => merge([1], {b: 2}, [])).toThrow(re());
|
||||
expect(() => merge(true, {b: 2}, [])).toThrow(re());
|
||||
});
|
||||
it("when merging a primitive into a non-primitive", () => {
|
||||
const re = () => /primitive into non-primitive/;
|
||||
expect(() => merge([], true, [])).toThrow(re());
|
||||
expect(() => merge({a: 1}, true, [])).toThrow(re());
|
||||
});
|
||||
});
|
||||
});
|
||||
});
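Editor's note: for readers skimming this deleted test file, here is a minimal editorial sketch of the merge semantics that the "#merge" tests above describe — traverse `path` first, then concatenate arrays, union object keys recursively, and let primitive values from `source` win. It is not the removed implementation, the name `mergeSketch` is illustrative, and the error cases exercised under "complains" (bad keys, source/destination mismatches) are omitted.

function mergeSketch(destination, source, path) {
  if (path.length > 0) {
    // Walk down to the merge site without mutating the containers on the way.
    const [key, ...rest] = path;
    if (Array.isArray(destination)) {
      return destination.map((x, i) => (i === key ? mergeSketch(x, source, rest) : x));
    }
    return {...destination, [key]: mergeSketch(destination[key], source, rest)};
  }
  if (Array.isArray(source)) {
    return [...destination, ...source]; // arrays concatenate
  }
  if (source !== null && typeof source === "object") {
    // Objects union their keys; shared keys are merged recursively.
    const result = {...destination};
    for (const k of Object.keys(source)) {
      result[k] = k in destination ? mergeSketch(destination[k], source[k], []) : source[k];
    }
    return result;
  }
  return source; // primitives (and null) replace the destination value
}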
|
||||
|
||||
describe("#postQueryExhaustive", () => {
|
||||
it("finds no fragments in an empty query", () => {
|
||||
const b = build;
|
||||
const query = b.query("Noop", [], []);
|
||||
expect(requiredFragments(query)).toEqual([]);
|
||||
});
|
||||
|
||||
it("finds a fragment with no dependencies", () => {
|
||||
const b = build;
|
||||
const query = b.query(
|
||||
"FindReviewComments",
|
||||
[],
|
||||
[
|
||||
b.field("node", {id: b.literal("some-user")}, [
|
||||
b.inlineFragment("Actor", [b.fragmentSpread("whoami")]),
|
||||
]),
|
||||
]
|
||||
);
|
||||
const result = requiredFragments(query);
|
||||
expect(result.map((fd) => fd.name).sort()).toEqual(["whoami"]);
|
||||
result.forEach((fd) => expect(createFragments()).toContainEqual(fd));
|
||||
});
|
||||
|
||||
it("transitively finds dependent fragments", () => {
|
||||
const b = build;
|
||||
const query = b.query(
|
||||
"FindReviewComments",
|
||||
[],
|
||||
[
|
||||
b.field("node", {id: b.literal("some-pull-request")}, [
|
||||
b.inlineFragment("PullRequest", [
|
||||
b.field(
|
||||
"reviews",
|
||||
{
|
||||
first: b.literal(1),
|
||||
},
|
||||
[b.fragmentSpread("reviews")]
|
||||
),
|
||||
]),
|
||||
]),
|
||||
]
|
||||
);
|
||||
const result = requiredFragments(query);
|
||||
expect(result.map((fd) => fd.name).sort()).toEqual([
|
||||
"reactions",
|
||||
"reviewComments",
|
||||
"reviews",
|
||||
"whoami",
|
||||
]);
|
||||
result.forEach((fd) => expect(createFragments()).toContainEqual(fd));
|
||||
});
|
||||
});
|
||||
|
||||
describe("#postQueryExhaustive", () => {
|
||||
it("resolves a representative query", async () => {
|
||||
const makeAuthor = (name) => ({
|
||||
__typename: "User",
|
||||
login: name,
|
||||
id: `opaque-user-${name}`,
|
||||
});
|
||||
// We'll have three stages:
|
||||
// - The original result will need more issues, and more
|
||||
// comments for issue 1, and more reviews for PR 2.
|
||||
// - The next result will need more issues, and comments for
|
||||
// issues 1 (original issue) and 3 (new issue).
|
||||
// - The final result will need no more data.
|
||||
// We obey the contract pretty much exactly, except that we return
|
||||
// far fewer results than are asked for by the query.
|
||||
//
|
||||
// Here is the response to the initial query.
|
||||
const response0 = {
|
||||
repository: {
|
||||
id: "opaque-repo",
|
||||
issues: {
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: "opaque-cursor-issues-v0",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue1",
|
||||
title: "Request for comments",
|
||||
body: "Like it says, please comment!",
|
||||
number: 1,
|
||||
author: makeAuthor("decentralion"),
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: "opaque-cursor-issue1comments-v0",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue1comment1",
|
||||
body: "Here: I'll start.",
|
||||
url: "opaque://issue/1/comment/1",
|
||||
author: makeAuthor("decentralion"),
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
pulls: {
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: "opaque-cursor-pulls-v0",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-pull2",
|
||||
title: "Fix typo in README",
|
||||
body: "Surely this deserves much cred.",
|
||||
number: 2,
|
||||
author: makeAuthor("wchargin"),
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: null,
|
||||
},
|
||||
nodes: [],
|
||||
},
|
||||
reviews: {
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: "opaque-cursor-pull2reviews-v0",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-pull2review1",
|
||||
body: "You actually introduced a new typo instead.",
|
||||
author: makeAuthor("decentralion"),
|
||||
state: "CHANGES_REQUESTED",
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: null,
|
||||
},
|
||||
nodes: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Here is the response to the continuations generated from the
|
||||
// first query.
|
||||
const response1 = {
|
||||
_n0: {
|
||||
// Requested more issues.
|
||||
issues: {
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: "opaque-cursor-issues-v1",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue3",
|
||||
title: "Another",
|
||||
body: "You can comment here, too.",
|
||||
number: 2,
|
||||
author: makeAuthor("wchargin"),
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: "opaque-cursor-issue3comments-v1",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue3comment1",
|
||||
body: "What fun!",
|
||||
url: "opaque://issue/3/comment/1",
|
||||
author: makeAuthor("decentralion"),
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
_n1: {
|
||||
// Requested more comments for issue 1.
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: "opaque-cursor-issue1comments-v1",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue1comment2",
|
||||
body: "Closing due to no fun allowed.",
|
||||
url: "opaque://issue/1/comment/2",
|
||||
author: makeAuthor("wchargin"),
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
_n2: {
|
||||
// Requested more reviews for PR 2.
|
||||
reviews: {
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: "opaque-cursor-pull2reviews-v1",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-pull2review2",
|
||||
body: "Looks godo to me.",
|
||||
author: makeAuthor("decentralion"),
|
||||
state: "APPROVED",
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: null,
|
||||
},
|
||||
nodes: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Here is the response to the continuations generated from the
|
||||
// second query.
|
||||
const response2 = {
|
||||
_n0: {
|
||||
// Requested more issues.
|
||||
issues: {
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: "opaque-cursor-issues-v2",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue4",
|
||||
title: "Please stop making issues",
|
||||
body: "My mailbox is out of space",
|
||||
number: 4,
|
||||
author: makeAuthor("wchargin"),
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: "opaque-cursor-issue4comments-v2",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue4comment1",
|
||||
body: "But you posted the last issue",
|
||||
url: "opaque://issue/4/comment/1",
|
||||
author: makeAuthor("decentralion"),
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
_n1: {
|
||||
// Requested more comments for issue 1.
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: "opaque-cursor-issue1comments-v2",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue1comment3",
|
||||
body: "That is not very nice.",
|
||||
url: "opaque://issue/1/comment/3",
|
||||
author: makeAuthor("decentralion"),
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
_n2: {
|
||||
// Requested more comments for issue 3.
|
||||
comments: {
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: "opaque-cursor-issue3comments-v2",
|
||||
},
|
||||
nodes: [
|
||||
{
|
||||
id: "opaque-issue3comment2",
|
||||
body: "I will comment on this issue for a second time.",
|
||||
url: "opaque://issue/1/comment/3",
|
||||
author: makeAuthor("decentralion"),
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const postQuery = jest
|
||||
.fn()
|
||||
.mockReturnValueOnce(Promise.resolve(response0))
|
||||
.mockReturnValueOnce(Promise.resolve(response1))
|
||||
.mockReturnValueOnce(Promise.resolve(response2));
|
||||
|
||||
const result = await postQueryExhaustive(postQuery, {
|
||||
body: createQuery(),
|
||||
variables: createVariables(makeRepoId("sourcecred", "discussion")),
|
||||
});
|
||||
expect(postQuery).toHaveBeenCalledTimes(3);
|
||||
|
||||
// Save the result snapshot for inspection. In particular, there
|
||||
// shouldn't be any nodes in the snapshot that have more pages.
|
||||
expect(result).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
||||
it("creates a query", () => {
|
||||
expect(
|
||||
stringify.body(createQuery(), multilineLayout(" "))
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
});
|
|
@@ -16,14 +16,11 @@ import type {
  UserlikeAddress,
} from "./nodes";
import * as T from "./graphqlTypes";
import type {GithubResponseJSON} from "./graphql";
import * as GitNode from "../git/nodes";
import * as MapUtil from "../../util/map";
import * as NullUtil from "../../util/null";
import {botSet} from "./bots";

import translateContinuations from "./translateContinuations";

import {
  reviewUrlToId,
  issueCommentUrlToId,
@@ -59,19 +56,12 @@ export class RelationalView {
    this._mapReferencedBy = new Map();
  }

  addData(data: GithubResponseJSON) {
    // Warning: calling `addData` can put the RelationalView in an inconsistent
    // state. For example, if called with {repo: {issues: [1,2,3]}} and then with
    // {repo: {issues: [4, 5]}}, then calls to repo.issues() will only give back
    // issues 4 and 5 (although issues 1, 2, and 3 will still be in the view).
    const {result: repository, warnings} = translateContinuations(data);
    for (const warning of warnings) {
      console.warn(stringify(warning));
    }
    this.addRepository(repository);
  }

  addRepository(repository: T.Repository): void {
    // Warning: calling `addRepository` can put the RelationalView in an
    // inconsistent state. For example, if called with a repo with
    // issues [#1, #2, #3] and then with a repo with issues [#4, #5],
    // then calls to `repo.issues()` will only give back issues 4 and 5
    // (although issues 1, 2, and 3 will still be in the view).
    this._addRepo(repository);
    this._addReferences();
  }
|
||||
|
|
|
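Editor's note: the first hunk above drops three import lines (14 down to 11), which appear to be the GithubResponseJSON and translateContinuations imports that only `addData` used, while `addRepository(repository: T.Repository)` remains. A hedged usage sketch follows; the import path and the assumption that `repository` comes from the Mirror-based pipeline referenced in #622 are illustrative, not taken from this diff.

import {RelationalView} from "./relationalView"; // path assumed for illustration

function buildView(repository /*: T.Repository, e.g. produced by the new Mirror pipeline */) {
  const view = new RelationalView();
  view.addRepository(repository); // the surviving method shown in the hunk above
  return view;
}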
@@ -1,331 +0,0 @@
|
|||
// @flow
|
||||
// Temporary module to translate GraphQL results from the old format
|
||||
// with manually resolved continuations to the format emitted by the
|
||||
// Mirror module. See issue #923 for context.
|
||||
|
||||
import type {
|
||||
AuthorJSON,
|
||||
BotJSON,
|
||||
CommentJSON,
|
||||
CommitJSON,
|
||||
GitObjectJSON,
|
||||
GithubResponseJSON,
|
||||
IssueJSON,
|
||||
OrganizationJSON,
|
||||
PullJSON,
|
||||
ReactionJSON,
|
||||
RefJSON,
|
||||
RepositoryJSON,
|
||||
ReviewCommentJSON,
|
||||
ReviewJSON,
|
||||
UserJSON,
|
||||
} from "./graphql";
|
||||
import type {
|
||||
Actor,
|
||||
Blob,
|
||||
Bot,
|
||||
Commit,
|
||||
GitObject,
|
||||
GitObjectID,
|
||||
Issue,
|
||||
IssueComment,
|
||||
Organization,
|
||||
PullRequest,
|
||||
PullRequestReview,
|
||||
PullRequestReviewComment,
|
||||
Reaction,
|
||||
Ref,
|
||||
Repository,
|
||||
RepositoryOwner,
|
||||
Tag,
|
||||
Tree,
|
||||
User,
|
||||
} from "./graphqlTypes";
|
||||
|
||||
export type Warning =
|
||||
// We've never seen it happen, and don't know how it could. But the
|
||||
// GitHub schema says that it can. This warning is more of a
|
||||
// diagnostic to the SourceCred maintainers (if it comes up on a real
|
||||
// repository, we can learn something!) than an indication that
|
||||
// something has gone wrong.
|
||||
| {|+type: "NON_COMMIT_REF_TARGET", +target: GitObjectJSON|}
|
||||
// This can happen if a commit has a parent that we did not fetch. We
|
||||
// only fetch commits that are Git-reachable from HEAD or are the direct
|
||||
// merge commit of a pull request. We may therefore omit commits that
|
||||
// disappeared from master after a force-push, or were an ancestor of a
|
||||
// pull request that was merged into a branch other than master. See
|
||||
// issue #923 for more context. If this warning is emitted, we will simply
|
||||
// omit the offending parent commit.
|
||||
| {|+type: "UNKNOWN_PARENT_OID", +child: GitObjectID, +parent: GitObjectID|};
|
||||
|
||||
export default function translate(
|
||||
json: GithubResponseJSON
|
||||
): {|
|
||||
+result: Repository,
|
||||
+warnings: $ReadOnlyArray<Warning>,
|
||||
|} {
|
||||
const repositoryJson = json.repository;
|
||||
const warnings: Array<Warning> = [];
|
||||
|
||||
// Most of the work that this function does is exploding connections
|
||||
// into lists of nodes. But commits require some special attention,
|
||||
// because we have to resolve parent OIDs to actual parent commits.
|
||||
// This means that it is most convenient to start by discovering all
|
||||
// commits in the data.
|
||||
const commits: Map<
|
||||
GitObjectID,
|
||||
{|
|
||||
...Commit,
|
||||
parents: Array<null | Commit>, // mutable: we build this incrementally
|
||||
|}
|
||||
> = new Map();
|
||||
|
||||
// First, create all the commit objects, initializing them with empty
|
||||
// parent arrays. We put these temporarily into a map keyed by OID for
|
||||
// deduplication: a commit may appear both in the linearized history
|
||||
// from HEAD and also as the merge commit of a pull request, and we
|
||||
// want to process it just once.
|
||||
const commitJsons: $ReadOnlyArray<CommitJSON> = Array.from(
|
||||
new Map(
|
||||
Array.from(
|
||||
(function*() {
|
||||
if (repositoryJson.defaultBranchRef) {
|
||||
const target = repositoryJson.defaultBranchRef.target;
|
||||
switch (target.__typename) {
|
||||
case "Commit":
|
||||
yield* target.history.nodes;
|
||||
break;
|
||||
case "Tree":
|
||||
case "Blob":
|
||||
case "Tag":
|
||||
warnings.push({type: "NON_COMMIT_REF_TARGET", target});
|
||||
break;
|
||||
// istanbul ignore next: unreachable per Flow
|
||||
default:
|
||||
throw new Error((target.__typename: empty));
|
||||
}
|
||||
}
|
||||
for (const pull of repositoryJson.pulls.nodes) {
|
||||
if (pull.mergeCommit) {
|
||||
yield pull.mergeCommit;
|
||||
}
|
||||
}
|
||||
})()
|
||||
).map((json) => [json.oid, json])
|
||||
).values()
|
||||
);
|
||||
for (const commitJson of commitJsons) {
|
||||
const commit = {
|
||||
__typename: "Commit",
|
||||
author: {...commitJson.author},
|
||||
id: commitJson.id,
|
||||
message: commitJson.message,
|
||||
oid: commitJson.oid,
|
||||
parents: [],
|
||||
url: commitJson.url,
|
||||
};
|
||||
commits.set(commit.oid, commit);
|
||||
}
|
||||
|
||||
// Then, once all the objects have been created, we can set up the
|
||||
// parents.
|
||||
for (const commitJson of commitJsons) {
|
||||
const commit = commits.get(commitJson.oid);
|
||||
// istanbul ignore next: should not be possible
|
||||
if (commit == null) {
|
||||
throw new Error(
|
||||
"invariant violation: commit came out of nowhere: " + commitJson.oid
|
||||
);
|
||||
}
|
||||
for (const {oid: parentOid} of commitJson.parents.nodes) {
|
||||
const parentCommit = commits.get(parentOid);
|
||||
if (parentCommit == null) {
|
||||
warnings.push({
|
||||
type: "UNKNOWN_PARENT_OID",
|
||||
child: commitJson.oid,
|
||||
parent: parentOid,
|
||||
});
|
||||
} else {
|
||||
commit.parents.push(parentCommit);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// The rest is mostly mechanical. The pattern is: we pull off and
|
||||
// recursively translate the non-primitive fields of each object, and
|
||||
// then add a typename and put back the primitives. For union types,
|
||||
// we switch on the __typename and dispatch to the appropriate object
|
||||
// translators.
|
||||
|
||||
function translateRepository(json: RepositoryJSON): Repository {
|
||||
const {defaultBranchRef, issues, owner, pulls, ...rest} = json;
|
||||
return {
|
||||
__typename: "Repository",
|
||||
defaultBranchRef:
|
||||
defaultBranchRef == null
|
||||
? null
|
||||
: translateDefaultBranchRef(defaultBranchRef),
|
||||
issues: issues.nodes.map(translateIssue),
|
||||
owner: translateRepositoryOwner(owner),
|
||||
pullRequests: pulls.nodes.map(translatePullRequest),
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
|
||||
function translateDefaultBranchRef(json: RefJSON): Ref {
|
||||
const {target, ...rest} = json;
|
||||
return {
|
||||
__typename: "Ref",
|
||||
target: translateDefaultBranchRefTarget(target),
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
|
||||
// This one is a bit wonky, because our `GitObjectJSON` type is not a
|
||||
// good representation of the GitHub schema. In particular, a
|
||||
// `GitObjectJSON` can represent a commit, but in a different form
|
||||
// than our `CommitJSON`! This function _only_ applies to
|
||||
// `GitObjectJSON`s that we fetched as the `target` of the
|
||||
// `defaultBranchRef` of a repository. But these are the only
|
||||
// `GitObjectJSON`s that we fetch, so it's okay.
|
||||
function translateDefaultBranchRefTarget(json: GitObjectJSON): GitObject {
|
||||
switch (json.__typename) {
|
||||
case "Commit":
|
||||
// The default branch ref is `null` if there are no commits, so
|
||||
// the history must include at least one commit (the HEAD
|
||||
// commit).
|
||||
return lookUpCommit(json.history.nodes[0].oid);
|
||||
case "Blob":
|
||||
return ({...json}: Blob);
|
||||
case "Tag":
|
||||
return ({...json}: Tag);
|
||||
case "Tree":
|
||||
return ({...json}: Tree);
|
||||
// istanbul ignore next: unreachable per Flow
|
||||
default:
|
||||
throw new Error((json.__typename: empty));
|
||||
}
|
||||
}
|
||||
|
||||
function lookUpCommit(oid: GitObjectID): Commit {
|
||||
const commit = commits.get(oid);
|
||||
// istanbul ignore if: unreachable: we explored all commits in
|
||||
// the response, including this one.
|
||||
if (commit == null) {
|
||||
throw new Error("invariant violation: unknown commit: " + oid);
|
||||
}
|
||||
return commit;
|
||||
}
|
||||
|
||||
function translateCommit(json: CommitJSON): Commit {
|
||||
return lookUpCommit(json.oid);
|
||||
}
|
||||
|
||||
function translateIssue(json: IssueJSON): Issue {
|
||||
const {author, comments, reactions, ...rest} = json;
|
||||
return {
|
||||
__typename: "Issue",
|
||||
author: author == null ? null : translateActor(author),
|
||||
comments: comments.nodes.map(translateIssueComment),
|
||||
reactions: reactions.nodes.map(translateReaction),
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
|
||||
function translateIssueComment(json: CommentJSON): IssueComment {
|
||||
const {author, reactions, ...rest} = json;
|
||||
return {
|
||||
__typename: "IssueComment",
|
||||
author: author == null ? null : translateActor(author),
|
||||
reactions: reactions.nodes.map(translateReaction),
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
|
||||
function translateReaction(json: ReactionJSON): Reaction {
|
||||
const {user, ...rest} = json;
|
||||
return {
|
||||
__typename: "Reaction",
|
||||
user: user == null ? null : translateUser(user),
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
|
||||
function translateRepositoryOwner(
|
||||
json: UserJSON | OrganizationJSON
|
||||
): RepositoryOwner {
|
||||
switch (json.__typename) {
|
||||
case "User":
|
||||
return translateUser(json);
|
||||
case "Organization":
|
||||
return translateOrganization(json);
|
||||
// istanbul ignore next: unreachable per Flow
|
||||
default:
|
||||
throw new Error((json.__typename: empty));
|
||||
}
|
||||
}
|
||||
|
||||
function translateActor(json: AuthorJSON): Actor {
|
||||
switch (json.__typename) {
|
||||
case "User":
|
||||
return translateUser(json);
|
||||
case "Organization":
|
||||
return translateOrganization(json);
|
||||
case "Bot":
|
||||
return translateBot(json);
|
||||
// istanbul ignore next: unreachable per Flow
|
||||
default:
|
||||
throw new Error((json.__typename: empty));
|
||||
}
|
||||
}
|
||||
|
||||
function translateUser(json: UserJSON): User {
|
||||
return {...json};
|
||||
}
|
||||
|
||||
function translateOrganization(json: OrganizationJSON): Organization {
|
||||
return {...json};
|
||||
}
|
||||
|
||||
function translateBot(json: BotJSON): Bot {
|
||||
return {...json};
|
||||
}
|
||||
|
||||
function translatePullRequest(json: PullJSON): PullRequest {
|
||||
const {author, comments, mergeCommit, reactions, reviews, ...rest} = json;
|
||||
return {
|
||||
__typename: "PullRequest",
|
||||
author: author == null ? null : translateActor(author),
|
||||
comments: comments.nodes.map(translateIssueComment),
|
||||
mergeCommit: mergeCommit == null ? null : translateCommit(mergeCommit),
|
||||
reactions: reactions.nodes.map(translateReaction),
|
||||
reviews: reviews.nodes.map(translatePullRequestReview),
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
|
||||
function translatePullRequestReview(json: ReviewJSON): PullRequestReview {
|
||||
const {author, comments, ...rest} = json;
|
||||
return {
|
||||
__typename: "PullRequestReview",
|
||||
author: author == null ? null : translateActor(author),
|
||||
comments: comments.nodes.map(translatePullRequestReviewComment),
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
|
||||
function translatePullRequestReviewComment(
|
||||
json: ReviewCommentJSON
|
||||
): PullRequestReviewComment {
|
||||
const {author, reactions, ...rest} = json;
|
||||
return {
|
||||
__typename: "PullRequestReviewComment",
|
||||
author: author == null ? null : translateActor(author),
|
||||
reactions: reactions.nodes.map(translateReaction),
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
|
||||
const result = translateRepository(repositoryJson);
|
||||
return {result, warnings};
|
||||
}
|
|
@@ -1,144 +0,0 @@
|
|||
// @flow
|
||||
|
||||
import translateContinuations from "./translateContinuations";
|
||||
|
||||
describe("plugins/github/translateContinuations", () => {
|
||||
describe("translateContinuations", () => {
|
||||
it("raises a warning if the defaultBranchRef is not a commit", () => {
|
||||
const exampleData = {
|
||||
repository: {
|
||||
defaultBranchRef: {
|
||||
id: "ref-id",
|
||||
target: {
|
||||
__typename: "Tree",
|
||||
id: "tree-id",
|
||||
oid: "123",
|
||||
},
|
||||
},
|
||||
id: "repo-id",
|
||||
issues: {
|
||||
nodes: [],
|
||||
pageInfo: {hasNextPage: false, endCursor: null},
|
||||
},
|
||||
name: "bar",
|
||||
owner: {
|
||||
__typename: "User",
|
||||
id: "user-id",
|
||||
login: "foo",
|
||||
url: "https://github.com/foo",
|
||||
},
|
||||
pulls: {
|
||||
nodes: [],
|
||||
pageInfo: {hasNextPage: false, endCursor: null},
|
||||
},
|
||||
url: "https://github.com/foo/bar",
|
||||
},
|
||||
};
|
||||
const {result, warnings} = translateContinuations(exampleData);
|
||||
expect(result.defaultBranchRef).toEqual({
|
||||
__typename: "Ref",
|
||||
id: "ref-id",
|
||||
target: {__typename: "Tree", id: "tree-id", oid: "123"},
|
||||
});
|
||||
expect(warnings).toEqual([
|
||||
{
|
||||
type: "NON_COMMIT_REF_TARGET",
|
||||
target: {__typename: "Tree", id: "tree-id", oid: "123"},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("raises a warning if there is an unknown commit", () => {
|
||||
const exampleData = {
|
||||
repository: {
|
||||
defaultBranchRef: null,
|
||||
id: "repo-id",
|
||||
issues: {
|
||||
nodes: [],
|
||||
pageInfo: {hasNextPage: false, endCursor: null},
|
||||
},
|
||||
name: "bar",
|
||||
owner: {
|
||||
__typename: "User",
|
||||
id: "user-id",
|
||||
login: "foo",
|
||||
url: "https://github.com/foo",
|
||||
},
|
||||
pulls: {
|
||||
nodes: [
|
||||
{
|
||||
id: "pr-id",
|
||||
number: 1,
|
||||
author: {
|
||||
__typename: "Bot",
|
||||
id: "bot-id",
|
||||
login: "baz",
|
||||
url: "https://github.com/baz",
|
||||
},
|
||||
additions: 7,
|
||||
deletions: 9,
|
||||
comments: {
|
||||
nodes: [],
|
||||
pageInfo: {hasNextPage: false, endCursor: null},
|
||||
},
|
||||
reviews: {
|
||||
nodes: [],
|
||||
pageInfo: {hasNextPage: false, endCursor: null},
|
||||
},
|
||||
reactions: {
|
||||
nodes: [],
|
||||
pageInfo: {hasNextPage: false, endCursor: null},
|
||||
},
|
||||
mergeCommit: {
|
||||
id: "commit-id",
|
||||
author: {
|
||||
date: "2001-02-03T04:05:06",
|
||||
user: null,
|
||||
},
|
||||
message: "where are my parents?",
|
||||
oid: "456",
|
||||
parents: {
|
||||
nodes: [{oid: "789"}],
|
||||
pageInfo: {hasNextPage: false, endCursor: "cursor-parents"},
|
||||
},
|
||||
url: "https://github.com/foo/bar/commit/456",
|
||||
},
|
||||
title: "something",
|
||||
body: "whatever",
|
||||
url: "https://github.com/foo/bar/pull/1",
|
||||
},
|
||||
],
|
||||
pageInfo: {hasNextPage: false, endCursor: "cursor-pulls"},
|
||||
},
|
||||
url: "https://github.com/foo/bar",
|
||||
},
|
||||
};
|
||||
const {result, warnings} = translateContinuations(exampleData);
|
||||
const pr = result.pullRequests[0];
|
||||
if (pr == null) {
|
||||
throw new Error(String(pr));
|
||||
}
|
||||
expect(pr.mergeCommit).toEqual({
|
||||
__typename: "Commit",
|
||||
id: "commit-id",
|
||||
author: {
|
||||
date: "2001-02-03T04:05:06",
|
||||
user: null,
|
||||
},
|
||||
message: "where are my parents?",
|
||||
oid: "456",
|
||||
parents: [
|
||||
/* empty! */
|
||||
],
|
||||
url: "https://github.com/foo/bar/commit/456",
|
||||
});
|
||||
expect(warnings).toEqual([
|
||||
{
|
||||
type: "UNKNOWN_PARENT_OID",
|
||||
child: "456",
|
||||
parent: "789",
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|