#!/bin/sh

# Automatically update all SourceCred snapshot data.
#
# Requires: git, yarn, node on PATH. Some snapshot updates additionally
# need credentials supplied via the environment (see below).

# Portable strict mode for /bin/sh: abort on errors and unset variables.
set -eu

# Resolve the repository root from this script's own location, then run
# everything from there so the relative paths below resolve correctly.
toplevel="$(git -C "$(dirname "$0")" rev-parse --show-toplevel)"
cd "${toplevel}"
# Build artifacts go into a throwaway directory that is removed on any
# exit path (normal exit, set -e failure, or signal-driven EXIT).
tmpdir="$(mktemp -d)"
cleanup() {
  # -f: don't let a cleanup hiccup mask the script's real exit status;
  # --: stop option parsing in case the path ever starts with a dash.
  rm -rf -- "${tmpdir}"
}
trap cleanup EXIT
# Compile the backend entry points into the temporary bin directory.
SOURCECRED_BIN="${tmpdir}/bin"
yarn run --silent backend --output-path "${SOURCECRED_BIN}"
export SOURCECRED_BIN # for Sharness and shell tests
# Let scripts invoked below resolve this repository's node_modules;
# preserve any pre-existing NODE_PATH by appending it after a colon.
export NODE_PATH="${toplevel}/node_modules${NODE_PATH:+:${NODE_PATH}}"
echo "Updating GitHub GraphQL Flow types"
# The generator formats its output with Prettier; copy the repo's config
# next to the built script so it is picked up. (NOTE(review): presumably
# resolved relative to the script's directory — confirm if this breaks.)
cp .prettierrc.json "${SOURCECRED_BIN}/"
node "${SOURCECRED_BIN}/generateGithubGraphqlFlowTypes.js" \
  >src/plugins/github/graphqlTypes.js
echo "Updating sharness/test_load_example_github.t"
# Run in a subshell so the working directory change does not leak;
# '&&' ensures the test never runs from the wrong directory if cd fails.
(cd sharness && UPDATE_SNAPSHOT=1 ./test_load_example_github.t -l)
echo "Updating sharness/test_cli_scores.t"
# Same pattern as the other sharness snapshot: subshell keeps the cwd
# change local, and '&&' guards against running from the wrong directory.
(cd sharness && UPDATE_SNAPSHOT=1 ./test_cli_scores.t -l)
echo "Updating github/fetchGithubOrgTest.sh"
# -u: update the stored snapshot; --no-build: reuse the backend already
# built above instead of rebuilding. (NOTE(review): flag semantics
# inferred from context — confirm against the script itself.)
./src/plugins/github/fetchGithubOrgTest.sh -u --no-build
echo "Updating github/fetchGithubRepoTest.sh"
# Same flags as fetchGithubOrgTest.sh: update the snapshot without
# rebuilding the backend.
./src/plugins/github/fetchGithubRepoTest.sh -u --no-build
# The Discourse snapshots are fetched live from the test instance, which
# requires a (sensitive, admin-level) API key. Only refresh them when a
# key is provided; otherwise skip with a notice rather than failing.
if [ -n "${DISCOURSE_TEST_API_KEY:-}" ]; then
  echo "Updating Discourse API snapshots"
  ./src/plugins/discourse/update_discourse_api_snapshots.sh
else
  echo "Not updating Discourse API snapshots (need DISCOURSE_TEST_API_KEY)"
fi
echo "Updating Jest snapshots"
# -u (--updateSnapshot) tells Jest to rewrite any stale snapshots.
yarn unit -u