Deploying to gh-pages from @ status-im/network-stories@b78c20c1d5 🚀

This commit is contained in:
jessiebroke 2022-04-27 04:18:07 +00:00
parent 329f124eda
commit 623dc91677
16 changed files with 11117 additions and 11688 deletions

File diff suppressed because one or more lines are too long

View File

@@ -1163,3 +1163,9 @@ html[data-theme='dark'] .keyboard-shortcut > code {
.overflow-y-scroll { .overflow-y-scroll {
overflow-y: scroll; overflow-y: scroll;
} }
.text-ellipsis-wrapper {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,176 +0,0 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = global || self, factory(global.GitHttp = {}));
}(this, (function (exports) { 'use strict';
/**
* @typedef {Object} GitProgressEvent
* @property {string} phase
* @property {number} loaded
* @property {number} total
*/
/**
* @callback ProgressCallback
* @param {GitProgressEvent} progress
* @returns {void | Promise<void>}
*/
/**
* @typedef {Object} GitHttpRequest
* @property {string} url - The URL to request
* @property {string} [method='GET'] - The HTTP method to use
* @property {Object<string, string>} [headers={}] - Headers to include in the HTTP request
* @property {AsyncIterableIterator<Uint8Array>} [body] - An async iterator of Uint8Arrays that make up the body of POST requests
* @property {ProgressCallback} [onProgress] - Reserved for future use (emitting `GitProgressEvent`s)
* @property {object} [signal] - Reserved for future use (canceling a request)
*/
/**
* @typedef {Object} GitHttpResponse
* @property {string} url - The final URL that was fetched after any redirects
* @property {string} [method] - The HTTP method that was used
* @property {Object<string, string>} [headers] - HTTP response headers
* @property {AsyncIterableIterator<Uint8Array>} [body] - An async iterator of Uint8Arrays that make up the body of the response
* @property {number} statusCode - The HTTP status code
* @property {string} statusMessage - The HTTP status message
*/
/**
* @callback HttpFetch
* @param {GitHttpRequest} request
* @returns {Promise<GitHttpResponse>}
*/
/**
* @typedef {Object} HttpClient
* @property {HttpFetch} request
*/
// Convert a value to an Async Iterator
// This will be easier with async generator functions.
// Wrap a single bare value in an async-iterator shell so callers can
// consume it with the same protocol as a real stream.
function fromValue(value) {
  let pending = [value];
  const iterator = {
    // `done` is computed before the pop, so the first call yields the
    // value and the second call reports completion.
    next: () => {
      const done = pending.length === 0;
      const result = { done, value: pending.pop() };
      return Promise.resolve(result);
    },
    // Early termination: drop the queued value so further next() calls
    // report done.
    return: () => {
      pending = [];
      return {};
    },
    [Symbol.asyncIterator]: () => iterator,
  };
  return iterator;
}
// Normalize any input into an iterator object: prefer native async
// iteration, then sync iteration, then assume the input is already an
// iterator; otherwise wrap the bare value.
function getIterator(iterable) {
  const asyncFactory = iterable[Symbol.asyncIterator];
  if (asyncFactory) return asyncFactory.call(iterable);
  const syncFactory = iterable[Symbol.iterator];
  if (syncFactory) return syncFactory.call(iterable);
  return iterable.next ? iterable : fromValue(iterable);
}
// Currently 'for await' upsets my linters.
// Manually drive an (async) iterable, invoking `cb` per chunk.
// Written without `for await` (the original author's linters rejected it).
async function forAwait(iterable, cb) {
  const iterator = getIterator(iterable);
  let step;
  do {
    step = await iterator.next();
    // NOTE: falsy chunks are skipped, matching the original behavior.
    if (step.value) await cb(step.value);
  } while (!step.done);
  if (iterator.return) iterator.return();
}
// Drain an (async) iterable of Uint8Array chunks and concatenate them
// into one contiguous Uint8Array.
async function collect(iterable) {
  const chunks = [];
  let total = 0;
  await forAwait(iterable, (chunk) => {
    chunks.push(chunk);
    total += chunk.byteLength;
  });
  const joined = new Uint8Array(total);
  let offset = 0;
  for (const chunk of chunks) {
    joined.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return joined;
}
// Convert a web ReadableStream (not Node stream!) to an Async Iterator
// adapted from https://jakearchibald.com/2017/async-iterators-and-generators/
// Adapt a web ReadableStream (not a Node stream) to the async-iterator
// protocol; based on https://jakearchibald.com/2017/async-iterators-and-generators/
function fromStream(stream) {
  // Already natively async-iterable? Hand it back untouched.
  if (stream[Symbol.asyncIterator]) return stream;
  const reader = stream.getReader();
  const wrapper = {
    // reader.read() already resolves to { value, done }.
    next: () => reader.read(),
    return: () => {
      reader.releaseLock();
      return {};
    },
    [Symbol.asyncIterator]: () => wrapper,
  };
  return wrapper;
}
/* eslint-env browser */
/**
* HttpClient
*
* @param {GitHttpRequest} request
* @returns {Promise<GitHttpResponse>}
*/
/**
 * HttpClient
 *
 * Perform an HTTP request via `fetch`, translating between the GitHttp
 * request/response shapes and the browser's fetch API.
 *
 * @param {GitHttpRequest} request
 * @returns {Promise<GitHttpResponse>}
 */
async function request({
  onProgress,
  url,
  method = 'GET',
  headers = {},
  body,
}) {
  // Streaming uploads aren't possible yet in the browser: buffer the
  // whole body eagerly before sending.
  const payload = body ? await collect(body) : body;
  const res = await fetch(url, { method, headers, body: payload });
  // Prefer streaming the response body; otherwise buffer it into a
  // single-chunk array.
  const responseBody =
    res.body && res.body.getReader
      ? fromStream(res.body)
      : [new Uint8Array(await res.arrayBuffer())];
  // Flatten the Headers object into a plain JSON-friendly record.
  const responseHeaders = {};
  for (const [name, value] of res.headers.entries()) {
    responseHeaders[name] = value;
  }
  return {
    url: res.url,
    method: res.method,
    statusCode: res.status,
    statusMessage: res.statusText,
    body: responseBody,
    headers: responseHeaders,
  };
}
var index = { request };
exports.default = index;
exports.request = request;
Object.defineProperty(exports, '__esModule', { value: true });
})));

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
{:module-uris {:main [], :code-editor ["https://asset.logseq.com/static/js/code-editor.js"], :age-encryption ["https://asset.logseq.com/static/js/age-encryption.js"], :excalidraw ["https://asset.logseq.com/static/js/excalidraw.js"]}, :module-infos {:main nil, :code-editor #{:main}, :age-encryption #{:main}, :excalidraw #{:main}}} {:module-uris {:main [], :code-editor ["https://asset.logseq.com/static/js/code-editor.js"], :excalidraw ["https://asset.logseq.com/static/js/excalidraw.js"]}, :module-infos {:main nil, :code-editor #{:main}, :excalidraw #{:main}}}

View File

@@ -1 +1 @@
{"module-uris":{"main":[],"code-editor":["https://asset.logseq.com/static/js/code-editor.js"],"age-encryption":["https://asset.logseq.com/static/js/age-encryption.js"],"excalidraw":["https://asset.logseq.com/static/js/excalidraw.js"]},"module-infos":{"main":null,"code-editor":["main"],"age-encryption":["main"],"excalidraw":["main"]}} {"module-uris":{"main":[],"code-editor":["https://asset.logseq.com/static/js/code-editor.js"],"excalidraw":["https://asset.logseq.com/static/js/excalidraw.js"]},"module-infos":{"main":null,"code-editor":["main"],"excalidraw":["main"]}}

File diff suppressed because one or more lines are too long

View File

@@ -1,8 +1,6 @@
importScripts( importScripts(
// Batched optimization // Batched optimization
"./lightning-fs.min.js?v=0.0.2.3", "./lightning-fs.min.js?v=0.0.2.3",
"./isomorphic-git/1.7.4/index.umd.min.js",
"./isomorphic-git/1.7.4/http-web-index.umd.js",
// Fixed a bug // Fixed a bug
"./magic_portal.js" "./magic_portal.js"
); );
@@ -17,10 +15,6 @@ const detect = () => {
} }
}; };
function basicAuth (username, token) {
return "Basic " + btoa(username + ":" + token);
}
const fsName = 'logseq'; const fsName = 'logseq';
const createFS = () => new LightningFS(fsName); const createFS = () => new LightningFS(fsName);
let fs = createFS(); let fs = createFS();
@@ -28,174 +22,10 @@ let pfs = fs.promises;
if (detect() === 'Worker') { if (detect() === 'Worker') {
const portal = new MagicPortal(self); const portal = new MagicPortal(self);
portal.set('git', git);
portal.set('fs', fs); portal.set('fs', fs);
portal.set('pfs', pfs); portal.set('pfs', pfs);
portal.set('gitHttp', GitHttp);
portal.set('workerThread', { portal.set('workerThread', {
setConfig: function (dir, path, value) {
return git.setConfig ({
fs,
dir,
path,
value
});
},
clone: function (dir, url, corsProxy, depth, branch, username, token) {
return git.clone ({
fs,
dir,
http: GitHttp,
url,
corsProxy,
ref: branch,
singleBranch: true,
depth,
headers: {
"Authorization": basicAuth(username, token)
}
});
},
fetch: function (dir, url, corsProxy, depth, branch, username, token) {
return git.fetch ({
fs,
dir,
http: GitHttp,
url,
corsProxy,
ref: branch,
singleBranch: true,
depth,
headers: {
"Authorization": basicAuth(username, token)
}
});
},
pull: function (dir, corsProxy, branch, username, token) {
return git.pull ({
fs,
dir,
http: GitHttp,
corsProxy,
ref: branch,
singleBranch: true,
// fast: true,
headers: {
"Authorization": basicAuth(username, token)
}
});
},
push: function (dir, corsProxy, branch, force, username, token) {
return git.push ({
fs,
dir,
http: GitHttp,
ref: branch,
corsProxy,
remote: "origin",
force,
headers: {
"Authorization": basicAuth(username, token)
}
});
},
merge: function (dir, branch) {
return git.merge ({
fs,
dir,
ours: branch,
theirs: "remotes/origin/" + branch,
// fastForwardOnly: true
});
},
checkout: function (dir, branch) {
return git.checkout ({
fs,
dir,
ref: branch,
});
},
log: function (dir, branch, depth) {
return git.log ({
fs,
dir,
ref: branch,
depth,
singleBranch: true
})
},
add: function (dir, file) {
return git.add ({
fs,
dir,
filepath: file
});
},
remove: function (dir, file) {
return git.remove ({
fs,
dir,
filepath: file
});
},
commit: function (dir, message, name, email, parent) {
if (parent) {
return git.commit ({
fs,
dir,
message,
author: {name: name,
email: email},
parent: parent
});
} else {
return git.commit ({
fs,
dir,
message,
author: {name: name,
email: email}
});
}
},
readCommit: function (dir, oid) {
return git.readCommit ({
fs,
dir,
oid
});
},
readBlob: function (dir, oid, path) {
return git.readBlob ({
fs,
dir,
oid,
path
});
},
writeRef: function (dir, branch, oid) {
return git.writeRef ({
fs,
dir,
ref: "refs/heads/" + branch,
value: oid,
force: true
});
},
resolveRef: function (dir, ref) {
return git.resolveRef ({
fs,
dir,
ref
});
},
listFiles: function (dir, branch) {
return git.listFiles ({
fs,
dir,
ref: branch
});
},
rimraf: async function (path) { rimraf: async function (path) {
// try { // try {
// // First assume path is itself a file // // First assume path is itself a file
@@ -225,83 +55,6 @@ if (detect() === 'Worker') {
} }
// Finally, delete the empty directory // Finally, delete the empty directory
await pfs.rmdir(path) await pfs.rmdir(path)
},
getFileStateChanges: async function (commitHash1, commitHash2, dir) {
return git.walk({
fs,
dir,
trees: [git.TREE({ ref: commitHash1 }), git.TREE({ ref: commitHash2 })],
map: async function(filepath, [A, B]) {
var type = 'equal';
if (A === null) {
type = "add";
}
if (B === null) {
type = "remove";
}
// ignore directories
if (filepath === '.') {
return
}
if ((A !== null && (await A.type()) === 'tree')
||
(B !== null && (await B.type()) === 'tree')) {
return
}
// generate ids
const Aoid = A !== null && await A.oid();
const Boid = B !== null && await B.oid();
if (type === "equal") {
// determine modification type
if (Aoid !== Boid) {
type = 'modify'
}
if (Aoid === undefined) {
type = 'add'
}
if (Boid === undefined) {
type = 'remove'
}
}
if (Aoid === undefined && Boid === undefined) {
console.log('Something weird happened:')
console.log(A)
console.log(B)
}
return {
path: `/${filepath}`,
type: type,
}
},
})
},
statusMatrix: async function (dir) {
return git.statusMatrix({ fs, dir });
},
statusMatrixChanged: async function (dir) {
return (await git.statusMatrix({ fs, dir }))
.filter(([_, head, workDir, stage]) => !(head == 1 && workDir == 1 && stage == 1));
},
getChangedFiles: async function (dir) {
try {
const FILE = 0, HEAD = 1, WORKDIR = 2;
let filenames = (await git.statusMatrix({ fs, dir }))
.filter(row => row[HEAD] !== row[WORKDIR])
.map(row => row[FILE]);
return filenames;
} catch (err) {
console.error(err);
return [];
}
} }
}); });
// self.addEventListener("message", ({ data }) => console.log(data));
} }

BIN
trace.zip

Binary file not shown.