Implement per-user message queues to prevent duplicate post syncing. Correctly initialize from a user or connect URL. Don't register the service worker when we're headless or an archive peer. Wrap sync logging with superlog

This commit is contained in:
2025-05-27 00:01:52 -07:00
parent 71b047099b
commit 75b55104bd
3 changed files with 200 additions and 179 deletions

View File

@@ -714,28 +714,29 @@ class PeerConnection {
while (this.dataChannel.bufferedAmount >= 8 * 1024 * 1024) {
await new Promise<void>((resolve, reject) => { setTimeout(()=> resolve(), 1000);
})
}
await new Promise<void>((resolve, reject) => {
setTimeout(() => resolve(), 1000);
})
}
let messageJSON = JSON.stringify(message);
this.messageSuperlog && console.log.apply(null, log(`[${logID(this.remotePeerID)}]<-datachannel[${logID(this.peerManager.peerID)}]:`, message.type, message, `message size:${messageJSON.length}`));
this.messageSuperlog && console.log.apply(null, log(`[${logID(this.remotePeerID)}]<-datachannel[${logID(this.peerManager.peerID)}]:`, message.type, message, `message size:${messageJSON.length}`));
if (messageJSON.length > this.chunkSize) {
this.messageSuperlog && console.log.apply(null, log(`[datachannel] sending long message: `, messageJSON.length));
this.sendLongMessage(messageJSON);
return;
}
if (messageJSON.length > this.chunkSize) {
this.messageSuperlog && console.log.apply(null, log(`[datachannel] sending long message: `, messageJSON.length));
this.sendLongMessage(messageJSON);
return;
}
try {
this.dataChannel?.send(messageJSON);
} catch (e) {
console.log.apply(null, log(e));
try {
this.dataChannel?.send(messageJSON);
} catch (e) {
console.log.apply(null, log(e));
}
}
// this.onMessage(messageJSON);
}
@@ -743,140 +744,140 @@ try {
// Get a polyfill for browsers that don't have this API
/**
 * Hash a message with SHA-256 via WebCrypto (`crypto.subtle.digest`)
 * and return the digest as a lowercase hex string.
 *
 * @param message - arbitrary string to hash (UTF-8 encoded first)
 * @returns the 64-character hex digest
 */
async hashMessage(message: string) {
    let msgUint8 = new TextEncoder().encode(message);
    const hashBuffer = await crypto.subtle.digest("SHA-256", msgUint8);
    const hashArray = Array.from(new Uint8Array(hashBuffer));
    // Each byte becomes two hex characters, zero-padded.
    const hashHex = hashArray.map((b) => b.toString(16).padStart(2, "0")).join('');
    return hashHex;
}
/**
 * Send a message too large for one data-channel frame by splitting it into
 * chunks of half the channel chunk size. Each chunk is wrapped in a 'chunk'
 * envelope carrying the message ID, the whole-message hash, and a per-chunk
 * hash so the receiver (onMessage, type === "chunk") can reassemble and
 * verify the complete message.
 *
 * @param message - the serialized message JSON to transmit
 */
async sendLongMessage(message: string) {
    // Half the frame budget leaves headroom for the envelope fields.
    let chunkSize = this.chunkSize / 2;
    let chunks = Math.ceil(message.length / chunkSize);
    let messageID = generateID();
    // Hash of the complete message; the receiver checks this after reassembly.
    let hash = await this.hashMessage(message);
    for (let i = 0; i < chunks; i++) {
        let offset = i * chunkSize;
        let chunk = message.substring(offset, offset + chunkSize);
        let chunkHash = await this.hashMessage(chunk);
        this.chunkSuperlog && console.log.apply(null, log(`[chunk] chunkHash:${logID(chunkHash)} from:${logID(this.peerManager.peerID)} to:${logID(this.remotePeerID)} messageID:${logID(messageID)} hash:${logID(hash)} ${i + 1}/${chunks}`));
        let netMessage = { type: 'chunk', message_id: messageID, hash: hash, chunk_index: i, total_chunks: chunks, chunk: chunk, chunk_hash: chunkHash };
        // send() handles backpressure and per-chunk framing.
        await this.send(netMessage);
    }
}
/**
 * Invoke a remote procedure on the connected peer over the data channel.
 * Registers the transaction in pendingRPCs; the returned promise is settled
 * by onMessage when the matching 'rpc_response' arrives.
 *
 * NOTE(review): there is no timeout, so the promise never settles if the
 * peer drops — confirm callers tolerate this.
 *
 * @param functionName - name of the remote function to invoke
 * @param args - arguments forwarded verbatim to the remote side
 * @returns a promise resolving with the remote response value
 */
call(functionName: string, args: any) {
    let transactionID = generateID(); // make this faster as we will only ever have a small number of in-flight queries on a peer
    // Think about a timeout here to auto reject it after a while.
    let promise = new Promise((resolve, reject) => {
        // Resolved/rejected later by onMessage when the response arrives.
        this.pendingRPCs.set(transactionID, { resolve, reject, functionName });
    });
    let message = {
        type: "rpc_call",
        transaction_id: transactionID,
        function_name: functionName,
        args: args,
    };
    this.rpcSuperlog && console.log.apply(null, log(`[${logID(this.remotePeerID)}]<-[rpc][${logID(this.peerManager.peerID)}]`, message.function_name, message.transaction_id, JSON.stringify(message.args, null, 2)));
    this.send(message);
    return promise;
}
/**
 * Handle an inbound data-channel payload. Parses the JSON and dispatches on
 * message.type:
 *  - "rpc_response": settle the pending promise registered by call()
 *  - "rpc_call": execute locally via peerManager.callFromRemote and reply
 *  - "initial_peers": connect to each advertised peer
 *  - "chunk": accumulate chunks of a long message; once complete, verify the
 *    whole-message hash and re-dispatch the reassembled payload through
 *    onMessage.
 *
 * @param messageJSON - raw string received from the data channel
 */
async onMessage(messageJSON: any) {
    let message: any = {};
    try {
        message = JSON.parse(messageJSON);
    } catch (e) {
        console.log.apply(null, log("PeerConnection.onMessage:", e));
    }
    this.messageSuperlog && console.log.apply(null, log(`[${logID(this.remotePeerID)}]->datachannel[${logID(this.peerManager.peerID)}]`, message.type, message));
    let type = message.type;
    if (type === "rpc_response") {
        this.rpcSuperlog && console.log.apply(null, log(`[${logID(this.remotePeerID)}]<-[rpc][${logID(this.peerManager.peerID)}] response: `, message.function_name, message.transaction_id, JSON.stringify(message.args, null, 2)));
        let pendingRPC = this.pendingRPCs.get(message.transaction_id);
        if (!pendingRPC) {
            // A response we never asked for (or one already settled) is a protocol error.
            throw new Error(`rpc_response for unknown transaction: ${message.transaction_id}`);
        }
        pendingRPC.resolve(message.response);
    }
    if (type === "rpc_call") {
        this.rpcSuperlog && console.log.apply(null, log(`[${logID(this.remotePeerID)}]->[rpc][${logID(this.peerManager.peerID)}] call: `, message.function_name, message.transaction_id, JSON.stringify(message.args, null, 2)));
        let response = await this.peerManager.callFromRemote(message.function_name, message.args);
        this.rpcSuperlog && console.log.apply(null, log(`[rpc] call: response:`, response));
        if (response === undefined) {
            // Fire-and-forget calls produce no response message.
            return;
        }
        let responseMessage = { type: 'rpc_response', function_name: message.function_name, transaction_id: message.transaction_id, response: response };
        this.send(responseMessage);
    }
    if (type === "initial_peers") {
        for (let peerID of message.peers) {
            console.log(log("Connecting to initial peer ", peerID));
            this.peerManager.connectToPeer(peerID);
        }
    }
    if (type === "chunk") {
        let messageID = message.message_id;
        if (!this.longMessages.has(messageID)) {
            this.longMessages.set(messageID, { messageChunks: [], totalChunks: message.total_chunks, hash: message.hash });
        }
        let longMessage = this.longMessages.get(messageID);
        if (!longMessage) {
            return;
        }
        // Hash the chunk payload itself (the sender's chunk_hash is hash(chunk));
        // previously this hashed message.chunk_hash, making the log comparison meaningless.
        let chunkHash = await this.hashMessage(message.chunk);
        longMessage.messageChunks.push(message.chunk);
        this.chunkSuperlog && console.log.apply(null, log(`[chunk] chunked message sent chunkHash:${logID(message.chunk_hash)} computed hash: ${logID(chunkHash)} messageId:${logID(messageID)} chunk ${message.chunk_index + 1}/${longMessage.totalChunks}`));
        // NOTE(review): assumes chunks arrive in order — the final index triggers
        // reassembly and chunks are joined in arrival order. Confirm ordered
        // delivery (reliable/ordered data channel) holds for this channel.
        if (message.chunk_index === longMessage.totalChunks - 1) {
            let completeMessage = longMessage.messageChunks.join('');
            let hash = await this.hashMessage(completeMessage);
            this.chunkSuperlog && console.log.apply(null, log(`[chunk] hashes match: ${hash === longMessage.hash} sent hash: ${logID(longMessage.hash)} computed hash: ${logID(hash)}`));
            if (hash !== longMessage.hash) {
                throw new Error("[chunk] long message hashes don't match.");
            }
            // Re-dispatch the reassembled payload as a normal message.
            this.onMessage(completeMessage);
            this.longMessages.delete(messageID);
        }
    }
    // this.peerManager.onMessage(this.remotePeerID, message);
}