Post syncing logic is working. As expected, DataChannels fail to send posts above a certain size. Need to implement chunking next.

This commit is contained in:
2025-05-19 01:21:32 -07:00
parent 1a47dc8931
commit 42bed0f516
4 changed files with 115 additions and 15 deletions

View File

@@ -1,13 +1,37 @@
import { openDatabase, getData, addData, addDataArray, clearData, deleteData, mergeDataArray, getAllData, checkPostIds, getAllIds, getPostsByIds } from "db";
import { log, logID } from "log";
/**
 * Encodes raw bytes as a base64 data URL, e.g. "data:application/octet-stream;base64,...".
 *
 * Implemented with chunked btoa instead of the FileReader/File round-trip so it
 * also works outside the DOM (workers, Node) and resolves with a properly typed
 * string — callers no longer need an `as string` cast.
 *
 * @param bytes raw bytes to encode
 * @param type MIME type embedded in the data URL (defaults to octet-stream)
 * @returns the full data URL, including the "data:<type>;base64," prefix
 */
async function bytesToBase64DataUrl(bytes: Uint8Array, type = "application/octet-stream"): Promise<string> {
    let binary = "";
    // Encode in chunks to keep String.fromCharCode argument counts below engine limits.
    const CHUNK = 0x8000;
    for (let i = 0; i < bytes.length; i += CHUNK) {
        binary += String.fromCharCode(...Array.from(bytes.subarray(i, i + CHUNK)));
    }
    return `data:${type};base64,${btoa(binary)}`;
}
/**
 * Converts an ArrayBuffer to a bare base64 string (no data-URL prefix).
 *
 * Strips everything up to and including the first comma rather than replacing a
 * hard-coded prefix string, so it stays correct even if the underlying data
 * URL's MIME type ever differs from "application/octet-stream".
 *
 * @param buffer binary payload to encode
 * @returns base64 text with the data-URL header removed
 */
async function arrayBufferToBase64(buffer: ArrayBuffer): Promise<string> {
    const dataUrl = await bytesToBase64DataUrl(new Uint8Array(buffer)) as string;
    return dataUrl.slice(dataUrl.indexOf(",") + 1);
}
/**
 * Decodes a base64 string back into an ArrayBuffer by fetching it as a
 * data URL and reading the response body as binary.
 *
 * @param base64String bare base64 text (no data-URL prefix)
 * @returns the decoded bytes
 */
async function base64ToArrayBuffer(base64String: string) {
    const response = await fetch(`data:application/octet-stream;base64,${base64String}`);
    return response.arrayBuffer();
}
export class Sync {
isArchivePeer: boolean = false;
userID: string = "";
userPeers: Map<string, Set<string> = new Map();
userPeers: Map<string, Set<string>> = new Map();
userIDsToSync: Set<string> = new Set();
syncSuperlog: boolean = true;
syncSuperlog: boolean = false;
setArchive(isHeadless: boolean) {
this.isArchivePeer = isHeadless;
@@ -132,7 +156,7 @@ export class Sync {
let postIds = await getAllIds(userID) ?? [];
postIds = postIds.filter((postID: string) => !this.postBlockList.has(postID));
if (postIds.length === 0) {
console.log.apply(null, log(`Net: I know about user ${logID(userID)} but I have 0 posts`));;
console.log.apply(null, log(`Net: I know about user ${logID(userID)} but I have 0 posts`));
return null;
}
@@ -155,7 +179,40 @@ export class Sync {
// return [];
// }
return postIDs;
return neededPostIds;
}
/**
 * Loads the given posts for a user and serializes them for transport:
 * binary image_data is converted to base64 so the result is JSON-safe.
 *
 * @param userID owner of the posts
 * @param postIDs ids of the posts to fetch and serialize
 * @returns array of post data objects with image_data base64-encoded
 */
async getPostsForUser(userID: string, postIDs: string[]) {
    const posts = await getPostsByIds(userID, postIDs) ?? [];
    console.log.apply(null, log(`[sync] got ${posts.length} posts for user ${logID(userID)}`));
    const output = [];
    console.log.apply(null, log("Serializing images"));
    for (const post of posts) {
        // NOTE(review): assumes each stored record wraps the post payload in a
        // `.data` property — confirm against the IndexedDB write path.
        const newPost = (post as any).data;
        if (newPost.image_data) {
            // TODO don't do this, use Blobs directly!
            // https://developer.chrome.com/blog/blob-support-for-Indexeddb-landed-on-chrome-dev
            newPost.image_data = await arrayBufferToBase64(newPost.image_data);
        }
        output.push(newPost);
    }
    return output;
}
// async getPostIdsForUserHandler(data: any) {