Working export/import

This commit is contained in:
bobbydigitales
2026-04-19 21:39:25 -07:00
parent 4ae581b1a2
commit e8cc08e5cc
6 changed files with 181 additions and 61 deletions

View File

@@ -528,42 +528,105 @@ export class App {
globalThis.URL.revokeObjectURL(url);
}
/**
 * Import a compressed posts export into local storage.
 *
 * Accepts both export formats:
 *  - old format: a bare JSON array of posts, attributed to the current user;
 *  - new format: `{ username, userID, posts }` where `posts` maps each
 *    userID to that user's post array. In this case the exported identity
 *    is also adopted locally via the `dandelion_*` localStorage keys.
 *
 * For every post, base64 image payloads are decoded back to ArrayBuffers
 * and string timestamps are revived as Date objects before the per-user
 * batch is handed to `mergeDataArray`. Per-user and total timings are logged.
 *
 * NOTE(review): posts are typed `any` here — the post schema is not visible
 * in this file; confirm against the export side before tightening types.
 *
 * @param buffer gzip-compressed JSON produced by `exportPostsForUser`.
 */
async importPostsForUser(buffer: ArrayBuffer) {
    const startTime = performance.now();
    console.log.apply(null, log("Importing posts"));
    const json = await decompressBuffer(buffer);
    const data = JSON.parse(json);
    let postsByUser: { [userID: string]: any[] };
    let username = this.username;
    let userID = this.userID;
    if (Array.isArray(data)) {
        // Old export format: a flat array of posts belonging to the current user.
        console.log.apply(null, log("Detected old export format"));
        postsByUser = { [this.userID]: data };
    } else {
        // New export format: carries the exporting identity plus posts keyed by user.
        console.log.apply(null, log("Detected new export format"));
        const { username: importedUsername, userID: importedUserID, posts } = data;
        username = importedUsername;
        userID = importedUserID;
        postsByUser = posts;
        // Adopt the imported identity; the caller re-reads these keys after import.
        localStorage.setItem("dandelion_username", username);
        localStorage.setItem("dandelion_id", userID);
    }
    let totalPostsImported = 0;
    const userTimings: { [userID: string]: number } = {};
    for (const [sourceUserID, posts] of Object.entries(postsByUser)) {
        const userStartTime = performance.now();
        const postList = posts as any[];
        for (let post of postList) {
            // Images are serialized as base64 strings in the export; decode in place.
            if (post.image_data && typeof post.image_data === 'string') {
                post.image_data = await base64ToArrayBuffer(post.image_data);
            }
            // Timestamps round-trip through JSON as ISO strings; revive as Dates.
            if (post.post_timestamp && typeof post.post_timestamp === 'string') {
                post.post_timestamp = new Date(post.post_timestamp);
            }
        }
        await mergeDataArray(sourceUserID, postList);
        totalPostsImported += postList.length;
        userTimings[sourceUserID] = performance.now() - userStartTime;
    }
    const totalTime = performance.now() - startTime;
    const userTimingsLog = Object.entries(userTimings)
        .map(([uid, ms]) => `${logID(uid)}: ${ms.toFixed(2)}ms`)
        .join(', ');
    console.log.apply(null, log(`Imported ${totalPostsImported} posts from ${Object.keys(postsByUser).length} users in ${totalTime.toFixed(2)}ms (${userTimingsLog})`));
}
async exportPostsForUser(userID: string) {
async exportPostsForUser() {
console.log.apply(null, log("Exporting all posts for all users"));
const exportStartTime = performance.now();
let posts = await getAllData(userID);
const knownUsers = [...(await indexedDB.databases())]
.map(db => db.name?.replace('user_', ''))
.filter((userID): userID is string => userID !== undefined);
let output = [];
const postsByUser: { [userID: string]: any[] } = {};
const userTimings: { [userID: string]: number } = {};
console.log.apply(null, log("Serializing images"));
for (let post of posts) {
let newPost = (post as any).data;
for (const userID of knownUsers) {
const userStartTime = performance.now();
const posts = await getAllData(userID);
const output = [];
if (newPost.image_data) {
newPost.image_data = await arrayBufferToBase64(newPost.image_data);
for (let post of posts) {
let newPost = (post as any).data;
if (newPost.image_data) {
newPost.image_data = await arrayBufferToBase64(newPost.image_data);
}
output.push(newPost);
}
output.push(newPost);
if (output.length > 0) {
postsByUser[userID] = output;
}
userTimings[userID] = performance.now() - userStartTime;
}
let compressedData = await compressString(JSON.stringify(output));
const totalTime = performance.now() - exportStartTime;
const userTimingsLog = Object.entries(userTimings)
.map(([uid, ms]) => `${logID(uid)}: ${ms.toFixed(2)}ms`)
.join(', ');
console.log.apply(null, log(`Exported ${Object.keys(postsByUser).length} users in ${totalTime.toFixed(2)}ms (${userTimingsLog})`));
const exportData = {
username: this.username,
userID: this.userID,
posts: postsByUser
};
let compressedData = await compressString(JSON.stringify(exportData));
const d = new Date();
const timestamp = `${d.getFullYear()
@@ -573,8 +636,7 @@ export class App {
}_${String(d.getMinutes()).padStart(2, '0')
}_${String(d.getSeconds()).padStart(2, '0')}`;
this.downloadBinary(compressedData, `ddln_${this.username}_export_${timestamp}.json.gz`);
this.downloadBinary(compressedData, `ddln_export_${timestamp}.json.gz`);
}
async importTweetArchive(userID: string, tweetArchive: any[]) {
@@ -1013,15 +1075,16 @@ export class App {
const file = importFilePicker.files?.[0];
if (!file) return;
const buffer = await file.arrayBuffer();
await this.importPostsForUser(this.userID, buffer);
await this.importPostsForUser(buffer);
importFilePicker.value = '';
this.userID = localStorage.getItem("dandelion_id") || this.userID;
this.username = localStorage.getItem("dandelion_username") || this.username;
this.render();
});
let exportButton = this.button("export-button");
exportButton.addEventListener('click', async e => {
await this.exportPostsForUser(this.userID)
await this.exportPostsForUser()
});
let composeButton = this.div('compose-button');