// Create the output directories; ignore the error if they already exist.
try { Deno.mkdirSync("blobs") } catch (error) {}
try { Deno.mkdirSync("repos") } catch (error) {}

// Fetch a URL and throw on any non-2xx response, including the response body in the error.
async function get(url, options) {
  console.debug("get", url);
  var res = await fetch(url, options);
  if (!res.ok) throw new Error(`HTTP ${res.status} ${res.statusText} ${await res.text()}`);
  return res;
}

// Resolve a did:plc to its PDS endpoint via the DID document from the PLC directory.
async function getPds(did) {
  var doc = await get(`https://plc.directory/${did}`).then(res => res.json());
  var {serviceEndpoint} = doc.service.find(s => s.id == "#atproto_pds");
  return serviceEndpoint;
}

// Download the full repo CAR file and save it under repos/, keyed by the DID.
async function getRepo(pds, did) {
  var fp = `repos/${did.replaceAll(':', '-')}`;
  var res = await get(`${pds}/xrpc/com.atproto.sync.getRepo?did=${did}`);
  var ab = await res.arrayBuffer();
  Deno.writeFileSync(fp, new Uint8Array(ab));
  return ab;
}

function exists(path) {
  try {
    Deno.lstatSync(path);
    return true;
  } catch (error) {
    if (error instanceof Deno.errors.NotFound) return false;
    throw error;
  }
}

// Download a single blob into blobs/<cid>, skipping blobs that are already saved.
async function getBlob(pds, did, cid) {
  /* Streaming variant, abandoned because an interrupted download leaves a corrupt file
     that later runs would then skip:
  try {
    var file = await Deno.open(`blobs/${cid}`, {write: true, createNew: true});
  } catch (error) {
    if (error instanceof Deno.errors.AlreadyExists) return;
    throw error;
  }
  var res = await get(`${pds}/xrpc/com.atproto.sync.getBlob?did=${did}&cid=${cid}`);
  await res.body.pipeTo(file.writable);
  */
  var fp = `blobs/${cid}`;
  if (exists(fp)) return;
  var res = await get(`${pds}/xrpc/com.atproto.sync.getBlob?did=${did}&cid=${cid}`);
  var ab = await res.arrayBuffer();
  Deno.writeFileSync(fp, new Uint8Array(ab));
}

// List every blob CID for the DID (paginating with the cursor), then download each one.
async function getBlobs(pds, did) {
  var allCids = [];
  var _cursor;
  do {
    try {
      let {cids, cursor} = await get(`${pds}/xrpc/com.atproto.sync.listBlobs?did=${did}&limit=1000${_cursor ? `&cursor=${_cursor}` : ''}`).then(res => res.json());
      _cursor = cursor;
      allCids.push(...cids);
    } catch (error) {
      console.error(error); // on error the same page is retried on the next pass
    }
  } while (_cursor);
  console.log(`${allCids.length} blobs`);
  for (var cid of allCids) await getBlob(pds, did, cid).catch(console.error);
}

var backedUpDids = [];

// Back up one DID: resolve its PDS, save its repo, then save all of its blobs.
async function backup(did) {
  if (backedUpDids.includes(did)) return;
  console.log("backup", did);
  backedUpDids.push(did);
  var pds = await getPds(did);
  var repo = await getRepo(pds, did).catch(console.error);
  await getBlobs(pds, did);
  return repo;
}

var ascii = new TextDecoder("ascii");

// Back up the given DIDs, then scan each saved repo for did:plc references and
// recurse into those related DIDs until depth reaches 0.
async function backupRecursive(dids, depth = 1) {
  console.log(`backup ${dids.length ?? dids.size} dids depth ${depth}`); // dids may be an Array or a Set
  if (!depth) {
    for (var did of dids) await backup(did).catch(console.error);
    return;
  }
  var allrdids = new Set();
  for (var did of dids) {
    try {
      var repo = await backup(did);
      repo = ascii.decode(repo);
      var rdids = new Set(repo.match(/did:plc:[a-z2-7]{24}/g));
      rdids.delete(did);
      console.log(`${rdids.size} related didplcs`);
      rdids.forEach(rdid => allrdids.add(rdid));
    } catch (error) {
      console.error(error);
    }
  }
  await backupRecursive(allrdids, depth - 1);
}

// Parse the command line: did:plc: arguments are the roots to back up, --depth=N sets the recursion depth.
var didsToBackup = Deno.args.filter(arg => arg.startsWith("did:plc:"));
var depth;
for (let arg of Deno.args) {
  let match = arg.match(/^--depth=(\d+)$/i);
  if (match) depth = Number(match[1]);
}
await backupRecursive(didsToBackup, depth);
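
// Example invocation (a sketch: the filename backup.js and the DID are placeholders;
// the script needs Deno's network permission plus read/write access for the blobs/
// and repos/ directories it creates):
//
//   deno run --allow-net --allow-read --allow-write backup.js \
//     did:plc:xxxxxxxxxxxxxxxxxxxxxxxx --depth=2
//
// With --depth=2 this backs up the listed DID, every did:plc found in its repo,
// and every did:plc found in those repos in turn. Omitting --depth uses the
// default depth of 1.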