split common and repo out into separate packages

dholms 2022-09-12 21:01:56 -05:00
parent 978fcaeeb9
commit 94babb38b7
35 changed files with 1408 additions and 205 deletions
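For downstream code, the practical effect is that deep relative imports into the old src tree become package imports. A minimal sketch of the before/after, assuming only the exports visible in the hunks below (check, util, def, and TID on @adxp/common):

// before: deep relative imports into a single src tree
// import * as check from '../common/check'
// import TID from './repo/tid'

// after: shared utilities come from the extracted @adxp/common package
import { check, util, def, TID } from '@adxp/common'

const tid = TID.next()
const name = check.assure(def.string, 'alice') // zod-backed runtime check
const sortable = util.s32encode(tid.timestamp())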

@@ -1,17 +1,10 @@
export * from './blockstore'
export * from './repo'
export * as check from './check'
export * as util from './util'
export * from './util'
export * as check from './common/check'
export * as util from './common/util'
export * from './tid'
export * as service from './network/service'
export * from './network/names'
export * from './network/uri'
import { def as commonDef } from './common/types'
import { def as repoDef } from './repo/types'
export const def = {
common: commonDef,
repo: repoDef,
}
export * from './types'

@@ -1,156 +0,0 @@
import axios from 'axios'
import { CID } from 'multiformats'
import { authCfg, didNetworkUrl, parseAxiosError } from './util'
import * as check from '../common/check'
import { def } from '../common/types'
import * as uint8arrays from 'uint8arrays'
import * as auth from '@adxp/auth'
export const registerToDidNetwork = async (
username: string,
signer: auth.AuthStore,
): Promise<void> => {
const url = didNetworkUrl()
const dataBytes = uint8arrays.fromString(username, 'utf8')
const sigBytes = await signer.sign(dataBytes)
const signature = uint8arrays.toString(sigBytes, 'base64url')
const did = await signer.did()
const data = { did, username, signature }
try {
await axios.post(url, data)
} catch (e) {
const err = parseAxiosError(e)
throw new Error(err.msg)
}
}
export const getUsernameFromDidNetwork = async (
did: string,
): Promise<string | null> => {
const url = didNetworkUrl()
const params = { did }
try {
const res = await axios.get(url, { params })
return res.data.username
} catch (e) {
const err = parseAxiosError(e)
if (err.code === 404) {
return null
}
throw new Error(err.msg)
}
}
export const register = async (
url: string,
username: string,
did: string,
createRepo: boolean,
token: auth.Ucan,
): Promise<void> => {
const data = { username, did, createRepo }
try {
await axios.post(`${url}/id/register`, data, authCfg(token))
} catch (e) {
const err = parseAxiosError(e)
throw new Error(err.msg)
}
}
export const lookupDid = async (
url: string,
name: string,
): Promise<string | null> => {
const params = { resource: name }
try {
const res = await axios.get(`${url}/.well-known/webfinger`, {
params,
})
return check.assure(def.string, res.data.id)
} catch (e) {
const err = parseAxiosError(e)
if (err.code === 404) {
return null
}
throw new Error(err.msg)
}
}
export const getServerDid = async (url: string): Promise<string> => {
try {
const res = await axios.get(`${url}/.well-known/adx-did`)
return res.data
} catch (e) {
const err = parseAxiosError(e)
throw new Error(`Could not retrieve server did ${err.msg}`)
}
}
export const getRemoteRoot = async (
url: string,
did: string,
): Promise<CID | null> => {
const params = { did }
try {
const res = await axios.get(`${url}/data/root`, { params })
return CID.parse(res.data.root)
} catch (e) {
const err = parseAxiosError(e)
if (err.code === 404) {
return null
}
throw new Error(`Could not retrieve repo root: ${err.msg}`)
}
}
export const subscribe = async (
url: string,
did: string,
ownUrl: string,
): Promise<void> => {
const data = { did, host: ownUrl }
try {
await axios.post(`${url}/data/subscribe`, data)
} catch (e) {
const err = parseAxiosError(e)
throw new Error(`Could not subscribe to repo: ${err.msg}`)
}
}
export const pushRepo = async (
url: string,
did: string,
car: Uint8Array,
): Promise<void> => {
try {
await axios.post(`${url}/data/repo/${did}`, car, {
headers: {
'Content-Type': 'application/octet-stream',
},
})
} catch (e) {
const err = parseAxiosError(e)
throw new Error(`Could not push repo: ${err.msg}`)
}
}
export const pullRepo = async (
url: string,
did: string,
from?: CID,
): Promise<Uint8Array | null> => {
const params = { did, from: from?.toString() }
try {
const res = await axios.get(`${url}/data/repo`, {
params,
responseType: 'arraybuffer',
})
return new Uint8Array(res.data)
} catch (e) {
const err = parseAxiosError(e)
if (err.code === 404) {
return null
}
throw new Error(`Could not pull repo: ${err.msg}`)
}
}

@@ -1,4 +1,4 @@
import { s32encode, s32decode } from '../common/util'
import { s32encode, s32decode } from './util'
let lastTimestamp = 0
let timestampCount = 0
let clockid: number | null = null

@@ -0,0 +1 @@
module.exports = require('../../babel.config.js')

@@ -0,0 +1,7 @@
const base = require('./jest.config')
module.exports = {
...base,
testRegex: '(/tests/.*.bench)',
testTimeout: 3000000
}

@@ -0,0 +1,6 @@
const base = require('../../jest.config.base.js')
module.exports = {
...base,
displayName: 'Common',
}

@@ -0,0 +1,33 @@
{
"name": "@adxp/common",
"version": "0.0.1",
"main": "src/index.ts",
"scripts": {
"test": "jest",
"test:profile": "node --inspect ../../node_modules/.bin/jest",
"bench": "jest --config jest.bench.config.js ",
"bench:profile": "node --inspect ../../node_modules/.bin/jest --config jest.bench.config.js",
"prettier": "prettier --check src/",
"prettier:fix": "prettier --write src/",
"lint": "eslint . --ext .ts,.tsx",
"lint:fix": "yarn lint --fix",
"verify": "run-p prettier lint",
"verify:fix": "yarn prettier:fix && yarn lint:fix",
"build": "esbuild src/index.ts --define:process.env.NODE_ENV=\\\"production\\\" --bundle --platform=node --sourcemap --outfile=dist/index.js",
"postbuild": "tsc --build tsconfig.build.json"
},
"dependencies": {
"@adxp/auth": "*",
"@adxp/common": "*",
"@adxp/schemas": "*",
"@ipld/car": "^3.2.3",
"@ipld/dag-cbor": "^7.0.0",
"@ucans/core": "0.0.1-alpha2",
"axios": "^0.24.0",
"ipld-hashmap": "^2.1.10",
"level": "^8.0.0",
"multiformats": "^9.6.4",
"uint8arrays": "^3.0.0",
"zod": "^3.14.2"
}
}

@@ -4,10 +4,9 @@ import { sha256 as blockHasher } from 'multiformats/hashes/sha2'
import * as blockCodec from '@ipld/dag-cbor'
import { BlockWriter } from '@ipld/car/writer'
import * as check from '../common/check'
import * as util from '../common/util'
import { check, util } from '@adxp/common'
import { BlockReader } from '@ipld/car/api'
import CidSet from '../repo/cid-set'
import CidSet from '../cid-set'
type AllowedIpldRecordVal = string | number | CID | CID[] | Uint8Array | null

@@ -1,5 +1,5 @@
import { TID } from '@adxp/common'
import Repo from './repo'
import TID from './tid'
export class Collection {
repo: Repo

@@ -1,4 +1,4 @@
export * from './blockstore'
export * from './repo'
export * from './tid'
export * from './mst'
export * from './types'

@@ -1,8 +1,8 @@
import z from 'zod'
import { CID } from 'multiformats'
import IpldStore from '../../blockstore/ipld-store'
import { def } from '../../common/types'
import IpldStore from '../blockstore/ipld-store'
import { def } from '@adxp/common'
import { DataDiff } from './diff'
import { DataStore } from '../types'
import { BlockWriter } from '@ipld/car/api'

@@ -3,7 +3,7 @@ import { sha256 as blockHasher } from 'multiformats/hashes/sha2'
import * as blockCodec from '@ipld/dag-cbor'
import { CID } from 'multiformats'
import * as uint8arrays from 'uint8arrays'
import IpldStore from '../../blockstore/ipld-store'
import IpldStore from '../blockstore/ipld-store'
import { sha256 } from '@adxp/crypto'
import { MST, Leaf, NodeEntry, NodeData, MstOpts, Fanout } from './mst'

@@ -3,15 +3,12 @@ import { CarReader, CarWriter } from '@ipld/car'
import { BlockWriter } from '@ipld/car/lib/writer-browser'
import { RepoRoot, Commit, def, BatchWrite, DataStore } from './types'
import * as check from '../common/check'
import IpldStore, { AllowedIpldVal } from '../blockstore/ipld-store'
import { streamToArray } from '../common/util'
import { check, streamToArray, TID } from '@adxp/common'
import IpldStore, { AllowedIpldVal } from './blockstore/ipld-store'
import * as auth from '@adxp/auth'
import * as service from '../network/service'
import { AuthStore } from '@adxp/auth'
import { DataDiff, MST } from './mst'
import Collection from './collection'
import TID from './tid'
export class Repo {
blockstore: IpldStore
@@ -262,27 +259,6 @@ export class Repo {
return this.authStore.createUcan(forDid, auth.maintenanceCap(this.did()))
}
// PUSH/PULL TO REMOTE
// -----------
async push(url: string): Promise<void> {
const remoteRoot = await service.getRemoteRoot(url, this.did())
if (this.cid.equals(remoteRoot)) {
// already up to date
return
}
const car = await this.getDiffCar(remoteRoot)
await service.pushRepo(url, this.did(), car)
}
async pull(url: string): Promise<void> {
const car = await service.pullRepo(url, this.did(), this.cid)
if (car === null) {
throw new Error(`Could not find repo for did: ${this.did()}`)
}
await this.loadAndVerifyDiff(car)
}
// VERIFYING UPDATES
// -----------
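With networking dropped from the Repo class, the equivalent of the removed push() has to live with callers. A sketch reconstructed from the deleted method body above; the @adxp/repo package name and the service import path are assumptions:

import { Repo } from '@adxp/repo' // package name assumed from the commit message
import * as service from './network/service' // the removed module; its final home is assumed

// mirror of the deleted Repo.push(): diff against the remote root and upload a CAR
const pushToRemote = async (repo: Repo, url: string): Promise<void> => {
  const remoteRoot = await service.getRemoteRoot(url, repo.did())
  if (remoteRoot && repo.cid.equals(remoteRoot)) {
    return // already up to date
  }
  const car = await repo.getDiffCar(remoteRoot)
  await service.pushRepo(url, repo.did(), car)
}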

@@ -1,7 +1,6 @@
import { z } from 'zod'
import { BlockWriter } from '@ipld/car/writer'
import { def as common } from '../common/types'
import TID from './tid'
import { def as common, TID } from '@adxp/common'
import { CID } from 'multiformats'
import { DataDiff } from './mst'

@@ -0,0 +1,222 @@
import { CID } from 'multiformats'
import IpldStore from '../src/blockstore/ipld-store'
import TID from '../src/repo/tid'
import { Repo } from '../src/repo'
import { MemoryBlockstore } from '../src/blockstore'
import { DataDiff, MST } from '../src/repo/mst'
import fs from 'fs'
type IdMapping = Record<string, CID>
const fakeStore = new MemoryBlockstore()
export const randomCid = async (store: IpldStore = fakeStore): Promise<CID> => {
const str = randomStr(50)
return store.put({ test: str })
}
export const generateBulkTids = (count: number): TID[] => {
const ids: TID[] = []
for (let i = 0; i < count; i++) {
ids.push(TID.next())
}
return ids
}
export const generateBulkTidMapping = async (
count: number,
blockstore: IpldStore = fakeStore,
): Promise<IdMapping> => {
const ids = generateBulkTids(count)
const obj: IdMapping = {}
for (const id of ids) {
obj[id.toString()] = await randomCid(blockstore)
}
return obj
}
export const keysFromMapping = (mapping: IdMapping): TID[] => {
return Object.keys(mapping).map((id) => TID.fromStr(id))
}
export const keysFromMappings = (mappings: IdMapping[]): TID[] => {
return mappings.map(keysFromMapping).flat()
}
export const randomStr = (len: number): string => {
let result = ''
const CHARS = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
for (let i = 0; i < len; i++) {
result += CHARS.charAt(Math.floor(Math.random() * CHARS.length))
}
return result
}
export const shuffle = <T>(arr: T[]): T[] => {
let toShuffle = [...arr]
let shuffled: T[] = []
while (toShuffle.length > 0) {
const index = Math.floor(Math.random() * toShuffle.length)
shuffled.push(toShuffle[index])
toShuffle.splice(index, 1)
}
return shuffled
}
export const generateObject = (): Record<string, string> => {
return {
name: randomStr(100),
}
}
// Mass repo mutations & checking
// -------------------------------
export const testCollections = ['bsky/posts', 'bsky/likes']
export type CollectionData = Record<string, unknown>
export type RepoData = Record<string, CollectionData>
export const fillRepo = async (
repo: Repo,
itemsPerCollection: number,
): Promise<RepoData> => {
const repoData: RepoData = {}
for (const collName of testCollections) {
const collData: CollectionData = {}
const coll = await repo.getCollection(collName)
for (let i = 0; i < itemsPerCollection; i++) {
const object = generateObject()
const tid = await coll.createRecord(object)
collData[tid.toString()] = object
}
repoData[collName] = collData
}
return repoData
}
export const editRepo = async (
repo: Repo,
prevData: RepoData,
params: {
adds?: number
updates?: number
deletes?: number
},
): Promise<RepoData> => {
const { adds = 0, updates = 0, deletes = 0 } = params
const repoData: RepoData = {}
for (const collName of testCollections) {
const collData = prevData[collName]
const shuffled = shuffle(Object.entries(collData))
const coll = await repo.getCollection(collName)
for (let i = 0; i < adds; i++) {
const object = generateObject()
const tid = await coll.createRecord(object)
collData[tid.toString()] = object
}
const toUpdate = shuffled.slice(0, updates)
for (let i = 0; i < toUpdate.length; i++) {
const object = generateObject()
const tid = TID.fromStr(toUpdate[i][0])
await coll.updateRecord(tid, object)
collData[tid.toString()] = object
}
const toDelete = shuffled.slice(updates, updates + deletes)
for (let i = 0; i < toDelete.length; i++) {
const tid = TID.fromStr(toDelete[i][0])
await coll.deleteRecord(tid)
delete collData[tid.toString()]
}
repoData[collName] = collData
}
return repoData
}
export const checkRepo = async (repo: Repo, data: RepoData): Promise<void> => {
for (const collName of Object.keys(data)) {
const coll = await repo.getCollection(collName)
const collData = data[collName]
for (const tid of Object.keys(collData)) {
const record = await coll.getRecord(TID.fromStr(tid))
expect(record).toEqual(collData[tid])
}
}
}
export const checkRepoDiff = async (
diff: DataDiff,
before: RepoData,
after: RepoData,
): Promise<void> => {
const getObjectCid = async (
key: string,
data: RepoData,
): Promise<CID | undefined> => {
const parts = key.split('/')
const collection = parts.slice(0, 2).join('/')
const obj = (data[collection] || {})[parts[2]]
return obj === undefined ? undefined : fakeStore.put(obj as any)
}
for (const add of diff.addList()) {
const beforeCid = await getObjectCid(add.key, before)
const afterCid = await getObjectCid(add.key, after)
expect(beforeCid).toBeUndefined()
expect(afterCid).toEqual(add.cid)
}
for (const update of diff.updateList()) {
const beforeCid = await getObjectCid(update.key, before)
const afterCid = await getObjectCid(update.key, after)
expect(beforeCid).toEqual(update.prev)
expect(afterCid).toEqual(update.cid)
}
for (const del of diff.deleteList()) {
const beforeCid = await getObjectCid(del.key, before)
const afterCid = await getObjectCid(del.key, after)
expect(beforeCid).toEqual(del.cid)
expect(afterCid).toBeUndefined()
}
}
// Logging
// ----------------
export const writeMstLog = async (filename: string, tree: MST) => {
let log = ''
for await (const entry of tree.walk()) {
if (entry.isLeaf()) continue
const layer = await entry.getLayer()
log += `Layer ${layer}: ${entry.pointer}\n`
log += '--------------\n'
const entries = await entry.getEntries()
for (const e of entries) {
if (e.isLeaf()) {
log += `Key: ${e.key} (${e.value})\n`
} else {
log += `Subtree: ${e.pointer}\n`
}
}
log += '\n\n'
}
fs.writeFileSync(filename, log)
}
export const saveMstEntries = (filename: string, entries: [string, CID][]) => {
const writable = entries.map(([key, val]) => [key, val.toString()])
fs.writeFileSync(filename, JSON.stringify(writable))
}
export const loadMstEntries = (filename: string): [string, CID][] => {
const contents = fs.readFileSync(filename)
const parsed = JSON.parse(contents.toString())
return parsed.map(([key, value]) => [key, CID.parse(value)])
}

@@ -0,0 +1,69 @@
import * as auth from '@adxp/auth'
import Repo from '../src/repo/index'
import IpldStore from '../src/blockstore/ipld-store'
import * as delta from '../src/repo/delta'
import * as util from './_util'
import TID from '../src/repo/tid'
describe('Delta', () => {
let alice: Repo
let ipldBob: IpldStore
const namespaceId = 'did:example:test'
beforeAll(async () => {
const ipldAlice = IpldStore.createInMemory()
const authStore = await auth.MemoryStore.load()
await authStore.claimFull()
alice = await Repo.create(ipldAlice, await authStore.did(), authStore)
ipldBob = IpldStore.createInMemory()
})
it('syncs a repo that is behind', async () => {
// bring bob up to date with early version of alice's repo
await util.fillRepo(alice, namespaceId, 150, 10, 50)
const car = await alice.getFullHistory()
const bob = await Repo.fromCarFile(car, ipldBob)
await alice.runOnNamespace(namespaceId, async (namespace) => {
const postTid = TID.next()
const cid = await util.randomCid(alice.blockstore)
await namespace.posts.addEntry(postTid, cid)
await namespace.posts.editEntry(
postTid,
await util.randomCid(alice.blockstore),
)
await namespace.posts.deleteEntry(postTid)
const interTid = TID.next()
await namespace.interactions.addEntry(
interTid,
await util.randomCid(alice.blockstore),
)
await namespace.interactions.editEntry(
interTid,
await util.randomCid(alice.blockstore),
)
await namespace.interactions.deleteEntry(interTid)
})
const follow = util.randomFollow()
await alice.relationships.follow(follow.did, follow.username)
await alice.relationships.unfollow(follow.did)
const diff = await alice.getDiffCar(bob.cid)
const events: delta.Event[] = []
await bob.loadAndVerifyDiff(diff, async (evt) => {
events.push(evt)
})
expect(events.length).toEqual(8)
expect(events[0].event).toEqual(delta.EventType.AddedObject)
expect(events[1].event).toEqual(delta.EventType.UpdatedObject)
expect(events[2].event).toEqual(delta.EventType.DeletedObject)
expect(events[3].event).toEqual(delta.EventType.AddedObject)
expect(events[4].event).toEqual(delta.EventType.UpdatedObject)
expect(events[5].event).toEqual(delta.EventType.DeletedObject)
expect(events[6].event).toEqual(delta.EventType.AddedRelationship)
expect(events[7].event).toEqual(delta.EventType.DeletedRelationship)
})
})

@@ -0,0 +1,45 @@
import * as auth from '@adxp/auth'
import { MicroblogFull } from '../src/microblog/index'
import Repo from '../src/repo/index'
import IpldStore from '../src/blockstore/ipld-store'
describe('Microblog', () => {
let microblog: MicroblogFull
beforeAll(async () => {
const ipld = IpldStore.createInMemory()
const authStore = await auth.MemoryStore.load()
await authStore.claimFull()
const repo = await Repo.create(ipld, await authStore.did(), authStore)
microblog = new MicroblogFull(repo, '', { pushOnUpdate: false })
})
it('basic post operations', async () => {
const created = await microblog.addPost('hello world')
const tid = created.tid
const post = await microblog.getPost(tid)
expect(post?.text).toBe('hello world')
await microblog.editPost(tid, 'edit')
const edited = await microblog.getPost(tid)
expect(edited?.text).toBe('edit')
await microblog.deletePost(tid)
const deleted = await microblog.getPost(tid)
expect(deleted).toBe(null)
})
it('basic like operations', async () => {
const post = await microblog.addPost('hello world')
const like = await microblog.likePost(post.author, post.tid)
let likes = await microblog.listLikes(1)
expect(likes.length).toBe(1)
expect(likes[0]?.tid?.toString()).toBe(like.tid.toString())
expect(likes[0]?.post_tid?.toString()).toBe(post.tid?.toString())
await microblog.deleteLike(like.tid)
likes = await microblog.listLikes(1)
expect(likes.length).toBe(0)
})
})

@@ -0,0 +1,162 @@
import { CID } from 'multiformats'
import { Fanout, MemoryBlockstore, MST, NodeEntry } from '../src'
import * as util from './_util'
import fs from 'fs'
type BenchmarkData = {
fanout: number
size: number
addTime: string
saveTime: string
walkTime: string
depth: number
maxWidth: number
blockstoreSize: number
largestProofSize: number
avgProofSize: number
widths: Record<number, number>
}
describe('MST Benchmarks', () => {
let mapping: Record<string, CID>
let shuffled: [string, CID][]
const size = 500000
beforeAll(async () => {
mapping = await util.generateBulkTidMapping(size)
shuffled = util.shuffle(Object.entries(mapping))
})
// const fanouts: Fanout[] = [8, 16, 32]
const fanouts: Fanout[] = [16, 32]
it('benchmarks various fanouts', async () => {
let benches: BenchmarkData[] = []
for (const fanout of fanouts) {
const blockstore = new MemoryBlockstore()
let mst = await MST.create(blockstore, [], { fanout })
const start = Date.now()
for (const entry of shuffled) {
mst = await mst.add(entry[0], entry[1])
}
const doneAdding = Date.now()
const root = await mst.save()
const doneSaving = Date.now()
let reloaded = await MST.load(blockstore, root, { fanout })
const widthTracker = new NodeWidths()
for await (const entry of reloaded.walk()) {
await widthTracker.trackEntry(entry)
}
const doneWalking = Date.now()
const paths = await reloaded.paths()
let largestProof = 0
let combinedProofSizes = 0
for (const path of paths) {
let proofSize = 0
for (const entry of path) {
if (entry.isTree()) {
const bytes = await blockstore.getBytes(entry.pointer)
proofSize += bytes.byteLength
}
}
largestProof = Math.max(largestProof, proofSize)
combinedProofSizes += proofSize
}
const avgProofSize = Math.ceil(combinedProofSizes / paths.length)
const blockstoreSize = await blockstore.sizeInBytes()
benches.push({
fanout,
size,
addTime: secDiff(start, doneAdding),
saveTime: secDiff(doneAdding, doneSaving),
walkTime: secDiff(doneSaving, doneWalking),
depth: await mst.getLayer(),
blockstoreSize,
largestProofSize: largestProof,
avgProofSize: avgProofSize,
maxWidth: widthTracker.max,
widths: widthTracker.data,
})
}
writeBenchData(benches, 'mst-benchmarks')
})
})
const secDiff = (first: number, second: number): string => {
return ((second - first) / 1000).toFixed(3)
}
class NodeWidths {
data = {
0: 0,
16: 0,
32: 0,
48: 0,
64: 0,
96: 0,
128: 0,
160: 0,
192: 0,
224: 0,
256: 0,
}
max = 0
async trackEntry(entry: NodeEntry) {
if (!entry.isTree()) return
const entries = await entry.getEntries()
const width = entries.filter((e) => e.isLeaf()).length
this.max = Math.max(this.max, width)
if (width >= 0) this.data[0]++
if (width >= 16) this.data[16]++
if (width >= 32) this.data[32]++
if (width >= 48) this.data[48]++
if (width >= 64) this.data[64]++
if (width >= 96) this.data[96]++
if (width >= 128) this.data[128]++
if (width >= 160) this.data[160]++
if (width >= 192) this.data[192]++
if (width >= 224) this.data[224]++
if (width >= 256) this.data[256]++
}
}
const writeBenchData = (benches: BenchmarkData[], fileLoc: string) => {
let toWrite = ''
for (const bench of benches) {
toWrite += `Fanout: ${bench.fanout}
----------------------
Time to add ${bench.size} leaves: ${bench.addTime}s
Time to save tree with ${bench.size} leaves: ${bench.saveTime}s
Time to reconstruct & walk ${bench.size} leaves: ${bench.walkTime}s
Tree depth: ${bench.depth}
Max Node Width (only counting leaves): ${bench.maxWidth}
The total blockstore size is: ${bench.blockstoreSize} bytes
Largest proof size: ${bench.largestProofSize} bytes
Average proof size: ${bench.avgProofSize} bytes
Nodes with >= 0 leaves: ${bench.widths[0]}
Nodes with >= 16 leaves: ${bench.widths[16]}
Nodes with >= 32 leaves: ${bench.widths[32]}
Nodes with >= 48 leaves: ${bench.widths[48]}
Nodes with >= 64 leaves: ${bench.widths[64]}
Nodes with >= 96 leaves: ${bench.widths[96]}
Nodes with >= 128 leaves: ${bench.widths[128]}
Nodes with >= 160 leaves: ${bench.widths[160]}
Nodes with >= 192 leaves: ${bench.widths[192]}
Nodes with >= 224 leaves: ${bench.widths[224]}
Nodes with >= 256 leaves: ${bench.widths[256]}
`
}
fs.writeFileSync(fileLoc, toWrite)
}

@@ -0,0 +1,254 @@
import { MST, DataAdd, DataUpdate, DataDelete } from '../src/repo/mst'
import { countPrefixLen } from '../src/repo/mst/util'
import { MemoryBlockstore } from '../src/blockstore'
import * as util from './_util'
import { CID } from 'multiformats'
describe('Merkle Search Tree', () => {
let blockstore: MemoryBlockstore
let mst: MST
let mapping: Record<string, CID>
let shuffled: [string, CID][]
beforeAll(async () => {
blockstore = new MemoryBlockstore()
mst = await MST.create(blockstore)
mapping = await util.generateBulkTidMapping(1000, blockstore)
shuffled = util.shuffle(Object.entries(mapping))
})
it('adds records', async () => {
for (const entry of shuffled) {
mst = await mst.add(entry[0], entry[1])
}
for (const entry of shuffled) {
const got = await mst.get(entry[0])
expect(entry[1].equals(got)).toBeTruthy()
}
const totalSize = await mst.leafCount()
expect(totalSize).toBe(1000)
})
it('edits records', async () => {
let editedMst = mst
const toEdit = shuffled.slice(0, 100)
const edited: [string, CID][] = []
for (const entry of toEdit) {
const newCid = await util.randomCid()
editedMst = await editedMst.update(entry[0], newCid)
edited.push([entry[0], newCid])
}
for (const entry of edited) {
const got = await editedMst.get(entry[0])
expect(entry[1].equals(got)).toBeTruthy()
}
const totalSize = await editedMst.leafCount()
expect(totalSize).toBe(1000)
})
it('deletes records', async () => {
let deletedMst = mst
const toDelete = shuffled.slice(0, 100)
const theRest = shuffled.slice(100)
for (const entry of toDelete) {
deletedMst = await deletedMst.delete(entry[0])
}
const totalSize = await deletedMst.leafCount()
expect(totalSize).toBe(900)
for (const entry of toDelete) {
const got = await deletedMst.get(entry[0])
expect(got).toBe(null)
}
for (const entry of theRest) {
const got = await deletedMst.get(entry[0])
expect(entry[1].equals(got)).toBeTruthy()
}
})
it('is order independent', async () => {
const allNodes = await mst.allNodes()
let recreated = await MST.create(blockstore)
const reshuffled = util.shuffle(Object.entries(mapping))
for (const entry of reshuffled) {
recreated = await recreated.add(entry[0], entry[1])
}
const allReshuffled = await recreated.allNodes()
expect(allNodes.length).toBe(allReshuffled.length)
for (let i = 0; i < allNodes.length; i++) {
expect(await allNodes[i].equals(allReshuffled[i])).toBeTruthy()
}
})
it('saves and loads from blockstore', async () => {
const cid = await mst.save()
const loaded = await MST.load(blockstore, cid)
const origNodes = await mst.allNodes()
const loadedNodes = await loaded.allNodes()
expect(origNodes.length).toBe(loadedNodes.length)
for (let i = 0; i < origNodes.length; i++) {
expect(await origNodes[i].equals(loadedNodes[i])).toBeTruthy()
}
})
it('diffs', async () => {
let toDiff = mst
const toAdd = Object.entries(
await util.generateBulkTidMapping(100, blockstore),
)
const toEdit = shuffled.slice(500, 600)
const toDel = shuffled.slice(400, 500)
const expectedAdds: Record<string, DataAdd> = {}
const expectedUpdates: Record<string, DataUpdate> = {}
const expectedDels: Record<string, DataDelete> = {}
for (const entry of toAdd) {
toDiff = await toDiff.add(entry[0], entry[1])
expectedAdds[entry[0]] = { key: entry[0], cid: entry[1] }
}
for (const entry of toEdit) {
const updated = await util.randomCid()
toDiff = await toDiff.update(entry[0], updated)
expectedUpdates[entry[0]] = {
key: entry[0],
prev: entry[1],
cid: updated,
}
}
for (const entry of toDel) {
toDiff = await toDiff.delete(entry[0])
expectedDels[entry[0]] = { key: entry[0], cid: entry[1] }
}
const diff = await mst.diff(toDiff)
expect(diff.addList().length).toBe(100)
expect(diff.updateList().length).toBe(100)
expect(diff.deleteList().length).toBe(100)
expect(diff.adds).toEqual(expectedAdds)
expect(diff.updates).toEqual(expectedUpdates)
expect(diff.deletes).toEqual(expectedDels)
// ensure we correctly report all added CIDs
for await (const entry of toDiff.walk()) {
let cid: CID
if (entry.isTree()) {
cid = await entry.getPointer()
} else {
cid = entry.value
}
const found = (await blockstore.has(cid)) || diff.newCids.has(cid)
expect(found).toBeTruthy()
}
})
// Special Cases (these are made for fanout 32)
// ------------
// These are some tricky things that can come up that may not be included in a randomized tree
/**
* `f` gets added & it does two node splits (e is no longer grouped with g/h)
*
* * *
* _________|________ ____|_____
* | | | | | | | |
* * d * i * -> * f *
* __|__ __|__ __|__ __|__ __|___
* | | | | | | | | | | | | | | |
* a b c e g h j k l * d * * i *
* __|__ | _|_ __|__
* | | | | | | | | |
* a b c e g h j k l
*
*/
it('handles splits that must go 2 deep', async () => {
const layer0 = [
'3j6hnk65jis2t',
'3j6hnk65jit2t',
'3j6hnk65jiu2t',
'3j6hnk65jne2t',
'3j6hnk65jnm2t',
'3j6hnk65jnn2t',
'3j6hnk65kvx2t',
'3j6hnk65kvy2t',
'3j6hnk65kvz2t',
]
const layer1 = ['3j6hnk65jju2t', '3j6hnk65kve2t']
const layer2 = '3j6hnk65jng2t'
mst = await MST.create(blockstore, [], { fanout: 32 })
const cid = await util.randomCid()
for (const tid of layer0) {
mst = await mst.add(tid, cid)
}
for (const tid of layer1) {
mst = await mst.add(tid, cid)
}
mst = await mst.add(layer2, cid)
const layer = await mst.getLayer()
expect(layer).toBe(2)
const allTids = [...layer0, ...layer1, layer2]
for (const tid of allTids) {
const got = await mst.get(tid)
expect(cid.equals(got)).toBeTruthy()
}
})
/**
* `b` gets added & it hashes to 2 levels above any existing leaves
*
* * -> *
* __|__ __|__
* | | | | |
* a c * b *
* | |
* * *
* | |
* a c
*
*/
it('handles new layers that are two higher than existing', async () => {
const layer0 = ['3j6hnk65jis2t', '3j6hnk65kvz2t']
const layer1 = ['3j6hnk65jju2t', '3j6hnk65l222t']
const layer2 = '3j6hnk65jng2t'
mst = await MST.create(blockstore, [], { fanout: 32 })
const cid = await util.randomCid()
for (const tid of layer0) {
mst = await mst.add(tid, cid)
}
mst = await mst.add(layer2, cid)
for (const tid of layer1) {
mst = await mst.add(tid, cid)
}
const layer = await mst.getLayer()
expect(layer).toBe(2)
const allTids = [...layer0, ...layer1, layer2]
for (const tid of allTids) {
const got = await mst.get(tid)
expect(cid.equals(got)).toBeTruthy()
}
})
})
describe('utils', () => {
it('counts prefix length', () => {
expect(countPrefixLen('abc', 'abc')).toBe(3)
expect(countPrefixLen('', 'abc')).toBe(0)
expect(countPrefixLen('abc', '')).toBe(0)
expect(countPrefixLen('ab', 'abc')).toBe(2)
expect(countPrefixLen('abc', 'ab')).toBe(2)
})
})
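These assertions fully determine the helper's behavior; a minimal sketch of countPrefixLen that satisfies them (an assumption, not necessarily the code in src/repo/mst/util):

// length of the shared leading prefix of two strings
export const countPrefixLen = (a: string, b: string): number => {
  let i = 0
  while (i < a.length && i < b.length && a[i] === b[i]) i++
  return i
}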

@@ -0,0 +1,71 @@
import * as auth from '@adxp/auth'
import { Repo } from '../src/repo'
import { MemoryBlockstore } from '../src/blockstore'
import * as util from './_util'
describe('Repo', () => {
let blockstore: MemoryBlockstore
let authStore: auth.AuthStore
let repo: Repo
let repoData: util.RepoData
it('creates repo', async () => {
blockstore = new MemoryBlockstore()
authStore = await auth.MemoryStore.load()
await authStore.claimFull()
repo = await Repo.create(blockstore, await authStore.did(), authStore)
})
it('does basic operations', async () => {
const collection = repo.getCollection('bsky/posts')
const obj = util.generateObject()
const tid = await collection.createRecord(obj)
let got = await collection.getRecord(tid)
expect(got).toEqual(obj)
const updatedObj = util.generateObject()
await collection.updateRecord(tid, updatedObj)
got = await collection.getRecord(tid)
expect(got).toEqual(updatedObj)
await collection.deleteRecord(tid)
got = await collection.getRecord(tid)
expect(got).toBeNull()
})
it('adds content collections', async () => {
repoData = await util.fillRepo(repo, 100)
await util.checkRepo(repo, repoData)
})
it('edits and deletes content', async () => {
repoData = await util.editRepo(repo, repoData, {
adds: 20,
updates: 20,
deletes: 20,
})
await util.checkRepo(repo, repoData)
})
it('adds a valid signature to commit', async () => {
const commit = await repo.getCommit()
const verified = await auth.verifySignature(
repo.did(),
commit.root.bytes,
commit.sig,
)
expect(verified).toBeTruthy()
})
it('sets correct DID', async () => {
expect(repo.did()).toEqual(await authStore.did())
})
it('loads from blockstore', async () => {
const reloadedRepo = await Repo.load(blockstore, repo.cid, authStore)
await util.checkRepo(reloadedRepo, repoData)
})
})

@@ -0,0 +1,117 @@
import * as auth from '@adxp/auth'
import { Repo, RepoRoot, TID } from '../src/repo'
import { MemoryBlockstore } from '../src/blockstore'
import * as util from './_util'
import { AuthStore } from '@adxp/auth'
describe('Sync', () => {
let aliceBlockstore: MemoryBlockstore, bobBlockstore: MemoryBlockstore
let aliceRepo: Repo
let aliceAuth: AuthStore
let repoData: util.RepoData
beforeAll(async () => {
aliceBlockstore = new MemoryBlockstore()
aliceAuth = await auth.MemoryStore.load()
await aliceAuth.claimFull()
aliceRepo = await Repo.create(
aliceBlockstore,
await aliceAuth.did(),
aliceAuth,
)
bobBlockstore = new MemoryBlockstore()
})
it('syncs an empty repo', async () => {
const car = await aliceRepo.getFullHistory()
const repoBob = await Repo.fromCarFile(car, bobBlockstore)
const data = await repoBob.data.list('', 10)
expect(data.length).toBe(0)
})
let bobRepo: Repo
it('syncs a repo that is starting from scratch', async () => {
repoData = await util.fillRepo(aliceRepo, 100)
try {
await aliceRepo.getFullHistory()
} catch (err) {
// on failure, dump the blockstore contents to aid debugging, then rethrow
const contents = await aliceBlockstore.getContents()
console.log(contents)
throw err
}
const car = await aliceRepo.getFullHistory()
bobRepo = await Repo.fromCarFile(car, bobBlockstore)
const diff = await bobRepo.verifySetOfUpdates(null, bobRepo.cid)
await util.checkRepo(bobRepo, repoData)
await util.checkRepoDiff(diff, {}, repoData)
})
it('syncs a repo that is behind', async () => {
// add more to alice's repo & have bob catch up
const beforeData = JSON.parse(JSON.stringify(repoData))
repoData = await util.editRepo(aliceRepo, repoData, {
adds: 20,
updates: 20,
deletes: 20,
})
const diffCar = await aliceRepo.getDiffCar(bobRepo.cid)
const diff = await bobRepo.loadAndVerifyDiff(diffCar)
await util.checkRepo(bobRepo, repoData)
await util.checkRepoDiff(diff, beforeData, repoData)
})
it('throws an error on invalid UCANs', async () => {
const obj = util.generateObject()
const cid = await aliceBlockstore.put(obj)
const updatedData = await aliceRepo.data.add(`test/coll/${TID.next()}`, cid)
// we create an unrelated token for bob & try to permission alice's repo commit with it
const bobAuth = await auth.MemoryStore.load()
const badUcan = await bobAuth.claimFull()
const auth_token = await aliceBlockstore.put(auth.encodeUcan(badUcan))
const dataCid = await updatedData.save()
const root: RepoRoot = {
did: aliceRepo.did(),
prev: aliceRepo.cid,
auth_token,
data: dataCid,
}
const rootCid = await aliceBlockstore.put(root)
const commit = {
root: rootCid,
sig: await aliceAuth.sign(rootCid.bytes),
}
aliceRepo.cid = await aliceBlockstore.put(commit)
aliceRepo.data = updatedData
const diffCar = await aliceRepo.getDiffCar(bobRepo.cid)
await expect(bobRepo.loadAndVerifyDiff(diffCar)).rejects.toThrow()
await aliceRepo.revert(1)
})
it('throws on a bad signature', async () => {
const obj = util.generateObject()
const cid = await aliceBlockstore.put(obj)
const updatedData = await aliceRepo.data.add(`test/coll/${TID.next()}`, cid)
const auth_token = await aliceRepo.ucanForOperation(updatedData)
const dataCid = await updatedData.save()
const root: RepoRoot = {
did: aliceRepo.did(),
prev: aliceRepo.cid,
auth_token,
data: dataCid,
}
const rootCid = await aliceBlockstore.put(root)
// we generated a bad sig by signing the data cid instead of root cid
const commit = {
root: rootCid,
sig: await aliceAuth.sign(dataCid.bytes),
}
aliceRepo.cid = await aliceBlockstore.put(commit)
aliceRepo.data = updatedData
const diffCar = await aliceRepo.getDiffCar(bobRepo.cid)
await expect(bobRepo.loadAndVerifyDiff(diffCar)).rejects.toThrow()
await aliceRepo.revert(1)
})
})

@@ -0,0 +1,18 @@
import TID from '../src/repo/tid'
describe('TIDs', () => {
it('creates a new TID', () => {
const tid = TID.next()
const str = tid.toString()
expect(typeof str).toEqual('string')
expect(str.length).toEqual(13)
})
it('parses a TID', () => {
const tid = TID.next()
const str = tid.toString()
const parsed = TID.fromStr(str)
expect(parsed.timestamp()).toEqual(tid.timestamp())
expect(parsed.clockid()).toEqual(tid.clockid())
})
})
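These tests pin down the observable TID shape: a 13-character sortable string that round-trips its timestamp and clock id. A hedged sketch of a compatible layout, assuming an 11-char sortable-base32 timestamp plus a 2-char clock id and the s32encode/s32decode helpers from the tid.ts hunk above:

import { util } from '@adxp/common'

// sketch only: the 11+2 split and '2'-padding ('2' is zero in sortable base32) are assumptions
class SketchTID {
  constructor(public str: string) {}
  static fromTime(timestamp: number, clockid: number): SketchTID {
    const str =
      util.s32encode(timestamp).padStart(11, '2') +
      util.s32encode(clockid).padStart(2, '2')
    return new SketchTID(str) // 13 chars total
  }
  timestamp(): number {
    return util.s32decode(this.str.slice(0, 11))
  }
  clockid(): number {
    return util.s32decode(this.str.slice(11, 13))
  }
}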

@@ -0,0 +1,370 @@
import { AdxUri } from '../src/network/uri'
describe('Adx Uris', () => {
it('parses valid Adx Uris', () => {
// input host path query hash
type AdxUriTest = [string, string, string, string, string]
const TESTS: AdxUriTest[] = [
['foo.com', 'foo.com', '', '', ''],
['adx://foo.com', 'foo.com', '', '', ''],
['adx://foo.com/', 'foo.com', '/', '', ''],
['adx://foo.com/foo', 'foo.com', '/foo', '', ''],
['adx://foo.com/foo/', 'foo.com', '/foo/', '', ''],
['adx://foo.com/foo/bar', 'foo.com', '/foo/bar', '', ''],
['adx://foo.com?foo=bar', 'foo.com', '', 'foo=bar', ''],
['adx://foo.com?foo=bar&baz=buux', 'foo.com', '', 'foo=bar&baz=buux', ''],
['adx://foo.com/?foo=bar', 'foo.com', '/', 'foo=bar', ''],
['adx://foo.com/foo?foo=bar', 'foo.com', '/foo', 'foo=bar', ''],
['adx://foo.com/foo/?foo=bar', 'foo.com', '/foo/', 'foo=bar', ''],
['adx://foo.com#hash', 'foo.com', '', '', '#hash'],
['adx://foo.com/#hash', 'foo.com', '/', '', '#hash'],
['adx://foo.com/foo#hash', 'foo.com', '/foo', '', '#hash'],
['adx://foo.com/foo/#hash', 'foo.com', '/foo/', '', '#hash'],
['adx://foo.com?foo=bar#hash', 'foo.com', '', 'foo=bar', '#hash'],
[
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'',
'',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'',
'',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/',
'',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/foo',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/foo',
'',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/foo/',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/foo/',
'',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/foo/bar',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/foo/bar',
'',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw?foo=bar',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'',
'foo=bar',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw?foo=bar&baz=buux',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'',
'foo=bar&baz=buux',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/?foo=bar',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/',
'foo=bar',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/foo?foo=bar',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/foo',
'foo=bar',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/foo/?foo=bar',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/foo/',
'foo=bar',
'',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw#hash',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'',
'',
'#hash',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/#hash',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/',
'',
'#hash',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/foo#hash',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/foo',
'',
'#hash',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw/foo/#hash',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'/foo/',
'',
'#hash',
],
[
'adx://did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw?foo=bar#hash',
'did:ion:EiAnKD8-jfdd0MDcZUjAbRgaThBrMxPTFOxcnfJhI7Ukaw',
'',
'foo=bar',
'#hash',
],
['did:web:localhost%3A1234', 'did:web:localhost%3A1234', '', '', ''],
[
'adx://did:web:localhost%3A1234',
'did:web:localhost%3A1234',
'',
'',
'',
],
[
'adx://did:web:localhost%3A1234/',
'did:web:localhost%3A1234',
'/',
'',
'',
],
[
'adx://did:web:localhost%3A1234/foo',
'did:web:localhost%3A1234',
'/foo',
'',
'',
],
[
'adx://did:web:localhost%3A1234/foo/',
'did:web:localhost%3A1234',
'/foo/',
'',
'',
],
[
'adx://did:web:localhost%3A1234/foo/bar',
'did:web:localhost%3A1234',
'/foo/bar',
'',
'',
],
[
'adx://did:web:localhost%3A1234?foo=bar',
'did:web:localhost%3A1234',
'',
'foo=bar',
'',
],
[
'adx://did:web:localhost%3A1234?foo=bar&baz=buux',
'did:web:localhost%3A1234',
'',
'foo=bar&baz=buux',
'',
],
[
'adx://did:web:localhost%3A1234/?foo=bar',
'did:web:localhost%3A1234',
'/',
'foo=bar',
'',
],
[
'adx://did:web:localhost%3A1234/foo?foo=bar',
'did:web:localhost%3A1234',
'/foo',
'foo=bar',
'',
],
[
'adx://did:web:localhost%3A1234/foo/?foo=bar',
'did:web:localhost%3A1234',
'/foo/',
'foo=bar',
'',
],
[
'adx://did:web:localhost%3A1234#hash',
'did:web:localhost%3A1234',
'',
'',
'#hash',
],
[
'adx://did:web:localhost%3A1234/#hash',
'did:web:localhost%3A1234',
'/',
'',
'#hash',
],
[
'adx://did:web:localhost%3A1234/foo#hash',
'did:web:localhost%3A1234',
'/foo',
'',
'#hash',
],
[
'adx://did:web:localhost%3A1234/foo/#hash',
'did:web:localhost%3A1234',
'/foo/',
'',
'#hash',
],
[
'adx://did:web:localhost%3A1234?foo=bar#hash',
'did:web:localhost%3A1234',
'',
'foo=bar',
'#hash',
],
]
for (const [uri, hostname, pathname, search, hash] of TESTS) {
const urip = new AdxUri(uri)
expect(urip.protocol).toBe('adx:')
expect(urip.host).toBe(hostname)
expect(urip.hostname).toBe(hostname)
expect(urip.origin).toBe(`adx://${hostname}`)
expect(urip.pathname).toBe(pathname)
expect(urip.search).toBe(search)
expect(urip.hash).toBe(hash)
}
})
it('handles ADX-specific parsing', () => {
{
const urip = new AdxUri('adx://foo.com')
expect(urip.collection).toBe('')
expect(urip.recordKey).toBe('')
}
{
const urip = new AdxUri('adx://foo.com/namespace')
expect(urip.namespace).toBe('namespace')
expect(urip.dataset).toBe('')
expect(urip.collection).toBe('namespace/')
expect(urip.recordKey).toBe('')
}
{
const urip = new AdxUri('adx://foo.com/namespace/dataset')
expect(urip.namespace).toBe('namespace')
expect(urip.dataset).toBe('dataset')
expect(urip.collection).toBe('namespace/dataset')
expect(urip.recordKey).toBe('')
}
{
const urip = new AdxUri('adx://foo.com/namespace/dataset/123')
expect(urip.namespace).toBe('namespace')
expect(urip.dataset).toBe('dataset')
expect(urip.collection).toBe('namespace/dataset')
expect(urip.recordKey).toBe('123')
}
})
it('supports modifications', () => {
const urip = new AdxUri('adx://foo.com')
expect(urip.toString()).toBe('adx://foo.com/')
urip.host = 'bar.com'
expect(urip.toString()).toBe('adx://bar.com/')
urip.host = 'did:web:localhost%3A1234'
expect(urip.toString()).toBe('adx://did:web:localhost%3A1234/')
urip.host = 'foo.com'
urip.pathname = '/'
expect(urip.toString()).toBe('adx://foo.com/')
urip.pathname = '/foo'
expect(urip.toString()).toBe('adx://foo.com/foo')
urip.pathname = 'foo'
expect(urip.toString()).toBe('adx://foo.com/foo')
urip.collection = 'namespace/dataset'
urip.recordKey = '123'
expect(urip.toString()).toBe('adx://foo.com/namespace/dataset/123')
urip.recordKey = '124'
expect(urip.toString()).toBe('adx://foo.com/namespace/dataset/124')
urip.collection = 'other/data'
expect(urip.toString()).toBe('adx://foo.com/other/data/124')
urip.pathname = ''
urip.recordKey = '123'
expect(urip.toString()).toBe('adx://foo.com/undefined/undefined/123')
urip.pathname = 'foo'
urip.search = '?foo=bar'
expect(urip.toString()).toBe('adx://foo.com/foo?foo=bar')
urip.searchParams.set('baz', 'buux')
expect(urip.toString()).toBe('adx://foo.com/foo?foo=bar&baz=buux')
urip.hash = '#hash'
expect(urip.toString()).toBe('adx://foo.com/foo?foo=bar&baz=buux#hash')
urip.hash = 'hash'
expect(urip.toString()).toBe('adx://foo.com/foo?foo=bar&baz=buux#hash')
})
it('supports relative URIs', () => {
// input path query hash
type AdxUriTest = [string, string, string, string]
const TESTS: AdxUriTest[] = [
// input pathname search hash
['', '', '', ''],
['/', '/', '', ''],
['/foo', '/foo', '', ''],
['/foo/', '/foo/', '', ''],
['/foo/bar', '/foo/bar', '', ''],
['?foo=bar', '', 'foo=bar', ''],
['?foo=bar&baz=buux', '', 'foo=bar&baz=buux', ''],
['/?foo=bar', '/', 'foo=bar', ''],
['/foo?foo=bar', '/foo', 'foo=bar', ''],
['/foo/?foo=bar', '/foo/', 'foo=bar', ''],
['#hash', '', '', '#hash'],
['/#hash', '/', '', '#hash'],
['/foo#hash', '/foo', '', '#hash'],
['/foo/#hash', '/foo/', '', '#hash'],
['?foo=bar#hash', '', 'foo=bar', '#hash'],
]
const BASES: string[] = [
'did:web:localhost%3A1234',
'adx://did:web:localhost%3A1234',
'adx://did:web:localhost%3A1234/foo/bar?foo=bar&baz=buux#hash',
'did:web:localhost%3A1234',
'adx://did:web:localhost%3A1234',
'adx://did:web:localhost%3A1234/foo/bar?foo=bar&baz=buux#hash',
]
for (const base of BASES) {
const basep = new AdxUri(base)
for (const [relative, pathname, search, hash] of TESTS) {
const urip = new AdxUri(relative, base)
expect(urip.protocol).toBe('adx:')
expect(urip.host).toBe(basep.host)
expect(urip.hostname).toBe(basep.hostname)
expect(urip.origin).toBe(basep.origin)
expect(urip.pathname).toBe(pathname)
expect(urip.search).toBe(search)
expect(urip.hash).toBe(hash)
}
}
})
})

@@ -0,0 +1,4 @@
{
"extends": "./tsconfig.json",
"exclude": ["**/*.spec.ts", "**/*.test.ts"]
}

@@ -0,0 +1,13 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist", // Your outDir,
"emitDeclarationOnly": true
},
"include": ["./src","__tests__/**/**.ts"],
"references": [
{ "path": "../auth/tsconfig.build.json" },
{ "path": "../common/tsconfig.build.json" },
{ "path": "../schemas/tsconfig.build.json" },
]
}