Merge pull request #13 from orbitdb/test/db-test-restructure

Test/db test restructure
This commit is contained in:
Haad 2023-02-24 08:32:20 +02:00 committed by GitHub
commit 4b8379cbbb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
24 changed files with 1471 additions and 2243 deletions

View File

@@ -98,7 +98,6 @@
"standard": {
"env": "mocha",
"ignore": [
"test/**",
"examples/**",
"benchmarks/**"
]

View File

@ -76,12 +76,13 @@ const DocumentStore = async ({ OpLog, Database, ipfs, identity, databaseId, acce
return {
...database,
type: 'documents',
type: 'documentstore',
put,
del,
get,
iterator,
query
query,
indexBy
}
}

View File

@ -33,7 +33,7 @@ const Events = async ({ OpLog, Database, ipfs, identity, databaseId, accessContr
return {
...database,
type: 'events',
type: 'eventstore',
put,
add,
get,

View File

@@ -1,57 +0,0 @@
/**
 * Feed database type: an append-only list of values with tombstone-based
 * deletion. Values are stored as 'ADD' operations in the underlying oplog;
 * deleting marks the target entry's hash with a 'DEL' tombstone.
 *
 * All constructor dependencies (OpLog, Database, ipfs, identity, databaseId,
 * accessController, storage) are injected and forwarded to Database.
 */
const Feed = async ({ OpLog, Database, ipfs, identity, databaseId, accessController, storage }) => {
  const database = await Database({ OpLog, ipfs, identity, databaseId, accessController, storage })
  const { addOperation, log } = database

  // Append a value to the feed; the operation carries no key.
  const add = async (value) => addOperation({ op: 'ADD', key: null, value })

  // put() ignores its key argument and behaves exactly like add();
  // kept so the feed exposes the same surface as the other store types.
  const put = async (key = null, value) => add(value)

  // Record a tombstone for the entry identified by `hash`.
  const del = async (hash) => addOperation({ op: 'DEL', key: hash, value: null })

  // Resolve a single entry's stored value by its oplog hash.
  const get = async (hash) => {
    const entry = await log.get(hash)
    return entry.payload.value
  }

  // Yield live (non-deleted) values from the oplog.
  // NOTE(review): assumes log.iterator yields a 'DEL' entry before the older
  // 'ADD' it tombstones (i.e. newest-first traversal) — confirm ordering.
  const iterator = async function * ({ gt, gte, lt, lte, amount } = {}) {
    const deleted = {}
    for await (const { hash, payload } of log.iterator({ gt, gte, lt, lte, amount })) {
      const { op, key, value } = payload
      if (op === 'ADD' && !deleted[hash]) {
        yield value
      } else if (op === 'DEL' && !deleted[key]) {
        deleted[key] = true
      }
    }
  }

  // Collect every live value; unshift reverses traversal order, so the
  // result is oldest-first.
  const all = async () => {
    const values = []
    for await (const value of iterator()) {
      values.unshift(value)
    }
    return values
  }

  return {
    ...database,
    type: 'feed',
    put,
    add,
    del,
    get,
    iterator,
    all
  }
}

export default Feed

View File

@ -1,6 +1,5 @@
export { default as Database } from './database.js'
export { default as DocumentStore } from './document-store.js'
export { default as EventStore } from './event-store.js'
export { default as Feed } from './feed.js'
export { default as KeyValuePersisted } from './keyvalue-persisted.js'
export { default as KeyValue } from './keyvalue.js'

View File

@ -1,4 +1,5 @@
import { Level } from 'level'
import PQueue from 'p-queue'
const valueEncoding = 'json'
@ -6,6 +7,8 @@ const KeyValuePersisted = async ({ KeyValue, OpLog, Database, ipfs, identity, da
const keyValueStore = await KeyValue({ OpLog, Database, ipfs, identity, databaseId, accessController, storage })
const { events, log } = keyValueStore
const queue = new PQueue({ concurrency: 1 })
const path = `./${identity.id}/${databaseId}/_index`
const index = new Level(path, { valueEncoding })
await index.open()
@ -14,8 +17,10 @@ const KeyValuePersisted = async ({ KeyValue, OpLog, Database, ipfs, identity, da
const updateIndex = (index) => async (entry) => {
const keys = {}
for await (const entry of log.iterator({ gt: latestOplogHash })) {
const { op, key, value } = entry.payload
if (op === 'PUT' && !keys[key]) {
keys[key] = true
await index.put(key, value)
@ -28,6 +33,8 @@ const KeyValuePersisted = async ({ KeyValue, OpLog, Database, ipfs, identity, da
}
const get = async (key) => {
await queue.onIdle()
try {
const value = await index.get(key)
if (value) {
@ -36,32 +43,39 @@ const KeyValuePersisted = async ({ KeyValue, OpLog, Database, ipfs, identity, da
} catch (e) {
// LEVEL_NOT_FOUND (ie. key not found)
}
return keyValueStore.get(key)
}
const iterator = async function * () {
await queue.onIdle()
for await (const [key, value] of index.iterator()) {
yield { key, value }
}
}
const task = async () => {
await queue.add(updateIndex(index))
}
// TODO: all()
const close = async () => {
events.off('update', updateIndex(index))
events.off('update', task)
await queue.onIdle()
await index.close()
await keyValueStore.close()
}
// TODO: rename to clear()
const drop = async () => {
events.off('update', updateIndex(index))
events.off('update', task)
await queue.onIdle()
await index.clear()
await keyValueStore.drop()
}
// Listen for update events from the database and update the index on every update
events.on('update', updateIndex(index))
events.on('update', task)
return {
...keyValueStore,

View File

@ -39,7 +39,7 @@ const KeyValue = async ({ OpLog, Database, ipfs, identity, databaseId, accessCon
return {
...database,
type: 'kv',
type: 'keyvalue',
put,
set: put, // Alias for put()
del,

View File

@ -248,33 +248,34 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora
const traversed = {}
// Current entry during traversal
let entry
// Start traversal
// Start traversal and process stack until it's empty (traversed the full log)
while (stack.length > 0) {
// Process stack until it's empty (traversed the full log)
// or until shouldStopFn returns true
const done = await shouldStopFn(entry)
if (done === true) {
break
}
// Get the next entry from the stack
entry = stack.pop()
const hash = entry.hash
// If we have an entry that we haven't traversed yet, process it
if (entry && !traversed[hash]) {
// Add to the hashes we've traversed
traversed[hash] = true
// Yield the current entry
yield entry
// Add hashes of next entries to the stack from entry's
// causal connection (next) and references to history (refs)
for (const nextHash of [...entry.next, ...entry.refs]) {
// Check if we've already traversed this entry
if (!traversed[nextHash]) {
// Fetch the next entry
const next = await get(nextHash)
if (next) {
// Add the next entry in front of the stack and sort
stack = [next, ...stack].sort(sortFn)
if (entry) {
const hash = entry.hash
// If we have an entry that we haven't traversed yet, process it
if (!traversed[hash]) {
// Yield the current entry
yield entry
// If we should stop traversing, stop here
const done = await shouldStopFn(entry)
if (done === true) {
break
}
// Add to the hashes we've traversed
traversed[hash] = true
// Add hashes of next entries to the stack from entry's
// causal connection (next) and references to history (refs)
for (const nextHash of [...entry.next, ...entry.refs]) {
// Check if we've already traversed this entry
if (!traversed[nextHash]) {
// Fetch the next entry
const next = await get(nextHash)
if (next) {
// Add the next entry in front of the stack and sort
stack = [next, ...stack].sort(sortFn)
}
}
}
}
@ -337,12 +338,11 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora
const start = (lt || (lte || await heads())).filter(isDefined)
const end = (gt || gte) ? await get(gt || gte) : null
const amountToIterate = end || amount === -1
? -1
: (lte || lt ? amount - 1 : amount)
const amountToIterate = (end || amount === -1) ? -1 : amount
let count = 0
const shouldStopTraversal = async (entry) => {
count++
if (!entry) {
return false
}
@ -352,7 +352,6 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora
if (end && Entry.isEqual(entry, end)) {
return true
}
count++
return false
}
@ -376,7 +375,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora
}
if (useBuffer) {
const endIndex = buffer.keys.length - 1
const endIndex = buffer.keys.length
const startIndex = endIndex - amount
const keys = buffer.keys.slice(startIndex, endIndex)
for (const key of keys) {

View File

@@ -0,0 +1,263 @@
// Unit tests for the DocumentStore database type: put/get/del, error cases,
// and query(), exercised against both the default index field ('_id') and a
// custom index field ('doc'). The suite is run once per available IPFS API.
import { deepStrictEqual, strictEqual } from 'assert'
import rimraf from 'rimraf'
import { Log, Entry } from '../../src/oplog/index.js'
import { DocumentStore, Database } from '../../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../../src/storage/index.js'
import { config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import { createTestIdentities, cleanUpTestIdentities } from '../fixtures/orbit-db-identity-keys.js'

const { sync: rmrf } = rimraf

// Bundle of oplog/storage constructors injected into the DocumentStore factory.
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

Object.keys(testAPIs).forEach((IPFS) => {
  describe('DocumentStore Database (' + IPFS + ')', function () {
    this.timeout(config.timeout * 2)

    let ipfsd
    let ipfs
    let keystore, signingKeyStore
    let accessController
    let identities1
    let testIdentity1
    let db

    const databaseId = 'documentstore-AAA'

    before(async () => {
      // Start an IPFS instance and create the identity used by every test.
      ipfsd = await startIpfs(IPFS, config.daemon1)
      ipfs = ipfsd.api

      const [identities, testIdentities] = await createTestIdentities(ipfs)
      identities1 = identities[0]
      testIdentity1 = testIdentities[0]

      // Remove any on-disk state left over from a previous run.
      rmrf(testIdentity1.id)
    })

    after(async () => {
      // Tear down identities, the IPFS daemon, any open keystores, and
      // the identity's on-disk state.
      await cleanUpTestIdentities([identities1])

      if (ipfsd) {
        await stopIpfs(ipfsd)
      }
      if (keystore) {
        await keystore.close()
      }
      if (signingKeyStore) {
        await signingKeyStore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
    })

    describe('Default index \'_id\'', () => {
      beforeEach(async () => {
        // Fresh store (indexBy defaults to '_id') for every test.
        db = await DocumentStore({ OpLog, Database, ipfs, identity: testIdentity1, databaseId, accessController })
      })

      afterEach(async () => {
        if (db) {
          await db.drop()
          await db.close()
        }
      })

      it('creates a document store', async () => {
        strictEqual(db.databaseId, databaseId)
        strictEqual(db.type, 'documentstore')
        strictEqual(db.indexBy, '_id')
      })

      it('gets a document', async () => {
        const key = 'hello world 1'

        const expected = { _id: key, msg: 'writing 1 to db' }

        await db.put(expected)

        const doc = await db.get(key)
        deepStrictEqual(doc, expected)
      })

      it('throws an error when putting a document with the wrong key', async () => {
        let err
        const key = 'hello world 1'

        const expected = { wrong_key: key, msg: 'writing 1 to db' }

        try {
          await db.put(expected)
        } catch (e) {
          err = e
        }
        strictEqual(err.message, 'The provided document doesn\'t contain field \'_id\'')
      })

      // NOTE(review): despite its name, this test only exercises put() with a
      // bad key (it never calls get) — it duplicates the previous test.
      it('throws an error when getting a document with the wrong key', async () => {
        let err
        const key = 'hello world 1'

        const expected = { wrong_key: key, msg: 'writing 1 to db' }

        try {
          await db.put(expected)
        } catch (e) {
          err = e
        }
        strictEqual(err.message, 'The provided document doesn\'t contain field \'_id\'')
      })

      it('deletes a document', async () => {
        const key = 'hello world 1'

        await db.put({ _id: key, msg: 'writing 1 to db' })
        await db.del(key)

        const doc = await db.get(key)
        strictEqual(doc, undefined)
      })

      it('throws an error when deleting a non-existent document', async () => {
        const key = 'i do not exist'
        let err

        try {
          await db.del(key)
        } catch (e) {
          err = e
        }

        strictEqual(err.message, `No document with key '${key}' in the database`)
      })

      it('queries for a document', async () => {
        // The updated record replaces the first put for the same _id, and the
        // deleted record must not appear in the query result.
        const expected = { _id: 'hello world 1', msg: 'writing new 1 to db', views: 10 }

        await db.put({ _id: 'hello world 1', msg: 'writing 1 to db', views: 10 })
        await db.put({ _id: 'hello world 2', msg: 'writing 2 to db', views: 5 })
        await db.put({ _id: 'hello world 3', msg: 'writing 3 to db', views: 12 })
        await db.del('hello world 3')
        await db.put(expected)

        const findFn = (doc) => doc.views > 5

        deepStrictEqual(await db.query(findFn), [expected])
      })

      it('queries for a non-existent document', async () => {
        await db.put({ _id: 'hello world 1', msg: 'writing 1 to db', views: 10 })
        await db.del('hello world 1')

        const findFn = (doc) => doc.views > 5

        deepStrictEqual(await db.query(findFn), [])
      })
    })

    describe('Custom index \'doc\'', () => {
      beforeEach(async () => {
        // Same scenarios as above, but documents are indexed by 'doc'.
        db = await DocumentStore({ OpLog, Database, ipfs, identity: testIdentity1, databaseId, accessController, indexBy: 'doc' })
      })

      afterEach(async () => {
        if (db) {
          await db.drop()
          await db.close()
        }
      })

      it('creates a document store', async () => {
        strictEqual(db.databaseId, databaseId)
        strictEqual(db.type, 'documentstore')
        strictEqual(db.indexBy, 'doc')
      })

      it('gets a document', async () => {
        const key = 'hello world 1'

        const expected = { doc: key, msg: 'writing 1 to db' }

        await db.put(expected)

        const doc = await db.get(key)
        deepStrictEqual(doc, expected)
      })

      it('deletes a document', async () => {
        const key = 'hello world 1'

        await db.put({ doc: key, msg: 'writing 1 to db' })
        await db.del(key)

        const doc = await db.get(key)
        strictEqual(doc, undefined)
      })

      it('throws an error when putting a document with the wrong key', async () => {
        let err
        const key = 'hello world 1'

        // '_id' is the wrong field here because the store indexes by 'doc'.
        const expected = { _id: key, msg: 'writing 1 to db' }

        try {
          await db.put(expected)
        } catch (e) {
          err = e
        }
        strictEqual(err.message, 'The provided document doesn\'t contain field \'doc\'')
      })

      // NOTE(review): like its '_id' counterpart, this only exercises put().
      it('throws an error when getting a document with the wrong key', async () => {
        let err
        const key = 'hello world 1'

        const expected = { _id: key, msg: 'writing 1 to db' }

        try {
          await db.put(expected)
        } catch (e) {
          err = e
        }
        strictEqual(err.message, 'The provided document doesn\'t contain field \'doc\'')
      })

      it('throws an error when deleting a non-existent document', async () => {
        const key = 'i do not exist'
        let err

        try {
          await db.del(key)
        } catch (e) {
          err = e
        }

        strictEqual(err.message, `No document with key '${key}' in the database`)
      })

      it('queries for a document', async () => {
        const expected = { doc: 'hello world 1', msg: 'writing new 1 to db', views: 10 }

        await db.put({ doc: 'hello world 1', msg: 'writing 1 to db', views: 10 })
        await db.put({ doc: 'hello world 2', msg: 'writing 2 to db', views: 5 })
        await db.put({ doc: 'hello world 3', msg: 'writing 3 to db', views: 12 })
        await db.del('hello world 3')
        await db.put(expected)

        const findFn = (doc) => doc.views > 5

        deepStrictEqual(await db.query(findFn), [expected])
      })

      it('queries for a non-existent document', async () => {
        await db.put({ doc: 'hello world 1', msg: 'writing 1 to db', views: 10 })
        await db.del('hello world 1')

        const findFn = (doc) => doc.views > 5

        deepStrictEqual(await db.query(findFn), [])
      })
    })
  })
})

350
test/db/event-store.test.js Normal file
View File

@@ -0,0 +1,350 @@
// Unit tests for the EventStore database type: add/put/get, all(), and the
// iterator's range options (amount, lt, lte, gt, gte, and combined ranges).
// The suite is run once per available IPFS API.
import { deepStrictEqual, strictEqual } from 'assert'
import mapSeries from 'p-map-series'
import rimraf from 'rimraf'
import { Log, Entry } from '../../src/oplog/index.js'
import { EventStore, Database } from '../../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../../src/storage/index.js'
import { config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import { createTestIdentities, cleanUpTestIdentities } from '../fixtures/orbit-db-identity-keys.js'

const { sync: rmrf } = rimraf

// Bundle of oplog/storage constructors injected into the EventStore factory.
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

Object.keys(testAPIs).forEach((IPFS) => {
  describe('EventStore Database (' + IPFS + ')', function () {
    this.timeout(config.timeout * 2)

    let ipfsd
    let ipfs
    let keystore, signingKeyStore
    let accessController
    let identities1
    let testIdentity1
    let db

    const databaseId = 'eventstore-AAA'

    before(async () => {
      // Start an IPFS instance and create the identity used by every test.
      ipfsd = await startIpfs(IPFS, config.daemon1)
      ipfs = ipfsd.api

      const [identities, testIdentities] = await createTestIdentities(ipfs)
      identities1 = identities[0]
      testIdentity1 = testIdentities[0]

      // Remove any on-disk state left over from a previous run.
      rmrf(testIdentity1.id)
    })

    after(async () => {
      // Tear down identities, the IPFS daemon, any open keystores, and
      // the identity's on-disk state.
      await cleanUpTestIdentities([identities1])

      if (ipfsd) {
        await stopIpfs(ipfsd)
      }
      if (keystore) {
        await keystore.close()
      }
      if (signingKeyStore) {
        await signingKeyStore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
    })

    beforeEach(async () => {
      // Fresh event store for every test.
      db = await EventStore({ OpLog, Database, ipfs, identity: testIdentity1, databaseId, accessController })
    })

    afterEach(async () => {
      if (db) {
        await db.drop()
        await db.close()
      }
    })

    it('creates an event store', async () => {
      strictEqual(db.databaseId, databaseId)
      strictEqual(db.type, 'eventstore')
    })

    it('puts an event', async () => {
      // put() ignores the key argument and behaves like add().
      const expected = 'init'

      const hash = await db.put(null, expected)

      const actual = await db.get(hash)
      strictEqual(actual, expected)
    })

    it('gets an event', async () => {
      const expected = 'init'

      const hash = await db.add(expected)

      const actual = await db.get(hash)
      strictEqual(actual, expected)
    })

    it('returns all events', async () => {
      const events = [
        'init',
        true,
        'hello',
        'friend',
        '12345',
        'empty',
        'friend33'
      ]

      for (const record of events) {
        await db.add(record)
      }

      // all() is expected to return events in insertion order.
      const all = await db.all()

      deepStrictEqual(all, events)
    })

    describe('Iterator Options', () => {
      let hashes = []
      const last = arr => arr[arr.length - 1]
      const first = arr => arr[0]

      beforeEach(async () => {
        // Add 'hello0'..'hello4' sequentially and keep their hashes so the
        // range tests can anchor on the first (root) and last (head) entries.
        hashes = []
        hashes = await mapSeries([0, 1, 2, 3, 4], (i) => db.add('hello' + i))
      })

      // In all tests below the iterator yields newest-first; unshift()
      // reverses that, so `expected` is written oldest-first.
      describe('amount', () => {
        it('returns one item', async () => {
          const expected = ['hello4']

          const all = []
          for await (const record of db.iterator({ amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all, expected)
        })

        it('returns two items', async () => {
          const expected = ['hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all, expected)
        })

        it('returns three items', async () => {
          const expected = ['hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ amount: 3 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 3)
          deepStrictEqual(all, expected)
        })

        it('sets \'amount\' greater than items available', async () => {
          const expected = ['hello0', 'hello1', 'hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ amount: 100 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 5)
          deepStrictEqual(all, expected)
        })

        it('sets \'amount\' to 0', async () => {
          const expected = []

          const all = []
          for await (const record of db.iterator({ amount: 0 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 0)
          deepStrictEqual(all, expected)
        })
      })

      describe('lt', () => {
        it('returns all items less than head', async () => {
          const expected = ['hello0', 'hello1', 'hello2', 'hello3']

          const all = []
          for await (const record of db.iterator({ lt: last(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 4)
          deepStrictEqual(all, expected)
        })

        it('returns one item less than head', async () => {
          const expected = ['hello3']

          const all = []
          for await (const record of db.iterator({ lt: last(hashes), amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all, expected)
        })

        it('returns two items less than head', async () => {
          const expected = ['hello2', 'hello3']

          const all = []
          for await (const record of db.iterator({ lt: last(hashes), amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all, expected)
        })
      })

      describe('lte', () => {
        it('returns all items less or equal to head', async () => {
          const expected = ['hello0', 'hello1', 'hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ lte: last(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 5)
          deepStrictEqual(all, expected)
        })

        it('returns one item less than or equal to head', async () => {
          const expected = ['hello4']

          const all = []
          for await (const record of db.iterator({ lte: last(hashes), amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all, expected)
        })

        it('returns two items less than or equal to head', async () => {
          const expected = ['hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ lte: last(hashes), amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all, expected)
        })
      })

      describe('gt', () => {
        it('returns all items greater than root', async () => {
          const expected = ['hello1', 'hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ gt: first(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 4)
          deepStrictEqual(all, expected)
        })

        it('returns one item greater than root', async () => {
          const expected = ['hello1']

          const all = []
          for await (const record of db.iterator({ gt: first(hashes), amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all, expected)
        })

        it('returns two items greater than root', async () => {
          const expected = ['hello1', 'hello2']

          const all = []
          for await (const record of db.iterator({ gt: first(hashes), amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all, expected)
        })
      })

      describe('gte', () => {
        it('returns all items greater than or equal to root', async () => {
          const expected = ['hello0', 'hello1', 'hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ gte: first(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 5)
          deepStrictEqual(all, expected)
        })

        it('returns one item greater than or equal to root', async () => {
          const expected = ['hello0']

          const all = []
          for await (const record of db.iterator({ gte: first(hashes), amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all, expected)
        })

        it('returns two items greater than or equal to root', async () => {
          const expected = ['hello0', 'hello1']

          const all = []
          for await (const record of db.iterator({ gte: first(hashes), amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all, expected)
        })
      })

      // NOTE(review): an async describe() callback is discouraged — mocha
      // ignores the returned promise; this one happens to work because the
      // body is synchronous. Consider dropping `async`.
      describe('range', async () => {
        it('returns all items greater than root and less than head', async () => {
          const expected = ['hello1', 'hello2', 'hello3']

          const all = []
          for await (const record of db.iterator({ gt: first(hashes), lt: last(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 3)
          deepStrictEqual(all, expected)
        })
      })
    })
  })
})

View File

@@ -0,0 +1,188 @@
// Unit tests for the KeyValuePersisted database type (a KeyValue store backed
// by a persisted Level index): set/put/get/del, update semantics, and full
// iteration. The suite is run once per available IPFS API.
import { deepStrictEqual, strictEqual } from 'assert'
// NOTE(review): mapSeries is imported but never used in this file.
import mapSeries from 'p-map-series'
import rimraf from 'rimraf'
import { Log, Entry } from '../../src/oplog/index.js'
import { KeyValuePersisted, KeyValue, Database } from '../../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../../src/storage/index.js'
import { config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import { createTestIdentities, cleanUpTestIdentities } from '../fixtures/orbit-db-identity-keys.js'

const { sync: rmrf } = rimraf

// Bundle of oplog/storage constructors injected into the database factories.
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

Object.keys(testAPIs).forEach((IPFS) => {
  describe('KeyValuePersisted Database (' + IPFS + ')', function () {
    this.timeout(config.timeout * 2)

    let ipfsd
    let ipfs
    let keystore, signingKeyStore
    let accessController
    let identities1
    let testIdentity1
    let db

    const databaseId = 'keyvalue-AAA'

    before(async () => {
      // Start an IPFS instance and create the identity used by every test.
      ipfsd = await startIpfs(IPFS, config.daemon1)
      ipfs = ipfsd.api

      const [identities, testIdentities] = await createTestIdentities(ipfs)
      identities1 = identities[0]
      testIdentity1 = testIdentities[0]

      // Remove any on-disk state left over from a previous run.
      rmrf(testIdentity1.id)
    })

    after(async () => {
      // Tear down identities, the IPFS daemon, any open keystores, and
      // the identity's on-disk state.
      await cleanUpTestIdentities([identities1])

      if (ipfsd) {
        await stopIpfs(ipfsd)
      }
      if (keystore) {
        await keystore.close()
      }
      if (signingKeyStore) {
        await signingKeyStore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
    })

    beforeEach(async () => {
      // Fresh persisted keyvalue store (wrapping a plain KeyValue) per test.
      db = await KeyValuePersisted({ OpLog, KeyValue, Database, ipfs, identity: testIdentity1, databaseId, accessController })
    })

    afterEach(async () => {
      if (db) {
        await db.drop()
        await db.close()
      }
    })

    it('creates a keyvalue store', async () => {
      strictEqual(db.databaseId, databaseId)
      strictEqual(db.type, 'keyvalue')
    })

    it('returns 0 items when it\'s a fresh database', async () => {
      const all = []
      for await (const item of db.iterator()) {
        all.unshift(item)
      }

      strictEqual(all.length, 0)
    })

    it('sets a key/value pair', async () => {
      // The CID of the oplog entry is deterministic for this identity/payload.
      const expected = 'zdpuAyRbzMUs1v7B1gqRRHe6rnxwYbHKzDhxh3rJanEjoucHt'

      const actual = await db.set('key1', 'value1')
      strictEqual(actual, expected)
    })

    it('puts a key/value pair', async () => {
      const expected = 'zdpuAyRbzMUs1v7B1gqRRHe6rnxwYbHKzDhxh3rJanEjoucHt'

      const actual = await db.put('key1', 'value1')
      strictEqual(actual, expected)
    })

    it('gets a key/value pair\'s value', async () => {
      const key = 'key1'
      const expected = 'value1'

      const hash = await db.put(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('get key\'s updated value when using put', async () => {
      const key = 'key1'
      const expected = 'hello2'

      await db.put(key, 'value1')
      await db.put(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('get key\'s updated value when using set', async () => {
      const key = 'key1'
      const expected = 'hello2'

      await db.set(key, 'value1')
      await db.set(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('get key\'s updated value when using set then put', async () => {
      const key = 'key1'
      const expected = 'hello2'

      await db.set(key, 'value1')
      await db.put(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('get key\'s updated value when using put then set', async () => {
      const key = 'key1'
      const expected = 'hello2'

      await db.put(key, 'value1')
      await db.set(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('deletes a key/value pair', async () => {
      const key = 'key1'
      const expected = undefined

      await db.put(key, 'value1')
      const hash = await db.del(key)

      const actual = await db.get(hash)
      strictEqual(actual, expected)
    })

    it('deletes a non-existent key/value pair', async () => {
      const expected = undefined

      const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')

      const actual = await db.get(del)
      strictEqual(actual, expected)
    })

    it('returns all key/value pairs', async () => {
      const keyvalue = [
        { key: 'key1', value: 'init' },
        { key: 'key2', value: true },
        { key: 'key3', value: 'hello' },
        { key: 'key4', value: 'friend' },
        { key: 'key5', value: '12345' },
        { key: 'key6', value: 'empty' },
        { key: 'key7', value: 'friend33' }
      ]

      for (const { key, value } of Object.values(keyvalue)) {
        await db.put(key, value)
      }

      // push() preserves iterator order — presumably the persisted Level
      // index iterates in ascending key order (key1..key7); confirm against
      // the plain KeyValue suite, which unshift()s instead.
      const all = []
      for await (const pair of db.iterator()) {
        all.push(pair)
      }

      deepStrictEqual(all, keyvalue)
    })
  })
})

188
test/db/keyvalue.test.js Normal file
View File

@ -0,0 +1,188 @@
// Unit tests for the KeyValue database type: set/put/get/del, update
// semantics, and full iteration. The suite is run once per available IPFS API.
import { deepStrictEqual, strictEqual } from 'assert'
// NOTE(review): mapSeries is imported but never used in this file.
import mapSeries from 'p-map-series'
import rimraf from 'rimraf'
import { Log, Entry } from '../../src/oplog/index.js'
import { KeyValue, Database } from '../../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../../src/storage/index.js'
import { config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import { createTestIdentities, cleanUpTestIdentities } from '../fixtures/orbit-db-identity-keys.js'

const { sync: rmrf } = rimraf

// Bundle of oplog/storage constructors injected into the KeyValue factory.
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

Object.keys(testAPIs).forEach((IPFS) => {
  describe('KeyValue Database (' + IPFS + ')', function () {
    this.timeout(config.timeout * 2)

    let ipfsd
    let ipfs
    let keystore, signingKeyStore
    let accessController
    let identities1
    let testIdentity1
    let db

    const databaseId = 'keyvalue-AAA'

    before(async () => {
      // Start an IPFS instance and create the identity used by every test.
      ipfsd = await startIpfs(IPFS, config.daemon1)
      ipfs = ipfsd.api

      const [identities, testIdentities] = await createTestIdentities(ipfs)
      identities1 = identities[0]
      testIdentity1 = testIdentities[0]

      // Remove any on-disk state left over from a previous run.
      rmrf(testIdentity1.id)
    })

    after(async () => {
      // Tear down identities, the IPFS daemon, any open keystores, and
      // the identity's on-disk state.
      await cleanUpTestIdentities([identities1])

      if (ipfsd) {
        await stopIpfs(ipfsd)
      }
      if (keystore) {
        await keystore.close()
      }
      if (signingKeyStore) {
        await signingKeyStore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
    })

    beforeEach(async () => {
      // Fresh keyvalue store for every test.
      db = await KeyValue({ OpLog, Database, ipfs, identity: testIdentity1, databaseId, accessController })
    })

    afterEach(async () => {
      if (db) {
        await db.drop()
        await db.close()
      }
    })

    it('creates a keyvalue store', async () => {
      strictEqual(db.databaseId, databaseId)
      strictEqual(db.type, 'keyvalue')
    })

    it('returns 0 items when it\'s a fresh database', async () => {
      const all = []
      for await (const item of db.iterator()) {
        all.unshift(item)
      }

      strictEqual(all.length, 0)
    })

    it('sets a key/value pair', async () => {
      // The CID of the oplog entry is deterministic for this identity/payload.
      const expected = 'zdpuAyRbzMUs1v7B1gqRRHe6rnxwYbHKzDhxh3rJanEjoucHt'

      const actual = await db.set('key1', 'value1')
      strictEqual(actual, expected)
    })

    it('puts a key/value pair', async () => {
      const expected = 'zdpuAyRbzMUs1v7B1gqRRHe6rnxwYbHKzDhxh3rJanEjoucHt'

      const actual = await db.put('key1', 'value1')
      strictEqual(actual, expected)
    })

    it('gets a key/value pair\'s value', async () => {
      const key = 'key1'
      const expected = 'value1'

      const hash = await db.put(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('get key\'s updated value when using put', async () => {
      const key = 'key1'
      const expected = 'hello2'

      await db.put(key, 'value1')
      await db.put(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('get key\'s updated value when using set', async () => {
      const key = 'key1'
      const expected = 'hello2'

      await db.set(key, 'value1')
      await db.set(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('get key\'s updated value when using set then put', async () => {
      const key = 'key1'
      const expected = 'hello2'

      await db.set(key, 'value1')
      await db.put(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('get key\'s updated value when using put then set', async () => {
      const key = 'key1'
      const expected = 'hello2'

      await db.put(key, 'value1')
      await db.set(key, expected)
      const actual = await db.get(key)
      strictEqual(actual, expected)
    })

    it('deletes a key/value pair', async () => {
      const key = 'key1'
      const expected = undefined

      await db.put(key, 'value1')
      const hash = await db.del(key)

      const actual = await db.get(hash)
      strictEqual(actual, expected)
    })

    it('deletes a non-existent key/value pair', async () => {
      const expected = undefined

      const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')

      const actual = await db.get(del)
      strictEqual(actual, expected)
    })

    it('returns all key/value pairs', async () => {
      const keyvalue = [
        { key: 'key1', value: 'init' },
        { key: 'key2', value: true },
        { key: 'key3', value: 'hello' },
        { key: 'key4', value: 'friend' },
        { key: 'key5', value: '12345' },
        { key: 'key6', value: 'empty' },
        { key: 'key7', value: 'friend33' }
      ]

      for (const { key, value } of Object.values(keyvalue)) {
        await db.put(key, value)
      }

      // unshift() reverses the iterator order — presumably the oplog iterator
      // yields newest-first, so the result is insertion order; the persisted
      // variant of this suite push()es instead. Confirm ordering contract.
      const all = []
      for await (const pair of db.iterator()) {
        all.unshift(pair)
      }

      deepStrictEqual(all, keyvalue)
    })
  })
})

View File

@@ -0,0 +1,140 @@
// Replication tests for the DocumentStore database type: two IPFS nodes with
// two identities write interleaved documents and must converge to identical
// contents. The suite is run once per available IPFS API.
import { deepStrictEqual } from 'assert'
import rimraf from 'rimraf'
import { Log, Entry } from '../../../src/oplog/index.js'
import { DocumentStore, Database } from '../../../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../../../src/storage/index.js'
import { getIpfsPeerId, waitForPeers, config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import connectPeers from '../../utils/connect-nodes.js'
import { createTestIdentities, cleanUpTestIdentities } from '../../fixtures/orbit-db-identity-keys.js'
import waitFor from '../../utils/wait-for.js'

const { sync: rmrf } = rimraf

// Bundle of oplog/storage constructors injected into the DocumentStore factory.
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

Object.keys(testAPIs).forEach((IPFS) => {
  describe('DocumentStore Replication (' + IPFS + ')', function () {
    this.timeout(config.timeout * 2)

    let ipfsd1, ipfsd2
    let ipfs1, ipfs2
    let keystore, signingKeyStore
    let peerId1, peerId2
    let accessController
    let identities1, identities2
    let testIdentity1, testIdentity2
    let db1, db2

    const databaseId = 'documentstore-AAA'

    before(async () => {
      // Start two IPFS instances and connect them to each other.
      ipfsd1 = await startIpfs(IPFS, config.daemon1)
      ipfsd2 = await startIpfs(IPFS, config.daemon2)
      ipfs1 = ipfsd1.api
      ipfs2 = ipfsd2.api

      await connectPeers(ipfs1, ipfs2)

      // Get the peer IDs
      peerId1 = await getIpfsPeerId(ipfs1)
      peerId2 = await getIpfsPeerId(ipfs2)

      const [identities, testIdentities] = await createTestIdentities(ipfs1, ipfs2)
      identities1 = identities[0]
      identities2 = identities[1]
      testIdentity1 = testIdentities[0]
      testIdentity2 = testIdentities[1]

      // Allow an append when the entry was written by either test identity.
      accessController = {
        canAppend: async (entry) => {
          const identity1 = await identities1.getIdentity(entry.identity)
          const identity2 = await identities2.getIdentity(entry.identity)
          return identity1.id === testIdentity1.id || identity2.id === testIdentity2.id
        }
      }

      // Remove any on-disk state left over from a previous run.
      rmrf(testIdentity1.id)
      rmrf(testIdentity2.id)
    })

    after(async () => {
      // Fix: was [identities1, identities1] — identities2 was never cleaned up.
      await cleanUpTestIdentities([identities1, identities2])

      if (ipfsd1) {
        await stopIpfs(ipfsd1)
      }
      if (ipfsd2) {
        await stopIpfs(ipfsd2)
      }
      if (keystore) {
        await keystore.close()
      }
      if (signingKeyStore) {
        await signingKeyStore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
      if (testIdentity2) {
        rmrf(testIdentity2.id)
      }
    })

    beforeEach(async () => {
      // One store per node, sharing the same databaseId so they replicate.
      db1 = await DocumentStore({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
      db2 = await DocumentStore({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })
    })

    afterEach(async () => {
      if (db1) {
        await db1.drop()
        await db1.close()
      }
      if (db2) {
        await db2.drop()
        await db2.close()
      }
    })

    it('gets all documents', async () => {
      // Count 'update' events on each side so we can wait until both
      // databases have seen every write.
      let updateDB1Count = 0
      let updateDB2Count = 0

      const onDB1Update = (entry) => {
        ++updateDB1Count
      }

      const onDB2Update = (entry) => {
        ++updateDB2Count
      }

      db1.events.on('update', onDB1Update)
      db2.events.on('update', onDB2Update)

      // Make sure the nodes see each other on the database's pubsub topic
      // before writing, so no update is missed.
      await waitForPeers(ipfs1, [peerId2], databaseId)
      await waitForPeers(ipfs2, [peerId1], databaseId)

      // Interleave writes across the two databases.
      const puts = []
      puts.push(await db1.put({ _id: 1, msg: 'record 1 on db 1' }))
      puts.push(await db2.put({ _id: 2, msg: 'record 2 on db 2' }))
      puts.push(await db1.put({ _id: 3, msg: 'record 3 on db 1' }))
      puts.push(await db2.put({ _id: 4, msg: 'record 4 on db 2' }))

      // Block until each side has observed all four updates.
      await waitFor(() => updateDB1Count, () => puts.length)
      await waitFor(() => updateDB2Count, () => puts.length)

      // Both databases must converge to the same set of documents.
      const all1 = []
      for await (const item of db1.iterator()) {
        all1.unshift(item)
      }

      const all2 = []
      for await (const item of db2.iterator()) {
        all2.unshift(item)
      }

      deepStrictEqual(all1, all2)
    })
  })
})

View File

@ -0,0 +1,144 @@
import { deepStrictEqual } from 'assert'
import rimraf from 'rimraf'
import { Log, Entry } from '../../../src/oplog/index.js'
import { EventStore, Database } from '../../../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../../../src/storage/index.js'
import { getIpfsPeerId, waitForPeers, config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import connectPeers from '../../utils/connect-nodes.js'
import { createTestIdentities, cleanUpTestIdentities } from '../../fixtures/orbit-db-identity-keys.js'
import waitFor from '../../utils/wait-for.js'

const { sync: rmrf } = rimraf

const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

// Replication test: two EventStore instances on connected IPFS nodes must
// end up with identical event logs after interleaved writes.
for (const IPFS of Object.keys(testAPIs)) {
  describe(`EventStore Replication (${IPFS})`, function () {
    this.timeout(config.timeout * 2)

    let ipfsd1, ipfsd2
    let ipfs1, ipfs2
    let keystore, signingKeyStore
    let peerId1, peerId2
    let accessController
    let identities1, identities2
    let testIdentity1, testIdentity2
    let db1, db2

    const databaseId = 'events-AAA'

    before(async () => {
      // Spin up two IPFS daemons and connect them to each other.
      ipfsd1 = await startIpfs(IPFS, config.daemon1)
      ipfsd2 = await startIpfs(IPFS, config.daemon2)
      ipfs1 = ipfsd1.api
      ipfs2 = ipfsd2.api

      await connectPeers(ipfs1, ipfs2)

      // Record both peer IDs for the pubsub peer-discovery waits below.
      peerId1 = await getIpfsPeerId(ipfs1)
      peerId2 = await getIpfsPeerId(ipfs2)

      const [identities, testIdentities] = await createTestIdentities(ipfs1, ipfs2)
      ;[identities1, identities2] = identities
      ;[testIdentity1, testIdentity2] = testIdentities

      // Entries are accepted when signed by either of the two test identities.
      accessController = {
        canAppend: async (entry) => {
          const identity1 = await identities1.getIdentity(entry.identity)
          const identity2 = await identities2.getIdentity(entry.identity)
          return identity1.id === testIdentity1.id || identity2.id === testIdentity2.id
        }
      }

      rmrf(testIdentity1.id)
      rmrf(testIdentity2.id)
    })

    after(async () => {
      await cleanUpTestIdentities([identities1, identities2])

      if (ipfsd1) {
        await stopIpfs(ipfsd1)
      }
      if (ipfsd2) {
        await stopIpfs(ipfsd2)
      }
      if (keystore) {
        await keystore.close()
      }
      if (signingKeyStore) {
        await signingKeyStore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
      if (testIdentity2) {
        rmrf(testIdentity2.id)
      }
    })

    beforeEach(async () => {
      db1 = await EventStore({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
      db2 = await EventStore({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })
    })

    afterEach(async () => {
      if (db1) {
        await db1.drop()
        await db1.close()
      }
      if (db2) {
        await db2.drop()
        await db2.close()
      }
    })

    it('gets all documents', async () => {
      let updates1 = 0
      let updates2 = 0

      // Count 'update' events so we know when each peer has seen everything.
      db1.events.on('update', (entry) => {
        ++updates1
      })
      db2.events.on('update', (entry) => {
        ++updates2
      })

      await waitForPeers(ipfs1, [peerId2], databaseId)
      await waitForPeers(ipfs2, [peerId1], databaseId)

      // Interleaved writes from both peers.
      const puts = []
      puts.push(await db1.add('init'))
      puts.push(await db2.add(true))
      puts.push(await db1.add('hello'))
      puts.push(await db2.add('friend'))
      puts.push(await db2.add('12345'))
      puts.push(await db2.add('empty'))
      puts.push(await db2.add(''))
      puts.push(await db2.add('friend33'))

      await waitFor(() => updates1, () => puts.length)
      await waitFor(() => updates2, () => puts.length)

      // The iterator yields newest-first; reverse to oldest-first before comparing.
      const all1 = []
      for await (const record of db1.iterator()) {
        all1.push(record)
      }
      all1.reverse()

      const all2 = []
      for await (const record of db2.iterator()) {
        all2.push(record)
      }
      all2.reverse()

      deepStrictEqual(all1, all2)
    })
  })
}

View File

@ -0,0 +1,144 @@
import { deepStrictEqual } from 'assert'
import rimraf from 'rimraf'
import { Log, Entry } from '../../../src/oplog/index.js'
import { KeyValue, Database } from '../../../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../../../src/storage/index.js'
import { getIpfsPeerId, waitForPeers, config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import connectPeers from '../../utils/connect-nodes.js'
import { createTestIdentities, cleanUpTestIdentities } from '../../fixtures/orbit-db-identity-keys.js'
import waitFor from '../../utils/wait-for.js'

const { sync: rmrf } = rimraf

const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

// Replication test: two KeyValue stores on connected IPFS nodes must hold
// identical key/value pairs after interleaved writes from both peers.
for (const IPFS of Object.keys(testAPIs)) {
  describe(`KeyValue Replication (${IPFS})`, function () {
    this.timeout(config.timeout * 2)

    let ipfsd1, ipfsd2
    let ipfs1, ipfs2
    let keystore, signingKeyStore
    let peerId1, peerId2
    let accessController
    let identities1, identities2
    let testIdentity1, testIdentity2
    let db1, db2

    const databaseId = 'keyvalue-AAA'

    before(async () => {
      // Bring up two IPFS daemons and connect them.
      ipfsd1 = await startIpfs(IPFS, config.daemon1)
      ipfsd2 = await startIpfs(IPFS, config.daemon2)
      ipfs1 = ipfsd1.api
      ipfs2 = ipfsd2.api

      await connectPeers(ipfs1, ipfs2)

      // Peer IDs are needed for the pubsub discovery waits in the test.
      peerId1 = await getIpfsPeerId(ipfs1)
      peerId2 = await getIpfsPeerId(ipfs2)

      const [identities, testIdentities] = await createTestIdentities(ipfs1, ipfs2)
      ;[identities1, identities2] = identities
      ;[testIdentity1, testIdentity2] = testIdentities

      // Accept writes signed by either of the two test identities.
      accessController = {
        canAppend: async (entry) => {
          const identity1 = await identities1.getIdentity(entry.identity)
          const identity2 = await identities2.getIdentity(entry.identity)
          return identity1.id === testIdentity1.id || identity2.id === testIdentity2.id
        }
      }

      rmrf(testIdentity1.id)
      rmrf(testIdentity2.id)
    })

    after(async () => {
      await cleanUpTestIdentities([identities1, identities2])

      if (ipfsd1) {
        await stopIpfs(ipfsd1)
      }
      if (ipfsd2) {
        await stopIpfs(ipfsd2)
      }
      if (keystore) {
        await keystore.close()
      }
      if (signingKeyStore) {
        await signingKeyStore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
      if (testIdentity2) {
        rmrf(testIdentity2.id)
      }
    })

    beforeEach(async () => {
      db1 = await KeyValue({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
      db2 = await KeyValue({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })
    })

    afterEach(async () => {
      if (db1) {
        await db1.drop()
        await db1.close()
      }
      if (db2) {
        await db2.drop()
        await db2.close()
      }
    })

    it('gets all key/value pairs', async () => {
      let updates1 = 0
      let updates2 = 0

      // Count 'update' events to detect when each replica is fully synced.
      db1.events.on('update', (entry) => {
        ++updates1
      })
      db2.events.on('update', (entry) => {
        ++updates2
      })

      await waitForPeers(ipfs1, [peerId2], databaseId)
      await waitForPeers(ipfs2, [peerId1], databaseId)

      // Interleaved writes from both peers.
      const writes = []
      writes.push(await db1.put('key1', 'init'))
      writes.push(await db2.put('key2', true))
      writes.push(await db1.put('key3', 'hello'))
      writes.push(await db2.put('key4', 'friend'))
      writes.push(await db2.put('key5', '12345'))
      writes.push(await db2.put('key6', 'empty'))
      writes.push(await db2.put('key7', ''))
      writes.push(await db2.put('key8', 'friend33'))

      await waitFor(() => updates1, () => writes.length)
      await waitFor(() => updates2, () => writes.length)

      // The iterator yields newest-first; reverse to oldest-first before comparing.
      const all1 = []
      for await (const record of db1.iterator()) {
        all1.push(record)
      }
      all1.reverse()

      const all2 = []
      for await (const record of db2.iterator()) {
        all2.push(record)
      }
      all2.reverse()

      deepStrictEqual(all1, all2)
    })
  })
}

View File

@ -1,201 +0,0 @@
// import assert from 'assert'
// import rmrf from 'rimraf'
// import path from 'path'
// import OrbitDB from '../src/OrbitDB.js'
// // Include test utilities
// import {
// config,
// startIpfs,
// stopIpfs,
// testAPIs,
// } from 'orbit-db-test-utils'
// const dbPath = './orbitdb/tests/docstore'
// Object.keys(testAPIs).forEach(API => {
// describe(`orbit-db - Document Store (${API})`, function() {
// this.timeout(config.timeout)
// let ipfsd, ipfs, orbitdb1, db
// before(async () => {
// ipfsd = await startIpfs(API, config.daemon1)
// ipfs = ipfsd.api
// orbitdb1 = await OrbitDB.createInstance(ipfs, { directory: path.join(dbPath, '1') })
// })
// after(async () => {
// if(orbitdb1)
// await orbitdb1.stop()
// if (ipfsd)
// await stopIpfs(ipfsd)
// })
// it('creates and opens a database', async () => {
// db = await orbitdb1.docstore('first doc database')
// assert.notEqual(db, null)
// assert.equal(db.type, 'docstore')
// assert.equal(db.dbname, 'first doc database')
// })
// describe('Default index \'_id\'', function() {
// beforeEach(async () => {
// const options = {
// replicate: false,
// maxHistory: 0,
// path: dbPath,
// }
// db = await orbitdb1.docstore('orbit-db-tests', options)
// })
// afterEach(async () => {
// await db.drop()
// })
// it('put', async () => {
// const doc = { _id: 'hello world', doc: 'all the things'}
// await db.put(doc)
// const value = db.get('hello world')
// assert.deepEqual(value, [doc])
// })
// it('get - partial term match', async () => {
// const doc1 = { _id: 'hello world', doc: 'some things'}
// const doc2 = { _id: 'hello universe', doc: 'all the things'}
// const doc3 = { _id: 'sup world', doc: 'other things'}
// await db.put(doc1)
// await db.put(doc2)
// await db.put(doc3)
// const value = db.get('hello')
// assert.deepEqual(value, [doc1, doc2])
// })
// it('get after delete', async () => {
// const doc1 = { _id: 'hello world', doc: 'some things'}
// const doc2 = { _id: 'hello universe', doc: 'all the things'}
// const doc3 = { _id: 'sup world', doc: 'other things'}
// await db.put(doc1)
// await db.put(doc2)
// await db.put(doc3)
// await db.del('hello universe')
// const value1 = db.get('hello')
// const value2 = db.get('sup')
// assert.deepEqual(value1, [doc1])
// assert.deepEqual(value2, [doc3])
// })
// it('put updates a value', async () => {
// const doc1 = { _id: 'hello world', doc: 'all the things'}
// const doc2 = { _id: 'hello world', doc: 'some of the things'}
// await db.put(doc1)
// await db.put(doc2)
// const value = db.get('hello')
// assert.deepEqual(value, [doc2])
// })
// it('query', async () => {
// const doc1 = { _id: 'hello world', doc: 'all the things', views: 17}
// const doc2 = { _id: 'sup world', doc: 'some of the things', views: 10}
// const doc3 = { _id: 'hello other world', doc: 'none of the things', views: 5}
// const doc4 = { _id: 'hey universe', doc: ''}
// await db.put(doc1)
// await db.put(doc2)
// await db.put(doc3)
// await db.put(doc4)
// const value1 = db.query((e) => e.views > 5)
// const value2 = db.query((e) => e.views > 10)
// const value3 = db.query((e) => e.views > 17)
// assert.deepEqual(value1, [doc1, doc2])
// assert.deepEqual(value2, [doc1])
// assert.deepEqual(value3, [])
// })
// it('query after delete', async () => {
// const doc1 = { _id: 'hello world', doc: 'all the things', views: 17}
// const doc2 = { _id: 'sup world', doc: 'some of the things', views: 10}
// const doc3 = { _id: 'hello other world', doc: 'none of the things', views: 5}
// const doc4 = { _id: 'hey universe', doc: ''}
// await db.put(doc1)
// await db.put(doc2)
// await db.put(doc3)
// await db.del('hello world')
// await db.put(doc4)
// const value1 = db.query((e) => e.views >= 5)
// const value2 = db.query((e) => e.views >= 10)
// assert.deepEqual(value1, [doc2, doc3])
// assert.deepEqual(value2, [doc2])
// })
// it('query returns full op', async () => {
// const doc1 = { _id: 'hello world', doc: 'all the things', views: 17}
// const doc2 = { _id: 'sup world', doc: 'some of the things', views: 10}
// const expectedOperation = {
// op: 'PUT',
// key: 'sup world',
// value: {
// _id: 'sup world',
// doc: 'some of the things',
// views: 10
// },
// }
// await db.put(doc1)
// await db.put(doc2)
// const res = db.query(e => e.payload.value.views < 17, { fullOp: true })[0]
// assert.notEqual(res, undefined)
// assert.notEqual(res.hash, undefined)
// assert.notEqual(res.id, undefined)
// assert.deepEqual(res.payload, expectedOperation)
// assert.notEqual(res.next, undefined)
// assert.equal(res.next.length, 1)
// assert.equal(res.refs.length, 0)
// assert.equal(res.v, 2)
// assert.notEqual(res.clock, undefined)
// assert.equal(res.clock.time, 2)
// assert.notEqual(res.key, undefined)
// assert.notEqual(res.sig, undefined)
// })
// })
// describe('Specified index', function() {
// beforeEach(async () => {
// const options = {
// indexBy: 'doc',
// replicate: false,
// maxHistory: 0
// }
// db = await orbitdb1.docstore('orbit-db-tests', options)
// })
// afterEach(async () => {
// await db.drop()
// })
// it('put', async () => {
// const doc = { _id: 'hello world', doc: 'all the things'}
// await db.put(doc)
// const value = db.get('all')
// assert.deepEqual(value, [doc])
// })
// it('get - matches specified index', async () => {
// const doc1 = { _id: 'hello world', doc: 'all the things'}
// const doc2 = { _id: 'hello world', doc: 'some things'}
// await db.put(doc1)
// await db.put(doc2)
// const value1 = db.get('all')
// const value2 = db.get('some')
// assert.deepEqual(value1, [doc1])
// assert.deepEqual(value2, [doc2])
// })
// })
// })
// })

View File

@ -1,147 +0,0 @@
import { deepStrictEqual, strictEqual } from 'assert'
import rimraf from 'rimraf'
import { Log, Entry } from '../src/oplog/index.js'
import KeyStore from '../src/key-store.js'
import { DocumentStore, Database } from '../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../src/storage/index.js'
// Test utils
import { config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import connectPeers from './utils/connect-nodes.js'
import { createTestIdentities, cleanUpTestIdentities } from './fixtures/orbit-db-identity-keys.js'

const { sync: rmrf } = rimraf

const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

Object.keys(testAPIs).forEach((IPFS) => {
  // Single-peer unit tests for the DocumentStore API: put/get/del/query.
  describe('Documents Database (' + IPFS + ')', function () {
    this.timeout(config.timeout * 2)

    let ipfsd1, ipfsd2
    let ipfs1, ipfs2
    let keystore
    let identities1, identities2
    let testIdentity1, testIdentity2
    let db1, db2

    const databaseId = 'documents-AAA'

    before(async () => {
      // Start two IPFS instances (only the first is written to in these tests)
      ipfsd1 = await startIpfs(IPFS, config.daemon1)
      ipfsd2 = await startIpfs(IPFS, config.daemon2)
      ipfs1 = ipfsd1.api
      ipfs2 = ipfsd2.api

      await connectPeers(ipfs1, ipfs2)

      const [identities, testIdentities] = await createTestIdentities(ipfs1, ipfs2)
      identities1 = identities[0]
      identities2 = identities[1]
      testIdentity1 = testIdentities[0]
      testIdentity2 = testIdentities[1]

      rmrf(testIdentity1.id)
      rmrf(testIdentity2.id)
    })

    beforeEach(async () => {
      // Only testIdentity1 is allowed to write in these single-peer tests.
      const accessController = {
        canAppend: async (entry) => {
          const identity = await identities1.getIdentity(entry.identity)
          return identity.id === testIdentity1.id
        }
      }

      db1 = await DocumentStore({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
    })

    afterEach(async () => {
      if (db1) {
        await db1.drop()
        await db1.close()
      }
      if (db2) {
        await db2.drop()
        await db2.close()
      }
    })

    after(async () => {
      // Bug fix: was [identities1, identities1], which cleaned the first
      // provider twice and never released identities2.
      await cleanUpTestIdentities([identities1, identities2])

      if (ipfsd1) {
        await stopIpfs(ipfsd1)
      }
      if (ipfsd2) {
        await stopIpfs(ipfsd2)
      }
      if (keystore) {
        await keystore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
      if (testIdentity2) {
        rmrf(testIdentity2.id)
      }
    })

    describe('using database', () => {
      it('gets a document', async () => {
        const key = 'hello world 1'

        const expected = { _id: key, msg: 'writing 1 to db1' }

        await db1.put(expected)

        const doc = await db1.get(key)
        deepStrictEqual(doc, expected)
      })

      it('deletes a document', async () => {
        const key = 'hello world 1'

        await db1.put({ _id: key, msg: 'writing 1 to db1' })
        await db1.del(key)

        const doc = await db1.get(key)
        strictEqual(doc, undefined)
      })

      it('throws an error when deleting a non-existent document', async () => {
        const key = 'i do not exist'
        let err

        try {
          await db1.del(key)
        } catch (e) {
          err = e
        }

        strictEqual(err.message, `No document with key '${key}' in the database`)
      })

      it('queries for a document', async () => {
        const expected = { _id: 'hello world 1', msg: 'writing new 1 to db1', views: 10 }

        await db1.put({ _id: 'hello world 1', msg: 'writing 1 to db1', views: 10 })
        await db1.put({ _id: 'hello world 2', msg: 'writing 2 to db1', views: 5 })
        await db1.put({ _id: 'hello world 3', msg: 'writing 3 to db1', views: 12 })
        await db1.del('hello world 3')
        await db1.put(expected)

        // Only the updated 'hello world 1' has views > 5 after the delete.
        const findFn = (doc) => doc.views > 5

        deepStrictEqual(await db1.query(findFn), [expected])
      })
    })
  })
})

View File

@ -1,282 +0,0 @@
import { deepStrictEqual, strictEqual } from 'assert'
import rimraf from 'rimraf'
import { Log, Entry } from '../src/oplog/index.js'
import { Identities } from '../src/identities/index.js'
import KeyStore from '../src/key-store.js'
import { EventStore, Database } from '../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../src/storage/index.js'
// Test utils
import { config, testAPIs, startIpfs, stopIpfs, getIpfsPeerId, waitForPeers } from 'orbit-db-test-utils'
import connectPeers from './utils/connect-nodes.js'
import waitFor from './utils/wait-for.js'
import { createTestIdentities, cleanUpTestIdentities } from './fixtures/orbit-db-identity-keys.js'

const { sync: rmrf } = rimraf
const { createIdentity } = Identities
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

Object.keys(testAPIs).forEach((IPFS) => {
  // EventStore tests: replication of an append-only event log between two
  // peers, and reloading the log from storage after close.
  describe('Events Database (' + IPFS + ')', function () {
    this.timeout(config.timeout)

    let ipfsd1, ipfsd2
    let ipfs1, ipfs2
    let keystore
    let peerId1, peerId2
    let identities1, identities2
    let testIdentity1, testIdentity2
    let kv1, kv2

    const databaseId = 'events-AAA'

    // Only testIdentity1 may append; kv2 replicates read-only.
    const accessController = {
      canAppend: async (entry) => {
        const identity = await identities1.getIdentity(entry.identity)
        return identity.id === testIdentity1.id
      }
    }

    before(async () => {
      // Start two IPFS instances
      ipfsd1 = await startIpfs(IPFS, config.daemon1)
      ipfsd2 = await startIpfs(IPFS, config.daemon2)
      ipfs1 = ipfsd1.api
      ipfs2 = ipfsd2.api

      await connectPeers(ipfs1, ipfs2)

      // Get the peer IDs
      peerId1 = await getIpfsPeerId(ipfs1)
      peerId2 = await getIpfsPeerId(ipfs2)

      const [identities, testIdentities] = await createTestIdentities(ipfs1, ipfs2)
      identities1 = identities[0]
      identities2 = identities[1]
      testIdentity1 = testIdentities[0]
      testIdentity2 = testIdentities[1]

      rmrf(testIdentity1.id)
      rmrf(testIdentity2.id)
    })

    afterEach(async () => {
      if (kv1) {
        await kv1.drop()
        await kv1.close()
      }
      if (kv2) {
        await kv2.drop()
        await kv2.close()
      }
    })

    after(async () => {
      await cleanUpTestIdentities([identities1, identities2])

      if (ipfsd1) {
        await stopIpfs(ipfsd1)
      }
      if (ipfsd2) {
        await stopIpfs(ipfsd2)
      }
      if (keystore) {
        await keystore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
      if (testIdentity2) {
        rmrf(testIdentity2.id)
      }
    })

    describe('using database', () => {
      it('returns all entries in the database', async () => {
        let updateCount = 0

        const onUpdate = (entry) => {
          ++updateCount
        }

        const onError = () => {
        }

        kv1 = await EventStore({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        kv2 = await EventStore({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })

        kv2.events.on('update', onUpdate)
        kv2.events.on('error', onError)

        // Bug fix: EventStore reports its type as 'eventstore'
        // (see src/db/events.js), not 'events'.
        strictEqual(kv1.type, 'eventstore')
        strictEqual(kv2.type, 'eventstore')

        await waitForPeers(ipfs1, [peerId2], databaseId)
        await waitForPeers(ipfs2, [peerId1], databaseId)

        await kv1.add('init')
        await kv1.add(true)
        await kv1.add('hello')
        await kv1.add('friend')
        await kv1.add(12345)
        await kv1.add('empty')
        await kv1.add('')
        await kv1.add('friend33')

        // Wait until kv2 has replicated all 8 writes from kv1.
        await waitFor(() => updateCount, () => 8)

        // onUpdate test
        strictEqual(updateCount, 8)

        // iterator() test: entries arrive newest-first, so unshift
        // rebuilds insertion order.
        const all2 = []
        console.time('all2')
        for await (const event of kv2.iterator()) {
          all2.unshift(event)
        }
        console.timeEnd('all2')
        deepStrictEqual(all2, [
          'init',
          true,
          'hello',
          'friend',
          12345,
          'empty',
          '',
          'friend33'
        ])

        // all() must agree with the iterator
        const all1 = await kv2.all()
        deepStrictEqual(all1, [
          'init',
          true,
          'hello',
          'friend',
          12345,
          'empty',
          '',
          'friend33'
        ])
      })
    })

    describe('load database', () => {
      it('returns all entries in the database', async () => {
        let updateCount = 0

        const onUpdate = (entry) => {
          ++updateCount
        }

        kv1 = await EventStore({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        kv2 = await EventStore({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })

        kv2.events.on('update', onUpdate)

        await waitForPeers(ipfs1, [peerId2], databaseId)
        await waitForPeers(ipfs2, [peerId1], databaseId)

        await kv1.add('init')
        await kv1.add(true)
        await kv1.add('hello')
        await kv1.add('friend')
        await kv1.add(12345)
        await kv1.add('empty')
        await kv1.add('')
        await kv1.add('friend33')

        await waitFor(() => updateCount, () => 8)

        // onUpdate test
        strictEqual(updateCount, 8)

        // Close and reopen both stores: the reopened instances must load
        // the full log back from storage.
        await kv1.close()
        await kv2.close()

        kv1 = await EventStore({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        kv2 = await EventStore({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })

        const all2 = []
        console.time('all2')
        for await (const event of kv2.iterator()) {
          all2.unshift(event)
        }
        console.timeEnd('all2')
        deepStrictEqual(all2, [
          'init',
          true,
          'hello',
          'friend',
          12345,
          'empty',
          '',
          'friend33'
        ])

        const all1 = await kv2.all()
        deepStrictEqual(all1, [
          'init',
          true,
          'hello',
          'friend',
          12345,
          'empty',
          '',
          'friend33'
        ])
      })
    })
  })
})

View File

@ -1,369 +0,0 @@
// import assert from 'assert'
// import mapSeries from 'p-map-series'
// import rmrf from 'rimraf'
// import path from 'path'
// import OrbitDB from '../src/OrbitDB.js'
// // Include test utilities
// import {
// config,
// startIpfs,
// stopIpfs,
// testAPIs,
// } from 'orbit-db-test-utils'
// const last = arr => arr[arr.length - 1]
// const dbPath = './orbitdb/tests/eventlog'
// Object.keys(testAPIs).forEach(API => {
// describe(`orbit-db - Log Database (${API})`, function() {
// this.timeout(config.timeout)
// let ipfsd, ipfs, orbitdb1
// before(async () => {
// rmrf.sync(dbPath)
// ipfsd = await startIpfs(API, config.daemon1)
// ipfs = ipfsd.api
// orbitdb1 = await OrbitDB.createInstance(ipfs, { directory: path.join(dbPath, '1') })
// })
// after(async () => {
// if(orbitdb1)
// await orbitdb1.stop()
// if (ipfsd)
// await stopIpfs(ipfsd)
// })
// describe('Eventlog', function () {
// it('creates and opens a database', async () => {
// const db = await orbitdb1.eventlog('log database')
// assert.notEqual(db, null)
// assert.equal(db.type, 'eventlog')
// assert.equal(db.dbname, 'log database')
// await db.drop()
// })
// it('returns 0 items when it\'s a fresh database', async () => {
// const db = await orbitdb1.eventlog('log database')
// const items = db.iterator({ limit: -1 }).collect()
// assert.equal(items.length, 0)
// await db.drop()
// })
// it('returns the added entry\'s hash, 1 entry', async () => {
// const db = await orbitdb1.eventlog('first database')
// const hash = await db.add('hello1')
// const items = db.iterator({ limit: -1 }).collect()
// assert.notEqual(hash, null)
// assert.equal(hash, last(items).hash)
// assert.equal(items.length, 1)
// await db.drop()
// })
// it('returns the added entry\'s hash, 2 entries', async () => {
// const db = await orbitdb1.eventlog('first database')
// await db.load()
// await db.add('hello1')
// const prevHash = db.iterator().collect()[0].hash
// const hash = await db.add('hello2')
// const items = db.iterator({ limit: -1 }).collect()
// assert.equal(items.length, 2)
// assert.notEqual(hash, null)
// assert.notEqual(hash, prevHash)
// assert.equal(hash, last(items).hash)
// await db.drop()
// })
// it('adds five items', async () => {
// const db = await orbitdb1.eventlog('second database')
// await mapSeries([1, 2, 3, 4, 5], (i) => db.add('hello' + i))
// const items = db.iterator({ limit: -1 }).collect()
// assert.equal(items.length, 5)
// assert.equal(items[0].payload.value, 'hello1')
// assert.equal(last(items.map((f) => f.payload.value)), 'hello5')
// await db.drop()
// })
// it('adds an item that is > 256 bytes', async () => {
// const db = await orbitdb1.eventlog('third database')
// let msg = Buffer.alloc(1024)
// msg.fill('a')
// const hash = await db.add(msg.toString())
// assert.notEqual(hash, null)
// assert.equal(hash.startsWith('zd'), true)
// assert.equal(hash.length, 49)
// await db.drop()
// })
// })
// describe('Iterator', function() {
// let hashes = []
// const itemCount = 5
// let db
// before(async () => {
// hashes = []
// db = await orbitdb1.eventlog('iterator tests')
// hashes = await mapSeries([0, 1, 2, 3, 4], (i) => db.add('hello' + i))
// })
// describe('Defaults', function() {
// it('returns an iterator', () => {
// const iter = db.iterator()
// const next = iter.next().value
// assert.notEqual(iter, null)
// assert.notEqual(next, null)
// })
// it('returns an item with the correct structure', () => {
// const iter = db.iterator()
// const next = iter.next().value
// assert.notEqual(next, null)
// assert.equal(next.hash.startsWith('zd'), true)
// assert.equal(next.payload.key, null)
// assert.equal(next.payload.value, 'hello4')
// })
// it('implements Iterator interface', () => {
// const iter = db.iterator({ limit: -1 })
// let messages = []
// for(let i of iter)
// messages.push(i.key)
// assert.equal(messages.length, hashes.length)
// })
// it('returns 1 item as default', () => {
// const iter = db.iterator()
// const first = iter.next().value
// const second = iter.next().value
// assert.equal(first.hash, hashes[hashes.length - 1])
// assert.equal(second, null)
// assert.equal(first.payload.value, 'hello4')
// })
// it('returns items in the correct order', () => {
// const amount = 3
// const iter = db.iterator({ limit: amount })
// let i = hashes.length - amount
// for(let item of iter) {
// assert.equal(item.payload.value, 'hello' + i)
// i ++
// }
// })
// })
// describe('Collect', function() {
// it('returns all items', () => {
// const messages = db.iterator({ limit: -1 }).collect()
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0].payload.value, 'hello0')
// assert.equal(messages[messages.length - 1].payload.value, 'hello4')
// })
// it('returns 1 item', () => {
// const messages = db.iterator().collect()
// assert.equal(messages.length, 1)
// })
// it('returns 3 items', () => {
// const messages = db.iterator({ limit: 3 }).collect()
// assert.equal(messages.length, 3)
// })
// })
// describe('Options: limit', function() {
// it('returns 1 item when limit is 0', () => {
// const iter = db.iterator({ limit: 0 })
// const first = iter.next().value
// const second = iter.next().value
// assert.equal(first.hash, last(hashes))
// assert.equal(second, null)
// })
// it('returns 1 item when limit is 1', () => {
// const iter = db.iterator({ limit: 1 })
// const first = iter.next().value
// const second = iter.next().value
// assert.equal(first.hash, last(hashes))
// assert.equal(second, null)
// })
// it('returns 3 items', () => {
// const iter = db.iterator({ limit: 3 })
// const first = iter.next().value
// const second = iter.next().value
// const third = iter.next().value
// const fourth = iter.next().value
// assert.equal(first.hash, hashes[hashes.length - 3])
// assert.equal(second.hash, hashes[hashes.length - 2])
// assert.equal(third.hash, hashes[hashes.length - 1])
// assert.equal(fourth, null)
// })
// it('returns all items', () => {
// const messages = db.iterator({ limit: -1 })
// .collect()
// .map((e) => e.hash)
// messages.reverse()
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0], hashes[hashes.length - 1])
// })
// it('returns all items when limit is bigger than -1', () => {
// const messages = db.iterator({ limit: -300 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0], hashes[0])
// })
// it('returns all items when limit is bigger than number of items', () => {
// const messages = db.iterator({ limit: 300 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0], hashes[0])
// })
// })
// describe('Option: ranges', function() {
// describe('gt & gte', function() {
// it('returns 1 item when gte is the head', () => {
// const messages = db.iterator({ gte: last(hashes), limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 1)
// assert.equal(messages[0], last(hashes))
// })
// it('returns 0 items when gt is the head', () => {
// const messages = db.iterator({ gt: last(hashes) }).collect()
// assert.equal(messages.length, 0)
// })
// it('returns 2 item when gte is defined', () => {
// const gte = hashes[hashes.length - 2]
// const messages = db.iterator({ gte: gte, limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 2)
// assert.equal(messages[0], hashes[hashes.length - 2])
// assert.equal(messages[1], hashes[hashes.length - 1])
// })
// it('returns all items when gte is the root item', () => {
// const messages = db.iterator({ gte: hashes[0], limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0], hashes[0])
// assert.equal(messages[messages.length - 1], last(hashes))
// })
// it('returns items when gt is the root item', () => {
// const messages = db.iterator({ gt: hashes[0], limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, itemCount - 1)
// assert.equal(messages[0], hashes[1])
// assert.equal(messages[3], last(hashes))
// })
// it('returns items when gt is defined', () => {
// const messages = db.iterator({ limit: -1})
// .collect()
// .map((e) => e.hash)
// const gt = messages[2]
// const messages2 = db.iterator({ gt: gt, limit: 100 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages2.length, 2)
// assert.equal(messages2[0], messages[messages.length - 2])
// assert.equal(messages2[1], messages[messages.length - 1])
// })
// })
// describe('lt & lte', function() {
// it('returns one item after head when lt is the head', () => {
// const messages = db.iterator({ lt: last(hashes) })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 1)
// assert.equal(messages[0], hashes[hashes.length - 2])
// })
// it('returns all items when lt is head and limit is -1', () => {
// const messages = db.iterator({ lt: last(hashes), limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, hashes.length - 1)
// assert.equal(messages[0], hashes[0])
// assert.equal(messages[messages.length - 1], hashes[hashes.length - 2])
// })
// it('returns 3 items when lt is head and limit is 3', () => {
// const messages = db.iterator({ lt: last(hashes), limit: 3 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 3)
// assert.equal(messages[0], hashes[hashes.length - 4])
// assert.equal(messages[2], hashes[hashes.length - 2])
// })
// it('returns null when lt is the root item', () => {
// const messages = db.iterator({ lt: hashes[0] }).collect()
// assert.equal(messages.length, 0)
// })
// it('returns one item when lte is the root item', () => {
// const messages = db.iterator({ lte: hashes[0] })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 1)
// assert.equal(messages[0], hashes[0])
// })
// it('returns all items when lte is the head', () => {
// const messages = db.iterator({ lte: last(hashes), limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, itemCount)
// assert.equal(messages[0], hashes[0])
// assert.equal(messages[4], last(hashes))
// })
// it('returns 3 items when lte is the head', () => {
// const messages = db.iterator({ lte: last(hashes), limit: 3 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 3)
// assert.equal(messages[0], hashes[hashes.length - 3])
// assert.equal(messages[1], hashes[hashes.length - 2])
// assert.equal(messages[2], last(hashes))
// })
// })
// })
// })
// })
// })

View File

@ -1,283 +0,0 @@
import { deepStrictEqual, strictEqual } from 'assert'
import rimraf from 'rimraf'
import { Log, Entry } from '../src/oplog/index.js'
import { Identities } from '../src/identities/index.js'
import KeyStore from '../src/key-store.js'
import { Feed, Database } from '../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../src/storage/index.js'
// Test utils
import { config, testAPIs, getIpfsPeerId, waitForPeers, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import connectPeers from './utils/connect-nodes.js'
import waitFor from './utils/wait-for.js'
import { createTestIdentities, cleanUpTestIdentities } from './fixtures/orbit-db-identity-keys.js'
const { sync: rmrf } = rimraf
const { createIdentity } = Identities
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }
// Run the full Feed suite once per configured IPFS implementation (e.g. js-ipfs, go-ipfs).
Object.keys(testAPIs).forEach((IPFS) => {
  describe('Feed Database (' + IPFS + ')', function () {
    this.timeout(config.timeout)

    // Two IPFS daemons/nodes so replication between peers can be exercised.
    let ipfsd1, ipfsd2
    let ipfs1, ipfs2
    // NOTE(review): `keystore` is closed in after() but never assigned in this
    // file — presumably a leftover from an earlier version; confirm before removing.
    let keystore
    let peerId1, peerId2
    let identities1, identities2
    let testIdentity1, testIdentity2
    // The two Feed database instances under test (one per peer).
    let kv1, kv2

    const databaseId = 'feed-AAA'

    // Only entries signed by testIdentity1 may be appended; kv2 replicates but
    // cannot write.
    const accessController = {
      canAppend: async (entry) => {
        const identity = await identities1.getIdentity(entry.identity)
        return identity.id === testIdentity1.id
      }
    }

    before(async () => {
      // Start two IPFS instances
      ipfsd1 = await startIpfs(IPFS, config.daemon1)
      ipfsd2 = await startIpfs(IPFS, config.daemon2)
      ipfs1 = ipfsd1.api
      ipfs2 = ipfsd2.api
      // Dial the nodes together so pubsub/replication can work.
      await connectPeers(ipfs1, ipfs2)

      // Get the peer IDs
      peerId1 = await getIpfsPeerId(ipfs1)
      peerId2 = await getIpfsPeerId(ipfs2)

      // Create one identity per node; remove any stale on-disk identity state
      // from previous runs.
      const [identities, testIdentities] = await createTestIdentities(ipfs1, ipfs2)
      identities1 = identities[0]
      identities2 = identities[1]
      testIdentity1 = testIdentities[0]
      testIdentity2 = testIdentities[1]

      rmrf(testIdentity1.id)
      rmrf(testIdentity2.id)
    })

    // Drop and close both databases between tests so each test starts from an
    // empty log.
    afterEach(async () => {
      if (kv1) {
        await kv1.drop()
        await kv1.close()
      }
      if (kv2) {
        await kv2.drop()
        await kv2.close()
      }
    })

    // Tear down identities, IPFS daemons, and on-disk state.
    after(async () => {
      await cleanUpTestIdentities([identities1, identities2])

      if (ipfsd1) {
        await stopIpfs(ipfsd1)
      }
      if (ipfsd2) {
        await stopIpfs(ipfsd2)
      }
      if (keystore) {
        await keystore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
      if (testIdentity2) {
        rmrf(testIdentity2.id)
      }
    })

    describe('using database', () => {
      it('returns all entries in the database', async () => {
        // Counts 'update' events observed on kv2 as kv1's writes replicate over.
        let updateCount = 0
        // let syncCount = 0

        const onUpdate = (entry) => {
          ++updateCount
        }
        // const onSync = (entry) => {
        //   ++syncCount
        // }
        // Swallow replication errors; this test only asserts on data and events.
        const onError = () => {
        }

        // Open the same database id on both peers.
        kv1 = await Feed({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        kv2 = await Feed({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })

        // kv1.events.on('update', onUpdate)
        kv2.events.on('update', onUpdate)
        // kv1.events.on('sync', onSync)
        // kv2.events.on('sync', onSync)
        kv1.events.on('error', onError)
        kv2.events.on('error', onError)

        strictEqual(kv1.type, 'feed')
        strictEqual(kv2.type, 'feed')

        // Wait until both peers see each other on the database's pubsub topic
        // before writing, so no updates are missed.
        await waitForPeers(ipfs1, [peerId2], databaseId)
        await waitForPeers(ipfs2, [peerId1], databaseId)

        // send a garbage message to pubsub to test onError firing
        // await ipfs1.pubsub.publish(databaseId, Uint8Array.from([1, 2, 3, 4, 5]))

        // 8 writes of mixed value types (string, boolean, number, empty string).
        await kv1.add('init')
        await kv1.add(true)
        await kv1.add('hello')
        await kv1.add('friend')
        await kv1.add(12345)
        await kv1.add('empty')
        await kv1.add('')
        await kv1.add('friend33')
        // const hash = await kv1.add('friend33')
        // const lastEntry = await kv1.get(hash)

        // // sync() test
        // console.time('sync')
        // await kv2.sync(lastEntry.bytes)
        // console.timeEnd('sync')

        // Block until all 8 writes have replicated to kv2.
        await waitFor(() => updateCount, () => 8)

        // onUpdate test
        strictEqual(updateCount, 8)

        // // write access test
        // let errorMessage
        // try {
        //   await kv2.set('hello', 'friend4')
        // } catch (e) {
        //   errorMessage = e.message
        // } finally {
        //   const valueNotUpdated = await kv2.get('hello')
        //   strictEqual(valueNotUpdated, 'friend3')
        //   notStrictEqual(errorMessage, undefined)
        //   strictEqual(errorMessage.startsWith('Could not append entry:\nKey'), true)
        // }

        // all() test
        // The iterator yields newest-first; unshift reverses back to insertion order.
        const all2 = []
        console.time('all2')
        for await (const event of kv2.iterator()) {
          all2.unshift(event)
        }
        console.timeEnd('all2')
        deepStrictEqual(all2, [
          'init',
          true,
          'hello',
          'friend',
          12345,
          'empty',
          '',
          'friend33'
        ])

        // all() convenience method must agree with manual iteration.
        const all1 = await kv2.all()
        deepStrictEqual(all1, [
          'init',
          true,
          'hello',
          'friend',
          12345,
          'empty',
          '',
          'friend33'
        ])

        // onError test
        // notStrictEqual(error, undefined)
        // strictEqual(error.message, 'CBOR decode error: too many terminals, data makes no sense')
      })
    })

    describe('load database', () => {
      it('returns all entries in the database', async () => {
        let updateCount = 0
        // let syncCount = 0

        const onUpdate = (entry) => {
          ++updateCount
        }
        // const onSync = (entry) => {
        //   ++syncCount
        // }
        const onError = () => {
        }

        kv1 = await Feed({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        kv2 = await Feed({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })

        // kv1.events.on('update', onUpdate)
        kv2.events.on('update', onUpdate)
        // kv1.events.on('sync', onSync)
        // kv2.events.on('sync', onSync)
        kv1.events.on('error', onError)
        kv2.events.on('error', onError)

        await waitForPeers(ipfs1, [peerId2], databaseId)
        await waitForPeers(ipfs2, [peerId1], databaseId)

        // 8 adds + 3 deletes = 11 operations; the deletes remove `true`,
        // 'empty' and 'friend33', leaving 5 live entries.
        await kv1.add('init')
        const hashA = await kv1.add(true)
        await kv1.add('hello')
        await kv1.add('friend')
        await kv1.add(12345)
        await kv1.del(hashA)
        const hashB = await kv1.add('empty')
        await kv1.add('')
        const hash = await kv1.add('friend33')
        await kv1.del(hashB)
        await kv1.del(hash)
        // const hashX = await kv1.del(hash)
        // const lastEntry = await kv1.log.get(hashX)

        // sync() test
        // console.time('sync')
        // await kv2.sync(lastEntry.bytes)
        // console.timeEnd('sync')

        // Every operation (adds and deletes) emits an update on the replica.
        await waitFor(() => updateCount, () => 11)

        // onUpdate test
        strictEqual(updateCount, 11)

        // Close and reopen both databases: the entries below must come from
        // persisted state, not from in-memory caches.
        await kv1.close()
        await kv2.close()

        // // await sleep(1000) // give some time for ipfs peers to sync
        kv1 = await Feed({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        kv2 = await Feed({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })

        // all() test
        const all2 = []
        console.time('all2')
        for await (const event of kv2.iterator()) {
          all2.unshift(event)
        }
        console.timeEnd('all2')
        deepStrictEqual(all2, [
          'init',
          'hello',
          'friend',
          12345,
          ''
        ])

        const all1 = await kv2.all()
        deepStrictEqual(all1, [
          'init',
          'hello',
          'friend',
          12345,
          ''
        ])
      })
    })
  })
})

View File

@ -1,412 +0,0 @@
// import assert from 'assert'
// import mapSeries from 'p-map-series'
// import rmrf from 'rimraf'
// import path from 'path'
// import OrbitDB from '../src/OrbitDB.js'
// // Include test utilities
// import {
// config,
// startIpfs,
// stopIpfs,
// testAPIs,
// } from 'orbit-db-test-utils'
// const last = arr => arr[arr.length - 1]
// const dbPath = './orbitdb/tests/feed'
// Object.keys(testAPIs).forEach(API => {
// describe(`orbit-db - Feed Database (${API})`, function() {
// this.timeout(config.timeout)
// let ipfsd, ipfs, orbitdb1, address
// before(async () => {
// rmrf.sync(dbPath)
// ipfsd = await startIpfs(API, config.daemon1)
// ipfs = ipfsd.api
// orbitdb1 = await OrbitDB.createInstance(ipfs, { directory: path.join(dbPath, '1') })
// })
// after(async () => {
// if(orbitdb1)
// await orbitdb1.stop()
// if (ipfsd)
// await stopIpfs(ipfsd)
// })
// describe('Feed', function() {
// it('creates and opens a database', async () => {
// const db = await orbitdb1.feed('feed database')
// assert.notEqual(db, null)
// assert.equal(db.type, 'feed')
// assert.equal(db.dbname, 'feed database')
// await db.drop()
// })
// it('returns 0 items when it\'s a fresh database', async () => {
// const db = await orbitdb1.feed('feed database')
// const items = db.iterator({ limit: -1 }).collect()
// assert.equal(items.length, 0)
// await db.drop()
// })
// it('returns the added entry\'s hash, 1 entry', async () => {
// const db = await orbitdb1.feed('first')
// address = db.address.toString()
// const hash = await db.add('hello1')
// const items = db.iterator({ limit: -1 }).collect()
// assert.notEqual(hash, null)
// assert.equal(hash, last(items).hash)
// assert.equal(items.length, 1)
// await db.drop()
// })
// it('returns the added entry\'s hash, 2 entries', async () => {
// const db = await orbitdb1.feed(address)
// await db.load()
// await db.add('hello1')
// const prevHash = db.iterator().collect()[0].hash
// const hash = await db.add('hello2')
// const items = db.iterator({ limit: -1 }).collect()
// assert.equal(items.length, 2)
// assert.notEqual(hash, null)
// assert.notEqual(hash, prevHash)
// assert.equal(hash, last(items).hash)
// await db.drop()
// })
// it('adds five items', async () => {
// const db = await orbitdb1.feed('second')
// await mapSeries([1, 2, 3, 4, 5], (i) => db.add('hello' + i))
// const items = db.iterator({ limit: -1 }).collect()
// assert.equal(items.length, 5)
// assert.equal(items[0].payload.value, 'hello1')
// assert.equal(items[items.length - 1].payload.value, 'hello5')
// await db.drop()
// })
// it('adds an item that is > 256 bytes', async () => {
// const db = await orbitdb1.feed('third')
// let msg = Buffer.alloc(1024)
// msg.fill('a')
// const hash = await db.add(msg.toString())
// assert.notEqual(hash, null)
// assert.equal(hash.startsWith('zd'), true)
// assert.equal(hash.length, 49)
// await db.drop()
// })
// it('deletes an item when only one item in the database', async () => {
// const db = await orbitdb1.feed('fourth')
// const hash = await db.add('hello3')
// const delopHash = await db.remove(hash)
// const items = db.iterator().collect()
// assert.equal(delopHash.startsWith('zd'), true)
// assert.equal(items.length, 0)
// await db.drop()
// })
// it('deletes an item when two items in the database', async () => {
// const db = await orbitdb1.feed('fifth')
// await db.add('hello1')
// const hash = await db.add('hello2')
// await db.remove(hash)
// const items = db.iterator({ limit: -1 }).collect()
// assert.equal(items.length, 1)
// assert.equal(items[0].payload.value, 'hello1')
// await db.drop()
// })
// it('deletes an item between adds', async () => {
// const db = await orbitdb1.feed('sixth')
// const hash = await db.add('hello1')
// await db.add('hello2')
// await db.remove(hash)
// await db.add('hello3')
// const items = db.iterator({ limit: -1 }).collect()
// assert.equal(items.length, 2)
// const firstItem = items[0]
// const secondItem = items[1]
// assert.equal(firstItem.hash.startsWith('zd'), true)
// assert.equal(firstItem.payload.key, null)
// assert.equal(firstItem.payload.value, 'hello2')
// assert.equal(secondItem.payload.value, 'hello3')
// await db.drop()
// })
// })
// describe('Iterator', function() {
// let db
// let hashes = []
// const itemCount = 5
// before(async () => {
// hashes = []
// db = await orbitdb1.feed('feed-iterator')
// hashes = await mapSeries([0, 1, 2, 3, 4], (i) => db.add('hello' + i))
// })
// describe('Defaults', function() {
// it('returns an iterator', () => {
// const iter = db.iterator()
// const next = iter.next().value
// assert.notEqual(iter, null)
// assert.notEqual(next, null)
// })
// it('returns an item with the correct structure', () => {
// const iter = db.iterator()
// const next = iter.next().value
// assert.notEqual(next, null)
// assert.equal(next.hash.startsWith('zd'), true)
// assert.equal(next.payload.key, null)
// assert.equal(next.payload.value, 'hello4')
// })
// it('implements Iterator interface', () => {
// const iter = db.iterator({ limit: -1 })
// let messages = []
// for(let i of iter)
// messages.push(i.key)
// assert.equal(messages.length, hashes.length)
// })
// it('returns 1 item as default', () => {
// const iter = db.iterator()
// const first = iter.next().value
// const second = iter.next().value
// assert.equal(first.hash, hashes[hashes.length - 1])
// assert.equal(second, null)
// assert.equal(first.payload.value, 'hello4')
// })
// it('returns items in the correct order', () => {
// const amount = 3
// const iter = db.iterator({ limit: amount })
// let i = hashes.length - amount
// for(let item of iter) {
// assert.equal(item.payload.value, 'hello' + i)
// i ++
// }
// })
// })
// describe('Collect', function() {
// it('returns all items', () => {
// const messages = db.iterator({ limit: -1 }).collect()
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0].payload.value, 'hello0')
// assert.equal(messages[messages.length - 1].payload.value, 'hello4')
// })
// it('returns 1 item', () => {
// const messages = db.iterator().collect()
// assert.equal(messages.length, 1)
// })
// it('returns 3 items', () => {
// const messages = db.iterator({ limit: 3 }).collect()
// assert.equal(messages.length, 3)
// })
// })
// describe('Options: limit', function() {
// it('returns 1 item when limit is 0', () => {
// const iter = db.iterator({ limit: 0 })
// const first = iter.next().value
// const second = iter.next().value
// assert.equal(first.hash, last(hashes))
// assert.equal(second, null)
// })
// it('returns 1 item when limit is 1', () => {
// const iter = db.iterator({ limit: 1 })
// const first = iter.next().value
// const second = iter.next().value
// assert.equal(first.hash, last(hashes))
// assert.equal(second, null)
// })
// it('returns 3 items', () => {
// const iter = db.iterator({ limit: 3 })
// const first = iter.next().value
// const second = iter.next().value
// const third = iter.next().value
// const fourth = iter.next().value
// assert.equal(first.hash, hashes[hashes.length - 3])
// assert.equal(second.hash, hashes[hashes.length - 2])
// assert.equal(third.hash, hashes[hashes.length - 1])
// assert.equal(fourth, null)
// })
// it('returns all items', () => {
// const messages = db.iterator({ limit: -1 })
// .collect()
// .map((e) => e.hash)
// messages.reverse()
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0], hashes[hashes.length - 1])
// })
// it('returns all items when limit is bigger than -1', () => {
// const messages = db.iterator({ limit: -300 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0], hashes[0])
// })
// it('returns all items when limit is bigger than number of items', () => {
// const messages = db.iterator({ limit: 300 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0], hashes[0])
// })
// })
// describe('Option: ranges', function() {
// describe('gt & gte', function() {
// it('returns 1 item when gte is the head', () => {
// const messages = db.iterator({ gte: last(hashes), limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 1)
// assert.equal(messages[0], last(hashes))
// })
// it('returns 0 items when gt is the head', () => {
// const messages = db.iterator({ gt: last(hashes) }).collect()
// assert.equal(messages.length, 0)
// })
// it('returns 2 item when gte is defined', () => {
// const gte = hashes[hashes.length - 2]
// const messages = db.iterator({ gte: gte, limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 2)
// assert.equal(messages[0], hashes[hashes.length - 2])
// assert.equal(messages[1], hashes[hashes.length - 1])
// })
// it('returns all items when gte is the root item', () => {
// const messages = db.iterator({ gte: hashes[0], limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, hashes.length)
// assert.equal(messages[0], hashes[0])
// assert.equal(messages[messages.length - 1], last(hashes))
// })
// it('returns items when gt is the root item', () => {
// const messages = db.iterator({ gt: hashes[0], limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, itemCount - 1)
// assert.equal(messages[0], hashes[1])
// assert.equal(messages[3], last(hashes))
// })
// it('returns items when gt is defined', () => {
// const messages = db.iterator({ limit: -1})
// .collect()
// .map((e) => e.hash)
// const gt = messages[2]
// const messages2 = db.iterator({ gt: gt, limit: 100 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages2.length, 2)
// assert.equal(messages2[0], messages[messages.length - 2])
// assert.equal(messages2[1], messages[messages.length - 1])
// })
// })
// describe('lt & lte', function() {
// it('returns one item after head when lt is the head', () => {
// const messages = db.iterator({ lt: last(hashes) })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 1)
// assert.equal(messages[0], hashes[hashes.length - 2])
// })
// it('returns all items when lt is head and limit is -1', () => {
// const messages = db.iterator({ lt: last(hashes), limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, hashes.length - 1)
// assert.equal(messages[0], hashes[0])
// assert.equal(messages[messages.length - 1], hashes[hashes.length - 2])
// })
// it('returns 3 items when lt is head and limit is 3', () => {
// const messages = db.iterator({ lt: last(hashes), limit: 3 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 3)
// assert.equal(messages[0], hashes[hashes.length - 4])
// assert.equal(messages[2], hashes[hashes.length - 2])
// })
// it('returns null when lt is the root item', () => {
// const messages = db.iterator({ lt: hashes[0] }).collect()
// assert.equal(messages.length, 0)
// })
// it('returns one item when lte is the root item', () => {
// const messages = db.iterator({ lte: hashes[0] })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 1)
// assert.equal(messages[0], hashes[0])
// })
// it('returns all items when lte is the head', () => {
// const messages = db.iterator({ lte: last(hashes), limit: -1 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, itemCount)
// assert.equal(messages[0], hashes[0])
// assert.equal(messages[4], last(hashes))
// })
// it('returns 3 items when lte is the head', () => {
// const messages = db.iterator({ lte: last(hashes), limit: 3 })
// .collect()
// .map((e) => e.hash)
// assert.equal(messages.length, 3)
// assert.equal(messages[0], hashes[hashes.length - 3])
// assert.equal(messages[1], hashes[hashes.length - 2])
// assert.equal(messages[2], last(hashes))
// })
// })
// })
// })
// })
// })

View File

@ -1,314 +0,0 @@
import { deepStrictEqual, strictEqual } from 'assert'
import rimraf from 'rimraf'
import { Log, Entry } from '../src/oplog/index.js'
import { Identities } from '../src/identities/index.js'
import KeyStore from '../src/key-store.js'
import { KeyValue, KeyValuePersisted, Database } from '../src/db/index.js'
import { IPFSBlockStorage, LevelStorage } from '../src/storage/index.js'
// Test utils
import { config, testAPIs, getIpfsPeerId, waitForPeers, startIpfs, stopIpfs } from 'orbit-db-test-utils'
import connectPeers from './utils/connect-nodes.js'
import waitFor from './utils/wait-for.js'
import { createTestIdentities, cleanUpTestIdentities } from './fixtures/orbit-db-identity-keys.js'
const { sync: rmrf } = rimraf
const { createIdentity } = Identities
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }
// Run the KeyValue suite once per configured IPFS implementation.
Object.keys(testAPIs).forEach((IPFS) => {
  describe('KeyValue Database (' + IPFS + ')', function () {
    this.timeout(config.timeout)

    // Two IPFS daemons/nodes so replication between peers can be exercised.
    let ipfsd1, ipfsd2
    let ipfs1, ipfs2
    // NOTE(review): `keystore` is closed in after() but never assigned here —
    // presumably leftover; confirm before removing.
    let keystore
    let peerId1, peerId2
    let identities1, identities2
    let testIdentity1, testIdentity2
    // The two KeyValue database instances under test (one per peer).
    let kv1, kv2

    const databaseId = 'kv-AAA'

    // Only entries signed by testIdentity1 may be appended; kv2 replicates but
    // cannot write.
    const accessController = {
      canAppend: async (entry) => {
        const identity = await identities1.getIdentity(entry.identity)
        return identity.id === testIdentity1.id
      }
    }

    before(async () => {
      // Start two IPFS instances
      ipfsd1 = await startIpfs(IPFS, config.daemon1)
      ipfsd2 = await startIpfs(IPFS, config.daemon2)
      ipfs1 = ipfsd1.api
      ipfs2 = ipfsd2.api
      // Dial the nodes together so pubsub/replication can work.
      await connectPeers(ipfs1, ipfs2)

      // Get the peer IDs
      peerId1 = await getIpfsPeerId(ipfs1)
      peerId2 = await getIpfsPeerId(ipfs2)

      // Create one identity per node; remove any stale on-disk identity state.
      const [identities, testIdentities] = await createTestIdentities(ipfs1, ipfs2)
      identities1 = identities[0]
      identities2 = identities[1]
      testIdentity1 = testIdentities[0]
      testIdentity2 = testIdentities[1]

      rmrf(testIdentity1.id)
      rmrf(testIdentity2.id)
    })

    // Tear down identities, IPFS daemons, and on-disk state.
    after(async () => {
      await cleanUpTestIdentities([identities1, identities2])

      if (ipfsd1) {
        await stopIpfs(ipfsd1)
      }
      if (ipfsd2) {
        await stopIpfs(ipfsd2)
      }
      if (keystore) {
        await keystore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
      if (testIdentity2) {
        rmrf(testIdentity2.id)
      }
    })

    // Drop and close both databases between tests so each test starts empty.
    afterEach(async () => {
      if (kv1) {
        await kv1.drop()
        await kv1.close()
      }
      if (kv2) {
        await kv2.drop()
        await kv2.close()
      }
    })

    describe('using database', () => {
      it('returns all entries in the database', async () => {
        // let error
        // Counts 'update' events observed on kv2 as kv1's writes replicate over.
        let updateCount = 0
        // const syncCount = 0

        const onUpdate = (entry) => {
          ++updateCount
        }
        // const onSync = (entry) => {
        //   ++syncCount
        // }
        // Swallow replication errors; this test only asserts on data and events.
        const onError = () => {
          // error = err
        }

        // Using the persisted (index-backed) variant of the KeyValue store.
        // kv1 = await KeyValue({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        // kv2 = await KeyValue({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })
        kv1 = await KeyValuePersisted({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        kv2 = await KeyValuePersisted({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })

        // kv1.events.on('update', onUpdate)
        kv2.events.on('update', onUpdate)
        // kv1.events.on('sync', onSync)
        // kv2.events.on('sync', onSync)
        kv1.events.on('error', onError)
        kv2.events.on('error', onError)

        strictEqual(kv1.type, 'kv')
        strictEqual(kv2.type, 'kv')

        // Wait until both peers see each other on the database's pubsub topic
        // before writing, so no updates are missed.
        await waitForPeers(ipfs1, [peerId2], databaseId)
        await waitForPeers(ipfs2, [peerId1], databaseId)

        // send a garbage message to pubsub to test onError firing
        // await ipfs1.pubsub.publish(databaseId, Uint8Array.from([1, 2, 3, 4, 5]))

        // 8 operations mixing set/del; final state: init=true, hello='friend3'.
        await kv1.set('init', true)
        await kv1.set('hello', 'friend')
        await kv1.del('hello')
        await kv1.set('hello', 'friend2')
        await kv1.del('hello')
        await kv1.set('empty', '')
        await kv1.del('empty')
        await kv1.set('hello', 'friend3')
        // const hash = await kv1.set('hello', 'friend3')
        // const lastEntry = await kv1.database.log.get(hash)

        // sync() test
        // console.time('sync')
        // await kv2.sync(lastEntry.bytes)
        // console.timeEnd('sync')

        // Block until all 8 operations have replicated to kv2.
        await waitFor(() => updateCount, () => 8)

        // update event test
        strictEqual(updateCount, 8)

        // sync event test
        // strictEqual(syncCount, 8)

        // write access test
        // let errorMessage
        // try {
        //   await kv2.set('hello', 'friend4')
        // } catch (e) {
        //   errorMessage = e.message
        // } finally {
        //   const valueNotUpdated = await kv2.get('hello')
        //   strictEqual(valueNotUpdated, 'friend3')
        //   notStrictEqual(errorMessage, undefined)
        //   strictEqual(errorMessage.startsWith('Could not append entry:\nKey'), true)
        // }

        // get() test: both the replica and the writer resolve the latest value;
        // deleted keys resolve to undefined.
        console.time('get')
        const value0 = await kv2.get('init')
        console.timeEnd('get')
        console.log(value0)
        deepStrictEqual(value0, true)

        const value2 = await kv2.get('hello')
        console.log(value2)
        deepStrictEqual(value2, 'friend3')

        const value1 = await kv1.get('hello')
        console.log(value1)
        deepStrictEqual(value1, 'friend3')

        const value9 = await kv1.get('empty')
        console.log(value9)
        deepStrictEqual(value9, undefined)

        // all() test
        const all2 = []
        console.time('all2')
        for await (const keyValue of kv2.iterator()) {
          console.log('>', keyValue)
          all2.push(keyValue)
        }
        console.timeEnd('all2')
        deepStrictEqual(all2, [
          { key: 'hello', value: 'friend3' },
          { key: 'init', value: true }
        ])

        const all1 = []
        console.time('all1')
        for await (const keyValue of kv1.iterator()) {
          console.log('>', keyValue)
          all1.push(keyValue)
        }
        console.timeEnd('all1')
        deepStrictEqual(all1, [
          { key: 'hello', value: 'friend3' },
          { key: 'init', value: true }
        ])

        // onError test
        // notStrictEqual(error, undefined)
        // strictEqual(error.message, 'CBOR decode error: too many terminals, data makes no sense')
      })
    })

    describe('load database', () => {
      it('returns all entries in the database', async () => {
        let updateCount = 0
        // let syncCount = 0

        const onUpdate = (entry) => {
          ++updateCount
        }
        // const onSync = (entry) => {
        //   ++syncCount
        // }

        // kv1 = await KeyValue({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        // kv2 = await KeyValue({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })
        kv1 = await KeyValuePersisted({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        kv2 = await KeyValuePersisted({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })

        // kv1.events.on('update', onUpdate)
        kv2.events.on('update', onUpdate)
        // kv1.events.on('sync', onSync)
        // kv2.events.on('sync', onSync)

        await waitForPeers(ipfs1, [peerId2], databaseId)
        await waitForPeers(ipfs2, [peerId1], databaseId)

        // Same 8-operation sequence as above.
        await kv1.set('init', true)
        await kv1.set('hello', 'friend')
        await kv1.del('hello')
        await kv1.set('hello', 'friend2')
        await kv1.del('hello')
        await kv1.set('empty', '')
        await kv1.del('empty')
        await kv1.set('hello', 'friend3')
        // const hash = await kv1.set('hello', 'friend3')
        // const lastEntry = await kv1.log.get(hash)

        // sync() test
        // console.time('sync')
        // await kv2.sync(lastEntry.bytes)
        // console.timeEnd('sync')

        await waitFor(() => updateCount, () => 8)
        strictEqual(updateCount, 8)

        // Close and reopen both databases: the reads below must come from
        // persisted state, not from in-memory caches.
        await kv1.close()
        await kv2.close()

        // kv1 = await KeyValue({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        // kv2 = await KeyValue({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })
        kv1 = await KeyValuePersisted({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
        kv2 = await KeyValuePersisted({ KeyValue: KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })

        console.time('get')
        const value0 = await kv2.get('init')
        console.timeEnd('get')
        console.log(value0)
        deepStrictEqual(value0, true)

        const value2 = await kv2.get('hello')
        console.log(value2)
        deepStrictEqual(value2, 'friend3')

        const value1 = await kv1.get('hello')
        console.log(value1)
        deepStrictEqual(value1, 'friend3')

        const value9 = await kv1.get('empty')
        console.log(value9)
        deepStrictEqual(value9, undefined)

        const all2 = []
        console.time('all2')
        for await (const keyValue of kv2.iterator()) {
          console.log('>', keyValue)
          all2.push(keyValue)
        }
        console.timeEnd('all2')
        deepStrictEqual(all2, [
          { key: 'hello', value: 'friend3' },
          { key: 'init', value: true }
        ])

        const all1 = []
        console.time('all1')
        for await (const keyValue of kv1.iterator()) {
          console.log('>', keyValue)
          all1.push(keyValue)
        }
        console.timeEnd('all1')
        deepStrictEqual(all1, [
          { key: 'hello', value: 'friend3' },
          { key: 'init', value: true }
        ])
      })
    })
  })
})

View File

@ -1,138 +0,0 @@
// import assert from 'assert'
// import rmrf from 'rimraf'
// import path from 'path'
// import OrbitDB from '../src/OrbitDB.js'
// // Include test utilities
// import {
// config,
// startIpfs,
// stopIpfs,
// testAPIs,
// } from 'orbit-db-test-utils'
// const dbPath = './orbitdb/tests/kvstore'
// Object.keys(testAPIs).forEach(API => {
// describe(`orbit-db - Key-Value Database (${API})`, function() {
// this.timeout(config.timeout)
// let ipfsd, ipfs, orbitdb1
// before(async () => {
// rmrf.sync(dbPath)
// ipfsd = await startIpfs(API, config.daemon1)
// ipfs = ipfsd.api
// orbitdb1 = await OrbitDB.createInstance(ipfs, { directory: path.join(dbPath, '1') })
// })
// after(() => {
// setTimeout(async () => {
// await orbitdb1.stop()
// await stopIpfs(ipfsd)
// }, 0)
// })
// it('creates and opens a database', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// assert.notEqual(db, null)
// assert.equal(db.type, 'keyvalue')
// assert.equal(db.dbname, 'first kv database')
// await db.drop()
// })
// it('put', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// await db.put('key1', 'hello1')
// const value = db.get('key1')
// assert.equal(value, 'hello1')
// await db.drop()
// })
// it('get', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// await db.put('key1', 'hello2')
// const value = db.get('key1')
// assert.equal(value, 'hello2')
// await db.drop()
// })
// it('put updates a value', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// await db.put('key1', 'hello3')
// await db.put('key1', 'hello4')
// const value = db.get('key1')
// assert.equal(value, 'hello4')
// await db.drop()
// })
// it('set is an alias for put', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// await db.set('key1', 'hello5')
// const value = db.get('key1')
// assert.equal(value, 'hello5')
// await db.drop()
// })
// it('put/get - multiple keys', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// await db.put('key1', 'hello1')
// await db.put('key2', 'hello2')
// await db.put('key3', 'hello3')
// const v1 = db.get('key1')
// const v2 = db.get('key2')
// const v3 = db.get('key3')
// assert.equal(v1, 'hello1')
// assert.equal(v2, 'hello2')
// assert.equal(v3, 'hello3')
// await db.drop()
// })
// it('deletes a key', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// await db.put('key1', 'hello!')
// await db.del('key1')
// const value = db.get('key1')
// assert.equal(value, null)
// await db.drop()
// })
// it('deletes a key after multiple updates', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// await db.put('key1', 'hello1')
// await db.put('key1', 'hello2')
// await db.put('key1', 'hello3')
// await db.del('key1')
// const value = db.get('key1')
// assert.equal(value, null)
// await db.drop()
// })
// it('get - integer value', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// const val = 123
// await db.put('key1', val)
// const v1 = db.get('key1')
// assert.equal(v1, val)
// await db.drop()
// })
// it('get - object value', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// const val = { one: 'first', two: 2 }
// await db.put('key1', val)
// const v1 = db.get('key1')
// assert.deepEqual(v1, val)
// await db.drop()
// })
// it('get - array value', async () => {
// const db = await orbitdb1.keyvalue('first kv database')
// const val = [1, 2, 3, 4, 5]
// await db.put('key1', val)
// const v1 = db.get('key1')
// assert.deepEqual(v1, val)
// await db.drop()
// })
// })
// })

View File

@ -89,6 +89,7 @@ Object.keys(testAPIs).forEach((IPFS) => {
const result = await all(it)
strictEqual([...result].length, 10)
strictEqual(result[0].hash, startHash)
})
it('returns entries with lte and amount', async () => {
@ -146,7 +147,7 @@ Object.keys(testAPIs).forEach((IPFS) => {
let i = 0
for await (const entry of it) {
strictEqual(entry.payload, 'entry' + (73 - i++))
strictEqual(entry.payload, 'entry' + (72 - i++))
}
strictEqual(i, amount)
@ -163,6 +164,7 @@ Object.keys(testAPIs).forEach((IPFS) => {
const result = await all(it)
strictEqual([...result].length, amount)
strictEqual(result[result.length - 1].hash, startHash)
})
it('returns entries with gte and amount', async () => {
@ -175,7 +177,7 @@ Object.keys(testAPIs).forEach((IPFS) => {
let i = 0
for await (const entry of it) {
strictEqual(entry.payload, 'entry' + (79 - i++))
strictEqual(entry.payload, 'entry' + (78 - i++))
}
strictEqual(i, amount)