mirror of
https://github.com/orbitdb/orbitdb.git
synced 2025-03-30 15:08:28 +00:00
feat: Do not include Feed as a store.
This commit is contained in:
parent
3f70f4ddd2
commit
678ba05e50
@ -1,57 +0,0 @@
|
||||
const Feed = async ({ OpLog, Database, ipfs, identity, databaseId, accessController, storage }) => {
|
||||
const database = await Database({ OpLog, ipfs, identity, databaseId, accessController, storage })
|
||||
|
||||
const { addOperation, log } = database
|
||||
|
||||
const put = async (key = null, value) => {
|
||||
return add(value)
|
||||
}
|
||||
|
||||
const add = async (value) => {
|
||||
return addOperation({ op: 'ADD', key: null, value })
|
||||
}
|
||||
|
||||
const del = async (hash) => {
|
||||
return addOperation({ op: 'DEL', key: hash, value: null })
|
||||
}
|
||||
|
||||
const get = async (hash) => {
|
||||
const entry = await log.get(hash)
|
||||
return entry.payload.value
|
||||
}
|
||||
|
||||
const iterator = async function * ({ gt, gte, lt, lte, amount } = {}) {
|
||||
const deleted = {}
|
||||
const it = log.iterator({ gt, gte, lt, lte, amount })
|
||||
for await (const entry of it) {
|
||||
const { hash, payload } = entry
|
||||
const { op, key, value } = payload
|
||||
if (op === 'ADD' && !deleted[hash]) {
|
||||
yield { hash, value }
|
||||
} else if (op === 'DEL' && !deleted[key]) {
|
||||
deleted[key] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const all = async () => {
|
||||
const values = []
|
||||
for await (const entry of iterator()) {
|
||||
values.unshift(entry)
|
||||
}
|
||||
return values
|
||||
}
|
||||
|
||||
return {
|
||||
...database,
|
||||
type: 'feed',
|
||||
put,
|
||||
add,
|
||||
del,
|
||||
get,
|
||||
iterator,
|
||||
all
|
||||
}
|
||||
}
|
||||
|
||||
export default Feed
|
@ -1,383 +0,0 @@
|
||||
import { deepStrictEqual, strictEqual } from 'assert'
|
||||
import mapSeries from 'p-map-series'
|
||||
import rimraf from 'rimraf'
|
||||
import { Log, Entry } from '../../src/oplog/index.js'
|
||||
import { Feed, Database } from '../../src/db/index.js'
|
||||
import { IPFSBlockStorage, LevelStorage } from '../../src/storage/index.js'
|
||||
import { config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
|
||||
import { createTestIdentities, cleanUpTestIdentities } from '../fixtures/orbit-db-identity-keys.js'
|
||||
|
||||
const { sync: rmrf } = rimraf

// Oplog component bundle injected into the Feed/Database factories under test.
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

// Run the full suite once per configured IPFS implementation.
Object.keys(testAPIs).forEach((IPFS) => {
  describe('Feed Database (' + IPFS + ')', function () {
    this.timeout(config.timeout * 2)

    let ipfsd
    let ipfs
    let keystore, signingKeyStore
    let accessController
    let identities1
    let testIdentity1
    let db

    const databaseId = 'feed-AAA'

    before(async () => {
      // Start an IPFS instance and create the test identity used for signing.
      ipfsd = await startIpfs(IPFS, config.daemon1)
      ipfs = ipfsd.api

      const [identities, testIdentities] = await createTestIdentities(ipfs)
      identities1 = identities[0]
      testIdentity1 = testIdentities[0]

      // Remove any identity state left over from a previous run.
      rmrf(testIdentity1.id)
    })

    after(async () => {
      // Tear down identities, the IPFS daemon, key stores and on-disk state.
      await cleanUpTestIdentities([identities1])

      if (ipfsd) {
        await stopIpfs(ipfsd)
      }
      if (keystore) {
        await keystore.close()
      }
      if (signingKeyStore) {
        await signingKeyStore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
    })

    beforeEach(async () => {
      // Fresh feed instance for every test; accessController is undefined here,
      // so the database default access rules apply.
      db = await Feed({ OpLog, Database, ipfs, identity: testIdentity1, databaseId, accessController })
    })

    afterEach(async () => {
      if (db) {
        await db.drop()
        await db.close()
      }
    })

    it('creates a feed', async () => {
      strictEqual(db.databaseId, databaseId)
      strictEqual(db.type, 'feed')
    })

    it('returns 0 items when it\'s a fresh database', async () => {
      const all = []
      for await (const item of db.iterator()) {
        all.unshift(item)
      }

      strictEqual(all.length, 0)
    })

    it('adds a feed item', async () => {
      // Expected hash is deterministic: same identity, same databaseId,
      // same payload always produce the same entry CID.
      const expected = 'zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE'

      const actual = await db.add('init')
      strictEqual(actual, expected)
    })

    it('puts a feed item', async () => {
      // put(null, value) must behave exactly like add(value), hence the
      // identical expected hash as the test above.
      const expected = 'zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE'

      const actual = await db.put(null, 'init')
      strictEqual(actual, expected)
    })

    it('gets a feed item', async () => {
      const expected = 'init'

      const hash = await db.add(expected)

      const actual = await db.get(hash)
      strictEqual(actual, expected)
    })

    it('deletes a feed item', async () => {
      // del() returns the hash of the DEL entry, whose payload value is null.
      const expected = null

      const add = await db.add('delete me')
      const del = await db.del(add)

      const actual = await db.get(del)
      strictEqual(actual, expected)
    })

    it('deletes a non-existent feed item', async () => {
      // Deleting a hash that was never added still records a DEL entry.
      const expected = null

      const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')

      const actual = await db.get(del)
      strictEqual(actual, expected)
    })

    it('returns all feed items', async () => {
      const feed = [
        'init',
        true,
        'hello',
        'friend',
        '12345',
        'empty',
        'friend33'
      ]

      for (const f of feed) {
        await db.add(f)
      }

      const all = await db.all()

      // all() yields oldest-first, matching insertion order.
      deepStrictEqual(all.map(e => e.value), feed)
    })

    describe('Iterator Options', () => {
      // Hashes of the five entries added in beforeEach, in insertion order.
      let hashes = []
      const last = arr => arr[arr.length - 1]
      const first = arr => arr[0]

      beforeEach(async () => {
        hashes = []
        // mapSeries keeps the adds sequential so hashes[i] is 'hello' + i.
        hashes = await mapSeries([0, 1, 2, 3, 4], (i) => db.add('hello' + i))
      })

      describe('amount', () => {
        it('returns one item', async () => {
          // The iterator walks newest-first, so amount: 1 yields the head.
          const expected = ['hello4']

          const all = []
          for await (const record of db.iterator({ amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns two items', async () => {
          const expected = ['hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns three items', async () => {
          const expected = ['hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ amount: 3 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 3)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('sets \'amount\' greater than items available', async () => {
          // Asking for more than exists returns everything, without error.
          const expected = ['hello0', 'hello1', 'hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ amount: 100 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 5)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('sets \'amount\' to 0', async () => {
          const expected = []

          const all = []
          for await (const record of db.iterator({ amount: 0 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 0)
          deepStrictEqual(all.map(e => e.value), expected)
        })
      })

      describe('lt', () => {
        it('returns all items less than head', async () => {
          // 'lt' is exclusive: the head entry itself is not returned.
          const expected = ['hello0', 'hello1', 'hello2', 'hello3']

          const all = []
          for await (const record of db.iterator({ lt: last(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 4)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns one item less than head', async () => {
          const expected = ['hello3']

          const all = []
          for await (const record of db.iterator({ lt: last(hashes), amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns two items less than head', async () => {
          const expected = ['hello2', 'hello3']

          const all = []
          for await (const record of db.iterator({ lt: last(hashes), amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all.map(e => e.value), expected)
        })
      })

      describe('lte', () => {
        it('returns all items less or equal to head', async () => {
          // 'lte' is inclusive: the head entry is part of the result.
          const expected = ['hello0', 'hello1', 'hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ lte: last(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 5)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns one item less than or equal to head', async () => {
          const expected = ['hello4']

          const all = []
          for await (const record of db.iterator({ lte: last(hashes), amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns two items less than or equal to head', async () => {
          const expected = ['hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ lte: last(hashes), amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all.map(e => e.value), expected)
        })
      })

      describe('gt', () => {
        it('returns all items greater than root', async () => {
          // 'gt' is exclusive: the first (root) entry is not returned.
          const expected = ['hello1', 'hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ gt: first(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 4)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns one item greater than root', async () => {
          // With 'gt', amount counts from the oldest matching entry.
          const expected = ['hello1']

          const all = []
          for await (const record of db.iterator({ gt: first(hashes), amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns two items greater than root', async () => {
          const expected = ['hello1', 'hello2']

          const all = []
          for await (const record of db.iterator({ gt: first(hashes), amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all.map(e => e.value), expected)
        })
      })

      describe('gte', () => {
        it('returns all items greater than or equal to root', async () => {
          // 'gte' is inclusive: the root entry is part of the result.
          const expected = ['hello0', 'hello1', 'hello2', 'hello3', 'hello4']

          const all = []
          for await (const record of db.iterator({ gte: first(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 5)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns one item greater than or equal to root', async () => {
          const expected = ['hello0']

          const all = []
          for await (const record of db.iterator({ gte: first(hashes), amount: 1 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 1)
          deepStrictEqual(all.map(e => e.value), expected)
        })

        it('returns two items greater than or equal to root', async () => {
          const expected = ['hello0', 'hello1']

          const all = []
          for await (const record of db.iterator({ gte: first(hashes), amount: 2 })) {
            all.unshift(record)
          }

          strictEqual(all.length, 2)
          deepStrictEqual(all.map(e => e.value), expected)
        })
      })

      // NOTE(review): the async callback on describe() is a Mocha anti-pattern
      // (describe callbacks should be synchronous); harmless here since the
      // callback body only registers tests.
      describe('range', async () => {
        it('returns all items greater than root and less than head', async () => {
          // Combined exclusive bounds: everything strictly between root and head.
          const expected = ['hello1', 'hello2', 'hello3']

          const all = []
          for await (const record of db.iterator({ gt: first(hashes), lt: last(hashes) })) {
            all.unshift(record)
          }

          strictEqual(all.length, 3)
          deepStrictEqual(all.map(e => e.value), expected)
        })
      })
    })
  })
})
|
@ -1,144 +0,0 @@
|
||||
import { deepStrictEqual } from 'assert'
|
||||
import rimraf from 'rimraf'
|
||||
import { Log, Entry } from '../../../src/oplog/index.js'
|
||||
import { Feed, Database } from '../../../src/db/index.js'
|
||||
import { IPFSBlockStorage, LevelStorage } from '../../../src/storage/index.js'
|
||||
import { getIpfsPeerId, waitForPeers, config, testAPIs, startIpfs, stopIpfs } from 'orbit-db-test-utils'
|
||||
import connectPeers from '../../utils/connect-nodes.js'
|
||||
import { createTestIdentities, cleanUpTestIdentities } from '../../fixtures/orbit-db-identity-keys.js'
|
||||
import waitFor from '../../utils/wait-for.js'
|
||||
|
||||
const { sync: rmrf } = rimraf

// Oplog component bundle injected into the Feed/Database factories under test.
const OpLog = { Log, Entry, IPFSBlockStorage, LevelStorage }

// Run the replication suite once per configured IPFS implementation.
Object.keys(testAPIs).forEach((IPFS) => {
  describe('Feed Replication (' + IPFS + ')', function () {
    this.timeout(config.timeout * 2)

    let ipfsd1, ipfsd2
    let ipfs1, ipfs2
    let keystore, signingKeyStore
    let peerId1, peerId2
    let accessController
    let identities1, identities2
    let testIdentity1, testIdentity2
    let db1, db2

    const databaseId = 'feed-AAA'

    before(async () => {
      // Start two IPFS instances
      ipfsd1 = await startIpfs(IPFS, config.daemon1)
      ipfsd2 = await startIpfs(IPFS, config.daemon2)
      ipfs1 = ipfsd1.api
      ipfs2 = ipfsd2.api

      // Dial the two nodes together so pubsub replication can happen.
      await connectPeers(ipfs1, ipfs2)

      // Get the peer IDs
      peerId1 = await getIpfsPeerId(ipfs1)
      peerId2 = await getIpfsPeerId(ipfs2)

      const [identities, testIdentities] = await createTestIdentities(ipfs1, ipfs2)
      identities1 = identities[0]
      identities2 = identities[1]
      testIdentity1 = testIdentities[0]
      testIdentity2 = testIdentities[1]

      // Allow appends from either test identity so both peers can write
      // to the same shared database.
      accessController = {
        canAppend: async (entry) => {
          const identity1 = await identities1.getIdentity(entry.identity)
          const identity2 = await identities2.getIdentity(entry.identity)
          return identity1.id === testIdentity1.id || identity2.id === testIdentity2.id
        }
      }

      // Remove any identity state left over from a previous run.
      rmrf(testIdentity1.id)
      rmrf(testIdentity2.id)
    })

    after(async () => {
      // Tear down identities, both IPFS daemons, key stores and on-disk state.
      await cleanUpTestIdentities([identities1, identities2])

      if (ipfsd1) {
        await stopIpfs(ipfsd1)
      }
      if (ipfsd2) {
        await stopIpfs(ipfsd2)
      }
      if (keystore) {
        await keystore.close()
      }
      if (signingKeyStore) {
        await signingKeyStore.close()
      }
      if (testIdentity1) {
        rmrf(testIdentity1.id)
      }
      if (testIdentity2) {
        rmrf(testIdentity2.id)
      }
    })

    beforeEach(async () => {
      // Two feed instances on the same databaseId — one per peer — so writes
      // on either side replicate to the other.
      db1 = await Feed({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, databaseId, accessController })
      db2 = await Feed({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, databaseId, accessController })
    })

    afterEach(async () => {
      if (db1) {
        await db1.drop()
        await db1.close()
      }
      if (db2) {
        await db2.drop()
        await db2.close()
      }
    })

    it('gets all documents', async () => {
      // Count 'update' events on each side so we can wait until both peers
      // have observed every operation before comparing contents.
      let updateDB1Count = 0
      let updateDB2Count = 0

      const onDB1Update = (entry) => {
        ++updateDB1Count
      }

      const onDB2Update = (entry) => {
        ++updateDB2Count
      }

      db1.events.on('update', onDB1Update)
      db2.events.on('update', onDB2Update)

      // Wait until each node sees the other subscribed to this database's topic.
      await waitForPeers(ipfs1, [peerId2], databaseId)
      await waitForPeers(ipfs2, [peerId1], databaseId)

      // Interleave writes from both peers, including non-string and
      // empty-string values.
      const ops = []
      ops.push(await db1.add('init'))
      ops.push(await db2.add(true))
      ops.push(await db1.add('hello'))
      ops.push(await db2.add('friend'))
      ops.push(await db2.add('12345'))
      ops.push(await db2.add('empty'))
      ops.push(await db2.add(''))
      ops.push(await db2.add('friend33'))

      // Block until both peers have registered one update per operation.
      await waitFor(() => updateDB1Count, () => ops.length)
      await waitFor(() => updateDB2Count, () => ops.length)

      const all1 = []
      for await (const record of db1.iterator()) {
        all1.unshift(record)
      }

      const all2 = []
      for await (const record of db2.iterator()) {
        all2.unshift(record)
      }

      // Both replicas must converge to the same contents in the same order.
      deepStrictEqual(all1, all2)
    })
  })
})
|
Loading…
x
Reference in New Issue
Block a user