Mirror of https://github.com/orbitdb/orbitdb.git (synced 2025-05-28 09:46:41 +00:00)
Commit e6e72fee9e
@@ -1,7 +1,7 @@
import Database from './database.js'
import { EventStore, KeyValue, DocumentStore } from './db/index.js'
import { Log, Entry } from './oplog/index.js'
import { IPFSBlockStorage, LevelStorage } from './storage/index.js'
import { ComposedStorage, IPFSBlockStorage, LevelStorage, LRUStorage } from './storage/index.js'
import KeyStore from './key-store.js'
import { Identities } from './identities/index.js'
import IPFSAccessController from './access-controllers/ipfs.js'
@@ -43,7 +43,10 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
const identities = await Identities({ ipfs, keystore })
identity = identity || await identities.createIdentity({ id, keystore })

const storage = await IPFSBlockStorage({ ipfs, pin: true })
const manifestStorage = await ComposedStorage(
await LRUStorage({ size: 1000 }),
await IPFSBlockStorage({ ipfs, pin: true })
)

let databases = {}

@@ -57,18 +60,18 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
if (isValidAddress(address)) {
// If the address given was a valid OrbitDB address, eg. '/orbitdb/zdpuAuK3BHpS7NvMBivynypqciYCuy2UW77XYBPUYRnLjnw13'
const addr = OrbitDBAddress(address)
const bytes = await storage.get(addr.path)
const bytes = await manifestStorage.get(addr.path)
const { value } = await Block.decode({ bytes, codec, hasher })
manifest = value
const acAddress = manifest.accessController.replaceAll('/ipfs/', '')
accessController = await IPFSAccessController({ ipfs, identities, identity, address: acAddress, storage })
accessController = await IPFSAccessController({ ipfs, identities, identity, address: acAddress, storage: manifestStorage })
name = manifest.name
type = type || manifest.type
} else {
// If the address given was not valid, eg. just the name of the database
type = type || 'events'
accessController = await IPFSAccessController({ ipfs, identities, identity, storage })
const m = await createDBManifest(storage, address, type, accessController.address, {})
accessController = await IPFSAccessController({ ipfs, identities, identity, storage: manifestStorage })
const m = await createDBManifest(manifestStorage, address, type, accessController.address, {})
manifest = m.manifest
address = OrbitDBAddress(m.hash)
accessController = m.accessController
@@ -81,7 +84,7 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
throw new Error(`Unspported database type: '${type}'`)
}

const db = await DatabaseModel({ OpLog, Database, ipfs, identity, address, name, accessController, directory })
const db = await DatabaseModel({ OpLog, Database, ipfs, identity, address: address.toString(), name, accessController, directory })

db.events.on('close', onDatabaseClosed(address.toString()))

@@ -98,8 +101,8 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
if (keystore) {
await keystore.close()
}
if (storage) {
await storage.close()
if (manifestStorage) {
await manifestStorage.close()
}
databases = {}
}

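The hunks above replace the single pinned IPFSBlockStorage used for database manifests with a ComposedStorage that layers an LRU cache in front of it, and close manifestStorage instead of storage on stop. A minimal sketch of that composition, using only the factories imported in this file; the ipfs handle and the sample manifest CID are placeholders:

import { ComposedStorage, IPFSBlockStorage, LRUStorage } from './storage/index.js'

// Reads hit the in-memory LRU cache first and fall back to IPFS blocks;
// writes go to both layers, so manifests end up pinned in IPFS and cached locally.
const createManifestStorage = async (ipfs) => ComposedStorage(
  await LRUStorage({ size: 1000 }),
  await IPFSBlockStorage({ ipfs, pin: true })
)

// Usage (placeholder CID):
// const manifestStorage = await createManifestStorage(ipfs)
// const bytes = await manifestStorage.get('zdpuAuK3BHpS7NvMBivynypqciYCuy2UW77XYBPUYRnLjnw13')
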
@@ -33,7 +33,6 @@ const IPFSAccessController = async ({ ipfs, identities, identity, address, stora
const manifestBytes = await storage.get(address)
const { value } = await Block.decode({ bytes: manifestBytes, codec, hasher })
write = value.write
address = await AccessControllerManifest({ storage, type, params: { write } })
} else {
address = await AccessControllerManifest({ storage, type, params: { write } })
}
@@ -53,6 +52,7 @@ const IPFSAccessController = async ({ ipfs, identities, identity, address, stora
}

return {
type,
address,
write,
canAppend
@@ -60,56 +60,3 @@ const IPFSAccessController = async ({ ipfs, identities, identity, address, stora
}

export { IPFSAccessController as default }

// constructor (ipfs, options) {
// super()
// this._ipfs = ipfs
// this._write = Array.from(options.write || [])
// }

// // Returns the type of the access controller
// static get type () { return type }

// // Return a Set of keys that have `access` capability
// get write () {
// return this._write
// }

// async canAppend (entry, identityProvider) {
// // Allow if access list contain the writer's publicKey or is '*'
// const key = entry.identity.id
// if (this.write.includes(key) || this.write.includes('*')) {
// // check identity is valid
// return identityProvider.verifyIdentity(entry.identity)
// }
// return false
// }

// async load (address) {
// // Transform '/ipfs/QmPFtHi3cmfZerxtH9ySLdzpg1yFhocYDZgEZywdUXHxFU'
// // to 'QmPFtHi3cmfZerxtH9ySLdzpg1yFhocYDZgEZywdUXHxFU'
// if (address.indexOf('/ipfs') === 0) { address = address.split('/')[2] }

// try {
// this._write = await io.read(this._ipfs, address)
// } catch (e) {
// console.log('IPFSAccessController.load ERROR:', e)
// }
// }

// async save ({ ipfs }) {
// let cid
// try {
// cid = await io.write(this._ipfs, 'dag-cbor', { write: JSON.stringify(this.write, null, 2) })
// } catch (e) {
// console.log('IPFSAccessController.save ERROR:', e)
// }
// // return the manifest data
// return { address: cid }
// }

// static async create ({ ipfs, identity }, options = {}) {
// options = { ...options, ...{ write: options.write || [identity.id] } }
// return new IPFSAccessController(ipfs, options)
// }
// }

@@ -2,19 +2,28 @@ import { EventEmitter } from 'events'
import PQueue from 'p-queue'
import Path from 'path'
import Sync from './sync.js'
import { IPFSBlockStorage, LevelStorage } from './storage/index.js'
import { ComposedStorage, LRUStorage, IPFSBlockStorage, LevelStorage } from './storage/index.js'

const defaultPointerCount = 16
const defaultPointerCount = 0
const defaultCacheSize = 1000

const Database = async ({ OpLog, ipfs, identity, address, name, accessController, directory, storage, headsStorage, pointerCount }) => {
const { Log, Entry } = OpLog

const entryStorage = storage || await IPFSBlockStorage({ ipfs, pin: true })
directory = Path.join(directory || './orbitdb', `./${address}/`)
pointerCount = pointerCount || defaultPointerCount

directory = Path.join(directory || './orbitdb', `./${address.path}/`)
headsStorage = headsStorage || await LevelStorage({ path: Path.join(directory, '/log/_heads/') })
const entryStorage = await ComposedStorage(
await LRUStorage({ size: defaultCacheSize }),
await IPFSBlockStorage({ ipfs, pin: true })
)

const log = await Log(identity, { logId: address.toString(), access: accessController, entryStorage, headsStorage })
headsStorage = await ComposedStorage(
await LRUStorage({ size: defaultCacheSize }),
await LevelStorage({ path: Path.join(directory, '/log/_heads/') })
)

const log = await Log(identity, { logId: address, access: accessController, entryStorage, headsStorage })

// const indexStorage = await LevelStorage({ path: Path.join(directory, '/log/_index/') })
// const log = await Log(identity, { logId: address.toString(), access: accessController, entryStorage, headsStorage, indexStorage })
@@ -22,8 +31,6 @@ const Database = async ({ OpLog, ipfs, identity, address, name, accessController
const events = new EventEmitter()
const queue = new PQueue({ concurrency: 1 })

pointerCount = pointerCount || defaultPointerCount

const addOperation = async (op) => {
const task = async () => {
const entry = await log.append(op, { pointerCount })
@@ -55,7 +62,6 @@ const Database = async ({ OpLog, ipfs, identity, address, name, accessController
events.emit('close')
}

// TODO: rename to clear()
const drop = async () => {
await queue.onIdle()
await log.clear()

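The database.js hunks above build the oplog's entry storage as an LRU cache composed over pinned IPFS blocks, and the heads storage as an LRU cache over a per-database LevelStorage. A rough sketch of the same wiring outside the Database factory; the ipfs handle and the directory value are assumptions for illustration:

import Path from 'path'
import { ComposedStorage, LRUStorage, IPFSBlockStorage, LevelStorage } from './storage/index.js'

const defaultCacheSize = 1000

// Entries: in-memory LRU cache in front of pinned IPFS blocks.
const makeEntryStorage = async (ipfs) => ComposedStorage(
  await LRUStorage({ size: defaultCacheSize }),
  await IPFSBlockStorage({ ipfs, pin: true })
)

// Heads: in-memory LRU cache in front of an on-disk LevelDB store under the database directory.
const makeHeadsStorage = async (directory) => ComposedStorage(
  await LRUStorage({ size: defaultCacheSize }),
  await LevelStorage({ path: Path.join(directory, '/log/_heads/') })
)
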
@@ -61,16 +61,21 @@ const DocumentStore = async ({ OpLog, Database, ipfs, identity, address, name, a
return results
}

const iterator = async function * () {
const iterator = async function * ({ amount } = {}) {
const keys = {}
for await (const entry of log.traverse()) {
let count = 0
for await (const entry of log.iterator()) {
const { op, key, value } = entry.payload
if (op === 'PUT' && !keys[key]) {
keys[key] = true
count++
yield value
} else if (op === 'DEL' && !keys[key]) {
keys[key] = true
}
if (count >= amount) {
break
}
}
}

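The document store's iterator above now accepts an optional amount and counts only the documents it actually yields; deleted documents are skipped without counting. A minimal usage sketch; db stands for a DocumentStore instance opened as in the tests further below:

// Collect at most three current documents.
const firstThree = []
for await (const doc of db.iterator({ amount: 3 })) {
  firstThree.push(doc)
}

// Without an amount, the iterator still yields every current (non-deleted) document.
for await (const doc of db.iterator()) {
  console.log(doc)
}
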
@@ -11,7 +11,7 @@ const KeyValuePersisted = async ({ OpLog, Database, ipfs, identity, address, nam

const queue = new PQueue({ concurrency: 1 })

directory = path.join(directory || './orbitdb', `./${address.path}/_index/`)
directory = path.join(directory || './orbitdb', `./${address}/_index/`)
const index = await LevelStorage({ path: directory, valueEncoding })

let latestOplogHash
@@ -48,9 +48,9 @@ const KeyValuePersisted = async ({ OpLog, Database, ipfs, identity, address, nam
return keyValueStore.get(key)
}

const iterator = async function * () {
const iterator = async function * ({ amount } = {}) {
await queue.onIdle()
for await (const { key, value } of keyValueStore.iterator()) {
for await (const { key, value } of keyValueStore.iterator({ amount })) {
yield { key, value }
}
}

@@ -22,16 +22,21 @@ const KeyValue = async ({ OpLog, Database, ipfs, identity, address, name, access
}
}

const iterator = async function * () {
const iterator = async function * ({ amount } = {}) {
const keys = {}
let count = 0
for await (const entry of log.traverse()) {
const { op, key, value } = entry.payload
if (op === 'PUT' && !keys[key]) {
keys[key] = true
count++
yield { key, value }
} else if (op === 'DEL' && !keys[key]) {
keys[key] = true
}
if (count >= amount) {
break
}
}
}

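The keyvalue iterator gets the same treatment: an optional amount caps how many { key, value } pairs are yielded, and deleted keys are not counted. A short usage sketch, again assuming an opened KeyValue instance named db:

// Yield at most two current key/value pairs, then stop.
for await (const { key, value } of db.iterator({ amount: 2 })) {
  console.log(key, value)
}
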
@@ -1,41 +1,51 @@
const ComposedStorage = async (...storages) => {
// Compose storages:
// const storage1 = await ComposedStorage(await LRUStorage(), await LevelStorage())
// const storage2 = await ComposedStorage(storage1, await IPFSBlockStorage())

const ComposedStorage = async (storage1, storage2) => {
const put = async (hash, data) => {
for await (const storage of storages) {
await storage.put(hash, data)
}
await storage1.put(hash, data)
await storage2.put(hash, data)
}

const get = async (hash) => {
for await (const storage of storages) {
const value = await storage.get(hash)
let value = await storage1.get(hash)
if (!value) {
value = await storage2.get(hash)
if (value) {
return value
await storage1.put(hash, value)
}
}
return value
}

const iterator = async function * () {
return storages[0].iterator()
}

const merge = async (other) => {
for await (const storage1 of storages) {
for await (const storage2 of storages) {
await storage1.merge(storage2)
const keys = []
for (const storage of [storage1, storage2]) {
for await (const [key, value] of storage.iterator()) {
if (!keys[key]) {
keys[key] = true
yield [key, value]
}
}
}
}

const merge = async (other) => {
await storage1.merge(other)
await storage2.merge(other)
await other.merge(storage1)
await other.merge(storage2)
}

const clear = async () => {
for await (const storage of storages) {
await storage.clear()
}
await storage1.clear()
await storage2.clear()
}

const close = async () => {
for await (const storage of storages) {
await storage.close()
}
await storage1.close()
await storage2.close()
}

return {

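The rewritten ComposedStorage above takes exactly two storages: put() writes to both, get() tries the first and falls back to the second (back-filling the first on a hit), iterator() yields each key once across both layers, and clear() and close() fan out to both. A minimal usage sketch based on the comment at the top of the hunk; the import path and the ipfs handle are assumptions:

import { ComposedStorage, LRUStorage, LevelStorage, IPFSBlockStorage } from './storage/index.js'

// Fast in-memory cache in front of a persistent LevelDB store.
const local = await ComposedStorage(await LRUStorage(), await LevelStorage())

// Compositions nest: the composed store can itself sit in front of IPFS blocks.
// const replicated = await ComposedStorage(local, await IPFSBlockStorage({ ipfs, pin: true }))

// Writes land in both layers; reads are served from the cache when possible.
await local.put('hash1', new Uint8Array([1, 2, 3]))
const data = await local.get('hash1')
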
@@ -1,4 +1,4 @@
import { deepStrictEqual, strictEqual } from 'assert'
import { deepStrictEqual, strictEqual, notStrictEqual } from 'assert'
import rmrf from 'rimraf'
import { copy } from 'fs-extra'
import * as IPFS from 'ipfs'
@@ -249,4 +249,79 @@ describe('DocumentStore Database', function () {
deepStrictEqual(await db.query(findFn), [])
})
})

describe('Iterator', () => {
before(async () => {
db = await DocumentStore({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
})

after(async () => {
if (db) {
await db.drop()
await db.close()
}
})

it('has an iterator function', async () => {
notStrictEqual(db.iterator, undefined)
strictEqual(typeof db.iterator, 'function')
})

it('returns no documents when the database is empty', async () => {
const all = []
for await (const doc of db.iterator()) {
all.unshift(doc)
}
strictEqual(all.length, 0)
})

it('returns all documents when the database is not empty', async () => {
await db.put({ _id: 'doc1', something: true })
await db.put({ _id: 'doc2', something: true })
await db.put({ _id: 'doc3', something: true })
await db.put({ _id: 'doc4', something: true })
await db.put({ _id: 'doc5', something: true })

// Add one more document and then delete it to count
// for the fact that the amount returned should be
// the amount of actual documents returned and not
// the oplog length, and deleted documents don't
// count towards the returned amount.
await db.put({ _id: 'doc6', something: true })
await db.del('doc6')

const all = []
for await (const doc of db.iterator()) {
all.unshift(doc)
}
strictEqual(all.length, 5)
})

it('returns only the amount of documents given as a parameter', async () => {
const amount = 3
const all = []
for await (const doc of db.iterator({ amount })) {
all.unshift(doc)
}
strictEqual(all.length, amount)
})

it('returns only two documents if amount given as a parameter is 2', async () => {
const amount = 2
const all = []
for await (const doc of db.iterator({ amount })) {
all.unshift(doc)
}
strictEqual(all.length, amount)
})

it('returns only one document if amount given as a parameter is 1', async () => {
const amount = 1
const all = []
for await (const doc of db.iterator({ amount })) {
all.unshift(doc)
}
strictEqual(all.length, amount)
})
})
})

@@ -1,9 +1,11 @@
import { deepStrictEqual, strictEqual } from 'assert'
import { deepStrictEqual, strictEqual, notStrictEqual } from 'assert'
import path from 'path'
import fs from 'fs'
import rmrf from 'rimraf'
import { copy } from 'fs-extra'
import * as IPFS from 'ipfs'
import { Log, Entry, Database, KeyStore, Identities } from '../../src/index.js'
import { KeyValuePersisted, KeyValue } from '../../src/db/index.js'
import { KeyValuePersisted } from '../../src/db/index.js'
import config from '../config.js'
import testKeysPath from '../fixtures/test-keys-path.js '

@@ -43,134 +45,230 @@ describe('KeyValuePersisted Database', function () {
await rmrf('./ipfs1')
})

beforeEach(async () => {
db = await KeyValuePersisted({ OpLog, KeyValue, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
describe('Creating a KeyValuePersisted database', () => {
beforeEach(async () => {
db = await KeyValuePersisted({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
})

afterEach(async () => {
if (db) {
await db.drop()
await db.close()
}
})

it('creates a keyvalue store', async () => {
strictEqual(db.address.toString(), databaseId)
strictEqual(db.type, 'keyvalue')
})

it('creates a directory for the persisted index', async () => {
const expectedPath = path.join('./orbitdb', `./${db.address}`, '/_index')
const directoryExists = fs.existsSync(expectedPath)
strictEqual(directoryExists, true)
})

it('returns 0 items when it\'s a fresh database', async () => {
const all = []
for await (const item of db.iterator()) {
all.unshift(item)
}

strictEqual(all.length, 0)
})
})

afterEach(async () => {
if (db) {
await db.drop()
await db.close()
}
describe('KeyValuePersisted database API', () => {
beforeEach(async () => {
db = await KeyValuePersisted({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
})

afterEach(async () => {
if (db) {
await db.drop()
await db.close()
}
})

it('sets a key/value pair', async () => {
const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'

const actual = await db.set('key1', 'value1')
strictEqual(actual, expected)
})

it('puts a key/value pair', async () => {
const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'

const actual = await db.put('key1', 'value1')
strictEqual(actual, expected)
})

it('gets a key/value pair\'s value', async () => {
const key = 'key1'
const expected = 'value1'

await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using put', async () => {
const key = 'key1'
const expected = 'hello2'

await db.put(key, 'value1')
await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using set', async () => {
const key = 'key1'
const expected = 'hello2'

await db.set(key, 'value1')
await db.set(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using set then put', async () => {
const key = 'key1'
const expected = 'hello2'

await db.set(key, 'value1')
await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using put then set', async () => {
const key = 'key1'
const expected = 'hello2'

await db.put(key, 'value1')
await db.set(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('deletes a key/value pair', async () => {
const key = 'key1'
const expected = undefined

await db.put(key, 'value1')
const hash = await db.del(key)

const actual = await db.get(hash)
strictEqual(actual, expected)
})

it('deletes a non-existent key/value pair', async () => {
const expected = undefined

const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')

const actual = await db.get(del)
strictEqual(actual, expected)
})

it('returns all key/value pairs', async () => {
const keyvalue = [
{ key: 'key1', value: 'init' },
{ key: 'key2', value: true },
{ key: 'key3', value: 'hello' },
{ key: 'key4', value: 'friend' },
{ key: 'key5', value: '12345' },
{ key: 'key6', value: 'empty' },
{ key: 'key7', value: 'friend33' }
]

for (const { key, value } of Object.values(keyvalue)) {
await db.put(key, value)
}

const all = []
for await (const pair of db.iterator()) {
all.unshift(pair)
}

deepStrictEqual(all, keyvalue)
})
})

it('creates a keyvalue store', async () => {
strictEqual(db.address.toString(), databaseId)
strictEqual(db.type, 'keyvalue')
})
describe('Iterator', () => {
before(async () => {
db = await KeyValuePersisted({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
})

it('returns 0 items when it\'s a fresh database', async () => {
const all = []
for await (const item of db.iterator()) {
all.unshift(item)
}
after(async () => {
if (db) {
await db.drop()
await db.close()
}
})

strictEqual(all.length, 0)
})
it('has an iterator function', async () => {
notStrictEqual(db.iterator, undefined)
strictEqual(typeof db.iterator, 'function')
})

it('sets a key/value pair', async () => {
const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
it('returns no documents when the database is empty', async () => {
const all = []
for await (const { key, value } of db.iterator()) {
all.unshift({ key, value })
}
strictEqual(all.length, 0)
})

const actual = await db.set('key1', 'value1')
strictEqual(actual, expected)
})
it('returns all documents when the database is not empty', async () => {
await db.put('key1', 1)
await db.put('key2', 2)
await db.put('key3', 3)
await db.put('key4', 4)
await db.put('key5', 5)

it('puts a key/value pair', async () => {
const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
// Add one more document and then delete it to count
// for the fact that the amount returned should be
// the amount of actual documents returned and not
// the oplog length, and deleted documents don't
// count towards the returned amount.
await db.put('key6', 6)
await db.del('key6')

const actual = await db.put('key1', 'value1')
strictEqual(actual, expected)
})
const all = []
for await (const { key, value } of db.iterator()) {
all.unshift({ key, value })
}
strictEqual(all.length, 5)
})

it('gets a key/value pair\'s value', async () => {
const key = 'key1'
const expected = 'value1'
it('returns only the amount of documents given as a parameter', async () => {
const amount = 3
const all = []
for await (const { key, value } of db.iterator({ amount })) {
all.unshift({ key, value })
}
strictEqual(all.length, amount)
})

await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})
it('returns only two documents if amount given as a parameter is 2', async () => {
const amount = 2
const all = []
for await (const { key, value } of db.iterator({ amount })) {
all.unshift({ key, value })
}
strictEqual(all.length, amount)
})

it('get key\'s updated value when using put', async () => {
const key = 'key1'
const expected = 'hello2'

await db.put(key, 'value1')
await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using set', async () => {
const key = 'key1'
const expected = 'hello2'

await db.set(key, 'value1')
await db.set(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using set then put', async () => {
const key = 'key1'
const expected = 'hello2'

await db.set(key, 'value1')
await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using put then set', async () => {
const key = 'key1'
const expected = 'hello2'

await db.put(key, 'value1')
await db.set(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('deletes a key/value pair', async () => {
const key = 'key1'
const expected = undefined

await db.put(key, 'value1')
const hash = await db.del(key)

const actual = await db.get(hash)
strictEqual(actual, expected)
})

it('deletes a non-existent key/value pair', async () => {
const expected = undefined

const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')

const actual = await db.get(del)
strictEqual(actual, expected)
})

it('returns all key/value pairs', async () => {
const keyvalue = [
{ key: 'key1', value: 'init' },
{ key: 'key2', value: true },
{ key: 'key3', value: 'hello' },
{ key: 'key4', value: 'friend' },
{ key: 'key5', value: '12345' },
{ key: 'key6', value: 'empty' },
{ key: 'key7', value: 'friend33' }
]

for (const { key, value } of Object.values(keyvalue)) {
await db.put(key, value)
}

const all = []
for await (const pair of db.iterator()) {
all.unshift(pair)
}

deepStrictEqual(all, keyvalue)
it('returns only one document if amount given as a parameter is 1', async () => {
const amount = 1
const all = []
for await (const { key, value } of db.iterator({ amount })) {
all.unshift({ key, value })
}
strictEqual(all.length, amount)
})
})
})

@@ -1,4 +1,4 @@
import { deepStrictEqual, strictEqual } from 'assert'
import { deepStrictEqual, strictEqual, notStrictEqual } from 'assert'
import rmrf from 'rimraf'
import { copy } from 'fs-extra'
import * as IPFS from 'ipfs'
@@ -43,134 +43,224 @@ describe('KeyValue Database', function () {
await rmrf('./ipfs1')
})

beforeEach(async () => {
db = await KeyValue({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
describe('Creating a KeyValue database', () => {
beforeEach(async () => {
db = await KeyValue({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
})

afterEach(async () => {
if (db) {
await db.drop()
await db.close()
}
})

it('creates a keyvalue store', async () => {
strictEqual(db.address.toString(), databaseId)
strictEqual(db.type, 'keyvalue')
})

it('returns 0 items when it\'s a fresh database', async () => {
const all = []
for await (const item of db.iterator()) {
all.unshift(item)
}

strictEqual(all.length, 0)
})
})

afterEach(async () => {
if (db) {
await db.drop()
await db.close()
}
describe('KeyValue database API', () => {
beforeEach(async () => {
db = await KeyValue({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
})

afterEach(async () => {
if (db) {
await db.drop()
await db.close()
}
})

it('sets a key/value pair', async () => {
const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'

const actual = await db.set('key1', 'value1')
strictEqual(actual, expected)
})

it('puts a key/value pair', async () => {
const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'

const actual = await db.put('key1', 'value1')
strictEqual(actual, expected)
})

it('gets a key/value pair\'s value', async () => {
const key = 'key1'
const expected = 'value1'

await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using put', async () => {
const key = 'key1'
const expected = 'hello2'

await db.put(key, 'value1')
await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using set', async () => {
const key = 'key1'
const expected = 'hello2'

await db.set(key, 'value1')
await db.set(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using set then put', async () => {
const key = 'key1'
const expected = 'hello2'

await db.set(key, 'value1')
await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using put then set', async () => {
const key = 'key1'
const expected = 'hello2'

await db.put(key, 'value1')
await db.set(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('deletes a key/value pair', async () => {
const key = 'key1'
const expected = undefined

await db.put(key, 'value1')
const hash = await db.del(key)

const actual = await db.get(hash)
strictEqual(actual, expected)
})

it('deletes a non-existent key/value pair', async () => {
const expected = undefined

const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')

const actual = await db.get(del)
strictEqual(actual, expected)
})

it('returns all key/value pairs', async () => {
const keyvalue = [
{ key: 'key1', value: 'init' },
{ key: 'key2', value: true },
{ key: 'key3', value: 'hello' },
{ key: 'key4', value: 'friend' },
{ key: 'key5', value: '12345' },
{ key: 'key6', value: 'empty' },
{ key: 'key7', value: 'friend33' }
]

for (const { key, value } of Object.values(keyvalue)) {
await db.put(key, value)
}

const all = []
for await (const pair of db.iterator()) {
all.unshift(pair)
}

deepStrictEqual(all, keyvalue)
})
})

it('creates a keyvalue store', async () => {
strictEqual(db.address.toString(), databaseId)
strictEqual(db.type, 'keyvalue')
})
describe('Iterator', () => {
before(async () => {
db = await KeyValue({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
})

it('returns 0 items when it\'s a fresh database', async () => {
const all = []
for await (const item of db.iterator()) {
all.unshift(item)
}
after(async () => {
if (db) {
await db.drop()
await db.close()
}
})

strictEqual(all.length, 0)
})
it('has an iterator function', async () => {
notStrictEqual(db.iterator, undefined)
strictEqual(typeof db.iterator, 'function')
})

it('sets a key/value pair', async () => {
const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
it('returns no key/value pairs when the database is empty', async () => {
const all = []
for await (const { key, value } of db.iterator()) {
all.unshift({ key, value })
}
strictEqual(all.length, 0)
})

const actual = await db.set('key1', 'value1')
strictEqual(actual, expected)
})
it('returns all key/value pairs when the database is not empty', async () => {
await db.put('key1', 1)
await db.put('key2', 2)
await db.put('key3', 3)
await db.put('key4', 4)
await db.put('key5', 5)

it('puts a key/value pair', async () => {
const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
// Add one more document and then delete it to count
// for the fact that the amount returned should be
// the amount of actual documents returned and not
// the oplog length, and deleted documents don't
// count towards the returned amount.
await db.put('key6', 6)
await db.del('key6')

const actual = await db.put('key1', 'value1')
strictEqual(actual, expected)
})
const all = []
for await (const { key, value } of db.iterator()) {
all.unshift({ key, value })
}
strictEqual(all.length, 5)
})

it('gets a key/value pair\'s value', async () => {
const key = 'key1'
const expected = 'value1'
it('returns only the amount of key/value pairs given as a parameter', async () => {
const amount = 3
const all = []
for await (const { key, value } of db.iterator({ amount })) {
all.unshift({ key, value })
}
strictEqual(all.length, amount)
})

await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})
it('returns only two key/value pairs if amount given as a parameter is 2', async () => {
const amount = 2
const all = []
for await (const { key, value } of db.iterator({ amount })) {
all.unshift({ key, value })
}
strictEqual(all.length, amount)
})

it('get key\'s updated value when using put', async () => {
const key = 'key1'
const expected = 'hello2'

await db.put(key, 'value1')
await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using set', async () => {
const key = 'key1'
const expected = 'hello2'

await db.set(key, 'value1')
await db.set(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using set then put', async () => {
const key = 'key1'
const expected = 'hello2'

await db.set(key, 'value1')
await db.put(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('get key\'s updated value when using put then set', async () => {
const key = 'key1'
const expected = 'hello2'

await db.put(key, 'value1')
await db.set(key, expected)
const actual = await db.get(key)
strictEqual(actual, expected)
})

it('deletes a key/value pair', async () => {
const key = 'key1'
const expected = undefined

await db.put(key, 'value1')
const hash = await db.del(key)

const actual = await db.get(hash)
strictEqual(actual, expected)
})

it('deletes a non-existent key/value pair', async () => {
const expected = undefined

const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')

const actual = await db.get(del)
strictEqual(actual, expected)
})

it('returns all key/value pairs', async () => {
const keyvalue = [
{ key: 'key1', value: 'init' },
{ key: 'key2', value: true },
{ key: 'key3', value: 'hello' },
{ key: 'key4', value: 'friend' },
{ key: 'key5', value: '12345' },
{ key: 'key6', value: 'empty' },
{ key: 'key7', value: 'friend33' }
]

for (const { key, value } of Object.values(keyvalue)) {
await db.put(key, value)
}

const all = []
for await (const pair of db.iterator()) {
all.unshift(pair)
}

deepStrictEqual(all, keyvalue)
it('returns only one key/value pairs if amount given as a parameter is 1', async () => {
const amount = 1
const all = []
for await (const { key, value } of db.iterator({ amount })) {
all.unshift({ key, value })
}
strictEqual(all.length, amount)
})
})
})

@@ -134,7 +134,7 @@ describe('Open databases', function () {
})

it('creates a directory for the database oplog', async () => {
const expectedPath = path.join(orbitdb1.directory, `./${db.address.path}`, '/log/_heads')
const expectedPath = path.join(orbitdb1.directory, `./${db.address}`, '/log/_heads')
const directoryExists = fs.existsSync(expectedPath)
strictEqual(directoryExists, true)
})