Mirror of https://github.com/orbitdb/orbitdb.git (synced 2025-06-02 04:06:37 +00:00)
Commit e6e72fee9e
@@ -1,7 +1,7 @@
 import Database from './database.js'
 import { EventStore, KeyValue, DocumentStore } from './db/index.js'
 import { Log, Entry } from './oplog/index.js'
-import { IPFSBlockStorage, LevelStorage } from './storage/index.js'
+import { ComposedStorage, IPFSBlockStorage, LevelStorage, LRUStorage } from './storage/index.js'
 import KeyStore from './key-store.js'
 import { Identities } from './identities/index.js'
 import IPFSAccessController from './access-controllers/ipfs.js'
@@ -43,7 +43,10 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
   const identities = await Identities({ ipfs, keystore })
   identity = identity || await identities.createIdentity({ id, keystore })

-  const storage = await IPFSBlockStorage({ ipfs, pin: true })
+  const manifestStorage = await ComposedStorage(
+    await LRUStorage({ size: 1000 }),
+    await IPFSBlockStorage({ ipfs, pin: true })
+  )

   let databases = {}

@@ -57,18 +60,18 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
     if (isValidAddress(address)) {
       // If the address given was a valid OrbitDB address, eg. '/orbitdb/zdpuAuK3BHpS7NvMBivynypqciYCuy2UW77XYBPUYRnLjnw13'
       const addr = OrbitDBAddress(address)
-      const bytes = await storage.get(addr.path)
+      const bytes = await manifestStorage.get(addr.path)
       const { value } = await Block.decode({ bytes, codec, hasher })
       manifest = value
       const acAddress = manifest.accessController.replaceAll('/ipfs/', '')
-      accessController = await IPFSAccessController({ ipfs, identities, identity, address: acAddress, storage })
+      accessController = await IPFSAccessController({ ipfs, identities, identity, address: acAddress, storage: manifestStorage })
       name = manifest.name
       type = type || manifest.type
     } else {
       // If the address given was not valid, eg. just the name of the database
       type = type || 'events'
-      accessController = await IPFSAccessController({ ipfs, identities, identity, storage })
-      const m = await createDBManifest(storage, address, type, accessController.address, {})
+      accessController = await IPFSAccessController({ ipfs, identities, identity, storage: manifestStorage })
+      const m = await createDBManifest(manifestStorage, address, type, accessController.address, {})
       manifest = m.manifest
       address = OrbitDBAddress(m.hash)
       accessController = m.accessController
@@ -81,7 +84,7 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
       throw new Error(`Unspported database type: '${type}'`)
     }

-    const db = await DatabaseModel({ OpLog, Database, ipfs, identity, address, name, accessController, directory })
+    const db = await DatabaseModel({ OpLog, Database, ipfs, identity, address: address.toString(), name, accessController, directory })

     db.events.on('close', onDatabaseClosed(address.toString()))

@@ -98,8 +101,8 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
     if (keystore) {
      await keystore.close()
     }
-    if (storage) {
-      await storage.close()
+    if (manifestStorage) {
+      await manifestStorage.close()
     }
     databases = {}
   }
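Note: the hunks above replace the single IPFSBlockStorage manifest store with a composed store. A minimal, hypothetical sketch of the same wiring, to show the intent (the helper name createManifestStorage is invented here; only ComposedStorage, LRUStorage and IPFSBlockStorage come from the diff):

// Illustrative sketch only: an LRU cache composed in front of pinned IPFS block
// storage, mirroring the manifestStorage wiring introduced above.
import { ComposedStorage, IPFSBlockStorage, LRUStorage } from './storage/index.js'

const createManifestStorage = async (ipfs) => {
  return ComposedStorage(
    await LRUStorage({ size: 1000 }),           // fast, bounded in-memory cache
    await IPFSBlockStorage({ ipfs, pin: true }) // persistent, pinned IPFS blocks
  )
}

// Repeated manifest reads for the same address should then be served from the
// LRU cache after the first fetch from IPFS:
// const manifestStorage = await createManifestStorage(ipfs)
// const bytes = await manifestStorage.get(addr.path)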
@@ -33,7 +33,6 @@ const IPFSAccessController = async ({ ipfs, identities, identity, address, stora
     const manifestBytes = await storage.get(address)
     const { value } = await Block.decode({ bytes: manifestBytes, codec, hasher })
     write = value.write
-    address = await AccessControllerManifest({ storage, type, params: { write } })
   } else {
     address = await AccessControllerManifest({ storage, type, params: { write } })
   }
@@ -53,6 +52,7 @@ const IPFSAccessController = async ({ ipfs, identities, identity, address, stora
   }

   return {
+    type,
     address,
     write,
     canAppend
@@ -60,56 +60,3 @@ const IPFSAccessController = async ({ ipfs, identities, identity, address, stora
 }

 export { IPFSAccessController as default }
-
-// constructor (ipfs, options) {
-//   super()
-//   this._ipfs = ipfs
-//   this._write = Array.from(options.write || [])
-// }
-
-// // Returns the type of the access controller
-// static get type () { return type }
-
-// // Return a Set of keys that have `access` capability
-// get write () {
-//   return this._write
-// }
-
-// async canAppend (entry, identityProvider) {
-//   // Allow if access list contain the writer's publicKey or is '*'
-//   const key = entry.identity.id
-//   if (this.write.includes(key) || this.write.includes('*')) {
-//     // check identity is valid
-//     return identityProvider.verifyIdentity(entry.identity)
-//   }
-//   return false
-// }
-
-// async load (address) {
-//   // Transform '/ipfs/QmPFtHi3cmfZerxtH9ySLdzpg1yFhocYDZgEZywdUXHxFU'
-//   // to 'QmPFtHi3cmfZerxtH9ySLdzpg1yFhocYDZgEZywdUXHxFU'
-//   if (address.indexOf('/ipfs') === 0) { address = address.split('/')[2] }
-
-//   try {
-//     this._write = await io.read(this._ipfs, address)
-//   } catch (e) {
-//     console.log('IPFSAccessController.load ERROR:', e)
-//   }
-// }
-
-// async save ({ ipfs }) {
-//   let cid
-//   try {
-//     cid = await io.write(this._ipfs, 'dag-cbor', { write: JSON.stringify(this.write, null, 2) })
-//   } catch (e) {
-//     console.log('IPFSAccessController.save ERROR:', e)
-//   }
-//   // return the manifest data
-//   return { address: cid }
-// }
-
-// static async create ({ ipfs, identity }, options = {}) {
-//   options = { ...options, ...{ write: options.write || [identity.id] } }
-//   return new IPFSAccessController(ipfs, options)
-// }
-// }
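Note: with `type` added to the object returned by IPFSAccessController, callers can identify the controller without decoding its manifest. A hypothetical sketch of reading the returned shape (the parameter list is trimmed for brevity and the 'ipfs' value shown is an assumption, not confirmed by this diff):

// Hypothetical consumer of the returned access controller object.
const accessController = await IPFSAccessController({ ipfs, identities, identity, storage })

console.log(accessController.type)    // presumably 'ipfs' for this controller
console.log(accessController.address) // manifest address of the controller
console.log(accessController.write)   // identity ids allowed to write
// canAppend is the hook the oplog uses to validate each new entry before appending.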
@@ -2,19 +2,28 @@ import { EventEmitter } from 'events'
 import PQueue from 'p-queue'
 import Path from 'path'
 import Sync from './sync.js'
-import { IPFSBlockStorage, LevelStorage } from './storage/index.js'
+import { ComposedStorage, LRUStorage, IPFSBlockStorage, LevelStorage } from './storage/index.js'

-const defaultPointerCount = 16
+const defaultPointerCount = 0
+const defaultCacheSize = 1000

 const Database = async ({ OpLog, ipfs, identity, address, name, accessController, directory, storage, headsStorage, pointerCount }) => {
   const { Log, Entry } = OpLog

-  const entryStorage = storage || await IPFSBlockStorage({ ipfs, pin: true })
+  directory = Path.join(directory || './orbitdb', `./${address}/`)
+  pointerCount = pointerCount || defaultPointerCount

-  directory = Path.join(directory || './orbitdb', `./${address.path}/`)
-  headsStorage = headsStorage || await LevelStorage({ path: Path.join(directory, '/log/_heads/') })
+  const entryStorage = await ComposedStorage(
+    await LRUStorage({ size: defaultCacheSize }),
+    await IPFSBlockStorage({ ipfs, pin: true })
+  )

-  const log = await Log(identity, { logId: address.toString(), access: accessController, entryStorage, headsStorage })
+  headsStorage = await ComposedStorage(
+    await LRUStorage({ size: defaultCacheSize }),
+    await LevelStorage({ path: Path.join(directory, '/log/_heads/') })
+  )
+
+  const log = await Log(identity, { logId: address, access: accessController, entryStorage, headsStorage })

   // const indexStorage = await LevelStorage({ path: Path.join(directory, '/log/_index/') })
   // const log = await Log(identity, { logId: address.toString(), access: accessController, entryStorage, headsStorage, indexStorage })
@@ -22,8 +31,6 @@ const Database = async ({ OpLog, ipfs, identity, address, name, accessController
   const events = new EventEmitter()
   const queue = new PQueue({ concurrency: 1 })

-  pointerCount = pointerCount || defaultPointerCount
-
   const addOperation = async (op) => {
     const task = async () => {
       const entry = await log.append(op, { pointerCount })
@@ -55,7 +62,6 @@ const Database = async ({ OpLog, ipfs, identity, address, name, accessController
     events.emit('close')
   }

-  // TODO: rename to clear()
   const drop = async () => {
     await queue.onIdle()
     await log.clear()
@@ -61,16 +61,21 @@ const DocumentStore = async ({ OpLog, Database, ipfs, identity, address, name, a
     return results
   }

-  const iterator = async function * () {
+  const iterator = async function * ({ amount } = {}) {
     const keys = {}
-    for await (const entry of log.traverse()) {
+    let count = 0
+    for await (const entry of log.iterator()) {
       const { op, key, value } = entry.payload
       if (op === 'PUT' && !keys[key]) {
         keys[key] = true
+        count++
         yield value
       } else if (op === 'DEL' && !keys[key]) {
         keys[key] = true
       }
+      if (count >= amount) {
+        break
+      }
     }
   }

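Note: the DocumentStore iterator now takes an optional `amount` and counts only live documents (PUT entries without a later DEL) before breaking out of the traversal. A small usage sketch, with invented variable names:

// Hypothetical usage of the amount-limited iterator added in this diff.
const firstThree = []
for await (const doc of db.iterator({ amount: 3 })) {
  firstThree.push(doc) // stops after three current documents; deleted ones are not counted
}

// Without an amount, `count >= undefined` is never true, so every current document is yielded:
// for await (const doc of db.iterator()) { ... }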
@@ -11,7 +11,7 @@ const KeyValuePersisted = async ({ OpLog, Database, ipfs, identity, address, nam

   const queue = new PQueue({ concurrency: 1 })

-  directory = path.join(directory || './orbitdb', `./${address.path}/_index/`)
+  directory = path.join(directory || './orbitdb', `./${address}/_index/`)
   const index = await LevelStorage({ path: directory, valueEncoding })

   let latestOplogHash
@@ -48,9 +48,9 @@ const KeyValuePersisted = async ({ OpLog, Database, ipfs, identity, address, nam
     return keyValueStore.get(key)
   }

-  const iterator = async function * () {
+  const iterator = async function * ({ amount } = {}) {
     await queue.onIdle()
-    for await (const { key, value } of keyValueStore.iterator()) {
+    for await (const { key, value } of keyValueStore.iterator({ amount })) {
       yield { key, value }
     }
   }
@ -22,16 +22,21 @@ const KeyValue = async ({ OpLog, Database, ipfs, identity, address, name, access
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const iterator = async function * () {
|
const iterator = async function * ({ amount } = {}) {
|
||||||
const keys = {}
|
const keys = {}
|
||||||
|
let count = 0
|
||||||
for await (const entry of log.traverse()) {
|
for await (const entry of log.traverse()) {
|
||||||
const { op, key, value } = entry.payload
|
const { op, key, value } = entry.payload
|
||||||
if (op === 'PUT' && !keys[key]) {
|
if (op === 'PUT' && !keys[key]) {
|
||||||
keys[key] = true
|
keys[key] = true
|
||||||
|
count++
|
||||||
yield { key, value }
|
yield { key, value }
|
||||||
} else if (op === 'DEL' && !keys[key]) {
|
} else if (op === 'DEL' && !keys[key]) {
|
||||||
keys[key] = true
|
keys[key] = true
|
||||||
}
|
}
|
||||||
|
if (count >= amount) {
|
||||||
|
break
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -1,41 +1,51 @@
-const ComposedStorage = async (...storages) => {
+// Compose storages:
+// const storage1 = await ComposedStorage(await LRUStorage(), await LevelStorage())
+// const storage2 = await ComposedStorage(storage1, await IPFSBlockStorage())
+
+const ComposedStorage = async (storage1, storage2) => {
   const put = async (hash, data) => {
-    for await (const storage of storages) {
-      await storage.put(hash, data)
-    }
+    await storage1.put(hash, data)
+    await storage2.put(hash, data)
   }

   const get = async (hash) => {
-    for await (const storage of storages) {
-      const value = await storage.get(hash)
+    let value = await storage1.get(hash)
+    if (!value) {
+      value = await storage2.get(hash)
       if (value) {
-        return value
+        await storage1.put(hash, value)
       }
     }
+    return value
   }

   const iterator = async function * () {
-    return storages[0].iterator()
-  }
-
-  const merge = async (other) => {
-    for await (const storage1 of storages) {
-      for await (const storage2 of storages) {
-        await storage1.merge(storage2)
+    const keys = []
+    for (const storage of [storage1, storage2]) {
+      for await (const [key, value] of storage.iterator()) {
+        if (!keys[key]) {
+          keys[key] = true
+          yield [key, value]
+        }
       }
     }
   }

+  const merge = async (other) => {
+    await storage1.merge(other)
+    await storage2.merge(other)
+    await other.merge(storage1)
+    await other.merge(storage2)
+  }
+
   const clear = async () => {
-    for await (const storage of storages) {
-      await storage.clear()
-    }
+    await storage1.clear()
+    await storage2.clear()
   }

   const close = async () => {
-    for await (const storage of storages) {
-      await storage.close()
-    }
+    await storage1.close()
+    await storage2.close()
   }

   return {
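Note: the rewritten get() turns ComposedStorage into a read-through cache: a miss in storage1 falls back to storage2 and, on a hit there, the value is copied back into storage1. A self-contained sketch with a toy MemoryStorage (invented here, not part of the diff) illustrating that behaviour:

// Sketch only: two toy in-memory stores standing in for LRUStorage / IPFSBlockStorage.
import { ComposedStorage } from './storage/index.js' // path as used elsewhere in this diff

const MemoryStorage = async () => {
  const data = new Map()
  return {
    put: async (hash, value) => { data.set(hash, value) },
    get: async (hash) => data.get(hash),
    iterator: async function * () { yield * data.entries() },
    merge: async () => {},
    clear: async () => { data.clear() },
    close: async () => {}
  }
}

const fast = await MemoryStorage()
const slow = await MemoryStorage()
const storage = await ComposedStorage(fast, slow)

await slow.put('hash1', 'value1')        // only the slow store has the value
console.log(await storage.get('hash1'))  // 'value1' — served via the fallback path
console.log(await fast.get('hash1'))     // 'value1' — back-filled into the fast store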
@@ -1,4 +1,4 @@
-import { deepStrictEqual, strictEqual } from 'assert'
+import { deepStrictEqual, strictEqual, notStrictEqual } from 'assert'
 import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import * as IPFS from 'ipfs'
@@ -249,4 +249,79 @@ describe('DocumentStore Database', function () {
       deepStrictEqual(await db.query(findFn), [])
     })
   })
+
+  describe('Iterator', () => {
+    before(async () => {
+      db = await DocumentStore({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+    })
+
+    after(async () => {
+      if (db) {
+        await db.drop()
+        await db.close()
+      }
+    })
+
+    it('has an iterator function', async () => {
+      notStrictEqual(db.iterator, undefined)
+      strictEqual(typeof db.iterator, 'function')
+    })
+
+    it('returns no documents when the database is empty', async () => {
+      const all = []
+      for await (const doc of db.iterator()) {
+        all.unshift(doc)
+      }
+      strictEqual(all.length, 0)
+    })
+
+    it('returns all documents when the database is not empty', async () => {
+      await db.put({ _id: 'doc1', something: true })
+      await db.put({ _id: 'doc2', something: true })
+      await db.put({ _id: 'doc3', something: true })
+      await db.put({ _id: 'doc4', something: true })
+      await db.put({ _id: 'doc5', something: true })
+
+      // Add one more document and then delete it to count
+      // for the fact that the amount returned should be
+      // the amount of actual documents returned and not
+      // the oplog length, and deleted documents don't
+      // count towards the returned amount.
+      await db.put({ _id: 'doc6', something: true })
+      await db.del('doc6')
+
+      const all = []
+      for await (const doc of db.iterator()) {
+        all.unshift(doc)
+      }
+      strictEqual(all.length, 5)
+    })
+
+    it('returns only the amount of documents given as a parameter', async () => {
+      const amount = 3
+      const all = []
+      for await (const doc of db.iterator({ amount })) {
+        all.unshift(doc)
+      }
+      strictEqual(all.length, amount)
+    })
+
+    it('returns only two documents if amount given as a parameter is 2', async () => {
+      const amount = 2
+      const all = []
+      for await (const doc of db.iterator({ amount })) {
+        all.unshift(doc)
+      }
+      strictEqual(all.length, amount)
+    })
+
+    it('returns only one document if amount given as a parameter is 1', async () => {
+      const amount = 1
+      const all = []
+      for await (const doc of db.iterator({ amount })) {
+        all.unshift(doc)
+      }
+      strictEqual(all.length, amount)
+    })
+  })
 })
@@ -1,9 +1,11 @@
-import { deepStrictEqual, strictEqual } from 'assert'
+import { deepStrictEqual, strictEqual, notStrictEqual } from 'assert'
+import path from 'path'
+import fs from 'fs'
 import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import * as IPFS from 'ipfs'
 import { Log, Entry, Database, KeyStore, Identities } from '../../src/index.js'
-import { KeyValuePersisted, KeyValue } from '../../src/db/index.js'
+import { KeyValuePersisted } from '../../src/db/index.js'
 import config from '../config.js'
 import testKeysPath from '../fixtures/test-keys-path.js '

@@ -43,134 +45,230 @@ describe('KeyValuePersisted Database', function () {
     await rmrf('./ipfs1')
   })

-  beforeEach(async () => {
-    db = await KeyValuePersisted({ OpLog, KeyValue, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
-  })
+  describe('Creating a KeyValuePersisted database', () => {
+    beforeEach(async () => {
+      db = await KeyValuePersisted({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+    })
+
+    afterEach(async () => {
+      if (db) {
+        await db.drop()
+        await db.close()
+      }
+    })
+
+    it('creates a keyvalue store', async () => {
+      strictEqual(db.address.toString(), databaseId)
+      strictEqual(db.type, 'keyvalue')
+    })
+
+    it('creates a directory for the persisted index', async () => {
+      const expectedPath = path.join('./orbitdb', `./${db.address}`, '/_index')
+      const directoryExists = fs.existsSync(expectedPath)
+      strictEqual(directoryExists, true)
+    })
+
+    it('returns 0 items when it\'s a fresh database', async () => {
+      const all = []
+      for await (const item of db.iterator()) {
+        all.unshift(item)
+      }
+
+      strictEqual(all.length, 0)
+    })
+  })

-  afterEach(async () => {
-    if (db) {
-      await db.drop()
-      await db.close()
-    }
-  })
+  describe('KeyValuePersisted database API', () => {
+    beforeEach(async () => {
+      db = await KeyValuePersisted({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+    })
+
+    afterEach(async () => {
+      if (db) {
+        await db.drop()
+        await db.close()
+      }
+    })
+
+    it('sets a key/value pair', async () => {
+      const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
+
+      const actual = await db.set('key1', 'value1')
+      strictEqual(actual, expected)
+    })
+
+    it('puts a key/value pair', async () => {
+      const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
+
+      const actual = await db.put('key1', 'value1')
+      strictEqual(actual, expected)
+    })
+
+    it('gets a key/value pair\'s value', async () => {
+      const key = 'key1'
+      const expected = 'value1'
+
+      await db.put(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('get key\'s updated value when using put', async () => {
+      const key = 'key1'
+      const expected = 'hello2'
+
+      await db.put(key, 'value1')
+      await db.put(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('get key\'s updated value when using set', async () => {
+      const key = 'key1'
+      const expected = 'hello2'
+
+      await db.set(key, 'value1')
+      await db.set(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('get key\'s updated value when using set then put', async () => {
+      const key = 'key1'
+      const expected = 'hello2'
+
+      await db.set(key, 'value1')
+      await db.put(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('get key\'s updated value when using put then set', async () => {
+      const key = 'key1'
+      const expected = 'hello2'
+
+      await db.put(key, 'value1')
+      await db.set(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('deletes a key/value pair', async () => {
+      const key = 'key1'
+      const expected = undefined
+
+      await db.put(key, 'value1')
+      const hash = await db.del(key)
+
+      const actual = await db.get(hash)
+      strictEqual(actual, expected)
+    })
+
+    it('deletes a non-existent key/value pair', async () => {
+      const expected = undefined
+
+      const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')
+
+      const actual = await db.get(del)
+      strictEqual(actual, expected)
+    })
+
+    it('returns all key/value pairs', async () => {
+      const keyvalue = [
+        { key: 'key1', value: 'init' },
+        { key: 'key2', value: true },
+        { key: 'key3', value: 'hello' },
+        { key: 'key4', value: 'friend' },
+        { key: 'key5', value: '12345' },
+        { key: 'key6', value: 'empty' },
+        { key: 'key7', value: 'friend33' }
+      ]
+
+      for (const { key, value } of Object.values(keyvalue)) {
+        await db.put(key, value)
+      }
+
+      const all = []
+      for await (const pair of db.iterator()) {
+        all.unshift(pair)
+      }
+
+      deepStrictEqual(all, keyvalue)
+    })
+  })

-  it('creates a keyvalue store', async () => {
-    strictEqual(db.address.toString(), databaseId)
-    strictEqual(db.type, 'keyvalue')
-  })
-
-  it('returns 0 items when it\'s a fresh database', async () => {
-    const all = []
-    for await (const item of db.iterator()) {
-      all.unshift(item)
-    }
-
-    strictEqual(all.length, 0)
-  })
-
-  it('sets a key/value pair', async () => {
-    const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
-
-    const actual = await db.set('key1', 'value1')
-    strictEqual(actual, expected)
-  })
-
-  it('puts a key/value pair', async () => {
-    const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
-
-    const actual = await db.put('key1', 'value1')
-    strictEqual(actual, expected)
-  })
-
-  it('gets a key/value pair\'s value', async () => {
-    const key = 'key1'
-    const expected = 'value1'
-
-    await db.put(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('get key\'s updated value when using put', async () => {
-    const key = 'key1'
-    const expected = 'hello2'
-
-    await db.put(key, 'value1')
-    await db.put(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('get key\'s updated value when using set', async () => {
-    const key = 'key1'
-    const expected = 'hello2'
-
-    await db.set(key, 'value1')
-    await db.set(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('get key\'s updated value when using set then put', async () => {
-    const key = 'key1'
-    const expected = 'hello2'
-
-    await db.set(key, 'value1')
-    await db.put(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('get key\'s updated value when using put then set', async () => {
-    const key = 'key1'
-    const expected = 'hello2'
-
-    await db.put(key, 'value1')
-    await db.set(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('deletes a key/value pair', async () => {
-    const key = 'key1'
-    const expected = undefined
-
-    await db.put(key, 'value1')
-    const hash = await db.del(key)
-
-    const actual = await db.get(hash)
-    strictEqual(actual, expected)
-  })
-
-  it('deletes a non-existent key/value pair', async () => {
-    const expected = undefined
-
-    const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')
-
-    const actual = await db.get(del)
-    strictEqual(actual, expected)
-  })
-
-  it('returns all key/value pairs', async () => {
-    const keyvalue = [
-      { key: 'key1', value: 'init' },
-      { key: 'key2', value: true },
-      { key: 'key3', value: 'hello' },
-      { key: 'key4', value: 'friend' },
-      { key: 'key5', value: '12345' },
-      { key: 'key6', value: 'empty' },
-      { key: 'key7', value: 'friend33' }
-    ]
-
-    for (const { key, value } of Object.values(keyvalue)) {
-      await db.put(key, value)
-    }
-
-    const all = []
-    for await (const pair of db.iterator()) {
-      all.unshift(pair)
-    }
-
-    deepStrictEqual(all, keyvalue)
-  })
+  describe('Iterator', () => {
+    before(async () => {
+      db = await KeyValuePersisted({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+    })
+
+    after(async () => {
+      if (db) {
+        await db.drop()
+        await db.close()
+      }
+    })
+
+    it('has an iterator function', async () => {
+      notStrictEqual(db.iterator, undefined)
+      strictEqual(typeof db.iterator, 'function')
+    })
+
+    it('returns no documents when the database is empty', async () => {
+      const all = []
+      for await (const { key, value } of db.iterator()) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, 0)
+    })
+
+    it('returns all documents when the database is not empty', async () => {
+      await db.put('key1', 1)
+      await db.put('key2', 2)
+      await db.put('key3', 3)
+      await db.put('key4', 4)
+      await db.put('key5', 5)
+
+      // Add one more document and then delete it to count
+      // for the fact that the amount returned should be
+      // the amount of actual documents returned and not
+      // the oplog length, and deleted documents don't
+      // count towards the returned amount.
+      await db.put('key6', 6)
+      await db.del('key6')
+
+      const all = []
+      for await (const { key, value } of db.iterator()) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, 5)
+    })
+
+    it('returns only the amount of documents given as a parameter', async () => {
+      const amount = 3
+      const all = []
+      for await (const { key, value } of db.iterator({ amount })) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, amount)
+    })
+
+    it('returns only two documents if amount given as a parameter is 2', async () => {
+      const amount = 2
+      const all = []
+      for await (const { key, value } of db.iterator({ amount })) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, amount)
+    })
+
+    it('returns only one document if amount given as a parameter is 1', async () => {
+      const amount = 1
+      const all = []
+      for await (const { key, value } of db.iterator({ amount })) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, amount)
+    })
+  })
 })
@@ -1,4 +1,4 @@
-import { deepStrictEqual, strictEqual } from 'assert'
+import { deepStrictEqual, strictEqual, notStrictEqual } from 'assert'
 import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import * as IPFS from 'ipfs'
@@ -43,134 +43,224 @@ describe('KeyValue Database', function () {
     await rmrf('./ipfs1')
   })

-  beforeEach(async () => {
-    db = await KeyValue({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
-  })
+  describe('Creating a KeyValue database', () => {
+    beforeEach(async () => {
+      db = await KeyValue({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+    })
+
+    afterEach(async () => {
+      if (db) {
+        await db.drop()
+        await db.close()
+      }
+    })
+
+    it('creates a keyvalue store', async () => {
+      strictEqual(db.address.toString(), databaseId)
+      strictEqual(db.type, 'keyvalue')
+    })
+
+    it('returns 0 items when it\'s a fresh database', async () => {
+      const all = []
+      for await (const item of db.iterator()) {
+        all.unshift(item)
+      }
+
+      strictEqual(all.length, 0)
+    })
+  })

-  afterEach(async () => {
-    if (db) {
-      await db.drop()
-      await db.close()
-    }
-  })
+  describe('KeyValue database API', () => {
+    beforeEach(async () => {
+      db = await KeyValue({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+    })
+
+    afterEach(async () => {
+      if (db) {
+        await db.drop()
+        await db.close()
+      }
+    })
+
+    it('sets a key/value pair', async () => {
+      const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
+
+      const actual = await db.set('key1', 'value1')
+      strictEqual(actual, expected)
+    })
+
+    it('puts a key/value pair', async () => {
+      const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
+
+      const actual = await db.put('key1', 'value1')
+      strictEqual(actual, expected)
+    })
+
+    it('gets a key/value pair\'s value', async () => {
+      const key = 'key1'
+      const expected = 'value1'
+
+      await db.put(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('get key\'s updated value when using put', async () => {
+      const key = 'key1'
+      const expected = 'hello2'
+
+      await db.put(key, 'value1')
+      await db.put(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('get key\'s updated value when using set', async () => {
+      const key = 'key1'
+      const expected = 'hello2'
+
+      await db.set(key, 'value1')
+      await db.set(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('get key\'s updated value when using set then put', async () => {
+      const key = 'key1'
+      const expected = 'hello2'
+
+      await db.set(key, 'value1')
+      await db.put(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('get key\'s updated value when using put then set', async () => {
+      const key = 'key1'
+      const expected = 'hello2'
+
+      await db.put(key, 'value1')
+      await db.set(key, expected)
+      const actual = await db.get(key)
+      strictEqual(actual, expected)
+    })
+
+    it('deletes a key/value pair', async () => {
+      const key = 'key1'
+      const expected = undefined
+
+      await db.put(key, 'value1')
+      const hash = await db.del(key)
+
+      const actual = await db.get(hash)
+      strictEqual(actual, expected)
+    })
+
+    it('deletes a non-existent key/value pair', async () => {
+      const expected = undefined
+
+      const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')
+
+      const actual = await db.get(del)
+      strictEqual(actual, expected)
+    })
+
+    it('returns all key/value pairs', async () => {
+      const keyvalue = [
+        { key: 'key1', value: 'init' },
+        { key: 'key2', value: true },
+        { key: 'key3', value: 'hello' },
+        { key: 'key4', value: 'friend' },
+        { key: 'key5', value: '12345' },
+        { key: 'key6', value: 'empty' },
+        { key: 'key7', value: 'friend33' }
+      ]
+
+      for (const { key, value } of Object.values(keyvalue)) {
+        await db.put(key, value)
+      }
+
+      const all = []
+      for await (const pair of db.iterator()) {
+        all.unshift(pair)
+      }
+
+      deepStrictEqual(all, keyvalue)
+    })
+  })

-  it('creates a keyvalue store', async () => {
-    strictEqual(db.address.toString(), databaseId)
-    strictEqual(db.type, 'keyvalue')
-  })
-
-  it('returns 0 items when it\'s a fresh database', async () => {
-    const all = []
-    for await (const item of db.iterator()) {
-      all.unshift(item)
-    }
-
-    strictEqual(all.length, 0)
-  })
-
-  it('sets a key/value pair', async () => {
-    const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
-
-    const actual = await db.set('key1', 'value1')
-    strictEqual(actual, expected)
-  })
-
-  it('puts a key/value pair', async () => {
-    const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
-
-    const actual = await db.put('key1', 'value1')
-    strictEqual(actual, expected)
-  })
-
-  it('gets a key/value pair\'s value', async () => {
-    const key = 'key1'
-    const expected = 'value1'
-
-    await db.put(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('get key\'s updated value when using put', async () => {
-    const key = 'key1'
-    const expected = 'hello2'
-
-    await db.put(key, 'value1')
-    await db.put(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('get key\'s updated value when using set', async () => {
-    const key = 'key1'
-    const expected = 'hello2'
-
-    await db.set(key, 'value1')
-    await db.set(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('get key\'s updated value when using set then put', async () => {
-    const key = 'key1'
-    const expected = 'hello2'
-
-    await db.set(key, 'value1')
-    await db.put(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('get key\'s updated value when using put then set', async () => {
-    const key = 'key1'
-    const expected = 'hello2'
-
-    await db.put(key, 'value1')
-    await db.set(key, expected)
-    const actual = await db.get(key)
-    strictEqual(actual, expected)
-  })
-
-  it('deletes a key/value pair', async () => {
-    const key = 'key1'
-    const expected = undefined
-
-    await db.put(key, 'value1')
-    const hash = await db.del(key)
-
-    const actual = await db.get(hash)
-    strictEqual(actual, expected)
-  })
-
-  it('deletes a non-existent key/value pair', async () => {
-    const expected = undefined
-
-    const del = await db.del('zdpuApFgnZNp6qQqeuHRLJhEKsmMnXEEJfSZofLc3ZZXEihWE')
-
-    const actual = await db.get(del)
-    strictEqual(actual, expected)
-  })
-
-  it('returns all key/value pairs', async () => {
-    const keyvalue = [
-      { key: 'key1', value: 'init' },
-      { key: 'key2', value: true },
-      { key: 'key3', value: 'hello' },
-      { key: 'key4', value: 'friend' },
-      { key: 'key5', value: '12345' },
-      { key: 'key6', value: 'empty' },
-      { key: 'key7', value: 'friend33' }
-    ]
-
-    for (const { key, value } of Object.values(keyvalue)) {
-      await db.put(key, value)
-    }
-
-    const all = []
-    for await (const pair of db.iterator()) {
-      all.unshift(pair)
-    }
-
-    deepStrictEqual(all, keyvalue)
-  })
+  describe('Iterator', () => {
+    before(async () => {
+      db = await KeyValue({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+    })
+
+    after(async () => {
+      if (db) {
+        await db.drop()
+        await db.close()
+      }
+    })
+
+    it('has an iterator function', async () => {
+      notStrictEqual(db.iterator, undefined)
+      strictEqual(typeof db.iterator, 'function')
+    })
+
+    it('returns no key/value pairs when the database is empty', async () => {
+      const all = []
+      for await (const { key, value } of db.iterator()) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, 0)
+    })
+
+    it('returns all key/value pairs when the database is not empty', async () => {
+      await db.put('key1', 1)
+      await db.put('key2', 2)
+      await db.put('key3', 3)
+      await db.put('key4', 4)
+      await db.put('key5', 5)
+
+      // Add one more document and then delete it to count
+      // for the fact that the amount returned should be
+      // the amount of actual documents returned and not
+      // the oplog length, and deleted documents don't
+      // count towards the returned amount.
+      await db.put('key6', 6)
+      await db.del('key6')
+
+      const all = []
+      for await (const { key, value } of db.iterator()) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, 5)
+    })
+
+    it('returns only the amount of key/value pairs given as a parameter', async () => {
+      const amount = 3
+      const all = []
+      for await (const { key, value } of db.iterator({ amount })) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, amount)
+    })
+
+    it('returns only two key/value pairs if amount given as a parameter is 2', async () => {
+      const amount = 2
+      const all = []
+      for await (const { key, value } of db.iterator({ amount })) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, amount)
+    })
+
+    it('returns only one key/value pairs if amount given as a parameter is 1', async () => {
+      const amount = 1
+      const all = []
+      for await (const { key, value } of db.iterator({ amount })) {
+        all.unshift({ key, value })
+      }
+      strictEqual(all.length, amount)
+    })
+  })
 })
@@ -134,7 +134,7 @@ describe('Open databases', function () {
   })

   it('creates a directory for the database oplog', async () => {
-    const expectedPath = path.join(orbitdb1.directory, `./${db.address.path}`, '/log/_heads')
+    const expectedPath = path.join(orbitdb1.directory, `./${db.address}`, '/log/_heads')
     const directoryExists = fs.existsSync(expectedPath)
     strictEqual(directoryExists, true)
   })