More refactoring

haad 2023-04-06 14:27:13 +03:00
parent eddf5b853b
commit 3030336db9
11 changed files with 67 additions and 81 deletions

View File

@@ -65,7 +65,7 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
let databases = {}
- const open = async (address, { type, meta, sync, Database, AccessController } = {}) => {
+ const open = async (address, { type, meta, sync, Database, AccessController, headsStorage, entryStorage, indexStorage, referencesCount } = {}) => {
let name, manifest, accessController
if (type && !databaseTypes[type]) {
@@ -108,7 +108,7 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
if (!Database) {
throw new Error(`Unsupported database type: '${type}'`)
}
- const db = await Database({ ipfs, identity, address: address.toString(), name, access: accessController, directory, meta, syncAutomatically: sync != null ? sync : true })
+ const db = await Database({ ipfs, identity, address: address.toString(), name, access: accessController, directory, meta, syncAutomatically: sync != null ? sync : true, headsStorage, entryStorage, indexStorage, referencesCount })
db.events.on('close', onDatabaseClosed(address.toString()))

View File

@@ -38,7 +38,7 @@ const Database = async ({ ipfs, identity, address, name, access, directory, meta
const entry = await log.append(op, { referencesCount })
await sync.add(entry)
if (onUpdate) {
- await onUpdate(entry)
+ await onUpdate(log, entry)
}
events.emit('update', entry)
return entry.hash
@@ -55,7 +55,7 @@ const Database = async ({ ipfs, identity, address, name, access, directory, meta
const updated = await log.joinEntry(entry)
if (updated) {
if (onUpdate) {
- await onUpdate(entry)
+ await onUpdate(log, entry)
}
events.emit('update', entry)
}
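The onUpdate hook now receives the oplog as well as the entry that was appended or joined, which is what lets the indexed key-value store below rebuild its index from the log. A rough sketch of a caller-supplied hook under the new signature (the hook body is illustrative, not part of this commit):

const onUpdate = async (log, entry) => {
  // entry is the oplog entry that triggered the update
  const { op, key, value } = entry.payload
  console.log('updated:', op, key, value)
  // log can be iterated to recompute derived state, e.g. an index:
  // for await (const e of log.iterator({ gt: lastSeenHash })) { ... }
}

// Passed through Database() exactly like the other options shown above
const db = await Database({ ipfs, identity, address, name, access, directory, meta, onUpdate })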

View File

@@ -2,8 +2,8 @@ import Database from '../database.js'
const DefaultOptions = { indexBy: '_id' }
- const Documents = ({ indexBy } = DefaultOptions) => async ({ ipfs, identity, address, name, access, directory, storage, meta, syncAutomatically }) => {
- const database = await Database({ ipfs, identity, address, name, access, directory, storage, meta, syncAutomatically })
+ const Documents = ({ indexBy } = DefaultOptions) => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
+ const database = await Database({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically })
const { addOperation, log } = database

View File

@@ -1,14 +1,10 @@
import Database from '../database.js'
- const Events = () => async ({ ipfs, identity, address, name, access, directory, storage, meta, syncAutomatically }) => {
- const database = await Database({ ipfs, identity, address, name, access, directory, storage, meta, syncAutomatically })
+ const Events = () => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
+ const database = await Database({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate })
const { addOperation, log } = database
- const put = async (key = null, value) => {
- return add(value)
- }
const add = async (value) => {
return addOperation({ op: 'ADD', key: null, value })
}
@@ -38,7 +34,6 @@ const Events = () => async ({ ipfs, identity, address, name, access, directory,
return {
...database,
type: 'events',
- put,
add,
get,
iterator,
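With put removed from the events store, code that previously wrote db.put(null, value) should now call add directly, as the updated test below does. A one-line migration sketch:

// Before: const hash = await db.put(null, 'event payload')
const hash = await db.add('event payload')
const value = await db.get(hash) // 'event payload'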

View File

@@ -1,18 +1,20 @@
import { KeyValue } from './index.js'
import LevelStorage from '../storage/level.js'
import pathJoin from '../utils/path-join.js'
- import PQueue from 'p-queue'
const valueEncoding = 'json'
- const KeyValueIndexed = ({ indexStorage } = {}) => async ({ ipfs, identity, address, name, access, directory, storage, meta }) => {
- directory = pathJoin(directory || './orbitdb', `./${address}/_index/`)
- const index = indexStorage || await LevelStorage({ path: directory, valueEncoding })
+ const KeyValueIndexed = ({ storage } = {}) => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
+ const indexDirectory = pathJoin(directory || './orbitdb', `./${address}/_index/`)
+ const index = storage || await LevelStorage({ path: indexDirectory, valueEncoding })
- const updateIndex = async (entry) => {
+ let latestOplogHash
+ const _updateIndex = async (log, entry) => {
const keys = {}
+ const it = await log.iterator({ gt: latestOplogHash })
- for await (const entry of log.iterator({ gt: latestOplogHash })) {
+ for await (const entry of it) {
const { op, key, value } = entry.payload
if (op === 'PUT' && !keys[key]) {
@@ -23,18 +25,17 @@ const KeyValueIndexed = ({ indexStorage } = {}) => async ({ ipfs, identity, addr
await index.del(key)
}
}
- latestOplogHash = entry.hash
+ latestOplogHash = entry ? entry.hash : null
}
- const keyValueStore = await KeyValue()({ ipfs, identity, address, name, access, directory, storage, meta, onUpdate: updateIndex })
- const { events, log } = keyValueStore
+ // Create the underlying KeyValue database
+ const keyValueStore = await KeyValue()({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate: _updateIndex })
- const queue = new PQueue({ concurrency: 1 })
- let latestOplogHash
+ // Compute the index
+ await _updateIndex(keyValueStore.log)
const get = async (key) => {
- await queue.onIdle()
const value = await index.get(key)
if (value) {
return value
@@ -43,34 +44,22 @@ const KeyValueIndexed = ({ indexStorage } = {}) => async ({ ipfs, identity, addr
}
const iterator = async function * ({ amount } = {}) {
- await queue.onIdle()
- for await (const { hash, key, value } of keyValueStore.iterator({ amount })) {
- yield { hash, key, value }
+ const it = keyValueStore.iterator({ amount })
+ for await (const { key, value, hash } of it) {
+ yield { key, value, hash }
}
}
- const task = async () => {
- await queue.add(updateIndex(index))
- }
const close = async () => {
- events.off('update', task)
- await queue.onIdle()
await index.close()
await keyValueStore.close()
}
// TOD: rename to clear()
const drop = async () => {
- events.off('update', task)
- await queue.onIdle()
await index.clear()
await keyValueStore.drop()
}
- // Listen for update events from the database and update the index on every update
- // events.on('update', task)
return {
...keyValueStore,
get,
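The option controlling the index backend is now called storage, while headsStorage, entryStorage and indexStorage are forwarded to the underlying KeyValue database. A small usage sketch based on the updated tests further down, assuming ipfs, identity and an access controller are already set up:

// In-memory index instead of the default LevelStorage under ./orbitdb/<address>/_index/
const storage = await MemoryStorage()
const db = await KeyValueIndexed({ storage })({ ipfs, identity, address, access, directory })

await db.put('key', 'value')
console.log(await db.get('key')) // 'value', read back through the custom index storage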

View File

@@ -1,7 +1,7 @@
import Database from '../database.js'
- const KeyValue = () => async ({ ipfs, identity, address, name, access, directory, storage, meta, syncAutomatically, onUpdate }) => {
- const database = await Database({ ipfs, identity, address, name, access, directory, storage, meta, syncAutomatically, onUpdate })
+ const KeyValue = () => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
+ const database = await Database({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate })
const { addOperation, log } = database
@@ -33,7 +33,7 @@ const KeyValue = () => async ({ ipfs, identity, address, name, access, directory
keys[key] = true
count++
const hash = entry.hash
- yield { hash, key, value }
+ yield { key, value, hash }
} else if (op === 'DEL' && !keys[key]) {
keys[key] = true
}

View File

@@ -59,10 +59,10 @@ describe('Events Database', function () {
strictEqual(db.type, 'events')
})
- it('puts an event', async () => {
+ it('adds an event', async () => {
const expected = 'init'
- const hash = await db.put(null, expected)
+ const hash = await db.add(expected)
const actual = await db.get(hash)
strictEqual(actual, expected)

View File

@@ -35,6 +35,10 @@ describe('KeyValueIndexed Database', function () {
await ipfs.stop()
}
+ if (db) {
+ await db.close()
+ }
if (keystore) {
await keystore.close()
}
@@ -270,14 +274,21 @@ describe('KeyValueIndexed Database', function () {
})
describe('Parameters', () => {
+ after(async () => {
+ if (db) {
+ await db.drop()
+ await db.close()
+ }
+ })
it('can use a custom indexStorage', async () => {
- const indexStorage = await MemoryStorage()
- const db = await KeyValueIndexed({ indexStorage })({ ipfs, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
+ const storage = await MemoryStorage()
+ db = await KeyValueIndexed({ storage })({ ipfs, identity: testIdentity1, address: databaseId, accessController })
await db.put('key', 'value')
let result
- for await (const [key, value] of indexStorage.iterator()) {
+ for await (const [key, value] of storage.iterator()) {
result = [key, value]
}

View File

@@ -6,8 +6,8 @@ import config from './config.js'
const type = 'custom!'
- const CustomStore = () => async ({ ipfs, identity, address, name, accessController, directory, storage, meta }) => {
- const database = await Database({ ipfs, identity, address, name, accessController, directory, storage, meta })
+ const CustomStore = () => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
+ const database = await Database({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate })
return {
...database,

View File

@@ -108,11 +108,6 @@ describe('Open databases', function () {
strictEqual(db.type, 'events')
})
- it('has a put function', async () => {
- notStrictEqual(db.put, undefined)
- strictEqual(typeof db.put, 'function')
- })
it('has a add function', async () => {
notStrictEqual(db.add, undefined)
strictEqual(typeof db.add, 'function')
@@ -381,7 +376,7 @@ describe('Open databases', function () {
address = db.address
for (let i = 0; i < amount; i++) {
- await db.put('hello' + i, 'hello' + i)
+ await db.put('key' + i, 'hello' + i)
}
await db.close()
@@ -405,7 +400,7 @@ describe('Open databases', function () {
const expected = []
for (let i = 0; i < amount; i++) {
- expected.push({ key: 'hello' + i, value: 'hello' + i })
+ expected.push({ key: 'key' + i, value: 'hello' + i })
}
const all = []
@@ -418,27 +413,27 @@ describe('Open databases', function () {
})
describe('opening an indexed keyvalue database', () => {
- let indexStorage
+ let storage
let db, address
const amount = 10
before(async () => {
orbitdb1 = await OrbitDB({ ipfs: ipfs1, id: 'user1' })
- indexStorage = await LevelStorage({ path: './index', valueEncoding: 'json' })
- db = await orbitdb1.open('helloworld', { Database: KeyValueIndexed({ indexStorage }) })
+ db = await orbitdb1.open('helloworld', { type: 'keyvalue' })
address = db.address
for (let i = 0; i < amount; i++) {
- await db.put('hello' + i, 'hello' + i)
+ await db.put('key' + i, 'hello' + i)
}
await db.close()
})
after(async () => {
+ if (storage) {
+ await storage.close()
+ }
if (db) {
await db.close()
}
@@ -450,30 +445,31 @@ describe('Open databases', function () {
})
it('returns all entries in the database and in the index', async () => {
- indexStorage = await LevelStorage({ path: './index', valueEncoding: 'json' })
- db = await orbitdb1.open(address, { Database: KeyValueIndexed({ indexStorage }) })
+ storage = await LevelStorage({ path: './index', valueEncoding: 'json' })
+ db = await orbitdb1.open(address, { Database: KeyValueIndexed({ storage }) })
strictEqual(db.address, address)
strictEqual(db.type, 'keyvalue')
strictEqual(db.name, 'helloworld')
const expected = []
for (let i = 0; i < amount; i++) {
- expected.push({ key: 'hello' + i, value: 'hello' + i })
+ expected.push({ key: 'key' + i, value: 'hello' + i })
}
+ const result = []
+ for await (const [key, value] of storage.iterator()) {
+ result.push({ key, value })
+ }
+ deepStrictEqual(result, expected)
const all = []
for await (const { key, value } of db.iterator()) {
all.unshift({ key, value })
}
deepStrictEqual(all, expected)
- const result = []
- for await (const [key, value] of indexStorage.iterator()) {
- result.push({ key, value })
- }
- deepStrictEqual(result, expected)
})
})

View File

@@ -41,23 +41,18 @@ describe('Storages', function () {
const runTestWithStorage = async (storage) => {
const amount = 100
- const log1 = await Log(testIdentity, { logId: 'A', storage })
- const log2 = await Log(testIdentity, { logId: 'A', storage })
+ const log1 = await Log(testIdentity, { logId: 'A', entryStorage: storage })
+ const log2 = await Log(testIdentity, { logId: 'A', entryStorage: storage })
for (let i = 0; i < amount; i++) {
await log1.append('hello' + i)
await log2.append('hello' + i)
}
- // await log2.join(log1)
const values = await log1.values()
const heads = await log1.heads()
strictEqual(heads.length, 1)
strictEqual(values.length, amount)
await log1.storage.clear()
await log2.storage.clear()
- // const values2 = await log2.values()
- // const heads2 = await log2.heads()
- // strictEqual(heads2.length, 0)
- // strictEqual(values2.length, 0)
await log1.storage.close()
await log2.storage.close()
}
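On the oplog side, the option is now named entryStorage, matching the option names that Database() and open() forward in the source changes above. A minimal sketch, assuming Log also accepts a headsStorage option in the same way (not shown in this diff):

const entryStorage = await MemoryStorage()
const headsStorage = await MemoryStorage() // assumption: accepted alongside entryStorage
const log = await Log(testIdentity, { logId: 'A', entryStorage, headsStorage })

await log.append('hello')
const heads = await log.heads() // heads read back through headsStorage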