mirror of https://github.com/orbitdb/orbitdb.git (synced 2025-03-30 15:08:28 +00:00)

commit a94d2a7918 (parent 428ce83878)

Renaming pass

Renames modules and identifiers to their new names: EventStore -> Events, DocumentStore -> Documents, KeyValuePersisted -> KeyValueIndexed, LamportClock -> Clock, Sorting -> ConflictResolution, and OrbitDBIdentityProvider -> PublicKeyIdentityProvider (provider type 'orbitdb' -> 'publickey'); the database type strings 'eventstore' and 'documentstore' become 'events' and 'documents'. Expected hashes in the tests change accordingly, most likely because the provider type string is part of the encoded identity and therefore feeds into entry and operation hashes.
@@ -1,5 +1,5 @@
 import Database from './database.js'
-import { EventStore, KeyValue, DocumentStore } from './db/index.js'
+import { Events, KeyValue, Documents } from './db/index.js'
 import { Log, Entry } from './oplog/index.js'
 import { ComposedStorage, IPFSBlockStorage, LevelStorage, LRUStorage } from './storage/index.js'
 import KeyStore from './key-store.js'
@@ -22,8 +22,8 @@ const hasher = sha256

 // Mapping for 'database type' -> Store
 const databaseTypes = {
-  events: EventStore,
-  documents: DocumentStore,
+  events: Events,
+  documents: Documents,
   keyvalue: KeyValue
 }
 //
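The databaseTypes mapping above ties the lowercase type strings 'events', 'documents' and 'keyvalue' to the renamed store modules. A minimal usage sketch, assuming an OrbitDB instance `orbitdb` created as in test/orbitdb.test.js further down (not a definitive API reference):

// Sketch only: `orbitdb` is assumed to exist; db.type and db.add come from the hunks in this diff.
const db = await orbitdb.open('helloworld')
console.log(db.type) // 'events' after this rename (previously 'eventstore')
await db.add('hello world')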
@@ -1,4 +1,4 @@
-const DocumentStore = async ({ OpLog, Database, ipfs, identity, address, name, access, directory, storage, meta, syncAutomatically, indexBy = '_id' }) => {
+const Documents = async ({ OpLog, Database, ipfs, identity, address, name, access, directory, storage, meta, syncAutomatically, indexBy = '_id' }) => {
   const database = await Database({ OpLog, ipfs, identity, address, name, access, directory, storage, meta, syncAutomatically })

   const { addOperation, log } = database
@@ -90,7 +90,7 @@ const DocumentStore = async ({ OpLog, Database, ipfs, identity, address, name, a

   return {
     ...database,
-    type: 'documentstore',
+    type: 'documents',
     put,
     del,
     get,
@@ -101,4 +101,4 @@ const DocumentStore = async ({ OpLog, Database, ipfs, identity, address, name, a
   }
 }

-export default DocumentStore
+export default Documents
@@ -35,7 +35,7 @@ const Events = async ({ OpLog, Database, ipfs, identity, address, name, access,

   return {
     ...database,
-    type: 'eventstore',
+    type: 'events',
     put,
     add,
     get,
@@ -1,4 +1,4 @@
-export { default as DocumentStore } from './document-store.js'
-export { default as EventStore } from './event-store.js'
-export { default as KeyValuePersisted } from './keyvalue-persisted.js'
+export { default as Documents } from './documents.js'
+export { default as Events } from './events.js'
 export { default as KeyValue } from './keyvalue.js'
+export { default as KeyValueIndexed } from './keyvalue-indexed.js'
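Callers importing from src/db/index.js switch to the new export names; a before/after sketch based solely on the exports in this hunk:

// Before this commit:
// import { EventStore, DocumentStore, KeyValuePersisted, KeyValue } from './src/db/index.js'
// After this commit:
import { Events, Documents, KeyValue, KeyValueIndexed } from './src/db/index.js'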
@@ -5,7 +5,7 @@ import PQueue from 'p-queue'

 const valueEncoding = 'json'

-const KeyValuePersisted = async ({ OpLog, Database, ipfs, identity, address, name, access, directory, storage, meta }) => {
+const KeyValueIndexed = async ({ OpLog, Database, ipfs, identity, address, name, access, directory, storage, meta }) => {
   const keyValueStore = await KeyValue({ OpLog, Database, ipfs, identity, address, name, access, directory, storage, meta })
   const { events, log } = keyValueStore

@@ -80,4 +80,4 @@ const KeyValuePersisted = async ({ OpLog, Database, ipfs, identity, address, nam
   }
 }

-export default KeyValuePersisted
+export default KeyValueIndexed
@@ -1,16 +1,16 @@
 import Identity, { isIdentity, isEqual, decodeIdentity } from './identity.js'
-import OrbitDBIdentityProvider from './providers/orbitdb.js'
+import { PublicKeyIdentityProvider } from './providers/index.js'
 // import DIDIdentityProvider from './identity-providers/did.js'
 // import EthIdentityProvider from './identity-providers/ethereum.js'
 import KeyStore, { signMessage, verifyMessage } from '../key-store.js'
 import { LRUStorage, IPFSBlockStorage, MemoryStorage, ComposedStorage } from '../storage/index.js'
 import pathJoin from '../utils/path-join.js'

-const DefaultProviderType = 'orbitdb'
+const DefaultProviderType = PublicKeyIdentityProvider.type
 const DefaultIdentityKeysPath = pathJoin('./orbitdb', 'identities')

 const supportedTypes = {
-  orbitdb: OrbitDBIdentityProvider
+  publickey: PublicKeyIdentityProvider
   // [DIDIdentityProvider.type]: DIDIdentityProvider,
   // [EthIdentityProvider.type]: EthIdentityProvider
 }
@@ -10,3 +10,5 @@ export {
   isIdentity,
   isEqual
 } from './identity.js'
+
+export { default as PublicKeyIdentityProvider } from './providers/publickey.js'
@@ -1,4 +1,4 @@
 // export { default as DIDIdentityProvider } from './did.js'
 // export { default as EthIdentityProvider } from './ethereum.js'
 export { default as IdentityProvider } from './interface.js'
-export { default as OrbitDBIdentityProvider } from './orbitdb.js'
+export { default as PublicKeyIdentityProvider } from './publickey.js'
@@ -2,14 +2,14 @@ import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
 import IdentityProvider from './interface.js'
 import { signMessage, verifyMessage } from '../../key-store.js'

-const type = 'orbitdb'
+const type = 'publickey'

-class OrbitDBIdentityProvider extends IdentityProvider {
+class PublicKeyIdentityProvider extends IdentityProvider {
   constructor ({ keystore }) {
     super()

     if (!keystore) {
-      throw new Error('OrbitDBIdentityProvider requires a keystore parameter')
+      throw new Error('PublicKeyIdentityProvider requires a keystore parameter')
     }

     this._keystore = keystore
@@ -46,4 +46,4 @@ class OrbitDBIdentityProvider extends IdentityProvider {
   }
 }

-export default OrbitDBIdentityProvider
+export default PublicKeyIdentityProvider
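A short sketch of how the renamed provider is referenced after this change, mirroring the updated identities test further down; the createIdentity call and its options are an assumption for illustration, not taken from this diff:

import { Identities, PublicKeyIdentityProvider } from './src/identities/index.js'

const type = PublicKeyIdentityProvider.type // 'publickey' (the old default was the literal string 'orbitdb')
// Hypothetical usage; `keystore` is assumed to be a KeyStore instance and option names may differ:
const identities = await Identities({ keystore })
const identity = await identities.createIdentity({ id: 'userA', type })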
@@ -1,20 +1,21 @@
-class LamportClock {
+/* Lamport Clock */
+class Clock {
   constructor (id, time) {
     this.id = id
     this.time = time || 0
   }

   tick () {
-    return new LamportClock(this.id, ++this.time)
+    return new Clock(this.id, ++this.time)
   }

   merge (clock) {
     this.time = Math.max(this.time, clock.time)
-    return new LamportClock(this.id, this.time)
+    return new Clock(this.id, this.time)
   }

   clone () {
-    return new LamportClock(this.id, this.time)
+    return new Clock(this.id, this.time)
   }

   static compare (a, b) {
@@ -29,4 +30,4 @@ class LamportClock {
   }
 }

-export default LamportClock
+export default Clock
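Since the whole of the renamed clock module is visible in this hunk, a quick sketch of how the class above behaves:

import Clock from './src/oplog/clock.js'

const a = new Clock('A') // time defaults to 0
const b = a.tick() // bumps a.time and returns a new Clock('A', 1)
const c = b.merge(new Clock('B', 5)) // keeps the id, takes the max time: Clock('A', 5)
const d = c.clone() // independent copy with the same id and time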
@@ -1,4 +1,4 @@
-import Clock from './lamport-clock.js'
+import Clock from './clock.js'

 /**
  * Sort two entries as Last-Write-Wins (LWW).
@@ -1,4 +1,4 @@
-import Clock from './lamport-clock.js'
+import Clock from './clock.js'
 import * as Block from 'multiformats/block'
 import * as dagCbor from '@ipld/dag-cbor'
 import { sha256 } from 'multiformats/hashes/sha2'
@@ -18,7 +18,7 @@ const hashStringEncoding = base58btc
  * @param {Identity} identity The identity instance
  * @param {string} logId The unique identifier for this log
  * @param {*} data Data of the entry to be added. Can be any JSON.stringifyable data
- * @param {LamportClock} [clock] The lamport clock
+ * @param {Clock} [clock] The clock
  * @param {Array<string|Entry>} [next=[]] An array of CIDs as base58btc encoded strings
  * @param {Array<string|Entry>} [refs=[]] An array of CIDs as base58btc encoded strings
  * @returns {Promise<Entry>}
@@ -40,7 +40,7 @@ const create = async (identity, id, payload, clock = null, next = [], refs = [])
     payload, // Can be any dag-cbor encodeable data
     next, // Array of strings of CIDs
     refs, // Array of strings of CIDs
-    clock, // Lamport Clock
+    clock, // Clock
     v: 2 // To tag the version of this data structure
   }

@@ -1,4 +1,4 @@
 export { default as Log, DefaultAccessController } from './log.js'
 export { default as Entry } from './entry.js'
-export { default as Clock } from './lamport-clock.js'
-export { default as Sorting } from './sorting.js'
+export { default as Clock } from './clock.js'
+export { default as ConflictResolution } from './conflict-resolution.js'
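Sorting is now exposed as ConflictResolution from the oplog index. Re-using the shapes from the updated tests below, ordering records by their clocks looks like this (a sketch of existing behaviour, not new functionality):

import { Clock, ConflictResolution } from './src/oplog/index.js'

const records = [
  { clock: new Clock('B', 2) },
  { clock: new Clock('A', 1) }
]
records.sort(ConflictResolution.LastWriteWins)
// -> [ { clock: Clock('A', 1) }, { clock: Clock('B', 2) } ] (later clock time sorts last)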
@@ -1,12 +1,12 @@
 import LRU from 'lru'
 import Entry from './entry.js'
-import Clock from './lamport-clock.js'
+import Clock from './clock.js'
 import Heads from './heads.js'
-import Sorting from './sorting.js'
+import ConflictResolution from './conflict-resolution.js'
 import MemoryStorage from '../storage/memory.js'
 import pMap from 'p-map'

-const { LastWriteWins, NoZeroes } = Sorting
+const { LastWriteWins, NoZeroes } = ConflictResolution

 const randomId = () => new Date().getTime().toString()
 const maxClockTimeReducer = (res, acc) => Math.max(res, acc.clock.time)
@@ -72,7 +72,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora

   /**
    * Returns the clock of the log.
-   * @returns {LamportClock}
+   * @returns {Clock}
    */
   const clock = async () => {
     // Find the latest clock from the heads
@@ -23,7 +23,7 @@ describe('Database - Replication', function () {
   let testIdentity1, testIdentity2
   let db1, db2

-  const databaseId = 'documentstore-AAA'
+  const databaseId = 'documents-AAA'

   const accessController = {
     canAppend: async (entry) => {
@@ -22,7 +22,7 @@ describe('Database', function () {
   let testIdentity
   let db

-  const databaseId = 'documentstore-AAA'
+  const databaseId = 'database-AAA'

   const accessController = {
     canAppend: async (entry) => {
@@ -59,7 +59,7 @@ describe('Database', function () {

   it('adds an operation', async () => {
     db = await Database({ OpLog, ipfs, identity: testIdentity, address: databaseId, accessController, directory: './orbitdb' })
-    const expected = 'zdpuAqQ9TJpMhPShuT315m2D9LUBkBPy8YX9zatjEynd2suZv'
+    const expected = 'zdpuAwhx6xVpnMPUA7Q4JrvZsyoti5wZ18iDeFwBjPAwsRNof'
     const op = { op: 'PUT', key: 1, value: 'record 1 on db 1' }
     const actual = await db.addOperation(op)

@@ -3,14 +3,14 @@ import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import * as IPFS from 'ipfs-core'
 import { Log, Entry, Database, KeyStore, Identities } from '../../src/index.js'
-import { DocumentStore } from '../../src/db/index.js'
+import { Documents } from '../../src/db/index.js'
 import config from '../config.js'
 import testKeysPath from '../fixtures/test-keys-path.js'

 const OpLog = { Log, Entry }
 const keysPath = './testkeys'

-describe('DocumentStore Database', function () {
+describe('Documents Database', function () {
   let ipfs
   let keystore
   let accessController
@@ -18,7 +18,7 @@ describe('DocumentStore Database', function () {
   let testIdentity1
   let db

-  const databaseId = 'documentstore-AAA'
+  const databaseId = 'documents-AAA'

   before(async () => {
     ipfs = await IPFS.create({ ...config.daemon1, repo: './ipfs1' })
@@ -45,7 +45,7 @@ describe('DocumentStore Database', function () {

   describe('Default index \'_id\'', () => {
     beforeEach(async () => {
-      db = await DocumentStore({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+      db = await Documents({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
     })

     afterEach(async () => {
@@ -57,7 +57,7 @@ describe('DocumentStore Database', function () {

     it('creates a document store', async () => {
       strictEqual(db.address.toString(), databaseId)
-      strictEqual(db.type, 'documentstore')
+      strictEqual(db.type, 'documents')
       strictEqual(db.indexBy, '_id')
     })

@@ -149,7 +149,7 @@ describe('DocumentStore Database', function () {

   describe('Custom index \'doc\'', () => {
     beforeEach(async () => {
-      db = await DocumentStore({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController, indexBy: 'doc' })
+      db = await Documents({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController, indexBy: 'doc' })
     })

     afterEach(async () => {
@@ -161,7 +161,7 @@ describe('DocumentStore Database', function () {

     it('creates a document store', async () => {
       strictEqual(db.address.toString(), databaseId)
-      strictEqual(db.type, 'documentstore')
+      strictEqual(db.type, 'documents')
       strictEqual(db.indexBy, 'doc')
     })

@@ -252,7 +252,7 @@ describe('DocumentStore Database', function () {

   describe('Iterator', () => {
     before(async () => {
-      db = await DocumentStore({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+      db = await Documents({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
     })

     after(async () => {
@@ -4,14 +4,14 @@ import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import * as IPFS from 'ipfs-core'
 import { Log, Entry, Database, KeyStore, Identities } from '../../src/index.js'
-import { EventStore } from '../../src/db/index.js'
+import { Events } from '../../src/db/index.js'
 import config from '../config.js'
 import testKeysPath from '../fixtures/test-keys-path.js'

 const OpLog = { Log, Entry }
 const keysPath = './testkeys'

-describe('EventStore Database', function () {
+describe('Events Database', function () {
   let ipfs
   let keystore
   let accessController
@@ -19,7 +19,7 @@ describe('EventStore Database', function () {
   let testIdentity1
   let db

-  const databaseId = 'eventstore-AAA'
+  const databaseId = 'events-AAA'

   before(async () => {
     ipfs = await IPFS.create({ ...config.daemon1, repo: './ipfs1' })
@@ -45,7 +45,7 @@ describe('EventStore Database', function () {
   })

   beforeEach(async () => {
-    db = await EventStore({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+    db = await Events({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
   })

   afterEach(async () => {
@@ -57,7 +57,7 @@ describe('EventStore Database', function () {

   it('creates an event store', async () => {
     strictEqual(db.address.toString(), databaseId)
-    strictEqual(db.type, 'eventstore')
+    strictEqual(db.type, 'events')
   })

   it('puts an event', async () => {
@@ -5,14 +5,14 @@ import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import * as IPFS from 'ipfs-core'
 import { Log, Entry, Database, KeyStore, Identities } from '../../src/index.js'
-import { KeyValuePersisted } from '../../src/db/index.js'
+import { KeyValueIndexed } from '../../src/db/index.js'
 import config from '../config.js'
 import testKeysPath from '../fixtures/test-keys-path.js'

 const OpLog = { Log, Entry }
 const keysPath = './testkeys'

-describe('KeyValuePersisted Database', function () {
+describe('KeyValueIndexed Database', function () {
   let ipfs
   let keystore
   let accessController
@@ -45,9 +45,9 @@ describe('KeyValuePersisted Database', function () {
     await rmrf('./ipfs1')
   })

-  describe('Creating a KeyValuePersisted database', () => {
+  describe('Creating a KeyValueIndexed database', () => {
     beforeEach(async () => {
-      db = await KeyValuePersisted({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+      db = await KeyValueIndexed({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
     })

     afterEach(async () => {
@@ -78,9 +78,9 @@ describe('KeyValuePersisted Database', function () {
     })
   })

-  describe('KeyValuePersisted database API', () => {
+  describe('KeyValueIndexed database API', () => {
     beforeEach(async () => {
-      db = await KeyValuePersisted({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+      db = await KeyValueIndexed({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
     })

     afterEach(async () => {
@@ -91,14 +91,14 @@ describe('KeyValuePersisted Database', function () {
     })

     it('sets a key/value pair', async () => {
-      const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
+      const expected = 'zdpuAwr2JfE9TNMoXwupvsssCzemc3g8MTKRfVTG7ZS5gH6md'

       const actual = await db.set('key1', 'value1')
       strictEqual(actual, expected)
     })

     it('puts a key/value pair', async () => {
-      const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
+      const expected = 'zdpuAwr2JfE9TNMoXwupvsssCzemc3g8MTKRfVTG7ZS5gH6md'

       const actual = await db.put('key1', 'value1')
       strictEqual(actual, expected)
@@ -173,13 +173,13 @@ describe('KeyValuePersisted Database', function () {

     it('returns all key/value pairs', async () => {
       const keyvalue = [
-        { hash: 'zdpuAm6QEA29wFnd6re7X2XWe7AmrzVbsvdHhSPXci2CqXryw', key: 'key1', value: 'init' },
-        { hash: 'zdpuAvfTQwogEAhEaAtb85ugEzxvfDVUnALoZeNbrz3s4jMYd', key: 'key2', value: true },
-        { hash: 'zdpuB2ZCXwfkbgXQDHaP13rGSLVzZdZXuFPAk988VCZyMV1Er', key: 'key3', value: 'hello' },
-        { hash: 'zdpuAnDHm5qkyzkdtEiedF2VwyuUvrgsgM7mCVrjLya3G7nFS', key: 'key4', value: 'friend' },
-        { hash: 'zdpuB2XjqLhSEEB6CxCwCLWoas77Db6T9TJDNKcyX35kwbNmb', key: 'key5', value: '12345' },
-        { hash: 'zdpuB1GyECVHxwFBxa9QYeTYRFJRLDnujyekhXAmStG26stU9', key: 'key6', value: 'empty' },
-        { hash: 'zdpuAsj9ZgSCWSuRYFkQ56Eiffpi6j6761ueHHNwNf3VaZfms', key: 'key7', value: 'friend33' }
+        { hash: 'zdpuAnpWUWQFo7E7Q4fredrBdHWHTtSzMmo8CG7HRkWCu8Pbq', key: 'key1', value: 'init' },
+        { hash: 'zdpuAwTM75uy1xbBJzHRHUeYTJR67rhHND1w6EpHVH6ThHdos', key: 'key2', value: true },
+        { hash: 'zdpuAvYtscmvsQT7sgsJVsK7Gf7S3HweRJzs2D5TWBqz8wPGq', key: 'key3', value: 'hello' },
+        { hash: 'zdpuAqAGnfa8eryZZm4z4UHcGQKZe4ACwoe1bwfq1AnJRwcPC', key: 'key4', value: 'friend' },
+        { hash: 'zdpuAxHZs93Ys31jktM28GCwzrGP2vwuotr7MrSzLacGAS3dS', key: 'key5', value: '12345' },
+        { hash: 'zdpuAuGJ6UoncMuTjkknG4ySjxvAgkdMiRNecR6nDbLoPFDXX', key: 'key6', value: 'empty' },
+        { hash: 'zdpuAyi1oGLiYbH2UmRvXdGGC7z1vQYGE8oCvrfUvR5bGx6PN', key: 'key7', value: 'friend33' }
       ]

       for (const { key, value } of Object.values(keyvalue)) {
@@ -197,7 +197,7 @@ describe('KeyValuePersisted Database', function () {

   describe('Iterator', () => {
     before(async () => {
-      db = await KeyValuePersisted({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
+      db = await KeyValueIndexed({ OpLog, Database, ipfs, identity: testIdentity1, address: databaseId, accessController })
     })

     after(async () => {
@@ -83,14 +83,14 @@ describe('KeyValue Database', function () {
   })

   it('sets a key/value pair', async () => {
-    const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
+    const expected = 'zdpuAwr2JfE9TNMoXwupvsssCzemc3g8MTKRfVTG7ZS5gH6md'

     const actual = await db.set('key1', 'value1')
     strictEqual(actual, expected)
   })

   it('puts a key/value pair', async () => {
-    const expected = 'zdpuAqEDJtUf3Kxg6qZgGv8XFqjtSyyxjF8qbz176Kcro5zwr'
+    const expected = 'zdpuAwr2JfE9TNMoXwupvsssCzemc3g8MTKRfVTG7ZS5gH6md'

     const actual = await db.put('key1', 'value1')
     strictEqual(actual, expected)
@@ -165,13 +165,13 @@ describe('KeyValue Database', function () {

   it('returns all key/value pairs', async () => {
     const keyvalue = [
-      { hash: 'zdpuAm6QEA29wFnd6re7X2XWe7AmrzVbsvdHhSPXci2CqXryw', key: 'key1', value: 'init' },
-      { hash: 'zdpuAvfTQwogEAhEaAtb85ugEzxvfDVUnALoZeNbrz3s4jMYd', key: 'key2', value: true },
-      { hash: 'zdpuB2ZCXwfkbgXQDHaP13rGSLVzZdZXuFPAk988VCZyMV1Er', key: 'key3', value: 'hello' },
-      { hash: 'zdpuAnDHm5qkyzkdtEiedF2VwyuUvrgsgM7mCVrjLya3G7nFS', key: 'key4', value: 'friend' },
-      { hash: 'zdpuB2XjqLhSEEB6CxCwCLWoas77Db6T9TJDNKcyX35kwbNmb', key: 'key5', value: '12345' },
-      { hash: 'zdpuB1GyECVHxwFBxa9QYeTYRFJRLDnujyekhXAmStG26stU9', key: 'key6', value: 'empty' },
-      { hash: 'zdpuAsj9ZgSCWSuRYFkQ56Eiffpi6j6761ueHHNwNf3VaZfms', key: 'key7', value: 'friend33' }
+      { hash: 'zdpuAnpWUWQFo7E7Q4fredrBdHWHTtSzMmo8CG7HRkWCu8Pbq', key: 'key1', value: 'init' },
+      { hash: 'zdpuAwTM75uy1xbBJzHRHUeYTJR67rhHND1w6EpHVH6ThHdos', key: 'key2', value: true },
+      { hash: 'zdpuAvYtscmvsQT7sgsJVsK7Gf7S3HweRJzs2D5TWBqz8wPGq', key: 'key3', value: 'hello' },
+      { hash: 'zdpuAqAGnfa8eryZZm4z4UHcGQKZe4ACwoe1bwfq1AnJRwcPC', key: 'key4', value: 'friend' },
+      { hash: 'zdpuAxHZs93Ys31jktM28GCwzrGP2vwuotr7MrSzLacGAS3dS', key: 'key5', value: '12345' },
+      { hash: 'zdpuAuGJ6UoncMuTjkknG4ySjxvAgkdMiRNecR6nDbLoPFDXX', key: 'key6', value: 'empty' },
+      { hash: 'zdpuAyi1oGLiYbH2UmRvXdGGC7z1vQYGE8oCvrfUvR5bGx6PN', key: 'key7', value: 'friend33' }
     ]

     for (const { key, value } of Object.values(keyvalue)) {
@@ -3,7 +3,7 @@ import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import * as IPFS from 'ipfs-core'
 import { Log, Entry, Database, KeyStore, Identities } from '../../../src/index.js'
-import { DocumentStore } from '../../../src/db/index.js'
+import { Documents } from '../../../src/db/index.js'
 import config from '../../config.js'
 import testKeysPath from '../../fixtures/test-keys-path.js'
 import connectPeers from '../../utils/connect-nodes.js'
@@ -21,7 +21,7 @@ describe('Documents Database Replication', function () {
   let testIdentity1, testIdentity2
   let db1, db2

-  const databaseId = 'documentstore-AAA'
+  const databaseId = 'documents-AAA'

   const accessController = {
     canAppend: async (entry) => {
@@ -64,8 +64,8 @@ describe('Documents Database Replication', function () {
   })

   beforeEach(async () => {
-    db1 = await DocumentStore({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
-    db2 = await DocumentStore({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })
+    db1 = await Documents({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
+    db2 = await Documents({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })
   })

   afterEach(async () => {
@@ -3,7 +3,7 @@ import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import * as IPFS from 'ipfs-core'
 import { Log, Entry, Database, KeyStore, Identities } from '../../../src/index.js'
-import { EventStore } from '../../../src/db/index.js'
+import { Events } from '../../../src/db/index.js'
 import config from '../../config.js'
 import testKeysPath from '../../fixtures/test-keys-path.js'
 import connectPeers from '../../utils/connect-nodes.js'
@@ -100,8 +100,8 @@ describe('Events Database Replication', function () {
       console.error(err)
     }

-    db1 = await EventStore({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
-    db2 = await EventStore({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })
+    db1 = await Events({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
+    db2 = await Events({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })

     db2.events.on('join', onConnected)
     db2.events.on('update', onUpdate)
@@ -131,8 +131,8 @@ describe('Events Database Replication', function () {
   })

   it('loads the database after replication', async () => {
-    db1 = await EventStore({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
-    db2 = await EventStore({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })
+    db1 = await Events({ OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
+    db2 = await Events({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })

     let replicated = false
     let expectedEntryHash = null
@@ -172,7 +172,7 @@ describe('Events Database Replication', function () {

     await db2.close()

-    db2 = await EventStore({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })
+    db2 = await Events({ OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })

     const all2 = []
     for await (const event of db2.iterator()) {
@@ -3,7 +3,7 @@ import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import * as IPFS from 'ipfs-core'
 import { Log, Entry, Database, KeyStore, Identities } from '../../../src/index.js'
-import { KeyValue, KeyValuePersisted } from '../../../src/db/index.js'
+import { KeyValue, KeyValueIndexed } from '../../../src/db/index.js'
 import config from '../../config.js'
 import testKeysPath from '../../fixtures/test-keys-path.js'
 import connectPeers from '../../utils/connect-nodes.js'
@@ -12,7 +12,7 @@ import waitFor from '../../utils/wait-for.js'
 const OpLog = { Log, Entry }
 const keysPath = './testkeys'

-describe('KeyValue-persisted Database Replication', function () {
+describe('KeyValueIndexed Database Replication', function () {
   this.timeout(30000)

   let ipfs1, ipfs2
@@ -89,8 +89,8 @@ describe('KeyValue-persisted Database Replication', function () {
       console.error(err)
     }

-    kv1 = await KeyValuePersisted({ KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
-    kv2 = await KeyValuePersisted({ KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })
+    kv1 = await KeyValueIndexed({ KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
+    kv2 = await KeyValueIndexed({ KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })

     kv2.events.on('join', onConnected)
     kv2.events.on('update', onUpdate)
@@ -156,8 +156,8 @@ describe('KeyValue-persisted Database Replication', function () {
       console.error(err)
     }

-    kv1 = await KeyValuePersisted({ KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
-    kv2 = await KeyValuePersisted({ KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })
+    kv1 = await KeyValueIndexed({ KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
+    kv2 = await KeyValueIndexed({ KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })

     kv2.events.on('join', onConnected)
     kv2.events.on('update', onUpdate)
@@ -179,8 +179,8 @@ describe('KeyValue-persisted Database Replication', function () {
     await kv1.close()
     await kv2.close()

-    kv1 = await KeyValuePersisted({ KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
-    kv2 = await KeyValuePersisted({ KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })
+    kv1 = await KeyValueIndexed({ KeyValue, OpLog, Database, ipfs: ipfs1, identity: testIdentity1, address: databaseId, accessController, directory: './orbitdb1' })
+    kv2 = await KeyValueIndexed({ KeyValue, OpLog, Database, ipfs: ipfs2, identity: testIdentity2, address: databaseId, accessController, directory: './orbitdb2' })

     const value0 = await kv2.get('init')
     deepStrictEqual(value0, true)
@@ -3,11 +3,10 @@ import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
 import KeyStore, { signMessage, verifyMessage } from '../../src/key-store.js'
-import Identities, { addIdentityProvider } from '../../src/identities/identities.js'
-import Identity from '../../src/identities/identity.js'
+import { Identities, addIdentityProvider, Identity, PublicKeyIdentityProvider } from '../../src/identities/index.js'
 import testKeysPath from '../fixtures/test-keys-path.js'

-const type = 'orbitdb'
+const type = PublicKeyIdentityProvider.type
 const keysPath = './testkeys'

 describe('Identities', function () {
@@ -1,7 +1,7 @@
-import Clock from '../../src/oplog/lamport-clock.js'
+import Clock from '../../src/oplog/clock.js'
 import { strictEqual } from 'assert'

-describe('Lamport Clock', () => {
+describe('Clock', () => {
   it('creates a new clock', () => {
     const id = 'A'
     const time = 0
@@ -1,13 +1,13 @@
 import { strictEqual, deepStrictEqual } from 'assert'
-import Clock from '../../src/oplog/lamport-clock.js'
-import Sorting from '../../src/oplog/sorting.js'
+import Clock from '../../src/oplog/clock.js'
+import ConflictResolution from '../../src/oplog/conflict-resolution.js'

-describe('Sorting', () => {
+describe('ConflictResolution', () => {
   describe('NoZeroes', () => {
     it('passed function cannot return 0', () => {
       let err
       const func = (a, b) => { return 0 }
-      const sortFn = Sorting.NoZeroes(func)
+      const sortFn = ConflictResolution.NoZeroes(func)
       const expected = 'Error: Your log\'s tiebreaker function, func, has returned zero and therefore cannot be'

       const record1 = 1
@@ -33,21 +33,21 @@ describe('Sorting', () => {
       const expected = -1
       const record1 = { clock: new Clock('A') }
       const record2 = { clock: new Clock('B') }
-      strictEqual(Sorting.SortByClockId(record1, record2, fallbackFn), expected)
+      strictEqual(ConflictResolution.SortByClockId(record1, record2, fallbackFn), expected)
     })

     it('returns 1 when first clock\'s id is greater than second clock\'s', () => {
       const expected = 1
       const record1 = { clock: new Clock('B') }
       const record2 = { clock: new Clock('A') }
-      strictEqual(Sorting.SortByClockId(record1, record2, fallbackFn), expected)
+      strictEqual(ConflictResolution.SortByClockId(record1, record2, fallbackFn), expected)
     })

     it('returns the clock when clocks have the same id', () => {
       const expected = { clock: new Clock('A') }
       const record1 = { clock: new Clock('A') }
       const record2 = { clock: new Clock('A') }
-      deepStrictEqual(Sorting.SortByClockId(record1, record2, fallbackFn), expected)
+      deepStrictEqual(ConflictResolution.SortByClockId(record1, record2, fallbackFn), expected)
     })
   })

@@ -61,21 +61,21 @@ describe('Sorting', () => {
       const expected = -1
       const record1 = { clock: new Clock('A', 1) }
       const record2 = { clock: new Clock('B', 2) }
-      strictEqual(Sorting.SortByClocks(record1, record2, fallbackFn), expected)
+      strictEqual(ConflictResolution.SortByClocks(record1, record2, fallbackFn), expected)
     })

     it('returns 1 when a\'s time is greater than b\'s', () => {
       const expected = 1
       const record1 = { clock: new Clock('A', 2) }
       const record2 = { clock: new Clock('B', 1) }
-      strictEqual(Sorting.SortByClocks(record1, record2, fallbackFn), expected)
+      strictEqual(ConflictResolution.SortByClocks(record1, record2, fallbackFn), expected)
     })

     it('returns -1 when a\'s time is equal to b\'s', () => {
       const expected = -1
       const record1 = { clock: new Clock('A', 1) }
       const record2 = { clock: new Clock('B', 1) }
-      strictEqual(Sorting.SortByClocks(record1, record2, fallbackFn), expected)
+      strictEqual(ConflictResolution.SortByClocks(record1, record2, fallbackFn), expected)
     })
   })

@@ -84,32 +84,32 @@ describe('Sorting', () => {
       const expected = -1
       const record1 = { clock: new Clock('A', 1) }
       const record2 = { clock: new Clock('B', 2) }
-      strictEqual(Sorting.LastWriteWins(record1, record2), expected)
+      strictEqual(ConflictResolution.LastWriteWins(record1, record2), expected)
     })

     it('returns 1 when a\'s time is greater than b\'s', () => {
       const expected = 1
       const record1 = { clock: new Clock('A', 2) }
       const record2 = { clock: new Clock('B', 1) }
-      strictEqual(Sorting.LastWriteWins(record1, record2), expected)
+      strictEqual(ConflictResolution.LastWriteWins(record1, record2), expected)
     })

     it('returns -1 when a\'s time is equal to b\'s', () => {
       const expected = -1
       const record1 = { clock: new Clock('A', 1) }
       const record2 = { clock: new Clock('B', 1) }
-      strictEqual(Sorting.LastWriteWins(record1, record2), expected)
+      strictEqual(ConflictResolution.LastWriteWins(record1, record2), expected)
    })

     it('returns the clock when a and b are the same', () => {
       const expected = { clock: new Clock('A') }
       const record1 = { clock: new Clock('A') }
       const record2 = { clock: new Clock('A') }
-      deepStrictEqual(Sorting.LastWriteWins(record1, record2), expected)
+      deepStrictEqual(ConflictResolution.LastWriteWins(record1, record2), expected)
     })
   })

-  describe('Sorting records', () => {
+  describe('ConflictResolution records', () => {
     it('sorts by clock time', () => {
       const expected = [
         { clock: new Clock('A', 1) },
@@ -125,7 +125,7 @@ describe('Sorting', () => {
         { clock: new Clock('B', 2) }
       ]

-      deepStrictEqual(records.sort(Sorting.LastWriteWins), expected)
+      deepStrictEqual(records.sort(ConflictResolution.LastWriteWins), expected)
     })

     it('sorts by clock time when id is the same', () => {
@@ -143,7 +143,7 @@ describe('Sorting', () => {
         { clock: new Clock('A', 2) }
       ]

-      deepStrictEqual(records.sort(Sorting.LastWriteWins), expected)
+      deepStrictEqual(records.sort(ConflictResolution.LastWriteWins), expected)
     })

     it('sorts by clock id', () => {
@@ -161,7 +161,7 @@ describe('Sorting', () => {
         { clock: new Clock('B') }
       ]

-      deepStrictEqual(records.sort(Sorting.LastWriteWins), expected)
+      deepStrictEqual(records.sort(ConflictResolution.LastWriteWins), expected)
     })

     it('sorts the same clock', () => {
@@ -179,7 +179,7 @@ describe('Sorting', () => {
         { clock: new Clock('A') }
       ]

-      deepStrictEqual(records.sort(Sorting.LastWriteWins), expected)
+      deepStrictEqual(records.sort(ConflictResolution.LastWriteWins), expected)
     })
   })
 })
@@ -30,7 +30,7 @@ describe('Entry', function () {

   describe('create', () => {
     it('creates a an empty entry', async () => {
-      const expectedHash = 'zdpuAyX6yUV5BQMGPaLEvQRa5SDxebEYvQPni6FHyPsRZ7San'
+      const expectedHash = 'zdpuAsKzwUEa8cz9pkJxxFMxLuP3cutA9PDGoLZytrg4RSVEa'
       const entry = await create(testIdentity, 'A', 'hello')
       strictEqual(entry.hash, expectedHash)
       strictEqual(entry.id, 'A')
@@ -43,7 +43,7 @@ describe('Entry', function () {
     })

     it('creates a entry with payload', async () => {
-      const expectedHash = 'zdpuAs4V7Wq9smdoHrzYQA46nFfqCF8iWaz98rZJC56bst3kx'
+      const expectedHash = 'zdpuAmthfqpHRQjdSpKN5etr1GrreJb7QcU1Hshm6pERnzsxi'
       const payload = 'hello world'
       const entry = await create(testIdentity, 'A', payload)
       strictEqual(entry.hash, expectedHash)
@@ -12,7 +12,7 @@ const dbPath2 = './orbitdb/tests/multiple-databases/2'

 const databaseInterfaces = [
   {
-    name: 'event-store',
+    name: 'events',
     open: async (orbitdb, address, options) => await orbitdb.open(address, options),
     write: async (db, index) => {
       await db.add('hello' + index)
@@ -4,7 +4,7 @@ import fs from 'fs'
 import path from 'path'
 import * as IPFS from 'ipfs-core'
 import { OrbitDB, isValidAddress } from '../src/index.js'
-import { KeyValuePersisted } from '../src/db/index.js'
+import { KeyValueIndexed } from '../src/db/index.js'
 import config from './config.js'
 import connectPeers from './utils/connect-nodes.js'
 import waitFor from './utils/wait-for.js'
@@ -105,7 +105,7 @@ describe('Open databases', function () {
   })

   it('has a type that equals the database type', async () => {
-    strictEqual(db.type, 'eventstore')
+    strictEqual(db.type, 'events')
   })

   it('has a put function', async () => {
@@ -199,7 +199,7 @@ describe('Open databases', function () {
   it('returns all entries in the database', async () => {
     db = await orbitdb1.open('helloworld')

-    strictEqual(db.type, 'eventstore')
+    strictEqual(db.type, 'events')
     strictEqual(db.name, 'helloworld')

     const expected = []
@@ -250,7 +250,7 @@ describe('Open databases', function () {
   it('returns all entries in the database', async () => {
     db = await orbitdb2.open(address)

-    strictEqual(db.type, 'eventstore')
+    strictEqual(db.type, 'events')
     strictEqual(db.name, 'helloworld2')

     const expected = []
@@ -416,12 +416,12 @@ describe('Open databases', function () {
     deepStrictEqual(all, expected)
   })

-  it('opens the database with a custom Store - KeyValuePersisted', async () => {
+  it('opens the database with a custom Store - KeyValueIndexed', async () => {
     if (db) {
       await db.close()
     }

-    db = await orbitdb1.open(address, { Store: KeyValuePersisted })
+    db = await orbitdb1.open(address, { Store: KeyValueIndexed })

     strictEqual(db.type, 'keyvalue')
     strictEqual(db.name, 'helloworld')
@@ -470,7 +470,7 @@ describe('Open databases', function () {
   it('returns all entries in the database', async () => {
     db = await orbitdb1.open(address)

-    strictEqual(db.type, 'documentstore')
+    strictEqual(db.type, 'documents')
     strictEqual(db.name, 'helloworld')

     const expected = []
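The same test file also exercises the renamed store passed explicitly through the Store option; a minimal sketch taken from that hunk, with `orbitdb1` and `address` assumed to exist as they do in the test:

import { KeyValueIndexed } from './src/db/index.js'

// Open an existing database with the indexed key-value store instead of the default:
const db = await orbitdb1.open(address, { Store: KeyValueIndexed })
console.log(db.type) // 'keyvalue'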