Fix log fetching parallelism (#60)

* Fix log traverse parallelism

* Rename pointerCount to referencesCount
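In short: the old traversal awaited one get() per referenced entry, serializing network round-trips, while the fix batches each round of next/ref hashes through p-map so they are fetched concurrently. A minimal sketch of the before/after pattern, assuming a hypothetical async get() over an in-memory store (not the actual Log internals):

import pMap from 'p-map'

// Hypothetical entry store standing in for IPFS block storage
const store = new Map([['a', { hash: 'a' }], ['b', { hash: 'b' }]])
const get = async (hash) => store.get(hash)

// Before: one awaited fetch per hash, so latencies add up entry by entry
const fetchSequential = async (hashes) => {
  const entries = []
  for (const hash of hashes) {
    entries.push(await get(hash))
  }
  return entries
}

// After: every fetch in the current round runs concurrently
const fetchParallel = async (hashes) => pMap(hashes, get)

console.log(await fetchParallel(['a', 'b'])) // [{ hash: 'a' }, { hash: 'b' }]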
Haad 2023-03-29 16:10:24 +03:00 committed by GitHub
parent 0b635666fd
commit a5c14d5377
9 changed files with 132 additions and 161 deletions

package-lock.json (generated)
View File

@@ -16,6 +16,7 @@
"logplease": "^1.2.15",
"lru": "^3.1.0",
"multiformats": "^11.0.1",
"p-map": "^5.5.0",
"p-queue": "^7.3.4",
"timeout-abort-controller": "^3.0.0",
"uint8arrays": "^4.0.3"
@@ -3445,7 +3446,6 @@
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-4.0.1.tgz",
"integrity": "sha512-0poP0T7el6Vq3rstR8Mn4V/IQrpBLO6POkUSrN7RhyY+GF/InCFShQzsQ39T25gkHhLgSLByyAz+Kjb+c2L98w==",
"dev": true,
"dependencies": {
"clean-stack": "^4.0.0",
"indent-string": "^5.0.0"
@@ -4323,7 +4323,6 @@
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-4.2.0.tgz",
"integrity": "sha512-LYv6XPxoyODi36Dp976riBtSY27VmFo+MKqEU9QCCWyTrdEPDog+RWA7xQWHi6Vbp61j5c4cdzzX1NidnwtUWg==",
"dev": true,
"dependencies": {
"escape-string-regexp": "5.0.0"
},
@@ -4338,7 +4337,6 @@
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
"integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
"dev": true,
"engines": {
"node": ">=12"
},
@@ -6881,7 +6879,6 @@
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz",
"integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==",
"dev": true,
"engines": {
"node": ">=12"
},
@@ -10926,7 +10923,6 @@
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/p-map/-/p-map-5.5.0.tgz",
"integrity": "sha512-VFqfGDHlx87K66yZrNdI4YGtD70IRyd+zSvgks6mzHPRNkoKy+9EKP4SFC77/vTTQYmRmti7dvqC+m5jBrBAcg==",
"dev": true,
"dependencies": {
"aggregate-error": "^4.0.0"
},
@@ -17330,7 +17326,6 @@
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-4.0.1.tgz",
"integrity": "sha512-0poP0T7el6Vq3rstR8Mn4V/IQrpBLO6POkUSrN7RhyY+GF/InCFShQzsQ39T25gkHhLgSLByyAz+Kjb+c2L98w==",
"dev": true,
"requires": {
"clean-stack": "^4.0.0",
"indent-string": "^5.0.0"
@@ -17946,7 +17941,6 @@
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-4.2.0.tgz",
"integrity": "sha512-LYv6XPxoyODi36Dp976riBtSY27VmFo+MKqEU9QCCWyTrdEPDog+RWA7xQWHi6Vbp61j5c4cdzzX1NidnwtUWg==",
"dev": true,
"requires": {
"escape-string-regexp": "5.0.0"
},
@@ -17954,8 +17948,7 @@
"escape-string-regexp": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
"integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
"dev": true
"integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="
}
}
},
@@ -19801,8 +19794,7 @@
"indent-string": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz",
"integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==",
"dev": true
"integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg=="
},
"inflight": {
"version": "1.0.6",
@@ -22669,7 +22661,6 @@
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/p-map/-/p-map-5.5.0.tgz",
"integrity": "sha512-VFqfGDHlx87K66yZrNdI4YGtD70IRyd+zSvgks6mzHPRNkoKy+9EKP4SFC77/vTTQYmRmti7dvqC+m5jBrBAcg==",
"dev": true,
"requires": {
"aggregate-error": "^4.0.0"
}

View File

@@ -25,6 +25,7 @@
"logplease": "^1.2.15",
"lru": "^3.1.0",
"multiformats": "^11.0.1",
"p-map": "^5.5.0",
"p-queue": "^7.3.4",
"timeout-abort-controller": "^3.0.0",
"uint8arrays": "^4.0.3"

View File

@@ -4,15 +4,15 @@ import Path from 'path'
import Sync from './sync.js'
import { ComposedStorage, LRUStorage, IPFSBlockStorage, LevelStorage } from './storage/index.js'
const defaultPointerCount = 0
const defaultReferencesCount = 16
const defaultCacheSize = 1000
const Database = async ({ OpLog, ipfs, identity, address, name, accessController, directory, meta, headsStorage, entryStorage, indexStorage, pointerCount, syncAutomatically }) => {
const Database = async ({ OpLog, ipfs, identity, address, name, accessController, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically }) => {
const { Log, Entry } = OpLog
directory = Path.join(directory || './orbitdb', `./${address}/`)
meta = meta || {}
pointerCount = pointerCount || defaultPointerCount
referencesCount = referencesCount || defaultReferencesCount
entryStorage = entryStorage || await ComposedStorage(
await LRUStorage({ size: defaultCacheSize }),
@@ -36,7 +36,7 @@ const Database = async ({ OpLog, ipfs, identity, address, name, accessController
const addOperation = async (op) => {
const task = async () => {
const entry = await log.append(op, { pointerCount })
const entry = await log.append(op, { referencesCount })
await sync.add(entry)
events.emit('update', entry)
return entry.hash
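One detail in the defaulting above: referencesCount = referencesCount || defaultReferencesCount treats an explicit 0 as unset and falls back to 16. A quick standalone illustration of the difference (hypothetical values, not part of the diff):

const defaultReferencesCount = 16
const fromOptions = 0 // caller explicitly asks for no references
console.log(fromOptions || defaultReferencesCount) // 16: || replaces any falsy value
console.log(fromOptions ?? defaultReferencesCount) // 0: ?? only replaces null/undefined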

View File

@@ -4,6 +4,7 @@ import Clock from './lamport-clock.js'
import Heads from './heads.js'
import Sorting from './sorting.js'
import MemoryStorage from '../storage/memory.js'
import pMap from 'p-map'
const { LastWriteWins, NoZeroes } = Sorting
@@ -124,16 +125,21 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora
* @param {data} data Payload to add to the entry
* @return {Promise<Entry>} Entry that was appended
*/
const append = async (data, options = { pointerCount: 0 }) => {
const append = async (data, options = { referencesCount: 0 }) => {
// 1. Prepare entry
// 2. Authorize entry
// 3. Store entry
// 4. return Entry
// Get references (entry at every pow2 of distance)
const refs = await getReferences(options.pointerCount)
// Get current heads of the log
const heads_ = await heads()
// Get references (we skip the heads which are covered by the next field)
let refs = []
for await (const { hash } of iterator({ amount: options.referencesCount + heads_.length })) {
refs.push(hash)
}
refs = refs.slice(heads_.length, options.referencesCount + heads_.length)
// Create the next pointers from heads
const nexts = (await heads()).map(entry => entry.hash)
const nexts = heads_.map(entry => entry.hash)
// Create the entry
const entry = await Entry.create(
identity,
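The reference selection above reads the referencesCount + heads.length newest entries from the iterator and then drops the heads, since those are already linked via the next field. A minimal sketch of that slicing over plain arrays (hypothetical hashes, not the real iterator):

// Hypothetical hashes, newest first, as the log iterator would yield them
const hashes = ['e5', 'e4', 'e3', 'e2', 'e1']
const heads = ['e5'] // current heads, already referenced via next
const referencesCount = 2

// Take heads.length + referencesCount newest hashes, then drop the heads
const refs = hashes
  .slice(0, heads.length + referencesCount)
  .slice(heads.length)

console.log(refs) // ['e4', 'e3']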
@@ -253,14 +259,20 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora
let stack = rootEntries.sort(sortFn)
// Keep a record of all the hashes of entries we've traversed and yielded
const traversed = {}
// Keep a record of all the hashes we are fetching or have already fetched
let toFetch = []
const fetched = {}
// A function to check if we've seen a hash
const notIndexed = (hash) => !(traversed[hash] || fetched[hash])
// Current entry during traversal
let entry
// Start traversal and process stack until it's empty (traversed the full log)
while (stack.length > 0) {
stack = stack.sort(sortFn)
// Get the next entry from the stack
entry = stack.pop()
if (entry) {
const hash = entry.hash
const { hash, next, refs } = entry
// If we have an entry that we haven't traversed yet, process it
if (!traversed[hash]) {
// Yield the current entry
@@ -270,21 +282,28 @@
if (done === true) {
break
}
// Add to the hashes we've traversed
// Add to the hash indices
traversed[hash] = true
// Add hashes of next entries to the stack from entry's
// causal connection (next) and references to history (refs)
for (const nextHash of [...entry.next, ...entry.refs]) {
// Check if we've already traversed this entry
if (!traversed[nextHash]) {
// Fetch the next entry
const next = await get(nextHash)
if (next) {
// Add the next entry in front of the stack and sort
stack = [next, ...stack].sort(sortFn)
}
fetched[hash] = true
// Add the next and refs hashes to the list of hashes to fetch next,
// filter out traversed and fetched hashes
toFetch = [...toFetch, ...next, ...refs].filter(notIndexed)
// Function to fetch an entry, making sure it's not a duplicate (check the hash indices)
const fetchEntries = async (hash) => {
if (!traversed[hash] && !fetched[hash]) {
fetched[hash] = true
return get(hash)
}
}
// Fetch the next/reference entries
const nexts = await pMap(toFetch, fetchEntries)
// Add the next and refs fields from the fetched entries to the next round
toFetch = nexts
.filter(e => e != null)
.reduce((res, acc) => [...res, ...acc.next, ...acc.refs], [])
.filter(notIndexed)
// Add the fetched entries to the stack to be processed
stack = [...nexts, ...stack]
}
}
}
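Put together, each traversal round gathers every unseen next/refs hash into toFetch, fetches the whole batch concurrently, and seeds the following round from what the fetched entries point to. A runnable sketch of one round with the dedup indices, assuming a hypothetical three-entry log where c points at b and a:

import pMap from 'p-map'

// Hypothetical log: c -> b (next) and c -> a (ref); b -> a (next)
const store = new Map([
  ['a', { hash: 'a', next: [], refs: [] }],
  ['b', { hash: 'b', next: ['a'], refs: [] }],
  ['c', { hash: 'c', next: ['b'], refs: ['a'] }]
])
const get = async (hash) => store.get(hash)

const traversed = { c: true } // c has already been yielded
const fetched = { c: true }
const notIndexed = (hash) => !(traversed[hash] || fetched[hash])

// Round 1: everything c points to that we haven't seen yet
let toFetch = [...store.get('c').next, ...store.get('c').refs].filter(notIndexed)
const fetchEntries = async (hash) => {
  if (!traversed[hash] && !fetched[hash]) {
    fetched[hash] = true
    return get(hash)
  }
}
const nexts = await pMap(toFetch, fetchEntries)

// Seed round 2; 'a' is filtered out because it was fetched this round
toFetch = nexts
  .filter(e => e != null)
  .reduce((res, e) => [...res, ...e.next, ...e.refs], [])
  .filter(notIndexed)

console.log(nexts.map(e => e.hash), toFetch) // [ 'b', 'a' ] []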
@@ -405,31 +424,6 @@
await _entries.close()
}
/**
* TODO
* Get references at every pow2 distance
* If pointer count is 4, returns 2 references
* If pointer count is 8, returns 3 references
* If pointer count is 512, returns 9 references
* If pointer count is 2048, returns 11 references
*/
const getReferences = async (pointerCount = 1) => {
let nextPointerDistance = 2
let distance = 0
const refs = []
const shouldStopFn = () => distance >= pointerCount
for await (const entry of traverse(null, shouldStopFn)) {
distance++
if (distance === nextPointerDistance) {
if (entry.hash) {
refs.push(entry.hash)
}
nextPointerDistance *= 2
}
}
return refs
}
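For scale, the removed scheme produced one pointer per power-of-two distance, so a pointer count of N yields floor(log2(N)) references once the log is long enough; a quick check of the counts listed in the comment (hypothetical helper, assumes N >= 2):

const pow2Refs = (pointerCount) => Math.floor(Math.log2(pointerCount))
console.log(pow2Refs(4), pow2Refs(8), pow2Refs(512), pow2Refs(2048)) // 2 3 9 11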
/**
* Check if an object is a Log.
* @param {Log} obj

View File

@@ -175,11 +175,11 @@ describe('KeyValuePersisted Database', function () {
const keyvalue = [
{ hash: 'zdpuAm6QEA29wFnd6re7X2XWe7AmrzVbsvdHhSPXci2CqXryw', key: 'key1', value: 'init' },
{ hash: 'zdpuAvfTQwogEAhEaAtb85ugEzxvfDVUnALoZeNbrz3s4jMYd', key: 'key2', value: true },
{ hash: 'zdpuB2CBCwvPBdHjZRKfFtL5JuDo9sc5QinKhbtYu1YkCLq22', key: 'key3', value: 'hello' },
{ hash: 'zdpuAyWWtvFfxKWMcV3NJ7XXbjiQC6MkA8h6TrhFA2ihLrt82', key: 'key4', value: 'friend' },
{ hash: 'zdpuB2Z5coKXGMAZtb7X8UQYgo6vWAP4VshBvE4xwBCrR5Laa', key: 'key5', value: '12345' },
{ hash: 'zdpuAnn2kuStzcTCJ5ULMxCvB7RtgAScJPmg3YAVYju4oPEtC', key: 'key6', value: 'empty' },
{ hash: 'zdpuAv1jSFz4GHRieAXGvRGnVWdEdxDp2HefREoTJJWYC8Zqw', key: 'key7', value: 'friend33' }
{ hash: 'zdpuB2ZCXwfkbgXQDHaP13rGSLVzZdZXuFPAk988VCZyMV1Er', key: 'key3', value: 'hello' },
{ hash: 'zdpuAnDHm5qkyzkdtEiedF2VwyuUvrgsgM7mCVrjLya3G7nFS', key: 'key4', value: 'friend' },
{ hash: 'zdpuB2XjqLhSEEB6CxCwCLWoas77Db6T9TJDNKcyX35kwbNmb', key: 'key5', value: '12345' },
{ hash: 'zdpuB1GyECVHxwFBxa9QYeTYRFJRLDnujyekhXAmStG26stU9', key: 'key6', value: 'empty' },
{ hash: 'zdpuAsj9ZgSCWSuRYFkQ56Eiffpi6j6761ueHHNwNf3VaZfms', key: 'key7', value: 'friend33' }
]
for (const { key, value } of Object.values(keyvalue)) {

View File

@@ -167,11 +167,11 @@ describe('KeyValue Database', function () {
const keyvalue = [
{ hash: 'zdpuAm6QEA29wFnd6re7X2XWe7AmrzVbsvdHhSPXci2CqXryw', key: 'key1', value: 'init' },
{ hash: 'zdpuAvfTQwogEAhEaAtb85ugEzxvfDVUnALoZeNbrz3s4jMYd', key: 'key2', value: true },
{ hash: 'zdpuB2CBCwvPBdHjZRKfFtL5JuDo9sc5QinKhbtYu1YkCLq22', key: 'key3', value: 'hello' },
{ hash: 'zdpuAyWWtvFfxKWMcV3NJ7XXbjiQC6MkA8h6TrhFA2ihLrt82', key: 'key4', value: 'friend' },
{ hash: 'zdpuB2Z5coKXGMAZtb7X8UQYgo6vWAP4VshBvE4xwBCrR5Laa', key: 'key5', value: '12345' },
{ hash: 'zdpuAnn2kuStzcTCJ5ULMxCvB7RtgAScJPmg3YAVYju4oPEtC', key: 'key6', value: 'empty' },
{ hash: 'zdpuAv1jSFz4GHRieAXGvRGnVWdEdxDp2HefREoTJJWYC8Zqw', key: 'key7', value: 'friend33' }
{ hash: 'zdpuB2ZCXwfkbgXQDHaP13rGSLVzZdZXuFPAk988VCZyMV1Er', key: 'key3', value: 'hello' },
{ hash: 'zdpuAnDHm5qkyzkdtEiedF2VwyuUvrgsgM7mCVrjLya3G7nFS', key: 'key4', value: 'friend' },
{ hash: 'zdpuB2XjqLhSEEB6CxCwCLWoas77Db6T9TJDNKcyX35kwbNmb', key: 'key5', value: '12345' },
{ hash: 'zdpuB1GyECVHxwFBxa9QYeTYRFJRLDnujyekhXAmStG26stU9', key: 'key6', value: 'empty' },
{ hash: 'zdpuAsj9ZgSCWSuRYFkQ56Eiffpi6j6761ueHHNwNf3VaZfms', key: 'key7', value: 'friend33' }
]
for (const { key, value } of Object.values(keyvalue)) {

View File

@@ -72,7 +72,7 @@ describe('Log - Append', function () {
describe('append 100 items to a log', async () => {
const amount = 100
const nextPointerAmount = 64
const referencesCount = 64
let log
let values = []
@@ -81,7 +81,7 @@
before(async () => {
log = await Log(testIdentity, { logId: 'A' })
for (let i = 0; i < amount; i++) {
await log.append('hello' + i, { pointerCount: nextPointerAmount })
await log.append('hello' + i, { referencesCount })
}
values = await log.values()
heads = await log.heads()
@@ -110,8 +110,12 @@
})
it('added the correct amount of refs pointers', async () => {
values.forEach((entry, index) => {
strictEqual(entry.refs.length, index > 0 ? Math.floor(Math.log2(Math.min(nextPointerAmount, index))) : 0)
values.reverse().forEach((entry, index) => {
index = values.length - index - 1
const expectedRefCount = index < referencesCount
? Math.max(0, index - 1)
: Math.max(0, Math.min(referencesCount, index - 1))
strictEqual(entry.refs.length, expectedRefCount)
})
})
})
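The expected count follows from the new append logic: entry i can reference at most i - 1 earlier entries (the newest one is already its next pointer), capped at referencesCount. A quick standalone check of the formula used above:

const expectedRefCount = (index, referencesCount) =>
  Math.max(0, Math.min(referencesCount, index - 1))

console.log(expectedRefCount(0, 64)) // 0: the first entry has nothing to reference
console.log(expectedRefCount(1, 64)) // 0: entry 0 is already covered by next
console.log(expectedRefCount(5, 64)) // 4
console.log(expectedRefCount(100, 64)) // 64: capped at referencesCount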

View File

@@ -2,19 +2,12 @@ import { strictEqual } from 'assert'
import rmrf from 'rimraf'
import { copy } from 'fs-extra'
import { Log } from '../../src/oplog/index.js'
import { Identities, KeyStore, MemoryStorage } from '../../src/index.js'
import { Identities, KeyStore } from '../../src/index.js'
import testKeysPath from '../fixtures/test-keys-path.js'
const keysPath = './testkeys'
const isBrowser = () => typeof window !== 'undefined'
describe('Log - References', function () {
if (isBrowser()) {
// Skip these tests when running in the browser since they take a long time
return
}
this.timeout(60000)
let keystore
@@ -36,132 +29,118 @@ describe('Log - References', function () {
})
describe('References', async () => {
const amount = 64
it('creates entries with 2 references', async () => {
const maxReferenceDistance = 2
it('creates entries with 1 reference', async () => {
const amount = 32
const referencesCount = 1
const log1 = await Log(testIdentity, { logId: 'A' })
for (let i = 0; i < amount; i++) {
await log1.append(i.toString(), { pointerCount: maxReferenceDistance })
await log1.append(i.toString(), { referencesCount })
}
const values1 = await log1.values()
strictEqual(values1[values1.length - 1].refs.length, 1)
strictEqual(values1[values1.length - 1].refs.length, referencesCount)
})
it('creates entries with 2 references', async () => {
const amount = 32
const referencesCount = 2
const log1 = await Log(testIdentity, { logId: 'A' })
for (let i = 0; i < amount; i++) {
await log1.append(i.toString(), { referencesCount })
}
const values1 = await log1.values()
strictEqual(values1[values1.length - 1].refs.length, referencesCount)
})
it('creates entries with 4 references', async () => {
const maxReferenceDistance = 2
const amount = 32
const referencesCount = 4
const log2 = await Log(testIdentity, { logId: 'B' })
for (let i = 0; i < amount * 2; i++) {
await log2.append(i.toString(), { pointerCount: Math.pow(maxReferenceDistance, 2) })
for (let i = 0; i < amount; i++) {
await log2.append(i.toString(), { referencesCount })
}
const values2 = await log2.values()
strictEqual(values2[values2.length - 1].refs.length, 2)
strictEqual(values2[values2.length - 1].refs.length, referencesCount)
})
it('creates entries with 8 references', async () => {
const maxReferenceDistance = 2
const amount = 64
const referencesCount = 8
const log3 = await Log(testIdentity, { logId: 'C' })
for (let i = 0; i < amount * 3; i++) {
await log3.append(i.toString(), { pointerCount: Math.pow(maxReferenceDistance, 3) })
for (let i = 0; i < amount; i++) {
await log3.append(i.toString(), { referencesCount })
}
const values3 = await log3.values()
strictEqual(values3[values3.length - 1].refs.length, 3)
strictEqual(values3[values3.length - 1].refs.length, referencesCount)
})
it('creates entries with 16 references', async () => {
const maxReferenceDistance = 2
const amount = 64
const referencesCount = 16
const log4 = await Log(testIdentity, { logId: 'D' })
for (let i = 0; i < amount * 4; i++) {
await log4.append(i.toString(), { pointerCount: Math.pow(maxReferenceDistance, 4) })
for (let i = 0; i < amount; i++) {
await log4.append(i.toString(), { referencesCount })
}
const values4 = await log4.values()
strictEqual(values4[values4.length - 1].refs.length, 4)
strictEqual(values4[values4.length - 1].refs.length, referencesCount)
})
const inputs = [
{ amount: 1, referenceCount: 1, refLength: 0 },
{ amount: 1, referenceCount: 2, refLength: 0 },
{ amount: 2, referenceCount: 1, refLength: 1 },
{ amount: 2, referenceCount: 2, refLength: 1 },
{ amount: 3, referenceCount: 2, refLength: 1 },
{ amount: 3, referenceCount: 4, refLength: 1 },
{ amount: 4, referenceCount: 4, refLength: 2 },
{ amount: 4, referenceCount: 4, refLength: 2 },
{ amount: 32, referenceCount: 4, refLength: 2 },
{ amount: 32, referenceCount: 8, refLength: 3 },
{ amount: 32, referenceCount: 16, refLength: 4 },
{ amount: 18, referenceCount: 32, refLength: 5 },
{ amount: 128, referenceCount: 32, refLength: 5 },
{ amount: 63, referenceCount: 64, refLength: 5 },
{ amount: 64, referenceCount: 64, refLength: 6 },
{ amount: 65, referenceCount: 64, refLength: 6 },
{ amount: 91, referenceCount: 64, refLength: 6 },
{ amount: 128, referenceCount: 64, refLength: 6 },
{ amount: 128, referenceCount: 1, refLength: 0 },
{ amount: 128, referenceCount: 2, refLength: 1 },
{ amount: 256, referenceCount: 1, refLength: 0 },
{ amount: 256, referenceCount: 4, refLength: 2 },
{ amount: 256, referenceCount: 8, refLength: 3 },
{ amount: 256, referenceCount: 16, refLength: 4 },
{ amount: 256, referenceCount: 32, refLength: 5 },
{ amount: 1024, referenceCount: 2, refLength: 1 }
]
it('creates entries with 32 references', async () => {
const amount = 64
const referencesCount = 32
const log4 = await Log(testIdentity, { logId: 'D' })
inputs.forEach(input => {
it(`has ${input.refLength} references, max distance ${input.referenceCount}, total of ${input.amount} entries`, async () => {
const test = async (amount, referenceCount, refLength) => {
const storage = await MemoryStorage()
const log1 = await Log(testIdentity, { logId: 'A', storage })
for (let i = 0; i < amount; i++) {
await log1.append((i + 1).toString(), { pointerCount: referenceCount })
}
for (let i = 0; i < amount; i++) {
await log4.append(i.toString(), { referencesCount })
}
const values = await log1.values()
const values4 = await log4.values()
strictEqual(values.length, input.amount)
strictEqual(values[values.length - 1].clock.time, input.amount)
strictEqual(values4[values4.length - 1].refs.length, referencesCount)
})
for (let k = 0; k < input.amount; k++) {
const idx = values.length - k - 1
strictEqual(values[idx].clock.time, idx + 1)
it('creates entries with 64 references', async () => {
const amount = 128
const referencesCount = 64
const log4 = await Log(testIdentity, { logId: 'D' })
// Check the first ref (distance 2)
if (values[idx].refs.length > 0) { strictEqual(values[idx].refs[0], values[idx - 2].hash) }
for (let i = 0; i < amount; i++) {
await log4.append(i.toString(), { referencesCount })
}
// Check the second ref (distance 4)
if (values[idx].refs.length > 1 && idx > referenceCount) { strictEqual(values[idx].refs[1], values[idx - 4].hash) }
const values4 = await log4.values()
// Check the third ref (distance 8)
if (values[idx].refs.length > 2 && idx > referenceCount) { strictEqual(values[idx].refs[2], values[idx - 8].hash) }
strictEqual(values4[values4.length - 1].refs.length, referencesCount)
})
// Check the fourth ref (distance 16)
if (values[idx].refs.length > 3 && idx > referenceCount) { strictEqual(values[idx].refs[3], values[idx - 16].hash) }
it('creates entries with 128 references', async () => {
// +2 because first ref is always skipped (covered by next field) and
// we need 129 entries to have 128 back references
const amount = 128 + 2
const referencesCount = 128
const log4 = await Log(testIdentity, { logId: 'D' })
// Check the fifth ref (distance 32)
if (values[idx].refs.length > 4 && idx > referenceCount) { strictEqual(values[idx].refs[4], values[idx - 32].hash) }
for (let i = 0; i < amount; i++) {
await log4.append(i.toString(), { referencesCount })
}
// Check the fifth ref (distance 64)
if (values[idx].refs.length > 5 && idx > referenceCount) { strictEqual(values[idx].refs[5], values[idx - 64].hash) }
const values4 = await log4.values()
// Check the reference of each entry
if (idx > referenceCount) { strictEqual(values[idx].refs.length, refLength) }
}
}
await test(input.amount, input.referenceCount, input.refLength)
})
strictEqual(values4[values4.length - 1].refs.length, referencesCount)
})
})
})

View File

@@ -44,9 +44,11 @@ describe('Replicating databases', function () {
before(async () => {
db1 = await orbitdb1.open('helloworld')
console.time('write')
for (let i = 0; i < expected.length; i++) {
await db1.add(expected[i])
}
console.timeEnd('write')
})
after(async () => {
@@ -59,7 +61,7 @@
let replicated = false
const onJoin = async (peerId) => {
const onJoin = async (peerId, heads) => {
const head = (await db2.log.heads())[0]
if (head && head.clock.time === amount) {
replicated = true