Fix log fetching parallelism (#60)

* Fix log traverse parallelism

* Rename pointerCount to referencesCount
Author: Haad, 2023-03-29 16:10:24 +03:00 (committed by GitHub)
parent 0b635666fd
commit a5c14d5377
9 changed files with 132 additions and 161 deletions
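
Note: the user-visible change is the renamed append option. A minimal before/after sketch (assuming a log created with this module's Log factory):

// Before this commit: the option was named after the old pow2 "pointer" scheme.
const e1 = await log.append('hello', { pointerCount: 16 })

// After this commit: the option says how many history references each entry
// carries; the heads themselves are carried by the entry's next field.
const e2 = await log.append('hello', { referencesCount: 16 })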

package-lock.json (generated)

@@ -16,6 +16,7 @@
         "logplease": "^1.2.15",
         "lru": "^3.1.0",
         "multiformats": "^11.0.1",
+        "p-map": "^5.5.0",
         "p-queue": "^7.3.4",
         "timeout-abort-controller": "^3.0.0",
         "uint8arrays": "^4.0.3"
@@ -3445,7 +3446,6 @@
       "version": "4.0.1",
       "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-4.0.1.tgz",
       "integrity": "sha512-0poP0T7el6Vq3rstR8Mn4V/IQrpBLO6POkUSrN7RhyY+GF/InCFShQzsQ39T25gkHhLgSLByyAz+Kjb+c2L98w==",
-      "dev": true,
       "dependencies": {
         "clean-stack": "^4.0.0",
         "indent-string": "^5.0.0"
@@ -4323,7 +4323,6 @@
       "version": "4.2.0",
       "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-4.2.0.tgz",
       "integrity": "sha512-LYv6XPxoyODi36Dp976riBtSY27VmFo+MKqEU9QCCWyTrdEPDog+RWA7xQWHi6Vbp61j5c4cdzzX1NidnwtUWg==",
-      "dev": true,
       "dependencies": {
         "escape-string-regexp": "5.0.0"
       },
@@ -4338,7 +4337,6 @@
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
       "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
-      "dev": true,
       "engines": {
         "node": ">=12"
       },
@@ -6881,7 +6879,6 @@
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz",
       "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==",
-      "dev": true,
       "engines": {
         "node": ">=12"
       },
@@ -10926,7 +10923,6 @@
       "version": "5.5.0",
       "resolved": "https://registry.npmjs.org/p-map/-/p-map-5.5.0.tgz",
       "integrity": "sha512-VFqfGDHlx87K66yZrNdI4YGtD70IRyd+zSvgks6mzHPRNkoKy+9EKP4SFC77/vTTQYmRmti7dvqC+m5jBrBAcg==",
-      "dev": true,
       "dependencies": {
         "aggregate-error": "^4.0.0"
       },
@@ -17330,7 +17326,6 @@
       "version": "4.0.1",
       "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-4.0.1.tgz",
       "integrity": "sha512-0poP0T7el6Vq3rstR8Mn4V/IQrpBLO6POkUSrN7RhyY+GF/InCFShQzsQ39T25gkHhLgSLByyAz+Kjb+c2L98w==",
-      "dev": true,
       "requires": {
         "clean-stack": "^4.0.0",
         "indent-string": "^5.0.0"
@@ -17946,7 +17941,6 @@
       "version": "4.2.0",
       "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-4.2.0.tgz",
       "integrity": "sha512-LYv6XPxoyODi36Dp976riBtSY27VmFo+MKqEU9QCCWyTrdEPDog+RWA7xQWHi6Vbp61j5c4cdzzX1NidnwtUWg==",
-      "dev": true,
       "requires": {
         "escape-string-regexp": "5.0.0"
       },
@@ -17954,8 +17948,7 @@
         "escape-string-regexp": {
           "version": "5.0.0",
           "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
-          "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
-          "dev": true
+          "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="
         }
       }
     },
@@ -19801,8 +19794,7 @@
     "indent-string": {
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz",
-      "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==",
-      "dev": true
+      "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg=="
     },
     "inflight": {
       "version": "1.0.6",
@@ -22669,7 +22661,6 @@
       "version": "5.5.0",
       "resolved": "https://registry.npmjs.org/p-map/-/p-map-5.5.0.tgz",
       "integrity": "sha512-VFqfGDHlx87K66yZrNdI4YGtD70IRyd+zSvgks6mzHPRNkoKy+9EKP4SFC77/vTTQYmRmti7dvqC+m5jBrBAcg==",
-      "dev": true,
       "requires": {
         "aggregate-error": "^4.0.0"
       }

package.json

@@ -25,6 +25,7 @@
     "logplease": "^1.2.15",
     "lru": "^3.1.0",
     "multiformats": "^11.0.1",
+    "p-map": "^5.5.0",
    "p-queue": "^7.3.4",
    "timeout-abort-controller": "^3.0.0",
    "uint8arrays": "^4.0.3"

src/database.js

@@ -4,15 +4,15 @@ import Path from 'path'
 import Sync from './sync.js'
 import { ComposedStorage, LRUStorage, IPFSBlockStorage, LevelStorage } from './storage/index.js'

-const defaultPointerCount = 0
+const defaultReferencesCount = 16
 const defaultCacheSize = 1000

-const Database = async ({ OpLog, ipfs, identity, address, name, accessController, directory, meta, headsStorage, entryStorage, indexStorage, pointerCount, syncAutomatically }) => {
+const Database = async ({ OpLog, ipfs, identity, address, name, accessController, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically }) => {
   const { Log, Entry } = OpLog
   directory = Path.join(directory || './orbitdb', `./${address}/`)
   meta = meta || {}
-  pointerCount = pointerCount || defaultPointerCount
+  referencesCount = referencesCount || defaultReferencesCount

   entryStorage = entryStorage || await ComposedStorage(
     await LRUStorage({ size: defaultCacheSize }),
@@ -36,7 +36,7 @@ const Database = async ({ OpLog, ipfs, identity, address, name, accessController
   const addOperation = async (op) => {
     const task = async () => {
-      const entry = await log.append(op, { pointerCount })
+      const entry = await log.append(op, { referencesCount })
       await sync.add(entry)
       events.emit('update', entry)
       return entry.hash
src/oplog/log.js

@@ -4,6 +4,7 @@ import Clock from './lamport-clock.js'
 import Heads from './heads.js'
 import Sorting from './sorting.js'
 import MemoryStorage from '../storage/memory.js'
+import pMap from 'p-map'

 const { LastWriteWins, NoZeroes } = Sorting
@@ -124,16 +125,21 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStorage
    * @param {data} data Payload to add to the entry
    * @return {Promise<Entry>} Entry that was appended
    */
-  const append = async (data, options = { pointerCount: 0 }) => {
+  const append = async (data, options = { referencesCount: 0 }) => {
     // 1. Prepare entry
     // 2. Authorize entry
     // 3. Store entry
     // 4. return Entry
-    // Get references (entry at every pow2 of distance)
-    const refs = await getReferences(options.pointerCount)
+    // Get current heads of the log
+    const heads_ = await heads()
+    // Get references (we skip the heads which are covered by the next field)
+    let refs = []
+    for await (const { hash } of iterator({ amount: options.referencesCount + heads_.length })) {
+      refs.push(hash)
+    }
+    refs = refs.slice(heads_.length, options.referencesCount + heads_.length)
     // Create the next pointers from heads
-    const nexts = (await heads()).map(entry => entry.hash)
+    const nexts = heads_.map(entry => entry.hash)
     // Create the entry
     const entry = await Entry.create(
       identity,
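
The new append logic takes the referencesCount + heads.length most recent entries and drops the first heads.length of them, since the heads are already linked via next. A worked sketch with a hypothetical selectRefs helper (hashes listed newest first):

// With two heads and referencesCount = 3, iterating 5 entries back and
// slicing off the heads leaves exactly 3 references:
const selectRefs = (recentHashes, headsCount, referencesCount) =>
  recentHashes.slice(headsCount, referencesCount + headsCount)

selectRefs(['e9', 'e8', 'e7', 'e6', 'e5'], 2, 3) // => ['e7', 'e6', 'e5']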
@@ -253,14 +259,20 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStorage
     let stack = rootEntries.sort(sortFn)
     // Keep a record of all the hashes of entries we've traversed and yielded
     const traversed = {}
+    // Keep a record of all the hashes we are fetching or have already fetched
+    let toFetch = []
+    const fetched = {}
+    // A function to check if we've seen a hash
+    const notIndexed = (hash) => !(traversed[hash] || fetched[hash])
     // Current entry during traversal
     let entry
     // Start traversal and process stack until it's empty (traversed the full log)
     while (stack.length > 0) {
-      stack = stack.sort(sortFn)
       // Get the next entry from the stack
       entry = stack.pop()
       if (entry) {
-        const hash = entry.hash
+        const { hash, next, refs } = entry
         // If we have an entry that we haven't traversed yet, process it
         if (!traversed[hash]) {
           // Yield the current entry
@@ -270,21 +282,28 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStorage
           if (done === true) {
             break
           }
-          // Add to the hashes we've traversed
-          traversed[hash] = true
-          // Add hashes of next entries to the stack from entry's
-          // causal connection (next) and references to history (refs)
-          for (const nextHash of [...entry.next, ...entry.refs]) {
-            // Check if we've already traversed this entry
-            if (!traversed[nextHash]) {
-              // Fetch the next entry
-              const next = await get(nextHash)
-              if (next) {
-                // Add the next entry in front of the stack and sort
-                stack = [next, ...stack].sort(sortFn)
-              }
-            }
-          }
+          // Add to the hash indices
+          traversed[hash] = true
+          fetched[hash] = true
+          // Add the next and refs hashes to the list of hashes to fetch next,
+          // filter out traversed and fetched hashes
+          toFetch = [...toFetch, ...next, ...refs].filter(notIndexed)
+          // Function to fetch an entry and making sure it's not a duplicate (check the hash indices)
+          const fetchEntries = async (hash) => {
+            if (!traversed[hash] && !fetched[hash]) {
+              fetched[hash] = true
+              return get(hash)
+            }
+          }
+          // Fetch the next/reference entries
+          const nexts = await pMap(toFetch, fetchEntries)
+          // Add the next and refs fields from the fetched entries to the next round
+          toFetch = nexts
+            .filter(e => e != null)
+            .reduce((res, acc) => [...res, ...acc.next, ...acc.refs], [])
+            .filter(notIndexed)
+          // Add the fetched entries to the stack to be processed
+          stack = [...nexts, ...stack]
         }
       }
     }
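
The parallelism fix itself is the switch from one awaited get() per linked hash to a single pMap over the whole frontier. A condensed sketch of the pattern, using a hypothetical fetchFrontier helper (p-map runs its mapper concurrently, with unbounded concurrency by default):

import pMap from 'p-map'

// Fetch every not-yet-seen hash of the current frontier in parallel,
// instead of awaiting one get() per hash inside the traversal loop.
const fetchFrontier = async (hashes, get, seen) => {
  const unseen = hashes.filter(hash => !seen.has(hash))
  for (const hash of unseen) seen.add(hash)
  const entries = await pMap(unseen, get)
  return entries.filter(entry => entry != null)
}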
@ -405,31 +424,6 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora
await _entries.close() await _entries.close()
} }
/**
* TODO
* Get references at every pow2 distance
* If pointer count is 4, returns 2
* If pointer count is 8, returns 3 references
* If pointer count is 512, returns 9 references
* If pointer count is 2048, returns 11 references
*/
const getReferences = async (pointerCount = 1) => {
let nextPointerDistance = 2
let distance = 0
const refs = []
const shouldStopFn = () => distance >= pointerCount
for await (const entry of traverse(null, shouldStopFn)) {
distance++
if (distance === nextPointerDistance) {
if (entry.hash) {
refs.push(entry.hash)
}
nextPointerDistance *= 2
}
}
return refs
}
/** /**
* Check if an object is a Log. * Check if an object is a Log.
* @param {Log} obj * @param {Log} obj
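
For contrast, the removed getReferences placed one reference at each power-of-two distance, so the counts in its deleted doc comment follow floor(log2(pointerCount)):

// Number of pow2-distance references the old scheme produced:
const pow2Refs = (pointerCount) => Math.floor(Math.log2(pointerCount))

pow2Refs(4)    // => 2
pow2Refs(8)    // => 3
pow2Refs(512)  // => 9
pow2Refs(2048) // => 11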

test: KeyValuePersisted Database

@@ -175,11 +175,11 @@ describe('KeyValuePersisted Database', function () {
     const keyvalue = [
       { hash: 'zdpuAm6QEA29wFnd6re7X2XWe7AmrzVbsvdHhSPXci2CqXryw', key: 'key1', value: 'init' },
       { hash: 'zdpuAvfTQwogEAhEaAtb85ugEzxvfDVUnALoZeNbrz3s4jMYd', key: 'key2', value: true },
-      { hash: 'zdpuB2CBCwvPBdHjZRKfFtL5JuDo9sc5QinKhbtYu1YkCLq22', key: 'key3', value: 'hello' },
-      { hash: 'zdpuAyWWtvFfxKWMcV3NJ7XXbjiQC6MkA8h6TrhFA2ihLrt82', key: 'key4', value: 'friend' },
-      { hash: 'zdpuB2Z5coKXGMAZtb7X8UQYgo6vWAP4VshBvE4xwBCrR5Laa', key: 'key5', value: '12345' },
-      { hash: 'zdpuAnn2kuStzcTCJ5ULMxCvB7RtgAScJPmg3YAVYju4oPEtC', key: 'key6', value: 'empty' },
-      { hash: 'zdpuAv1jSFz4GHRieAXGvRGnVWdEdxDp2HefREoTJJWYC8Zqw', key: 'key7', value: 'friend33' }
+      { hash: 'zdpuB2ZCXwfkbgXQDHaP13rGSLVzZdZXuFPAk988VCZyMV1Er', key: 'key3', value: 'hello' },
+      { hash: 'zdpuAnDHm5qkyzkdtEiedF2VwyuUvrgsgM7mCVrjLya3G7nFS', key: 'key4', value: 'friend' },
+      { hash: 'zdpuB2XjqLhSEEB6CxCwCLWoas77Db6T9TJDNKcyX35kwbNmb', key: 'key5', value: '12345' },
+      { hash: 'zdpuB1GyECVHxwFBxa9QYeTYRFJRLDnujyekhXAmStG26stU9', key: 'key6', value: 'empty' },
+      { hash: 'zdpuAsj9ZgSCWSuRYFkQ56Eiffpi6j6761ueHHNwNf3VaZfms', key: 'key7', value: 'friend33' }
     ]
     for (const { key, value } of Object.values(keyvalue)) {

test: KeyValue Database

@@ -167,11 +167,11 @@ describe('KeyValue Database', function () {
     const keyvalue = [
       { hash: 'zdpuAm6QEA29wFnd6re7X2XWe7AmrzVbsvdHhSPXci2CqXryw', key: 'key1', value: 'init' },
       { hash: 'zdpuAvfTQwogEAhEaAtb85ugEzxvfDVUnALoZeNbrz3s4jMYd', key: 'key2', value: true },
-      { hash: 'zdpuB2CBCwvPBdHjZRKfFtL5JuDo9sc5QinKhbtYu1YkCLq22', key: 'key3', value: 'hello' },
-      { hash: 'zdpuAyWWtvFfxKWMcV3NJ7XXbjiQC6MkA8h6TrhFA2ihLrt82', key: 'key4', value: 'friend' },
-      { hash: 'zdpuB2Z5coKXGMAZtb7X8UQYgo6vWAP4VshBvE4xwBCrR5Laa', key: 'key5', value: '12345' },
-      { hash: 'zdpuAnn2kuStzcTCJ5ULMxCvB7RtgAScJPmg3YAVYju4oPEtC', key: 'key6', value: 'empty' },
-      { hash: 'zdpuAv1jSFz4GHRieAXGvRGnVWdEdxDp2HefREoTJJWYC8Zqw', key: 'key7', value: 'friend33' }
+      { hash: 'zdpuB2ZCXwfkbgXQDHaP13rGSLVzZdZXuFPAk988VCZyMV1Er', key: 'key3', value: 'hello' },
+      { hash: 'zdpuAnDHm5qkyzkdtEiedF2VwyuUvrgsgM7mCVrjLya3G7nFS', key: 'key4', value: 'friend' },
+      { hash: 'zdpuB2XjqLhSEEB6CxCwCLWoas77Db6T9TJDNKcyX35kwbNmb', key: 'key5', value: '12345' },
+      { hash: 'zdpuB1GyECVHxwFBxa9QYeTYRFJRLDnujyekhXAmStG26stU9', key: 'key6', value: 'empty' },
+      { hash: 'zdpuAsj9ZgSCWSuRYFkQ56Eiffpi6j6761ueHHNwNf3VaZfms', key: 'key7', value: 'friend33' }
     ]
     for (const { key, value } of Object.values(keyvalue)) {

test: Log - Append

@@ -72,7 +72,7 @@ describe('Log - Append', function () {
   describe('append 100 items to a log', async () => {
     const amount = 100
-    const nextPointerAmount = 64
+    const referencesCount = 64

     let log
     let values = []
@@ -81,7 +81,7 @@
     before(async () => {
       log = await Log(testIdentity, { logId: 'A' })
       for (let i = 0; i < amount; i++) {
-        await log.append('hello' + i, { pointerCount: nextPointerAmount })
+        await log.append('hello' + i, { referencesCount })
       }
       values = await log.values()
       heads = await log.heads()
@@ -110,8 +110,12 @@
   })

   it('added the correct amount of refs pointers', async () => {
-    values.forEach((entry, index) => {
-      strictEqual(entry.refs.length, index > 0 ? Math.floor(Math.log2(Math.min(nextPointerAmount, index))) : 0)
+    values.reverse().forEach((entry, index) => {
+      index = values.length - index - 1
+      const expectedRefCount = index < referencesCount
+        ? Math.max(0, index - 1)
+        : Math.max(0, Math.min(referencesCount, index - 1))
+      strictEqual(entry.refs.length, expectedRefCount)
     })
   })
 })
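
The updated assertion encodes the new scheme: the entry at position index carries index - 1 references (its direct predecessor is covered by next), capped at referencesCount. Worked through a hypothetical expectedRefCount helper:

const expectedRefCount = (index, referencesCount) =>
  index < referencesCount
    ? Math.max(0, index - 1)
    : Math.max(0, Math.min(referencesCount, index - 1))

expectedRefCount(0, 64)  // => 0  (the first entry has no history)
expectedRefCount(5, 64)  // => 4
expectedRefCount(99, 64) // => 64 (capped at referencesCount)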

test: Log - References

@@ -2,19 +2,12 @@ import { strictEqual } from 'assert'
 import rmrf from 'rimraf'
 import { copy } from 'fs-extra'
 import { Log } from '../../src/oplog/index.js'
-import { Identities, KeyStore, MemoryStorage } from '../../src/index.js'
+import { Identities, KeyStore } from '../../src/index.js'
 import testKeysPath from '../fixtures/test-keys-path.js'

 const keysPath = './testkeys'
-const isBrowser = () => typeof window !== 'undefined'

 describe('Log - References', function () {
-  if (isBrowser()) {
-    // Skip these tests when running in the browser since they take a long time
-    return
-  }
-
   this.timeout(60000)

   let keystore
@@ -36,132 +29,118 @@ describe('Log - References', function () {
   })

   describe('References', async () => {
-    const amount = 64
-
-    it('creates entries with 2 references', async () => {
-      const maxReferenceDistance = 2
+    it('creates entries with 1 references', async () => {
+      const amount = 32
+      const referencesCount = 1
       const log1 = await Log(testIdentity, { logId: 'A' })
       for (let i = 0; i < amount; i++) {
-        await log1.append(i.toString(), { pointerCount: maxReferenceDistance })
+        await log1.append(i.toString(), { referencesCount })
       }
       const values1 = await log1.values()
-      strictEqual(values1[values1.length - 1].refs.length, 1)
+      strictEqual(values1[values1.length - 1].refs.length, referencesCount)
+    })
+
+    it('creates entries with 2 references', async () => {
+      const amount = 32
+      const referencesCount = 2
+      const log1 = await Log(testIdentity, { logId: 'A' })
+      for (let i = 0; i < amount; i++) {
+        await log1.append(i.toString(), { referencesCount })
+      }
+      const values1 = await log1.values()
+      strictEqual(values1[values1.length - 1].refs.length, referencesCount)
     })

     it('creates entries with 4 references', async () => {
-      const maxReferenceDistance = 2
+      const amount = 32
+      const referencesCount = 4
       const log2 = await Log(testIdentity, { logId: 'B' })
-      for (let i = 0; i < amount * 2; i++) {
-        await log2.append(i.toString(), { pointerCount: Math.pow(maxReferenceDistance, 2) })
+      for (let i = 0; i < amount; i++) {
+        await log2.append(i.toString(), { referencesCount })
       }
       const values2 = await log2.values()
-      strictEqual(values2[values2.length - 1].refs.length, 2)
+      strictEqual(values2[values2.length - 1].refs.length, referencesCount)
     })

     it('creates entries with 8 references', async () => {
-      const maxReferenceDistance = 2
+      const amount = 64
+      const referencesCount = 8
       const log3 = await Log(testIdentity, { logId: 'C' })
-      for (let i = 0; i < amount * 3; i++) {
-        await log3.append(i.toString(), { pointerCount: Math.pow(maxReferenceDistance, 3) })
+      for (let i = 0; i < amount; i++) {
+        await log3.append(i.toString(), { referencesCount })
       }
       const values3 = await log3.values()
-      strictEqual(values3[values3.length - 1].refs.length, 3)
+      strictEqual(values3[values3.length - 1].refs.length, referencesCount)
     })

     it('creates entries with 16 references', async () => {
-      const maxReferenceDistance = 2
+      const amount = 64
+      const referencesCount = 16
       const log4 = await Log(testIdentity, { logId: 'D' })
-      for (let i = 0; i < amount * 4; i++) {
-        await log4.append(i.toString(), { pointerCount: Math.pow(maxReferenceDistance, 4) })
+      for (let i = 0; i < amount; i++) {
+        await log4.append(i.toString(), { referencesCount })
       }
       const values4 = await log4.values()
-      strictEqual(values4[values4.length - 1].refs.length, 4)
+      strictEqual(values4[values4.length - 1].refs.length, referencesCount)
     })

-    const inputs = [
-      { amount: 1, referenceCount: 1, refLength: 0 },
-      { amount: 1, referenceCount: 2, refLength: 0 },
-      { amount: 2, referenceCount: 1, refLength: 1 },
-      { amount: 2, referenceCount: 2, refLength: 1 },
-      { amount: 3, referenceCount: 2, refLength: 1 },
-      { amount: 3, referenceCount: 4, refLength: 1 },
-      { amount: 4, referenceCount: 4, refLength: 2 },
-      { amount: 4, referenceCount: 4, refLength: 2 },
-      { amount: 32, referenceCount: 4, refLength: 2 },
-      { amount: 32, referenceCount: 8, refLength: 3 },
-      { amount: 32, referenceCount: 16, refLength: 4 },
-      { amount: 18, referenceCount: 32, refLength: 5 },
-      { amount: 128, referenceCount: 32, refLength: 5 },
-      { amount: 63, referenceCount: 64, refLength: 5 },
-      { amount: 64, referenceCount: 64, refLength: 6 },
-      { amount: 65, referenceCount: 64, refLength: 6 },
-      { amount: 91, referenceCount: 64, refLength: 6 },
-      { amount: 128, referenceCount: 64, refLength: 6 },
-      { amount: 128, referenceCount: 1, refLength: 0 },
-      { amount: 128, referenceCount: 2, refLength: 1 },
-      { amount: 256, referenceCount: 1, refLength: 0 },
-      { amount: 256, referenceCount: 4, refLength: 2 },
-      { amount: 256, referenceCount: 8, refLength: 3 },
-      { amount: 256, referenceCount: 16, refLength: 4 },
-      { amount: 256, referenceCount: 32, refLength: 5 },
-      { amount: 1024, referenceCount: 2, refLength: 1 }
-    ]
-
-    inputs.forEach(input => {
-      it(`has ${input.refLength} references, max distance ${input.referenceCount}, total of ${input.amount} entries`, async () => {
-        const test = async (amount, referenceCount, refLength) => {
-          const storage = await MemoryStorage()
-          const log1 = await Log(testIdentity, { logId: 'A', storage })
-          for (let i = 0; i < amount; i++) {
-            await log1.append((i + 1).toString(), { pointerCount: referenceCount })
-          }
-          const values = await log1.values()
-          strictEqual(values.length, input.amount)
-          strictEqual(values[values.length - 1].clock.time, input.amount)
-          for (let k = 0; k < input.amount; k++) {
-            const idx = values.length - k - 1
-            strictEqual(values[idx].clock.time, idx + 1)
-            // Check the first ref (distance 2)
-            if (values[idx].refs.length > 0) { strictEqual(values[idx].refs[0], values[idx - 2].hash) }
-            // Check the second ref (distance 4)
-            if (values[idx].refs.length > 1 && idx > referenceCount) { strictEqual(values[idx].refs[1], values[idx - 4].hash) }
-            // Check the third ref (distance 8)
-            if (values[idx].refs.length > 2 && idx > referenceCount) { strictEqual(values[idx].refs[2], values[idx - 8].hash) }
-            // Check the fourth ref (distance 16)
-            if (values[idx].refs.length > 3 && idx > referenceCount) { strictEqual(values[idx].refs[3], values[idx - 16].hash) }
-            // Check the fifth ref (distance 32)
-            if (values[idx].refs.length > 4 && idx > referenceCount) { strictEqual(values[idx].refs[4], values[idx - 32].hash) }
-            // Check the fifth ref (distance 64)
-            if (values[idx].refs.length > 5 && idx > referenceCount) { strictEqual(values[idx].refs[5], values[idx - 64].hash) }
-            // Check the reference of each entry
-            if (idx > referenceCount) { strictEqual(values[idx].refs.length, refLength) }
-          }
-        }
-        await test(input.amount, input.referenceCount, input.refLength)
-      })
+    it('creates entries with 32 references', async () => {
+      const amount = 64
+      const referencesCount = 32
+      const log4 = await Log(testIdentity, { logId: 'D' })
+      for (let i = 0; i < amount; i++) {
+        await log4.append(i.toString(), { referencesCount })
+      }
+      const values4 = await log4.values()
+      strictEqual(values4[values4.length - 1].refs.length, referencesCount)
+    })
+
+    it('creates entries with 64 references', async () => {
+      const amount = 128
+      const referencesCount = 64
+      const log4 = await Log(testIdentity, { logId: 'D' })
+      for (let i = 0; i < amount; i++) {
+        await log4.append(i.toString(), { referencesCount })
+      }
+      const values4 = await log4.values()
+      strictEqual(values4[values4.length - 1].refs.length, referencesCount)
+    })
+
+    it('creates entries with 128 references', async () => {
+      // +2 because first ref is always skipped (covered by next field) and
+      // we need 129 entries to have 128 back references
+      const amount = 128 + 2
+      const referencesCount = 128
+      const log4 = await Log(testIdentity, { logId: 'D' })
+      for (let i = 0; i < amount; i++) {
+        await log4.append(i.toString(), { referencesCount })
+      }
+      const values4 = await log4.values()
+      strictEqual(values4[values4.length - 1].refs.length, referencesCount)
     })
   })
 })
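
The "+2" in the last test follows from the same arithmetic: the newest of amount entries sits at index amount - 1 and carries min(referencesCount, amount - 2) references, so reaching 128 requires at least 130 entries. A hypothetical refsOfNewest helper makes this checkable:

const refsOfNewest = (amount, referencesCount) =>
  Math.min(referencesCount, amount - 2)

refsOfNewest(128 + 2, 128) // => 128
refsOfNewest(128, 128)     // => 126 (too few entries for 128 references)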

test: Replicating databases

@@ -44,9 +44,11 @@ describe('Replicating databases', function () {
   before(async () => {
     db1 = await orbitdb1.open('helloworld')

+    console.time('write')
     for (let i = 0; i < expected.length; i++) {
       await db1.add(expected[i])
     }
+    console.timeEnd('write')
   })

   after(async () => {
@@ -59,7 +61,7 @@
     let replicated = false

-    const onJoin = async (peerId) => {
+    const onJoin = async (peerId, heads) => {
       const head = (await db2.log.heads())[0]
       if (head && head.clock.time === amount) {
         replicated = true