Merge pull request #185 from haadcode/feat/immutable-log

Use immutable ipfs-log
This commit is contained in:
Haad 2017-03-21 18:23:58 +01:00 committed by GitHub
commit 00865a3350
27 changed files with 3260 additions and 13981 deletions

1
.gitignore vendored
View File

@ -7,6 +7,7 @@ dump.rdb
Vagrantfile
examples/browser/bundle.js
examples/browser/*.map
examples/browser/lib
dist/*.map
orbit-db/
ipfs/

View File

@ -8,6 +8,9 @@ test: deps
build: test
npm run build
mkdir -p examples/browser/lib/
cp dist/orbitdb.min.js examples/browser/lib/orbitdb.min.js
cp node_modules/ipfs-daemon/dist/ipfs-browser-daemon.min.js examples/browser/lib/ipfs-browser-daemon.min.js
@echo "Build success!"
@echo "Output: 'dist/', 'examples/browser/'"

View File

@ -1,3 +1,5 @@
'use strict'
const webpack = require('webpack')
const path = require('path')
@ -6,59 +8,26 @@ module.exports = {
output: {
libraryTarget: 'var',
library: 'OrbitDB',
filename: './dist/orbitdb.js'
filename: './dist/orbitdb.min.js'
},
devtool: 'sourcemap',
stats: {
colors: true,
cached: false
devtool: 'source-map',
resolve: {
modules: [
'node_modules',
path.resolve(__dirname, '../node_modules')
]
},
resolveLoader: {
modules: [
'node_modules',
path.resolve(__dirname, '../node_modules')
],
moduleExtensions: ['-loader']
},
node: {
console: false,
process: 'mock',
Buffer: true
},
plugins: [
],
resolve: {
modules: [
path.join(__dirname, '../node_modules')
],
alias: {
http: 'stream-http',
https: 'https-browserify',
Buffer: 'buffer'
}
},
module: {
loaders: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: 'babel-loader',
query: {
presets: require.resolve('babel-preset-es2015'),
plugins: require.resolve('babel-plugin-transform-runtime')
}
},
{
test: /\.js$/,
include: /node_modules\/(hoek|qs|wreck|boom|ipfs-.+|orbit-db.+|logplease|crdts|promisify-es6)/,
loader: 'babel-loader',
query: {
presets: require.resolve('babel-preset-es2015'),
plugins: require.resolve('babel-plugin-transform-runtime')
}
},
{
test: /\.json$/,
loader: 'json-loader'
}
]
},
externals: {
net: '{}',
tls: '{}',
'require-dir': '{}'
}
plugins: [],
target: 'web'
}

View File

@ -1,68 +0,0 @@
const webpack = require('webpack')
const path = require('path')
module.exports = {
entry: './src/OrbitDB.js',
output: {
libraryTarget: 'var',
library: 'OrbitDB',
filename: './dist/orbitdb.min.js'
},
devtool: 'sourcemap',
stats: {
colors: true,
cached: false
},
node: {
console: false,
process: 'mock',
Buffer: true
},
plugins: [
new webpack.optimize.UglifyJsPlugin({
mangle: false,
compress: { warnings: false }
})
],
resolve: {
modules: [
path.join(__dirname, '../node_modules')
],
alias: {
http: 'stream-http',
https: 'https-browserify',
Buffer: 'buffer'
}
},
module: {
loaders: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: 'babel-loader',
query: {
presets: require.resolve('babel-preset-es2015'),
plugins: require.resolve('babel-plugin-transform-runtime')
}
},
{
test: /\.js$/,
include: /node_modules\/(hoek|qs|wreck|boom|ipfs-.+|orbit-db.+|logplease|crdts|promisify-es6)/,
loader: 'babel-loader',
query: {
presets: require.resolve('babel-preset-es2015'),
plugins: require.resolve('babel-plugin-transform-runtime')
}
},
{
test: /\.json$/,
loader: 'json-loader'
}
]
},
externals: {
net: '{}',
tls: '{}',
'require-dir': '{}'
}
}

View File

@ -33,7 +33,7 @@ module.exports = {
//
// Can be dropped once https://github.com/devongovett/browserify-zlib/pull/18
// is shipped
zlib: 'browserify-zlib',
zlib: 'browserify-zlib-next',
// Can be dropped once https://github.com/webpack/node-libs-browser/pull/41
// is shipped
http: 'stream-http'

14388
dist/orbitdb.js vendored

File diff suppressed because it is too large Load Diff

29
dist/orbitdb.min.js vendored

File diff suppressed because one or more lines are too long

View File

@ -16,7 +16,7 @@ const queryLoop = (db) => {
totalQueries ++
lastTenSeconds ++
queriesPerSecond ++
process.nextTick(() => queryLoop(db))
setImmediate(() => queryLoop(db))
})
.catch((e) => console.error(e))
}
@ -30,7 +30,7 @@ ipfs.on('error', (err) => console.error(err))
ipfs.on('ready', () => {
const orbit = new OrbitDB(ipfs, 'benchmark')
const db = orbit.eventlog('orbit-db.benchmark')
const db = orbit.eventlog('orbit-db.benchmark', { maxHistory: 100 })
// Metrics output
setInterval(() => {

View File

@ -3,66 +3,78 @@
<meta charset="utf-8">
</head>
<body>
<div id="result">Loading...</div>
<input id="dbname" type="text" placeholder="Database name"/>
<button id="open" type="button">Open</button>
<br><br>
<div id="output"></div>
<script type="text/javascript" src="../../dist/orbitdb.min.js" charset="utf-8"></script>
<script type="text/javascript" src="../../node_modules/ipfs-daemon/dist/ipfs-browser-daemon.min.js" charset="utf-8"></script>
<script type="text/javascript" src="lib/orbitdb.min.js" charset="utf-8"></script>
<script type="text/javascript" src="lib/ipfs-browser-daemon.min.js" charset="utf-8"></script>
<script type="text/javascript">
const elm = document.getElementById("output")
const dbnameField = document.getElementById("dbname")
const openButton = document.getElementById("open")
const username = new Date().getTime()
const channel = 'orbitdb-browser-examples'
const key = 'greeting'
const openDatabase = () => {
openButton.disabled = true
elm.innerHTML = "Starting IPFS..."
const elm = document.getElementById("result")
const dbname = dbnameField.value
const username = new Date().getTime()
const key = 'greeting'
const ipfs = new IpfsDaemon({
IpfsDataDir: '/tmp/orbit-db-examples',
// dev server: webrtc-star-signalling.cloud.ipfs.team
SignalServer: 'star-signal.cloud.ipfs.team', // IPFS dev server
Discovery: {
MDNS: {
Enabled: false,
Interval: 10
},
webRTCStar: {
Enabled: true
}
},
})
const ipfs = new IpfsDaemon({
IpfsDataDir: '/orbit-db-/examples/browser',
SignalServer: 'star-signal.cloud.ipfs.team', // IPFS dev server
})
function handleError(e) {
console.error(e.stack)
elm.innerHTML = e.message
}
function handleError(e) {
console.error(e.stack)
elm.innerHTML = e.message
}
ipfs.on('error', (e) => handleError(e))
ipfs.on('error', (e) => handleError(e))
ipfs.on('ready', () => {
const orbit = new OrbitDB(ipfs, username, { maxHistory: 5 })
ipfs.on('ready', () => {
elm.innerHTML = "Loading database..."
const db = orbit.kvstore(channel)
const log = orbit.eventlog(channel + ".log")
const counter = orbit.counter(channel + ".count")
const orbit = new OrbitDB(ipfs, username)
const creatures = ['👻', '🐙', '🐷', '🐬', '🐞', '🐈', '🙉', '🐸', '🐓']
const db = orbit.kvstore(dbname, { maxHistory: 5, syncHistory: false, cachePath: '/orbit-db' })
const log = orbit.eventlog(dbname + ".log", { maxHistory: 5, syncHistory: false, cachePath: '/orbit-db' })
const counter = orbit.counter(dbname + ".count", { maxHistory: 5, syncHistory: false, cachePath: '/orbit-db' })
let count = 1
const query = () => {
const creatures = ['👻', '🐙', '🐷', '🐬', '🐞', '🐈', '🙉', '🐸', '🐓']
const idx = Math.floor(Math.random() * creatures.length)
const creature = creatures[idx] + " " + creatures[idx]
const creature = creatures[idx]
// Set a key-value pair
db.put(key, "db.put #" + count + " - GrEEtinGs from " + creature)
.then((res) => count ++)
.then(() => counter.inc()) // Increase the counter by one
.then(() => log.add(creature)) // Add an event to 'latest visitors' log
.then(() => {
const result = db.get(key)
const latest = log.iterator({ limit: 5 }).collect()
const count = counter.value
const interval = Math.floor((Math.random() * 5000) + 3000)
let count = 0
const query = () => {
const value = "GrEEtinGs from " + username + " " + creature + ": Hello #" + count + " (" + new Date().getTime() + ")"
// Set a key-value pair
count ++
db.put(key, value)
.then(() => counter.inc()) // Increase the counter by one
.then(() => log.add(value)) // Add an event to 'latest visitors' log
.then(() => getData())
.catch((e) => handleError(e))
}
const getData = () => {
const result = db.get(key)
const latest = log.iterator({ limit: 5 }).collect()
const count = counter.value
ipfs.pubsub.peers(dbname + ".log")
.then((peers) => {
const output = `
<b>You are:</b> ${username} ${creature}<br>
<b>Peers:</b> ${peers.length}<br>
<b>Database:</b> ${dbname}<br>
<br><b>Writing to database every ${interval} milliseconds...</b><br><br>
<b>Key-Value Store</b>
-------------------------------------------------------
Key | Value
@ -72,9 +84,9 @@
<b>Eventlog</b>
-------------------------------------------------------
Latest Visitors
Latest Updates
-------------------------------------------------------
${latest.reverse().map((e) => e.payload.value + " at " + new Date(e.payload.meta.ts).toISOString()).join('\n')}
${latest.reverse().map((e) => e.payload.value).join('\n')}
<b>Counter</b>
-------------------------------------------------------
@ -82,13 +94,29 @@
-------------------------------------------------------
`
elm.innerHTML = output.split("\n").join("<br>")
})
.catch((e) => handleError(e))
}
})
}
// Start query loop when the database has loaded its history
db.events.on('ready', () => setInterval(query, 1000))
})
db.events.on('synced', () => getData())
counter.events.on('synced', () => getData())
log.events.on('synced', () => getData())
db.events.on('ready', () => getData())
counter.events.on('ready', () => getData())
log.events.on('ready', () => getData())
// Start query loop when the database has loaded its history
db.load(10)
.then(() => counter.load(10))
.then(() => log.load(10))
.then(() => {
count = counter.value
setInterval(query, interval)
})
})
}
openButton.addEventListener('click', openDatabase)
</script>
</body>
</html>

View File

@ -3,7 +3,10 @@
<meta charset="utf-8">
</head>
<body>
<div id="result">Loading...</div>
<input id="dbname" type="text" placeholder="Database name"/>
<button id="open" type="button">Open</button>
<br><br>
<div id="output"></div>
<script type="text/javascript" src="bundle.js" charset="utf-8"></script>
</body>
</html>

View File

@ -3,81 +3,108 @@
const IPFS = require('ipfs-daemon/src/ipfs-browser-daemon')
const OrbitDB = require('../../src/OrbitDB')
const username = new Date().getTime()
const channel = 'orbitdb-browser-examples'
const key = 'greeting'
const elm = document.getElementById("output")
const dbnameField = document.getElementById("dbname")
const openButton = document.getElementById("open")
const elm = document.getElementById("result")
const openDatabase = () => {
openButton.disabled = true
elm.innerHTML = "Starting IPFS..."
const ipfs = new IPFS({
IpfsDataDir: '/tmp/orbit-db-examples',
// dev server: webrtc-star-signalling.cloud.ipfs.team
SignalServer: 'star-signal.cloud.ipfs.team', // IPFS dev server
Discovery: {
MDNS: {
Enabled: false,
Interval: 10
},
webRTCStar: {
Enabled: true
}
},
})
const dbname = dbnameField.value
const username = new Date().getTime()
const key = 'greeting'
function handleError(e) {
console.error(e.stack)
elm.innerHTML = e.message
}
const ipfs = new IPFS({
IpfsDataDir: '/orbit-db-/examples/browser',
SignalServer: 'star-signal.cloud.ipfs.team', // IPFS dev server
})
ipfs.on('error', (e) => handleError(e))
ipfs.on('ready', () => {
const orbit = new OrbitDB(ipfs, username, { maxHistory: 5 })
const db = orbit.kvstore(channel)
const log = orbit.eventlog(channel + ".log")
const counter = orbit.counter(channel + ".count")
const creatures = ['👻', '🐙', '🐷', '🐬', '🐞', '🐈', '🙉', '🐸', '🐓']
let count = 1
const query = () => {
const idx = Math.floor(Math.random() * creatures.length)
// Set a key-value pair
db.put(key, "db.put #" + count + " - GrEEtinGs to " + creatures[idx])
.then((res) => count ++)
.then(() => counter.inc()) // Increase the counter by one
.then(() => log.add(creatures[idx])) // Add an event to 'latest visitors' log
.then(() => {
const result = db.get(key)
const latest = log.iterator({ limit: 5 }).collect()
const count = counter.value
const output = `
<b>Key-Value Store</b>
-------------------------------------------------------
Key | Value
-------------------------------------------------------
${key} | ${result}
-------------------------------------------------------
<b>Eventlog</b>
-------------------------------------------------------
Latest Visitors
-------------------------------------------------------
${latest.reverse().map((e) => e.payload.value + " at " + new Date(e.payload.meta.ts).toISOString()).join('\n')}
<b>Counter</b>
-------------------------------------------------------
Visitor Count: ${count}
-------------------------------------------------------
`
elm.innerHTML = output.split("\n").join("<br>")
})
.catch((e) => handleError(e))
function handleError(e) {
console.error(e.stack)
elm.innerHTML = e.message
}
// Start query loop when the database has loaded its history
db.events.on('ready', () => setInterval(query, 1000))
})
ipfs.on('error', (e) => handleError(e))
ipfs.on('ready', () => {
elm.innerHTML = "Loading database..."
const orbit = new OrbitDB(ipfs, username)
const db = orbit.kvstore(dbname, { maxHistory: 5, syncHistory: false, cachePath: '/orbit-db' })
const log = orbit.eventlog(dbname + ".log", { maxHistory: 5, syncHistory: false, cachePath: '/orbit-db' })
const counter = orbit.counter(dbname + ".count", { maxHistory: 5, syncHistory: false, cachePath: '/orbit-db' })
const creatures = ['👻', '🐙', '🐷', '🐬', '🐞', '🐈', '🙉', '🐸', '🐓']
const idx = Math.floor(Math.random() * creatures.length)
const creature = creatures[idx]
const interval = Math.floor((Math.random() * 5000) + 3000)
let count = 0
const query = () => {
const value = "GrEEtinGs from " + username + " " + creature + ": Hello #" + count + " (" + new Date().getTime() + ")"
// Set a key-value pair
count ++
db.put(key, value)
.then(() => counter.inc()) // Increase the counter by one
.then(() => log.add(value)) // Add an event to 'latest visitors' log
.then(() => getData())
.catch((e) => handleError(e))
}
const getData = () => {
const result = db.get(key)
const latest = log.iterator({ limit: 5 }).collect()
const count = counter.value
ipfs.pubsub.peers(dbname + ".log")
.then((peers) => {
const output = `
<b>You are:</b> ${username} ${creature}<br>
<b>Peers:</b> ${peers.length}<br>
<b>Database:</b> ${dbname}<br>
<br><b>Writing to database every ${interval} milliseconds...</b><br><br>
<b>Key-Value Store</b>
-------------------------------------------------------
Key | Value
-------------------------------------------------------
${key} | ${result}
-------------------------------------------------------
<b>Eventlog</b>
-------------------------------------------------------
Latest Updates
-------------------------------------------------------
${latest.reverse().map((e) => e.payload.value).join('\n')}
<b>Counter</b>
-------------------------------------------------------
Visitor Count: ${count}
-------------------------------------------------------
`
elm.innerHTML = output.split("\n").join("<br>")
})
}
db.events.on('synced', () => getData())
counter.events.on('synced', () => getData())
log.events.on('synced', () => getData())
db.events.on('ready', () => getData())
counter.events.on('ready', () => getData())
log.events.on('ready', () => getData())
// Start query loop when the database has loaded its history
db.load(10)
.then(() => counter.load(10))
.then(() => log.load(10))
.then(() => {
count = counter.value
setInterval(query, interval)
})
})
}
openButton.addEventListener('click', openDatabase)

View File

@ -16,43 +16,36 @@
},
"main": "src/OrbitDB.js",
"dependencies": {
"fs-pull-blob-store": "^0.4.1",
"idb-pull-blob-store": "^0.5.1",
"lock": "^0.1.3",
"libp2p-floodsub": "github:libp2p/js-libp2p-floodsub#orbit-floodsub",
"logplease": "^1.2.12",
"orbit-db-counterstore": "^0.1.8",
"orbit-db-docstore": "^0.0.9",
"orbit-db-eventstore": "^0.1.9",
"orbit-db-feedstore": "^0.1.8",
"orbit-db-kvstore": "^0.1.7",
"orbit-db-pubsub": "^0.1.6",
"pull-stream": "^3.4.5"
"orbit-db-counterstore": "~0.2.0",
"orbit-db-docstore": "github:orbitdb/orbit-db-docstore#v0.2.0",
"orbit-db-eventstore": "~0.2.0",
"orbit-db-feedstore": "~0.2.0",
"orbit-db-kvstore": "~0.2.0",
"orbit-db-pubsub": "~0.2.1"
},
"devDependencies": {
"asyncawait": "^1.0.6",
"babel-core": "^6.21.0",
"babel-loader": "^6.2.9",
"babel-plugin-transform-runtime": "^6.15.0",
"babel-polyfill": "^6.20.0",
"babel-preset-es2015": "^6.18.0",
"bluebird": "^3.4.6",
"ipfs-daemon": "^0.3.0-beta.16",
"babel-core": "^6.22.1",
"babel-loader": "^6.2.10",
"babel-plugin-transform-runtime": "^6.22.0",
"babel-polyfill": "^6.22.0",
"babel-preset-es2015": "^6.22.0",
"ipfs-daemon": "~0.3.0-beta.24",
"json-loader": "^0.5.4",
"lodash": "^4.17.4",
"mocha": "^3.1.2",
"mocha": "^3.2.0",
"p-each-series": "^1.0.0",
"rimraf": "^2.5.4",
"stream-http": "^2.6.2",
"webpack": "^2.1.0-beta.28"
"uglify-js": "github:mishoo/UglifyJS2#harmony",
"webpack": "^2.2.1"
},
"scripts": {
"examples": "npm run examples:node",
"examples:node": "node examples/eventlog.js",
"examples:browser": "open examples/browser/index.html && LOG=debug node examples/start-daemon.js",
"test": "mocha",
"build": "npm run build:dist && npm run build:minified && npm run build:examples",
"build:dist": "webpack --config conf/webpack.config.js",
"build:minified": "webpack --config conf/webpack.config.minified.js",
"build:examples": "webpack --config conf/webpack.example.config.js",
"stats": "webpack --json > stats.json"
"build": "npm run build:examples && npm run build:dist",
"build:examples": "webpack --config conf/webpack.example.config.js --sort-modules-by size",
"build:dist": "webpack -p --config conf/webpack.config.js --sort-modules-by size"
}
}

View File

@ -1,85 +0,0 @@
'use strict'
const pull = require('pull-stream')
const BlobStore = require('fs-pull-blob-store')
const Lock = require('lock')
let filePath
let store
let cache = {}
const lock = new Lock()
class Cache {
static set(key, value) {
return new Promise((resolve, reject) => {
if (cache[key] === value)
return resolve()
cache[key] = value
if(filePath && store) {
lock(filePath, (release) => {
// console.log("write cache:", filePath, JSON.stringify(cache, null, 2))
pull(
pull.values([cache]),
pull.map((v) => JSON.stringify(v, null, 2)),
store.write(filePath, release((err) => {
if (err) {
return reject(err)
}
resolve()
}))
)
})
} else {
resolve()
}
})
}
static get(key) {
return cache[key]
}
static loadCache(cachePath, cacheFile = 'orbit-db.cache') {
cache = {}
if (!cachePath)
return Promise.resolve()
// console.log("cache data:", cachePath)
store = new BlobStore(cachePath)
filePath = cacheFile
return new Promise((resolve, reject) => {
// console.log("load cache:", cacheFile)
store.exists(cacheFile, (err, exists) => {
if (err || !exists) {
return resolve()
}
lock(cacheFile, (release) => {
pull(
store.read(cacheFile),
pull.collect(release((err, res) => {
if (err) {
return reject(err)
}
cache = JSON.parse(Buffer.concat(res).toString() || '{}')
resolve()
}))
)
})
})
})
}
static reset() {
cache = {}
store = null
}
}
module.exports = Cache

View File

@ -7,7 +7,6 @@ const KeyValueStore = require('orbit-db-kvstore')
const CounterStore = require('orbit-db-counterstore')
const DocumentStore = require('orbit-db-docstore')
const Pubsub = require('orbit-db-pubsub')
const Cache = require('./Cache')
const defaultNetworkName = 'Orbit DEV Network'
@ -42,79 +41,58 @@ class OrbitDB {
return this._createStore(DocumentStore, dbname, options)
}
close(dbname) {
if(this._pubsub) this._pubsub.unsubscribe(dbname)
if (this.stores[dbname]) {
this.stores[dbname].events.removeAllListeners('write')
delete this.stores[dbname]
}
}
disconnect() {
Object.keys(this.stores).forEach((e) => this.close(e))
if (this._pubsub) this._pubsub.disconnect()
this.events.removeAllListeners('data')
Object.keys(this.stores).map((e) => this.stores[e]).forEach((store) => {
store.events.removeAllListeners('data')
store.events.removeAllListeners('write')
store.events.removeAllListeners('close')
})
this.stores = {}
this.user = null
this.network = null
}
/* Private methods */
_createStore(Store, dbname, options = { subscribe: true }) {
const store = new Store(this._ipfs, this.user.id, dbname, options)
this.stores[dbname] = store
return this._subscribe(store, dbname, options.subscribe, options.cachePath)
}
_createStore(Store, dbname, options) {
const opts = Object.assign({ replicate: true }, options)
_subscribe(store, dbname, subscribe = true, cachePath = './orbit-db') {
store.events.on('data', this._onData.bind(this))
const store = new Store(this._ipfs, this.user.id, dbname, opts)
store.events.on('write', this._onWrite.bind(this))
store.events.on('close', this._onClose.bind(this))
store.events.on('ready', this._onReady.bind(this))
if(subscribe && this._pubsub)
this._pubsub.subscribe(dbname, this._onMessage.bind(this), this._onConnected.bind(this), store.options.maxHistory > 0)
else
store.loadHistory().catch((e) => console.error(e.stack))
this.stores[dbname] = store
Cache.loadCache(cachePath).then(() => {
const hash = Cache.get(dbname)
store.loadHistory(hash).catch((e) => console.error(e.stack))
})
if(opts.replicate && this._pubsub)
this._pubsub.subscribe(dbname, this._onMessage.bind(this))
return store
}
/* Connected to the message broker */
_onConnected(dbname, hash) {
// console.log(".CONNECTED", dbname, hash)
const store = this.stores[dbname]
store.loadHistory(hash).catch((e) => console.error(e.stack))
}
/* Replication request from the message broker */
_onMessage(dbname, hash) {
// console.log(".MESSAGE", dbname, hash)
_onMessage(dbname, heads) {
// console.log(".MESSAGE", dbname, heads)
const store = this.stores[dbname]
store.sync(hash)
.then((res) => Cache.set(dbname, res))
.then(() => this.events.emit('synced', dbname, hash))
.catch((e) => console.error(e.stack))
store.sync(heads)
}
/* Data events */
_onWrite(dbname, hash) {
_onWrite(dbname, hash, entry, heads) {
// 'New entry written to database...', after adding a new db entry locally
// console.log(".WRITE", dbname, hash, this.user.username)
if(!hash) throw new Error("Hash can't be null!")
if(this._pubsub) this._pubsub.publish(dbname, hash)
Cache.set(dbname, hash)
if(!heads) throw new Error("'heads' not defined")
if(this._pubsub) setImmediate(() => this._pubsub.publish(dbname, heads))
}
_onData(dbname, items) {
// 'New database entry...', after a new entry was added to the database
// console.log(".SYNCED", dbname, items.length)
this.events.emit('data', dbname, items)
}
_onClose(dbname) {
if(this._pubsub) this._pubsub.unsubscribe(dbname)
delete this.stores[dbname]
_onReady(dbname, heads) {
// if(heads && this._pubsub) setImmediate(() => this._pubsub.publish(dbname, heads))
if(heads && this._pubsub) {
setTimeout(() => this._pubsub.publish(dbname, heads), 1000)
}
}
}

View File

@ -1,657 +0,0 @@
'use strict'
const _ = require('lodash')
const fs = require('fs')
const path = require('path')
const assert = require('assert')
const async = require('asyncawait/async')
const await = require('asyncawait/await')
const Promise = require('bluebird')
// const IpfsApis = require('ipfs-test-apis')
const OrbitDB = require('../src/OrbitDB')
const rmrf = require('rimraf')
const IpfsNodeDaemon = require('ipfs-daemon/src/ipfs-node-daemon')
const IpfsNativeDaemon = require('ipfs-daemon/src/ipfs-native-daemon')
if (typeof window !== 'undefined')
window.LOG = 'ERROR'
// Data directories
const defaultIpfsDirectory = './ipfs'
const defaultOrbitDBDirectory = './orbit-db'
// Orbit
const username = 'testrunner'
const hasIpfsApiWithPubsub = (ipfs) => {
return ipfs.object.get !== undefined
&& ipfs.object.put !== undefined
// && ipfs.pubsub.publish !== undefined
// && ipfs.pubsub.subscribe !== undefined
}
// [IpfsNativeDaemon, IpfsNodeDaemon].forEach((IpfsDaemon) => {
[IpfsNodeDaemon].forEach((IpfsDaemon) => {
// IpfsApis.forEach(function(ipfsApi) {
describe('orbit-db client', function() {
this.timeout(40000)
let ipfs, client, client2, db
let channel = 'abcdefghijklmn'
before(function (done) {
rmrf.sync(defaultIpfsDirectory)
rmrf.sync(defaultOrbitDBDirectory)
ipfs = new IpfsDaemon()
ipfs.on('error', done)
ipfs.on('ready', () => {
assert.equal(hasIpfsApiWithPubsub(ipfs), true)
client = new OrbitDB(ipfs, username)
client2 = new OrbitDB(ipfs, username + '2')
done()
})
})
after(() => {
if(db) db.delete()
if(client) client.disconnect()
if(client2) client2.disconnect()
ipfs.stop()
rmrf.sync(defaultOrbitDBDirectory)
rmrf.sync(defaultIpfsDirectory)
})
describe('Add events', function() {
beforeEach(() => {
db = client.eventlog(channel, { subscribe: false, maxHistory: 0 })
db.delete()
})
it('adds an item to an empty channel', () => {
return db.add('hello')
.then((head) => {
assert.notEqual(head, null)
assert.equal(head.startsWith('Qm'), true)
assert.equal(head.length, 46)
})
})
it('adds a new item to a channel with one item', () => {
const head = db.iterator().collect()
return db.add('hello')
.then((second) => {
assert.notEqual(second, null)
assert.notEqual(second, head)
assert.equal(second.startsWith('Qm'), true)
assert.equal(second.length, 46)
})
})
it('adds five items', async(() => {
for(let i = 1; i <= 5; i ++)
await(db.add('hello' + i))
const items = db.iterator({ limit: -1 }).collect()
assert.equal(items.length, 5)
assert.equal(_.first(items.map((f) => f.payload.value)), 'hello1')
assert.equal(_.last(items.map((f) => f.payload.value)), 'hello5')
}))
it('adds an item that is > 256 bytes', () => {
let msg = new Buffer(1024)
msg.fill('a')
return db.add(msg.toString())
.then((hash) => {
assert.notEqual(hash, null)
assert.equal(hash.startsWith('Qm'), true)
assert.equal(hash.length, 46)
})
})
})
describe('Delete events (Feed)', function() {
beforeEach(() => {
db = client.feed(channel, { subscribe: false, maxHistory: 0 })
db.delete()
})
it('deletes an item when only one item in the database', async(() => {
const head = await(db.add('hello1'))
const delop = await(db.remove(head))
const items = db.iterator().collect()
assert.equal(delop.startsWith('Qm'), true)
assert.equal(items.length, 0)
}))
it('deletes an item when two items in the database', async(() => {
await(db.add('hello1'))
const head = await(db.add('hello2'))
await(db.remove(head))
const items = db.iterator({ limit: -1 }).collect()
assert.equal(items.length, 1)
assert.equal(items[0].payload.value, 'hello1')
}))
it('deletes an item between adds', async(() => {
const head = await(db.add('hello1'))
await(db.add('hello2'))
db.remove(head)
await(db.add('hello3'))
const items = db.iterator().collect()
assert.equal(items.length, 1)
assert.equal(items[0].hash.startsWith('Qm'), true)
assert.equal(items[0].payload.key, null)
assert.equal(items[0].payload.value, 'hello3')
}))
})
describe('Iterator', function() {
let items = []
const itemCount = 5
beforeEach(async(() => {
items = []
db = client.eventlog(channel, { subscribe: false, maxHistory: 0 })
db.delete()
for(let i = 0; i < itemCount; i ++) {
const hash = await(db.add('hello' + i))
items.push(hash)
}
}))
describe('Defaults', function() {
it('returns an iterator', async((done) => {
const iter = db.iterator()
const next = iter.next().value
assert.notEqual(iter, null)
assert.notEqual(next, null)
done()
}))
it('returns an item with the correct structure', async((done) => {
const iter = db.iterator()
const next = iter.next().value
assert.notEqual(next, null)
assert.equal(next.hash.startsWith('Qm'), true)
assert.equal(next.payload.key, null)
assert.equal(next.payload.value, 'hello4')
assert.notEqual(next.payload.meta.ts, null)
done()
}))
it('implements Iterator interface', async((done) => {
const iter = db.iterator({ limit: -1 })
let messages = []
for(let i of iter)
messages.push(i.key)
assert.equal(messages.length, items.length)
done()
}))
it('returns 1 item as default', async((done) => {
const iter = db.iterator()
const first = iter.next().value
const second = iter.next().value
assert.equal(first.hash, items[items.length - 1])
assert.equal(second, null)
assert.equal(first.payload.value, 'hello4')
done()
}))
it('returns items in the correct order', async((done) => {
const amount = 3
const iter = db.iterator({ limit: amount })
let i = items.length - amount
for(let item of iter) {
assert.equal(item.payload.value, 'hello' + i)
i ++
}
done()
}))
})
describe('Collect', function() {
it('returns all items', async((done) => {
const messages = db.iterator({ limit: -1 }).collect()
assert.equal(messages.length, items.length)
assert.equal(messages[0].payload.value, 'hello0')
assert.equal(messages[messages.length - 1].payload.value, 'hello4')
done()
}))
it('returns 1 item', async((done) => {
const messages = db.iterator().collect()
assert.equal(messages.length, 1)
done()
}))
it('returns 3 items', async((done) => {
const messages = db.iterator({ limit: 3 }).collect()
assert.equal(messages.length, 3)
done()
}))
})
describe('Options: limit', function() {
it('returns 1 item when limit is 0', async((done) => {
const iter = db.iterator({ limit: 1 })
const first = iter.next().value
const second = iter.next().value
assert.equal(first.hash, _.last(items))
assert.equal(second, null)
done()
}))
it('returns 1 item when limit is 1', async((done) => {
const iter = db.iterator({ limit: 1 })
const first = iter.next().value
const second = iter.next().value
assert.equal(first.hash, _.last(items))
assert.equal(second, null)
done()
}))
it('returns 3 items', async((done) => {
const iter = db.iterator({ limit: 3 })
const first = iter.next().value
const second = iter.next().value
const third = iter.next().value
const fourth = iter.next().value
assert.equal(first.hash, items[items.length - 3])
assert.equal(second.hash, items[items.length - 2])
assert.equal(third.hash, items[items.length - 1])
assert.equal(fourth, null)
done()
}))
it('returns all items', async((done) => {
const messages = db.iterator({ limit: -1 })
.collect()
.map((e) => e.hash)
messages.reverse()
assert.equal(messages.length, items.length)
assert.equal(messages[0], items[items.length - 1])
done()
}))
it('returns all items when limit is bigger than -1', async((done) => {
const messages = db.iterator({ limit: -300 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, items.length)
assert.equal(messages[0], items[0])
done()
}))
it('returns all items when limit is bigger than number of items', async((done) => {
const messages = db.iterator({ limit: 300 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, items.length)
assert.equal(messages[0], items[0])
done()
}))
})
// Range options (gt/gte/lt/lte): each option takes an entry hash and limits
// the iterator to entries after/from (gt/gte) or before/up-to (lt/lte) that
// entry in the log. `items` holds the hashes of the entries added in the
// suite's setup, oldest first, so `_.last(items)` is the head (newest entry).
describe('Option: ranges', function() {
  describe('gt & gte', function() {
    it('returns 1 item when gte is the head', async((done) => {
      // gte is inclusive: starting at the head yields exactly the head.
      const messages = db.iterator({ gte: _.last(items), limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 1)
      assert.equal(messages[0], _.last(items))
      done()
    }))

    it('returns 0 items when gt is the head', async((done) => {
      // gt is exclusive: nothing lies after the newest entry.
      const messages = db.iterator({ gt: _.last(items) }).collect()
      assert.equal(messages.length, 0)
      done()
    }))

    it('returns 2 item when gte is defined', async((done) => {
      const gte = items[items.length - 2]
      const messages = db.iterator({ gte: gte, limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 2)
      assert.equal(messages[0], items[items.length - 2])
      assert.equal(messages[1], items[items.length - 1])
      done()
    }))

    it('returns all items when gte is the root item', async((done) => {
      // Inclusive start from the oldest entry returns the whole log.
      const messages = db.iterator({ gte: items[0], limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, items.length)
      assert.equal(messages[0], items[0])
      assert.equal(messages[messages.length - 1], _.last(items))
      done()
    }))

    it('returns items when gt is the root item', async((done) => {
      // Exclusive start from the oldest entry returns everything but it.
      const messages = db.iterator({ gt: items[0], limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, itemCount - 1)
      assert.equal(messages[0], items[1])
      assert.equal(messages[3], _.last(items))
      done()
    }))

    it('returns items when gt is defined', async((done) => {
      // Fetch all hashes first, then query again relative to the third one;
      // limit 100 is simply "more than enough".
      const messages = db.iterator({ limit: -1})
        .collect()
        .map((e) => e.hash)
      const gt = messages[2]
      const messages2 = db.iterator({ gt: gt, limit: 100 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages2.length, 2)
      assert.equal(messages2[0], messages[messages.length - 2])
      assert.equal(messages2[1], messages[messages.length - 1])
      done()
    }))
  })

  describe('lt & lte', function() {
    it('returns one item after head when lt is the head', async((done) => {
      // Without a limit option the iterator defaults to one entry: the one
      // immediately before the (excluded) head.
      const messages = db.iterator({ lt: _.last(items) })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 1)
      assert.equal(messages[0], items[items.length - 2])
      done()
    }))

    it('returns all items when lt is head and limit is -1', async((done) => {
      // lt is exclusive: everything except the head itself.
      const messages = db.iterator({ lt: _.last(items), limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, items.length - 1)
      assert.equal(messages[0], items[0])
      assert.equal(messages[messages.length - 1], items[items.length - 2])
      done()
    }))

    it('returns 3 items when lt is head and limit is 3', async((done) => {
      const messages = db.iterator({ lt: _.last(items), limit: 3 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 3)
      assert.equal(messages[0], items[items.length - 4])
      assert.equal(messages[2], items[items.length - 2])
      done()
    }))

    it('returns null when lt is the root item', async((done) => {
      // Nothing precedes the oldest entry.
      const messages = db.iterator({ lt: items[0] }).collect()
      assert.equal(messages.length, 0)
      done()
    }))

    it('returns one item when lte is the root item', async((done) => {
      const messages = db.iterator({ lte: items[0] })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 1)
      assert.equal(messages[0], items[0])
      done()
    }))

    it('returns all items when lte is the head', async((done) => {
      const messages = db.iterator({ lte: _.last(items), limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, itemCount)
      assert.equal(messages[0], items[0])
      assert.equal(messages[4], _.last(items))
      done()
    }))

    it('returns 3 items when lte is the head', async((done) => {
      const messages = db.iterator({ lte: _.last(items), limit: 3 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 3)
      assert.equal(messages[0], items[items.length - 3])
      assert.equal(messages[1], items[items.length - 2])
      assert.equal(messages[2], _.last(items))
      done()
    }))
  })
})
})
// Deleting a channel should leave the local store with no entries to iterate.
describe('Delete', function() {
  it('deletes a channel from the local database', () => {
    const result = db.delete()
    // assert.equal(result, true)
    const firstEntry = db.iterator().next().value
    assert.equal(firstEntry, null)
  })
})
describe('Key-Value Store', function() {
beforeEach(() => {
db = client.kvstore(channel, { subscribe: false, maxHistory: 0 })
db.delete()
})
afterEach(() => {
db.close()
})
it('put', async(() => {
await(db.put('key1', 'hello!'))
const value = db.get('key1')
assert.equal(value, 'hello!')
}))
it('get', async(() => {
await(db.put('key1', 'hello!'))
const value = db.get('key1')
assert.equal(value, 'hello!')
}))
it('put updates a value', async(() => {
await(db.put('key1', 'hello!'))
await(db.put('key1', 'hello again'))
const value = db.get('key1')
assert.equal(value, 'hello again')
}))
it('put/get - multiple keys', async(() => {
await(db.put('key1', 'hello1'))
await(db.put('key2', 'hello2'))
await(db.put('key3', 'hello3'))
const v1 = db.get('key1')
const v2 = db.get('key2')
const v3 = db.get('key3')
assert.equal(v1, 'hello1')
assert.equal(v2, 'hello2')
assert.equal(v3, 'hello3')
}))
it('deletes a key', async(() => {
await(db.put('key1', 'hello!'))
await(db.del('key1'))
const value = db.get('key1')
assert.equal(value, null)
}))
it('deletes a key after multiple updates', async(() => {
await(db.put('key1', 'hello1'))
await(db.put('key1', 'hello2'))
await(db.put('key1', 'hello3'))
await(db.del('key1'))
const value = db.get('key1')
assert.equal(value, null)
}))
it('get - integer value', async(() => {
await(db.put('key1', 123))
const v1 = db.get('key1')
assert.equal(v1, 123)
}))
it('get - object value', (done) => {
const val = { one: 'first', two: 2 }
db.put('key1', val).then(() => {
const v1 = db.get('key1')
assert.equal(_.isEqual(v1, val), true)
done()
})
})
it('get - array value', async(() => {
const val = [1, 2, 3, 4, 5]
await(db.put('key1', val))
const v1 = db.get('key1')
assert.equal(_.isEqual(v1, val), true)
}))
it('syncs databases', (done) => {
const db2 = client2.kvstore(channel, { subscribe: false, maxHistory: 0 })
db2.events.on('write', (dbname, hash) => {
assert.equal(db.get('key1', null))
db.sync(hash).then((hash) => {
const value = db.get('key1')
assert.equal(value, 'hello2')
done()
})
})
db2.put('key1', 'hello2')
})
it('sync returns the updated log\'s hash', (done) => {
let firstHash, secondHash
const db2 = client2.kvstore(channel, { subscribe: false, maxHistory: 0 })
db2.events.on('write', (dbname, hash) => {
db.sync(hash).then((hash) => {
const value1 = db.get('key1')
const value2 = db.get('key2')
assert.equal(value1, 'hello1')
assert.equal(value2, 'hello2')
assert.notEqual(firstHash, hash)
done()
})
})
db.events.on('write', (dbname, hash) => {
firstHash = hash
db2.put('key2', 'hello2')
})
db.put('key1', 'hello1')
})
})
// Document Store tests against the default index field '_id'.
describe('Document Store - default index \'_id\'', function() {
  beforeEach(() => {
    db = client.docstore(channel, { subscribe: false, maxHistory: 0 })
    db.delete()
  })

  afterEach(() => {
    db.close()
  })

  it('put', async(() => {
    await(db.put({ _id: 'hello world', doc: 'all the things'}))
    const value = db.get('hello world')
    assert.deepEqual(value, [{ _id: 'hello world', doc: 'all the things'}])
  }))

  it('get - partial term match', async(() => {
    // get() matches documents whose indexed field contains the search term.
    await(db.put({ _id: 'hello world', doc: 'some things'}))
    await(db.put({ _id: 'hello universe', doc: 'all the things'}))
    await(db.put({ _id: 'sup world', doc: 'other things'}))
    const value = db.get('hello')
    assert.deepEqual(value, [{ _id: 'hello world', doc: 'some things' },
      { _id: 'hello universe', doc: 'all the things'}])
  }))

  it('get after delete', async(() => {
    await(db.put({ _id: 'hello world', doc: 'some things'}))
    await(db.put({ _id: 'hello universe', doc: 'all the things'}))
    await(db.put({ _id: 'sup world', doc: 'other things'}))
    await(db.del('hello universe'))
    const value = db.get('hello')
    assert.deepEqual(value, [{ _id: 'hello world', doc: 'some things'}])
  }))

  it('put updates a value', async(() => {
    await(db.put({ _id: 'hello world', doc: 'all the things'}))
    await(db.put({ _id: 'hello world', doc: 'some of the things'}))
    const value = db.get('hello')
    assert.deepEqual(value, [{ _id: 'hello world', doc: 'some of the things'}])
  }))

  it('query', async(() => {
    // query() filters all stored documents with an arbitrary predicate.
    await(db.put({ _id: 'hello world', doc: 'all the things', views: 17}))
    await(db.put({ _id: 'sup world', doc: 'some of the things', views: 10}))
    await(db.put({ _id: 'hello other world', doc: 'none of the things', views: 5}))
    await(db.put({ _id: 'hey universe', doc: ''}))
    const value = db.query((e) => e.views > 5)
    assert.deepEqual(value, [{ _id: 'hello world', doc: 'all the things', views: 17},
      { _id: 'sup world', doc: 'some of the things', views: 10}])
  }))

  it('query after delete', async(() => {
    await(db.put({ _id: 'hello world', doc: 'all the things', views: 17}))
    await(db.put({ _id: 'sup world', doc: 'some of the things', views: 10}))
    await(db.put({ _id: 'hello other world', doc: 'none of the things', views: 5}))
    await(db.del('hello world'))
    await(db.put({ _id: 'hey universe', doc: ''}))
    const value = db.query((e) => e.views > 5)
    assert.deepEqual(value, [{ _id: 'sup world', doc: 'some of the things', views: 10}])
  }))
})
describe('Document Store - specified index', function() {
beforeEach(() => {
db = client.docstore(channel, { subscribe: false, indexBy: 'doc', maxHistory: 0 })
db.delete()
})
afterEach(() => {
db.close()
})
it('put', async(() => {
await(db.put({ _id: 'hello world', doc: 'all the things'}))
const value = db.get('all')
assert.deepEqual(value, [{ _id: 'hello world', doc: 'all the things'}])
}))
it('get - matches specified index', async(() => {
await(db.put({ _id: 'hello universe', doc: 'all the things'}))
await(db.put({ _id: 'hello world', doc: 'some things'}))
const value = db.get('all')
assert.deepEqual(value, [{ _id: 'hello universe', doc: 'all the things'}])
}))
})
})
})

View File

@ -1,13 +1,12 @@
// 'use strict'
// const assert = require('assert')
// const path = require('path')
// const fs = require('fs')
// const Promise = require('bluebird')
// const rimraf = require('rimraf')
// const IpfsApis = require('ipfs-test-apis')
// const IpfsDaemon = require('ipfs-daemon')
// const OrbitDB = require('../src/OrbitDB')
// const path = require('path')
// const assert = require('assert')
// const Promise = require('bluebird')
// const rmrf = require('rimraf')
// const IpfsNodeDaemon = require('ipfs-daemon/src/ipfs-node-daemon')
// const IpfsNativeDaemon = require('ipfs-daemon/src/ipfs-native-daemon')
// const OrbitDB = require('../src/OrbitDB')
// const username = 'testrunner'
// const username2 = 'rennurtset'
@ -31,16 +30,16 @@
// }, 1000)
// }
// IpfsApis.forEach(function(ipfsApi) {
// [IpfsNodeDaemon].forEach((IpfsDaemon) => {
// let ipfs, ipfsDaemon
// describe('CounterStore with ' + ipfsApi.name, function() {
// describe('CounterStore', function() {
// this.timeout(20000)
// let client1, client2
// let daemon1, daemon2
// before((done) => {
// // rimraf.sync('./orbit-db-cache.json')
// rmrf.sync(cacheFile)
// daemon1 = new IpfsDaemon(daemonConfs.daemon1)
// daemon1.on('ready', () => {
// daemon2 = new IpfsDaemon(daemonConfs.daemon2)
@ -54,17 +53,18 @@
// after((done) => {
// daemon1.stop()
// daemon2.stop()
// rmrf.sync(cacheFile)
// done()
// })
// beforeEach(() => {
// client1 = new OrbitDB(ipfs[0], username, { cacheFile: cacheFile })
// client2 = new OrbitDB(ipfs[1], username2, { cacheFile: cacheFile })
// client1 = new OrbitDB(ipfs[0])
// client2 = new OrbitDB(ipfs[1])
// })
// afterEach(() => {
// if(client1) client1.disconnect()
// if(client2) client2.disconnect()
// if (client1) client1.disconnect()
// if (client2) client2.disconnect()
// })
// describe('counters', function() {
@ -72,9 +72,9 @@
// const timeout = setTimeout(() => done(new Error('event was not fired')), 2000)
// const counter = client1.counter('counter test', { subscribe: false, cacheFile: cacheFile })
// counter.events.on('ready', () => {
// Promise.map([13, 1], (f) => counter.inc(f), { concurrency: 1 })
// Promise.map([13, 1], (f) => counter.inc(f), { concurrency: 1, cacheFile: cacheFile })
// .then(() => {
// assert.equal(counter.value(), 14)
// assert.equal(counter.value, 14)
// clearTimeout(timeout)
// client1.disconnect()
// done()
@ -83,11 +83,11 @@
// })
// })
// it('creates a new counter from cached data', function(done) {
// it.skip('creates a new counter from cached data', function(done) {
// const timeout = setTimeout(() => done(new Error('event was not fired')), 2000)
// const counter = client1.counter('counter test', { subscribe: false, cacheFile: cacheFile })
// counter.events.on('ready', () => {
// assert.equal(counter.value(), 14)
// assert.equal(counter.value, 14)
// clearTimeout(timeout)
// client1.disconnect()
// done()
@ -104,9 +104,11 @@
// waitForPeers(daemon1, [daemon2.PeerId], name, (err, res) => {
// waitForPeers(daemon2, [daemon1.PeerId], name, (err, res) => {
// console.log("load!!!")
// const increaseCounter = (counter, i) => numbers[i].map((e) => counter.inc(e))
// Promise.map([counter1, counter2], increaseCounter, { concurrency: 1 })
// .then((res) => {
// console.log("..", res)
// // wait for a while to make sure db's have been synced
// setTimeout(() => {
// assert.equal(counter2.value, 30)

191
test/docstore.test.js Normal file
View File

@ -0,0 +1,191 @@
'use strict'
const assert = require('assert')
const rmrf = require('rimraf')
const IpfsNodeDaemon = require('ipfs-daemon/src/ipfs-node-daemon')
const IpfsNativeDaemon = require('ipfs-daemon/src/ipfs-native-daemon')
const OrbitDB = require('../src/OrbitDB')
const hasIpfsApiWithPubsub = require('./test-utils').hasIpfsApiWithPubsub
const config = require('./test-config')
config.daemons.forEach((IpfsDaemon) => {
describe('orbit-db - Document Store', function() {
this.timeout(config.timeout)
let ipfs, client1, client2, db
before(function (done) {
rmrf.sync(config.defaultIpfsDirectory)
rmrf.sync(config.defaultOrbitDBDirectory)
ipfs = new IpfsDaemon()
ipfs.on('error', done)
ipfs.on('ready', () => {
assert.equal(hasIpfsApiWithPubsub(ipfs), true)
client1 = new OrbitDB(ipfs, 'A')
client2 = new OrbitDB(ipfs, 'B')
done()
})
})
after(() => {
if(client1) client1.disconnect()
if(client2) client2.disconnect()
ipfs.stop()
rmrf.sync(config.defaultOrbitDBDirectory)
rmrf.sync(config.defaultIpfsDirectory)
})
// Document Store tests against the default index field '_id'.
// A fresh, non-replicating store with no history is created per test.
describe('Default index \'_id\'', function() {
  beforeEach(() => {
    db = client1.docstore(config.dbname, { replicate: false, maxHistory: 0 })
  })

  it('put', () => {
    const doc = { _id: 'hello world', doc: 'all the things'}
    return db.put(doc)
      .then(() => {
        const value = db.get('hello world')
        assert.deepEqual(value, [doc])
      })
  })

  it('get - partial term match', () => {
    // get() matches documents whose _id contains the search term.
    const doc1 = { _id: 'hello world', doc: 'some things'}
    const doc2 = { _id: 'hello universe', doc: 'all the things'}
    const doc3 = { _id: 'sup world', doc: 'other things'}
    return db.put(doc1)
      .then(() => db.put(doc2))
      .then(() => db.put(doc3))
      .then(() => {
        const value = db.get('hello')
        assert.deepEqual(value, [doc1, doc2])
      })
  })

  it('get after delete', () => {
    const doc1 = { _id: 'hello world', doc: 'some things'}
    const doc2 = { _id: 'hello universe', doc: 'all the things'}
    const doc3 = { _id: 'sup world', doc: 'other things'}
    return db.put(doc1)
      .then(() => db.put(doc2))
      .then(() => db.put(doc3))
      .then(() => db.del('hello universe'))
      .then(() => {
        const value1 = db.get('hello')
        const value2 = db.get('sup')
        assert.deepEqual(value1, [doc1])
        assert.deepEqual(value2, [doc3])
      })
  })

  it('put updates a value', () => {
    const doc1 = { _id: 'hello world', doc: 'all the things'}
    const doc2 = { _id: 'hello world', doc: 'some of the things'}
    return db.put(doc1)
      .then(() => db.put(doc2))
      .then(() => {
        const value = db.get('hello')
        assert.deepEqual(value, [doc2])
      })
  })

  it('query', () => {
    // query() filters all stored documents with an arbitrary predicate.
    const doc1 = { _id: 'hello world', doc: 'all the things', views: 17}
    const doc2 = { _id: 'sup world', doc: 'some of the things', views: 10}
    const doc3 = { _id: 'hello other world', doc: 'none of the things', views: 5}
    const doc4 = { _id: 'hey universe', doc: ''}
    return db.put(doc1)
      .then(() => db.put(doc2))
      .then(() => db.put(doc3))
      .then(() => db.put(doc4))
      .then(() => {
        const value1 = db.query((e) => e.views > 5)
        const value2 = db.query((e) => e.views > 10)
        const value3 = db.query((e) => e.views > 17)
        assert.deepEqual(value1, [doc1, doc2])
        assert.deepEqual(value2, [doc1])
        assert.deepEqual(value3, [])
      })
  })

  it('query after delete', () => {
    const doc1 = { _id: 'hello world', doc: 'all the things', views: 17}
    const doc2 = { _id: 'sup world', doc: 'some of the things', views: 10}
    const doc3 = { _id: 'hello other world', doc: 'none of the things', views: 5}
    const doc4 = { _id: 'hey universe', doc: ''}
    return db.put(doc1)
      .then(() => db.put(doc2))
      .then(() => db.put(doc3))
      .then(() => db.del('hello world'))
      .then(() => db.put(doc4))
      .then(() => {
        const value1 = db.query((e) => e.views >= 5)
        const value2 = db.query((e) => e.views >= 10)
        assert.deepEqual(value1, [doc2, doc3])
        assert.deepEqual(value2, [doc2])
      })
  })
})
// Document Store tests with a custom index field ('doc' instead of '_id').
describe('Specified index', function() {
  beforeEach(() => {
    db = client1.docstore(config.dbname, { indexBy: 'doc', replicate: false, maxHistory: 0 })
  })

  it('put', () => {
    const record = { _id: 'hello world', doc: 'all the things'}
    return db.put(record).then(() => {
      assert.deepEqual(db.get('all'), [record])
    })
  })

  it('get - matches specified index', () => {
    const older = { _id: 'hello world', doc: 'all the things'}
    const newer = { _id: 'hello world', doc: 'some things'}
    return db.put(older)
      .then(() => db.put(newer))
      .then(() => {
        // Lookups go through the 'doc' field, so each search term finds the
        // document whose 'doc' value contains it.
        assert.deepEqual(db.get('all'), [older])
        assert.deepEqual(db.get('some'), [newer])
      })
  })
})
// Cross-client sync: db2 writes, db1 is handed db2's heads via sync() and
// should emit 'synced' once the entry has been merged into its log.
describe('Sync', function() {
  const doc1 = { _id: 'hello world', doc: 'all the things'}
  // NOTE(review): doc2 is declared but never used in this suite.
  const doc2 = { _id: 'moi moi', doc: 'everything'}
  const options = {
    replicate: false,
    maxHistory: 0,
  }

  it('syncs databases', (done) => {
    const db1 = client1.docstore(config.dbname, options)
    const db2 = client2.docstore(config.dbname, options)
    db2.events.on('write', (dbname, hash, entry, heads) => {
      // db1 must not see the document before it has synced db2's heads.
      assert.deepEqual(db1.get('hello world'), [])
      db1.sync(heads)
    })
    db1.events.on('synced', () => {
      const value = db1.get(doc1._id)
      assert.deepEqual(value, [doc1])
      done()
    })
    db2.put(doc1)
      .catch(done)
  })
})
})
})

387
test/eventlog.test.js Normal file
View File

@ -0,0 +1,387 @@
'use strict'
const assert = require('assert')
const rmrf = require('rimraf')
const mapSeries = require('./promise-map-series')
const OrbitDB = require('../src/OrbitDB')
const hasIpfsApiWithPubsub = require('./test-utils').hasIpfsApiWithPubsub
const first = require('./test-utils').first
const last = require('./test-utils').last
const config = require('./test-config')
config.daemons.forEach((IpfsDaemon) => {
describe('orbit-db - Eventlog', function() {
this.timeout(config.timeout)
let ipfs, client1, client2, db
before(function (done) {
rmrf.sync(config.defaultIpfsDirectory)
rmrf.sync(config.defaultOrbitDBDirectory)
ipfs = new IpfsDaemon()
ipfs.on('error', done)
ipfs.on('ready', () => {
assert.equal(hasIpfsApiWithPubsub(ipfs), true)
client1 = new OrbitDB(ipfs, 'A')
client2 = new OrbitDB(ipfs, 'B')
done()
})
})
after(() => {
if(client1) client1.disconnect()
if(client2) client2.disconnect()
ipfs.stop()
rmrf.sync(config.defaultOrbitDBDirectory)
rmrf.sync(config.defaultIpfsDirectory)
})
// Basic add() semantics for the Eventlog store.
describe('Eventlog', function() {
  it('returns the added entry\'s hash, 1 entry', () => {
    db = client1.eventlog(config.dbname, { replicate: false, maxHistory: 0 })
    return db.add('hello1')
      .then((hash) => {
        const items = db.iterator({ limit: -1 }).collect()
        assert.notEqual(hash, null)
        assert.equal(hash, last(items).hash)
        assert.equal(items.length, 1)
      })
  })

  it('returns the added entry\'s hash, 2 entries', () => {
    // Reuses the db created by the previous test; the new entry's hash must
    // differ from the previous head's hash.
    const prevHash = db.iterator().collect()[0].hash
    return db.add('hello2')
      .then((hash) => {
        const items = db.iterator({ limit: -1 }).collect()
        assert.equal(items.length, 2)
        assert.notEqual(hash, null)
        assert.notEqual(hash, prevHash)
        assert.equal(hash, last(items).hash)
      })
  })

  it('adds five items', () => {
    db = client1.eventlog(config.dbname, { replicate: false, maxHistory: 0 })
    return mapSeries([1, 2, 3, 4, 5], (i) => db.add('hello' + i))
      .then(() => {
        const items = db.iterator({ limit: -1 }).collect()
        assert.equal(items.length, 5)
        assert.equal(first(items.map((f) => f.payload.value)), 'hello1')
        assert.equal(last(items.map((f) => f.payload.value)), 'hello5')
      })
  })

  it('adds an item that is > 256 bytes', () => {
    db = client1.eventlog(config.dbname, { replicate: false, maxHistory: 0 })
    // Buffer.alloc(size, fill) replaces the deprecated, unsafe
    // `new Buffer(1024)` + fill('a') two-step of the original; the result
    // is identical: 1024 bytes of 'a'.
    const msg = Buffer.alloc(1024, 'a')
    return db.add(msg.toString())
      .then((hash) => {
        assert.notEqual(hash, null)
        assert.equal(hash.startsWith('Qm'), true)
        assert.equal(hash.length, 46)
      })
  })
})
describe('Iterator', function() {
let items = []
const itemCount = 5
beforeEach(() => {
items = []
db = client1.eventlog(config.dbname, { replicate: false, maxHistory: 0 })
return mapSeries([0, 1, 2, 3, 4], (i) => db.add('hello' + i))
.then((res) => items = res)
})
// Default iterator behavior: with no options, the iterator yields the single
// newest entry. `items` holds the hashes returned by add(), oldest first.
describe('Defaults', function() {
  it('returns an iterator', () => {
    const iter = db.iterator()
    const next = iter.next().value
    assert.notEqual(iter, null)
    assert.notEqual(next, null)
  })

  it('returns an item with the correct structure', () => {
    const iter = db.iterator()
    const next = iter.next().value
    assert.notEqual(next, null)
    assert.equal(next.hash.startsWith('Qm'), true)
    assert.equal(next.payload.key, null)
    // The default entry is the newest one added in beforeEach.
    assert.equal(next.payload.value, 'hello4')
  })

  it('implements Iterator interface', () => {
    // for..of must work, i.e. the iterator exposes Symbol.iterator.
    const iter = db.iterator({ limit: -1 })
    let messages = []
    for(let i of iter)
      messages.push(i.key)
    assert.equal(messages.length, items.length)
  })

  it('returns 1 item as default', () => {
    const iter = db.iterator()
    const first = iter.next().value
    const second = iter.next().value
    assert.equal(first.hash, items[items.length - 1])
    assert.equal(second, null)
    assert.equal(first.payload.value, 'hello4')
  })

  it('returns items in the correct order', () => {
    const amount = 3
    const iter = db.iterator({ limit: amount })
    // A limited iterator yields the last `amount` entries, oldest of those first.
    let i = items.length - amount
    for(let item of iter) {
      assert.equal(item.payload.value, 'hello' + i)
      i ++
    }
  })
})
// collect() drains the iterator into an array.
describe('Collect', function() {
  it('returns all items', () => {
    const entries = db.iterator({ limit: -1 }).collect()
    assert.equal(entries.length, items.length)
    assert.equal(entries[0].payload.value, 'hello0')
    assert.equal(entries[entries.length - 1].payload.value, 'hello4')
  })

  it('returns 1 item', () => {
    assert.equal(db.iterator().collect().length, 1)
  })

  it('returns 3 items', () => {
    assert.equal(db.iterator({ limit: 3 }).collect().length, 3)
  })
})
describe('Options: limit', function() {
  // NOTE(review): the title says "limit is 0" but the option passed is
  // { limit: 1 }, making this a duplicate of the next test — confirm whether
  // the title or the option value is wrong.
  it('returns 1 item when limit is 0', () => {
    const iter = db.iterator({ limit: 1 })
    const first = iter.next().value
    const second = iter.next().value
    assert.equal(first.hash, last(items))
    assert.equal(second, null)
  })

  it('returns 1 item when limit is 1', () => {
    const iter = db.iterator({ limit: 1 })
    const first = iter.next().value
    const second = iter.next().value
    assert.equal(first.hash, last(items))
    assert.equal(second, null)
  })

  it('returns 3 items', () => {
    const iter = db.iterator({ limit: 3 })
    const first = iter.next().value
    const second = iter.next().value
    const third = iter.next().value
    const fourth = iter.next().value
    assert.equal(first.hash, items[items.length - 3])
    assert.equal(second.hash, items[items.length - 2])
    assert.equal(third.hash, items[items.length - 1])
    assert.equal(fourth, null)
  })

  it('returns all items', () => {
    // limit: -1 means "no limit".
    const messages = db.iterator({ limit: -1 })
      .collect()
      .map((e) => e.hash)
    messages.reverse()
    assert.equal(messages.length, items.length)
    assert.equal(messages[0], items[items.length - 1])
  })

  it('returns all items when limit is bigger than -1', () => {
    // Any negative limit behaves like -1 (unlimited).
    const messages = db.iterator({ limit: -300 })
      .collect()
      .map((e) => e.hash)
    assert.equal(messages.length, items.length)
    assert.equal(messages[0], items[0])
  })

  it('returns all items when limit is bigger than number of items', () => {
    const messages = db.iterator({ limit: 300 })
      .collect()
      .map((e) => e.hash)
    assert.equal(messages.length, items.length)
    assert.equal(messages[0], items[0])
  })
})
// Range options (gt/gte/lt/lte) bound the iterator relative to a given entry
// hash. `items` holds the 5 hashes returned by add(), oldest first, so
// `last(items)` is the head (newest entry).
describe('Option: ranges', function() {
  describe('gt & gte', function() {
    it('returns 1 item when gte is the head', () => {
      // gte is inclusive: starting at the head yields exactly the head.
      const messages = db.iterator({ gte: last(items), limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 1)
      assert.equal(messages[0], last(items))
    })

    it('returns 0 items when gt is the head', () => {
      // gt is exclusive: nothing lies after the newest entry.
      const messages = db.iterator({ gt: last(items) }).collect()
      assert.equal(messages.length, 0)
    })

    it('returns 2 item when gte is defined', () => {
      const gte = items[items.length - 2]
      const messages = db.iterator({ gte: gte, limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 2)
      assert.equal(messages[0], items[items.length - 2])
      assert.equal(messages[1], items[items.length - 1])
    })

    it('returns all items when gte is the root item', () => {
      // Inclusive start from the oldest entry returns the whole log.
      const messages = db.iterator({ gte: items[0], limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, items.length)
      assert.equal(messages[0], items[0])
      assert.equal(messages[messages.length - 1], last(items))
    })

    it('returns items when gt is the root item', () => {
      // Exclusive start from the oldest entry returns everything but it.
      const messages = db.iterator({ gt: items[0], limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, itemCount - 1)
      assert.equal(messages[0], items[1])
      assert.equal(messages[3], last(items))
    })

    it('returns items when gt is defined', () => {
      // Fetch all hashes first, then query again relative to the third one;
      // limit 100 is simply "more than enough".
      const messages = db.iterator({ limit: -1})
        .collect()
        .map((e) => e.hash)
      const gt = messages[2]
      const messages2 = db.iterator({ gt: gt, limit: 100 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages2.length, 2)
      assert.equal(messages2[0], messages[messages.length - 2])
      assert.equal(messages2[1], messages[messages.length - 1])
    })
  })

  describe('lt & lte', function() {
    it('returns one item after head when lt is the head', () => {
      // Without a limit option the iterator defaults to one entry: the one
      // immediately before the (excluded) head.
      const messages = db.iterator({ lt: last(items) })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 1)
      assert.equal(messages[0], items[items.length - 2])
    })

    it('returns all items when lt is head and limit is -1', () => {
      // lt is exclusive: everything except the head itself.
      const messages = db.iterator({ lt: last(items), limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, items.length - 1)
      assert.equal(messages[0], items[0])
      assert.equal(messages[messages.length - 1], items[items.length - 2])
    })

    it('returns 3 items when lt is head and limit is 3', () => {
      const messages = db.iterator({ lt: last(items), limit: 3 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 3)
      assert.equal(messages[0], items[items.length - 4])
      assert.equal(messages[2], items[items.length - 2])
    })

    it('returns null when lt is the root item', () => {
      // Nothing precedes the oldest entry.
      const messages = db.iterator({ lt: items[0] }).collect()
      assert.equal(messages.length, 0)
    })

    it('returns one item when lte is the root item', () => {
      const messages = db.iterator({ lte: items[0] })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 1)
      assert.equal(messages[0], items[0])
    })

    it('returns all items when lte is the head', () => {
      const messages = db.iterator({ lte: last(items), limit: -1 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, itemCount)
      assert.equal(messages[0], items[0])
      assert.equal(messages[4], last(items))
    })

    it('returns 3 items when lte is the head', () => {
      const messages = db.iterator({ lte: last(items), limit: 3 })
        .collect()
        .map((e) => e.hash)
      assert.equal(messages.length, 3)
      assert.equal(messages[0], items[items.length - 3])
      assert.equal(messages[1], items[items.length - 2])
      assert.equal(messages[2], last(items))
    })
  })
})
})
// Cross-client sync for the eventlog: db2 writes, db1 merges db2's heads via
// sync() and should emit 'synced' once the entry is in its log.
describe('sync', () => {
  const options = {
    replicate: false,
  }

  it('syncs databases', (done) => {
    const db1 = client1.eventlog(config.dbname, options)
    const db2 = client2.eventlog(config.dbname, options)
    db1.events.on('error', (e) => {
      // BUG FIX: `stack` is a property of Error, not a method — the original
      // `e.stack()` threw a TypeError and masked the real error.
      console.log(e.stack)
      done(e)
    })
    db2.events.on('write', (dbname, hash, entry, heads) => {
      // db1 must still be empty before it syncs db2's heads.
      assert.equal(db1.iterator({ limit: -1 }).collect().length, 0)
      db1.sync(heads)
    })
    db1.events.on('synced', () => {
      const items = db1.iterator({ limit: -1 }).collect()
      assert.equal(items.length, 1)
      assert.equal(items[0].payload.value, 'hello2')
      done()
    })
    db2.add('hello2')
      .catch(done)
  })
})
})
})

428
test/feed.test.js Normal file
View File

@ -0,0 +1,428 @@
'use strict'
const assert = require('assert')
const rmrf = require('rimraf')
const mapSeries = require('./promise-map-series')
const OrbitDB = require('../src/OrbitDB')
const hasIpfsApiWithPubsub = require('./test-utils').hasIpfsApiWithPubsub
const first = require('./test-utils').first
const last = require('./test-utils').last
const config = require('./test-config')
config.daemons.forEach((IpfsDaemon) => {
describe('orbit-db - Feed', function() {
this.timeout(config.timeout)
let ipfs, client1, client2, db
before(function (done) {
rmrf.sync(config.defaultIpfsDirectory)
rmrf.sync(config.defaultOrbitDBDirectory)
ipfs = new IpfsDaemon()
ipfs.on('error', done)
ipfs.on('ready', () => {
assert.equal(hasIpfsApiWithPubsub(ipfs), true)
client1 = new OrbitDB(ipfs, 'A')
client2 = new OrbitDB(ipfs, 'B')
done()
})
})
after(() => {
if(client1) client1.disconnect()
if(client2) client2.disconnect()
ipfs.stop()
rmrf.sync(config.defaultOrbitDBDirectory)
rmrf.sync(config.defaultIpfsDirectory)
})
// Basic add()/remove() semantics for the Feed store.
describe('Feed', function() {
  it('returns the added entry\'s hash, 1 entry', () => {
    db = client1.feed(config.dbname, { replicate: false, maxHistory: 0 })
    return db.add('hello1')
      .then((hash) => {
        const items = db.iterator({ limit: -1 }).collect()
        assert.notEqual(hash, null)
        assert.equal(hash, last(items).hash)
        assert.equal(items.length, 1)
      })
  })

  it('returns the added entry\'s hash, 2 entries', () => {
    // Reuses the db created by the previous test; the new entry's hash must
    // differ from the previous head's hash.
    const prevHash = db.iterator().collect()[0].hash
    return db.add('hello2')
      .then((hash) => {
        const items = db.iterator({ limit: -1 }).collect()
        assert.equal(items.length, 2)
        assert.notEqual(hash, null)
        assert.notEqual(hash, prevHash)
        assert.equal(hash, last(items).hash)
      })
  })

  it('adds five items', () => {
    db = client1.feed(config.dbname, { replicate: false, maxHistory: 0 })
    return mapSeries([1, 2, 3, 4, 5], (i) => db.add('hello' + i))
      .then(() => {
        const items = db.iterator({ limit: -1 }).collect()
        assert.equal(items.length, 5)
        assert.equal(first(items.map((f) => f.payload.value)), 'hello1')
        assert.equal(last(items.map((f) => f.payload.value)), 'hello5')
      })
  })

  it('adds an item that is > 256 bytes', () => {
    db = client1.feed(config.dbname, { replicate: false, maxHistory: 0 })
    // Buffer.alloc(size, fill) replaces the deprecated, unsafe
    // `new Buffer(1024)` + fill('a') two-step of the original; the result
    // is identical: 1024 bytes of 'a'.
    const msg = Buffer.alloc(1024, 'a')
    return db.add(msg.toString())
      .then((hash) => {
        assert.notEqual(hash, null)
        assert.equal(hash.startsWith('Qm'), true)
        assert.equal(hash.length, 46)
      })
  })

  it('deletes an item when only one item in the database', () => {
    db = client1.feed(config.dbname, { replicate: false, maxHistory: 0 })
    return db.add('hello3')
      .then((hash) => db.remove(hash))
      .then((delopHash) => {
        // remove() resolves with the hash of the delete operation entry.
        const items = db.iterator().collect()
        assert.equal(delopHash.startsWith('Qm'), true)
        assert.equal(items.length, 0)
      })
  })

  it('deletes an item when two items in the database', () => {
    db = client1.feed(config.dbname, { replicate: false, maxHistory: 0 })
    return db.add('hello1')
      .then(() => db.add('hello2'))
      .then((hash) => db.remove(hash))
      .then(() => {
        const items = db.iterator({ limit: -1 }).collect()
        assert.equal(items.length, 1)
        assert.equal(first(items).payload.value, 'hello1')
      })
  })

  it('deletes an item between adds', () => {
    db = client1.feed(config.dbname, { replicate: false, maxHistory: 0 })
    let hash
    return db.add('hello1')
      .then((res) => hash = res)
      .then(() => db.add('hello2'))
      .then(() => db.remove(hash))
      .then(() => db.add('hello3'))
      .then(() => {
        const items = db.iterator({ limit: -1 }).collect()
        assert.equal(items.length, 2)
        const firstItem = first(items)
        const secondItem = items[1]
        assert.equal(firstItem.hash.startsWith('Qm'), true)
        assert.equal(firstItem.payload.key, null)
        assert.equal(firstItem.payload.value, 'hello2')
        assert.equal(secondItem.payload.value, 'hello3')
      })
  })
})
describe('Iterator', function() {
let items = []
const itemCount = 5
beforeEach(() => {
items = []
db = client1.feed(config.dbname, { replicate: false, maxHistory: 0 })
return mapSeries([0, 1, 2, 3, 4], (i) => db.add('hello' + i))
.then((res) => items = res)
})
// Default iterator behavior for the feed: with no options, the iterator
// yields the single newest entry. `items` holds the hashes returned by
// add(), oldest first.
describe('Defaults', function() {
  it('returns an iterator', () => {
    const iter = db.iterator()
    const next = iter.next().value
    assert.notEqual(iter, null)
    assert.notEqual(next, null)
  })

  it('returns an item with the correct structure', () => {
    const iter = db.iterator()
    const next = iter.next().value
    assert.notEqual(next, null)
    assert.equal(next.hash.startsWith('Qm'), true)
    assert.equal(next.payload.key, null)
    // The default entry is the newest one added in beforeEach.
    assert.equal(next.payload.value, 'hello4')
  })

  it('implements Iterator interface', () => {
    // for..of must work, i.e. the iterator exposes Symbol.iterator.
    const iter = db.iterator({ limit: -1 })
    let messages = []
    for(let i of iter)
      messages.push(i.key)
    assert.equal(messages.length, items.length)
  })

  it('returns 1 item as default', () => {
    const iter = db.iterator()
    const first = iter.next().value
    const second = iter.next().value
    assert.equal(first.hash, items[items.length - 1])
    assert.equal(second, null)
    assert.equal(first.payload.value, 'hello4')
  })

  it('returns items in the correct order', () => {
    const amount = 3
    const iter = db.iterator({ limit: amount })
    // A limited iterator yields the last `amount` entries, oldest of those first.
    let i = items.length - amount
    for(let item of iter) {
      assert.equal(item.payload.value, 'hello' + i)
      i ++
    }
  })
})
// collect() drains the iterator into an array.
describe('Collect', function() {
  it('returns all items', () => {
    const entries = db.iterator({ limit: -1 }).collect()
    assert.equal(entries.length, items.length)
    assert.equal(entries[0].payload.value, 'hello0')
    assert.equal(entries[entries.length - 1].payload.value, 'hello4')
  })

  it('returns 1 item', () => {
    assert.equal(db.iterator().collect().length, 1)
  })

  it('returns 3 items', () => {
    assert.equal(db.iterator({ limit: 3 }).collect().length, 3)
  })
})
describe('Options: limit', function() {
  // NOTE(review): the title says "limit is 0" but the option passed is
  // { limit: 1 }, making this a duplicate of the next test — confirm whether
  // the title or the option value is wrong.
  it('returns 1 item when limit is 0', () => {
    const iter = db.iterator({ limit: 1 })
    const first = iter.next().value
    const second = iter.next().value
    assert.equal(first.hash, last(items))
    assert.equal(second, null)
  })

  it('returns 1 item when limit is 1', () => {
    const iter = db.iterator({ limit: 1 })
    const first = iter.next().value
    const second = iter.next().value
    assert.equal(first.hash, last(items))
    assert.equal(second, null)
  })

  it('returns 3 items', () => {
    const iter = db.iterator({ limit: 3 })
    const first = iter.next().value
    const second = iter.next().value
    const third = iter.next().value
    const fourth = iter.next().value
    assert.equal(first.hash, items[items.length - 3])
    assert.equal(second.hash, items[items.length - 2])
    assert.equal(third.hash, items[items.length - 1])
    assert.equal(fourth, null)
  })

  it('returns all items', () => {
    // limit: -1 means "no limit".
    const messages = db.iterator({ limit: -1 })
      .collect()
      .map((e) => e.hash)
    messages.reverse()
    assert.equal(messages.length, items.length)
    assert.equal(messages[0], items[items.length - 1])
  })

  it('returns all items when limit is bigger than -1', () => {
    // Any negative limit behaves like -1 (unlimited).
    const messages = db.iterator({ limit: -300 })
      .collect()
      .map((e) => e.hash)
    assert.equal(messages.length, items.length)
    assert.equal(messages[0], items[0])
  })

  it('returns all items when limit is bigger than number of items', () => {
    const messages = db.iterator({ limit: 300 })
      .collect()
      .map((e) => e.hash)
    assert.equal(messages.length, items.length)
    assert.equal(messages[0], items[0])
  })
})
// Range options select entries by hash: gte/lte include the named entry,
// gt/lt exclude it. Ranges combine with `limit` as elsewhere.
describe('Option: ranges', function() {
describe('gt & gte', function() {
it('returns 1 item when gte is the head', () => {
// gte is inclusive, so the head itself is returned
const messages = db.iterator({ gte: last(items), limit: -1 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, 1)
assert.equal(messages[0], last(items))
})
it('returns 0 items when gt is the head', () => {
// gt is exclusive, so nothing is newer than the head
const messages = db.iterator({ gt: last(items) }).collect()
assert.equal(messages.length, 0)
})
it('returns 2 item when gte is defined', () => {
const gte = items[items.length - 2]
const messages = db.iterator({ gte: gte, limit: -1 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, 2)
assert.equal(messages[0], items[items.length - 2])
assert.equal(messages[1], items[items.length - 1])
})
it('returns all items when gte is the root item', () => {
const messages = db.iterator({ gte: items[0], limit: -1 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, items.length)
assert.equal(messages[0], items[0])
assert.equal(messages[messages.length - 1], last(items))
})
it('returns items when gt is the root item', () => {
// Everything except the (excluded) root entry
const messages = db.iterator({ gt: items[0], limit: -1 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, itemCount - 1)
assert.equal(messages[0], items[1])
assert.equal(messages[3], last(items))
})
it('returns items when gt is defined', () => {
const messages = db.iterator({ limit: -1})
.collect()
.map((e) => e.hash)
const gt = messages[2]
// A limit larger than the remaining entries returns just those entries
const messages2 = db.iterator({ gt: gt, limit: 100 })
.collect()
.map((e) => e.hash)
assert.equal(messages2.length, 2)
assert.equal(messages2[0], messages[messages.length - 2])
assert.equal(messages2[1], messages[messages.length - 1])
})
})
describe('lt & lte', function() {
it('returns one item after head when lt is the head', () => {
// Default limit of 1 applies: only the entry just before the head
const messages = db.iterator({ lt: last(items) })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, 1)
assert.equal(messages[0], items[items.length - 2])
})
it('returns all items when lt is head and limit is -1', () => {
// lt is exclusive: everything strictly older than the head
const messages = db.iterator({ lt: last(items), limit: -1 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, items.length - 1)
assert.equal(messages[0], items[0])
assert.equal(messages[messages.length - 1], items[items.length - 2])
})
it('returns 3 items when lt is head and limit is 3', () => {
const messages = db.iterator({ lt: last(items), limit: 3 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, 3)
assert.equal(messages[0], items[items.length - 4])
assert.equal(messages[2], items[items.length - 2])
})
// NOTE(review): title says "returns null" but the assertion checks for an
// empty collection — the behavior under test is "returns no items".
it('returns null when lt is the root item', () => {
const messages = db.iterator({ lt: items[0] }).collect()
assert.equal(messages.length, 0)
})
it('returns one item when lte is the root item', () => {
// lte is inclusive, so the root itself is returned
const messages = db.iterator({ lte: items[0] })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, 1)
assert.equal(messages[0], items[0])
})
it('returns all items when lte is the head', () => {
const messages = db.iterator({ lte: last(items), limit: -1 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, itemCount)
assert.equal(messages[0], items[0])
assert.equal(messages[4], last(items))
})
it('returns 3 items when lte is the head', () => {
const messages = db.iterator({ lte: last(items), limit: 3 })
.collect()
.map((e) => e.hash)
assert.equal(messages.length, 3)
assert.equal(messages[0], items[items.length - 3])
assert.equal(messages[1], items[items.length - 2])
assert.equal(messages[2], last(items))
})
})
})
})
// Manual synchronization: with replicate: false the stores do not exchange
// updates over pubsub, so db1 only receives db2's entry via db1.sync(heads).
describe('sync', () => {
const options = {
replicate: false,
}
it('syncs databases', (done) => {
const db1 = client1.feed(config.dbname, options)
const db2 = client2.feed(config.dbname, options)
db2.events.on('write', (dbname, hash, entry, heads) => {
// db1 must still be empty before we hand it db2's heads
// NOTE(review): an assertion failure here throws inside the event
// handler rather than reaching done(err) — confirm mocha surfaces it.
assert.equal(db1.iterator({ limit: -1 }).collect().length, 0)
db1.sync(heads)
})
db1.events.on('synced', () => {
// After sync, db1 should contain exactly the entry db2 wrote
const items = db1.iterator({ limit: -1 }).collect()
assert.equal(items.length, 1)
assert.equal(items[0].payload.value, 'hello2')
done()
})
db2.add('hello2')
.catch(done)
})
})
})
})

View File

@ -6,16 +6,16 @@ module.exports = {
Swarm: ['/ip4/0.0.0.0/tcp/0'],
Gateway: '/ip4/0.0.0.0/tcp/0'
},
Bootstrap: [],
Discovery: {
MDNS: {
Enabled: true,
Interval: 10
},
webRTCStar: {
Enabled: true
Enabled: false
}
},
Bootstrap: []
}
},
daemon2: {
IpfsDataDir: '/tmp/orbit-db-tests-2',
@ -24,15 +24,15 @@ module.exports = {
Swarm: ['/ip4/0.0.0.0/tcp/0'],
Gateway: '/ip4/0.0.0.0/tcp/0'
},
Bootstrap: [],
Discovery: {
MDNS: {
Enabled: true,
Interval: 10
},
webRTCStar: {
Enabled: true
Enabled: false
}
},
Bootstrap: []
},
}
}
}

163
test/kvstore.test.js Normal file
View File

@ -0,0 +1,163 @@
'use strict'
const assert = require('assert')
const rmrf = require('rimraf')
const OrbitDB = require('../src/OrbitDB')
const hasIpfsApiWithPubsub = require('./test-utils').hasIpfsApiWithPubsub
const config = require('./test-config')
// Run the Key-Value Store suite against every configured IPFS daemon implementation
config.daemons.forEach((IpfsDaemon) => {
describe('orbit-db - Key-Value Store', function() {
this.timeout(config.timeout)
let ipfs, client1, client2, db
// One shared IPFS daemon; two OrbitDB clients ('A' and 'B') on top of it
before(function (done) {
rmrf.sync(config.defaultIpfsDirectory)
rmrf.sync(config.defaultOrbitDBDirectory)
ipfs = new IpfsDaemon()
ipfs.on('error', done)
ipfs.on('ready', () => {
// Tests require both the object API and pubsub to be available
assert.equal(hasIpfsApiWithPubsub(ipfs), true)
client1 = new OrbitDB(ipfs, 'A')
client2 = new OrbitDB(ipfs, 'B')
done()
})
})
after(() => {
if(client1) client1.disconnect()
if(client2) client2.disconnect()
ipfs.stop()
rmrf.sync(config.defaultOrbitDBDirectory)
rmrf.sync(config.defaultIpfsDirectory)
})
// Fresh store per test; replication and history loading disabled for isolation
beforeEach(() => {
db = client1.kvstore(config.dbname, { replicate: false, maxHistory: 0 })
})
it('put', () => {
return db.put('key1', 'hello1')
.then(() => {
const value = db.get('key1')
assert.equal(value, 'hello1')
})
})
it('get', () => {
return db.put('key1', 'hello2')
.then(() => {
const value = db.get('key1')
assert.equal(value, 'hello2')
})
})
it('put updates a value', () => {
// Last write wins for the same key
return db.put('key1', 'hello3')
.then(() => db.put('key1', 'hello4'))
.then(() => {
const value = db.get('key1')
assert.equal(value, 'hello4')
})
})
it('set is an alias for put', () => {
return db.set('key1', 'hello5')
.then(() => {
const value = db.get('key1')
assert.equal(value, 'hello5')
})
})
it('put/get - multiple keys', () => {
return db.put('key1', 'hello1')
.then(() => db.put('key2', 'hello2'))
.then(() => db.put('key3', 'hello3'))
.then(() => {
const v1 = db.get('key1')
const v2 = db.get('key2')
const v3 = db.get('key3')
assert.equal(v1, 'hello1')
assert.equal(v2, 'hello2')
assert.equal(v3, 'hello3')
})
})
it('deletes a key', () => {
return db.put('key1', 'hello!')
.then(() => db.del('key1'))
.then(() => {
// Deleted keys read back as null/undefined (assert.equal is loose)
const value = db.get('key1')
assert.equal(value, null)
})
})
it('deletes a key after multiple updates', () => {
return db.put('key1', 'hello1')
.then(() => db.put('key1', 'hello2'))
.then(() => db.put('key1', 'hello3'))
.then(() => db.del('key1'))
.then(() => {
const value = db.get('key1')
assert.equal(value, null)
})
})
// Values round-trip through the store: numbers, objects and arrays
it('get - integer value', () => {
const val = 123
return db.put('key1', val)
.then(() => {
const v1 = db.get('key1')
assert.equal(v1, val)
})
})
it('get - object value', () => {
const val = { one: 'first', two: 2 }
return db.put('key1', val)
.then(() => {
const v1 = db.get('key1')
assert.deepEqual(v1, val)
})
})
it('get - array value', () => {
const val = [1, 2, 3, 4, 5]
return db.put('key1', val)
.then(() => {
const v1 = db.get('key1')
assert.deepEqual(v1, val)
})
})
// Manual sync between two stores of the same name (replication disabled)
describe('sync', () => {
const options = {
replicate: false,
}
it('syncs databases', (done) => {
const db1 = client1.kvstore(config.dbname, options)
const db2 = client2.kvstore(config.dbname, options)
db1.events.on('error', done)
db2.events.on('write', (dbname, hash, entry, heads) => {
// db2 has the value locally, db1 not yet — until it syncs the heads
assert.equal(db1.get('key1'), null)
assert.equal(db2.get('key1'), 'hello1')
db1.sync(heads)
})
db1.events.on('synced', () => {
const value = db1.get('key1')
assert.equal(value, 'hello1')
done()
})
db2.put('key1', 'hello1')
.catch(done)
})
})
})
})

96
test/persistency.js Normal file
View File

@ -0,0 +1,96 @@
'use strict'
const assert = require('assert')
const mapSeries = require('./promise-map-series')
const rmrf = require('rimraf')
const hasIpfsApiWithPubsub = require('./test-utils').hasIpfsApiWithPubsub
const OrbitDB = require('../src/OrbitDB')
const config = require('./test-config')
// Daemon settings
const daemonsConf = require('./ipfs-daemons.conf.js')
// orbit-db path
const testDataDir = './orbit-db'
// Run the Persistency suite against every configured IPFS daemon implementation
config.daemons.forEach((IpfsDaemon) => {
describe('orbit-db - Persistency', function() {
this.timeout(config.timeout)
let ipfs1, ipfs2, client1, client2, db1, db2
// Wipe every data directory the daemons and stores may have written to
const removeDirectories = () => {
rmrf.sync(daemonsConf.daemon1.IpfsDataDir)
rmrf.sync(daemonsConf.daemon2.IpfsDataDir)
rmrf.sync(config.defaultIpfsDirectory)
rmrf.sync(config.defaultOrbitDBDirectory)
rmrf.sync(testDataDir)
}
// Start two daemons sequentially, then create one OrbitDB client per daemon
before(function (done) {
removeDirectories()
ipfs1 = new IpfsDaemon(daemonsConf.daemon1)
ipfs1.on('error', done)
ipfs1.on('ready', () => {
assert.equal(hasIpfsApiWithPubsub(ipfs1), true)
ipfs2 = new IpfsDaemon(daemonsConf.daemon2)
ipfs2.on('error', done)
ipfs2.on('ready', () => {
assert.equal(hasIpfsApiWithPubsub(ipfs2), true)
client1 = new OrbitDB(ipfs1, "one")
client2 = new OrbitDB(ipfs2, "two")
done()
})
})
})
after(() => {
ipfs1.stop()
ipfs2.stop()
removeDirectories()
})
describe('load', function() {
it('loads database from local cache', function(done) {
const entryCount = 100
const entryArr = []
for (let i = 0; i < entryCount; i ++)
entryArr.push(i)
// maxHistory: -1 loads the full history; cachePath points at the
// on-disk cache this test verifies survives re-opening the store
const options = {
replicate: false,
maxHistory: -1,
cachePath: testDataDir,
}
let db = client1.eventlog(config.dbname, options)
db.events.on('error', done)
db.load().then(function () {
// Write 100 entries sequentially, then reopen the same store
mapSeries(entryArr, (i) => db.add('hello' + i))
.then(function() {
db = null
db = client1.eventlog(config.dbname, options)
db.events.on('error', done)
db.events.on('ready', () => {
// try/catch so assertion failures reach mocha via done(e)
try {
const items = db.iterator({ limit: -1 }).collect()
assert.equal(items.length, entryCount)
assert.equal(items[0].payload.value, 'hello0')
assert.equal(items[entryCount - 1].payload.value, 'hello99')
done()
} catch(e) {
done(e)
}
})
db.load()
.catch(done)
})
.catch(done)
}).catch(done)
})
})
})
})

View File

@ -0,0 +1,16 @@
'use strict'
// https://gist.github.com/dignifiedquire/dd08d2f3806a7b87f45b00c41fe109b7
module.exports = function mapSeries (list, func) {
const res = []
return list.reduce((acc, next) => {
return acc.then((val) => {
res.push(val)
return func(next)
})
}, Promise.resolve(null)).then((val) => {
res.push(val)
return res.slice(1)
})
}

View File

@ -1,66 +1,50 @@
'use strict'
const _ = require('lodash')
const fs = require('fs')
const path = require('path')
const assert = require('assert')
const async = require('asyncawait/async')
const await = require('asyncawait/await')
const OrbitDB = require('../src/OrbitDB')
const mapSeries = require('p-each-series')
const rmrf = require('rimraf')
const IpfsNodeDaemon = require('ipfs-daemon/src/ipfs-node-daemon')
const IpfsNativeDaemon = require('ipfs-daemon/src/ipfs-native-daemon')
if (typeof window !== 'undefined')
window.LOG = 'ERROR'
// Data directories
const defaultIpfsDirectory = './ipfs'
const defaultOrbitDBDirectory = './orbit-db'
const hasIpfsApiWithPubsub = require('./test-utils').hasIpfsApiWithPubsub
const OrbitDB = require('../src/OrbitDB')
const config = require('./test-config')
// Daemon settings
const daemonsConf = require('./ipfs-daemons.conf.js')
const databaseName = 'oribt-db-tests'
const hasIpfsApiWithPubsub = (ipfs) => {
return ipfs.object.get !== undefined
&& ipfs.object.put !== undefined
// && ipfs.pubsub.publish !== undefined
// && ipfs.pubsub.subscribe !== undefined
}
// Shared database name
const waitForPeers = (ipfs, channel) => {
return new Promise((resolve) => {
console.log("Waiting for peers for '" + channel + "'...")
return new Promise((resolve, reject) => {
console.log("Waiting for peers...")
const interval = setInterval(() => {
ipfs.pubsub.peers(channel)
// The tests pass if used with swarm.peers (peers find each other)
// ipfs.swarm.peers()
.then((peers) => {
// console.log(peers)
if (peers.length > 0) {
console.log("Found peers, running tests...")
clearInterval(interval)
resolve()
}
})
.catch((e) => {
clearInterval(interval)
reject(e)
})
}, 1000)
})
}
// [IpfsNativeDaemon, IpfsNodeDaemon].forEach((IpfsDaemon) => {
[IpfsNodeDaemon].forEach((IpfsDaemon) => {
config.daemons.forEach((IpfsDaemon) => {
describe('orbit-db replication', function() {
this.timeout(40000)
describe('orbit-db - Replication', function() {
this.timeout(config.timeout)
let ipfs1, ipfs2, client1, client2, db1, db2
const removeDirectories= () => {
const removeDirectories = () => {
rmrf.sync(daemonsConf.daemon1.IpfsDataDir)
rmrf.sync(daemonsConf.daemon2.IpfsDataDir)
rmrf.sync(defaultIpfsDirectory)
rmrf.sync(defaultOrbitDBDirectory)
rmrf.sync(config.defaultIpfsDirectory)
rmrf.sync(config.defaultOrbitDBDirectory)
rmrf.sync('/tmp/daemon1')
rmrf.sync('/tmp/daemon2')
}
before(function (done) {
@ -73,57 +57,77 @@ const waitForPeers = (ipfs, channel) => {
ipfs2.on('error', done)
ipfs2.on('ready', () => {
assert.equal(hasIpfsApiWithPubsub(ipfs2), true)
client1 = new OrbitDB(ipfs1, databaseName)
client2 = new OrbitDB(ipfs2, databaseName + '2')
client1 = new OrbitDB(ipfs1, "one")
client2 = new OrbitDB(ipfs2, "two")
done()
})
})
})
after(() => {
ipfs1.stop()
ipfs2.stop()
after((done) => {
if (client1) client1.disconnect()
if (client2) client2.disconnect()
if (ipfs1) ipfs1.stop()
if (ipfs2) ipfs2.stop()
removeDirectories()
setTimeout(() => done(), 10000)
})
describe('two peers', function() {
beforeEach(() => {
db1 = client1.eventlog(databaseName, { maxHistory: 0 })
db2 = client2.eventlog(databaseName, { maxHistory: 0 })
db1 = client1.eventlog(config.dbname, { maxHistory: 1, cachePath: '/tmp/daemon1' })
db2 = client2.eventlog(config.dbname, { maxHistory: 1, cachePath: '/tmp/daemon2' })
})
it.only('replicates database of 1 entry', (done) => {
waitForPeers(ipfs1, databaseName + '2')
.then(async(() => {
db2.events.on('history', (db, data) => {
it('replicates database of 1 entry', (done) => {
waitForPeers(ipfs1, config.dbname)
.then(() => {
db2.events.once('error', done)
db2.events.once('synced', (db) => {
const items = db2.iterator().collect()
assert.equal(items.length, 1)
assert.equal(items[0].payload.value, 'hello')
done()
})
db1.add('hello')
}))
.catch(done)
})
.catch(done)
})
it('replicates database of 100 entries', (done) => {
const entryCount = 100
waitForPeers(ipfs1, databaseName + '2')
.then(async(() => {
const entryArr = []
let timer
for (let i = 0; i < entryCount; i ++)
entryArr.push(i)
waitForPeers(ipfs1, config.dbname)
.then(() => {
let count = 0
db2.events.on('history', (db, data) => {
count ++
if (count === entryCount) {
const items = db2.iterator({ limit: 100 }).collect()
assert.equal(items.length, entryCount)
assert.equal(items[0].payload.value, 'hello0')
assert.equal(_.last(items).payload.value, 'hello99')
done()
db2.events.once('error', done)
db2.events.on('synced', (d) => {
if (count === entryCount && !timer) {
timer = setInterval(() => {
const items = db2.iterator({ limit: -1 }).collect()
if (items.length === count) {
clearInterval(timer)
assert.equal(items.length, entryCount)
assert.equal(items[0].payload.value, 'hello0')
assert.equal(items[items.length - 1].payload.value, 'hello99')
setTimeout(done, 5000)
}
}, 1000)
}
})
for(let i = 0; i < entryCount; i ++)
await(db1.add('hello' + i))
}))
db1.events.on('write', () => count++)
mapSeries(entryArr, (i) => db1.add('hello' + i))
.catch(done)
})
.catch(done)
})
})
})

18
test/test-config.js Normal file
View File

@ -0,0 +1,18 @@
'use strict'
const IpfsNodeDaemon = require('ipfs-daemon/src/ipfs-node-daemon')
const IpfsNativeDaemon = require('ipfs-daemon/src/ipfs-native-daemon')
const testDaemons = require('./test-daemons')
// Set logplease logging level if in the browser
if (typeof window !== 'undefined')
window.LOG = 'ERROR'
// Shared configuration for all test suites
module.exports = {
// IPFS daemon implementations to run each suite against
daemons: testDaemons,
// Per-suite mocha timeout in milliseconds
timeout: 60000,
defaultIpfsDirectory: './ipfs',
defaultOrbitDBDirectory: './orbit-db',
// Database name shared by the suites
dbname: 'abcdefghijklmn',
}

9
test/test-daemons.js Normal file
View File

@ -0,0 +1,9 @@
'use strict'
const IpfsNodeDaemon = require('ipfs-daemon/src/ipfs-node-daemon')
const IpfsNativeDaemon = require('ipfs-daemon/src/ipfs-native-daemon')
// IPFS daemon implementations the test suites run against.
// To test a single implementation, swap the active line below:
// module.exports = [IpfsNodeDaemon]
// module.exports = [IpfsNativeDaemon]
// module.exports = [IpfsNativeDaemon, IpfsNodeDaemon]
module.exports = [IpfsNodeDaemon, IpfsNativeDaemon]

18
test/test-utils.js Normal file
View File

@ -0,0 +1,18 @@
'use strict'
// Helper functions
exports.hasIpfsApiWithPubsub = (ipfs) => {
return ipfs.object.get !== undefined
&& ipfs.object.put !== undefined
&& ipfs.pubsub.publish !== undefined
&& ipfs.pubsub.subscribe !== undefined
}
exports.first = (arr) => {
return arr[0]
}
exports.last = (arr) => {
return arr[arr.length - 1]
}