mirror of
https://github.com/orbitdb/orbitdb.git
synced 2025-10-07 22:57:07 +00:00
Remove obsolete files
This commit is contained in:
@@ -1,46 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const isEqual = require('./utils').isEqual;
|
||||
|
||||
/**
 * GCounter - a grow-only counter CRDT.
 * Each replica increments only its own entry in `_counters`; the total value
 * is the sum over all replicas. Merging takes the per-replica maximum, which
 * makes merges commutative, associative and idempotent.
 */
class GCounter {
  /**
   * @param {string} id - this replica's id.
   * @param {Object} [payload] - existing per-replica counts ({ id: count }).
   */
  constructor(id, payload) {
    this.id = id;
    this._counters = payload ? payload : {};
    // Ensure this replica has an entry so increment() always has a base.
    this._counters[this.id] = this._counters[this.id] ? this._counters[this.id] : 0;
  }

  /**
   * Increase this replica's count.
   * @param {number} [amount=1] - defaults to 1 only when omitted/null.
   *   (The previous `if(!amount)` check silently turned increment(0) into +1.)
   */
  increment(amount) {
    if (amount == null) amount = 1;
    this._counters[this.id] = this._counters[this.id] + amount;
  }

  // Total count summed across all replicas.
  get value() {
    return Object.keys(this._counters)
      .map((id) => this._counters[id])
      .reduce((sum, count) => sum + count, 0);
  }

  // Serializable representation, consumable by GCounter.from().
  get payload() {
    return { id: this.id, counters: this._counters };
  }

  /**
   * Shallow equality with another counter: same id and identical
   * per-replica counts.
   * @returns {boolean}
   */
  compare(other) {
    if (other.id !== this.id)
      return false;

    return isEqual(other._counters, this._counters);
  }

  // Merge by taking the per-replica maximum (grow-only semantics).
  merge(other) {
    Object.keys(other._counters).forEach((id) => {
      this._counters[id] = Math.max(this._counters[id] ? this._counters[id] : 0, other._counters[id]);
    });
  }

  // Rehydrate a counter from a payload produced by the `payload` getter.
  static from(payload) {
    return new GCounter(payload.id, payload.counters);
  }
}
|
||||
|
||||
module.exports = GCounter;
|
||||
@@ -1,53 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const isEqual = require('./utils').isEqual;
|
||||
|
||||
/**
 * GSet - an add/remove set CRDT (2P-set style): additions and removals are
 * tracked in separate maps keyed by element, each entry holding { ts }.
 * An element is present when it has no removal, or its removal is older
 * than its latest addition.
 */
class GSet {
  /**
   * @param {string} id - this replica's id.
   * @param {Object} [payload] - NOTE(review): accepted but currently unused;
   *   GSet.from() passes `payload.items` here - TODO confirm the intended
   *   shape before wiring it into _added/_removed.
   */
  constructor(id, payload) {
    this.id = id;
    this._added = {};
    this._removed = {};
  }

  // Record an addition; ts defaults to "now".
  add(data, ts) {
    this._added[data] = { ts: ts || new Date().getTime() };
  }

  // Record a removal; ts defaults to "now".
  remove(data, ts) {
    this._removed[data] = { ts: ts || new Date().getTime() };
  }

  // Elements whose addition is not shadowed by a newer (or equal) removal.
  get value() {
    return Object.keys(this._added)
      .map((e) => {
        const removed = this._removed[e];
        if (!removed || removed.ts < this._added[e].ts) {
          return e;
        }
        return null;
      })
      .filter((e) => e !== null);
  }

  /**
   * Equality with another set: same id and identical add/remove maps.
   * Replaces the previous stub, which always returned false and carried
   * dead commented-out GCounter code.
   * @returns {boolean}
   */
  compare(other) {
    if (other.id !== this.id)
      return false;

    return isEqual(other._added, this._added) && isEqual(other._removed, this._removed);
  }

  /**
   * Merge another GSet into this one: per element, keep the entry with the
   * newest timestamp in each of the add/remove maps. The previous
   * implementation was an empty stub (commented-out GCounter code), so
   * merges silently dropped the other replica's state.
   */
  merge(other) {
    const takeNewest = (mine, theirs) => {
      Object.keys(theirs).forEach((e) => {
        if (!mine[e] || theirs[e].ts > mine[e].ts)
          mine[e] = theirs[e];
      });
    };
    takeNewest(this._added, other._added);
    takeNewest(this._removed, other._removed);
  }

  // Rehydrate from a serialized payload.
  static from(payload) {
    return new GSet(payload.id, payload.items);
  }
}
|
||||
|
||||
module.exports = GSet;
|
||||
@@ -1,17 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
exports.isEqual = (a, b) => {
|
||||
const propsA = Object.getOwnPropertyNames(a);
|
||||
const propsB = Object.getOwnPropertyNames(b);
|
||||
|
||||
if(propsA.length !== propsB.length)
|
||||
return false;
|
||||
|
||||
for(let i = 0; i < propsA.length; i ++) {
|
||||
const prop = propsA[i];
|
||||
if(a[prop] !== b[prop])
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -1,54 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const logger = require('logplease').create("orbit-db.Cache");
|
||||
|
||||
// const defaultFilepath = path.resolve('./orbit-db-cache.json');
|
||||
// let filePath = defaultFilepath;
|
||||
// Module-level cache state: one shared in-memory map, optionally persisted
// to a JSON file once loadCache() has been given a path.
let filePath;
let cache = {};

/**
 * Cache - a simple key/value cache backed by an optional JSON file.
 * All state is module-level, so every importer shares the same cache.
 */
class Cache {
  /**
   * Store `value` under `key`, persisting to disk when a cache file has
   * been configured via loadCache().
   * @returns {Promise} resolves once the (best-effort) write has finished.
   */
  static set(key, value) {
    return new Promise((resolve, reject) => {
      cache[key] = value;
      if (filePath) {
        // Best-effort persistence: the in-memory cache is already updated,
        // so a write error is not propagated (matches prior behavior, which
        // also never rejected here).
        fs.writeFile(filePath, JSON.stringify(cache, null, 2) + "\n", () => resolve());
      } else {
        resolve();
      }
    });
  }

  // Synchronous read from the in-memory cache only.
  static get(key) {
    return cache[key];
  }

  /**
   * Reset the in-memory cache and, when `cacheFile` is given, load its
   * contents and remember the path for future set() calls.
   * @param {string|null} cacheFile - path to a JSON cache file, or falsy
   *   to run purely in memory.
   * @returns {Promise} rejects if an existing cache file fails to parse.
   */
  static loadCache(cacheFile) {
    cache = {};
    return new Promise((resolve, reject) => {
      if (!cacheFile) {
        return resolve();
      }

      filePath = cacheFile;
      // fs.exists() is deprecated; fs.access() is the supported way to
      // probe for the file.
      fs.access(cacheFile, (err) => {
        if (err) {
          // No cache file yet - start empty.
          return resolve();
        }

        logger.debug('Load cache from ' + cacheFile);
        try {
          cache = JSON.parse(fs.readFileSync(cacheFile));
          resolve();
        } catch (e) {
          // Previously a corrupt file threw asynchronously inside the
          // fs.exists callback; surface it as a rejection instead.
          reject(e);
        }
      });
    });
  }

  // Drop all in-memory entries (does not touch any file on disk).
  static reset() {
    cache = {};
  }
}
|
||||
|
||||
module.exports = Cache;
|
||||
@@ -1,18 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * DefaultIndex - the fallback Store index.
 * It simply mirrors the oplog's full operations list; no reduction or
 * filtering is applied.
 */
class DefaultIndex {
  constructor(id) {
    this.id = id;
    this._index = [];
  }

  /** @returns {Array} the operations currently indexed. */
  get() {
    return this._index;
  }

  /**
   * Replace the index with the oplog's operations array.
   * The `entries` argument (newly added items) is ignored by this index.
   */
  updateIndex(oplog, entries) {
    this._index = oplog.ops;
  }
}
|
||||
|
||||
module.exports = DefaultIndex;
|
||||
@@ -1,92 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const EventEmitter = require('events').EventEmitter;
|
||||
const Log = require('ipfs-log');
|
||||
const DefaultIndex = require('./DefaultIndex');
|
||||
const Cache = require('../oplog/Cache');
|
||||
|
||||
/**
 * Store - base class for orbit-db data stores.
 * Wraps an ipfs-log oplog plus an index over it, and exposes an
 * EventEmitter that signals 'load', 'readable', 'data' and 'close',
 * always passing the dbname.
 */
class Store {
  /**
   * @param {Object} ipfs - IPFS instance handed to the oplog.
   * @param {string} id - user/peer id.
   * @param {string} dbname - database name (cache key and event argument).
   * @param {Object} [options] - { Index, cacheFile, ... }. Not mutated.
   */
  constructor(ipfs, id, dbname, options) {
    this.id = id;
    this.dbname = dbname;
    this.events = new EventEmitter();

    // Work on a copy so the caller's options object is left untouched
    // (the previous version Object.assign'ed defaults straight into it).
    const opts = Object.assign({}, options);
    if (!opts.Index) opts.Index = DefaultIndex;
    if (!opts.cacheFile) opts.cacheFile = null;

    this.options = opts;
    this._index = new this.options.Index(this.id);
    this._oplog = null;
    this._ipfs = ipfs;
    this._lastWrite = null; // head hash of our own last write, to skip echoes in sync()
  }

  /**
   * Open the store: create the oplog and, if a cached head exists for this
   * dbname, join it and refresh the index.
   * @returns {Promise<EventEmitter>} this store's event emitter.
   */
  use() {
    this.events.emit('load', this.dbname);
    this._oplog = new Log(this._ipfs, this.id, this.dbname, this.options);
    return Cache.loadCache(this.options.cacheFile).then(() => {
      const cached = Cache.get(this.dbname);
      if (cached) {
        return Log.fromIpfsHash(this._ipfs, cached)
          .then((log) => this._oplog.join(log))
          .then((merged) => this._index.updateIndex(this._oplog, merged))
          .then(() => this.events.emit('readable', this.dbname))
          .then(() => this.events);
      }

      return Promise.resolve(this.events);
    });
  }

  // Announce closing; does not tear down the oplog or the emitter.
  close() {
    this.events.emit('close', this.dbname);
  }

  /**
   * Merge a remote log head into the local oplog, cache the new head and
   * refresh the index.
   * @param {string} hash - remote head; ignored when falsy or when it is
   *   the hash we ourselves just wrote (avoids echo loops).
   * @returns {Promise<Array>} the newly merged items.
   */
  sync(hash) {
    if (!hash || hash === this._lastWrite)
      return Promise.resolve([]);

    let newItems = [];
    this.events.emit('load', this.dbname);
    return Log.fromIpfsHash(this._ipfs, hash)
      .then((log) => this._oplog.join(log))
      .then((merged) => newItems = merged)
      .then(() => Log.getIpfsHash(this._ipfs, this._oplog))
      .then((hash) => Cache.set(this.dbname, hash))
      .then(() => this._index.updateIndex(this._oplog, newItems))
      .then(() => {
        // Only signal 'readable' when the merge actually brought new data.
        if (newItems.length > 0)
          this.events.emit('readable', this.dbname);
      })
      .then(() => newItems);
  }

  // Reset the index and clear the oplog contents (if one exists yet).
  delete() {
    this._index = new this.options.Index(this.id);
    if (this._oplog)
      this._oplog.clear();
  }

  /**
   * Append an operation to the oplog, persist the new head hash to the
   * cache, refresh the index and emit 'data'.
   * @returns {Promise<string>|undefined} the new entry's hash, or
   *   undefined when the store has not been use()'d yet.
   */
  _addOperation(data) {
    let result, logHash;
    if (this._oplog) {
      return this._oplog.add(data)
        .then((res) => {
          result = res;
          // Expose the entry's own hash inside its payload.
          Object.assign(result.payload, { hash: res.hash });
          return result;
        })
        .then(() => Log.getIpfsHash(this._ipfs, this._oplog))
        .then((hash) => logHash = hash)
        .then(() => this._lastWrite = logHash)
        .then(() => Cache.set(this.dbname, logHash))
        .then(() => this._index.updateIndex(this._oplog, [result]))
        .then(() => this.events.emit('data', this.dbname, logHash))
        .then(() => result.hash);
    }
  }
}
|
||||
|
||||
module.exports = Store;
|
||||
@@ -1,23 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const Counter = require('../../crdts/GCounter');
|
||||
|
||||
/**
 * CounterIndex - holds the materialized GCounter for a CounterStore.
 */
class CounterIndex {
  constructor(id) {
    this._counter = new Counter(id);
  }

  /** @returns the current counter instance. */
  get() {
    return this._counter;
  }

  /**
   * Merge every 'COUNTER' operation from the newly added oplog entries
   * into the local counter.
   */
  updateIndex(oplog, added) {
    if (!this._counter)
      return;

    added
      .filter((entry) => entry && entry.payload.op === 'COUNTER')
      .forEach((entry) => this._counter.merge(Counter.from(entry.payload.value)));
  }
}
|
||||
|
||||
module.exports = CounterIndex;
|
||||
@@ -1,34 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const Log = require('ipfs-log')
|
||||
const Store = require('../Store');
|
||||
const CounterIndex = require('./CounterIndex');
|
||||
|
||||
/**
 * CounterStore - orbit-db store wrapping a grow-only counter CRDT.
 */
class CounterStore extends Store {
  constructor(ipfs, id, dbname, options) {
    // Build a fresh options object with our Index wired in. The previous
    // code did `Object.assign(options || {}, ...)` but then passed the
    // ORIGINAL `options` to super(), so when options was null/undefined
    // the CounterIndex was silently lost (and a caller's object was
    // mutated otherwise).
    super(ipfs, id, dbname, Object.assign({}, options, { Index: CounterIndex }));
  }

  // Current summed counter value.
  value() {
    return this._index.get().value;
  }

  /**
   * Increment the counter and append a 'COUNTER' operation to the oplog.
   * @param {number} [amount] - forwarded to the counter's increment().
   * @returns {Promise<string>|undefined} hash of the appended entry, or
   *   undefined when no counter is available yet.
   */
  inc(amount) {
    const counter = this._index.get();
    if (counter) {
      counter.increment(amount);
      const operation = {
        op: 'COUNTER',
        key: null,
        value: counter.payload,
        meta: {
          ts: new Date().getTime()
        }
      };
      return this._addOperation(operation);
    }
  }
}
|
||||
|
||||
module.exports = CounterStore;
|
||||
@@ -1,27 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * EventIndex - materializes the event log from oplog entries.
 * 'ADD' entries are stored under their hash; 'DEL' entries tombstone a
 * previously added entry by that hash.
 */
class EventIndex {
  constructor() {
    this._index = {};
  }

  /** @returns {Array} all currently visible event payloads. */
  get() {
    return Object.keys(this._index).map((hash) => this._index[hash]);
  }

  /**
   * Apply newly added oplog entries, skipping duplicate hashes within
   * this batch.
   */
  updateIndex(oplog, added) {
    const seen = new Set();
    for (const entry of added) {
      if (seen.has(entry.hash))
        continue;
      seen.add(entry.hash);

      if (entry.payload.op === 'ADD') {
        this._index[entry.hash] = entry.payload;
      } else if (entry.payload.op === 'DEL') {
        delete this._index[entry.payload.value];
      }
    }
  }
}
|
||||
|
||||
module.exports = EventIndex;
|
||||
@@ -1,86 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const Lazy = require('lazy.js');
|
||||
const Store = require('../Store');
|
||||
const EventIndex = require('./EventIndex');
|
||||
|
||||
/**
 * EventStore - append-only event log store.
 * Events are appended as 'ADD' operations and tombstoned with 'DEL'.
 */
class EventStore extends Store {
  constructor(ipfs, id, dbname, options) {
    // Hand super() a fresh options object with the EventIndex wired in.
    // The previous code mutated `options || {}` but then passed the
    // ORIGINAL `options` to super(), so a null/undefined options lost the
    // Index entirely.
    super(ipfs, id, dbname, Object.assign({}, options, { Index: EventIndex }));
  }

  /**
   * Append an event.
   * @returns {Promise<string>} hash of the new oplog entry.
   */
  add(data) {
    const operation = {
      op: 'ADD',
      key: null,
      value: data,
      meta: {
        ts: new Date().getTime()
      }
    };
    return this._addOperation(operation);
  }

  /** Tombstone a previously added event by its entry hash. */
  remove(hash) {
    const operation = {
      op: 'DEL',
      key: null,
      value: hash,
      meta: {
        ts: new Date().getTime()
      }
    };
    return this._addOperation(operation);
  }

  /**
   * Iterate over stored events.
   * @param {Object} [options] - { gt, gte, lt, lte, limit, reverse }.
   * @returns an ES iterator with an extra collect() returning all matches.
   */
  iterator(options) {
    const messages = this._query(this.dbname, options);
    let currentIndex = 0;
    let iterator = {
      [Symbol.iterator]() {
        return this;
      },
      next() {
        let item = { value: null, done: true };
        if (currentIndex < messages.length) {
          item = { value: messages[currentIndex], done: false };
          currentIndex++;
        }
        return item;
      },
      collect: () => messages
    };

    return iterator;
  }

  // Resolve query options against the index; returns a plain array.
  _query(dbname, opts) {
    if (!opts) opts = {};

    const amount = opts.limit ? (opts.limit > -1 ? opts.limit : this._index.get().length) : 1; // Return 1 if no limit is provided
    const operations = this._index.get();
    let result = [];

    if (opts.gt || opts.gte) {
      // Greater than case
      result = this._read(operations, opts.gt ? opts.gt : opts.gte, amount, opts.gte ? true : false);
    } else {
      // Lower than and lastN case, search latest first by reversing the sequence
      // (safe to reverse in place: EventIndex.get() builds a fresh array).
      result = this._read(operations.reverse(), opts.lt ? opts.lt : opts.lte, amount, opts.lte || !opts.lt).reverse();
    }

    if (opts.reverse) result.reverse();

    return result.toArray();
  }

  // Lazily take `amount` entries starting at `hash` (or from the start
  // when no hash is given); `inclusive` keeps the anchor entry itself.
  _read(ops, hash, amount, inclusive) {
    return Lazy(ops)
      .skipWhile((f) => hash && f.hash !== hash)
      .drop(inclusive ? 0 : 1)
      .take(amount);
  }
}
|
||||
|
||||
module.exports = EventStore;
|
||||
@@ -1,27 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * KeyValueIndex - materializes the latest value per key from the oplog.
 * Last write wins: entries are scanned newest-first and only the first
 * (i.e. newest) operation seen for each key is applied.
 */
class KeyValueIndex {
  constructor() {
    this._index = {};
  }

  /** @returns the current value for `key`, or undefined. */
  get(key) {
    return this._index[key];
  }

  /**
   * Apply newly added oplog entries.
   * Scans newest-first WITHOUT mutating `added` - the previous version
   * called added.reverse(), which reversed the caller's array in place
   * (Store.sync() then handed that reversed array back to its caller).
   */
  updateIndex(oplog, added) {
    const handled = [];
    for (let i = added.length - 1; i >= 0; i--) {
      const item = added[i];
      if (handled.indexOf(item.payload.key) === -1) {
        handled.push(item.payload.key);
        if (item.payload.op === 'PUT') {
          this._index[item.payload.key] = item.payload.value;
        } else if (item.payload.op === 'DEL') {
          delete this._index[item.payload.key];
        }
      }
    }
  }
}
|
||||
|
||||
module.exports = KeyValueIndex;
|
||||
@@ -1,45 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const Store = require('../Store');
|
||||
const KeyValueIndex = require('./KeyValueIndex');
|
||||
|
||||
/**
 * KeyValueStore - key/value store; last write per key wins (KeyValueIndex).
 */
class KeyValueStore extends Store {
  constructor(ipfs, id, dbname, options) {
    // Hand super() a fresh options object with the KeyValueIndex wired in.
    // The previous code mutated `options || {}` but passed the ORIGINAL
    // `options` to super(), so a null/undefined options lost the Index.
    super(ipfs, id, dbname, Object.assign({}, options, { Index: KeyValueIndex }));
  }

  /** @returns the latest value stored under `key`, or undefined. */
  get(key) {
    return this._index.get(key);
  }

  /** Alias for put(). NOTE: does not return the operation's promise. */
  set(key, data) {
    this.put(key, data);
  }

  /**
   * Store `data` under `key` as a 'PUT' operation.
   * @returns {Promise<string>} hash of the new oplog entry.
   */
  put(key, data) {
    const operation = {
      op: 'PUT',
      key: key,
      value: data,
      meta: {
        ts: new Date().getTime()
      }
    };
    return this._addOperation(operation);
  }

  /**
   * Remove `key` via a 'DEL' operation.
   * @returns {Promise<string>} hash of the new oplog entry.
   */
  del(key) {
    const operation = {
      op: 'DEL',
      key: key,
      value: null,
      meta: {
        ts: new Date().getTime()
      }
    };
    return this._addOperation(operation);
  }
}
|
||||
|
||||
module.exports = KeyValueStore;
|
||||
Reference in New Issue
Block a user