Merge branch 'master' into rnsupport

Hadar Rottenberg 2019-11-04 12:49:25 +02:00
commit 92a4f7ad24
22 changed files with 321 additions and 90 deletions

View File

@ -115,7 +115,8 @@ Thanks to:<br/>
<a href="http://github.com/alanmimms">Alan Mimms</a>,
<a href="https://github.com/dfreire">Dário Freire</a>,
<a href="http://github.com/velua">John Williamson</a>,
<a href="http://github.com/finwo">Robin Bron</a>
<a href="http://github.com/finwo">Robin Bron</a>,
<a href="http://github.com/ElieMakhoul">Elie Makhoul</a>
</p>
- Join others in sponsoring code: https://www.patreon.com/gunDB !
@ -232,6 +233,13 @@ now --npm amark/gun
Then visit the URL in the output of the 'now --npm' step, in your browser.
### [Unubo](https://unubo.app/)
Fork this GUN repo (Unubo only deploys from your own GitHub repos).
Add a Node.js app, select your GUN fork, set `npm start` as the command, and deploy.
Then visit the deployed app by following the 'view app' button, in your browser.
### [Docker](https://www.docker.com/)
[![Docker Automated build](https://img.shields.io/docker/automated/gundb/gun.svg)](https://hub.docker.com/r/gundb/gun/) [![](https://images.microbadger.com/badges/image/gundb/gun.svg)](https://microbadger.com/images/gundb/gun "Get your own image badge on microbadger.com") [![Docker Pulls](https://img.shields.io/docker/pulls/gundb/gun.svg)](https://hub.docker.com/r/gundb/gun/) [![Docker Stars](https://img.shields.io/docker/stars/gundb/gun.svg)](https://hub.docker.com/r/gundb/gun/)
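
For orientation, the `now --npm` and `npm start` deployment steps above both end up running a plain Node.js HTTP server with GUN attached. A minimal sketch of that pattern (the port and storage file name are illustrative assumptions, not taken from this commit; the same `Gun({web: server, ...})` call appears in the panic test added later in this diff):

```js
// Editor's sketch, not part of this commit: a minimal GUN relay of the kind `npm start` boots.
var Gun  = require('gun');
var http = require('http');

var server = http.createServer(function(req, res){
  res.end("GUN relay is up"); // simple response for plain HTTP requests
});

// Attach GUN to the HTTP server; `file` is the on-disk storage folder (name assumed here).
var gun = Gun({web: server, file: 'data'});

server.listen(process.env.PORT || 8080);
```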

View File

@ -20,9 +20,9 @@
border: none;
}
</style>
<a href="/todo/index.html"><iframe src="/todo/index.html"></iframe></a>
<a href="/todo/index.html"><iframe src="/todo/index.html" style="height: 100%;"></iframe></a>
<!-- a href="/json/index.html"><iframe src="/json/index.html"></iframe></a -->
<a href="/chat/index.html"><iframe src="/chat/index.html"></iframe></a>
<!-- a href="/chat/index.html"><iframe src="/chat/index.html"></iframe></a --> <!-- removing until DOM bug fixed -->
<!-- script src="../gun.js"></script -->
</body>
</html>

55
gun.js
View File

@ -811,16 +811,20 @@
// Maybe... in case the in-memory key we have is a local write
// we still need to trigger a pull/merge from peers.
} else {
//var S = +new Date;
node = Gun.obj.copy(node);
//console.log(+new Date - S, 'copy node');
}
node = Gun.graph.node(node);
tmp = (at||empty).ack;
//var S = +new Date;
root.on('in', {
'@': msg['#'],
how: 'mem',
put: node,
$: gun
});
//console.log(+new Date - S, 'root got send');
//if(0 < tmp){ return }
root.on('get', msg);
}
@ -1295,19 +1299,18 @@
if(tmp = cat.soul || cat.link || cat.dub){ return cb(tmp, as, cat) }
if(cat.jam){ return cat.jam.push([cb, as]) }
cat.jam = [[cb,as]];
gun.get(function(msg, eve){
gun.get(function go(msg, eve){
if(u === msg.put && (tmp = Object.keys(cat.root.opt.peers).length) && ++acks < tmp){
return;
}
eve.rid(msg);
var at = ((at = msg.$) && at._) || {};
tmp = cat.jam; Gun.obj.del(cat, 'jam');
Gun.obj.map(tmp, function(as, cb){
cb = as[0]; as = as[1];
if(!cb){ return }
var id = at.link || at.soul || rel.is(msg.put) || node_soul(msg.put) || at.dub;
cb(id, as, msg, eve);
});
var at = ((at = msg.$) && at._) || {}, i = 0, as;
tmp = cat.jam; delete cat.jam; // tmp = cat.jam.splice(0, 100);
//if(tmp.length){ process.nextTick(function(){ go(msg, eve) }) }
while(as = tmp[i++]){ //Gun.obj.map(tmp, function(as, cb){
var cb = as[0], id; as = as[1];
cb && cb(id = at.link || at.soul || rel.is(msg.put) || node_soul(msg.put) || at.dub, as, msg, eve);
} //);
}, {out: {get: {'.':true}}});
return gun;
}
@ -1701,7 +1704,7 @@
return;
}
if(link && u === link.put && (tmp = rel.is(data))){ data = Gun.node.ify({}, tmp) }
eve.rid(msg);
eve.rid? eve.rid(msg) : eve.off();
opt.ok.call(gun || opt.$, data, msg.get);
}
@ -1954,6 +1957,7 @@
;USE(function(module){
var Type = USE('../type');
var puff = (typeof setImmediate !== "undefined")? setImmediate : setTimeout;
function Mesh(root){
var mesh = function(){};
@ -1972,12 +1976,18 @@
if('[' === tmp){
try{msg = JSON.parse(raw);}catch(e){opt.log('DAM JSON parse error', e)}
if(!msg){ return }
var i = 0, m;
var S = +new Date; // STATS!
while(m = msg[i++]){
mesh.hear(m, peer);
}
(mesh.hear.long || (mesh.hear.long = [])).push(+new Date - S);
//console.log('hear batch length of', msg.length);
(function go(){
var S = +new Date; // STATS!
var m, c = 100; // hardcoded for now?
while(c-- && (m = msg.shift())){
mesh.hear(m, peer);
}
//console.log(+new Date - S, 'hear batch');
(mesh.hear.long || (mesh.hear.long = [])).push(+new Date - S);
if(!msg.length){ return }
puff(go, 0);
}());
return;
}
if('{' === tmp || (Type.obj.is(raw) && (msg = raw))){
@ -1985,6 +1995,7 @@
}catch(e){return opt.log('DAM JSON parse error', e)}
if(!msg){ return }
if(!(id = msg['#'])){ id = msg['#'] = Type.text.random(9) }
if(msg.DBG_s){ console.log(+new Date - msg.DBG_s, 'to hear', id) }
if(dup.check(id)){ return }
dup.track(id, true).it = msg; // GUN core also dedups, so `true` is needed. // Does GUN core need to dedup anymore?
if(!(hash = msg['##']) && u !== msg.put){ hash = msg['##'] = Type.obj.hash(msg.put) }
@ -2000,7 +2011,9 @@
}
return;
}
//var S = +new Date;
root.on('in', msg);
//!msg.nts && console.log(+new Date - S, 'msg', msg['#']);
return;
}
}
@ -2014,6 +2027,7 @@
if(this.to){ this.to.next(msg) } // compatible with middleware adapters.
if(!msg){ return false }
var id, hash, tmp, raw;
//var S = +new Date; //msg.DBG_s = msg.DBG_s || +new Date;
var meta = msg._||(msg._=function(){});
if(!(id = msg['#'])){ id = msg['#'] = Type.text.random(9) }
if(!(hash = msg['##']) && u !== msg.put){ hash = msg['##'] = Type.obj.hash(msg.put) }
@ -2027,12 +2041,15 @@
}
}
}
//console.log(+new Date - S, 'mesh say prep');
dup.track(id).it = msg; // track for 9 seconds, default. Earth<->Mars would need more!
if(!peer){ peer = (tmp = dup.s[msg['@']]) && (tmp = tmp.it) && (tmp = tmp._) && (tmp = tmp.via) }
if(!peer && mesh.way){ return mesh.way(msg) }
if(!peer || !peer.id){ message = msg;
if(!Type.obj.is(peer || opt.peers)){ return false }
//var S = +new Date;
Type.obj.map(peer || opt.peers, each); // in case peer is a peer list.
//console.log(+new Date - S, 'mesh say loop');
return;
}
if(!peer.wire && mesh.wire){ mesh.wire(peer) }
@ -2056,8 +2073,10 @@
peer.batch = peer.tail = null;
if(!tmp){ return }
if(!tmp.length){ return } // if(3 > tmp.length){ return } // TODO: ^
//var S = +new Date;
try{tmp = (1 === tmp.length? tmp[0] : JSON.stringify(tmp));
}catch(e){return opt.log('DAM JSON stringify error', e)}
//console.log(+new Date - S, 'mesh flush', tmp.length);
if(!tmp){ return }
send(tmp, peer);
}
@ -2067,12 +2086,14 @@
// for now - find better place later.
function send(raw, peer){ try{
var wire = peer.wire;
//var S = +new Date;
if(peer.say){
peer.say(raw);
} else
if(wire.send){
wire.send(raw);
}
//console.log(+new Date - S, 'wire send', raw.length);
mesh.say.d += raw.length||0; ++mesh.say.c; // STATS!
}catch(e){
(peer.queue = peer.queue || []).push(raw);
@ -2251,6 +2272,8 @@
return wire;
}catch(e){}}
setTimeout(function(){ root.on('out', {dam:'hi'}) },1); // it can take a while to open a socket, so maybe no longer lazy load for perf reasons?
var wait = 2 * 1000;
function reconnect(peer){
clearTimeout(peer.defer);

2
gun.min.js vendored

File diff suppressed because one or more lines are too long

View File

@ -1,24 +1,22 @@
var Gun = (typeof window !== "undefined")? window.Gun : require('../gun');
Gun.on('opt', function(root){
Gun.on('create', function(root){
this.to.next(root);
if(root.once){ return }
root.on('in', function(msg){
//Msg did not have a peer property saved before, so nothing ever went further
if(!msg._ || !msg.BYE){ return this.to.next(msg) }
var peer = msg._.via;
(peer.bye = peer.bye || []).push(msg.BYE);
})
var mesh = root.opt.mesh;
if(!mesh){ return }
mesh.hear['bye'] = function(msg, peer){
(peer.byes = peer.byes || []).push(msg.bye);
}
root.on('bye', function(peer){
this.to.next(peer);
if(!peer.bye){ return }
var gun = root.gun;
Gun.obj.map(peer.bye, function(data){
if(!peer.byes){ return }
var gun = root.$;
Gun.obj.map(peer.byes, function(data){
Gun.obj.map(data, function(put, soul){
gun.get(soul).put(put);
});
});
peer.bye = [];
peer.byes = [];
});
});
@ -30,7 +28,7 @@ Gun.chain.bye = function(){
var tmp = data;
(data = {})[at.get] = tmp;
});
root.on('out', {BYE: data});
root.on('out', {bye: data});
return gun;
}
return bye;

View File

@ -19,12 +19,12 @@
function GC(){
var souls = Object.keys(root.graph||empty);
var toss = Math.ceil(souls.length * 0.01);
//var start = Gun.state(), i = toss;
//var S = +new Date;
Gun.list.map(souls, function(soul){
if(--toss < 0){ return }
root.gun.get(soul).off();
});
//console.log("evicted", i, 'nodes in', ((Gun.state() - start)/1000).toFixed(2), 'sec.');
//console.log(+new Date - S, 'gc');
}
/*
root.on('in', function(msg){

View File

@ -19,7 +19,11 @@ Gun.on('create', function(root){
try{ dgram = require("dgram") }catch(e){ return }
var socket = dgram.createSocket({type: "udp4", reuseAddr: true});
socket.bind(udp.port);
socket.bind({port: udp.port, exclusive: true}, function(){
socket.setBroadcast(true);
socket.setMulticastTTL(128);
socket.addMembership(udp.address);
});
socket.on("listening", function(){
try { socket.addMembership(udp.address) }catch(e){ return }

View File

@ -1,4 +1,4 @@
/* Promise Library v1.0 for GUN DB
/* Promise Library v1.1 for GUN DB
* Turn any part of a gun chain into a promise that you can then use
* with the .then().catch() pattern.
* In normal gun doing var item = gun.get('someKey'), gun returns a reference
@ -67,7 +67,7 @@ Gun.chain.promPut = async function (item, opt) {
var gun = this;
return (new Promise((res, rej)=>{
gun.put(item, function(ack) {
if(ack.err){rej(ack.err)}
if(ack.err){console.log(ack.err); ack.ok=-1; res({ref:gun, ack:ack})}
res({ref:gun, ack:ack});
}, opt);
}))
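
The change above makes a failed `promPut` resolve (with `ack.ok` set to `-1`) instead of rejecting. A hedged usage sketch based only on the `promPut` signature and resolve value visible in this hunk; the require path and key/data names are assumptions:

```js
// Illustrative only: promPut resolves to {ref, ack} per the code above.
var Gun = require('gun');
require('gun/lib/promise'); // assumed path for this add-on, mirroring other lib/ extensions

var gun = Gun();
gun.get('profile').promPut({name: 'Alice'}).then(function(res){
  if(res.ack && res.ack.ok === -1){ // after this change, errors resolve with ok = -1 rather than rejecting
    console.log('put failed:', res.ack.err);
    return;
  }
  console.log('put acknowledged:', res.ack.ok); // res.ref is the same chain promPut was called on
});
```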

View File

@ -11,15 +11,15 @@
opt.pack = opt.pack || (opt.memory? (opt.memory * 1000 * 1000) : 1399000000) * 0.3; // max_old_space_size defaults to 1400 MB.
opt.until = opt.until || opt.wait || 250;
opt.batch = opt.batch || (10 * 1000);
opt.chunk = opt.chunk || (1024 * 1024 * 10); // 10MB
opt.chunk = opt.chunk || (1024 * 1024 * 1); // 1MB
opt.code = opt.code || {};
opt.code.from = opt.code.from || '!';
//opt.jsonify = true; if(opt.jsonify){ console.log("JSON RAD!!!") } // TODO: REMOVE!!!!
opt.jsonify = true;
function ename(t){ return encodeURIComponent(t).replace(/\*/g, '%2A') }
function atomic(v){ return u !== v && (!v || 'object' != typeof v) }
var map = Gun.obj.map;
var LOG = false;//true;
var LOG = false;
if(!opt.store){
return opt.log("ERROR: Radisk needs `opt.store` interface with `{get: fn, put: fn (, list: fn)}`!");
@ -44,7 +44,9 @@
if(val instanceof Function){
var o = cb || {};
cb = val;
var S; LOG && (S = +new Date);
val = r.batch(key);
LOG && console.log(+new Date - S, 'rad mem');
if(u !== val){
cb(u, r.range(val, o), o);
if(atomic(val)){ return }
@ -230,19 +232,22 @@
r.read = function(key, cb, o){
o = o || {};
if(RAD && !o.next){ // cache
var S; LOG && (S = +new Date);
var val = RAD(key);
LOG && console.log(+new Date - S, 'rad cached');
//if(u !== val){
//cb(u, val, o);
if(atomic(val)){ cb(u, val, o); return }
// if a node is requested and some of it is cached... the other parts might not be.
//}
}
o.span = (u !== o.start) || (u !== o.end);
o.span = (u !== o.start) || (u !== o.end); // is there a start or end?
var g = function Get(){};
g.lex = function(file){ var tmp;
file = (u === file)? u : decodeURIComponent(file);
tmp = o.next || key || (o.reverse? o.end || '\uffff' : o.start || '');
if(!file || (o.reverse? file < tmp : file > tmp)){
LOG && console.log(+new Date - S, 'rad read lex'); S = +new Date;
if(o.next || o.reverse){ g.file = file }
if(tmp = Q[g.file]){
tmp.push({key: key, ack: cb, file: g.file, opt: o});
@ -263,25 +268,33 @@
g.info = info;
if(disk){ RAD = g.disk = disk }
disk = Q[g.file]; delete Q[g.file];
LOG && console.log(+new Date - S, 'rad read it in, now ack to:', disk.length); S = +new Date;
map(disk, g.ack);
LOG && console.log(+new Date - S, 'rad read acked');
}
g.ack = function(as){
if(!as.ack){ return }
var tmp = as.key, o = as.opt, info = g.info, rad = g.disk || noop, data = r.range(rad(tmp), o), last = rad.last || Radix.map(rad, rev, revo);
var key = as.key, o = as.opt, info = g.info, rad = g.disk || noop, data = r.range(rad(key), o), last = rad.last || Radix.map(rad, rev, revo);
o.parsed = (o.parsed || 0) + (info.parsed||0);
o.chunks = (o.chunks || 0) + 1;
if(!o.some){ o.some = (u !== data) }
if(u !== data){ as.ack(g.err, data, o) }
else if(!as.file){ !o.some && as.ack(g.err, u, o); return }
if(!o.span){
if(/*!last || */last === tmp){ !o.some && as.ack(g.err, u, o); return }
if(last && last > tmp && 0 != last.indexOf(tmp)){ !o.some && as.ack(g.err, u, o); return }
o.more = true;
if((!as.file) // if no more places to look
|| (!o.span && last === key) // if our key exactly matches the very last atomic record
|| (!o.span && last && last > key && 0 != last.indexOf(key)) // 'zach' may be lexically larger than 'za', but there still might be more, like 'zane', in the 'za' prefix bucket, so do not end here.
){
o.more = u;
as.ack(g.err, data, o);
return
}
if(o.some && o.parsed >= o.limit){ return }
if(u !== data){
as.ack(g.err, data, o); // more might be coming!
if(o.parsed >= o.limit){ return } // even if there is more, we've hit our limit; the asking peer will need to make a new ask with a new starting point.
}
o.next = as.file;
r.read(tmp, as.ack, o);
r.read(key, as.ack, o);
}
if(o.reverse){ g.lex.reverse = true }
LOG && (S = +new Date);
r.list(g.lex);
}
function rev(a,b){ return b }
@ -302,7 +315,7 @@
var p = function Parse(){}, info = {};
p.disk = Radix();
p.read = function(err, data){ var tmp;
LOG && console.log('read disk in', +new Date - start, ename(file)); // keep this commented out in production!
LOG && console.log('read disk in', +new Date - S, ename(file)); // keep this commented out in production!
delete Q[file];
if((p.err = err) || (p.not = !data)){
return map(q, p.ack);
@ -319,12 +332,12 @@
}
info.parsed = data.length;
LOG && (start = +new Date); // keep this commented out in production!
if(opt.jsonify){ // temporary testing idea
LOG && (S = +new Date); // keep this commented out in production!
if(opt.jsonify || '{' === data[0]){ // temporary testing idea
try{
var json = JSON.parse(data);
p.disk.$ = json;
LOG && console.log('parsed JSON in', +new Date - start); // keep this commented out in production!
LOG && console.log('parsed JSON in', +new Date - S); // keep this commented out in production!
map(q, p.ack);
return;
}catch(e){ tmp = e }
@ -333,7 +346,7 @@
return map(q, p.ack);
}
}
LOG && (start = +new Date); // keep this commented out in production!
LOG && (S = +new Date); // keep this commented out in production!
var tmp = p.split(data), pre = [], i, k, v;
if(!tmp || 0 !== tmp[1]){
p.err = "File '"+file+"' does not have root radix! ";
@ -356,7 +369,7 @@
if(u !== k && u !== v){ p.disk(pre.join(''), v) }
tmp = p.split(tmp[2]);
}
LOG && console.log('parsed RAD in', +new Date - start); // keep this commented out in production!
LOG && console.log('parsed RAD in', +new Date - S); // keep this commented out in production!
//cb(err, p.disk);
map(q, p.ack);
};
@ -376,7 +389,7 @@
if(p.err || p.not){ return cb(p.err, u, info) }
cb(u, p.disk, info);
}
var start; LOG && (start = +new Date); // keep this commented out in production!
var S; LOG && (S = +new Date); // keep this commented out in production!
if(raw){ return p.read(null, raw) }
opt.store.get(ename(file), p.read);
}

View File

@ -12,7 +12,7 @@
opt.pack = opt.pack || (opt.memory? (opt.memory * 1000 * 1000) : 1399000000) * 0.3; // max_old_space_size defaults to 1400 MB.
opt.until = opt.until || opt.wait || 250;
opt.batch = opt.batch || (10 * 1000);
opt.chunk = opt.chunk || (1024 * 1024 * 10); // 10MB
opt.chunk = opt.chunk || (1024 * 1024 * 1); // 1MB
opt.code = opt.code || {};
opt.code.from = opt.code.from || '!';
//opt.jsonify = true; // TODO: REMOVE!!!!

View File

@ -3,7 +3,6 @@
function Store(opt){
opt = opt || {};
opt.file = String(opt.file || 'radata');
opt.chunk = opt.chunk || (1024 * 1024); // 1MB
var db = null, u;
try{opt.indexedDB = opt.indexedDB || indexedDB}catch(e){}

View File

@ -8,9 +8,9 @@ Gun.on('create', function(root){
this.to.next(root);
var opt = root.opt;
if(!opt.s3 && !process.env.AWS_S3_BUCKET){ return }
opt.batch = opt.batch || (1000 * 10);
opt.until = opt.until || (1000 * 3);
opt.chunk = opt.chunk || (1024 * 1024 * 10); // 10MB
//opt.batch = opt.batch || (1000 * 10);
//opt.until = opt.until || (1000 * 3); // ignoring these now, cause perf > cost
//opt.chunk = opt.chunk || (1024 * 1024 * 10); // 10MB // when cost only cents
try{AWS = require('aws-sdk');
}catch(e){

View File

@ -3,7 +3,7 @@ var Gun = (typeof window !== "undefined")? window.Gun : require('../gun');
Gun.on('create', function(root){
if(Gun.TESTING){ root.opt.file = 'radatatest' }
this.to.next(root);
var opt = root.opt, u;
var opt = root.opt, empty = {}, u;
if(false === opt.radisk){ return }
var Radisk = (Gun.window && Gun.window.Radisk) || require('./radisk');
var Radix = Radisk.Radix;
@ -14,14 +14,23 @@ Gun.on('create', function(root){
root.on('put', function(msg){
this.to.next(msg);
var id = msg['#'] || Gun.text.random(3), track = !msg['@'], acks = track? 0 : u; // only ack non-acks.
if(msg.rad && !track){ return } // don't save our own acks
var start = (+new Date); // STATS!
var got = (msg._||empty).rad, now = Gun.state();
var S = (+new Date); // STATS!
Gun.graph.is(msg.put, null, function(val, key, node, soul){
if(!track && got){
var at = (root.next||empty)[soul];
if(!at){ return }
if(u !== got['.']){ at = (at.next||empty)[key] }
if(!at){ return }
at.rad = now;
return;
}
if(track){ ++acks }
//console.log('put:', soul, key, val);
val = Radisk.encode(val, null, esc)+'>'+Radisk.encode(Gun.state.is(node, key), null, esc);
rad(soul+esc+key, val, (track? ack : u));
});
//console.log(+new Date - S, 'put loop');
function ack(err, ok){
acks--;
if(ack.err){ return }
@ -31,11 +40,12 @@ Gun.on('create', function(root){
return;
}
if(acks){ return }
try{opt.store.stats.put.time[statp % 50] = (+new Date) - start; ++statp;
try{opt.store.stats.put.time[statp % 50] = (+new Date) - S; ++statp;
opt.store.stats.put.count++;
}catch(e){} // STATS!
//console.log("PAT!", id);
//console.log(+new Date - S, 'put'); S = +new Date;
root.on('in', {'@': id, ok: 1});
//console.log(+new Date - S, 'put sent');
}
});
@ -67,12 +77,19 @@ Gun.on('create', function(root){
o.limit = (tmp <= (o.pack || (1000 * 100)))? tmp : 1;
}
if(has['-'] || (soul||{})['-']){ o.reverse = true }
var start = (+new Date); // STATS! // console.log("GET!", id, JSON.stringify(key));
if(tmp = (root.next||empty)[soul]){
if(tmp && tmp.rad){ return }
if(o.atom){ tmp = (tmp.next||empty)[o.atom] }
if(tmp && tmp.rad){ return }
}
var S = (+new Date); // STATS!
rad(key||'', function(err, data, o){
try{opt.store.stats.get.time[statg % 50] = (+new Date) - start; ++statg;
try{opt.store.stats.get.time[statg % 50] = (+new Date) - S; ++statg;
opt.store.stats.get.count++;
if(err){ opt.store.stats.get.err = err }
}catch(e){} // STATS!
//if(u === data && o.chunks > 1){ return } // if we already sent a chunk, ignore ending empty responses. // this causes tests to fail.
//console.log(+new Date - S, 'got'); S = +new Date;
if(data){
if(typeof data !== 'string'){
if(o.atom){
@ -83,20 +100,24 @@ Gun.on('create', function(root){
}
if(!graph && data){ each(data, '') }
}
root.on('in', {'@': id, put: graph, err: err? err : u, rad: Radix});
//console.log(+new Date - S, 'got prep'); S = +new Date;
root.on('in', {'@': id, put: graph, '%': o.more? 1 : u, err: err? err : u, _: each});
//console.log(+new Date - S, 'got sent');
}, o);
//console.log(+new Date - S, 'get call');
function each(val, has, a,b){
if(!val){ return }
has = (key+has).split(esc);
var soul = has.slice(0,1)[0];
has = has.slice(-1)[0];
o.count = (o.count || 0) + val.length;
tmp = val.lastIndexOf('>');
var tmp = val.lastIndexOf('>');
var state = Radisk.decode(val.slice(tmp+1), null, esc);
val = Radisk.decode(val.slice(0,tmp), null, esc);
(graph = graph || {})[soul] = Gun.state.ify(graph[soul], has, state, val, soul);
if(o.limit && o.limit <= o.count){ return true }
}
each.rad = get;
});
opt.store.stats = {get:{time:{}, count:0}, put: {time:{}, count:0}}; // STATS!
var statg = 0, statp = 0; // STATS!

View File

@ -1,6 +1,6 @@
{
"name": "gun",
"version": "0.2019.915",
"version": "0.2019.930",
"description": "A realtime, decentralized, offline-first, graph data synchronization engine.",
"main": "index.js",
"browser": "browser.js",

View File

@ -1,5 +1,6 @@
var Type = require('../type');
var puff = (typeof setImmediate !== "undefined")? setImmediate : setTimeout;
function Mesh(root){
var mesh = function(){};
@ -18,10 +19,18 @@ function Mesh(root){
if('[' === tmp){
try{msg = JSON.parse(raw);}catch(e){opt.log('DAM JSON parse error', e)}
if(!msg){ return }
var i = 0, m;
while(m = msg[i++]){
mesh.hear(m, peer);
}
//console.log('hear batch length of', msg.length);
(function go(){
var S = +new Date; // STATS!
var m, c = 100; // hardcoded for now?
while(c-- && (m = msg.shift())){
mesh.hear(m, peer);
}
//console.log(+new Date - S, 'hear batch');
(mesh.hear.long || (mesh.hear.long = [])).push(+new Date - S);
if(!msg.length){ return }
puff(go, 0);
}());
return;
}
if('{' === tmp || (Type.obj.is(raw) && (msg = raw))){
@ -29,6 +38,7 @@ function Mesh(root){
}catch(e){return opt.log('DAM JSON parse error', e)}
if(!msg){ return }
if(!(id = msg['#'])){ id = msg['#'] = Type.text.random(9) }
if(msg.DBG_s){ console.log(+new Date - msg.DBG_s, 'to hear', id) }
if(dup.check(id)){ return }
dup.track(id, true).it = msg; // GUN core also dedups, so `true` is needed. // Does GUN core need to dedup anymore?
if(!(hash = msg['##']) && u !== msg.put){ hash = msg['##'] = Type.obj.hash(msg.put) }
@ -44,7 +54,9 @@ function Mesh(root){
}
return;
}
//var S = +new Date;
root.on('in', msg);
//!msg.nts && console.log(+new Date - S, 'msg', msg['#']);
return;
}
}
@ -58,6 +70,7 @@ function Mesh(root){
if(this.to){ this.to.next(msg) } // compatible with middleware adapters.
if(!msg){ return false }
var id, hash, tmp, raw;
//var S = +new Date; //msg.DBG_s = msg.DBG_s || +new Date;
var meta = msg._||(msg._=function(){});
if(!(id = msg['#'])){ id = msg['#'] = Type.text.random(9) }
if(!(hash = msg['##']) && u !== msg.put){ hash = msg['##'] = Type.obj.hash(msg.put) }
@ -71,12 +84,15 @@ function Mesh(root){
}
}
}
//console.log(+new Date - S, 'mesh say prep');
dup.track(id).it = msg; // track for 9 seconds, default. Earth<->Mars would need more!
if(!peer){ peer = (tmp = dup.s[msg['@']]) && (tmp = tmp.it) && (tmp = tmp._) && (tmp = tmp.via) }
if(!peer && mesh.way){ return mesh.way(msg) }
if(!peer || !peer.id){ message = msg;
if(!Type.obj.is(peer || opt.peers)){ return false }
//var S = +new Date;
Type.obj.map(peer || opt.peers, each); // in case peer is a peer list.
//console.log(+new Date - S, 'mesh say loop');
return;
}
if(!peer.wire && mesh.wire){ mesh.wire(peer) }
@ -100,8 +116,10 @@ function Mesh(root){
peer.batch = peer.tail = null;
if(!tmp){ return }
if(!tmp.length){ return } // if(3 > tmp.length){ return } // TODO: ^
//var S = +new Date;
try{tmp = (1 === tmp.length? tmp[0] : JSON.stringify(tmp));
}catch(e){return opt.log('DAM JSON stringify error', e)}
//console.log(+new Date - S, 'mesh flush', tmp.length);
if(!tmp){ return }
send(tmp, peer);
}
@ -111,12 +129,14 @@ function Mesh(root){
// for now - find better place later.
function send(raw, peer){ try{
var wire = peer.wire;
//var S = +new Date;
if(peer.say){
peer.say(raw);
} else
if(wire.send){
wire.send(raw);
}
//console.log(+new Date - S, 'wire send', raw.length);
mesh.say.d += raw.length||0; ++mesh.say.c; // STATS!
}catch(e){
(peer.queue = peer.queue || []).push(raw);
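
The batching change in `mesh.hear` above stops processing an entire inbound array in one synchronous loop: it drains up to 100 messages, then yields to the event loop via `puff` (`setImmediate` or `setTimeout`) before continuing. A standalone sketch of the same drain-and-yield pattern, with generic names rather than GUN's:

```js
// Generic illustration of the chunked drain used by mesh.hear above.
var puff = (typeof setImmediate !== "undefined")? setImmediate : setTimeout;

function drain(queue, work, size){
  size = size || 100;                                  // same hardcoded batch size as the diff
  (function go(){
    var item, c = size;
    while(c-- && (item = queue.shift())){ work(item) } // handle up to `size` items synchronously
    if(!queue.length){ return }                        // queue emptied, done
    puff(go, 0);                                       // yield so other I/O and timers can run, then continue
  }());
}

// usage sketch: drain(incomingMessages, handleMessage);
```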

View File

@ -42,6 +42,8 @@ Gun.on('opt', function(root){
return wire;
}catch(e){}}
setTimeout(function(){ root.on('out', {dam:'hi'}) },1); // it can take a while to open a socket, so maybe no longer lazy load for perf reasons?
var wait = 2 * 1000;
function reconnect(peer){
clearTimeout(peer.defer);

View File

@ -71,19 +71,18 @@ function soul(gun, cb, opt, as){
if(tmp = cat.soul || cat.link || cat.dub){ return cb(tmp, as, cat) }
if(cat.jam){ return cat.jam.push([cb, as]) }
cat.jam = [[cb,as]];
gun.get(function(msg, eve){
gun.get(function go(msg, eve){
if(u === msg.put && (tmp = Object.keys(cat.root.opt.peers).length) && ++acks < tmp){
return;
}
eve.rid(msg);
var at = ((at = msg.$) && at._) || {};
tmp = cat.jam; Gun.obj.del(cat, 'jam');
Gun.obj.map(tmp, function(as, cb){
cb = as[0]; as = as[1];
if(!cb){ return }
var id = at.link || at.soul || rel.is(msg.put) || node_soul(msg.put) || at.dub;
cb(id, as, msg, eve);
});
var at = ((at = msg.$) && at._) || {}, i = 0, as;
tmp = cat.jam; delete cat.jam; // tmp = cat.jam.splice(0, 100);
//if(tmp.length){ process.nextTick(function(){ go(msg, eve) }) }
while(as = tmp[i++]){ //Gun.obj.map(tmp, function(as, cb){
var cb = as[0], id; as = as[1];
cb && cb(id = at.link || at.soul || rel.is(msg.put) || node_soul(msg.put) || at.dub, as, msg, eve);
} //);
}, {out: {get: {'.':true}}});
return gun;
}

View File

@ -94,7 +94,7 @@ function val(msg, eve, to){
return;
}
if(link && u === link.put && (tmp = rel.is(data))){ data = Gun.node.ify({}, tmp) }
eve.rid(msg);
eve.rid? eve.rid(msg) : eve.off();
opt.ok.call(gun || opt.$, data, msg.get);
}

View File

@ -162,16 +162,20 @@ Gun.dup = require('./dup');
// Maybe... in case the in-memory key we have is a local write
// we still need to trigger a pull/merge from peers.
} else {
//var S = +new Date;
node = Gun.obj.copy(node);
//console.log(+new Date - S, 'copy node');
}
node = Gun.graph.node(node);
tmp = (at||empty).ack;
//var S = +new Date;
root.on('in', {
'@': msg['#'],
how: 'mem',
put: node,
$: gun
});
//console.log(+new Date - S, 'root got send');
//if(0 < tmp){ return }
root.on('get', msg);
}

View File

@ -116,12 +116,13 @@ describe("Put ACK", function(){
}, {acks: c});
function wire(){
ref.hear = ref.hear || [];
var hear = ref._.root.opt.mesh.hear;
ref._.root.opt.mesh.hear = function(raw, peer){
var msg = Gun.obj.ify(raw);
console.log('hear:', msg);
hear(raw, peer);
(ref.hear || (ref.hear = [])).push(msg);
ref.hear.push(msg);
}
var say = ref._.root.opt.mesh.say;
ref._.root.opt.mesh.say = function(raw, peer){
@ -150,12 +151,13 @@ describe("Put ACK", function(){
console.log("I AM DAVE");
test.async();
var c = 0, to;
ref.hear = ref.hear || [];
var hear = ref._.root.opt.mesh.hear;
ref._.root.opt.mesh.hear = function(raw, peer){
var msg = Gun.obj.ify(raw);
console.log('hear:', msg);
hear(raw, peer);
(ref.hear || (ref.hear = [])).push(msg);
ref.hear.push(msg);
if(msg.put){ ++c }
}

137
test/panic/3puts.js Normal file
View File

@ -0,0 +1,137 @@
/*
This is the first in a series of basic networking correctness tests.
Each test itself might be dumb and simple, but built up together,
they prove desired end goals for behavior at scale.
1. (this file) Is a browser write confirmed as saved by multiple peers, even if by daisy chain?
2.
*/
var config = {
IP: require('ip').address(),
port: 8765,
servers: 1,
browsers: 2,
puts: 1000,
route: {
'/': __dirname + '/index.html',
'/gun.js': __dirname + '/../../gun.js',
'/jquery.js': __dirname + '/../../examples/jquery.js'
}
}
var panic = require('panic-server');
panic.server().on('request', function(req, res){
config.route[req.url] && require('fs').createReadStream(config.route[req.url]).pipe(res);
}).listen(config.port);
var clients = panic.clients;
var manager = require('panic-manager')();
manager.start({
clients: Array(config.servers).fill().map(function(u, i){
return {
type: 'node',
port: config.port + (i + 1)
}
}),
panic: 'http://' + config.IP + ':' + config.port
});
var servers = clients.filter('Node.js');
var bob = servers.pluck(1);
var browsers = clients.excluding(servers);
var alice = browsers.pluck(1);
var carl = browsers.excluding(alice).pluck(1);
describe("Put ACK", function(){
//this.timeout(5 * 60 * 1000);
this.timeout(10 * 60 * 1000);
it("Servers have joined!", function(){
return servers.atLeast(config.servers);
});
it("GUN started!", function(){
var tests = [], i = 0;
servers.each(function(client){
tests.push(client.run(function(test){
var env = test.props;
test.async();
try{ require('fs').unlinkSync(env.i+'data') }catch(e){}
try{ require('gun/lib/fsrm')(env.i+'data') }catch(e){}
var server = require('http').createServer(function(req, res){
res.end("I am "+ env.i +"!");
});
var port = env.config.port + env.i;
var Gun = require('gun');
var peers = [], i = env.config.servers;
while(i--){
var tmp = (env.config.port + (i + 1));
if(port != tmp){ // ignore ourselves
peers.push('http://'+ env.config.IP + ':' + tmp + '/gun');
}
}
console.log(port, " connect to ", peers);
var gun = Gun({file: env.i+'data', peers: peers, web: server});
server.listen(port, function(){
test.done();
});
}, {i: i += 1, config: config}));
});
return Promise.all(tests);
});
it(config.browsers +" browser(s) have joined!", function(){
console.log("PLEASE OPEN http://"+ config.IP +":"+ config.port +" IN "+ config.browsers +" BROWSER(S)!");
return browsers.atLeast(config.browsers);
});
it("Browsers initialized gun!", function(){
var tests = [], i = 0;
browsers.each(function(client, id){
tests.push(client.run(function(test){
try{ localStorage.clear() }catch(e){}
try{ indexedDB.deleteDatabase('radata') }catch(e){}
var env = test.props;
var gun = Gun('http://'+ env.config.IP + ':' + (env.config.port + 1) + '/gun');
window.ref = gun.get('test');
}, {i: i += 1, config: config}));
});
return Promise.all(tests);
});
it("Puts", function(){
return alice.run(function(test){
console.log("I AM ALICE");
test.async();
var i = test.props.puts, d = 0;
while(i--){ go(i) }
function go(i){
ref.get(i).put({hello: 'world'}, function(ack){
if(ack.err){ put_failed } // intentional: referencing this undefined variable throws, so a failed put fails the test loudly
if(++d !== test.props.puts){ return }
console.log("all success", d);
test.done();
});
}
}, {puts: config.puts});
});
it("All finished!", function(done){
console.log("Done! Cleaning things up...");
setTimeout(function(){
done();
},1000);
});
after("Everything shut down.", function(){
browsers.run(function(){
//location.reload();
//setTimeout(function(){
//}, 15 * 1000);
});
return servers.run(function(){
process.exit();
});
});
});

View File

@ -190,12 +190,13 @@ var names = ["Adalard","Adora","Aia","Albertina","Alfie","Allyn","Amabil","Ammam
if(opt.end < v){ return }
if(v.indexOf(find) == 0){ all[v] = true }
});
rad(find, function(err, data){
rad(find, function(err, data, o){
Radix.map(data, function(v,k){
//console.log(find+k, v);
delete all[find+k];
});
if(!Gun.obj.empty(all)){ return }
if(!data){ return } // in case there is "more" that returned empty
done();
}, opt);
});