axe tweaks

Mark Nadal 2022-03-23 00:10:57 -07:00
parent f6b65c8e7e
commit 4f19440262
10 changed files with 215 additions and 56 deletions

gun.js (10 changed lines)

@@ -1380,7 +1380,7 @@
 var P = opt.puff;
 (function go(){
 var S = +new Date;
-var i = 0, m; while(i < P && (m = msg[i++])){ hear(m, peer) }
+var i = 0, m; while(i < P && (m = msg[i++])){ mesh.hear(m, peer) }
 msg = msg.slice(i); // slicing after is faster than shifting during.
 console.STAT && console.STAT(S, +new Date - S, 'hear loop');
 flush(peer); // force send all synchronously batched acks.
@@ -1446,7 +1446,7 @@
 console.STAT && console.STAT(S, +new Date - S, 'say json+hash');
 msg._.$put = t;
 msg['##'] = h;
-say(msg, peer);
+mesh.say(msg, peer);
 delete msg._.$put;
 }, sort);
 }
@@ -1488,7 +1488,7 @@
 loop = 1; var wr = meta.raw; meta.raw = raw; // quick perf hack
 var i = 0, p; while(i < 9 && (p = (pl||'')[i++])){
 if(!(p = ps[p])){ continue }
-say(msg, p);
+mesh.say(msg, p);
 }
 meta.raw = wr; loop = 0;
 pl = pl.slice(i); // slicing after is faster than shifting during.
@@ -1562,7 +1562,7 @@
 function res(err, raw){
 if(err){ return } // TODO: Handle!!
 meta.raw = raw; //if(meta && (raw||'').length < (999 * 99)){ meta.raw = raw } // HNPERF: If string too big, don't keep in memory.
-say(msg, peer);
+mesh.say(msg, peer);
 }
 }
 }());
@@ -1621,6 +1621,7 @@
 if(msg.pid){
 if(!peer.pid){ peer.pid = msg.pid }
 if(msg['@']){ return }
+if(msg.pid === opt.pid){ mesh.bye(peer) }
 }
 mesh.say({dam: '?', pid: opt.pid, '@': msg['#']}, peer);
 delete dup.s[peer.last]; // IMPORTANT: see https://gun.eco/docs/DAM#self
@@ -1712,6 +1713,7 @@
 var wait = 2 * 999;
 function reconnect(peer){
 clearTimeout(peer.defer);
+if(!opt.peers[peer.url]){ return }
 if(doc && peer.retry <= 0){ return }
 peer.retry = (peer.retry || opt.retry+1 || 60) - ((-peer.tried + (peer.tried = +new Date) < wait*4)?1:0);
 peer.defer = setTimeout(function to(){
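The two guards added above let DAM notice when a node has dialed itself (the '?' handshake echoes back its own pid) and stop reconnect retries for peers that are no longer listed in opt.peers. A minimal standalone sketch of that behavior; the Mesh and peer shapes here are illustrative stand-ins, not gun.js's actual internals:

// Sketch only: hypothetical Mesh/peer shapes, not gun.js's real code.
function Mesh(opt){
  var mesh = {};
  mesh.hi = function(peer, msg){                 // a DAM '?' handshake arrives
    if(msg.pid){
      if(!peer.pid){ peer.pid = msg.pid }        // remember the remote's id
      if(msg.pid === opt.pid){ mesh.bye(peer) }  // it echoed our own pid: we dialed ourselves, drop it.
    }
  };
  mesh.bye = function(peer){
    peer.wire = null;                            // pretend-close the socket
    delete opt.peers[peer.url];                  // assumption: dropping also unlists the peer
  };
  mesh.reconnect = function(peer){
    clearTimeout(peer.defer);
    if(!opt.peers[peer.url]){ return }           // unlisted (e.g. dropped) peers are not redialed
    peer.defer = setTimeout(function(){ console.log('redial', peer.url) }, 2 * 999);
  };
  return mesh;
}
// usage: a node that lists its own URL as a peer sees its own pid come back.
var url = 'http://localhost:8765/gun';
var opt = {pid: 'me123', peers: {}};
opt.peers[url] = {url: url, wire: {}};
var mesh = Mesh(opt);
mesh.hi(opt.peers[url], {dam: '?', pid: 'me123'}); // same pid, so bye() runs
mesh.reconnect({url: url});                        // no longer in opt.peers, so no retry
console.log(Object.keys(opt.peers).length);        // 0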

(unnamed changed file)

@@ -37,7 +37,7 @@
 //opt.log("WARNING: `store.list` interface might be needed!");
 }
-if(''+u != typeof require){ require('./ison') }
+if(''+u != typeof require){ require('./yson') }
 var parse = JSON.parseAsync || function(t,cb,r){ var u; try{ cb(u, JSON.parse(t,r)) }catch(e){ cb(e) } }
 var json = JSON.stringifyAsync || function(v,cb,r,s){ var u; try{ cb(u, JSON.stringify(v,r,s)) }catch(e){ cb(e) } }
 /*
/* /*

(unnamed changed file)

@@ -1,5 +1,5 @@
 ;(function(){
-require('./ison');
+require('./yson');
 var Gun = require('../gun'), u;
 Gun.serve = require('./serve');
 //process.env.GUN_ENV = process.env.GUN_ENV || 'debug';

(unnamed changed file)

@@ -72,7 +72,7 @@ Gun.on('opt', function(root){
 });
 var exec = require("child_process").exec, noop = function(){};
-require('./ison');
+require('./yson');
 var log = Gun.log, all = {}, max = 1000;
 Gun.log = console.STAT = function(a,b,c,d){

(unnamed changed file)

@@ -14,6 +14,7 @@ Gun.on('create', function(root){
 root.on('put', function(msg){
 this.to.next(msg);
 if((msg._||'').rad){ return } // don't save what just came from a read.
+//if(msg['@']){ return } // WHY DID I NOT ADD THIS?
 var id = msg['#'], put = msg.put, soul = put['#'], key = put['.'], val = put[':'], state = put['>'], tmp;
 var DBG = (msg._||'').DBG; DBG && (DBG.sp = DBG.sp || +new Date);
 //var lot = (msg._||'').lot||''; count[id] = (count[id] || 0) + 1;

sea.js (2 changed lines)

@@ -475,7 +475,7 @@
 }});
 module.exports = SEA.verify;
-// legacy & ossl leak mitigation:
+// legacy & ossl memory leak mitigation:
 var knownKeys = {};
 var keyForPair = SEA.opt.slow_leak = pair => {
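For context on the knownKeys / keyForPair lines above: the cache exists so repeated verifies reuse one imported verification key per pair instead of re-importing it every call, which leaked memory on some OpenSSL builds. A rough sketch of that memoization pattern; the WebCrypto parameters are assumptions for illustration, not necessarily SEA's exact settings:

// Rough sketch of the memoization idea; parameter details are assumptions.
var subtle = (globalThis.crypto || require('crypto').webcrypto).subtle;
var knownKeys = {};
function keyForPair(pub){                          // pub: a SEA-style 'x.y' public key string
  if(knownKeys[pub]){ return knownKeys[pub] }      // reuse the already-imported key
  var xy = pub.split('.');
  var jwk = {kty: 'EC', crv: 'P-256', x: xy[0], y: xy[1], ext: true};
  return knownKeys[pub] = subtle.importKey(        // cache the promise itself, so concurrent
    'jwk', jwk, {name: 'ECDSA', namedCurve: 'P-256'}, false, ['verify']
  );                                               // callers share a single import.
}

Each verify can then await keyForPair(pub) rather than importing a fresh CryptoKey on every signature check.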

(unnamed changed file)

@@ -14,7 +14,7 @@ describe('Gun', function(){
 root.Gun = root.Gun;
 root.Gun.TESTING = true;
 } else {
-require('../lib/ison');
+require('../lib/yson');
 root.Gun = require('../gun');
 root.Gun.TESTING = true;
 require('../lib/store');

(unnamed changed file)

@@ -2,10 +2,21 @@
 This is the first in a series of basic networking correctness tests.
 Each test itself might be dumb and simple, but built up together,
 they prove desired end goals for behavior at scale.
-1. (this file) Is a browser write is confirmed as save by multiple peers even if by daisy chain.
-2.
+1. (this file) Makes sure that a browser receives daisy chain acks that data was saved.
+2. (this file) Makes sure the browser receives a deduplicated ACK when data is requested across the daisy chains.
+Assume we have a 4 peer federated-like topology,
+..B--C..
+./....\.
+A......D
+Alice's data can be saved by more than just Bob.
+Dave asking for the data should not flood him with more than necessary responses.
 */
+// <-- PANIC template, copy & paste, tweak a few settings if needed...
 var config = {
 IP: require('ip').address(),
 port: 8765,
@@ -43,6 +54,7 @@ var browsers = clients.excluding(servers);
 var alice = browsers.pluck(1);
 var dave = browsers.excluding(alice).pluck(1);
+// continue boiler plate, tweak a few defaults if needed, but give descriptive test names...
 describe("Put ACK", function(){
 //this.timeout(5 * 60 * 1000);
 this.timeout(10 * 60 * 1000);
@@ -58,7 +70,7 @@ describe("Put ACK", function(){
 var env = test.props;
 test.async();
 try{ require('fs').unlinkSync(env.i+'data') }catch(e){}
 try{ require('gun/lib/fsrm')(env.i+'data') }catch(e){}
 var server = require('http').createServer(function(req, res){
 res.end("I am "+ env.i +"!");
 });
@@ -100,36 +112,42 @@ describe("Put ACK", function(){
 return Promise.all(tests);
 });
+// end PANIC template -->
 it("Put", function(){
 return alice.run(function(test){
 console.log("I AM ALICE");
 test.async();
-var c = test.props.acks, acks = {};
-c = c < 2? 2 : c;
+var c = test.props.acks, acks = {}, tmp;
+c = c < 2? 2 : c; // at least 2 acks.
 ref.put({hello: 'world'}, function(ack){
-//console.log("acks:", ack, c);
-acks[ack['#']] = 1;
-if(Object.keys(acks).length == c){
-wire();
-return test.done();
+//console.log("ack:", ack['#']);
+acks[ack['#']] = 1; // uniquely list all the ack IDs.
+tmp = Object.keys(acks).length;
+console.log(tmp, "save");
+if(tmp >= c){ // when there are enough
+test.done(); // confirm test passes
+wire(); // start sniffing for future tests
+return;
 }
 }, {acks: c});
-function wire(){
+function wire(){ // for the future tests, track how many wire messages are heard/sent.
 ref.hear = ref.hear || [];
-var hear = ref._.root.opt.mesh.hear;
-ref._.root.opt.mesh.hear = function(raw, peer){
-console.log('hear:', msg);
-var msg = JSON.parse(raw);
+var dam = ref.back('opt.mesh');
+var hear = dam.hear;
+dam.hear = function(raw, peer){ // hijack the listener
+try{ var msg = JSON.parse(raw);
+}catch(e){ console.log("Note: This test does not support the RAD serialization format yet, use JSON.") }
 hear(raw, peer);
-ref.hear.push(msg);
+ref.hear.push(msg); // add to count
 }
-var say = ref._.root.opt.mesh.say;
-ref._.root.opt.mesh.say = function(raw, peer){
+var say = dam.say;
+dam.say = function(raw, peer){
 var yes = say(raw, peer);
 if(yes === false){ return }
-console.log("say:", msg, yes);
-(ref.say || (ref.say = [])).push(JSON.parse(msg));
+console.log(raw);
+(ref.say || (ref.say = [])).push(JSON.parse(raw)); // add to count.
 }
 }
 }, {acks: config.servers});
@@ -139,31 +157,35 @@ describe("Put ACK", function(){
 /*
 Here is the recursive rule for GET, keep replying while hashes mismatch.
 1. Receive a GET message.
-2. If it has a hash, and if you have a thing matching the GET, then see if the hashes are the same, if they are then don't ACK, don't relay, end.
+2. If it has a hash, and if you have a thing matching the GET, then see if the hashes are the same, if they are then don't ACK, don't relay, end. (Tho subscribe them)
 3. If you would have the thing but do not, then ACK that YOU have nothing.
 4. If you have a thing matching the GET or an ACK for the GET's message, add the hash to the GET message, and ACK with the thing or ideally the remaining difference.
-5. Pick ?3? OTHER peers preferably by priority that they have got the thing, send them the GET, plus all "up" peers.
-6. If no ACKs you are done, end.
+5. Pick ?3? OTHER peers preferably by priority that they have got the thing, send them the GET, plus to all "up" peers.
+6. If no ACKs you are done, end. (Or sample other peers until confident)
 7. If you get ACKs back to the GET with things and different hashes, optionally merge into the thing you have GOT and update the hash.
 8. Go to 4.
+// Deduplicated reply hashes cannot be global, they need to be request specific to avoid other bugs.
 */
 return dave.run(function(test){
 console.log("I AM DAVE");
 test.async();
 var c = 0, to;
 ref.hear = ref.hear || [];
-var hear = ref._.root.opt.mesh.hear;
-ref._.root.opt.mesh.hear = function(raw, peer){
+var dam = ref.back('opt.mesh');
+var hear = dam.hear;
+dam.hear = function(raw, peer){ // hijack listener
 var msg = JSON.parse(raw);
 console.log('hear:', msg);
 hear(raw, peer);
 ref.hear.push(msg);
-if(msg.put){ ++c }
+if(msg.put){ ++c } // count how many acks our GET gets.
 }
-ref.get(function(ack){
-if(!ack.put || ack.put.hello !== 'world'){ return }
-if(c > 1){ too_many_acks }
+ref.get(function(ack){ // GET data
+if(!ack.put || ack.put.hello !== 'world'){ return } // reject any wrong data.
+// because the data is the same on all peers,
+// we should only get 1 ack because the others dedup off the 1st ACK's hash.
+if(c > 1){ return too_many_acks }
 clearTimeout(to);
 to = setTimeout(test.done, 1000);
@@ -174,8 +196,8 @@ describe("Put ACK", function(){
 it("DAM", function(){
 return alice.run(function(test){
 test.async();
-if(ref.say){ said_too_much }
-if(ref.hear.length > 1){ heard_to_much }
+if(ref.hear.length > 1){ return heard_too_much } // Alice should hear the GET
+if(ref.say){ return said_too_much } // But should not reply because their reply hash dedups with an earlier reply that was added to the GET.
 test.done()
 }, {acks: config.servers});
 });
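The recursive GET rule quoted in the comment above boils down to: attach the hash of your answer to the GET as it relays, and any later peer whose answer hashes the same stays silent (it only subscribes), so the asker gets one deduplicated ACK. A minimal self-contained sketch of that dedup step, with made-up peer and store shapes rather than gun's actual mesh:

// Sketch of the hash-dedup rule (steps 2 and 4 above); names are illustrative.
var crypto = require('crypto');
function hashOf(data){ return crypto.createHash('sha256').update(JSON.stringify(data)).digest('base64') }
function makePeer(name, store){
  return {
    name: name,
    subs: [],                                       // whom to notify about future updates
    onGet: function(msg, from, relayTo){
      var self = this, data = store[msg.get], acks = [];
      if(data !== undefined){
        var h = hashOf(data);
        if(msg['##'] === h){                        // same hash was already answered upstream:
          self.subs.push(from);                     // don't ACK, don't relay, just subscribe them.
        } else {
          msg['##'] = h;                            // attach our hash so later peers dedup off it,
          acks.push({'@': msg['#'], put: data, '##': h}); // and ACK with the thing.
        }
      }
      (relayTo || []).forEach(function(p){ acks = acks.concat(p.onGet(msg, self.name)) });
      return acks;
    }
  };
}
// usage: B answers first; C holds identical data, sees the matching '##', and stays quiet.
var B = makePeer('B', {a: {hello: 'world'}});
var C = makePeer('C', {a: {hello: 'world'}});
var acks = B.onGet({'#': 'req1', get: 'a'}, 'D', [C]);
console.log(acks.length); // 1, Dave gets a single deduplicated ACK.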

(unnamed changed file)

@@ -1,16 +1,17 @@
 /*
-This is the first in a series of basic networking correctness tests.
-Each test itself might be dumb and simple, but built up together,
-they prove desired end goals for behavior at scale.
-1. (this file) Is a browser write is confirmed as save by multiple peers even if by daisy chain.
-2.
+This test is almost the opposite of the first test.
+1. Alice saves data ""offline"" so nobody knows it exists.
+2. Then Carl & Dave simultaneously ask for it, even tho they are not connected to Alice and Bob does not know where it is.
+3. They must receive the data, and their requests must not conflict or cause the other's to drop.
+4. Optionally: Then Ed comes along and asks for the data again, he must receive it from the closest cached peer.
 */
+// <-- PANIC template, copy & paste, tweak a few settings if needed...
 var config = {
 IP: require('ip').address(),
 port: 8765,
 servers: 1,
-browsers: 3,
+browsers: 4,
 route: {
 '/': __dirname + '/index.html',
 '/gun.js': __dirname + '/../../gun.js',
@@ -43,8 +44,10 @@ var alice = browsers.pluck(1);
 var carl = browsers.excluding(alice).pluck(1);
 var dave = browsers.excluding([alice, carl]).pluck(1);
 var cd = new panic.ClientList([carl, dave]);
-describe("Put ACK", function(){
+var ed = browsers.excluding([alice, carl, dave]).pluck(1);
+// continue boiler plate, tweak a few defaults if needed, but give descriptive test names...
+describe("GET GET", function(){
 //this.timeout(5 * 60 * 1000);
 this.timeout(10 * 60 * 1000);
@@ -59,7 +62,7 @@ describe("Put ACK", function(){
 var env = test.props;
 test.async();
 try{ require('fs').unlinkSync(env.i+'data') }catch(e){}
 try{ require('gun/lib/fsrm')(env.i+'data') }catch(e){}
 var server = require('http').createServer(function(req, res){
 res.end("I am "+ env.i +"!");
 });
@@ -77,6 +80,15 @@ describe("Put ACK", function(){
 server.listen(port, function(){
 test.done();
 });
+/* BELOW IS HACKY NON-STANDARD TEST STUFF, DO NOT REUSE */
+setInterval(function(){
+var tmp = gun._.graph.a;
+if(!tmp || !tmp.hello){ return }
+tmp.hello = "bob_cache";
+}, 1);
+// END HACKY STUFF.
 }, {i: i += 1, config: config}));
 });
 return Promise.all(tests);
@@ -94,7 +106,7 @@ describe("Put ACK", function(){
 try{ localStorage.clear() }catch(e){}
 try{ indexedDB.deleteDatabase('radata') }catch(e){}
 var env = test.props;
-var gun = Gun('http://'+ env.config.IP + ':' + (env.config.port + 1) + '/gun');
+var gun = Gun({peers: 'http://'+ env.config.IP + ':' + (env.config.port + 1) + '/gun', localStorage: false});
 window.gun = gun;
 window.ref = gun.get('a');
 }, {i: i += 1, config: config}));
@@ -102,6 +114,7 @@ describe("Put ACK", function(){
 return Promise.all(tests);
 });
+// end PANIC template -->
 it("connect", function(){
 return alice.run(function(test){
@@ -119,14 +132,15 @@ describe("Put ACK", function(){
 return alice.run(function(test){
 test.async();
-var say = ref._.root.opt.mesh.say;
-ref._.root.opt.mesh.say = function(){}; // prevent from syncing
+var dam = ref.back('opt.mesh');
+var say = dam.say;
+dam.say = function(){}; // prevent from syncing
 var c = 0;
-ref.put({hello: 'world'}, function(ack){ ++c });
+ref.put({hello: 'world'}, function(ack){ ++c }); // count acks, which should be none because disconnected
 setTimeout(function(){
-ref._.root.opt.mesh.say = say;
-if(c){ should_not_have_ack }
+if(c){ return should_not_have_ack } // make sure there were none.
+dam.say = say; // restore normal code
 test.done();
 }, 1000);
 });
@@ -136,8 +150,7 @@ describe("Put ACK", function(){
 return cd.run(function(test){
 test.async();
 console.log("I am Carl or Dave");
-ref.get(function(ack){
-console.log('ack', ack);
+ref.get(function(ack){ // this makes sure data was found p2p, even without subscription knowledge.
 if(ack.put){
 test.done();
 }
@@ -145,6 +158,21 @@ describe("Put ACK", function(){
 });
 });
+it("Get Cached", function(){
+return ed.run(function(test){
+test.async();
+ref.get(function(ack){ // the data should reply from a cache in the daisy chain now.
+if(test.c){ return }
+if(ack.put.hello !== 'bob_cache'){
+console.log("FAIL: we_want_bob_only");
+return we_want_bob_only;
+}
+test.done();test.c=1;
+});
+});
+});
 it("All finished!", function(done){
 console.log("Done! Cleaning things up...");
 setTimeout(function(){

test/panic/axe/1no_self.js (new file, 106 lines)

@@ -0,0 +1,106 @@
/*
*/
var config = {
IP: require('ip').address(),
port: 8765,
servers: 1
}
var panic = require('panic-server');
panic.server().on('request', function(req, res){
//config.route[req.url] && require('fs').createReadStream(config.route[req.url]).pipe(res);
}).listen(config.port);
var clients = panic.clients;
var manager = require('panic-manager')();
manager.start({
clients: Array(config.servers).fill().map(function(u, i){
return {
type: 'node',
port: config.port + (i + 1)
}
}),
panic: 'http://' + config.IP + ':' + config.port
});
var servers = clients.filter('Node.js');
var alice = servers.pluck(1);
describe("Do not connect to self", function(){
//this.timeout(5 * 60 * 1000);
this.timeout(10 * 60 * 1000);
it("Servers have joined!", function(){
return servers.atLeast(config.servers);
});
it("GUN started!", function(){
var tests = [], i = 0;
servers.each(function(client){
tests.push(client.run(function(test){
var env = test.props;
test.async();
try{ require('fs').unlinkSync(env.i+'data') }catch(e){}
try{ require('gun/lib/fsrm')(env.i+'data') }catch(e){}
var server = require('http').createServer(function(req, res){
res.end("I am "+ env.i +"!");
});
var port = env.config.port + env.i;
var Gun = require('gun');
var peers = [], i = env.config.servers;
global.self_url = 'http://'+ env.config.IP + ':' + port + '/gun';
peers.push(self_url);
console.log(port, " connect to ", peers);
var gun = Gun({file: env.i+'data', peers: peers, web: server, multicast: false});
global.gun = gun;
server.listen(port, function(){
test.done();
});
}, {i: i += 1, config: config}));
});
return Promise.all(tests);
});
it("Drop self", function(){
var tests = [], i = 0;
servers.each(function(client){
tests.push(client.run(function(test){
var env = test.props;
test.async();
var peers = gun.back('opt.peers');
var peer = peers[self_url];
gun.get('test').on(function(a){ });
setTimeout(function(){
if(peers[self_url] || peer.wire){
console.log("FAIL: should_not_have_self_anymore");
should_not_have_self_anymore;
return;
}
test.done();
},99);
}, {i: i += 1, config: config}));
});
return Promise.all(tests);
});
it("All finished!", function(done){
console.log("Done! Cleaning things up...");
setTimeout(function(){
done();
},1);
});
after("Everything shut down.", function(){
require('../util/open').cleanup();
return servers.run(function(){
process.exit();
});
});
});