Mirror of https://github.com/amark/gun.git (synced 2025-03-30 15:08:33 +00:00)

Commit 6d56aaa233 (parent fd600dbc20): stub all out for 17K ? CPU ? test

gun.js
@@ -308,7 +308,6 @@
 ctx.all = 0;
 ctx.stun = 1;
 var nl = Object.keys(put).sort(); // TODO: This is unbounded operation, large graphs will be slower. Write our own CPU scheduled sort? Or somehow do it in below?
-msg['%'] && console.log(msg['%']);
 var ni = 0, nj, kl, soul, node, states, err, tmp;
 (function pop(o){
 if(nj != ni){ nj = ni;
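The TODO in the hunk above asks for a "CPU scheduled sort": sorting every key of a very large graph in one synchronous call blocks the event loop. A minimal sketch of what a cooperatively scheduled sort could look like follows; chunkedSort, merge, and the default chunk size are illustrative choices, not GUN's internals or API.

    // Hypothetical sketch only (not GUN's code): sort `keys` one slice at a time,
    // yielding with setTimeout(0) between slices so other work can run, then hand
    // the fully sorted array to `done`.
    function chunkedSort(keys, done, chunk){
      chunk = chunk || 1000;                       // slice size is an assumption
      var sorted = [], i = 0;
      (function turn(){
        var end = Math.min(i + chunk, keys.length);
        var slice = keys.slice(i, end).sort();     // sort only this slice
        sorted = merge(sorted, slice);             // fold it into the running result
        i = end;
        if(i < keys.length){ return setTimeout(turn, 0); } // yield back to the scheduler
        done(sorted);
      }());
      function merge(a, b){                        // merge two already-sorted arrays
        var out = [], x = 0, y = 0;
        while(x < a.length && y < b.length){ out.push(a[x] <= b[y]? a[x++] : b[y++]); }
        return out.concat(a.slice(x), b.slice(y));
      }
    }

    // Usage (illustrative): chunkedSort(Object.keys(put), function(nl){ /* continue with nl */ });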
@@ -10,16 +10,16 @@
 root.opt.log = root.opt.log || Gun.log;
 this.to.next(root);
 })
-require('../nts');
-require('./store');
-require('./rfs');
-require('./rs3');
+//require('../nts');
+//require('./store');
+//require('./rfs');
+//require('./rs3');
 require('./wire');
-try{require('../sea');}catch(e){}
-try{require('../axe');}catch(e){}
+//try{require('../sea');}catch(e){}
+//try{require('../axe');}catch(e){}
 //require('./file');
-require('./evict');
-require('./multicast');
+//require('./evict');
+//require('./multicast');
 require('./stats');
 module.exports = Gun;
 }());
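This hunk stubs out the optional server add-ons (storage, S3, SEA, AXE, eviction, multicast) so the load test runs against a stripped-down relay. The try{require('../sea');}catch(e){} lines shown above use the common Node pattern for optional dependencies; a generic sketch of that pattern, with a placeholder module name, is:

    // Optional-dependency pattern (placeholder module name, not GUN's file layout):
    // load the add-on if it is available, otherwise continue without the feature.
    var addon = null;
    try{ addon = require('some-optional-addon'); }catch(e){ /* feature simply disabled */ }
    if(addon){
      // wire the add-on into the server here
    }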
@@ -3,7 +3,7 @@ var config = {
 port: 8765,
 servers: 1,
 browsers: 2, //3,
-each: 100000,
+each: 10000,
 wait: 1,
 route: {
 '/': __dirname + '/index.html',
@@ -13,15 +13,20 @@ var config = {
 }
 
 /*
-Assume we have 4 peers in a star topology,
+Assume we have 3 peers in a star topology,
 
-...B...
-./.|.\.
-A..C..D
+..B..
+./.\.
+A...C
 
 And they share a chat room with 10K messages.
 
-A - GET chat -> B (cache miss) -> C
+A -> GET chat -> B (cache miss) -> C
 C hosts the data and streams it back
 C -> PUT chat -> B (relay) -> A got.
 
+Using the WebRTC module, C <-> A directly, no need for a relay!
+But we're wanting to test the performance of the whole network.
 */
 
 var panic = require('panic-server');
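The comment block above describes the flow this load test exercises: a browser peer asks the relay for the chat graph, the relay misses its cache and pulls the data from the peer that holds it, then streams it back. As a rough illustration of that read path using GUN's public API (the relay URL and the 'chat' key are example values, not this test's configuration):

    // Illustrative only: peer A subscribing to a chat room through a relay peer (B).
    var Gun = require('gun');
    var gun = Gun({peers: ['http://localhost:8765/gun']}); // example relay URL
    gun.get('chat').map().on(function(msg, id){
      // fires as each message streams in, whether it came from the relay's cache
      // or was fetched on demand from the peer that actually holds it (C)
      console.log('got message', id, msg);
    });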
@@ -51,7 +56,6 @@ var servers = clients.filter('Node.js');
 var browsers = clients.excluding(servers);
 var alice = browsers.pluck(1);
 var carl = browsers.excluding(alice).pluck(1);
-//var dave = browsers.excluding([alice, carl]).pluck(1);
 
 describe("Load test "+ config.browsers +" browser(s) across "+ config.servers +" server(s)!", function(){
 
@@ -90,7 +94,6 @@ describe("Load test "+ config.browsers +" browser(s) across "+ config.servers +"
 // It has successfully launched.
 test.done();
 });
-//setInterval(function(){ console.log("CPU turns stacked:", setTimeout.turn.s.length) },1000);
 }, {i: i += 1, config: config}));
 });
 // NOW, this is very important:
@@ -165,16 +168,7 @@ describe("Load test "+ config.browsers +" browser(s) across "+ config.servers +"
 setInterval(function(){ $('u').text(setTimeout.turn.s.length) },1000);
 }, config);
 });
 
-/*it("Carl Recovers Chats", function(){
-  return carl.run(function(test){
-    console.log("... why not sending ...", window.chat);
-    test.async();
-    gun.on('in', {'#': 'asdf', put: {chat: chat}});
-    //test.done();
-  });
-});*/
-
 
 after("Everything shut down.", function(){
 // which is to shut down all the browsers.
 browsers.run(function(){
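The tests above drive remote browsers through panic-server jobs: a client handle such as carl.run(fn, props) ships the function to that browser, and inside it test.async() defers completion until test.done() is called. A skeletal example of that pattern as it appears in this file (the test name, timeout, and props are illustrative values):

    // Skeletal panic job (illustrative): run code in Carl's browser and report back.
    it("Carl does some remote work", function(){
      this.timeout(60 * 1000);            // assumption: generous timeout for slow machines
      return carl.run(function(test){
        test.async();                     // this job completes later, not on return
        setTimeout(function(){
          // ... exercise GUN in the remote browser here ...
          test.done();                    // signal that the remote job is finished
        }, 1000);
      }, {round: 1});                     // extra props are handed to the remote job
    });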