Merge pull request #107 from amark/develop

fix live replication bug depending on ordering
Mark Nadal 2015-10-16 17:15:58 -06:00
commit fc27af870d
5 changed files with 42 additions and 19 deletions


@@ -1,7 +1,7 @@
 var port = process.env.OPENSHIFT_NODEJS_PORT || process.env.VCAP_APP_PORT || process.env.PORT || process.argv[2] || 80;
 var Gun = require('gun');
 var gun = Gun({
 	file: 'data.json',
 	s3: {
 		key: '', // AWS Access Key


@@ -8,7 +8,7 @@
 	}
 , "dependencies": {
 	"express": "~>4.9.0",
-	"gun": "~>0.2.2"
+	"gun": "~>0.2.3"
 	}
 , "scripts": {
 	"start": "node http.js",

gun.js

@@ -1021,11 +1021,12 @@
 				tab.put(p.graph = cb.graph, function(e,r){ // then sync it if we haven't already
 					Gun.log("Stateless handshake sync:", e, r);
 				}, {peers: tab.peers(url)}); // to the peer. // TODO: This forces local to flush again, not necessary.
-				// TODO: What about syncing our keys up?
 			}
-			Gun.is.graph(reply.body, function(node, soul){ // make sure for each received node
-				if(!Gun.is.soul(key)){ tab.key(key, soul, function(){}, {local: true}) } // that the key points to it.
-			});
+			if(!Gun.is.soul(key)){
+				Gun.is.graph(reply.body || gun.__.key.s[key], function(node, soul){ // make sure for each received node or nodes of our key
+					tab.key(key, soul, function(){}); // that the key points to it.
+				});
+			}
 			setTimeout(function(){ tab.put(reply.body, function(){}, {local: true}) },1); // and flush the in memory nodes of this graph to localStorage after we've had a chance to union on it.
 		}), opt);
 		cb.peers = true;
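
The ordering fix above comes down to one change: key registration no longer walks only reply.body (which may be empty if the key reply arrives before the graph data), it walks reply.body || gun.__.key.s[key], falling back to the graph already cached locally for that key. A minimal standalone sketch of that fallback pattern, using hypothetical localKeyCache, eachNode, and registerKey helpers in place of gun's internals:

	// Hypothetical illustration of the fallback pattern; not gun's actual API.
	var localKeyCache = {};            // graphs we already hold, indexed by key
	function eachNode(graph, cb){      // walk every node (soul -> node) in a graph
		if(!graph){ return }
		Object.keys(graph).forEach(function(soul){ cb(graph[soul], soul) });
	}
	function registerKey(key, soul){   // stand-in for mapping "key -> soul"
		console.log("key", key, "now points at", soul);
	}
	function onReply(key, replyBody){
		// Old behavior: only replyBody was walked, so a reply that arrived before
		// (or without) the graph data silently skipped key registration.
		// New behavior: fall back to the graph we already cached for this key.
		eachNode(replyBody || localKeyCache[key], function(node, soul){
			registerKey(key, soul);
		});
	}
	// Even with an empty reply body, a locally known graph still gets keyed.
	localKeyCache["users/alice"] = {"soul123": {name: "alice"}};
	onReply("users/alice", null);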


@@ -127,15 +127,37 @@
 			return cb({headers: reply.headers, body: (err? (err.err? err : {err: err || "Unknown error."}) : null)});
 		}
 		if(Gun.obj.empty(graph)){ return cb({headers: reply.headers, body: graph}) } // we're out of stuff!
-		// TODO: chunk the graph even if it is already chunked. pseudo code below!
-		/*Gun.is.graph(graph, function(node, soul){
-			if(Object.keys(node).length > 100){
-				// split object into many objects that have a fixed size
-				// iterate over each object
-				// cb({headers: reply.headers, chunk: {object} );
-			}
-		});*/
-		return cb({headers: reply.headers, chunk: graph }); // keep streaming
+		/*
+		(function(chunks){// FEATURE! Stream chunks if the nodes are large!
+			var max = 10;
+			Gun.is.graph(graph, function(node, soul){
+				var chunk = {};
+				console.log("node big enough?", Object.keys(node).length);
+				if(Object.keys(node).length > max){
+					var count = 0, n = Gun.union.pseudo(soul);
+					Gun.obj.map(node, function(val, field){
+						if(!(++count % max)){
+							console.log("Sending chunk", chunk);
+							cb({headers: reply.headers, chunk: chunk});
+							n = Gun.union.pseudo(soul);
+							chunk = {};
+						}
+						chunk[soul] = n;
+						n[field] = val;
+						(n._[Gun._.HAM] = n._[Gun._.HAM] || {})[field] = ((node._||{})[Gun._.HAM]||{})[field];
+					});
+					if(count % max){ // finish off the last chunk
+						cb({headers: reply.headers, chunk: chunk});
+					}
+				} else {
+					chunk[soul] = node;
+					console.log("Send BLOB", chunk);
+					cb({headers: reply.headers, chunk: chunk});
+				}
+			});
+		}([]));
+		*/
+		cb({headers: reply.headers, chunk: graph }); // Use this if you don't want streaming chunks feature.
 	});
 }
 tran.put = function(req, cb){
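
The commented-out block above is a disabled chunk-streaming feature: a node with more than max fields gets split into several small pseudo-nodes, each emitted through the same cb as a separate chunk, so oversized nodes never have to travel in one message. A rough standalone sketch of that splitting logic (HAM metadata handling omitted), with a send callback standing in for cb:

	// Rough sketch of the fixed-size chunking idea; metadata handling is
	// simplified compared to the commented-out gun code above.
	function streamInChunks(soul, node, max, send){
		var fields = Object.keys(node);
		if(fields.length <= max){              // small node: send it whole
			var whole = {}; whole[soul] = node;
			send(whole);
			return;
		}
		var chunkNode = {}, count = 0;
		fields.forEach(function(field){
			chunkNode[field] = node[field];
			if(++count % max === 0){           // every `max` fields, flush a chunk
				var chunk = {}; chunk[soul] = chunkNode;
				send(chunk);
				chunkNode = {};
			}
		});
		if(count % max){                       // flush whatever is left over
			var rest = {}; rest[soul] = chunkNode;
			send(rest);
		}
	}
	// Usage: a 25-field node goes out as three chunks of at most 10 fields each.
	var big = {}; for(var i = 0; i < 25; i++){ big["field" + i] = i }
	streamInChunks("soul/abc", big, 10, function(chunk){
		console.log("chunk with", Object.keys(chunk["soul/abc"]).length, "fields");
	});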
@@ -153,8 +175,8 @@
 		var ctx = ctx || {}; ctx.graph = {};
 		Gun.is.graph(req.body, function(node, soul){
 			ctx.graph[soul] = gun.__.graph[soul]; // TODO: BUG? Probably should be delta fields
-		})
-		gun.__.opt.hooks.put(ctx.graph, function(err, ok){
+		});
+		(gun.__.opt.hooks.put || function(g,cb){cb("No save.")})(ctx.graph, function(err, ok){
 			if(err){ return cb({headers: reply.headers, body: {err: err || "Failed."}}) }
 			cb({headers: reply.headers, body: {ok: ok || "Persisted."}});
 		});
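
The hunk above also hardens the put path: (gun.__.opt.hooks.put || function(g,cb){cb("No save.")})(...) substitutes a stub that reports "No save." when no storage hook is configured, rather than throwing. The same default-hook pattern in isolation, with a hypothetical hooks object:

	// Hypothetical illustration of the "optional hook with a safe default" pattern.
	var hooks = {};                            // imagine no storage adapter registered
	function save(graph, done){
		// If no put hook was configured, fall back to a stub that fails politely
		// instead of crashing with "undefined is not a function".
		(hooks.put || function(g, cb){ cb("No save.") })(graph, done);
	}
	save({soul1: {hello: "world"}}, function(err, ok){
		if(err){ console.log("save failed:", err); return }
		console.log("saved:", ok);
	});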


@@ -1,6 +1,6 @@
 {
   "name": "gun",
-  "version": "0.2.2",
+  "version": "0.2.3",
   "description": "Graph engine",
   "main": "index.js",
   "scripts": {
@@ -43,7 +43,7 @@
   "dependencies": {
     "aws-sdk": "~>2.0.0",
     "formidable": "~>1.0.15",
-    "ws": "~>0.4.32"
+    "ws": "~>0.8.0"
   },
   "devDependencies": {
     "mocha": "~>1.9.0"