diff --git a/examples/cacheTest.js b/examples/cacheTest.js
new file mode 100644
index 0000000..72f2a78
--- /dev/null
+++ b/examples/cacheTest.js
@@ -0,0 +1,58 @@
+'use strict';
+
+var async = require('asyncawait/async');
+var OrbitClient = require('../src/OrbitClient');
+var Timer = require('./Timer');
+
+var host = 'localhost:3006';
+var username = 'testrunner';
+var password = '';
+
+let run = (async(() => {
+  try {
+    // Connect
+    var orbit = OrbitClient.connect(host, username, password);
+
+    console.log("-------- EVENT log -------")
+    const c1 = 'cache-test';
+    orbit.channel(c1).delete();
+
+    var timer1 = new Timer(true);
+    console.log("Writing...");
+    for(let i = 0; i < 100; i ++) {
+      orbit.channel(c1).add("hello " + i);
+    }
+    console.log("Write took", timer1.stop() + "ms");
+
+    var timer2 = new Timer(true);
+    console.log("Reading 1st time...");
+    var items = orbit.channel(c1).iterator({ limit: -1 }).collect();
+    items = items.map((e) => {
+      return { key: e.item.key, val: e.item.Payload };
+    });
+    console.log("Reading 1st time took", timer2.stop() + "ms");
+
+    var timer3 = new Timer(true);
+    console.log("Reading 2nd time...");
+    var items = orbit.channel(c1).iterator({ limit: -1 }).collect();
+    items = items.map((e) => {
+      return { key: e.item.key, val: e.item.Payload };
+    });
+    console.log("Reading 2nd time took", timer3.stop() + "ms");
+
+    var timer4 = new Timer(true);
+    console.log("Reading 3rd time...");
+    var items = orbit.channel(c1).iterator({ limit: -1 }).collect();
+    items = items.map((e) => {
+      return { key: e.item.key, val: e.item.Payload };
+    });
+    console.log("Reading 3rd time took", timer4.stop() + "ms");
+
+  } catch(e) {
+    console.error("error:", e);
+    console.error(e.stack);
+    process.exit(1);
+  }
+}))();
+
+module.exports = run;
diff --git a/src/Aggregator.js b/src/Aggregator.js
index 9df7826..7ed3356 100644
--- a/src/Aggregator.js
+++ b/src/Aggregator.js
@@ -8,6 +8,7 @@ var Encryption = require('orbit-common/lib/Encryption');
 var HashCache = require('./HashCacheClient');
 var HashCacheItem = require('./HashCacheItem').EncryptedHashCacheItem;
 var HashCacheOps = require('./HashCacheOps');
+var MemoryCache = require('./MemoryCache');
 
 const pubkey = Keystore.getKeys().publicKey;
 const privkey = Keystore.getKeys().privateKey;
@@ -59,13 +60,29 @@ class Aggregator {
   }
 
   static _fetchOne(ipfs, hash, password) {
-    let data = await (ipfsAPI.getObject(ipfs, hash));
+    // 1. Try fetching from memory
+    let data = MemoryCache.get(hash);
+
+    // TODO: 2. Try fetching from local cache
+
+    // 3. Fetch from network
+    if(!data)
+      data = await (ipfsAPI.getObject(ipfs, hash));
+
+    // Cache the fetched item (encrypted)
+    MemoryCache.put(hash, data);
+
+    // Decrypt the item
     let item = HashCacheItem.fromEncrypted(data, pubkey, privkey, password);
 
     // TODO: add possibility to fetch content separately
     // fetch and decrypt content
     if(item.op === HashCacheOps.Add || item.op === HashCacheOps.Put) {
-      const payload = await (ipfsAPI.getObject(ipfs, item.target));
+      let payload = MemoryCache.get(item.target);
+      if(!payload)
+        payload = await (ipfsAPI.getObject(ipfs, item.target));
+
+      MemoryCache.put(item.target, payload);
+
       const contentEnc = JSON.parse(payload.Data)["content"];
       const contentDec = Encryption.decrypt(contentEnc, privkey, 'TODO: pubkey');
       item.Payload = contentDec;
diff --git a/src/MemoryCache.js b/src/MemoryCache.js
new file mode 100644
index 0000000..d8cabf4
--- /dev/null
+++ b/src/MemoryCache.js
@@ -0,0 +1,15 @@
+'use strict';
+
+let items = {};
+
+class MemoryCache {
+  static put(hash, item) {
+    items[hash] = item;
+  }
+
+  static get(hash) {
+    return items[hash];
+  }
+}
+
+module.exports = MemoryCache;
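
For reference, a minimal sketch of the read-through pattern the Aggregator change introduces, using only the put/get API added in src/MemoryCache.js. The sample hash, the fetchFromNetwork stub, and the require path (relative to the repo root) are illustrative, not part of this diff:

'use strict';

var MemoryCache = require('./src/MemoryCache');

// Stand-in for ipfsAPI.getObject(); in the real code this is the network fetch.
function fetchFromNetwork(hash) {
  return { Data: '{"content":"encrypted-payload-for-' + hash + '"}' };
}

function fetchOne(hash) {
  // 1. Try the in-memory cache first.
  var data = MemoryCache.get(hash);

  // 2. Fall back to the (slow) network fetch on a miss.
  if(!data)
    data = fetchFromNetwork(hash);

  // 3. Cache the still-encrypted object so repeat reads skip the network.
  MemoryCache.put(hash, data);
  return data;
}

fetchOne('QmFoo'); // miss: falls back to the network stub
fetchOne('QmFoo'); // hit: served from memory

This is why the second and third reads in examples/cacheTest.js are expected to be faster than the first: only the first read pays the network round-trips.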