From f30789fece901a8769c8bc42ad7fec14ea190281 Mon Sep 17 00:00:00 2001 From: haad Date: Thu, 27 Jun 2024 11:43:38 +0300 Subject: [PATCH 1/4] Encrypt and decrypt entries and payloads --- src/database.js | 23 +++--- src/oplog/entry.js | 88 ++++++++++++++++----- src/oplog/heads.js | 5 +- src/oplog/log.js | 43 +++++------ src/sync.js | 9 ++- test/oplog/log.test.js | 24 +++--- test/orbitdb-encryption.test.js | 107 +++++++++++++++++++------- test/utils/encrypt.js | 132 +++++++++++++++++++++++++++++--- 8 files changed, 326 insertions(+), 105 deletions(-) diff --git a/src/database.js b/src/database.js index 1b9be40..b56e7e8 100644 --- a/src/database.js +++ b/src/database.js @@ -7,7 +7,7 @@ import { EventEmitter } from 'events' import PQueue from 'p-queue' import Sync from './sync.js' -import { Log, Entry } from './oplog/index.js' +import { Log } from './oplog/index.js' import { ComposedStorage, LRUStorage, IPFSBlockStorage, LevelStorage } from './storage/index.js' import pathJoin from './utils/path-join.js' @@ -110,6 +110,8 @@ const Database = async ({ ipfs, identity, address, name, access, directory, meta await LevelStorage({ path: pathJoin(directory, '/log/_index/') }) ) + encryption = encryption || {} + const log = await Log(identity, { logId: address, access, entryStorage, headsStorage, indexStorage, encryption }) const events = new EventEmitter() @@ -140,17 +142,20 @@ const Database = async ({ ipfs, identity, address, name, access, directory, meta return hash } - const applyOperation = async (bytes) => { + const applyOperation = async (entry) => { const task = async () => { - const entry = await Entry.decode(bytes) - if (entry) { - const updated = await log.joinEntry(entry) - if (updated) { - if (onUpdate) { - await onUpdate(log, entry) + try { + if (entry) { + const updated = await log.joinEntry(entry) + if (updated) { + if (onUpdate) { + await onUpdate(log, entry) + } + events.emit('update', entry) } - events.emit('update', entry) } + } catch (e) { + console.error(e) } } await queue.add(task) diff --git a/src/oplog/entry.js b/src/oplog/entry.js index bf58e0f..a1d65d3 100644 --- a/src/oplog/entry.js +++ b/src/oplog/entry.js @@ -55,7 +55,7 @@ const hashStringEncoding = base58btc * // { payload: "hello", next: [], ... 
} * @private */ -const create = async (identity, id, payload, clock = null, next = [], refs = []) => { +const create = async (identity, id, payload, encryptPayloadFn, clock = null, next = [], refs = []) => { if (identity == null) throw new Error('Identity is required, cannot create entry') if (id == null) throw new Error('Entry requires an id') if (payload == null) throw new Error('Entry requires a payload') @@ -63,9 +63,16 @@ const create = async (identity, id, payload, clock = null, next = [], refs = []) clock = clock || Clock(identity.publicKey) + let encryptedPayload + + if (encryptPayloadFn) { + const { bytes: encodedPayloadBytes } = await Block.encode({ value: payload, codec, hasher }) + encryptedPayload = await encryptPayloadFn(encodedPayloadBytes) + } + const entry = { id, // For determining a unique chain - payload, // Can be any dag-cbor encodeable data + payload: encryptedPayload || payload, // Can be any dag-cbor encodeable data next, // Array of strings of CIDs refs, // Array of strings of CIDs clock, // Clock @@ -78,8 +85,10 @@ const create = async (identity, id, payload, clock = null, next = [], refs = []) entry.key = identity.publicKey entry.identity = identity.hash entry.sig = signature + entry.payload = payload + entry.encryptedPayload = encryptedPayload - return encode(entry) + return entry } /** @@ -91,19 +100,21 @@ const create = async (identity, id, payload, clock = null, next = [], refs = []) * @memberof module:Log~Entry * @private */ -const verify = async (identities, entry) => { +const verify = async (identities, entry, encryptPayloadFn) => { if (!identities) throw new Error('Identities is required, cannot verify entry') if (!isEntry(entry)) throw new Error('Invalid Log entry') if (!entry.key) throw new Error("Entry doesn't have a key") if (!entry.sig) throw new Error("Entry doesn't have a signature") + const e = Object.assign({}, entry) + const value = { - id: entry.id, - payload: entry.payload, - next: entry.next, - refs: entry.refs, - clock: entry.clock, - v: entry.v + id: e.id, + payload: e.encryptedPayload || e.payload, + next: e.next, + refs: e.refs, + clock: e.clock, + v: e.v } const { bytes } = await Block.encode({ value, codec, hasher }) @@ -146,13 +157,35 @@ const isEqual = (a, b) => { * @memberof module:Log~Entry * @private */ -const decode = async (bytes) => { - const { cid, value } = await Block.decode({ bytes, codec, hasher }) +const decode = async (bytes, decryptPayloadFn, decryptEntryFn) => { + let cid + + if (decryptEntryFn) { + const encryptedEntry = await Block.decode({ bytes, codec, hasher }) + bytes = await decryptEntryFn(encryptedEntry.value) + cid = encryptedEntry.cid + } + + const decodedEntry = await Block.decode({ bytes, codec, hasher }) + const entry = decodedEntry.value + cid = cid || decodedEntry.cid + const hash = cid.toString(hashStringEncoding) + + if (decryptPayloadFn) { + try { + const decryptedPayloadBytes = await decryptPayloadFn(entry.payload) + const { value: decryptedPayload } = await Block.decode({ bytes: decryptedPayloadBytes, codec, hasher }) + entry.encryptedPayload = entry.payload + entry.payload = decryptedPayload + } catch (e) { + throw new Error('Could not decrypt entry') + } + } + return { - ...value, - hash, - bytes + ...entry, + hash } } @@ -163,13 +196,28 @@ const decode = async (bytes) => { * @memberof module:Log~Entry * @private */ -const encode = async (entry) => { - const { cid, bytes } = await Block.encode({ value: entry, codec, hasher }) +const encode = async (entry, encryptEntryFn, encryptPayloadFn) => { + 
const e = Object.assign({}, entry) + + if (encryptPayloadFn) { + e.payload = e.encryptedPayload + } + + delete e.encryptedPayload + delete e.hash + + let { cid, bytes } = await Block.encode({ value: e, codec, hasher }) + + if (encryptEntryFn) { + bytes = await encryptEntryFn(bytes) + const encryptedEntry = await Block.encode({ value: bytes, codec, hasher }) + cid = encryptedEntry.cid + bytes = encryptedEntry.bytes + } + const hash = cid.toString(hashStringEncoding) - const clock = Clock(entry.clock.id, entry.clock.time) + return { - ...entry, - clock, hash, bytes } diff --git a/src/oplog/heads.js b/src/oplog/heads.js index cf34b7c..6603e6d 100644 --- a/src/oplog/heads.js +++ b/src/oplog/heads.js @@ -9,7 +9,7 @@ import MemoryStorage from '../storage/memory.js' const DefaultStorage = MemoryStorage -const Heads = async ({ storage, heads }) => { +const Heads = async ({ storage, heads, decryptPayloadFn, decryptEntryFn }) => { storage = storage || await DefaultStorage() const put = async (heads) => { @@ -31,7 +31,6 @@ const Heads = async ({ storage, heads }) => { } const newHeads = findHeads([...currentHeads, head]) await set(newHeads) - return newHeads } @@ -44,7 +43,7 @@ const Heads = async ({ storage, heads }) => { const iterator = async function * () { const it = storage.iterator() for await (const [, bytes] of it) { - const head = await Entry.decode(bytes) + const head = await Entry.decode(bytes, decryptPayloadFn, decryptEntryFn) yield head } } diff --git a/src/oplog/log.js b/src/oplog/log.js index 54624ed..523e552 100644 --- a/src/oplog/log.js +++ b/src/oplog/log.js @@ -70,6 +70,9 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora } // Set Log's id const id = logId || randomId() + // Encryption of entries and payloads + encryption = encryption || {} + const { encryptPayloadFn, decryptPayloadFn, encryptEntryFn, decryptEntryFn } = encryption // Access Controller access = access || await DefaultAccessController() // Oplog entry storage @@ -79,13 +82,10 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora // Heads storage headsStorage = headsStorage || await DefaultStorage() // Add heads to the state storage, ie. 
init the log state - const _heads = await Heads({ storage: headsStorage, heads: logHeads }) + const _heads = await Heads({ storage: headsStorage, heads: logHeads, decryptPayloadFn, decryptEntryFn }) // Conflict-resolution sorting function sortFn = NoZeroes(sortFn || LastWriteWins) - encryption = encryption || {} - const { encryptPayloadFn, decryptPayloadFn } = encryption - // Internal queues for processing appends and joins in their call-order const appendQueue = new PQueue({ concurrency: 1 }) const joinQueue = new PQueue({ concurrency: 1 }) @@ -138,14 +138,12 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora * @instance */ const get = async (hash) => { + if (!hash) { + throw new Error('hash is required') + } const bytes = await _entries.get(hash) if (bytes) { - const entry = await Entry.decode(bytes) - - if (decryptPayloadFn) { - entry.payload = JSON.parse(await decryptPayloadFn(entry.payload)) - } - + const entry = await Entry.decode(bytes, decryptPayloadFn, decryptEntryFn) return entry } } @@ -179,15 +177,12 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora // (skips the heads which are covered by the next field) const refs = await getReferences(heads_, options.referencesCount + heads_.length) - if (encryptPayloadFn) { - data = await encryptPayloadFn(JSON.stringify(data)) - } - // Create the entry const entry = await Entry.create( identity, id, data, + encryptPayloadFn, tickClock(await clock()), nexts, refs @@ -198,14 +193,15 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora throw new Error(`Could not append entry:\nKey "${identity.hash}" is not allowed to write to the log`) } + const { hash, bytes } = await Entry.encode(entry, encryptEntryFn, encryptPayloadFn) // The appended entry is now the latest head - await _heads.set([entry]) + await _heads.set([{ hash, bytes, next: entry.next }]) // Add entry to the entry storage - await _entries.put(entry.hash, entry.bytes) + await _entries.put(hash, bytes) // Add entry to the entry index - await _index.put(entry.hash, true) + await _index.put(hash, true) // Return the appended entry - return entry + return { ...entry, hash } } return appendQueue.add(task) @@ -272,7 +268,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora throw new Error(`Could not append entry:\nKey "${entry.identity}" is not allowed to write to the log`) } // Verify signature for the entry - const isValid = await Entry.verify(identity, entry) + const isValid = await Entry.verify(identity, entry, encryptPayloadFn) if (!isValid) { throw new Error(`Could not validate signature for entry "${entry.hash}"`) } @@ -325,9 +321,11 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora for (const hash of connectedHeads.values()) { await _heads.remove(hash) } - /* 6. 
Add the new entry to heads (=union with current heads) */ - await _heads.add(entry) + const { hash, next } = entry + const bytes = await _entries.get(hash) + await _heads.add({ hash, bytes, next }) + // await _heads.add(entry) return true } @@ -581,7 +579,8 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora close, access, identity, - storage: _entries + storage: _entries, + encryption } } diff --git a/src/sync.js b/src/sync.js index e4005ee..ca6b640 100644 --- a/src/sync.js +++ b/src/sync.js @@ -3,6 +3,7 @@ import PQueue from 'p-queue' import { EventEmitter } from 'events' import { TimeoutController } from 'timeout-abort-controller' import pathJoin from './utils/path-join.js' +import { Entry } from './oplog/index.js' const DefaultTimeout = 30000 // 30 seconds @@ -220,7 +221,8 @@ const Sync = async ({ ipfs, log, events, onSynced, start, timeout }) => { const task = async () => { try { if (data && onSynced) { - await onSynced(data) + const entry = await Entry.decode(data, log.encryption.decryptPayloadFn, log.encryption.decryptEntryFn) + await onSynced(entry) } } catch (e) { events.emit('error', e) @@ -240,8 +242,9 @@ const Sync = async ({ ipfs, log, events, onSynced, start, timeout }) => { * @instance */ const add = async (entry) => { - if (started) { - await pubsub.publish(address, entry.bytes) + if (started && entry && entry.hash) { + const bytes = await log.storage.get(entry.hash) + await pubsub.publish(address, bytes) } } diff --git a/test/oplog/log.test.js b/test/oplog/log.test.js index 4073280..043c970 100644 --- a/test/oplog/log.test.js +++ b/test/oplog/log.test.js @@ -144,28 +144,28 @@ describe('Log', function () { strictEqual(values[2].payload, 'hello3') }) - it('encrypts a log entry when the payload is a string', async () => { - const keys = await keystore.createKey('hello1') + it.skip('encrypts a log entry when the payload is a string', async () => { + // const keys = await keystore.createKey('hello1') - const privateKey = await keystore.getKey('hello1') - const publicKey = await keystore.getPublic(keys) + // const privateKey = await keystore.getKey('hello1') + // const publicKey = await keystore.getPublic(keys) - const encryptPayloadFn = encrypt({ publicKey }) - const decryptPayloadFn = decrypt({ privateKey }) + const encryptPayloadFn = encrypt({ password: 'hello world' }) + const decryptPayloadFn = decrypt({ password: 'hello world' }) const log = await Log(testIdentity, { encryption: { encryptPayloadFn, decryptPayloadFn } }) const entry = await log.append('hello1') const value = await log.get(entry.hash) strictEqual(value.payload, 'hello1') }) - it('encrypts a log entry when the payload is an object', async () => { - const keys = await keystore.createKey('hello1') + it.skip('encrypts a log entry when the payload is an object', async () => { + // const keys = await keystore.createKey('hello1') - const privateKey = await keystore.getKey('hello1') - const publicKey = await keystore.getPublic(keys) + // const privateKey = await keystore.getKey('hello1') + // const publicKey = await keystore.getPublic(keys) - const encryptPayloadFn = encrypt({ publicKey }) - const decryptPayloadFn = decrypt({ privateKey }) + const encryptPayloadFn = encrypt({ password: 'hello world' }) + const decryptPayloadFn = decrypt({ password: 'hello world' }) const log = await Log(testIdentity, { encryption: { encryptPayloadFn, decryptPayloadFn } }) const entry = await log.append({ test: 'hello1' }) const value = await log.get(entry.hash) diff --git 
a/test/orbitdb-encryption.test.js b/test/orbitdb-encryption.test.js index 225cce3..255556c 100644 --- a/test/orbitdb-encryption.test.js +++ b/test/orbitdb-encryption.test.js @@ -4,26 +4,32 @@ import path from 'path' import OrbitDB from '../src/orbitdb.js' // import waitFor from './utils/wait-for.js' import connectPeers from './utils/connect-nodes.js' +import waitFor from './utils/wait-for.js' // import IPFSAccessController from '../src/access-controllers/ipfs.js' // import OrbitDBAccessController from '../src/access-controllers/orbitdb.js' import createHelia from './utils/create-helia.js' -import { encrypt, decrypt } from './utils/encrypt.js' +import { encrypt, decrypt, generatePassword } from './utils/encrypt.js' const dbPath = './orbitdb/tests/write-permissions' -describe('Encryption/Decryption', function () { - this.timeout(20000) +describe.only('Encryption/Decryption', function () { + this.timeout(5000) let ipfs1, ipfs2 let orbitdb1, orbitdb2 - let db1 /*, db2 */ + let db1, db2 + let encryptionPassword before(async () => { [ipfs1, ipfs2] = await Promise.all([createHelia(), createHelia()]) await connectPeers(ipfs1, ipfs2) + await rimraf('./orbitdb') + orbitdb1 = await OrbitDB({ ipfs: ipfs1, id: 'user1', directory: path.join(dbPath, '1') }) orbitdb2 = await OrbitDB({ ipfs: ipfs2, id: 'user2', directory: path.join(dbPath, '2') }) + + encryptionPassword = await generatePassword() }) after(async () => { @@ -51,38 +57,87 @@ describe('Encryption/Decryption', function () { afterEach(async () => { await db1.drop() await db1.close() - - // await db2.drop() - // await db2.close() + await db2.drop() + await db2.close() }) - it('encrypts/decrypts data', async () => { - const keystore = orbitdb1.keystore - const keys = await keystore.createKey('encryption-test') + it.skip('encrypts/decrypts payload', async () => { + const encryptPayloadFn = encrypt({ password: encryptionPassword }) + const decryptPayloadFn = decrypt({ password: encryptionPassword }) - const privateKey = await keystore.getKey('encryption-test') - const publicKey = await keystore.getPublic(keys) - - const encryptFn = encrypt({ publicKey }) - const decryptFn = decrypt({ privateKey }) - db1 = await orbitdb1.open('encryption-test-1', { encrypt: { data: { encryptFn, decryptFn } } }) + db1 = await orbitdb1.open('encryption-test-1', { encryption: { encryptPayloadFn, decryptPayloadFn } }) const hash = await db1.add('record 1') + + for await (const e of db1.log.iterator()) { + console.log('>', e) + } + strictEqual(await db1.get(hash), 'record 1') }) - it('encrypts/decrypts op', async () => { - const keystore = orbitdb1.keystore - const keys = await keystore.createKey('encryption-test') + it.only('encrypts/decrypts entry', async () => { + let connected = false + let updated = false + let error = false - const privateKey = await keystore.getKey('encryption-test') - const publicKey = await keystore.getPublic(keys) + const encryptPayloadFn = encrypt({ password: encryptionPassword }) + const decryptPayloadFn = decrypt({ password: encryptionPassword }) - const encryptFn = encrypt({ publicKey }) - const decryptFn = decrypt({ privateKey }) - db1 = await orbitdb1.open('encryption-test-1', { encrypt: { op: { encryptFn, decryptFn } } }) + const encryptEntryFn = encrypt({ password: encryptionPassword }) + const decryptEntryFn = decrypt({ password: encryptionPassword }) - const hash = await db1.add('record 1') - strictEqual(await db1.get(hash), 'record 1') + // const decryptPayloadFn2 = encrypt({ password: encryptionPassword + '1' }) + // const 
decryptEntryFn2 = decrypt({ password: encryptionPassword + '2' }) + + db1 = await orbitdb1.open('encryption-test-1', { encryption: { encryptEntryFn, decryptEntryFn, encryptPayloadFn, decryptPayloadFn } }) + db2 = await orbitdb2.open(db1.address, { encryption: { encryptEntryFn, decryptEntryFn, encryptPayloadFn, decryptPayloadFn } }) + // db1 = await orbitdb1.open('encryption-test-1', { encryption: { encryptEntryFn, decryptEntryFn } }) + // db2 = await orbitdb2.open(db1.address, { encryption: { encryptEntryFn, decryptEntryFn } }) + // db1 = await orbitdb1.open('encryption-test-1', { encryption: { encryptPayloadFn, decryptPayloadFn } }) + // db2 = await orbitdb2.open(db1.address, { encryption: { encryptPayloadFn, decryptPayloadFn } }) + // db1 = await orbitdb1.open('encryption-test-1') + // db2 = await orbitdb2.open(db1.address) + + console.log('connect') + + const onJoin = async (peerId, heads) => { + console.log('connected') + connected = true + } + db2.events.on('join', onJoin) + + await waitFor(() => connected, () => true) + + const onUpdate = async (peerId, heads) => { + console.log('updated') + updated = true + } + db2.events.on('update', onUpdate) + + const onError = async (err) => { + // Catch "Could not decrypt entry" errors + console.log(err) + error = true + } + db2.events.on('error', onError) + + console.log('write') + const hash1 = await db1.add('record 1') + console.log('hash1', hash1) + const hash2 = await db1.add('record 2') + console.log('hash2', hash2) + + strictEqual(await db1.get(hash1), 'record 1') + strictEqual(await db1.get(hash2), 'record 2') + + await waitFor(() => updated || error, () => true) + + const all = await db2.all() + console.log('all', all) + + strictEqual(all.length, 2) + strictEqual(all[0].value, 'record 1') + strictEqual(all[1].value, 'record 2') }) }) diff --git a/test/utils/encrypt.js b/test/utils/encrypt.js index feef7a8..1a97d16 100644 --- a/test/utils/encrypt.js +++ b/test/utils/encrypt.js @@ -1,19 +1,131 @@ -import EthCrypto from 'eth-crypto' -import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import crypto from 'crypto' -const encrypt = ({ publicKey }) => async (value) => { - const encryptedObj = await EthCrypto.encryptWithPublicKey(publicKey, value) - return EthCrypto.cipher.stringify(encryptedObj) +// From: +// https://github.com/libp2p/js-libp2p/blob/0b55625d146940994a306101650a55ee58e32f6c/packages/crypto/src/ciphers/aes-gcm.browser.ts + +import { concat } from 'uint8arrays/concat' +import { fromString } from 'uint8arrays/from-string' +// import { create } from '@libp2p/crypto/ciphers/aes-gcm' + +/* + Sources: + - https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto + - https://github.com/bradyjoslin/webcrypto-example/blob/master/script.js + - https://github.com/mdn/dom-examples/blob/main/web-crypto/encrypt-decrypt/aes-gcm.js + - https://github.com/libp2p/js-libp2p/blob/0b55625d146940994a306101650a55ee58e32f6c/packages/crypto/src/ciphers/aes-gcm.browser.ts +*/ + +const cipher = createAes() + +const encrypt = ({ password }) => async (value) => { + return cipher.encrypt(value, password) } -const decrypt = ({ privateKey }) => async (value) => { - const privateKeyStr = uint8ArrayToString(privateKey.marshal(), 'base16') +const decrypt = ({ password }) => async (value) => { + return cipher.decrypt(value, password) +} - const encryptedObj = EthCrypto.cipher.parse(value) - return await EthCrypto.decryptWithPrivateKey(privateKeyStr, encryptedObj) +const generatePassword = async (length = 256) => { + return 
crypto.getRandomValues(new Uint8Array(length)) } export { encrypt, - decrypt + decrypt, + generatePassword +} +// import webcrypto from '../webcrypto.js'; + +// WebKit on Linux does not support deriving a key from an empty PBKDF2 key. +// So, as a workaround, we provide the generated key as a constant. We test that +// this generated key is accurate in test/workaround.spec.ts +// Generated via: +// await crypto.subtle.exportKey('jwk', +// await crypto.subtle.deriveKey( +// { name: 'PBKDF2', salt: new Uint8Array(16), iterations: 32767, hash: { name: 'SHA-256' } }, +// await crypto.subtle.importKey('raw', new Uint8Array(0), { name: 'PBKDF2' }, false, ['deriveKey']), +// { name: 'AES-GCM', length: 128 }, true, ['encrypt', 'decrypt']) +// ) +export const derivedEmptyPasswordKey = { alg: 'A128GCM', ext: true, k: 'scm9jmO_4BJAgdwWGVulLg', key_ops: ['encrypt', 'decrypt'], kty: 'oct' } + +// Based off of code from https://github.com/luke-park/SecureCompatibleEncryptionExamples + +export function createAes (opts) { + const algorithm = opts?.algorithm ?? 'AES-GCM' + let keyLength = opts?.keyLength ?? 16 + const nonceLength = opts?.nonceLength ?? 12 + const digest = opts?.digest ?? 'SHA-256' + const saltLength = opts?.saltLength ?? 16 + const iterations = opts?.iterations ?? 32767 + // const crypto = webcrypto.get(); + keyLength *= 8 // Browser crypto uses bits instead of bytes + /** + * Uses the provided password to derive a pbkdf2 key. The key + * will then be used to encrypt the data. + */ + async function encrypt (data, password) { + const salt = crypto.getRandomValues(new Uint8Array(saltLength)) + const nonce = crypto.getRandomValues(new Uint8Array(nonceLength)) + const aesGcm = { name: algorithm, iv: nonce } + if (typeof password === 'string') { + password = fromString(password) + } + let cryptoKey + if (password.length === 0) { + cryptoKey = await crypto.subtle.importKey('jwk', derivedEmptyPasswordKey, { name: 'AES-GCM' }, true, ['encrypt']) + try { + const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } } + const runtimeDerivedEmptyPassword = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey']) + cryptoKey = await crypto.subtle.deriveKey(deriveParams, runtimeDerivedEmptyPassword, { name: algorithm, length: keyLength }, true, ['encrypt']) + } catch { + cryptoKey = await crypto.subtle.importKey('jwk', derivedEmptyPasswordKey, { name: 'AES-GCM' }, true, ['encrypt']) + } + } else { + // Derive a key using PBKDF2. + const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } } + const rawKey = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey']) + cryptoKey = await crypto.subtle.deriveKey(deriveParams, rawKey, { name: algorithm, length: keyLength }, true, ['encrypt']) + } + // Encrypt the string. + const ciphertext = await crypto.subtle.encrypt(aesGcm, cryptoKey, data) + return concat([salt, aesGcm.iv, new Uint8Array(ciphertext)]) + } + /** + * Uses the provided password to derive a pbkdf2 key. The key + * will then be used to decrypt the data. The options used to create + * this decryption cipher must be the same as those used to create + * the encryption cipher. 
+ */ + async function decrypt (data, password) { + const salt = data.subarray(0, saltLength) + const nonce = data.subarray(saltLength, saltLength + nonceLength) + const ciphertext = data.subarray(saltLength + nonceLength) + const aesGcm = { name: algorithm, iv: nonce } + if (typeof password === 'string') { + password = fromString(password) + } + let cryptoKey + if (password.length === 0) { + try { + const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } } + const runtimeDerivedEmptyPassword = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey']) + cryptoKey = await crypto.subtle.deriveKey(deriveParams, runtimeDerivedEmptyPassword, { name: algorithm, length: keyLength }, true, ['decrypt']) + } catch { + cryptoKey = await crypto.subtle.importKey('jwk', derivedEmptyPasswordKey, { name: 'AES-GCM' }, true, ['decrypt']) + } + } else { + // Derive the key using PBKDF2. + const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } } + const rawKey = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey']) + cryptoKey = await crypto.subtle.deriveKey(deriveParams, rawKey, { name: algorithm, length: keyLength }, true, ['decrypt']) + } + // Decrypt the string. + const plaintext = await crypto.subtle.decrypt(aesGcm, cryptoKey, ciphertext) + return new Uint8Array(plaintext) + } + const cipher = { + encrypt, + decrypt + } + return cipher } From 383420750eec354542a829191a9f79fddc43cbe5 Mon Sep 17 00:00:00 2001 From: haad Date: Sun, 30 Jun 2024 07:32:32 +0300 Subject: [PATCH 2/4] Move oplog storages to their own module --- benchmarks/orbitdb-replicate.js | 2 + src/oplog/entry.js | 34 +++++----- src/oplog/heads.js | 9 +-- src/oplog/log.js | 78 ++++++++--------------- src/oplog/oplog-index.js | 106 ++++++++++++++++++++++++++++++++ src/sync.js | 8 ++- test/oplog/replicate.test.js | 12 ++-- test/orbitdb-encryption.test.js | 2 +- 8 files changed, 171 insertions(+), 80 deletions(-) create mode 100644 src/oplog/oplog-index.js diff --git a/benchmarks/orbitdb-replicate.js b/benchmarks/orbitdb-replicate.js index 9917036..edc50b4 100644 --- a/benchmarks/orbitdb-replicate.js +++ b/benchmarks/orbitdb-replicate.js @@ -45,8 +45,10 @@ EventEmitter.defaultMaxListeners = 10000 let connected = false const onJoin = async (peerId) => (connected = true) + const onError = async (err) => console.error(err) db2.events.on('join', onJoin) + db2.events.on('error', onError) await waitFor(() => connected, () => true) diff --git a/src/oplog/entry.js b/src/oplog/entry.js index a1d65d3..1183383 100644 --- a/src/oplog/entry.js +++ b/src/oplog/entry.js @@ -86,7 +86,7 @@ const create = async (identity, id, payload, encryptPayloadFn, clock = null, nex entry.identity = identity.hash entry.sig = signature entry.payload = payload - entry.encryptedPayload = encryptedPayload + entry._payload = encryptedPayload return entry } @@ -100,7 +100,7 @@ const create = async (identity, id, payload, encryptPayloadFn, clock = null, nex * @memberof module:Log~Entry * @private */ -const verify = async (identities, entry, encryptPayloadFn) => { +const verify = async (identities, entry) => { if (!identities) throw new Error('Identities is required, cannot verify entry') if (!isEntry(entry)) throw new Error('Invalid Log entry') if (!entry.key) throw new Error("Entry doesn't have a key") @@ -110,7 +110,7 @@ const verify = async (identities, entry, encryptPayloadFn) => { const value = { id: e.id, - payload: e.encryptedPayload || e.payload, + payload: 
e._payload || e.payload, next: e.next, refs: e.refs, clock: e.clock, @@ -147,7 +147,7 @@ const isEntry = (obj) => { * @private */ const isEqual = (a, b) => { - return a && b && a.hash === b.hash + return a && b && a.hash && a.hash === b.hash } /** @@ -157,32 +157,36 @@ const isEqual = (a, b) => { * @memberof module:Log~Entry * @private */ -const decode = async (bytes, decryptPayloadFn, decryptEntryFn) => { +const decode = async (bytes, decryptEntryFn, decryptPayloadFn) => { let cid if (decryptEntryFn) { - const encryptedEntry = await Block.decode({ bytes, codec, hasher }) - bytes = await decryptEntryFn(encryptedEntry.value) - cid = encryptedEntry.cid + try { + const encryptedEntry = await Block.decode({ bytes, codec, hasher }) + bytes = await decryptEntryFn(encryptedEntry.value) + cid = encryptedEntry.cid + } catch (e) { + throw new Error('Could not decrypt entry') + } } const decodedEntry = await Block.decode({ bytes, codec, hasher }) const entry = decodedEntry.value - cid = cid || decodedEntry.cid - - const hash = cid.toString(hashStringEncoding) if (decryptPayloadFn) { try { const decryptedPayloadBytes = await decryptPayloadFn(entry.payload) const { value: decryptedPayload } = await Block.decode({ bytes: decryptedPayloadBytes, codec, hasher }) - entry.encryptedPayload = entry.payload + entry._payload = entry.payload entry.payload = decryptedPayload } catch (e) { - throw new Error('Could not decrypt entry') + throw new Error('Could not decrypt payload') } } + cid = cid || decodedEntry.cid + const hash = cid.toString(hashStringEncoding) + return { ...entry, hash @@ -200,10 +204,10 @@ const encode = async (entry, encryptEntryFn, encryptPayloadFn) => { const e = Object.assign({}, entry) if (encryptPayloadFn) { - e.payload = e.encryptedPayload + e.payload = e._payload } - delete e.encryptedPayload + delete e._payload delete e.hash let { cid, bytes } = await Block.encode({ value: e, codec, hasher }) diff --git a/src/oplog/heads.js b/src/oplog/heads.js index 6603e6d..8e0483e 100644 --- a/src/oplog/heads.js +++ b/src/oplog/heads.js @@ -15,11 +15,13 @@ const Heads = async ({ storage, heads, decryptPayloadFn, decryptEntryFn }) => { const put = async (heads) => { heads = findHeads(heads) for (const head of heads) { - await storage.put(head.hash, head.bytes) + // Store the entry's hash and nexts + await storage.put(head.hash, head.next) } } const set = async (heads) => { + // TODO: fix storage write fluctuation await storage.clear() await put(heads) } @@ -42,9 +44,8 @@ const Heads = async ({ storage, heads, decryptPayloadFn, decryptEntryFn }) => { const iterator = async function * () { const it = storage.iterator() - for await (const [, bytes] of it) { - const head = await Entry.decode(bytes, decryptPayloadFn, decryptEntryFn) - yield head + for await (const [hash, next] of it) { + yield { hash, next } } } diff --git a/src/oplog/log.js b/src/oplog/log.js index 523e552..51cf0b5 100644 --- a/src/oplog/log.js +++ b/src/oplog/log.js @@ -10,18 +10,14 @@ import LRU from 'lru' import PQueue from 'p-queue' import Entry from './entry.js' import Clock, { tickClock } from './clock.js' -import Heads from './heads.js' import ConflictResolution from './conflict-resolution.js' -import MemoryStorage from '../storage/memory.js' +import OplogIndex from './oplog-index.js' const { LastWriteWins, NoZeroes } = ConflictResolution const randomId = () => new Date().getTime().toString() const maxClockTimeReducer = (res, acc) => Math.max(res, acc.clock.time) -// Default storage for storing the Log and its entries. 
Default: Memory. Options: Memory, LRU, IPFS. -const DefaultStorage = MemoryStorage - // Default AccessController for the Log. // Default policy is that anyone can write to the Log. // Signature of an entry will always be verified regardless of AccessController policy. @@ -68,21 +64,20 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora if (logHeads != null && !Array.isArray(logHeads)) { throw new Error('\'logHeads\' argument must be an array') } + // Set Log's id const id = logId || randomId() + // Encryption of entries and payloads encryption = encryption || {} - const { encryptPayloadFn, decryptPayloadFn, encryptEntryFn, decryptEntryFn } = encryption + const { encryptPayloadFn } = encryption + // Access Controller access = access || await DefaultAccessController() - // Oplog entry storage - const _entries = entryStorage || await DefaultStorage() - // Entry index for keeping track which entries are already in the log - const _index = indexStorage || await DefaultStorage() - // Heads storage - headsStorage = headsStorage || await DefaultStorage() - // Add heads to the state storage, ie. init the log state - const _heads = await Heads({ storage: headsStorage, heads: logHeads, decryptPayloadFn, decryptEntryFn }) + + // Index and storage of entries for this Log + const index = await OplogIndex({ logHeads, entryStorage, indexStorage, headsStorage, encryption }) + // Conflict-resolution sorting function sortFn = NoZeroes(sortFn || LastWriteWins) @@ -110,8 +105,8 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora * @instance */ const heads = async () => { - const res = await _heads.all() - return res.sort(sortFn).reverse() + const heads_ = await index.heads() + return heads_.sort(sortFn).reverse() } /** @@ -141,16 +136,11 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora if (!hash) { throw new Error('hash is required') } - const bytes = await _entries.get(hash) - if (bytes) { - const entry = await Entry.decode(bytes, decryptPayloadFn, decryptEntryFn) - return entry - } + return index.get(hash) } const has = async (hash) => { - const entry = await _index.get(hash) - return entry != null + return index.has(hash) } /** @@ -169,6 +159,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora // 2. Authorize entry // 3. Store entry // 4. 
return Entry + // Get current heads of the log const heads_ = await heads() // Create the next pointers from heads @@ -187,19 +178,16 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora nexts, refs ) + // Authorize the entry const canAppend = await access.canAppend(entry) if (!canAppend) { throw new Error(`Could not append entry:\nKey "${identity.hash}" is not allowed to write to the log`) } - const { hash, bytes } = await Entry.encode(entry, encryptEntryFn, encryptPayloadFn) - // The appended entry is now the latest head - await _heads.set([{ hash, bytes, next: entry.next }]) - // Add entry to the entry storage - await _entries.put(hash, bytes) - // Add entry to the entry index - await _index.put(hash, true) + // Add the entry to the index (=store and index it) + const hash = await index.setHead(entry) + // Return the appended entry return { ...entry, hash } } @@ -228,9 +216,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora if (!isLog(log)) { throw new Error('Given argument is not an instance of Log') } - if (_entries.merge) { - await _entries.merge(log.storage) - } + await index.storage.merge(log.storage) const heads = await log.heads() for (const entry of heads) { await joinEntry(entry) @@ -268,7 +254,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora throw new Error(`Could not append entry:\nKey "${entry.identity}" is not allowed to write to the log`) } // Verify signature for the entry - const isValid = await Entry.verify(identity, entry, encryptPayloadFn) + const isValid = await Entry.verify(identity, entry) if (!isValid) { throw new Error(`Could not validate signature for entry "${entry.hash}"`) } @@ -313,19 +299,11 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora await traverseAndVerify() /* 4. Add missing entries to the index (=to the log) */ - for (const hash of hashesToAdd.values()) { - await _index.put(hash, true) - } - + await index.addVerified(hashesToAdd.values()) /* 5. Remove heads which new entries are connect to */ - for (const hash of connectedHeads.values()) { - await _heads.remove(hash) - } + await index.removeHeads(connectedHeads.values()) /* 6. Add the new entry to heads (=union with current heads) */ - const { hash, next } = entry - const bytes = await _entries.get(hash) - await _heads.add({ hash, bytes, next }) - // await _heads.add(entry) + await index.addHead(entry) return true } @@ -510,9 +488,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora * @instance */ const clear = async () => { - await _index.clear() - await _heads.clear() - await _entries.clear() + await index.clear() } /** @@ -521,9 +497,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora * @instance */ const close = async () => { - await _index.close() - await _heads.close() - await _entries.close() + await index.close() } /** @@ -579,7 +553,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora close, access, identity, - storage: _entries, + storage: index.storage, encryption } } diff --git a/src/oplog/oplog-index.js b/src/oplog/oplog-index.js new file mode 100644 index 0000000..4ded634 --- /dev/null +++ b/src/oplog/oplog-index.js @@ -0,0 +1,106 @@ +import Entry from './entry.js' +import Heads from './heads.js' +import MemoryStorage from '../storage/memory.js' + +// Default storage for storing the Log and its entries. Default: Memory. Options: Memory, LRU, IPFS. 
+const DefaultStorage = MemoryStorage + +const OplogIndex = async ({ logHeads, entryStorage, headsStorage, indexStorage, encryption }) => { + encryption = encryption || {} + const { encryptPayloadFn, decryptPayloadFn, encryptEntryFn, decryptEntryFn } = encryption + + // Oplog entry storage + const _entries = entryStorage || await DefaultStorage() + // Entry index for keeping track which entries are already in the log + const _index = indexStorage || await DefaultStorage() + // Heads storage + headsStorage = headsStorage || await DefaultStorage() + // Add heads to the state storage, ie. init the log state + const _heads = await Heads({ storage: headsStorage, heads: logHeads, decryptPayloadFn, decryptEntryFn }) + + const get = async (hash) => { + const bytes = await _entries.get(hash) + if (bytes) { + const entry = await Entry.decode(bytes, decryptEntryFn, decryptPayloadFn) + return entry + } + } + + const getBytes = async (hash) => { + return _entries.get(hash) + } + + const has = async (hash) => { + const entry = await _index.get(hash) + return entry != null + } + + const heads = async () => { + const heads_ = [] + for (const { hash } of await _heads.all()) { + const head = await get(hash) + heads_.push(head) + } + return heads_ + } + + const setHead = async (entry) => { + const { hash, bytes } = await Entry.encode(entry, encryptEntryFn, encryptPayloadFn) + // The appended entry is now the latest head + await _heads.set([{ hash, ...entry }]) + // Add entry to the entry storage + await _entries.put(hash, bytes) + // Add entry to the entry index + await _index.put(hash, true) + + return hash + } + + const addHead = async (entry) => { + /* 6. Add the new entry to heads (=union with current heads) */ + await _heads.add(entry) + return entry.hash + } + + const removeHeads = async (hashes) => { + /* 5. Remove heads which new entries are connect to */ + for (const hash of hashes) { + await _heads.remove(hash) + } + } + + const addVerified = async (hashes) => { + /* 4. 
Add missing entries to the index (=to the log) */ + for (const hash of hashes) { + await _index.put(hash, true) + } + } + + const clear = async () => { + await _index.clear() + await _heads.clear() + await _entries.clear() + } + + const close = async () => { + await _index.close() + await _heads.close() + await _entries.close() + } + + return { + get, + getBytes, + has, + heads, + setHead, + addHead, + removeHeads, + addVerified, + storage: _entries, + clear, + close + } +} + +export default OplogIndex diff --git a/src/sync.js b/src/sync.js index ca6b640..21fe4c5 100644 --- a/src/sync.js +++ b/src/sync.js @@ -147,7 +147,8 @@ const Sync = async ({ ipfs, log, events, onSynced, start, timeout }) => { const sendHeads = (source) => { return (async function * () { const heads = await log.heads() - for await (const { bytes } of heads) { + for await (const { hash } of heads) { + const bytes = await log.storage.get(hash) yield bytes } })() @@ -157,7 +158,8 @@ const Sync = async ({ ipfs, log, events, onSynced, start, timeout }) => { for await (const value of source) { const headBytes = value.subarray() if (headBytes && onSynced) { - await onSynced(headBytes) + const entry = await Entry.decode(headBytes, log.encryption.decryptEntryFn, log.encryption.decryptPayloadFn) + await onSynced(entry) } } if (started) { @@ -221,7 +223,7 @@ const Sync = async ({ ipfs, log, events, onSynced, start, timeout }) => { const task = async () => { try { if (data && onSynced) { - const entry = await Entry.decode(data, log.encryption.decryptPayloadFn, log.encryption.decryptEntryFn) + const entry = await Entry.decode(data, log.encryption.decryptEntryFn, log.encryption.decryptPayloadFn) await onSynced(entry) } } catch (e) { diff --git a/test/oplog/replicate.test.js b/test/oplog/replicate.test.js index 010f77f..c70c9f4 100644 --- a/test/oplog/replicate.test.js +++ b/test/oplog/replicate.test.js @@ -9,7 +9,7 @@ import createHelia from '../utils/create-helia.js' const keysPath = './testkeys' -describe('Log - Replication', function () { +describe.only('Log - Replication', function () { let ipfs1, ipfs2 let id1, id2 let keystore @@ -69,7 +69,7 @@ describe('Log - Replication', function () { try { if (!messageIsFromMe(message)) { const entry = await Entry.decode(message.detail.data) - await storage1.put(entry.hash, entry.bytes) + await storage1.put(entry.hash, message.detail.data) await log1.joinEntry(entry) } } catch (e) { @@ -83,7 +83,7 @@ describe('Log - Replication', function () { try { if (!messageIsFromMe(message)) { const entry = await Entry.decode(message.detail.data) - await storage2.put(entry.hash, entry.bytes) + await storage2.put(entry.hash, message.detail.data) await log2.joinEntry(entry) } } catch (e) { @@ -114,8 +114,10 @@ describe('Log - Replication', function () { for (let i = 1; i <= amount; i++) { const entry1 = await input1.append('A' + i) const entry2 = await input2.append('B' + i) - await ipfs1.libp2p.services.pubsub.publish(logId, entry1.bytes) - await ipfs2.libp2p.services.pubsub.publish(logId, entry2.bytes) + const bytes1 = await input1.storage.get(entry1.hash) + const bytes2 = await input1.storage.get(entry2.hash) + await ipfs1.libp2p.services.pubsub.publish(logId, bytes1) + await ipfs2.libp2p.services.pubsub.publish(logId, bytes2) } console.log('Messages sent') diff --git a/test/orbitdb-encryption.test.js b/test/orbitdb-encryption.test.js index 255556c..f93a53a 100644 --- a/test/orbitdb-encryption.test.js +++ b/test/orbitdb-encryption.test.js @@ -76,7 +76,7 @@ describe.only('Encryption/Decryption', 
function () { strictEqual(await db1.get(hash), 'record 1') }) - it.only('encrypts/decrypts entry', async () => { + it('encrypts/decrypts entry', async () => { let connected = false let updated = false let error = false From d83bfa9fc8293ef1d1f59db1a090f70f09e178b5 Mon Sep 17 00:00:00 2001 From: haad Date: Sun, 7 Jul 2024 10:09:09 +0300 Subject: [PATCH 3/4] Refactor and add separate PasswordEncryption module --- .../encryption/aes-gcm-pbkdf2.js | 42 +- src/encryption/index.js | 6 + src/encryption/password.js | 29 ++ src/index.js | 4 + src/oplog/entry.js | 5 +- src/oplog/log.js | 2 +- src/oplog/oplog-index.js | 8 +- src/sync.js | 4 +- test/database.test.js | 59 ++- test/oplog/entry.test.js | 13 +- test/oplog/log.test.js | 75 ++-- test/oplog/replicate.test.js | 2 +- test/orbitdb-encryption.test.js | 388 ++++++++++++++---- test/sync.test.js | 22 +- 14 files changed, 465 insertions(+), 194 deletions(-) rename test/utils/encrypt.js => src/encryption/aes-gcm-pbkdf2.js (85%) create mode 100644 src/encryption/index.js create mode 100644 src/encryption/password.js diff --git a/test/utils/encrypt.js b/src/encryption/aes-gcm-pbkdf2.js similarity index 85% rename from test/utils/encrypt.js rename to src/encryption/aes-gcm-pbkdf2.js index 1a97d16..360d99c 100644 --- a/test/utils/encrypt.js +++ b/src/encryption/aes-gcm-pbkdf2.js @@ -1,41 +1,22 @@ -import crypto from 'crypto' - -// From: -// https://github.com/libp2p/js-libp2p/blob/0b55625d146940994a306101650a55ee58e32f6c/packages/crypto/src/ciphers/aes-gcm.browser.ts - -import { concat } from 'uint8arrays/concat' -import { fromString } from 'uint8arrays/from-string' -// import { create } from '@libp2p/crypto/ciphers/aes-gcm' - /* - Sources: + Source: + https://github.com/libp2p/js-libp2p/blob/0b55625d146940994a306101650a55ee58e32f6c/packages/crypto/src/ciphers/aes-gcm.browser.ts + + More information: - https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto - https://github.com/bradyjoslin/webcrypto-example/blob/master/script.js - https://github.com/mdn/dom-examples/blob/main/web-crypto/encrypt-decrypt/aes-gcm.js - - https://github.com/libp2p/js-libp2p/blob/0b55625d146940994a306101650a55ee58e32f6c/packages/crypto/src/ciphers/aes-gcm.browser.ts */ -const cipher = createAes() +// import crypto from 'crypto' +import { concat } from 'uint8arrays/concat' +import { fromString } from 'uint8arrays/from-string' -const encrypt = ({ password }) => async (value) => { - return cipher.encrypt(value, password) +// Polyfill fix for browsers +const getCrypto = () => { + return global.crypto } -const decrypt = ({ password }) => async (value) => { - return cipher.decrypt(value, password) -} - -const generatePassword = async (length = 256) => { - return crypto.getRandomValues(new Uint8Array(length)) -} - -export { - encrypt, - decrypt, - generatePassword -} -// import webcrypto from '../webcrypto.js'; - // WebKit on Linux does not support deriving a key from an empty PBKDF2 key. // So, as a workaround, we provide the generated key as a constant. We test that // this generated key is accurate in test/workaround.spec.ts @@ -50,7 +31,7 @@ export const derivedEmptyPasswordKey = { alg: 'A128GCM', ext: true, k: 'scm9jmO_ // Based off of code from https://github.com/luke-park/SecureCompatibleEncryptionExamples -export function createAes (opts) { +export function AES (opts) { const algorithm = opts?.algorithm ?? 'AES-GCM' let keyLength = opts?.keyLength ?? 16 const nonceLength = opts?.nonceLength ?? 
12 @@ -58,6 +39,7 @@ export function createAes (opts) { const saltLength = opts?.saltLength ?? 16 const iterations = opts?.iterations ?? 32767 // const crypto = webcrypto.get(); + const crypto = getCrypto() keyLength *= 8 // Browser crypto uses bits instead of bytes /** * Uses the provided password to derive a pbkdf2 key. The key diff --git a/src/encryption/index.js b/src/encryption/index.js new file mode 100644 index 0000000..8c752c3 --- /dev/null +++ b/src/encryption/index.js @@ -0,0 +1,6 @@ +/** + * @module Encryption + * @description + * Encryption modules for OrbitDB. + */ +export { default as PasswordEncryption } from './password.js' diff --git a/src/encryption/password.js b/src/encryption/password.js new file mode 100644 index 0000000..8896f82 --- /dev/null +++ b/src/encryption/password.js @@ -0,0 +1,29 @@ +/** + * @namespace Encryption-Password + * @memberof module:Encryption + * @description + * Password encryption module encrypts data using AES-GCM PBKDF2. + */ + +import { AES } from './aes-gcm-pbkdf2.js' + +const PasswordEncryption = async ({ password, aesOptions }) => { + aesOptions = aesOptions || {} + + const aes = AES(aesOptions) + + const encrypt = (value) => { + return aes.encrypt(value, password) + } + + const decrypt = (value) => { + return aes.decrypt(value, password) + } + + return { + encrypt, + decrypt + } +} + +export default PasswordEncryption diff --git a/src/index.js b/src/index.js index ec833ca..4b6e0f5 100644 --- a/src/index.js +++ b/src/index.js @@ -41,3 +41,7 @@ export { MemoryStorage, ComposedStorage } from './storage/index.js' + +export { + PasswordEncryption +} from './encryption/index.js' diff --git a/src/oplog/entry.js b/src/oplog/entry.js index 1183383..035b235 100644 --- a/src/oplog/entry.js +++ b/src/oplog/entry.js @@ -86,7 +86,10 @@ const create = async (identity, id, payload, encryptPayloadFn, clock = null, nex entry.identity = identity.hash entry.sig = signature entry.payload = payload - entry._payload = encryptedPayload + + if (encryptPayloadFn) { + entry._payload = encryptedPayload + } return entry } diff --git a/src/oplog/log.js b/src/oplog/log.js index 51cf0b5..87cc287 100644 --- a/src/oplog/log.js +++ b/src/oplog/log.js @@ -70,7 +70,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora // Encryption of entries and payloads encryption = encryption || {} - const { encryptPayloadFn } = encryption + const encryptPayloadFn = encryption.data?.encrypt // Access Controller access = access || await DefaultAccessController() diff --git a/src/oplog/oplog-index.js b/src/oplog/oplog-index.js index 4ded634..5e4dc2d 100644 --- a/src/oplog/oplog-index.js +++ b/src/oplog/oplog-index.js @@ -6,9 +6,11 @@ import MemoryStorage from '../storage/memory.js' const DefaultStorage = MemoryStorage const OplogIndex = async ({ logHeads, entryStorage, headsStorage, indexStorage, encryption }) => { - encryption = encryption || {} - const { encryptPayloadFn, decryptPayloadFn, encryptEntryFn, decryptEntryFn } = encryption - + // Setup encryption and decryption functions + const encryptEntryFn = encryption?.replication?.encrypt + const decryptEntryFn = encryption?.replication?.decrypt + const encryptPayloadFn = encryption?.data?.encrypt + const decryptPayloadFn = encryption?.data?.decrypt // Oplog entry storage const _entries = entryStorage || await DefaultStorage() // Entry index for keeping track which entries are already in the log diff --git a/src/sync.js b/src/sync.js index 21fe4c5..5d9639e 100644 --- a/src/sync.js +++ b/src/sync.js @@ 
-158,7 +158,7 @@ const Sync = async ({ ipfs, log, events, onSynced, start, timeout }) => { for await (const value of source) { const headBytes = value.subarray() if (headBytes && onSynced) { - const entry = await Entry.decode(headBytes, log.encryption.decryptEntryFn, log.encryption.decryptPayloadFn) + const entry = await Entry.decode(headBytes, log.encryption.replication?.decrypt, log.encryption.data?.decrypt) await onSynced(entry) } } @@ -223,7 +223,7 @@ const Sync = async ({ ipfs, log, events, onSynced, start, timeout }) => { const task = async () => { try { if (data && onSynced) { - const entry = await Entry.decode(data, log.encryption.decryptEntryFn, log.encryption.decryptPayloadFn) + const entry = await Entry.decode(data, log.encryption.replication?.decrypt, log.encryption.data?.decrypt) await onSynced(entry) } } catch (e) { diff --git a/test/database.test.js b/test/database.test.js index 5677b58..700eb8d 100644 --- a/test/database.test.js +++ b/test/database.test.js @@ -1,9 +1,9 @@ -import { strictEqual, deepStrictEqual } from 'assert' +import { strictEqual, deepStrictEqual, notEqual } from 'assert' import { rimraf } from 'rimraf' import { existsSync } from 'fs' import { copy } from 'fs-extra' import Path from 'path' -import { Database, Entry, KeyStore, Identities } from '../src/index.js' +import { Database, KeyStore, Identities } from '../src/index.js' import LevelStorage from '../src/storage/level.js' import MemoryStorage from '../src/storage/memory.js' import testKeysPath from './fixtures/test-keys-path.js' @@ -68,8 +68,12 @@ describe('Database', function () { describe('Options', () => { it('uses default directory for headsStorage', async () => { db = await Database({ ipfs, identity: testIdentity, address: databaseId, accessController }) - const op = { op: 'PUT', key: 1, value: 'record 1 on db 1' } - const hash = await db.addOperation(op) + + const op1 = { op: 'PUT', key: 1, value: 'record 1 on db 1 version 1' } + const op2 = { op: 'PUT', key: 1, value: 'record 1 on db 1 version 2' } + + await db.addOperation(op1) + const hash = await db.addOperation(op2) const headsPath = Path.join('./orbitdb/', `${databaseId}/`, '/log/_heads/') @@ -79,7 +83,9 @@ describe('Database', function () { const headsStorage = await LevelStorage({ path: headsPath }) - deepStrictEqual((await Entry.decode(await headsStorage.get(hash))).payload, op) + const bytes = Uint8Array.from(await headsStorage.get(hash)) + + notEqual(bytes.length, 0) await headsStorage.close() @@ -88,8 +94,11 @@ describe('Database', function () { it('uses given directory for headsStorage', async () => { db = await Database({ ipfs, identity: testIdentity, address: databaseId, accessController, directory: './custom-directory' }) - const op = { op: 'PUT', key: 1, value: 'record 1 on db 1' } - const hash = await db.addOperation(op) + const op1 = { op: 'PUT', key: 1, value: 'record 1 on db 1 version 1' } + const op2 = { op: 'PUT', key: 1, value: 'record 1 on db 1 version 2' } + + await db.addOperation(op1) + const hash = await db.addOperation(op2) const headsPath = Path.join('./custom-directory/', `${databaseId}/`, '/log/_heads/') @@ -99,7 +108,9 @@ describe('Database', function () { const headsStorage = await LevelStorage({ path: headsPath }) - deepStrictEqual((await Entry.decode(await headsStorage.get(hash))).payload, op) + const bytes = Uint8Array.from(await headsStorage.get(hash)) + + notEqual(bytes.length, 0) await headsStorage.close() @@ -110,23 +121,41 @@ describe('Database', function () { it('uses given MemoryStorage for 
headsStorage', async () => { const headsStorage = await MemoryStorage() db = await Database({ ipfs, identity: testIdentity, address: databaseId, accessController, directory: './orbitdb', headsStorage }) - const op = { op: 'PUT', key: 1, value: 'record 1 on db 1' } - const hash = await db.addOperation(op) + const op1 = { op: 'PUT', key: 1, value: 'record 1 on db 1 version 1' } + const op2 = { op: 'PUT', key: 1, value: 'record 1 on db 1 version 2' } - deepStrictEqual((await Entry.decode(await headsStorage.get(hash))).payload, op) + await db.addOperation(op1) + const hash = await db.addOperation(op2) + + const bytes = Uint8Array.from(await headsStorage.get(hash)) + + notEqual(bytes.length, 0) await db.close() + + await headsStorage.close() + await rimraf('./orbitdb') }) it('uses given MemoryStorage for entryStorage', async () => { const entryStorage = await MemoryStorage() - db = await Database({ ipfs, identity: testIdentity, address: databaseId, accessController, directory: './orbitdb', entryStorage }) - const op = { op: 'PUT', key: 1, value: 'record 1 on db 1' } - const hash = await db.addOperation(op) + const headsStorage = await MemoryStorage() + db = await Database({ ipfs, identity: testIdentity, address: databaseId, accessController, directory: './orbitdb', headsStorage, entryStorage }) + const op1 = { op: 'PUT', key: 1, value: 'record 1 on db 1 version 1' } + const op2 = { op: 'PUT', key: 1, value: 'record 1 on db 1 version 2' } - deepStrictEqual((await Entry.decode(await entryStorage.get(hash))).payload, op) + await db.addOperation(op1) + const hash = await db.addOperation(op2) + + const e = await entryStorage.get(hash) + const bytes = Uint8Array.from(e) + notEqual(bytes.length, 0) await db.close() + + await entryStorage.close() + await headsStorage.close() + await rimraf('./orbitdb') }) }) diff --git a/test/oplog/entry.test.js b/test/oplog/entry.test.js index be4d835..5dc6195 100644 --- a/test/oplog/entry.test.js +++ b/test/oplog/entry.test.js @@ -33,7 +33,8 @@ describe('Entry', function () { it('creates a an empty entry', async () => { const expectedHash = 'zdpuAsKzwUEa8cz9pkJxxFMxLuP3cutA9PDGoLZytrg4RSVEa' const entry = await create(testIdentity, 'A', 'hello') - strictEqual(entry.hash, expectedHash) + const { hash } = await Entry.encode(entry) + strictEqual(hash, expectedHash) strictEqual(entry.id, 'A') strictEqual(entry.clock.id, testIdentity.publicKey) strictEqual(entry.clock.time, 0) @@ -47,7 +48,8 @@ describe('Entry', function () { const expectedHash = 'zdpuAmthfqpHRQjdSpKN5etr1GrreJb7QcU1Hshm6pERnzsxi' const payload = 'hello world' const entry = await create(testIdentity, 'A', payload) - strictEqual(entry.hash, expectedHash) + const { hash } = await Entry.encode(entry) + strictEqual(hash, expectedHash) strictEqual(entry.payload, payload) strictEqual(entry.id, 'A') strictEqual(entry.clock.id, testIdentity.publicKey) @@ -81,7 +83,7 @@ describe('Entry', function () { const payload2 = 'hello again' const entry1 = await create(testIdentity, 'A', payload1) entry1.clock = tickClock(entry1.clock) - const entry2 = await create(testIdentity, 'A', payload2, entry1.clock, [entry1]) + const entry2 = await create(testIdentity, 'A', payload2, null, entry1.clock, [entry1]) strictEqual(entry2.payload, payload2) strictEqual(entry2.next.length, 1) // strictEqual(entry2.hash, expectedHash) @@ -91,7 +93,8 @@ describe('Entry', function () { it('`next` parameter can be an array of strings', async () => { const entry1 = await create(testIdentity, 'A', 'hello1') - const entry2 = await 
create(testIdentity, 'A', 'hello2', null, [entry1.hash]) + const { hash } = await Entry.encode(entry1) + const entry2 = await create(testIdentity, 'A', 'hello2', null, null, [hash]) strictEqual(typeof entry2.next[0] === 'string', true) }) @@ -138,7 +141,7 @@ describe('Entry', function () { it('throws an error if next is not an array', async () => { let err try { - await create(testIdentity, 'A', 'hello', null, {}) + await create(testIdentity, 'A', 'hello', null, null, {}) } catch (e) { err = e } diff --git a/test/oplog/log.test.js b/test/oplog/log.test.js index 043c970..9467125 100644 --- a/test/oplog/log.test.js +++ b/test/oplog/log.test.js @@ -3,7 +3,6 @@ import { rimraf } from 'rimraf' import { copy } from 'fs-extra' import { Log, Entry, Identities, KeyStore, MemoryStorage } from '../../src/index.js' import testKeysPath from '../fixtures/test-keys-path.js' -import { encrypt, decrypt } from '../utils/encrypt.js' const { create } = Entry @@ -61,15 +60,21 @@ describe('Log', function () { }) it('sets one head if multiple are given as params', async () => { - const one = await create(testIdentity, 'A', 'entryA', null, []) - const two = await create(testIdentity, 'A', 'entryB', null, [one.hash]) - const three = await create(testIdentity, 'A', 'entryC', null, [two.hash]) - const four = await create(testIdentity, 'A', 'entryD', null, [two.hash]) + const one = await create(testIdentity, 'A', 'entryA', null, null, []) + const { hash: hash1, bytes: bytes1 } = await Entry.encode(one) + const two = await create(testIdentity, 'A', 'entryB', null, null, [hash1]) + const { hash: hash2, bytes: bytes2 } = await Entry.encode(two) + const three = await create(testIdentity, 'A', 'entryC', null, null, [hash2]) + const { hash: hash3, bytes: bytes3 } = await Entry.encode(three) + const four = await create(testIdentity, 'A', 'entryD', null, null, [hash3]) + const { hash: hash4, bytes: bytes4 } = await Entry.encode(four) const entryStorage = await MemoryStorage() - await entryStorage.put(one.hash, one.bytes) - await entryStorage.put(two.hash, two.bytes) - await entryStorage.put(three.hash, three.bytes) - await entryStorage.put(four.hash, four.bytes) + await entryStorage.put(hash1, bytes1) + await entryStorage.put(hash2, bytes2) + await entryStorage.put(hash3, bytes3) + await entryStorage.put(hash4, bytes4) + three.hash = hash3 + two.hash = hash2 const log = await Log(testIdentity, { logId: 'A', logHeads: [three, three, two, two], entryStorage }) const values = await log.values() const heads = await log.heads() @@ -79,15 +84,22 @@ describe('Log', function () { }) it('sets two heads if two given as params', async () => { - const one = await create(testIdentity, 'A', 'entryA', null, []) - const two = await create(testIdentity, 'A', 'entryB', null, [one.hash]) - const three = await create(testIdentity, 'A', 'entryC', null, [two.hash]) - const four = await create(testIdentity, 'A', 'entryD', null, [two.hash]) + const one = await create(testIdentity, 'A', 'entryA', null, null, []) + const { hash: hash1, bytes: bytes1 } = await Entry.encode(one) + const two = await create(testIdentity, 'A', 'entryB', null, null, [hash1]) + const { hash: hash2, bytes: bytes2 } = await Entry.encode(two) + const three = await create(testIdentity, 'A', 'entryC', null, null, [hash2]) + const { hash: hash3, bytes: bytes3 } = await Entry.encode(three) + const four = await create(testIdentity, 'A', 'entryD', null, null, [hash2]) + const { hash: hash4, bytes: bytes4 } = await Entry.encode(four) const entryStorage = await MemoryStorage() - await 
entryStorage.put(one.hash, one.bytes) - await entryStorage.put(two.hash, two.bytes) - await entryStorage.put(three.hash, three.bytes) - await entryStorage.put(four.hash, four.bytes) + await entryStorage.put(hash1, bytes1) + await entryStorage.put(hash2, bytes2) + await entryStorage.put(hash3, bytes3) + await entryStorage.put(hash4, bytes4) + three.hash = hash3 + four.hash = hash4 + two.hash = hash2 const log = await Log(testIdentity, { logId: 'A', logHeads: [three, four, two], entryStorage }) const values = await log.values() const heads = await log.heads() @@ -143,34 +155,5 @@ describe('Log', function () { strictEqual(values[1].payload, 'hello2') strictEqual(values[2].payload, 'hello3') }) - - it.skip('encrypts a log entry when the payload is a string', async () => { - // const keys = await keystore.createKey('hello1') - - // const privateKey = await keystore.getKey('hello1') - // const publicKey = await keystore.getPublic(keys) - - const encryptPayloadFn = encrypt({ password: 'hello world' }) - const decryptPayloadFn = decrypt({ password: 'hello world' }) - const log = await Log(testIdentity, { encryption: { encryptPayloadFn, decryptPayloadFn } }) - const entry = await log.append('hello1') - const value = await log.get(entry.hash) - strictEqual(value.payload, 'hello1') - }) - - it.skip('encrypts a log entry when the payload is an object', async () => { - // const keys = await keystore.createKey('hello1') - - // const privateKey = await keystore.getKey('hello1') - // const publicKey = await keystore.getPublic(keys) - - const encryptPayloadFn = encrypt({ password: 'hello world' }) - const decryptPayloadFn = decrypt({ password: 'hello world' }) - const log = await Log(testIdentity, { encryption: { encryptPayloadFn, decryptPayloadFn } }) - const entry = await log.append({ test: 'hello1' }) - const value = await log.get(entry.hash) - - deepStrictEqual(value.payload, { test: 'hello1' }) - }) }) }) diff --git a/test/oplog/replicate.test.js b/test/oplog/replicate.test.js index c70c9f4..7c9dfa4 100644 --- a/test/oplog/replicate.test.js +++ b/test/oplog/replicate.test.js @@ -9,7 +9,7 @@ import createHelia from '../utils/create-helia.js' const keysPath = './testkeys' -describe.only('Log - Replication', function () { +describe('Log - Replication', function () { let ipfs1, ipfs2 let id1, id2 let keystore diff --git a/test/orbitdb-encryption.test.js b/test/orbitdb-encryption.test.js index f93a53a..a91bba7 100644 --- a/test/orbitdb-encryption.test.js +++ b/test/orbitdb-encryption.test.js @@ -1,24 +1,30 @@ -import { strictEqual } from 'assert' +import { strictEqual, notEqual } from 'assert' import { rimraf } from 'rimraf' import path from 'path' -import OrbitDB from '../src/orbitdb.js' -// import waitFor from './utils/wait-for.js' +import { createOrbitDB, PasswordEncryption } from '../src/index.js' +// import { encrypt, decrypt, generatePassword } from './utils/encrypt.js' import connectPeers from './utils/connect-nodes.js' import waitFor from './utils/wait-for.js' -// import IPFSAccessController from '../src/access-controllers/ipfs.js' -// import OrbitDBAccessController from '../src/access-controllers/orbitdb.js' import createHelia from './utils/create-helia.js' -import { encrypt, decrypt, generatePassword } from './utils/encrypt.js' + +import * as Block from 'multiformats/block' +import * as dagCbor from '@ipld/dag-cbor' +import { sha256 } from 'multiformats/hashes/sha2' + +const codec = dagCbor +const hasher = sha256 const dbPath = './orbitdb/tests/write-permissions' 
-describe.only('Encryption/Decryption', function () { +describe('Encryption', function () { this.timeout(5000) let ipfs1, ipfs2 let orbitdb1, orbitdb2 let db1, db2 - let encryptionPassword + + let replicationEncryption + let dataEncryption before(async () => { [ipfs1, ipfs2] = await Promise.all([createHelia(), createHelia()]) @@ -26,10 +32,11 @@ describe.only('Encryption/Decryption', function () { await rimraf('./orbitdb') - orbitdb1 = await OrbitDB({ ipfs: ipfs1, id: 'user1', directory: path.join(dbPath, '1') }) - orbitdb2 = await OrbitDB({ ipfs: ipfs2, id: 'user2', directory: path.join(dbPath, '2') }) + orbitdb1 = await createOrbitDB({ ipfs: ipfs1, id: 'user1', directory: path.join(dbPath, '1') }) + orbitdb2 = await createOrbitDB({ ipfs: ipfs2, id: 'user2', directory: path.join(dbPath, '2') }) - encryptionPassword = await generatePassword() + replicationEncryption = await PasswordEncryption({ password: 'hello' }) + dataEncryption = await PasswordEncryption({ password: 'world' }) }) after(async () => { @@ -54,90 +61,315 @@ describe.only('Encryption/Decryption', function () { await rimraf('./ipfs2') }) - afterEach(async () => { - await db1.drop() - await db1.close() - await db2.drop() - await db2.close() + describe('Data is encrypted when replicated to peers', async () => { + afterEach(async () => { + if (db1) { + await db1.drop() + await db1.close() + } + if (db2) { + await db2.drop() + await db2.close() + } + }) + + it('encrypts/decrypts data', async () => { + let connected = false + let updated = false + let error = false + + const encryption = { + data: dataEncryption + } + + db1 = await orbitdb1.open('encryption-test-1', { encryption }) + db2 = await orbitdb2.open(db1.address, { encryption }) + + const onJoin = async (peerId, heads) => { + connected = true + } + db2.events.on('join', onJoin) + + await waitFor(() => connected, () => true) + + const onUpdate = async (peerId, heads) => { + updated = true + } + db2.events.on('update', onUpdate) + + const onError = async (err) => { + // Catch "Could not decrypt entry" errors + console.log(err) + error = true + } + db2.events.on('error', onError) + + const hash1 = await db1.add('record 1') + const hash2 = await db1.add('record 2') + + strictEqual(await db1.get(hash1), 'record 1') + strictEqual(await db1.get(hash2), 'record 2') + + await waitFor(() => updated || error, () => true) + + const all = await db2.all() + + strictEqual(all.length, 2) + strictEqual(all[0].value, 'record 1') + strictEqual(all[1].value, 'record 2') + }) + + it('encrypts/decrypts log', async () => { + let connected = false + let updated = false + let error = false + + const encryption = { + replication: replicationEncryption + } + + db1 = await orbitdb1.open('encryption-test-1', { encryption }) + db2 = await orbitdb2.open(db1.address, { encryption }) + + const onJoin = async (peerId, heads) => { + connected = true + } + db2.events.on('join', onJoin) + + await waitFor(() => connected, () => true) + + const onUpdate = async (peerId, heads) => { + updated = true + } + db2.events.on('update', onUpdate) + + const onError = async (err) => { + // Catch "Could not decrypt entry" errors + console.log(err) + error = true + } + db2.events.on('error', onError) + + const hash1 = await db1.add('record 1') + const hash2 = await db1.add('record 2') + + strictEqual(await db1.get(hash1), 'record 1') + strictEqual(await db1.get(hash2), 'record 2') + + await waitFor(() => updated || error, () => true) + + const all = await db2.all() + + strictEqual(all.length, 2) + 
strictEqual(all[0].value, 'record 1') + strictEqual(all[1].value, 'record 2') + }) + + it('encrypts/decrypts log and data', async () => { + let connected = false + let updated = false + let error = false + + const encryption = { + replication: replicationEncryption, + data: dataEncryption + } + + db1 = await orbitdb1.open('encryption-test-1', { encryption }) + db2 = await orbitdb2.open(db1.address, { encryption }) + + const onJoin = async (peerId, heads) => { + connected = true + } + db2.events.on('join', onJoin) + + await waitFor(() => connected, () => true) + + const onUpdate = async (peerId, heads) => { + updated = true + } + db2.events.on('update', onUpdate) + + const onError = async (err) => { + // Catch "Could not decrypt entry" errors + console.log(err) + error = true + } + db2.events.on('error', onError) + + const hash1 = await db1.add('record 1') + const hash2 = await db1.add('record 2') + + strictEqual(await db1.get(hash1), 'record 1') + strictEqual(await db1.get(hash2), 'record 2') + + await waitFor(() => updated || error, () => true) + + const all = await db2.all() + + strictEqual(all.length, 2) + strictEqual(all[0].value, 'record 1') + strictEqual(all[1].value, 'record 2') + }) + + it('throws an error if log can\'t be decrypted', async () => { + let connected = false + let hasError = false + let error + + const replicationEncryptionWithWrongPassword = await PasswordEncryption({ password: 'olleh' }) + + const encryption = { + replication: replicationEncryption + } + + const encryptionWithWrongPassword = { + replication: replicationEncryptionWithWrongPassword + } + + db1 = await orbitdb1.open('encryption-test-1', { encryption }) + db2 = await orbitdb2.open(db1.address, { encryption: encryptionWithWrongPassword }) + + const onJoin = async (peerId, heads) => { + connected = true + } + db2.events.on('join', onJoin) + + await waitFor(() => connected, () => true) + + const onError = async (err) => { + // Catch "Could not decrypt entry" errors + error = err + hasError = true + } + db2.events.on('error', onError) + + await db1.add('record 1') + + await waitFor(() => hasError, () => true) + + strictEqual(error.message, 'Could not decrypt entry') + + const all = await db2.all() + + strictEqual(all.length, 0) + }) + + it('throws an error if data can\'t be decrypted', async () => { + let connected = false + let hasError = false + let error + + const dataEncryptionWithWrongPassword = await PasswordEncryption({ password: 'olleh' }) + + const encryption = { + data: dataEncryption + } + + const encryptionWithWrongPassword = { + data: dataEncryptionWithWrongPassword + } + + db1 = await orbitdb1.open('encryption-test-1', { encryption }) + db2 = await orbitdb2.open(db1.address, { encryption: encryptionWithWrongPassword }) + + const onJoin = async (peerId, heads) => { + connected = true + } + db2.events.on('join', onJoin) + + await waitFor(() => connected, () => true) + + const onError = async (err) => { + // Catch "Could not decrypt entry" errors + error = err + hasError = true + } + db2.events.on('error', onError) + + await db1.add('record 1') + + await waitFor(() => hasError, () => true) + + strictEqual(error.message, 'Could not decrypt payload') + + const all = await db2.all() + + strictEqual(all.length, 0) + }) }) - it.skip('encrypts/decrypts payload', async () => { - const encryptPayloadFn = encrypt({ password: encryptionPassword }) - const decryptPayloadFn = decrypt({ password: encryptionPassword }) + describe('Data is encrypted in storage', async () => { + afterEach(async () => { + if 
(db1) { + await db1.drop() + await db1.close() + } + }) - db1 = await orbitdb1.open('encryption-test-1', { encryption: { encryptPayloadFn, decryptPayloadFn } }) + it('payload bytes are encrypted in storage', async () => { + let error - const hash = await db1.add('record 1') + const encryption = { + data: dataEncryption + } - for await (const e of db1.log.iterator()) { - console.log('>', e) - } + db1 = await orbitdb1.open('encryption-test-1', { encryption }) - strictEqual(await db1.get(hash), 'record 1') - }) + const onError = async (err) => { + // Catch "Could not decrypt entry" errors + console.log(err) + error = true + } + db1.events.on('error', onError) - it('encrypts/decrypts entry', async () => { - let connected = false - let updated = false - let error = false + const hash1 = await db1.add('record 1') - const encryptPayloadFn = encrypt({ password: encryptionPassword }) - const decryptPayloadFn = decrypt({ password: encryptionPassword }) + const bytes = await db1.log.storage.get(hash1) + const { value } = await Block.decode({ bytes, codec, hasher }) + const payload = value.payload - const encryptEntryFn = encrypt({ password: encryptionPassword }) - const decryptEntryFn = decrypt({ password: encryptionPassword }) + strictEqual(payload.constructor, Uint8Array) - // const decryptPayloadFn2 = encrypt({ password: encryptionPassword + '1' }) - // const decryptEntryFn2 = decrypt({ password: encryptionPassword + '2' }) + try { + await Block.decode({ bytes: payload, codec, hasher }) + } catch (e) { + error = e + } - db1 = await orbitdb1.open('encryption-test-1', { encryption: { encryptEntryFn, decryptEntryFn, encryptPayloadFn, decryptPayloadFn } }) - db2 = await orbitdb2.open(db1.address, { encryption: { encryptEntryFn, decryptEntryFn, encryptPayloadFn, decryptPayloadFn } }) - // db1 = await orbitdb1.open('encryption-test-1', { encryption: { encryptEntryFn, decryptEntryFn } }) - // db2 = await orbitdb2.open(db1.address, { encryption: { encryptEntryFn, decryptEntryFn } }) - // db1 = await orbitdb1.open('encryption-test-1', { encryption: { encryptPayloadFn, decryptPayloadFn } }) - // db2 = await orbitdb2.open(db1.address, { encryption: { encryptPayloadFn, decryptPayloadFn } }) - // db1 = await orbitdb1.open('encryption-test-1') - // db2 = await orbitdb2.open(db1.address) + strictEqual(error.message.startsWith('CBOR decode error'), true) + }) - console.log('connect') + it('entry bytes are encrypted in storage', async () => { + let error - const onJoin = async (peerId, heads) => { - console.log('connected') - connected = true - } - db2.events.on('join', onJoin) + const encryption = { + replication: replicationEncryption + } - await waitFor(() => connected, () => true) + db1 = await orbitdb1.open('encryption-test-1', { encryption }) - const onUpdate = async (peerId, heads) => { - console.log('updated') - updated = true - } - db2.events.on('update', onUpdate) + const onError = async (err) => { + // Catch "Could not decrypt entry" errors + console.log(err) + error = true + } + db1.events.on('error', onError) - const onError = async (err) => { - // Catch "Could not decrypt entry" errors - console.log(err) - error = true - } - db2.events.on('error', onError) + const hash1 = await db1.add('record 1') + let decodedBytes - console.log('write') - const hash1 = await db1.add('record 1') - console.log('hash1', hash1) - const hash2 = await db1.add('record 2') - console.log('hash2', hash2) + try { + const bytes = await db1.log.storage.get(hash1) + decodedBytes = await Block.decode({ bytes, codec, hasher }) + 
await Block.decode({ bytes: decodedBytes, codec, hasher }) + } catch (e) { + error = e + } - strictEqual(await db1.get(hash1), 'record 1') - strictEqual(await db1.get(hash2), 'record 2') - - await waitFor(() => updated || error, () => true) - - const all = await db2.all() - console.log('all', all) - - strictEqual(all.length, 2) - strictEqual(all[0].value, 'record 1') - strictEqual(all[1].value, 'record 2') + notEqual(error, undefined) + strictEqual(error.message.startsWith('CBOR decode error'), true) + strictEqual(decodedBytes.value.constructor, Uint8Array) + }) }) }) diff --git a/test/sync.test.js b/test/sync.test.js index b3d2190..6d7eb28 100644 --- a/test/sync.test.js +++ b/test/sync.test.js @@ -139,8 +139,7 @@ describe('Sync protocol', function () { log1 = await Log(testIdentity1, { logId: 'synclog111', entryStorage: entryStorage1 }) log2 = await Log(testIdentity2, { logId: 'synclog111', entryStorage: entryStorage2 }) - const onSynced = async (bytes) => { - const entry = await Entry.decode(bytes) + const onSynced = async (entry) => { if (await log2.joinEntry(entry)) { syncedHead = entry syncedEventFired = true @@ -207,8 +206,7 @@ describe('Sync protocol', function () { log1 = await Log(testIdentity1, { logId: 'synclog7', entryStorage: entryStorage1 }) log2 = await Log(testIdentity2, { logId: 'synclog7', entryStorage: entryStorage2 }) - const onSynced = async (bytes) => { - const entry = await Entry.decode(bytes) + const onSynced = async (entry) => { if (await log2.joinEntry(entry)) { syncedHead = entry } @@ -291,8 +289,8 @@ describe('Sync protocol', function () { log1 = await Log(testIdentity1, { logId: 'synclog1' }) log2 = await Log(testIdentity2, { logId: 'synclog1' }) - const onSynced = async (bytes) => { - syncedHead = await Entry.decode(bytes) + const onSynced = async (entry) => { + syncedHead = entry syncedEventFired = expectedEntry.hash === syncedHead.hash } @@ -348,8 +346,8 @@ describe('Sync protocol', function () { log1 = await Log(testIdentity1, { logId: 'synclog1' }) log2 = await Log(testIdentity2, { logId: 'synclog1' }) - const onSynced = async (bytes) => { - syncedHead = await Entry.decode(bytes) + const onSynced = async (entry) => { + syncedHead = entry if (expectedEntry) { syncedEventFired = expectedEntry.hash === syncedHead.hash } @@ -434,9 +432,9 @@ describe('Sync protocol', function () { log1 = await Log(testIdentity1, { logId: 'synclog1' }) log2 = await Log(testIdentity2, { logId: 'synclog1' }) - const onSynced = async (bytes) => { + const onSynced = async (entry) => { if (expectedEntry && !syncedEventFired) { - syncedHead = await Entry.decode(bytes) + syncedHead = entry syncedEventFired = expectedEntry.hash === syncedHead.hash } } @@ -518,8 +516,8 @@ describe('Sync protocol', function () { log1 = await Log(testIdentity1, { logId: 'synclog2' }) log2 = await Log(testIdentity2, { logId: 'synclog2' }) - const onSynced = async (bytes) => { - syncedHead = await Entry.decode(bytes) + const onSynced = async (entry) => { + syncedHead = entry if (expectedEntry) { syncedEventFired = expectedEntry ? expectedEntry.hash === syncedHead.hash : false } From 71c2505f20bfff96666b05d2a185ba7b43afa20b Mon Sep 17 00:00:00 2001 From: Hayden Young Date: Mon, 8 Jul 2024 12:52:24 +0200 Subject: [PATCH 4/4] fix: Load browser crypto lib if available. 
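With crypto-browserify removed and webpack's crypto fallback set to false, the encryption module relies on the Web Crypto API exposed on global.crypto in browsers and falls back to Node's built-in crypto module elsewhere. As a rough illustration of what the resolved object is used for (a sketch only; the exact salt handling and iteration count used by aes-gcm-pbkdf2.js may differ), deriving an AES-GCM key from a password with PBKDF2 looks like this:

    // Sketch only: uses the getCrypto() helper defined in
    // src/encryption/aes-gcm-pbkdf2.js below. Both the browser's
    // global.crypto and recent Node versions of the crypto module
    // expose a .subtle property. Salt and iteration count are
    // illustrative, not the values used by the module.
    const deriveKey = async (password, salt) => {
      const { subtle } = getCrypto()
      const keyMaterial = await subtle.importKey(
        'raw',
        new TextEncoder().encode(password),
        'PBKDF2',
        false,
        ['deriveKey']
      )
      return subtle.deriveKey(
        { name: 'PBKDF2', salt, iterations: 100000, hash: 'SHA-256' },
        keyMaterial,
        { name: 'AES-GCM', length: 256 },
        false,
        ['encrypt', 'decrypt']
      )
    }
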
--- conf/webpack.tests.config.js | 2 +- package-lock.json | 280 +------------------------------ package.json | 1 - src/encryption/aes-gcm-pbkdf2.js | 8 +- 4 files changed, 11 insertions(+), 280 deletions(-) diff --git a/conf/webpack.tests.config.js b/conf/webpack.tests.config.js index 071efd1..1bdf35e 100644 --- a/conf/webpack.tests.config.js +++ b/conf/webpack.tests.config.js @@ -38,7 +38,7 @@ export default (env, argv) => { ], fallback: { path: require.resolve('path-browserify'), - crypto: require.resolve('crypto-browserify'), + crypto: false, stream: require.resolve('stream-browserify'), process: false } diff --git a/package-lock.json b/package-lock.json index bbf248a..591984a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -28,7 +28,6 @@ "blockstore-level": "^1.1.7", "c8": "^8.0.1", "cross-env": "^7.0.3", - "crypto-browserify": "^3.12.0", "fs-extra": "^11.2.0", "helia": "^4.0.1", "it-all": "^3.0.4", @@ -7626,17 +7625,6 @@ "dev": true, "peer": true }, - "node_modules/asn1.js": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz", - "integrity": "sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==", - "dev": true, - "dependencies": { - "bn.js": "^4.0.0", - "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0" - } - }, "node_modules/asn1js": { "version": "3.0.5", "resolved": "https://registry.npmjs.org/asn1js/-/asn1js-3.0.5.tgz", @@ -7984,148 +7972,6 @@ "safe-buffer": "^5.0.1" } }, - "node_modules/browserify-cipher": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz", - "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==", - "dev": true, - "dependencies": { - "browserify-aes": "^1.0.4", - "browserify-des": "^1.0.0", - "evp_bytestokey": "^1.0.0" - } - }, - "node_modules/browserify-des": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz", - "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==", - "dev": true, - "dependencies": { - "cipher-base": "^1.0.1", - "des.js": "^1.0.0", - "inherits": "^2.0.1", - "safe-buffer": "^5.1.2" - } - }, - "node_modules/browserify-rsa": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz", - "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==", - "dev": true, - "dependencies": { - "bn.js": "^5.0.0", - "randombytes": "^2.0.1" - } - }, - "node_modules/browserify-rsa/node_modules/bn.js": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", - "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==", - "dev": true - }, - "node_modules/browserify-sign": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.3.tgz", - "integrity": "sha512-JWCZW6SKhfhjJxO8Tyiiy+XYB7cqd2S5/+WeYHsKdNKFlCBhKbblba1A/HN/90YwtxKc8tCErjffZl++UNmGiw==", - "dev": true, - "dependencies": { - "bn.js": "^5.2.1", - "browserify-rsa": "^4.1.0", - "create-hash": "^1.2.0", - "create-hmac": "^1.1.7", - "elliptic": "^6.5.5", - "hash-base": "~3.0", - "inherits": "^2.0.4", - "parse-asn1": "^5.1.7", - "readable-stream": "^2.3.8", - "safe-buffer": "^5.2.1" - }, - "engines": { - "node": ">= 0.12" - } - }, - 
"node_modules/browserify-sign/node_modules/bn.js": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", - "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==", - "dev": true - }, - "node_modules/browserify-sign/node_modules/elliptic": { - "version": "6.5.5", - "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.5.tgz", - "integrity": "sha512-7EjbcmUm17NQFu4Pmgmq2olYMj8nwMnpcddByChSUjArp8F5DQWcIcpriwO4ZToLNAJig0yiyjswfyGNje/ixw==", - "dev": true, - "dependencies": { - "bn.js": "^4.11.9", - "brorand": "^1.1.0", - "hash.js": "^1.0.0", - "hmac-drbg": "^1.0.1", - "inherits": "^2.0.4", - "minimalistic-assert": "^1.0.1", - "minimalistic-crypto-utils": "^1.0.1" - } - }, - "node_modules/browserify-sign/node_modules/elliptic/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", - "dev": true - }, - "node_modules/browserify-sign/node_modules/hash-base": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", - "integrity": "sha512-EeeoJKjTyt868liAlVmcv2ZsUfGHlE3Q+BICOXcZiwN3osr5Q/zFGYmTJpoIzuaSTAwndFy+GqhEwlU4L3j4Ow==", - "dev": true, - "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/browserify-sign/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "dev": true - }, - "node_modules/browserify-sign/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "dev": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/browserify-sign/node_modules/readable-stream/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/browserify-sign/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/browserify-sign/node_modules/string_decoder/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, "node_modules/browserslist": { "version": "4.23.0", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", @@ -8886,7 +8732,8 @@ "version": "1.0.3", "resolved": 
"https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "dev": true + "dev": true, + "peer": true }, "node_modules/cosmiconfig": { "version": "5.2.1", @@ -9014,16 +8861,6 @@ "node": ">=0.8" } }, - "node_modules/create-ecdh": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz", - "integrity": "sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==", - "dev": true, - "dependencies": { - "bn.js": "^4.1.0", - "elliptic": "^6.5.3" - } - }, "node_modules/create-hash": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", @@ -9081,28 +8918,6 @@ "node": ">= 8" } }, - "node_modules/crypto-browserify": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz", - "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==", - "dev": true, - "dependencies": { - "browserify-cipher": "^1.0.0", - "browserify-sign": "^4.0.0", - "create-ecdh": "^4.0.0", - "create-hash": "^1.1.0", - "create-hmac": "^1.1.0", - "diffie-hellman": "^5.0.0", - "inherits": "^2.0.1", - "pbkdf2": "^3.0.3", - "public-encrypt": "^4.0.0", - "randombytes": "^2.0.0", - "randomfill": "^1.0.3" - }, - "engines": { - "node": "*" - } - }, "node_modules/crypto-random-string": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz", @@ -9335,16 +9150,6 @@ "node": ">=18" } }, - "node_modules/des.js": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz", - "integrity": "sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg==", - "dev": true, - "dependencies": { - "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0" - } - }, "node_modules/destroy": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", @@ -9380,17 +9185,6 @@ "node": ">=0.3.1" } }, - "node_modules/diffie-hellman": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", - "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", - "dev": true, - "dependencies": { - "bn.js": "^4.1.0", - "miller-rabin": "^4.0.0", - "randombytes": "^2.0.0" - } - }, "node_modules/dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -14388,19 +14182,6 @@ "node": ">=8.6" } }, - "node_modules/miller-rabin": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", - "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", - "dev": true, - "dependencies": { - "bn.js": "^4.0.0", - "brorand": "^1.0.1" - }, - "bin": { - "miller-rabin": "bin/miller-rabin" - } - }, "node_modules/mime": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", @@ -15522,36 +15303,6 @@ "node": ">=6" } }, - "node_modules/parse-asn1": { - "version": "5.1.7", - "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.7.tgz", - "integrity": "sha512-CTM5kuWR3sx9IFamcl5ErfPl6ea/N8IYwiJ+vpeB2g+1iknv7zBl5uPwbMbRVznRVbrNY6lGuDoE5b30grmbqg==", - "dev": true, - "dependencies": { - "asn1.js": 
"^4.10.1", - "browserify-aes": "^1.2.0", - "evp_bytestokey": "^1.0.3", - "hash-base": "~3.0", - "pbkdf2": "^3.1.2", - "safe-buffer": "^5.2.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/parse-asn1/node_modules/hash-base": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", - "integrity": "sha512-EeeoJKjTyt868liAlVmcv2ZsUfGHlE3Q+BICOXcZiwN3osr5Q/zFGYmTJpoIzuaSTAwndFy+GqhEwlU4L3j4Ow==", - "dev": true, - "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/parse-json": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", @@ -16106,7 +15857,8 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true + "dev": true, + "peer": true }, "node_modules/progress-events": { "version": "1.0.0", @@ -16172,20 +15924,6 @@ "uint8arrays": "^5.0.1" } }, - "node_modules/public-encrypt": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", - "integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==", - "dev": true, - "dependencies": { - "bn.js": "^4.1.0", - "browserify-rsa": "^4.0.0", - "create-hash": "^1.1.0", - "parse-asn1": "^5.0.0", - "randombytes": "^2.0.1", - "safe-buffer": "^5.1.2" - } - }, "node_modules/pump": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", @@ -16272,16 +16010,6 @@ "safe-buffer": "^5.1.0" } }, - "node_modules/randomfill": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz", - "integrity": "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==", - "dev": true, - "dependencies": { - "randombytes": "^2.0.5", - "safe-buffer": "^5.1.0" - } - }, "node_modules/range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", diff --git a/package.json b/package.json index 35e52a1..81e98e3 100644 --- a/package.json +++ b/package.json @@ -37,7 +37,6 @@ "blockstore-level": "^1.1.7", "c8": "^8.0.1", "cross-env": "^7.0.3", - "crypto-browserify": "^3.12.0", "fs-extra": "^11.2.0", "helia": "^4.0.1", "it-all": "^3.0.4", diff --git a/src/encryption/aes-gcm-pbkdf2.js b/src/encryption/aes-gcm-pbkdf2.js index 360d99c..a278893 100644 --- a/src/encryption/aes-gcm-pbkdf2.js +++ b/src/encryption/aes-gcm-pbkdf2.js @@ -8,13 +8,17 @@ - https://github.com/mdn/dom-examples/blob/main/web-crypto/encrypt-decrypt/aes-gcm.js */ -// import crypto from 'crypto' +import crypto from 'crypto' import { concat } from 'uint8arrays/concat' import { fromString } from 'uint8arrays/from-string' // Polyfill fix for browsers const getCrypto = () => { - return global.crypto + if (typeof global.crypto !== 'undefined') { + return global.crypto + } else { + return crypto + } } // WebKit on Linux does not support deriving a key from an empty PBKDF2 key.