Mirror of https://github.com/orbitdb/orbitdb.git
Jsdocs (#73)
* fix: Load correct main.
* docs: Enable markdown.
* docs: Add jsdoc blocks.
* docs: Separate orbitdb into module and namespace.
* docs: Database module and namespace.
* docs: Default values for storage.
* docs: Match param naming across modules.
* docs: Use @link where possible.
* docs: Use @example for code.
* docs: KeyStore module and namespace documentation.
* docs: API for Access controller management.
* docs: Document Database events.
* docs: Formatting.
* docs: Notes on Custom AC and including a canAppend function.
* docs: Standardize primitive data types.
* docs: Proposed submodule structure.
* docs: Composed storage. How to create custom storage.
* docs: annotations for all storage modules.
* docs: Documents.
* docs: KeyValueIndexed.
* docs: Provide example for specifying storage for keyvalue.
* docs: KeyValue.
* docs: Remove const documentation where not required.
* docs: KeyStore static functions.
* docs: Identities.
* docs: Identity.
* refactor: Remove unused param.
* docs: Address.
* docs: Address.
* docs: Manifest.
* docs: IPFSAccessController.
* docs: OrbitDBAccessController.
* docs: Utils.
* test: check for correct message.
This commit is contained in:
parent 5ab0bcdbf5
commit 60fbe47ee3
3	jsdoc.json	Normal file
@@ -0,0 +1,3 @@
+ {
+   "plugins": ["plugins/markdown"]
+ }
@@ -16,7 +16,7 @@
  "dist"
  ],
  "type": "module",
- "main": "src/OrbitDB.js",
+ "main": "src/orbitdb.js",
  "dependencies": {
  "@ipld/dag-cbor": "^9.0.0",
  "@libp2p/crypto": "^1.0.12",
@@ -62,7 +62,7 @@
  "build:examples": "webpack --config conf/webpack.example.config.js",
  "build:dist": "webpack --config conf/webpack.config.js",
  "build:debug": "webpack --config conf/webpack.debug.config.js",
- "build:docs": "jsdoc -r src/**",
+ "build:docs": "jsdoc -c ./jsdoc.json -r src/**",
  "build:tests": "rm -f test/browser/bundle.js* && webpack --config ./conf/webpack.tests.config.js",
  "prepublishOnly": "npm run build",
  "lint": "standard --env=mocha",
|
@ -1,12 +1,41 @@
|
||||
/** @module AccessControllers */
|
||||
/**
|
||||
* @module AccessControllers
|
||||
* @description
|
||||
* Provides a platform for managing access controllers. Supported access
|
||||
* controllers can be added and removed from the access controller list, and
|
||||
* can load the associated module if they are supported.
|
||||
*
|
||||
* An AccessController module only needs to expose one function,
* canAppend(entry), which returns true if the entry can be appended to the
* oplog and false otherwise:
* ```javascript
* const CustomAccessController = ({ write } = {}) => async () => {
*   const canAppend = async (entry) => {
*     // Use entry.identity to determine whether the entry can be appended.
*     // Return true if the entry can be appended to the oplog,
*     // or false otherwise.
*   }
*
*   return { canAppend }
* }
* ```
|
||||
*/
|
||||
import IPFSAccessController from './ipfs.js'
|
||||
import OrbitDBAccessController from './orbitdb.js'
|
||||
|
||||
/**
|
||||
* An object of the available access controller types.
* @name types
* @type {Object}
* @return {Object} An object mapping access controller types to their modules.
|
||||
*/
|
||||
const types = {
|
||||
ipfs: IPFSAccessController,
|
||||
orbitdb: OrbitDBAccessController
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets an access controller module specified by type.
|
||||
* @param {string} type A valid access controller type.
|
||||
* @return {AccessController} The access controller module.
|
||||
*/
|
||||
const get = (type) => {
|
||||
if (!isSupported(type)) {
|
||||
throw new Error(`AccessController type '${type}' is not supported`)
|
||||
@ -14,10 +43,25 @@ const get = (type) => {
|
||||
return types[type]
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether an access controller type is supported.
* @param {string} type A valid access controller type.
* @return {boolean} True if the access controller type is supported, false
* otherwise.
|
||||
*/
|
||||
const isSupported = type => {
|
||||
return Object.keys(types).includes(type)
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an access controller module to the list of supported access controller
|
||||
* types.
|
||||
* @param {AccessController} accessController A compatible access controller
|
||||
* module.
|
||||
* @throws Access controller `type` already added if the access controller is
|
||||
* already supported.
|
||||
* @throws Given AccessController class needs to implement: type if the access
|
||||
* controller module does not implement a type property.
|
||||
*/
|
||||
const add = (accessController) => {
|
||||
if (types[accessController.type]) {
|
||||
throw new Error(`Access controller '${accessController.type}' already added.`)
|
||||
@ -30,6 +74,10 @@ const add = (accessController) => {
|
||||
types[accessController.type] = accessController
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes an access controller from the types list.
|
||||
* @param {string} type A valid access controller type.
|
||||
*/
|
||||
const remove = type => {
|
||||
delete types[type]
|
||||
}
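As a usage sketch for the management functions above, the snippet below registers the custom access controller from the module description and resolves it again by type. The import path, and the assumption that add, isSupported, get and remove are exported from this module, are for illustration only.

```javascript
// Minimal sketch, assuming this module exports add, isSupported, get and remove.
import { add, isSupported, get, remove } from './src/access-controllers/index.js'

const CustomAccessController = ({ write } = {}) => async () => {
  const canAppend = async (entry) => {
    // Inspect entry.identity here to decide whether the entry can be appended.
    return true
  }
  return { canAppend }
}
// add() requires the module to carry a 'type', otherwise it throws.
CustomAccessController.type = 'custom'

add(CustomAccessController)
console.log(isSupported('custom'))                        // true
console.log(get('custom') === CustomAccessController)     // true
remove('custom')                                          // unregister it again
```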
|
||||
|
@ -26,6 +26,31 @@ const AccessControlList = async ({ storage, type, params }) => {
|
||||
|
||||
const type = 'ipfs'
|
||||
|
||||
/**
|
||||
* Creates an instance of IPFSAccessController.
|
||||
* @callback IPFSAccessController
|
||||
* @param {Object} params Various parameters for configuring the access
|
||||
* controller.
|
||||
* @param {module:OrbitDB} params.orbitdb An OrbitDB instance.
|
||||
* @param {module:Identities} params.identities An Identities instance.
|
||||
* @param {string} [params.address] The address of the database.
|
||||
* @function
|
||||
* @instance
|
||||
* @async
|
||||
* @memberof module:AccessControllers.AccessControllers-IPFS
|
||||
*/
|
||||
|
||||
/**
|
||||
* Defines an IPFS access controller.
|
||||
* @param {Object} params Various parameters for configuring the
|
||||
* IPFSAccessController.
|
||||
* @param {Array} [params.write] An array of identity ids who can write to the
|
||||
* database.
|
||||
* @param {module:Storage} [params.storage] An instance of a compatible storage.
|
||||
* @returns {module:AccessControllers.AccessControllers-IPFS} An
|
||||
* IPFSAccessController function.
|
||||
* @memberof module:AccessControllers
|
||||
*/
|
||||
const IPFSAccessController = ({ write, storage } = {}) => async ({ orbitdb, identities, address }) => {
|
||||
storage = storage || await ComposedStorage(
|
||||
await LRUStorage({ size: 1000 }),
|
||||
@ -42,6 +67,13 @@ const IPFSAccessController = ({ write, storage } = {}) => async ({ orbitdb, iden
|
||||
address = pathJoin('/', type, address)
|
||||
}
|
||||
|
||||
/**
|
||||
* Verifies the write permission of an entry.
|
||||
* @param {module:Log~Entry} entry An entry to verify.
|
||||
* @returns {boolean} True if the entry's identity has write permission,
|
||||
* false otherwise.
|
||||
* @memberof module:AccessControllers.AccessControllers-IPFS
|
||||
*/
|
||||
const canAppend = async (entry) => {
|
||||
const writerIdentity = await identities.getIdentity(entry.identity)
|
||||
if (!writerIdentity) {
|
||||
|
@ -7,6 +7,30 @@ import IPFSAccessController from './ipfs.js'
|
||||
|
||||
const type = 'orbitdb'
|
||||
|
||||
/**
|
||||
* Creates an instance of OrbitDBAccessController.
|
||||
* @callback OrbitDBAccessController
|
||||
* @param {Object} params Various parameters for configuring the access
|
||||
* controller.
|
||||
* @param {module:OrbitDB} params.orbitdb An OrbitDB instance.
|
||||
* @param {module:Identities} params.identities An Identities instance.
|
||||
* @param {string} [params.address] The address of the database.
|
||||
* @function
|
||||
* @instance
|
||||
* @async
|
||||
* @memberof module:AccessControllers.AccessControllers-OrbitDB
|
||||
*/
|
||||
|
||||
/**
|
||||
* Defines an OrbitDB access controller.
|
||||
* @param {Object} params Various parameters for configuring the
* OrbitDBAccessController.
|
||||
* @param {Array} [params.write] An array of identity ids who can write to the
|
||||
* database.
|
||||
* @returns {module:AccessControllers.AccessControllers-OrbitDB} An
|
||||
* OrbitDBAccessController function.
|
||||
* @memberof module:AccessControllers
|
||||
*/
|
||||
const OrbitDBAccessController = ({ write } = {}) => async ({ orbitdb, identities, address }) => {
|
||||
address = address || 'default-access-controller'
|
||||
write = write || [orbitdb.identity.id]
|
||||
@ -15,7 +39,14 @@ const OrbitDBAccessController = ({ write } = {}) => async ({ orbitdb, identities
|
||||
const db = await orbitdb.open(ensureACAddress(address), { type: 'keyvalue', AccessController: IPFSAccessController({ write }) })
|
||||
address = db.address
|
||||
|
||||
// Return true if entry is allowed to be added to the database
|
||||
/**
|
||||
* Verifies the write permission of an entry.
|
||||
* @param {module:Log~Entry} entry An entry to verify.
|
||||
* @returns {boolean} True if the entry's identity has write permission,
|
||||
* false otherwise.
|
||||
* @memberof module:AccessControllers.AccessControllers-OrbitDB
|
||||
* @instance
|
||||
*/
|
||||
const canAppend = async (entry) => {
|
||||
const writerIdentity = await identities.getIdentity(entry.identity)
|
||||
if (!writerIdentity) {
|
||||
@ -32,6 +63,15 @@ const OrbitDBAccessController = ({ write } = {}) => async ({ orbitdb, identities
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the access capabilities of the OrbitDB access controller.
|
||||
*
|
||||
* The returned capabilities will be a mixture of admin and write access
|
||||
* addresses.
|
||||
* @returns {Array} A list of addresses with admin and write access.
|
||||
* @memberof module:AccessControllers.AccessControllers-OrbitDB
|
||||
* @instance
|
||||
*/
|
||||
const capabilities = async () => {
|
||||
const _capabilities = []
|
||||
for await (const entry of db.iterator()) {
|
||||
@ -55,27 +95,64 @@ const OrbitDBAccessController = ({ write } = {}) => async ({ orbitdb, identities
|
||||
return _capabilities
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a list of addresses with the specified capability.
|
||||
* @param {string} capability A capability (e.g. write).
|
||||
* @returns {Array} One or more addresses with the specified capability.
|
||||
* @memberof module:AccessControllers.AccessControllers-OrbitDB
|
||||
* @instance
|
||||
*/
|
||||
const get = async (capability) => {
|
||||
const _capabilities = await capabilities()
|
||||
return _capabilities[capability] || new Set([])
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the underlying access control database.
|
||||
* @memberof module:AccessControllers.AccessControllers-OrbitDB
|
||||
* @instance
|
||||
*/
|
||||
const close = async () => {
|
||||
await db.close()
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether an address has a capability.
|
||||
* @param {string} capability A capability (e.g. write).
|
||||
* @param {string} key An address.
|
||||
* @returns {boolean} True if the address has the capability, false
|
||||
* otherwise.
|
||||
* @memberof module:AccessControllers.AccessControllers-OrbitDB
|
||||
* @instance
|
||||
*/
|
||||
const hasCapability = async (capability, key) => {
|
||||
// Write keys and admins keys are allowed
|
||||
const access = new Set(await get(capability))
|
||||
return access.has(key) || access.has('*')
|
||||
}
|
||||
|
||||
/**
|
||||
* Grants a capability to an address, storing it to the access control
|
||||
* database.
|
||||
* @param {string} capability A capability (e.g. write).
|
||||
* @param {string} key An address.
|
||||
* @memberof module:AccessControllers.AccessControllers-OrbitDB
|
||||
* @instance
|
||||
*/
|
||||
const grant = async (capability, key) => {
|
||||
// Merge current keys with the new key
|
||||
const capabilities = new Set([...(await db.get(capability) || []), ...[key]])
|
||||
await db.put(capability, Array.from(capabilities.values()))
|
||||
}
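A short sketch of how grant and revoke might be used at runtime. It assumes the database was opened with this access controller and that the controller instance is reachable as db.access; both names are illustrative.

```javascript
// Sketch only: `db` stands for a database opened with OrbitDBAccessController,
// with the access controller instance assumed to be exposed as `db.access`.
const shareWriteAccess = async (db, otherIdentityId) => {
  const ac = db.access

  await ac.grant('write', otherIdentityId)                      // allow writes
  console.log(await ac.hasCapability('write', otherIdentityId)) // true
  console.log(await ac.capabilities())                          // admin + write addresses

  await ac.revoke('write', otherIdentityId)                     // take it back
}
```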
|
||||
|
||||
/**
|
||||
* Revokes a capability from an address, removing it from the access control
|
||||
* database.
|
||||
* @param {string} capability A capability (e.g. write).
|
||||
* @param {string} key An address.
|
||||
* @memberof module:AccessControllers.AccessControllers-OrbitDB
|
||||
* @instance
|
||||
*/
|
||||
const revoke = async (capability, key) => {
|
||||
const capabilities = new Set(await db.get(capability) || [])
|
||||
capabilities.delete(key)
|
||||
|
@ -1,8 +1,19 @@
|
||||
/** @namespace Address */
|
||||
/**
|
||||
* @module Address
|
||||
* @description OrbitDB database address verification.
|
||||
*/
|
||||
import { CID } from 'multiformats/cid'
|
||||
import { base58btc } from 'multiformats/bases/base58'
|
||||
import { posixJoin } from './utils/path-join.js'
|
||||
|
||||
/**
|
||||
* Validates an OrbitDB database address.
|
||||
* @function
|
||||
* @param {OrbitDBAddress|string} address An OrbitDB database address.
|
||||
* @return {boolean} True if the address is a valid OrbitDB database address,
|
||||
* false otherwise.
|
||||
* @static
|
||||
*/
|
||||
const isValidAddress = (address) => {
|
||||
address = address.toString()
|
||||
|
||||
@ -25,6 +36,15 @@ const isValidAddress = (address) => {
|
||||
return cid !== undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses an OrbitDB database address.
|
||||
* @function
|
||||
* @param {OrbitDBAddress|string} address A valid OrbitDB database address.
|
||||
* @return {OrbitDBAddress} An instance of OrbitDBAddress.
|
||||
* @throws Not a valid OrbitDB address if no address is provided.
|
||||
* @throws Not a valid OrbitDB address if address is invalid.
|
||||
* @static
|
||||
*/
|
||||
const parseAddress = (address) => {
|
||||
if (!address) {
|
||||
throw new Error(`Not a valid OrbitDB address: ${address}`)
|
||||
@ -37,14 +57,41 @@ const parseAddress = (address) => {
|
||||
return OrbitDBAddress(address)
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an instance of OrbitDBAddress.
|
||||
* @function
|
||||
* @param {OrbitDBAddress|string} address A valid OrbitDB database address.
|
||||
* @returns {OrbitDBAddress} An instance of OrbitDBAddress.
|
||||
* @instance
|
||||
*/
|
||||
const OrbitDBAddress = (address) => {
|
||||
/**
|
||||
* @namespace module:Address~OrbitDBAddress
|
||||
* @description The instance returned by {@link module:Address~OrbitDBAddress}.
|
||||
*/
|
||||
|
||||
if (address && address.protocol === 'orbitdb' && address.path) {
|
||||
return address
|
||||
}
|
||||
|
||||
/**
|
||||
* The 'orbitdb' protocol.
|
||||
* @memberof module:Address~OrbitDBAddress
|
||||
*/
|
||||
const protocol = 'orbitdb'
|
||||
|
||||
/**
|
||||
* The path without the /orbitdb/ prefix.
|
||||
* @memberof module:Address~OrbitDBAddress
|
||||
*/
|
||||
const path = address.replace('/orbitdb/', '').replace('\\orbitdb\\', '')
|
||||
|
||||
/**
|
||||
* Returns OrbitDBAddress as a string.
|
||||
* @function
|
||||
* @returns {string} The string form of OrbitDBAddress.
|
||||
* @memberof module:Address~OrbitDBAddress
|
||||
*/
|
||||
const toString = () => {
|
||||
return posixJoin('/', protocol, path)
|
||||
}
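A small usage sketch for the address helpers above, assuming isValidAddress and parseAddress are exported from this module and that the import path matches the repository layout:

```javascript
import { isValidAddress, parseAddress } from './src/address.js' // path and named exports assumed

// Inspect an address taken from an opened database (e.g. db.address).
const describeAddress = (address) => {
  if (!isValidAddress(address)) {
    throw new Error(`${address} is not a valid OrbitDB address`)
  }
  const parsed = parseAddress(address)
  return {
    protocol: parsed.protocol,   // 'orbitdb'
    path: parsed.path,           // the address without the /orbitdb/ prefix
    full: parsed.toString()      // '/orbitdb/<path>'
  }
}
```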
|
||||
|
132	src/database.js
@ -1,4 +1,38 @@
|
||||
/** @module Database */
|
||||
/**
|
||||
* @module Database
|
||||
* @description
|
||||
* Database is the base class for OrbitDB data stores and handles all lower
|
||||
* level add operations and database sync-ing using IPFS.
|
||||
*
|
||||
* Database should be instantiated and initialized when implementing a
|
||||
* compatible datastore:
|
||||
* ```
|
||||
* const CustomDataStore = () => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
|
||||
* const database = await Database({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate })
|
||||
* const { addOperation, log } = database
|
||||
*
|
||||
* const put = async (key, value) => {
|
||||
* return addOperation({ op: 'ADD', key, value })
|
||||
* }
|
||||
*
|
||||
* const get = async (hash) => {
|
||||
* const entry = await log.get(hash)
|
||||
* return entry.payload.value
|
||||
* }
|
||||
*
|
||||
* return {
|
||||
* ...database,
|
||||
* type: 'custom-data-store',
|
||||
* put,
|
||||
* get
|
||||
* }
|
||||
* }
|
||||
*
|
||||
* export default CustomDataStore
|
||||
* ```
|
||||
* The functions put and get are recommended but not mandatory. For example,
|
||||
* the Events data store uses a function called `add`.
|
||||
*/
|
||||
import { EventEmitter } from 'events'
|
||||
import PQueue from 'p-queue'
|
||||
import Sync from './sync.js'
|
||||
@ -9,10 +43,60 @@ import pathJoin from './utils/path-join.js'
|
||||
const defaultReferencesCount = 16
|
||||
const defaultCacheSize = 1000
|
||||
|
||||
/**
|
||||
* Creates an instance of Database.
|
||||
* @function
|
||||
* @param {Object} params One or more parameters for configuring Database.
|
||||
* @param {IPFS} params.ipfs An IPFS instance.
|
||||
* @param {Identity} [params.identity] An Identity instance.
|
||||
* @param {string} [params.address] The address of the database.
|
||||
* @param {string} [params.name] The name of the database.
|
||||
* @param {module:AccessControllers} [params.access] An AccessController
|
||||
* instance.
|
||||
* @param {string} [params.directory] A location for storing Database-related
|
||||
* data. Defaults to ./orbitdb/[params.address].
|
||||
* @param {*} [params.meta={}] The database's metadata.
|
||||
* @param {module:Storage} [params.headsStorage] A compatible storage
|
||||
* instance for storing log heads. Defaults to ComposedStorage.
|
||||
* @param {module:Storage} [params.entryStorage] A compatible storage instance
|
||||
* for storing log entries. Defaults to ComposedStorage.
|
||||
* @param {module:Storage} [params.indexStorage] A compatible storage
|
||||
* instance for storing an index of log entries. Defaults to ComposedStorage.
|
||||
* @param {number} [params.referencesCount=16] The maximum distance between
|
||||
* references to other entries.
|
||||
* @param {boolean} [params.syncAutomatically=false] If true, sync databases
|
||||
* automatically. Otherwise, false.
|
||||
* @param {function} [params.onUpdate] A function callback. Fired when an
|
||||
* entry is added to the oplog.
|
||||
* @return {module:Database~Database} An instance of Database.
|
||||
* @instance
|
||||
*/
|
||||
const Database = async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
|
||||
/**
|
||||
* @namespace module:Database~Database
|
||||
* @description The instance returned by {@link module:Database}.
|
||||
* @description The instance returned by {@link module:Database~Database}.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Event fired when an update occurs.
|
||||
* @event module:Database~Database#update
|
||||
* @param {module:Entry} entry An entry.
|
||||
* @example
|
||||
* database.events.on('update', (entry) => ...)
|
||||
*/
|
||||
|
||||
/**
|
||||
* Event fired when a close occurs.
|
||||
* @event module:Database~Database#close
|
||||
* @example
|
||||
* database.events.on('close', () => ...)
|
||||
*/
|
||||
|
||||
/**
|
||||
* Event fired when a drop occurs.
|
||||
* @event module:Database~Database#drop
|
||||
* @example
|
||||
* database.events.on('drop', () => ...)
|
||||
*/
|
||||
|
||||
directory = pathJoin(directory || './orbitdb', `./${address}/`)
|
||||
@ -36,9 +120,29 @@ const Database = async ({ ipfs, identity, address, name, access, directory, meta
|
||||
|
||||
const log = await Log(identity, { logId: address, access, entryStorage, headsStorage, indexStorage })
|
||||
|
||||
/**
|
||||
* Event emitter that emits updates.
|
||||
* @name events
|
||||
* @type {EventEmitter}
|
||||
* @fires update when an entry is added to the database.
|
||||
* @fires close When the database is closed.
|
||||
* @fires drop When the database is dropped.
|
||||
* @memberof module:Database~Database
|
||||
* @instance
|
||||
*/
|
||||
const events = new EventEmitter()
|
||||
|
||||
const queue = new PQueue({ concurrency: 1 })
|
||||
|
||||
/**
|
||||
* Adds an operation to the oplog.
|
||||
* @function addOperation
|
||||
* @param {*} op Some operation to add to the oplog.
|
||||
* @return {string} The hash of the operation.
|
||||
* @memberof module:Database~Database
|
||||
* @instance
|
||||
* @async
|
||||
*/
|
||||
const addOperation = async (op) => {
|
||||
const task = async () => {
|
||||
const entry = await log.append(op, { referencesCount })
|
||||
@ -70,6 +174,12 @@ const Database = async ({ ipfs, identity, address, name, access, directory, meta
|
||||
await queue.add(task)
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the database, stopping sync and closing the oplog.
|
||||
* @memberof module:Database~Database
|
||||
* @instance
|
||||
* @async
|
||||
*/
|
||||
const close = async () => {
|
||||
await sync.stop()
|
||||
await queue.onIdle()
|
||||
@ -77,15 +187,27 @@ const Database = async ({ ipfs, identity, address, name, access, directory, meta
|
||||
events.emit('close')
|
||||
}
|
||||
|
||||
/**
|
||||
* Drops the database, clearing the oplog.
|
||||
* @memberof module:Database~Database
|
||||
* @instance
|
||||
* @async
|
||||
*/
|
||||
const drop = async () => {
|
||||
await queue.onIdle()
|
||||
await log.clear()
|
||||
events.emit('drop')
|
||||
}
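A sketch of wiring up the update, close and drop events documented above, where `db` stands for any data store built on this module:

```javascript
// Sketch: listen for lifecycle events on a store that spreads ...database.
const watchLifecycle = (db) => {
  db.events.on('update', (entry) => console.log('new entry', entry.hash))
  db.events.on('close', () => console.log('database closed'))
  db.events.on('drop', () => console.log('database dropped'))
}

// close() stops syncing and closes the oplog; drop() clears it. Both emit the
// corresponding event:
// await db.close()
// await db.drop()
```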
|
||||
|
||||
// Start the Sync protocol
|
||||
// Sync protocol exchanges OpLog heads (latest known entries) between peers when they connect
|
||||
// Sync emits 'join', 'leave' and 'error' events through the given event emitter
|
||||
/**
|
||||
* Starts the [Sync protocol]{@link module:Sync~Sync}.
|
||||
*
|
||||
* Sync protocol exchanges OpLog heads (latest known entries) between peers
|
||||
* when they connect.
|
||||
* @memberof module:Database~Database
|
||||
* @instance
|
||||
* @async
|
||||
*/
|
||||
const sync = await Sync({ ipfs, log, events, onSynced: applyOperation, start: syncAutomatically })
|
||||
|
||||
return {
|
||||
|
@ -1,12 +1,62 @@
|
||||
/**
|
||||
* @namespace Database-Documents
|
||||
* @memberof module:Database
|
||||
* @description Documents Database
|
||||
* @description Documents database.
|
||||
* @example <caption>Create documents db with default options</caption>
|
||||
* import { create } from 'IPFS'
|
||||
*
|
||||
* const ipfs = await create()
|
||||
* const Partial = Documents()
|
||||
* const documents = await Partial({ ipfs })
|
||||
* @example <caption>Create documents db with custom index</caption>
|
||||
* import { create } from 'IPFS'
|
||||
*
|
||||
* const ipfs = await create()
|
||||
* const options = { indexBy: 'myCustomId'}
|
||||
* const Partial = Documents(options)
|
||||
* const documents = await Partial({ ipfs })
|
||||
*/
|
||||
import Database from '../database.js'
|
||||
|
||||
const DefaultOptions = { indexBy: '_id' }
|
||||
|
||||
/**
|
||||
* Creates an instance of Documents.
|
||||
* @callback Documents
|
||||
* @param {Object} params One or more parameters for configuring Database.
|
||||
* @param {IPFS} params.ipfs An IPFS instance.
|
||||
* @param {Identity} [params.identity] An Identity instance.
|
||||
* @param {string} [params.address] The address of the database.
|
||||
* @param {string} [params.name] The name of the database.
|
||||
* @param {module:AccessControllers} [params.access] An AccessController
|
||||
* instance.
|
||||
* @param {string} [params.directory] A location for storing Database-related
|
||||
* data. Defaults to ./orbitdb/[params.address].
|
||||
* @param {*} [params.meta={}] The database's metadata.
|
||||
* @param {module:Storage} [params.headsStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
|
||||
* log heads. Defaults to ComposedStorage(LRUStorage, IPFSBlockStorage).
|
||||
* @param {module:Storage} [params.entryStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
|
||||
* log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
|
||||
* @param {module:Storage} [params.indexStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing an index of log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
|
||||
* @param {number} [params.referencesCount] The maximum distance between
|
||||
* references to other entries.
|
||||
* @param {boolean} [params.syncAutomatically=false] If true, sync databases
|
||||
* automatically. Otherwise, false.
|
||||
* @param {function} [params.onUpdate] A function callback. Fired when an
|
||||
* entry is added to the oplog.
|
||||
* @function
|
||||
* @instance
|
||||
* @async
|
||||
* @memberof module:Database.Database-Documents
|
||||
*/
|
||||
|
||||
/**
|
||||
* Defines a Documents database.
|
||||
* @param {Object} params Various parameters for configuring the Documents store.
* @param {string} [params.indexBy=_id] The name of the field by which documents are indexed.
|
||||
* @returns {module:Database.Database-Documents} A Documents function.
|
||||
* @memberof module:Database
|
||||
*/
|
||||
const Documents = ({ indexBy } = DefaultOptions) => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
|
||||
const database = await Database({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically })
|
||||
|
||||
@ -14,9 +64,11 @@ const Documents = ({ indexBy } = DefaultOptions) => async ({ ipfs, identity, add
|
||||
|
||||
/**
|
||||
* Stores a document to the store.
|
||||
*
|
||||
* @function
|
||||
* @param {Object} doc An object representing a key/value list of fields.
|
||||
* @returns {string} The hash of the new oplog entry.
|
||||
* @memberof module:Database.Database-Documents
|
||||
* @instance
|
||||
*/
|
||||
const put = async (doc) => {
|
||||
const key = doc[indexBy]
|
||||
@ -28,9 +80,11 @@ const Documents = ({ indexBy } = DefaultOptions) => async ({ ipfs, identity, add
|
||||
|
||||
/**
|
||||
* Deletes a document from the store.
|
||||
*
|
||||
* @function
|
||||
* @param {string} key The key of the doc to delete.
|
||||
* @returns {string} The hash of the new oplog entry.
|
||||
* @memberof module:Database.Database-Documents
|
||||
* @instance
|
||||
*/
|
||||
const del = async (key) => {
|
||||
if (!await get(key)) { throw new Error(`No document with key '${key}' in the database`) }
|
||||
@ -40,9 +94,11 @@ const Documents = ({ indexBy } = DefaultOptions) => async ({ ipfs, identity, add
|
||||
|
||||
/**
|
||||
* Gets a document from the store by key.
|
||||
*
|
||||
* @function
|
||||
* @param {string} key The key of the doc to get.
|
||||
* @returns {Object} The doc corresponding to key or null.
|
||||
* @memberof module:Database.Database-Documents
|
||||
* @instance
|
||||
*/
|
||||
const get = async (key) => {
|
||||
for await (const doc of iterator()) {
|
||||
@ -54,9 +110,12 @@ const Documents = ({ indexBy } = DefaultOptions) => async ({ ipfs, identity, add
|
||||
|
||||
/**
|
||||
* Queries the document store for documents matching mapper filters.
|
||||
*
|
||||
* @param {function(Object)} findFn A function for querying for specific results.
|
||||
* @function
|
||||
* @param {function(Object)} findFn A function for querying for specific
|
||||
* results.
|
||||
* @returns {Array} Found documents.
|
||||
* @memberof module:Database.Database-Documents
|
||||
* @instance
|
||||
*/
|
||||
const query = async (findFn) => {
|
||||
const results = []
|
||||
@ -70,6 +129,15 @@ const Documents = ({ indexBy } = DefaultOptions) => async ({ ipfs, identity, add
|
||||
return results
|
||||
}
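A sketch of how put, get, query and del fit together, where `documents` stands for an instance created as in the examples at the top of this file:

```javascript
const documentsExample = async (documents) => {
  // put() stores the document under its indexBy field ('_id' by default).
  const hash = await documents.put({ _id: 'doc1', title: 'Hello world' })

  const doc = await documents.get('doc1')
  const found = await documents.query((d) => d.title.startsWith('Hello'))

  await documents.del('doc1') // throws if no document with that key exists
  return { hash, doc, found }
}
```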
|
||||
|
||||
/**
|
||||
* Iterates over documents.
|
||||
* @function
|
||||
* @param {Object} [filters={}] Various filters to apply to the iterator.
* @param {number} [filters.amount=-1] The number of results to fetch.
|
||||
* @yields [string, string, string] The next document as hash/key/value.
|
||||
* @memberof module:Database.Database-Documents
|
||||
* @instance
|
||||
*/
|
||||
const iterator = async function * ({ amount } = {}) {
|
||||
const keys = {}
|
||||
let count = 0
|
||||
@ -89,6 +157,14 @@ const Documents = ({ indexBy } = DefaultOptions) => async ({ ipfs, identity, add
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all documents.
|
||||
* @function
|
||||
* @returns [][string, string, string] An array of documents as hash/key/value
* entries.
|
||||
* @memberof module:Database.Database-Documents
|
||||
* @instance
|
||||
*/
|
||||
const all = async () => {
|
||||
const values = []
|
||||
for await (const entry of iterator()) {
|
||||
|
@ -1,24 +1,92 @@
|
||||
/**
|
||||
* @namespace Database-Events
|
||||
* @memberof module:Database
|
||||
* @description Events Database
|
||||
* @description Events database.
|
||||
*/
|
||||
import Database from '../database.js'
|
||||
|
||||
/**
|
||||
* Creates an instance of Events.
|
||||
* @callback Events
|
||||
* @param {Object} params One or more parameters for configuring Database.
|
||||
* @param {IPFS} params.ipfs An IPFS instance.
|
||||
* @param {Identity} [params.identity] An Identity instance.
|
||||
* @param {string} [params.address] The address of the database.
|
||||
* @param {string} [params.name] The name of the database.
|
||||
* @param {module:AccessControllers} [params.access] An AccessController
|
||||
* instance.
|
||||
* @param {string} [params.directory] A location for storing Database-related
|
||||
* data. Defaults to ./orbitdb/[params.address].
|
||||
* @param {*} [params.meta={}] The database's metadata.
|
||||
* @param {module:Storage} [params.headsStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
|
||||
* log heads. Defaults to ComposedStorage(LRUStorage, IPFSBlockStorage).
|
||||
* @param {module:Storage} [params.entryStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
|
||||
* log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
|
||||
* @param {module:Storage} [params.indexStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing an index of log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
|
||||
* @param {number} [params.referencesCount] The maximum distance between
|
||||
* references to other entries.
|
||||
* @param {boolean} [params.syncAutomatically=false] If true, sync databases
|
||||
* automatically. Otherwise, false.
|
||||
* @param {function} [params.onUpdate] A function callback. Fired when an
|
||||
* entry is added to the oplog.
|
||||
* @function
|
||||
* @instance
|
||||
* @async
|
||||
* @memberof module:Database.Database-Events
|
||||
*/
|
||||
|
||||
/**
|
||||
* Defines an Events database.
|
||||
* @returns {module:Database.Database-Events} An Events function.
|
||||
* @memberof module:Database
|
||||
*/
|
||||
const Events = () => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
|
||||
const database = await Database({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate })
|
||||
|
||||
const { addOperation, log } = database
|
||||
|
||||
/**
|
||||
* Adds an event to the store.
|
||||
* @function
|
||||
* @param {*} value The event to be added.
|
||||
* @returns {string} The hash of the new oplog entry.
|
||||
* @memberof module:Database.Database-Events
|
||||
* @instance
|
||||
*/
|
||||
const add = async (value) => {
|
||||
return addOperation({ op: 'ADD', key: null, value })
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets an event from the store by hash.
|
||||
* @function
|
||||
* @param {string} hash The hash of the event to get.
|
||||
* @returns {*} The value corresponding to hash or null.
|
||||
* @memberof module:Database.Database-Events
|
||||
* @instance
|
||||
*/
|
||||
const get = async (hash) => {
|
||||
const entry = await log.get(hash)
|
||||
return entry.payload.value
|
||||
}
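A sketch of how add, get and iterator combine, where `events` stands for an instance of this store:

```javascript
const eventsExample = async (events) => {
  const hash = await events.add('hello')   // append an event, returns its hash
  const value = await events.get(hash)     // 'hello'

  // Iterate over the two most recent events.
  for await (const event of events.iterator({ amount: 2 })) {
    console.log(event)
  }
  return value
}
```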
|
||||
|
||||
/**
|
||||
* Iterates over events.
|
||||
* @function
|
||||
* @param {Object} [filters={}] Various filters to apply to the iterator.
* @param {string} [filters.gt] All events which are greater than the
* given hash.
* @param {string} [filters.gte] All events which are greater than or equal
* to the given hash.
* @param {string} [filters.lt] All events which are less than the given
* hash.
* @param {string} [filters.lte] All events which are less than or equal to
* the given hash.
* @param {number} [filters.amount=-1] The number of results to fetch.
|
||||
* @yields [string, string] The next event as hash/value.
|
||||
* @memberof module:Database.Database-Events
|
||||
* @instance
|
||||
*/
|
||||
const iterator = async function * ({ gt, gte, lt, lte, amount } = {}) {
|
||||
const it = log.iterator({ gt, gte, lt, lte, amount })
|
||||
for await (const event of it) {
|
||||
@ -28,6 +96,13 @@ const Events = () => async ({ ipfs, identity, address, name, access, directory,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all events.
|
||||
* @function
|
||||
* @returns [][string, string] An array of events as hash/value entries.
|
||||
* @memberof module:Database.Database-Events
|
||||
* @instance
|
||||
*/
|
||||
const all = async () => {
|
||||
const values = []
|
||||
for await (const entry of iterator()) {
|
||||
|
@ -1,7 +1,17 @@
|
||||
/**
|
||||
* @namespace Database-KeyValueIndexed
|
||||
* @memberof module:Database
|
||||
* @description KeyValueIndexed Database
|
||||
* @description
|
||||
* KeyValueIndexed database.
|
||||
*
|
||||
* Key/value pairs are stored to the configured storage.
|
||||
* @example <caption>Specify a custom storage</caption>
|
||||
* import { create } from 'IPFS'
|
||||
*
|
||||
* const ipfs = await create()
|
||||
* const storage = await IPFSBlockStorage()
|
||||
* const Partial = KeyValueIndexed({ storage })
|
||||
* const keyValueIndexed = await Partial({ ipfs })
|
||||
*/
|
||||
import { KeyValue } from './index.js'
|
||||
import LevelStorage from '../storage/level.js'
|
||||
@ -9,6 +19,45 @@ import pathJoin from '../utils/path-join.js'
|
||||
|
||||
const valueEncoding = 'json'
|
||||
|
||||
/**
|
||||
* Creates an instance of KeyValueIndexed.
|
||||
* @callback KeyValueIndexed
|
||||
* @param {Object} params One or more parameters for configuring Database.
|
||||
* @param {IPFS} params.ipfs An IPFS instance.
|
||||
* @param {Identity} [params.identity] An Identity instance.
|
||||
* @param {string} [params.address] The address of the database.
|
||||
* @param {string} [params.name] The name of the database.
|
||||
* @param {module:AccessControllers} [params.access] An AccessController
|
||||
* instance.
|
||||
* @param {string} [params.directory] A location for storing Database-related
|
||||
* data. Defaults to ./orbitdb/[params.address].
|
||||
* @param {*} [params.meta={}] The database's metadata.
|
||||
* @param {module:Storage} [params.headsStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
|
||||
* log heads. Defaults to ComposedStorage(LRUStorage, IPFSBlockStorage).
|
||||
* @param {module:Storage} [params.entryStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
|
||||
* log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
|
||||
* @param {module:Storage} [params.indexStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing an index of log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
|
||||
* @param {number} [params.referencesCount] The maximum distance between
|
||||
* references to other entries.
|
||||
* @param {boolean} [params.syncAutomatically=false] If true, sync databases
|
||||
* automatically. Otherwise, false.
|
||||
* @param {function} [params.onUpdate] A function callback. Fired when an
|
||||
* entry is added to the oplog.
|
||||
* @function
|
||||
* @instance
|
||||
* @async
|
||||
* @memberof module:Database.Database-KeyValueIndexed
|
||||
*/
|
||||
|
||||
/**
|
||||
* Defines a KeyValueIndexed database.
|
||||
* @param {Object} params Various parameters for configuring the KeyValueIndexed
|
||||
* store.
|
||||
* @param {module:Storage} [params.storage=LevelStorage] A compatible storage.
|
||||
* @returns {module:Database.Database-KeyValueIndexed} A KeyValueIndexed
|
||||
* function.
|
||||
* @memberof module:Database
|
||||
*/
|
||||
const KeyValueIndexed = ({ storage } = {}) => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
|
||||
const indexDirectory = pathJoin(directory || './orbitdb', `./${address}/_index/`)
|
||||
const index = storage || await LevelStorage({ path: indexDirectory, valueEncoding })
|
||||
@ -40,6 +89,14 @@ const KeyValueIndexed = ({ storage } = {}) => async ({ ipfs, identity, address,
|
||||
// Compute the index
|
||||
await _updateIndex(keyValueStore.log)
|
||||
|
||||
/**
|
||||
* Gets a value from the store by key.
|
||||
* @function
|
||||
* @param {string} key The key of the value to get.
|
||||
* @returns {*} The value corresponding to key or null.
|
||||
* @memberof module:Database.Database-KeyValueIndexed
|
||||
* @instance
|
||||
*/
|
||||
const get = async (key) => {
|
||||
const value = await index.get(key)
|
||||
if (value) {
|
||||
@ -48,6 +105,15 @@ const KeyValueIndexed = ({ storage } = {}) => async ({ ipfs, identity, address,
|
||||
return keyValueStore.get(key)
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterates over keyvalue pairs.
|
||||
* @function
|
||||
* @param {Object} [filters={}] Various filters to apply to the iterator.
* @param {number} [filters.amount=-1] The number of results to fetch.
|
||||
* @yields [string, string, string] The next key/value as key/value/hash.
|
||||
* @memberof module:Database.Database-KeyValueIndexed
|
||||
* @instance
|
||||
*/
|
||||
const iterator = async function * ({ amount } = {}) {
|
||||
const it = keyValueStore.iterator({ amount })
|
||||
for await (const { key, value, hash } of it) {
|
||||
@ -55,11 +121,17 @@ const KeyValueIndexed = ({ storage } = {}) => async ({ ipfs, identity, address,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the index and underlying storage.
|
||||
*/
|
||||
const close = async () => {
|
||||
await index.close()
|
||||
await keyValueStore.close()
|
||||
}
|
||||
|
||||
/**
|
||||
* Drops all records from the index and underlying storage.
|
||||
*/
|
||||
const drop = async () => {
|
||||
await index.clear()
|
||||
await keyValueStore.drop()
|
||||
|
@ -1,23 +1,82 @@
|
||||
/**
|
||||
* @namespace Database-KeyValue
|
||||
* @memberof module:Database
|
||||
* @description KeyValue Database
|
||||
* @description KeyValue database.
|
||||
*/
|
||||
import Database from '../database.js'
|
||||
|
||||
/**
|
||||
* Creates an instance of KeyValue.
|
||||
* @callback KeyValue
|
||||
* @param {Object} params One or more parameters for configuring Database.
|
||||
* @param {IPFS} params.ipfs An IPFS instance.
|
||||
* @param {Identity} [params.identity] An Identity instance.
|
||||
* @param {string} [params.address] The address of the database.
|
||||
* @param {string} [params.name] The name of the database.
|
||||
* @param {module:AccessControllers} [params.access] An AccessController
|
||||
* instance.
|
||||
* @param {string} [params.directory] A location for storing Database-related
|
||||
* data. Defaults to ./orbitdb/[params.address].
|
||||
* @param {*} [params.meta={}] The database's metadata.
|
||||
* @param {module:Storage} [params.headsStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
|
||||
* log heads. Defaults to ComposedStorage(LRUStorage, IPFSBlockStorage).
|
||||
* @param {module:Storage} [params.entryStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
|
||||
* log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
|
||||
* @param {module:Storage} [params.indexStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing an index of log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
|
||||
* @param {number} [params.referencesCount] The maximum distance between
|
||||
* references to other entries.
|
||||
* @param {boolean} [params.syncAutomatically=false] If true, sync databases
|
||||
* automatically. Otherwise, false.
|
||||
* @param {function} [params.onUpdate] A function callback. Fired when an
|
||||
* entry is added to the oplog.
|
||||
* @function
|
||||
* @instance
|
||||
* @async
|
||||
* @memberof module:Database.Database-KeyValue
|
||||
*/
|
||||
|
||||
/**
|
||||
* Defines a KeyValue database.
|
||||
* @returns {module:Database.Database-KeyValue} A KeyValue function.
|
||||
* @memberof module:Database
|
||||
*/
|
||||
const KeyValue = () => async ({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate }) => {
|
||||
const database = await Database({ ipfs, identity, address, name, access, directory, meta, headsStorage, entryStorage, indexStorage, referencesCount, syncAutomatically, onUpdate })
|
||||
|
||||
const { addOperation, log } = database
|
||||
|
||||
/**
|
||||
* Stores a key/value pair to the store.
|
||||
* @function
|
||||
* @param {string} key The key to store.
|
||||
* @param {*} value The value to store.
|
||||
* @returns {string} The hash of the new oplog entry.
|
||||
* @memberof module:Database.Database-KeyValue
|
||||
* @instance
|
||||
*/
|
||||
const put = async (key, value) => {
|
||||
return addOperation({ op: 'PUT', key, value })
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a key/value pair from the store.
|
||||
* @function
|
||||
* @param {string} key The key of the key/value pair to delete.
|
||||
* @memberof module:Database.Database-KeyValue
|
||||
* @instance
|
||||
*/
|
||||
const del = async (key) => {
|
||||
return addOperation({ op: 'DEL', key, value: null })
|
||||
}
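A sketch of basic usage, where `keyValue` stands for an instance of this store:

```javascript
const keyValueExample = async (keyValue) => {
  await keyValue.put('greeting', 'hello')
  console.log(await keyValue.get('greeting')) // 'hello'

  await keyValue.del('greeting')
  console.log(await keyValue.get('greeting')) // no value once the key is deleted
}
```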
|
||||
|
||||
/**
|
||||
* Gets a value from the store by key.
|
||||
* @function
|
||||
* @param {string} key The key of the value to get.
|
||||
* @returns {*} The value corresponding to key or null.
|
||||
* @memberof module:Database.Database-KeyValue
|
||||
* @instance
|
||||
*/
|
||||
const get = async (key) => {
|
||||
for await (const entry of log.traverse()) {
|
||||
const { op, key: k, value } = entry.payload
|
||||
@ -29,6 +88,15 @@ const KeyValue = () => async ({ ipfs, identity, address, name, access, directory
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterates over keyvalue pairs.
|
||||
* @function
|
||||
* @param {Object} [filters={}] Various filters to apply to the iterator.
* @param {number} [filters.amount=-1] The number of results to fetch.
|
||||
* @yields [string, string, string] The next key/value as key/value/hash.
|
||||
* @memberof module:Database.Database-KeyValue
|
||||
* @instance
|
||||
*/
|
||||
const iterator = async function * ({ amount } = {}) {
|
||||
const keys = {}
|
||||
let count = 0
|
||||
@ -48,6 +116,14 @@ const KeyValue = () => async ({ ipfs, identity, address, name, access, directory
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all key/value pairs.
|
||||
* @function
|
||||
* @returns [][string, string, string] An array of key/value pairs as
|
||||
* key/value/hash entries.
|
||||
* @memberof module:Database.Database-KeyValue
|
||||
* @instance
|
||||
*/
|
||||
const all = async () => {
|
||||
const values = []
|
||||
for await (const entry of iterator()) {
|
||||
|
@ -1,4 +1,10 @@
|
||||
/** @module Identities */
|
||||
/**
|
||||
* @module Identities
|
||||
* @description
|
||||
* Identities provides a framework for generating and managing identity
|
||||
* details and providers.
|
||||
*/
|
||||
|
||||
import Identity, { isIdentity, isEqual, decodeIdentity } from './identity.js'
|
||||
import { PublicKeyIdentityProvider } from './providers/index.js'
|
||||
// import DIDIdentityProvider from './identity-providers/did.js'
|
||||
@ -16,7 +22,29 @@ const supportedTypes = {
|
||||
// [EthIdentityProvider.type]: EthIdentityProvider
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an instance of Identities.
|
||||
* @function
|
||||
* @param {Object} params One or more parameters for configuring Identities.
|
||||
* @param {module:KeyStore} [params.keystore] A preconfigured KeyStore.
|
||||
* A KeyStore will be created in the path defined by the path param. If neither
* keystore nor path are defined, a new KeyStore is stored under
* ./orbitdb/identities.
|
||||
* @param {string} [params.path] The path to a KeyStore. If no path is
|
||||
* provided, the default is ./orbitdb/identities.
|
||||
* @param {module:Storage} [params.storage] An instance of a compatible storage
|
||||
* module.
|
||||
* @param {IPFS} [params.ipfs] An instance of IPFS. This param is not required
|
||||
* if storage is provided.
|
||||
* @returns {module:Identities~Identities} An instance of Identities.
|
||||
* @instance
|
||||
*/
|
||||
const Identities = async ({ keystore, path, storage, ipfs } = {}) => {
|
||||
/**
|
||||
* @namespace module:Identities~Identities
|
||||
* @description The instance returned by {@link module:Identities~Identities}.
|
||||
*/
|
||||
|
||||
keystore = keystore || await KeyStore({ path: path || DefaultIdentityKeysPath })
|
||||
|
||||
if (!storage) {
|
||||
@ -27,6 +55,13 @@ const Identities = async ({ keystore, path, storage, ipfs } = {}) => {
|
||||
|
||||
const verifiedIdentitiesCache = await LRUStorage({ size: 1000 })
|
||||
|
||||
/**
|
||||
* Gets an identity by hash.
|
||||
* @param {string} hash An identity hash.
|
||||
* @returns {Identity} An instance of identity.
|
||||
* @memberof module:Identities~Identities
|
||||
* @instance
|
||||
*/
|
||||
const getIdentity = async (hash) => {
|
||||
const bytes = await storage.get(hash)
|
||||
if (bytes) {
|
||||
@ -34,6 +69,14 @@ const Identities = async ({ keystore, path, storage, ipfs } = {}) => {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an identity, adding it to storage.
|
||||
* @param {Object} options Various options for configuring a new identity.
|
||||
* @param {string} [options.type=PublicKeyIdentityProvider.type] The type of provider to use for generating an identity.
|
||||
* @returns {Identity} An instance of identity.
|
||||
* @memberof module:Identities~Identities
|
||||
* @instance
|
||||
*/
|
||||
const createIdentity = async (options = {}) => {
|
||||
options.keystore = keystore
|
||||
|
||||
@ -57,6 +100,12 @@ const Identities = async ({ keystore, path, storage, ipfs } = {}) => {
|
||||
return identity
|
||||
}
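A sketch of creating, verifying and signing with an identity using the functions above; the import path and default export are assumptions for illustration:

```javascript
import Identities from './src/identities/identities.js' // path assumed

const identitiesExample = async () => {
  const identities = await Identities()   // keystore defaults to ./orbitdb/identities
  const identity = await identities.createIdentity({ id: 'userA' })

  console.log(await identities.verifyIdentity(identity)) // true for a valid identity

  // sign() uses the identity's signing key from the keystore.
  const signature = await identities.sign(identity, 'some data')
  return signature
}
```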
|
||||
|
||||
/**
|
||||
* Verifies an identity using the identity's provider.
|
||||
* @param {Identity} identity The identity to verify.
|
||||
* @returns {boolean} True if the identity is valid, false otherwise.
|
||||
* @memberof module:Identities~Identities
|
||||
*/
|
||||
const verifyIdentity = async (identity) => {
|
||||
if (!isIdentity(identity)) {
|
||||
return false
|
||||
@ -84,6 +133,16 @@ const Identities = async ({ keystore, path, storage, ipfs } = {}) => {
|
||||
return identityVerified
|
||||
}
|
||||
|
||||
/**
|
||||
* Signs data using an identity.
|
||||
* @param {Identity} identity The identity to use for signing.
|
||||
* @param {string} data The data to sign.
|
||||
* @returns {string} The signed data.
|
||||
* @throws Private signing key not found from KeyStore when no signing key can
|
||||
* be retrieved.
|
||||
* @memberof module:Identities~Identities
|
||||
* @instance
|
||||
*/
|
||||
const sign = async (identity, data) => {
|
||||
const signingKey = await keystore.getKey(identity.id)
|
||||
|
||||
@ -108,10 +167,26 @@ const Identities = async ({ keystore, path, storage, ipfs } = {}) => {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether an identity provider is supported.
|
||||
* @param {string} type The identity provider type.
|
||||
* @returns {boolean} True if the identity provider is supported, false
|
||||
* otherwise.
|
||||
* @static
|
||||
*/
|
||||
const isProviderSupported = (type) => {
|
||||
return Object.keys(supportedTypes).includes(type)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets an identity provider.
|
||||
* @param {string} type The identity provider type.
|
||||
* @returns {IdentityProvider} The IdentityProvider module corresponding to
|
||||
* type.
|
||||
* @throws IdentityProvider type is not supported if the identity provider is
|
||||
* not supported.
|
||||
* @static
|
||||
*/
|
||||
const getProviderFor = (type) => {
|
||||
if (!isProviderSupported(type)) {
|
||||
throw new Error(`IdentityProvider type '${type}' is not supported`)
|
||||
@ -120,6 +195,15 @@ const getProviderFor = (type) => {
|
||||
return supportedTypes[type]
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an identity provider.
|
||||
* @param {IdentityProvider} IdentityProvider The identity provider to add.
|
||||
* @throws IdentityProvider must be given as an argument if no module is
|
||||
* provided.
|
||||
* @throws 'Given IdentityProvider doesn't have a field 'type' if the
|
||||
* IdentityProvider does not include a type property.
|
||||
* @static
|
||||
*/
|
||||
const addIdentityProvider = (IdentityProvider) => {
|
||||
if (!IdentityProvider) {
|
||||
throw new Error('IdentityProvider must be given as an argument')
|
||||
@ -133,6 +217,11 @@ const addIdentityProvider = (IdentityProvider) => {
|
||||
supportedTypes[IdentityProvider.type] = IdentityProvider
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes an identity provider.
|
||||
* @param {string} type The identity provider type.
|
||||
* @static
|
||||
*/
|
||||
const removeIdentityProvider = (type) => {
|
||||
delete supportedTypes[type]
|
||||
}
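A sketch of registering and removing a hypothetical identity provider with the helpers above; 'custom-auth' and the provider shape are illustrative:

```javascript
// A provider is looked up and validated by its 'type'; everything else here is
// a placeholder for whatever a real provider implements.
class CustomIdentityProvider {
  static get type () { return 'custom-auth' }
}

addIdentityProvider(CustomIdentityProvider)
console.log(isProviderSupported('custom-auth'))                       // true
console.log(getProviderFor('custom-auth') === CustomIdentityProvider) // true
removeIdentityProvider('custom-auth')
```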
|
||||
|
@ -1,3 +1,9 @@
|
||||
/**
|
||||
* @module Identity
|
||||
* @description
|
||||
* An identity.
|
||||
*/
|
||||
|
||||
import * as Block from 'multiformats/block'
|
||||
import * as dagCbor from '@ipld/dag-cbor'
|
||||
import { sha256 } from 'multiformats/hashes/sha2'
|
||||
@ -7,6 +13,26 @@ const codec = dagCbor
|
||||
const hasher = sha256
|
||||
const hashStringEncoding = base58btc
|
||||
|
||||
/**
|
||||
* Creates an instance of Identity.
|
||||
* @function
|
||||
* @param {Object} params One or more parameters for configuring an Identity.
|
||||
* @param {string} params.id A unique identifier for the identity.
|
||||
* @param {string} params.publicKey A public key.
|
||||
* @param {Object} params.signatures A signed identity id and public key.
|
||||
* @param {string} params.type The type of identity provider.
|
||||
* @param {function} params.sign A sign function.
|
||||
* @param {function} params.verify A verify function.
|
||||
* @returns {module:Identity~Identity} An instance of Identity.
|
||||
* @throws Identity id is required if id is not provided.
|
||||
* @throws Invalid public key if publicKey is not provided.
|
||||
* @throws Signatures object is required if signature is not provided.
|
||||
* @throws Signature of id is required if signature's id is not provided.
|
||||
* @throws Signature of publicKey+id is required if signature's publicKey+id is
|
||||
* not provided.
|
||||
* @throws Identity type is required if type is not provided.
|
||||
* @instance
|
||||
*/
|
||||
const Identity = async ({ id, publicKey, signatures, type, sign, verify } = {}) => {
|
||||
if (id == null) throw new Error('Identity id is required')
|
||||
if (publicKey == null) throw new Error('Invalid public key')
|
||||
@ -34,9 +60,10 @@ const Identity = async ({ id, publicKey, signatures, type, sign, verify } = {})
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode an Identity to a serializable form
|
||||
* @param {Identity} identity Identity to encode
|
||||
* @returns {Object} Object with fields hash and bytes
|
||||
* Encode an Identity to a serializable form.
|
||||
* @param {Identity} identity Identity to encode.
|
||||
* @returns {Object} Object with fields hash and bytes.
|
||||
* @static
|
||||
*/
|
||||
const _encodeIdentity = async (identity) => {
|
||||
const { id, publicKey, signatures, type } = identity
|
||||
@ -50,12 +77,19 @@ const _encodeIdentity = async (identity) => {
|
||||
* Decode an Identity from bytes.
* @param {Uint8Array} bytes Bytes from which to decode an Identity.
|
||||
* @returns {Identity}
|
||||
* @static
|
||||
*/
|
||||
const decodeIdentity = async (bytes) => {
|
||||
const { value } = await Block.decode({ bytes, codec, hasher })
|
||||
return Identity({ ...value })
|
||||
}
|
||||
|
||||
/**
|
||||
* Verifies whether an identity is valid.
|
||||
* @param {Identity} identity The identity to verify.
|
||||
* @return {boolean} True if the identity is valid, false otherwise.
|
||||
* @static
|
||||
*/
|
||||
const isIdentity = (identity) => {
|
||||
return identity.id != null &&
|
||||
identity.hash != null &&
|
||||
@ -67,6 +101,13 @@ const isIdentity = (identity) => {
|
||||
identity.type != null
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluates whether two identities are equal.
|
||||
* @param {Identity} a First identity.
|
||||
* @param {Identity} b Second identity.
|
||||
* @return {boolean} True if identity a and b are equal, false otherwise.
|
||||
* @static
|
||||
*/
|
||||
const isEqual = (a, b) => {
|
||||
return a.id === b.id &&
|
||||
a.hash === b.hash &&
|
||||
|
115	src/key-store.js
@ -1,3 +1,13 @@
|
||||
/**
|
||||
* @module KeyStore
|
||||
* @description
|
||||
* Provides a local key manager for OrbitDB.
|
||||
* @example <caption>Create a keystore with defaults.</caption>
|
||||
* const keystore = await KeyStore()
|
||||
* @example <caption>Create a keystore with custom storage.</caption>
|
||||
* const storage = await MemoryStorage()
|
||||
* const keystore = await KeyStore({ storage })
|
||||
*/
|
||||
import * as crypto from '@libp2p/crypto'
|
||||
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
|
||||
import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
|
||||
@ -9,6 +19,17 @@ import LRUStorage from './storage/lru.js'
|
||||
const unmarshal = crypto.keys.supportedKeys.secp256k1.unmarshalSecp256k1PrivateKey
|
||||
const unmarshalPubKey = crypto.keys.supportedKeys.secp256k1.unmarshalSecp256k1PublicKey
|
||||
|
||||
/**
|
||||
* Verifies a signature used for signing data.
|
||||
* @param {string} signature The generated signature.
* @param {string} publicKey The derived public key of the key pair.
* @param {string} data The data to be verified.
|
||||
* @return {boolean} True if the signature is valid, false otherwise.
|
||||
* @throws No signature given if no signature is provided.
|
||||
* @throws Given publicKey was undefined if no publicKey is provided.
|
||||
* @throws Given input data was undefined if no data is provided.
|
||||
* @static
|
||||
*/
|
||||
const verifySignature = async (signature, publicKey, data) => {
|
||||
if (!signature) {
|
||||
throw new Error('No signature given')
|
||||
@ -37,6 +58,15 @@ const verifySignature = async (signature, publicKey, data) => {
|
||||
return Promise.resolve(res)
|
||||
}
|
||||
|
||||
/**
|
||||
* Signs data using a key pair.
|
||||
* @param {string} key The key to use for signing data.
* @param {string} data The data to sign.
|
||||
* @return {string} A signature.
|
||||
* @throws No signing key given if no key is provided.
|
||||
* @throws Given input data was undefined if no data is provided.
|
||||
* @static
|
||||
*/
|
||||
const signMessage = async (key, data) => {
|
||||
if (!key) {
|
||||
throw new Error('No signing key given')
|
||||
@ -55,6 +85,14 @@ const signMessage = async (key, data) => {
|
||||
|
||||
const verifiedCachePromise = LRUStorage({ size: 1000 })
|
||||
|
||||
/**
|
||||
* Verifies input data against a cached version of the signed message.
|
||||
* @param {string} signature The generated signature.
* @param {string} publicKey The derived public key of the key pair.
* @param {string} data The data to be verified.
* @return {boolean} True if the data and cache match, false otherwise.
|
||||
* @static
|
||||
*/
|
||||
const verifyMessage = async (signature, publicKey, data) => {
|
||||
const verifiedCache = await verifiedCachePromise
|
||||
const cached = await verifiedCache.get(signature)
|
||||
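Taken together, `signMessage`, `verifySignature` and the cached `verifyMessage` form a sign/verify round trip. A hedged sketch, assuming these helpers are reachable from the KeyStore module (the export list is not shown in this hunk):

```javascript
const keystore = await KeyStore()
const keys = await keystore.createKey('userA')   // secp256k1 key pair
const publicKey = keystore.getPublic(keys)       // hex-encoded by default

const signature = await signMessage(keys, 'hello world')
await verifySignature(signature, publicKey, 'hello world') // true
await verifyMessage(signature, publicKey, 'hello world')   // true, and the result is now cached by signature
```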
@ -81,22 +119,50 @@ const defaultPath = './keystore'

/**
 * Creates an instance of KeyStore.
 * @param {Object} options Various options to use when instantiating KeyStore.
 * @param {Object} options.storage An instance of a storage class. Can be one of ComposedStorage, IPFSBlockStorage, LevelStorage, etc. Defaults to ComposedStorage.
 * @param {string} options.path The path to a valid storage. Defaults to ./keystore.
 * @return {KeyStore} An instance of KeyStore.
 * @param {Object} params One or more parameters for configuring KeyStore.
 * @param {Object} [params.storage] An instance of a storage class. Can be one
 * of ComposedStorage, IPFSBlockStorage, LevelStorage, etc. Defaults to
 * ComposedStorage.
 * @param {string} [params.path=./keystore] The path to a valid storage.
 * @return {module:KeyStore~KeyStore} An instance of KeyStore.
 * @instance
 */
const KeyStore = async ({ storage, path } = {}) => {
  /**
   * @namespace module:KeyStore~KeyStore
   * @description The instance returned by {@link module:KeyStore}.
   */
  storage = storage || await ComposedStorage(await LRUStorage({ size: 1000 }), await LevelStorage({ path: path || defaultPath }))

  /**
   * Closes the KeyStore's underlying storage.
   * @memberof module:KeyStore~KeyStore
   * @async
   * @instance
   */
  const close = async () => {
    await storage.close()
  }

  /**
   * Clears the KeyStore's underlying storage.
   * @memberof module:KeyStore~KeyStore
   * @async
   * @instance
   */
  const clear = async () => {
    await storage.clear()
  }

  /**
   * Checks if the key exists in the key store.
   * @param {string} id The id of the private key in the key store.
   * @return {boolean} True if the key exists, false otherwise.
   * @throws id needed to check a key if no id is specified.
   * @memberof module:KeyStore~KeyStore
   * @async
   * @instance
   */
  const hasKey = async (id) => {
    if (!id) {
      throw new Error('id needed to check a key')
@ -114,12 +180,27 @@ const KeyStore = async ({ storage, path } = {}) => {
    return hasKey
  }

  /**
   * Adds a key to the keystore.
   * @param {string} id A storage id for the key.
   * @param {Uint8Array} key The key to store.
   * @memberof module:KeyStore~KeyStore
   * @async
   * @instance
   */
  const addKey = async (id, key) => {
    // await storage.put('public_' + id, key.publicKey)
    await storage.put('private_' + id, key.privateKey)
  }

  const createKey = async (id, { entropy } = {}) => {
  /**
   * Creates a key, storing it to the keystore.
   * @param {string} id A storage id for the key.
   * @throws id needed to create a key if no id is specified.
   * @memberof module:KeyStore~KeyStore
   * @async
   * @instance
   */
  const createKey = async (id) => {
    if (!id) {
      throw new Error('id needed to create a key')
    }
@ -139,6 +220,15 @@ const KeyStore = async ({ storage, path } = {}) => {
    return keys
  }

  /**
   * Gets the key from keystore.
   * @param {string} id A storage id of the key.
   * @return {Uint8Array} The key specified by id.
   * @throws id needed to get a key if no id is specified.
   * @memberof module:KeyStore~KeyStore
   * @async
   * @instance
   */
  const getKey = async (id) => {
    if (!id) {
      throw new Error('id needed to get a key')
@ -158,6 +248,19 @@ const KeyStore = async ({ storage, path } = {}) => {
    return unmarshal(storedKey)
  }

  /**
   * Gets the serialized public key from a key pair.
   * @param {*} keys A key pair.
   * @param {Object} options One or more options.
   * @param {string} [options.format=hex] The format the public key should be
   * returned in.
   * @return {Uint8Array|string} The public key.
   * @throws Supported formats are `hex` and `buffer` if an invalid format is
   * passed in options.
   * @memberof module:KeyStore~KeyStore
   * @async
   * @instance
   */
  const getPublic = (keys, options = {}) => {
    const formats = ['hex', 'buffer']
    const format = options.format || 'hex'
@ -1,4 +1,9 @@
/** @namespace Manifest */
/**
 * @module Manifest
 * @description
 * A manifest provides an OrbitDB database with various descriptive information
 * including access controls and metadata.
 */
import * as Block from 'multiformats/block'
import * as dagCbor from '@ipld/dag-cbor'
import { sha256 } from 'multiformats/hashes/sha2'
@ -9,19 +14,51 @@ const codec = dagCbor
const hasher = sha256
const hashStringEncoding = base58btc

// Creates a DB manifest file and saves it in IPFS
/**
 * Creates a DB manifest file and saves it in IPFS.
 * @function
 * @param {Object} params One or more parameters for configuring Manifest.
 * @param {IPFS} params.ipfs An instance of IPFS.
 * @param {module:Storage} [params.storage=module:Storage.Storage-Composed] An instance of Storage.
 * @returns {module:Manifest} An instance of Manifest.
 * @instance
 */
const Manifest = async ({ ipfs, storage } = {}) => {
  /**
   * @namespace module:Manifest~Manifest
   * @description The instance returned by {@link module:Manifest~Manifest}.
   */

  storage = storage || await ComposedStorage(
    await LRUStorage({ size: 1000 }),
    await IPFSBlockStorage({ ipfs, pin: true })
  )

  /**
   * Gets the manifest data from the underlying storage.
   * @param {string} address The address of the manifest.
   * @returns {*} The manifest data.
   * @memberof module:Manifest~Manifest
   */
  const get = async (address) => {
    const bytes = await storage.get(address)
    const { value } = await Block.decode({ bytes, codec, hasher })
    return value
  }

  /**
   * Creates a valid manifest.
   * @param {Object} params One or more parameters for configuring Manifest.
   * @param {string} params.name The name of the database.
   * @param {string} params.type The type of the database.
   * @param {string} params.accessController The type of access controller.
   * @param {Object} params.meta Metadata.
   * @returns {Object} A hash and manifest.
   * @throws name is required if no name is provided.
   * @throws type is required if no type is provided.
   * @throws accessController is required if no access controller is provided.
   * @memberof module:Manifest~Manifest
   */
  const create = async ({ name, type, accessController, meta }) => {
    if (!name) throw new Error('name is required')
    if (!type) throw new Error('type is required')
@ -47,6 +84,10 @@ const Manifest = async ({ ipfs, storage } = {}) => {
    }
  }

  /**
   * Closes the underlying storage.
   * @memberof module:Manifest~Manifest
   */
  const close = async () => {
    await storage.close()
  }
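To make the create/get flow concrete, a minimal sketch (the `ipfs` instance and the access controller value are assumptions for illustration):

```javascript
const manifestStore = await Manifest({ ipfs })

// create() encodes the manifest as a dag-cbor block, stores it, and returns the hash and manifest.
const { hash, manifest } = await manifestStore.create({
  name: 'mydb',
  type: 'events',
  accessController: 'ipfs', // placeholder; identifies the database's access controller
  meta: {}
})

// get() decodes the stored block back into the manifest data.
const stored = await manifestStore.get(hash)

await manifestStore.close()
```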
@ -144,7 +144,7 @@ const Log = async (identity, { logId, logHeads, access, entryStorage, headsStora
   *
   * @param {data} data Payload to add to the entry
   * @param {Object} options
   * @param {Integer} options.referencesCount TODO
   * @param {number} options.referencesCount TODO
   * @return {module:Log~Entry} Entry that was appended
   * @memberof module:Log~Log
   * @instance
122
src/orbitdb.js
@ -1,4 +1,33 @@
/** @module OrbitDB */
/**
 * @module OrbitDB
 * @description
 * OrbitDB is a serverless, distributed, peer-to-peer database. OrbitDB uses
 * IPFS as its data storage and Libp2p Pubsub to automatically sync databases
 * with peers. It's an eventually consistent database that uses Merkle-CRDTs
 * for conflict-free database writes and merges, making OrbitDB an excellent
 * choice for p2p and decentralized apps, blockchain applications and local
 * first web applications.
 *
 * To install OrbitDB:
 * ```bash
 * npm install orbit-db
 * ```
 *
 * IPFS is also required:
 * ```bash
 * npm install ipfs-core
 * ```
 * @example <caption>Instantiate OrbitDB and open a new database:</caption>
 * import { create } from 'ipfs-core'
 * import OrbitDB from 'orbit-db'
 *
 * const ipfs = await create() // IPFS is required for storage and syncing
 * const orbitdb = await OrbitDB({ ipfs })
 * const mydb = await orbitdb.open('mydb')
 * const dbAddress = mydb.address // E.g. /orbitdb/zdpuAuK3BHpS7NvMBivynypqciYCuy2UW77XYBPUYRnLjnw13
 * @example <caption>Open an existing database using its multiformat address:</caption>
 * const mydb = await orbitdb.open(dbAddress)
 */
import { Events, KeyValue, Documents } from './db/index.js'
import KeyStore from './key-store.js'
import { Identities } from './identities/index.js'
@ -9,13 +38,33 @@ import pathJoin from './utils/path-join.js'
import * as AccessControllers from './access-controllers/index.js'
import IPFSAccessController from './access-controllers/ipfs.js'

// Mapping for database types
/**
 * An object mapping the available database types to their modules.
 * @name databaseTypes
 * @type {Object}
 * @memberof module:OrbitDB
 */
const databaseTypes = {
  events: Events,
  documents: Documents,
  keyvalue: KeyValue
}

/**
 * Add a new database type.
 * @example
 * import { addDatabaseType } from 'orbit-db'
 * const CustomDBTypeModule = async (params) => {
 *   const database = await Database(...params)
 *   ...
 * }
 * addDatabaseType('customDBType', CustomDBTypeModule)
 * @function addDatabaseType
 * @param {string} type The database type.
 * @param {module:Database} store A Database-compatible module.
 * @memberof module:OrbitDB
 */
const addDatabaseType = (type, store) => {
  if (databaseTypes[type]) {
    throw new Error(`Type already exists: ${type}`)
@ -24,11 +73,31 @@ const addDatabaseType = (type, store) => {
}

const DefaultDatabaseType = 'events'

const DefaultAccessController = IPFSAccessController

/**
 * Creates an instance of OrbitDB.
 * @function
 * @param {Object} params One or more parameters for configuring OrbitDB.
 * @param {IPFS} params.ipfs An IPFS instance.
 * @param {string} [params.id] The id of the OrbitDB instance.
 * @param {Identity} [params.identity] An Identity instance.
 * @param {module:KeyStore~KeyStore} [params.keystore] A KeyStore instance.
 * @param {string} [params.directory] A location for storing OrbitDB-related
 * data.
 * @return {module:OrbitDB~OrbitDB} An instance of OrbitDB.
 * @throws IPFS instance is a required argument if no IPFS instance is provided.
 * @instance
 */
const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
  /**
   * @namespace module:OrbitDB~OrbitDB
   * @description The instance returned by {@link module:OrbitDB}.
   */

  if (ipfs == null) {
    throw new Error('IPFS instance is a required argument. See https://github.com/orbitdb/orbit-db/blob/master/API.md#createinstance')
    throw new Error('IPFS instance is a required argument.')
  }

  id = id || await createId()
@ -42,6 +111,46 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {

  let databases = {}

  /**
   * Open a database or create one if it does not already exist.
   *
   * By default, OrbitDB will create a database of type [DefaultDatabaseType]{@link module:OrbitDB~DefaultDatabaseType}:
   * ```
   * const mydb = await orbitdb.open('mydb')
   * ```
   * To create a database of a different type, specify the type param:
   * ```
   * const mydb = await orbitdb.open('mydb', {type: 'documents'})
   * ```
   * The type must be listed in [databaseTypes]{@link module:OrbitDB.databaseTypes} or an error is thrown.
   * @function
   * @param {string} address The address of an existing database to open, or
   * the name of a new database.
   * @param {Object} params One or more database configuration parameters.
   * @param {string} [params.type=events] The database's type.
   * @param {*} [params.meta={}] The database's metadata.
   * @param {boolean} [params.sync=false] If true, sync databases automatically.
   * Otherwise, do not sync automatically.
   * @param {module:Database} [params.Database=[Events]{@link module:Database.Database-Events}] A Database-compatible
   * module.
   * @param {module:AccessControllers}
   * [params.AccessController=IPFSAccessController]
   * An AccessController-compatible module.
   * @param {module:Storage} [params.headsStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
   * log heads. Defaults to ComposedStorage(LRUStorage, IPFSBlockStorage).
   * @param {module:Storage} [params.entryStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing
   * log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
   * @param {module:Storage} [params.indexStorage=[ComposedStorage]{@link module:Storage.Storage-Composed}] A compatible storage instance for storing an index of log entries. Defaults to ComposedStorage(LRUStorage, LevelStorage).
   * @param {number} [params.referencesCount] The maximum distance between
   * references to other entries.
   * @return {module:Database} A database instance.
   * @throws Unsupported database type if the type specified is not in the list
   * of known databaseTypes.
   * @memberof module:OrbitDB~OrbitDB
   * @instance
   * @async
   */
  const open = async (address, { type, meta, sync, Database, AccessController, headsStorage, entryStorage, indexStorage, referencesCount } = {}) => {
    let name, manifest, accessController

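A hedged sketch of the parameters described above; the custom storage wiring is illustrative only, since the composed defaults already cover the common case:

```javascript
// Open (or create) a documents database with explicit storage for log entries.
// The storage constructors come from the Storage module.
const entryStorage = await ComposedStorage(await LRUStorage({ size: 1000 }), await MemoryStorage())

const db = await orbitdb.open('my-documents', {
  type: 'documents',
  meta: { description: 'example database' },
  entryStorage
})

console.log(db.address) // a multiformat /orbitdb/... address
```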
@ -97,6 +206,13 @@ const OrbitDB = async ({ ipfs, id, identity, keystore, directory } = {}) => {
    delete databases[address]
  }

  /**
   * Stops OrbitDB, closing the underlying keystore and manifest.
   * @function stop
   * @memberof module:OrbitDB~OrbitDB
   * @instance
   * @async
   */
  const stop = async () => {
    if (keystore) {
      await keystore.close()
@ -1,18 +1,51 @@
/**
 * @namespace Storage-Composed
 * @memberof module:Storage
 * @description
 * ComposedStorage stores data to multiple storage mechanisms.
 * @example <caption>Store to LRU and Level</caption>
 * await ComposedStorage(await LRUStorage(), await LevelStorage())
 * @example <caption>Store to memory and IPFS</caption>
 * await ComposedStorage(await MemoryStorage(), await IPFSBlockStorage())
 * @example <caption>Store to LRU and a nested ComposedStorage</caption>
 * const storage1 = await ComposedStorage(await LRUStorage(), await LevelStorage())
 * await ComposedStorage(storage1, await IPFSBlockStorage())
 */

// Compose storages:
// const storage1 = await ComposedStorage(await LRUStorage(), await LevelStorage())
// const storage2 = await ComposedStorage(storage1, await IPFSBlockStorage())

/**
 * Creates an instance of ComposedStorage.
 * @function
 * @param {module:Storage} storage1 A storage instance.
 * @param {module:Storage} storage2 A storage instance.
 * @returns {module:Storage.Storage-Composed} An instance of ComposedStorage.
 * @memberof module:Storage
 * @instance
 */
const ComposedStorage = async (storage1, storage2) => {
  /**
   * Puts data to all configured storages.
   * @function
   * @param {string} hash The hash of the data to put.
   * @param {*} data The data to store.
   * @memberof module:Storage.Storage-Composed
   * @instance
   */
  const put = async (hash, data) => {
    await storage1.put(hash, data)
    await storage2.put(hash, data)
  }

  /**
   * Gets data from the composed storage.
   *
   * Get will fetch the data from storage1 first. If no value is found, an
   * attempt is made to fetch the data from storage2. If data exists in
   * storage2 but not in storage1, the data is added to storage1.
   * @function
   * @param {string} hash The hash of the data to get.
   * @memberof module:Storage.Storage-Composed
   * @instance
   */
  const get = async (hash) => {
    let value = await storage1.get(hash)
    if (!value) {
@ -24,6 +57,13 @@ const ComposedStorage = async (storage1, storage2) => {
    return value
  }

  /**
   * Iterates over records stored in both storages.
   * @function
   * @yields [string, string] The next key/value pair from all storages.
   * @memberof module:Storage.Storage-Composed
   * @instance
   */
  const iterator = async function * () {
    const keys = []
    for (const storage of [storage1, storage2]) {
@ -36,6 +76,13 @@ const ComposedStorage = async (storage1, storage2) => {
    }
  }

  /**
   * Merges data from another source into each of the composed storages.
   * @function
   * @param {module:Storage} other Another storage instance.
   * @memberof module:Storage.Storage-Composed
   * @instance
   */
  const merge = async (other) => {
    await storage1.merge(other)
    await storage2.merge(other)
@ -43,11 +90,23 @@ const ComposedStorage = async (storage1, storage2) => {
    await other.merge(storage2)
  }

  /**
   * Calls clear on each of the composed storages.
   * @function
   * @memberof module:Storage.Storage-Composed
   * @instance
   */
  const clear = async () => {
    await storage1.clear()
    await storage2.clear()
  }

  /**
   * Calls close on each of the composed storages.
   * @function
   * @memberof module:Storage.Storage-Composed
   * @instance
   */
  const close = async () => {
    await storage1.close()
    await storage2.close()
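A small sketch of the get() fallback described above, using two memory-backed stores purely to stand in for a fast cache and a slower backing store:

```javascript
const fast = await MemoryStorage()
const slow = await MemoryStorage()
const storage = await ComposedStorage(fast, slow)

await slow.put('hash1', 'value1')         // the record exists only in the second store
const value = await storage.get('hash1')  // 'value1', found by falling back to the second store
await fast.get('hash1')                   // 'value1', back-filled into the first store by get()
```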
@ -1,4 +1,60 @@
/** @module Storage */
/**
 * @module Storage
 * @description
 * Various storage mechanisms with a common interface.
 *
 * ## Custom Storage
 * Custom storage modules can be created for special use cases. A storage
 * module must take the following form:
 * ```javascript
 * const CustomStorage = async (params) => { // drop params if not required
 *   const put = async (hash, data) => {
 *     // puts the hash and data to the underlying storage.
 *   }
 *
 *   const get = async (hash) => {
 *     // gets a record identified by hash from the underlying storage
 *   }
 *
 *   const del = async (hash) => {
 *     // deletes a record identified by hash from the underlying storage
 *   }
 *
 *   const iterator = async function * () {
 *     // iterates over the underlying storage's records
 *     // see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Generator
 *   }
 *
 *   const merge = async (other) => {
 *     // merges the records from two storages
 *   }
 *
 *   const clear = async () => {
 *     // clears all records from the underlying storage
 *   }
 *
 *   const close = async () => {
 *     // closes the underlying storage
 *   }
 *
 *   return {
 *     put,
 *     del,
 *     get,
 *     iterator,
 *     merge,
 *     clear,
 *     close
 *   }
 * }
 * ```
 * All functions must be defined but do not necessarily need to be implemented.
 * For example, if the storage does not require closing, the close function can
 * remain empty:
 * ```javascript
 * const close = async () => {}
 * ```
 */
export { default as ComposedStorage } from './composed.js'
export { default as IPFSBlockStorage } from './ipfs-block.js'
export { default as LevelStorage } from './level.js'
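Once a module follows that shape it can be dropped in anywhere a storage is accepted. A hedged sketch, assuming a `CustomStorage` defined as above:

```javascript
// Use a custom storage on its own...
const storage = await CustomStorage()
await storage.put('hash1', 'value1')

// ...or compose it with the built-in storages, for example as the backing store of a keystore.
const composed = await ComposedStorage(await LRUStorage({ size: 1000 }), storage)
const keystore = await KeyStore({ storage: composed })
```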
@ -1,17 +1,41 @@
/**
 * @namespace Storage-IPFS
 * @memberof module:Storage
 * @description
 * IPFSBlockStorage uses IPFS to store data as raw blocks.
 */
import { CID } from 'multiformats/cid'
import { base58btc } from 'multiformats/bases/base58'

const defaultTimeout = 30000

/**
 * Creates an instance of IPFSBlockStorage.
 * @function
 * @param {Object} params One or more parameters for configuring
 * IPFSBlockStorage.
 * @param {IPFS} params.ipfs An IPFS instance.
 * @param {number} [params.timeout=defaultTimeout] A timeout in ms.
 * @param {boolean} [params.pin=false] True, if the block should be pinned,
 * false otherwise.
 * @returns {module:Storage.Storage-IPFS} An instance of IPFSBlockStorage.
 * @memberof module:Storage
 * @throws An instance of ipfs is required if params.ipfs is not specified.
 * @instance
 */
const IPFSBlockStorage = async ({ ipfs, timeout, pin } = {}) => {
  if (!ipfs) throw new Error('An instance of ipfs is required.')

  timeout = timeout || defaultTimeout

  /**
   * Puts data to an IPFS block.
   * @function
   * @param {string} hash The hash of the block to put.
   * @param {*} data The data to store in the IPFS block.
   * @memberof module:Storage.Storage-IPFS
   * @instance
   */
  const put = async (hash, data) => {
    const cid = CID.parse(hash, base58btc)
    await ipfs.block.put(data, {
@ -26,6 +50,13 @@ const IPFSBlockStorage = async ({ ipfs, timeout, pin } = {}) => {

  const del = async (hash) => {}

  /**
   * Gets data from an IPFS block.
   * @function
   * @param {string} hash The hash of the block to get.
   * @memberof module:Storage.Storage-IPFS
   * @instance
   */
  const get = async (hash) => {
    const cid = CID.parse(hash, base58btc)
    const block = await ipfs.block.get(cid, { timeout })
@ -1,26 +1,65 @@
/**
 * @namespace Storage-Level
 * @memberof module:Storage
 * @description
 * LevelStorage stores data to a Level-compatible database.
 *
 * To learn more about Level, see {@link https://github.com/Level/level}.
 */
import { Level } from 'level'

const defaultPath = './level'
const defaultValueEncoding = 'view'

/**
 * Creates an instance of LevelStorage.
 * @function
 * @param {Object} [params={}] One or more parameters for configuring
 * LevelStorage.
 * @param {string} [params.path=defaultPath] The Level path.
 * @param {string} [params.valueEncoding=defaultValueEncoding] Value encoding.
 * @returns {module:Storage.Storage-Level} An instance of LevelStorage.
 * @memberof module:Storage
 * @instance
 */
const LevelStorage = async ({ path, valueEncoding } = {}) => {
  path = path || './level'
  path = path || defaultPath
  valueEncoding = valueEncoding || defaultValueEncoding

  const db = new Level(path, { valueEncoding, passive: true })
  await db.open()

  /**
   * Puts data to Level.
   * @function
   * @param {string} hash The hash of the data to put.
   * @param {*} data The data to store.
   * @memberof module:Storage.Storage-Level
   * @instance
   */
  const put = async (hash, value) => {
    await db.put(hash, value)
  }

  /**
   * Deletes data from Level.
   * @function
   * @param {string} hash The hash of the data to delete.
   * @memberof module:Storage.Storage-Level
   * @instance
   */
  const del = async (hash) => {
    await db.del(hash)
  }

  /**
   * Gets data from Level.
   * @function
   * @param {string} hash The hash of the data to get.
   * @memberof module:Storage.Storage-Level
   * @instance
   */
  const get = async (hash) => {
    try {
      const value = await db.get(hash)
@ -32,18 +71,36 @@ const LevelStorage = async ({ path, valueEncoding } = {}) => {
    }
  }

  /**
   * Iterates over records stored in Level.
   * @function
   * @yields [string, string] The next key/value pair from Level.
   * @memberof module:Storage.Storage-Level
   * @instance
   */
  const iterator = async function * () {
    for await (const [key, value] of db.iterator()) {
      yield [key, value]
    }
  }

  const merge = async (other) => {}

  /**
   * Clears the contents of the Level db.
   * @function
   * @memberof module:Storage.Storage-Level
   * @instance
   */
  const clear = async () => {
    await db.clear()
  }

  /**
   * Closes the Level db.
   * @function
   * @memberof module:Storage.Storage-Level
   * @instance
   */
  const close = async () => {
    await db.close()
  }
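A brief usage sketch of the Level-backed storage; the path is illustrative, and with the default 'view' value encoding values are stored as bytes:

```javascript
const storage = await LevelStorage({ path: './my-level-db' })

await storage.put('hash1', new TextEncoder().encode('hello'))
const value = await storage.get('hash1') // Uint8Array containing the bytes of 'hello'

for await (const [key, bytes] of storage.iterator()) {
  console.log(key, new TextDecoder().decode(bytes))
}

await storage.close()
```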
@ -1,26 +1,67 @@
/**
 * @namespace Storage-LRU
 * @memberof module:Storage
 * @description
 * LRUStorage stores data in a Least Recently Used (LRU) cache.
 */
import LRU from 'lru'

const defaultSize = 1000000

/**
 * Creates an instance of LRUStorage.
 * @function
 * @param {Object} [params={}] One or more parameters for configuring
 * LRUStorage.
 * @param {number} [params.size=defaultSize] The number of elements to store.
 * @returns {module:Storage.Storage-LRU} An instance of LRUStorage.
 * @memberof module:Storage
 * @instance
 */
const LRUStorage = async ({ size } = {}) => {
  let lru = new LRU(size || defaultSize)

  /**
   * Puts data to the LRU cache.
   * @function
   * @param {string} hash The hash of the data to put.
   * @param {*} data The data to store.
   * @memberof module:Storage.Storage-LRU
   * @instance
   */
  const put = async (hash, data) => {
    lru.set(hash, data)
  }

  /**
   * Deletes data from the LRU cache.
   * @function
   * @param {string} hash The hash of the data to delete.
   * @memberof module:Storage.Storage-LRU
   * @instance
   */
  const del = async (hash) => {
    lru.remove(hash)
  }

  /**
   * Gets data from the LRU cache.
   * @function
   * @param {string} hash The hash of the data to get.
   * @memberof module:Storage.Storage-LRU
   * @instance
   */
  const get = async (hash) => {
    return lru.get(hash)
  }

  /**
   * Iterates over records stored in the LRU cache.
   * @function
   * @yields [string, string] The next key/value pair from the LRU cache.
   * @memberof module:Storage.Storage-LRU
   * @instance
   */
  const iterator = async function * () {
    for await (const key of lru.keys) {
      const value = lru.get(key)
@ -28,6 +69,13 @@ const LRUStorage = async ({ size } = {}) => {
    }
  }

  /**
   * Merges data from another source into the LRU cache.
   * @function
   * @param {module:Storage} other Another storage instance.
   * @memberof module:Storage.Storage-LRU
   * @instance
   */
  const merge = async (other) => {
    if (other) {
      for await (const [key, value] of other.iterator()) {
@ -36,6 +84,12 @@ const LRUStorage = async ({ size } = {}) => {
    }
  }

  /**
   * Clears the contents of the LRU cache.
   * @function
   * @memberof module:Storage.Storage-LRU
   * @instance
   */
  const clear = async () => {
    lru = new LRU(size || defaultSize)
  }
@ -1,28 +1,74 @@
/**
 * @namespace Storage-Memory
 * @memberof module:Storage
 * @description
 * MemoryStorage stores data in memory.
 */

/**
 * Creates an instance of MemoryStorage.
 * @function
 * @returns {module:Storage.Storage-Memory} An instance of MemoryStorage.
 * @memberof module:Storage
 * @instance
 */
const MemoryStorage = async () => {
  let memory = {}

  /**
   * Puts data to memory.
   * @function
   * @param {string} hash The hash of the data to put.
   * @param {*} data The data to store.
   * @memberof module:Storage.Storage-Memory
   * @instance
   */
  const put = async (hash, data) => {
    memory[hash] = data
  }

  /**
   * Deletes data from memory.
   * @function
   * @param {string} hash The hash of the data to delete.
   * @memberof module:Storage.Storage-Memory
   * @instance
   */
  const del = async (hash) => {
    delete memory[hash]
  }

  /**
   * Gets data from memory.
   * @function
   * @param {string} hash The hash of the data to get.
   * @memberof module:Storage.Storage-Memory
   * @instance
   */
  const get = async (hash) => {
    return memory[hash]
  }

  /**
   * Iterates over records stored in memory.
   * @function
   * @yields [string, string] The next key/value pair from memory.
   * @memberof module:Storage.Storage-Memory
   * @instance
   */
  const iterator = async function * () {
    for await (const [key, value] of Object.entries(memory)) {
      yield [key, value]
    }
  }

  /**
   * Merges data from another source into memory.
   * @function
   * @param {module:Storage} other Another storage instance.
   * @memberof module:Storage.Storage-Memory
   * @instance
   */
  const merge = async (other) => {
    if (other) {
      for await (const [key, value] of other.iterator()) {
@ -31,6 +77,12 @@ const MemoryStorage = async () => {
    }
  }

  /**
   * Clears the contents of memory.
   * @function
   * @memberof module:Storage.Storage-Memory
   * @instance
   */
  const clear = async () => {
    memory = {}
  }
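The merge behaviour documented above copies every record from the other storage; a quick sketch:

```javascript
const a = await MemoryStorage()
await a.put('hash1', 'value1')

const b = await MemoryStorage()
await b.merge(a)       // copies a's records into b via a.iterator()
await b.get('hash1')   // 'value1'
await a.clear()        // clearing a afterwards does not affect the copy held by b
```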
@ -1,4 +1,10 @@
// Source: https://stackoverflow.com/questions/1349404/generate-random-string-characters-in-javascript
/**
 * Creates an id from an alphanumeric character list.
 * @param {number} [length=32] The length of the id.
 * @returns {string} An id.
 * @see {@link https://stackoverflow.com/questions/1349404/generate-random-string-characters-in-javascript}
 * @memberof module:Utils
 */
const createId = async (length = 32) => {
  const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
  let result = ''
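For illustration, `createId` simply draws `length` characters at random from the alphanumeric list above:

```javascript
const id = await createId()      // a 32-character alphanumeric string, different on every call
const short = await createId(8)  // an 8-character id
```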
@ -1,6 +1,12 @@
import pathJoin from './path-join.js'

// Make sure the given address has '/_access' as the last part
/**
 * Checks that the given address has '/_access' as the last part.
 * @function
 * @param {string} address The address to check.
 * @returns {string} The address appended with /_access.
 * @memberof module:Utils
 */
export default address => {
  const suffix = address.toString().split('/').pop()
  return suffix === '_access'
@ -1,3 +1,8 @@
/**
 * Various utility functions.
 * @module Utils
 */

import createId from './create-id.js'

export {
@ -1,12 +1,35 @@
/**
 * A posix-compatible version of join.
 * @function posixJoin
 * @param {...string} paths One or more strings to join.
 * @return {string} The joined strings.
 * @memberof module:Utils
 */
export const posixJoin = (...paths) => paths
  .join('/')
  .replace(/((?<=\/)\/+)|(^\.\/)|((?<=\/)\.\/)/g, '') || '.'

/**
 * A windows-compatible version of join.
 * @function win32Join
 * @param {...string} paths One or more strings to join.
 * @return {string} The joined strings.
 * @memberof module:Utils
 */
export const win32Join = (...paths) => paths
  .join('\\')
  .replace(/\//g, '\\')
  .replace(/((?<=\\)\\+)|(^\.\\)|((?<=\\)\.\\)/g, '') || '.'

/**
 * An alias for posixJoin.
 * @function join
 * @alias posixJoin
 * @param {...string} paths One or more strings to join.
 * @return {string} The joined strings.
 * @memberof module:Utils
 * @static
 */
export const join = posixJoin

export default posixJoin
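The joins normalize duplicate separators and a leading './'; a few worked examples:

```javascript
posixJoin('/orbitdb', 'zdpuExampleHash', '_access') // '/orbitdb/zdpuExampleHash/_access'
posixJoin('./orbitdb/', '/mydb')                    // 'orbitdb/mydb' (leading './' and doubled '/' removed)
win32Join('orbitdb', 'mydb')                        // 'orbitdb\mydb'
```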
@ -231,7 +231,7 @@ describe('OrbitDB', function () {
      err = e
    }
    notStrictEqual(err, undefined)
    strictEqual(err.message, 'IPFS instance is a required argument. See https://github.com/orbitdb/orbit-db/blob/master/API.md#createinstance')
    strictEqual(err.message, 'IPFS instance is a required argument.')
  })

  it('throws an error if IPFS instance is not given', async () => {
@ -242,7 +242,7 @@ describe('OrbitDB', function () {
      err = e
    }
    notStrictEqual(err, undefined)
    strictEqual(err.message, 'IPFS instance is a required argument. See https://github.com/orbitdb/orbit-db/blob/master/API.md#createinstance')
    strictEqual(err.message, 'IPFS instance is a required argument.')
  })

  it('doesn\'t create the data directory when an error occurs', async () => {