Merge branch 'master' into tendermint

This commit is contained in: commit 1e104ad2c4

.pre-commit-config.yaml (new file, 20 lines)
@@ -0,0 +1,20 @@
repos:
- repo: git://github.com/pre-commit/pre-commit-hooks
  sha: v1.1.1
  hooks:
  - id: trailing-whitespace
    args: ['--no-markdown-linebreak-ext']
  - id: check-merge-conflict
  - id: debug-statements
  - id: check-added-large-files
  - id: flake8

- repo: git://github.com/chewse/pre-commit-mirrors-pydocstyle
  sha: v2.1.1
  hooks:
  - id: pydocstyle
    # list of error codes to check, see: http://www.pydocstyle.org/en/latest/error_codes.html
    args: ['--select=D204,D201,D209,D210,D212,D300,D403']

# negate the exclude to only apply the hooks to 'bigchaindb' and 'tests' folder
exclude: '^(?!bigchaindb/)(?!tests/)'
@@ -115,6 +115,9 @@ git add new-or-changed-file-1
git add new-or-changed-file-2
git commit -m "Short description of new or changed things"
```
We use [pre-commit](http://pre-commit.com/), which should be triggered with every commit. Some hooks will change files, but others will give errors that need to be fixed. Every time a hook fails, you need to add the changed files again.
The hooks we use can be found in the [.pre-commit-config.yaml](https://github.com/bigchaindb/bigchaindb/blob/master/.pre-commit-config.yaml) file.
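If pre-commit isn't set up in your clone yet, a typical way to install and run the hooks locally looks like this (a sketch based on the standard pre-commit CLI, not commands quoted from this repository's docs):

```text
pip install pre-commit
pre-commit install          # install the git hook scripts into .git/hooks
pre-commit run --all-files  # optionally run every hook against the whole repo
```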

You will want to merge changes from upstream (i.e. the original repository) into your new branch from time to time, using something like:
```text
@@ -133,7 +136,7 @@ Make sure you've commited all the additions or changes you want to include in yo
git push origin new-branch-name
```

### Step 10 - Create a Pull Request
### Step 10 - Create a Pull Request

Go to the GitHub website and to _your_ remote bigchaindb repository (i.e. something like https://github.com/your-user-name/bigchaindb).
@@ -8,6 +8,9 @@ Our starting point is [PEP8](https://www.python.org/dev/peps/pep-0008/), the sta

BigchainDB uses Python 3.5+, so you can ignore all PEP8 guidelines specific to Python 2.

We use [pre-commit](http://pre-commit.com/) to check some of the rules below before every commit, but not all of them are covered by hooks yet.
The hooks we use can be found in the [.pre-commit-config.yaml](https://github.com/bigchaindb/bigchaindb/blob/master/.pre-commit-config.yaml) file.

### Python Docstrings

PEP8 says some things about docstrings, but not what to put in them or how to structure them. [PEP257](https://www.python.org/dev/peps/pep-0257/) was one proposal for docstring conventions, but we prefer [Google-style docstrings](https://google.github.io/styleguide/pyguide.html?showone=Comments#Comments) instead: they're easier to read and the [napoleon extension](http://www.sphinx-doc.org/en/stable/ext/napoleon.html) for Sphinx lets us turn them into nice-looking documentation. Here are some references on Google-style docstrings:
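For quick orientation, here is what a Google-style docstring looks like on a small, hypothetical function (illustrative only, not code from the repository):

```python
def count_valid_votes(votes):
    """Count the votes that are marked as valid.

    Args:
        votes (list): A list of vote dicts, each with an ``is_valid`` key.

    Returns:
        int: The number of votes whose ``is_valid`` value is truthy.
    """
    return sum(1 for vote in votes if vote.get('is_valid'))
```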
@@ -20,6 +20,7 @@ class MongoDBChangeFeed(ChangeFeed):
We emulate the behaviour of the RethinkDB changefeed by using a tailable
cursor that listens for events on the oplog.
"""

def run_forever(self):
for element in self.prefeed:
self.outqueue.put(element)
@@ -20,7 +20,6 @@ class MongoDBConnection(Connection):
def __init__(self, replicaset=None, ssl=None, login=None, password=None,
ca_cert=None, certfile=None, keyfile=None,
keyfile_passphrase=None, crlfile=None, **kwargs):

"""Create a new Connection instance.

Args:
@@ -408,8 +408,7 @@ def get_last_voted_block_id(connection, node_pubkey):

@singledispatch
def get_txids_filtered(connection, asset_id, operation=None):
"""
Return all transactions for a particular asset id and optional operation.
"""Return all transactions for a particular asset id and optional operation.

Args:
asset_id (str): ID of transaction that defined the asset
@@ -421,8 +420,7 @@ def get_txids_filtered(connection, asset_id, operation=None):

@singledispatch
def get_new_blocks_feed(connection, start_block_id):
"""
Return a generator that yields change events of the blocks feed
"""Return a generator that yields change events of the blocks feed

Args:
start_block_id (str): ID of block to resume from
@@ -4,8 +4,7 @@ from bigchaindb.backend.exceptions import ConnectionError, OperationError


class RethinkDBConnection(Connection):
"""
This class is a proxy to run queries against the database, it is:
"""This class is a proxy to run queries against the database, it is:

- lazy, since it creates a connection only when needed
- resilient, because before raising exceptions it tries
@@ -293,7 +293,7 @@ def get_votes_for_blocks_by_voter(connection, block_ids, node_pubkey):


def unwind_block_transactions(block):
""" Yield a block for each transaction in given block """
"""Yield a block for each transaction in given block"""
return block['block']['transactions'].map(lambda tx: block.merge({'tx': tx}))


@@ -1,6 +1,7 @@
class ModuleDispatchRegistrationError(Exception):
"""Raised when there is a problem registering dispatched functions for a
module"""
module
"""


def module_dispatch_registrar(module):
@@ -29,7 +29,8 @@ class KeypairNotFoundException(BigchainDBError):

class KeypairMismatchException(BigchainDBError):
"""Raised if the private key(s) provided for signing don't match any of the
current owner(s)"""
current owner(s)
"""


class OperationError(BigchainDBError):
@@ -55,7 +56,8 @@ class DoubleSpend(ValidationError):

class InvalidHash(ValidationError):
"""Raised if there was an error checking the hash for a particular
operation"""
operation
"""


class SchemaValidationError(ValidationError):
@@ -64,7 +66,8 @@ class SchemaValidationError(ValidationError):

class InvalidSignature(ValidationError):
"""Raised if there was an error checking the signature for a particular
operation"""
operation
"""


class ImproperVoteError(ValidationError):
@@ -77,7 +80,8 @@ class MultipleVotesError(ValidationError):

class TransactionNotInValidBlock(ValidationError):
"""Raised when a transfer transaction is attempting to fulfill the
outputs of a transaction that is in an invalid or undecided block"""
outputs of a transaction that is in an invalid or undecided block
"""


class AssetIdMismatch(ValidationError):
@@ -1,4 +1,4 @@
""" Schema validation related functions and data """
"""Schema validation related functions and data"""
import os.path
import logging

@@ -14,7 +14,7 @@ logger = logging.getLogger(__name__)


def _load_schema(name):
""" Load a schema from disk """
"""Load a schema from disk"""
path = os.path.join(os.path.dirname(__file__), name + '.yaml')
with open(path) as handle:
schema = yaml.safe_load(handle)
@@ -34,7 +34,7 @@ VOTE_SCHEMA_PATH, VOTE_SCHEMA = _load_schema('vote')


def _validate_schema(schema, body):
""" Validate data against a schema """
"""Validate data against a schema"""

# Note
#
@@ -59,8 +59,7 @@ def _validate_schema(schema, body):


def validate_transaction_schema(tx):
"""
Validate a transaction dict.
"""Validate a transaction dict.

TX_SCHEMA_COMMON contains properties that are common to all types of
transaction. TX_SCHEMA_[TRANSFER|CREATE] add additional constraints on top.
@@ -73,5 +72,5 @@ def validate_transaction_schema(tx):


def validate_vote_schema(vote):
""" Validate a vote dict """
"""Validate a vote dict"""
_validate_schema(VOTE_SCHEMA, vote)
@@ -128,8 +128,7 @@ class Input(object):


def _fulfillment_to_details(fulfillment):
"""
Encode a fulfillment as a details dictionary
"""Encode a fulfillment as a details dictionary

Args:
fulfillment: Crypto-conditions Fulfillment object
@@ -156,8 +155,7 @@ def _fulfillment_to_details(fulfillment):


def _fulfillment_from_details(data, _depth=0):
"""
Load a fulfillment for a signing spec dictionary
"""Load a fulfillment for a signing spec dictionary

Args:
data: tx.output[].condition.details dictionary
@@ -471,6 +469,7 @@ class Transaction(object):
Metadata to be stored along with the Transaction.
version (string): Defines the version number of a Transaction.
"""

CREATE = 'CREATE'
TRANSFER = 'TRANSFER'
GENESIS = 'GENESIS'
@@ -911,7 +910,7 @@ class Transaction(object):
tx_serialized = Transaction._to_str(tx_dict)

def validate(i, output_condition_uri=None):
""" Validate input against output condition URI """
"""Validate input against output condition URI"""
return self._input_valid(self.inputs[i], self.operation,
tx_serialized, output_condition_uri)
@@ -135,7 +135,8 @@ def env_config(config):

def update_types(config, reference, list_sep=':'):
"""Return a new configuration where all the values types
are aligned with the ones in the default configuration"""
are aligned with the ones in the default configuration
"""

def _coerce(current, value):
# Coerce a value to the `current` type.
@@ -226,7 +227,8 @@ def is_configured():

def autoconfigure(filename=None, config=None, force=False):
"""Run ``file_config`` and ``env_config`` if the module has not
been initialized."""
been initialized.
"""
if not force and is_configured():
logger.debug('System already configured, skipping autoconfiguration')
return
@@ -9,12 +9,14 @@ class BaseConsensusRules():
All methods listed below must be implemented.

"""
voting = Voting

@staticmethod
def validate_transaction(bigchain, transaction):
"""See :meth:`bigchaindb.models.Transaction.validate`
for documentation."""
for documentation.
"""
return transaction.validate(bigchain)

@staticmethod
@@ -159,8 +159,7 @@ class Bigchain(object):
return self.consensus.validate_transaction(self, transaction)

def is_new_transaction(self, txid, exclude_block_id=None):
"""
Return True if the transaction does not exist in any
"""Return True if the transaction does not exist in any
VALID or UNDECIDED block. Return False otherwise.

Args:
@@ -421,8 +420,7 @@ class Bigchain(object):
return fastquery.FastQuery(self.connection, self.me)

def get_outputs_filtered(self, owner, spent=None):
"""
Get a list of output links filtered on some criteria
"""Get a list of output links filtered on some criteria

Args:
owner (str): base58 encoded public_key.
@@ -443,8 +441,7 @@ class Bigchain(object):
return self.fastquery.filter_spent_outputs(outputs)

def get_transactions_filtered(self, asset_id, operation=None):
"""
Get a list of transactions filtered on some criteria
"""Get a list of transactions filtered on some criteria
"""
txids = backend.query.get_txids_filtered(self.connection, asset_id,
operation)
@@ -617,12 +614,12 @@ class Bigchain(object):

def block_election_status(self, block):
"""Tally the votes on a block, and return the status:
valid, invalid, or undecided."""
valid, invalid, or undecided.
"""
return self.block_election(block)['status']

def get_assets(self, asset_ids):
"""
Return a list of assets that match the asset_ids
"""Return a list of assets that match the asset_ids

Args:
asset_ids (:obj:`list` of :obj:`str`): A list of asset_ids to
@@ -634,8 +631,7 @@ class Bigchain(object):
return backend.query.get_assets(self.connection, asset_ids)

def get_metadata(self, txn_ids):
"""
Return a list of metadata that match the transaction ids (txn_ids)
"""Return a list of metadata that match the transaction ids (txn_ids)

Args:
txn_ids (:obj:`list` of :obj:`str`): A list of txn_ids to
@@ -647,8 +643,7 @@ class Bigchain(object):
return backend.query.get_metadata(self.connection, txn_ids)

def write_assets(self, assets):
"""
Writes a list of assets into the database.
"""Writes a list of assets into the database.

Args:
assets (:obj:`list` of :obj:`dict`): A list of assets to write to
@@ -657,8 +652,7 @@ class Bigchain(object):
return backend.query.write_assets(self.connection, assets)

def write_metadata(self, metadata):
"""
Writes a list of metadata into the database.
"""Writes a list of metadata into the database.

Args:
metadata (:obj:`list` of :obj:`dict`): A list of metadata to write to
@@ -667,8 +661,7 @@ class Bigchain(object):
return backend.query.write_metadata(self.connection, metadata)

def text_search(self, search, *, limit=0, table='assets'):
"""
Return an iterator of assets that match the text search
"""Return an iterator of assets that match the text search

Args:
search (str): Text search string to query the text index
@@ -7,7 +7,8 @@ POISON_PILL = 'POISON_PILL'

class EventTypes:
"""Container class that holds all the possible
events BigchainDB manages."""
events BigchainDB manages.
"""

# If you add a new Event Type, make sure to add it
# to the docs in docs/server/source/event-plugin-api.rst
@@ -4,8 +4,7 @@ from bigchaindb.common.transaction import TransactionLink


class FastQuery:
"""
Database queries that join on block results from a single node.
"""Database queries that join on block results from a single node.

* Votes are not validated for security (security is a replication concern)
* Votes come from only one node, and as such, non-byzantine fault tolerance
@@ -18,14 +17,13 @@ class FastQuery:
query multiple nodes to insure against getting an incorrect response from
a byzantine node.
"""

def __init__(self, connection, me):
self.connection = connection
self.me = me

def filter_valid_block_ids(self, block_ids, include_undecided=False):
"""
Given block ids, return only the ones that are valid.
"""
"""Given block ids, return only the ones that are valid."""
block_ids = list(set(block_ids))
votes = query.get_votes_for_blocks_by_voter(
self.connection, block_ids, self.me)
@@ -35,8 +33,7 @@ class FastQuery:
if votes.get(block_id, include_undecided)]

def filter_valid_items(self, items, block_id_key=lambda b: b[0]):
"""
Given items with block ids, return only the ones that are valid or undecided.
"""Given items with block ids, return only the ones that are valid or undecided.
"""
items = list(items)
block_ids = map(block_id_key, items)
@@ -44,9 +41,7 @@ class FastQuery:
return [b for b in items if block_id_key(b) in valid_block_ids]

def get_outputs_by_public_key(self, public_key):
"""
Get outputs for a public key
"""
"""Get outputs for a public key"""
res = list(query.get_owned_ids(self.connection, public_key))
txs = [tx for _, tx in self.filter_valid_items(res)]
return [TransactionLink(tx['id'], index)
@@ -56,8 +51,7 @@ class FastQuery:
public_key)]

def filter_spent_outputs(self, outputs):
"""
Remove outputs that have been spent
"""Remove outputs that have been spent

Args:
outputs: list of TransactionLink
@@ -71,8 +65,7 @@ class FastQuery:
return [ff for ff in outputs if ff not in spends]

def filter_unspent_outputs(self, outputs):
"""
Remove outputs that have not been spent
"""Remove outputs that have not been spent

Args:
outputs: list of TransactionLink
@@ -12,6 +12,7 @@ class HttpServerLogger(Logger):
configuration setting on gunicorn.

"""

def setup(self, cfg):
"""Setup the gunicorn access and error loggers. This overrides
the parent method. Its main goal is to simply pipe all the logs to
@@ -116,8 +116,7 @@ class LogRecordStreamHandler(StreamRequestHandler):
"""

def handle(self):
"""
Handle multiple requests - each expected to be a 4-byte length,
"""Handle multiple requests - each expected to be a 4-byte length,
followed by the LogRecord in pickle format. Logs the record
according to whatever policy is configured locally.
"""
@@ -152,10 +151,10 @@ class LogRecordStreamHandler(StreamRequestHandler):


class LogRecordSocketServer(ThreadingTCPServer):
"""
Simple TCP socket-based logging server.
"""Simple TCP socket-based logging server.

"""

allow_reuse_address = True

def __init__(self,
@@ -96,8 +96,7 @@ class Transaction(Transaction):

@classmethod
def from_db(cls, bigchain, tx_dict):
"""
Helper method that reconstructs a transaction dict that was returned
"""Helper method that reconstructs a transaction dict that was returned
from the database. It checks what asset_id to retrieve, retrieves the
asset from the asset table and reconstructs the transaction.

@@ -346,8 +345,7 @@ class Block(object):

@classmethod
def from_db(cls, bigchain, block_dict, from_dict_kwargs=None):
"""
Helper method that reconstructs a block_dict that was returned from
"""Helper method that reconstructs a block_dict that was returned from
the database. It checks what asset_ids to retrieve, retrieves the
assets from the assets table and reconstructs the block.

@@ -373,8 +371,7 @@ class Block(object):
return cls.from_dict(block_dict, **kwargs)

def decouple_assets(self, block_dict=None):
"""
Extracts the assets from the ``CREATE`` transactions in the block.
"""Extracts the assets from the ``CREATE`` transactions in the block.

Returns:
tuple: (assets, block) with the assets being a list of dicts and
@@ -395,8 +392,7 @@ class Block(object):
return (assets, block_dict)

def decouple_metadata(self, block_dict=None):
"""
Extracts the metadata from transactions in the block.
"""Extracts the metadata from transactions in the block.

Returns:
tuple: (metadatas, block) with the metadatas being a list of dict/null and
@@ -417,8 +413,7 @@ class Block(object):

@staticmethod
def couple_assets(block_dict, assets):
"""
Given a block_dict with no assets (as returned from a database call)
"""Given a block_dict with no assets (as returned from a database call)
and a list of assets, reconstruct the original block by putting the
assets back into the ``CREATE`` transactions in the block.

@@ -442,8 +437,7 @@ class Block(object):

@staticmethod
def couple_metadata(block_dict, metadatal):
"""
Given a block_dict with no metadata (as returned from a database call)
"""Given a block_dict with no metadata (as returned from a database call)
and a list of metadata, reconstruct the original block by putting the
metadata of each transaction back into its original transaction.

@@ -470,8 +464,7 @@ class Block(object):

@staticmethod
def get_asset_ids(block_dict):
"""
Given a block_dict return all the asset_ids for that block (the txid
"""Given a block_dict return all the asset_ids for that block (the txid
of CREATE transactions). Useful to know which assets to retrieve
from the database to reconstruct the block.

@@ -493,8 +486,7 @@ class Block(object):

@staticmethod
def get_txn_ids(block_dict):
"""
Given a block_dict return all the transaction ids.
"""Given a block_dict return all the transaction ids.

Args:
block_dict (:obj:`dict`): The block dict as returned from a
@@ -515,13 +507,13 @@ class Block(object):


class FastTransaction:
"""
A minimal wrapper around a transaction dictionary. This is useful for
"""A minimal wrapper around a transaction dictionary. This is useful for
when validation is not required but a routine expects something that looks
like a transaction, for example during block creation.

Note: immutability could also be provided
"""

def __init__(self, tx_dict):
self.data = tx_dict
@@ -145,7 +145,7 @@ class BlockPipeline:


def tx_collector():
""" A helper to deduplicate transactions """
"""A helper to deduplicate transactions"""

def snowflake():
txids = set()
@@ -167,7 +167,8 @@ def tx_collector():

def create_pipeline():
"""Create and return the pipeline of operations to be distributed
on different processes."""
on different processes.
"""

block_pipeline = BlockPipeline()
@@ -28,8 +28,7 @@ class Election:
self.events_queue = events_queue

def check_for_quorum(self, next_vote):
"""
Checks if block has enough invalid votes to make a decision
"""Checks if block has enough invalid votes to make a decision

Args:
next_vote: The next vote.
@@ -60,8 +59,7 @@ class Election:
})

def requeue_transactions(self, invalid_block):
"""
Liquidates transactions from invalid blocks so they can be processed again
"""Liquidates transactions from invalid blocks so they can be processed again
"""
logger.info('Rewriting %s transactions from invalid block %s',
len(invalid_block.transactions),
@@ -56,7 +56,8 @@ class StaleTransactionMonitor:

def create_pipeline(timeout=5, backlog_reassign_delay=5):
"""Create and return the pipeline of operations to be distributed
on different processes."""
on different processes.
"""

stm = StaleTransactionMonitor(timeout=timeout,
backlog_reassign_delay=backlog_reassign_delay)
@@ -155,7 +155,8 @@ class Vote:

def create_pipeline():
"""Create and return the pipeline of operations to be distributed
on different processes."""
on different processes.
"""

voter = Vote()

@@ -170,7 +171,8 @@ def create_pipeline():

def get_changefeed():
"""Create and return ordered changefeed of blocks starting from
last voted block"""
last voted block
"""
b = Bigchain()
last_block_id = b.get_last_voted_block().id
feed = backend.query.get_new_blocks_feed(b.connection, last_block_id)
@@ -83,9 +83,7 @@ def pool(builder, size, timeout=None):

# TODO: Rename this function, it's handling fulfillments not conditions
def condition_details_has_owner(condition_details, owner):
"""

Check if the public_key of owner is in the condition details
"""Check if the public_key of owner is in the condition details
as an Ed25519Fulfillment.public_key

Args:
@@ -12,8 +12,7 @@ UNDECIDED = 'undecided'


class Voting:
"""
Everything to do with verifying and counting votes for block election.
"""Everything to do with verifying and counting votes for block election.

All functions in this class should be referentially transparent, that is,
they always give the same output for a given input. This makes it easier
@@ -27,9 +26,7 @@ class Voting:

@classmethod
def block_election(cls, block, votes, keyring):
"""
Calculate the election status of a block.
"""
"""Calculate the election status of a block."""
eligible_voters = set(block['block']['voters']) & set(keyring)
n_voters = len(eligible_voters)
eligible_votes, ineligible_votes = \
@@ -43,8 +40,7 @@ class Voting:

@classmethod
def partition_eligible_votes(cls, votes, eligible_voters):
"""
Filter votes from unknown nodes or nodes that are not listed on
"""Filter votes from unknown nodes or nodes that are not listed on
block. This is the primary Sybill protection.
"""
eligible, ineligible = ([], [])
@@ -63,8 +59,7 @@ class Voting:

@classmethod
def dedupe_by_voter(cls, eligible_votes):
"""
Throw a critical error if there is a duplicate vote
"""Throw a critical error if there is a duplicate vote
"""
by_voter = {}
for vote in eligible_votes:
@@ -76,8 +71,7 @@ class Voting:

@classmethod
def count_votes(cls, by_voter):
"""
Given a list of eligible votes, (votes from known nodes that are listed
"""Given a list of eligible votes, (votes from known nodes that are listed
as voters), produce the number that say valid and the number that say
invalid. Votes must agree on previous block, otherwise they become invalid.
"""
@@ -111,8 +105,7 @@ class Voting:

@classmethod
def decide_votes(cls, n_voters, n_valid, n_invalid):
"""
Decide on votes.
"""Decide on votes.

To return VALID there must be a clear majority that say VALID
and also agree on the previous block.
@@ -127,8 +120,7 @@ class Voting:

@classmethod
def verify_vote_signature(cls, vote):
"""
Verify the signature of a vote
"""Verify the signature of a vote
"""
signature = vote.get('signature')
pk_base58 = vote.get('node_pubkey')
@@ -1,4 +1,4 @@
""" API routes definition """
"""API routes definition"""
from flask_restful import Api
from bigchaindb.web.views import (
assets,
@@ -13,7 +13,7 @@ from bigchaindb.web.views import (


def add_routes(app):
""" Add the routes to an app """
"""Add the routes to an app"""
for (prefix, routes) in API_SECTIONS:
api = Api(app, prefix=prefix)
for ((pattern, resource, *args), kwargs) in routes:
@@ -25,13 +25,13 @@ class StandaloneApplication(gunicorn.app.base.BaseApplication):
"""

def __init__(self, app, *, options=None):
'''Initialize a new standalone application.
"""Initialize a new standalone application.

Args:
app: A wsgi Python application.
options (dict): the configuration.

'''
"""
self.options = options or {}
self.application = app
super().__init__()
@@ -1,5 +1,4 @@
"""
Common classes and methods for API handlers
"""Common classes and methods for API handlers
"""
import logging
@@ -1,4 +1,4 @@
""" API Index endpoint """
"""API Index endpoint"""

import flask
from flask_restful import Resource
@@ -33,8 +33,7 @@ class ApiV1Index(Resource):


def get_api_v1_info(api_prefix):
"""
Return a dict with all the information specific for the v1 of the
"""Return a dict with all the information specific for the v1 of the
api.
"""
websocket_root = base_ws_uri() + EVENTS_ENDPOINT
setup.py (1 line changed)
@@ -32,6 +32,7 @@ dev_require = [
'ipython',
'watchdog',
'logging_tree',
'pre-commit'
]

docs_require = [
@@ -49,18 +49,18 @@ Certificate:
1a:b2:61
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints:
X509v3 Basic Constraints:
CA:FALSE
X509v3 Subject Key Identifier:
X509v3 Subject Key Identifier:
B7:F5:E7:0E:F8:D8:FE:A1:56:5B:EA:80:2F:18:71:C2:44:0C:91:D0
X509v3 Authority Key Identifier:
X509v3 Authority Key Identifier:
keyid:69:C7:85:80:64:E6:90:40:E9:30:68:88:23:D2:4D:BB:EC:DF:98:98
DirName:/C=DE/ST=Berlin/L=Berlin/O=BigchainDB GmbH/OU=ROOT-CA/CN=Test Infra Root CA/emailAddress=dev@bigchaindb.com
serial:E8:06:B2:C9:2A:9C:2E:FC

X509v3 Extended Key Usage:
X509v3 Extended Key Usage:
TLS Web Client Authentication
X509v3 Key Usage:
X509v3 Key Usage:
Digital Signature
Signature Algorithm: sha256WithRSAEncryption
78:44:00:be:10:3b:f3:40:e1:5e:e4:3a:64:99:13:71:1d:91:
@@ -49,18 +49,18 @@ Certificate:
a6:7f:b9
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints:
X509v3 Basic Constraints:
CA:FALSE
X509v3 Subject Key Identifier:
X509v3 Subject Key Identifier:
95:F3:A7:FB:99:C6:9D:91:36:06:51:31:39:EC:37:42:89:07:AB:31
X509v3 Authority Key Identifier:
X509v3 Authority Key Identifier:
keyid:69:C7:85:80:64:E6:90:40:E9:30:68:88:23:D2:4D:BB:EC:DF:98:98
DirName:/C=DE/ST=Berlin/L=Berlin/O=BigchainDB GmbH/OU=ROOT-CA/CN=Test Infra Root CA/emailAddress=dev@bigchaindb.com
serial:E8:06:B2:C9:2A:9C:2E:FC

X509v3 Extended Key Usage:
X509v3 Extended Key Usage:
TLS Web Client Authentication
X509v3 Key Usage:
X509v3 Key Usage:
Digital Signature
Signature Algorithm: sha256WithRSAEncryption
5b:42:f5:e9:cc:2a:40:8a:53:29:d9:67:2b:5d:df:25:b8:08:
@@ -49,18 +49,18 @@ Certificate:
f8:f2:1d
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints:
X509v3 Basic Constraints:
CA:FALSE
X509v3 Subject Key Identifier:
X509v3 Subject Key Identifier:
31:63:2C:98:2F:9F:6C:44:82:A9:B8:D3:06:15:95:84:D9:52:98:71
X509v3 Authority Key Identifier:
X509v3 Authority Key Identifier:
keyid:69:C7:85:80:64:E6:90:40:E9:30:68:88:23:D2:4D:BB:EC:DF:98:98
DirName:/C=DE/ST=Berlin/L=Berlin/O=BigchainDB GmbH/OU=ROOT-CA/CN=Test Infra Root CA/emailAddress=dev@bigchaindb.com
serial:E8:06:B2:C9:2A:9C:2E:FC

X509v3 Extended Key Usage:
X509v3 Extended Key Usage:
TLS Web Client Authentication
X509v3 Key Usage:
X509v3 Key Usage:
Digital Signature
Signature Algorithm: sha256WithRSAEncryption
1e:16:02:5b:35:f6:36:0a:54:bc:48:11:51:39:a1:b1:e5:39:
@@ -49,20 +49,20 @@ Certificate:
99:1f:23
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints:
X509v3 Basic Constraints:
CA:FALSE
X509v3 Subject Key Identifier:
X509v3 Subject Key Identifier:
0B:53:E2:76:40:AD:73:C4:12:6C:85:CF:36:5B:5F:FB:6E:E8:03:A7
X509v3 Authority Key Identifier:
X509v3 Authority Key Identifier:
keyid:69:C7:85:80:64:E6:90:40:E9:30:68:88:23:D2:4D:BB:EC:DF:98:98
DirName:/C=DE/ST=Berlin/L=Berlin/O=BigchainDB GmbH/OU=ROOT-CA/CN=Test Infra Root CA/emailAddress=dev@bigchaindb.com
serial:E8:06:B2:C9:2A:9C:2E:FC

X509v3 Extended Key Usage:
X509v3 Extended Key Usage:
TLS Web Server Authentication, TLS Web Client Authentication
X509v3 Key Usage:
X509v3 Key Usage:
Digital Signature, Key Encipherment
X509v3 Subject Alternative Name:
X509v3 Subject Alternative Name:
DNS:localhost, DNS:test-mdb-ssl
Signature Algorithm: sha256WithRSAEncryption
4c:14:3e:6b:af:f8:e8:69:11:2e:13:12:b7:9b:91:c7:68:01:
@@ -529,7 +529,7 @@ def test_get_assets():
assert list(cursor.sort('id', pymongo.ASCENDING)) == assets[::2]


@pytest.mark.parametrize("table", ['assets', 'metadata'])
@pytest.mark.parametrize('table', ['assets', 'metadata'])
def test_text_search(table):
from bigchaindb.backend import connect, query
conn = connect()
@@ -1,5 +1,4 @@
"""
This module is tests related to schema checking, but _not_ of granular schematic
"""This module is tests related to schema checking, but _not_ of granular schematic
properties related to validation.
"""

@@ -23,8 +22,7 @@ UNSUPPORTED_CRYPTOCONDITION_TYPES = (
# Test of schema utils

def _test_additionalproperties(node, path=''):
"""
Validate that each object node has additionalProperties set, so that
"""Validate that each object node has additionalProperties set, so that
objects with junk keys do not pass as valid.
"""
if isinstance(node, list):
@@ -1,5 +1,4 @@
"""
These are tests of the API of the Transaction class and associated classes.
"""These are tests of the API of the Transaction class and associated classes.
Tests for transaction validation are separate.
"""
from copy import deepcopy
@@ -1,5 +1,4 @@
"""
Fixtures and setup / teardown functions
"""Fixtures and setup / teardown functions

Tasks:
1. setup test database before starting the tests
@@ -1242,8 +1242,7 @@ def test_get_outputs_filtered(filter_spent, filter_unspent):

@pytest.mark.bdb
def test_cant_spend_same_input_twice_in_tx(b, genesis_block):
"""
Recreate duplicated fulfillments bug
"""Recreate duplicated fulfillments bug
https://github.com/bigchaindb/bigchaindb/issues/1099
"""
from bigchaindb.models import Transaction
@@ -15,8 +15,7 @@ from tests.pipelines.stepping import create_stepper

@contextmanager
def federation(n):
"""
Return a list of Bigchain objects and pipeline steppers to represent
"""Return a list of Bigchain objects and pipeline steppers to represent
a BigchainDB federation
"""
keys = [generate_key_pair() for _ in range(n)]
@@ -24,8 +23,7 @@ def federation(n):

@contextmanager
def make_nodes(i):
"""
make_nodes is a recursive context manager. Essentially it is doing:
"""make_nodes is a recursive context manager. Essentially it is doing:

with f(a[0]) as b0:
with f(a[1]) as b1:
@@ -159,8 +157,7 @@ def test_elect_sybill(federation_3):
@pytest.mark.bdb
@pytest.mark.genesis
def test_elect_dos(federation_3):
"""
https://github.com/bigchaindb/bigchaindb/issues/1314
"""https://github.com/bigchaindb/bigchaindb/issues/1314
Test that a node cannot block another node's opportunity to vote
on a block by writing an incorrectly signed vote
"""
@@ -171,9 +168,7 @@ def test_elect_dos(federation_3):
@pytest.mark.bdb
@pytest.mark.genesis
def test_elect_bad_block_voters_list(federation_3):
"""
See https://github.com/bigchaindb/bigchaindb/issues/1224
"""
"""See https://github.com/bigchaindb/bigchaindb/issues/1224"""
[bx, (s0, s1, s2)] = federation_3
b = s0.block.bigchain
# First remove other nodes from node 0 so that it self assigns the tx
@@ -29,7 +29,7 @@ def test_double_create(b, user_pk):
@pytest.mark.dspend
@pytest.mark.usefixtures('inputs')
def test_get_owned_ids_works_after_double_spend(b, user_pk, user_sk):
""" Test for #633 https://github.com/bigchaindb/bigchaindb/issues/633 """
"""Test for #633 https://github.com/bigchaindb/bigchaindb/issues/633"""
from bigchaindb.common.exceptions import DoubleSpend
from bigchaindb.models import Transaction
input_valid = b.get_owned_ids(user_pk).pop()
@@ -1,5 +1,4 @@
"""
Pipeline stepping is a way to advance the asynchronous data pipeline
"""Pipeline stepping is a way to advance the asynchronous data pipeline
deterministically by exposing each step separately and advancing the states
manually.

@@ -53,7 +52,7 @@ class MultipipesStepper:
self.processes = []

def add_input(self, prefix, node, next):
""" Add an input task; Reads from the outqueue of the Node """
"""Add an input task; Reads from the outqueue of the Node"""
name = '%s_%s' % (prefix, node.name)
next_name = '%s_%s' % (prefix, next.name)

@@ -77,8 +76,7 @@ class MultipipesStepper:
self.input_tasks.add(name)

def add_stage(self, prefix, node, next):
"""
Add a stage task, popping from own queue and appending to the queue
"""Add a stage task, popping from own queue and appending to the queue
of the next node
"""
f = node.target
@@ -96,7 +94,7 @@ class MultipipesStepper:
self.tasks[name] = task

def _enqueue(self, name, item):
""" internal function; add item(s) to queue) """
"""Internal function; add item(s) to queue)"""
queue = self.queues.setdefault(name, [])
if isinstance(item, types.GeneratorType):
items = list(item)
@@ -108,7 +106,7 @@ class MultipipesStepper:
queue.append(list(item))

def step(self, name, **kwargs):
""" Advance pipeline stage. Throws Empty if no data to consume. """
"""Advance pipeline stage. Throws Empty if no data to consume."""
logging.debug('Stepping %s', name)
task = self.tasks[name]
if name in self.input_tasks:
@@ -122,7 +120,7 @@ class MultipipesStepper:

@property
def counts(self):
""" Get sizes of non empty queues """
"""Get sizes of non empty queues"""
counts = {}
for name in self.queues:
n = len(self.queues[name])
@@ -131,12 +129,12 @@ class MultipipesStepper:
return counts

def __getattr__(self, name):
""" Shortcut to get a queue """
"""Shortcut to get a queue"""
return lambda **kwargs: self.step(name, **kwargs)

@contextmanager
def start(self):
""" Start async inputs; changefeeds etc """
"""Start async inputs; changefeeds etc"""
for p in self.processes:
p.start()
# It would be nice to have a better way to wait for changefeeds here.
@@ -40,8 +40,7 @@ def test_validate_transaction(b, create_tx):


def test_validate_transaction_handles_exceptions(b, signed_create_tx):
"""
This test makes sure that `BlockPipeline.validate_tx` handles possible
"""This test makes sure that `BlockPipeline.validate_tx` handles possible
exceptions from `Transaction.from_dict`.
"""
from bigchaindb.pipelines.block import BlockPipeline
@@ -1,5 +1,4 @@
"""
Test getting a list of transactions from the backend.
"""Test getting a list of transactions from the backend.

This test module defines it's own fixture which is used by all the tests.
"""
@@ -122,8 +122,7 @@ def test_decide_votes_invalid(kwargs):

@pytest.mark.parametrize('n_voters', range(8))
def test_vote_actions(n_voters):
"""
* Legal transitions are UNDECIDED -> [VALID|INVALID] only
"""* Legal transitions are UNDECIDED -> [VALID|INVALID] only
* Block is never left UNDECIDED after voting
* Accomodates rogues on previous block / invalid schema
"""
@@ -1,5 +1,4 @@
"""
All tests of transaction structure. The concern here is that transaction
"""All tests of transaction structure. The concern here is that transaction
structural / schematic issues are caught when reading a transaction
(ie going from dict -> transaction).
"""
@@ -18,7 +18,7 @@ def test_api_root_endpoint(client, wsserver_base_url):
'outputs': '/api/v1/outputs/',
'streams': '{}/api/v1/streams/valid_transactions'.format(
wsserver_base_url),
"metadata": "/api/v1/metadata/"
'metadata': '/api/v1/metadata/',
}
},
'docs': 'https://docs.bigchaindb.com/projects/server/en/vtsttst/',
@@ -42,7 +42,7 @@ def test_api_v1_endpoint(client, wsserver_base_url):
'outputs': '/outputs/',
'streams': '{}/api/v1/streams/valid_transactions'.format(
wsserver_base_url),
"metadata": "/metadata/"
'metadata': '/metadata/',
}
res = client.get('/api/v1')
assert res.json == api_v1_info
@@ -47,8 +47,8 @@ def test_post_create_transaction_endpoint(b, client):
assert res.json['outputs'][0]['public_keys'][0] == user_pub


@pytest.mark.parametrize("nested", [False, True])
@pytest.mark.parametrize("language,expected_status_code", [
@pytest.mark.parametrize('nested', [False, True])
@pytest.mark.parametrize('language,expected_status_code', [
('danish', 202), ('dutch', 202), ('english', 202), ('finnish', 202),
('french', 202), ('german', 202), ('hungarian', 202), ('italian', 202),
('norwegian', 202), ('portuguese', 202), ('romanian', 202), ('none', 202),
@@ -322,7 +322,7 @@ def test_transactions_get_list_good(client):
from functools import partial

def get_txs_patched(conn, **args):
""" Patch `get_transactions_filtered` so that rather than return an array
"""Patch `get_transactions_filtered` so that rather than return an array
of transactions it returns an array of shims with a to_dict() method
that reports one of the arguments passed to `get_transactions_filtered`.
"""