Merge remote-tracking branch 'remotes/origin/feat/127/crypto-conditions-ilp-bigchain-integration' into feat/128/multiple-input-output

This commit is contained in:
Rodolphe Marques 2016-04-08 15:12:08 +02:00
commit ab8e808ef3
37 changed files with 1290 additions and 407 deletions

6
.gitignore vendored
View File

@ -65,3 +65,9 @@ target/
# pyenv
.python-version
# Private key files from AWS
*.pem
# Some files created when deploying a cluster on AWS
deploy-cluster-aws/conf/rethinkdb.conf
deploy-cluster-aws/hostlist.py

View File

@ -19,6 +19,7 @@ Tag name: TBD
committed: TBD
### Added
- AWS deployment scripts: [Issue #151](https://github.com/bigchaindb/bigchaindb/issues/151)
- `CHANGELOG.md` (this file)
- Multisig support: [Pull Request #107](https://github.com/bigchaindb/bigchaindb/pull/107)
- API/Wire protocol (RESTful HTTP API): [Pull Request #102](https://github.com/bigchaindb/bigchaindb/pull/102)

View File

@ -0,0 +1,51 @@
# How to Handle Pull Requests
This document is for whoever has the ability to merge pull requests in the Git repositories associated with BigchainDB.
If the pull request is from an employee of ascribe GmbH, then you can ignore this document.
If the pull request is from someone who is _not_ an employee of ascribe, then:
* Have they agreed to the Individual Contributor Agreement in the past? (Troy, Greg, and others have a list.) If yes, then you can merge the PR and ignore the rest of this document.
* Do they belong to a company or organization which agreed to the Entity Contributor Agreement in the past, and will they be contributing on behalf of that company or organization? (Troy, Greg, and others have a list.) If yes, then you can merge the PR and ignore the rest of this document.
* Otherwise, go to the pull request in question and post a comment using this template:
Hi @nameofuser
Before we can merge this pull request, which may contain your intellectual property in the form of copyright or patents, our lawyers say we need you or your organization to agree to one of our contributor agreements. If you are contributing on behalf of yourself (and not on behalf of your employer or another organization you are part of) then you should:
1. Go to: https://www.bigchaindb.com/cla/
2. Read the Individual Contributor Agreement
3. Fill in the form "For Individuals"
4. Check the box to agree
5. Click the SEND button
If you're contributing as an employee, and/or you want all employees of your employing organization to be covered by our contributor agreement, then someone in your organization with the authority to enter agreements on behalf of all employees must do the following:
1. Go to: https://www.bigchaindb.com/cla/
2. Read the Entity Contributor Agreement
3. Fill in the form "For Organizations"
4. Check the box to agree
5. Click the SEND button
We will email you (or your employer) with further instructions.
(END OF COMMENT)
Once they click SEND, we (ascribe) will get an email with the information in the form. (Troy gets those emails for sure, I'm not sure who else.) The next step is to send an email to the email address submitted in the form, saying something like (where the stuff in [square brackets] should be replaced):
Hi [NAME],
The next step is for you to copy the following block of text into the comments of Pull Request #[NN] on GitHub:
BEGIN BLOCK
This is to confirm that I agreed to and accepted the BigchainDB [Entity/Individual] Contributor Agreement at https://www.bigchaindb.com/cla/ and to represent and warrant that I have authority to do so.
[Insert long random string here. One good source of those is https://www.grc.com/passwords.htm ]
END BLOCK
(END OF EMAIL)
The next step is to wait for them to copy that comment into the comments of the indicated pull request. Once they do so, it's safe to merge the pull request.

View File

@ -2,9 +2,8 @@ import os
import copy
def e(key, default=None, conv=None):
'''Get the environment variable `key`, fallback to `default`
"""Get the environment variable `key`, fallback to `default`
if nothing is found.
Keyword arguments:
@ -12,7 +11,7 @@ def e(key, default=None, conv=None):
default -- the default value if nothing is found (default: None)
conv -- a callable used to convert the value (default: use the type of the
default value)
'''
"""
val = os.environ.get(key, default)
@ -24,6 +23,9 @@ def e(key, default=None, conv=None):
config = {
'server': {
'bind': e('BIGCHAIN_SERVER_BIND', default='0.0.0.0:5000'),
},
'database': {
'host': e('BIGCHAIN_DATABASE_HOST', default='localhost'),
'port': e('BIGCHAIN_DATABASE_PORT', default=28015),
@ -49,3 +51,4 @@ config = {
# for more info.
_config = copy.deepcopy(config)
from bigchaindb.core import Bigchain # noqa

View File

@ -27,8 +27,8 @@ class Client:
3. Reading them from the `config.json` file.
Args:
public_key (str): the base58 encoded public key for the ECDSA secp256k1 curve.
private_key (str): the base58 encoded private key for the ECDSA secp256k1 curve.
public_key (str): the base58 encoded public key for the ED25519 curve.
private_key (str): the base58 encoded private key for the ED25519 curve.
api_endpoint (str): a URL where rethinkdb is running.
format: scheme://hostname:port
consensus_plugin (str): the registered name of your installed

View File

@ -55,6 +55,10 @@ def run_configure(args, skip_if_exists=False):
conf['keypair']['private'], conf['keypair']['public'] = crypto.generate_key_pair()
if not args.yes:
for key in ('host', 'port'):
val = conf['server'][key]
conf['server'][key] = input('API Server {}? (default `{}`): '.format(key, val)) or val
for key in ('host', 'port', 'name'):
val = conf['database'][key]
conf['database'][key] = input('Database {}? (default `{}`): '.format(key, val)) or val

View File

@ -3,7 +3,7 @@ from abc import ABCMeta, abstractmethod
import bigchaindb.exceptions as exceptions
from bigchaindb import util
from bigchaindb.crypto import hash_data, PublicKey
from bigchaindb import crypto
class AbstractConsensusRules(metaclass=ABCMeta):
@ -132,11 +132,12 @@ class BaseConsensusRules(AbstractConsensusRules):
else:
# check if the input exists, is owned by the current_owner
if not transaction['transaction']['fulfillments']:
raise ValueError(
'Transaction contains no fulfillments')
raise ValueError('Transaction contains no fulfillments')
# check inputs
for fulfillment in transaction['transaction']['fulfillments']:
if not fulfillment['input']:
raise ValueError('Only `CREATE` transactions can have null inputs')
tx_input = bigchain.get_transaction(fulfillment['input']['txid'])
if not tx_input:
@ -158,8 +159,8 @@ class BaseConsensusRules(AbstractConsensusRules):
for fulfillment in transaction_data['transaction']['fulfillments']:
fulfillment['fulfillment'] = None
calculated_hash = hash_data(util.serialize(
transaction_data['transaction']))
calculated_hash = crypto.hash_data(util.serialize(
transaction['transaction']))
if calculated_hash != transaction['id']:
raise exceptions.InvalidHash()
@ -187,7 +188,7 @@ class BaseConsensusRules(AbstractConsensusRules):
"""
# Check if current hash is correct
calculated_hash = hash_data(util.serialize(block['block']))
calculated_hash = crypto.hash_data(util.serialize(block['block']))
if calculated_hash != block['id']:
raise exceptions.InvalidHash()

View File

@ -3,7 +3,6 @@ import random
import json
import rapidjson
import bigchaindb
from bigchaindb import util
from bigchaindb import config_utils
@ -11,7 +10,6 @@ from bigchaindb import exceptions
from bigchaindb import crypto
from bigchaindb.monitor import Monitor
monitor = Monitor()
@ -41,8 +39,8 @@ class Bigchain(object):
host (str): hostname where the rethinkdb is running.
port (int): port in which rethinkb is running (usually 28015).
dbname (str): the name of the database to connect to (usually bigchain).
public_key (str): the base58 encoded public key for the ECDSA secp256k1 curve.
private_key (str): the base58 encoded private key for the ECDSA secp256k1 curve.
public_key (str): the base58 encoded public key for the ED25519 curve.
private_key (str): the base58 encoded private key for the ED25519 curve.
keyring (list[str]): list of base58 encoded public keys of the federation nodes.
"""
@ -181,8 +179,8 @@ class Bigchain(object):
returns `None`
"""
cursor = r.table('bigchain')\
.get_all(payload_hash, index='payload_hash')\
cursor = r.table('bigchain') \
.get_all(payload_hash, index='payload_hash') \
.run(self.conn)
transactions = list(cursor)
@ -305,7 +303,7 @@ class Bigchain(object):
# Calculate the hash of the new block
block_data = util.serialize(block)
block_hash = crypto.hash_data(block_data)
block_signature = crypto.PrivateKey(self.me_private).sign(block_data)
block_signature = crypto.SigningKey(self.me_private).sign(block_data)
block = {
'id': block_hash,
@ -426,7 +424,7 @@ class Bigchain(object):
}
vote_data = util.serialize(vote)
signature = crypto.PrivateKey(self.me_private).sign(vote_data)
signature = crypto.SigningKey(self.me_private).sign(vote_data)
vote_signed = {
'node_pubkey': self.me,
@ -446,37 +444,37 @@ class Bigchain(object):
if 'block_number' not in block:
update['block_number'] = block_number
r.table('bigchain')\
.get(vote['vote']['voting_for_block'])\
.update(update)\
.run(self.conn)
r.table('bigchain') \
.get(vote['vote']['voting_for_block']) \
.update(update) \
.run(self.conn)
def get_last_voted_block(self):
"""Returns the last block that this node voted on."""
# query bigchain for all blocks this node is a voter but didn't voted on
last_voted = r.table('bigchain')\
.filter(r.row['block']['voters'].contains(self.me))\
.filter(lambda doc: doc['votes'].contains(lambda vote: vote['node_pubkey'] == self.me))\
.order_by(r.desc('block_number'))\
.limit(1)\
last_voted = r.table('bigchain') \
.filter(r.row['block']['voters'].contains(self.me)) \
.filter(lambda doc: doc['votes'].contains(lambda vote: vote['node_pubkey'] == self.me)) \
.order_by(r.desc('block_number')) \
.limit(1) \
.run(self.conn)
# return last vote if last vote exists else return Genesis block
last_voted = list(last_voted)
if not last_voted:
return list(r.table('bigchain')
.filter(r.row['block_number'] == 0)
.run(self.conn))[0]
.filter(r.row['block_number'] == 0)
.run(self.conn))[0]
return last_voted[0]
def get_unvoted_blocks(self):
"""Return all the blocks that has not been voted by this node."""
unvoted = r.table('bigchain')\
.filter(lambda doc: doc['votes'].contains(lambda vote: vote['node_pubkey'] == self.me).not_())\
.order_by(r.asc((r.row['block']['timestamp'])))\
unvoted = r.table('bigchain') \
.filter(lambda doc: doc['votes'].contains(lambda vote: vote['node_pubkey'] == self.me).not_()) \
.order_by(r.asc((r.row['block']['timestamp']))) \
.run(self.conn)
if unvoted and unvoted[0].get('block_number') == 0:

View File

@ -1,155 +1,17 @@
# Separate all crypto code so that we can easily test several implementations
import binascii
import base58
import sha3
import bitcoin
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives import hashes
from cryptography.exceptions import InvalidSignature
class PrivateKey(object):
"""
PrivateKey instance
"""
def __init__(self, key):
"""
Instantiate the private key with the private_value encoded in base58
"""
private_value = self.decode(key)
private_numbers = self._private_value_to_cryptography_private_numbers(private_value)
self.private_key = self._cryptography_private_key_from_private_numbers(private_numbers)
def sign(self, data):
"""
Sign data with private key
"""
signer = self.private_key.signer(ec.ECDSA(hashes.SHA256()))
signer.update(data.encode('utf-8'))
signature = signer.finalize()
return binascii.hexlify(signature).decode('utf-8')
@staticmethod
def encode(private_value):
"""
Encode the decimal number private_value to base58
"""
private_value_hex = bitcoin.encode_privkey(private_value, 'hex')
private_value_base58 = base58.b58encode(bytes.fromhex(private_value_hex))
return private_value_base58
@staticmethod
def decode(key):
"""
Decode the base58 private_value to decimale
"""
private_value_hex = binascii.hexlify(base58.b58decode(key))
private_value = bitcoin.decode_privkey(private_value_hex)
return private_value
def _private_value_to_public_values(self, private_value):
"""
Return the public values from the private value
"""
public_value_x, public_value_y = bitcoin.privkey_to_pubkey(private_value)
return (public_value_x, public_value_y)
def _private_value_to_cryptography_private_numbers(self, private_value):
"""
Return an instance of cryptography PrivateNumbers from the decimal private_value
"""
public_value_x, public_value_y = self._private_value_to_public_values(private_value)
public_numbers = PublicKey._public_values_to_cryptography_public_numbers(public_value_x, public_value_y)
private_numbers = ec.EllipticCurvePrivateNumbers(private_value, public_numbers)
return private_numbers
@staticmethod
def _cryptography_private_key_from_private_numbers(private_numbers):
"""
Return an instace of cryptography PrivateKey from a cryptography instance of PrivateNumbers
"""
return private_numbers.private_key(default_backend())
class PublicKey(object):
def __init__(self, key):
"""
Instantiate the public key with the compressed public value encoded in base58
"""
public_value_x, public_value_y = self.decode(key)
public_numbers = self._public_values_to_cryptography_public_numbers(public_value_x, public_value_y)
self.public_key = self._criptography_public_key_from_public_numbers(public_numbers)
def verify(self, data, signature):
verifier = self.public_key.verifier(binascii.unhexlify(signature), ec.ECDSA(hashes.SHA256()))
verifier.update(data.encode('utf-8'))
try:
verifier.verify()
except InvalidSignature:
return False
return True
@staticmethod
def encode(public_value_x, public_value_y):
"""
Encode the public key represented by the decimal values x and y to base58
"""
public_value_compressed_hex = bitcoin.encode_pubkey([public_value_x, public_value_y], 'hex_compressed')
public_value_compressed_base58 = base58.b58encode(bytes.fromhex(public_value_compressed_hex))
return public_value_compressed_base58
@staticmethod
def decode(public_value_compressed_base58):
"""
Decode the base58 public_value to the decimal x and y values
"""
public_value_compressed_hex = binascii.hexlify(base58.b58decode(public_value_compressed_base58))
public_value_x, public_value_y = bitcoin.decode_pubkey(public_value_compressed_hex.decode())
return (public_value_x, public_value_y)
@staticmethod
def _public_values_to_cryptography_public_numbers(public_value_x, public_value_y):
"""
Return an instance of cryptography PublicNumbers from the decimal x and y values
"""
public_numbers = ec.EllipticCurvePublicNumbers(public_value_x, public_value_y, ec.SECP256K1())
return public_numbers
def _criptography_public_key_from_public_numbers(self, public_numbers):
"""
Return an instance of cryptography PublicKey from a cryptography instance of PublicNumbers
"""
return public_numbers.public_key(default_backend())
def generate_key_pair():
"""
Generate a new key pair and return the pair encoded in base58
"""
# Private key
private_key = ec.generate_private_key(ec.SECP256K1, default_backend())
private_value = private_key.private_numbers().private_value
private_value_base58 = PrivateKey.encode(private_value)
# Public key
public_key = private_key.public_key()
public_value_x, public_value_y = public_key.public_numbers().x, public_key.public_numbers().y
public_value_compressed_base58 = PublicKey.encode(public_value_x, public_value_y)
return (private_value_base58, public_value_compressed_base58)
from cryptoconditions import ed25519
def hash_data(data):
    """Return the hex digest of the SHA3-256 hash of ``data``.

    Args:
        data (str): the string to hash; it is encoded to bytes
            (default UTF-8) before hashing.

    Returns:
        str: the hex-encoded SHA3-256 digest.
    """
    hasher = sha3.sha3_256(data.encode())
    return hasher.hexdigest()
def generate_key_pair():
    """Generate a new Ed25519 keypair.

    Returns:
        tuple: ``(signing_key, verifying_key)`` — both base58-encoded
        strings (decoded from the bytes returned by cryptoconditions).
    """
    signing_key, verifying_key = ed25519.ed25519_generate_key_pair()
    return (signing_key.decode(), verifying_key.decode())
# Re-export the Ed25519 key classes so callers can use
# ``crypto.SigningKey`` / ``crypto.VerifyingKey`` without importing
# cryptoconditions directly.
SigningKey = ed25519.SigningKey
VerifyingKey = ed25519.VerifyingKey

View File

@ -3,6 +3,7 @@ import multiprocessing as mp
import rethinkdb as r
import bigchaindb
from bigchaindb import Bigchain
from bigchaindb.voter import Voter
from bigchaindb.block import Block
@ -11,6 +12,18 @@ from bigchaindb.web import server
logger = logging.getLogger(__name__)
BANNER = """
****************************************************************************
* *
* Initialization complete. BigchainDB is ready and waiting for events. *
* You can send events through the API documented at: *
* - http://docs.bigchaindb.apiary.io/ *
* *
* Listening to client connections on: {:<15} *
* *
****************************************************************************
"""
class Processes(object):
@ -68,8 +81,8 @@ class Processes(object):
block = Block(self.q_new_transaction)
# start the web api
webapi = server.create_app()
p_webapi = mp.Process(name='webapi', target=webapi.run, kwargs={'host': 'localhost'})
app_server = server.create_server(bigchaindb.config['server'])
p_webapi = mp.Process(name='webapi', target=app_server.run)
p_webapi.start()
# initialize the processes
@ -92,5 +105,4 @@ class Processes(object):
# start message
block.initialized.wait()
p_voter.initialized.wait()
logger.info('Initialization complete. BigchainDB ready and waiting for events.')
logger.info('You can send events through the API documented at http://docs.bigchaindb.apiary.io/')
logger.info(BANNER.format(bigchaindb.config['server']['bind']))

View File

@ -1,12 +1,15 @@
import copy
import json
import time
import multiprocessing as mp
from datetime import datetime
from cryptoconditions import Ed25519Fulfillment, ThresholdSha256Fulfillment
from cryptoconditions.fulfillment import Fulfillment
import bigchaindb
from bigchaindb import exceptions
from bigchaindb.crypto import PrivateKey, PublicKey, hash_data
from bigchaindb import crypto
class ProcessGroup(object):
@ -140,8 +143,11 @@ def create_tx(current_owners, new_owners, inputs, operation, payload=None):
}
}
},
}
}
"""
current_owners = current_owners if isinstance(current_owners, list) else [current_owners]
new_owners = new_owners if isinstance(new_owners, list) else [new_owners]
inputs = inputs if isinstance(inputs, list) else [inputs]
# validate arguments (owners and inputs should be lists)
if not isinstance(current_owners, list):
@ -155,7 +161,7 @@ def create_tx(current_owners, new_owners, inputs, operation, payload=None):
data = None
if payload is not None:
if isinstance(payload, dict):
hash_payload = hash_data(serialize(payload))
hash_payload = crypto.hash_data(serialize(payload))
data = {
'hash': hash_payload,
'payload': payload
@ -165,6 +171,7 @@ def create_tx(current_owners, new_owners, inputs, operation, payload=None):
# handle inputs
fulfillments = []
# transfer
if inputs:
for fid, inp in enumerate(inputs):
@ -186,9 +193,18 @@ def create_tx(current_owners, new_owners, inputs, operation, payload=None):
# handle outputs
conditions = []
for fulfillment in fulfillments:
if len(new_owners) > 1:
for new_owner in new_owners:
condition = ThresholdSha256Fulfillment(threshold=len(new_owners))
condition.add_subfulfillment(Ed25519Fulfillment(public_key=new_owner))
elif len(new_owners) == 1:
condition = Ed25519Fulfillment(public_key=new_owners[0])
conditions.append({
'new_owners': new_owners,
'condition': None,
'condition': {
'details': json.loads(condition.serialize_json()),
'uri': condition.condition.serialize_uri()
},
'cid': fulfillment['fid']
})
@ -202,7 +218,7 @@ def create_tx(current_owners, new_owners, inputs, operation, payload=None):
# serialize and convert to bytes
tx_serialized = serialize(tx)
tx_hash = hash_data(tx_serialized)
tx_hash = crypto.hash_data(tx_serialized)
# create the transaction
transaction = {
@ -215,53 +231,55 @@ def create_tx(current_owners, new_owners, inputs, operation, payload=None):
# TODO: Change sign_tx to populate the fulfillments
def sign_tx(transaction, private_key):
def sign_tx(transaction, sk):
"""Sign a transaction
A transaction signed with the `current_owner` corresponding private key.
Args:
transaction (dict): transaction to sign.
private_key (str): base58 encoded private key to create a signature of the transaction.
sk (base58 str): base58 encoded private key to create a signature of the transaction.
Returns:
dict: transaction with the `fulfillment` fields populated.
"""
b = bigchaindb.Bigchain()
private_key = PrivateKey(private_key)
sk = crypto.SigningKey(sk)
tx = copy.deepcopy(transaction)
common_data = {
'operation': transaction['transaction']['operation'],
'timestamp': transaction['transaction']['timestamp'],
'data': transaction['transaction']['data'],
'version': transaction['version'],
'id': transaction['id']
'operation': tx['transaction']['operation'],
'timestamp': tx['transaction']['timestamp'],
'data': tx['transaction']['data'],
'version': tx['version'],
'id': tx['id']
}
for fulfillment in transaction['transaction']['fulfillments']:
for fulfillment in tx['transaction']['fulfillments']:
fulfillment_message = common_data.copy()
if transaction['transaction']['operation'] in ['CREATE', 'GENESIS']:
if tx['transaction']['operation'] in ['CREATE', 'GENESIS']:
fulfillment_message.update({
'input': None,
'condition': None
})
# sign the fulfillment message
parsed_fulfillment = Ed25519Fulfillment(public_key=sk.get_verifying_key())
else:
# get previous condition
previous_tx = b.get_transaction(fulfillment['input']['txid'])
conditions = sorted(previous_tx['transaction']['conditions'], key=lambda d: d['cid'])
# update the fulfillment message
fulfillment_message.update({
'input': fulfillment['input'],
'condition': conditions[fulfillment['fid']]
})
parsed_fulfillment = Fulfillment.from_json(fulfillment_message['condition']['condition']['details'])
parsed_fulfillment.sign(serialize(fulfillment_message), sk)
signed_fulfillment = parsed_fulfillment.serialize_uri()
fulfillment.update({'fulfillment': signed_fulfillment})
# sign the fulfillment message
fulfillment_message_signature = private_key.sign(serialize(fulfillment_message))
fulfillment.update({'fulfillment': fulfillment_message_signature})
return transaction
return tx
def create_and_sign_tx(private_key, current_owner, new_owner, tx_input, operation='TRANSFER', payload=None):
@ -271,7 +289,7 @@ def create_and_sign_tx(private_key, current_owner, new_owner, tx_input, operatio
def check_hash_and_signature(transaction):
# Check hash of the transaction
calculated_hash = hash_data(serialize(transaction['transaction']))
calculated_hash = crypto.hash_data(serialize(transaction['transaction']))
if calculated_hash != transaction['id']:
raise exceptions.InvalidHash()
@ -317,10 +335,17 @@ def verify_signature(signed_transaction):
conditions = sorted(previous_tx['transaction']['conditions'], key=lambda d: d['cid'])
fulfillment_message['condition'] = conditions[fulfillment['fid']]
# verify the signature (for now lets assume there is only one owner)
public_key = PublicKey(fulfillment['current_owners'][0])
# verify the fulfillment (for now lets assume there is only one owner)
try:
parsed_fulfillment = Fulfillment.from_uri(fulfillment['fulfillment'])
except Exception:
return False
is_valid = parsed_fulfillment.validate(serialize(fulfillment_message))
is_valid = public_key.verify(serialize(fulfillment_message), fulfillment['fulfillment'])
# if not a `CREATE` transaction
if fulfillment['input']:
is_valid &= parsed_fulfillment.condition.serialize_uri() == \
fulfillment_message['condition']['condition']['uri']
if not is_valid:
return False

View File

@ -1,16 +1,56 @@
"""This module contains basic functions to instantiate the BigchainDB API. """
"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import copy
import multiprocessing
from flask import Flask
from bigchaindb import Bigchain
from bigchaindb.web import views
import gunicorn.app.base
class StandaloneApplication(gunicorn.app.base.BaseApplication):
    """Run a **wsgi** app wrapping it in a Gunicorn Base Application.

    Adapted from:
     - http://docs.gunicorn.org/en/latest/custom.html
    """

    def __init__(self, app, options=None):
        """Initialize a new standalone application.

        Args:
            app: A wsgi Python application.
            options (dict): the configuration.
        """
        self.options = options or {}
        self.application = app
        super(StandaloneApplication, self).__init__()

    def load_config(self):
        # Keep only the options Gunicorn actually recognizes and drop
        # None values, so Gunicorn's own defaults apply for the rest.
        config = dict((key, value) for key, value in self.options.items()
                      if key in self.cfg.settings and value is not None)
        for key, value in config.items():
            # not sure if we need the `key.lower` here, will just
            # keep it for now.
            self.cfg.set(key.lower(), value)

    def load(self):
        # Called by Gunicorn to obtain the wsgi application to serve.
        return self.application
def create_app(debug=False):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app (default: False).
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
@ -18,3 +58,26 @@ def create_app(debug=False):
app.config['bigchain'] = Bigchain()
app.register_blueprint(views.basic_views, url_prefix='/api/v1')
return app
def create_server(settings):
    """Wrap and return an application ready to be run.

    Args:
        settings (dict): a dictionary containing the settings, more info
            here http://docs.gunicorn.org/en/latest/settings.html

    Return:
        an initialized instance of the application.
    """
    # Work on a copy so the caller's settings dict is never mutated.
    options = copy.deepcopy(settings)
    if not options.get('workers'):
        # Gunicorn's recommended default worker count.
        options['workers'] = 2 * multiprocessing.cpu_count() + 1
    # `debug` is a Flask concern, not a Gunicorn setting — pop it out.
    flask_app = create_app(options.pop('debug', False))
    return StandaloneApplication(flask_app, options)

32
codecov.yml Normal file
View File

@ -0,0 +1,32 @@
codecov:
branch: develop # the branch to show by default
# The help text for bot says:
# "the username that will consume any oauth requests
# must have previously logged into Codecov"
# In GitHub - BigchainDB organization settings - Third-party access,
# it says, for Codecov: "approval requested by r-marques"
bot: r-marques
coverage:
precision: 2
round: down
range: "70...100"
status:
project:
target: auto
if_no_uploads: error
patch:
target: "80%"
if_no_uploads: error
ignore: # files and folders that will be removed during processing
- "deploy-cluster-aws/*"
- "docs/*"
- "tests/*"
comment:
layout: "header, diff, changes, sunburst, suggestions"
behavior: default

View File

@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
"""Shared AWS-related global constants and functions.
"""
from __future__ import unicode_literals
# Global constants
# None yet
# Functions
def get_naeips(client0):
    """Get a list of (allocated) non-associated elastic IP addresses
    (NAEIPs) on EC2.

    Args:
        client0: A client created from an EC2 resource.
            e.g. client0 = ec2.meta.client
            See http://boto3.readthedocs.org/en/latest/guide/clients.html

    Returns:
        A list of NAEIPs in the EC2 account associated with the client.
        To interpret the contents, see http://tinyurl.com/hrnuy74
    """
    # response is a dict with 2 keys: Addresses and ResponseMetadata
    # See http://tinyurl.com/hrnuy74
    response = client0.describe_addresses()
    # An address with no 'InstanceId' key is not associated with any
    # EC2 instance, i.e. it is a NAEIP.
    return [eip for eip in response['Addresses'] if 'InstanceId' not in eip]

View File

@ -0,0 +1,105 @@
#
# RethinkDB instance configuration sample
#
# - Give this file the extension .conf and put it in /etc/rethinkdb/instances.d in order to enable it.
# - See http://www.rethinkdb.com/docs/guides/startup/ for the complete documentation
# - Uncomment an option to change its value.
#
###############################
## RethinkDB configuration
###############################
### Process options
## User and group used to run rethinkdb
## Command line default: do not change user or group
## Init script default: rethinkdb user and group
# runuser=rethinkdb
# rungroup=rethinkdb
## Stash the pid in this file when the process is running
## Note for systemd users: Systemd uses its own internal mechanism. Do not set this parameter.
## Command line default: none
## Init script default: /var/run/rethinkdb/<name>/pid_file (where <name> is the name of this config file without the extension)
# pid-file=/var/run/rethinkdb/rethinkdb.pid
### File path options
## Directory to store data and metadata
## Command line default: ./rethinkdb_data
## Init script default: /var/lib/rethinkdb/<name>/ (where <name> is the name of this file without the extension)
directory=/data
## Log file options
## Default: <directory>/log_file
#log-file=/var/log/rethinkdb
### Network options
## Address of local interfaces to listen on when accepting connections
## May be 'all' or an IP address, loopback addresses are enabled by default
## Default: all local addresses
# bind=127.0.0.1
bind=all
## Address that other rethinkdb instances will use to connect to this server.
## It can be specified multiple times
# canonical-address=
## The port for rethinkdb protocol for client drivers
## Default: 28015 + port-offset
# driver-port=28015
## The port for receiving connections from other nodes
## Default: 29015 + port-offset
# cluster-port=29015
## The host:port of a node that rethinkdb will connect to
## This option can be specified multiple times.
## Default: none
# join=example.com:29015
## All ports used locally will have this value added
## Default: 0
# port-offset=0
## r.http(...) queries will use the given server as a web proxy
## Default: no proxy
# reql-http-proxy=socks5://example.com:1080
### Web options
## Port for the http admin console
## Default: 8080 + port-offset
# http-port=8080
## Disable web administration console
# no-http-admin
### CPU options
## The number of cores to use
## Default: total number of cores of the CPU
# cores=2
### Memory options
## Size of the cache in MB
## Default: Half of the available RAM on startup
# cache-size=1024
### Disk
## How many simultaneous I/O operations can happen at the same time
# io-threads=64
#io-threads=128
## Enable direct I/O
direct-io
### Meta
## The name for this server (as will appear in the metadata).
## If not specified, it will be randomly chosen from a short list of names.
# server-name=server1

View File

@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
"""(Re)create the RethinkDB configuration file conf/rethinkdb.conf.
Start with conf/rethinkdb.conf.template
then append additional configuration settings (lines).
"""
from __future__ import unicode_literals
import os
import os.path
import shutil
from hostlist import hosts_dev
# cwd = current working directory
old_cwd = os.getcwd()
os.chdir('conf')

# Remove any stale config file so the copy below starts fresh
if os.path.isfile('rethinkdb.conf'):
    os.remove('rethinkdb.conf')

# Create the initial rethinkdb.conf using rethinkdb.conf.template
shutil.copy2('rethinkdb.conf.template', 'rethinkdb.conf')

# Append additional lines to rethinkdb.conf: one join= line per host in
# hosts_dev, so each RethinkDB instance knows peers to connect to
with open('rethinkdb.conf', 'a') as f:
    f.write('## The host:port of a node that RethinkDB will connect to\n')
    for public_dns_name in hosts_dev:
        f.write('join=' + public_dns_name + ':29015\n')

# Restore the original working directory
os.chdir(old_cwd)
# Note: The original code by Andreas wrote a file with lines of the form
# join=public_dns_name_0:29015
# join=public_dns_name_1:29015
# but it stopped about halfway through the list of public_dns_names
# (publist). In principle, it's only strictly necessary to
# have one join= line.
# Maybe Andreas thought that more is better, but all is too much?
# Below is Andreas' original code. -Troy
# lfile = open('add2dbconf', 'w')
# before = 'join='
# after = ':29015'
# lfile.write('## The host:port of a node that rethinkdb will connect to\n')
# for entry in range(0,int(len(publist)/2)):
# lfile.write(before + publist[entry] + after + '\n')

View File

@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
""" Generating genesis block
"""
from __future__ import with_statement, unicode_literals
from fabric import colors as c
from fabric.api import *
from fabric.api import local, puts, settings, hide, abort, lcd, prefix
from fabric.api import run, sudo, cd, get, local, lcd, env, hide
from fabric.api import task, parallel
from fabric.contrib import files
from fabric.contrib.files import append, exists
from fabric.contrib.console import confirm
from fabric.contrib.project import rsync_project
from fabric.operations import run, put
from fabric.context_managers import settings
from fabric.decorators import roles
from fabtools import *
# Fabric connection settings: connect as the 'ubuntu' user using the
# AWS private key stored locally in pem/bigchaindb.pem.
env.user = 'ubuntu'
env.key_filename = 'pem/bigchaindb.pem'


@task
def init_bigchaindb():
    """Start BigchainDB on the target host (per the module docstring,
    this generates the genesis block).

    pty=False so the backgrounded remote process survives after the
    SSH session closes.
    """
    run('bigchaindb -y start &', pty = False)

197
deploy-cluster-aws/fabfile.py vendored Normal file
View File

@ -0,0 +1,197 @@
# -*- coding: utf-8 -*-
"""A fabfile with functionality to prepare, install, and configure
bigchaindb, including its storage backend.
"""
from __future__ import with_statement, unicode_literals
import requests
from time import *
import os
from datetime import datetime, timedelta
import json
from pprint import pprint
from fabric import colors as c
from fabric.api import *
from fabric.api import local, puts, settings, hide, abort, lcd, prefix
from fabric.api import run, sudo, cd, get, local, lcd, env, hide
from fabric.api import task, parallel
from fabric.contrib import files
from fabric.contrib.files import append, exists
from fabric.contrib.console import confirm
from fabric.contrib.project import rsync_project
from fabric.operations import run, put
from fabric.context_managers import settings
from fabric.decorators import roles
from fabtools import *
from hostlist import hosts_dev
# Deploy to all hosts listed in hostlist.py
# (that file is written by launch_ec2_nodes.py).
env.hosts = hosts_dev

# Two Fabric roles: "role1" is every node; "role2" is just the first node.
env.roledefs = {
    "role1": hosts_dev,
    "role2": [hosts_dev[0]],
}
env.roles = ["role1"]

# Connect as the 'ubuntu' user using the local AWS private key file.
env.user = 'ubuntu'
env.key_filename = 'pem/bigchaindb.pem'
######################################################################

# base software rollout
@task
@parallel
def install_base_software():
    """Install OS-level build and runtime dependencies on every host.

    Runs in parallel on all hosts. Must run before RethinkDB or
    BigchainDB can be installed.
    """
    # new from Troy April 5, 2016. Why? See http://tinyurl.com/lccfrsj
    # sudo('rm -rf /var/lib/apt/lists/*')
    # sudo('apt-get -y clean')
    # from before:
    sudo('apt-get -y update')
    # Finish any interrupted dpkg runs and repair broken dependencies
    # before installing anything new.
    sudo('dpkg --configure -a')
    sudo('apt-get -y -f install')
    sudo('apt-get -y install build-essential wget bzip2 ca-certificates \
                    libglib2.0-0 libxext6 libsm6 libxrender1 libssl-dev \
                    git gcc g++ python-dev libboost-python-dev libffi-dev \
                    software-properties-common python-software-properties \
                    python3-pip ipython3 sysstat s3cmd')
# RethinkDB
@task
@parallel
def install_rethinkdb():
    """Install and start RethinkDB on every host.

    Prepares the /data mount point (formatting /dev/xvdb when possible),
    installs the rethinkdb package from the official apt repository,
    pushes the locally-generated config file, and restarts the service.
    """
    with settings(warn_only=True):
        # preparing filesystem
        sudo("mkdir -p /data")
        # Locally mounted storage (m3.2xlarge, but also c3.xxx)
        try:
            sudo("umount /mnt")
            sudo("mkfs -t ext4 /dev/xvdb")
            sudo("mount /dev/xvdb /data")
        except Exception:
            # Best-effort: the device may be missing or already mounted.
            # (Was a bare `except:`, which would also have swallowed
            # KeyboardInterrupt/SystemExit.)
            pass

        # persist settings to fstab
        sudo("rm -rf /etc/fstab")
        sudo("echo 'LABEL=cloudimg-rootfs / ext4 defaults,discard 0 0' >> /etc/fstab")
        sudo("echo '/dev/xvdb /data ext4 defaults,noatime 0 0' >> /etc/fstab")
        # activate deadline scheduler
        with settings(sudo_user='root'):
            sudo("echo deadline > /sys/block/xvdb/queue/scheduler")
        # install rethinkdb
        sudo("echo 'deb http://download.rethinkdb.com/apt trusty main' | sudo tee /etc/apt/sources.list.d/rethinkdb.list")
        sudo("wget -qO- http://download.rethinkdb.com/apt/pubkey.gpg | sudo apt-key add -")
        sudo("apt-get update")
        sudo("apt-get -y install rethinkdb")
        # change fs to user
        sudo('chown -R rethinkdb:rethinkdb /data')
        # copy config file to target system
        # 0o600: owner read/write only. The 0o prefix works on both
        # Python 2.6+ and Python 3, unlike the old 0600 literal (which
        # is a syntax error under Python 3); the value is identical.
        put('conf/rethinkdb.conf',
            '/etc/rethinkdb/instances.d/instance1.conf', mode=0o600, use_sudo=True)
        # initialize data-dir
        sudo('rm -rf /data/*')
        # finally restart instance
        sudo('/etc/init.d/rethinkdb restart')
# bigchaindb deployment
@task
@parallel
def install_bigchaindb():
    """Install the bigchaindb package from PyPI (for Python 3) on every host."""
    sudo('python3 -m pip install bigchaindb')
# startup all nodes of bigchaindb in cluster
@task
@parallel
def start_bigchaindb_nodes():
    """Start BigchainDB on every host inside a detached screen session.

    pty=False so the remote process keeps running after Fabric's SSH
    session closes.
    """
    sudo('screen -d -m bigchaindb -y start &', pty=False)
@task
def install_newrelic():
    """Install and start the New Relic system monitoring daemon.

    NOTE(review): the license key is hard-coded below and committed to
    source control; it should be moved to configuration.
    """
    with settings(warn_only=True):
        sudo('echo deb http://apt.newrelic.com/debian/ newrelic non-free >> /etc/apt/sources.list')
        # sudo('apt-key adv --keyserver hkp://subkeys.pgp.net --recv-keys 548C16BF')
        sudo('apt-get update')
        sudo('apt-get -y --force-yes install newrelic-sysmond')
        sudo('nrsysmond-config --set license_key=c88af00c813983f8ee12e9b455aa13fde1cddaa8')
        sudo('/etc/init.d/newrelic-sysmond restart')
###############################
# Security / FirewallStuff next
###############################

@task
def harden_sshd():
    """Security harden sshd.

    Edits /etc/ssh/sshd_config in place (via sed) to turn off password
    authentication and root login.
    """
    # Disable password authentication
    sed('/etc/ssh/sshd_config',
        '#PasswordAuthentication yes',
        'PasswordAuthentication no',
        use_sudo=True)
    # Deny root login
    sed('/etc/ssh/sshd_config',
        'PermitRootLogin yes',
        'PermitRootLogin no',
        use_sudo=True)
@task
def disable_root_login():
    """Disable `root` login for even more security. Access to `root` account
    is now possible by first connecting with your dedicated maintenance
    account and then running ``sudo su -``."""
    # Lock the root password so password-based root login is impossible.
    sudo('passwd --lock root')
@task
def set_fw():
    """Configure iptables firewall rules and persist them.

    NOTE(review): several rules ACCEPT the RethinkDB driver/cluster
    ports (28015/29015) from anywhere -- tighten before production use.
    """
    # snmp
    sudo('iptables -A INPUT -p tcp --dport 161 -j ACCEPT')
    sudo('iptables -A INPUT -p udp --dport 161 -j ACCEPT')
    # dns
    sudo('iptables -A OUTPUT -p udp -o eth0 --dport 53 -j ACCEPT')
    sudo('iptables -A INPUT -p udp -i eth0 --sport 53 -j ACCEPT')
    # rethinkdb
    sudo('iptables -A INPUT -p tcp --dport 28015 -j ACCEPT')
    sudo('iptables -A INPUT -p udp --dport 28015 -j ACCEPT')
    sudo('iptables -A INPUT -p tcp --dport 29015 -j ACCEPT')
    sudo('iptables -A INPUT -p udp --dport 29015 -j ACCEPT')
    # RethinkDB web UI (8080): allow generally, drop on eth0, but
    # re-ACCEPT eth0 traffic originating from localhost (rule order matters:
    # -I inserts the exception ahead of the DROP).
    sudo('iptables -A INPUT -p tcp --dport 8080 -j ACCEPT')
    sudo('iptables -A INPUT -i eth0 -p tcp --dport 8080 -j DROP')
    sudo('iptables -I INPUT -i eth0 -s 127.0.0.1 -p tcp --dport 8080 -j ACCEPT')
    # save rules
    sudo('iptables-save > /etc/sysconfig/iptables')
#########################################################
# some helper-functions to handle bad behavior of cluster
#########################################################

# rebuild indexes
@task
@parallel
def rebuild_indexes():
    """Rebuild outdated RethinkDB secondary indexes, two at a time (-n 2)."""
    run('rethinkdb index-rebuild -n 2')
@task
def stopdb():
    """Stop the RethinkDB service."""
    sudo('service rethinkdb stop')


@task
def startdb():
    """Start the RethinkDB service."""
    sudo('service rethinkdb start')


@task
def restartdb():
    """Restart the RethinkDB service (via its init script)."""
    sudo('/etc/init.d/rethinkdb restart')

View File

@ -0,0 +1,194 @@
# -*- coding: utf-8 -*-
"""This script:
0. allocates more elastic IP addresses if necessary,
1. launches the specified number of nodes (instances) on Amazon EC2,
2. tags them with the specified tag,
3. waits until those instances exist and are running,
4. for each instance, it associates an elastic IP address
with that instance,
5. writes the shellscript add2known_hosts.sh
6. (over)writes a file named hostlist.py
containing a list of all public DNS names.
"""
from __future__ import unicode_literals
import sys
import time
import argparse
import botocore
import boto3
from awscommon import (
get_naeips,
)
# First, ensure they're using Python 2.5-2.7 (the versions Fabric supports);
# abort with a non-zero exit status otherwise.
pyver = sys.version_info
major, minor = pyver[0], pyver[1]
print('You are in an environment where "python" is Python {}.{}'.
      format(major, minor))
if not (major == 2 and 5 <= minor <= 7):
    print('but Fabric only works with Python 2.5-2.7')
    sys.exit(1)
# Parse the command-line arguments
parser = argparse.ArgumentParser()
parser.add_argument("--tag",
                    help="tag to add to all launched instances on AWS",
                    required=True)
parser.add_argument("--nodes",
                    help="number of nodes in the cluster",
                    required=True,
                    type=int)
args = parser.parse_args()

tag = args.tag
# argparse already converted --nodes to an int (type=int above),
# so the previous extra int() call was redundant.
num_nodes = args.nodes
# Get an AWS EC2 "resource"
# See http://boto3.readthedocs.org/en/latest/guide/resources.html
ec2 = boto3.resource(service_name='ec2')

# Create a client from the EC2 resource
# See http://boto3.readthedocs.org/en/latest/guide/clients.html
client = ec2.meta.client

# Ensure they don't already have some instances with the specified tag

# Get a list of all instances with the specified tag.
# (Technically, instances_with_tag is an ec2.instancesCollection.)
filters = [{'Name': 'tag:Name', 'Values': [tag]}]
instances_with_tag = ec2.instances.filter(Filters=filters)

# len() doesn't work on instances_with_tag. This does:
num_ins = 0
for instance in instances_with_tag:
    num_ins += 1

# Abort rather than mixing newly-launched instances with old ones
# under the same tag.
if num_ins != 0:
    print('You already have {} instances with the tag {} on EC2.'.
          format(num_ins, tag))
    print('You should either pick a different tag or '
          'terminate all those instances and '
          'wait until they vanish from your EC2 Console.')
    sys.exit(1)
# Before launching any instances, make sure they have sufficient
# allocated-but-unassociated EC2 elastic IP addresses
print('Checking if you have enough allocated-but-unassociated ' +
      'EC2 elastic IP addresses...')

non_associated_eips = get_naeips(client)

print('You have {} allocated elastic IPs which are '
      'not already associated with instances'.
      format(len(non_associated_eips)))

if num_nodes > len(non_associated_eips):
    # One elastic IP is needed per node; allocate the shortfall.
    num_eips_to_allocate = num_nodes - len(non_associated_eips)

    print('You want to launch {} instances'.
          format(num_nodes))
    print('so {} more elastic IPs must be allocated'.
          format(num_eips_to_allocate))

    for _ in range(num_eips_to_allocate):
        try:
            # Allocate an elastic IP address
            # response is a dict. See http://tinyurl.com/z2n7u9k
            response = client.allocate_address(DryRun=False, Domain='standard')
        except botocore.exceptions.ClientError:
            # Likely cause: the AWS account's elastic IP limit
            # (five by default) has been reached. Re-raise after the hint.
            print('Something went wrong when allocating an '
                  'EC2 elastic IP address on EC2. '
                  'Maybe you are already at the maximum number allowed '
                  'by your AWS account? More details:')
            raise
        except:
            # Anything else is unexpected; re-raise with a marker line.
            print('Unexpected error:')
            raise
print('Commencing launch of {} instances on Amazon EC2...'.
      format(num_nodes))

for _ in range(num_nodes):
    # Request the launch of one instance at a time
    # (so list_of_instances should contain only one item)
    list_of_instances = ec2.create_instances(
        ImageId='ami-accff2b1',         # ubuntu-image
        # 'ami-596b7235',               # ubuntu w/ iops storage
        MinCount=1,
        MaxCount=1,
        KeyName='bigchaindb',
        InstanceType='m3.2xlarge',
        # 'c3.8xlarge',
        # 'c4.8xlarge',
        SecurityGroupIds=['bigchaindb']
        )

    # Tag the just-launched instances (should be just one)
    for instance in list_of_instances:
        # presumably gives AWS time to register the new instance before
        # tagging it -- TODO confirm whether this sleep is still needed
        time.sleep(5)
        instance.create_tags(Tags=[{'Key': 'Name', 'Value': tag}])
# Get a list of all instances with the specified tag.
# (Technically, instances_with_tag is an ec2.instancesCollection.)
filters = [{'Name': 'tag:Name', 'Values': [tag]}]
instances_with_tag = ec2.instances.filter(Filters=filters)

# The old code called .format(tag) on this string, but it has no {}
# placeholder, so the call was a no-op; printed output is unchanged.
print('The launched instances will have these ids:')
for instance in instances_with_tag:
    print(instance.id)

print('Waiting until all those instances exist...')
for instance in instances_with_tag:
    instance.wait_until_exists()

print('Waiting until all those instances are running...')
for instance in instances_with_tag:
    instance.wait_until_running()
print('Associating allocated-but-unassociated elastic IPs ' +
      'with the instances...')

# Get a list of elastic IPs which are allocated but
# not associated with any instances.
# There should be enough because we checked earlier and
# allocated more if necessary.
non_associated_eips_2 = get_naeips(client)

for i, instance in enumerate(instances_with_tag):
    print('Grabbing an allocated but non-associated elastic IP...')
    eip = non_associated_eips_2[i]
    public_ip = eip['PublicIp']
    print('The public IP address {}'.format(public_ip))

    # Associate that Elastic IP address with an instance
    response2 = client.associate_address(
        DryRun=False,
        InstanceId=instance.instance_id,
        PublicIp=public_ip
        )

    print('was associated with the instance with id {}'.
          format(instance.instance_id))
# Get a list of the public DNS names of the instances_with_tag
hosts_dev = []
for instance in instances_with_tag:
    public_dns_name = getattr(instance, 'public_dns_name', None)
    if public_dns_name is not None:
        hosts_dev.append(public_dns_name)

# Write a shellscript to add remote keys to ~/.ssh/known_hosts
print('Preparing shellscript to add remote keys to known_hosts')
with open('add2known_hosts.sh', 'w') as f:
    f.write('#!/bin/bash\n')
    for public_dns_name in hosts_dev:
        f.write('ssh-keyscan ' + public_dns_name + ' >> ~/.ssh/known_hosts\n')

# Create a file named hostlist.py containing hosts_dev.
# If a hostlist.py already exists, it will be overwritten.
# (hostlist.py is later imported by the fabfile and
# create_rethinkdb_conf.py.)
print('Writing hostlist.py')
with open('hostlist.py', 'w') as f:
    f.write('# -*- coding: utf-8 -*-\n')
    f.write('from __future__ import unicode_literals\n')
    f.write('hosts_dev = {}\n'.format(hosts_dev))

# Wait
wait_time = 45
print('Waiting {} seconds to make sure all instances are ready...'.
      format(wait_time))
time.sleep(wait_time)

81
deploy-cluster-aws/startup.sh Executable file
View File

@ -0,0 +1,81 @@
#! /bin/bash
# Launch and configure a BigchainDB cluster on AWS.
# Usage: ./startup.sh <tag> <number_of_nodes_in_cluster>

# The set -e option instructs bash to immediately exit if any command has a non-zero exit status
set -e

function printErr()
{
    echo "usage: ./startup.sh <tag> <number_of_nodes_in_cluster>"
    echo "No argument $1 supplied"
}

if [ -z "$1" ]
then
    printErr "<tag>"
    exit 1
fi

if [ -z "$2" ]
then
    printErr "<number_of_nodes_in_cluster>"
    exit 1
fi

TAG=$1
NODES=$2

# Check for AWS private key file (.pem file)
if [ ! -f "pem/bigchaindb.pem" ]
then
    echo "File pem/bigchaindb.pem (AWS private key) is missing"
    exit 1
fi

# Change the file permissions on pem/bigchaindb.pem
# so that the owner can read it, but that's all
chmod 0400 pem/bigchaindb.pem

# The following Python script does these things:
# 0. allocates more elastic IP addresses if necessary,
# 1. launches the specified number of nodes (instances) on Amazon EC2,
# 2. tags them with the specified tag,
# 3. waits until those instances exist and are running,
# 4. for each instance, it associates an elastic IP address
#    with that instance,
# 5. writes the shellscript add2known_hosts.sh
# 6. (over)writes a file named hostlist.py
#    containing a list of all public DNS names.
# The variables are quoted so a tag containing whitespace
# doesn't split into multiple arguments (shellcheck SC2086).
python launch_ec2_nodes.py --tag "$TAG" --nodes "$NODES"

# Make add2known_hosts.sh executable then execute it.
# This adds remote keys to ~/.ssh/known_hosts
chmod +x add2known_hosts.sh
./add2known_hosts.sh

# (Re)create the RethinkDB configuration file conf/rethinkdb.conf
python create_rethinkdb_conf.py

# rollout base packages (dependencies) needed before
# storage backend (rethinkdb) and bigchaindb can be rolled out
fab install_base_software

# rollout storage backend (rethinkdb)
fab install_rethinkdb

# rollout bigchaindb
fab install_bigchaindb

# generate genesis block
# HORST is the last public_dns_name listed in conf/rethinkdb.conf
# For example:
# ec2-52-58-86-145.eu-central-1.compute.amazonaws.com
# $(...) is the modern, nestable form of backtick command substitution.
HORST=$(tail -1 conf/rethinkdb.conf | cut -d: -f1 | cut -d= -f2)
fab -H "$HORST" -f fab_prepare_chain.py init_bigchaindb

# initiate sharding
fab start_bigchaindb_nodes

# cleanup
rm add2known_hosts.sh

# DONE

View File

@ -19,8 +19,8 @@ tx_hash = hashlib.sha3_256(data).hexdigest()
## Signature algorithm and keys
The signature algorithm used by BigchainDB is ECDSA with the secp256k1 curve
using the python [cryptography](https://cryptography.io/en/latest/) module.
The signature algorithm used by BigchainDB is [ED25519](https://tools.ietf.org/html/draft-irtf-cfrg-eddsa-04)
using the python [ed25519](https://github.com/warner/python-ed25519) module, overloaded by the [cryptoconditions library](https://github.com/bigchaindb/cryptoconditions).
The private key is the base58 encoded hexadecimal representation of the private number.
The public key is the base58 encoded hexadecimal representation of the

View File

@ -0,0 +1,153 @@
# Deploy a Cluster on AWS
This section explains a way to deploy a cluster of BigchainDB nodes on Amazon Web Services (AWS). We use some Bash and Python scripts to launch several instances (virtual servers) on Amazon Elastic Compute Cloud (EC2). Then we use Fabric to install RethinkDB and BigchainDB on all those instances.
**NOTE: At the time of writing, these scripts _do_ launch a bunch of EC2 instances, and they do install RethinkDB plus BigchainDB on each instance, but don't expect to be able to use the cluster for anything useful. There are several issues related to configuration, networking, and external clients that must be sorted out first. That said, you might find it useful to try out the AWS deployment scripts, because setting up to use them, and using them, will be very similar once those issues get sorted out.**
## Why?
You might ask why one would want to deploy a centrally-controlled BigchainDB cluster. Isn't BigchainDB supposed to be decentralized, where each node is controlled by a different person or organization?
That's true, but there are some reasons why one might want a centrally-controlled cluster: 1) for testing, and 2) for initial deployment. Afterwards, the control of each node can be handed over to a different entity.
## Python Setup
The instructions that follow have been tested on Ubuntu 14.04, but may also work on similar distros or operating systems.
**Note: Our Python scripts for deploying to AWS use Python 2 because Fabric doesn't work with Python 3.**
Maybe create a Python 2 virtual environment and activate it. Then install the following Python packages (in that virtual environment):
```text
pip install fabric fabtools requests boto3 awscli
```
What did you just install?
* "[Fabric](http://www.fabfile.org/) is a Python (2.5-2.7) library and command-line tool for streamlining the use of SSH for application deployment or systems administration tasks."
* [fabtools](https://github.com/ronnix/fabtools) are "tools for writing awesome Fabric files"
* [requests](http://docs.python-requests.org/en/master/) is a Python package/library for sending HTTP requests
* "[Boto](https://boto3.readthedocs.org/en/latest/) is the Amazon Web Services (AWS) SDK for Python, which allows Python developers to write software that makes use of Amazon services like S3 and EC2." (`boto3` is the name of the latest Boto package.)
* [The aws-cli package](https://pypi.python.org/pypi/awscli), which is an AWS Command Line Interface (CLI).
## AWS Setup
Before you can deploy a BigchainDB cluster on AWS, you must have an AWS account. If you don't already have one, you can [sign up for one for free](https://aws.amazon.com/).
### Create an AWS Access Key
The next thing you'll need is an AWS access key. If you don't have one, you can create one using the [instructions in the AWS documentation](http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSGettingStartedGuide/AWSCredentials.html). You should get an access key ID (e.g. AKIAIOSFODNN7EXAMPLE) and a secret access key (e.g. wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY).
You should also pick a default AWS region name (e.g. `eu-central-1`). That's where your cluster will run. The AWS documentation has [a list of them](http://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region).
Once you've got your AWS access key, and you've picked a default AWS region name, go to a terminal session and enter:
```text
aws configure
```
and answer the four questions. For example:
```text
AWS Access Key ID [None]: AKIAIOSFODNN7EXAMPLE
AWS Secret Access Key [None]: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
Default region name [None]: eu-central-1
Default output format [None]: [Press Enter]
```
This writes two files:
* `~/.aws/credentials`
* `~/.aws/config`
AWS tools and packages look for those files.
### Get Enough Amazon Elastic IP Addresses
Our AWS deployment scripts use elastic IP addresses (although that may change in the future). By default, AWS accounts get five elastic IP addresses. If you want to deploy a cluster with more than five nodes, then you will need more than five elastic IP addresses; you may have to apply for those; see [the AWS documentation on elastic IP addresses](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/elastic-ip-addresses-eip.html).
### Create an Amazon EC2 Key Pair
Go to the AWS EC2 Console and select "Key Pairs" in the left sidebar. Click the "Create Key Pair" button. Give it the name `bigchaindb`. You should be prompted to save a file named `bigchaindb.pem`. That file contains the RSA private key. (Amazon keeps the corresponding public key.) Save the file in `bigchaindb/deploy-cluster-aws/pem/bigchaindb.pem`.
You should not share your private key.
### Create an Amazon EC2 Security Group
Go to the AWS EC2 Console and select "Security Groups" in the left sidebar. Click the "Create Security Group" button. Give it the name `bigchaindb`. The description probably doesn't matter but we also put `bigchaindb` for that.
Add some rules for Inbound traffic:
* Type = All TCP, Protocol = TCP, Port Range = 0-65535, Source = 0.0.0.0/0
* Type = SSH, Protocol = SSH, Port Range = 22, Source = 0.0.0.0/0
* Type = All UDP, Protocol = UDP, Port Range = 0-65535, Source = 0.0.0.0/0
* Type = All ICMP, Protocol = ICMP, Port Range = 0-65535, Source = 0.0.0.0/0
**Note: These rules are extremely lax! They're meant to make testing easy.** You'll want to tighten them up if you intend to have a secure cluster. For example, Source = 0.0.0.0/0 is [CIDR notation](https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) for "allow this traffic to come from _any_ IP address."
## Deployment
Here's an example of how one could launch a BigchainDB cluster of 4 nodes tagged `wrigley` on AWS:
```text
cd bigchaindb
cd deploy-cluster-aws
./startup.sh wrigley 4
```
`startup.sh` is a Bash script which calls some Python 2 and Fabric scripts. Here's what it does:
0. allocates more elastic IP addresses if necessary,
1. launches the specified number of nodes (instances) on Amazon EC2,
2. tags them with the specified tag,
3. waits until those instances exist and are running,
4. for each instance, it associates an elastic IP address with that instance,
5. adds remote keys to `~/.ssh/known_hosts`,
6. (re)creates the RethinkDB configuration file `conf/rethinkdb.conf`,
7. installs base (prerequisite) software on all instances,
8. installs RethinkDB on all instances,
9. installs BigchainDB on all instances,
10. generates the genesis block,
11. starts BigchainDB on all instances.
It should take a few minutes for the deployment to finish. If you run into problems, see the section on Known Deployment Issues below.
The EC2 Console has a section where you can see all the instances you have running on EC2. You can `ssh` into a running instance using a command like:
```text
ssh -i pem/bigchaindb.pem ubuntu@ec2-52-29-197-211.eu-central-1.compute.amazonaws.com
```
except you'd replace the `ec2-52-29-197-211.eu-central-1.compute.amazonaws.com` with the public DNS name of the instance you want to `ssh` into. You can get that from the EC2 Console: just click on an instance and look in its details pane at the bottom of the screen. Some commands you might try:
```text
ip addr show
sudo service rethinkdb status
bigchaindb --help
bigchaindb show-config
```
There are fees associated with running instances on EC2, so if you're not using them, you should terminate them. You can do that from the AWS EC2 Console.
The same is true of your allocated elastic IP addresses. There's a small fee to keep them allocated if they're not associated with a running instance. You can release them from the AWS EC2 Console.
## Known Deployment Issues
### NetworkError
If you tested with a high sequence it might be possible that you run into an error message like this:
```text
NetworkError: Host key for ec2-xx-xx-xx-xx.eu-central-1.compute.amazonaws.com
did not match pre-existing key! Server's key was changed recently, or possible
man-in-the-middle attack.
```
If so, just clean up your `known_hosts` file and start again. For example, you might copy your current `known_hosts` file to `old_known_hosts` like so:
```text
mv ~/.ssh/known_hosts ~/.ssh/old_known_hosts
```
Then terminate your instances and try deploying again with a different tag.
### Failure of sudo apt-get update
The first thing that's done on all the instances, once they're running, is basically [`sudo apt-get update`](http://askubuntu.com/questions/222348/what-does-sudo-apt-get-update-do). Sometimes that fails. If so, just terminate your instances and try deploying again with a different tag. (These problems seem to be time-bounded, so maybe wait a couple of hours before retrying.)
### Failure when Installing Base Software
If you get an error with installing the base software on the instances, then just terminate your instances and try deploying again with a different tag.

View File

@ -1,5 +1,11 @@
# The HTTP Client-Server API
The preferred way to communicate with a node in a BigchainDB cluster is via HTTP requests. Each node exposes a simple HTTP API (the HTTP Client-Server API) that provides, right now, two endpoints, one to get information about a specific transaction id, and one to push a transaction to the BigchainDB cluster.
When you start BigchainDB using `bigchaindb start`, an HTTP API is exposed at:
The endpoints are documented using [Apiary](http://docs.bigchaindb.apiary.io/).
[http://localhost:5000/api/v1/](http://localhost:5000/api/v1/)
Right now, that API can only be accessed from localhost (i.e. not remotely). In the future, we'll enable remote access and explain how that works. See [Issue #149](https://github.com/bigchaindb/bigchaindb/issues/149) on GitHub.
The HTTP API currently exposes two endpoints, one to get information about a specific transaction id, and one to push a transaction to the BigchainDB cluster. Those endpoints are documented at:
[http://docs.bigchaindb.apiary.io/](http://docs.bigchaindb.apiary.io/)

View File

@ -20,6 +20,7 @@ Table of Contents
http-client-server-api
python-driver-api-examples
local-rethinkdb-cluster
deploy-on-aws
cryptography
models
json-serialization

View File

@ -44,7 +44,7 @@ $ sudo dnf install libffi-devel gcc-c++ redhat-rpm-config python3-devel openssl-
With OS-level dependencies installed, you can install BigchainDB Server with `pip` or from source.
### How to Install BigchainDB with `pip`
### How to Install BigchainDB with pip
BigchainDB (i.e. both the Server and the officially-supported drivers) is distributed as a Python package on PyPI so you can install it using `pip`. First, make sure you have a version of `pip` installed for Python 3.4+:
```text

View File

@ -40,8 +40,10 @@ At a high level, a "digital asset" is something which can be represented digital
In BigchainDB, only the federation nodes are allowed to create digital assets, by doing a special kind of transaction: a `CREATE` transaction.
```python
from bigchaindb import crypto
# create a test user
testuser1_priv, testuser1_pub = b.generate_keys()
testuser1_priv, testuser1_pub = crypto.generate_key_pair()
# define a digital asset data payload
digital_asset_payload = {'msg': 'Hello BigchainDB!'}

View File

@ -26,7 +26,7 @@ You can also run all unit tests via `setup.py`, using:
$ python setup.py test
```
### Using `docker-compose` to Run the Tests
### Using docker-compose to Run the Tests
You can also use `docker-compose` to run the unit tests. (You don't have to start RethinkDB first: `docker-compose` does that on its own, when it reads the `docker-compose.yml` file.)

View File

@ -71,14 +71,14 @@ setup(
'rethinkdb==2.2.0.post4',
'pysha3==0.3',
'pytz==2015.7',
'cryptography==1.2.1',
'cryptoconditions==0.1.6',
'statsd==3.2.1',
'python-rapidjson==0.0.6',
'logstats==0.2.1',
'base58==0.2.2',
'bitcoin==1.1.42',
'flask==0.10.1',
'requests==2.9',
'gunicorn~=19.0',
],
setup_requires=['pytest-runner'],
tests_require=tests_require,

View File

@ -19,14 +19,14 @@ CONFIG = {
'name': DB_NAME
},
'keypair': {
'private': '3i2FDXp87N9ExXSvWxqBAw9EgzoxxGTQNKbtxmWBpTyL',
'public': '29Tw3ozmSRtN8XNofvsu5RdoQRk9gAonfpkFvRZDmhTPo'
'private': '31Lb1ZGKTyHnmVK3LUMrAUrPNfd4sE2YyBt3UA4A25aA',
'public': '4XYfCbabAWVUCbjTmRTFEu2sc3dFEdkse4r6X498B1s8'
}
}
# Test user. inputs will be created for this user. Cryptography Keys
USER_PRIVATE_KEY = 'GmRZxQdQv7tooMijXytQkexKuFN6mJocciJarAmMwTX2'
USER_PUBLIC_KEY = 'r3cEu8GNoz8rYpNJ61k7GqfR8VEvdUbtyHce8u1kaYwh'
USER_SIGNING_KEY = '8eJ8q9ZQpReWyQT5aFCiwtZ5wDZC4eDnCen88p3tQ6ie'
USER_VERIFYING_KEY = 'JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE'
@pytest.fixture
@ -41,13 +41,13 @@ def node_config():
@pytest.fixture
def user_private_key():
return USER_PRIVATE_KEY
def user_sk():
return USER_SIGNING_KEY
@pytest.fixture
def user_public_key():
return USER_PUBLIC_KEY
def user_vk():
return USER_VERIFYING_KEY
@pytest.fixture

View File

@ -81,7 +81,7 @@ def cleanup_tables(request, node_config):
@pytest.fixture
def inputs(user_public_key, amount=1, b=None):
def inputs(user_vk, amount=1, b=None):
# 1. create the genesis block
b = b or Bigchain()
try:
@ -92,7 +92,7 @@ def inputs(user_public_key, amount=1, b=None):
# 2. create block with transactions for `USER` to spend
transactions = []
for i in range(amount):
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
tx_signed = b.sign_transaction(tx, b.me_private)
transactions.append(tx_signed)
b.write_transaction(tx_signed)

View File

@ -8,12 +8,11 @@ import rethinkdb as r
import bigchaindb
from bigchaindb import util
from bigchaindb import exceptions
from bigchaindb.crypto import PrivateKey, PublicKey, generate_key_pair, hash_data
from bigchaindb import crypto
from bigchaindb.voter import Voter
from bigchaindb.block import Block
@pytest.mark.skipif(reason='Some tests throw a ResourceWarning that might result in some weird '
'exceptions while running the tests. The problem seems to *not* '
'interfere with the correctness of the tests. ')
@ -22,54 +21,66 @@ def test_remove_unclosed_sockets():
class TestBigchainApi(object):
def test_create_transaction(self, b):
tx = b.create_transaction('a', 'b', 'c', 'd')
def test_create_transaction_create(self, b, user_sk):
tx = b.create_transaction(b.me, user_sk, None, 'CREATE')
assert sorted(tx) == sorted(['id', 'transaction', 'version'])
assert sorted(tx['transaction']) == sorted(['conditions', 'fulfillments', 'operation',
'timestamp', 'data'])
assert sorted(tx['transaction']) == sorted(['conditions', 'data', 'fulfillments', 'operation', 'timestamp'])
def test_create_transaction_with_unsupported_payload_raises(self, b):
with pytest.raises(TypeError):
b.create_transaction('a', 'b', 'c', 'd', payload=[])
def test_transaction_hash(self, b):
@pytest.mark.usefixtures('inputs')
def test_create_transaction_transfer(self, b, user_vk, user_sk):
input_tx = b.get_owned_ids(user_vk).pop()
assert b.verify_signature(b.get_transaction(input_tx)) == True
tx = b.create_transaction(b.me, user_sk, {'txid': input_tx, 'cid': 0}, 'TRANSFER')
assert sorted(tx) == sorted(['id', 'transaction', 'version'])
assert sorted(tx['transaction']) == sorted(['conditions', 'data', 'fulfillments', 'operation', 'timestamp'])
tx_signed = b.sign_transaction(tx, user_sk)
assert b.verify_signature(tx) == False
assert b.verify_signature(tx_signed) == True
def test_transaction_hash(self, b, user_vk):
payload = {'cats': 'are awesome'}
tx = b.create_transaction('a', 'b', 'c', 'd', payload)
tx = b.create_transaction(user_vk, user_vk, None, 'CREATE', payload)
tx_calculated = {
'current_owner': 'a',
'new_owner': 'b',
'input': 'c',
'operation': 'd',
'timestamp': tx['transaction']['timestamp'],
'data': {
'hash': hash_data(util.serialize(payload)),
'payload': payload
}
'conditions': [{'cid': 0,
'condition': tx['transaction']['conditions'][0]['condition'],
'new_owners': [user_vk]}],
'data': {'hash': crypto.hash_data(util.serialize(payload)),
'payload': payload},
'fulfillments': [{'current_owners': [user_vk],
'fid': 0,
'fulfillment': None,
'input': None}],
'operation': 'CREATE',
'timestamp': tx['transaction']['timestamp']
}
assert tx['transaction']['data'] == tx_calculated['data']
# assert tx_hash == tx_calculated_hash
# TODO: Make sure that this is covered when merged with dimi's code
@pytest.mark.skipif(reason='We no longer check signatures, only fulfillments of conditions')
def test_transaction_signature(self, b):
sk, vk = generate_key_pair()
tx = b.create_transaction(vk, 'b', 'c', 'd')
tx_signed = b.sign_transaction(tx, sk)
def test_transaction_signature(self, b, user_sk, user_vk):
tx = b.create_transaction(user_vk, user_vk, None, 'CREATE')
tx_signed = b.sign_transaction(tx, user_sk)
assert 'signature' in tx_signed
assert tx_signed['transaction']['fulfillments'][0]['fulfillment'] is not None
assert b.verify_signature(tx_signed)
def test_serializer(self, b):
tx = b.create_transaction('a', 'b', 'c', 'd')
def test_serializer(self, b, user_vk):
tx = b.create_transaction(user_vk, user_vk, None, 'CREATE')
assert util.deserialize(util.serialize(tx)) == tx
@pytest.mark.usefixtures('inputs')
def test_write_transaction(self, b, user_public_key, user_private_key):
input_tx = b.get_owned_ids(user_public_key).pop()
tx = b.create_transaction(user_public_key, 'b', input_tx, 'd')
tx_signed = b.sign_transaction(tx, user_private_key)
def test_write_transaction(self, b, user_vk, user_sk):
input_tx = b.get_owned_ids(user_vk).pop()
tx = b.create_transaction(user_vk, user_vk, {'txid': input_tx, 'cid': 0}, 'TRANSFER')
tx_signed = b.sign_transaction(tx, user_sk)
response = b.write_transaction(tx_signed)
assert response['skipped'] == 0
@ -80,10 +91,10 @@ class TestBigchainApi(object):
assert response['inserted'] == 1
@pytest.mark.usefixtures('inputs')
def test_read_transaction(self, b, user_public_key, user_private_key):
input_tx = b.get_owned_ids(user_public_key).pop()
tx = b.create_transaction(user_public_key, 'b', input_tx, 'd')
tx_signed = b.sign_transaction(tx, user_private_key)
def test_read_transaction(self, b, user_vk, user_sk):
input_tx = b.get_owned_ids(user_vk).pop()
tx = b.create_transaction(user_vk, user_vk, {'txid': input_tx, 'cid': 0}, 'TRANSFER')
tx_signed = b.sign_transaction(tx, user_sk)
b.write_transaction(tx_signed)
# create block and write it to the bigchain before retrieving the transaction
@ -94,10 +105,10 @@ class TestBigchainApi(object):
assert util.serialize(tx_signed) == util.serialize(response)
@pytest.mark.usefixtures('inputs')
def test_assign_transaction_one_node(self, b, user_public_key, user_private_key):
input_tx = b.get_owned_ids(user_public_key).pop()
tx = b.create_transaction(user_public_key, 'b', input_tx, 'd')
tx_signed = b.sign_transaction(tx, user_private_key)
def test_assign_transaction_one_node(self, b, user_vk, user_sk):
input_tx = b.get_owned_ids(user_vk).pop()
tx = b.create_transaction(user_vk, user_vk, {'txid': input_tx, 'cid': 0}, 'TRANSFER')
tx_signed = b.sign_transaction(tx, user_sk)
b.write_transaction(tx_signed)
# retrieve the transaction
@ -107,16 +118,16 @@ class TestBigchainApi(object):
assert response['assignee'] == b.me
@pytest.mark.usefixtures('inputs')
def test_assign_transaction_multiple_nodes(self, b, user_public_key, user_private_key):
def test_assign_transaction_multiple_nodes(self, b, user_vk, user_sk):
# create 5 federation nodes
for _ in range(5):
b.federation_nodes.append(generate_key_pair()[1])
b.federation_nodes.append(crypto.generate_key_pair()[1])
# test assignee for several transactions
for _ in range(20):
input_tx = b.get_owned_ids(user_public_key).pop()
tx = b.create_transaction(user_public_key, 'b', input_tx, 'd')
tx_signed = b.sign_transaction(tx, user_private_key)
input_tx = b.get_owned_ids(user_vk).pop()
tx = b.create_transaction(user_vk, user_vk, {'txid': input_tx, 'cid': 0}, 'TRANSFER')
tx_signed = b.sign_transaction(tx, user_sk)
b.write_transaction(tx_signed)
# retrieve the transaction
@ -134,7 +145,7 @@ class TestBigchainApi(object):
assert response['block_number'] == 0
assert len(response['block']['transactions']) == 1
assert response['block']['transactions'][0]['transaction']['operation'] == 'GENESIS'
assert response['block']['transactions'][0]['transaction']['input'] is None
assert response['block']['transactions'][0]['transaction']['fulfillments'][0]['input'] is None
def test_create_genesis_block_fails_if_table_not_empty(self, b):
b.create_genesis_block()
@ -187,19 +198,19 @@ class TestBigchainApi(object):
def test_create_new_block(self, b):
new_block = b.create_block([])
block_hash = hash_data(util.serialize(new_block['block']))
block_hash = crypto.hash_data(util.serialize(new_block['block']))
assert new_block['block']['voters'] == [b.me]
assert new_block['block']['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(new_block['block']), new_block['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(new_block['block']), new_block['signature']) is True
assert new_block['id'] == block_hash
assert new_block['votes'] == []
def test_get_last_voted_block_returns_genesis_if_no_votes_has_been_casted(self, b):
b.create_genesis_block()
genesis = list(r.table('bigchain')
.filter(r.row['block_number'] == 0)
.run(b.conn))[0]
.filter(r.row['block_number'] == 0)
.run(b.conn))[0]
assert b.get_last_voted_block() == genesis
def test_get_last_voted_block_returns_the_correct_block(self, b):
@ -226,33 +237,34 @@ class TestBigchainApi(object):
class TestTransactionValidation(object):
def test_create_operation_with_inputs(self, b):
tx = b.create_transaction('a', 'b', 'c', 'CREATE')
@pytest.mark.usefixtures('inputs')
def test_create_operation_with_inputs(self, b, user_vk):
input_tx = b.get_owned_ids(user_vk).pop()
tx = b.create_transaction(b.me, user_vk, {'txid': input_tx, 'cid': 0}, 'CREATE')
with pytest.raises(ValueError) as excinfo:
b.validate_transaction(tx)
assert excinfo.value.args[0] == 'A CREATE operation has no inputs'
assert b.is_valid_transaction(tx) is False
def test_create_operation_not_federation_node(self, b):
tx = b.create_transaction('a', 'b', None, 'CREATE')
def test_create_operation_not_federation_node(self, b, user_vk):
tx = b.create_transaction(user_vk, user_vk, None, 'CREATE')
with pytest.raises(exceptions.OperationError) as excinfo:
b.validate_transaction(tx)
assert excinfo.value.args[0] == 'Only federation nodes can use the operation `CREATE`'
assert b.is_valid_transaction(tx) is False
def test_non_create_operation_no_inputs(self, b):
tx = b.create_transaction('a', 'b', None, 'd')
def test_non_create_operation_no_inputs(self, b, user_vk):
tx = b.create_transaction(user_vk, user_vk, None, 'TRANSFER')
with pytest.raises(ValueError) as excinfo:
b.validate_transaction(tx)
assert excinfo.value.args[0] == 'Only `CREATE` transactions can have null inputs'
assert b.is_valid_transaction(tx) is False
def test_non_create_input_not_found(self, b):
tx = b.create_transaction('a', 'b', 'c', 'd')
def test_non_create_input_not_found(self, b, user_vk):
tx = b.create_transaction(user_vk, user_vk, {'txid': 'c', 'cid': 0}, 'TRANSFER')
with pytest.raises(exceptions.TransactionDoesNotExist) as excinfo:
b.validate_transaction(tx)
@ -260,9 +272,10 @@ class TestTransactionValidation(object):
assert b.is_valid_transaction(tx) is False
@pytest.mark.usefixtures('inputs')
def test_non_create_valid_input_wrong_owner(self, b, user_public_key):
valid_input = b.get_owned_ids(user_public_key).pop()
tx = b.create_transaction('a', 'b', valid_input, 'c')
def test_non_create_valid_input_wrong_owner(self, b, user_vk):
valid_input = b.get_owned_ids(user_vk).pop()
sk, vk = crypto.generate_key_pair()
tx = b.create_transaction(vk, user_vk, {'txid': valid_input, 'cid': 0}, 'TRANSFER')
with pytest.raises(exceptions.TransactionOwnerError) as excinfo:
b.validate_transaction(tx)
@ -270,10 +283,10 @@ class TestTransactionValidation(object):
assert b.is_valid_transaction(tx) is False
@pytest.mark.usefixtures('inputs')
def test_non_create_double_spend(self, b, user_public_key, user_private_key):
input_valid = b.get_owned_ids(user_public_key).pop()
tx_valid = b.create_transaction(user_public_key, 'b', input_valid, 'd')
tx_valid_signed = b.sign_transaction(tx_valid, user_private_key)
def test_non_create_double_spend(self, b, user_vk, user_sk):
input_valid = b.get_owned_ids(user_vk).pop()
tx_valid = b.create_transaction(user_vk, 'b', input_valid, 'd')
tx_valid_signed = b.sign_transaction(tx_valid, user_sk)
b.write_transaction(tx_valid_signed)
# create and write block to bigchain
@ -281,7 +294,7 @@ class TestTransactionValidation(object):
b.write_block(block, durability='hard')
# create another transaction with the same input
tx_double_spend = b.create_transaction(user_public_key, 'd', input_valid, 'd')
tx_double_spend = b.create_transaction(user_vk, 'd', input_valid, 'd')
with pytest.raises(exceptions.DoubleSpend) as excinfo:
b.validate_transaction(tx_double_spend)
@ -289,9 +302,9 @@ class TestTransactionValidation(object):
assert b.is_valid_transaction(tx_double_spend) is False
@pytest.mark.usefixtures('inputs')
def test_wrong_transaction_hash(self, b, user_public_key):
input_valid = b.get_owned_ids(user_public_key).pop()
tx_valid = b.create_transaction(user_public_key, 'b', input_valid, 'd')
def test_wrong_transaction_hash(self, b, user_vk):
input_valid = b.get_owned_ids(user_vk).pop()
tx_valid = b.create_transaction(user_vk, 'b', input_valid, 'd')
# change the transaction hash
tx_valid.update({'id': 'abcd'})
@ -300,9 +313,9 @@ class TestTransactionValidation(object):
assert b.is_valid_transaction(tx_valid) is False
@pytest.mark.usefixtures('inputs')
def test_wrong_signature(self, b, user_public_key):
input_valid = b.get_owned_ids(user_public_key).pop()
tx_valid = b.create_transaction(user_public_key, 'b', input_valid, 'd')
def test_wrong_signature(self, b, user_vk):
input_valid = b.get_owned_ids(user_vk).pop()
tx_valid = b.create_transaction(user_vk, 'b', input_valid, 'd')
wrong_private_key = '4fyvJe1aw2qHZ4UNRYftXK7JU7zy9bCqoU5ps6Ne3xrY'
@ -311,27 +324,27 @@ class TestTransactionValidation(object):
b.validate_transaction(tx_invalid_signed)
assert b.is_valid_transaction(tx_invalid_signed) is False
def test_valid_create_transaction(self, b, user_public_key):
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
def test_valid_create_transaction(self, b, user_vk):
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
tx_signed = b.sign_transaction(tx, b.me_private)
assert tx_signed == b.validate_transaction(tx_signed)
assert tx_signed == b.is_valid_transaction(tx_signed)
@pytest.mark.usefixtures('inputs')
def test_valid_non_create_transaction(self, b, user_public_key, user_private_key):
input_valid = b.get_owned_ids(user_public_key).pop()
tx_valid = b.create_transaction(user_public_key, 'b', input_valid, 'd')
def test_valid_non_create_transaction(self, b, user_vk, user_sk):
input_valid = b.get_owned_ids(user_vk).pop()
tx_valid = b.create_transaction(user_vk, 'b', input_valid, 'd')
tx_valid_signed = b.sign_transaction(tx_valid, user_private_key)
tx_valid_signed = b.sign_transaction(tx_valid, user_sk)
assert tx_valid_signed == b.validate_transaction(tx_valid_signed)
assert tx_valid_signed == b.is_valid_transaction(tx_valid_signed)
@pytest.mark.usefixtures('inputs')
def test_valid_non_create_transaction_after_block_creation(self, b, user_public_key, user_private_key):
input_valid = b.get_owned_ids(user_public_key).pop()
tx_valid = b.create_transaction(user_public_key, 'b', input_valid, 'd')
def test_valid_non_create_transaction_after_block_creation(self, b, user_vk, user_sk):
input_valid = b.get_owned_ids(user_vk).pop()
tx_valid = b.create_transaction(user_vk, 'b', input_valid, 'd')
tx_valid_signed = b.sign_transaction(tx_valid, user_private_key)
tx_valid_signed = b.sign_transaction(tx_valid, user_sk)
assert tx_valid_signed == b.validate_transaction(tx_valid_signed)
assert tx_valid_signed == b.is_valid_transaction(tx_valid_signed)
@ -346,7 +359,6 @@ class TestTransactionValidation(object):
class TestBlockValidation(object):
def test_wrong_block_hash(self, b):
block = b.create_block([])
@ -357,9 +369,9 @@ class TestBlockValidation(object):
@pytest.mark.skipif(reason='Separated tx validation from block creation.')
@pytest.mark.usefixtures('inputs')
def test_invalid_transactions_in_block(self, b, user_public_key, ):
def test_invalid_transactions_in_block(self, b, user_vk, ):
# invalid transaction
valid_input = b.get_owned_ids(user_public_key).pop()
valid_input = b.get_owned_ids(user_vk).pop()
tx_invalid = b.create_transaction('a', 'b', valid_input, 'c')
block = b.create_block([tx_invalid])
@ -373,8 +385,8 @@ class TestBlockValidation(object):
}
block_data = util.serialize(block)
block_hash = hash_data(block_data)
block_signature = PrivateKey(b.me_private).sign(block_data)
block_hash = crypto.hash_data(block_data)
block_signature = crypto.SigningKey(b.me_private).sign(block_data)
block = {
'id': block_hash,
@ -397,11 +409,11 @@ class TestBlockValidation(object):
b.validate_block(block)
@pytest.mark.usefixtures('inputs')
def test_valid_block(self, b, user_public_key, user_private_key):
def test_valid_block(self, b, user_vk, user_sk):
# create valid transaction
input_valid = b.get_owned_ids(user_public_key).pop()
tx_valid = b.create_transaction(user_public_key, 'b', input_valid, 'd')
tx_valid_signed = b.sign_transaction(tx_valid, user_private_key)
input_valid = b.get_owned_ids(user_vk).pop()
tx_valid = b.create_transaction(user_vk, 'b', input_valid, 'd')
tx_valid_signed = b.sign_transaction(tx_valid, user_sk)
# create valid block
block = b.create_block([tx_valid_signed])
@ -410,47 +422,7 @@ class TestBlockValidation(object):
assert b.is_valid_block(block)
class TestBigchainCrypto(object):
PRIVATE_VALUE = 64328150571824492670917070117568709277186368319388887463636481841106388379832
PUBLIC_VALUE_X = 48388170575736684074633245566225141536152842355597159440179742847497614196929
PUBLIC_VALUE_Y = 65233479152484407841598798165960909560839872511163322973341535484598825150846
PRIVATE_VALUE_B58 = 'AaAp4xBavbe6VGeQF2mWdSKNM1r6HfR2Z1tAY6aUkwdq'
PUBLIC_VALUE_COMPRESSED_B58 = 'ifEi3UuTDT4CqUUKiS5omgeDodhu2aRFHVp6LoahbEVe'
def test_private_key_encode(self):
private_value_base58 = PrivateKey.encode(self.PRIVATE_VALUE)
assert private_value_base58 == self.PRIVATE_VALUE_B58
def test_private_key_decode(self):
private_value = PrivateKey.decode(self.PRIVATE_VALUE_B58)
assert private_value == self.PRIVATE_VALUE
def test_public_key_encode(self):
public_value_compressed_base58 = PublicKey.encode(self.PUBLIC_VALUE_X, self.PUBLIC_VALUE_Y)
assert public_value_compressed_base58 == self.PUBLIC_VALUE_COMPRESSED_B58
def test_public_key_decode(self):
public_value_x, public_value_y = PublicKey.decode(self.PUBLIC_VALUE_COMPRESSED_B58)
assert public_value_x == self.PUBLIC_VALUE_X
assert public_value_y == self.PUBLIC_VALUE_Y
def test_sign_verify(self):
message = 'Hello World!'
public_key = PublicKey(self.PUBLIC_VALUE_COMPRESSED_B58)
private_key = PrivateKey(self.PRIVATE_VALUE_B58)
assert public_key.verify(message, private_key.sign(message)) is True
def test_generate_key_pair(self):
private_value_base58, public_value_compressed_base58 = generate_key_pair()
assert PrivateKey.encode(
PrivateKey.decode(private_value_base58)) == private_value_base58
assert PublicKey.encode(
*PublicKey.decode(public_value_compressed_base58)) == public_value_compressed_base58
class TestBigchainVoter(object):
def test_valid_block_voting(self, b):
# create queue and voter
q_new_block = mp.Queue()
@ -485,15 +457,15 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is True
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
def test_invalid_block_voting(self, b, user_public_key):
def test_invalid_block_voting(self, b, user_vk):
# create queue and voter
q_new_block = mp.Queue()
voter = Voter(q_new_block)
# create transaction
transaction = b.create_transaction(b.me, user_public_key, None, 'CREATE')
transaction = b.create_transaction(b.me, user_vk, None, 'CREATE')
transaction_signed = b.sign_transaction(transaction, b.me_private)
genesis = b.create_genesis_block()
@ -526,7 +498,7 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is False
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
def test_vote_creation_valid(self, b):
# create valid block
@ -540,7 +512,7 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is True
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
def test_vote_creation_invalid(self, b):
# create valid block
@ -554,17 +526,16 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is False
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
class TestBigchainBlock(object):
def test_by_assignee(self, b, user_public_key):
def test_by_assignee(self, b, user_vk):
# create transactions and randomly assign them
transactions = mp.Queue()
count_assigned_to_me = 0
for i in range(100):
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
assignee = random.choice([b.me, 'aaa', 'bbb', 'ccc'])
if assignee == b.me:
count_assigned_to_me += 1
@ -583,13 +554,13 @@ class TestBigchainBlock(object):
# the queue minus 'stop'
assert block.q_tx_to_validate.qsize() - 1 == count_assigned_to_me
def test_validate_transactions(self, b, user_public_key):
def test_validate_transactions(self, b, user_vk):
# create transactions and randomly invalidate some of them by changing the hash
transactions = mp.Queue()
count_valid = 0
for i in range(100):
valid = random.choice([True, False])
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
tx = b.sign_transaction(tx, b.me_private)
if not valid:
tx['id'] = 'a' * 64
@ -608,11 +579,11 @@ class TestBigchainBlock(object):
assert block.q_tx_validated.qsize() - 1 == count_valid
assert block.q_tx_delete.qsize() - 1 == 100
def test_create_block(self, b, user_public_key):
def test_create_block(self, b, user_vk):
# create transactions
transactions = mp.Queue()
for i in range(100):
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
tx = b.sign_transaction(tx, b.me_private)
transactions.put(tx)
transactions.put('stop')
@ -626,12 +597,12 @@ class TestBigchainBlock(object):
# check if the number of valid transactions
assert block.q_block.qsize() - 1 == 1
def test_write_block(self, b, user_public_key):
def test_write_block(self, b, user_vk):
# create transactions
transactions = []
blocks = mp.Queue()
for i in range(100):
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
tx = b.sign_transaction(tx, b.me_private)
transactions.append(tx)
@ -656,14 +627,14 @@ class TestBigchainBlock(object):
# check if the number of blocks in bigchain increased
assert r.table('bigchain').count() == 2
def test_delete_transactions(self, b, user_public_key):
def test_delete_transactions(self, b, user_vk):
# make sure that there are no transactions in the backlog
r.table('backlog').delete().run(b.conn)
# create and write transactions to the backlog
transactions = mp.Queue()
for i in range(100):
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
tx = b.sign_transaction(tx, b.me_private)
b.write_transaction(tx)
transactions.put(tx['id'])
@ -684,13 +655,13 @@ class TestBigchainBlock(object):
# check if all transactions were deleted from the backlog
assert r.table('backlog').count() == 0
def test_bootstrap(self, b, user_public_key):
def test_bootstrap(self, b, user_vk):
# make sure that there are no transactions in the backlog
r.table('backlog').delete().run(b.conn)
# create and write transactions to the backlog
for i in range(100):
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
tx = b.sign_transaction(tx, b.me_private)
b.write_transaction(tx)
@ -703,7 +674,7 @@ class TestBigchainBlock(object):
# we should have gotten a queue with 100 results
assert initial_results.qsize() - 1 == 100
def test_start(self, b, user_public_key):
def test_start(self, b, user_vk):
# start with 100 transactions in the backlog and 100 in the changefeed
# make sure that there are no transactions in the backlog
@ -711,14 +682,14 @@ class TestBigchainBlock(object):
# create and write transactions to the backlog
for i in range(100):
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
tx = b.sign_transaction(tx, b.me_private)
b.write_transaction(tx)
# create 100 more transactions to emulate the changefeed
new_transactions = mp.Queue()
for i in range(100):
tx = b.create_transaction(b.me, user_public_key, None, 'CREATE')
tx = b.create_transaction(b.me, user_vk, None, 'CREATE')
tx = b.sign_transaction(tx, b.me_private)
b.write_transaction(tx)
new_transactions.put(tx)
@ -761,7 +732,6 @@ class TestBigchainBlock(object):
class TestMultipleInputs(object):
def test_transfer_transaction_multiple(self, b):
pass
@ -770,4 +740,3 @@ class TestMultipleInputs(object):
def test_get_spent(self, b):
pass

View File

@ -6,7 +6,7 @@ import multiprocessing as mp
from bigchaindb import util
from bigchaindb.voter import Voter, BlockStream
from bigchaindb.crypto import PublicKey, generate_key_pair
from bigchaindb import crypto
class TestBigchainVoter(object):
@ -45,7 +45,7 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is True
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
def test_valid_block_voting_with_create_transaction(self, b):
q_new_block = mp.Queue()
@ -53,7 +53,7 @@ class TestBigchainVoter(object):
genesis = b.create_genesis_block()
# create a `CREATE` transaction
test_user_priv, test_user_pub = generate_key_pair()
test_user_priv, test_user_pub = crypto.generate_key_pair()
tx = b.create_transaction(b.me, test_user_pub, None, 'CREATE')
tx_signed = b.sign_transaction(tx, b.me_private)
assert b.is_valid_transaction(tx_signed)
@ -87,7 +87,7 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is True
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
def test_valid_block_voting_with_transfer_transactions(self, b):
q_new_block = mp.Queue()
@ -95,7 +95,7 @@ class TestBigchainVoter(object):
b.create_genesis_block()
# create a `CREATE` transaction
test_user_priv, test_user_pub = generate_key_pair()
test_user_priv, test_user_pub = crypto.generate_key_pair()
tx = b.create_transaction(b.me, test_user_pub, None, 'CREATE')
tx_signed = b.sign_transaction(tx, b.me_private)
assert b.is_valid_transaction(tx_signed)
@ -124,7 +124,7 @@ class TestBigchainVoter(object):
assert len(blocks[1]['votes']) == 1
# create a `TRANSFER` transaction
test_user2_priv, test_user2_pub = generate_key_pair()
test_user2_priv, test_user2_pub = crypto.generate_key_pair()
tx2 = b.create_transaction(test_user_pub, test_user2_pub, tx['id'], 'TRANSFER')
tx2_signed = b.sign_transaction(tx2, test_user_priv)
assert b.is_valid_transaction(tx2_signed)
@ -158,15 +158,15 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is True
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
def test_invalid_block_voting(self, b, user_public_key):
def test_invalid_block_voting(self, b, user_vk):
# create queue and voter
q_new_block = mp.Queue()
voter = Voter(q_new_block)
# create transaction
transaction = b.create_transaction(b.me, user_public_key, None, 'CREATE')
transaction = b.create_transaction(b.me, user_vk, None, 'CREATE')
transaction_signed = b.sign_transaction(transaction, b.me_private)
genesis = b.create_genesis_block()
@ -197,7 +197,7 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is False
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
def test_vote_creation_valid(self, b):
# create valid block
@ -211,7 +211,7 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is True
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
def test_vote_creation_invalid(self, b):
# create valid block
@ -225,7 +225,7 @@ class TestBigchainVoter(object):
assert vote['vote']['is_block_valid'] is False
assert vote['vote']['invalid_reason'] is None
assert vote['node_pubkey'] == b.me
assert PublicKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
assert crypto.VerifyingKey(b.me).verify(util.serialize(vote['vote']), vote['signature']) is True
def test_voter_considers_unvoted_blocks_when_single_node(self, b):
# simulate a voter going down in a single node environment
@ -301,7 +301,7 @@ class TestBlockStream(object):
def test_if_federation_size_is_greater_than_one_ignore_past_blocks(self, b):
for _ in range(5):
b.federation_nodes.append(generate_key_pair()[1])
b.federation_nodes.append(crypto.generate_key_pair()[1])
new_blocks = mp.Queue()
bs = BlockStream(new_blocks)
block_1 = b.create_block([])

View File

@ -57,9 +57,9 @@ def test_client_can_create_assets(mock_requests_post, client):
def test_client_can_transfer_assets(mock_requests_post, mock_bigchaindb_sign, client):
from bigchaindb import util
tx = client.transfer('a', 123)
tx = client.transfer(client.public_key, 123)
assert tx['transaction']['fulfillments'][0]['current_owners'][0] == client.public_key
assert tx['transaction']['conditions'][0]['new_owners'][0] == 'a'
assert tx['transaction']['conditions'][0]['new_owners'][0] == client.public_key
assert tx['transaction']['fulfillments'][0]['input'] == 123

View File

@ -1,12 +1,12 @@
from bigchaindb import util
def test_transform_create(b, user_private_key, user_public_key):
tx = util.create_tx(user_public_key, user_public_key, None, 'CREATE')
def test_transform_create(b, user_sk, user_vk):
tx = util.create_tx(user_vk, user_vk, None, 'CREATE')
tx = util.transform_create(tx)
tx = util.sign_tx(tx, b.me_private)
assert tx['transaction']['fulfillments'][0]['current_owners'][0] == b.me
assert tx['transaction']['conditions'][0]['new_owners'][0] == user_public_key
assert tx['transaction']['conditions'][0]['new_owners'][0] == user_vk
assert util.verify_signature(tx)

View File

@ -30,6 +30,6 @@ def app(request, node_config):
@pytest.fixture
def inputs(user_public_key):
conftest.inputs(user_public_key)
def inputs(user_vk):
conftest.inputs(user_vk)

View File

@ -9,8 +9,8 @@ TX_ENDPOINT = '/api/v1/transactions/'
@pytest.mark.usefixtures('inputs')
def test_get_transaction_endpoint(b, client, user_public_key):
input_tx = b.get_owned_ids(user_public_key).pop()
def test_get_transaction_endpoint(b, client, user_vk):
input_tx = b.get_owned_ids(user_vk).pop()
tx = b.get_transaction(input_tx)
res = client.get(TX_ENDPOINT + input_tx)
assert tx == res.json

13
tests/web/test_server.py Normal file
View File

@ -0,0 +1,13 @@
import copy
def test_settings(monkeypatch):
import bigchaindb
from bigchaindb.web import server
s = server.create_server(bigchaindb.config['server'])
# for whatever reason the value is wrapped in a list
# needs further investigation
assert s.cfg.bind[0] == bigchaindb.config['server']['bind']