mirror of
https://github.com/bigchaindb/bigchaindb.git
synced 2024-10-13 13:34:05 +00:00
commit
ef93f597f7
1
.gitignore
vendored
1
.gitignore
vendored
@ -73,3 +73,4 @@ deploy-cluster-aws/conf/rethinkdb.conf
|
||||
deploy-cluster-aws/hostlist.py
|
||||
deploy-cluster-aws/confiles/
|
||||
deploy-cluster-aws/client_confile
|
||||
benchmarking-tests/hostlist.py
|
||||
|
3
benchmarking-tests/README.md
Normal file
3
benchmarking-tests/README.md
Normal file
@ -0,0 +1,3 @@
|
||||
# Benchmarking tests
|
||||
|
||||
This folder contains utility files and test-case folders used to benchmark the performance of a BigchainDB federation.
|
60
benchmarking-tests/benchmark_utils.py
Normal file
60
benchmarking-tests/benchmark_utils.py
Normal file
@ -0,0 +1,60 @@
|
||||
import multiprocessing as mp
|
||||
import uuid
|
||||
import json
|
||||
import argparse
|
||||
|
||||
from os.path import expanduser
|
||||
|
||||
from bigchaindb import Bigchain
|
||||
from bigchaindb.util import ProcessGroup
|
||||
from bigchaindb.commands import utils
|
||||
|
||||
|
||||
def create_write_transaction(tx_left):
    """Submit ``tx_left`` CREATE transactions to the federation backlog.

    Each payload carries a fresh UUID so no two transactions share the
    same hash (duplicate hashes would be rejected as duplicates).
    """
    bigchain = Bigchain()
    for _ in range(tx_left):
        # use uuid to prevent duplicate transactions (transactions with the same hash)
        tx = bigchain.create_transaction(bigchain.me, bigchain.me, None, 'CREATE',
                                         payload={'msg': str(uuid.uuid4())})
        signed_tx = bigchain.sign_transaction(tx, bigchain.me_private)
        bigchain.write_transaction(signed_tx)
|
||||
|
||||
|
||||
def run_add_backlog(args):
    """Fan the requested transaction count out over one worker per CPU core."""
    # Split the total evenly; each worker writes its own share.
    per_worker = args.num_transactions // mp.cpu_count()
    group = ProcessGroup(target=create_write_transaction, args=(per_worker,))
    group.start()
|
||||
|
||||
|
||||
def run_set_statsd_host(args):
    """Point the local BigchainDB config file at the given statsd server.

    Reads ``~/.bigchaindb``, rewrites ``statsd.host``, and saves it back.
    """
    config_path = expanduser('~') + '/.bigchaindb'
    with open(config_path, 'r') as f:
        conf = json.load(f)

    conf['statsd']['host'] = args.statsd_host
    with open(config_path, 'w') as f:
        json.dump(conf, f)
|
||||
|
||||
|
||||
def main():
    """Parse the command line and dispatch to the matching ``run_*`` handler.

    ``utils.start`` resolves the chosen subcommand name to a ``run_<name>``
    function in this module's globals and invokes it with the parsed args.
    """
    parser = argparse.ArgumentParser(description='BigchainDB benchmarking utils')
    subparsers = parser.add_subparsers(title='Commands', dest='command')

    # add transactions to backlog
    backlog_parser = subparsers.add_parser('add-backlog',
                                           help='Add transactions to the backlog')
    # nargs='?' makes the positional optional so the declared default (0)
    # actually applies; argparse ignores `default=` on required positionals.
    backlog_parser.add_argument('num_transactions', metavar='num_transactions',
                                type=int, default=0, nargs='?',
                                help='Number of transactions to add to the backlog')

    # set statsd host
    statsd_parser = subparsers.add_parser('set-statsd-host',
                                          help='Set statsd host')
    # Same fix: without nargs='?' the 'localhost' default was inert.
    statsd_parser.add_argument('statsd_host', metavar='statsd_host',
                               default='localhost', nargs='?',
                               help='Hostname of the statsd server')

    utils.start(parser, globals())


if __name__ == '__main__':
    main()
|
||||
|
53
benchmarking-tests/fabfile.py
vendored
Normal file
53
benchmarking-tests/fabfile.py
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
from __future__ import with_statement, unicode_literals
|
||||
|
||||
from fabric.api import sudo, env, hosts
|
||||
from fabric.api import task, parallel
|
||||
from fabric.contrib.files import sed
|
||||
from fabric.operations import run, put
|
||||
from fabric.context_managers import settings
|
||||
|
||||
from hostlist import public_dns_names
|
||||
|
||||
# Ignore known_hosts so freshly provisioned hosts don't fail host-key checks
# http://docs.fabfile.org/en/1.10/usage/env.html#disable-known-hosts
env.disable_known_hosts = True

# What remote servers should Fabric connect to? With what usernames?
# `public_dns_names` is generated by the AWS deploy scripts (see hostlist.py).
env.user = 'ubuntu'
env.hosts = public_dns_names

# SSH key files to try when connecting:
# http://docs.fabfile.org/en/1.10/usage/env.html#key-filename
env.key_filename = 'pem/bigchaindb.pem'
|
||||
|
||||
|
||||
@task
@parallel
def put_benchmark_utils():
    """Upload benchmark_utils.py to the home directory of every host."""
    local_script = 'benchmark_utils.py'
    put(local_script)
|
||||
|
||||
|
||||
@task
@parallel
def set_statsd_host(statsd_host='localhost'):
    """Rewrite each node's statsd host, then display the resulting config."""
    command = 'python3 benchmark_utils.py set-statsd-host {}'.format(statsd_host)
    run(command)
    print('update configuration')
    run('bigchaindb show-config')
|
||||
|
||||
|
||||
@task
@parallel
def prepare_backlog(num_transactions=10000):
    """Pre-fill the backlog with ``num_transactions`` transactions per node."""
    command = 'python3 benchmark_utils.py add-backlog {}'.format(num_transactions)
    run(command)
|
||||
|
||||
|
||||
@task
@parallel
def start_bigchaindb():
    """Launch bigchaindb on every host inside a detached screen session."""
    # pty=False so the remote process survives after Fabric disconnects
    run('screen -d -m bigchaindb start &', pty=False)
|
||||
|
||||
|
||||
@task
@parallel
def kill_bigchaindb():
    """Stop every running bigchaindb process on the hosts."""
    stop_command = 'killall bigchaindb'
    run(stop_command)
|
21
benchmarking-tests/test1/README.md
Normal file
21
benchmarking-tests/test1/README.md
Normal file
@ -0,0 +1,21 @@
|
||||
# Transactions per second
|
||||
|
||||
Measure how many blocks per second are created on the _bigchain_ with a pre-filled backlog.
|
||||
|
||||
1. Deploy an AWS cluster: http://bigchaindb.readthedocs.io/en/latest/deploy-on-aws.html
|
||||
2. Make a symbolic link to hostlist.py: `ln -s ../deploy-cluster-aws/hostlist.py .`
|
||||
3. Make a symbolic link to bigchaindb.pem:
|
||||
```bash
|
||||
mkdir pem
|
||||
cd pem
|
||||
ln -s ../deploy-cluster-aws/pem/bigchaindb.pem .
|
||||
```
|
||||
|
||||
Then:
|
||||
|
||||
```bash
|
||||
fab put_benchmark_utils
|
||||
fab set_statsd_host:<hostname of the statsd server>
|
||||
fab prepare_backlog:<num txs per node> # wait for process to finish
|
||||
fab start_bigchaindb
|
||||
```
|
Loading…
x
Reference in New Issue
Block a user