Mirror of https://github.com/bigchaindb/bigchaindb.git, synced 2024-10-13 13:34:05 +00:00

Merge pull request #1359 from bigchaindb/feat/1245/clean-outdated-code
Delete outdated speed-tests folder

Commit 6ac564728e
.gitattributes (vendored): 4

@@ -1,11 +1,9 @@
-benchmarking-tests export-ignore
 deploy-cluster-aws export-ignore
 docs export-ignore
 ntools export-ignore
-speed-tests export-ignore
 tests export-ignore
 .gitattributes export-ignore
 .gitignore export-ignore
 .travis.yml export-ignore
 *.md export-ignore
 codecov.yml export-ignore
.gitignore (vendored): 2

@@ -71,8 +71,6 @@ deploy-cluster-aws/confiles/
 deploy-cluster-aws/client_confile
 deploy-cluster-aws/hostlist.py
 deploy-cluster-aws/ssh_key.py
-benchmarking-tests/hostlist.py
-benchmarking-tests/ssh_key.py

 # Ansible-specific files
 ntools/one-m/ansible/hosts
benchmarking-tests/README.md

@@ -1,3 +0,0 @@
# Benchmarking tests

This folder contains util files and test case folders to benchmark the performance of a BigchainDB cluster.
benchmarking-tests/benchmark_utils.py

@@ -1,154 +0,0 @@
import multiprocessing as mp
import uuid
import argparse
import csv
import time
import logging
import rethinkdb as r

from bigchaindb.common.transaction import Transaction

from bigchaindb import Bigchain
from bigchaindb.utils import ProcessGroup
from bigchaindb.commands import utils


SIZE_OF_FILLER = {'minimal': 0,
                  'small': 10**3,
                  'medium': 10**4,
                  'large': 10**5}


logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def create_write_transaction(tx_left, payload_filler):
    b = Bigchain()
    payload_dict = {}
    if payload_filler:
        payload_dict['filler'] = payload_filler
    while tx_left > 0:
        # Include a random uuid string in the payload
        # to prevent duplicate transactions
        # (i.e. transactions with the same hash)
        payload_dict['msg'] = str(uuid.uuid4())
        tx = Transaction.create([b.me], [b.me], payload=payload_dict)
        tx = tx.sign([b.me_private])
        b.write_transaction(tx)
        tx_left -= 1


def run_add_backlog(args):
    tx_left = args.num_transactions // mp.cpu_count()
    payload_filler = 'x' * SIZE_OF_FILLER[args.payload_size]
    workers = ProcessGroup(target=create_write_transaction,
                           args=(tx_left, payload_filler))
    workers.start()


def run_gather_metrics(args):
    # setup a rethinkdb connection
    conn = r.connect(args.bigchaindb_host, 28015, 'bigchain')

    # setup csv writer
    csv_file = open(args.csvfile, 'w')
    csv_writer = csv.writer(csv_file)

    # query for the number of transactions on the backlog
    num_transactions = r.table('backlog').count().run(conn)
    num_transactions_received = 0
    initial_time = None
    logger.info('Starting gathering metrics.')
    logger.info('{} transactions in the backlog'.format(num_transactions))
    logger.info('This process should exit automatically. '
                'If this does not happen you can exit at any time using Ctrl-C '
                'saving all the metrics gathered up to this point.')

    logger.info('\t{:<20} {:<20} {:<20} {:<20}'.format(
        'timestamp',
        'tx in block',
        'tx/s',
        '% complete'
    ))

    # listen to the changefeed
    try:
        for change in r.table('bigchain').changes().run(conn):
            # check only for new blocks
            if change['old_val'] is None:
                block_num_transactions = len(
                    change['new_val']['block']['transactions']
                )
                time_now = time.time()
                csv_writer.writerow(
                    [str(time_now), str(block_num_transactions)]
                )

                # log statistics
                if initial_time is None:
                    initial_time = time_now

                num_transactions_received += block_num_transactions
                elapsed_time = time_now - initial_time
                percent_complete = round(
                    (num_transactions_received / num_transactions) * 100
                )

                if elapsed_time != 0:
                    transactions_per_second = round(
                        num_transactions_received / elapsed_time
                    )
                else:
                    transactions_per_second = float('nan')

                logger.info('\t{:<20} {:<20} {:<20} {:<20}'.format(
                    time_now,
                    block_num_transactions,
                    transactions_per_second,
                    percent_complete
                ))

                if (num_transactions - num_transactions_received) == 0:
                    break
    except KeyboardInterrupt:
        logger.info('Interrupted. Exiting early...')
    finally:
        # close files
        csv_file.close()


def main():
    parser = argparse.ArgumentParser(description='BigchainDB benchmarking utils')
    subparsers = parser.add_subparsers(title='Commands', dest='command')

    # add transactions to backlog
    backlog_parser = subparsers.add_parser('add-backlog',
                                           help='Add transactions to the backlog')
    backlog_parser.add_argument('num_transactions',
                                metavar='num_transactions',
                                type=int, default=0,
                                help='Number of transactions to add to the backlog')
    backlog_parser.add_argument('-s', '--payload-size',
                                choices=SIZE_OF_FILLER.keys(),
                                default='minimal',
                                help='Payload size')

    # metrics
    metrics_parser = subparsers.add_parser('gather-metrics',
                                           help='Gather metrics to a csv file')

    metrics_parser.add_argument('-b', '--bigchaindb-host',
                                required=True,
                                help=('Bigchaindb node hostname to connect '
                                      'to gather cluster metrics'))

    metrics_parser.add_argument('-c', '--csvfile',
                                required=True,
                                help='Filename to save the metrics')

    utils.start(parser, globals())


if __name__ == '__main__':
    main()
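The gather-metrics subcommand above writes one [timestamp, transactions-in-block] row to the CSV file for every new block it sees. A minimal post-processing sketch for that output follows; the summarize_metrics helper and the metrics.csv filename are illustrative, assume only the two-column layout produced by csv_writer.writerow() above, and were never part of the repository.

```python
import csv


def summarize_metrics(csvfile='metrics.csv'):
    # Each row is assumed to be [timestamp, transactions_in_block], matching
    # what run_gather_metrics() above writes; this helper is a sketch only.
    with open(csvfile) as f:
        rows = [(float(ts), int(ntx)) for ts, ntx in csv.reader(f)]

    total_tx = sum(ntx for _, ntx in rows)
    # Elapsed time spans the first to the last observed block.
    elapsed = rows[-1][0] - rows[0][0] if len(rows) > 1 else 0

    if elapsed > 0:
        print('{} tx in {:.1f} s ({:.1f} tx/s)'.format(
            total_tx, elapsed, total_tx / elapsed))
    else:
        print('{} tx, not enough data points to compute a rate'.format(total_tx))


if __name__ == '__main__':
    summarize_metrics()
```

Running it next to a CSV produced by gather-metrics prints the total transaction count and the average throughput over the whole run.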
benchmarking-tests/fabfile.py (vendored): 46

@@ -1,46 +0,0 @@
from __future__ import with_statement, unicode_literals

from fabric.api import sudo, env, hosts
from fabric.api import task, parallel
from fabric.contrib.files import sed
from fabric.operations import run, put
from fabric.context_managers import settings

from hostlist import public_dns_names
from ssh_key import ssh_key_path

# Ignore known_hosts
# http://docs.fabfile.org/en/1.10/usage/env.html#disable-known-hosts
env.disable_known_hosts = True

# What remote servers should Fabric connect to? With what usernames?
env.user = 'ubuntu'
env.hosts = public_dns_names

# SSH key files to try when connecting:
# http://docs.fabfile.org/en/1.10/usage/env.html#key-filename
env.key_filename = ssh_key_path


@task
@parallel
def put_benchmark_utils():
    put('benchmark_utils.py')


@task
@parallel
def prepare_backlog(num_transactions=10000):
    run('python3 benchmark_utils.py add-backlog {}'.format(num_transactions))


@task
@parallel
def start_bigchaindb():
    run('screen -d -m bigchaindb start &', pty=False)


@task
@parallel
def kill_bigchaindb():
    run('killall bigchaindb')
@@ -1,20 +0,0 @@
# Transactions per second

Measure how many blocks per second are created on the _bigchain_ with a pre-filled backlog.

1. Deploy an AWS cluster: https://docs.bigchaindb.com/projects/server/en/latest/clusters-feds/aws-testing-cluster.html
2. Make a symbolic link to hostlist.py: `ln -s ../deploy-cluster-aws/hostlist.py .`
3. Make a symbolic link to bigchaindb.pem:
```bash
mkdir pem
cd pem
ln -s ../deploy-cluster-aws/pem/bigchaindb.pem .
```

Then:

```bash
fab put_benchmark_utils
fab prepare_backlog:<num txs per node> # wait for process to finish
fab start_bigchaindb
```
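The deleted instructions end at `fab start_bigchaindb`. A hypothetical follow-up Fabric task, sketched against the same Fabric 1.x API the deleted fabfile already imports, could run the gather-metrics subcommand of benchmark_utils.py on a node and pull the resulting CSV back. The task name fetch_metrics, the metrics.csv filename, and the assumption that RethinkDB answers on localhost on each node are all illustrative, not part of the original repository.

```python
from fabric.api import task
from fabric.operations import run, get


@task
def fetch_metrics(csvfile='metrics.csv'):
    # Run gather-metrics on the node itself; assumes RethinkDB is reachable
    # there as localhost and benchmark_utils.py was uploaded beforehand.
    run('python3 benchmark_utils.py gather-metrics -b localhost -c {}'.format(csvfile))
    # Copy the resulting CSV back, prefixing it with the node's hostname.
    get(remote_path=csvfile, local_path='%(host)s-' + csvfile)
```

It would be invoked like the other tasks, for example `fab fetch_metrics`.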
codecov.yml

@@ -29,8 +29,6 @@ coverage:
 - "docs/*"
 - "tests/*"
 - "bigchaindb/version.py"
-- "benchmarking-tests/*"
-- "speed-tests/*"
 - "ntools/*"
 - "k8s/*"
speed-tests/README.md

@@ -1,3 +0,0 @@
# Speed Tests

This folder contains tests related to the code performance of a single node.
speed-tests/speed_tests.py

@@ -1,97 +0,0 @@
import json
import time

import rapidjson
from line_profiler import LineProfiler

import bigchaindb

# BIG TODO: Adjust for new transaction model


def speedtest_validate_transaction():
    # create a transaction
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)

    # setup the profiler
    profiler = LineProfiler()
    profiler.enable_by_count()
    profiler.add_function(bigchaindb.Bigchain.validate_transaction)

    # validate_transaction 1000 times
    for i in range(1000):
        b.validate_transaction(tx_signed)

    profiler.print_stats()


def speedtest_serialize_block_json():
    # create a block
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)
    block = b.create_block([tx_signed] * 1000)

    time_start = time.time()
    for _ in range(1000):
        _ = json.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True)
    time_elapsed = time.time() - time_start

    print('speedtest_serialize_block_json: {} s'.format(time_elapsed))


def speedtest_serialize_block_rapidjson():
    # create a block
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)
    block = b.create_block([tx_signed] * 1000)

    time_start = time.time()
    for _ in range(1000):
        _ = rapidjson.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True)
    time_elapsed = time.time() - time_start

    print('speedtest_serialize_block_rapidjson: {} s'.format(time_elapsed))


def speedtest_deserialize_block_json():
    # create a block
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)
    block = b.create_block([tx_signed] * 1000)
    block_serialized = json.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True)

    time_start = time.time()
    for _ in range(1000):
        _ = json.loads(block_serialized)
    time_elapsed = time.time() - time_start

    print('speedtest_deserialize_block_json: {} s'.format(time_elapsed))


def speedtest_deserialize_block_rapidjson():
    # create a block
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)
    block = b.create_block([tx_signed] * 1000)
    block_serialized = rapidjson.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True)

    time_start = time.time()
    for _ in range(1000):
        _ = rapidjson.loads(block_serialized)
    time_elapsed = time.time() - time_start

    print('speedtest_deserialize_block_rapidjson: {} s'.format(time_elapsed))


if __name__ == '__main__':
    speedtest_validate_transaction()
    speedtest_serialize_block_json()
    speedtest_serialize_block_rapidjson()
    speedtest_deserialize_block_json()
    speedtest_deserialize_block_rapidjson()
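The module opens with the note "BIG TODO: Adjust for new transaction model". A sketch of the validate-transaction case rewritten against the Transaction class that benchmark_utils.py (earlier in this diff) already uses might look as follows; the create/sign/validate calls simply mirror that deleted file and are an assumption, not a confirmed current BigchainDB API.

```python
from line_profiler import LineProfiler

from bigchaindb import Bigchain
from bigchaindb.common.transaction import Transaction


def speedtest_validate_transaction_new_model():
    # Build one signed transaction the same way the deleted benchmark_utils.py
    # does; this mirrors that file rather than a verified current API.
    b = Bigchain()
    tx = Transaction.create([b.me], [b.me], payload={'msg': 'speed test'})
    tx_signed = tx.sign([b.me_private])

    # Profile validate_transaction line by line over 1000 runs.
    profiler = LineProfiler()
    profiler.enable_by_count()
    profiler.add_function(Bigchain.validate_transaction)

    for _ in range(1000):
        b.validate_transaction(tx_signed)

    profiler.print_stats()
```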