Mirror of https://github.com/bigchaindb/bigchaindb.git, synced 2024-10-13 13:34:05 +00:00
Merge remote-tracking branch 'upstream/master' into bug/437/pretty-msg-drop-nonexistent-db
This commit is contained in: commit 8aac5c7e64
.gitattributes (vendored): 4 changes
@@ -1,11 +1,9 @@
-benchmarking-tests export-ignore
 deploy-cluster-aws export-ignore
 docs export-ignore
 ntools export-ignore
-speed-tests export-ignore
 tests export-ignore
 .gitattributes export-ignore
 .gitignore export-ignore
 .travis.yml export-ignore
 *.md export-ignore
-codecov.yml export-ignore
+codecov.yml export-ignore
.gitignore (vendored): 2 changes
@@ -71,8 +71,6 @@ deploy-cluster-aws/confiles/
 deploy-cluster-aws/client_confile
 deploy-cluster-aws/hostlist.py
 deploy-cluster-aws/ssh_key.py
-benchmarking-tests/hostlist.py
-benchmarking-tests/ssh_key.py

 # Ansible-specific files
 ntools/one-m/ansible/hosts
@@ -51,3 +51,15 @@ END BLOCK
(END OF EMAIL)

The next step is to wait for them to copy that comment into the comments of the indicated pull request. Once they do so, it's safe to merge the pull request.

## How to Handle CLA Agreement Emails with No Associated Pull Request

Reply with an email like this:

Hi [First Name],

Today I got an email (copied below) to tell me that you agreed to the BigchainDB Contributor License Agreement. Did you intend to do that?

If no, then you can ignore this email.

If yes, then there's another step to connect your email address with your GitHub account. To do that, you must first create a pull request in one of the BigchainDB repositories on GitHub. Once you've done that, please reply to this email with a link to the pull request. Then I'll send you a special block of text to paste into the comments on that pull request.
@@ -1,3 +0,0 @@
# Benchmarking tests

This folder contains util files and test case folders to benchmark the performance of a BigchainDB cluster.
@@ -1,154 +0,0 @@
import multiprocessing as mp
import uuid
import argparse
import csv
import time
import logging
import rethinkdb as r

from bigchaindb.common.transaction import Transaction

from bigchaindb import Bigchain
from bigchaindb.utils import ProcessGroup
from bigchaindb.commands import utils


SIZE_OF_FILLER = {'minimal': 0,
                  'small': 10**3,
                  'medium': 10**4,
                  'large': 10**5}


logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def create_write_transaction(tx_left, payload_filler):
    b = Bigchain()
    payload_dict = {}
    if payload_filler:
        payload_dict['filler'] = payload_filler
    while tx_left > 0:
        # Include a random uuid string in the payload
        # to prevent duplicate transactions
        # (i.e. transactions with the same hash)
        payload_dict['msg'] = str(uuid.uuid4())
        tx = Transaction.create([b.me], [b.me], payload=payload_dict)
        tx = tx.sign([b.me_private])
        b.write_transaction(tx)
        tx_left -= 1


def run_add_backlog(args):
    tx_left = args.num_transactions // mp.cpu_count()
    payload_filler = 'x' * SIZE_OF_FILLER[args.payload_size]
    workers = ProcessGroup(target=create_write_transaction,
                           args=(tx_left, payload_filler))
    workers.start()


def run_gather_metrics(args):
    # setup a rethinkdb connection
    conn = r.connect(args.bigchaindb_host, 28015, 'bigchain')

    # setup csv writer
    csv_file = open(args.csvfile, 'w')
    csv_writer = csv.writer(csv_file)

    # query for the number of transactions on the backlog
    num_transactions = r.table('backlog').count().run(conn)
    num_transactions_received = 0
    initial_time = None
    logger.info('Starting gathering metrics.')
    logger.info('{} transactions in the backlog'.format(num_transactions))
    logger.info('This process should exit automatically. '
                'If this does not happen you can exit at any time using Ctrl-C '
                'saving all the metrics gathered up to this point.')

    logger.info('\t{:<20} {:<20} {:<20} {:<20}'.format(
        'timestamp',
        'tx in block',
        'tx/s',
        '% complete'
    ))

    # listen to the changefeed
    try:
        for change in r.table('bigchain').changes().run(conn):
            # check only for new blocks
            if change['old_val'] is None:
                block_num_transactions = len(
                    change['new_val']['block']['transactions']
                )
                time_now = time.time()
                csv_writer.writerow(
                    [str(time_now), str(block_num_transactions)]
                )

                # log statistics
                if initial_time is None:
                    initial_time = time_now

                num_transactions_received += block_num_transactions
                elapsed_time = time_now - initial_time
                percent_complete = round(
                    (num_transactions_received / num_transactions) * 100
                )

                if elapsed_time != 0:
                    transactions_per_second = round(
                        num_transactions_received / elapsed_time
                    )
                else:
                    transactions_per_second = float('nan')

                logger.info('\t{:<20} {:<20} {:<20} {:<20}'.format(
                    time_now,
                    block_num_transactions,
                    transactions_per_second,
                    percent_complete
                ))

                if (num_transactions - num_transactions_received) == 0:
                    break
    except KeyboardInterrupt:
        logger.info('Interrupted. Exiting early...')
    finally:
        # close files
        csv_file.close()


def main():
    parser = argparse.ArgumentParser(description='BigchainDB benchmarking utils')
    subparsers = parser.add_subparsers(title='Commands', dest='command')

    # add transactions to backlog
    backlog_parser = subparsers.add_parser('add-backlog',
                                           help='Add transactions to the backlog')
    backlog_parser.add_argument('num_transactions',
                                metavar='num_transactions',
                                type=int, default=0,
                                help='Number of transactions to add to the backlog')
    backlog_parser.add_argument('-s', '--payload-size',
                                choices=SIZE_OF_FILLER.keys(),
                                default='minimal',
                                help='Payload size')

    # metrics
    metrics_parser = subparsers.add_parser('gather-metrics',
                                           help='Gather metrics to a csv file')

    metrics_parser.add_argument('-b', '--bigchaindb-host',
                                required=True,
                                help=('Bigchaindb node hostname to connect '
                                      'to gather cluster metrics'))

    metrics_parser.add_argument('-c', '--csvfile',
                                required=True,
                                help='Filename to save the metrics')

    utils.start(parser, globals())


if __name__ == '__main__':
    main()
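For context, the deleted script was a thin CLI wrapper: `utils.start(parser, globals())` dispatches each subcommand to the matching `run_*` function defined above. A sketch of a typical local invocation on a node (the transaction count and payload size are illustrative, not taken from the diff):

```bash
# add 10000 CREATE transactions to the backlog, split across one worker per CPU core;
# --payload-size maps to SIZE_OF_FILLER (minimal=0, small=10**3, medium=10**4, large=10**5 bytes of filler)
python3 benchmark_utils.py add-backlog 10000 --payload-size small
```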
benchmarking-tests/fabfile.py (vendored): 46 changes
@@ -1,46 +0,0 @@
from __future__ import with_statement, unicode_literals

from fabric.api import sudo, env, hosts
from fabric.api import task, parallel
from fabric.contrib.files import sed
from fabric.operations import run, put
from fabric.context_managers import settings

from hostlist import public_dns_names
from ssh_key import ssh_key_path

# Ignore known_hosts
# http://docs.fabfile.org/en/1.10/usage/env.html#disable-known-hosts
env.disable_known_hosts = True

# What remote servers should Fabric connect to? With what usernames?
env.user = 'ubuntu'
env.hosts = public_dns_names

# SSH key files to try when connecting:
# http://docs.fabfile.org/en/1.10/usage/env.html#key-filename
env.key_filename = ssh_key_path


@task
@parallel
def put_benchmark_utils():
    put('benchmark_utils.py')


@task
@parallel
def prepare_backlog(num_transactions=10000):
    run('python3 benchmark_utils.py add-backlog {}'.format(num_transactions))


@task
@parallel
def start_bigchaindb():
    run('screen -d -m bigchaindb start &', pty=False)


@task
@parallel
def kill_bigchaindb():
    run('killall bigchaindb')
@@ -1,20 +0,0 @@
# Transactions per second

Measure how many blocks per second are created on the _bigchain_ with a pre-filled backlog.

1. Deploy an AWS cluster: https://docs.bigchaindb.com/projects/server/en/latest/clusters-feds/aws-testing-cluster.html
2. Make a symbolic link to hostlist.py: `ln -s ../deploy-cluster-aws/hostlist.py .`
3. Make a symbolic link to bigchaindb.pem:
```bash
mkdir pem
cd pem
ln -s ../deploy-cluster-aws/pem/bigchaindb.pem .
```

Then:

```bash
fab put_benchmark_utils
fab prepare_backlog:<num txs per node> # wait for process to finish
fab start_bigchaindb
```
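The `<num txs per node>` placeholder uses Fabric's `task:argument` syntax, so a concrete run against a deployed cluster might have looked like the sketch below (the backlog size matches the fabfile default; the hostname is a placeholder). The final step uses the `gather-metrics` subcommand of the deleted benchmark_utils.py to record per-block throughput, which the README leaves implicit:

```bash
fab put_benchmark_utils          # copy benchmark_utils.py to every host in hostlist.py
fab prepare_backlog:10000        # wait for the backlog to be filled on each node
fab start_bigchaindb             # start bigchaindb on all nodes in a detached screen

# from any machine that can reach a node's RethinkDB port (28015):
python3 benchmark_utils.py gather-metrics -b <public DNS of one node> -c tps.csv
```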
@@ -29,8 +29,6 @@ coverage:
   - "docs/*"
   - "tests/*"
   - "bigchaindb/version.py"
-  - "benchmarking-tests/*"
-  - "speed-tests/*"
   - "ntools/*"
   - "k8s/*"

@@ -6,10 +6,10 @@ Command Line Interface
     :special-members: __init__


-:mod:`bigchaindb.commands.bigchain`
------------------------------------
+:mod:`bigchaindb.commands.bigchaindb`
+--------------------------------------

-.. automodule:: bigchaindb.commands.bigchain
+.. automodule:: bigchaindb.commands.bigchaindb


 :mod:`bigchaindb.commands.utils`
setup.py: 2 changes
@@ -117,7 +117,7 @@ setup(

     entry_points={
         'console_scripts': [
-            'bigchaindb=bigchaindb.commands.bigchain:main'
+            'bigchaindb=bigchaindb.commands.bigchaindb:main'
         ],
     },
     install_requires=install_requires,
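The only functional change here is the console-script target: after installation the `bigchaindb` command resolves to `main()` in `bigchaindb.commands.bigchaindb` rather than the old `bigchaindb.commands.bigchain` module. A quick, illustrative way to confirm the wiring after an editable install (not part of the diff):

```bash
pip install -e .
python -c "from bigchaindb.commands.bigchaindb import main; print(main)"
bigchaindb --help    # argparse help served by the renamed module
```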
@@ -1,3 +0,0 @@
# Speed Tests

This folder contains tests related to the code performance of a single node.
@@ -1,97 +0,0 @@
import json
import time

import rapidjson
from line_profiler import LineProfiler

import bigchaindb

# BIG TODO: Adjust for new transaction model


def speedtest_validate_transaction():
    # create a transaction
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)

    # setup the profiler
    profiler = LineProfiler()
    profiler.enable_by_count()
    profiler.add_function(bigchaindb.Bigchain.validate_transaction)

    # validate_transaction 1000 times
    for i in range(1000):
        b.validate_transaction(tx_signed)

    profiler.print_stats()


def speedtest_serialize_block_json():
    # create a block
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)
    block = b.create_block([tx_signed] * 1000)

    time_start = time.time()
    for _ in range(1000):
        _ = json.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True)
    time_elapsed = time.time() - time_start

    print('speedtest_serialize_block_json: {} s'.format(time_elapsed))


def speedtest_serialize_block_rapidjson():
    # create a block
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)
    block = b.create_block([tx_signed] * 1000)

    time_start = time.time()
    for _ in range(1000):
        _ = rapidjson.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True)
    time_elapsed = time.time() - time_start

    print('speedtest_serialize_block_rapidjson: {} s'.format(time_elapsed))


def speedtest_deserialize_block_json():
    # create a block
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)
    block = b.create_block([tx_signed] * 1000)
    block_serialized = json.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True)

    time_start = time.time()
    for _ in range(1000):
        _ = json.loads(block_serialized)
    time_elapsed = time.time() - time_start

    print('speedtest_deserialize_block_json: {} s'.format(time_elapsed))


def speedtest_deserialize_block_rapidjson():
    # create a block
    b = bigchaindb.Bigchain()
    tx = b.create_transaction(b.me, b.me, None, 'CREATE')
    tx_signed = b.sign_transaction(tx, b.me_private)
    block = b.create_block([tx_signed] * 1000)
    block_serialized = rapidjson.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True)

    time_start = time.time()
    for _ in range(1000):
        _ = rapidjson.loads(block_serialized)
    time_elapsed = time.time() - time_start

    print('speedtest_deserialize_block_rapidjson: {} s'.format(time_elapsed))


if __name__ == '__main__':
    speedtest_validate_transaction()
    speedtest_serialize_block_json()
    speedtest_serialize_block_rapidjson()
    speedtest_deserialize_block_json()
    speedtest_deserialize_block_rapidjson()
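These micro-benchmarks were written to be run directly; the `__main__` block above executes all five in order. An illustrative way to run them, assuming the file is saved as speed_tests.py and that `line_profiler` and `python-rapidjson` (the usual PyPI packages behind the `line_profiler` and `rapidjson` imports, stated here as an assumption) are installed:

```bash
pip install line_profiler python-rapidjson
python3 speed_tests.py
```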
@@ -5,8 +5,8 @@ import pytest

 @pytest.fixture
 def mock_run_configure(monkeypatch):
-    from bigchaindb.commands import bigchain
-    monkeypatch.setattr(bigchain, 'run_configure', lambda *args, **kwargs: None)
+    from bigchaindb.commands import bigchaindb
+    monkeypatch.setattr(bigchaindb, 'run_configure', lambda *args, **kwargs: None)


 @pytest.fixture
@@ -17,8 +17,8 @@ def mock_write_config(monkeypatch):

 @pytest.fixture
 def mock_db_init_with_existing_db(monkeypatch):
-    from bigchaindb.commands import bigchain
-    monkeypatch.setattr(bigchain, '_run_init', lambda: None)
+    from bigchaindb.commands import bigchaindb
+    monkeypatch.setattr(bigchaindb, '_run_init', lambda: None)


 @pytest.fixture
@@ -11,7 +11,7 @@ def test_bigchain_run_start_with_rethinkdb(mock_start_rethinkdb,
                                            mock_processes_start,
                                            mock_db_init_with_existing_db,
                                            mocked_setup_logging):
-    from bigchaindb.commands.bigchain import run_start
+    from bigchaindb.commands.bigchaindb import run_start
     args = Namespace(start_rethinkdb=True, allow_temp_keypair=False, config=None, yes=True)
     run_start(args)

@@ -39,7 +39,7 @@ def test_start_rethinkdb_exits_when_cannot_start(mock_popen):

 @patch('rethinkdb.ast.Table.reconfigure')
 def test_set_shards(mock_reconfigure, monkeypatch, b):
-    from bigchaindb.commands.bigchain import run_set_shards
+    from bigchaindb.commands.bigchaindb import run_set_shards

     # this will mock the call to retrieve the database config
     # we will set it to return one replica
@@ -62,7 +62,7 @@ def test_set_shards(mock_reconfigure, monkeypatch, b):


 def test_set_shards_raises_exception(monkeypatch, b):
-    from bigchaindb.commands.bigchain import run_set_shards
+    from bigchaindb.commands.bigchaindb import run_set_shards

     # test that we are correctly catching the exception
     def mock_raise(*args, **kwargs):
@@ -82,7 +82,7 @@ def test_set_shards_raises_exception(monkeypatch, b):

 @patch('rethinkdb.ast.Table.reconfigure')
 def test_set_replicas(mock_reconfigure, monkeypatch, b):
-    from bigchaindb.commands.bigchain import run_set_replicas
+    from bigchaindb.commands.bigchaindb import run_set_replicas

     # this will mock the call to retrieve the database config
     # we will set it to return two shards
@@ -105,7 +105,7 @@ def test_set_replicas(mock_reconfigure, monkeypatch, b):


 def test_set_replicas_raises_exception(monkeypatch, b):
-    from bigchaindb.commands.bigchain import run_set_replicas
+    from bigchaindb.commands.bigchaindb import run_set_replicas

     # test that we are correctly catching the exception
     def mock_raise(*args, **kwargs):
@@ -8,7 +8,7 @@ import pytest

 def test_make_sure_we_dont_remove_any_command():
     # thanks to: http://stackoverflow.com/a/18161115/597097
-    from bigchaindb.commands.bigchain import create_parser
+    from bigchaindb.commands.bigchaindb import create_parser

     parser = create_parser()

@@ -27,7 +27,7 @@ def test_make_sure_we_dont_remove_any_command():

 @patch('bigchaindb.commands.utils.start')
 def test_main_entrypoint(mock_start):
-    from bigchaindb.commands.bigchain import main
+    from bigchaindb.commands.bigchaindb import main
     main()

     assert mock_start.called
@@ -37,7 +37,7 @@ def test_bigchain_run_start(mock_run_configure,
                             mock_processes_start,
                             mock_db_init_with_existing_db,
                             mocked_setup_logging):
-    from bigchaindb.commands.bigchain import run_start
+    from bigchaindb.commands.bigchaindb import run_start
     args = Namespace(start_rethinkdb=False, allow_temp_keypair=False, config=None, yes=True)
     run_start(args)
     mocked_setup_logging.assert_called_once_with(user_log_config={})
@@ -48,7 +48,7 @@ def test_bigchain_run_start_assume_yes_create_default_config(
         monkeypatch, mock_processes_start, mock_generate_key_pair,
         mock_db_init_with_existing_db, mocked_setup_logging):
     import bigchaindb
-    from bigchaindb.commands.bigchain import run_start
+    from bigchaindb.commands.bigchaindb import run_start
     from bigchaindb import config_utils

     value = {}
@@ -76,7 +76,7 @@ def test_bigchain_run_start_assume_yes_create_default_config(
 @pytest.mark.usefixtures('ignore_local_config_file')
 def test_bigchain_show_config(capsys):
     from bigchaindb import config
-    from bigchaindb.commands.bigchain import run_show_config
+    from bigchaindb.commands.bigchaindb import run_show_config

     args = Namespace(config=None)
     _, _ = capsys.readouterr()
@@ -89,7 +89,7 @@ def test_bigchain_show_config(capsys):

 def test_bigchain_export_my_pubkey_when_pubkey_set(capsys, monkeypatch):
     from bigchaindb import config
-    from bigchaindb.commands.bigchain import run_export_my_pubkey
+    from bigchaindb.commands.bigchaindb import run_export_my_pubkey

     args = Namespace(config='dummy')
     # so in run_export_my_pubkey(args) below,
@@ -108,7 +108,7 @@ def test_bigchain_export_my_pubkey_when_pubkey_set(capsys, monkeypatch):

 def test_bigchain_export_my_pubkey_when_pubkey_not_set(monkeypatch):
     from bigchaindb import config
-    from bigchaindb.commands.bigchain import run_export_my_pubkey
+    from bigchaindb.commands.bigchaindb import run_export_my_pubkey

     args = Namespace(config='dummy')
     monkeypatch.setitem(config['keypair'], 'public', None)
@@ -125,14 +125,14 @@ def test_bigchain_export_my_pubkey_when_pubkey_not_set(monkeypatch):


 def test_bigchain_run_init_when_db_exists(mock_db_init_with_existing_db):
-    from bigchaindb.commands.bigchain import run_init
+    from bigchaindb.commands.bigchaindb import run_init
     args = Namespace(config=None)
     run_init(args)


 @patch('bigchaindb.backend.schema.drop_database')
 def test_drop_db_when_assumed_yes(mock_db_drop):
-    from bigchaindb.commands.bigchain import run_drop
+    from bigchaindb.commands.bigchaindb import run_drop
     args = Namespace(config=None, yes=True)

     run_drop(args)
@@ -141,9 +141,9 @@ def test_drop_db_when_assumed_yes(mock_db_drop):

 @patch('bigchaindb.backend.schema.drop_database')
 def test_drop_db_when_interactive_yes(mock_db_drop, monkeypatch):
-    from bigchaindb.commands.bigchain import run_drop
+    from bigchaindb.commands.bigchaindb import run_drop
     args = Namespace(config=None, yes=False)
-    monkeypatch.setattr('bigchaindb.commands.bigchain.input_on_stderr', lambda x: 'y')
+    monkeypatch.setattr('bigchaindb.commands.bigchaindb.input_on_stderr', lambda x: 'y')

     run_drop(args)
     assert mock_db_drop.called
@@ -165,16 +165,16 @@ def test_drop_db_when_db_does_not_exist(mock_db_drop, capsys):

 @patch('bigchaindb.backend.schema.drop_database')
 def test_drop_db_does_not_drop_when_interactive_no(mock_db_drop, monkeypatch):
-    from bigchaindb.commands.bigchain import run_drop
+    from bigchaindb.commands.bigchaindb import run_drop
     args = Namespace(config=None, yes=False)
-    monkeypatch.setattr('bigchaindb.commands.bigchain.input_on_stderr', lambda x: 'n')
+    monkeypatch.setattr('bigchaindb.commands.bigchaindb.input_on_stderr', lambda x: 'n')

     run_drop(args)
     assert not mock_db_drop.called


 def test_run_configure_when_config_exists_and_skipping(monkeypatch):
-    from bigchaindb.commands.bigchain import run_configure
+    from bigchaindb.commands.bigchaindb import run_configure
     monkeypatch.setattr('os.path.exists', lambda path: True)
     args = Namespace(config='foo', yes=True)
     return_value = run_configure(args, skip_if_exists=True)
@@ -188,7 +188,7 @@ def test_run_configure_when_config_does_not_exist(monkeypatch,
                                                   mock_write_config,
                                                   mock_generate_key_pair,
                                                   mock_bigchaindb_backup_config):
-    from bigchaindb.commands.bigchain import run_configure
+    from bigchaindb.commands.bigchaindb import run_configure
     monkeypatch.setattr('os.path.exists', lambda path: False)
     monkeypatch.setattr('builtins.input', lambda: '\n')
     args = Namespace(config='foo', backend='rethinkdb', yes=True)
@@ -205,7 +205,7 @@ def test_run_configure_when_config_does_exist(monkeypatch,
     def mock_write_config(newconfig, filename=None):
         value['return'] = newconfig

-    from bigchaindb.commands.bigchain import run_configure
+    from bigchaindb.commands.bigchaindb import run_configure
     monkeypatch.setattr('os.path.exists', lambda path: True)
     monkeypatch.setattr('builtins.input', lambda: '\n')
     monkeypatch.setattr('bigchaindb.config_utils.write_config', mock_write_config)
@@ -221,7 +221,7 @@ def test_run_configure_when_config_does_exist(monkeypatch,
 ))
 def test_run_configure_with_backend(backend, monkeypatch, mock_write_config):
     import bigchaindb
-    from bigchaindb.commands.bigchain import run_configure
+    from bigchaindb.commands.bigchaindb import run_configure

     value = {}

@@ -252,7 +252,7 @@ def test_allow_temp_keypair_generates_one_on_the_fly(
         mock_gen_keypair, mock_processes_start,
         mock_db_init_with_existing_db, mocked_setup_logging):
     import bigchaindb
-    from bigchaindb.commands.bigchain import run_start
+    from bigchaindb.commands.bigchaindb import run_start

     bigchaindb.config['keypair'] = {'private': None, 'public': None}

@@ -272,7 +272,7 @@ def test_allow_temp_keypair_doesnt_override_if_keypair_found(mock_gen_keypair,
                                                              mock_db_init_with_existing_db,
                                                              mocked_setup_logging):
     import bigchaindb
-    from bigchaindb.commands.bigchain import run_start
+    from bigchaindb.commands.bigchaindb import run_start

     # Preconditions for the test
     original_private_key = bigchaindb.config['keypair']['private']
@@ -293,7 +293,7 @@ def test_run_start_when_db_already_exists(mocker,
                                           monkeypatch,
                                           run_start_args,
                                           mocked_setup_logging):
-    from bigchaindb.commands.bigchain import run_start
+    from bigchaindb.commands.bigchaindb import run_start
     from bigchaindb.common.exceptions import DatabaseAlreadyExists
     mocked_start = mocker.patch('bigchaindb.processes.start')

@@ -301,7 +301,7 @@ def test_run_start_when_db_already_exists(mocker,
         raise DatabaseAlreadyExists()

     monkeypatch.setattr(
-        'bigchaindb.commands.bigchain._run_init', mock_run_init)
+        'bigchaindb.commands.bigchaindb._run_init', mock_run_init)
     run_start(run_start_args)
     mocked_setup_logging.assert_called_once_with(user_log_config={})
     assert mocked_start.called
@@ -311,7 +311,7 @@ def test_run_start_when_keypair_not_found(mocker,
                                           monkeypatch,
                                           run_start_args,
                                           mocked_setup_logging):
-    from bigchaindb.commands.bigchain import run_start
+    from bigchaindb.commands.bigchaindb import run_start
     from bigchaindb.commands.messages import CANNOT_START_KEYPAIR_NOT_FOUND
     from bigchaindb.common.exceptions import KeypairNotFoundException
     mocked_start = mocker.patch('bigchaindb.processes.start')
@@ -320,7 +320,7 @@ def test_run_start_when_keypair_not_found(mocker,
         raise KeypairNotFoundException()

     monkeypatch.setattr(
-        'bigchaindb.commands.bigchain._run_init', mock_run_init)
+        'bigchaindb.commands.bigchaindb._run_init', mock_run_init)

     with pytest.raises(SystemExit) as exc:
         run_start(run_start_args)
@@ -335,7 +335,7 @@ def test_run_start_when_start_rethinkdb_fails(mocker,
                                               monkeypatch,
                                               run_start_args,
                                               mocked_setup_logging):
-    from bigchaindb.commands.bigchain import run_start
+    from bigchaindb.commands.bigchaindb import run_start
     from bigchaindb.commands.messages import RETHINKDB_STARTUP_ERROR
     from bigchaindb.common.exceptions import StartupError
     run_start_args.start_rethinkdb = True
@@ -362,7 +362,7 @@ def test_run_start_when_start_rethinkdb_fails(mocker,
 @patch('bigchaindb.commands.utils.start')
 def test_calling_main(start_mock, base_parser_mock, parse_args_mock,
                       monkeypatch):
-    from bigchaindb.commands.bigchain import main
+    from bigchaindb.commands.bigchaindb import main

     argparser_mock = Mock()
     parser = Mock()
@@ -418,9 +418,9 @@ def test_calling_main(start_mock, base_parser_mock, parse_args_mock,


 @pytest.mark.usefixtures('ignore_local_config_file')
-@patch('bigchaindb.commands.bigchain.add_replicas')
+@patch('bigchaindb.commands.bigchaindb.add_replicas')
 def test_run_add_replicas(mock_add_replicas):
-    from bigchaindb.commands.bigchain import run_add_replicas
+    from bigchaindb.commands.bigchaindb import run_add_replicas
     from bigchaindb.backend.exceptions import OperationError

     args = Namespace(config=None, replicas=['localhost:27017'])
@@ -449,9 +449,9 @@ def test_run_add_replicas(mock_add_replicas):


 @pytest.mark.usefixtures('ignore_local_config_file')
-@patch('bigchaindb.commands.bigchain.remove_replicas')
+@patch('bigchaindb.commands.bigchaindb.remove_replicas')
 def test_run_remove_replicas(mock_remove_replicas):
-    from bigchaindb.commands.bigchain import run_remove_replicas
+    from bigchaindb.commands.bigchaindb import run_remove_replicas
     from bigchaindb.backend.exceptions import OperationError

     args = Namespace(config=None, replicas=['localhost:27017'])
@@ -54,7 +54,7 @@ def test_configure_bigchaindb_logging(log_level):

 def test_start_raises_if_command_not_implemented():
     from bigchaindb.commands import utils
-    from bigchaindb.commands.bigchain import create_parser
+    from bigchaindb.commands.bigchaindb import create_parser

     parser = create_parser()

@@ -66,7 +66,7 @@ def test_start_raises_if_command_not_implemented():

 def test_start_raises_if_no_arguments_given():
     from bigchaindb.commands import utils
-    from bigchaindb.commands.bigchain import create_parser
+    from bigchaindb.commands.bigchaindb import create_parser

     parser = create_parser()
