split the init function and add new tests

This commit is contained in:
Luminita 2016-07-15 20:31:52 +02:00
parent b9b0b4a343
commit 8b5cc46adf
2 changed files with 156 additions and 37 deletions

View File

@ -18,13 +18,82 @@ def get_conn():
bigchaindb.config['database']['port']) bigchaindb.config['database']['port'])
def init_bigchain_table(conn, dbname):
    """Create the bigchain table and its secondary indexes.

    Args:
        conn: an open RethinkDB connection.
        dbname (str): name of the database that will hold the table.
    """
    logger.info(' - tables')
    # create the table
    r.db(dbname).table_create('bigchain').run(conn)

    logger.info(' - indexes')
    # to order blocks by timestamp
    r.db(dbname).table('bigchain')\
        .index_create('block_timestamp', r.row['block']['timestamp'])\
        .run(conn)
    # to order blocks by block number
    r.db(dbname).table('bigchain')\
        .index_create('block_number', r.row['block']['block_number'])\
        .run(conn)
    # to query the bigchain for a transaction id
    r.db(dbname).table('bigchain')\
        .index_create('transaction_id',
                      r.row['block']['transactions']['id'], multi=True)\
        .run(conn)
    # secondary index for payload data by UUID
    r.db(dbname).table('bigchain')\
        .index_create('payload_uuid',
                      r.row['block']['transactions']['transaction']['data']['uuid'],
                      multi=True)\
        .run(conn)

    # wait for rethinkdb to finish creating secondary indexes
    r.db(dbname).table('bigchain').index_wait().run(conn)
def init_backlog_table(conn, dbname):
    """Create the backlog table and its secondary indexes."""
    logger.info(' - tables')
    # create the table
    r.db(dbname).table_create('backlog').run(conn)

    logger.info(' - indexes')
    # ReQL terms are immutable ASTs, so the base term can be reused
    backlog = r.db(dbname).table('backlog')
    # to order transactions by timestamp
    backlog.index_create('transaction_timestamp',
                         r.row['transaction']['timestamp']).run(conn)
    # compound index to read transactions from the backlog per assignee
    backlog.index_create(
        'assignee__transaction_timestamp',
        [r.row['assignee'], r.row['transaction']['timestamp']]).run(conn)

    # wait for rethinkdb to finish creating secondary indexes
    backlog.index_wait().run(conn)
def init_votes_table(conn, dbname):
    """Create the votes table and its secondary indexes."""
    logger.info(' - tables')
    # create the table
    r.db(dbname).table_create('votes').run(conn)

    logger.info(' - indexes')
    votes = r.db(dbname).table('votes')
    # compound index to order votes by block id and node
    compound_key = [r.row['vote']['voting_for_block'], r.row['node_pubkey']]
    votes.index_create('block_and_voter', compound_key).run(conn)

    # wait for rethinkdb to finish creating secondary indexes
    votes.index_wait().run(conn)
def get_database_name():
    """Return the database name taken from the bigchaindb configuration."""
    db_config = bigchaindb.config['database']
    return db_config['name']
def create_database(conn, dbname):
    """Create the database, refusing to clobber an existing one.

    Args:
        conn: an open RethinkDB connection.
        dbname (str): name of the database to create.

    Raises:
        exceptions.DatabaseAlreadyExists: if a database named ``dbname``
            already exists on the server.
    """
    if r.db_list().contains(dbname).run(conn):
        raise exceptions.DatabaseAlreadyExists('Database `{}` already exists'.format(dbname))

    logger.info(' - database `%s`', dbname)
    r.db_create(dbname).run(conn)
def init():
    """Initialize the database: create it, build every table with its
    secondary indexes, and write the genesis block.

    Raises:
        exceptions.DatabaseAlreadyExists: if the configured database
            already exists on the server.
    """
    # Try to access the keypair, throws an exception if it does not exist
    b = bigchaindb.Bigchain()

    conn = get_conn()
    dbname = get_database_name()
    create_database(conn, dbname)

    init_bigchain_table(conn, dbname)
    init_backlog_table(conn, dbname)
    init_votes_table(conn, dbname)

    logger.info(' - genesis block')
    b.create_genesis_block()

View File

@ -38,6 +38,79 @@ def test_init_creates_db_tables_and_indexes():
'assignee__transaction_timestamp').run(conn) is True 'assignee__transaction_timestamp').run(conn) is True
def test_create_database():
    """create_database() registers a fresh database with the server."""
    conn = utils.get_conn()
    dbname = utils.get_database_name()
    # the fixtures already set the db up, so drop it and let
    # create_database build it again from scratch
    r.db_drop(dbname).run(conn)
    utils.create_database(conn, dbname)

    db_exists = r.db_list().contains(dbname).run(conn)
    assert db_exists is True
def test_init_create_bigchain_table():
    """init_bigchain_table() creates only the bigchain table, with all indexes."""
    conn = utils.get_conn()
    dbname = utils.get_database_name()
    # drop the fixture-created db and rebuild it with only the bigchain table
    r.db_drop(dbname).run(conn)
    utils.create_database(conn, dbname)
    utils.init_bigchain_table(conn, dbname)

    table_list = r.db(dbname).table_list()
    assert table_list.contains('bigchain').run(conn) is True
    assert table_list.contains('backlog').run(conn) is False
    assert table_list.contains('votes').run(conn) is False

    # every secondary index of the bigchain table must be present
    index_list = r.db(dbname).table('bigchain').index_list()
    for index in ('block_timestamp', 'block_number',
                  'transaction_id', 'payload_uuid'):
        assert index_list.contains(index).run(conn) is True
def test_init_create_backlog_table():
    """init_backlog_table() creates only the backlog table, with all indexes."""
    conn = utils.get_conn()
    dbname = utils.get_database_name()
    # drop the fixture-created db and rebuild it with only the backlog table
    r.db_drop(dbname).run(conn)
    utils.create_database(conn, dbname)
    utils.init_backlog_table(conn, dbname)

    table_list = r.db(dbname).table_list()
    assert table_list.contains('backlog').run(conn) is True
    assert table_list.contains('bigchain').run(conn) is False
    assert table_list.contains('votes').run(conn) is False

    # every secondary index of the backlog table must be present
    index_list = r.db(dbname).table('backlog').index_list()
    for index in ('transaction_timestamp', 'assignee__transaction_timestamp'):
        assert index_list.contains(index).run(conn) is True
def test_init_create_votes_table():
    """init_votes_table() creates only the votes table, with its index."""
    conn = utils.get_conn()
    dbname = utils.get_database_name()
    # drop the fixture-created db and rebuild it with only the votes table
    r.db_drop(dbname).run(conn)
    utils.create_database(conn, dbname)
    utils.init_votes_table(conn, dbname)

    table_list = r.db(dbname).table_list()
    assert table_list.contains('votes').run(conn) is True
    assert table_list.contains('bigchain').run(conn) is False
    assert table_list.contains('backlog').run(conn) is False

    index_list = r.db(dbname).table('votes').index_list()
    assert index_list.contains('block_and_voter').run(conn) is True
def test_init_fails_if_db_exists(): def test_init_fails_if_db_exists():
conn = utils.get_conn() conn = utils.get_conn()
dbname = bigchaindb.config['database']['name'] dbname = bigchaindb.config['database']['name']