From 8b5cc46adfb000c959eae4e17d6cd5cb315518a5 Mon Sep 17 00:00:00 2001 From: Luminita Date: Fri, 15 Jul 2016 20:31:52 +0200 Subject: [PATCH 1/3] split the init function and add new tests --- bigchaindb/db/utils.py | 120 ++++++++++++++++++++++++++++------------- tests/db/test_utils.py | 73 +++++++++++++++++++++++++ 2 files changed, 156 insertions(+), 37 deletions(-) diff --git a/bigchaindb/db/utils.py b/bigchaindb/db/utils.py index 482d27fc..c407626a 100644 --- a/bigchaindb/db/utils.py +++ b/bigchaindb/db/utils.py @@ -18,13 +18,82 @@ def get_conn(): bigchaindb.config['database']['port']) -def init(): - # Try to access the keypair, throws an exception if it does not exist - b = bigchaindb.Bigchain() +def init_bigchain_table(conn, dbname): + '''Create bigchain table and the secondary indexes''' + logger.info(' - tables') + # create the table + r.db(dbname).table_create('bigchain').run(conn) - conn = get_conn() - dbname = bigchaindb.config['database']['name'] + logger.info(' - indexes') + # to order blocks by timestamp + r.db(dbname).table('bigchain')\ + .index_create('block_timestamp', r.row['block']['timestamp'])\ + .run(conn) + # to order blocks by block number + r.db(dbname).table('bigchain')\ + .index_create('block_number', r.row['block']['block_number'])\ + .run(conn) + # to query the bigchain for a transaction id + r.db(dbname).table('bigchain')\ + .index_create('transaction_id', + r.row['block']['transactions']['id'], multi=True)\ + .run(conn) + # secondary index for payload data by UUID + r.db(dbname).table('bigchain')\ + .index_create('payload_uuid', + r.row['block']['transactions']['transaction']['data']['uuid'], multi=True)\ + .run(conn) + # wait for rethinkdb to finish creating secondary indexes + r.db(dbname).table('bigchain').index_wait().run(conn) + + +def init_backlog_table(conn, dbname): + '''Create backlog table and the secondary indexes.''' + logger.info(' - tables') + # create the table + r.db(dbname).table_create('backlog').run(conn) + + logger.info(' - indexes') + # to order transactions by timestamp + r.db(dbname).table('backlog')\ + .index_create('transaction_timestamp', + r.row['transaction']['timestamp'])\ + .run(conn) + # compound index to read transactions from the backlog per assignee + r.db(dbname).table('backlog')\ + .index_create('assignee__transaction_timestamp', + [r.row['assignee'], r.row['transaction']['timestamp']])\ + .run(conn) + + # wait for rethinkdb to finish creating secondary indexes + r.db(dbname).table('backlog').index_wait().run(conn) + + +def init_votes_table(conn, dbname): + '''Create votes table and the secondary indexes.''' + logger.info(' - tables') + # create the table + r.db(dbname).table_create('votes').run(conn) + + logger.info(' - indexes') + # compound index to order votes by block id and node + r.db(dbname).table('votes')\ + .index_create('block_and_voter', + [r.row['vote']['voting_for_block'], + r.row['node_pubkey']])\ + .run(conn) + + # wait for rethinkdb to finish creating secondary indexes + r.db(dbname).table('votes').index_wait().run(conn) + + +def get_database_name(): + '''Return the database name.''' + return bigchaindb.config['database']['name'] + + +def create_database(conn, dbname): if r.db_list().contains(dbname).run(conn): raise exceptions.DatabaseAlreadyExists('Database `{}` already exists'.format(dbname)) @@ -32,41 +101,18 @@ def init(): logger.info(' - database `%s`', dbname) r.db_create(dbname).run(conn) - logger.info(' - tables') - # create the tables - r.db(dbname).table_create('bigchain').run(conn) - 
r.db(dbname).table_create('backlog').run(conn) - r.db(dbname).table_create('votes').run(conn) - logger.info(' - indexes') - # create the secondary indexes - # to order blocks by timestamp - r.db(dbname).table('bigchain').index_create('block_timestamp', r.row['block']['timestamp']).run(conn) - # to order blocks by block number - r.db(dbname).table('bigchain').index_create('block_number', r.row['block']['block_number']).run(conn) - # to order transactions by timestamp - r.db(dbname).table('backlog').index_create('transaction_timestamp', r.row['transaction']['timestamp']).run(conn) - # to query the bigchain for a transaction id - r.db(dbname).table('bigchain').index_create('transaction_id', - r.row['block']['transactions']['id'], multi=True).run(conn) - # compound index to read transactions from the backlog per assignee - r.db(dbname).table('backlog')\ - .index_create('assignee__transaction_timestamp', [r.row['assignee'], r.row['transaction']['timestamp']])\ - .run(conn) +def init(): + # Try to access the keypair, throws an exception if it does not exist + b = bigchaindb.Bigchain() - # compound index to order votes by block id and node - r.db(dbname).table('votes').index_create('block_and_voter', - [r.row['vote']['voting_for_block'], r.row['node_pubkey']]).run(conn) + conn = get_conn() + dbname = get_database_name() + create_database(conn, dbname) - # secondary index for payload data by UUID - r.db(dbname).table('bigchain')\ - .index_create('payload_uuid', r.row['block']['transactions']['transaction']['data']['uuid'], multi=True)\ - .run(conn) - - # wait for rethinkdb to finish creating secondary indexes - r.db(dbname).table('backlog').index_wait().run(conn) - r.db(dbname).table('bigchain').index_wait().run(conn) - r.db(dbname).table('votes').index_wait().run(conn) + init_bigchain_table(conn, dbname) + init_backlog_table(conn, dbname) + init_votes_table(conn, dbname) logger.info(' - genesis block') b.create_genesis_block() diff --git a/tests/db/test_utils.py b/tests/db/test_utils.py index 9e032b25..d6fc7323 100644 --- a/tests/db/test_utils.py +++ b/tests/db/test_utils.py @@ -38,6 +38,79 @@ def test_init_creates_db_tables_and_indexes(): 'assignee__transaction_timestamp').run(conn) is True +def test_create_database(): + conn = utils.get_conn() + dbname = utils.get_database_name() + + # The db is set up by fixtures so we need to remove it + # and recreate it just with one table + r.db_drop(dbname).run(conn) + utils.create_database(conn, dbname) + assert r.db_list().contains(dbname).run(conn) is True + + +def test_init_create_bigchain_table(): + conn = utils.get_conn() + dbname = utils.get_database_name() + + # The db is set up by fixtures so we need to remove it + # and recreate it just with one table + r.db_drop(dbname).run(conn) + utils.create_database(conn, dbname) + utils.init_bigchain_table(conn, dbname) + + assert r.db(dbname).table_list().contains('bigchain').run(conn) is True + assert r.db(dbname).table_list().contains('backlog').run(conn) is False + assert r.db(dbname).table_list().contains('votes').run(conn) is False + + assert r.db(dbname).table('bigchain').index_list().contains( + 'block_timestamp').run(conn) is True + assert r.db(dbname).table('bigchain').index_list().contains( + 'block_number').run(conn) is True + assert r.db(dbname).table('bigchain').index_list().contains( + 'transaction_id').run(conn) is True + assert r.db(dbname).table('bigchain').index_list().contains( + 'payload_uuid').run(conn) is True + + +def test_init_create_backlog_table(): + conn = utils.get_conn() + 
dbname = utils.get_database_name() + + # The db is set up by fixtures so we need to remove it + # and recreate it just with one table + r.db_drop(dbname).run(conn) + utils.create_database(conn, dbname) + utils.init_backlog_table(conn, dbname) + + assert r.db(dbname).table_list().contains('backlog').run(conn) is True + assert r.db(dbname).table_list().contains('bigchain').run(conn) is False + assert r.db(dbname).table_list().contains('votes').run(conn) is False + + assert r.db(dbname).table('backlog').index_list().contains( + 'transaction_timestamp').run(conn) is True + assert r.db(dbname).table('backlog').index_list().contains( + 'assignee__transaction_timestamp').run(conn) is True + + +def test_init_create_votes_table(): + conn = utils.get_conn() + dbname = utils.get_database_name() + + # The db is set up by fixtures so we need to remove it + # and recreate it just with one table + r.db_drop(dbname).run(conn) + utils.create_database(conn, dbname) + utils.init_votes_table(conn, dbname) + + assert r.db(dbname).table_list().contains('votes').run(conn) is True + assert r.db(dbname).table_list().contains('bigchain').run(conn) is False + assert r.db(dbname).table_list().contains('backlog').run(conn) is False + + assert r.db(dbname).table('votes').index_list().contains( + 'block_and_voter').run(conn) is True + + def test_init_fails_if_db_exists(): conn = utils.get_conn() dbname = bigchaindb.config['database']['name'] From 4c399ad234998d27ddb56f53d3eec4f8d14196dc Mon Sep 17 00:00:00 2001 From: Luminita Date: Tue, 19 Jul 2016 15:26:56 +0200 Subject: [PATCH 2/3] split index and table creation --- bigchaindb/db/utils.py | 65 +++++++++++++++++++----------------------- tests/db/test_utils.py | 50 ++++++++++++++++++++++++++------ 2 files changed, 71 insertions(+), 44 deletions(-) diff --git a/bigchaindb/db/utils.py b/bigchaindb/db/utils.py index ca5b05b3..7a6c293c 100644 --- a/bigchaindb/db/utils.py +++ b/bigchaindb/db/utils.py @@ -18,13 +18,27 @@ def get_conn(): bigchaindb.config['database']['port']) -def init_bigchain_table(conn, dbname): - '''Create bigchain table and the secondary indexes''' - logger.info(' - tables') - # create the table - r.db(dbname).table_create('bigchain').run(conn) +def get_database_name(): + return bigchaindb.config['database']['name'] - logger.info(' - indexes') + +def create_database(conn, dbname): + if r.db_list().contains(dbname).run(conn): + raise exceptions.DatabaseAlreadyExists('Database `{}` already exists'.format(dbname)) + + logger.info('Create:') + logger.info(' - database `%s`', dbname) + r.db_create(dbname).run(conn) + + +def create_table(conn, dbname, table_name): + logger.info(' - %s table', table_name) + # create the table + r.db(dbname).table_create(table_name).run(conn) + + +def create_bigchain_secondary_index(conn, dbname): + logger.info(' - bigchain indexes') # to order blocks by timestamp r.db(dbname).table('bigchain')\ .index_create('block_timestamp', r.row['block']['timestamp'])\ @@ -44,13 +58,8 @@ def init_bigchain_table(conn, dbname): r.db(dbname).table('bigchain').index_wait().run(conn) -def init_backlog_table(conn, dbname): - '''Create backlog table and the secondary indexes.''' - logger.info(' - tables') - # create the table - r.db(dbname).table_create('backlog').run(conn) - - logger.info(' - indexes') +def create_backlog_secondary_index(conn, dbname): + logger.info(' - backlog indexes') # to order transactions by timestamp r.db(dbname).table('backlog')\ .index_create('transaction_timestamp', @@ -66,12 +75,7 @@ def init_backlog_table(conn, 
dbname): r.db(dbname).table('backlog').index_wait().run(conn) -def init_votes_table(conn, dbname): - '''Create votes table and the secondary indexes.''' - logger.info(' - tables') - # create the table - r.db(dbname).table_create('votes').run(conn) - +def create_votes_secondary_index(conn, dbname): logger.info(' - indexes') # compound index to order votes by block id and node r.db(dbname).table('votes')\ @@ -84,20 +88,6 @@ def init_votes_table(conn, dbname): r.db(dbname).table('votes').index_wait().run(conn) -def get_database_name(): - '''Return the database name.''' - return bigchaindb.config['database']['name'] - - -def create_database(conn, dbname): - if r.db_list().contains(dbname).run(conn): - raise exceptions.DatabaseAlreadyExists('Database `{}` already exists'.format(dbname)) - - logger.info('Create:') - logger.info(' - database `%s`', dbname) - r.db_create(dbname).run(conn) - - def init(): # Try to access the keypair, throws an exception if it does not exist b = bigchaindb.Bigchain() @@ -106,9 +96,12 @@ def init(): dbname = get_database_name() create_database(conn, dbname) - init_bigchain_table(conn, dbname) - init_backlog_table(conn, dbname) - init_votes_table(conn, dbname) + table_names = ['bigchain', 'backlog', 'votes'] + for table_name in table_names: + create_table(conn, dbname, table_name) + create_bigchain_secondary_index(conn, dbname) + create_backlog_secondary_index(conn, dbname) + create_votes_secondary_index(conn, dbname) logger.info(' - genesis block') b.create_genesis_block() diff --git a/tests/db/test_utils.py b/tests/db/test_utils.py index bd74e565..0299224c 100644 --- a/tests/db/test_utils.py +++ b/tests/db/test_utils.py @@ -48,7 +48,7 @@ def test_create_database(): assert r.db_list().contains(dbname).run(conn) is True -def test_init_create_bigchain_table(): +def test_create_bigchain_table(): conn = utils.get_conn() dbname = utils.get_database_name() @@ -56,23 +56,33 @@ def test_init_create_bigchain_table(): # and recreate it just with one table r.db_drop(dbname).run(conn) utils.create_database(conn, dbname) - utils.init_bigchain_table(conn, dbname) + utils.create_table(conn, dbname, 'bigchain') assert r.db(dbname).table_list().contains('bigchain').run(conn) is True assert r.db(dbname).table_list().contains('backlog').run(conn) is False assert r.db(dbname).table_list().contains('votes').run(conn) is False + +def test_create_bigchain_secondary_index(): + conn = utils.get_conn() + dbname = utils.get_database_name() + + # The db is set up by fixtures so we need to remove it + # and recreate it just with one table + r.db_drop(dbname).run(conn) + utils.create_database(conn, dbname) + utils.create_table(conn, dbname, 'bigchain') + utils.create_bigchain_secondary_index(conn, dbname) + assert r.db(dbname).table('bigchain').index_list().contains( 'block_timestamp').run(conn) is True - assert r.db(dbname).table('bigchain').index_list().contains( - 'block_number').run(conn) is True assert r.db(dbname).table('bigchain').index_list().contains( 'transaction_id').run(conn) is True assert r.db(dbname).table('bigchain').index_list().contains( 'payload_uuid').run(conn) is True -def test_init_create_backlog_table(): +def test_create_backlog_table(): conn = utils.get_conn() dbname = utils.get_database_name() @@ -80,19 +90,31 @@ def test_init_create_backlog_table(): # and recreate it just with one table r.db_drop(dbname).run(conn) utils.create_database(conn, dbname) - utils.init_backlog_table(conn, dbname) + utils.create_table(conn, dbname, 'backlog') assert 
r.db(dbname).table_list().contains('backlog').run(conn) is True assert r.db(dbname).table_list().contains('bigchain').run(conn) is False assert r.db(dbname).table_list().contains('votes').run(conn) is False + +def test_create_backlog_secondary_index(): + conn = utils.get_conn() + dbname = utils.get_database_name() + + # The db is set up by fixtures so we need to remove it + # and recreate it just with one table + r.db_drop(dbname).run(conn) + utils.create_database(conn, dbname) + utils.create_table(conn, dbname, 'backlog') + utils.create_backlog_secondary_index(conn, dbname) + assert r.db(dbname).table('backlog').index_list().contains( 'transaction_timestamp').run(conn) is True assert r.db(dbname).table('backlog').index_list().contains( 'assignee__transaction_timestamp').run(conn) is True -def test_init_create_votes_table(): +def test_create_votes_table(): conn = utils.get_conn() dbname = utils.get_database_name() @@ -100,12 +122,24 @@ def test_init_create_votes_table(): # and recreate it just with one table r.db_drop(dbname).run(conn) utils.create_database(conn, dbname) - utils.init_votes_table(conn, dbname) + utils.create_table(conn, dbname, 'votes') assert r.db(dbname).table_list().contains('votes').run(conn) is True assert r.db(dbname).table_list().contains('bigchain').run(conn) is False assert r.db(dbname).table_list().contains('backlog').run(conn) is False + +def test_create_votes_secondary_index(): + conn = utils.get_conn() + dbname = utils.get_database_name() + + # The db is set up by fixtures so we need to remove it + # and recreate it just with one table + r.db_drop(dbname).run(conn) + utils.create_database(conn, dbname) + utils.create_table(conn, dbname, 'votes') + utils.create_votes_secondary_index(conn, dbname) + assert r.db(dbname).table('votes').index_list().contains( 'block_and_voter').run(conn) is True From 2b25f11f768a2789936e9c75f9a98b09fccca662 Mon Sep 17 00:00:00 2001 From: Luminita Date: Thu, 21 Jul 2016 09:40:38 +0200 Subject: [PATCH 3/3] fix logger output messages --- bigchaindb/db/utils.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/bigchaindb/db/utils.py b/bigchaindb/db/utils.py index 7a6c293c..603f143c 100644 --- a/bigchaindb/db/utils.py +++ b/bigchaindb/db/utils.py @@ -26,19 +26,18 @@ def create_database(conn, dbname): if r.db_list().contains(dbname).run(conn): raise exceptions.DatabaseAlreadyExists('Database `{}` already exists'.format(dbname)) - logger.info('Create:') - logger.info(' - database `%s`', dbname) + logger.info('Create database `%s`.', dbname) r.db_create(dbname).run(conn) def create_table(conn, dbname, table_name): - logger.info(' - %s table', table_name) + logger.info('Create `%s` table.', table_name) # create the table r.db(dbname).table_create(table_name).run(conn) def create_bigchain_secondary_index(conn, dbname): - logger.info(' - bigchain indexes') + logger.info('Create `bigchain` secondary index.') # to order blocks by timestamp r.db(dbname).table('bigchain')\ .index_create('block_timestamp', r.row['block']['timestamp'])\ @@ -59,7 +58,7 @@ def create_bigchain_secondary_index(conn, dbname): def create_backlog_secondary_index(conn, dbname): - logger.info(' - backlog indexes') + logger.info('Create `backlog` secondary index.') # to order transactions by timestamp r.db(dbname).table('backlog')\ .index_create('transaction_timestamp', @@ -76,7 +75,7 @@ def create_backlog_secondary_index(conn, dbname): def create_votes_secondary_index(conn, dbname): - logger.info(' - indexes') + logger.info('Create `votes` 
secondary index.') # compound index to order votes by block id and node r.db(dbname).table('votes')\ .index_create('block_and_voter', @@ -103,7 +102,7 @@ def init(): create_backlog_secondary_index(conn, dbname) create_votes_secondary_index(conn, dbname) - logger.info(' - genesis block') + logger.info('Create genesis block.') b.create_genesis_block() logger.info('Done, have fun!')
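
A short usage sketch (not part of the patches above): it shows how the split-out helpers introduced by this series can be composed to rebuild the schema against the configured database, mirroring what init() does but without creating the genesis block. It assumes a reachable RethinkDB instance and an existing bigchaindb configuration; dropping the database first is only for illustration.

    import rethinkdb as r
    from bigchaindb.db import utils

    # connection and database name come from the bigchaindb config
    conn = utils.get_conn()
    dbname = utils.get_database_name()

    # start from a clean slate; drop the database if a previous run left it behind
    if r.db_list().contains(dbname).run(conn):
        r.db_drop(dbname).run(conn)

    # create_database raises DatabaseAlreadyExists if the database is already there
    utils.create_database(conn, dbname)

    # create the three tables, then their secondary indexes;
    # each index helper also waits for its indexes to be ready (index_wait)
    for table_name in ['bigchain', 'backlog', 'votes']:
        utils.create_table(conn, dbname, table_name)
    utils.create_bigchain_secondary_index(conn, dbname)
    utils.create_backlog_secondary_index(conn, dbname)
    utils.create_votes_secondary_index(conn, dbname)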