Mirror of https://github.com/bigchaindb/bigchaindb.git (synced 2024-10-13)
Problem: There is no logging for benchmark stats (#2349)
* add log level for benchmarking
* bug fixes and code refactor
* remove unnecessary imports
* fix failing tests
* update docs
* fix typo
* fix flake8
* fix flake8 issues
* nitpick fixes

Signed-off-by: Shahbaz Nazir <shahbaz@bigchaindb.com>
Parent commit: 58fbccf3f1
This commit:   2da720e542
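The mechanism at the heart of this commit is a custom logging level. A minimal, self-contained sketch of the approach (the names and the numeric value 15 are taken from the diff below; the `basicConfig` call is only for this demo):

```python
import logging

BENCHMARK_LOG_LEVEL = 15  # sits between DEBUG (10) and INFO (20)


def benchmark(self, message, *args, **kws):
    # Logger._log expects the format args bundled as a single tuple.
    if self.isEnabledFor(BENCHMARK_LOG_LEVEL):
        self._log(BENCHMARK_LOG_LEVEL, message, args, **kws)


# Register the level name and expose logger.benchmark(...) everywhere.
logging.addLevelName(BENCHMARK_LOG_LEVEL, 'BENCHMARK')
logging.Logger.benchmark = benchmark

logging.basicConfig(level=BENCHMARK_LOG_LEVEL)
logging.getLogger(__name__).benchmark('CHECK_TX_INIT')
```

Because 15 is below INFO, benchmark records are dropped by handlers configured at INFO or above, but still reach any handler configured for the BENCHMARK level, such as the dedicated file handler this commit adds.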
@@ -16,10 +16,11 @@ import bigchaindb
 from bigchaindb import backend
 from bigchaindb.backend import schema
 from bigchaindb.backend import query
-from bigchaindb.commands import utils
-from bigchaindb.commands.utils import (
-    configure_bigchaindb, start_logging_process, input_on_stderr)
 from bigchaindb.backend.query import VALIDATOR_UPDATE_ID, PRE_COMMIT_ID
+from bigchaindb.commands import utils
+from bigchaindb.commands.utils import (configure_bigchaindb,
+                                       input_on_stderr)
+from bigchaindb.log import setup_logging
 from bigchaindb.tendermint.lib import BigchainDB
 from bigchaindb.tendermint.utils import public_key_from_base64
 
@@ -159,11 +160,13 @@ def run_recover(b):
 
 
 @configure_bigchaindb
-@start_logging_process
 def run_start(args):
     """Start the processes to run the node"""
-    logger.info('BigchainDB Version %s', bigchaindb.__version__)
+    # Configure Logging
+    setup_logging()
+
+    logger.info('BigchainDB Version %s', bigchaindb.__version__)
 
     run_recover(BigchainDB())
 
     try:
@@ -10,9 +10,7 @@ import sys
 
 import bigchaindb
 import bigchaindb.config_utils
-from bigchaindb.log import DEFAULT_LOGGING_CONFIG
 from bigchaindb.version import __version__
-from logging.config import dictConfig as set_logging_config
 
 
 def configure_bigchaindb(command):
@@ -48,51 +46,6 @@ def configure_bigchaindb(command):
     return configure
 
 
-def start_logging_process(command):
-    """Decorator to start the logging subscriber process.
-
-    Args:
-        command: The command to decorate.
-
-    Returns:
-        The command wrapper function.
-
-    .. important::
-
-        Configuration, if needed, should be applied before invoking this
-        decorator, as starting the subscriber process for logging will
-        configure the root logger for the child process based on the
-        state of :obj:`bigchaindb.config` at the moment this decorator
-        is invoked.
-
-    """
-    @functools.wraps(command)
-    def start_logging(args):
-        logging_configs = DEFAULT_LOGGING_CONFIG
-        new_logging_configs = bigchaindb.config['log']
-
-        # Update log file location
-        logging_configs['handlers']['file']['filename'] = new_logging_configs['file']
-        logging_configs['handlers']['errors']['filename'] = new_logging_configs['error_file']
-
-        # Update log levels
-        logging_configs['handlers']['console']['level'] = \
-            new_logging_configs['level_console'].upper()
-        logging_configs['handlers']['file']['level'] = new_logging_configs['level_logfile'].upper()
-
-        # Update log date format
-        logging_configs['formatters']['console']['datefmt'] = new_logging_configs['datefmt_console']
-        logging_configs['formatters']['file']['datefmt'] = new_logging_configs['datefmt_logfile']
-
-        # Update log string format
-        logging_configs['formatters']['console']['format'] = new_logging_configs['fmt_console']
-        logging_configs['formatters']['file']['format'] = new_logging_configs['fmt_console']
-
-        set_logging_config(logging_configs)
-        command(args)
-    return start_logging
-
-
 def _convert(value, default=None, convert=None):
     def convert_bool(value):
         if value.lower() in ('true', 't', 'yes', 'y'):
@@ -190,7 +143,7 @@ base_parser.add_argument('-c', '--config',
 # the environment variables provided to configure the logger.
 base_parser.add_argument('-l', '--log-level',
                          type=str.upper,  # convert to uppercase for comparison to choices
-                         choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
+                         choices=['DEBUG', 'BENCHMARK', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                          help='Log level')
 
 base_parser.add_argument('-y', '--yes', '--yes-please',
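A note on the CLI change above: because the parser already uppercases the value before validating it against `choices`, the new level can be passed in any case. A quick standalone check (the parser object is illustrative):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-l', '--log-level',
                    type=str.upper,  # uppercase before comparing to choices
                    choices=['DEBUG', 'BENCHMARK', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                    help='Log level')

print(parser.parse_args(['-l', 'benchmark']).log_level)  # prints 'BENCHMARK'
```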
@@ -1,8 +1,13 @@
+import bigchaindb
 import logging
 
+from bigchaindb.common.exceptions import ConfigurationError
+from logging.config import dictConfig as set_logging_config
 from os.path import expanduser, join
 
 
 DEFAULT_LOG_DIR = expanduser('~')
+BENCHMARK_LOG_LEVEL = 15
 
 
 DEFAULT_LOGGING_CONFIG = {
@@ -21,6 +26,11 @@ DEFAULT_LOGGING_CONFIG = {
                        '%(message)s (%(processName)-10s - pid: %(process)d)'),
             'datefmt': '%Y-%m-%d %H:%M:%S',
         },
+        'benchmark': {
+            'class': 'logging.Formatter',
+            'format': ('%(asctime)s, %(levelname)s, %(message)s'),
+            'datefmt': '%Y-%m-%d %H:%M:%S',
+        }
     },
     'handlers': {
         'console': {
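The new 'benchmark' formatter emits flat, comma-separated records, which keeps the benchmark log easy to post-process. A standalone illustration of the output shape (the timestamp in the comment is a placeholder):

```python
import logging

formatter = logging.Formatter(fmt='%(asctime)s, %(levelname)s, %(message)s',
                              datefmt='%Y-%m-%d %H:%M:%S')
# Build a record by hand at level 15; without addLevelName it renders
# as "Level 15" rather than "BENCHMARK".
record = logging.LogRecord('demo', 15, __file__, 1,
                           'CHECK_TX_END, tx_id:%s', ('abc123',), None)
print(formatter.format(record))
# e.g. "2018-06-01 12:00:00, Level 15, CHECK_TX_END, tx_id:abc123"
```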
@@ -46,10 +56,98 @@ DEFAULT_LOGGING_CONFIG = {
             'formatter': 'file',
             'level': logging.ERROR,
         },
+        'benchmark': {
+            'class': 'logging.handlers.RotatingFileHandler',
+            'filename': 'bigchaindb-benchmark.log',
+            'mode': 'w',
+            'maxBytes': 209715200,
+            'backupCount': 5,
+            'formatter': 'benchmark',
+            'level': BENCHMARK_LOG_LEVEL,
+        }
     },
     'loggers': {},
     'root': {
         'level': logging.DEBUG,
-        'handlers': ['console', 'file', 'errors'],
+        'handlers': ['console', 'file', 'errors', 'benchmark'],
     },
 }
+
+
+def benchmark(self, message, *args, **kws):
+    # Yes, logger takes its '*args' as 'args'.
+    if self.isEnabledFor(BENCHMARK_LOG_LEVEL):
+        self._log(BENCHMARK_LOG_LEVEL, message, args, **kws)
+
+
+def _normalize_log_level(level):
+    try:
+        return level.upper()
+    except AttributeError as exc:
+        raise ConfigurationError('Log level must be a string!') from exc
+
+
+def setup_logging():
+    """Function to configure log handlers.
+
+    .. important::
+
+        Configuration, if needed, should be applied before invoking this
+        function, as it configures the root logger based on the state of
+        :obj:`bigchaindb.config` at the moment it is invoked.
+
+    """
+
+    # Add a new logging level for benchmark logging
+    logging.addLevelName(BENCHMARK_LOG_LEVEL, 'BENCHMARK')
+    logging.BENCHMARK = BENCHMARK_LOG_LEVEL
+    logging.Logger.benchmark = benchmark
+
+    logging_configs = DEFAULT_LOGGING_CONFIG
+    new_logging_configs = bigchaindb.config['log']
+
+    if 'file' in new_logging_configs:
+        filename = new_logging_configs['file']
+        logging_configs['handlers']['file']['filename'] = filename
+
+    if 'error_file' in new_logging_configs:
+        error_filename = new_logging_configs['error_file']
+        logging_configs['handlers']['errors']['filename'] = error_filename
+
+    if 'level_console' in new_logging_configs:
+        level = _normalize_log_level(new_logging_configs['level_console'])
+        logging_configs['handlers']['console']['level'] = level
+
+    if 'level_logfile' in new_logging_configs:
+        level = _normalize_log_level(new_logging_configs['level_logfile'])
+        logging_configs['handlers']['file']['level'] = level
+        logging_configs['handlers']['benchmark']['level'] = level
+
+    if 'fmt_console' in new_logging_configs:
+        fmt = new_logging_configs['fmt_console']
+        logging_configs['formatters']['console']['format'] = fmt
+
+    if 'fmt_logfile' in new_logging_configs:
+        fmt = new_logging_configs['fmt_logfile']
+        logging_configs['formatters']['file']['format'] = fmt
+
+    if 'datefmt_console' in new_logging_configs:
+        fmt = new_logging_configs['datefmt_console']
+        logging_configs['formatters']['console']['datefmt'] = fmt
+
+    if 'datefmt_logfile' in new_logging_configs:
+        fmt = new_logging_configs['datefmt_logfile']
+        logging_configs['formatters']['file']['datefmt'] = fmt
+
+    log_levels = new_logging_configs.get('granular_levels', {})
+
+    for logger_name, level in log_levels.items():
+        level = _normalize_log_level(level)
+        try:
+            logging_configs['loggers'][logger_name]['level'] = level
+        except KeyError:
+            logging_configs['loggers'][logger_name] = {'level': level}
+
+    set_logging_config(logging_configs)
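One detail of `setup_logging()` worth calling out: `granular_levels` lets a deployment raise or lower the level of individual loggers by name. A sketch of just that loop, restating the try/except above with `setdefault` and an assumed config fragment:

```python
# Hypothetical config fragment; key names follow setup_logging() above.
log_config = {'granular_levels': {'bigchaindb.tendermint': 'benchmark'}}

loggers = {}
for logger_name, level in log_config.get('granular_levels', {}).items():
    # Create the per-logger dict if missing, then set its level.
    loggers.setdefault(logger_name, {})['level'] = level.upper()

print(loggers)  # {'bigchaindb.tendermint': {'level': 'BENCHMARK'}}
```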
@@ -38,7 +38,6 @@ class App(BaseApplication):
 
     def info(self):
         """Return height of the latest committed block."""
-
         r = ResponseInfo()
         block = self.bigchaindb.get_latest_block()
         if block:
|
|||||||
|
|
||||||
Args:
|
Args:
|
||||||
raw_tx: a raw string (in bytes) transaction."""
|
raw_tx: a raw string (in bytes) transaction."""
|
||||||
|
|
||||||
|
logger.benchmark('CHECK_TX_INIT')
|
||||||
logger.debug('check_tx: %s', raw_transaction)
|
logger.debug('check_tx: %s', raw_transaction)
|
||||||
transaction = decode_transaction(raw_transaction)
|
transaction = decode_transaction(raw_transaction)
|
||||||
if self.bigchaindb.is_valid_transaction(transaction):
|
if self.bigchaindb.is_valid_transaction(transaction):
|
||||||
logger.debug('check_tx: VALID')
|
logger.debug('check_tx: VALID')
|
||||||
|
logger.benchmark('CHECK_TX_END, tx_id:%s', transaction['id'])
|
||||||
return Result.ok()
|
return Result.ok()
|
||||||
else:
|
else:
|
||||||
logger.debug('check_tx: INVALID')
|
logger.debug('check_tx: INVALID')
|
||||||
|
logger.benchmark('CHECK_TX_END, tx_id:%s', transaction['id'])
|
||||||
return Result.error()
|
return Result.error()
|
||||||
|
|
||||||
def begin_block(self, req_begin_block):
|
def begin_block(self, req_begin_block):
|
||||||
@@ -70,6 +73,9 @@ class App(BaseApplication):
             req_begin_block: block object which contains block header
             and block hash.
         """
+        logger.benchmark('BEGIN BLOCK, height:%s, num_txs:%s',
+                         req_begin_block.header.height,
+                         req_begin_block.header.num_txs)
 
         self.block_txn_ids = []
         self.block_transactions = []
@@ -98,7 +104,6 @@ class App(BaseApplication):
 
         Args:
             height (int): new height of the chain."""
-
         self.new_height = height
         block_txn_hash = calculate_hash(self.block_txn_ids)
         block = self.bigchaindb.get_latest_block()
@@ -130,7 +135,6 @@ class App(BaseApplication):
 
     def commit(self):
         """Store the new height along with the block hash."""
-
         data = self.block_txn_hash.encode('utf-8')
 
         # register a new block only when new transactions are received
@@ -146,6 +150,7 @@ class App(BaseApplication):
         logger.debug('Commit-ing new block with hash: apphash=%s ,'
                      'height=%s, txn ids=%s', data, self.new_height,
                      self.block_txn_ids)
+        logger.benchmark('COMMIT_BLOCK, height:%s', self.new_height)
         return data
 
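The markers added above come in pairs (CHECK_TX_INIT/CHECK_TX_END, BEGIN BLOCK/COMMIT_BLOCK), so stage timings can be recovered offline from the benchmark log. A rough reader, assuming the file name and record format from this commit; the pairing logic itself is an illustration, not part of the change:

```python
from datetime import datetime


def block_latencies(path='bigchaindb-benchmark.log'):
    """Yield (height, seconds) pairs from BEGIN BLOCK/COMMIT_BLOCK markers."""
    begin = {}
    with open(path) as f:
        for line in f:
            # Records look like: "<ts>, BENCHMARK, <message...>"
            ts_str, _, message = line.rstrip('\n').split(', ', 2)
            ts = datetime.strptime(ts_str, '%Y-%m-%d %H:%M:%S')
            if message.startswith('BEGIN BLOCK'):
                height = message.split('height:')[1].split(',')[0]
                begin[height] = ts
            elif message.startswith('COMMIT_BLOCK'):
                height = message.split('height:')[1]
                if height in begin:
                    yield height, (ts - begin.pop(height)).total_seconds()
```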
@@ -289,7 +289,7 @@ defined by [Python](https://docs.python.org/3.6/library/logging.html#levels),
 but case insensitive for convenience's sake:
 
 ```
-"critical", "error", "warning", "info", "debug", "notset"
+"critical", "error", "warning", "info", "benchmark", "debug", "notset"
 ```
 
 **Example**:
@@ -311,7 +311,7 @@ defined by [Python](https://docs.python.org/3.6/library/logging.html#levels),
 but case insensitive for convenience's sake:
 
 ```
-"critical", "error", "warning", "info", "debug", "notset"
+"critical", "error", "warning", "info", "benchmark", "debug", "notset"
 ```
 
 **Example**:
@@ -16,6 +16,7 @@ import pytest
 from pymongo import MongoClient
 
 from bigchaindb.common import crypto
+from bigchaindb.log import setup_logging
 from bigchaindb.tendermint.lib import Block
 
 TEST_DB_NAME = 'bigchain_test'
@@ -111,6 +112,10 @@ def _configure_bigchaindb(request):
     config = config_utils.env_config(config)
     config_utils.set_config(config)
 
+    # NOTE: since we use a custom log level
+    # for benchmark logging we need to set up logging
+    setup_logging()
+
 
 @pytest.fixture(scope='session')
 def _setup_database(_configure_bigchaindb):
@@ -1,7 +1,8 @@
 import json
 
 import pytest
 
+from abci.types_pb2 import RequestBeginBlock
 
 pytestmark = [pytest.mark.tendermint, pytest.mark.bdb]
 
@@ -57,8 +58,10 @@ def test_deliver_tx__valid_create_updates_db(b):
            .sign([alice.private_key])
 
     app = App(b)
+
+    begin_block = RequestBeginBlock()
     app.init_chain(['ignore'])
-    app.begin_block('ignore')
+    app.begin_block(begin_block)
 
     result = app.deliver_tx(encode_tx_to_bytes(tx))
     assert result.is_ok()
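The test updates in this and the following hunks all follow from the new `begin_block` body: it now dereferences `req_begin_block.header`, so the old `'ignore'` placeholder string would raise an `AttributeError`. A sketch of building a real request, assuming the `abci.types_pb2` bindings the tests import:

```python
from abci.types_pb2 import RequestBeginBlock

req = RequestBeginBlock()
req.header.height = 1   # nested protobuf fields are assignable in place
req.header.num_txs = 0  # field names as referenced in begin_block above
# app.begin_block(req) now has real header fields to log
```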
@@ -89,7 +92,9 @@ def test_deliver_tx__double_spend_fails(b):
 
     app = App(b)
     app.init_chain(['ignore'])
-    app.begin_block('ignore')
+
+    begin_block = RequestBeginBlock()
+    app.begin_block(begin_block)
 
     result = app.deliver_tx(encode_tx_to_bytes(tx))
     assert result.is_ok()
@@ -109,7 +114,9 @@ def test_deliver_transfer_tx__double_spend_fails(b):
 
     app = App(b)
     app.init_chain(['ignore'])
-    app.begin_block('ignore')
+
+    begin_block = RequestBeginBlock()
+    app.begin_block(begin_block)
 
     alice = generate_key_pair()
     bob = generate_key_pair()
@@ -152,7 +159,9 @@ def test_end_block_return_validator_updates(b):
 
     app = App(b)
     app.init_chain(['ignore'])
-    app.begin_block('ignore')
+
+    begin_block = RequestBeginBlock()
+    app.begin_block(begin_block)
 
     validator = {'pub_key': {'type': 'ed25519',
                              'data': 'B0E42D2589A455EAD339A035D6CE1C8C3E25863F268120AA0162AD7D003A4014'},
@@ -182,7 +191,8 @@ def test_store_pre_commit_state_in_end_block(b, alice):
     app = App(b)
     app.init_chain(['ignore'])
 
-    app.begin_block('ignore')
+    begin_block = RequestBeginBlock()
+    app.begin_block(begin_block)
     app.deliver_tx(encode_tx_to_bytes(tx))
     app.end_block(99)
 
@@ -191,7 +201,7 @@ def test_store_pre_commit_state_in_end_block(b, alice):
     assert resp['height'] == 99
     assert resp['transactions'] == [tx.id]
 
-    app.begin_block('ignore')
+    app.begin_block(begin_block)
     app.deliver_tx(encode_tx_to_bytes(tx))
     app.end_block(100)
     resp = query.get_pre_commit_state(b.connection, PRE_COMMIT_ID)
@@ -1,12 +1,13 @@
-import json
-from copy import deepcopy
-
-import pytest
-from abci.server import ProtocolHandler
-from io import BytesIO
 import abci.types_pb2 as types
+import json
+import pytest
+
+
+from abci.server import ProtocolHandler
 from abci.encoding import read_message
 from abci.messages import to_request_deliver_tx, to_request_check_tx
+from copy import deepcopy
+from io import BytesIO
 
 
 @pytest.mark.tendermint