From 85bb4a9233f4488715459c1fa62b80ef10a12432 Mon Sep 17 00:00:00 2001 From: Krish Date: Thu, 8 Dec 2016 08:54:21 +0000 Subject: [PATCH] Fix 781 and 124 The queue/pipeline used in the block creation process is unbounded. When too many transactions occur, the backlog table fills up quite easily and bigchaindb just reads all the transactions in the backlog to create a block. This causes memory usage to grow indefinitely. Limiting the queue size to 1000 transactions for now as the block creation and voting happen in batches of 1000. Can be increased later in case the block size is increased. --- bigchaindb/pipelines/block.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bigchaindb/pipelines/block.py b/bigchaindb/pipelines/block.py index 9039938b..0394aa23 100644 --- a/bigchaindb/pipelines/block.py +++ b/bigchaindb/pipelines/block.py @@ -8,7 +8,7 @@ function. import logging import rethinkdb as r -from multipipes import Pipeline, Node +from multipipes import Pipeline, Node, Pipe from bigchaindb.models import Transaction from bigchaindb.pipelines.utils import ChangeFeed @@ -161,6 +161,7 @@ def create_pipeline(): block_pipeline = BlockPipeline() pipeline = Pipeline([ + Pipe(maxsize=1000), Node(block_pipeline.filter_tx), Node(block_pipeline.validate_tx, fraction_of_cores=1), Node(block_pipeline.create, timeout=1),