From bae05e80a068bee066919072345780d0cb02fc6d Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 28 Feb 2017 11:27:55 +0100 Subject: [PATCH 001/150] sign whole transaction body instead of partial transaction --- bigchaindb/common/transaction.py | 86 ++++++++----------- .../source/data-models/transaction-model.rst | 2 +- tests/common/test_transaction.py | 19 ++-- 3 files changed, 44 insertions(+), 63 deletions(-) diff --git a/bigchaindb/common/transaction.py b/bigchaindb/common/transaction.py index 9da2421a..ebef6987 100644 --- a/bigchaindb/common/transaction.py +++ b/bigchaindb/common/transaction.py @@ -686,22 +686,17 @@ class Transaction(object): key_pairs = {gen_public_key(PrivateKey(private_key)): PrivateKey(private_key) for private_key in private_keys} - for index, input_ in enumerate(self.inputs): - # NOTE: We clone the current transaction but only add the output - # and input we're currently working on plus all - # previously signed ones. - tx_partial = Transaction(self.operation, self.asset, [input_], - self.outputs, self.metadata, - self.version) - - tx_partial_dict = tx_partial.to_dict() - tx_partial_dict = Transaction._remove_signatures(tx_partial_dict) - tx_serialized = Transaction._to_str(tx_partial_dict) - self._sign_input(input_, index, tx_serialized, key_pairs) + tx_dict = self.to_dict() + tx_dict = Transaction._remove_signatures(tx_dict) + tx_serialized = Transaction._to_str(tx_dict) + for i, input_ in enumerate(self.inputs): + message = '%s:%s' % (i, tx_serialized) + self.inputs[i] = self._sign_input(input_, message, key_pairs) return self - def _sign_input(self, input_, index, tx_serialized, key_pairs): - """Signs a single Input with a partial Transaction as message. + @classmethod + def _sign_input(cls, input_, message, key_pairs): + """Signs a single Input. Note: This method works only for the following Cryptoconditions @@ -712,31 +707,27 @@ class Transaction(object): Args: input_ (:class:`~bigchaindb.common.transaction. 
Input`) The Input to be signed. - index (int): The index of the input to be signed. - tx_serialized (str): The Transaction to be used as message. + message (str): The message to be signed key_pairs (dict): The keys to sign the Transaction with. """ if isinstance(input_.fulfillment, Ed25519Fulfillment): - self._sign_simple_signature_fulfillment(input_, index, - tx_serialized, key_pairs) + return cls._sign_simple_signature_fulfillment(input_, message, + key_pairs) elif isinstance(input_.fulfillment, ThresholdSha256Fulfillment): - self._sign_threshold_signature_fulfillment(input_, index, - tx_serialized, - key_pairs) + return cls._sign_threshold_signature_fulfillment(input_, message, + key_pairs) else: raise ValueError("Fulfillment couldn't be matched to " 'Cryptocondition fulfillment type.') - def _sign_simple_signature_fulfillment(self, input_, index, - tx_serialized, key_pairs): + @classmethod + def _sign_simple_signature_fulfillment(cls, input_, message, key_pairs): """Signs a Ed25519Fulfillment. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The input to be signed. - index (int): The index of the input to be - signed. - tx_serialized (str): The Transaction to be used as message. + message (str): The message to be signed key_pairs (dict): The keys to sign the Transaction with. """ # NOTE: To eliminate the dangers of accidentally signing a condition by @@ -748,23 +739,21 @@ class Transaction(object): try: # cryptoconditions makes no assumptions of the encoding of the # message to sign or verify. 
It only accepts bytestrings - input_.fulfillment.sign(tx_serialized.encode(), key_pairs[public_key]) + input_.fulfillment.sign(message.encode(), key_pairs[public_key]) except KeyError: raise KeypairMismatchException('Public key {} is not a pair to ' 'any of the private keys' .format(public_key)) - self.inputs[index] = input_ + return input_ - def _sign_threshold_signature_fulfillment(self, input_, index, - tx_serialized, key_pairs): + @classmethod + def _sign_threshold_signature_fulfillment(cls, input_, message, key_pairs): """Signs a ThresholdSha256Fulfillment. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. - index (int): The index of the Input to be - signed. - tx_serialized (str): The Transaction to be used as message. + message (str): The message to be signed key_pairs (dict): The keys to sign the Transaction with. """ input_ = deepcopy(input_) @@ -794,8 +783,8 @@ class Transaction(object): # cryptoconditions makes no assumptions of the encoding of the # message to sign or verify. It only accepts bytestrings - subffill.sign(tx_serialized.encode(), private_key) - self.inputs[index] = input_ + subffill.sign(message.encode(), private_key) + return input_ def inputs_valid(self, outputs=None): """Validates the Inputs in the Transaction against given @@ -848,24 +837,19 @@ class Transaction(object): raise ValueError('Inputs and ' 'output_condition_uris must have the same count') - def gen_tx(input_, output, output_condition_uri=None): - """Splits multiple IO Transactions into partial single IO - Transactions. 
- """ - tx = Transaction(self.operation, self.asset, [input_], - self.outputs, self.metadata, self.version) - tx_dict = tx.to_dict() - tx_dict = Transaction._remove_signatures(tx_dict) - tx_serialized = Transaction._to_str(tx_dict) + tx_dict = self.to_dict() + tx_dict = Transaction._remove_signatures(tx_dict) + tx_serialized = Transaction._to_str(tx_dict) - return self.__class__._input_valid(input_, - self.operation, - tx_serialized, - output_condition_uri) + def validate(i, output_condition_uri=None): + """ Validate input against output condition URI """ + message = '%s:%s' % (i, tx_serialized) - partial_transactions = map(gen_tx, self.inputs, - self.outputs, output_condition_uris) - return all(partial_transactions) + return self._input_valid(self.inputs[i], self.operation, message, + output_condition_uri) + + return all(validate(i, cond) + for i, cond in enumerate(output_condition_uris)) @staticmethod def _input_valid(input_, operation, tx_serialized, output_condition_uri=None): diff --git a/docs/server/source/data-models/transaction-model.rst b/docs/server/source/data-models/transaction-model.rst index 0ce10d03..3610d1fe 100644 --- a/docs/server/source/data-models/transaction-model.rst +++ b/docs/server/source/data-models/transaction-model.rst @@ -49,4 +49,4 @@ Here's some explanation of the contents of a :ref:`transaction `: Later, when we get to the models for the block and the vote, we'll see that both include a signature (from the node which created it). You may wonder why transactions don't have signatures... The answer is that they do! They're just hidden inside the ``fulfillment`` string of each input. A creation transaction is signed by whoever created it. A transfer transaction is signed by whoever currently controls or owns it. -What gets signed? 
For each input in the transaction, the "fullfillment message" that gets signed includes the ``operation``, ``data``, ``version``, ``id``, corresponding ``condition``, and the fulfillment itself, except with its fulfillment string set to ``null``. The computed signature goes into creating the ``fulfillment`` string of the input. +What gets signed? For each input in the transaction, the "fullfillment message" that gets signed includes the JSON serialized body of the transaction, minus any fulfillment strings, and with "n:" prepended where n is the index of the input being signed. The computed signature goes into creating the ``fulfillment`` string of the input. diff --git a/tests/common/test_transaction.py b/tests/common/test_transaction.py index a2782583..12c71497 100644 --- a/tests/common/test_transaction.py +++ b/tests/common/test_transaction.py @@ -496,7 +496,8 @@ def test_validate_tx_simple_create_signature(user_input, user_output, user_priv, tx = Transaction(Transaction.CREATE, asset_definition, [user_input], [user_output]) expected = deepcopy(user_output) - expected.fulfillment.sign(str(tx).encode(), PrivateKey(user_priv)) + message = ('0:' + str(tx)).encode() + expected.fulfillment.sign(message, PrivateKey(user_priv)) tx.sign([user_priv]) assert tx.inputs[0].to_dict()['fulfillment'] == \ @@ -513,7 +514,6 @@ def test_invoke_simple_signature_fulfillment_with_invalid_params(utx, with raises(KeypairMismatchException): invalid_key_pair = {'wrong_pub_key': 'wrong_priv_key'} utx._sign_simple_signature_fulfillment(user_input, - 0, 'somemessage', invalid_key_pair) @@ -524,13 +524,11 @@ def test_sign_threshold_with_invalid_params(utx, user_user2_threshold_input, with raises(KeypairMismatchException): utx._sign_threshold_signature_fulfillment(user_user2_threshold_input, - 0, 'somemessage', {user3_pub: user3_priv}) with raises(KeypairMismatchException): user_user2_threshold_input.owners_before = ['somewrongvalue'] 
utx._sign_threshold_signature_fulfillment(user_user2_threshold_input, - 0, 'somemessage', None) @@ -560,13 +558,11 @@ def test_validate_multiple_inputs(user_input, user_output, user_priv, expected_first = deepcopy(tx) expected_second = deepcopy(tx) - expected_first.inputs = [expected_first.inputs[0]] - expected_second.inputs = [expected_second.inputs[1]] - expected_first_bytes = str(expected_first).encode() + expected_first_bytes = ('0:' + str(tx)).encode() expected_first.inputs[0].fulfillment.sign(expected_first_bytes, PrivateKey(user_priv)) - expected_second_bytes = str(expected_second).encode() + expected_second_bytes = ('1:' + str(tx)).encode() expected_second.inputs[0].fulfillment.sign(expected_second_bytes, PrivateKey(user_priv)) tx.sign([user_priv]) @@ -596,10 +592,11 @@ def test_validate_tx_threshold_create_signature(user_user2_threshold_input, tx = Transaction(Transaction.CREATE, asset_definition, [user_user2_threshold_input], [user_user2_threshold_output]) + message = ('0:' + str(tx)).encode() expected = deepcopy(user_user2_threshold_output) - expected.fulfillment.subconditions[0]['body'].sign(str(tx).encode(), + expected.fulfillment.subconditions[0]['body'].sign(message, PrivateKey(user_priv)) - expected.fulfillment.subconditions[1]['body'].sign(str(tx).encode(), + expected.fulfillment.subconditions[1]['body'].sign(message, PrivateKey(user2_priv)) tx.sign([user_priv, user2_priv]) @@ -861,7 +858,7 @@ def test_create_transfer_transaction_single_io(tx, user_pub, user2_pub, expected_input = deepcopy(inputs[0]) expected['id'] = transfer_tx['id'] - expected_input.fulfillment.sign(serialize(expected).encode(), + expected_input.fulfillment.sign(('0:' + serialize(expected)).encode(), PrivateKey(user_priv)) expected_ffill = expected_input.fulfillment.serialize_uri() transfer_ffill = transfer_tx['inputs'][0]['fulfillment'] From ae8153bd10f4af9121c6175059e345c34b328232 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 14 Mar 2017 15:05:02 +0100 Subject: [PATCH 
002/150] new home of structural validation in tests --- .../common/schema/test_transaction_schema.py | 39 ---------- .../validation/test_transaction_structure.py | 71 +++++++++++++++++++ 2 files changed, 71 insertions(+), 39 deletions(-) create mode 100644 tests/validation/test_transaction_structure.py diff --git a/tests/common/schema/test_transaction_schema.py b/tests/common/schema/test_transaction_schema.py index dca10e70..86b192e0 100644 --- a/tests/common/schema/test_transaction_schema.py +++ b/tests/common/schema/test_transaction_schema.py @@ -19,42 +19,3 @@ def test_validate_transaction_signed_transfer(signed_transfer_tx): def test_validate_transaction_fails(): with raises(SchemaValidationError): validate_transaction_schema({}) - - -def test_validate_fails_metadata_empty_dict(create_tx): - create_tx.metadata = {'a': 1} - validate_transaction_schema(create_tx.to_dict()) - create_tx.metadata = None - validate_transaction_schema(create_tx.to_dict()) - create_tx.metadata = {} - with raises(SchemaValidationError): - validate_transaction_schema(create_tx.to_dict()) - - -def test_transfer_asset_schema(signed_transfer_tx): - tx = signed_transfer_tx.to_dict() - validate_transaction_schema(tx) - tx['asset']['data'] = {} - with raises(SchemaValidationError): - validate_transaction_schema(tx) - del tx['asset']['data'] - tx['asset']['id'] = 'b' * 63 - with raises(SchemaValidationError): - validate_transaction_schema(tx) - - -def test_create_single_input(create_tx): - tx = create_tx.to_dict() - tx['inputs'] += tx['inputs'] - with raises(SchemaValidationError): - validate_transaction_schema(tx) - tx['inputs'] = [] - with raises(SchemaValidationError): - validate_transaction_schema(tx) - - -def test_create_tx_no_fulfills(create_tx): - tx = create_tx.to_dict() - tx['inputs'][0]['fulfills'] = {'tx': 'a' * 64, 'output': 0} - with raises(SchemaValidationError): - validate_transaction_schema(tx) diff --git a/tests/validation/test_transaction_structure.py 
b/tests/validation/test_transaction_structure.py new file mode 100644 index 00000000..6cff9350 --- /dev/null +++ b/tests/validation/test_transaction_structure.py @@ -0,0 +1,71 @@ +""" +All tests of transaction structure. The concern here is that transaction +structural / schematic issues are caught when reading a transaction +(ie going from dict -> transaction). +""" + +import pytest + +from bigchaindb.common.exceptions import SchemaValidationError +from bigchaindb.models import Transaction + + +################################################################################ +# Helper functions + +def validate(tx): + if isinstance(tx, Transaction): + tx = tx.to_dict() + Transaction.from_dict(tx) + + +def validate_throws(tx): + with pytest.raises(SchemaValidationError): + validate(tx) + + +################################################################################ +# Metadata + +def test_validate_fails_metadata_empty_dict(create_tx): + create_tx.metadata = {'a': 1} + validate(create_tx) + create_tx.metadata = None + validate(create_tx) + create_tx.metadata = {} + validate_throws(create_tx) + + +################################################################################ +# Asset + +def test_transfer_asset_schema(signed_transfer_tx): + tx = signed_transfer_tx.to_dict() + validate_transaction_schema(tx) + tx['asset']['data'] = {} + with raises(SchemaValidationError): + validate_transaction_schema(tx) + del tx['asset']['data'] + tx['asset']['id'] = 'b' * 63 + with raises(SchemaValidationError): + validate_transaction_schema(tx) + + +################################################################################ +# Inputs + +def test_create_single_input(create_tx): + tx = create_tx.to_dict() + tx['inputs'] += tx['inputs'] + with raises(SchemaValidationError): + validate_transaction_schema(tx) + tx['inputs'] = [] + with raises(SchemaValidationError): + validate_transaction_schema(tx) + + +def test_create_tx_no_fulfills(create_tx): + tx = create_tx.to_dict() + 
tx['inputs'][0]['fulfills'] = {'tx': 'a' * 64, 'output': 0} + with raises(SchemaValidationError): + validate_transaction_schema(tx) From 8a5814bb08eb2cdca34a9d41f4bfd1dc2da9b932 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 14 Mar 2017 15:12:12 +0100 Subject: [PATCH 003/150] clarify schema testing module --- .../common/schema/test_transaction_schema.py | 21 -------- tests/common/schema/test_vote_schema.py | 13 ----- tests/common/{schema => }/test_schema.py | 50 ++++++++++++++++++- 3 files changed, 48 insertions(+), 36 deletions(-) delete mode 100644 tests/common/schema/test_transaction_schema.py delete mode 100644 tests/common/schema/test_vote_schema.py rename tests/common/{schema => }/test_schema.py (58%) diff --git a/tests/common/schema/test_transaction_schema.py b/tests/common/schema/test_transaction_schema.py deleted file mode 100644 index 86b192e0..00000000 --- a/tests/common/schema/test_transaction_schema.py +++ /dev/null @@ -1,21 +0,0 @@ -from pytest import raises - -from bigchaindb.common.exceptions import SchemaValidationError -from bigchaindb.common.schema import validate_transaction_schema - - -def test_validate_transaction_create(create_tx): - validate_transaction_schema(create_tx.to_dict()) - - -def test_validate_transaction_signed_create(signed_create_tx): - validate_transaction_schema(signed_create_tx.to_dict()) - - -def test_validate_transaction_signed_transfer(signed_transfer_tx): - validate_transaction_schema(signed_transfer_tx.to_dict()) - - -def test_validate_transaction_fails(): - with raises(SchemaValidationError): - validate_transaction_schema({}) diff --git a/tests/common/schema/test_vote_schema.py b/tests/common/schema/test_vote_schema.py deleted file mode 100644 index a9de9492..00000000 --- a/tests/common/schema/test_vote_schema.py +++ /dev/null @@ -1,13 +0,0 @@ -from pytest import raises - -from bigchaindb.common.exceptions import SchemaValidationError -from bigchaindb.common.schema import validate_vote_schema - - -def 
test_validate_vote(structurally_valid_vote): - validate_vote_schema(structurally_valid_vote) - - -def test_validate_vote_fails(): - with raises(SchemaValidationError): - validate_vote_schema({}) diff --git a/tests/common/schema/test_schema.py b/tests/common/test_schema.py similarity index 58% rename from tests/common/schema/test_schema.py rename to tests/common/test_schema.py index 02a00ee2..ef972f22 100644 --- a/tests/common/schema/test_schema.py +++ b/tests/common/test_schema.py @@ -1,6 +1,18 @@ -from bigchaindb.common.schema import ( - TX_SCHEMA, VOTE_SCHEMA, drop_schema_descriptions) +""" +This module is tests related to schema checking, but _not_ of granular schematic +properties related to validation. +""" +from pytest import raises + +from bigchaindb.common.exceptions import SchemaValidationError +from bigchaindb.common.schema import ( + TX_SCHEMA, VOTE_SCHEMA, drop_schema_descriptions, + validate_transaction_schema, validate_vote_schema) + + +################################################################################ +# Test of schema utils def _test_additionalproperties(node, path=''): """ @@ -67,3 +79,37 @@ def test_drop_descriptions(): } drop_schema_descriptions(node) assert node == expected + + +################################################################################ +# Test call transaction schema + + +def test_validate_transaction_create(create_tx): + validate_transaction_schema(create_tx.to_dict()) + + +def test_validate_transaction_signed_create(signed_create_tx): + validate_transaction_schema(signed_create_tx.to_dict()) + + +def test_validate_transaction_signed_transfer(signed_transfer_tx): + validate_transaction_schema(signed_transfer_tx.to_dict()) + + +def test_validate_transaction_fails(): + with raises(SchemaValidationError): + validate_transaction_schema({}) + + +################################################################################ +# Test call vote schema + + +def test_validate_vote(structurally_valid_vote): + 
validate_vote_schema(structurally_valid_vote) + + +def test_validate_vote_fails(): + with raises(SchemaValidationError): + validate_vote_schema({}) From 66830fc1d9228b9fd70c0937b6d30d89d5f533d1 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 14 Mar 2017 15:18:47 +0100 Subject: [PATCH 004/150] move version validation to transaction structure test module --- tests/common/test_transaction.py | 34 -------------- .../validation/test_transaction_structure.py | 44 +++++++++++++------ 2 files changed, 31 insertions(+), 47 deletions(-) diff --git a/tests/common/test_transaction.py b/tests/common/test_transaction.py index 45cadc3b..153c0e6c 100644 --- a/tests/common/test_transaction.py +++ b/tests/common/test_transaction.py @@ -947,37 +947,3 @@ def test_cant_add_empty_input(): with raises(TypeError): tx.add_input(None) - - -def test_validate_version(utx): - import re - import bigchaindb.version - from .utils import validate_transaction_model - from bigchaindb.common.exceptions import SchemaValidationError - - short_ver = bigchaindb.version.__short_version__ - assert utx.version == re.match(r'^(.*\d)', short_ver).group(1) - - validate_transaction_model(utx) - - # At version 1, transaction version will break step with server version. 
- utx.version = '1.0.0' - with raises(SchemaValidationError): - validate_transaction_model(utx) - - -def test_create_tx_no_asset_id(b, utx): - from bigchaindb.common.exceptions import SchemaValidationError - from .utils import validate_transaction_model - utx.asset['id'] = 'b' * 64 - with raises(SchemaValidationError): - validate_transaction_model(utx) - - -def test_transfer_tx_asset_schema(transfer_utx): - from bigchaindb.common.exceptions import SchemaValidationError - from .utils import validate_transaction_model - tx = transfer_utx - tx.asset['data'] = {} - with raises(SchemaValidationError): - validate_transaction_model(tx) diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index 6cff9350..dfdeac01 100644 --- a/tests/validation/test_transaction_structure.py +++ b/tests/validation/test_transaction_structure.py @@ -5,6 +5,7 @@ structural / schematic issues are caught when reading a transaction """ import pytest +import re from bigchaindb.common.exceptions import SchemaValidationError from bigchaindb.models import Transaction @@ -19,7 +20,7 @@ def validate(tx): Transaction.from_dict(tx) -def validate_throws(tx): +def validate_raises(tx): with pytest.raises(SchemaValidationError): validate(tx) @@ -33,7 +34,7 @@ def test_validate_fails_metadata_empty_dict(create_tx): create_tx.metadata = None validate(create_tx) create_tx.metadata = {} - validate_throws(create_tx) + validate_raises(create_tx) ################################################################################ @@ -41,14 +42,17 @@ def test_validate_fails_metadata_empty_dict(create_tx): def test_transfer_asset_schema(signed_transfer_tx): tx = signed_transfer_tx.to_dict() - validate_transaction_schema(tx) + validate(tx) tx['asset']['data'] = {} - with raises(SchemaValidationError): - validate_transaction_schema(tx) + validate_raises(tx) del tx['asset']['data'] tx['asset']['id'] = 'b' * 63 - with raises(SchemaValidationError): - 
validate_transaction_schema(tx) + validate_raises(tx) + + +def test_create_tx_no_asset_id(create_tx): + create_tx.asset['id'] = 'b' * 64 + validate_raises(create_tx) ################################################################################ @@ -57,15 +61,29 @@ def test_transfer_asset_schema(signed_transfer_tx): def test_create_single_input(create_tx): tx = create_tx.to_dict() tx['inputs'] += tx['inputs'] - with raises(SchemaValidationError): - validate_transaction_schema(tx) + validate_raises(tx) tx['inputs'] = [] - with raises(SchemaValidationError): - validate_transaction_schema(tx) + validate_raises(tx) def test_create_tx_no_fulfills(create_tx): tx = create_tx.to_dict() tx['inputs'][0]['fulfills'] = {'tx': 'a' * 64, 'output': 0} - with raises(SchemaValidationError): - validate_transaction_schema(tx) + validate_raises(tx) + + +################################################################################ +# Version + +def test_validate_version(create_tx): + import re + import bigchaindb.version + + short_ver = bigchaindb.version.__short_version__ + assert create_tx.version == re.match(r'^(.*\d)', short_ver).group(1) + + validate(create_tx) + + # At version 1, transaction version will break step with server version. 
+ create_tx.version = '1.0.0' + validate_raises(create_tx) From 48a3ba96ae5b9dcd9feb4715615a0a36aebb112a Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 14 Mar 2017 16:18:08 +0100 Subject: [PATCH 005/150] move tx tests from test_models.py --- tests/{test_models.py => test_block_model.py} | 0 tests/validation/test_transaction_structure.py | 13 +++++++++++++ 2 files changed, 13 insertions(+) rename tests/{test_models.py => test_block_model.py} (100%) diff --git a/tests/test_models.py b/tests/test_block_model.py similarity index 100% rename from tests/test_models.py rename to tests/test_block_model.py diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index dfdeac01..461d408f 100644 --- a/tests/validation/test_transaction_structure.py +++ b/tests/validation/test_transaction_structure.py @@ -25,6 +25,14 @@ def validate_raises(tx): validate(tx) +################################################################################ +# Operation + +def test_validate_invalid_operation(create_tx): + create_tx.operation = 'something invalid' + validate_raises(create_tx) + + ################################################################################ # Metadata @@ -58,6 +66,11 @@ def test_create_tx_no_asset_id(create_tx): ################################################################################ # Inputs +def test_no_inputs(create_tx): + create_tx.inputs = [] + validate_raises(create_tx) + + def test_create_single_input(create_tx): tx = create_tx.to_dict() tx['inputs'] += tx['inputs'] From 4b060e64882ee57adcd141d12b2e862059121281 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 14 Mar 2017 18:07:25 +0100 Subject: [PATCH 006/150] move tests related to assets --- tests/assets/test_digital_assets.py | 26 ------ tests/assets/test_divisible_assets.py | 85 ------------------- .../validation/test_transaction_structure.py | 29 ++++++- 3 files changed, 25 insertions(+), 115 deletions(-) diff --git 
a/tests/assets/test_digital_assets.py b/tests/assets/test_digital_assets.py index c31ec3da..1e63dbec 100644 --- a/tests/assets/test_digital_assets.py +++ b/tests/assets/test_digital_assets.py @@ -1,4 +1,3 @@ -from bigchaindb.common.exceptions import ValidationError import pytest import random @@ -19,18 +18,6 @@ def test_asset_transfer(b, user_pk, user_sk): assert tx_transfer_signed.asset['id'] == tx_create.id -def test_validate_bad_asset_creation(b, user_pk): - from bigchaindb.models import Transaction - - # `data` needs to be a dictionary - tx = Transaction.create([b.me], [([user_pk], 1)]) - tx.asset['data'] = 'a' - tx_signed = tx.sign([b.me_private]) - - with pytest.raises(ValidationError): - Transaction.from_dict(tx_signed.to_dict()) - - @pytest.mark.bdb @pytest.mark.usefixtures('inputs') def test_validate_transfer_asset_id_mismatch(b, user_pk, user_sk): @@ -91,19 +78,6 @@ def test_asset_id_mismatch(b, user_pk): Transaction.get_asset_id([tx1, tx2]) -def test_create_invalid_divisible_asset(b, user_pk, user_sk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import ValidationError - - # Asset amount must be more than 0 - tx = Transaction.create([user_pk], [([user_pk], 1)]) - tx.outputs[0].amount = 0 - tx.sign([user_sk]) - - with pytest.raises(ValidationError): - Transaction.from_dict(tx.to_dict()) - - def test_create_valid_divisible_asset(b, user_pk, user_sk): from bigchaindb.models import Transaction diff --git a/tests/assets/test_divisible_assets.py b/tests/assets/test_divisible_assets.py index 87a29c2b..e1ea726f 100644 --- a/tests/assets/test_divisible_assets.py +++ b/tests/assets/test_divisible_assets.py @@ -635,88 +635,3 @@ def test_divide(b, user_pk, user_sk): assert len(tx_transfer_signed.outputs) == 3 for output in tx_transfer_signed.outputs: assert output.amount == 1 - - -# Check that negative inputs are caught when creating a TRANSFER transaction -@pytest.mark.skip(reason='part of tx structural tests') -@pytest.mark.bdb 
-@pytest.mark.usefixtures('inputs') -def test_non_positive_amounts_on_transfer(b, user_pk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import AmountError - - # CREATE divisible asset with 1 output with amount 3 - tx_create = Transaction.create([b.me], [([user_pk], 3)]) - tx_create_signed = tx_create.sign([b.me_private]) - # create block - block = b.create_block([tx_create_signed]) - assert block.validate(b) == block - b.write_block(block) - # vote - vote = b.vote(block.id, b.get_last_voted_block().id, True) - b.write_vote(vote) - - with pytest.raises(AmountError): - Transaction.transfer(tx_create.to_inputs(), - [([b.me], 4), ([b.me], -1)], - asset_id=tx_create.id) - - -# Check that negative inputs are caught when validating a TRANSFER transaction -@pytest.mark.skip(reason='part of tx structural tests') -@pytest.mark.bdb -@pytest.mark.usefixtures('inputs') -def test_non_positive_amounts_on_transfer_validate(b, user_pk, user_sk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import AmountError - - # CREATE divisible asset with 1 output with amount 3 - tx_create = Transaction.create([b.me], [([user_pk], 3)]) - tx_create_signed = tx_create.sign([b.me_private]) - # create block - block = b.create_block([tx_create_signed]) - assert block.validate(b) == block - b.write_block(block) - # vote - vote = b.vote(block.id, b.get_last_voted_block().id, True) - b.write_vote(vote) - - # create a transfer transaction with 3 outputs and check if the amount - # of each output is 1 - tx_transfer = Transaction.transfer(tx_create.to_inputs(), - [([b.me], 4), ([b.me], 1)], - asset_id=tx_create.id) - tx_transfer.outputs[1].amount = -1 - tx_transfer_signed = tx_transfer.sign([user_sk]) - - with pytest.raises(AmountError): - tx_transfer_signed.validate(b) - - -# Check that negative inputs are caught when creating a CREATE transaction -@pytest.mark.bdb -@pytest.mark.usefixtures('inputs') -def 
test_non_positive_amounts_on_create(b, user_pk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import AmountError - - # CREATE divisible asset with 1 output with amount 3 - with pytest.raises(AmountError): - Transaction.create([b.me], [([user_pk], -3)]) - - -# Check that negative inputs are caught when validating a CREATE transaction -@pytest.mark.skip(reason='part of tx structural tests') -@pytest.mark.bdb -@pytest.mark.usefixtures('inputs') -def test_non_positive_amounts_on_create_validate(b, user_pk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import AmountError - - # CREATE divisible asset with 1 output with amount 3 - tx_create = Transaction.create([b.me], [([user_pk], 3)]) - tx_create.outputs[0].amount = -3 - tx_create_signed = tx_create.sign([b.me_private]) - - with pytest.raises(AmountError): - tx_create_signed.validate(b) diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index 461d408f..cf7c43ca 100644 --- a/tests/validation/test_transaction_structure.py +++ b/tests/validation/test_transaction_structure.py @@ -5,9 +5,8 @@ structural / schematic issues are caught when reading a transaction """ import pytest -import re -from bigchaindb.common.exceptions import SchemaValidationError +from bigchaindb.common.exceptions import AmountError, SchemaValidationError from bigchaindb.models import Transaction @@ -20,11 +19,16 @@ def validate(tx): Transaction.from_dict(tx) -def validate_raises(tx): - with pytest.raises(SchemaValidationError): +def validate_raises(tx, exc=SchemaValidationError): + with pytest.raises(exc): validate(tx) +# We should test that validation works when we expect it to +def test_validation_passes(create_tx): + validate(create_tx) + + ################################################################################ # Operation @@ -63,6 +67,11 @@ def test_create_tx_no_asset_id(create_tx): validate_raises(create_tx) +def 
test_create_tx_asset_type(create_tx): + create_tx.asset['data'] = 'a' + validate_raises(create_tx) + + ################################################################################ # Inputs @@ -85,6 +94,18 @@ def test_create_tx_no_fulfills(create_tx): validate_raises(tx) +################################################################################ +# Outputs + + +def test_bad_amounts(create_tx, signed_transfer_tx): + for tx in [create_tx, signed_transfer_tx]: + tx.outputs[0].amount = 0 + validate_raises(tx, AmountError) + tx.outputs[0].amount = -1 + validate_raises(tx, AmountError) + + ################################################################################ # Version From 47c6a722ad4a2834a5d41c9e8e84f8abea31541d Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 14 Mar 2017 18:35:53 +0100 Subject: [PATCH 007/150] move tests to validate transaction ID --- tests/common/test_transaction.py | 27 ++++--------------- .../validation/test_transaction_structure.py | 24 ++++++++++++++++- 2 files changed, 28 insertions(+), 23 deletions(-) diff --git a/tests/common/test_transaction.py b/tests/common/test_transaction.py index 153c0e6c..ec288de5 100644 --- a/tests/common/test_transaction.py +++ b/tests/common/test_transaction.py @@ -1,3 +1,8 @@ +""" +These are tests of the API of the Transaction class and associated classes. +Tests for transaction validation are separate. 
+""" + from pytest import raises @@ -341,28 +346,6 @@ def test_transaction_deserialization(user_input, user_output, data): validate_transaction_model(tx) -def test_tx_serialization_with_incorrect_hash(utx): - from bigchaindb.common.transaction import Transaction - from bigchaindb.common.exceptions import InvalidHash - - utx_dict = utx.to_dict() - utx_dict['id'] = 'a' * 64 - with raises(InvalidHash): - Transaction.from_dict(utx_dict) - utx_dict.pop('id') - - -def test_tx_serialization_hash_function(tx): - import sha3 - import json - tx_dict = tx.to_dict() - tx_dict['inputs'][0]['fulfillment'] = None - del tx_dict['id'] - payload = json.dumps(tx_dict, skipkeys=False, sort_keys=True, - separators=(',', ':')) - assert sha3.sha3_256(payload.encode()).hexdigest() == tx.id - - def test_invalid_input_initialization(user_input, user_pub): from bigchaindb.common.transaction import Input diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index cf7c43ca..6b64a559 100644 --- a/tests/validation/test_transaction_structure.py +++ b/tests/validation/test_transaction_structure.py @@ -6,7 +6,8 @@ structural / schematic issues are caught when reading a transaction import pytest -from bigchaindb.common.exceptions import AmountError, SchemaValidationError +from bigchaindb.common.exceptions import (AmountError, InvalidHash, + SchemaValidationError) from bigchaindb.models import Transaction @@ -29,6 +30,27 @@ def test_validation_passes(create_tx): validate(create_tx) +################################################################################ +# ID + + +def test_tx_serialization_hash_function(create_tx): + import sha3 + import json + tx = create_tx.to_dict() + tx['inputs'][0]['fulfillment'] = None + del tx['id'] + payload = json.dumps(tx, skipkeys=False, sort_keys=True, + separators=(',', ':')) + assert sha3.sha3_256(payload.encode()).hexdigest() == create_tx.id + + +def test_tx_serialization_with_incorrect_hash(create_tx): + 
tx = create_tx.to_dict() + tx['id'] = 'a' * 64 + validate_raises(tx, InvalidHash) + + ################################################################################ # Operation From 28e06399ae27fc824190af7dd22026199f18aa94 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 14 Mar 2017 18:40:04 +0100 Subject: [PATCH 008/150] fix breakage from other branch --- tests/common/test_schema.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/common/test_schema.py b/tests/common/test_schema.py index ef972f22..1db17170 100644 --- a/tests/common/test_schema.py +++ b/tests/common/test_schema.py @@ -7,7 +7,7 @@ from pytest import raises from bigchaindb.common.exceptions import SchemaValidationError from bigchaindb.common.schema import ( - TX_SCHEMA, VOTE_SCHEMA, drop_schema_descriptions, + TX_SCHEMA_COMMON, VOTE_SCHEMA, drop_schema_descriptions, validate_transaction_schema, validate_vote_schema) @@ -31,7 +31,7 @@ def _test_additionalproperties(node, path=''): def test_transaction_schema_additionalproperties(): - _test_additionalproperties(TX_SCHEMA) + _test_additionalproperties(TX_SCHEMA_COMMON) def test_vote_schema_additionalproperties(): From 56b81f9d8d684d782ea1b387a36d9e04ec79b1a9 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 14 Mar 2017 21:45:36 +0100 Subject: [PATCH 009/150] change amount type to string in schema file --- bigchaindb/common/schema/transaction.yaml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/bigchaindb/common/schema/transaction.yaml b/bigchaindb/common/schema/transaction.yaml index 86e5947b..ff52a366 100644 --- a/bigchaindb/common/schema/transaction.yaml +++ b/bigchaindb/common/schema/transaction.yaml @@ -132,7 +132,7 @@ definitions: - public_keys properties: amount: - type: integer + type: string description: | Integral amount of the asset represented by this output. In the case of a non divisible asset, this will always be 1. 
@@ -158,10 +158,6 @@ definitions: "$ref": "#/definitions/public_keys" description: | List of public keys associated with the conditions on an output. - amount: - type: integer - description: | - Integral amount of the asset represented by this condition. input: type: "object" description: From f23bfa52d161556d64560566fc157d46430c9e6f Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 15 Mar 2017 10:00:00 +0100 Subject: [PATCH 010/150] Output.from_dict accepts string --- bigchaindb/common/transaction.py | 8 ++++++-- tests/common/test_transaction.py | 8 ++++---- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/bigchaindb/common/transaction.py b/bigchaindb/common/transaction.py index 23b8f169..00126cd9 100644 --- a/bigchaindb/common/transaction.py +++ b/bigchaindb/common/transaction.py @@ -264,7 +264,7 @@ class Output(object): output = { 'public_keys': self.public_keys, 'condition': condition, - 'amount': self.amount + 'amount': str(self.amount), } return output @@ -381,7 +381,11 @@ class Output(object): except KeyError: # NOTE: Hashlock condition case fulfillment = data['condition']['uri'] - return cls(fulfillment, data['public_keys'], data['amount']) + try: + amount = int(data['amount']) + except ValueError: + raise AmountError('Invalid amount: %s' % amount) + return cls(fulfillment, data['public_keys'], amount) class Transaction(object): diff --git a/tests/common/test_transaction.py b/tests/common/test_transaction.py index ec288de5..5a9ad766 100644 --- a/tests/common/test_transaction.py +++ b/tests/common/test_transaction.py @@ -83,7 +83,7 @@ def test_output_serialization(user_Ed25519, user_pub): 'details': user_Ed25519.to_dict(), }, 'public_keys': [user_pub], - 'amount': 1, + 'amount': '1', } cond = Output(user_Ed25519, [user_pub], 1) @@ -101,7 +101,7 @@ def test_output_deserialization(user_Ed25519, user_pub): 'details': user_Ed25519.to_dict() }, 'public_keys': [user_pub], - 'amount': 1, + 'amount': '1', } cond = Output.from_dict(cond) @@ -120,7 
+120,7 @@ def test_output_hashlock_serialization(): 'uri': hashlock, }, 'public_keys': None, - 'amount': 1, + 'amount': '1', } cond = Output(hashlock, amount=1) @@ -140,7 +140,7 @@ def test_output_hashlock_deserialization(): 'uri': hashlock }, 'public_keys': None, - 'amount': 1, + 'amount': '1', } cond = Output.from_dict(cond) From 69bafc80c0598c7b7b51509547bcd4290540fdb4 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 15 Mar 2017 10:17:41 +0100 Subject: [PATCH 011/150] put limits on amount size --- bigchaindb/common/schema/transaction.yaml | 1 + bigchaindb/common/transaction.py | 2 ++ tests/validation/test_transaction_structure.py | 16 +++++++++++++++- 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/bigchaindb/common/schema/transaction.yaml b/bigchaindb/common/schema/transaction.yaml index ff52a366..f63b652e 100644 --- a/bigchaindb/common/schema/transaction.yaml +++ b/bigchaindb/common/schema/transaction.yaml @@ -133,6 +133,7 @@ definitions: properties: amount: type: string + pattern: "^[0-9]{1,20}$" description: | Integral amount of the asset represented by this output. In the case of a non divisible asset, this will always be 1. 
diff --git a/bigchaindb/common/transaction.py b/bigchaindb/common/transaction.py index 00126cd9..746844e4 100644 --- a/bigchaindb/common/transaction.py +++ b/bigchaindb/common/transaction.py @@ -229,6 +229,8 @@ class Output(object): raise TypeError('`amount` must be an int') if amount < 1: raise AmountError('`amount` must be greater than 0') + if amount > 9 * 10 ** 18: + raise AmountError('`amount` must be <= 9000000000000000000') self.fulfillment = fulfillment self.amount = amount diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index 6b64a559..f6678b32 100644 --- a/tests/validation/test_transaction_structure.py +++ b/tests/validation/test_transaction_structure.py @@ -14,6 +14,7 @@ from bigchaindb.models import Transaction ################################################################################ # Helper functions + def validate(tx): if isinstance(tx, Transaction): tx = tx.to_dict() @@ -120,7 +121,7 @@ def test_create_tx_no_fulfills(create_tx): # Outputs -def test_bad_amounts(create_tx, signed_transfer_tx): +def test_low_amounts(create_tx, signed_transfer_tx): for tx in [create_tx, signed_transfer_tx]: tx.outputs[0].amount = 0 validate_raises(tx, AmountError) @@ -128,6 +129,19 @@ def test_bad_amounts(create_tx, signed_transfer_tx): validate_raises(tx, AmountError) +def test_high_amounts(create_tx): + # Should raise a SchemaValidationError - don't want to allow ridiculously + # large numbers to get converted to int + create_tx.outputs[0].amount = 10 ** 21 + validate_raises(create_tx) + # Should raise AmountError + create_tx.outputs[0].amount = 9 * 10 ** 18 + 1 + validate_raises(create_tx, AmountError) + # Should pass + create_tx.outputs[0].amount -= 1 + validate(create_tx) + + ################################################################################ # Version From 48a766400dcf630949ca42f1d833cbbde6cc28c1 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 15 Mar 2017 11:03:00 +0100 
Subject: [PATCH 012/150] try to bump codecov into correct calculation --- tests/validation/test_transaction_structure.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index 6b64a559..63063f7f 100644 --- a/tests/validation/test_transaction_structure.py +++ b/tests/validation/test_transaction_structure.py @@ -119,7 +119,6 @@ def test_create_tx_no_fulfills(create_tx): ################################################################################ # Outputs - def test_bad_amounts(create_tx, signed_transfer_tx): for tx in [create_tx, signed_transfer_tx]: tx.outputs[0].amount = 0 From a869f6d1dd5778f11f1ca48483be29bafbc25214 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 15 Mar 2017 11:08:19 +0100 Subject: [PATCH 013/150] fix wrong exception after schema change --- tests/validation/test_transaction_structure.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index f6678b32..9edfd367 100644 --- a/tests/validation/test_transaction_structure.py +++ b/tests/validation/test_transaction_structure.py @@ -126,7 +126,7 @@ def test_low_amounts(create_tx, signed_transfer_tx): tx.outputs[0].amount = 0 validate_raises(tx, AmountError) tx.outputs[0].amount = -1 - validate_raises(tx, AmountError) + validate_raises(tx) def test_high_amounts(create_tx): From 25650597b1c88ab622fc1189d4f98f3b3a9d4828 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 15 Mar 2017 11:27:35 +0100 Subject: [PATCH 014/150] test invalid string amount case --- bigchaindb/common/transaction.py | 2 +- tests/common/test_transaction.py | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/bigchaindb/common/transaction.py b/bigchaindb/common/transaction.py index 746844e4..81b5614f 100644 --- a/bigchaindb/common/transaction.py +++ b/bigchaindb/common/transaction.py @@ 
-386,7 +386,7 @@ class Output(object): try: amount = int(data['amount']) except ValueError: - raise AmountError('Invalid amount: %s' % amount) + raise AmountError('Invalid amount: %s' % data['amount']) return cls(fulfillment, data['public_keys'], amount) diff --git a/tests/common/test_transaction.py b/tests/common/test_transaction.py index 5a9ad766..565a5bf8 100644 --- a/tests/common/test_transaction.py +++ b/tests/common/test_transaction.py @@ -930,3 +930,13 @@ def test_cant_add_empty_input(): with raises(TypeError): tx.add_input(None) + + +def test_output_from_dict_invalid_amount(user_output): + from bigchaindb.common.transaction import Output + from bigchaindb.common.exceptions import AmountError + + out = user_output.to_dict() + out['amount'] = 'a' + with raises(AmountError): + Output.from_dict(out) From d08d932627a5ec0b1915bf1508225b2244ca36ad Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 15 Mar 2017 13:37:56 +0100 Subject: [PATCH 015/150] make max output amount a constant --- bigchaindb/common/transaction.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bigchaindb/common/transaction.py b/bigchaindb/common/transaction.py index 81b5614f..8d415246 100644 --- a/bigchaindb/common/transaction.py +++ b/bigchaindb/common/transaction.py @@ -209,6 +209,8 @@ class Output(object): owners before a Transaction was confirmed. """ + MAX_AMOUNT = 9 * 10 ** 18 + def __init__(self, fulfillment, public_keys=None, amount=1): """Create an instance of a :class:`~.Output`. 
@@ -229,8 +231,8 @@ class Output(object): raise TypeError('`amount` must be an int') if amount < 1: raise AmountError('`amount` must be greater than 0') - if amount > 9 * 10 ** 18: - raise AmountError('`amount` must be <= 9000000000000000000') + if amount > self.MAX_AMOUNT: + raise AmountError('`amount` must be <= %s' % self.MAX_AMOUNT) self.fulfillment = fulfillment self.amount = amount From 860e7cda02614833a3fc7e7ed178fb2059de136d Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 15 Mar 2017 14:32:54 +0100 Subject: [PATCH 016/150] test that transaction with no hash raises schema validation error --- tests/validation/test_transaction_structure.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index 63063f7f..df79f594 100644 --- a/tests/validation/test_transaction_structure.py +++ b/tests/validation/test_transaction_structure.py @@ -51,6 +51,12 @@ def test_tx_serialization_with_incorrect_hash(create_tx): validate_raises(tx, InvalidHash) +def test_tx_serialization_with_no_hash(create_tx): + tx = create_tx.to_dict() + del tx['id'] + validate_raises(tx) + + ################################################################################ # Operation From bbf5c49f9aa9ff8983989467a98388c3f30973e4 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 15 Mar 2017 17:19:46 +0100 Subject: [PATCH 017/150] test that transfer tx cannot have no inputs --- tests/validation/test_transaction_structure.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index df79f594..7f813afc 100644 --- a/tests/validation/test_transaction_structure.py +++ b/tests/validation/test_transaction_structure.py @@ -122,6 +122,11 @@ def test_create_tx_no_fulfills(create_tx): validate_raises(tx) +def test_transfer_has_inputs(signed_transfer_tx): + signed_transfer_tx.inputs = [] + 
validate_raises(signed_transfer_tx) + + ################################################################################ # Outputs From 03ca4b1fd3beddd7b810ed0078e2ca0628db1a2b Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Fri, 17 Mar 2017 15:34:17 +0100 Subject: [PATCH 018/150] make stepping pipeline queue items mutable --- tests/pipelines/stepping.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pipelines/stepping.py b/tests/pipelines/stepping.py index 030863c6..1a9d3a69 100644 --- a/tests/pipelines/stepping.py +++ b/tests/pipelines/stepping.py @@ -106,7 +106,7 @@ class MultipipesStepper: for item in items: if type(item) != tuple: item = (item,) - queue.append(item) + queue.append(list(item)) def step(self, name, **kwargs): """ Advance pipeline stage. Throws Empty if no data to consume. """ From ddbdf64e33637bcebfb0af04121fac9974f6d3b6 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Mon, 20 Mar 2017 17:30:02 +0100 Subject: [PATCH 019/150] voting module raises CriticalDuplicateVote if there's a duplicate vote --- bigchaindb/exceptions.py | 4 ++++ bigchaindb/voting.py | 46 +++++++++++++++++----------------------- tests/test_voting.py | 37 +++++++++++++++++++++----------- 3 files changed, 48 insertions(+), 39 deletions(-) diff --git a/bigchaindb/exceptions.py b/bigchaindb/exceptions.py index 336ce231..a11fd4f8 100644 --- a/bigchaindb/exceptions.py +++ b/bigchaindb/exceptions.py @@ -8,3 +8,7 @@ class CriticalDoubleSpend(BigchainDBError): class CriticalDoubleInclusion(BigchainDBError): """Data integrity error that requires attention""" + + +class CriticalDuplicateVote(BigchainDBError): + """Data integrity error that requires attention""" diff --git a/bigchaindb/voting.py b/bigchaindb/voting.py index 0622a363..cc20944d 100644 --- a/bigchaindb/voting.py +++ b/bigchaindb/voting.py @@ -1,6 +1,7 @@ import collections from bigchaindb.common.schema import SchemaValidationError, validate_vote_schema +from bigchaindb.exceptions import 
CriticalDuplicateVote from bigchaindb.common.utils import serialize from bigchaindb.common.crypto import PublicKey @@ -33,7 +34,8 @@ class Voting: n_voters = len(eligible_voters) eligible_votes, ineligible_votes = \ cls.partition_eligible_votes(votes, eligible_voters) - results = cls.count_votes(eligible_votes) + by_voter = cls.dedupe_by_voter(eligible_votes) + results = cls.count_votes(by_voter) results['block_id'] = block['id'] results['status'] = cls.decide_votes(n_voters, **results['counts']) results['ineligible'] = ineligible_votes @@ -60,38 +62,29 @@ class Voting: return eligible, ineligible @classmethod - def count_votes(cls, eligible_votes): + def dedupe_by_voter(cls, eligible_votes): + """ + Throw a critical error if there is a duplicate vote + """ + by_voter = {} + for vote in eligible_votes: + pubkey = vote['node_pubkey'] + if pubkey in by_voter: + raise CriticalDuplicateVote(pubkey) + by_voter[pubkey] = vote + return by_voter + + @classmethod + def count_votes(cls, by_voter): """ Given a list of eligible votes, (votes from known nodes that are listed as voters), produce the number that say valid and the number that say - invalid. - - * Detect if there are multiple votes from a single node and return them - in a separate "cheat" dictionary. - * Votes must agree on previous block, otherwise they become invalid. - - note: - The sum of votes returned by this function does not necessarily - equal the length of the list of votes fed in. It may differ for - example if there are found to be multiple votes submitted by a - single voter. + invalid. Votes must agree on previous block, otherwise they become invalid. 
""" prev_blocks = collections.Counter() - cheat = [] malformed = [] - # Group by pubkey to detect duplicate voting - by_voter = collections.defaultdict(list) - for vote in eligible_votes: - by_voter[vote['node_pubkey']].append(vote) - - for pubkey, votes in by_voter.items(): - if len(votes) > 1: - cheat.append(votes) - continue - - vote = votes[0] - + for vote in by_voter.values(): if not cls.verify_vote_schema(vote): malformed.append(vote) continue @@ -111,7 +104,6 @@ class Voting: 'n_valid': n_valid, 'n_invalid': len(by_voter) - n_valid, }, - 'cheat': cheat, 'malformed': malformed, 'previous_block': prev_block, 'other_previous_block': dict(prev_blocks), diff --git a/tests/test_voting.py b/tests/test_voting.py index d1e1957d..07a60f24 100644 --- a/tests/test_voting.py +++ b/tests/test_voting.py @@ -2,6 +2,7 @@ import pytest from collections import Counter from bigchaindb.core import Bigchain +from bigchaindb.exceptions import CriticalDuplicateVote from bigchaindb.voting import Voting, INVALID, VALID, UNDECIDED @@ -37,24 +38,22 @@ def test_count_votes(): def verify_vote_schema(cls, vote): return vote['node_pubkey'] != 'malformed' - voters = (['cheat', 'cheat', 'says invalid', 'malformed'] + + voters = (['says invalid', 'malformed'] + ['kosher' + str(i) for i in range(10)]) votes = [Bigchain(v).vote('block', 'a', True) for v in voters] - votes[2]['vote']['is_block_valid'] = False + votes[0]['vote']['is_block_valid'] = False # Incorrect previous block subtracts from n_valid and adds to n_invalid votes[-1]['vote']['previous_block'] = 'z' - assert TestVoting.count_votes(votes) == { + by_voter = dict(enumerate(votes)) + + assert TestVoting.count_votes(by_voter) == { 'counts': { 'n_valid': 9, # 9 kosher votes - 'n_invalid': 4, # 1 cheat, 1 invalid, 1 malformed, 1 rogue prev block - # One of the cheat votes counts towards n_invalid, the other is - # not counted here. 
- # len(cheat) + n_valid + n_invalid == len(votes) + 'n_invalid': 3, # 1 invalid, 1 malformed, 1 rogue prev block }, - 'cheat': [votes[:2]], - 'malformed': [votes[3]], + 'malformed': [votes[1]], 'previous_block': 'a', 'other_previous_block': {'z': 1}, } @@ -70,7 +69,8 @@ def test_must_agree_prev_block(): votes = [Bigchain(v).vote('block', 'a', True) for v in voters] votes[0]['vote']['previous_block'] = 'b' votes[1]['vote']['previous_block'] = 'c' - assert TestVoting.count_votes(votes) == { + by_voter = dict(enumerate(votes)) + assert TestVoting.count_votes(by_voter) == { 'counts': { 'n_valid': 2, 'n_invalid': 2, @@ -78,7 +78,6 @@ def test_must_agree_prev_block(): 'previous_block': 'a', 'other_previous_block': {'b': 1, 'c': 1}, 'malformed': [], - 'cheat': [], } @@ -230,8 +229,22 @@ def test_block_election(b): 'block_id': 'xyz', 'counts': {'n_valid': 2, 'n_invalid': 0}, 'ineligible': [votes[-1]], - 'cheat': [], 'malformed': [], 'previous_block': 'a', 'other_previous_block': {}, } + + +def test_duplicate_vote_throws_critical_error(b): + class TestVoting(Voting): + @classmethod + def verify_vote_signature(cls, vote): + return True + keyring = 'abc' + block = {'id': 'xyz', 'block': {'voters': 'ab'}} + votes = [{ + 'node_pubkey': c, + 'vote': {'is_block_valid': True, 'previous_block': 'a'} + } for c in 'aabc'] + with pytest.raises(CriticalDuplicateVote): + TestVoting.block_election(block, votes, keyring) From d8b84891b65cfce024c4f2f6fa98ffb9f8d1f04b Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Tue, 21 Mar 2017 16:23:56 +0100 Subject: [PATCH 020/150] federation tests --- tests/integration/test_federation.py | 215 +++++++++++++++++++++++++++ tests/pipelines/stepping.py | 2 + 2 files changed, 217 insertions(+) create mode 100644 tests/integration/test_federation.py diff --git a/tests/integration/test_federation.py b/tests/integration/test_federation.py new file mode 100644 index 00000000..c18c65de --- /dev/null +++ b/tests/integration/test_federation.py @@ -0,0 +1,215 
@@ +from copy import deepcopy +import pytest +import random + +import bigchaindb +from bigchaindb.core import Bigchain +from contextlib import contextmanager +from bigchaindb.common.crypto import generate_key_pair +from tests.pipelines.stepping import create_stepper + + +################################################################################ +# Test setup code + + +@contextmanager +def federation(n): + """ + Return a list of Bigchain objects and pipeline steppers to represent + a BigchainDB federation + """ + keys = [generate_key_pair() for _ in range(n)] + config_orig = bigchaindb.config + + @contextmanager + def make_nodes(i): + nonlocal keys + if i == 0: + yield [] + else: + config = deepcopy(config_orig) + keys = [keys[-1]] + keys[:-1] + config['keypair']['private'] = keys[0][0] + config['keypair']['public'] = keys[0][1] + config['keyring'] = list(list(zip(*keys[1:]))[1]) + bigchaindb.config = config + stepper = create_stepper() + with stepper.start(): + node = (Bigchain(), stepper) + with make_nodes(i-1) as rest: + yield [node] + rest + + with make_nodes(n) as steppers: + bigchaindb.config = config_orig + yield zip(*steppers) + + +@pytest.fixture +def federation_3(): + with federation(3) as f: + yield f + + +def process_tx(steps): + steps.block_changefeed(timeout=1) + if steps.block_filter_tx(): + steps.block_validate_tx() + steps.block_create(timeout=True) + steps.block_write() + steps.block_delete_tx() + + +def input_single_create(b): + from bigchaindb.common.transaction import Transaction + metadata = {'r': random.random()} + tx = Transaction.create([b.me], [([b.me], 1)], metadata).sign([b.me_private]) + b.write_transaction(tx) + return tx + + +def process_vote(steps, result=None): + steps.vote_changefeed() + steps.vote_validate_block() + steps.vote_ungroup() + steps.vote_validate_tx() + if result is not None: + steps.queues['vote_vote'][0][0] = result + vote = steps.vote_vote() + steps.vote_write_vote() + return vote + + 
+################################################################################ +# Tests here on down + + +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_valid(federation_3): + [bx, (s0, s1, s2)] = federation_3 + tx = input_single_create(bx[0]) + process_tx(s0) + process_tx(s1) + process_tx(s2) + process_vote(s2, False) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + process_vote(s0, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + process_vote(s1, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'valid' + + +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_invalid(federation_3): + [bx, (s0, s1, s2)] = federation_3 + tx = input_single_create(bx[0]) + process_tx(s0) + process_tx(s1) + process_tx(s2) + process_vote(s1, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + process_vote(s2, False) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + process_vote(s0, False) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] is None + + +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_disagree_prev_block(federation_3): + [bx, (s0, s1, s2)] = federation_3 + tx = input_single_create(bx[0]) + process_tx(s0) + process_tx(s1) + process_tx(s2) + process_vote(s0, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + s1.vote.last_voted_id = '5' * 64 + process_vote(s1, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + s2.vote.last_voted_id = '6' * 64 + process_vote(s2, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] is None + + +@pytest.mark.skip() # TODO: wait for #1309 +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_dupe_vote(federation_3): + from bigchaindb.exceptions import CriticalDuplicateVote + [bx, (s0, s1, s2)] = federation_3 + tx = 
input_single_create(bx[0]) + process_tx(s0) + process_tx(s1) + process_tx(s2) + vote = process_vote(s0, True) + # Drop the unique index and write the vote again + bx[0].connection.db.votes.drop_index('block_and_voter') + s0.queues['vote_write_vote'].append([vote]) + s0.vote_write_vote() + for i in range(3): + with pytest.raises(CriticalDuplicateVote): + bx[i].get_transaction(tx.id, True)[1] + + +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_sybill(federation_3): + [bx, (s0, s1, s2)] = federation_3 + tx = input_single_create(bx[0]) + process_tx(s0) + process_tx(s1) + process_tx(s2) + # What we need is some votes from unknown nodes! + for s in [s0, s1, s2]: + s.vote.bigchain.me_private = generate_key_pair()[0] + process_vote(s0, True) + process_vote(s1, True) + process_vote(s2, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + + +@pytest.mark.skip() +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_dos(federation_3): + """ + https://github.com/bigchaindb/bigchaindb/issues/1314 + Test that a node cannot block another node's opportunity to vote + on a block by writing an incorrectly signed vote + """ + raise NotImplementedError() + + +@pytest.mark.skip('Revisit when we have block election status cache') +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_bad_block_voters_list(federation_3): + """ + See https://github.com/bigchaindb/bigchaindb/issues/1224 + """ + [bx, (s0, s1, s2)] = federation_3 + b = s0.block.bigchain + # First remove other nodes from node 0 so that it self assigns the tx + b.nodes_except_me = [] + tx = input_single_create(b) + # Now create a block voters list which will not match other keyrings + b.nodes_except_me = [bx[1].me] + process_tx(s0) + process_vote(s0) + process_vote(s1) + process_vote(s2) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'invalid' diff --git a/tests/pipelines/stepping.py b/tests/pipelines/stepping.py index 1a9d3a69..36f68a6a 100644 --- 
a/tests/pipelines/stepping.py +++ b/tests/pipelines/stepping.py @@ -163,6 +163,8 @@ def _update_stepper(stepper, prefix, pipeline): n1 = (nodes + [None])[i+1] f = stepper.add_input if i == 0 else stepper.add_stage f(prefix, n0, n1) + # Expose pipeline state + setattr(stepper, prefix, nodes[-1].target.__self__) def create_stepper(): From 9fd40682f235d043b12e48f83308a2b4e6de9087 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Fri, 24 Mar 2017 15:38:27 +0100 Subject: [PATCH 021/150] docs re: database.connection_timeout and database.max_tries --- .../source/server-reference/configuration.md | 27 +++++++++++++++---- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/docs/server/source/server-reference/configuration.md b/docs/server/source/server-reference/configuration.md index 4cd9e9d4..42f22d4e 100644 --- a/docs/server/source/server-reference/configuration.md +++ b/docs/server/source/server-reference/configuration.md @@ -16,6 +16,8 @@ For convenience, here's a list of all the relevant environment variables (docume `BIGCHAINDB_DATABASE_PORT`
`BIGCHAINDB_DATABASE_NAME`
`BIGCHAINDB_DATABASE_REPLICASET`
+`BIGCHAINDB_DATABASE_CONNECTION_TIMEOUT`
+`BIGCHAINDB_DATABASE_MAX_TRIES`
`BIGCHAINDB_SERVER_BIND`
`BIGCHAINDB_SERVER_WORKERS`
`BIGCHAINDB_SERVER_THREADS`
@@ -85,9 +87,18 @@ Note how the keys in the list are separated by colons. ``` -## database.backend, database.host, database.port, database.name & database.replicaset +## database.* -The database backend to use (`rethinkdb` or `mongodb`) and its hostname, port and name. If the database backend is `mongodb`, then there's a fifth setting: the name of the replica set. If the database backend is `rethinkdb`, you *can* set the name of the replica set, but it won't be used for anything. +The settings with names of the form `database.*` are for the database backend +(currently either RethinkDB or MongoDB). They are: + +* `database.backend` is either `rethinkdb` or `mongodb`. +* `database.host` is the hostname (FQDN) of the backend database. +* `database.port` is self-explanatory. +* `database.name` is a user-chosen name for the database inside RethinkDB or MongoDB, e.g. `bigchain`. +* `database.replicaset` is only relevant if using MongoDB; it's the name of the MongoDB replica set, e.g. `bigchain-rs`. +* `database.connection_timeout` is the maximum number of milliseconds that BigchainDB will wait before giving up on one attempt to connect to the database backend. +* `database.max_tries` is the maximum number of times that BigchainDB will try to establish a connection with the database backend. If 0, then it will try forever. 
**Example using environment variables** ```text @@ -96,6 +107,8 @@ export BIGCHAINDB_DATABASE_HOST=localhost export BIGCHAINDB_DATABASE_PORT=27017 export BIGCHAINDB_DATABASE_NAME=bigchain export BIGCHAINDB_DATABASE_REPLICASET=bigchain-rs +export BIGCHAINDB_DATABASE_CONNECTION_TIMEOUT=5000 +export BIGCHAINDB_DATABASE_MAX_TRIES=3 ``` **Default values** @@ -105,8 +118,10 @@ If (no environment variables were set and there's no local config file), or you "database": { "backend": "rethinkdb", "host": "localhost", + "port": 28015, "name": "bigchain", - "port": 28015 + "connection_timeout": 5000, + "max_tries": 3 } ``` @@ -115,9 +130,11 @@ If you used `bigchaindb -y configure mongodb` to create a default local config f "database": { "backend": "mongodb", "host": "localhost", - "name": "bigchain", "port": 27017, - "replicaset": "bigchain-rs" + "name": "bigchain", + "replicaset": "bigchain-rs", + "connection_timeout": 5000, + "max_tries": 3 } ``` From 4bcd7dd1e2a80feb56d5e0fd4d93f1879b6f9ae2 Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Thu, 30 Mar 2017 14:56:33 +0200 Subject: [PATCH 022/150] Delete outdated speed-tests folder --- speed-tests/README.md | 3 -- speed-tests/speed_tests.py | 97 -------------------------------------- 2 files changed, 100 deletions(-) delete mode 100644 speed-tests/README.md delete mode 100644 speed-tests/speed_tests.py diff --git a/speed-tests/README.md b/speed-tests/README.md deleted file mode 100644 index 7b07d338..00000000 --- a/speed-tests/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Speed Tests - -This folder contains tests related to the code performance of a single node. 
\ No newline at end of file diff --git a/speed-tests/speed_tests.py b/speed-tests/speed_tests.py deleted file mode 100644 index 87a81b0f..00000000 --- a/speed-tests/speed_tests.py +++ /dev/null @@ -1,97 +0,0 @@ -import json -import time - -import rapidjson -from line_profiler import LineProfiler - -import bigchaindb - -# BIG TODO: Adjust for new transaction model - - -def speedtest_validate_transaction(): - # create a transaction - b = bigchaindb.Bigchain() - tx = b.create_transaction(b.me, b.me, None, 'CREATE') - tx_signed = b.sign_transaction(tx, b.me_private) - - # setup the profiler - profiler = LineProfiler() - profiler.enable_by_count() - profiler.add_function(bigchaindb.Bigchain.validate_transaction) - - # validate_transaction 1000 times - for i in range(1000): - b.validate_transaction(tx_signed) - - profiler.print_stats() - - -def speedtest_serialize_block_json(): - # create a block - b = bigchaindb.Bigchain() - tx = b.create_transaction(b.me, b.me, None, 'CREATE') - tx_signed = b.sign_transaction(tx, b.me_private) - block = b.create_block([tx_signed] * 1000) - - time_start = time.time() - for _ in range(1000): - _ = json.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True) - time_elapsed = time.time() - time_start - - print('speedtest_serialize_block_json: {} s'.format(time_elapsed)) - - -def speedtest_serialize_block_rapidjson(): - # create a block - b = bigchaindb.Bigchain() - tx = b.create_transaction(b.me, b.me, None, 'CREATE') - tx_signed = b.sign_transaction(tx, b.me_private) - block = b.create_block([tx_signed] * 1000) - - time_start = time.time() - for _ in range(1000): - _ = rapidjson.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True) - time_elapsed = time.time() - time_start - - print('speedtest_serialize_block_rapidjson: {} s'.format(time_elapsed)) - - -def speedtest_deserialize_block_json(): - # create a block - b = bigchaindb.Bigchain() - tx = b.create_transaction(b.me, b.me, None, 'CREATE') - tx_signed = 
b.sign_transaction(tx, b.me_private) - block = b.create_block([tx_signed] * 1000) - block_serialized = json.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True) - - time_start = time.time() - for _ in range(1000): - _ = json.loads(block_serialized) - time_elapsed = time.time() - time_start - - print('speedtest_deserialize_block_json: {} s'.format(time_elapsed)) - - -def speedtest_deserialize_block_rapidjson(): - # create a block - b = bigchaindb.Bigchain() - tx = b.create_transaction(b.me, b.me, None, 'CREATE') - tx_signed = b.sign_transaction(tx, b.me_private) - block = b.create_block([tx_signed] * 1000) - block_serialized = rapidjson.dumps(block, skipkeys=False, ensure_ascii=False, sort_keys=True) - - time_start = time.time() - for _ in range(1000): - _ = rapidjson.loads(block_serialized) - time_elapsed = time.time() - time_start - - print('speedtest_deserialize_block_rapidjson: {} s'.format(time_elapsed)) - - -if __name__ == '__main__': - speedtest_validate_transaction() - speedtest_serialize_block_json() - speedtest_serialize_block_rapidjson() - speedtest_deserialize_block_json() - speedtest_deserialize_block_rapidjson() From 87eb070ed68533abd42f736b3ac293e0c6883416 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Fri, 3 Mar 2017 01:47:08 +0100 Subject: [PATCH 023/150] Refactor core.BigchainDB.get_outputs --- bigchaindb/core.py | 64 +++++++++++++++++++++++++-------------------- bigchaindb/utils.py | 13 +++++++++ 2 files changed, 49 insertions(+), 28 deletions(-) diff --git a/bigchaindb/core.py b/bigchaindb/core.py index a9143f33..e6783a6d 100644 --- a/bigchaindb/core.py +++ b/bigchaindb/core.py @@ -19,14 +19,17 @@ class Bigchain(object): Create, read, sign, write transactions to the database """ - # return if a block has been voted invalid BLOCK_INVALID = 'invalid' - # return if a block is valid, or tx is in valid block + """return if a block has been voted invalid""" + BLOCK_VALID = TX_VALID = 'valid' - # return if block is undecided, or tx 
is in undecided block + """return if a block is valid, or tx is in valid block""" + BLOCK_UNDECIDED = TX_UNDECIDED = 'undecided' - # return if transaction is in backlog + """return if block is undecided, or tx is in undecided block""" + TX_IN_BACKLOG = 'backlog' + """return if transaction is in backlog""" def __init__(self, public_key=None, private_key=None, keyring=[], connection=None, backlog_reassign_delay=None): """Initialize the Bigchain instance @@ -372,32 +375,37 @@ class Bigchain(object): """ # get all transactions in which owner is in the `owners_after` list response = backend.query.get_owned_ids(self.connection, owner) - links = [] + return [ + TransactionLink(tx['id'], index) + for tx in response + if not self.is_tx_strictly_in_invalid_block(tx['id']) + for index, output in enumerate(tx['outputs']) + if utils.output_has_owner(output, owner) + ] - for tx in response: - # disregard transactions from invalid blocks - validity = self.get_blocks_status_containing_tx(tx['id']) - if Bigchain.BLOCK_VALID not in validity.values(): - if Bigchain.BLOCK_UNDECIDED not in validity.values(): - continue + def is_tx_strictly_in_invalid_block(self, txid): + """ + Checks whether the transaction with the given ``txid`` + *strictly* belongs to an invalid block. - # NOTE: It's OK to not serialize the transaction here, as we do not - # use it after the execution of this function. - # a transaction can contain multiple outputs so we need to iterate over all of them - # to get a list of outputs available to spend - for index, output in enumerate(tx['outputs']): - # for simple signature conditions there are no subfulfillments - # check if the owner is in the condition `owners_after` - if len(output['public_keys']) == 1: - if output['condition']['details']['public_key'] == owner: - links.append(TransactionLink(tx['id'], index)) - else: - # for transactions with multiple `public_keys` there will be several subfulfillments nested - # in the condition. 
We need to iterate the subfulfillments to make sure there is a - # subfulfillment for `owner` - if utils.condition_details_has_owner(output['condition']['details'], owner): - links.append(TransactionLink(tx['id'], index)) - return links + Args: + txid (str): Transaction id. + + Returns: + bool: ``True`` if the transaction *strictly* belongs to a + block that is invalid. ``False`` otherwise. + + Note: + Since a transaction may be in multiple blocks, with + different statuses, the term "strictly" is used to + emphasize that if a transaction is said to be in an invalid + block, it means that it is not in any other block that is + either valid or undecided. + + """ + validity = self.get_blocks_status_containing_tx(txid) + return (Bigchain.BLOCK_VALID not in validity.values() and + Bigchain.BLOCK_UNDECIDED not in validity.values()) def get_owned_ids(self, owner): """Retrieve a list of ``txid`` s that can be used as inputs. diff --git a/bigchaindb/utils.py b/bigchaindb/utils.py index 4d7177d9..f87916b7 100644 --- a/bigchaindb/utils.py +++ b/bigchaindb/utils.py @@ -113,6 +113,19 @@ def condition_details_has_owner(condition_details, owner): return False +def output_has_owner(output, owner): + # TODO + # Check whether it is really necessary to treat the single key case + # differently from the multiple keys case, and why not just use the same + # function for both cases. + if len(output['public_keys']) > 1: + return condition_details_has_owner( + output['condition']['details'], owner) + elif len(output['public_keys']) == 1: + return output['condition']['details']['public_key'] == owner + # TODO raise proper exception, e.g. invalid tx payload? + + def is_genesis_block(block): """Check if the block is the genesis block. 
From ca200b1da7ba77f9aa753fd20b8eb09ea4a95838 Mon Sep 17 00:00:00 2001 From: Jack Riches Date: Sun, 2 Apr 2017 12:22:56 +0100 Subject: [PATCH 024/150] Treat --log-level argument as case-insensitive --- bigchaindb/commands/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bigchaindb/commands/utils.py b/bigchaindb/commands/utils.py index cf8ddb4f..6cc5cb6a 100644 --- a/bigchaindb/commands/utils.py +++ b/bigchaindb/commands/utils.py @@ -198,6 +198,7 @@ base_parser.add_argument('-c', '--config', '(use "-" for stdout)') base_parser.add_argument('-l', '--log-level', + type=lambda l: l.upper(), # case insensitive conversion choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], default='INFO', help='Log level') From 09866920af8ae5b197dc45eb976ebf2c0cb0d1bd Mon Sep 17 00:00:00 2001 From: Anuj Date: Sun, 2 Apr 2017 17:53:39 +0530 Subject: [PATCH 025/150] Pretty message when dropping a non-existent database --- bigchaindb/commands/bigchain.py | 8 ++++++-- tests/commands/test_commands.py | 12 ++++++++++++ 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/bigchaindb/commands/bigchain.py b/bigchaindb/commands/bigchain.py index be17d75f..ce0cbfa0 100644 --- a/bigchaindb/commands/bigchain.py +++ b/bigchaindb/commands/bigchain.py @@ -12,7 +12,8 @@ import sys from bigchaindb.common import crypto from bigchaindb.common.exceptions import (StartupError, DatabaseAlreadyExists, - KeypairNotFoundException) + KeypairNotFoundException, + DatabaseDoesNotExist) import bigchaindb from bigchaindb import backend, processes from bigchaindb.backend import schema @@ -166,7 +167,10 @@ def run_drop(args): conn = backend.connect() dbname = bigchaindb.config['database']['name'] - schema.drop_database(conn, dbname) + try: + schema.drop_database(conn, dbname) + except DatabaseDoesNotExist: + print("Cannot drop '{name}'. 
The database does not exist.".format(name=dbname), file=sys.stderr) @configure_bigchaindb diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py index 50b995b0..ad603351 100644 --- a/tests/commands/test_commands.py +++ b/tests/commands/test_commands.py @@ -149,6 +149,18 @@ def test_drop_db_when_interactive_yes(mock_db_drop, monkeypatch): assert mock_db_drop.called +@patch('bigchaindb.backend.schema.drop_database') +def test_drop_db_when_db_does_not_exist(mock_db_drop, capsys): + from bigchaindb.commands.bigchain import run_drop + from bigchaindb.common.exceptions import DatabaseDoesNotExist + args = Namespace(config=None, yes=True) + mock_db_drop.side_effect = DatabaseDoesNotExist + + run_drop(args) + output_message = capsys.readouterr()[1] + assert output_message == "Cannot drop 'bigchain'. The database does not exist.\n" + + @patch('bigchaindb.backend.schema.drop_database') def test_drop_db_does_not_drop_when_interactive_no(mock_db_drop, monkeypatch): from bigchaindb.commands.bigchain import run_drop From f3f1ecdaecf2e42f713e6090176eeb24f6b074e3 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Sun, 2 Apr 2017 16:46:41 +0200 Subject: [PATCH 026/150] Added to HOW_TO_HANDLE_PULL_REQUESTS.md Added new subsection: How to Handle CLA Agreement Emails with No Associated Pull Request --- HOW_TO_HANDLE_PULL_REQUESTS.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/HOW_TO_HANDLE_PULL_REQUESTS.md b/HOW_TO_HANDLE_PULL_REQUESTS.md index 4dfbec15..6114c7ac 100644 --- a/HOW_TO_HANDLE_PULL_REQUESTS.md +++ b/HOW_TO_HANDLE_PULL_REQUESTS.md @@ -51,3 +51,15 @@ END BLOCK (END OF EMAIL) The next step is to wait for them to copy that comment into the comments of the indicated pull request. Once they do so, it's safe to merge the pull request. 
+ +## How to Handle CLA Agreement Emails with No Associated Pull Request + +Reply with an email like this: + +Hi [First Name], + +Today I got an email (copied below) to tell me that you agreed to the BigchainDB Contributor License Agreement. Did you intend to do that? + +If no, then you can ignore this email. + +If yes, then there's another step to connect your email address with your GitHub account. To do that, you must first create a pull request in one of the BigchainDB repositories on GitHub. Once you've done that, please reply to this email with a link to the pull request. Then I'll send you a special block of text to paste into the comments on that pull request. From eff1406c09a6f05eb7d9c97fcf840f8daa8d29d8 Mon Sep 17 00:00:00 2001 From: Jack Riches Date: Sun, 2 Apr 2017 23:46:44 +0100 Subject: [PATCH 027/150] Refactor away unnecessary lambda to str.upper --- bigchaindb/commands/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigchaindb/commands/utils.py b/bigchaindb/commands/utils.py index 6cc5cb6a..15887340 100644 --- a/bigchaindb/commands/utils.py +++ b/bigchaindb/commands/utils.py @@ -198,7 +198,7 @@ base_parser.add_argument('-c', '--config', '(use "-" for stdout)') base_parser.add_argument('-l', '--log-level', - type=lambda l: l.upper(), # case insensitive conversion + type=str.upper,  # convert to uppercase for comparison to choices choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], default='INFO', help='Log level') From c3f89fd447e7729dad98b521c5c484311e1e2a8a Mon Sep 17 00:00:00 2001 From: Anuj Date: Mon, 3 Apr 2017 13:13:22 +0530 Subject: [PATCH 028/150] Taking DB name from config in test for non-existent db drop --- tests/commands/test_commands.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py index ad603351..c0e2b5af 100644 --- a/tests/commands/test_commands.py +++ b/tests/commands/test_commands.py @@ -151,6 +151,7 @@ def
test_drop_db_when_interactive_yes(mock_db_drop, monkeypatch): @patch('bigchaindb.backend.schema.drop_database') def test_drop_db_when_db_does_not_exist(mock_db_drop, capsys): + from bigchaindb import config from bigchaindb.commands.bigchain import run_drop from bigchaindb.common.exceptions import DatabaseDoesNotExist args = Namespace(config=None, yes=True) @@ -158,7 +159,8 @@ def test_drop_db_when_db_does_not_exist(mock_db_drop, capsys): run_drop(args) output_message = capsys.readouterr()[1] - assert output_message == "Cannot drop 'bigchain'. The database does not exist.\n" + assert output_message == "Cannot drop '{name}'. The database does not exist.\n".format( + name=config['database']['name']) @patch('bigchaindb.backend.schema.drop_database') From e7b0b227f18f88747a992e29367d52d20167185f Mon Sep 17 00:00:00 2001 From: Lavina Date: Wed, 29 Mar 2017 20:05:01 +0530 Subject: [PATCH 029/150] Rename bigchain.py command module to bigchaindb.py --- .../commands/{bigchain.py => bigchaindb.py} | 0 docs/server/source/appendices/commands.rst | 4 +- setup.py | 2 +- tests/commands/conftest.py | 8 +-- tests/commands/rethinkdb/test_commands.py | 10 ++-- tests/commands/test_commands.py | 58 +++++++++---------- tests/commands/test_utils.py | 4 +- 7 files changed, 43 insertions(+), 43 deletions(-) rename bigchaindb/commands/{bigchain.py => bigchaindb.py} (100%) diff --git a/bigchaindb/commands/bigchain.py b/bigchaindb/commands/bigchaindb.py similarity index 100% rename from bigchaindb/commands/bigchain.py rename to bigchaindb/commands/bigchaindb.py diff --git a/docs/server/source/appendices/commands.rst b/docs/server/source/appendices/commands.rst index 35d37b27..fd367cdb 100644 --- a/docs/server/source/appendices/commands.rst +++ b/docs/server/source/appendices/commands.rst @@ -6,10 +6,10 @@ Command Line Interface :special-members: __init__ -:mod:`bigchaindb.commands.bigchain` +:mod:`bigchaindb.commands.bigchaindb` ----------------------------------- -.. 
automodule:: bigchaindb.commands.bigchain +.. automodule:: bigchaindb.commands.bigchaindb :mod:`bigchaindb.commands.utils` diff --git a/setup.py b/setup.py index 5fb201f4..c05b554a 100644 --- a/setup.py +++ b/setup.py @@ -117,7 +117,7 @@ setup( entry_points={ 'console_scripts': [ - 'bigchaindb=bigchaindb.commands.bigchain:main' + 'bigchaindb=bigchaindb.commands.bigchaindb:main' ], }, install_requires=install_requires, diff --git a/tests/commands/conftest.py b/tests/commands/conftest.py index 30c577f5..4a60c0cc 100644 --- a/tests/commands/conftest.py +++ b/tests/commands/conftest.py @@ -5,8 +5,8 @@ import pytest @pytest.fixture def mock_run_configure(monkeypatch): - from bigchaindb.commands import bigchain - monkeypatch.setattr(bigchain, 'run_configure', lambda *args, **kwargs: None) + from bigchaindb.commands import bigchaindb + monkeypatch.setattr(bigchaindb, 'run_configure', lambda *args, **kwargs: None) @pytest.fixture @@ -17,8 +17,8 @@ def mock_write_config(monkeypatch): @pytest.fixture def mock_db_init_with_existing_db(monkeypatch): - from bigchaindb.commands import bigchain - monkeypatch.setattr(bigchain, '_run_init', lambda: None) + from bigchaindb.commands import bigchaindb + monkeypatch.setattr(bigchaindb, '_run_init', lambda: None) @pytest.fixture diff --git a/tests/commands/rethinkdb/test_commands.py b/tests/commands/rethinkdb/test_commands.py index 165fef0d..0eab914c 100644 --- a/tests/commands/rethinkdb/test_commands.py +++ b/tests/commands/rethinkdb/test_commands.py @@ -11,7 +11,7 @@ def test_bigchain_run_start_with_rethinkdb(mock_start_rethinkdb, mock_processes_start, mock_db_init_with_existing_db, mocked_setup_logging): - from bigchaindb.commands.bigchain import run_start + from bigchaindb.commands.bigchaindb import run_start args = Namespace(start_rethinkdb=True, allow_temp_keypair=False, config=None, yes=True) run_start(args) @@ -39,7 +39,7 @@ def test_start_rethinkdb_exits_when_cannot_start(mock_popen): @patch('rethinkdb.ast.Table.reconfigure') 
def test_set_shards(mock_reconfigure, monkeypatch, b): - from bigchaindb.commands.bigchain import run_set_shards + from bigchaindb.commands.bigchaindb import run_set_shards # this will mock the call to retrieve the database config # we will set it to return one replica @@ -62,7 +62,7 @@ def test_set_shards(mock_reconfigure, monkeypatch, b): def test_set_shards_raises_exception(monkeypatch, b): - from bigchaindb.commands.bigchain import run_set_shards + from bigchaindb.commands.bigchaindb import run_set_shards # test that we are correctly catching the exception def mock_raise(*args, **kwargs): @@ -82,7 +82,7 @@ def test_set_shards_raises_exception(monkeypatch, b): @patch('rethinkdb.ast.Table.reconfigure') def test_set_replicas(mock_reconfigure, monkeypatch, b): - from bigchaindb.commands.bigchain import run_set_replicas + from bigchaindb.commands.bigchaindb import run_set_replicas # this will mock the call to retrieve the database config # we will set it to return two shards @@ -105,7 +105,7 @@ def test_set_replicas(mock_reconfigure, monkeypatch, b): def test_set_replicas_raises_exception(monkeypatch, b): - from bigchaindb.commands.bigchain import run_set_replicas + from bigchaindb.commands.bigchaindb import run_set_replicas # test that we are correctly catching the exception def mock_raise(*args, **kwargs): diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py index 50b995b0..186dfbc6 100644 --- a/tests/commands/test_commands.py +++ b/tests/commands/test_commands.py @@ -8,7 +8,7 @@ import pytest def test_make_sure_we_dont_remove_any_command(): # thanks to: http://stackoverflow.com/a/18161115/597097 - from bigchaindb.commands.bigchain import create_parser + from bigchaindb.commands.bigchaindb import create_parser parser = create_parser() @@ -27,7 +27,7 @@ def test_make_sure_we_dont_remove_any_command(): @patch('bigchaindb.commands.utils.start') def test_main_entrypoint(mock_start): - from bigchaindb.commands.bigchain import main + from 
bigchaindb.commands.bigchaindb import main main() assert mock_start.called @@ -37,7 +37,7 @@ def test_bigchain_run_start(mock_run_configure, mock_processes_start, mock_db_init_with_existing_db, mocked_setup_logging): - from bigchaindb.commands.bigchain import run_start + from bigchaindb.commands.bigchaindb import run_start args = Namespace(start_rethinkdb=False, allow_temp_keypair=False, config=None, yes=True) run_start(args) mocked_setup_logging.assert_called_once_with(user_log_config={}) @@ -48,7 +48,7 @@ def test_bigchain_run_start_assume_yes_create_default_config( monkeypatch, mock_processes_start, mock_generate_key_pair, mock_db_init_with_existing_db, mocked_setup_logging): import bigchaindb - from bigchaindb.commands.bigchain import run_start + from bigchaindb.commands.bigchaindb import run_start from bigchaindb import config_utils value = {} @@ -76,7 +76,7 @@ def test_bigchain_run_start_assume_yes_create_default_config( @pytest.mark.usefixtures('ignore_local_config_file') def test_bigchain_show_config(capsys): from bigchaindb import config - from bigchaindb.commands.bigchain import run_show_config + from bigchaindb.commands.bigchaindb import run_show_config args = Namespace(config=None) _, _ = capsys.readouterr() @@ -89,7 +89,7 @@ def test_bigchain_show_config(capsys): def test_bigchain_export_my_pubkey_when_pubkey_set(capsys, monkeypatch): from bigchaindb import config - from bigchaindb.commands.bigchain import run_export_my_pubkey + from bigchaindb.commands.bigchaindb import run_export_my_pubkey args = Namespace(config='dummy') # so in run_export_my_pubkey(args) below, @@ -108,7 +108,7 @@ def test_bigchain_export_my_pubkey_when_pubkey_set(capsys, monkeypatch): def test_bigchain_export_my_pubkey_when_pubkey_not_set(monkeypatch): from bigchaindb import config - from bigchaindb.commands.bigchain import run_export_my_pubkey + from bigchaindb.commands.bigchaindb import run_export_my_pubkey args = Namespace(config='dummy') monkeypatch.setitem(config['keypair'], 
'public', None) @@ -125,14 +125,14 @@ def test_bigchain_export_my_pubkey_when_pubkey_not_set(monkeypatch): def test_bigchain_run_init_when_db_exists(mock_db_init_with_existing_db): - from bigchaindb.commands.bigchain import run_init + from bigchaindb.commands.bigchaindb import run_init args = Namespace(config=None) run_init(args) @patch('bigchaindb.backend.schema.drop_database') def test_drop_db_when_assumed_yes(mock_db_drop): - from bigchaindb.commands.bigchain import run_drop + from bigchaindb.commands.bigchaindb import run_drop args = Namespace(config=None, yes=True) run_drop(args) @@ -141,9 +141,9 @@ def test_drop_db_when_assumed_yes(mock_db_drop): @patch('bigchaindb.backend.schema.drop_database') def test_drop_db_when_interactive_yes(mock_db_drop, monkeypatch): - from bigchaindb.commands.bigchain import run_drop + from bigchaindb.commands.bigchaindb import run_drop args = Namespace(config=None, yes=False) - monkeypatch.setattr('bigchaindb.commands.bigchain.input_on_stderr', lambda x: 'y') + monkeypatch.setattr('bigchaindb.commands.bigchaindb.input_on_stderr', lambda x: 'y') run_drop(args) assert mock_db_drop.called @@ -151,16 +151,16 @@ def test_drop_db_when_interactive_yes(mock_db_drop, monkeypatch): @patch('bigchaindb.backend.schema.drop_database') def test_drop_db_does_not_drop_when_interactive_no(mock_db_drop, monkeypatch): - from bigchaindb.commands.bigchain import run_drop + from bigchaindb.commands.bigchaindb import run_drop args = Namespace(config=None, yes=False) - monkeypatch.setattr('bigchaindb.commands.bigchain.input_on_stderr', lambda x: 'n') + monkeypatch.setattr('bigchaindb.commands.bigchaindb.input_on_stderr', lambda x: 'n') run_drop(args) assert not mock_db_drop.called def test_run_configure_when_config_exists_and_skipping(monkeypatch): - from bigchaindb.commands.bigchain import run_configure + from bigchaindb.commands.bigchaindb import run_configure monkeypatch.setattr('os.path.exists', lambda path: True) args = Namespace(config='foo', 
yes=True) return_value = run_configure(args, skip_if_exists=True) @@ -174,7 +174,7 @@ def test_run_configure_when_config_does_not_exist(monkeypatch, mock_write_config, mock_generate_key_pair, mock_bigchaindb_backup_config): - from bigchaindb.commands.bigchain import run_configure + from bigchaindb.commands.bigchaindb import run_configure monkeypatch.setattr('os.path.exists', lambda path: False) monkeypatch.setattr('builtins.input', lambda: '\n') args = Namespace(config='foo', backend='rethinkdb', yes=True) @@ -191,7 +191,7 @@ def test_run_configure_when_config_does_exist(monkeypatch, def mock_write_config(newconfig, filename=None): value['return'] = newconfig - from bigchaindb.commands.bigchain import run_configure + from bigchaindb.commands.bigchaindb import run_configure monkeypatch.setattr('os.path.exists', lambda path: True) monkeypatch.setattr('builtins.input', lambda: '\n') monkeypatch.setattr('bigchaindb.config_utils.write_config', mock_write_config) @@ -207,7 +207,7 @@ def test_run_configure_when_config_does_exist(monkeypatch, )) def test_run_configure_with_backend(backend, monkeypatch, mock_write_config): import bigchaindb - from bigchaindb.commands.bigchain import run_configure + from bigchaindb.commands.bigchaindb import run_configure value = {} @@ -238,7 +238,7 @@ def test_allow_temp_keypair_generates_one_on_the_fly( mock_gen_keypair, mock_processes_start, mock_db_init_with_existing_db, mocked_setup_logging): import bigchaindb - from bigchaindb.commands.bigchain import run_start + from bigchaindb.commands.bigchaindb import run_start bigchaindb.config['keypair'] = {'private': None, 'public': None} @@ -258,7 +258,7 @@ def test_allow_temp_keypair_doesnt_override_if_keypair_found(mock_gen_keypair, mock_db_init_with_existing_db, mocked_setup_logging): import bigchaindb - from bigchaindb.commands.bigchain import run_start + from bigchaindb.commands.bigchaindb import run_start # Preconditions for the test original_private_key = 
bigchaindb.config['keypair']['private'] @@ -279,7 +279,7 @@ def test_run_start_when_db_already_exists(mocker, monkeypatch, run_start_args, mocked_setup_logging): - from bigchaindb.commands.bigchain import run_start + from bigchaindb.commands.bigchaindb import run_start from bigchaindb.common.exceptions import DatabaseAlreadyExists mocked_start = mocker.patch('bigchaindb.processes.start') @@ -287,7 +287,7 @@ def test_run_start_when_db_already_exists(mocker, raise DatabaseAlreadyExists() monkeypatch.setattr( - 'bigchaindb.commands.bigchain._run_init', mock_run_init) + 'bigchaindb.commands.bigchaindb._run_init', mock_run_init) run_start(run_start_args) mocked_setup_logging.assert_called_once_with(user_log_config={}) assert mocked_start.called @@ -297,7 +297,7 @@ def test_run_start_when_keypair_not_found(mocker, monkeypatch, run_start_args, mocked_setup_logging): - from bigchaindb.commands.bigchain import run_start + from bigchaindb.commands.bigchaindb import run_start from bigchaindb.commands.messages import CANNOT_START_KEYPAIR_NOT_FOUND from bigchaindb.common.exceptions import KeypairNotFoundException mocked_start = mocker.patch('bigchaindb.processes.start') @@ -306,7 +306,7 @@ def test_run_start_when_keypair_not_found(mocker, raise KeypairNotFoundException() monkeypatch.setattr( - 'bigchaindb.commands.bigchain._run_init', mock_run_init) + 'bigchaindb.commands.bigchaindb._run_init', mock_run_init) with pytest.raises(SystemExit) as exc: run_start(run_start_args) @@ -321,7 +321,7 @@ def test_run_start_when_start_rethinkdb_fails(mocker, monkeypatch, run_start_args, mocked_setup_logging): - from bigchaindb.commands.bigchain import run_start + from bigchaindb.commands.bigchaindb import run_start from bigchaindb.commands.messages import RETHINKDB_STARTUP_ERROR from bigchaindb.common.exceptions import StartupError run_start_args.start_rethinkdb = True @@ -348,7 +348,7 @@ def test_run_start_when_start_rethinkdb_fails(mocker, @patch('bigchaindb.commands.utils.start') def 
test_calling_main(start_mock, base_parser_mock, parse_args_mock, monkeypatch): - from bigchaindb.commands.bigchain import main + from bigchaindb.commands.bigchaindb import main argparser_mock = Mock() parser = Mock() @@ -404,9 +404,9 @@ def test_calling_main(start_mock, base_parser_mock, parse_args_mock, @pytest.mark.usefixtures('ignore_local_config_file') -@patch('bigchaindb.commands.bigchain.add_replicas') +@patch('bigchaindb.commands.bigchaindb.add_replicas') def test_run_add_replicas(mock_add_replicas): - from bigchaindb.commands.bigchain import run_add_replicas + from bigchaindb.commands.bigchaindb import run_add_replicas from bigchaindb.backend.exceptions import OperationError args = Namespace(config=None, replicas=['localhost:27017']) @@ -435,9 +435,9 @@ def test_run_add_replicas(mock_add_replicas): @pytest.mark.usefixtures('ignore_local_config_file') -@patch('bigchaindb.commands.bigchain.remove_replicas') +@patch('bigchaindb.commands.bigchaindb.remove_replicas') def test_run_remove_replicas(mock_remove_replicas): - from bigchaindb.commands.bigchain import run_remove_replicas + from bigchaindb.commands.bigchaindb import run_remove_replicas from bigchaindb.backend.exceptions import OperationError args = Namespace(config=None, replicas=['localhost:27017']) diff --git a/tests/commands/test_utils.py b/tests/commands/test_utils.py index 5f190717..f3b64c18 100644 --- a/tests/commands/test_utils.py +++ b/tests/commands/test_utils.py @@ -54,7 +54,7 @@ def test_configure_bigchaindb_logging(log_level): def test_start_raises_if_command_not_implemented(): from bigchaindb.commands import utils - from bigchaindb.commands.bigchain import create_parser + from bigchaindb.commands.bigchaindb import create_parser parser = create_parser() @@ -66,7 +66,7 @@ def test_start_raises_if_command_not_implemented(): def test_start_raises_if_no_arguments_given(): from bigchaindb.commands import utils - from bigchaindb.commands.bigchain import create_parser + from 
bigchaindb.commands.bigchaindb import create_parser parser = create_parser() From 57d3770564d8ef8d6665815a3d53f2d30faa6ac4 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Fri, 31 Mar 2017 13:26:19 +0200 Subject: [PATCH 030/150] Add missing underline title characters --- docs/server/source/appendices/commands.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/appendices/commands.rst b/docs/server/source/appendices/commands.rst index fd367cdb..460145f4 100644 --- a/docs/server/source/appendices/commands.rst +++ b/docs/server/source/appendices/commands.rst @@ -7,7 +7,7 @@ Command Line Interface :mod:`bigchaindb.commands.bigchaindb` ------------------------------------ +------------------------------------- .. automodule:: bigchaindb.commands.bigchaindb From cee2f94f89f632fd853430825a683c7d92407729 Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Mon, 3 Apr 2017 11:57:56 +0200 Subject: [PATCH 031/150] Remove benchmarking-tests folder. Remove references to removed folders. 
--- .gitattributes | 4 +- .gitignore | 2 - benchmarking-tests/README.md | 3 - benchmarking-tests/benchmark_utils.py | 154 -------------------------- benchmarking-tests/fabfile.py | 46 -------- benchmarking-tests/test1/README.md | 20 ---- codecov.yml | 2 - 7 files changed, 1 insertion(+), 230 deletions(-) delete mode 100644 benchmarking-tests/README.md delete mode 100644 benchmarking-tests/benchmark_utils.py delete mode 100644 benchmarking-tests/fabfile.py delete mode 100644 benchmarking-tests/test1/README.md diff --git a/.gitattributes b/.gitattributes index cd945c78..d278a72d 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,11 +1,9 @@ -benchmarking-tests export-ignore deploy-cluster-aws export-ignore docs export-ignore ntools export-ignore -speed-tests export-ignore tests export-ignore .gitattributes export-ignore .gitignore export-ignore .travis.yml export-ignore *.md export-ignore -codecov.yml export-ignore \ No newline at end of file +codecov.yml export-ignore diff --git a/.gitignore b/.gitignore index efa00db2..7aba48d1 100644 --- a/.gitignore +++ b/.gitignore @@ -71,8 +71,6 @@ deploy-cluster-aws/confiles/ deploy-cluster-aws/client_confile deploy-cluster-aws/hostlist.py deploy-cluster-aws/ssh_key.py -benchmarking-tests/hostlist.py -benchmarking-tests/ssh_key.py # Ansible-specific files ntools/one-m/ansible/hosts diff --git a/benchmarking-tests/README.md b/benchmarking-tests/README.md deleted file mode 100644 index d94ec70b..00000000 --- a/benchmarking-tests/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Benchmarking tests - -This folder contains util files and test case folders to benchmark the performance of a BigchainDB cluster. 
\ No newline at end of file diff --git a/benchmarking-tests/benchmark_utils.py b/benchmarking-tests/benchmark_utils.py deleted file mode 100644 index d7418a36..00000000 --- a/benchmarking-tests/benchmark_utils.py +++ /dev/null @@ -1,154 +0,0 @@ -import multiprocessing as mp -import uuid -import argparse -import csv -import time -import logging -import rethinkdb as r - -from bigchaindb.common.transaction import Transaction - -from bigchaindb import Bigchain -from bigchaindb.utils import ProcessGroup -from bigchaindb.commands import utils - - -SIZE_OF_FILLER = {'minimal': 0, - 'small': 10**3, - 'medium': 10**4, - 'large': 10**5} - - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -def create_write_transaction(tx_left, payload_filler): - b = Bigchain() - payload_dict = {} - if payload_filler: - payload_dict['filler'] = payload_filler - while tx_left > 0: - # Include a random uuid string in the payload - # to prevent duplicate transactions - # (i.e. transactions with the same hash) - payload_dict['msg'] = str(uuid.uuid4()) - tx = Transaction.create([b.me], [b.me], payload=payload_dict) - tx = tx.sign([b.me_private]) - b.write_transaction(tx) - tx_left -= 1 - - -def run_add_backlog(args): - tx_left = args.num_transactions // mp.cpu_count() - payload_filler = 'x' * SIZE_OF_FILLER[args.payload_size] - workers = ProcessGroup(target=create_write_transaction, - args=(tx_left, payload_filler)) - workers.start() - - -def run_gather_metrics(args): - # setup a rethinkdb connection - conn = r.connect(args.bigchaindb_host, 28015, 'bigchain') - - # setup csv writer - csv_file = open(args.csvfile, 'w') - csv_writer = csv.writer(csv_file) - - # query for the number of transactions on the backlog - num_transactions = r.table('backlog').count().run(conn) - num_transactions_received = 0 - initial_time = None - logger.info('Starting gathering metrics.') - logger.info('{} transasctions in the backlog'.format(num_transactions)) - logger.info('This process 
should exit automatically. ' - 'If this does not happen you can exit at any time using Ctrl-C ' - 'saving all the metrics gathered up to this point.') - - logger.info('\t{:<20} {:<20} {:<20} {:<20}'.format( - 'timestamp', - 'tx in block', - 'tx/s', - '% complete' - )) - - # listen to the changefeed - try: - for change in r.table('bigchain').changes().run(conn): - # check only for new blocks - if change['old_val'] is None: - block_num_transactions = len( - change['new_val']['block']['transactions'] - ) - time_now = time.time() - csv_writer.writerow( - [str(time_now), str(block_num_transactions)] - ) - - # log statistics - if initial_time is None: - initial_time = time_now - - num_transactions_received += block_num_transactions - elapsed_time = time_now - initial_time - percent_complete = round( - (num_transactions_received / num_transactions) * 100 - ) - - if elapsed_time != 0: - transactions_per_second = round( - num_transactions_received / elapsed_time - ) - else: - transactions_per_second = float('nan') - - logger.info('\t{:<20} {:<20} {:<20} {:<20}'.format( - time_now, - block_num_transactions, - transactions_per_second, - percent_complete - )) - - if (num_transactions - num_transactions_received) == 0: - break - except KeyboardInterrupt: - logger.info('Interrupted. 
Exiting early...') - finally: - # close files - csv_file.close() - - -def main(): - parser = argparse.ArgumentParser(description='BigchainDB benchmarking utils') - subparsers = parser.add_subparsers(title='Commands', dest='command') - - # add transactions to backlog - backlog_parser = subparsers.add_parser('add-backlog', - help='Add transactions to the backlog') - backlog_parser.add_argument('num_transactions', - metavar='num_transactions', - type=int, default=0, - help='Number of transactions to add to the backlog') - backlog_parser.add_argument('-s', '--payload-size', - choices=SIZE_OF_FILLER.keys(), - default='minimal', - help='Payload size') - - # metrics - metrics_parser = subparsers.add_parser('gather-metrics', - help='Gather metrics to a csv file') - - metrics_parser.add_argument('-b', '--bigchaindb-host', - required=True, - help=('Bigchaindb node hostname to connect ' - 'to gather cluster metrics')) - - metrics_parser.add_argument('-c', '--csvfile', - required=True, - help='Filename to save the metrics') - - utils.start(parser, globals()) - - -if __name__ == '__main__': - main() diff --git a/benchmarking-tests/fabfile.py b/benchmarking-tests/fabfile.py deleted file mode 100644 index 0dd4e964..00000000 --- a/benchmarking-tests/fabfile.py +++ /dev/null @@ -1,46 +0,0 @@ -from __future__ import with_statement, unicode_literals - -from fabric.api import sudo, env, hosts -from fabric.api import task, parallel -from fabric.contrib.files import sed -from fabric.operations import run, put -from fabric.context_managers import settings - -from hostlist import public_dns_names -from ssh_key import ssh_key_path - -# Ignore known_hosts -# http://docs.fabfile.org/en/1.10/usage/env.html#disable-known-hosts -env.disable_known_hosts = True - -# What remote servers should Fabric connect to? With what usernames? 
-env.user = 'ubuntu' -env.hosts = public_dns_names - -# SSH key files to try when connecting: -# http://docs.fabfile.org/en/1.10/usage/env.html#key-filename -env.key_filename = ssh_key_path - - -@task -@parallel -def put_benchmark_utils(): - put('benchmark_utils.py') - - -@task -@parallel -def prepare_backlog(num_transactions=10000): - run('python3 benchmark_utils.py add-backlog {}'.format(num_transactions)) - - -@task -@parallel -def start_bigchaindb(): - run('screen -d -m bigchaindb start &', pty=False) - - -@task -@parallel -def kill_bigchaindb(): - run('killall bigchaindb') diff --git a/benchmarking-tests/test1/README.md b/benchmarking-tests/test1/README.md deleted file mode 100644 index 38a4569b..00000000 --- a/benchmarking-tests/test1/README.md +++ /dev/null @@ -1,20 +0,0 @@ -# Transactions per second - -Measure how many blocks per second are created on the _bigchain_ with a pre filled backlog. - -1. Deploy an aws cluster https://docs.bigchaindb.com/projects/server/en/latest/clusters-feds/aws-testing-cluster.html -2. Make a symbolic link to hostlist.py: `ln -s ../deploy-cluster-aws/hostlist.py .` -3. Make a symbolic link to bigchaindb.pem: -```bash -mkdir pem -cd pem -ln -s ../deploy-cluster-aws/pem/bigchaindb.pem . 
-``` - -Then: - -```bash -fab put_benchmark_utils -fab prepare_backlog: # wait for process to finish -fab start_bigchaindb -``` diff --git a/codecov.yml b/codecov.yml index 547c6b99..0ab4582d 100644 --- a/codecov.yml +++ b/codecov.yml @@ -29,8 +29,6 @@ coverage: - "docs/*" - "tests/*" - "bigchaindb/version.py" - - "benchmarking-tests/*" - - "speed-tests/*" - "ntools/*" - "k8s/*" From 2560f02c36c08867c544858159cdb576160668ec Mon Sep 17 00:00:00 2001 From: Jack Riches Date: Mon, 3 Apr 2017 17:19:03 +0100 Subject: [PATCH 032/150] Fix syntax error (missing comma) --- bigchaindb/commands/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigchaindb/commands/utils.py b/bigchaindb/commands/utils.py index 15887340..9bec5a03 100644 --- a/bigchaindb/commands/utils.py +++ b/bigchaindb/commands/utils.py @@ -198,7 +198,7 @@ base_parser.add_argument('-c', '--config', '(use "-" for stdout)') base_parser.add_argument('-l', '--log-level', - type=str.upper # convert to uppercase for comparison to choices + type=str.upper, # convert to uppercase for comparison to choices choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], default='INFO', help='Log level') From d5c8d3067e06a95ae72d39e0bf9698bb6000ba68 Mon Sep 17 00:00:00 2001 From: Jack Riches Date: Mon, 3 Apr 2017 23:06:36 +0100 Subject: [PATCH 033/150] Use two spaces before inline comment (PEP8) (fix flake8 error) --- bigchaindb/commands/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigchaindb/commands/utils.py b/bigchaindb/commands/utils.py index 9bec5a03..f163a825 100644 --- a/bigchaindb/commands/utils.py +++ b/bigchaindb/commands/utils.py @@ -198,7 +198,7 @@ base_parser.add_argument('-c', '--config', '(use "-" for stdout)') base_parser.add_argument('-l', '--log-level', - type=str.upper, # convert to uppercase for comparison to choices + type=str.upper, # convert to uppercase for comparison to choices choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], 
default='INFO', help='Log level') From bb68a44b9674fd0a10f38254188b5526400eaba8 Mon Sep 17 00:00:00 2001 From: Anuj Date: Tue, 4 Apr 2017 13:55:24 +0530 Subject: [PATCH 034/150] Renamed bigchain import to bigchaindb --- tests/commands/test_commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py index 2670725a..6fb424d6 100644 --- a/tests/commands/test_commands.py +++ b/tests/commands/test_commands.py @@ -152,7 +152,7 @@ def test_drop_db_when_interactive_yes(mock_db_drop, monkeypatch): @patch('bigchaindb.backend.schema.drop_database') def test_drop_db_when_db_does_not_exist(mock_db_drop, capsys): from bigchaindb import config - from bigchaindb.commands.bigchain import run_drop + from bigchaindb.commands.bigchaindb import run_drop from bigchaindb.common.exceptions import DatabaseDoesNotExist args = Namespace(config=None, yes=True) mock_db_drop.side_effect = DatabaseDoesNotExist From 6f916d5781c3cea4e7c7d6447652953fe0b5be30 Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Tue, 4 Apr 2017 11:25:26 +0200 Subject: [PATCH 035/150] Fixed documentation about transactions endpoint. (#1360) * Fixed documentation about transactions endpoint. * clarify how bigchaindb handles invalid transactions * rephrase --- .../source/drivers-clients/http-client-server-api.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/server/source/drivers-clients/http-client-server-api.rst b/docs/server/source/drivers-clients/http-client-server-api.rst index 26ccd2f5..39e4395e 100644 --- a/docs/server/source/drivers-clients/http-client-server-api.rst +++ b/docs/server/source/drivers-clients/http-client-server-api.rst @@ -46,8 +46,12 @@ Transactions Get the transaction with the ID ``tx_id``. - This endpoint returns a transaction only if a ``VALID`` block on - ``bigchain`` exists. 
+ This endpoint returns a transaction if it was included in a ``VALID`` block, + if it is still waiting to be processed (``BACKLOG``) or is still in an + undecided block (``UNDECIDED``). All instances of a transaction in invalid + blocks are ignored and treated as if they don't exist. If a request is made + for a transaction and instances of that transaction are found only in + invalid blocks, then the response will be ``404 Not Found``. :param tx_id: transaction ID :type tx_id: hex string From 1e07a5b111efff10451b6ca94f5dade14c8d0c58 Mon Sep 17 00:00:00 2001 From: vrde Date: Tue, 28 Mar 2017 14:51:02 +0200 Subject: [PATCH 036/150] Add ssl, login, and passwd to configure command --- bigchaindb/__init__.py | 39 +++++++++++++++++++++++----- bigchaindb/commands/bigchaindb.py | 17 +++++-------- bigchaindb/commands/utils.py | 42 +++++++++++++++++++++++++++++-- tests/commands/test_utils.py | 27 ++++++++++++++++++++ tests/test_config_utils.py | 12 +++++++++ 5 files changed, 118 insertions(+), 19 deletions(-) diff --git a/bigchaindb/__init__.py b/bigchaindb/__init__.py index c0e4fd56..1be419b3 100644 --- a/bigchaindb/__init__.py +++ b/bigchaindb/__init__.py @@ -5,24 +5,51 @@ import os # PORT_NUMBER = reduce(lambda x, y: x * y, map(ord, 'BigchainDB')) % 2**16 # basically, the port number is 9984 -_database_rethinkdb = { - 'backend': os.environ.get('BIGCHAINDB_DATABASE_BACKEND', 'rethinkdb'), + +_base_database_rethinkdb = { 'host': os.environ.get('BIGCHAINDB_DATABASE_HOST', 'localhost'), 'port': int(os.environ.get('BIGCHAINDB_DATABASE_PORT', 28015)), 'name': os.environ.get('BIGCHAINDB_DATABASE_NAME', 'bigchain'), - 'connection_timeout': 5000, - 'max_tries': 3, } -_database_mongodb = { - 'backend': os.environ.get('BIGCHAINDB_DATABASE_BACKEND', 'mongodb'), +# The following variable is used by `bigchaindb configure` to +# prompt the user for database values. We cannot rely on +# _base_database_rethinkdb.keys() or _base_database_mongodb.keys() +# because dicts are unordered. 
I tried to configure + +_base_database_rethinkdb_keys = ('host', 'port', 'name') + +_base_database_mongodb = { 'host': os.environ.get('BIGCHAINDB_DATABASE_HOST', 'localhost'), 'port': int(os.environ.get('BIGCHAINDB_DATABASE_PORT', 27017)), 'name': os.environ.get('BIGCHAINDB_DATABASE_NAME', 'bigchain'), 'replicaset': os.environ.get('BIGCHAINDB_DATABASE_REPLICASET', 'bigchain-rs'), + 'ssl': bool(os.environ.get('BIGCHAINDB_DATABASE_SSL', False)), + 'login': os.environ.get('BIGCHAINDB_DATABASE_LOGIN'), + 'password': os.environ.get('BIGCHAINDB_DATABASE_PASSWORD') +} + +_base_database_mongodb_keys = ('host', 'port', 'name', 'replicaset', + 'ssl', 'login', 'password') + +_database_rethinkdb = { + 'backend': os.environ.get('BIGCHAINDB_DATABASE_BACKEND', 'rethinkdb'), 'connection_timeout': 5000, 'max_tries': 3, } +_database_rethinkdb.update(_base_database_rethinkdb) + +_database_mongodb = { + 'backend': os.environ.get('BIGCHAINDB_DATABASE_BACKEND', 'mongodb'), + 'connection_timeout': 5000, + 'max_tries': 3, +} +_database_mongodb.update(_base_database_mongodb) + +_database_keys_map = { + 'mongodb': _base_database_mongodb_keys, + 'rethinkdb': _base_database_rethinkdb_keys +} _database_map = { 'mongodb': _database_mongodb, diff --git a/bigchaindb/commands/bigchaindb.py b/bigchaindb/commands/bigchaindb.py index ce0cbfa0..d4e37daa 100644 --- a/bigchaindb/commands/bigchaindb.py +++ b/bigchaindb/commands/bigchaindb.py @@ -88,26 +88,21 @@ def run_configure(args, skip_if_exists=False): # select the correct config defaults based on the backend print('Generating default configuration for backend {}' .format(args.backend), file=sys.stderr) + database_keys = bigchaindb._database_keys_map[args.backend] conf['database'] = bigchaindb._database_map[args.backend] if not args.yes: for key in ('bind', ): val = conf['server'][key] - conf['server'][key] = \ - input_on_stderr('API Server {}? (default `{}`): '.format(key, val)) \ - or val + conf['server'][key] = input_on_stderr('API Server {}? 
(default `{}`): '.format(key, val), val) - for key in ('host', 'port', 'name'): + for key in database_keys: val = conf['database'][key] - conf['database'][key] = \ - input_on_stderr('Database {}? (default `{}`): '.format(key, val)) \ - or val + conf['database'][key] = input_on_stderr('Database {}? (default `{}`): '.format(key, val), val) val = conf['backlog_reassign_delay'] - conf['backlog_reassign_delay'] = \ - input_on_stderr(('Stale transaction reassignment delay (in ' - 'seconds)? (default `{}`): '.format(val))) \ - or val + conf['backlog_reassign_delay'] = input_on_stderr( + 'Stale transaction reassignment delay (in seconds)? (default `{}`): '.format(val), val) if config_path != '-': bigchaindb.config_utils.write_config(conf, config_path) diff --git a/bigchaindb/commands/utils.py b/bigchaindb/commands/utils.py index f163a825..cd59856c 100644 --- a/bigchaindb/commands/utils.py +++ b/bigchaindb/commands/utils.py @@ -74,12 +74,50 @@ def start_logging_process(command): return start_logging +def _convert(value, default=None, convert=None): + def convert_bool(value): + if value.lower() in ('true', 't', 'yes', 'y'): + return True + if value.lower() in ('false', 'f', 'no', 'n'): + return False + raise ValueError('{} cannot be converted to bool'.format(value)) + + if value == '': + value = None + + if convert is None: + if default is not None: + convert = type(default) + else: + convert = str + + if convert == bool: + convert = convert_bool + + if value is None: + return default + else: + return convert(value) + + # We need this because `input` always prints on stdout, while it should print # to stderr. It's a very old bug, check it out here: # - https://bugs.python.org/issue1927 -def input_on_stderr(prompt=''): +def input_on_stderr(prompt='', default=None, convert=None): + """Output a string to stderr and wait for input. + + Args: + prompt (str): the message to display. 
+ default: the default value to return if the user + leaves the field empty + convert (callable): a callable to be used to convert + the value the user inserted. If None, the type of + ``default`` will be used. + """ + print(prompt, end='', file=sys.stderr) - return builtins.input() + value = builtins.input() + return _convert(value, default, convert) def start_rethinkdb(): diff --git a/tests/commands/test_utils.py b/tests/commands/test_utils.py index f3b64c18..85aa8de4 100644 --- a/tests/commands/test_utils.py +++ b/tests/commands/test_utils.py @@ -13,6 +13,33 @@ def reset_bigchaindb_config(monkeypatch): monkeypatch.setattr('bigchaindb.config', bigchaindb._config) +def test_input_on_stderr(): + from bigchaindb.commands.utils import input_on_stderr, _convert + + with patch('builtins.input', return_value='I love cats'): + assert input_on_stderr() == 'I love cats' + + # input_on_stderr uses `_convert` internally, from now on we will + # just use that function + + assert _convert('hack the planet') == 'hack the planet' + assert _convert('42') == '42' + assert _convert('42', default=10) == 42 + assert _convert('', default=10) == 10 + assert _convert('42', convert=int) == 42 + assert _convert('True', convert=bool) is True + assert _convert('False', convert=bool) is False + assert _convert('t', convert=bool) is True + assert _convert('3.14', default=1.0) == 3.14 + assert _convert('TrUe', default=False) is True + + with pytest.raises(ValueError): + assert _convert('TRVE', default=False) + + with pytest.raises(ValueError): + assert _convert('ಠ_ಠ', convert=int) + + @pytest.mark.usefixtures('ignore_local_config_file', 'reset_bigchaindb_config') def test_configure_bigchaindb_configures_bigchaindb(): from bigchaindb.commands.utils import configure_bigchaindb diff --git a/tests/test_config_utils.py b/tests/test_config_utils.py index 4234e242..51e4d595 100644 --- a/tests/test_config_utils.py +++ b/tests/test_config_utils.py @@ -19,6 +19,15 @@ def clean_config(monkeypatch, 
request): monkeypatch.setattr('bigchaindb.config', original_config) +def test_ordered_keys_match_database_config(): + import bigchaindb + + assert set(bigchaindb._base_database_rethinkdb.keys()) ==\ + set(bigchaindb._base_database_rethinkdb_keys) + assert set(bigchaindb._base_database_mongodb.keys()) ==\ + set(bigchaindb._base_database_mongodb_keys) + + def test_bigchain_instance_is_initialized_when_conf_provided(request): import bigchaindb from bigchaindb import config_utils @@ -181,6 +190,9 @@ def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request): 'connection_timeout': 5000, 'max_tries': 3, 'replicaset': 'bigchain-rs', + 'ssl': False, + 'login': None, + 'password': None } database = {} From cb87221bdf2761f4d399b4da5cf45d8040f0587d Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Tue, 4 Apr 2017 17:57:44 +0200 Subject: [PATCH 037/150] Voting pipeline now checks for duplicated transactions in blocks during validation. --- bigchaindb/models.py | 9 +++++---- tests/pipelines/test_vote.py | 12 ++++++++++++ tests/test_models.py | 2 +- 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/bigchaindb/models.py b/bigchaindb/models.py index c371e792..2f46ba20 100644 --- a/bigchaindb/models.py +++ b/bigchaindb/models.py @@ -187,6 +187,11 @@ class Block(object): if not self.is_signature_valid(): raise InvalidSignature('Invalid block signature') + # Check that the block contains no duplicated transactions + txids = [tx.id for tx in self.transactions] + if len(txids) != len(set(txids)): + raise DuplicateTransaction('Block has duplicate transaction') + def _validate_block_transactions(self, bigchain): """Validate Block transactions. 
@@ -196,10 +201,6 @@ class Block(object): Raises: ValidationError: If an invalid transaction is found """ - txids = [tx.id for tx in self.transactions] - if len(txids) != len(set(txids)): - raise DuplicateTransaction('Block has duplicate transaction') - for tx in self.transactions: # If a transaction is not valid, `validate_transactions` will # throw an an exception and block validation will be canceled. diff --git a/tests/pipelines/test_vote.py b/tests/pipelines/test_vote.py index fa167d17..7df7ca11 100644 --- a/tests/pipelines/test_vote.py +++ b/tests/pipelines/test_vote.py @@ -111,6 +111,18 @@ def test_validate_block_with_invalid_id(b): assert invalid_dummy_tx == [vote_obj.invalid_dummy_tx] +@pytest.mark.genesis +def test_validate_block_with_duplicated_transactions(b): + from bigchaindb.pipelines import vote + + tx = dummy_tx(b) + block = b.create_block([tx, tx]).to_dict() + + vote_obj = vote.Vote() + block_id, invalid_dummy_tx = vote_obj.validate_block(block) + assert invalid_dummy_tx == [vote_obj.invalid_dummy_tx] + + @pytest.mark.genesis def test_validate_block_with_invalid_signature(b): from bigchaindb.pipelines import vote diff --git a/tests/test_models.py b/tests/test_models.py index db6a6975..6e559cb2 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -152,4 +152,4 @@ class TestBlockModel(object): tx = Transaction.create([b.me], [([b.me], 1)]) block = b.create_block([tx, tx]) with raises(DuplicateTransaction): - block._validate_block_transactions(b) + block._validate_block(b) From 5d2f66524c04be4ef30f73732d5fb13ddab8ecae Mon Sep 17 00:00:00 2001 From: vrde Date: Tue, 4 Apr 2017 18:58:34 +0200 Subject: [PATCH 038/150] Cleanup configuration keys for db --- bigchaindb/__init__.py | 13 ++++--------- tests/test_config_utils.py | 10 ---------- 2 files changed, 4 insertions(+), 19 deletions(-) diff --git a/bigchaindb/__init__.py b/bigchaindb/__init__.py index 1be419b3..4c555e47 100644 --- a/bigchaindb/__init__.py +++ b/bigchaindb/__init__.py @@ 
-17,7 +17,10 @@ _base_database_rethinkdb = { # _base_database_rethinkdb.keys() or _base_database_mongodb.keys() # because dicts are unordered. I tried to configure -_base_database_rethinkdb_keys = ('host', 'port', 'name') +_database_keys_map = { + 'mongodb': ('host', 'port', 'name', 'replicaset'), + 'rethinkdb': ('host', 'port', 'name') +} _base_database_mongodb = { 'host': os.environ.get('BIGCHAINDB_DATABASE_HOST', 'localhost'), @@ -29,9 +32,6 @@ _base_database_mongodb = { 'password': os.environ.get('BIGCHAINDB_DATABASE_PASSWORD') } -_base_database_mongodb_keys = ('host', 'port', 'name', 'replicaset', - 'ssl', 'login', 'password') - _database_rethinkdb = { 'backend': os.environ.get('BIGCHAINDB_DATABASE_BACKEND', 'rethinkdb'), 'connection_timeout': 5000, @@ -46,11 +46,6 @@ _database_mongodb = { } _database_mongodb.update(_base_database_mongodb) -_database_keys_map = { - 'mongodb': _base_database_mongodb_keys, - 'rethinkdb': _base_database_rethinkdb_keys -} - _database_map = { 'mongodb': _database_mongodb, 'rethinkdb': _database_rethinkdb diff --git a/tests/test_config_utils.py b/tests/test_config_utils.py index 51e4d595..04c70325 100644 --- a/tests/test_config_utils.py +++ b/tests/test_config_utils.py @@ -11,7 +11,6 @@ ORIGINAL_CONFIG = copy.deepcopy(bigchaindb._config) @pytest.fixture(scope='function', autouse=True) def clean_config(monkeypatch, request): - import bigchaindb original_config = copy.deepcopy(ORIGINAL_CONFIG) backend = request.config.getoption('--database-backend') @@ -19,15 +18,6 @@ def clean_config(monkeypatch, request): monkeypatch.setattr('bigchaindb.config', original_config) -def test_ordered_keys_match_database_config(): - import bigchaindb - - assert set(bigchaindb._base_database_rethinkdb.keys()) ==\ - set(bigchaindb._base_database_rethinkdb_keys) - assert set(bigchaindb._base_database_mongodb.keys()) ==\ - set(bigchaindb._base_database_mongodb_keys) - - def test_bigchain_instance_is_initialized_when_conf_provided(request): import bigchaindb 
from bigchaindb import config_utils From 09a440ee91b7bc3bd64069f70177338887981d04 Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Wed, 5 Apr 2017 16:52:56 +0200 Subject: [PATCH 039/150] Fix get_spent incorrectly raising CriticalDoubleSpent --- bigchaindb/core.py | 55 +++++++++++++++++++++++++++++----------------- 1 file changed, 35 insertions(+), 20 deletions(-) diff --git a/bigchaindb/core.py b/bigchaindb/core.py index e6783a6d..91f19f66 100644 --- a/bigchaindb/core.py +++ b/bigchaindb/core.py @@ -324,8 +324,12 @@ class Bigchain(object): def get_spent(self, txid, output): """Check if a `txid` was already used as an input. - A transaction can be used as an input for another transaction. Bigchain needs to make sure that a - given `txid` is only used once. + A transaction can be used as an input for another transaction. Bigchain + needs to make sure that a given `txid` is only used once. + + This method will check if the `txid` and `output` has already been + spent in a transaction that is in either the `VALID`, `UNDECIDED` or + `BACKLOG` state. Args: txid (str): The id of the transaction @@ -334,32 +338,43 @@ class Bigchain(object): Returns: The transaction (Transaction) that used the `txid` as an input else `None` + + Raises: + CriticalDoubleSpend: If the given `txid` and `output` was spent in + more than one valid transaction. """ # checks if an input was already spent # checks if the bigchain has any transaction with input {'txid': ..., # 'output': ...} - transactions = list(backend.query.get_spent(self.connection, txid, output)) + transactions = list(backend.query.get_spent(self.connection, txid, + output)) # a transaction_id should have been spent at most one time - if transactions: - # determine if these valid transactions appear in more than one valid block - num_valid_transactions = 0 - for transaction in transactions: - # ignore invalid blocks - # FIXME: Isn't there a faster solution than doing I/O again? 
- if self.get_transaction(transaction['id']): - num_valid_transactions += 1 - if num_valid_transactions > 1: - raise core_exceptions.CriticalDoubleSpend( - '`{}` was spent more than once. There is a problem' - ' with the chain'.format(txid)) + # determine if these valid transactions appear in more than one valid + # block + num_valid_transactions = 0 + non_invalid_transactions = [] + for transaction in transactions: + # ignore transactions in invalid blocks + # FIXME: Isn't there a faster solution than doing I/O again? + _, status = self.get_transaction(transaction['id'], + include_status=True) + if status == self.TX_VALID: + num_valid_transactions += 1 + # `txid` can only have been spent in at most on valid block. + if num_valid_transactions > 1: + raise core_exceptions.CriticalDoubleSpend( + '`{}` was spent more than once. There is a problem' + ' with the chain'.format(txid)) + # if its not and invalid transaction + if status is not None: + non_invalid_transactions.append(transaction) - if num_valid_transactions: - return Transaction.from_dict(transactions[0]) - else: - # all queried transactions were invalid - return None + if non_invalid_transactions: + return Transaction.from_dict(non_invalid_transactions[0]) else: + # Either no transaction was returned spending the `txid` as + # input or the returned transactions are not valid. return None def get_outputs(self, owner): From de04dcda0c0a51bdf24d5f0c15772c624a5a558d Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Thu, 6 Apr 2017 16:07:35 +0200 Subject: [PATCH 040/150] Fixed docstring. Removed redundant `else` branch. --- bigchaindb/core.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/bigchaindb/core.py b/bigchaindb/core.py index 91f19f66..5d2e9c03 100644 --- a/bigchaindb/core.py +++ b/bigchaindb/core.py @@ -325,9 +325,9 @@ class Bigchain(object): """Check if a `txid` was already used as an input. A transaction can be used as an input for another transaction. 
Bigchain - needs to make sure that a given `txid` is only used once. + needs to make sure that a given `(txid, output)` is only used once. - This method will check if the `txid` and `output` has already been + This method will check if the `(txid, output)` has already been spent in a transaction that is in either the `VALID`, `UNDECIDED` or `BACKLOG` state. @@ -336,11 +336,11 @@ class Bigchain(object): output (num): the index of the output in the respective transaction Returns: - The transaction (Transaction) that used the `txid` as an input else - `None` + The transaction (Transaction) that used the `(txid, output)` as an + input else `None` Raises: - CriticalDoubleSpend: If the given `txid` and `output` was spent in + CriticalDoubleSpend: If the given `(txid, output)` was spent in more than one valid transaction. """ # checks if an input was already spent @@ -372,10 +372,9 @@ class Bigchain(object): if non_invalid_transactions: return Transaction.from_dict(non_invalid_transactions[0]) - else: - # Either no transaction was returned spending the `txid` as - # input or the returned transactions are not valid. - return None + + # Either no transaction was returned spending the `(txid, output)` as + # input or the returned transactions are not valid. 
from enum import Enum
from multiprocessing import Queue


class EventTypes(Enum):
    """Types of events dispatched through the events queue."""

    BLOCK_VALID = 1
    BLOCK_INVALID = 2


class Event:
    """A generic event: a type plus an opaque payload."""

    def __init__(self, event_type, event_data):
        """Create a new event.

        Args:
            event_type (:class:`EventTypes`): the type of the event.
            event_data: the payload of the event.
        """
        self.type = event_type
        self.data = event_data


class EventHandler:
    """Thin wrapper reading/writing events on a shared queue."""

    def __init__(self, events_queue):
        """Create a new handler.

        Args:
            events_queue: a queue (e.g. ``multiprocessing.Queue``) shared
                between event producers and consumers.
        """
        self.events_queue = events_queue

    def put_event(self, event, timeout=None):
        """Put an event on the queue.

        Args:
            event (:class:`Event`): the event to publish.
            timeout (float): seconds to wait for a free slot before raising
                ``queue.Full``. ``None`` blocks forever.
        """
        # BUG FIX: the original passed the literal ``timeout=None`` to the
        # queue and silently ignored the caller-supplied value.
        self.events_queue.put(event, timeout=timeout)

    def get_event(self, timeout=None):
        """Get the next event from the queue.

        Args:
            timeout (float): seconds to wait for an item before raising
                ``queue.Empty``. ``None`` blocks forever.

        Returns:
            :class:`Event`: the next event on the queue.
        """
        # BUG FIX: honor the caller-supplied timeout (was hard-coded None).
        return self.events_queue.get(timeout=timeout)


def setup_events_queue():
    """Create the events queue shared between processes.

    Returns:
        a new ``multiprocessing.Queue``.
    """
    # TODO: set bounds to the queue
    return Queue()
logging.getLogger('pipeline.election.results') class Election: """Election class.""" - def __init__(self): + def __init__(self, events_queue): self.bigchain = Bigchain() + self.event_handler = EventHandler(events_queue) def check_for_quorum(self, next_vote): """ @@ -42,6 +44,7 @@ class Election: next_block = self.bigchain.get_block(block_id) result = self.bigchain.block_election(next_block) + self.handle_block_events(result, block_id) if result['status'] == self.bigchain.BLOCK_INVALID: return Block.from_dict(next_block) @@ -67,9 +70,20 @@ class Election: self.bigchain.write_transaction(tx) return invalid_block + def handle_block_events(self, result, block_id): + if result['status'] == self.bigchain.BLOCK_UNDECIDED: + return + elif result['status'] == self.bigchain.BLOCK_INVALID: + event_type = EventTypes.BLOCK_INVALID + elif result['status'] == self.bigchain.BLOCK_VALID: + event_type = EventTypes.BLOCK_VALID -def create_pipeline(): - election = Election() + event = Event(event_type, {'block_id': block_id}) + self.event_handler.put_event(event) + + +def create_pipeline(events_queue): + election = Election(events_queue) election_pipeline = Pipeline([ Node(election.check_for_quorum), @@ -84,8 +98,8 @@ def get_changefeed(): return backend.get_changefeed(connection, 'votes', ChangeFeed.INSERT) -def start(): - pipeline = create_pipeline() +def start(events_queue): + pipeline = create_pipeline(events_queue) pipeline.setup(indata=get_changefeed()) pipeline.start() return pipeline diff --git a/bigchaindb/pipelines/events_consumer_example.py b/bigchaindb/pipelines/events_consumer_example.py new file mode 100644 index 00000000..7e833c82 --- /dev/null +++ b/bigchaindb/pipelines/events_consumer_example.py @@ -0,0 +1,14 @@ +import multiprocessing as mp + +from bigchaindb.events import EventHandler + + +def consume_events(events_queue): + event_handler = EventHandler(events_queue) + while True: + event = event_handler.get_event() + print('Event type: {} Event data: 
{}'.format(event.type, event.data)) + + +def events_consumer(events_queue): + return mp.Process(target=consume_events, args=(events_queue,)) diff --git a/bigchaindb/processes.py b/bigchaindb/processes.py index 01d7a55a..687422ca 100644 --- a/bigchaindb/processes.py +++ b/bigchaindb/processes.py @@ -3,6 +3,8 @@ import multiprocessing as mp import bigchaindb from bigchaindb.pipelines import vote, block, election, stale +from bigchaindb.pipelines.events_consumer_example import events_consumer +from bigchaindb.events import setup_events_queue from bigchaindb.web import server @@ -25,6 +27,13 @@ BANNER = """ def start(): logger.info('Initializing BigchainDB...') + # Create the events queue + # The events queue needs to be initialized once and shared between + # processes. This seems the best way to do it + # At this point only the election processs and the event consumer require + # this queue. + events_queue = setup_events_queue() + # start the processes logger.info('Starting block') block.start() @@ -36,12 +45,17 @@ def start(): stale.start() logger.info('Starting election') - election.start() + election.start(events_queue) # start the web api app_server = server.create_server(bigchaindb.config['server']) p_webapi = mp.Process(name='webapi', target=app_server.run) p_webapi.start() + # start the example events consumer + logger.info('Starting the events consumer example') + p_events_consumer = events_consumer(events_queue) + p_events_consumer.start() + # start message logger.info(BANNER.format(bigchaindb.config['server']['bind'])) From 5d39b42b7a8f32389a28b456ca1cd855a8f47b31 Mon Sep 17 00:00:00 2001 From: vrde Date: Thu, 30 Mar 2017 17:27:03 +0200 Subject: [PATCH 042/150] Add dependencies and first test --- bigchaindb/web/websocket_server.py | 56 ++++++++++++++++++++++++++++++ setup.py | 1 + tests/web/test_websocket_server.py | 15 ++++++++ 3 files changed, 72 insertions(+) create mode 100644 bigchaindb/web/websocket_server.py create mode 100644 
"""WebSocket server for the BigchainDB Event Stream API."""

import asyncio
from uuid import uuid4

import aiohttp
from aiohttp import web


class PoisonPill:
    """Sentinel type used to signal the dispatcher to shut down."""
    pass


# Putting this singleton on the event source terminates ``Dispatcher.publish``.
POISON_PILL = PoisonPill()


class Dispatcher:
    """Dispatch string events from a single source to many websockets."""

    def __init__(self, event_source):
        """Create a new instance.

        Args:
            event_source: an ``asyncio.Queue`` whose elements are strings.
        """
        self.event_source = event_source
        self.subscribers = {}

    def subscribe(self, uuid, ws):
        """Register websocket ``ws`` under the unique identifier ``uuid``."""
        self.subscribers[uuid] = ws

    def unsubscribe(self, uuid):
        """Drop the websocket registered under ``uuid``, if any.

        BUG FIX: without this, handlers had no way to remove closed
        connections, so ``publish`` kept sending to dead websockets.
        """
        self.subscribers.pop(uuid, None)

    @asyncio.coroutine
    def publish(self):
        """Forward events from the source to every subscriber.

        Terminates when ``POISON_PILL`` is received.
        """
        while True:
            event = yield from self.event_source.get()
            if event == POISON_PILL:
                return
            for uuid, ws in self.subscribers.items():
                ws.send_str(event)


@asyncio.coroutine
def websocket_handler(request):
    """Handle a websocket connection for its whole lifetime."""
    ws = web.WebSocketResponse()
    yield from ws.prepare(request)
    uuid = uuid4()
    dispatcher = request.app['dispatcher']
    dispatcher.subscribe(uuid, ws)
    try:
        while True:
            # Consume the input buffer; we only care about connection state.
            msg = yield from ws.receive()
            # BUG FIX: the original looped forever (its ``return ws`` was
            # unreachable) and never noticed a closed connection.
            if msg.type in (aiohttp.WSMsgType.CLOSE,
                            aiohttp.WSMsgType.CLOSED,
                            aiohttp.WSMsgType.ERROR):
                break
    finally:
        # BUG FIX: remove the subscriber so the dispatcher stops
        # publishing to a dead connection.
        dispatcher.unsubscribe(uuid)
    return ws


def init_app(event_source, loop=None):
    """Init the application server.

    Args:
        event_source: an ``asyncio.Queue`` of string events.
        loop: the event loop the dispatcher is scheduled on.

    Return:
        An aiohttp application.
    """
    dispatcher = Dispatcher(event_source)

    # Schedule the dispatcher
    loop.create_task(dispatcher.publish())

    app = web.Application(loop=loop)
    app['dispatcher'] = dispatcher
    app.router.add_get('/', websocket_handler)
    return app
init_app(event_source, loop=loop) + client = yield from test_client(app) + ws = yield from client.ws_connect('/') + yield from event_source.put('antani') + yield from event_source.put(POISON_PILL) + result = yield from ws.receive() + assert result.data == 'antani' From 83397de397179d219938dc63280a61f5f8b56f58 Mon Sep 17 00:00:00 2001 From: vrde Date: Fri, 31 Mar 2017 15:56:29 +0200 Subject: [PATCH 043/150] Add more tests and utils --- bigchaindb/web/websocket_server.py | 61 +++++++++++++++++++++++---- tests/web/test_websocket_server.py | 67 ++++++++++++++++++++++++++++-- 2 files changed, 118 insertions(+), 10 deletions(-) diff --git a/bigchaindb/web/websocket_server.py b/bigchaindb/web/websocket_server.py index 7a5b3d77..9d8f5ef9 100644 --- a/bigchaindb/web/websocket_server.py +++ b/bigchaindb/web/websocket_server.py @@ -1,29 +1,48 @@ """WebSocket server for the BigchainDB Event Stream API.""" import asyncio +import logging from uuid import uuid4 +import aiohttp from aiohttp import web -class PoisonPill: - pass - - -POISON_PILL = PoisonPill() +logger = logging.getLogger(__name__) +POISON_PILL = 'POISON_PILL' class Dispatcher: + """Dispatch events to websockets. + + This class implements a simple publish/subscribe pattern. + """ def __init__(self, event_source): + """Create a new instance. + + Args: + event_source: a source of events. Elements in the queue + should be strings. + """ + self.event_source = event_source self.subscribers = {} def subscribe(self, uuid, ws): + """Add a websocket to the list of subscribers. + + Args: + uuid (str): a unique identifier for the websocket. + ws: the websocket to publish information. 
+ """ + self.subscribers[uuid] = ws @asyncio.coroutine def publish(self): + """Publish new events to the subscribers.""" + while True: event = yield from self.event_source.get() if event == POISON_PILL: @@ -34,17 +53,29 @@ class Dispatcher: @asyncio.coroutine def websocket_handler(request): + """Handle a new socket connection.""" + + logger.debug('New websocket connection.') ws = web.WebSocketResponse() yield from ws.prepare(request) uuid = uuid4() request.app['dispatcher'].subscribe(uuid, ws) + while True: # Consume input buffer - yield from ws.receive() - return ws + msg = yield from ws.receive() + if msg.type == aiohttp.WSMsgType.ERROR: + logger.debug('Websocket exception: {}'.format(ws.exception())) + return def init_app(event_source, loop=None): + """Init the application server. + + Return: + An aiohttp application. + """ + dispatcher = Dispatcher(event_source) # Schedule the dispatcher @@ -54,3 +85,19 @@ def init_app(event_source, loop=None): app['dispatcher'] = dispatcher app.router.add_get('/', websocket_handler) return app + + +@asyncio.coroutine +def constant_event_source(event_source): + while True: + yield from asyncio.sleep(1) + yield from event_source.put('meow') + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + event_source = asyncio.Queue() + + loop.create_task(constant_event_source(event_source)) + app = init_app(event_source, loop=loop) + aiohttp.web.run_app(app, port=9985) diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index fb6d555b..382a20f0 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -1,6 +1,41 @@ +import pytest import asyncio +class MockWebSocket: + def __init__(self): + self.received = [] + + def send_str(self, s): + self.received.append(s) + + +@asyncio.coroutine +@pytest.mark.skipif(reason='This test raises a RuntimeError, dunno how to solve it now.') +def test_dispatcher(loop): + from bigchaindb.web.websocket_server import Dispatcher, 
POISON_PILL + + ws0 = MockWebSocket() + ws1 = MockWebSocket() + + event_source = asyncio.Queue(loop=loop) + dispatcher = Dispatcher(event_source) + + dispatcher.subscribe(0, ws0) + dispatcher.subscribe(1, ws1) + + yield from event_source.put('hack') + yield from event_source.put('the') + + yield from event_source.put('planet!') + yield from event_source.put(POISON_PILL) + + loop.run_until_complete(dispatcher.publish()) + + assert ws0.received == ['hack', 'the', 'planet!'] + assert ws1.received == ['planet!'] + + @asyncio.coroutine def test_websocket(test_client, loop): from bigchaindb.web.websocket_server import init_app, POISON_PILL @@ -9,7 +44,33 @@ def test_websocket(test_client, loop): app = init_app(event_source, loop=loop) client = yield from test_client(app) ws = yield from client.ws_connect('/') - yield from event_source.put('antani') - yield from event_source.put(POISON_PILL) + + yield from event_source.put('hack') + yield from event_source.put('the') + yield from event_source.put('planet!') + result = yield from ws.receive() - assert result.data == 'antani' + assert result.data == 'hack' + + result = yield from ws.receive() + assert result.data == 'the' + + result = yield from ws.receive() + assert result.data == 'planet!' + + yield from event_source.put(POISON_PILL) + + +@asyncio.coroutine +@pytest.mark.skipif(reason="Still don't understand how to trigger custom errors.") +def test_websocket_error(test_client, loop): + from bigchaindb.web.websocket_server import init_app, POISON_PILL + + event_source = asyncio.Queue(loop=loop) + app = init_app(event_source, loop=loop) + client = yield from test_client(app) + ws = yield from client.ws_connect('/') + + yield from ws.close() + + yield from event_source.put(POISON_PILL) From 96daa986994413cefdfc6f933091840233a53ff5 Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Wed, 29 Mar 2017 12:09:14 +0200 Subject: [PATCH 044/150] Adverstise Event stream api in api info endpoint. Updated tests. 
--- bigchaindb/web/views/base.py | 5 +++++ bigchaindb/web/views/info.py | 9 ++++++--- tests/web/test_info.py | 1 + 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/bigchaindb/web/views/base.py b/bigchaindb/web/views/base.py index 171a3bb6..5a0ec97b 100644 --- a/bigchaindb/web/views/base.py +++ b/bigchaindb/web/views/base.py @@ -21,3 +21,8 @@ def make_error(status_code, message=None): def base_url(): return '%s://%s/' % (request.environ['wsgi.url_scheme'], request.environ['HTTP_HOST']) + + +def base_ws_uri(): + """Base websocket uri.""" + return '%s://%s/' % ('ws', request.environ['HTTP_HOST']) diff --git a/bigchaindb/web/views/info.py b/bigchaindb/web/views/info.py index 04a15749..b35c6378 100644 --- a/bigchaindb/web/views/info.py +++ b/bigchaindb/web/views/info.py @@ -4,7 +4,7 @@ import flask from flask_restful import Resource import bigchaindb -from bigchaindb.web.views.base import base_url +from bigchaindb.web.views.base import base_url, base_ws_uri from bigchaindb import version @@ -30,16 +30,19 @@ class RootIndex(Resource): class ApiV1Index(Resource): def get(self): api_root = base_url() + 'api/v1/' + websocket_root = base_ws_uri() + 'api/v1/' docs_url = [ 'https://docs.bigchaindb.com/projects/server/en/v', version.__version__, '/drivers-clients/http-client-server-api.html', ] - return { + return flask.jsonify({ '_links': { 'docs': ''.join(docs_url), 'self': api_root, 'statuses': api_root + 'statuses/', 'transactions': api_root + 'transactions/', + # TODO: The version should probably not be hardcoded + 'streams_v1': websocket_root + 'streams/', }, - } + }) diff --git a/tests/web/test_info.py b/tests/web/test_info.py index c55f467f..93e14cbd 100644 --- a/tests/web/test_info.py +++ b/tests/web/test_info.py @@ -31,5 +31,6 @@ def test_api_v1_endpoint(client): 'self': 'http://localhost/api/v1/', 'statuses': 'http://localhost/api/v1/statuses/', 'transactions': 'http://localhost/api/v1/transactions/', + 'streams_v1': 'ws://localhost/api/v1/streams/', } 
} From 83a7cffc3fe88c2ffeda8a15a19d72c9f010309d Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Mon, 3 Apr 2017 14:29:31 +0200 Subject: [PATCH 045/150] fix tests --- bigchaindb/pipelines/election.py | 31 +++++++++++++++++-------------- bigchaindb/processes.py | 2 +- tests/test_processes.py | 10 ++++++---- 3 files changed, 24 insertions(+), 19 deletions(-) diff --git a/bigchaindb/pipelines/election.py b/bigchaindb/pipelines/election.py index b17f5722..8f3116cc 100644 --- a/bigchaindb/pipelines/election.py +++ b/bigchaindb/pipelines/election.py @@ -23,9 +23,11 @@ logger_results = logging.getLogger('pipeline.election.results') class Election: """Election class.""" - def __init__(self, events_queue): + def __init__(self, events_queue=None): self.bigchain = Bigchain() - self.event_handler = EventHandler(events_queue) + self.event_handler = None + if events_queue: + self.event_handler = EventHandler(events_queue) def check_for_quorum(self, next_vote): """ @@ -71,19 +73,20 @@ class Election: return invalid_block def handle_block_events(self, result, block_id): - if result['status'] == self.bigchain.BLOCK_UNDECIDED: - return - elif result['status'] == self.bigchain.BLOCK_INVALID: - event_type = EventTypes.BLOCK_INVALID - elif result['status'] == self.bigchain.BLOCK_VALID: - event_type = EventTypes.BLOCK_VALID + if self.event_handler: + if result['status'] == self.bigchain.BLOCK_UNDECIDED: + return + elif result['status'] == self.bigchain.BLOCK_INVALID: + event_type = EventTypes.BLOCK_INVALID + elif result['status'] == self.bigchain.BLOCK_VALID: + event_type = EventTypes.BLOCK_VALID - event = Event(event_type, {'block_id': block_id}) - self.event_handler.put_event(event) + event = Event(event_type, {'block_id': block_id}) + self.event_handler.put_event(event) -def create_pipeline(events_queue): - election = Election(events_queue) +def create_pipeline(events_queue=None): + election = Election(events_queue=events_queue) election_pipeline = Pipeline([ 
Node(election.check_for_quorum), @@ -98,8 +101,8 @@ def get_changefeed(): return backend.get_changefeed(connection, 'votes', ChangeFeed.INSERT) -def start(events_queue): - pipeline = create_pipeline(events_queue) +def start(events_queue=None): + pipeline = create_pipeline(events_queue=events_queue) pipeline.setup(indata=get_changefeed()) pipeline.start() return pipeline diff --git a/bigchaindb/processes.py b/bigchaindb/processes.py index 687422ca..5194c05a 100644 --- a/bigchaindb/processes.py +++ b/bigchaindb/processes.py @@ -45,7 +45,7 @@ def start(): stale.start() logger.info('Starting election') - election.start(events_queue) + election.start(events_queue=events_queue) # start the web api app_server = server.create_server(bigchaindb.config['server']) diff --git a/tests/test_processes.py b/tests/test_processes.py index bd69d52c..32d784bb 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -1,6 +1,6 @@ from unittest.mock import patch -from multiprocessing import Process +from multiprocessing import Process, Queue from bigchaindb.pipelines import vote, block, election, stale @@ -9,14 +9,16 @@ from bigchaindb.pipelines import vote, block, election, stale @patch.object(block, 'start') @patch.object(vote, 'start') @patch.object(Process, 'start') -def test_processes_start(mock_vote, mock_block, mock_election, mock_stale, - mock_process): +def test_processes_start(mock_process, mock_vote, mock_block, mock_election, + mock_stale): from bigchaindb import processes processes.start() mock_vote.assert_called_with() mock_block.assert_called_with() - mock_election.assert_called_with() mock_stale.assert_called_with() mock_process.assert_called_with() + assert mock_election.call_count == 1 + # the events queue is declared inside processes.start() + assert type(mock_election.call_args[1]['events_queue']) == type(Queue()) From 730b7482f6c8df79d30b8de48152770d9596cfdf Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Mon, 3 Apr 2017 14:31:38 +0200 Subject: 
[PATCH 046/150] cleanup code --- bigchaindb/events.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigchaindb/events.py b/bigchaindb/events.py index a061ad50..bc448ce3 100644 --- a/bigchaindb/events.py +++ b/bigchaindb/events.py @@ -7,14 +7,14 @@ class EventTypes(Enum): BLOCK_INVALID = 2 -class Event(object): +class Event: def __init__(self, event_type, event_data): self.type = event_type self.data = event_data -class EventHandler(object): +class EventHandler: def __init__(self, events_queue): self.events_queue = events_queue From bcc2e1f781f5c082df091e75ba4aa8ebad7fe20f Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Mon, 3 Apr 2017 14:48:50 +0200 Subject: [PATCH 047/150] fixed pep8 issue --- tests/test_processes.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_processes.py b/tests/test_processes.py index 32d784bb..7f8ffcd9 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -19,6 +19,5 @@ def test_processes_start(mock_process, mock_vote, mock_block, mock_election, mock_block.assert_called_with() mock_stale.assert_called_with() mock_process.assert_called_with() - assert mock_election.call_count == 1 # the events queue is declared inside processes.start() - assert type(mock_election.call_args[1]['events_queue']) == type(Queue()) + assert mock_election.call_count == 1 From a92c091eeb11b4fd1cc1c20684c10e0e1576392f Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Mon, 3 Apr 2017 14:55:21 +0200 Subject: [PATCH 048/150] fix pep8 issue --- tests/test_processes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_processes.py b/tests/test_processes.py index 7f8ffcd9..00716010 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -1,6 +1,6 @@ from unittest.mock import patch -from multiprocessing import Process, Queue +from multiprocessing import Process from bigchaindb.pipelines import vote, block, election, stale From 
64a033b17a49e68b685a386a2ef258290b6c612a Mon Sep 17 00:00:00 2001 From: vrde Date: Mon, 3 Apr 2017 11:48:48 +0200 Subject: [PATCH 049/150] Code cleanup, rename some vars --- bigchaindb/web/websocket_server.py | 60 +++++++++++++++++++----------- 1 file changed, 38 insertions(+), 22 deletions(-) diff --git a/bigchaindb/web/websocket_server.py b/bigchaindb/web/websocket_server.py index 9d8f5ef9..6915d54a 100644 --- a/bigchaindb/web/websocket_server.py +++ b/bigchaindb/web/websocket_server.py @@ -29,15 +29,15 @@ class Dispatcher: self.event_source = event_source self.subscribers = {} - def subscribe(self, uuid, ws): + def subscribe(self, uuid, websocket): """Add a websocket to the list of subscribers. Args: uuid (str): a unique identifier for the websocket. - ws: the websocket to publish information. + websocket: the websocket to publish information. """ - self.subscribers[uuid] = ws + self.subscribers[uuid] = websocket @asyncio.coroutine def publish(self): @@ -47,8 +47,8 @@ class Dispatcher: event = yield from self.event_source.get() if event == POISON_PILL: return - for uuid, ws in self.subscribers.items(): - ws.send_str(event) + for uuid, websocket in self.subscribers.items(): + websocket.send_str(event) @asyncio.coroutine @@ -56,20 +56,20 @@ def websocket_handler(request): """Handle a new socket connection.""" logger.debug('New websocket connection.') - ws = web.WebSocketResponse() - yield from ws.prepare(request) + websocket = web.WebSocketResponse() + yield from websocket.prepare(request) uuid = uuid4() - request.app['dispatcher'].subscribe(uuid, ws) + request.app['dispatcher'].subscribe(uuid, websocket) while True: # Consume input buffer - msg = yield from ws.receive() + msg = yield from websocket.receive() if msg.type == aiohttp.WSMsgType.ERROR: - logger.debug('Websocket exception: {}'.format(ws.exception())) + logger.debug('Websocket exception: %s', websocket.exception()) return -def init_app(event_source, loop=None): +def init_app(event_source, *, loop=None): 
"""Init the application server. Return: @@ -87,17 +87,33 @@ def init_app(event_source, loop=None): return app -@asyncio.coroutine -def constant_event_source(event_source): - while True: - yield from asyncio.sleep(1) - yield from event_source.put('meow') +def start(event_source, *, loop=None): + """Create and start the WebSocket server.""" + + if not loop: + loop = asyncio.get_event_loop() + + app = init_app(event_source, loop=loop) + aiohttp.web.run_app(app, port=9985) + + +def test_websocket_server(): + """Set up a server and output a message every second. + Used for testing purposes.""" + + @asyncio.coroutine + def constant_event_source(event_source): + """Put a message in ``event_source`` every second.""" + + while True: + yield from asyncio.sleep(1) + yield from event_source.put('meow') + + loop = asyncio.get_event_loop() + event_source = asyncio.Queue() + loop.create_task(constant_event_source(event_source)) + start(event_source, loop=loop) if __name__ == '__main__': - loop = asyncio.get_event_loop() - event_source = asyncio.Queue() - - loop.create_task(constant_event_source(event_source)) - app = init_app(event_source, loop=loop) - aiohttp.web.run_app(app, port=9985) + test_websocket_server() From f23faaa65fa1bdbe487266432faea9e4331cecae Mon Sep 17 00:00:00 2001 From: vrde Date: Fri, 7 Apr 2017 09:16:22 +0200 Subject: [PATCH 050/150] Add WebSocket server --- bigchaindb/pipelines/election.py | 2 +- bigchaindb/processes.py | 12 +- bigchaindb/web/views/base.py | 2 +- bigchaindb/web/websocket_server.py | 103 ++++++++++++---- setup.py | 1 + tests/web/test_info.py | 2 +- tests/web/test_websocket_server.py | 183 +++++++++++++++++++++++++---- 7 files changed, 246 insertions(+), 59 deletions(-) diff --git a/bigchaindb/pipelines/election.py b/bigchaindb/pipelines/election.py index 8f3116cc..fc7cb077 100644 --- a/bigchaindb/pipelines/election.py +++ b/bigchaindb/pipelines/election.py @@ -81,7 +81,7 @@ class Election: elif result['status'] == self.bigchain.BLOCK_VALID: 
event_type = EventTypes.BLOCK_VALID - event = Event(event_type, {'block_id': block_id}) + event = Event(event_type, self.bigchain.get_block(block_id)) self.event_handler.put_event(event) diff --git a/bigchaindb/processes.py b/bigchaindb/processes.py index 5194c05a..205cdd3c 100644 --- a/bigchaindb/processes.py +++ b/bigchaindb/processes.py @@ -3,9 +3,8 @@ import multiprocessing as mp import bigchaindb from bigchaindb.pipelines import vote, block, election, stale -from bigchaindb.pipelines.events_consumer_example import events_consumer from bigchaindb.events import setup_events_queue -from bigchaindb.web import server +from bigchaindb.web import server, websocket_server logger = logging.getLogger(__name__) @@ -52,10 +51,11 @@ def start(): p_webapi = mp.Process(name='webapi', target=app_server.run) p_webapi.start() - # start the example events consumer - logger.info('Starting the events consumer example') - p_events_consumer = events_consumer(events_queue) - p_events_consumer.start() + logger.info('WebSocket server started') + p_websocket_server = mp.Process(name='ws', + target=websocket_server.start, + args=(events_queue,)) + p_websocket_server.start() # start message logger.info(BANNER.format(bigchaindb.config['server']['bind'])) diff --git a/bigchaindb/web/views/base.py b/bigchaindb/web/views/base.py index 5a0ec97b..5ab409b0 100644 --- a/bigchaindb/web/views/base.py +++ b/bigchaindb/web/views/base.py @@ -25,4 +25,4 @@ def base_url(): def base_ws_uri(): """Base websocket uri.""" - return '%s://%s/' % ('ws', request.environ['HTTP_HOST']) + return 'ws://localhost:9985/' diff --git a/bigchaindb/web/websocket_server.py b/bigchaindb/web/websocket_server.py index 6915d54a..dc320754 100644 --- a/bigchaindb/web/websocket_server.py +++ b/bigchaindb/web/websocket_server.py @@ -1,15 +1,64 @@ """WebSocket server for the BigchainDB Event Stream API.""" +# NOTE +# +# This module contains some functions and utilities that might belong to other +# modules. 
"""WebSocket server for the BigchainDB Event Stream API."""

# NOTE
#
# This module contains some functions and utilities that might belong to
# other modules. For now, everything is kept here because it is needed only
# here. When these functionalities are needed elsewhere, new modules can be
# created and things organized in a better way.

import json
import asyncio
import logging
import threading
from uuid import uuid4

import aiohttp
from aiohttp import web

from bigchaindb.events import EventTypes


logger = logging.getLogger(__name__)

# Sentinel string; putting it on the event source terminates ``publish``.
POISON_PILL = 'POISON_PILL'
EVENTS_ENDPOINT = '/api/v1/streams/'


def _put_into_capped_queue(queue, value):
    """Put a new item in a capped queue.

    If the queue reached its limit, drop the oldest item to make room for
    the new one (that is the purpose of a capped queue).

    Args:
        queue (asyncio.Queue): a bounded queue.
        value: the value to put.
    """
    while True:
        try:
            queue.put_nowait(value)
            return
        except asyncio.QueueFull:
            # Discard the oldest element and retry.
            queue.get_nowait()


def _multiprocessing_to_asyncio(in_queue, out_queue, loop):
    """Bridge between a synchronous multiprocessing queue and an
    asynchronous asyncio queue. Runs forever; meant for a daemon thread.

    Args:
        in_queue (multiprocessing.Queue): input queue.
        out_queue (asyncio.Queue): output (capped) queue.
        loop: the event loop ``out_queue`` belongs to.
    """
    while True:
        value = in_queue.get()
        # The asyncio queue is not thread-safe; hand the item over on the
        # loop's own thread.
        loop.call_soon_threadsafe(_put_into_capped_queue, out_queue, value)


class Dispatcher:
    """Dispatch events to websockets.

    This class implements a simple publish/subscribe pattern.
    """

    def __init__(self, event_source):
        """Create a new instance.

        Args:
            event_source: a queue of events; elements are either strings
                or :class:`bigchaindb.events.Event` instances.
        """
        self.event_source = event_source
        self.subscribers = {}

    def subscribe(self, uuid, websocket):
        """Add a websocket to the list of subscribers.

        Args:
            uuid (str): a unique identifier for the websocket.
            websocket: the websocket to publish information.
        """
        self.subscribers[uuid] = websocket

    def unsubscribe(self, uuid):
        """Remove the websocket registered under ``uuid``, if any.

        BUG FIX: the handler previously had no way to deregister a closed
        connection, so ``publish`` kept sending to dead websockets.
        """
        self.subscribers.pop(uuid, None)

    @asyncio.coroutine
    def publish(self):
        """Publish new events to the subscribers.

        Terminates when ``POISON_PILL`` is received.
        """
        while True:
            event = yield from self.event_source.get()
            str_buffer = []

            if event == POISON_PILL:
                return

            if isinstance(event, str):
                str_buffer.append(event)

            elif event.type == EventTypes.BLOCK_VALID:
                block = event.data
                for tx in block['block']['transactions']:
                    # CREATE transactions are their own asset; transfers
                    # reference the original asset id.
                    asset_id = tx['id'] if tx['operation'] == 'CREATE' else tx['asset']['id']
                    data = {'blockid': block['id'],
                            'assetid': asset_id,
                            'txid': tx['id']}
                    str_buffer.append(json.dumps(data))

            for _, websocket in self.subscribers.items():
                for str_item in str_buffer:
                    websocket.send_str(str_item)


@asyncio.coroutine
def websocket_handler(request):
    """Handle a new socket connection."""

    logger.debug('New websocket connection.')
    websocket = web.WebSocketResponse()
    yield from websocket.prepare(request)
    uuid = uuid4()
    dispatcher = request.app['dispatcher']
    dispatcher.subscribe(uuid, websocket)

    try:
        while True:
            # Consume input buffer; we only care about connection state.
            msg = yield from websocket.receive()
            if msg.type == aiohttp.WSMsgType.ERROR:
                logger.debug('Websocket exception: %s',
                             websocket.exception())
                break
            # BUG FIX: the original only returned on ERROR, so a normally
            # closed socket made this loop spin forever on CLOSED messages.
            if msg.type in (aiohttp.WSMsgType.CLOSE,
                            aiohttp.WSMsgType.CLOSED):
                break
    finally:
        # BUG FIX: deregister so the dispatcher stops publishing to a
        # dead connection.
        dispatcher.unsubscribe(uuid)


def init_app(event_source, *, loop=None):
    """Init the application server.

    Args:
        event_source (asyncio.Queue): the queue events are read from.
        loop: the event loop the dispatcher is scheduled on.

    Return:
        An aiohttp application.
    """
    dispatcher = Dispatcher(event_source)

    # Schedule the dispatcher
    loop.create_task(dispatcher.publish())

    app = web.Application(loop=loop)
    app['dispatcher'] = dispatcher
    app.router.add_get(EVENTS_ENDPOINT, websocket_handler)
    return app


def start(sync_event_source, loop=None):
    """Create and start the WebSocket server.

    Args:
        sync_event_source (multiprocessing.Queue): the queue the rest of
            the node publishes events to.
        loop: optional event loop; defaults to ``asyncio.get_event_loop()``.
    """
    if not loop:
        loop = asyncio.get_event_loop()

    # Capped queue: if consumers are slow the oldest events are dropped.
    event_source = asyncio.Queue(maxsize=1024, loop=loop)

    bridge = threading.Thread(target=_multiprocessing_to_asyncio,
                              args=(sync_event_source, event_source, loop),
                              daemon=True)
    bridge.start()

    app = init_app(event_source, loop=loop)
    aiohttp.web.run_app(app, port=9985)
- Used for testing purposes.""" - - @asyncio.coroutine - def constant_event_source(event_source): - """Put a message in ``event_source`` every second.""" - - while True: - yield from asyncio.sleep(1) - yield from event_source.put('meow') - - loop = asyncio.get_event_loop() - event_source = asyncio.Queue() - loop.create_task(constant_event_source(event_source)) - start(event_source, loop=loop) - - -if __name__ == '__main__': - test_websocket_server() diff --git a/setup.py b/setup.py index ee8871d4..45d6f04f 100644 --- a/setup.py +++ b/setup.py @@ -77,6 +77,7 @@ install_requires = [ 'multipipes~=0.1.0', 'jsonschema~=2.5.1', 'pyyaml~=3.12', + 'aiohttp~=2.0', ] setup( diff --git a/tests/web/test_info.py b/tests/web/test_info.py index 93e14cbd..4dc60168 100644 --- a/tests/web/test_info.py +++ b/tests/web/test_info.py @@ -31,6 +31,6 @@ def test_api_v1_endpoint(client): 'self': 'http://localhost/api/v1/', 'statuses': 'http://localhost/api/v1/statuses/', 'transactions': 'http://localhost/api/v1/transactions/', - 'streams_v1': 'ws://localhost/api/v1/streams/', + 'streams_v1': 'ws://localhost:9985/api/v1/streams/', } } diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index 382a20f0..b205fb25 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -1,6 +1,23 @@ +import json +import random + import pytest import asyncio +from bigchaindb.models import Transaction + + +def create_block(b, total=1): + transactions = [ + Transaction.create( + [b.me], + [([b.me], 1)], + metadata={'msg': random.random()}, + ).sign([b.me_private]) + for _ in range(total) + ] + return b.create_block(transactions) + class MockWebSocket: def __init__(self): @@ -11,39 +28,100 @@ class MockWebSocket: @asyncio.coroutine -@pytest.mark.skipif(reason='This test raises a RuntimeError, dunno how to solve it now.') -def test_dispatcher(loop): - from bigchaindb.web.websocket_server import Dispatcher, POISON_PILL +def 
test_bridge_sync_async_queue(loop): + import queue + import threading + from bigchaindb.web.websocket_server import _multiprocessing_to_asyncio - ws0 = MockWebSocket() - ws1 = MockWebSocket() + sync_queue = queue.Queue() + async_queue = asyncio.Queue(loop=loop) - event_source = asyncio.Queue(loop=loop) - dispatcher = Dispatcher(event_source) + bridge = threading.Thread(target=_multiprocessing_to_asyncio, + args=(sync_queue, async_queue, loop), + daemon=True) + bridge.start() - dispatcher.subscribe(0, ws0) - dispatcher.subscribe(1, ws1) + sync_queue.put('fahren') + sync_queue.put('auf') + sync_queue.put('der') + sync_queue.put('Autobahn') - yield from event_source.put('hack') - yield from event_source.put('the') + result = yield from async_queue.get() + assert result == 'fahren' - yield from event_source.put('planet!') - yield from event_source.put(POISON_PILL) + result = yield from async_queue.get() + assert result == 'auf' - loop.run_until_complete(dispatcher.publish()) + result = yield from async_queue.get() + assert result == 'der' - assert ws0.received == ['hack', 'the', 'planet!'] - assert ws1.received == ['planet!'] + result = yield from async_queue.get() + assert result == 'Autobahn' + + assert async_queue.qsize() == 0 @asyncio.coroutine -def test_websocket(test_client, loop): - from bigchaindb.web.websocket_server import init_app, POISON_PILL +def test_put_into_capped_queue(loop): + from bigchaindb.web.websocket_server import _put_into_capped_queue + q = asyncio.Queue(maxsize=2, loop=loop) + + _put_into_capped_queue(q, 'Friday') + assert q._queue[0] == 'Friday' + + _put_into_capped_queue(q, "I'm") + assert q._queue[0] == 'Friday' + assert q._queue[1] == "I'm" + + _put_into_capped_queue(q, 'in') + assert q._queue[0] == "I'm" + assert q._queue[1] == 'in' + + _put_into_capped_queue(q, 'love') + assert q._queue[0] == 'in' + assert q._queue[1] == 'love' + + +@asyncio.coroutine +def test_capped_queue(loop): + import queue + import threading + import time + from 
bigchaindb.web.websocket_server import _multiprocessing_to_asyncio + + sync_queue = queue.Queue() + async_queue = asyncio.Queue(maxsize=2, loop=loop) + + bridge = threading.Thread(target=_multiprocessing_to_asyncio, + args=(sync_queue, async_queue, loop), + daemon=True) + bridge.start() + + sync_queue.put('we') + sync_queue.put('are') + sync_queue.put('the') + sync_queue.put('robots') + + # Wait until the thread processes all the items + time.sleep(1) + + result = yield from async_queue.get() + assert result == 'the' + + result = yield from async_queue.get() + assert result == 'robots' + + assert async_queue.qsize() == 0 + + +@asyncio.coroutine +def test_websocket_string_event(test_client, loop): + from bigchaindb.web.websocket_server import init_app, POISON_PILL, EVENTS_ENDPOINT event_source = asyncio.Queue(loop=loop) app = init_app(event_source, loop=loop) client = yield from test_client(app) - ws = yield from client.ws_connect('/') + ws = yield from client.ws_connect(EVENTS_ENDPOINT) yield from event_source.put('hack') yield from event_source.put('the') @@ -62,15 +140,72 @@ def test_websocket(test_client, loop): @asyncio.coroutine -@pytest.mark.skipif(reason="Still don't understand how to trigger custom errors.") -def test_websocket_error(test_client, loop): - from bigchaindb.web.websocket_server import init_app, POISON_PILL +def test_websocket_block_event(b, test_client, loop): + from bigchaindb import events + from bigchaindb.web.websocket_server import init_app, POISON_PILL, EVENTS_ENDPOINT event_source = asyncio.Queue(loop=loop) app = init_app(event_source, loop=loop) client = yield from test_client(app) - ws = yield from client.ws_connect('/') + ws = yield from client.ws_connect(EVENTS_ENDPOINT) + block = create_block(b, 10).to_dict() + block_event = events.Event(events.EventTypes.BLOCK_VALID, block) - yield from ws.close() + yield from event_source.put(block_event) + + for tx in block['block']['transactions']: + result = yield from ws.receive() + 
json_result = json.loads(result.data) + assert json_result['txid'] == tx['id'] + # Since the transactions are all CREATEs, asset id == transaction id + assert json_result['assetid'] == tx['id'] + assert json_result['blockid'] == block['id'] yield from event_source.put(POISON_PILL) + + +@pytest.mark.skip('Processes are not stopping properly, and the whole test suite would hang') +@pytest.mark.genesis +def test_integration_from_webapi_to_websocket(monkeypatch, client, loop): + # XXX: I think that the `pytest-aiohttp` plugin is sparkling too much + # magic in the `asyncio` module: running this test without monkey-patching + # `asycio.get_event_loop` (and without the `loop` fixture) raises a: + # RuntimeError: There is no current event loop in thread 'MainThread'. + # + # That's pretty weird because this test doesn't use the pytest-aiohttp + # plugin explicitely. + monkeypatch.setattr('asyncio.get_event_loop', lambda: loop) + + import json + import random + import aiohttp + + from bigchaindb.common import crypto + from bigchaindb import processes + from bigchaindb.models import Transaction + + # Start BigchainDB + processes.start() + + loop = asyncio.get_event_loop() + + import time + time.sleep(1) + + ws_url = client.get('http://localhost:9984/api/v1/').json['_links']['streams_v1'] + + # Connect to the WebSocket endpoint + session = aiohttp.ClientSession() + ws = loop.run_until_complete(session.ws_connect(ws_url)) + + # Create a keypair and generate a new asset + user_priv, user_pub = crypto.generate_key_pair() + asset = {'random': random.random()} + tx = Transaction.create([user_pub], [([user_pub], 1)], asset=asset) + tx = tx.sign([user_priv]) + # Post the transaction to the BigchainDB Web API + client.post('/api/v1/transactions/', data=json.dumps(tx.to_dict())) + + result = loop.run_until_complete(ws.receive()) + json_result = json.loads(result.data) + assert json_result['txid'] == tx.id From cf006e34a5135ef433b204530abd1c5a1d605003 Mon Sep 17 00:00:00 2001 From: 
Sylvain Bellemare Date: Thu, 6 Apr 2017 15:58:41 +0200 Subject: [PATCH 051/150] Make the keyword argument a keyword-only argument As per PEP 3102. This helps making the code clearer. --- bigchaindb/web/server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigchaindb/web/server.py b/bigchaindb/web/server.py index b1525f9f..6604a177 100644 --- a/bigchaindb/web/server.py +++ b/bigchaindb/web/server.py @@ -22,7 +22,7 @@ class StandaloneApplication(gunicorn.app.base.BaseApplication): - http://docs.gunicorn.org/en/latest/custom.html """ - def __init__(self, app, options=None): + def __init__(self, app, *, options=None): '''Initialize a new standalone application. Args: @@ -91,5 +91,5 @@ def create_server(settings): settings['logger_class'] = 'bigchaindb.log.loggers.HttpServerLogger' app = create_app(debug=settings.get('debug', False), threads=settings['threads']) - standalone = StandaloneApplication(app, settings) + standalone = StandaloneApplication(app, options=settings) return standalone From c64a35c362c2dd71fddbeb59c7fcfc24e88cf66b Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Thu, 6 Apr 2017 16:01:42 +0200 Subject: [PATCH 052/150] Use new super syntax as per PEP 3135 --- bigchaindb/web/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigchaindb/web/server.py b/bigchaindb/web/server.py index 6604a177..46495368 100644 --- a/bigchaindb/web/server.py +++ b/bigchaindb/web/server.py @@ -32,7 +32,7 @@ class StandaloneApplication(gunicorn.app.base.BaseApplication): ''' self.options = options or {} self.application = app - super(StandaloneApplication, self).__init__() + super().__init__() def load_config(self): config = dict((key, value) for key, value in self.options.items() From d260e16f117a2bfc75a0fc7f03b325e802978ba2 Mon Sep 17 00:00:00 2001 From: vrde Date: Fri, 7 Apr 2017 10:51:00 +0200 Subject: [PATCH 053/150] Add configuration for websocket server --- bigchaindb/__init__.py | 4 ++++ 
bigchaindb/commands/bigchaindb.py | 4 ++++ bigchaindb/web/views/base.py | 5 ++++- bigchaindb/web/websocket_server.py | 5 ++++- 4 files changed, 16 insertions(+), 2 deletions(-) diff --git a/bigchaindb/__init__.py b/bigchaindb/__init__.py index 4c555e47..98e6b27b 100644 --- a/bigchaindb/__init__.py +++ b/bigchaindb/__init__.py @@ -59,6 +59,10 @@ config = { 'workers': None, # if none, the value will be cpu_count * 2 + 1 'threads': None, # if none, the value will be cpu_count * 2 + 1 }, + 'wsserver': { + 'host': os.environ.get('BIGCHAINDB_WSSERVER_HOST') or 'localhost', + 'port': int(os.environ.get('BIGCHAINDB_WSSERVER_PORT', 9985)), + }, 'database': _database_map[ os.environ.get('BIGCHAINDB_DATABASE_BACKEND', 'rethinkdb') ], diff --git a/bigchaindb/commands/bigchaindb.py b/bigchaindb/commands/bigchaindb.py index d4e37daa..a46019da 100644 --- a/bigchaindb/commands/bigchaindb.py +++ b/bigchaindb/commands/bigchaindb.py @@ -96,6 +96,10 @@ def run_configure(args, skip_if_exists=False): val = conf['server'][key] conf['server'][key] = input_on_stderr('API Server {}? (default `{}`): '.format(key, val), val) + for key in ('host', 'port'): + val = conf['wsserver'][key] + conf['wsserver'][key] = input_on_stderr('WebSocket Server {}? (default `{}`): '.format(key, val), val) + for key in database_keys: val = conf['database'][key] conf['database'][key] = input_on_stderr('Database {}? 
(default `{}`): '.format(key, val), val) diff --git a/bigchaindb/web/views/base.py b/bigchaindb/web/views/base.py index 5ab409b0..7b12c5bb 100644 --- a/bigchaindb/web/views/base.py +++ b/bigchaindb/web/views/base.py @@ -5,6 +5,9 @@ import logging from flask import jsonify, request +from bigchaindb import config + + logger = logging.getLogger(__name__) @@ -25,4 +28,4 @@ def base_url(): def base_ws_uri(): """Base websocket uri.""" - return 'ws://localhost:9985/' + return 'ws://{host}:{port}/'.format(**config['wsserver']) diff --git a/bigchaindb/web/websocket_server.py b/bigchaindb/web/websocket_server.py index dc320754..dad06b94 100644 --- a/bigchaindb/web/websocket_server.py +++ b/bigchaindb/web/websocket_server.py @@ -20,6 +20,7 @@ from uuid import uuid4 import aiohttp from aiohttp import web +from bigchaindb import config from bigchaindb.events import EventTypes @@ -167,4 +168,6 @@ def start(sync_event_source, loop=None): bridge.start() app = init_app(event_source, loop=loop) - aiohttp.web.run_app(app, port=9985) + aiohttp.web.run_app(app, + host=config['wsserver']['host'], + port=config['wsserver']['port']) From be763022ad7c448cdee4629e9e5f4565d35bd7ce Mon Sep 17 00:00:00 2001 From: vrde Date: Fri, 7 Apr 2017 14:07:05 +0200 Subject: [PATCH 054/150] Update documentation (tnx @ttmc) --- docs/server/source/drivers-clients/index.rst | 1 - docs/server/source/index.rst | 1 + .../{drivers-clients => }/websocket-event-stream-api.rst | 3 --- 3 files changed, 1 insertion(+), 4 deletions(-) rename docs/server/source/{drivers-clients => }/websocket-event-stream-api.rst (97%) diff --git a/docs/server/source/drivers-clients/index.rst b/docs/server/source/drivers-clients/index.rst index 704832c0..18894f60 100644 --- a/docs/server/source/drivers-clients/index.rst +++ b/docs/server/source/drivers-clients/index.rst @@ -15,7 +15,6 @@ community projects listed below. 
:maxdepth: 1 http-client-server-api - websocket-event-stream-api The Python Driver Transaction CLI diff --git a/docs/server/source/index.rst b/docs/server/source/index.rst index 6ac4b9f5..7a458934 100644 --- a/docs/server/source/index.rst +++ b/docs/server/source/index.rst @@ -11,6 +11,7 @@ BigchainDB Server Documentation nodes/index dev-and-test/index server-reference/index + websocket-event-stream-api drivers-clients/index clusters-feds/index data-models/index diff --git a/docs/server/source/drivers-clients/websocket-event-stream-api.rst b/docs/server/source/websocket-event-stream-api.rst similarity index 97% rename from docs/server/source/drivers-clients/websocket-event-stream-api.rst rename to docs/server/source/websocket-event-stream-api.rst index 22effbc1..88efb7bb 100644 --- a/docs/server/source/drivers-clients/websocket-event-stream-api.rst +++ b/docs/server/source/websocket-event-stream-api.rst @@ -1,9 +1,6 @@ The WebSocket Event Stream API ============================== -.. important:: - This is currently scheduled to be implemented in BigchainDB Server 0.10. - BigchainDB provides real-time event streams over the WebSocket protocol with the Event Stream API. 
From aeb8827e30bd313eee756b88318c6c5f69654d19 Mon Sep 17 00:00:00 2001 From: vrde Date: Fri, 7 Apr 2017 14:07:24 +0200 Subject: [PATCH 055/150] Use try..except..else --- bigchaindb/web/websocket_server.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bigchaindb/web/websocket_server.py b/bigchaindb/web/websocket_server.py index dad06b94..a725f9ee 100644 --- a/bigchaindb/web/websocket_server.py +++ b/bigchaindb/web/websocket_server.py @@ -43,9 +43,10 @@ def _put_into_capped_queue(queue, value): while True: try: queue.put_nowait(value) - return except asyncio.QueueFull: queue.get_nowait() + else: + return def _multiprocessing_to_asyncio(in_queue, out_queue, loop): From be3f62dd108f021ce64d8623ba3dad3aefbd9cd3 Mon Sep 17 00:00:00 2001 From: vrde Date: Fri, 7 Apr 2017 14:57:11 +0200 Subject: [PATCH 056/150] Update endpoints and docs --- bigchaindb/web/views/base.py | 2 +- bigchaindb/web/views/info.py | 5 +++-- bigchaindb/web/websocket_server.py | 2 +- docs/server/source/websocket-event-stream-api.rst | 7 ++++++- tests/test_config_utils.py | 8 ++++++++ tests/web/test_info.py | 2 +- 6 files changed, 20 insertions(+), 6 deletions(-) diff --git a/bigchaindb/web/views/base.py b/bigchaindb/web/views/base.py index 7b12c5bb..0c226d7d 100644 --- a/bigchaindb/web/views/base.py +++ b/bigchaindb/web/views/base.py @@ -28,4 +28,4 @@ def base_url(): def base_ws_uri(): """Base websocket uri.""" - return 'ws://{host}:{port}/'.format(**config['wsserver']) + return 'ws://{host}:{port}'.format(**config['wsserver']) diff --git a/bigchaindb/web/views/info.py b/bigchaindb/web/views/info.py index b35c6378..9b084ac5 100644 --- a/bigchaindb/web/views/info.py +++ b/bigchaindb/web/views/info.py @@ -6,6 +6,7 @@ from flask_restful import Resource import bigchaindb from bigchaindb.web.views.base import base_url, base_ws_uri from bigchaindb import version +from bigchaindb.web.websocket_server import EVENTS_ENDPOINT class RootIndex(Resource): @@ -30,7 +31,7 @@ class 
RootIndex(Resource): class ApiV1Index(Resource): def get(self): api_root = base_url() + 'api/v1/' - websocket_root = base_ws_uri() + 'api/v1/' + websocket_root = base_ws_uri() + EVENTS_ENDPOINT docs_url = [ 'https://docs.bigchaindb.com/projects/server/en/v', version.__version__, @@ -43,6 +44,6 @@ class ApiV1Index(Resource): 'statuses': api_root + 'statuses/', 'transactions': api_root + 'transactions/', # TODO: The version should probably not be hardcoded - 'streams_v1': websocket_root + 'streams/', + 'streams_v1': websocket_root, }, }) diff --git a/bigchaindb/web/websocket_server.py b/bigchaindb/web/websocket_server.py index a725f9ee..ae7d6da2 100644 --- a/bigchaindb/web/websocket_server.py +++ b/bigchaindb/web/websocket_server.py @@ -26,7 +26,7 @@ from bigchaindb.events import EventTypes logger = logging.getLogger(__name__) POISON_PILL = 'POISON_PILL' -EVENTS_ENDPOINT = '/api/v1/streams/' +EVENTS_ENDPOINT = '/api/v1/streams/valid_tx' def _put_into_capped_queue(queue, value): diff --git a/docs/server/source/websocket-event-stream-api.rst b/docs/server/source/websocket-event-stream-api.rst index 88efb7bb..1dedc45f 100644 --- a/docs/server/source/websocket-event-stream-api.rst +++ b/docs/server/source/websocket-event-stream-api.rst @@ -1,6 +1,11 @@ The WebSocket Event Stream API ============================== +.. important:: + The WebSocket Event Stream runs on a different port than the Web API. The + default port for the Web API is `9984`, while the one for the Event Stream + is `9985`. + BigchainDB provides real-time event streams over the WebSocket protocol with the Event Stream API. 
@@ -25,7 +30,7 @@ response contains a ``streams_`` property in ``_links``:: { "_links": { - "streams_v1": "ws://example.com:9984/api/v1/streams/" + "streams_v1": "ws://example.com:9985/api/v1/streams/" } } diff --git a/tests/test_config_utils.py b/tests/test_config_utils.py index 04c70325..7ee74432 100644 --- a/tests/test_config_utils.py +++ b/tests/test_config_utils.py @@ -144,6 +144,8 @@ def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request): DATABASE_PORT = 4242 DATABASE_BACKEND = request.config.getoption('--database-backend') SERVER_BIND = '1.2.3.4:56' + WSSERVER_HOST = '1.2.3.4' + WSSERVER_PORT = 57 KEYRING = 'pubkey_0:pubkey_1:pubkey_2' file_config = { @@ -157,6 +159,8 @@ def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request): 'BIGCHAINDB_DATABASE_PORT': str(DATABASE_PORT), 'BIGCHAINDB_DATABASE_BACKEND': DATABASE_BACKEND, 'BIGCHAINDB_SERVER_BIND': SERVER_BIND, + 'BIGCHAINDB_WSSERVER_HOST': WSSERVER_HOST, + 'BIGCHAINDB_WSSERVER_PORT': WSSERVER_PORT, 'BIGCHAINDB_KEYRING': KEYRING}) import bigchaindb @@ -198,6 +202,10 @@ def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request): 'workers': None, 'threads': None, }, + 'wsserver': { + 'host': WSSERVER_HOST, + 'port': WSSERVER_PORT, + }, 'database': database, 'keypair': { 'public': None, diff --git a/tests/web/test_info.py b/tests/web/test_info.py index 4dc60168..eeb80f78 100644 --- a/tests/web/test_info.py +++ b/tests/web/test_info.py @@ -31,6 +31,6 @@ def test_api_v1_endpoint(client): 'self': 'http://localhost/api/v1/', 'statuses': 'http://localhost/api/v1/statuses/', 'transactions': 'http://localhost/api/v1/transactions/', - 'streams_v1': 'ws://localhost:9985/api/v1/streams/', + 'streams_v1': 'ws://localhost:9985/api/v1/streams/valid_tx', } } From da29bbc605caeb2f0ea9ab1ef712176b73c0ecee Mon Sep 17 00:00:00 2001 From: Rodolphe Marques Date: Fri, 7 Apr 2017 15:02:49 +0200 Subject: [PATCH 057/150] added tests for the events --- 
.../pipelines/events_consumer_example.py | 14 ----------- tests/pipelines/test_election.py | 24 +++++++++++++++++++ tests/test_events.py | 21 ++++++++++++++++ 3 files changed, 45 insertions(+), 14 deletions(-) delete mode 100644 bigchaindb/pipelines/events_consumer_example.py create mode 100644 tests/test_events.py diff --git a/bigchaindb/pipelines/events_consumer_example.py b/bigchaindb/pipelines/events_consumer_example.py deleted file mode 100644 index 7e833c82..00000000 --- a/bigchaindb/pipelines/events_consumer_example.py +++ /dev/null @@ -1,14 +0,0 @@ -import multiprocessing as mp - -from bigchaindb.events import EventHandler - - -def consume_events(events_queue): - event_handler = EventHandler(events_queue) - while True: - event = event_handler.get_event() - print('Event type: {} Event data: {}'.format(event.type, event.data)) - - -def events_consumer(events_queue): - return mp.Process(target=consume_events, args=(events_queue,)) diff --git a/tests/pipelines/test_election.py b/tests/pipelines/test_election.py index 3127dcaf..c3254601 100644 --- a/tests/pipelines/test_election.py +++ b/tests/pipelines/test_election.py @@ -199,3 +199,27 @@ def test_full_pipeline(b, user_pk): tx_from_block = set([tx.id for tx in invalid_block.transactions]) tx_from_backlog = set([tx['id'] for tx in list(query.get_stale_transactions(b.connection, 0))]) assert tx_from_block == tx_from_backlog + + +def test_handle_block_events(): + from bigchaindb.events import setup_events_queue, EventTypes + + events_queue = setup_events_queue() + e = election.Election(events_queue=events_queue) + block_id = 'a' * 64 + + assert events_queue.qsize() == 0 + + # no event should be emited in case a block is undecided + e.handle_block_events({'status': Bigchain.BLOCK_UNDECIDED}, block_id) + assert events_queue.qsize() == 0 + + # put an invalid block event in the queue + e.handle_block_events({'status': Bigchain.BLOCK_INVALID}, block_id) + event = e.event_handler.get_event() + assert event.type == 
EventTypes.BLOCK_INVALID + + # put an valid block event in the queue + e.handle_block_events({'status': Bigchain.BLOCK_VALID}, block_id) + event = e.event_handler.get_event() + assert event.type == EventTypes.BLOCK_VALID diff --git a/tests/test_events.py b/tests/test_events.py new file mode 100644 index 00000000..22369b51 --- /dev/null +++ b/tests/test_events.py @@ -0,0 +1,21 @@ +def tests_event_handler(): + from bigchaindb.events import (EventTypes, Event, EventHandler, + setup_events_queue) + + # create and event + event_data = {'msg': 'some data'} + event = Event(EventTypes.BLOCK_VALID, event_data) + # create the events queue + events_queue = setup_events_queue() + + # create event handler + event_handler = EventHandler(events_queue) + + # push and event to the queue + event_handler.put_event(event) + + # get the event from the queue + event_from_queue = event_handler.get_event() + + assert event_from_queue.type == event.type + assert event_from_queue.data == event.data From 85d5d085067ccc96c616bf7f3234a5aa1be61996 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Fri, 7 Apr 2017 15:40:17 +0200 Subject: [PATCH 058/150] Add tip on upgrading to CONTRIBUTING.md --- CONTRIBUTING.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index eedb866a..840a0895 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -145,6 +145,13 @@ Once you accept and submit the CLA, we'll email you with further instructions. ( Someone will then merge your branch or suggest changes. If we suggest changes, you won't have to open a new pull request, you can just push new code to the same branch (on `origin`) as you did before creating the pull request. +### Tip: Upgrading All BigchainDB Dependencies + +Over time, your versions of the Python packages used by BigchainDB will get out of date. 
You can upgrade them using: +```text +pip install --upgrade -e .[dev] +``` + ## Quick Links * [BigchainDB Community links](https://www.bigchaindb.com/community) From b4988b29e3cacbfcb6376c7225009100362af12c Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Mon, 10 Apr 2017 13:58:58 +0200 Subject: [PATCH 059/150] Moved HTTP API docs to a top-level section --- .gitignore | 2 +- .../generate_http_server_api_documentation.py | 2 +- docs/server/source/drivers-clients/index.rst | 3 +- .../http-client-server-api.rst | 38 +++++++++---------- docs/server/source/index.rst | 1 + 5 files changed, 23 insertions(+), 23 deletions(-) rename docs/server/source/{drivers-clients => }/http-client-server-api.rst (92%) diff --git a/.gitignore b/.gitignore index 7aba48d1..20d71296 100644 --- a/.gitignore +++ b/.gitignore @@ -78,7 +78,7 @@ ntools/one-m/ansible/ansible.cfg # Just in time documentation docs/server/source/schema -docs/server/source/drivers-clients/samples +docs/server/source/http-samples # Terraform state files # See https://stackoverflow.com/a/41482391 diff --git a/docs/server/generate_http_server_api_documentation.py b/docs/server/generate_http_server_api_documentation.py index ba082ba3..731bee2c 100644 --- a/docs/server/generate_http_server_api_documentation.py +++ b/docs/server/generate_http_server_api_documentation.py @@ -269,7 +269,7 @@ def main(): ctx['block_list'] = pretty_json(block_list) base_path = os.path.join(os.path.dirname(__file__), - 'source/drivers-clients/samples') + 'source/http-samples') if not os.path.exists(base_path): os.makedirs(base_path) diff --git a/docs/server/source/drivers-clients/index.rst b/docs/server/source/drivers-clients/index.rst index 704832c0..6eabb429 100644 --- a/docs/server/source/drivers-clients/index.rst +++ b/docs/server/source/drivers-clients/index.rst @@ -14,13 +14,12 @@ community projects listed below. .. 
toctree:: :maxdepth: 1 - http-client-server-api websocket-event-stream-api The Python Driver Transaction CLI -Community Driven Libraries and Tools +Community-Driven Libraries and Tools ------------------------------------ Please note that some of these projects may be work in progress, but may nevertheless be very useful. diff --git a/docs/server/source/drivers-clients/http-client-server-api.rst b/docs/server/source/http-client-server-api.rst similarity index 92% rename from docs/server/source/drivers-clients/http-client-server-api.rst rename to docs/server/source/http-client-server-api.rst index 39e4395e..957e6c8e 100644 --- a/docs/server/source/drivers-clients/http-client-server-api.rst +++ b/docs/server/source/http-client-server-api.rst @@ -22,7 +22,7 @@ or ``https://example.com:9984`` then you should get an HTTP response with something like the following in the body: -.. literalinclude:: samples/index-response.http +.. literalinclude:: http-samples/index-response.http :language: http @@ -35,7 +35,7 @@ or ``https://example.com:9984/api/v1/``, then you should get an HTTP response that allows you to discover the BigchainDB API endpoints: -.. literalinclude:: samples/api-index-response.http +.. literalinclude:: http-samples/api-index-response.http :language: http @@ -58,12 +58,12 @@ Transactions **Example request**: - .. literalinclude:: samples/get-tx-id-request.http + .. literalinclude:: http-samples/get-tx-id-request.http :language: http **Example response**: - .. literalinclude:: samples/get-tx-id-response.http + .. literalinclude:: http-samples/get-tx-id-response.http :language: http :resheader Content-Type: ``application/json`` @@ -110,12 +110,12 @@ Transactions **Example request**: - .. literalinclude:: samples/get-tx-by-asset-request.http + .. literalinclude:: http-samples/get-tx-by-asset-request.http :language: http **Example response**: - .. literalinclude:: samples/get-tx-by-asset-response.http + .. 
literalinclude:: http-samples/get-tx-by-asset-response.http :language: http :resheader Content-Type: ``application/json`` @@ -139,12 +139,12 @@ Transactions **Example request**: - .. literalinclude:: samples/post-tx-request.http + .. literalinclude:: http-samples/post-tx-request.http :language: http **Example response**: - .. literalinclude:: samples/post-tx-response.http + .. literalinclude:: http-samples/post-tx-response.http :language: http :resheader Content-Type: ``application/json`` @@ -227,12 +227,12 @@ Statuses **Example request**: - .. literalinclude:: samples/get-statuses-tx-request.http + .. literalinclude:: http-samples/get-statuses-tx-request.http :language: http **Example response**: - .. literalinclude:: samples/get-statuses-tx-valid-response.http + .. literalinclude:: http-samples/get-statuses-tx-valid-response.http :language: http :resheader Content-Type: ``application/json`` @@ -250,17 +250,17 @@ Statuses **Example request**: - .. literalinclude:: samples/get-statuses-block-request.http + .. literalinclude:: http-samples/get-statuses-block-request.http :language: http **Example response**: - .. literalinclude:: samples/get-statuses-block-invalid-response.http + .. literalinclude:: http-samples/get-statuses-block-invalid-response.http :language: http **Example response**: - .. literalinclude:: samples/get-statuses-block-valid-response.http + .. literalinclude:: http-samples/get-statuses-block-valid-response.http :language: http :resheader Content-Type: ``application/json`` @@ -298,12 +298,12 @@ Blocks **Example request**: - .. literalinclude:: samples/get-block-request.http + .. literalinclude:: http-samples/get-block-request.http :language: http **Example response**: - .. literalinclude:: samples/get-block-response.http + .. literalinclude:: http-samples/get-block-response.http :language: http @@ -353,12 +353,12 @@ Blocks **Example request**: - .. literalinclude:: samples/get-block-txid-request.http + .. 
literalinclude:: http-samples/get-block-txid-request.http :language: http **Example response**: - .. literalinclude:: samples/get-block-txid-response.http + .. literalinclude:: http-samples/get-block-txid-response.http :language: http :resheader Content-Type: ``application/json`` @@ -384,12 +384,12 @@ Votes **Example request**: - .. literalinclude:: samples/get-vote-request.http + .. literalinclude:: http-samples/get-vote-request.http :language: http **Example response**: - .. literalinclude:: samples/get-vote-response.http + .. literalinclude:: http-samples/get-vote-response.http :language: http :resheader Content-Type: ``application/json`` diff --git a/docs/server/source/index.rst b/docs/server/source/index.rst index 6ac4b9f5..018ad329 100644 --- a/docs/server/source/index.rst +++ b/docs/server/source/index.rst @@ -11,6 +11,7 @@ BigchainDB Server Documentation nodes/index dev-and-test/index server-reference/index + http-client-server-api drivers-clients/index clusters-feds/index data-models/index From 8964ba33b406dbaee3bd0effb4d5e3d6f88fa1b2 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Mon, 10 Apr 2017 15:14:18 +0200 Subject: [PATCH 060/150] updated links to the HTTP API docs in this repo --- bigchaindb/web/views/blocks.py | 4 +--- bigchaindb/web/views/info.py | 2 +- bigchaindb/web/views/statuses.py | 4 +--- bigchaindb/web/views/transactions.py | 4 +--- bigchaindb/web/views/votes.py | 4 +--- docs/root/source/index.rst | 2 +- docs/server/source/server-reference/configuration.md | 2 +- tests/web/test_info.py | 2 +- 8 files changed, 8 insertions(+), 16 deletions(-) diff --git a/bigchaindb/web/views/blocks.py b/bigchaindb/web/views/blocks.py index 7e840fe5..1ea1a28f 100644 --- a/bigchaindb/web/views/blocks.py +++ b/bigchaindb/web/views/blocks.py @@ -1,8 +1,6 @@ """This module provides the blueprint for the blocks API endpoints. 
-For more information please refer to the documentation on ReadTheDocs: - - https://docs.bigchaindb.com/projects/server/en/latest/drivers-clients/ - http-client-server-api.html +For more information please refer to the documentation: http://bigchaindb.com/http-api """ from flask import current_app from flask_restful import Resource, reqparse diff --git a/bigchaindb/web/views/info.py b/bigchaindb/web/views/info.py index 04a15749..02232d19 100644 --- a/bigchaindb/web/views/info.py +++ b/bigchaindb/web/views/info.py @@ -33,7 +33,7 @@ class ApiV1Index(Resource): docs_url = [ 'https://docs.bigchaindb.com/projects/server/en/v', version.__version__, - '/drivers-clients/http-client-server-api.html', + '/http-client-server-api.html', ] return { '_links': { diff --git a/bigchaindb/web/views/statuses.py b/bigchaindb/web/views/statuses.py index 39f880b1..a8186146 100644 --- a/bigchaindb/web/views/statuses.py +++ b/bigchaindb/web/views/statuses.py @@ -1,8 +1,6 @@ """This module provides the blueprint for the statuses API endpoints. -For more information please refer to the documentation on ReadTheDocs: - - https://docs.bigchaindb.com/projects/server/en/latest/drivers-clients/ - http-client-server-api.html +For more information please refer to the documentation: http://bigchaindb.com/http-api """ from flask import current_app from flask_restful import Resource, reqparse diff --git a/bigchaindb/web/views/transactions.py b/bigchaindb/web/views/transactions.py index 925aed7a..9f024f54 100644 --- a/bigchaindb/web/views/transactions.py +++ b/bigchaindb/web/views/transactions.py @@ -1,8 +1,6 @@ """This module provides the blueprint for some basic API endpoints. 
-For more information please refer to the documentation on ReadTheDocs: - - https://docs.bigchaindb.com/projects/server/en/latest/drivers-clients/ - http-client-server-api.html +For more information please refer to the documentation: http://bigchaindb.com/http-api """ import logging diff --git a/bigchaindb/web/views/votes.py b/bigchaindb/web/views/votes.py index 68265b40..45a86812 100644 --- a/bigchaindb/web/views/votes.py +++ b/bigchaindb/web/views/votes.py @@ -1,8 +1,6 @@ """This module provides the blueprint for the votes API endpoints. -For more information please refer to the documentation on ReadTheDocs: - - https://docs.bigchaindb.com/projects/server/en/latest/drivers-clients/ - http-client-server-api.html +For more information please refer to the documentation: http://bigchaindb.com/http-api """ from flask import current_app from flask_restful import Resource, reqparse diff --git a/docs/root/source/index.rst b/docs/root/source/index.rst index 003d07b3..1dd71003 100644 --- a/docs/root/source/index.rst +++ b/docs/root/source/index.rst @@ -53,7 +53,7 @@ At a high level, one can communicate with a BigchainDB cluster (set of nodes) us
Python Driver Docs diff --git a/docs/server/source/server-reference/configuration.md b/docs/server/source/server-reference/configuration.md index 4cd9e9d4..50003d10 100644 --- a/docs/server/source/server-reference/configuration.md +++ b/docs/server/source/server-reference/configuration.md @@ -124,7 +124,7 @@ If you used `bigchaindb -y configure mongodb` to create a default local config f ## server.bind, server.workers & server.threads -These settings are for the [Gunicorn HTTP server](http://gunicorn.org/), which is used to serve the [HTTP client-server API](../drivers-clients/http-client-server-api.html). +These settings are for the [Gunicorn HTTP server](http://gunicorn.org/), which is used to serve the [HTTP client-server API](../http-client-server-api.html). `server.bind` is where to bind the Gunicorn HTTP server socket. It's a string. It can be any valid value for [Gunicorn's bind setting](http://docs.gunicorn.org/en/stable/settings.html#bind). If you want to allow IPv4 connections from anyone, on port 9984, use '0.0.0.0:9984'. In a production setting, we recommend you use Gunicorn behind a reverse proxy server. If Gunicorn and the reverse proxy are running on the same machine, then use 'localhost:PORT' where PORT is _not_ 9984 (because the reverse proxy needs to listen on port 9984). Maybe use PORT=9983 in that case because we know 9983 isn't used. If Gunicorn and the reverse proxy are running on different machines, then use 'A.B.C.D:9984' where A.B.C.D is the IP address of the reverse proxy. There's [more information about deploying behind a reverse proxy in the Gunicorn documentation](http://docs.gunicorn.org/en/stable/deploy.html). (They call it a proxy.) 
diff --git a/tests/web/test_info.py b/tests/web/test_info.py index c55f467f..c0233159 100644 --- a/tests/web/test_info.py +++ b/tests/web/test_info.py @@ -23,7 +23,7 @@ def test_api_root_endpoint(client): def test_api_v1_endpoint(client): res = client.get('/api/v1') docs_url = ['https://docs.bigchaindb.com/projects/server/en/vtsttst', - '/drivers-clients/http-client-server-api.html', + '/http-client-server-api.html', ] assert res.json == { '_links': { From ed6c90b86365fe725ef43ac2984378498d3587ea Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 11 Apr 2017 10:45:09 +0200 Subject: [PATCH 061/150] renamed docs/server/source/nodes -> production-nodes --- .../cloud-deployment-templates/template-ansible.md | 2 +- docs/server/source/nodes/index.rst | 10 ---------- docs/server/source/production-nodes/index.rst | 10 ++++++++++ .../{nodes => production-nodes}/node-assumptions.md | 0 .../{nodes => production-nodes}/node-components.md | 0 .../{nodes => production-nodes}/node-requirements.md | 0 .../{nodes => production-nodes}/setup-run-node.md | 0 7 files changed, 11 insertions(+), 11 deletions(-) delete mode 100644 docs/server/source/nodes/index.rst create mode 100644 docs/server/source/production-nodes/index.rst rename docs/server/source/{nodes => production-nodes}/node-assumptions.md (100%) rename docs/server/source/{nodes => production-nodes}/node-components.md (100%) rename docs/server/source/{nodes => production-nodes}/node-requirements.md (100%) rename docs/server/source/{nodes => production-nodes}/setup-run-node.md (100%) diff --git a/docs/server/source/cloud-deployment-templates/template-ansible.md b/docs/server/source/cloud-deployment-templates/template-ansible.md index 666ad790..f296a2cf 100644 --- a/docs/server/source/cloud-deployment-templates/template-ansible.md +++ b/docs/server/source/cloud-deployment-templates/template-ansible.md @@ -81,4 +81,4 @@ where, as before, `` must be replaced. 
## Next Steps -You could make changes to the Ansible playbook (and the resources it uses) to make the node more production-worthy. See [the section on production node assumptions, components and requirements](../nodes/index.html). +You could make changes to the Ansible playbook (and the resources it uses) to make the node more production-worthy. See [the section on production node assumptions, components and requirements](../production-nodes/index.html). diff --git a/docs/server/source/nodes/index.rst b/docs/server/source/nodes/index.rst deleted file mode 100644 index 1c3671f0..00000000 --- a/docs/server/source/nodes/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -Production Node Assumptions, Components & Requirements -====================================================== - -.. toctree:: - :maxdepth: 1 - - node-assumptions - node-components - node-requirements - setup-run-node diff --git a/docs/server/source/production-nodes/index.rst b/docs/server/source/production-nodes/index.rst new file mode 100644 index 00000000..7b42cbaa --- /dev/null +++ b/docs/server/source/production-nodes/index.rst @@ -0,0 +1,10 @@ +Production Nodes +================ + +.. 
toctree:: + :maxdepth: 1 + + node-assumptions + node-components + node-requirements + setup-run-node diff --git a/docs/server/source/nodes/node-assumptions.md b/docs/server/source/production-nodes/node-assumptions.md similarity index 100% rename from docs/server/source/nodes/node-assumptions.md rename to docs/server/source/production-nodes/node-assumptions.md diff --git a/docs/server/source/nodes/node-components.md b/docs/server/source/production-nodes/node-components.md similarity index 100% rename from docs/server/source/nodes/node-components.md rename to docs/server/source/production-nodes/node-components.md diff --git a/docs/server/source/nodes/node-requirements.md b/docs/server/source/production-nodes/node-requirements.md similarity index 100% rename from docs/server/source/nodes/node-requirements.md rename to docs/server/source/production-nodes/node-requirements.md diff --git a/docs/server/source/nodes/setup-run-node.md b/docs/server/source/production-nodes/setup-run-node.md similarity index 100% rename from docs/server/source/nodes/setup-run-node.md rename to docs/server/source/production-nodes/setup-run-node.md From fb2d4b19a9ac826943b27cee76e6d15ed4c42f59 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 11 Apr 2017 10:59:56 +0200 Subject: [PATCH 062/150] bugfix: fixed ref to production-nodes/index in server index.rst --- docs/server/source/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/index.rst b/docs/server/source/index.rst index 6ac4b9f5..3de3cb5c 100644 --- a/docs/server/source/index.rst +++ b/docs/server/source/index.rst @@ -8,7 +8,7 @@ BigchainDB Server Documentation introduction quickstart cloud-deployment-templates/index - nodes/index + production-nodes/index dev-and-test/index server-reference/index drivers-clients/index From 392be982891083f9642d5fac54099c0c76106a92 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 11 Apr 2017 11:15:21 +0200 Subject: [PATCH 063/150] docs: updated 
Production Node Assumptions page --- .../source/production-nodes/node-assumptions.md | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/docs/server/source/production-nodes/node-assumptions.md b/docs/server/source/production-nodes/node-assumptions.md index 8275be32..9d52aa5a 100644 --- a/docs/server/source/production-nodes/node-assumptions.md +++ b/docs/server/source/production-nodes/node-assumptions.md @@ -1,13 +1,16 @@ # Production Node Assumptions -If you're not sure what we mean by a BigchainDB *node*, *cluster*, *consortium*, or *production node*, then see [the section in the Introduction where we defined those terms](../introduction.html#some-basic-vocabulary). +Be sure you know the key BigchainDB terminology: + +* [BigchainDB node, BigchainDB cluster and BigchainDB consortum](https://docs.bigchaindb.com/en/latest/terminology.html) +* [dev/test node, bare-bones node and production node](../introduction.html) We make some assumptions about production nodes: -1. **Each production node is set up and managed by an experienced professional system administrator (or a team of them).** - -2. Each production node in a cluster is managed by a different person or team. - -Because of the first assumption, we don't provide a detailed cookbook explaining how to secure a server, or other things that a sysadmin should know. (We do provide some [templates](../cloud-deployment-templates/index.html), but those are just a starting point.) +1. Production nodes use MongoDB, not RethinkDB. +1. Each production node is set up and managed by an experienced professional system administrator or a team of them. +1. Each production node in a cluster is managed by a different person or team. +You can use RethinkDB when building prototypes, but we don't advise or support using it in production. +We don't provide a detailed cookbook explaining how to secure a server, or other things that a sysadmin should know. 
(We do provide some [templates](../cloud-deployment-templates/index.html), but those are just a starting point.) From 6cc1e7559513fc6e0acb96cd417b125dd493cf66 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 11 Apr 2017 11:15:55 +0200 Subject: [PATCH 064/150] root docs: updated Terminology page --- docs/root/source/terminology.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/root/source/terminology.md b/docs/root/source/terminology.md index 025bea71..66375b38 100644 --- a/docs/root/source/terminology.md +++ b/docs/root/source/terminology.md @@ -1,21 +1,21 @@ # Terminology -There is some specialized terminology associated with BigchainDB. To get started, you should at least know what what we mean by a BigchainDB *node*, *cluster* and *consortium*. +There is some specialized terminology associated with BigchainDB. To get started, you should at least know the following: -## Node +## BigchainDB Node -A **BigchainDB node** is a machine or set of closely-linked machines running RethinkDB/MongoDB Server, BigchainDB Server, and related software. (A "machine" might be a bare-metal server, a virtual machine or a container.) Each node is controlled by one person or organization. +A **BigchainDB node** is a machine or set of closely-linked machines running RethinkDB/MongoDB Server, BigchainDB Server, and related software. Each node is controlled by one person or organization. -## Cluster +## BigchainDB Cluster -A set of BigchainDB nodes can connect to each other to form a **cluster**. Each node in the cluster runs the same software. A cluster contains one logical RethinkDB datastore. A cluster may have additional machines to do things such as cluster monitoring. +A set of BigchainDB nodes can connect to each other to form a **BigchainDB cluster**. Each node in the cluster runs the same software. A cluster contains one logical RethinkDB/MongoDB datastore. 
A cluster may have additional machines to do things such as cluster monitoring. -## Consortium +## BigchainDB Consortium -The people and organizations that run the nodes in a cluster belong to a **consortium** (i.e. another organization). A consortium must have some sort of governance structure to make decisions. If a cluster is run by a single company, then the "consortium" is just that company. +The people and organizations that run the nodes in a cluster belong to a **BigchainDB consortium** (i.e. another organization). A consortium must have some sort of governance structure to make decisions. If a cluster is run by a single company, then the "consortium" is just that company. **What's the Difference Between a Cluster and a Consortium?** From c3f173528f91547231f9fd2fb89f17e8f6769c0b Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 11 Apr 2017 13:23:10 +0200 Subject: [PATCH 065/150] updated docs page: Production Node Components --- .../server/source/_static/Node-components.png | Bin 83733 -> 36249 bytes .../production-nodes/node-components.md | 27 +++++++----------- 2 files changed, 10 insertions(+), 17 deletions(-) diff --git a/docs/server/source/_static/Node-components.png b/docs/server/source/_static/Node-components.png index cd08607313fa2e20f77e8fd9c955da0951d98da3..427bc4c6cbdc8a10ed5d57d7712f113bd7646fd8 100644 GIT binary patch literal 36249 zcmeFZg;$kd6fJtB1qlHGX%PWQrMtVkyBq0l0cjEGF6l05C8fL0p+ma6-}d+J9e2Dp z-W%io0hi(M$T{EnV(+!rnrqIv4Of(xKu0A+g+L(aQj(&|5D1(H1oG?v1rEG}*zg_v z4S{zSmQqDQL4mC*tbku%yNGGJs5qFrxEnc{K`iVY?93RPO`Ock?42zgT#n#7_#qH- zh?J<1sz>_%qQ9=()f42{L{GZ2CxM8=9%eR+E2cEntih(_@^-33&D1PcSx$+LP9=+3 z_OEtIs`e_oSn;3cavQx;$GFN!8%9RGrk_85I;I|xS@0a%3*Dii)4M zzpg|H<|tY=QaO|u2$+tM2k%Ul<9U04Ygx8%gG0TF{h+C?^th;PzCPzj=vq}%qeGq6 zxSn+wLW4?A(Yy}IOIAPin2Pvno(i8mEDw$jzK!pPR^Wo0(3ju|RHwn^=H#w;pzSfL z&>3BBgOPCkbhv6K(E<=Psvc=H?@x;cZe*EAUT0P@y^}e1-b>3KMFUjjrRVAKv z69cz!&ZwRIA|WBc&h*~TAfg5(Xvkpyu5QOuN-Dg84m?%1`?zrS@C{e&=FaTd)pQ-A 
z%sqw^C$s{bK43`p&NGyq8`8=M@%iUP*~Ub2O- zEdPx~c zAs*^6c+yuJzesz$U%y%~`C3B&Ci!Z+EznJ5F5-yYpRFq~MMu|lcVKaO_ua`8?7jad zhn@+~wbG<%jG0-oI>TxXvxPgo)}0h+>eOe~ShD^W4-#+>kVzT%gK?3&K}_d`wtMCI zJN{H&*MN$pl{X?uw$f_ODV4VZvboKuR13DoC&p>T43ejsj zMo*g>U?{VNYuDuIN-S1q?C@ver?{52L)Ulu9N%KR!<&>6&u=pPc++>3ri4o;prV6A z_VWIEysIvH96T!c^WH#g;ApvvAKGF=7epR*TGMDBF}<=5hS|g7d_vIsM}zsPYUyv0 z(1v?Ux%1uvzpT<%{Jysv$G>&^FFL9k(`Lea>)Abyzvhs(2a@@*-oWzFZ4w!N8<%g~ z8iZl~t{NYz0HsJZi!%gAPXXo%=l4?$M1K3oI0Sp`3&CWWR@EtV`LEp9P3f8PiQHj(z~{2 zHod=Ce{ngySNeE>Xa5RZV5NR<8ujAh?l1pOaHLl)IFdwf!5`Z8a8(kKav4*ysgT9n zKfDl5E_ps{rI6LIw88eH-9STS__|xom2nz`g*@i%zm$;G zhFQu-^cVznV!3vyh~*h|XCH?1ON9BmhenTnGU}#5Jl}8iHl^ZwFTnTfret*L-ipUS z+pi4&SR5`@T(a1vfI+A4VB+a7|LJ}eGblW&kmNf)(iEkj`vb!S?674u4FZe=)vY=f`BwzQ3xY+^Za^FmrV* z=N+1svEy2;39APmk42tOeEgSgvmFBdpQ+&gm3>j_z(D~>R&U1GIr8}YCrOW6rNYS{ z{#WifSP#SWe*D2F=(6+l;sugb%fI~Kh0lPXJ=w1^l2vhVb7!snkiw0IhGsVlW=ZfU zH$<>yEJN+5K+C^emS84!NT2w>(wtr`cm5@1V=S9B=KXn>^X8;jB%W2vzi|KWq=j>zqshZp^!!RSZ|*>cGrR=ZB>PFM+tfBh1K)x$&Ta+e2_ znCkVyIAkWh@0h<_pV7F~)`Wa7u%cld%P(>x_^ z1QNSh{D6vP*E9AYa*&~XKOH>_BClz=sxW9dnN6_a=;m7U-D>A#aFg#%8gyIHM9UtJXmi;N+7 z@p^;Py`f9@uc*qcjlyR^v?aM=%d?wWd~5$o9(COukNo4)`>C$3u7w3%*ah5*-c%_( zLqx7bMC=CHpUV4sC*F*|oJl(?ylK2%s(qD^K)isxS^MvJiDYyYW1b3qvs<-b04d^% z4y2zQW9+mAHdGxe4uxKbe^+8GYr=3&xeY$&aeN8V#|q;NS|kQ;5;5(?>3Kv<4Ano- z^%8RQd5OT%#@f0)Dn^-|Plt$z+pc%2LKhX_6l>B2|KqdRJFcd%^z`&j;jV0f*ATfq zy**1Yig6zf4FiEJ1^A@Dr2U7+-w`2othU!xYH`2K?z>c*PbgwT=+w);_M83#cD`B; z&+C+nUBnbu`Yzqrhjt`N{2V`-TepZq&dr*eo3j{owEysNx=b0Ws7xNf8uMsMz~hW@ zSP6^`Y@SxGty85buoL7M_?66cdA}+1M9j7TljBF)yf5f!Z+21n!~BOXqXvu(E*o>g zDZ9L=DA|_(rZ;0xq6Aev)PHGLTN4r>9w!ldyIdhx-9Mg%>`zKciXo>1X-;B?KVZD9 z2-tkabLrQ@e==y)6=qdU1qs+;VZi*p$;$s#TT3fKl)_E}IiOvH=O6Gi8%%nrvbcK8 zyW{@m4hI>7KS$o_E40_dTN^d}P*Yo5ub0s%;^xNgqmk{aJ*O&270;l%!SD29XRB-e z12LMQ1B*pqUU83vjYiqJQ`76$uMvc^pkDbz{iZdq=+q?(CR$s0?Pd|rt@##n%PT9h z?D>5E+U^a*aNZ2SjqyZ(BzL3wYaDUPo05=FU`R+vdAXgdD_er}w41A7 zj{ 
zJ8ejciXzlq|F*f%&3@oM*_+OJGvBdaZ8ox7odySKreJLf;Wr>^4}J-^{Rcyo*&NG3o1SYg-!{W*_XH6XS}R7#pLa zp|LPC6TAp*D1~2=PPH3HMF?UuvYDuDaJ7%;0R?ci$@ZkXpUlhCvyG=X4*@O^bPTdo zrRC)zG-%v%!GY?u3C-1bwvUV_4<}TqEohMJK2fUA7qgRE)y5$Wbh(Z+5kK>Shg$VQ&ei14EEG~tl~h%~H_%Pkfhe}K zwdLjIm6nltGZ1t*J=`H~#yo$zIq3K2F4Gi6j}fffxaT=C8NQ1P3rm+fKd+Y{M;_uB z*<6bYxpK0%UU7|g9^B>LOlXj8JFq^e@0r{-2NE1EtjXqxAm;&xw&M|^xxvIEM)pD+ z`Z$m=8@bx*WJ~GgCT9yv%c6$8)7}jyPY;iug5;b|8z`m>NQBcF4AAS(mFi^}0lblr zRvz`fvW4_kEvk3`N*s_bksy4*Icb{2xDlgjPsO zyvL20F4YM8{CQ(@^Bon{Wr5aY_UvwhU<;LtoTUrO5G&G(-a5|4h0SIqT%aKAl_wv# zSxd1i1Sk%*Jg*OZR-e;hV(Bx;5(UUvk3#LRMhDPtLW6TH!MIgcI*5z+#*pxwpP$$1 zy#={AQ;>-CCFzBlN24w(YaOtNXAsQd*E zN&*KB_xImeGW+JfZ_}}chj_@K6Y<;r7M|tvzO-zicnWBHCU`VHEqLd=GTSV$!w2XW$wbyiMnstV*`nK6JcAJ6Uo{~jB5L>7BmDccRE7ciEwATN-fdA0 zTnly>kBH%UkaZ z5=6OJl^ze&9CgO{eoE};elH%IClzsDMfSAO=6!MN-#k_C@RZv&%`Pu3eM=Ng5lcGV z_TT-upm4`i`FC%fu{oabH*f?f7lV|T8y+q-C`zWJq~zxIB+GVjdfI8w@iJT#`_-$E z4)%{9cQ(BR&htT$OG{36dBB&IaA6+9!^7+JF@Xq{{*dPoaNSjHG-x zeC@e&i5t>4~CDu5?jyk3_dHk3s zUBk!c97QCewgn6Yx9qQGl=!_bkC)nra_Qblc)mAqxHvVy{yCtap`ihwO*ZwI+|FV$ znKUK#&F!r+&G)md(W{cO9@LQBqN1X*vZ&vN^akzsatip7J+EH9s;;hJqr}9YG{c0^5vJZv6z_H^AO<>;g?ugzl^JY$fg95WTmC0#mB#m zlN=fzo-Nn0u(pmcC@CtUVPR3B$5Wxf#l*xE5D+};=~OFKw}aM&k8Y{ZeE;)D5*az{;qJ=9%uK34*~VsXIE`byh5C6&CxcGI ziYo>>dXI(xG6KTCyI<`Jvt`$zh#Rx4`|<^4+QED@laCk!x#cgo%-Y(TCVgUB8c`5A zO~l{7f1Nmq!$pGx;QVrBsbr+2R-KxVkdQJnGc&GBc*;0vR61jQxFE1fkZpHYv() zrKL80ety*Evv?Ui-8w(%5P#kj5sZq*$4lfEfan zS&;mbn;Qn06Ew6u=ReMAjqb=6Lc5`!&P!+pNxBtB{KdR<>@o}4WE@p-O8qa`rOtQwiK8{foI zDWn;#woe@Z$e`DRap8*-Cpnc+UydD4NlqRhNIo?+bsk#Q24 z0tSPbptq0o4-M5dH2j#{K?%CPzW&6S4&qT%RMh1jRl0!r!-ucpdDAm9gaicSDA+Gw z8kw1G?(OO5=%l5lYHMgv<3(N_E=H1n(WDp0LJ%8|#Ao`}Z;Bw4o{_=F#|N@?P*9Ku zuRfSGP0hKPa-Hf(9v+^%ySt<14zj1$LKJb<*4EIHC|yPcNy#s4I5t(%#d_4M|F&YS|J#;6O)-Ti89&3K=_j3F&OJt|hq7Fze7ot>S7V-^PU zp8O#voKsa(gH5!EiH^?3!m@X*u_y*Ikeyhh4u^du<{d4s+2Gs>PP2M-d+(AfX!Qp9 zG)QP3q;!1$3|l%M#(E0)G;ZM(?gYxv7s`GjHQ`kB7u0b71n1kESGfJ|BzV1*F3a1` 
z{^>aj1$)`#YpdQi2cL`GX?bMm{{H^l+}!r|Hdy_xhp3Cf#j2nJIdZ1g)G(ultnKbL zJ1p~0x6jDaF)<}cQi23Er-P4=j~)I!CFMpuu%VyOMlIkhb>Ko z%Gz2MKPD9wm9ma2utb%MzYm+QFa9xQ=AxveeDfx!y`A*VJ|iRJWWdDnWg1bG8g?u| zuy6SJNipldgaIgeffn`Ui=@2#$mrU2Q`{@79r@w|VsUdRAjpZ{rn+pA@-cOxKSq6`XVX5n;+PU<)h)hw8LWx@bd%r22M#XnYO6?(S`uKin zX=#ASJ$W)9fEB4T5YmegI<)$xMJg>ycw_eN_zPm!&ttQ2ru`y3oBg3J`;`cfg4 zRjM8fO-Z@9Zdx!>f}c%uxeQ=*>m1=6#fPNp)iAOK1G0O%&Naujh8+x6SL zUU>)htt2MoD;Hl^q9O>XviKVF(Ub@vf`^usldG2amodB&9QjhD%UJ$_{1|5Ig1zgf z-Adi&Pwx#MX7wu6vg#?XBcti=ec=+%k9jxD>pX`~zetfq%_V%Oq&`AhQubnXR;i454 z6_1aPcb5k$54B5rrp#JKMsMOI>FMcJ@4gJ1yYr42_v1D1MN(jgTj!wMI1G7Dstu-I zb6o$pc?T^PQFuh2N#R+v|0%~zJYI8;wN>#%x%m779W7^Ml%q;(SGrh?WW%iA>>_nR)YWSYv&L-* z5zaaGX#H31$nf9-vxC`oILnLyb)(GC4IMV=jk0>u1!2sXL>J$GY~=_N-*q7_s+b)f zK}7Dw#f7E-d$f30`|EV5PW0~3^@3dguzH_m(`Le7_OIVh*oG{ii6mU(@$TQ*2Ar<$ zTP9(j_=b0Y6c&-oP%Bz*Hj>T->h8|Yjt7rIj-siV*|=pL#{;3B zo?dm&fXDO1E$pNDLq-e?46vrkM*u)FWxlC*?VZ_OQ4>~Dt@i>{f^so#+F*45$VlSg z=E%qhO3=^d=2H(|ANyM2oIm~jJMDd5(eiSk&F@oqCaTSDg1xVzdX-wV^V;p{?e44Z zv%F4Yke&Tcv<3*chpT-_-D$SN0STQ~4z8Ep_eA2``LMM-t92gwrB9_ILL`J<-@MK@Vl4FMxlaRc_meSTHOp#U3Af0A- z@#4kA!~`5X{K5Wy6&W6YH}rTQW3@Lh%}z`(gC7P4IdZJzw6rr1-r|ASrtEBTj4&Jm zf-iC<*eD2KTpJo1goK1pf^6ri0=rf@xwyo{#j(S?hlYkSW$zYJ@D4I~lux(SZ*i8q z6_9VE4Q#f*yga?*OPy`G)yUk^)wD+BmQrYZDhZemyTorB*8+@|!ChF?=$1*X18W-x zD5`UHR`DP&fG7pCW@@?tf*6yO_e;6a^n$OB>*c-{QgGnIg@%ctAr=XVT;BNZ-rkN2 z_hSaQ>Bfr(fZJLD{1hY?m5`vOq#WBhPfaCkEhm2Ya&><{D#Q&GPo&@+ZXv1i{JJ_; zE-o!F*2u`U^R@uD_!zxPkv0E7a1b>v|EEyzaZS4qO zX;77IY_#arLDjakEh{K^mo{wwl+&!#k;KGv+Hxi!ng6oGpv+o}-)4Y%nN*bwhRh(w z;P@n`xMb2;n*&?qs!b6LFyqaRN5HqQ_(C_rS5vuOxX|p$W4O9yhaQ|mLx_*xxB3Z8 zBkT8i0wiO&>V*)fmMYa-pLE};D_mII{=Ju%*X89Uv;u15LS^dqaXF|g zM@YkeWhH0#DVWcX#=+}0%G5Mp{ZaMgdN%|O^JsaJgMc@YUH<^GS!;pXv=AUUMZ}3E z^fm12sLj%ge`rjx9H*tJ=|;#T@6iz6H~9Blbk%J6OipZp87Zq}c0(y-$sQ@VnYsrx zA{Vsv3NkW#mu=5PDPH0k?CtH5kdS~W5El=nQvVBKNM*5zsjYREl>F0R2m51MBSVVN+XUr}vMB&TV zCGQfEN_jrP@7s}F&62VEKHP*>+HN(Iqn!GpPd2{(pI!i;{T`OWHsP6}i*xn1>)_tr z`be$ALG_CuCdi 
zUq6>ouNaVdI)r!^UZs92JMKB#;#o7sLb#!L^ZbX)v#~a}!-W*UC#R?H9~?Z~=D`Y7 zX%*5r1JTF1BD<2rowA|V6$qt_@LVX@++U@ZDV0TARL2tct84-a<` zvq6F!48^4kYjZA)eV}8zIq90ghoJZgc*=P=j$kuQ(?q6NeoXkdJe=Kw#WWHisk_p+ z-iT3I)Xg$gS`LS$g|V@(adA1#b?XQ3 z@FJUZ85=AXS_H|lj&JT6^+k(SothRhhtlSv>l${ueE!n6cvfg4I}Iwl^Vbu@LdZHf z|KZ}|qW+Guy)f^rizB=Qr+7-X{?3ElCTGQ+G>9AoTGtpoWN}SYt6;AW%@uiDT|;Bi zvJNA&K#}J1X!&Sw&&Jla)p1=MH3TF@ugCkF?(S|<$;i2QKvbs4{;2DEPpk7TPVzW< z-n;4e_;^IEzM=v{K#4YiIYoB*MWn+^JNeNweH^!(!`QMZ)8-Jd!D)VL>!t)xp2pI| z?!$YdpE*W4eMHfDUOuGhNso)G`)!AVa4_x{X|DHcp7E#ilN@FI5W&T~uvm`10^PEX z?hVa-^sk+#oJYAs7Q)gJ=~}kqQ2Urg zf0-}GY*z?><8tA!iF{f(KmWA2FeERl*hT_bk$Z5gNo0csn@ZAhCJKz7vrPsd!u{@z zCSstcx3#qebuKS2kBE@)^z;-o_jhjJ#jrmAHtg=}ySnM?21O_&G}O8U51POES;)?n zJZ+4c0>$a(UaPg+Hq><}MsH5Q&6%gJFFht^B;vvm95F~p=r?zML4*2kyEGakp@pAL zHxO@&Br=o$_wh+;1ss$1@F(UIKwG~x?s7pXkL@(1b5^g=wF2{J+=B`iSml1Sl*Zw3 zI9J{E`}gXi)>8@874)YEfRM!wUszm}la=j_CF}6IunrLpBq>&;0a%(!KHbvT*x1}0 z4^U{nxTK^pb|j;w%qq&tSO`K&E}-X?rJ4j00e*UW&B3qNB%mfsNl8gdM*vU*R&Ykf zVmMCaB~}-9xF%pdl9OctQ3qO%>ltq80w?f~IW2Q@bCCD@1UlXBs;jDgjc7MHndxM# zmgx1ZuC9WD1iE$5q|ULqAKE@(Rsw!Pko=cd)ZpOYNg7zL&+mkjWvOEORJ>8k_`#`w zU2C5&Zw30qM06zB8$@Rw8f>C;85>sXm(Ds|LWl1^9e6+KZ%)p^!Qr2U1v}s0X}04z zN5XJ9xsg5QQE;utgk%RJ_-96H;qaW9>;;+Jl-uh701eO|AVQjSD*zCP?15Gpb+x49 zDY-I#*Q5sxUe^r$Kfc3z1c*o=i&CIqV__+R4(s)6z*~X10OhLN_~dsmGT;;u;Q}@3 zdvOlK!oms*3jqsocYSS0G>V9fOhiCnh?WGdAxa^H)jc_hBQ`!XBnuKV;1i7BZnw9$ zgTUeE=Z}er@!=PE|6Ym`8?c#}VoKE1)cjdw#Kg8%R^bPIzqZb*A9sN%3=lP6zhXH% zJBN#YOHUtKKLLPy@)Q|IUG2VJlp?O*6!b&jwQKeB=g-?aI}8Txx(DqbYZo%wS|RSOKsk77>s3{ns~Xm zUM~}l_V*W89cL|SaT3d$nwt9f@CK0sdJo_$&Ci!-XI;5$mvOp`7Pa$WM!l~dCv%U_ z&WPA;HO0gbGvA2jDC#yjA>KU#00e%3L=I@9zV(ySvonz0j$5$8yEisA08w>%>I8_r zgoF?~guSgTK)MnV6YsCDudl6ju71+h(*x~bgkF|#PJUrwWY5~HygyF3XnlSC91>*t2lNcit>9r7+ zjEoE>W-MT3-T5S>XMs>m@#IEc+S92p`&mGVrX&_`Ry{M>l*<=4grdX$X*cA|of5k( z5WY=h*6l+r!b~7B(Mlxd?@WOnrjh7orXR8=paLczc+YY z`1FmHH=6*xYboO073v@&j9I=)r8U@xUmQ#Har<@S-Yx~9Lo^=$dd z;x5OMR_kwh^}$#2qhlwo)R}Fmi2E^qGhflB>3_0e#0lHR344r3)AmqHRry3~H+v#I 
zqJ1XSTdqIvJ{)@^R_{WXVs!Q}+fOI!AGt-u%HlEhh)U;Y@XJGb}&4nnn`rv{CJ^ws`}bx%!Tq zoBOxp^UDgfk_b!#e(LMmW@(2N!GSxnxnbA(tPaENWBcHuZ?b(-&51k`*>&_AR*vZd8_XROq&Q{5=m>g@ZCuyV zRoX0mibRdUgWPkSRjiK7>8}DH2;#S9l83{jowc=8eS@fnm`czuA3QWpOP^ew{kb-4 zZ~J~ey(-4Lv;!37B_@}5`{*tA0Ufdh)qj?i@?w59&$5!W?aHHA4kJI~(Ge+3-5O># zHZ@p+FHe#EVhJq^%jEL10O$+e+)0P5Zrbcbp!}_CeBJqoy%mNMF~n;&9(}@cp_rUz8Ec zZzpCo8f#y9Y6cdwa??0Vsjr*W8aU8(uLcWBr=YDw`GT129g-&d{bFGo%F+tf<2~tBT z4|V%Ac^1h0dPMuJGZ!`-?e`SsQH$hKlRQE z1!lt=i>WVvP{dGdyZ~!PD(UT6gYn^X@u~R1d#`3K@Xw((IgJWZT!b7u(b$hP&Wbl zI!ygqhLBWu-9Omi0O2UN_R9UHMV(&H@I9_nTl=4Gvag(?0yb#^>_fVr&g=X&)m^?g zJ9)hhBDapV9qxblcocSY>2bs4d4v7@5+ja%p*qcOuXe%aE|WoTfe)uHUy%kdAwW^A zaoJI}H95=RaS#3c85b9~=pBsq{rlAu4=~VO8sjtx0~l1@*@`r%A;Po?Yrl0UvC9Di zz1Zx^N?2Y|F#>}T9oMgC(e5Vv9J#v9ohZCvayamF^u^x_Bi~sJOKTvrt8du%?a)$Sv*L`uy*_n+^Gw!$ zcysa2&*teF19hwjR=&Om5!1`%)-Zw7$1YcA-}ZP`-nrJLw&_Xy;WTTWj9!o>!!K?V z#+#Zt(n0-HLNRC|Y>lV;Vb(TzUtaJWUd zkHnaj-oqBs^IBLrIG9{?zg3%5#$e>^AD?W=jhcL$QF~7NtzE9lY$S-L^f^%eL8a4a z3;ykutcJg0Tx)%4N z%j093_wR+2zV{f1gmvt~bqi}ZD!ADc)|aEE$&ujWhb4h9snVpkMbO^M7Ar-JCF4uw zE5BeJNj=_hl!(`tA|m_9_sH@xN}TQ7Eo=PrPllFmUDM6VAvrw~lb&I?99k#;V&)B< zkOcCR3xKPPj#7`l9^jSL1+jnL^Gsry4) z{s$^!9shuE9^u;X kcJKl6KZtSp&O4`iURL?lql`AmTrKIaS=4SlG7YWqhYXw|3 z4g36=H!QJjM;w>!%UB2>vrLH_I-l9}(WU9qItj?>A4BX{bjl>qDyWBsaBA50TEX+?% zgNN(hjgKswluiXW>S2yWqCM1XzPU16Yf4+zL+W<|`xEctWxp{fP!NDQI|3nlzy7dzeT-1WMfiqAj zH_*udS4)GNnwB;@JL}AuzP)Wp7%g6)3_2)~Ie`!bqyb6Ffzi>%`g*;3>sNt-UqG^r zj;;sfo{Nj>f8qzw+h}NtygQx&zm16r4rp#19F|Ao?CtG=P!Cc74r&NMTiD?UcNG*A z+_k@QU_#l`L12@?{jG&(?Ba-HstR%}B@ssdogd=kWoH4*YTtw%?He7Yc+o|@lYfUT zf4^pZ!QPr-C^EF!*|=VTqb#M+t9f@?HS9xm0Z#l71dQH(ZKSwdrb|xkQq|FoZNN^l&ChO}L6T|%1*5!l|kL9*rfw9gy@~^|(afx0`T)5N@FqJGO+!trs zKLzC#Cde^WB}YdiN4~pKWfEg24oY}BLzg{lvagN{eQSAsqQR)|`PjPCTTe9IacaNV z>bv)s;X&xuB&sj!BdOeL9`mS^?T;pXQ9lTjqWbC*YT?LHjP30YkB(y59;;^Ud{g7% zjO^|8)z!gr6Qhb35f-MPps0GbxH*^<*<;L~RjEi*q-qV&CXf*SlW>eb5CHfBT5=zL zgJRX)ogL8j0^djJl32ZIUkpiNVj^f!feC<>5Zz!@gd7WlguBjSG!qcpz{EhCkP2vM 
zLc)j(N(Z1wi05sB9%gF`w7o#J5f>G$<1QVXpMQ;c11LBK0lU3OX<1oOCEyUEOOS?# zhp#VGr0X9Xbar$5`w3_nT$I@F-@JLl%37C1PaQ0XrawM;Fj|!o&|x+wo7{D6d0+N3 zdKs^DWJ>H`zfcU7CYlb(2h*BwbntMsI3ro2NU2AX78^aj3*e0nm1-%z5(%pv+RTx3 zVOy%CFdJ#k?H^}?c*!nwijKdgWMx{Lqn4hCxDP}vaqoTW-ut?I9qj`J@$6x8a;`7P z=is{>O%VC3X?(S=H*WZKH(4{S&ID(l}`iT`3diyy=nK174*O) z3Q{ZJ-qUGZe718q-662r2j9XpgVqs#xBQfWQvoBeGU0$1`U?CF%gvs94f(4x(K<;X zxdSF@9uU7z28VllG6l+3tk9p1>tK#_oNbobz~Z>}BAuRPkU>EZwzRY?y+M195VUJe z26{I7K16#!WL`Vr`}i`*m*=-Y$^;+F%LbtOY znXk45Fs`g%@O+LUaba)Q2huZ_j!SP((z+f0*dtHi7uceMQ5E&SjtuDYzlBJ zx^MRh$j|uUTy^{wL6+zaO;JeWHKl1YGhYW|Hu~i8Cb9=R@}J*>7(;>{55zNexTxB* zX9TTixANJq^oqzn82$o1jJ|(S1pa7NB;!;pDTn>`@k@nQv6Ti*L2WaN@0PiR<5Z#7 zf2JL4w3}F*m7Tm&Bp1H8-{U2q9V;*tMy=y!B?@>&G%mSq-Q%Sx=Qm3>7Btt+0)YE% zmg!vodbFsjW+!AN2tk2n{K7&?p6M+ShStW!Qc$(PsGh_l>{Iu=eBmpNbb!{16Gz|% zvDxg0P)bfv01`J#z*mJo7@@ZpyXYa2%CvhJIW}Nsx%3a2N47!ve06Nmn0f05`cs_N zNDop*+y|KN4db2_mG#3hpTjMEX{P~ZCQa%6X=L(=i?*ok+}*m|RLpB;m*aqE@KxiY zYJi`m$1~{gBSHHGR;+O~Xg(eq+p zl*7l2XLj>8f$rJ{U9Bcbh1W`%Q7lTzfE+i|Z71YQg^uAXSW4Wk%tu0X zRYu9Bo052zKU?ddRMAmfndRSLhi%Vhv}HEz{mfewEphbjFsx!HZv3k(`V}YeIP-pd z@>Sg3Mr@`VtUG5g|I1O zruC|Gr}~Lb7V0)N?Z`~J(uOauFz3`Qz`G}@{$=yKI6)f40hx{)UC6xTJ_7y@V= zyH3`-5PP0QY7E`aakF}{d7c-|W{LqbCna_!0Sa}3^a%uzZB9;3Q*a;HIgrtQm7Ci_ zfZvrT&}{^}Qpkb-uv}p7m+yQ;b=^s7(S(kKo53nsVu7~sN5aHf6Rp9GUO-(HG?n^l z906e8vqU92)JH7>BBffT6*S9+rjPHeeg@{R2HsGv`z|xsJRP!@7?~bZg9`U{nBt2F zvNRDH7gX*X#LE_byY>3d;LjAr(Aqiq!gqc^M@#TALFgPAfg=uQyB}|vxZ1!ax?Ir*9fN4Aq-GfL7BUz6-VG6n#8?4P786Sz9ofjN%av%U*AvvLhYja#wiF zRp8ZI*a{V`P?hVOa|G5iS*k|ep!W&VVnFl@sIHR$(MX#BY>J5thA6+X-FL5m{h5Q$ zV*h9LE7x**yL7hAcn3^8H;MAP^|h2qzRBEZ?X}0WYzYgJVoXfwcHrz*nw5f4WOM#LPw*OH`q;L;H~W-iFKdFJTZl zXs~U|Ew4Fk2CYl|o|i}%E1n;L6%AT*6z-$pn%q@AxVc$l{_qKtwATbW)aln}nmowB z0OxU3u@BFMsP8AEUZ(Y@ zc!+CovtQi)du1nTK8yb3|JHVoaE!w+$O1PN@A==joxVuBa&Z~J+z{}iA>&KjZKE0r zkmUXI*xr5^`v~s>WD_Rsm-&FxJ$`IES~t=2y&p2yEJcLgMMoFhG~w@BgPWp25l%C0 zrupE38xk~K3uN216*VA)GGz)F5Vl&EEE3hRcv)ooz9s5l7-1?Y=c>VY66xS(7XYBR 
zD`m>9q5YXdt@@b;klncqLk2omFSHfjm+!`wsP2guO-|m$>_;3KTVWfrp)4APl4~xI?P1q4Tt8P(wo1%^kGqYETM_FkhKWF3J z-Zh%5|Io$hYb#Tin*}X-Bps)NUHs=#C_!I9>7ab7)d&O(_640rCcmF8$LK}R*^iR9 zV#^hgVZQEdMK5X}|(ZO|SECSX$p-E&oaz3)TL)4K#|AiHc0 zrJO;RuGO>5Ci1~n@E;{1bL&NkUnMEMz3=49oO^cJZDz`55BsjPX(BF%+Lif8EMrOj zbU1E0&HYs^dv1BuY&bWq>B4wXot+D^Y@PJ@;pC$R+eI$~Azfd7oi@Xzlz3jB zw7|p?nb;)<+o<_!G}y#1w~E0JB3$h%byYzIyraG3x=h5Qyl&-~;Si zO+PI?gWg1o+uXRnmI)0&HssHWZ3A?}G-N?<9#cM(w>U8$ZvE&eD=seZVQ0D40yC2{ zck=$>p*nhO`5f^4+eb$RT3RKrndGWP;{1ukiCiPO@M1|VscXFP!tx|Y}CZI zv@NOz#06T~fti^G3qO=gm+Xh&yFD6IvHHB}>8;we^nEaNRf3V^e})va6%>ki6b+UQ zkNZsu#MgUMM$-!%KFm3*(RrsB^+sm}IB4>CEpQ?Tl@ZidH-aD=BeNYlFKg@m?2N}$ zUt@v6ac{9-4tyI9b+2z6WP9iqn1?#h0R5)9CJ51HGyD~>-WTiKv;ejg0htL1fb#b} zvovslW7ZJh>nnMd+_zr;;@_IHJ7}A12mTtLn|M)*`5H4B&}=j{aV2IN12p;qDbHqf z*3*TXM8;_D_qtjZXhMJk{T($mOUe)wsy)r74R-5X|8t8QTfIFvIA~d7_f>%lBxhl% z?RyN~-w}k-G;#Hx3ZPMgRq?^4`%BoB{=0YNucPfRbV*1uHphwP4E%c@xk-hvpp+_S zDGjoTcPlyN=Vt`UQyhm>twzX4fU(AWNcIGd1fE7pcS?>E-*v|>i4ujtXOJSJ4+Ly% zY@po%JLiOOQ+KMc}D}+m@~-`!J^)ImMe723ma%ZFK__#D4#N zY=$o*NmsX9IG=|WRp?>`xTmD0!N86YE&7@Jn=^PgxNfj(%x3B}P@ceYWn^S5RWFy8 zljHL^Hng)lSZr}`c^BUfnriS={H6r2UYQx6(xOX(HW3&R?k@H?JdRhuHUZ$cpPQdw z&H869?(ED3(DC8+0tjEFrKMd0!b+@EROZJkzrlMf?CeV*${*u2=`SxXNZ4%&+;US= z<{uyKs$+o{Jv{t5aK2#WXJ`NF?iL0A;{+F%k;K2hygM4ZfEOF^2Lb}1w`_178p{!h zYq(kyQ*r^WP2ge%C*ND{cv@(324D;NQIk)taa*aJDun%Uk}h?)KjL=4_O!nK7RTC1^A+%q z`&##l$*^Rx@5ub=17tA-?1Z|x(QEY>2k8a4uX%Y}-+B+Wz6IN=fP-F^3Jn9JSGo#U zdu|VGJOwU5+5~OzerD##>L+Y$Y|3x`mnG0P@5aJH>J(WpS)kWtPKoL_UEBH#{L1+F zg#`shcj&-!2yBiBLLdU;`+*xDxQ#2y%7C#CkS9P-)cnxJJz7lwRD3Y^@$onf%YwiT zmoy0Wy*=8Hf&2j&Zm=g$T>xyg0b8^vv5U=aKgfD`cxY-S0}ci>@aE<}fVINONf`~M z$yeP5rzTU?1=OZ*Ku{(i_$TFqtse3HAPbr@&wqcfQsrod7x@YgFO>WXFtgUy0+kV* z1P}+Hl?Qqi5yxY#rU-3M^EQbD?R{0C4v!BKLyV=t8-xN1B z%}1;rL!He)rhhBE<>|RDQk&OH@D^plU+~Id>f(B}3!n=0$AjnJn07Qcz%KW%Yh$slKr>i_?+f zGETWTDki3(z8-8!=#d7rGqd0QHCPiSIv8Ii^MM`FmT8w_wMwjEu*T6{%+wTNJ|I^(jwB`NQa2h z-67H~Esdmrbc1xaz@bY(y5Z2>-Q9Qbf1dl{j(f))cf8+T27@?f?|s%@@vAlG+#(9N 
zL@)mWa?fY5Kg>vQCMUAi23&UQOyP(L8{Q4DHa-Z&=cI?mCy|Ha@m; z@ZDy))LkBO5r*b)3AptN*iD%dkGlh^R#{$^FAlYD3Mr9+VBbZ-3hQOJezExMcPXC% zF+_+1VdSS@;o+F5s51cknV6^nHn)K~&1NocQxgb0t2>Cet`oaffolZZtEwi~ot+)f zGzSNcg(B%3MG%IR$7Kq?iRfDg^YjYP(Dx7K8@`8ONJvPyS8(9rm+P!NMHpAd26qeq z2dk&IC~KvoqgV3d-^XcC@3{R@y`H+SudhE9P6yyn0G8X^yD^mJwbmUOmo>7!z7EJI zZf>iTpkG{$mYzj^0@U*TIptR=vS6njASl3x0(c{+Qw3s>n7bq>7J$VAfJ^e|*8oin zFe@r5DpMC94B6_&6!5mMg=f9~dOOaJ_F-Y*t zz0+>}oC+eprHp^m#r~y&%~Na>AXr9xd|*l)Ot9hz3r;iQ3HZuA6wBkzqw7OfTqHDX zom#Z~8rA2`KS0HwWD-o<#w6ADfY#F7b}1PA+Prn=YHu}LoZw)X`+5!=KTI{egfu1t z4+1!jqGDp@--w8acK7$^*>?%u;PFCoc$bl%Fd%t<{;(h+A&p;uoh;Lnc(5W_ITD0e z+U^9BdFI~~m{S!yfz8jG8vq$GC<+F&t6L!lURI|7N9%CHb2@k@!2aCv{n_yq4K2jm z+Z*r)psuCn)QmrbzmKJcUtN8V{1g-Y^aYk$*B2I1QBloOj%djoK&S#+)SH_{9)97d z&bGQxyoclip)cM=mnNkPX;pWoz%cITsgI6~nC)th=Lnkjc~VKI`Y?N{@O?RwUjgW* zDT^#y<-00S0bUiw?D8iqG|}(-#-})>n^W-FqnQI@^I`f-)E+-?#MJHb;1%+7z?TH| zUyGt!=n!z8U_iB*E{B93mC>Qr@`|4nGg)wA;+ZlXP!$3Y7?8VH{gB(c6zStSwkRn5`!2c%09x>aK2Xd4 zrlF@V0z@{z8MSb%2oIO*d)0I`& z^WQyFr){$W?n=Y)Fuk}iPJ&5zZcyI^0T)gHvn3${L5U0+;@OW0u(TSnkgFs#sI*6$Loj76~=>uQTIyz z1ChDP>*?tMt-5+850to+9pB(RVuMiy@QNcNQJUaV?K-x1*-bJv3!j{wNa)i9cRP#D zRWJ#O&;v}XZPPogt94pgGYFc_Io?1T@$&W__l#$o9%`*uz$W0zN>p<46CfWTbkT{p z&fGWBNr&wwSOJf*x7VD&(P?jr92XUUGytXpNFC6{g3|V(>OgllaE=v!{s8nLaJfJP z0ATIQbY_T%iM7?$O#w_`d%HlH?maOE7dx6BC#=Jy_2YBcO6E+l!E+Hng>l}S8aeR+ z><8c_f#CtbwHJuMKY}v+_0<)CZslJ%4R1Suxe5q)k)J-~C<0ViNy+|Wjq|20>t;CF z-$jdn9*zQzTrJuhT=8xh%2$yQ2A}(QTdXH4zRq{?icJhu4?2p!4F-fVyI73+aI<;~ z4$k(&@Sz9eS#^mdLH2>&>!~imq@hjrG?#JsGQr7P#-<_*fXF*IH~`H7G3nRxD-duI z5)uNl2_s6B-e`0=<+7f;+89U%F#p9CuP37mjt0vQI5{vt08U0m!rg$eQ)$Xsr}^Hj zfb&q)2Xj3;&#da^4v3SOPHklfrs8DNL1L?o*J0Wx%a;I1WocyvB4M@DdgB3`O`PQM zwdLjGwVr5TC#R;=(a}QOsHh&#tV)o@8FO=A>Y5riHQV6M1lVPBMGR@+8(-D zB)o0tS-kF=jw&i=p?_LQ;fb#7HyIOOo%2}rWi+HM;-&{o(=!J*8-yow#C5)aiPeDn zVPIl97|wuq#K#PEA=DKW^>O%+2O?hrcH?){)RXQ`cx&1ocvhFB)gWFO;Xaa@@Tzy( zP!4aNcRYg%wU{-1EvmRAoxR_@mKclEV<^MoJj(NZ+sj;hlyI>*787N4%f*^-7A{nn 
zl~w6+djTM<0JY|{HBA2IO*+PqCry5qL^z-spKXtAHXJsOjgQ~2h6_!U>dgMdxo@k< z-|TWzwK)-Ja;|wpo#*L1%hUMzXL#-FtG?cAs5@_b*FF2u&-{Wom&dA>gJ0j1BBZ2V zMA&~lZa}itK;zP`w9_;h%{W~!XqYavtQWiYt?$?X2oZcH?JZcrSVxGYE$RT^x6{&Y z1Nt$Da{*$ze`qN0VC$K1DPDy;sEuQ4BH)5@1erSOiI7sxAfm{Z&I>yhhk(yOZ(qp_Y8#m zn3N@1thHAeYM$Q?gC|=y-NYuf4?VyQ4J{6dvpZhth^1EptVs~)=1!1QL+nJUD8xcv z?|W{+4jST_?|6G|Z>}rBg08s2eNP?viA^u>IoYHA_SJ|6N;x$%iAWzDj1PbU12M_M zh~JNbBGK&ZZ9A>hGgInl3eAG4ggpGAfVn!CJeHPpf+7ueRWYG14F%QG!i$1ue+}14 z&4E?0OLc&cy&N8mQKkgy}$j6xiC2FJ8ZM-!ZiZo?mEj2!R z`1U(E8Z?Z+%C8;4U|;eTK0d(k&@GGAya^<98L3{ewXZxTM)S4|6*U|z!0#crDe}{(3EGA2P-;Xg47e(Tigr!z{Gp7Tgvf*L?%m(uYz&+PNx(?#w zSd(0>2&B+P({rue7Fka*XH-Z88bB3$g5`@Tbhos=&grzL&hLPRo)w92wP{^l9-fLy z@8^K@4C2CSJy#1*p#X}?$HN0SuN2()mr_2KpydDz=t1l@kJ=#Pj0K6Hkfs6=aw4c& z0L)(sucKzOyNj(Y>wHG~i20hh6!P}&)UC1U^Lbi`@WGQtfO`2`LBZz6#<|f^JD^|i zeGAVp;~2jrI$6x{N+%LI`VZV+39#X4x!8OUR>Dx45H~wJpTiawX}$l=_*F<;NpCgO z`ts_THQRIVN5d}w(bgRaXqrn)UqGn@L^2~O$b{jYdQiWJ)>P|wc(E8<>NA~GD^87OkfW>f!i2x5Y&JkL)m05Mq1jQsi|6}JlU4J z6EtvSStbMMyF?bpJ8(T|IW&l4dS)gNfgvIzW5x>s879z02n@7dMx#|2QADbGNcPI( zLx#}M0S^K^s*uacJ5El=#bytOu4f&8v-yO5lh{l~g>PHSq8AbpsxtV{z_FmJ1&@R= zoC1d%fK4)jEvn8iu@Wo9m~c_~f#K;02o1tIou+7ax_+rKo>fihc^3Ex@L~=+v+RA1)u?;xJ3CKaEYmD=Y$fdvYeN#j>x~m`Pr9gxQL`NPB4x%*T zCid~9Zwa16LC+DyZ<+hJ8LZ}W&;Nmq_a<;-I|#wm)j1ZPqAY?Ys5x8 z|M?ZHZr-!GWlqKbOXXk!vRT)EU&0&T#|ucIWwrHKhu@9|xGXYCbdlFwYEjM)=j)Z@@LUTvb(v{?()-m;`I zN2E10^!3pZ?`v<#9&=aIHTagq_C@^gAa-32Cl;nSng`V1mJ5NBP)$8A2XQ>Jwdg;h zwU({Dz)SF9T8(kTviQO>UB6vsEPnLT*Qm2dBNbp|)T)7vj*l9$$-jG4Jg#N~AVzZf z&8_t!{~V)J_stiC8CHqdVRIhG%IuC!ImM|QTSOv4InOn5&zt6cX zaX5oIBkf%+`k2{!YX9-PrqULV4z7PS-i{qudEQeOEwdKN2V9AFnQvwZnQSRWYYjXK z`GJQ3D?eM#pFg!F8G_)tv0Se8bsBa5^Ehs>s7E=44*8*0lN*b9;c2lk5z@|YlUNC^ z##DwJ>NI(b;L^Vx$>|GJR#)+Qtk?EoZJMnbKd_l3vI$xDnNR#W0OkSAlnEik=;ik_ z6l=N>IaM9chd%Gd+qB68bzMm`ae8_NJPUBU@o(X@pz=hl-+n!A2;@*2IRwH%yaNI< zV@vRM%q#H*c<@~dq0s)4$zh>OvUZ*xa594UELJW50cc_x#(UC7e*N_OrS=x2r7b*& z@aJ?>qA*}P0#YmRC9}CHUS(MfT8Gi&mk{i89cX@gjVO%>F?E-RI(%gz5c`PH 
zjOA&(T;U>O;xq4xE^y~2tf2pLz7CkW1(t0Lh#%HyiQ1%fQ+;mJ=STZT*30I0t9@P! zGQQ$R@8%a&z6Yj2`BLWF2YQKLGf>@T{q6m!-S#dFKCcr|z1e4=n}WSkg`JrokbFMk zkzNwejcYqgYdiAF>JHm`>v|guKx?PH>je?79ZP&y4j+*7bteG_G7rQh;pB$3fVSN} z48S#i`S+J~tZS&=;ew561r9=kglaj6E%u8KgB~d4tRvxvbB)|%Cf2d}=HAH$UYpHM7b&)zxDan*(Gj|L ze7A$(|4pXE*w{!9dWNq<7xzIZJp#8*&abD+YQ)OGFghSvuF);1;>E@7*i|LGxU++Y zr2SCp3YJ-vx0%Z>mlz~P4r$(F`Lj4Mj9jBD20B!y&U7HBGQRR@bxiT-G~*Z@vNKA^ z3xnW89>SAp^$sj%k#=n)bbCraLnA|>+DjIyo8!aTOB8xP(3<@{KF5QJp3xbfg9I>P zki=(^w~2OK!Y+=8_eGFt8EM^KpVMqStfsTKn%S^fHaI}bcTKI$COTnL|BBJ)KR<&I z3gKE&Op5A+y3)mCy)DKP4EH$ZKUiP?8!`FW1}xe$^z#9R8BI+s;8|HKO^a;uCy!9d z#KEq}ThAHfaH}p^f$uLDE5t)ke02gYc0KMz`hWbDx&~x4V=J%3wSh5Mdi7BE>Y;%1 zv8`yP`rC3=3qB7&2`w=X^YJ9#XY<7jD#=B}$c<-Ie0ia%2e?DzO;&u8Lc8u)`}WH< zY$sRXI{0B^9%quKT4ML*Oz_zj%pX5c85vfMj1hcU9CsTQ-lm)iu8HN7O$l;8iNRV~ zXpoRP$3wq_hl3ylW~#ikRN>;B$a3h}&E~dIRK!`09!#sh;@x)CmDSzlD&d<_w{_9J zpK#@M<(*U2)J}~vmXevyaZNqhC>uUn>UIm5X%tMcEL{jmZPwpy${X^f$eO!7li$pL zc!if55l32K&rbf1irluJnO<2%`6JIcwcqas+ug4p){d)MoGL5c`8}6dXt+Fc5j9V8 zJ52T9oz`x2+}y|-H{mYHm?#ypgFTj=f>*zv?c$uD)$RTk_rt0oy-*$*Tv&5v;&024 z%6l1hAH~k*;e6QQ%6xM*vo4J&rmP}h_^`4DcJc?q7{|vS8r@}-RVW+CkRcpwG@3ca zUne@j06R8HAxHkCEQ?-jl=X~$prUxVE=ta*L2wtbb}IGO`@3$oT{AH`GH%_m3U*y& z7|}@tNIphW`C(PlX({aXlIr2YM0W4W%-t-BoK z_UyFr@&(YGM@{7KzhCl~LMhnxD%IsmPbMKZJo!UzUFlVLi%9~Bc@d2F(E5d~pJ66R z1A6~0)|gcwx<;N*aq^jz5Akf98K&lTf|=TKAaJ%M?miSY z;fF=Uu2g7*LT-hA?vg|ZVhX>AKIcqia3#-&)Mw79S0 z5sxP^kE=Yo^q9~5x_L{<9)k7N=%&oAb|(2XjwsvdNkP1_+~etj@c^onBno+=i@WGg zXd?=Fy>>|)<3lqniGpUy%)F3Gt>Aj0I^f1Oh}JK{Qsl>Zi;(inIs}sDziBF>|C90% znOwRv?_t<;92tAX8G8pU=)+0S08pM=U2G^(0j?>{I}^cQ-#lFc`SYjyN#r4zeyJ{f ziCoL1jmZ$*|HJ|aCxf>KyQ7$TUgE~Ysmbfrm=|cMYtj`KmR`qkPuJC#K#|G8@>=`V zpHviOzJ~-^8k2{nvlL6u9&vEWYNHo@*ZRy@@LkL0-IgR;nEB*IT$<-0a#yo7qCr;= z`zU2Hob0C-r^ETm6|40ii^&|RQlX!p2L^5T_2rOl_s!CNM>^7Ppj1gRM1EpEfP=W> zm5%tdePMg8wK0?gHE)l&4rxMaU?Y%`)ox1t9a&|xAgx-No906?09K5T~BRL?k%oDo$7$TYn1HNqdBdm%qQ9 zMuXYSPkq-C=mRQxUg<6PXcSz=;ePq#$;vEFkl1fWrIM877|3IZerM#&CVH`Dj9=?` 
zKNxM|^qWTpxt!IeVds%7n$pjqnx*XeqdCgqBN<#OpHp_MS+YI1wb9z~t;5ciKzGN2 zNfMJ<2v(aRi>#G;-L1n?$Iov2<(FWzKZ_v^k+4VAi%V72_4=3WawGiaA;DuREF}E0 z=(zpvQ05u6xiKQ-ig~fg>+!*2acn04{9HoKFcOgvhgxeBDe!VQWyOYBaUt8#_`T!q z+oo_@a?W|%o4IiyEX?1ZvS6u~GaLTcaQrR?K%f5A?JhxK9Q8_&Va!QW5n5$cHMzw< zj+fh~FewF<^IRc07J?V?r1M+oh>%cHSrT8xKb0(G%Tq&#Yl$swZRk}lC}!JjHL*px zkLssAKy6$@c*Lh->io2kEC8ea@yx|YrOGWWl}VsJ!{}N1vvgCDpec=nS&f9J3PTK9 z=?IVu@5TxS$%}9Rh`k{YHor`eVMUPR0-0^^7a32FGB89I2MNy`F`6eaa{|%-^^`aC zUxWM(kVDZuvmwFYzzRi8Ras}hPAUoc^xV>qptNwsOQRVPt6EY^;No7d#6bssff(e! zBS{2}?TH2p*lD~&o~d%C?my>U-^86~xWE({cC_Yrb^A&@y&-xav~@Ta50Z-`M|tQQ z`X&-rs_Yyguls<5H=+NkZZW5K%;P5TK+5vlfDASt4VSSSPb=1AZSM!PV)RhW=I{ATnd9Mf-*TU7#kmVe zr3_dIK#uKQ|7DZ}1Pax3Ze1BAKfH8)SC}>Umq!+P#Ha2vg28jxjMD2N&`@Arg{L@- zYxLl=*(RPddyzie>;?+Qlg)Wm3cI`ErQL7QIjrACZVe8&af?xq&bdSk;X_R`It{x! z6f|925CXF#ol!-RUj`L^P*Ew-G&|o~H2C`_&sw0K=HN%M{bZ|Iff?xVbh0qhjAg)I z0Xf-ITILJeFx|AH*dPQ|m~`7V&i~W+{(qPf|35uNx*i?^OU`V|tXa3Yo_;_6_N5Tt zp;a9yBIVcq`J_&cs}g*5jQ}%Vd?||D)4L3?-#fcC>#yMXFyHw#Qh)Ftmrl+T_mCMG z8#iVlVARaFg=rmqpUb6V&=I@Or5n&RKZR-LSIMhcY#ri2a8ZNtaDM@cisaCs%uug; zbX-bgY+`CkVq{8cqLKnR%sMA$#3u`Uj}_+7HProJl1rX%Y*NuG%?+T_60b*spI4Rc z6!JQ$(k!%}8VHfCLj*zEpcmzOt}mj4Kg{85_oBh1ZNi^9W-0$%5pYNgHfo5BnWCNmTBGyg+I{AQWH z{?C2=QlI;zWj+Nrd-8Fzb91swsKm~9PW@?eL;J z>@4>xE4_Y8$_`kLb^eLgk)U9Y#&F`As8)q$BX}cTJXJh|TE<(dxXeB7M_y@UKg*$9 zLVMfx_F$pb{&9hV{*+6HSAEJEfFigbPTD1}spCHB=45v?Nr$;y>^wx{7bMm--OgM( z?2HZ7>18zz@6I`X(=>ltGFyR}2)rGg{7oc;w$yjB3L=pfLbt79Ag`%e|C2nHQbdj$ z9~Za~){J+&bTco37~tBJ;>uV5K2Zaj!>86HG6E+r4;6+FQn$M|zanc0S5^yd6!?ww z{=!sRqSO)Vl!2UMrIl<`^o7hXD(TaM{o#;<59bhZG+~huQD1CT>%fbiEMl|NX`96tuxht$9P-ipjp-E(ssC4ZvRx(HhJdF3+6=s z{i|=T7ugC9dGgBAh^rDdLzP`aW1t?p1H25I^SR>G&u0SLvKfCr)}7U1zZM0aCZ7V> z<==lqU%N5~9N?ldynCzp4toSRx~Egn^$#P*937RCDX7gb{ z+;MOjIewRy&X*k~ro|k!b#}Oo1PGWV)`1iFn6dR9ZKhRk$%TUZP7aAR_lC!#4n9DR zN?6!%IcO+!4koU3`=~Jmu^lFD`x%JnPmkNR5ifRg*9NniXDDWUtPt{X}518)LkAJTZb~gO(=U7 zjh|6NrXBiouy>j7a*xLZpve~!OrNEfgV}18@QX@!P^am41GZ>pT2nLjx!p>eMCP9ex{91jWLd?kPY-(a;ojKwo<~&+P 
zK||+q!}k8bND(x&{J5us(n!aOafV>~`-NGJ#e+OLC)fP5XTlpGM$m&+!CIX6Pns<7 zcZ?(vg_tNeJ>6ZmQjp|QDOH0DX9$U2@9qM-LP)d)ekKua6A)~TdM;@(xhKaZMn}b^ zfS*Z;QujiXjFmaLt`bs8^h)<5@RnRdgvCQ++1%6+4t{V4oJwM;1>`p0y@+;|9;1?i0?WJvCJ^qFZ66ts`dRidhH}O$ZWsv3P;=+Ilsw0tY5WEnBS$ z2K689_wvhTgL}TX-`LZOtGHbIkH&Z874q`3yk0g!n`5Sr<=>;o3_ybTuck1Nb|K(`T$c!J1 zD%pD?Mc?;fMOwr^U-NgY8g6`qvb?gUys|2}J3Y3_`a1sI;|6eV6?TBHE|RZ!Y% zt*fXBJxX z^emd@-OTgLeXRw8e!858io30ST}vHX#tbV<*DakUgSzj?gBZA*Fc@#G?W8~~UWR~f zES%@1T}$m=$(hmAg7`;QO}6_F5QtAE0)mQ$$~_%tO}6yZpt^*XRMW%^KCMQFQJYg| zy*B!w*Zy+5frg0fbrFo1gwat+O!uA|5n|*IPNaxy)T&tR|6yN4S5#O)=@i!~F)5QX z!^*JJ0j=^fJPyfM37emJEWeSkVst#skQ%jn?uVs1*#jG?JVO)xq$|?XI6ytP_%ma{ z$iVu$!#or-FCGb!4?pMl!Y-qB&q^~aeZtFUvdtjuHGk7=x=b6uUi%4t|%v-?VLRYD5?`C$RGuw%Zr zQc**e%G!MWMv#fs;E;{1&^y$jj?eSKb9q9ES+Qb|A$0dq3jsn2BK|rSs8?RbJ@2@t z>DS?%cE<4HRA~ZJECx$r>^d;$5+Y zBRBjMDtGAsJ#~N<&2qy*-%Fqah7lg8Rx zu4fW*k?VTHFSEIwJXV30Uj^)68z#dbJq=2z>&u6w^=l$l_&gg=$Kx8gvwXT_ab#Nj zU>?gjhYaPX%C; zKBDhA@t%p)*cM5nyVwR1NPmpZxaUKdcU5G65KI(j@Z~s zC?GY1HIgSrX3961C@|sD=QA?u0haNjx`0dLp`g;GcnV{v65Evv0wt1~Y~}(OfYARv zmy!%~Vk#Rddnl`dox>hAXGi9SJLit9EwNkv#MI-cGq~W}ZPGgCPIfj{X&r`X+_lZJ z28GQt6qx_=yx=`KF6u2bm$iSjNvaA<)bGvtIaT}5!JO$wR_kScij=|g03kfwc-y3+ z7uzQEHojts7?0;4`#I~C+(|Y|ea95Xz}B;{5bzyyc_Dbs&x7%VZI=oEpR#S1F9_XL zy@fk=8J`9v?!rAUZsJo+*iM6AKsnnn8FE2r(Tz_$;&Uh2v*b)%cQ;1yst>+Qyy0oW zpKt$EAbPeW#*>L~9oxYy+4mAuDcGNPtb>j^?P{?6y(@r;W~(VLrY>xMImyD8ZAbgZ{sP`Pq;dH2)Pkoy6T)1Iljb<80spuhy- zz@X7g*U%LS+Pu>N&4x*VfX6GD+q^#B_pxBt;sWWVXeaZ$Np_#iiDV-fTt^Fg%2x9`-uF$+H+>WHw1-T3r=)m4>O4+TTL}V zuV-H1q2)W$yz5GnWtxMUNKI-2;R!}*0(5oL78o0wMUOg!99SyaTPNl;UJfcj$HT;L zqMwXZ&LiG?u8Y2`;<7x<9LvDW&MOG%^Rs5Jkz7oE4INPr=W{4cgU03xPz+Z< zdO^;@(YJ~zif_%5;|wKQy-om)_xr0nM=~wo`!*Env*T;+10YT@&$60>n!hRl0JjeW` zI2~bRlo#Qj`(Ey8ZHSfZTTEInvXPl(E2(8wUWq@mpi;5`4uqOpwhUM0@V$Md>UZ$r zKdPz7KZupoMKZ;=8(Z<5>u56pu7_}w&8TKV7Lh@@gUCq^>oNY-Izoi1lXFqWbs)jL1a``aIyJLVR) zi4m>^y~g9(Zm0T&ntSS+7 zGcYUMq>aUOM;pH1ML(HHWnntI(Y{q>aH(-@RhN_CD>N#25?k1xc_}J-z#q$0a9x#> 
zV+%UiIuxe^*ojm$r<=6;hwVfEqE^ND1NZi412RSIF?8hCb`IG^j!!HoPiExd;m14lAE{z zEQMXPk3QN<`zLQHEfzL#V5>1Kb#@1D~=~o3S z=>Vo4-c5aokk1=ybYu>YPdz$P|8m|k$IZ<*I8FTYS+Uc5v`>Z?RUb&&coj3HL5O6X zc0V$Yv28LtSvsF|FG4MPV4qZ}o3kNHJQ3JxD(XlLhP!_waiRHE;$cnh;cZ&};T~?dey5CQmabq2wRiFGJg4T$FD?xG^;vr0B@2NJZxYc2 z|L*~iINtLp080Ygv9el|P$-CVDE zn#yGbEWv+*3WXXBs?iy#o~2E^81=-R_!zW-Kqd{&b|qJ<+KNJ6h8=M*o;MedsEui_IFXbVf*S1WVdy_#Oy?^`U2Qv|86Ocpd~X^R**d=y~)W?stmdd z?0g1TShYPA{(9@elpNNocWc)B^aUWA0)uf&xh%x*Y?a-2Z^}wP6gxBbo8J)~9qb@< z``6=^RAVr6^T|obH7txlIDR#!3;=DAMvqJ86I=GBs$W5J^TgjeI_Lguj6shWD@H3F z9FJ;QAmok=npObxhRaC9V#Rnq+!UgEP{(3376cYzDPq@b(`4^NMT)yQER# zLQItt^zwa`x*&cL^{;@0R_Pluu(R;^IYJL?ilxJHOdTB@D%kCWT1X%?Mg3vv}pL&4c1BSY?-YR%^qiiKV6{1SZ`ncQ#fRSfX3 zdKMYaNsp3xMG=^NF1;m&eo}4}h(NH>U{pZBg6i-^3 z-l2l<``8#1o36%sl2h}?x}TVn2SQ#UnzKi(*&sUuuNS^ouE;OsrCI6ePP^-3nx;q@ z5vWODbLuZ@^ugXX2H7WY1bpEd*>J6l_8r?#PZJJEu2r%uv7 z>9vg;t!e4C{AUaHrHFw^kU(7~fnv9P+w&tZJ7bhDy-`CC0WkOK*r*QQylF04B{>R0g+^i0-@tLaqkfUJbzSUFb%`q zm4rp3ZV_!!Q@%^qfR-$Bq(d$H(ZQ4kA-n9^7KP4t-m~RzT}+(ZMa6}wEpCE>pD6=_+tGhz4gVSro+GTP@^o>tdvJ_ZxB~@@78H$#hYWwcH%-u=(?vX$Q83 zzS2|_t;oozc)^8oq~S=61oY7v>DcN$v%I5{mli^J-K3u^a~(UZifz_eTMsx@Bj3+}3mZFBr{`r_S zmGALc{|v~GgG5?_3JY@rOMnS($KPA|qrp5>Cih|{!O3wqU65qcnpf98Y9`#=CQ||# zWM-5)cFauMrbhxhEuiNyqP;9kirH;`OL^S+;dc7+h0kfvgB;=w^Nu>Soj<@oaISFj zUHV?*?}iC}H+QPNi~zz0xlOOmT~PB0)BZ^e%UcBj3rsx-gcZi&j?F~L?^3dL#6p`q zMlOlDLa^9Ivcf^VX52?#ff>Y}Ed!^SRPTm2%}B2tqa21Lm=cj03iu(wOEXLCqOVp87!OZIKo?eS3bfT8 z0B}ShV{~2$rNY3xyONcgpX4UO)#c%1|F=9QP9!qV$S6=n*&odk8*o;@oZ|m{<;Ghb zwh_R&_|}T2Fvw!UiO+y!$L|R;A{g6lJpS7+Fe#dP!)Pau9UnIyN7Q`b@A>SU0;paT zJc5K;_sn-Tb&Q)IfA21nOe72IkzYh9E9b|LXTThUaHH)|pHz@_IpS4u{g4Wq zL-+8XKJFb>RPVFB8~%Z$(|R0HD)2C1gzYrw4XV+@n{7~!)f0H?_QE;+icF9j!eL&M-%S6DCvDI z;@c20Nu@zm{Jr93rlYN`i-XX|cWw59@;2Ef;c|W-LJ6lzCN$}lJTQ_Sk0)A4pP>f- z1pDTd5B*k6gYzXQbmc2_@ekLo!=TkEI>Q)dCnq#EU=GtdITe z??9~t@qBx0*DDTdbb6GAGP3eaa21o(|PoKx2i6c-NamIsG?PfZpORwAEs*L+<>3CnMp@cfQ 
z!@eK~J9~7`UGC!2Gr~lho7=zZcA=rO|2_y+R~~BX{Scl8Cu&wQ^t9NlQ2Kr2Um=-$lZ?r1?uPC1CxOHx26Xij-Y zgOPkbot#Hlb5vCGJ0)eL0#fC^s~4o8n^mg&0&MyJuC%bRH1AHXS~k`G>dHqk59((W z_;2?aLLV)*KIHor;kO&hrip{~Qm1s;5Jn4Fp8!>z*Yvq8o&&;`Ia+K6O1@c$PH6S# zh64}jv#e!!_e*&7&XE9C$!b<*Wlhh#MX`cDG;lpOyPO&WC`5OeEBy>mO(5FZ z(c^WlYj#40oG^bmJSG&-kNKrM=BC^OLa6{Srh7|4 zfSLAml`2=hC)EAS%*FoL;aS?IU<)mTrWu|u46sAQBqW|N&vqrLWUg93cE4<)={FMJ z_LkY?VXBBNP)i#J)D)`|l6eta6$KYc-=1_naVwM|J)IWpuyzf=HyWYzac;~U4_ zM{fPnKcZLHB_i(u#2+JAHkZ#Ij*s}p&^sAtp7uffFMfY&?GoUp0zx6!M2v7g*M|CG z0sVtZk%MM?Ru#NEE7;X?OIovtGb%#$<62eZYQR{^3fU8^{GY#+m&VT8JH2zvH)%U9 zpw}!Wi(`K-JT@DpF*sqM<^hzdvrK}$6A(%~+#1aM(z~Dm^j-PJ2@ zVP(bpL<0o(8!SR4-e<@L9uVnv{?*j<$WddD40o_q$*sc@Jy4^8?L=yOFq0*5*_Cf6XCes&u^U+X?^FI+R-?&jX(wfL zM!3>sV+IV8HQHz?`rwHJb;?~#xQ@3krAiS3=TVy+ncwy{hNc#I+_p9q7fYXS4~~C( z+JwA#^PmaqWAN0s=@NuTe1GnIAmyAN9`TcMtZRUDYYlK?q(EV_V+>229xntuLQ3<5 zL)Gg3Rae!KLOH7(MVs|v9Vz(|8gYHqp|Sqdczx)9RSg^@isaTs6|}$JtoohZGLKq! z{Uai%Ip9n`gm6o+qg|8GtE-c1vlx{}{x}9gEL($c=@EpPpg+Erf4}*)Ew@5?$Xg*( z%Eb4h@#PVMLQPg$m!Onq;?s9LP4E>!Fz2a)e%hkB^;?UVxrrT}IW_SoNcm zu`yHh$OoO!l9HU#R32P{wyo&Me56XM&;JhdX@Q|KAWXqadi=okRI7b~dn>RNm;((3_gvWfST5?l0VTyB!NeQXq) z{$~vYa=#Aq+<|+OyM@a&J^>OY-yYM9g-mfYjQGhy4f|IDV!pYSTs5(AN%fj&=Xd}m z-8ld5j`7{so*haeHeHic0ed|5Q;0hsAu#2Ny1IhkQSFGk4UT-Ob^QbpI&xdL%)%`58^zXjH{2*bRU0^Z;xoU=9PknyK>B z!6(PZb#9y1AHP;hv_!@a>g002@Wzh%Nl6|4`(jJzq$&Tir6q$VUi!j_`v>}0L_`gS zJuCft#p3jved9DckWlsR4huf6gM(=xW&MhNwmNoEgdKvL)QHRY#3()87_i9b(YV^> z=eIz6!-shwFjHaP5Ky1H=p@Ad7ygaXi3&8P<>AJn;82mM0R!OpIL*hB0JkSp^6u1+ zWY-yg1K(=Ce!QnA=;_5Yemq9<4-9y>hZlg?<_4$-rKC@6gKgR8=~JMWee)$myby%K z-RjQbbAVHKpoWvjv(tpe0MasGSC5&<=H)hTuTIU5RC$FrU(~)QSn2}|0pR;3bX(l0 zS9FmqcFPS~$hfV41GNA^--!9Cjb9QY{H7K#`iFDU8=MauwyigpX3ARZ^`3Bzn{W|I z0LJoilA_(Gti?TU3CNQG6@caC*5z+og9z!raI;Kv=;-OobQ;X+FbXN_!(IqnLL}WPugDZur2DW4!KIs#Is@qWM0HhaOTwK5( zKoEEO{q%qSt#he!SHVbb{mXowJ_R1|@_}JuVg!7%a|vjIG%JiX#2rw*2qO-05w4TALaHq)$ literal 83733 zcmbq)WmgCM@-`*HxYHed>Oy^)|Z)|MsU}ocZ_O62u1R@4WhzS00O+U^s 
z)%zhj2Q{@`(5X!`nqqImnFMFgiGO_mQ82cI*jd-zlMs&*yC*E6gjMht!-uxuA3}&E zylL$VtEX*OUX2jLbKj5{fyNHBW=m@>2N#!D2b0nCcOQj({QtMMu>GAHVDSI9T;8W| z+S~rO9RI(5>zT2cU^T2)t3O-zJd`#O7UmU%K)UGJkx24vtWnSBQ1`RAEw;t;6&HHp z|Mzva%UXKD=}O4{^q4r_4>v9(6CW-HUW^dm2W_nwd=j3*j=yky9L3h2d3HMs@7=U! z`oGs+Fs@*imZquWd9?CGm~ya%BLE=_s5&Ch!k%SpnQ3+zN_B_7lR3zUaEZTl_}@1@ zyK`4;RcN)*^Et&-vd56Q;RZP+EyIG;s&PU@JcA<_8qG_m4i4$1IRCd^c?qyMczJo- zU3Br!g6fvu0)-At>K9s;5K;eIQQo3ryG2FVxy z_#zgS&f|7jtW>x=mc@XHh=7Ve-(aH~D;}Va8?}WyYn7tNjZCn(`n=@7;N0o#n3D0| zm(K0jFzadg*V7aF$DGVVgn(crZk#;ury(GMcI3&ZEWHl37X!O^^xwk6!&`V`M&5Gd zmWZ1^x&f|)5>p7gva(`_9}M~B<|aXg?njJ{fk6^rP_33NF90@XP7xu60qNm>k0Xj#IcKma&ig9Z22nx z(Bx#?6zN%Kwg3pt0GK3qLQPY1X?i+u?#PLofTkKaj?x+U|E(y z?)=6wR&4G5r-Rb>H+5{(b-@({%zRC2@eCbbv86hm?>pWFM;`LK_WYES(L7&aw$c+f z#RNhyT=ae5!-8R-9JyPo_e#vS2-HmrM%fKKcksyKGBWw*sTsfNfkjIzEhlGVW@d)* z zDaR#&Y+m5MRBf0Pi+?;jI}lohkCgqce`$CL*PK8E!eL`$qEwY(7TB)|Gg>-2UhmiE z0-3a9=Wh~mv9YmCOiT(23b4L^tkkBqIue28)|M#5Ov)*~=3O#EdBq7Lfqdn! z?`tcyXsOMPD7An8oGWmO9`*VzScG&{>tdz8HaU-riOuA7$(k?e`8dBZn^=%q5+?OE zP=~J_P1bGLx`B}2t+CA%i@89F+vk2gmIsKFZLQIV06;o(&e(esG<7@QZ^LuN*Jf&FE z>K|IHb6e>#uZ7BG)|;OUvPpG#JKnM^`m87J7VOW9ARlqzDJ=e&wW!<6T%vWq89=jOnc!QA_>)Sv}myl3kt|7C~C8`0r0Z6KJAOd zD=jGj`{~VaaC4^;hghSn?yj#lIvi+iV+ygS1u|d?o|`6xZE*HfvGtCIy=ThnSY6~(aNrYj?oO0EyoYonBZJ^<^HC}vI2BGoI5CmcgOaI& z8UV4B7g$V3Sy@@(062s~`Jaw!fo=5Q%OVEHk+tMMl0coz&)TKP&?CRISk5!17MwV{ zOGrqVnCzyq{XBMdS5s3nFfg#Rq;1(AMzp=t813w|D;%fl4TLBa^U|7N;3am^_byn? 
z{?e{%Ddb4Q40E!BbWi_CG|k|9olK{~4I&9(M6RztBnhCdLx^S|1`8D{ehHW#MeMFx zq7HW069iXN|71_At*IGJ=ZcSy2clj?M5I!utuKn;=-^<$d6P`SKI1aueL&0#{d-W1 zI2mFfDG7;h4rP!&y>|2OjEpwpA?(7bgOiht>8BoM?5dR5~EQ7b)LbqR%F z+)uaX^(gHwlKj2RwV0yHDk_AWw!{n!KuO=<-}iloFq+CHMTuE2gX4e^%^P%k3PiT0 zk9adB};NqZNUvp#VS0Be>~Mycopukm;cj?Oyh zQr||mQygBNVXquU>7TqN#$yfNliIbE#Xz7JeD8uhn9fd02Bq|4tGP^NX|-EzUsXG_ zRxSj9!;*MXH~%G4kTEcm@p?4qzR#7Np>vXgho>-BJ|VV40tOr)W?m;)p; z#VUh`9JB?ECIZO7^TQPYUU$n5V;FKgKpAapeRJW$4Z3*nQ1SeV9tw~I4UNy;jM(8{ zIXGPIPFH|B%^3xh188rrsHkWi-HZJx0|IpA!*Nxv;b2)4Lo09A7>*-nRK`=?QkiTa z1_?aWj!*aB77_@=UK&U&;YUnIo-0n;%Q(E_xOl=Iq;HFl0}g}}B%PTp05K=w^M0Pu`_Fl5BN zyu4&)W=^D47gG4GQI3j+79A0R{NaPXl9LX>(Z9~l3B&b7LKu+GjVQfVW0ljz zP;hWzetz54=r{YO^s&9G({{4o_#Cmf0~}=Np}+~Ocls~%q?DJJ2L=XG$fm2cdvJ9# z;sybfDKIFgM*Yvib-Yi!K5A5Emv(vM@v)t!=gVluH#7q7?bA~}qyCtMTGJsTW?3^X z>-Sb%r3>DY`HBn-l|a7L5HboC)zs9qwm#2S8y?M;vwV&OfvVMpu`A9mE+#DMs1r&& z?GMt0&!bqM zw$~f;tCYqGf;_FQt%2HXx$U8uvYnWicz9@$A)6E*4`gIFyhUiQLAgrljKvGEo}cGa zgVH&MJMGc{LG-|b{&H4Ru;Mo{9;Sb;8})Afe4@Fz88{kcW##<*{E;+{XcV5^N8+fZ zbW~$pH9#CxRx-3OPpZ@n7*tQs%qXj>67hSr78LXuF#~J}0$w!YBqsjfiJazao&J&I zdlJIH$H<7W|MsIw5UlX~Xnd96)WBn~8*N3ch;ivF7A9tyW&>(>)zfJU11YKBij4ad z%I^BUvzlt7mCk39o>8eI$L7Ps!@)txHXSsOt%Cy+7<_(rhlY$iZBeIMiQ?k&jg#{i zF#%RXef@35n-}9v%G{1*gZ1#c-d)c;%94r#`=OF7{+E5>e8menhAzg4sRJI@^Yti# zZyp{VMF4(dlT;)WjsT$6ytu+>seMx3qlc>fSzoGi$cc zP&ogdhnb5?f4V=`qOSRBm0hTR$3BDG$>Qbl27tk>zR36iBUoP_ygUb6EgTj91!7u| zPp?67Sy`*=#b&-#QfgY7dFt)OMQC_E9k~w zn;u8A7(y~VJ`QM>n5-;aRaFEMWH2}c*qpGiNi$ZhRG^|AUGihyt@%TMxCgMf&^eDfasFb&6a|=B;nV=5G*ED4Wn~2geY-c#&(Fny-(_W0C^45$zeKg9&?{v|-pNd%26yk+ zGa)N!YNlmmWW>iCHY{~_cR$@6;b35NCs1uSA{{998dqzSb7xquG&?!Bc=knnf`Nf~ z>t{07UaI>!hmD2B`+C2nQC>fExC@v*%OF;n+I9w+DFBmAN7Je3=n^FJi;IgrpKg_@ z6VkVa?2^V&!jM1$;7^f#larIc_O`UN$fdE%Z(2%*TBJ>~HsaZNf4c^-94Ht_hEH5cRL__P?%XEi@g`MA7J!Oya-`X92 z2^%t~T>!$c9ylv4Egpy6VSrKAM|=V{_+T<0NS$~JIe`6Zsq+HmOET>mkO73ezy4-t zlW=jl0rdqCy;{jwVrc8m3RR_>td%a*Iu;f%)kZm&G)t^-Mi4 
zn55prw#D0ixlJpS=aA$=47e$iTCR*yV6U&Q9cvfT6BF4T_vQJr#++dBUFaKPH)|M? zfsc`tl2WbIp+gS^ij?Vaa%_4!Ep~Xz(%&W60EiH>Fw0HdSDbKrJiMbrM<@42T!-!tvksBG6CLuPX{qtAh zKEy!gV|GeMPo*lr$O-?-!otGKt4*IIm%`k;ww5)e9rZ1INx_Z)6?!;R3TWm6xvbHx z3xK*r5%KfscLl(_gI~@tr#DNrE-KZz7*f4)!h+$8YyaT&>V?QB5km&JT~Yd+qcb*5 zrTVx*Ik-5YEn0uW4K$Ev{uIG(1pj18YRtgo1K_HdVy5eGU0HlFze7rt5E@(StuMOA3 z?K7Hwt3OVY!f}A+2a3WqA>9Ya|NAn~vh(4;eC(dLq%R#&P%>Ri)G@L{B$rFUSSsa^ zxNKhfoAypr0p6d4g2HqtkyLxe_ToU(@9S*R3Q8|D5e zlAxc1$1yYqC*CuH4>Zfa8jt*R!HjOU89e{X#M`&<9;8-28x$18_H%ZflMhG~z&BpO zwzGYfvql2cFThq9H2#_a@HR0qF-KUWh?zfkG?vNRVzpHN@yquJf^Uh?Bd!$$kWWEz z4pI}p$J0fRyC1rN2blq1fcG1dlWPZyfAlDkcjTA1B&NAnB~#fB7(-6(&>_(|wp6b* z_4OW)%%={3x*pt&H3G=3E9h|wW<(hTv7Mf?vmcNUNI$aiS z+HwLIF*!9Qm&wCrJouZCC0j(H;>)LR^;VB37ZTN-Fcy3gf2mRyHpd$@=H_fV#$Ywqebo$>{b`iWKGiEHQ zmdCNABy!o1kGSxQRA0OQGzY2R^gg>qXo;+Qa}}I?2Z3(svBSGf9Ez3WMp@&x&ICaJ z5>P|j)b`z+AjqG zeyW+-{9kh=Vd3sF^;$pyp6>sl4znlVsk-TZy@{5Syq@T1#22zv*G{KDj3(GyeKTkg zqK5^|aHRfSwZ76Nn)}AL{=<2}H2khiokxtc-7CjDZRKGj=^NW=4MhPe=6NO7y%p?kt zsiya!klsxnA0H^8ds%Zcz&vI=_`G+AQ$9!T%0{3FA}c8-*BV~gFz~L<%}Jqx%;Ggc zAfbK}mrK|COHr0y(#)A%t>)eNRvYK?w~f$b+pIfX0*du3$m0}60EE=hZRf7iCP&3z znRc5A*#NhoO*IrE022I12o`}|<+v)mx4;ApMg3r7Z=JjhCXd1?r|lmDT|{?oMR|9~ zQ8f$Nq1CTv&ueCU(c^e!NbM^xFN4BYI(QW5I+@R=xB#;2WTwsXa{*GNt@9Gtdw$K< zyEHd6r1v~vhBa7{bw`CXXXD}(GU2Bnb;~G|wv=|u+bri?8ahk&Xe-XgqqGx>wk5Yw z?@ZYBoj1YxQy*sn=O^fW$105Kq)%rD&-_qQ$74^2&Km>8Dt9p?-1VIjETh`R!1LSl zoxhy>#C{UFPLVDiiz<*pLe5&Zts5fkP-y5vCh7ppZFCLN}NyhcPbOmr~Qj2 zv$c_H`a$}KvRU^m#_Gx5JjmRPnSe(H6YEfPLhq(v3LzRt;;UcwOIIZMQ!a)gWvcq? 
z(?xtr5}b$qbJ4x8rw|ON&gs#GZ((xY5*cZdhsB{vi+*wjFR_AG<%b3u$$lCptAnC9 zxU=G&o7ZaCAvZL8s}~i=llv)7i#naxSWl!1Ua0@e2ck$J>W?7M59`Ux!`%ctS>#hZ zco489e4hj5|6R#TD8<9dnIt-?h4eD+SHwO3qWS=P=a__gf>ZCYa-pPd+?({-MD}oK z?;|S3&6SOq2&qor1!=L``bqI0%03ep&)V&Ofs@=EP4@@t!b0zOwCI0zM>#y9>i#ir z_LkiLi=gxYzhco=b2$gbg!7)qnIv1Y`3{KP1( z57Kb=Z52CS?GEP;fEG?)^dZ|XGvuE2&Mnan8{VIFEkdx%S?TO*<#FjlrE2Y`0TY&Y zv;A2qOd>~HCNA=L^`@ua6n(bti%2HWBBG#7$MY$ZY-wZ3w6`&!J1!;oUq3M$MmhW; zhz~{`M&2!3lR1GzZI6#Fyk*Eq0>pr}aJ!jNO`_9O(a_-j4`Zwjw!rcz9Mlu0{a#Zq}+uGqAwXV@l&odz;OhAnEiy zg}lnW^_e+aE~M4!i|yK^$ulp4Dc_U+ zS-ocycG}ia{=pL}&S>(q&Gh&_8um#)pYDlmBWfn4Y{-rKwJ)F8*l$`*moBrKjt1Mb z2}^H_$8`iKS(%#m=6Wb;%SzL)s{`z|x}1WYDWY4}1DPp`+g{se{w(Y2g$x4Z_z>53 zibA!QC1#&V0tQP{*-u7wI(Quh7Pr!zjpT4M`k#*A4FVwD#Hspbsr70E3lGY#UKaV% ze_eY!7k|AQ`4e`SxYLmZMw@kYAAIpt&h|Y8`79%`dBE7rZohsHf0^oak05`Vtb-<& z^Rj#Wc>c1bbc;W%+se~z;9$L$)v~|$BK#D{%SgU4$1i)z<(S!AVB>vZjgxwKX-Vl~bI$iHBv--BD zhuNH(ASltNaSp2eojY%aa3x zY%4@CR!$c0$C{be^_5rfR-(6)D6GA$g-_gCjRRCAEXVrhq!||vPmG2Gxsb1PmnLvi1Qrk77RZu3&wLD2qzk)JC57n)B|4T^qt3 zO0IdqA2k>5mgSASIcq+ax0{O>cZ4Zc|Emf3S@QXj961oz%T*wnp4N9hjnh={rRvUz1r2Xm{7i#F@G2h zWiubB?-v|G!Zr7?&Z{;ISRsF&RyIi@IXBm;f~vptJpG1)_n1x6^A>+S>s=d0P9Hrr zcc&4akcn`84RO2wH1IF*I2lljvKR*SM&RNOFb>~(x_wRhY!*ZUgQ}JKr?6>do%9&Z`YSe-QKwF?`99 z#!*^?XFRNKdi<{a1jG+W=hlbiB45ugh<}OGBfqmzuh^6FOg)f;ADFysc=?0$<>xG0 z6T=?jq2Ik8uWtK`=0ie4G8?uJ058((`5xfG^lDY>4|`d6x3{NmR@;rW`|!O^+!-Cu zmqRfm!itKD)}jZz+4B2)dmMI~J%A3e1Oz$YP1c%@0ZqkrD{srk#6AbpXpiefN|>z1 z`dC8hgC&ND#E=Ji9Bq{a*1JgMcS=I@bz(t+d*K$j-E9FLe)<9i>Fg{Guq4xE4Lza{ zmu_$Yw$tNgh2dR{!`gJls>*?|I<>2Q5L<=opI5I%vvt%)LfZbZ1?rxy5#l}ZLjfLy zBuQbEVeQV3MfL9<>v19sDob@u(C|m&^Hg*w!tk@&VBeK+9M>E}4mm zjY%bZ3)go`C(~Zeg$kqxR7`$nlhq{b1}B?Q8o7c{I5J;DINx|ORFdCAqRhoUVw>np zSvjBJvnRv)^3=RzeGo{*LuT0PlJQa_Ah305BE-bzsUHztPf&t@xCE-8C@G=fx`vIh zL-@jmtv)axFqlv3l`p%N(?E82iF5uTKtQqVttHE*)Q0uaEpHYYIrhbs%R9Ts_q|UDaD8YvCjdggfd9;#+Qx_TPEWe%HlIajsAtiw;$fV=To04*>4a5$A}dMA zNx|sye%Ls2Luv=)t+xFby(oD+%4b$_T5Ru&UK}~zo9UBfG}~nq$2!1L%!rNM0NkrC 
z&4$zO=%HksoUXHF>N`6-LrDyPuT^U@qF8Fv(l6bzQvYkGA^t2pR^oo{1PKhQ+9HvM z4kIK$xvdYG<@mIs42%{9{F{?1{>j)UN*W|MQ}-(X1Q$bgo~RNYo#dCDQp>f&>W>Sr z@7<=L6n@Z=W$kIQ8`Jp$ME2^lYkYW4MGrR2xV1u)jK6vomiC8P`=+An+&~FC4ZnZH z0YWebr{us%Aa`G%VvUzI?0=42{rvSGgDwJ8Pp!)I>>e~*2(?27{bL+Hhl$OiU#-2f z^&L4qBp~|NzUG{DYf?*2wX<2E!h&ZHK<##HK$;NpYcrx(euVauscdx}>IteOF_{|= ztbmU`MBtBNh(sI!Lye_*;Oroi+%zHD1n@9+%amxXaJ(=@7#wV-yuhfpr-#`e1W;9R zuk;kFpx%q3-x(2>Ztl;O8j7b3xq3^@2g@JHcW6ijtn6vG_|*67;Z{r^R}DVC@iVOA z5uG5AOykj-)V_mO<#C?$cPgNa6ab8HExF<1yg*f4LUo@gfLN^*`C{s1t-rWfIML)u^|B>q-M4>?Wt|{ttg%Iw%uUW2o z1`qfR&EJJZLXs+zYI2$%v~-Mx;}4o!`YciBQqsdPQj%^7=U;;?v&498I2Xx92<4GlW}a_;n0OtN)d3geFO92etlZCaagJS(X= z0=!v%KeBW7clSI70+Ri5rKlYB$)8hY_$#@&%e))28c0b+u8xA@$!l}}h;zIhtq8#^ zFmSnEomytcV1h$zfXm}lRMT2K-)zA{hW}Y=-{m`MIJ8A|)rG?jNMqT7D2csg@n2whCoEH0`OdmOEe5~f5Q1Y~q zStpkh4Obo=Y3X4lIo>*pnG#^=X`FQhun?B>*U#GSbdpwv%~-vjFQ6QZ&?g&q6k)I^ zY3%o032|{V3ya5{BwgH~4~U4(2k}xIwt~Uk?f0c`YOGm6LNfJ?a&x>g9NoJ%b!tH&h2>1ybn-2dB|Dpr9mxU@K~JEzU`~c zRhG2#Qn31w>=M%P4faoCna&n{c;q#SEj~zKVmNeIN+r!opO_IrTJKK~nou~byp0{oSKv^#4x`E(UET8JQ?g=SkJgfH=#}PnRGYqe* zSinO6JgfA=kK7G7oq&0*TA}p;1V%mt z@h|(!UEQZEU4pOJ7Zqu{<0dYjBkJq_e2x3Jtw<6r#+mVZ!ZJw`?x?}^@u~6seetJ& z#4^4~fOcKp!trHUPH57qAOz_nrpS|GCl5O#2cv%^0_oLr<9?R-3Y$A%!n#BP=Rr8I zw*e9eOSwqDitk{caXsZ#7@qm4M}*%^ zw%NfII3i?O&rJ+MuNEkV2!hN~%_lXFohhOWtr4rm`wbx$z>Ts~fCNZ4Tc|`JLh%c$ z7Jr$Lh2sJwsdlfxr6UWI7TM-O#I_uPYLvuF6=Ru2KSCJ6BiLf46OszZ(Jl`?6T^x0`tE2JePJ^`}@R)k6e6Ic7Bz4_+9fNB7=L;$j8t&6$; znGx9mnBh4~qD^prx(^f$!9qmt?uywkPX4zAB(KAMDks`foFtY)xQ$$NTxwJYMd-1NpqAu{9mRvaEB+tUm~*^C)(xJ|tF_Uqc)0j~UhkAbz$E??&g3nv zIRzGzdFYnuuuzsj_k5nBTkIJ#>O3Sy>P(GOySVaz5|g8^E6e|^CJ5* zBVAJ{|EZDCp{T7!9D8t9*02+)is0jP>7IbPU{FJh|mq^SHWz5=BQvof>gy z=;*u-rrF$PF-qiL__>VQhp(8>jE?4{9qn~IRh<8F^C(l9q8a16UeGiY?K*;ca>Ncu zr(fL2BMu-Xaf8w+kp$4HP)O*tYO)uGL2}IDK|Z<;Yga3w6UQE&8z~I!&)HBp+FGFH z?fLd{mz|xRzHjgNsi9S=fb#I4?>k^T4tOu)cq?6YZb$i!tn2cjb@mij0DVQedsT^J z>4Cz{iui(ciw);n%W|U1?^_z(aeMRD7Rd4P__pgu=6eC|Gh$u+t-1DU#+q7~`KzR? 
zjP}bH|8Sk%{m<4-n;NunGrh|n?C`7nyE)TcmA^X!Ip*XIe;yOi-YBWdy*Foc*WL`$x(^KAhw6_%kgmj6 zRxUQu42izT;0!bF3rVa1{w`-a$Ln~yZ1cIn%9@v7*$Ys=Y@4j3K=T9@fSxG*$?Cj& zRVmO6`f`#P@*z^BR0H#63PMdxX@BxF7KV3#E$`J<;M_rxG z7t_W5P*hfOx2l9C`Jt&HQ3!jwG`GzOK@m(IBUah=Q~`$Nph= zMg4QL*3-SL&NjRr?hn^$zHc2RGAxYvYGtE<)N9hhbdQ)4wOLLuZumBW4c+Bk;D2?_ z)0i7fJf|+adL$;js(bN?%QHCE$N+u`6T@r4A(^RiDF*Y#)T9RwZD0}fQx zoqC+k$MeBNUiZ=V3j$y_kItE5zf&;%nNan&ij@u~FdG|t>550yXx&GWZQDjK4JXI# zaopG#ON;7cT6TPcrob7m)xje9vzozz&6>w#TOw~=4siCA#GY1BNjXRVSHIr@0({Yw zt!jHm<-er@L7>`R1&xXQ9J7{gt~4cuH6z5ylH9IV;((Dynm(*w&?ou`$W7t^SqdlS(qX+S(Lne27BdhKppx}*YiHap6x*_*nYo7x zt9#Qm<)_AtXe%vIk#7Hf(<-bTL^f$D)JVk5?ftNScGNe@{rmS!>F>BrsT;cT!~iq? z$Y4KDYhz`Z_r2UM39L-thqgAHgG+Oo-;>Da|2#;-Ja%l7HC2{p&A{C%ypWH9<1>)o z2Aox$609XAa(%gI&04@j?;Yi}rF0(JlCnrz;Ct7Ye+|96nL>)m$M@(>*W@Ah1EIi4x}KD2rfsnv?`3JA46q$Y()dvN$ueW7#RuiETHfsiJ$7Hs2lM zMe@KBeIvC#%k~rND_qANW++pj5~U8r6au_2Uf|v%TeQfrJYKt3RkNh=*l7C{edFh_-TK=G zT21##0Ou5!_pyEw^Pxm<^mxWn}>`;mN0>g)v|^Kd-q zqwljkJ2<$P@%(K&Ia>Gu!;k8NFLNj{*Q)nf_Ejg$Dd(Gpg9b#1gG-g;hz;^~K+~O?ni_bxXaPl6H{e<(@|(&F_d~I%TZ8F| zTu`=o2_4FmQ?@yI;h*nYBuSF@|6!4i#_IJ`v;!vd26XL|p33I=vMrbxKFibH%Ea2( zl5FJ5Ktm?cmw50Xbs9PUgG23V9B!$0)9dy$f)W$B;dI~=FD-mG6FgHTKQA+L&yk|> z)|5>-Auk#Ciy2Ai)>#{m3fU$RpZeeRmX#Ox;6Yjx}*VpwRy*%;drK91y} ziY4bmx1Shrx_#tVEH;&V8QPh+r7vzX)d%s4Okj?a(a-6h6BBfs_T*phh~#drR8(dT zv)oN(7XN{B2UHYi8xPZ<)7Ej1f4keqnOUREjE9=ll^aG&#$& z{ct3jJ)aEzv$RT5eCFG$D@oR~@kp%prw5|8i+?_=oz3uD09*aK6M3QFB_~VNvllOB zAla^ydhxu~9YLPfJ|oEj$Otx`o}+~nfMT>gcLqgli3&gZReDM2-kkweaac2Dtf($FV^qO+`FW9BE^;VvLhpY67`DplDsLH z#P`j=>06NgV995o?O~aR71S9ca5pZ1=J&5|)fx-~2uF~>9~GI2h>l8&+utyNK2|xn zcEhniHc1;2MLQy@K8Jd~7v&BOaH($9SL-L; zwAQ=vz+NJ`CixtH-19r-f&4D0j@Cqni}+gJKd(0bk3zw6*mrUt;6dpC7X9@O$ZCiq zyPzN>Fk@U;y*D#l%hU5F;vkHMoPPlL2X#3D2=09&!hQ1wCw89l9?mS{`#cd^$cGcj zKu#)^kS)e7S36^wp@U=l+<~A)EXyuC+)pJJH!72?TRT)Xmh5{tGt(St^~VD%E_pFI z$$i*U0}e@DemusC`fJ;I`C2`U5uHZv$N4+&eg)skpoT@?b~A~8`C}hH&HguMRCGzA zInS@_L;@IVUv6?pqE#nN8v54J;OC!vwTuo7ktF>7J%4fwjB=$tl^dHiXV-rxO;XjS 
zwX!ZkiU_&BoHF2)ox09PdVe#c%qQ4sP%T1+R|*3A_L=^WVR7)VBanJu=*u!rj1pq4=`j=UqJAA0j|wM zipr(Ua)Xm!7_c+l!BXBwh2cvKTT+aFu7f`MdnJL|1Bb)X^iHUdQ% zqEhr01i*pMAM(aUziG2-f?{7V#nDYUUh6$s=XLq5!*b9vX&*MBZQYN-2<9tweAa7p zWBc-#MJL5IGu|1psh*s$e4)Udo!{O5h#{lLx^e@W7vM{mx0hfFS6KJ!gMWU^za5^<7=>x1#x9M>y7afQJzP{j%(6JunPR)M#~h zz5J$95+R)!X{#AJSZ+2r7$X|_=FXX0w%-*^)UK&vyK3HWwsQW|_m%&Mn!27|_j2m_ zZY+y;N^9->>28hNa@KE{WeA&ozQ$5wBws2miD9A6IicHRh}+p{q(N76B=KsG+56?v zp|CruVmsQAivTa_`LA*B{o?tX?w3O)<=S7t*mu7WfPy_) z#PN~y_FvIG;4yKW45l)23sSC&2K=h~o2rE2KM5~dKn`z$Pxp5oAFS2h43zYfp~&#- zly6S!FL&2ncE|$f4L|oFLcTs8dOx0bSJaOhvfOPg<9y`)s*A6yb>yuBTpgHPu-}d` zV}F2`$z|cf0{lO2pxjEyMYvi#{&@*z_%Zz#{J_Hzm`q%C4v|su!V+b|6t!7&I*p8G zhc{ngaTqjD%{DJ+Eomx{C0frtWMmm2zx%G*l?gO`qoQx!pJQPR#d^5Ye-9it_T3>u ztE-67!e>6M7k*}9G_bYUKPa=;Zi6B2k#(2HbXe~Xq;t1FZJ{`wDLhT#p?_EpH3-Z3 z2n$RTugEN>c$%GT<($bfA|oLor7;=y0RdMub6agXkD@>Cl3JE+2m@X+RqP*JnmAbQy3#Kzconc!$ir%xrj=vYDQ;sXpm zAy^)-$CLicwdgHQXZp&xF$zm(9C&@{yathB7X@jd>z&Wu;OxRxtps!;+h@xIPxIZ0@=ke$)vETgF?w@@L``><$$KB-$z;**r$2aRdO6$-5tXS@ef=bcr0e<1NG)d1 z;mjM-#QXnd0m?F;9&Jc`GoPX`r8CAU#&sh*qtXDEbB1~#b)>LEJ@s_!t8UXqUSZ3y zrJd29D%;TrYGJsbs+Cl;y+KR|bQ(cX$IHfZalI()a2M!(E-1nOwL0x$EV{9KIx}4= zvCw2Cyz~r40)~2en-|>z*)Ev8wACZ_qu`+z_xG;g4dCgkHNb>gsw!fKW(GOH&n1$*dUJwBPj_JDW4!Q=B&z(aZJ$|5&hYwr-IQK zYD^$lW^YX|^^M)+4Ow!&? 
z6anw!=i)8OBpT1NApI9%?vzR*A!103m@F2;eeqvDYx`nK=4l%7BUwnYB7GF%-Q|^C zH`B28*HGk(sK{%Hs4G(l;^kd3D|?f-qK6F;Pdc4BYEkBC>f=hRD`e{kKLrQFyPj~WKi76taD zus`n;1p-v`0Uho%JFzd|3GSiv=?*m(`~c@0jgSAbo($LH!}kH^l$t6?FhOX!(uFZg ztUtfY979Pd-k)cB^0iHPhdJ9PS`IgX)7prkNqW2B{wgvr4NdXRIn!pdN}H(_u8j~HG?N@4+o2{%v|e|Tci8B-(eFjm#WA) zqPu9mGx^he-;~&#<3|POMhXap4$jNGcz#{_+?TV*kd$ms>fcv5?M@nrBu<^IY}D%s zo3jz3pP>#O4I;Wt9tXd~A2fyITd#Fgc+<-WoIjDq0_IL}x$Gc~KzN66JUKqsH-`!%!$XSPyePL$fh=js10;%~l~F zh;mz=!}3b$9S*Tz2%=P4$V#6x1BGmyvohpKI)#>Koo)a$T-PKnDQ73HP^V$Sn*;$Jy=5m$`@mFwOm*$PV(ubk7P;U zTg6Ejuc2Wk?5uqu!&KKoAg=|1$Pc>LKd7KF`w_2x4i7CU?Ju*sG+1T_ti?Wq7d3Od z@|X$z6ia<<5$ly5kW5sHfi+2iB#9h1_@l@vx$zu#ZjUwYcMvie5(FN)s-==H3g0>z z<3xbEYPFoTN1ZG(A*$E@;d7W;k5oH~!~Kg`Xsl>SZUMO_{fA&cD2 z4uMg>6=M9$^jou&U6y)nnI8p;hH(ahiuZUdcqg*RGLOTHUNSOndKkXu1~Ufv3+ji> zxvY}-p^FL}tO1Rynf!|BY)&iu;CB>mchK~-(QDO6j2e!wtRm3IXT^> zwCe{?3X|Vj(xoVe9!V}5ja60cO@1z$N%hGwrEFi=or4R`1UlEgYl#I&!lPdkTU8kn zx7KDBFl0!JBN?Uthw$JK>J8@s%;v^1bZG#<79xApKM>IA9e&6BAm z1TaOSw{N)$BwzYUKy&0 zW|rcwIJomQde=B>8rP4x5)aF#?~jD3kniRzB-<%b-;IO6?0e<}+^whps#KFa%$V6F(J?#tV!q ze<&{%#NJzk;}Ny%Wqz^gLWh}ZH#E9HVDR^}X3tRNRFnJZATIQhMW~G=U*u?gbLIQS zTs_oA*FDtbw#{r<5K!d$$o+C$-@*Lrk4Z=|azw=m*sKbdSNWnOt&UdZ#5my9r>~YP zh>Jd{G#`)Ka^({i2Hz9~5P_HUSp`*vQwMuR)uv!&Qxv zwXM?{1XA;$y$3uEw>_9HPyJ!fbecc@X?kw6h$(xI+HQKv%nXw6QxMtD@fV!TZoi^eOCb zk!h>9YZIJx+l$xyNvgC7=bu$dzxi`Ht49q%LSnHzoSW(Jl4?i+xmBVlOGv1=}lC}@7w=qp}wR*iwxB}o3F@szV5j8c-`ET?_6&v zyA?-hpk`eS|7Zfqme2nBacSipA(Nn>z>3Pa(9^(Q_V(Y^8n%+zeh9O6jmSXkM|E$% zNwbVwn*JO50&m6@hj^uaH1FbzCI7(v3Z&L2pxyXzQQLZcc1E(b9ExtI$KZqt1N~Lt zk3|dm6SQD}kw?Gu4+{AfGy7+uJs$d@+OUi9OIkJNc0 zVkQR_RtpIAe#lkpkP$zJp@)YP)Rf}2KSRhs;SGMjpztpS6@M-H3*&R>Q;i|8|2~F2 z(@x6T1SePcQ$`i8iyfP#a#nrKycb$V6dlOAak>+-866 z?b(iAQc-gLjg_SaMxxTPvj0?sK#0_N1E;JFW(pDq$E{rcj@P=E<`bAH4Nh}!`__Ig z-z(xIQmqJ9$@itBLKj!7!071EbSy0+(~X2FEh*VSQ#7#yY~W;XoYz1T&O=9B0$X#L zGpzsg4wxW52L|3~=dXG_tkk=>5i)Ufeb-Gi=y}2O2F*sccu^6Dfhv>raHiWN|)uUVrVEjVuDQ^SEsT9nx>4Lm4(Yv<;bV= 
z+wCvj4X3wUmsd5WELdOP`5#sT$kDbkIUIr*F=CG-6TbQMf=sK~`+TYix(&D|cE&dK7`K`$qZ=jN7v z^d(h1+}g%|<;}rOdHHs8@@G8i26@^Dpdw$3mHDo*Yxr=ITu@M8;Lb!z$r%718ZNG? zipqZvZ8LG;4JD;;hgqyD@NrdFtLJXdwfhTo<7W*lU3BUcwu`I&&1*bF4T~C_i_iUQ zxiZc<>35VlI({p^Xl#>AV{-Q_YE_X`pyzb{?3O@^ni_2|CqC}^r#_eYgF{{r^S(BL zvy<`p*!y-Jjg1$wCg2ff@Jn+Itor+Q(x_iU;SUcFfX~59Pk(j0^e6&0hF`yaiF^CX zN=mL3LUW2V%Ju4jo|HkgAXHh6Qn;H3htNb%M`s6sU#3Jw3fq4k$K&S~C|shVmD97% zJ}#KI#{8J=dnw@YwA*D9?InqO_udQgvNwju`+Z8BT^7UscVU4|RN#OSpl8&vl*U0t z3h2J8|HBpHZr{AyCbMSk*4uk`s8JCqFK=yEG`!H@wG!y#YJX5Zm{&5jJ3cAxMK;~3~bqn9smI$+ z!VCL{$!$m9!=|A3#S?G{agr3IH<1wuNlF<3l`TJ?p0%5_+nsE>+)S>i(489_5o0HK zwXHxKCO_#x=iS`Y^q(2oVKH#U1>;RUjSw3d7ZfZAL~t<5^XGO;E&Ax;c;Fksjv-nF zSA+-&xvY+g;IJ<#&8Rb479PHH9%_%yoNjF`X(Cx=67|SN8~n1<6+qt7^(G^v#(}4{ zdeDEbcl(kpN@@5Ty-v*h{l7%zVzqXhm39fCVZ5okUH`c`!S6Xi+LER{uvl61!PIK- z*3{J0d?JhJ8TrkFlMEf3pWoZuoSJ$-U)-Lr-JYX*BS_+&)@fU#&B9oQ0{4uU6X!!g z3TYpmnCbmttC@WL?}8y8>xPre$TyzRQb?wqL%l_8C z#*v9m?7=6f(zal~;t#xCmi_gT8vj82mnLo`g_VFVYEsy-ZC_MW;;|!qJpukIpV# zruWg?i`=|Z3&y+6sL*P7#xW#9V8Mw4{(Gp2itm&YG{ye?@*(V9JLwfacFD}Cw^bmPwx#qesnIXZnpXM!kLKzg4{fD zxs=Fge48g<-7^6;cb~&%4`6+-XgZM-wIrpKcr_(E53mcG!3x-c(1vA9{Sm`+X399^N&9sq>RtXc|uq7d@Xf74Eo`x>+$?>OdT>xv$d!5k5T>8!J7E%=Y-^S zf~tQWwvMSGr~y6w!o-ef<$sp%7W> zIAAh=SS5dyRaBhZJ%1`p65AH>@|TMEO2hl^AgJ}<5>}TN#DUKR*oUXoHG)w>-ix-% z{!r_a4)Iy^*Gx4iyc!qf)0n7vxXM9=Jrfl)u!V9>$2p24Q4sy%5}MvPt?6!WrxFzO znJ7_TIo69!`-e=-Fuy35UwNsI%8t&(Ab2|;W+L&EnxL!hYI>AdwSc{L4Lulob6uxE zLMkjQc#whrM(`q|bLEphcC0v-!Z<*um6Vs4my}TExQVaHIDYED_bp|OJj}Ne;G)W{ z$}X=zh%1e3zJJQAxkqG?IIefS|6FuWNpUHIibAJ4~7{qdS{aPkECCx%fIW`Pr zDj+BE0dGuBD$|Kco%Bk>JYs!gHGWj{GEPlm3Jx1bC$6QsbbY0392h z{As*DRYfrHcZXYZn0>?Pyfc-mUZ!v`Ma8qBsI-d7<;*d)-nY?7k=0`f7NimzgC}FS zdDu*HI}#E zLvMVe3)WsmX);6NuhP<&XE<<3mk{!Zsbdz0x2e3Vnvqz5O)I=OhPLa*il?$UM9%Pj z*h5K#eOBQy7wy-0@BW?rW*?R&T_GI}b@!ZYO|#%7`KNGg(9ayjW%aIimJj4Pf1YvlI?XRXmBHE<<-A;c15Z16 z@TUr|s`p%t&utEE)A(2zX#ecqm=mqu zG~8T1-i`)Jopwc%=T8>#M=22zTqZ9)?)Q9Z8eNrfcXuyQV+=2Vz&6~gCrI>o;bK8K(E{XfqJ_UH 
z#V{0)^Nox`EHbuQ!8)5)jwH#+KJu*b3ST*g2LAz_B(!9rPxO8jixw+);oxdLxApo| z^l@wV(5`C6!0ULzhg_$~WBhR+>v3d@o3sAc4P3!fetMiuNZG*qV}I>j=Iy@fmoEwx zt#@pRa$%_3J9{?^Os(RO=Ib3@BzTh94%>w2`5)Zn*lhCF*2;|!cIsy8@^J3{$}boS zNs1AYjK%d@r=`i65w8hcnwMSa;mY*XLeru>k0TFMT$@U{Rt@DfEk|cF-B~`*Ozzs| z$TI{!&Ms_C&QDXS_I!MtT0#rr@XVZR-4%Q1a(a)oyUCfV zB?(HbUPIkWjXztg9-2MAWKq6)d_f9A+xa>t-CFw$QU8bi<5fZ@e7{UFt&X6cJX^iI zOW`V&FZLzD1(#FU{y(=qbra^JD|u~x$>&WgE3YYUF1y$C_jJDqOi{p4z;MIB60TosGi1Iv zzFcf{5$)D*&{+%oHN-e5>lj-iQj7e&O6?;{q{={GMFS4{wu3VzinFRfp=YxI?;AI%_eRNLJsPKCf?T;JBO%`g|YNI2lVs5lw z_ih~gSe`fE-hNfOBpRQZB7ZJ~T`TJ^t_&bZRDj)LRIT&Eo0;Cu`^C6~50loF# zI&75O#>UYF>nfn22ZsI7&APj$c&vvHPL`e!sq8`n&p;P}DZiWbg^0}{>EJO)uW6-v z{}lIM9u+(uW*QM%5tLJ*c1~rESnSbd=%25Mui;}S*;|8d&Ubxm(fk>AJf90K8C~SN zcJOW342n)J&KFy}ru?v$sA6TcP;a-YqJPy^Z<2}_X!w@rHs|)w(|C#^icz4ve7wn- zEI#Y!$wE!SzIRQyDl3+$D)e|t4>!jfT^n~zs+;{oX>)_TrGrM!7Bt@rlSA^yRrIuO zx?3L?=cXdM??S&XXZigyZ_BA??nfujz$PVBPjsWWJ;s~Vp^xeW@8n`i-7HDvwZjhbUak9{p&KDF30-87vgf18;khnkw& znjo1V1|B)2-xH5cZEL?>n&fhEZk#+koQ~UcL>RnbP&qu{CveDUA-Y09lx;3`M;k;zAE$uv;P5?Pf)WY#^ zggfI6Z*Ft*eZ=tODG6JA@1I=w{heQ##%mZ6oId{TcY>Elzs;-C1O*(vs^{JcP$K3P zf?uwKpOU#L`ncD>{!aAx=!9NamzJBJNrW!;hj%lkfykAQf(#NR#NIW)ja>q=T`7=M zAzWu<98>^JpR&h=4ivm3+>-5XN_Ewdrk8@ z^WjjJnHf$cayCijf0bc5hUSuM8XQFE0J{rNS2_1ed9SA=6&IWfo?{at!_H1Fe0Q!5 zeQ(+T_Kck)!x;?#;SM0<$akl z`K-rRC;%|gsGts?F2v`)XCz=vdE6*!2^Bxik1>wI-i9hO;^8SUB%CBf=Dm4oA=NF@pEq_?1kQBJRK;FdInqoLY~1V# z8(}}891|*rRAgww>+_$eqm0#k<8xH>)FX3K{h}ko_SNsG*N zhr_A5%%if>^VJvQCC9v^=zkXFt(t?%Mu$hwLk3_9gUPBnIkB z%n0;qo$u)Vd0%7|`sI%?&Un01@1EX6V8*f8?QyH%X3I3v;0##;HqI(L>|tsv_J6yn zCEw*wmHb#0yVPPyhGNC_|BMhnn&? 
zjQ}f>7&-)tf zVwHxuUq18ATgKQTZtLd8OmoX%Gfrs>SNE?^kgUsedhjyhV)4lErP(z~64BeCaGbam zL}VP&C&Zd3B%9*HJ@TFJoo}!lI}5DCep5u-!*Y4Rl&^X0;Gh3@8YyK4;@2Hbq z;Jx@KvDzsaq#020M4ir4AtWSN-{3zG3jWFc1A%c%Y@ana;dbjV6C{~ES61LU3;o$X zxWBp#!T*|<`KbveW_QGLdUJ@oSN-zR9!`@TfR=XEL$n0ihlC4c++QS-{T*qyJ} z(Hf!fmB*RE?!D@8X+R)X8h>>#uor1Emh= z`7zmatNn)(B;y-(<^{v+py4^eSmeN-@6@;;@?YWEQ%|+`TYkP&Lft)9&WI8U%{dvw z$k0X$0vogA-S^<9Yvn2YZTcQ7;#g{HmTKspy`5F&tJ2SrHVW~46&gCa!-YnVvAnk* zdiqJDY^ci}(Q+>J!_J^^Ss$Ox+sA99?3VMy4{(85L9o?jkv(=0J^h0rTaF(2RwBY{ z>eC8g=2!Z7ze5rj;_IB?;^XiOyjR}usM~~o3lUq_%h+pLrjm(%yaNzW+Knt6rx|aS z*_9hy)c(7J;xNU?n})c!g=Jq4ujM!|?Kly|WH#fc)NxNje>pqfh1fV-C`Mj#-WzGc z%(j!W2G&B%caR|g3Foh4uJP(&2L}hBd<@8{z@Q8tXzj@ev-yyiuTnf{zMHG#MmZ&+ zOwZhrOWx9B?`D}`>-wWnakILK56H{0i<Ea(L>Tjwrq+ zsvKi!UG(yZKj==n?V@(UU)%Is0My&ZCv6Z6D`1wJ7bGT}=7YjPn+Sj!bC@(uA#F}~ zC)FXeQDGOSqf4HJp%mYO_nwCNEVVsX7zcv^WTHs0aq>dvQ|iRJr67QE_AZ&#I8ur* zs3#_1^%yiYc^gekt=Sb8D46ONhFJ8|re@8FW0ucxo{7iQIzOCIqcSx(zj(>5U}0v~ ze19C$PMB+k4;DV?*XwWh#sOf*jFKZP!D@FEn9AX?01MXBrMJZxooVYyx z?0Jd|3ve~^EP5~GX=z-m6LbQ9Dh!P!7aMm4U10-*MFc9YB+ung2amK7NJAzkX&09% zKa*!}z622F-B}q6#OPVHMhfV5LLgldXHQ^w?L;qLb1Ps0)ZtXQ{)dzsL4qk+z3`k4 zY9jFO;GuvzRlU5mz!lG9okAa9E*>+Ub2H%qy3B>+Gh<_%BTuo3iS($PD#{f>gUNE4 z|Ks3P_e!oGC0yPwRmvt54k<1!*ROY8KVI$b#BenP3GaUaIGGTANxcUx$S~peoX33B zoL`qYSj&nk>ZbXfT)ErlU6ac5Cih|=n;&DSLl}2uK=$XFbt;* ze9Fqp?Z1V%G>VVAnwy*3+n0MvZw+;r;b_VRF2^lOO6p<4I*d0(?^lkRtilciI$1(> zL)wpLLToS(1!tS9sK5TI`Soz?@Mve(!W;T&^ZKMR9E4#%U9~aS);?YiY80;Cey~#v zaoGjn>r0Y`W!l7UUkQB)&BFZ0YA_Q+52p0)T^hI5*wJ$Pe}X275X`W?a+}d@SE1pE z;B`S<@I@kG05;l@dWwcLJhYC0uG?YSnUcGQ2!F%PrjG`emtTM0Kk>SbDM;~6rr@v3 zqm=ldz|@yu4hQNEGwj=g%Kgw_BYz?8DbcBxYyiuzHX?FS=as#b4+E<4LEbL^z9u zex|``?jil@1}4|=se0l>;pxl1@csq?mS-_t!3Fwr?wbd~(-RZVk^Xtjx<@${^B_(or4dw8dIzZXP*X$RBPUYo^L!_jo?Pq_5 zZF&W7&rxzwQi{HWhb|1U>AD$Bu06|Lp6th@M0+0e3?=9p*}TtHxsSOjUUB(%2DVbq z3lF7Zp^t}#hm#YNDMIKN7=ybax{qT^AL%3?K4cCHdNx(pcXki&%GxjP;Ae%%1atr8 zOTM<5Bq1{Xf2pvEh3fVU08Le2U;jg0WslM4)#IAxAQ7@zV~&r)c(sU_hTx0I 
zq>EB~K}fbFV+y<5-GQBtXVmJOhu@i>M~{_hm2yE2Yi*nAbGYYqIoy2t>3L^RK7Ky_ z*UcwLM61CK!Ko#$8V6gPXG#V*h^r1Wc`h?&5D?ubK4xC)1J~o53!)y&E7ycW`b8z* zquz#Ul&V{`$E#bfpX6sz=hF@uu-jZLaIHJf+)NnG*ww^UnHh)}VBG;=fiHK?4Q~Y_ zf*tgaSy}DdgE4aO|Eg0UKs_0ggr7ojJ0rN?Ub8KcNheDZ1s_~WSCnvwoP~U7^4pZm zJqRjHMls#rqVr(a25X!AC@zIKlu-M8wM6WM)E>Ao)lna?t!Pe;vj2!^xMgEX6Na6SGRYnx0)P| z$=!8VQ*+2W3sbtHzf-~75|DG0+O@O1dEpl?74eZ}KZJBrAW~nYY2F=92yI=Z5_mV>>kg<7a-UsJznl)?hnfP^Kaz1QTs z8Y6@jVU(|9^;0M4AMF!WidjEQT!zZae59iW)ru2{P79;5z;a7zHtI^yj;H)U`JH!;hg=f6v*4za zQLrVCjetU(F9x&=$F)_|G+a0K(i<%lGT$(WFf1;+qCF2o?SjDgKBvi*3Heou^*mR1 ztn8UuN`@zV<2Nk)bP0q0nV#0lS4l}urpuIZM1^Dn1n}LRuTxu7uA>YGeIZuc=$s}Q%7 zYeH7?!NwC+M2xD`M3%pct*f+__lIv9RIP7;;zusu_u+E5hPU?W#WiXr9W&?IrN7_L z>=sb%?#7JB^*Znv1`)VZeAOOrsXQ!Az$LD2%if+ZL?%e#tH99OHa2wa za%}0LqP#{iTSc-{S|N*X(8x8O$+Y#Z)nX_ysqys!=%4KDZqAL(qhlxXILXidAd<`$ zZGP~_}o`J0W6&1uTL zUrd$)zC0QmC+5W^-lRXN>S1&<+7Zu>i8aLzT<}!kSbE80lGl7up&c zN%T%WPY_U}NU4ySiGO68-`c$1##?=$Xd*XD;s-b6V8J6~J9gy@%RGFblSQ+$of|D|a}5UnJjYuW)rB{GIy-_}cc zV=9b;TLju1LaN{3Ro;utw$fou1~LB>klKt%@RMaRf48>ZCP^dy7$0~_gYWj}?Jr}S zce(a+^wY?wfW&^zPT2{ym_9fHg~ansR9GV>9yz~GrZ6L?Fo!H_V~S{84udr$^_D&r2b1_* zYLmG?iiqO_(D{Nr6~`{S8MILVBHp|6-AuaLR(u~>-_3*9glhbYxrI#CMQQIi&EaB53k0B z--P!S7>Woypj?j-{-CpxpbnxDY|V`bi=A;9ldu0bH_aaqFafTX3glW(rVLH8-D3vLfoP2NP)~9#7&HLfH|Y#X zntY_gA8rCq5$P4y2FKTJb+U)C+b~*>K_z!tWlOC{O+$=Y|KEQ2j#A@id2i!ez>tTS z80hiW_iUq<+@z$WIpNMB|87k70y8APKhTe5Ig&hf7U0IhGC8mfraI_w#V|AYA4gN4 z=5qa^{qp4?tgacIR^oQl0x-xVB>MaNJ1OQC0s0<{5->B%r*}PkrpE>jw4uJf*?N~k zPrmRoETUE5W(=&FP_6u+wejNlCI<{`YJ0y&D;czZ=8j1uhG&_0WJAA&?Bzz6UcJp9 z1Hit@O3q_?BeraMz(ZkSV^dO7K6s|*pdJhb^YNlq0nHJtOZ~;x(4Gk>oCNb@=Vpkx z0-3p^sy>T73qki-qK}!1d0*Xh(fi@7b;nLlfwse<&O}_ue`&k875T zhv$HqME{YeL2rBX$NX!Anje_)q4&e6O)Od}Z+JWYbsX3&a%KK2CjdFr2mS!rtUt$| z{RePzB|DXLLl0lhCu=JDb0MWz!6?Mid5A0TU4Q#pq3GxgyA*btoA-Eh>FrLIYMEQ$o9TdYW41+~4jbXV5n-6m*6N8n7+ zK;eMlK4?vdsSSs?d6Pza4VYwZ?(v4mv+lli_6iL9uZmWQb8$nj3Wsd2d1fZd`sYkz zc1@dC{bYXnq*AL4EvV^s4izS9BisDM#J9NIZz*T2cpNI=}5vGdZ_ 
zo58HCu*0JB)p4+oufo`lb7{oB#~|!|AqW_T7H>R!pECZBq9Z7B#Okv zoTL{|>e3fd$-R%6g@?r#z?ypX#@Q6)W?-VkQ59Z24}u#7jMj;WjKs4zxhSYmgQgY~ z0_BrQHzvnp$-Cb~Q&T(%$r|!mf^Lg`9)qoitYW__J(A|^K-52ZXF}%{^#X|kjRU=% zePCi+hCUp}KoW8+=E|p;mC| z-qXR&ZZ5%zgGxohUVA?L>=~?04`iA|rgQJ0a3IX?qQct?TI zYG4L^lG!X5quUMATNlj8tn>+%a{j2fMwg|M^{m?~uNNWD?XJytK{ln$JXs;~Xx^Qa z{_ls8NA6ssLyMt~Ua*%uq`0)qe>IWs2@+!>jzJcb`I7<#?6@VUMCU{ zqv5uR{;8WQpAbaI-hArme(<11soQ2!`ml%&RpQ$eGQ8Y-4o@=wrh|W1v9TW)y%9#o z_3s?mmT*CYOv6-g*ukPOz7aB8ir=5O%#DW_D{w0S?EN*EK@b@cfopQ)jRniAs^YTPB?~VQm>r~x&Pe6( z(77zxcja%rUYYxfC@QX^9p`3Ae5>hKXTDZ9z@}lR^~2KOB7r%iy{!@_)f_^>Gg0mE zuwQmm^ux-fz|uAKfSxguBd}*dU({^UG`_HRu6x&kt(n(sLo%)NL+uPuUm-OP+E>l8 zw5KIN1yeA(VbRLfoc{c5Q%B+FZ@bX<>x7RY7%5B`*wH6Hp{E{}fh236vMf%L!o z{2l`7X}o@gV@1Kya;*kNr$YYOIb)Wb^y+=$tln0sib_g`KR$0;5#$(adT^xX(+E;kbxM!ES1+HT#a=-eSYhhO4IL`d&l&0pm*D8UEPnh zmJosxZfnpt>W;daGF-H_w=OO#S5Q>Kn>=VF7;+YPGqBoTFQXkC78cE!ite6vzfNdS zucoRrs&f4S@9vU=3Bb-4xc%;+2?reMeieSW!};Z zoK)A7X4LeWn8dB#-|%v$#zfUbv1vHSl4UZ>>SeCFT#_*|Fk{PaL13X26i6oW5UV1a zo|zh-<+$tbi2Wf?#$T>bnU9Na<3l+7BzM*cD{j7lli=dwgf7cpuUI9J2{Y)xBj^GF zjC7YwPea>`ipt7Jr#a?P-s6E`YU*lg31A?g-2B|v7U70=cKHt03? 
z_l${MG+m5pXb>?|`HtI1ZTIY^u|a(-ppcQx!wLEPGyn?!uEF$4lG6Bz7$(q(z}epz zD)+jOARM(+d~bUG@{m`&PT`?sr>0hS{FhQTtgJjo5<}u2 z^16S8^~8uM)#D@p6n2M?1E6A{*k?thvJ%n>M|frejSBXel_by_iy?tw0asNkpWb9X zp7l$l^tS1ISy>|AUH0?yZc$72?T$JE)oy_BS-8!!Z#7aaY0nims`?*>{X^ZHD z-aN&^(~j7?2*MIIYxNJ-fkO(>92@B*h!IQ9A|6I#KF6Jo?eNM|P)iwh5sH2SgxRET z;tch*S>pSH^J$494?3TBUdt$viu%Hf)hy3v!z;tNuAk!+bDMcqrZr}+)&;6cN$2`J z{8Z&QRu*{>dFo@KjJLkKtE;XKh9V2~@SL-l#tjm419DY7}Dm_DIGlj)$lk|8h1OzSV2$56V@z?mxoCn`u zJ{{)C!MR`&UO;tW-x9P3D*e@7g2-(z3A{#|}PJMxt>EJ*@SP9a1deyNV zL29o1Eo-ik$^FGmy#g`eF0#{0SOAm)LjR2ZnPqr=xhy%Ho?=9lTZrFd7&g>vOlC;m zs->#V~_2M$?q<6 zfu=D-*IAK7IEbTWj`Z+h_nu|R*2dOWB*XBiFQQ`7(Ax65!-`kSfr?*YJ(tB?X04&2 zj^EFkx}vp1wq^qZ4dbfWMbGuY+K1x2)Sn8|8Tks z5gQfPSEsFy$8KUsQrUEwK9Xh1qIYYb4a;Yb#!mf%AvZTS2$_%!7UoH79=Sy4(cH`X z(_x29{#4Nqw&t~>>iHVrG^@B@dzm&U&HnM^y1QCnfBLOSa_i+W6WfDAnevNZlx;Vi z@ErL}=@h*#IjvSC!(g*2+Aqb$B_BbvZ(^-`+}q)%&Nh8D?96LPLG+_uWU8@xnOoZi0zrztydT2K~)eK z2oxl{rHJKG3K!ar@6H+vyuE#DxOpCTelDqL=;7gcL^?_6oA~KB4}Q3{y-sD|qQfN5 z7}Z+xJejsa3K(BzPZKO$;F*~kF09VqPmaE8sA=G3=2Xj8p{DNaU8__Qy|UdyaO76W z2#`cgkeBJV-jo8(m7b1TQhF>mug|aAEDys?qU*QM!fv}>JA`iAdq;yAU|P%{CaUMY zinL0AP9%@@-xj~y4&<2TPdPN^5>_S*Zc8|)g?X? 
z1>m(}VPNO~k=xvEd*JHg5fSx85-xt+bGMo|K-5*3AHdCb!7Klta--|S}* z6-^ZJy$N!iK+WeSF+&so?z6<_B%zj~6JPw{(*xlC95>mMBbOhODwzjD(GfchtDYLU@ElkEix5^`Ie-S#i-?@__DE*8mU zRsD-2rU#V*0s|VQga1JU!dE?m3)Yd{tVtc2c$2uo-=LUn+mW_pAAbg%>s@)&m{PS@x4*oA~SIu5GK4wx~Z`39vR>J9{Pk9?R#KwBDYn z{88i_?pL5-e>Cs+J%Kclhte^xQ0)J;03r;E)757;-4nXpuM|Mv!BXXb29o{n-#;)E z6P$BTmSn@uu>Dw(8N%h1l>C9PrBES_R&HJw1!l@lET36Wdd@{=A{L-gI9hM@{0k#L zH$Ary5mIRHy6Iay1)yT-r0sz0qtbqAdlINjhy(?CY4&Y{YHT+l&^6V+@~k!C+C;o`;@167MR7!PR-8G3<)^&c^QFzzxY;O zwTf_HFKjg&I`;SPgPrnubCs%0VTe~MclG)HKI!Dfq+-P2DDcJE)w*i$A)t|{?eB%6 zvEkN<2?fyp5ypHqR+C?^ywr+bLqzDQ78XISLn_AE-zPw&XUNfg1Y+eXf}P; zCJ!R{{;=TPknCcui@)!ObT!&9QPBwKq<+cmUA!A?8AfJM@Z0E7wOh$B{v-_g$BCrt ztC@BrN1J4^1`b>&Ri{O(8i?!W=BCEkMh-mU0Ss9#*wIPoIwM|9#CWo zgj_Y6iIC@{+2^+Rs;am|MD%?Ny1@n8U;^cTNo_|*n~Ry0-aG$Lu?$5*QquoMSfk?7 zh90Zd%GY)@Jh=TaBuF7z`gg>&!GW^W>g^V>E0NS-w!@?MII(c=+A77qe1wsz&y{!3 z3PX7FWsI7BD|bm01qLCy~ZG7Qo@*d6W?n8ikpsji=VdI*KXe_tsLX|;ZW zhDSw-c%9^dikRo;B~C~-sNb;Y)`YgH)^gm#KrJW}5Kvkl?;SWkXs=mA;eVcd&EyGg zX`$s=6@${!J?k?=mrzBkM;Ozi_V=tuBC@SeF@8tC9cNNve^Tp6yN!k?6@+-flyK4{ z(VZ;hE-E#S1a%N~NX^k?G0atANJ~ql!Fj9uu2Cl!C#U00)GEJXvC918kuX!zM`t4? 
z`@;ItmYPr9)9rKA@%=z^Tfg|t;k$!M5ni(O4o+N{O1>{RYzTy%4|8M=^s84+K|;=( z)?8k5kWH8TgW05Z=0$t`H+#!eFr57Uq@S%&F@tgOU$c0bMg+J)K5&CD1du=g@+_Fs z24)lz9`?2rgd7M%U{cD$VYmhKa-nYvtWr0xZ3GI5Q(Z3p68iL0AdsInCCR@muA6o} z(z;IXMQwv+BCYn1sS#l>FsB_reWdg+$@1=H!d9azF;~uXcBWC*O4ek8skpH zI|L{8$C4k`?Mc2$tJlxD;)pIZ#w^cJfG`NiOCgPXOgLetYLk%CiQtiS&QKz->lN0% z=swQIh9Myjm?Y{X3cEEem|JKC=P(4Kc6oM8%Y%N+)g>P-cG#?mJI21c|xyECh>%jcqakcy(||6gy-$o_DT)8h?E z6!_yQjVZK}$fIVUX;|nx2kh~iHdmMSC-bimNNia9h|y8)6l|fcN4g0!*hy?RSQAW-U?by2-GRvgu{le z0vUmBwWU?1H4Xnq(^ZE>wSDbDPzFI9I;2Ykq@F#fNf6q66+~>LX!Z35rK6|hAt~ba@2oUVtT;IigYqRQ!&{iHJ_!{I3 zi|MtVSt_9xg&#llh`ZL-ex0FCRz%LFrB&sW7M7ijOwW?#E;0sUVl-SVUCc}Wuzh{> z2?P`foN4VW;2olvTtceGo*M%D^1MIZJ+&^U?j@4yoxcbr+7aX_&~9osj(#aEGLT2U zWq(b&A?;=Bs*|?iA$pnf*POfF+8K|BLN#E(Vq*GS&-zpW6Auk#2LeUjmV53+)GE@t zsPSD7!9cd?1=^;d0*8NR4@u@|nD7r)oSJ2F-2WdVU29Gk`-lc*GkV`+dW*I#oLx9+gcWC1r>^|M{R%d_$C*gb@eJb%d6_cF%OE zl{!8zKR3ViAE%ltJ@NYaiMEeHjYe@n{>Nm|fV*ZZB@<%@1eto4A;OaAasW4r#ZoB4ftc4+=n&iR(?uS-y_@l;f-V9c z2K#5$FHpx<4$bI%ynQrMG75)mu+f)(73!Pl=mzp-fr$N2pV6u@`vQljrB@VyflF(% zf@Xgm^VL3j3>Tm6;z#`hO1|nBTC^@g#Ho!9{D^0LEin@%9hI1SlU2U%&xgVMXjgrO z{SKw;In^ca_ND39sl_F1<@MPSb#WpXl#9>U$LrE$LhN$*KU^>khDx8O9#h`U(t=~R z4u^4WZOsVSY!8AfZ!d2QVNBnMx-7us@~EkJ5e^@@OA`d^ULjEl`KFgva-Z zYo*qW=N5WFI*_cae2;pXz{)^1wMU&ZRE=@fSxMeeVe}|`kq3>;<W-z=Vt^IVp-eFwwp^O5hJ0}6sim+v3^&~O_l_*VGQkT}?XVUi{4Mv9Y6%!Ay# z&$bPd6UeNNj*hUfs~#R6PIvse01%?2e59vG1;rZmB?swagPAbv^^4Mj$t`We#q$eM z%isHK&xRfrRIj|fM651=_ky?XjTu4tJ=>eZ%KqoZ0qMT+hBxcKtEY`5RkA$3p~lmO zyr+JrJJcA1Q2uZ*`S53XvS@>J8~fEZ!n^1KqwbSj3Ld_cQ7?CQOQ}WC#aq$1n3&kU z0scIiqf;w8i?xlZbDoqepk{B(&GQ&np@=?|Rh8AhmtyE^XdiEOqG2}m#rE^5@wur+ zwUK{NGDc${^&5uv>dKm>qqYb{vZ3(<-QT)XgA)_X4?BH+ND7h-$uOX)e^PyJC(Vjr9^dy8?A<~#UdS|5uN?mvU57OT!yp`fSLQ82LmhG9bC_+dS%7O$Q6ml48o}eRUr}Mg@n2o1lzj@7swe@oM8=5n> zP@jvtK?>;k*VG(s&IaFV*@ILXp9C8^*JIpr6V*!U_zc0KrH)h>tt?hMG0{I3X zY3pdeHu*CBQgeYjnS_pNf7<6p_AJxt$5s5DjoZeb`yLa><6J_B(Aer}=MS0r9ca>H zQUdWe#=~i8Z#=!4qvSj*UGBR%>?R-w)&-VjBRJ7|?7q{EBB&Q}?KkPgeYe*xjBk+I zR{C%@ 
z%J;xruEbQuTYr7eV6py(*L+zL>(54|qjm(s_UaDa=8$n$!4IB~0gKJ?n!W1#2LsRB zwKB8rb3Ii2Ah8El7e;C*Y9ulXyf1pCFAO7OT~yQFR-({3{U3%`i;Zf8?8il%L{38) zFurHL#-3hflZv3c1N!xrHqZHKNRY?+sBO7Zyp9fA1Uzb>73e*8WPS0rqGC9_;jq6< zb=b$p+wau({gZb`mygY!pU{QAXnetQ>Uyo2ca4^oFUO4SUF3k(P#ooeYQv$nx<#Z$ zo>7Udv71GqE?atB#(OOeR-NxZ?}uNz`!e9|6#Pto;i?zqtn$E3YPm?Ad|loJWB=G8 zGui~wB!1Hfl|nTYbqoS&5w9G!xwpr5>k`w4oxxiPDxq)H*!H&fsXy6H6gSFno3llS zBRBgO$OdgET}*Xd|9r~-ymXQgB!B%^gexO2-7(S2ygW-w%REX6K^a|+`UXW zi}FFcRaM`=f6q=mSwgY!d2HQ7G6u3RU`Q3z)A*&>xI4{0uSmEN$eBk7igge4a+mAd z38*UqEFTQsj;Ga{L8vGR?RKK=Ztj4owz-8khvV<1UhR5UHwz}P@R ze||3it=_GixETbZY3@yr+`-@Ij9AWjWR{-G9lXG|&&2rI_(izpYQl{r*(fnrR3+1eF7q7W_a@WVQ*Sr`<99&4BXWIYzO6hHdT}Xr$PzRaF zXQgSGC4(TRaz>_?V6aVrrlg?(_lur1(fHU{#_Fo*P}J670DkWdb^6_%V#xp&omaYS zWDCxqGc9f)_03U7VPdkCQrP&eNYziCT~|bUovZ-K&>ZBPe2Ja)QBwEw6Z|HQUt8zu z6t++nl&usCJJ;&xfLejl{D(Qx^x`IuEXvQPGr`%^F5#)w-6o0rX^A!hbq$qB-osV< ziNeXc3w^7Y;o9V!TVaP4oHbnSnca5EsiJc1MDzM3?k83ELM+vyF3H)5qdSI&E9&cw z^L4`lw?>dd31eep&_|@(g?BK_e*J+P#{%>3i$+}fme$szTy$U4V!e+u2Duy#3y-q;Ut?W#Q)=;7| zUPsXhYlyMq|BwqGG6Q*ZU!SHb69$+`(2+fQ^oDY8X9U@J@BaP!kmjB(Jo8j!ZI<*n zQY}>o2LEwMSp;O{1YDTku(e(=*fP%jn3)qud|>`!Tg_NiP>g7<0Ul zJbeG)T*#P*oNN)eX9(0W)oY-BT)%tJoQ%zm(bCtaZWFnAQ}NXPm4p-T{?-HO1Ug|; zHOls#4PQ?Qo`!|S^<5@x1mHK6@D?p#Q3t!}%a@!S94#-xPp0Q?;pgjCgEOoP%t$5Ae2C=$0QNr2=elTg@vc8-E8C|mu=m9 zf(9gwXbBL{@tkBebK6@LFB{hD)%&lG?(b)8ym_+lsOXP|tm-}0%(TZ5qKP%>K6&k@ zkM~)6cNVx^O#Ne(ulfwPEq*L26|=iR2o#m$%CnHJ@Z)bGBnnyU#3qj ztov0!QJgO2UmhS#8Pe?Z_s>>ZX_Cl37u9aqpg`fbuCzISvN_ z(kFa-ic1JH#r6oQ%u~01;*@Nvly@B~ohiY;uYmWCU?BXc>AJ_2mXLiv)3V2n9(-_uO<_?EHbt88ZkS}?M&1^3pwE1v=7ApuLmojL`?;GhV=V7Kc)(UvRz=mE zOF|`!($mwk87{ib!NH%bW?$7ZM30lzBRAatOuLh+BA}ET(RNido{yCc(ok3Jhk}hn z-}BP%o7ezLd3g6z zO%3%lK5_Hj{W{hImGiBh<3$tWoP&PLjggzXl4>f-ZzKcAyS{9k&2KbjwS_No_v%TA ziwb_$e1(hb-MLLG#KY5o#bN*W@gH!}l5eF&qE*&g!zo(3mf*J0Ka|NE=my?9@Wz-K zpFEdQ+L#|2|J8k6XTvpC7BTz5ajNRh-J8)@t*!pr+SqMKV`DMyZVpCLC7sUq572bM z7+?H=ySon#@d=}efRlj}jz|tEzMjVfk{BJwikZ* zfa1wXQPPi^#y}vPxoPS@rCU=Kj-VPcts73bUwX&-4ZTw%dLB} 
z13G+d2kYc(ziVr43w(@LB#h&x2kO25y|pWi+z00*8d%kD0k zBq2hCmf@o>evkq5tNb2<5|DCzUc3xGT0=vu+(`teD#OVNf5sUh0_N7s2w0b3#Z_iA z%y#qUjmW#g8TPD5{-=YOy#R3mVdnK0(xwsM^ddD@8%g?FUu>cmOxes|_3hU8Y8~%cO?KDxo<6>;9EU;!g?~R_^4>>M7G<;Jm7_z zA$}M=Ev>rA-`28Ii_Q&=jJ}J*q#QSU=y9=Iugjf7MUD8j^Jex@7N40VAR&2Pu0Po` z78&LFa)8;EuL5Pk7;5ZGXMAm5!VZ=j$G-c4dgoaFpfQKl z#(;AXH;E5Q2UgU|ia>#`3ewcWLzvk#>}t4&!{{BKbFK@9fGV`pOrJ8cfiQthDUL}Q z6I0a%95>z;Big6IS(ou{;+}{-Cg`;GIm2$gGP#e7X0NQW_q`y*LZMUM+X=XbefDhmy?UzuZVr8wlcZ}PgPED)1otOZb>6r+=_xj4;%6l48UnV5f$3AM z87$wwE|YlJ{x&i&s#s{Xe3r7nXK(kcpg3PsL%DC|YkRcsv(Jw_>6npVH!3D#8gbwH zzN_zGkEVOC(Y7ueJh{c(y>HU{M@R1)>7~9-S;{{U@!0mYRJ3%^qk^gg12$I=)3b(k_FDO9Ej2EvC1nSR}mOZ=^Ah; zE$Egl7|^%SbWI+vsI|2?hWWn3;%pmf+%}NzH7TDFO;!{7rW(RmR9sv!Y74te_KcJe znquVx?+PM+^7fD_4EaFoEU}kiJeg5; zzl=p8C>Ynr%g2f9fd1m>OVXqZs)1J`+F{D(T6pL^c2evBK}OaC$T%|_?3F=F-JUq zTkOaTR*3`x!8oSRy)r`ntXiAVWzf;ILnDVq1PMfMb~xVqiWeYxD}pZ?`{kv;z-yF` zf9xP+Omm_eDomai^U~5%(-GTWZAR?SMDwvJF`52YlI4nc&Q~s8`FHjp0|4_hiEs_FT21Rv{gH|4K*GN5;p+|(Gab}OiHCuh3ld>nCkcwMNh+{2t} z@*y{N@6KsM5&Mor?-#Ci=QkJL)8yt{OW7s!ChoH$8H+)?S@qan(=v>Cq3um(YYnyA zeo0P_t0*Ul^KG*PMgdU?CbN>$r-ctV%j>Ba(70Jp2SG(j(yZ@ z*^~O${n|dUoQrt)@FOxoL>Kd#y#zZUkAQ$|~NDpYpvbJnxp_7&kt{ppnJ zVXCMWAV`y4ZH3#AZr4Tff+6&4eyPewp8}gP`jl8ErtP_Jluzhyt0l)QFK$Rz7IVFy zBcy?^eMVWoTIq*0TCK1hb>3a-|M5fZ;N<)}SxO%Hr^#vu$Rt!B{BWhzKC|Q7a~6dv zvw5B<3%)BC`hQPoG?~7vO5Etf=rnnK(WPM8$Q@KP}Z$`|ftbPzJu5 zcbz{VB49c2b9E0caF38>j4V?9&1-Fgdu8*bJ3Nx&ub31Tza2e}xSig8QejKAt4|__ zX#eu2)cn+FiizM`w|H67#y+=ky?((EVG-*~`R`C(GZPO3=~K@2|b++8)IZprIuZKU@saDjtTHkGFT24s>kOonV{E$HzBg z;dw`)HapUs1y&h$+d4MIlUf*~+5`R^IhadvTWX~kizXl!j$r=p^Q zS_S@sdsm-@binL*YcJUlN_^kQRPXbLFQukl6z50oRdud~wg+Hb`uYfpH0dl(NQ|~F zgHM({Lx>~m#L3C2&lw&L+AAk)v=yZr0dP#r#*L2EZf#Sjrkr+;sWg;2?L zZVbFEpziSd>vwepVbk^O#ku3p^y`&c+7nliI4vR^u~_JDi4S(FZ!eDhJ&(Wh;73Q= znLIu&P`Aa>mu3lRKG)}@PJ6?C(Pt$9txMjsuEdS310CD!p`a0|#FQbbyP5XZi z1$QLlY9`oCCJg-kbq%TQra*^($?af7O4`|Fv9ZQ^<29^as&tWkD&8%b;;}k}@I#r;(fB&A%GWE`32ieCU)lB8IO@SQMNvA*V`;Ds;l0q^J&s%&n 
z=ZIvq`hK4&+S`u%4QV82w{sbP@r`V_&}uothlfpX{>4j_4o5&BRh+L#P81U;y+Gi`E`hU}(s#+%HW>=ThRtGg>)RuGlf>p>el+AS{ z>U={cS;AYloA7yjAzK>Tp;A1~RBs6(5{B$5sa}lgJ zfBoV%svf-EKB%8Q{AmFb=Bwm^k{E&7|bsFdV70E$t8)9fdt{9a6DZ`2n^mA5EgXxNUf=^UULT;AnEeQ zf^DYg%WagKl$VfHDcT(K90{FWU5Xnvh-ULTSq`#r%ym*sOmXqXKb1I{)4?}&QecQs zQ7zBUyTKL-f`^+dER|aXrO%@?``de$Y&qrS=?EbgHP+R`BRn?&H&i3f#>R%k&+y<| zVJ#Dc)#j=uwY9X6l9StySN0p7jD&|01Tg%YjY~>O_6W7iXR(J_=xIAmU{~ls2cn=2 zlA1R{cGPIA)nA38qM|+JO7c%I;P`D|V9>hUhZV@T;ANba>2ZDB@m<5$QNm%F159_X?CweJCJB47Qxi+Y!@LJ%&!m-$-*yTk9W7P$+5+EgVG4xg9Ow! zewV2dNP>f_3Im2CL4`ee&ep*21|}vtTGQM-N5tI;d;EdZSuw>!=xxMGhR!mfbc_Pl#HSR+pyG9SzB#>B+L{(Yv< zlb3w!YineZrC4Cpv$tnwWrZRezUCOvN(NEXhSjdm(CEPC=@Do`fxX&NTpZdFkIl%E z)TNh5W(k*;x4_WEgeS^q{CRA4suNxUD9Z-XHb=5XUUVi*q1fMJluF~$0 zD^Y4}rH{D&`=zv*t2j6~Tn6P_tgJyS-vNGFo2YrHHbg;0W$2j!mE6py?Qe~gH5OHe zL>s6%?ta)z#H4KGs^OlbPb)7G;fO zehXN^AQl7~@9J=IZ&#N~xi%9McXUj&O~sf(E3F8U5eWQ4L-V$B@>e>Q9vWhSuR6$X zc6VLN^!>JGTA^A5%7N$@I}s7Z-A$)-yP(uQiXyHnnpI-iGv_2P8%unTwt;K1DbWBc z1Y+Yy*4!;`urkfWfk?) 
z=$Vz-Z?Z_dFwW!3qNop**Jau7fgO5ozZJJheJz;q{`&RczqeJ9K?+hwz8TVB zMwhe9X?l0rUBLups}tzt=o7JVa>VaJveKd>XGKwtVZ^0WSfwJU=!<3UUZw z6B38jNfa9!^tNm!yRu+5gLXL+2fh+$j6lK112VjiA5)|Q>h%lV|6S{5idXX4L)NWQ zD=#l&wEp+!8*oV#;3=u8tULsF^05ksObIV%HMNM+NMa%{7O?x++1~#6@uNu8OT_b= z7k}MLY#|3>-)@Y3`@6Iy0}kpE;`Hn+CnpCcz8Qy9A>>)vN)I0<>2kQ0-)4%A7P0cC zfdW37PE!BjLreZ-Sdr;=QK20T(@@DNTs|_i6gRCPMmA-kU`r_fZA%=C_2*S@5E{vg zaK{aK&GAm_G7i-petwj5=KVV%&7a{!gW@%cTCg*Ha#%@Dcf5VEpt!!;WiiT-+vKZN ztRXj?_DoFI7kB(lRP2op%sLKZ-XeH3oBT3|B^GYfcyDtSe5~>IMKdSfHdz;%Y5DRO z=IMge>fYXBvZx**#W_AAG7lMDU0oVcXB=lgs}3;%F9#K-Yfb(5E2ps zBW5uF*k6n0*1)9KQfK5= zvfjNsb(iaYFa`fRTDrHl2M#D?C&(&pdwzr~IS2rgEi+ihe5V?Rrgy5=IjCe;p6Z%hNu1dD=76;Tl44=XDcb1@rmwwSQP=RHB0L2+ zN^yo@HV?M-PzJ(;V6i8CRj5Ew?m8xh0U0$5H`)zgxc&Y8@^Yb!MG9_1N^p4B*8c1; zB^5ObW{{Bo0A$FgLa9OCrJ!Pjg)5xeO*sn~Y(PLjgp-}!nq{WnuZfA0PlM+2fh z7Z%7yeO&y^*G7FWgjB6%`ONdSy(N+Zk9|EJVRz(6TXCeqSq5>#4ue7(Fxdw0O5@$6u#x4V02*_EIfke-_`+wh}{hBE|erSI`K z$*uOJtOkBz z>U2RE{wCB=CJp}JWO47kTI}~HBy4JjThRf39YuQJh(B1L=;`TkS?s9;bt^Oyc6N5s zC!@XaziP}{uR0@ts^MP2BjVGkIgmll>%5wH>T>hNjFIghvCJ=eoeIc)iw230wpa z_C!z%UPZ{n%`Nrk5TRkvLqhlj_#H3+3Q%ktGkDTux%#={<;CzgSn zM(>ul=-BD+j`Z(pn7|Pq47*05jDYe3hVUp#$|c)yLt@i?HOfJVg8}+HXK}5Lo}L#x zPEbGM;^Ml{fNoJ)hbrjEDKU@UKP?&iF4r8vu>t-DimwFJHc7W@c7ZRdo}iRgQz*A*O5I zZ$1BRjN6Ok!SaQAH3DpE)Xvu019zI;0o(7%(w{h(U zE%r1wzbp7*ELLGFCs|6wetA42!cA|LwqKkvAa*M>OX6|1WV)|m`mpoXPqab<&PbNY z^S{0f>SL|Q&;OntOvHSxzuiwKhNa4S=%XY&0?Kgek+BEtGoNQgtW`T4_Fv;3ky&_g9&w#iN6=D^UQ#AQ8YUglbHVBb^Q9}`{m5Dzm~9QBpm-B+MQs)*(-UFCeNg-rUtDs zy2BMjx|x|K5PAvvKHs(9Wgx5uEkA!UEO?{e?QSK1`@@q8ZlpYl!Uh#nnTmzAZ$zWP zMIW9^#-gf?b) zjQT1Dx-AX`(g?O4G zt2QbrX&Al^S5ntvcPa%jF)Z#|NnP-$6%JV|#}N<`CMvPO(^Fi3Z0+w~Z(Iz%OfOLU z1TaX4Xaf~h)gW3x@u#X#XxQH2ior%rt;utE;iuFet9=a2{_v3(0x;@~aA~EQ+QV_A;T9wYV>>19>u=ZqlNd7)P$IFD7#! 
zgX>eEzJ?>qfT7_^^(lTbF?n`JCk`*}>!ZS>`2-4wsk7y{%XHRzN8`05NyTg_OenOG zq~xN*MEQvAy?Bz~Y>K85T=dfNvYwIAZKz(u4<-G-$RJ_f9`JYMGx(*6(?nHPia;SS z-oLsBRpA3ywIAb9@Lf|d&(@yD~i@?)D^&Wxw=t)#Q|?N;Yk@7m37vlsjNj1`?(_7-<` z&Uw*T2%MYE>jY<0kNuWDKkaLDS-cq)MFNnBNqe2w&ifDlT@|{km~%8Z#({Gb+VN9p zCZ08Wo3_oqW3yJfrux!ezWHXJ+?LVw6FJA)N)VN@E&)Y3jU{6Y8QyfWsy;I^B6QIOayEpfRYCvb!%$Cr7EY^JQcA*$>k%ZZhO(q*!$q>&qEypjuS1$jT!*rHKJNknh? z-Nqo4tWxGh7-J0BJ%ad_%Jp^3a_~6|#Hwq^be4r9Z+-l*{%$V<@uH#Jw~3&cI!f?C4b-xR}r_ltCC8AO@FlZ~(6}#p&Itu%Fo8-K}+5 z)Bs8t=z3A2KXy(5tN@s5$lA@?`lCi3HsVe90+e--O$Na``;x(M9C8By*56M=Lz5O4 z$0U_XceQO~-O-Vz-JAf?$46tii-19uT_b7kp1eHMnDsS0#rf1b1F) z9t2TcvLjTqyVqr4;NHh#Q93sE!-)UY6YXQtFa|rq_My{2X}d`c;&(&##UBcK)o5=l z6)GyZIzp=eI!g}l%Pd&Ti+mI+Jn7jyDik4aW@ZL}Ua?^nB(P1qecu1rv~(2mtQrif zoOAQ@^WVLj0}(k}ymGI%v5b78$+@4{xSyfFc*_Hu#Uj#U>3$6JAD+j^UM0IRswd?p{EqCVk1)r%Txo* z1TO+_USlZvP1}^2Ef>N0^r@%yVDtf_vqS)61H1YDy+L6WmrC}gm_fibfz3ZeQC?G1 z1F{pK{U8Hlq-+bPPXb|@YnPys+kDh1!}qsa?{P$h-Po_m$@-_=)Cnr{wl2Y!!EOjp zGHrI!hSPZ-B6P!HD-V>zyyk5YmKk0P^GPP4&oBN2-aZZiHBhR7$PVl**9t2puj1i- z@98;jrayNCsIaN2>ALjkCRAGiP0h`knNnv-@P-4L(zKsTk%^O&4+A#G1Gr6CvX&jr z6PT14g(@$|5pX~ep(TKoRY;tYrG#b=l#P?uEWY2=C!ioUQjBkUm8Z#EE&g5iGy{s* z8jubYhDCXVmP;8$`8aA83n8GeH`KW9!@f;fN^|o`Z48d=u0hE>-&4KBfN{&$-P4Dw zmbg>LCt?-LGxLrj<@OWS{K??hv{)^t9I^Fpn3pcbsur>-A#K`(0iw5rF=Js=R1|nH z!=NkC;$pSoeAT1jBwe0~$~6@i|KZt>aRhQ2zLL83Akf2L>|WYU#SJ!Qv9Uye-?Fk3PvMPOI!yev(CFLBm}qJ3GiEF7UW>r zPE?nD`t%80tH3u>&DfWnFwu{}wz60^$gy>f7!R#Af{X5Os>NhsA>7-g4ZdFG3FISR z;dhqUsH7imHn;UCK_C)majGsOMraA67DWMhT>Et*?H-b|amP$_y;1rVx2*Jk_*r3L zx0@tu#$s=}_yNSe>2W2#dGkg)=WZ;^5zue_pnQxb3XbF}`tZT?V8R0nK^S7*m3X`P z)SqJ5DIHIJpx{B_@n-Y&($dnT+Xg~uH|2r}kc{dB=m)R*l)jmLR?5}M;^67sajqIS z`;5F1vJ>W#VEiAmoh(R4sUk_Ni67SH{bU$;+{u){-ZvaqpO)Ji2TNgUq zf}vxeILgb*pPnByBz4UrAc$)VZ0Eh82#;q||5#A4y1oucK=9(HBmgI?6_aE#gqE(Z zE8q?QF}{8K7Vch^VLkLP|JJ0`;U55pWRdC80aDO8o2Mlv!Z$`T!qt}W+I|9)s^VgY zr%&HMD7ekZSx`{G$fHC zz`qyR-2g-W!>q*y0oa0`x;#Js76Zc{$ngUUrvEI)`un$53W`7K{`<_D(r3E5hD09d 
z(0^GDZbtMK;KRQ%Q~=St)eN9R3d?>_Y+UCw+XkN<79!N*98yyCd3lR~d%-*aT2>-b zB6C9{qkoTA(EDd$*+7R33kx&$nup*?x~N&$=`n=nxP&y|w+ACbC}B-1>}lH$*D9v& zB9&Ryc+YNn;&qA^#POF99K5XoOCDR9-er|kR32z+EQ!fG+gN*Y50HTW*XAKvYpC6&n zz_f>G<02r9Xln9sa~Bm9)Bq{;WTSzen%Wy|eIQj-kdqT)W1xjUgB@+ax@@;On2z7=p}*=lyg-Y%XeroOiWB7oP2!&$5=cOE`Y{$zvUGPuz;};(3t_45EK;T zT?hSr!B7|<$jdo7@fqiqm-7df!Q1O!TmAr?qE-rEN7JQ^pk$)~2FXo4u>D9Meb=NL8GXWkc0AnR|JdC5-)oyh z@TcV4TVab{;oZR~;@`Nk2&KS3o|BEvIB?1#nvGNMqX&?(8SQ6svA5#p_;^TLmTbPL z5@G606j*Q=5hcUc3A|rX7X;AVqB~|1lv&V&WD3NT*s0C3#PSyp8Su~@t7pID{t^bt zP|gySIey!zZJu`220rK&n6BX9(1^SLJzC7%81vs-xgX&KFte#CqjFqR^-pYiPD~7o zG7v0)kd4Re^Dye5jqV3TVwerB%;$j|Pn>7KTadL7}QC0?bGS5&>+bIMe z)f5pC0q_#Q4-yxrJ7&Q(9r4ZUj=s)+LMuXw>T_G4db9)uMbVRu4Gp-QFBJaLaB)#= z-~O>%w4Q-qVecZ*yW{Ena|tQ;XD_>{v#m{(^1Jae`_xGZu8OjSnK_vCdNBD?Hw#XnMIS3m*yuqU*TWt zQKG%`)BC8(RoolkX{a98#wu1_t6N&+Fp2r>Ezx zbQfy2DKqUv7S*yn-}TQ|8XK38OaE(!+s>MNu;_!-v+Fm?f271%U%hI(!C%}f^>+A~ zXU4_O0`1mxmP&GMQNBgjbdF9i-|>;Iu)x__sb6uC;jf>gN80^V#rcmiS5_CKAa9}ocl#P3dNB_CMgzXM`)LidtI8A#xO(mKJW2KyaUr4aZR_3Bmr z$B&R8ItF`~X29`4NeM3_CY2!+G9VIE64KIqk9U{h?f~0AF)0bWqmsH{a8^-KagVM9 zo@O3kWPQ9}0C@P~?Cc$<8TsM5V)@`ZD0G!>T&&pB$vM0_@-fZ;zk?rp<6B#N6Xad0 z;S_HOuuP0=^t4Sbwj7MF_xT8xRWxj#NSoYd_qCp>oLNb^mPyZoWMa1X@x#e{HN{{* zP2%1}pVj7`l=oI=?RS!QKZe!)MKXGM3Sj-xkO?JFaZCL zSh3)&d_>*whX1LAEQ)!JoQUb_ZSpS1qMgkN$awsRXj}HA-TDUs`$H5Jlmo!8kmv;) zu2Cp9?tB9=Ip>2ced23Kf71zgo~GV z^F^o>QEX!%L+D^7WmG$|aYW19%Wk5w(kc>7MEoZNi>s*U6`_#z>#K>gPY6gX2tL|d z$)mWHGt1}u_z>R!-IBlex;RhK*K=vvMwnlxo%G@T3*V!e`mi`sW$sMAvo0IqavG`k zue;)M)P!nsDnGJyQo3$$3x>wDTJH>6{UuD4VP|Uz#d@z~c6a5?RMA+;LVj{Pw{dz( z8bj`KcIOJadEa+V6Z#pai5AAI`6HBAx70DMxm)Ai-$DzNK451}Nu^0JYaa%0wq zxyOJ|(ahW&GKBZnCr)JKGvSDPfmJf{35mvp!sBAd5rpc@)rsK4k+~J??tXwmF=3FH zYHx2Afk>>5+gw*S_xTS!O{(L8lBevv|IVqM$3)!kwlEX>w|2RkN^f}wZpJtSj~N(6ggekJHa(;%K2HF^dqO{zIpMY2R4V91 z6HOjqA(R9Zb{}zt)K|v{ni^hkC=ue;?2LThuC5-e9MPM-7ep>cLNa5MV&JNh+c;JO zleK2sr&cNLXuGEn7tike>u_+*Bu(r4eAbI|0fk)F$Ra!Y{#yyMQc_bAEzgpAy%;dB 
zRV-kP%*$iRzOB`wH*b1XIS%$>U#Qe!4tE#E#rz>VOqzsrJ8tUTiq6~~?jF&Rkt&gq z_IFa%+ZY%xv-6LLSpLN9R8{PBaftnx7-$Dy5ab2ZqE02_C8M^6UIh4l48~e;Aoj zk#`3sB#GtP{;>-}c!2hTPtJ9Cy3-5TX+u>8Bh&WClVs2`GSfaop*2plH=WSiz0d(>e+Gtf)puxj(PxaH=qkh}y57{`cjW1CeqO3@M`a zpLEz(d0vnz7rhakeW0!ECYSS|P$euvtK*%1!pcRFvD1!mY^!xwN(?a>@ek+mncaLk zYqA$8h_-pMxpgjGkO&<+{`8$j|N7zy=_hxsRcx1DhC~RU&|od}^{eX~J9k@UR@OAM zn7}LNPN%vIkXVll5)gJj7}Ng!`c zxg3iVOMF9;TPk?*a|xdyx4xCab*#;u?cJSS_JTrbNA5)p<>g|NM`tn=#$c2|eCxCJ z>`V^d6`FRu<3IV@y);Re5PyLGlOe+{#Pu$K)eh4)^2UnpqV!o`gXEKUSvvVYp6+yb zouJ^LYJ!QPP3P-dMMag&PBBYp223Ki(eJ^**YWXpob7pI8)wEAjGl%0noi!+&yAZG zDK`oKihF#3x0iP;!PmysJ4Ul#y|4ffHdEx4K+le5dC$xH_2d424DEY@Etjts zD16(x_?fezO?XsZvEhHGH!ym&`q~L^8-?OV)Du}a5DcU*77r_IasPsz-b23{95;(oxQ13S{Hs;Y~_c4<%yL#Xu3 z;^M(@!+F`;w|`*L2i+FTsh}pdg~_;-#@egl6Ag58-Vg&>UfDcwR_?3!LB5* zr!J$am*C_xta0yFaJT~@@_K+_4z!8F69+>Vq6~0gVN9G1MlI5SdxYy}yZEReC>QaQIpuL=& zY%)Jdotz^;@GWS}Bkx_Wn%arg=+F)d|oP%)*(Tytf+`+MNG|6B?2)VmyC-_xwI#Ug!J>sHn8Y7^&hbn zq97yt-;9R>ie>ZgrY3Y&^NWUt0PvPrh4#I`KSC|e=iP|5Yx{~***2@X<)egVJzzoT<=Pnu{I*CAFC&CbG<{ULk1 zt|9vUQQ_ftC<(Z_ z78VxDD=HKf6o3o&_3Kx#C;-w5Y)c@;jJ&(^Q2&)X(mZkK=ic5Y#vPFRd83ybV?#>4f;eUldDV|D@%aLj5-8OHMJB#&I> z5rMwf69V2g34}X=7!fTsnWW`pt*qy$>8tLiK8xudfOkITPaa=p{I=r@?tj>Odq1Au zLCECm?oR=!#QgIk7+r)6);E5$NOK~>N_h{P`3bb65SLT48j;-b=Mtk~l{L@^_eKZu z!1bP%K>eC>Pek+j;oqiw=$VT0#-73#cu>&V^o1~MIR><$Rsu~8Ku#-J{A}@0M@+x~ zse~EgcGV#v?eSik&6l$LVzl%}(@7~9ce|a-uUi#92Zoly1zl*^sxvSeh>4?sX9bNI z%;hW61W3GyajsmE+WA{5=lkazyL_%L;p&C2IIkh3J8?h3ByBqUBh8) zMsOizO;7=3DgDhG_UbC8=46L^m+Oq7>NGbqJ9e-I>WQ_rwUrgjv`JHna{b>c z8vMu3ZvZ3a#*G`79V*MqA3Rzjd!GY=Av9(gi$rL+_I;0Gnf2{SA0W`|2`a$hPqPc{ zIJm28n1u76h`eI#1~oM(md9^-qvhwY;Bx5~MumnhHGGB!3_1+}OW;*hq`@97qy75N zHT=>6^ajH#c_yop2L)s*v{hm#bgs_m@F+vmX2_0ZDE~wY zl(_(FBR$@E_?WQ!XOagr8b$t@>K!>1d{XL8VPRq@* z0{G^@I&06tt$g^6-X9llAV+UvVxp#o zvc(;hk_^L%$N@_Lo-0S9*QJwFQ`oAy&myIqJ^-5zNh)A*z?g^?!SrPXiuuN-HZ9+p zt~EA5Ja^>e?CkX%*2n$m!Uk3xITT0@^8lL%P(D&!PoH?Z!4Utqc=&ev*dyRm2hfUO 
z!anvW1=?I|U0^AcnDOMsSUl>zh@7(3Gk_*#egE(0AlZe4TJruqT>uUjl|}G`gc3B{ zzys{zx7sU#?cG5ba7+Mn$xti`Ka5@n&fIQK5a3H zmInIs7z2zPDl<=?WQ~;J%Avp{0i;)ZP0dMJ75(A~UEpmtJ>7dOgf#Y2{%VrhLoIi8 zaX}m{U8#*qhmUu>qjU%91RJgk-U1Bxf>)@JAb|dcapAlgF9C<<`}Yc1;i=y+9^9=u zr~eiDd4O~?A42iCtcpYsVeXq{gzI04Q87E$lW1z!y(Q0)E}Z(jPjFiSvhF zXclQKu@wHd8OB7#ST^tw&Yh|N*4lwgG?YWcdF5iEWx(c9+G`OJAu1@0^+<_a-`TlK z>-jOG{GNx0+ZRi-5B`0!06I$9(@%b0UH+ zE(RsXlL2Tmp!N>Uk#I@Jf`ot-3J^ZlX&%iB*($O{7yVH+N(JgS zdEWrZLMkL$dbM$}`isjujjllq`iL|zECT~Dw&tBG=5!+hDd&Zz^S$dM7bLB(axlW_ zahG zmNNfO0{YQhCp?0u&ISn76;Hg=pInxu9$i}6_h<%q8~}-wuE&dg{f23v)7WenMb&9j zbLJ|pSvwT}3~B;kPIJ3MZF#`^ZsNauD@JT0Egfr965d=so+1VG1o5w^iha|KH#is) zAUy$S>~u(_wt^0YN25MyD@EXN;pFyK90izrgR^HYt@~t^xiH*X^6fmcco7SV+gaah z7zZ8R<4m_JOP|_@2FYc?gG(lH1O_5DPj-Q21j;M|5rLH%5}fq_`b%I>={C343@ z5b|4H56omhA^;Du5Bo=1`ei{342;>?SyUgjl&XA_1TbH0P+-bDV;*26E$OY;@(&0r zj7X=UM1pSpyo0B?xjC;p0ux?9+AzSmSlOJHSGzh}oHSix0nMiF+jm?nowBwVOhJHh z<)T3HZ=Tko-ZXb>A!TyT(DrJ7n>RG^ICgX)!Q9(-Fnr?@h^kgq#dCUrYxX=G?3(?97-?e z*mnHwVL$*aag6WkUb=SD6RE??165+^?v{)) zYi1Q5dE?0osAG~c3;?}{#3&g|3W2nR%}?K}cI4{ue1s^>htr5OnRYu?A(g# zFo;~z`M%)%m|#bxg8N%JDIGYRn&QpQv>!bTN3WDt5{^X$5#uLYXuj`_+qB#^1V4BaPT{W!CpLLE>uzVc%55?=xWs{>)k=n%e1J@a2+$9#TxVB{`(Tu z8*w+?-KqVy?84s5ok&N=>6%D$lG3E}$KL^T?xXKb>f2J8*XnW~*HUg-WcF|7d313L zBFf}xbE9a%EG3`hEBKe6tS3+KWG}F z%L9M&fET(|FTp!jZdgy+@E^#+Bi?^bsS{IEvBJc7@4?I_5v{UkRUFFTKhpob zf~vb;Jaa=PBaiNC(&VjkH5RZfkL*g&=88v08iVOu{eJ5yUx2T#u0$ed}lR zXJ?AHxX+Gv-lel@>3;2RSWzNv{9VMrGb`?!x5;e&2P4lpy7lUIEp@tufBUWO;!~^E z=_sdDCJC^?8)$pd-x&On-}iuY$# z2`Fc6Y9k0@_jJH)S~^#?d@9y-a@jRnm!#@3QUw$jMn+kK76Ubn`L0+}*~Ljl2Y2hj z0XN%YWIgK}GwL~o8X6Ev%D`+;TRn-T<0hqmL7$69$fiT_DN$2yoA}>&V zX5g4m!Y;!UB6+LZZ`*IOpRN2%K3a;JCPc-go2H(9ucPFLB1I zLT@m=e$T-FZ?Lxhh4I|t>=vrXeT^cvy|&a=+Emf9zPNbq5&V;yUY$k8S(#uNOF8Rl z14fqL^6u9NAl2{4s*tA!hS;0idt1T{Y|~3`&`{4F+aW#(lP(-gOZ?rsH;M9&tS$7{ z)!9uDI9|`o!`Xm#X_0*D@}ECk1IV9UdzmKuJ^WMmS2_=n5cbSR>iNm8a5qwKC=U`Q zhg#Wl7#@)@IBcMVRxxj^uY*N$69N{De+DOWy8nXaZuQ1bfx)7$D;D_BY~bJu=31ZR 
zh|`z>E@@eyR5Q0mB(KX`v|&Fc>t;BF6xXpjb+6?%Es?E#6+bL@MElKlrBA!>YQk() z7q+Fy)el?jb>15CZE@HUnP6_^dT8>J&&6T#i!H`jcR&g9{>`PuEO*W)$VBzA7cdZ- z?_28!bJ)u}RJGS3MUq#u=B{ab0Fq7|PYHQ3)!Mkf0C1ig$;{1BLrJ9!{r>Y-+-aDm z^I+Q7e&4-q{Mph_)zI4;FI;BXdkc$r_?AV40s#%ufPTh(p}DWuOzU>M!P@d?RK2$2 z-`@xO4(D3%lTESWkif4wPhpEBXmE}To$k%Ix#4y#~*Wh zwZLcSa_9@`P;R^L>dEvs9XPvH-|i1MC>88~gYDv4(tf?~D0IBCrKorIXMNWHerbuB zVJN);%?(}L>$X-)PpkbZwW@_I?4hIrW|7--#*9sP+gIlDg;k%8Z;w%jn|WD{F?|ta zqw-OWHRY=Wh}pSWL*I*+k)>(Yk1S--IZ$W@f!u+oDlK1sE%Fj+AIG+T04l!4Wg^yG zIUsI*X5PKHz3aU+cFDNo;%`I`eWL?C?-Q1Ql; zbA8QeLVLEe%+K0t zgl_7)BU^mlu}7lm7sgCi@Dl2BIy>;j#qefh$NpFEteCc=H~wA6FXm;XW;hK`Npvgo&)7~kT4_a@h6PYn25(`F4PpWW@FBa+r0KAxT5gGV`2 zrAA>BzKR)kHja7(FYB6NeOAU+6J3q?@& z;G%c7`}{uav^M~xa7dg&{yU#}zuSj7^k@)v__7o8M}^aKe86o}@c9m|t3G94KPmr) zS~GPV=vK`J2w)b-@09N0^)n`r8D^^h)2!e6;P07@9{gw}L>7aAcoJf$rDfpJ&^oq( z|5l5ryx|I{=qwqux87Re=fH+52iSB}wBwpROkJ5Xq3GX7QXr7V!zS}afh1#gyJ}*= z=Id9L7iSrdx4#Z3Sab^R+aEsu4*gE{em7zb+g?txZz`@fA|I*$PP~0_ck|oz&NwCe?~?Gr#Vh3&1T>E3 zvZwv#`gdHw>iR;v-*4v!fis0h&6Sry?LTWN)BisfK$tv6(nzOvc{m_oas&Ubd)mhR z@m_K9WY20iaOE(aTGWJpq~?^PRy&kj3AbF*z8ocORT^;K0q=3t|Mj#o<)A*G?ck)p zo?2C&mQGC@lO*1BF;Fy9MVBwC?@4SqAB^-aKCaE!jFM~@7y0;ArR%~2#&0MjM?5U+ z0m*8v#GMr<56RArhH;dih!Msf#t)=dPUW7MtWL%^WP`-WBwBe?5gN4-5@ zX`?b~%3+$rb3LJa(Y=+DIxFCEK|FT9OKY)p2bI6?zseht{haNS_e_M({?&J^q-;y2 z^2_D=eKA<~qh%+(WM17I$)XK~Eh+gTd98AscfYk` z*;waA3W5Zi^|iN%`HBN!X@l#s)Tq7SPjoQEr8k}qARf62nbxKy*fCnfha)3snt>7bzbr)GzjOv z*HKa@c!nTWMoN9oPw}x<4xW)4DI7?znih~HzPWKWIf+5S9GUl`aoMs4h+n$-d`A#; z_6c^sZJHt}1R|^HO4V(pf+&Um5IKG0%`&esUR4VFJ>-OQcJII3wqWT0aqKNEj8v(l zGSdH05GIq;*6=&JJN5-IpH)MBg5@EpaV7r?rf0Lel>{-Z`%;AfXU*;Z*-81j!XbrQ->3kD#=Ew;>*L2fX*;7sl4eb)t<@`)wj~62$cUYSWh{H=p|L&$d?1kCy)8a}d2X zsV8&up+x3HWz9{f{l`OojKNyMSZT#Q*L;2J93)-%y61HJntNUlyF+FRfT{CuvhqMPD4%6Hz-GQzK@n)hsMXQgFQd`X3S@%5&AD3wxf zTjSis3+O`x8OCnT(SNUoFINv)GW-@n=q+^7-bnh>qrP%hc6Xc*oD^S_$2}_D z-Z+Xd9J4X^Bp&gvbCdM``Fm=)Zm*MC$+7X!NQ?G%?{jcxOt0VZTS?{nS}fl$j}1l3 
zO$Ji{j!KtO0&@TJw$6`EJEM{7!mUT8(Jyyq55AQp-g|FEm}8TKB~i;YN;S1#(LID~ zuT^xbuZVFcq1J@`LXPFJH!wGL9)znxe0+uiM{Om9bx7VD`}H4GIcw9_)=Gh<?!=-9?(*m8nz7$~Q`4oU3u6@Zn>U>0@$F z&`H$QGpSCcM?xN1D3-JI4vPbQ;R$ZXv@Wxjz1h57a^c~7%V_cP7e^TWH?4v!w5EOw z2Q=Wua`Op>)%JrMfBNEa-OJBM22MpZB-qSyzft= zZca+i+dW5aX7$rQti1|ppI1hf#Mh=`Q#UB&&yzgI~7wyJY@jKHE%=a7cwaI-0N4Oo4PCw z*fQ=+(md_zm%gy{ByVVHi^ya6T%spo^}Au8EyHp0Ad)Fk^nTp7{^utIT-$-?e*f)r z1av>ZZy~rUr<^5C5zgOUav)ej^k5%%pd8$tY_@C!^6CrTD1s;Cf1UoO(qVqPH+01(UFSg&|%v3p$mS!yLXXxv2*l!Fp zNon4@z(K~D0M0wOR^tI9>WCcJ)hXNF#F3Cx?)`bOqr1mx&k90HCtJE0&PmYq zfTl5tl=t=3<5h-AxTwqkr+Z_aEY-{lsOPoB7;A4{H?@E`EJ;@}hx#1jyPVwYn5a|7 z?l)uw_xHf4Q{b#T9=Ab>ZYEEttV?u}n85++#@m>;| zD?mDsvYdOkVK0td5)!!7Gh+Vb!j-!Qm9+|j2QY}|PkS61RLHV+VrLj81buvc`s%@Q z-*006?Cd9>lCrMHV?rJ-Z>n%>rrS>aI*CGTlo@&|Qwmkn<3|@gA_{(l5swU~ms_}_ zr2UiE`W_BpAlVjhuyfCbeI2T-;+BF`*z#ax?&BiiL5UvNv|^|{kWu@u7!{xoDj0jx z68?OX)&HIXq3m~Yq!jAC<(SNO~($@K%XWdj*M;5qzY6u;fj=df4Mfu)!o~S?-nR<2HD}y(o6U89>SStyZlhy+ef3x=;nH63MB1P`u)P2$5|77qgF3N?*M z^;_p}z15w?3lIpzJ1rO#6Ua<+N=RFZ@$ZI@(J6rlm4RRW;s9s?xj96CqfQ!TK~JPa zz55y$&Df_tGWW@_3cSk^J1{Qpm+KBP-6F*fw7FQMvA);t8ob3TPBO7VT0Gq-6`r4Y zjnDPyc zU+85}48o!gv;~X~&vK@=|1Eb2j7t8fC(Cm-_QLD|UmcfVS@k0%jDsdm&3al&UR?V> z4ypLQSDE8{tSPxQ|8Zd-Q`j!nm*gvi$HGl>N*v#SUNj8Z^~|(!fs;5qagGI3ZgE1e z;&X}9PS3xxVW-^nwKf*0mUQ)16v?8 zvoGAJ4Segt?xd-7j0Ci8#>LG;)jz0@y|v@=DyZ&_ZT}Q&$bd?@&F z9(&5WzTw1ux)oHXE&oa9k8gRYt!3<7$@2NB4{^lT8a=$m)`;sK9g%0C^$e&>yxvOU zQ1jF;YN#;FF@vP)N2a5q>|aJU;X+FkkuXGNK3f^9YbcdBEjc0%!%0(hG3^yO_CSOc zx;c(~iFC2%fQpr_e_2>I9Ep*6-5;Dfyf4g6^pqec1+|tMIusk_j0UBPcdV1ot)x~M#1Vge2=!7Q9L*fow|{ckPO$m zWQB?JV+fp@rL~?5DP2)i^1G0ay?{SU8U|0X{WT##{?>y@VhjRlR#N|4F-oL{gD)=o z%561@0l#GFZ0n9!xX0~Ef^YcK_bA*5Jhaf?bMIr;Nlp#!elK8Kkb1EW7Xk?ks5O?s zoc7oL3a4AE}PtFJk!!h+JLCfts+im)c$)q zg7BAq`+}Z&Pdp=d(M$=52pDvRMr;%5!ZH8iB0pgWzfng*jPyT0;XJSlVBg{1(uq*R zP+3fIvg;jIv69aW$t@g_#6!s6?E2{CKmVgS&5{$feN0ZKXc}d(obq;nDXmoDc|HCm ze_G~N%^36vga9?ZcQ|*dTcDanls75PM*D?#s%#@n@U1v}Wc~1vTbO4!+pg~et@``N 
zenjZCjUKoIWS+^&2MN@F*#vd(Z~D_o7m!(x3DB1_efGRJv)=<}#CKS*NPpX|#Ph+_ z?_+eBn~G35Sri=sQ&mb>Zp43}Qmy8;A9aERT5&#A4)*rDs5)hyuf%t66e>y3Q)KFi^c#D(h zATY#hU-2gcV3Pz_xc_>tNPRpQMpdmkv&};IYvqiZx`?;OVcaPYrqF8=E1D*oq^+ow zYe-8?O-&PJuZ#P-dLD@>cXt~z*)wIwzt;d7LcE+FMmGf^CMo90ECD8Kgzgie&&K$f zbMtW#`VLEb4>Lrs%GqL?=QA;(;>Ai_yxjP~72`G(I*5k??@Eu}olrsoAs{S}T_M(4yS;dctQd75iH;EB8OHmT!oCVqgDJdh zloSZ2B=bpgNp_Zt=dOH=%SgvbR<8*gHHbSzLul*i*$y-KS}sr+QNHuNY5`i=5b;>H zlcV(XjX@(+`nH0pPH)jYb%DrZLYMy}b+Ep9cU`V|U|$p(W==C^cwJ5_4$|Eo#2dO? zH7oE^BA>m<-+wC@-5P!8%@`uE#~)nr-K4SbD-z76(^r(aDLA`nLSRlSYjll&kYn9l zf5tf+>aurw$Vo|o8XJk>i(yzqPTNUdv;TSX5-m#5QV~cu^^I-wJ9+524ZTGE$PJHi zn96);K*h1>J79k@(`7FFLWEa;mo7NF@QuwtpV>@QlCv08fHhaEs%L7E9!YxSn*a1q zKcT=~IKVOh&8uZ6Go7y*rJC2dFK;ih-&5$ew$(QMw5)d#>dE2KSCu0~hXixbcDz)m zscDS1=<#en?{Z_2B1Evm(coxke@N1D>9L|i(ziE(fE_9UIN1W`z5L4N;iYflSX|%C z714u3N%^ExGHm$ozshwh{%TOPBiT>(B=KLUZMAh1%prj*9_)CbB0XTTZXczSnF2ZU zTt5&BSn@k37jQ8Hq);DxcPBC#RSnwr6EuM^#Q&hLS*^>HC=`nOnRK_8kLSIAGM!9$ z0qm2`CNuYo?h%BULR#~+JF{Syb-?zo5%0Xq(EP(2-$X(U=PwL#}YRZKCpuac)ucPc=` zg2yfSzZH1@1^#xOvit0DRG3mgMZXEnEAPL{y0;%V1^ReLY=+s?uV72s_WPxaFT*e!PF_ z(wrPnO#X|iwsX)~8-ttrIvE$T5#Np8SRcQ^wtT*pTI(y{Ez^odqpXwI**&JwJoQrU z3x8T!MS8T*)H?2;RGimIav#(R*t9LUj&aE4zT7xxPye@rDK)=bNNvh8pfSyYgBX|9 zws^;1s$6$^#)&J1QY#|rasJCXAk&fJb}8{5qqSC`jwX>tlbp>om{MpR7XlF)(JnL8 zG6QC_5+NUnkrf?g^z0-URI2DEG2${%tK%yc-NpPd0Pz>Tb7>aHBoOq2ixXKRu?D-z zLNsRrf#Q1fQO4fBpY*?7V&*ceMZ@xeb8&i+<;#~^djneB`@Q8DTT&Zl-NEuHI}g&D zZ<0rukGPs|m(OF}2U1jsO8P1drnIm3?XOw0)Huq&{0R(z?t znAAB9YB(ssoeS`oS((DCBR|Dp@5WU1*sNV@^)i$m$`25-Y5rtyw*9)SzaT8HXwG74 zTVUPQl{Eh~dB{jZoIDLpQJ1jBT7}ndo_0lh3$X18fAK0W{A3Bd%7OH# zCqvBp{9j#f^P(>svHK&->_ls@Ai>%-eGxA~VL>UHWjb`T5U_u?Ta|AxGbR+4p>m zg%QOl28B$bMn=mA!79VfQ24_h=Y9n;;FukTU?R1(akV3F`-}`%=;)4FR0;s;gZ~dj znwDC{((V&UXQTVyiR#5?%|gqPRrbBW>YBfpe!prqVxe^p?1qnKYA`7T&$&{n}Mj-9HnihCku1 zpBRgVfWilUp$8)A^USP0%k|_WQ*x))(s3smCuOO;r&l9sy9Txx+sJA#HtC~C7%*%{~DKg@lBZCyhNE35rTfeOFBrs zlU*_IQS&=}L7(}otf^ni*gKofOSMwZlP9mqnGQ 
zd~;&ZnO%?ixs1KwLNi#uzkcYdp%^ssr*w5#v-{+@OEb+U*;c;2_FS3jFM2nMRkMeLnFlP^?Vbsc zL^KPcNy>Hjic1}uyFZ0~v(LEAVRq%7(9pf`Iu_BQ*R`)7V*aV;UIEOg1>P;>$7D{5 z@2_Sx&t^~6HH2l(O@Ly@l!Zj$)0f|1L;zMa4EgJaE3@JVQPnm# z6oK{qb%9^R*x`80L$1i97FCng%DwIfbfLdoCKRCBxT# z3sB+mlI_ZU7`^k41!gnwcQF%HIKjWZPE>F|qT4K4`hoV(h+c?78Dq#o94fObKEU}d zMk-=im54en`7BhXDp{|C8KoGs%&`rH$uW!>%i>%83HOt8&8-wR^UQmY`s-CgR7YV z&iR1N0QOI5md<(JX7TXxT=Ud@qxq98ztXv;tCH8d=gGViYrqJ%I^qa?*8J@?fw@I* zYzIC`8z`1VBR1;1E?{#=YObwK^f0FM#q05EEN5kzOVioZAJMLEZC(OG?o1ID$$vITPD#TIN3;jVYEVdH z1hO4sQ`ffh&~$!&{+~20KwTu8)A=gtBy#;Rtv*$SF+Mqx?!=CwAQ3U4{t+ldTX?-G zAP)^5Ic7kA5{M2XAw+(zT2>3MR1o*N5<9l0POmzB0&og*Mt-LgKxT%GMbAY!tJHCv z0!Y2$$S9Je<3YvwKXo}OJVOJsQAx~CLX&ZYALRsvEbw$VWTkAl27iy+`NcniW)E6C zB(yYtJFb{0S`jZmvPk!-T>9xrT z=1?l`!T13Q!Wp7Pu|mOYD90DIB=vUQ4kV+ozro)=&t53;n&~8c*3DJMgj2(msbU6b zKmPU>F{lrN$bX=+3Hw7xi9}z)?8r28HYm-~`7mOo;Q2&jh!2`Qderam{kK~(wJ97z zb{a~WKloXB{V$jJ6DbaZH<++*H&vWQ`mzHHTqF-VDvjN)TJi~()SmKTtL6$Rf$FPq zE6uhJN9sH3Ya?9{jNnbUpn=5mfOTD3J>LtN75Dst{4t#BleA$wXIwe2oJ+QhH&D2< z>g-yizQ=9^;nLfGKZ!AWOOgv<1T{6^t|=ESZN*+Asek6Dj#XnkpzQaOx&NG>lkc$p zsidzY&#q6An!vhIP;pH0IqT^h79Q@U6iM+JhBR~O%-TfK)a3X?hF5TtAibcf=ZX@- zDy{80v;5{vtKH_zEwN`sDFZm%$RGW3d(M*ap=u`*(_q<)$@Xq&*gw^J#%vhaUQF1W zdI?!n3$O^=Wp3=>tk=K1Ufuuh7c#+{t5|GaZPk4jPt`a5BGywT-u_)reZWO$ylxt( zox>>$Bbnl#B$)@?RA;pw%yYf8yZk);(tXq!gMiVmnw+59P4{s^N+5cm0TndusO@-n z|Jk!=`IYRect{|zkMSVLZO~dAy1livce98867b}N?yOAs%Ov9RRCKZ84NW^C)M#6@ z3cZ@XOH*x`Sy=(2V*nAMYVzT=d`urb!iwJ6>q@*;{?-al7;?w#>)pw7YiyN=QOF zGkf@Y@&F81;pmjq56nFNFgNZT<%eP`yHaKoq`1Z^Pyo5tYQMFgEJ?Q@g=Pa~6#$UW6HY}#I{jVKVs5>OaC>T;L#!wL zTxo4A`%7;kHN(Oq)ztAXY79C_6GKal5;{HZ%?rfJ9*vXsR0Ul^GpjRX;bOWBxR;PmD29t#d(v2&^dLAI4;G`}C`+K6pI_LRK|_dEhVW zhklZ55UL-*YqXgG+|^(~RkhE75(#z+8D2p%C8H(s->Uq(K;hux7DW{2pccfS05Lqg zq0V%T6ni_r^TVb#tkc9@Q%8!XSvA-gNI!l}^h11RbnpQ2-w}uO>C?pHz!lme#yv^# zxAP@ZDC&t2)qk&@rxvD&eO)XB2bo*&pE%^Ji2s_fb8>QA@KitXYVh@i2dDK>3Ied^ zkovn;5kcuD6m@tG{Xi9c1d19nm~x9a*sdf?%J}#+`ALMJmG|twpdx-4ED+|Qttd3S 
zuFdl3^-xM~?c|0Fvn9_9?WCaJrcFfX0Je)0YWmTNnJD`4_v{qQ3N~0oSG>d>1PvDmi^7Bd9dyp_g-TPI?jatj+z~NKSfrHS zV)or)=3b0PyQhjm-X}a)YjFp^wo*`ODv&V*#M2dyfxXQtMJyd@v>S!3XiY%*@M?dpUj9VlaI@JvRYN=^ zuv?VA>r63bAKHaps?>`fneiu;(lZn?vjC?eC>lIlwx-nQ_pgr&+nVkDRwq2}r|MfT z6fGh<%8rFZqvjm&%kG1KRRjz5PtRtDe30H!a7O zX$KCVzD|oyNMa^0plqfQSWD(JGT#)rp%e}+s$X2XotAiEyKTwbWGn~F<(vHTQHB6& za(QW^F@+g43-5OsEqh$Wyg%1`{+I4f3bQS1$!B#}i={3V*UU>&@MA|xFR`t_Rk!)E z|6T11MYp5A9tz=#03fzaRj8npKuOG+{D!CNrwll)G=#y%uz$5kmfzOlcJiQs^N0ee zxX@wqataJ9i0S!+QM=t;JB4>)d%NfYjaurs7<8gSo$wKeUC+9_ZJeCgqfb?cy51HW zwqJ5R*Wzb+Ss!Y-qH18J<2QnUP2~AB^8otlJm@%1S2O}jWmNgXygn|9N z1sV@wgib*{7_@xFE`7mQk_=+0d7mVGgxXOa2v0MyX;hv$#OYfCiM90N?0~I6+S3p* zJU(ggnAL#))%6Mkt@=t@^xS{W@P$X0X4Hh3S;KTcRv{0ZSS=~kip;nP1ZEeztAJ?` zHUtU#UQse)Ri(|maU@NG+mkW&{)PdxcR^JA033&POzgxCd| zGIO4GcO45$%If0+Ec|dW9E|(h0@2LvW@?mNdZ#Lf`PPZR@eZC0Abtt77)c|bPmn*i zF6B_;Q*&brBbCP?ihc_QGbVQZ(MYqZh8ADVvF~4Jn{rlGlr?Ta&83?l{P3?qj}&xG zFvC+n?);?K(V)f5MA6$iX7I=H&W}U`Xb^q>Ly5adQ-f_j--E>MGz!BhK{%1thb|Ah zg`4hUCU+=vhG6l3Bl9IQ&2sD%SYf)an=&s39@gccY#p15>gpU2H}tDuPy|m9SkM7R zrMuII-N(aQ&^9ZJ)zV{2v*=0yJM8Ycx>^dbow%ykS~>}3o8;roY^(&3@4=Ko3joUO z=3s@RBUDh9&G%=KFa7PeGjHEN`Vqw+v(>enApb~3dM{}>)Jzh z<0Pj|Bcu8CpL#e5JV>y@0hlkkdqu)<>G7aX5OMq6_b`<23y?h0Am8jNnfDi)PD}4w z+?Jx5PIOsh>xDnP0$HoW+FS~f-E((Zet$JH?3JnPnLR|DqV zc#MC3dYZhR=Af^kmtZMeAVRsS#Qr5-@gF^-s^~DW0QDKdsmH{$D zfO!$y!(~!tO$^<6);aj5;A3O~?p0^{8JVB&wvre}=%sgJUMNkdH}e1|PVTZmrl~;E zy7s!)ny@WH*vr!qpbSG?p!OdX16X&e)CBXH=g>O$6A$-ujF6xR-+|GYi3u%Ui*dCX z5-PdTy7Q&-q2YQ`+?kpr4^kN7*p_k8sQqqz&S5|V|1v&@nhS~%^0!u5C&`nSPitds zdazs0)ULwXX2Hj?o!>eL#z=xJ(X^F5-6^ zT(BW=)XulFo{k3G$wL>V5vz}aEArU`%#Sm-9b24_=*t!y9PRBD%fsbSH9!mSMZwfa zyo3W9ga|RwC8b#rQ`@nPIjF5eLaEN_WwM$5e+JU4;k%>j$}oiyIe>67ogzV;;lW zGfvt#M3n-fvOyFU0eC*z^Dm)cua>=-r7O_G^^+Xgm@4=b2AB**5-o`#-(+bGA)NeE z-+QP`32iUe2 z{=jlt7Bo%=K_UP&7WVGcq6SmA8UJ=m*XP>tq@RT>Xs^6iIletyRG}n@{XswxV5H^A zO%!<@Qt8AT6mBoIl=HM|q2#LZ?aBB&VeVU~H;pd01vBC)x zui#LzbeTN0@j_*~bk+5=(HH2Z^CVml;k!2<)mt+E 
zeA=3#L&{b$%U2!Mzz{f~G(N42jRAvx0%Tl%VP)GIgIvy&&|ilS$!P>~>EDobMhT04 zlxK)P;|4F4f=h-ecQdG}HA!5fzR!Ps&%mK}X2SC(zqWKl}Ef=nYB%0uDz#g-=x!2LxSQa*W6oYbl; z+t`2XpAbAXbd`kp6M;tmY-0bkt={zf)75J!XPGp91XMPBrCqDl{W92FO6hSZ z3|$O$m}W0%=N6t1eNIV!U71rRYcPY?en_ePQHfzLZ>3jj2UcJu+9C zBp@VMG*(G4%Lp5;)K)_^nBRZvG+6MXh~@M+5B;7g20^5la@g{uT}4!l3egR)->DG8 ziDSCra|+iQ^wXb00x6L&f(KNUUepv|&*3PtWpHh8cm@P*@XIAl?N6UZ%1~sfX})eZ z!5ZNFN41)A(Ei5h#!uRO?up+r)lyB(cCLSN@NY}P5I}>~%1X9Coo-DdMn-2Tf{uWb z780jTWiq;rNtyDyZZ9*S{Z4Z!E;%&m$tD=zNn?SF~wdU-!2#U0%z9rEq2zSBnLBlH-hKL}i^3FxD$=Tl zAvHyw{v)hLVPPWU=)>QYhUJmSp>l%{wyyAb_ZA72sgKBG(vu8!YILS}_qSZ|g-+Mg zwv$-_TX@EE(SCPRK4Rr4-l~TRhT^*iGBDz0_nG1XH?f&Yz^I81zN@I1F z&g~w{K$22%nXl!5$9deo*7;(hZT0QWHyNXMj<*Y0t@#6MF+Wy?1{?%`sTt^BZ2Rmx z2uz;F2MD=z<+e7Rng0&MFvGsL``)|v+;h%7=lfY{zb{>x_f+6Uc-P{Q=P0B8so*#M zxr*yw<-Q3V=mp))%h_}8L*ST@1VNyb`sa zjhb~UspCvjhCl?l&O_{H8;h#9*stA}M?9RtXimUBQgIsm`pZA7$M@Y%!XwWPrZoN2 zKf!9R>bC_@A7ok27Jm{apD_Ggk$NTg$E7D8hq%rA z8(Y`ik?A=F&K1>9K{q`Sy25D#-B;-#J=Sq8!V@7-|NJ0oaQlbcuZ9o0c4zZ|AN8w5Ir(ibNn{%AmP>NNLAtY zv^IaKkZHSmoAu#2X_ao`7c2evq=u4S_vJ_L2O4!a6UpliIIYGc-d&0^EeYOk=m+1LP=@n8@MRz4mGKJ4P!;_Jzt;5WPUg3H&C!{1fwns;}@3 zjpvxH`Q|Yu=EFA>432eQ%NSVGwm)BTvnN5lW!~9O6Rt9Io_J);t@>C`TZgz6tyxbY_q6~fxI`ZFU(fn3udS%=O@2h82G(}V3HMW zNH9JualiKhlRh~yIw|q1fRkIY99UFVvT%yEm0)AXs5h<7(`tCB7#IkG>>IABtlhR- zRw{Bfvk9#BRFLhntYl$eN6E#3%j=rTu85P9NNEWyky(!z4EGe;^G6>qBD~ah2zN@e z0F&=&S?P$wj8ZA;1I-jvaHizkK}UL7S6i%DAyY%!!59JDZuh0qX3g(UI9!@s-#!&L z+(x&1<1nis!*fM^w}L6?v7xbt`whgvhyrpZWpu;iXciDHiNV^Nnk2z4rx?0Oi+}-$ z8!DxXtcOgdKp~l|9=AYO9F7KNPXkVmzRlmQpN6eW)(wtge;(N4a4&IDOt{Aij$<=F zKRhKfwz7|*TE19Kp0pMXhAonIT`YX!G7gM#`}vgJj`Rf*&hi?;fz}ty+B;L_KHK06Z=g5*sh)QuvOmCKr##TrktoA&~EqD zpUfmW@2p7=dJG)&{nmWWg&wK7H4Yw1E-<>ZONuJ!+O4{Df!}{AA-t1j9-9*$JWb~d z0nyr$o(II_2>$Li2Ugf^xllyRw|(Md=VHvrOv$j+_T|!ZN7DUW@e8O+d_v01|L1tJL@_QHk&`N_i-Jp{>bFx zF(u)b$P~86)7)s*lc8z5U=cIBKlCUeeszii&9N<}@%%=o8h&tYS(>Ep!M0Sm^y#60 zy-svl;l*T6(dx5ga%d10w}HnIP@b4N=Bl(@%gCCdBveB(&~cXO1!sU{%sWAc7hK_B 
z!Y}Fun|?AdH}dQm9rUX=pAWtAu}~CeGeLuJj*7Ns~#9G1>TTXW{xxEzx=b01s-8E_Ab zjc{$B>gp!AeSZzZc@!g)j@VbTC-%g_HJcI|DIa|X?&Nfq_JpMKr`};g6Um=&a@E<4 zg$7}@CEuNRqcjXFoqK;03kJC(|E_+`xOh4pm+|7saXd?&RYI|zI+Ei)6j+c%Ay-x} z$5`__U0YYbvR}qx3EQnzC!9Ce?^yKn&Inpp?dy~0@YK9;=Bm8tYGfmjIsRTDqHXu* zMat2Lm-Tk5-FFmoa}87R>0lCrKo4=#(szP~!rQ)kZj-{|29hmso={ck>wk}Yc`m zuJs1WH^()*2k8ZFLL9pOmeaXzm%bC!A*V81@u!GH^yc@OI(BLk$CXtrC~BQMfJZf=W{?b7~$@;-i-7m6NH{oQCfE{`q9 zyU+^p+z#}NXDBI=d9FixLwR(gG+p}LpWQa9jvS)U#a!T%fk>w7MXX6DyRid4JBfVO z)Ll-)mS4S|Zk3Zfy`r!PIBLJL|DnkCF@DFT*Q$)~q>a!?iRw)Rh#oPQJ?7C&Cbiih zUf%6bP@&Fq=b|{<9up%{Q)kp4cLcNG1;q;Z`7X0_xe{+Km?-@ATh$%;fgjZKz5N0_ zNTZAOn&fyYt0+w)a|2nAH!T&8-@mkJ1{32!BtehYz*PBu`-S;-{Zhzac_sJ57O zg3(LhG#EK-2(fRG^YB^Ee`9`vHvTc~$G2gg_Ve`Ph)5Rse-$1;LR@f5J)!qK+LWpJ zU9jAIi!1C_#$}>x++MDs5+(O66v8o3UGuMt9FxJ={I2_Zq!-4wd?TK8;)G|=Yk&Jy zijVUh_pmqQ;l%kbY(%?XWuJaBc}%YOj|fPG!~E-{=WVY*P2G1SN>jcPE@GLy%P)Tw zC~N&_I{!^9@OVbVZNDXp*qO<9OCwUgTNak1yy;?ti{hQeXiqK^E4pLk(*1mP6`D6n zi`et)w(H|GV_D^3IC2_?F9@PKs6n`pH@Xh?ZtZF->!(&+*&Fgu1Mf5QS=A*J|9vyC zm#D%UyZl@WGgKgYCS&d9uxUBStJo%;BL=-!k|Yl`H5jFPS>hEvE+TF)VI%eF%@{LE zdfw^t@Omrav3& zEN7Y7d>elJ)W(Y1@vVC){~RH^+UXT1ArTSqC0Fca6~JC>4sPtJRBdg+QQhSjmur z<10>L^ii*QIk3R7HsJ@~9=uKf3|&M-Cu=yRxx<1Lu`H~pksJAKFtyJmC;84oUW|>Z z(*u;}^7NJcRiATdGs*4X2<+0W%LI5 z$Kazh!Qzvy->0|=j?kX?7pImcdfG(V0O11I7SGn!tFijFU}N0X^%(Fyw_F>iY^1Mm zby4w?HApc`mXO8X&8&OiuU(|p&Q?Dgyi?9;qDT32!NVww8V~`isyc18Yo{4VLT*4n zDWD@%M@#EzB7YYEpN`o*Tw*2*?{fDyrH({vK6{MG)(}MV4dsgl=f+J>s`llEt`HTxYN}awZNg zmay!4OI&W&E(*m_khDR;$H9>1-8K<5aSe}e+&e~fY7qZH(2BBKx)irspbbh zSNEoA60J!giONTq3?ENZgT5vXMoDDl&R+s#oF~P~$qw>6e;?Jf+n@12f$Y|OmX=NM z0Y(}9e#cl=pL+9G4RVtLJUc@LKS$MWADo|XnbMfe9;b>|H#DqjrH{mBP^=tw^|H%~ z-fEps|IM937vU(_y(8x8Jzu^Ad2bYidkgECFE+4ZJ?r}GbxB8}sql*b>NK<1jp=zSQOIF4rwCC?H_Y)-8I1`VxDl^x$)@C^LLAy&ubY)|Mr}U*vh#7T~`ceCuQj4KNJX`YjpjY{h%YH>12wE`<3c7V@@V( z>-8y>k18kxy#2F%Awl}3=Z=tbOM&#^SaM#ni`28su})i2 zu=0tu-oX7gw4V=t22!j*4puCL1&o>%AhqiagEiioDak&+6o|iqmFYT~aGqMi;=e;s zHZ<$uSLai8a5Kc*ZlEduECKOPu(*bcYmOLF=Bg 
zF+b0?#zTyJc17-IF_SGZd0V#Wc%9^c{3%DX!@MEpudI<+` z_2{U)3@N`2_?i7dzp_<`aE&E0^JPyN9ubU2q;hvPjmlMklj4$L&~K#$S7mh zru0zeWu=|$AS289$z8TRcb}}HF1MzZ8xYvisfOTf_(>#sGJ<2;w!(n+~hoOU***LN<@(kfLl2V*~w%R*eNj)3dB zy?f@$D0wj)tky$Edxjx57>PJAdpc?@NNwy^)C$K=EFX8{lgf_*SIns9&By6U@nZWR zWZ7te*2G~|l-ho!O%#b5nNOw+1ejYem%(}q7>Opja1`J1sl0U!qvZJV8=3|`J);+j z3t+4cbF?~*&WYW{Cx~q_+qlBZi_Td`-cmh}TLlI4@yW*~)YTzY^p)MjvM}2@pU&Bl z$IMzUkfOz}2oy8^+*PAkwtEqIjHO*3c zPEktG^Y)~5H@dMjWwuPo(Fn7(;e*d2DN~o(R5~SOTov=?clEUErlfaOMU%Pr6v&!p z{O;{2ysGy$HNrgN!QN)3A8DL?pmYHId75Cc#{a$1(9F6&*s>D_8NO?D;>Tc;{Sf(T zwSF*r`Q(1#@?mXmG}4@5j>%8;&-w@)Bem`Jav4_Fr%->dfQT@qSypAPyh8n zrhZ6m+mzFZrv0RZL7!!Vj`Yiu9$Y4g=BtkC{@Tmaeo8;y9!!h(WvMxp4{cc50B^*N zP=ZXZTWJZ1g&zUe#eTNEo(92pWZmbTckF+7u+m3fS}ybqK04!qp@Z*Ts7IGPc}tB> z5_KOs*lD_6Y*bA%s&dyTOS|P6Hz&FA-0#a4P)YY)NIj96`*MojGZw}0<JE?3Rq6@T^<*YdU`t`<(<1FD{*k4E#Y5Wuss;JcqcdNrC)7y;gWsDhyV)3$ zsDqCRM6*=*V_j?ajY2Eh7ooDv1 z$8(#DjOts5?7|2TKv(>Hy@rmPjvGBS(^rV<| z(oEuNRnO|6mhBFD4&fOG0`TUD4MI?Fj`*}(oBn{0ovjwwXnL&j3yZm!-hx*@f%n{+kK({03G$RzJ3vZ3v7`YGA` zS-SAs(4I-FA{b*U3vq#7%I@smeor;`h5J2c??M#cx;Sp@ds{JCaw* zNk(Nhr)~F91I&q1MX>gpUK6cMUc2r-fvni?qW-XfL{YoV&o@Y-b@H^S%*>I{qaEbg zX0uzN*fYzn^1fM}{#7<)*)Y({OS-zcuR?+ZcM_msp5T@7tBzK(Ye}C)N3TjZeSQ@^ zvvBzY|AZR>lY{c%5f5M@1XKG>vN;s6TG0W0(IVRzzF;V3)$}>EK(xW!Xkr5^EZCq= zm-2|)9x&@a9&J}-a-k8>whJ!Jq-QL~rw)->HB;x`u&NJG3VOmh>-3C%Mi_#3bs`VA zx0Rp%eqoUZ>!gmSnv&7IjEpc1eI4yh7ZiI)d3=BjR)`vpHLmWgeGTgIw_oTE?R})d z^XUQdj?Vf&PeqtrNP0JWb-KfnjL+l9-_3-=zL;iDgJDD`hFQ=B1bZurJiI(yYKVih zS}bK|T|q#F@Yv2ty1=;|`w43gW^dw^UCBH5+D(ZThxNxeZN+7=Dkx=$U>Z#Jts4LV54K|#vt!Ll_Wjf)jTHe4ouvhX!N*@4iz+ve%B zrzt#=8xl<)eC}On@pgxmFdSYc^oiA8|N3=mDKBWrd{9>a!g?MabpoAdHtLy8yM4(; zMwhP*8{`B`b$8I2Tr{_!kRT1)k9k9O!qYH zLTeM%)?9pF{+-&#Nv2NfXc{eTp9)K*qXOfjf$QSgJLpah)Z;^*S&#e)rikzSb6&+) z*UfdT^eb;d9868q6YtBHWp1*i+n3qyj|n#D`1!n6xPG3L89Vh>)bFSIXTtexUs};C z_IPERfd>)gTv)@)*wgZhd^+56M@?KsM}PnT&5(ZjH6et!*dgcpFQopSuz5vKs>UiH z#|~qBUeJ%`37%iA&HJs+J!EPz?H;xFuJB$ewTR>Ctuz$#P*#PT)aJ6_Sl%YyGKfpJ 
z^gN6=UB^>1;1JH&kwZ~EKIUjP`!l3;RUVg9Fvljw-_047DAlsBo4VoVcL`@v+iU#kETS0amwXq5YS4DcTES1Bq@N=A+VXd8EyY0){ zZH4Iec)!-N+}{2X7i3fr0t)%^w|CmxUn3X^g6w>Msi?_D{s`JeG2(kBW7ZgHue{jo z_M|s2FUD-y{}@_D_4W$}-Qz>XCO7+M(qq53&Obyum2{k+PTX1*Onp{#wzO55V$@p6 zrjcSiZdn(_%|qPt$o&v6@LB)A98}u~)wVctXQZGmrO$=!yUlu~Thb`8pCerY{Ld@Po&(6hbbIR4-*NkY7 zGw(L+A9*u4I2iS?^qvf2l z=CN@Q2&6s>(=#nsvD5HS8&VIY@?Q-Cux>$3$Fk$6I-{rdXtTT z`_Vd>#_5t=7V0HgjwY+=r~&~Vo%EBNK%@2n4oag$`(fAMU1+y%T($-)C6UtfbvQxh z-%G`VxF1Y%ahZG>OY(uwsieArq$&#jobtG4I+uFMfpvn~=O0fhN0L{{UIZD}Egpv7PemByWR<)_L5 z7E$BE>G^Kz48zHmJbbJw`)OH684fPX@1L(0SFAO`xs9ri_V?mx9!FM~BjcnmtzK33 zlx53ja_Ayc42>)%tzR_3TdMaTLNmyEbF}$7&f>{U63I3S8sS5NQYM|s#-3*Z30E=6 zcGUXXDY@gWM<3CFe8)IHTZ*NBaw?O`T&-k2GYQD@7Plqm!Ps*=8<_X#o&hZ_v%CVoAt zRNBdxu67#T-jn}+=biXhmG-~7B^^tIBs&I~MCM(*FSaH4jJy};A1V=X^zx>;p6_NJ zuA+GYGk$-q@DBZerE=d(9kl<%w_hW+q=5AFUO*4WOC8wUr}-7gPHXh4#&PfCL1mxi z>@Sii*q`yn!2_OF&@47-Eqz{GofEWurcU{g}r)J^QJe+-mcDuRZUac@7tF3UDnj`9job zg4a{F=juknX0n<6!Gw5fqby2Mw0v((YWYOT8{{IU2XTz)iSQ2g_hZ+*K3jS&=ObAj zyVkbh`F6Xrppy}@&@ET5*s;x95S$KVDC8@=oiIZgIKwv}f9M|pXm}4h?F=VoqOdN+ zzP`9^yuy2yTcz77sycgvE*11md6hjPKd8amuJdfK9u0o zrSvD0Pc9F6c^)l7=fe5kKVolB{3MHq=MhiEyib4UG1MFddrKo~^*XvG$AhkhX`mbo zr=JMqE0CadIlajlRc_D_=Su>1?qoqb%AG>fK_pAR7KgRsKT%&Na()4IWDYZb(U%}491t*zTXRfBIq>{^{> zDtKIR17dKa=Y}fb)~5}I*Dq@psP5t$dy7L1Vog;rGQN9^Dj2`hUjbmPP-hyZ0!g5m z{XtW@BF@`8&93`2a`G?)PDZ~;zUBVw2oI#^dF!jPY2W-u%&1D#_gs`gR%c zVe@rF)-Y%ZXWhsA%ceY{4i4*j8@AnLS*1^bv%BHbGyBNa&*xWy{2BS*l~QuzGc&<8^Duz=6zZ_J`Eo>Hiiat+?iv6m9(6-*M%Mr4`c%KeEkcdj$w z`^snJzFnd&kG#s&%hIC18?|qKeCwu4H@)f&2nWvj@wWnw0e{$`qq~f5^HzAtI0&pG z@U8AZdkMOKY#ixxVHDg@yFMSzmA}WZa)UC&wL!+B$wIp%p+CnmJf60_XK6utT=HFS zGdaH7EmE{?ekDHmuY^Sz#X(6Vk=iV;@$!yc%5gWkf1EujE)r=3#bIfY{kB3}e0nAuI4V(7 znD^WR95jtU`WbX?kP3V0SJQHL?JwMI;9P6vjQhax?W31f2I`?i>HtWBDfFLkJjh%O zi@F1mBII8iq&YceH^?R0aqci>Vf0yHq3F?#XmFF67O!)=qo7fSESc3$wHILy6)Ze< zs)t&R(+7MorlIJ$2NmMP$M?%gVq9zLA8)G6=$%El^%-a$WI zye~G!bhR6zaNUcB8(DyI-mI-4km1z-eyOv`R6!ol7G 
zg{gp`=~!W*ClzrahonPs`S7^s`tK`|0qTO)Pd6JCVQhig@BM(gn?IqJQsg>#Zo7vJN^LUqe+d%I6R%api}K(mn|fxYPOBwk0S z@7%Z8SZ`4qCt%+JwjJQ{#bkWyb4izkqYmUSD*zWG3|M-g$v5>^Ao_6@>E%_eUtL8s z`f`4MzRtYPn7ez_Z#rJBoBH*TW2ttxuAW>pU#Pv7YrqW%wJfydk`nX_91nVL98h-( zJn7Z}K`&o7gQ}OS_lAV}hlH!$g)8vch@OrMvqq8=p??Yb3 z7(y@h96)T@czyra&`5X@zQcU$s&(;RbLkkpPT!lTl|IkE*IefS^j{jZeP);DzP_J%LKp zV)eZx-cx4a`@vD3$D4YFhO-17ywBTwQg5#*D?0&DLG0K%i?__bQ=>CIPquvye-97z zO0t2@KsN(G15cb;g@!2-By)0bz~5;0lAZ=Vzmqlm``}c(ttbNa9gCIGU87*DPo@c= zHa3EZ=!g*03VvFqhqJ=;Fd*`bgQ)9j?I=? z=0jyCHBIbU^U)IfvwLV&WjH!AIXPLCT;!CwRf7|O9Q)N4=(hQT8NH||1w-++e*Tp^ zz2j%um1d~)=T=5OhstNw4Q{-Oh+A#Hx3eM%b(nzuYqW03gu+SKhmmg;-hj=Ql0whl z7IOhiup$SVmfz$Xz|M~oXx2kBD)#q#(*ABxDY2As&l~xSOwmT{2K>O?GUV!Yg40XQ z{np#{}mtxt;?D*3cGAF%T!O0aFlQ|z-@X-rh%^iXVT(|NODCA za!Mn&rGDqK^d7?Y1^E!yotYM^t~7@sd*OWNl5YBsxxAwm&qn1!nNfgf9EUgrxgus+ zXuESfM4bHzAE+9vh%hDM{R)1AI;C!E1&`r{@Ui;9>H*p2#&{~E(HqBW|D~N)-w(qU z3eYlC#}T{2$EN0xZ0!R5Id{6w@_=qIBiH7<|70&kAvcGa3M;z^q!#6$&wewtS5 zNr+2RgYu|NKf%ws;sw5n!SubW*EsTMvlm@#Il#L)1sq9ST+Vn}kQlp%eFRl`CP;g^ zW$$;s{89anOM}8m&~68ePUIaZ+yhn+K(#F_Ez75D!nTM|8hqY2AT@nYi~6;N+<5_6 zZf0zG_f-GqP1|SY&Lz_f`NZy$k^Zu>iym3S5O_q_TVKDgX3y|pZ{O4RKMS8y7!|}D zT01Ekw<>vTjqU6E_DxxYiqfD&zMnF1_(RN_=Oz>nCsbz5|`x3_|gt4>O8bR7L;~{bXm}@1S3V zeMU`Dl;W34G zkTE;A=EoPm>#Oio%S2iDOBfX*qD zpKl{zU2G7@`?lA0Cv#p@JRn%<3>zp&R)xcS`O9@TN7ZE*MC?-9UMsu2g38>?tOK3Q z?_pp+aNcvHq^#xaPq}eVXpSnJ^-%>E6VRmEm?#;|Q)lO1DaA#;Y`26V00^pf-uHa| z`sxJWK|YEZ-ee|-F|98zcLW!AGUH6j=g>%RBE`L6z!5h5%6qR1L~rUzf?=!`%&=H| zIoPu_$^ii!+d93BkHylVD5N)dAgM7V1GNA>LlDAfPbHbHCt{KSN? 
z-<5ZG7lQJaD-t+oEq7I7n~Zh<&D4;4a;DxHZ1QPwQq+$TpayOj_Z@iiQUl)J*4fcU z8PMuq=m;s&DpWWL-yH=X_xFZ-T3JDXUX?{muSt@SHKyMTfbkTR$)HrE|Ef-Qx$xEu zAP_v>f4TtYmqj{da}}mo6v184qm( z-PHIuZv?gtm2<`K+r121;#m~7P%;)Y%6WoW`OCPdq*LQAnFdh5@*yOk|7%3hP)|<` zeCo$u1xBkz8U;rJ8CoPcuAHd*?k|8?R!FZMk z$VrShk30d1vuA0~vsq0|O**|SCnspBPJ^S9Hv$e_dr1p+sn0I~-_dkR7XI1YlS1V& zRF8{Nk)=f2`elS=QjIRxutimM^^4(E4UXlLb16o35Wupk2&w@JG0;M5@j0iYqUtGw z??b0A;ouR4eXHP7iRHq!y>{q{Vwm?ni0oMSY}rs(SC^fg4R**yL`00nj>x$-#f9sHv31M1Zi0p9IJwN9>W8WUqO0 z&G$P71_mrKK)(cZM#r*7l(VWl%d1=}9T^!Bq$Ni2K76Q}HwsvVLv}blWijE`L3haT zz?{LAP4tU3)zi!H+h6VlA+Dg1P;W&93~1f~B?hZ%&>0Rskv~y1O2U|Qmuuk1r_j-R ztb~ptn^P6EBq0=ffQSZe;j=se7!(4rNCHenJv}`@%%t%GJS7wUcjT%ocX+wosj}1e z=3HmU)*}2b52BTS>jOLR^0hcE{pVSq$+5AqnVC(e`Wd_0$qid;>rty}faNmPOIs77 zjRYK7t_T2CLxBZl%t8uWI#-r%*_|)Cb;mfJmVDQ6ancar+Nh>K!n98 zvr>k8G#df%o{>ElB_YmDU<>I<1c>fNKCafcrw`iG+kEX5KM(-TpEHRb9a zoJ&^5#heJHuk!5yhn@$1OWONWSz1Wb^g*iskXEL8uA=k-aL}M?A$S||nFGD%5&wkZ zb`Tg3YNG(R${ZIH1L)?|!F-8N9gS%b7S;NeNz!ZNhA$%qqZFh%ZZOWfBU_71R#RN9 zE|LLzTJv5!JxR#)mg|4SI1dCFP8;CFa8ZK66d#}C9=J5c7+Or$fDfjUA%!>P=Z>2Z zQB1cW9IH;E^k6ceB?DcjFs5RR2a5-%Bcr1?Am&XPc8B0uqun|7iI}x;=pdheOhMk) zd0nI=5f8A9FqCX9Yi&f$Z@Hav@^;54cdi`(K3w|X+MYYPb#+OJT3FMH2hbUKNLk2V zYbBh#bhqf&bVcEX+PB_+rYo}!75B@At|5zWb}xOc$`K&8($<#ti69y?v!4y?`MUS0 zo-p1j(Z(J$!J+yk!en7Xg8dc6Z*AqF|4r@?n32aj*VFtB`l}~aI&?y_?;5RUYYvZL-m^K^+#ksk6B_b@` z9aK|Zj*Mk(od?4VC=7uiGp3a{S}^`6B7!Jt2}pndQZ$&{(^~*&S-vW<-I}$OQ0GU0 z5&*{L-)Ru$vT3peI5+)ee}ke^GH_o1AsR5_{(VBm|9ca8f-HDDs6GUj^Y5?J8_1X7 eAUI4s@S3zIW-vajU5y(8UMh;u6iN}V1O5kK{*8|S diff --git a/docs/server/source/production-nodes/node-components.md b/docs/server/source/production-nodes/node-components.md index 9d6b41ec..83707118 100644 --- a/docs/server/source/production-nodes/node-components.md +++ b/docs/server/source/production-nodes/node-components.md @@ -1,23 +1,16 @@ # Production Node Components -A BigchainDB node must include, at least: +A production BigchainDB node must include BigchainDB Server, MongoDB Server (mongod), and 
scalable storage for MongoDB, but it could include several other components, including: -* BigchainDB Server and -* RethinkDB Server. - -When doing development and testing, it's common to install both on the same machine, but in a production environment, it may make more sense to install them on separate machines. - -In a production environment, a BigchainDB node should have several other components, including: - -* nginx or similar, as a reverse proxy and/or load balancer for the Gunicorn server(s) inside the node -* An NTP daemon running on all machines running BigchainDB code, and possibly other machines -* A RethinkDB proxy server -* A RethinkDB "wire protocol firewall" (in the future: this component doesn't exist yet) -* Scalable storage for RethinkDB (e.g. using RAID) -* Monitoring software, to monitor all the machines in the node -* Configuration management agents (if you're using a configuration managment system that uses agents) +* NGINX or similar, to provide authentication, rate limiting, etc. +* An NTP daemon running on all machines running BigchainDB Server or mongod, and possibly other machines +* **Not** MongoDB Automation Agent. It's for automating the deployment of an entire MongoDB cluster, not just one MongoDB node within a cluster. +* MongoDB Monitoring Agent +* MongoDB Backup Agent +* Log aggregation software +* Monitoring software * Maybe more -The relationship between these components is illustrated below. +The relationship between the main components is illustrated below. Note that BigchainDB Server must be able to communicate with all other mongod instances in the BigchainDB cluster (i.e. in other BigchainDB nodes). -![Components of a node](../_static/Node-components.png) +![Components of a production node](../_static/Node-components.png) From 22cc47b0c544bfc5e7e2a669748d4a9436ba537f Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 11 Apr 2017 14:27:17 +0200 Subject: [PATCH 066/150] Updated docs re: prod node reqs. 
Moved RethinkDB reqs to Appendices --- docs/server/source/appendices/index.rst | 1 + .../source/appendices/rethinkdb-reqs.md | 47 ++++++++++++++++ .../production-nodes/node-components.md | 8 ++- .../production-nodes/node-requirements.md | 54 +++---------------- 4 files changed, 61 insertions(+), 49 deletions(-) create mode 100644 docs/server/source/appendices/rethinkdb-reqs.md diff --git a/docs/server/source/appendices/index.rst b/docs/server/source/appendices/index.rst index 7beb27f5..7ff0cf9c 100755 --- a/docs/server/source/appendices/index.rst +++ b/docs/server/source/appendices/index.rst @@ -21,6 +21,7 @@ Appendices generate-key-pair-for-ssh firewall-notes ntp-notes + rethinkdb-reqs example-rethinkdb-storage-setups licenses install-with-lxd diff --git a/docs/server/source/appendices/rethinkdb-reqs.md b/docs/server/source/appendices/rethinkdb-reqs.md new file mode 100644 index 00000000..f5c41c92 --- /dev/null +++ b/docs/server/source/appendices/rethinkdb-reqs.md @@ -0,0 +1,47 @@ +# RethinkDB Requirements + +[The RethinkDB documentation](https://rethinkdb.com/docs/) should be your first source of information about its requirements. This page serves mostly to document some of its more obscure requirements. + +RethinkDB Server [will run on any modern OS](https://www.rethinkdb.com/docs/install/). Note that the Fedora package isn't officially supported. Also, official support for Windows is fairly recent ([April 2016](https://rethinkdb.com/blog/2.3-release/)). + + +## Storage Requirements + +When it comes to storage for RethinkDB, there are many things that are nice to have (e.g. SSDs, high-speed input/output [IOPS], replication, reliability, scalability, pay-for-what-you-use), but there are few _requirements_ other than: + +1. have enough storage to store all your data (and its replicas), and +2. make sure your storage solution (hardware and interconnects) can handle your expected read & write rates. 
+ +For RethinkDB's failover mechanisms to work, [every RethinkDB table must have at least three replicas](https://rethinkdb.com/docs/failover/) (i.e. a primary replica and two others). For example, if you want to store 10 GB of unique data, then you need at least 30 GB of storage. (Indexes and internal metadata are stored in RAM.) + +As for the read & write rates, what do you expect those to be for your situation? It's not enough for the storage system alone to handle those rates: the interconnects between the nodes must also be able to handle them. + + +## Memory (RAM) Requirements + +In their [FAQ](https://rethinkdb.com/faq/), RethinkDB recommends that, "RethinkDB servers have at least 2GB of RAM..." ([source](https://rethinkdb.com/faq/)) + +In particular: "RethinkDB requires data structures in RAM on each server proportional to the size of the data on that server’s disk, usually around 1% of the size of the total data set." ([source](https://rethinkdb.com/limitations/)) We asked what they meant by "total data set" and [they said](https://github.com/rethinkdb/rethinkdb/issues/5902#issuecomment-230860607) it's "referring to only the data stored on the particular server." + +Also, "The storage engine is used in conjunction with a custom, B-Tree-aware caching engine which allows file sizes many orders of magnitude greater than the amount of available memory. RethinkDB can operate on a terabyte of data with about ten gigabytes of free RAM." ([source](https://www.rethinkdb.com/docs/architecture/)) (In this case, it's the _cluster_ which has a total of one terabyte of data, and it's the _cluster_ which has a total of ten gigabytes of RAM. That is, if you add up the RethinkDB RAM on all the servers, it's ten gigabytes.) + +In response to our questions about RAM requirements, @danielmewes (of RethinkDB) [wrote](https://github.com/rethinkdb/rethinkdb/issues/5902#issuecomment-230860607): + +> ...
If you replicate the data, the amount of data per server increases accordingly, because multiple copies of the same data will be held by different servers in the cluster. + +For example, if you increase the data replication factor from 1 to 2 (i.e. the primary plus one copy), then that will double the RAM needed for metadata. Also from @danielmewes: + +> **For reasonable performance, you should probably aim at something closer to 5-10% of the data size.** [Emphasis added] The 1% is the bare minimum and doesn't include any caching. If you want to run near the minimum, you'll also need to manually lower RethinkDB's cache size through the `--cache-size` parameter to free up enough RAM for the metadata overhead... + +RethinkDB has [documentation about its memory requirements](https://rethinkdb.com/docs/memory-usage/). You can use that page to get a better estimate of how much memory you'll need. In particular, note that RethinkDB automatically configures the cache size limit to be about half the available memory, but it can be no lower than 100 MB. As @danielmewes noted, you can manually change the cache size limit (e.g. to free up RAM for queries, metadata, or other things). + +If a RethinkDB process (on a server) runs out of RAM, the operating system will start swapping RAM out to disk, slowing everything down. According to @danielmewes: + +> Going into swap is usually pretty bad for RethinkDB, and RethinkDB servers that have gone into swap often become so slow that other nodes in the cluster consider them unavailable and terminate the connection to them. I recommend adjusting RethinkDB's cache size conservatively to avoid this scenario. RethinkDB will still make use of additional RAM through the operating system's block cache (though less efficiently than when it can keep data in its own cache). 
+ + +## Filesystem Requirements + +RethinkDB "supports most commonly used file systems" ([source](https://www.rethinkdb.com/docs/architecture/)) but it has [issues with BTRFS](https://github.com/rethinkdb/rethinkdb/issues/2781) (B-tree file system). + +It's best to use a filesystem that supports direct I/O, because that will improve RethinkDB performance (if you tell RethinkDB to use direct I/O). Many compressed or encrypted filesystems don't support direct I/O. diff --git a/docs/server/source/production-nodes/node-components.md b/docs/server/source/production-nodes/node-components.md index 83707118..62c5c9a9 100644 --- a/docs/server/source/production-nodes/node-components.md +++ b/docs/server/source/production-nodes/node-components.md @@ -1,6 +1,12 @@ # Production Node Components -A production BigchainDB node must include BigchainDB Server, MongoDB Server (mongod), and scalable storage for MongoDB, but it could include several other components, including: +A production BigchainDB node must include: + +* BigchainDB Server +* MongoDB Server (mongod) +* Scalable storage for MongoDB + +It could also include several other components, including: * NGINX or similar, to provide authentication, rate limiting, etc. * An NTP daemon running on all machines running BigchainDB Server or mongod, and possibly other machines diff --git a/docs/server/source/production-nodes/node-requirements.md b/docs/server/source/production-nodes/node-requirements.md index 56d52f13..9588747b 100644 --- a/docs/server/source/production-nodes/node-requirements.md +++ b/docs/server/source/production-nodes/node-requirements.md @@ -1,59 +1,17 @@ # Production Node Requirements -Note: This section will be broken apart into several pages, e.g. NTP requirements, RethinkDB requirements, BigchainDB requirements, etc. and those pages will add more details. 
+**This page is about the requirements of BigchainDB Server.** You can find the requirements of MongoDB, NGINX, your NTP daemon, your monitoring software, and other [production node components](node-components.html) in the documentation for that software. ## OS Requirements -* RethinkDB Server [will run on any modern OS](https://www.rethinkdb.com/docs/install/). Note that the Fedora package isn't officially supported. Also, official support for Windows is fairly recent ([April 2016](https://rethinkdb.com/blog/2.3-release/)). -* BigchainDB Server requires Python 3.4+ and Python 3.4+ [will run on any modern OS](https://docs.python.org/3.4/using/index.html). -* BigchaindB Server uses the Python `multiprocessing` package and [some functionality in the `multiprocessing` package doesn't work on OS X](https://docs.python.org/3.4/library/multiprocessing.html#multiprocessing.Queue.qsize). You can still use Mac OS X if you use Docker or a virtual machine. +BigchainDB Server requires Python 3.4+ and Python 3.4+ [will run on any modern OS](https://docs.python.org/3.4/using/index.html), but we recommend using an LTS version of [Ubuntu Server](https://www.ubuntu.com/server) or a similarly server-grade Linux distribution. -The BigchainDB core dev team uses recent LTS versions of Ubuntu and recent versions of Fedora. - -We don't test BigchainDB on Windows or Mac OS X, but you can try. - -* If you run into problems on Windows, then you may want to try using Vagrant. One of our community members ([@Mec-Is](https://github.com/Mec-iS)) wrote [a page about how to install BigchainDB on a VM with Vagrant](https://gist.github.com/Mec-iS/b84758397f1b21f21700). -* If you have Mac OS X and want to experiment with BigchainDB, then you could do that [using Docker](../appendices/run-with-docker.html). +_Don't use macOS_ (formerly OS X, formerly Mac OS X), because it's not a server-grade operating system. 
Also, BigchainDB Server uses the Python multiprocessing package and [some functionality in the multiprocessing package doesn't work on Mac OS X](https://docs.python.org/3.4/library/multiprocessing.html#multiprocessing.Queue.qsize). -## Storage Requirements +## General Considerations -When it comes to storage for RethinkDB, there are many things that are nice to have (e.g. SSDs, high-speed input/output [IOPS], replication, reliability, scalability, pay-for-what-you-use), but there are few _requirements_ other than: +BigchainDB Server runs many concurrent processes, so more RAM and more CPU cores is better. -1. have enough storage to store all your data (and its replicas), and -2. make sure your storage solution (hardware and interconnects) can handle your expected read & write rates. - -For RethinkDB's failover mechanisms to work, [every RethinkDB table must have at least three replicas](https://rethinkdb.com/docs/failover/) (i.e. a primary replica and two others). For example, if you want to store 10 GB of unique data, then you need at least 30 GB of storage. (Indexes and internal metadata are stored in RAM.) - -As for the read & write rates, what do you expect those to be for your situation? It's not enough for the storage system alone to handle those rates: the interconnects between the nodes must also be able to handle them. - - -## Memory (RAM) Requirements - -In their [FAQ](https://rethinkdb.com/faq/), RethinkDB recommends that, "RethinkDB servers have at least 2GB of RAM..." ([source](https://rethinkdb.com/faq/)) - -In particular: "RethinkDB requires data structures in RAM on each server proportional to the size of the data on that server’s disk, usually around 1% of the size of the total data set." ([source](https://rethinkdb.com/limitations/)) We asked what they meant by "total data set" and [they said](https://github.com/rethinkdb/rethinkdb/issues/5902#issuecomment-230860607) it's "referring to only the data stored on the particular server." 
- -Also, "The storage engine is used in conjunction with a custom, B-Tree-aware caching engine which allows file sizes many orders of magnitude greater than the amount of available memory. RethinkDB can operate on a terabyte of data with about ten gigabytes of free RAM." ([source](https://www.rethinkdb.com/docs/architecture/)) (In this case, it's the _cluster_ which has a total of one terabyte of data, and it's the _cluster_ which has a total of ten gigabytes of RAM. That is, if you add up the RethinkDB RAM on all the servers, it's ten gigabytes.) - -In reponse to our questions about RAM requirements, @danielmewes (of RethinkDB) [wrote](https://github.com/rethinkdb/rethinkdb/issues/5902#issuecomment-230860607): - -> ... If you replicate the data, the amount of data per server increases accordingly, because multiple copies of the same data will be held by different servers in the cluster. - -For example, if you increase the data replication factor from 1 to 2 (i.e. the primary plus one copy), then that will double the RAM needed for metadata. Also from @danielmewes: - -> **For reasonable performance, you should probably aim at something closer to 5-10% of the data size.** [Emphasis added] The 1% is the bare minimum and doesn't include any caching. If you want to run near the minimum, you'll also need to manually lower RethinkDB's cache size through the `--cache-size` parameter to free up enough RAM for the metadata overhead... - -RethinkDB has [documentation about its memory requirements](https://rethinkdb.com/docs/memory-usage/). You can use that page to get a better estimate of how much memory you'll need. In particular, note that RethinkDB automatically configures the cache size limit to be about half the available memory, but it can be no lower than 100 MB. As @danielmewes noted, you can manually change the cache size limit (e.g. to free up RAM for queries, metadata, or other things). 
- -If a RethinkDB process (on a server) runs out of RAM, the operating system will start swapping RAM out to disk, slowing everything down. According to @danielmewes: - -> Going into swap is usually pretty bad for RethinkDB, and RethinkDB servers that have gone into swap often become so slow that other nodes in the cluster consider them unavailable and terminate the connection to them. I recommend adjusting RethinkDB's cache size conservatively to avoid this scenario. RethinkDB will still make use of additional RAM through the operating system's block cache (though less efficiently than when it can keep data in its own cache). - - -## Filesystem Requirements - -RethinkDB "supports most commonly used file systems" ([source](https://www.rethinkdb.com/docs/architecture/)) but it has [issues with BTRFS](https://github.com/rethinkdb/rethinkdb/issues/2781) (B-tree file system). - -It's best to use a filesystem that supports direct I/O, because that will improve RethinkDB performance (if you tell RethinkDB to use direct I/O). Many compressed or encrypted filesystems don't support direct I/O. +As mentioned on the page about [production node components](node-components.html), every machine running BigchainDB Server should be running an NTP daemon. 
From ffc08eaef933605bab94b6c19c832ed49fc84866 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 11 Apr 2017 16:29:08 +0200 Subject: [PATCH 067/150] updated docs page 'Set Up and Run a Cluster Node' --- .../example-rethinkdb-storage-setups.md | 25 ---- docs/server/source/appendices/index.rst | 1 - .../source/appendices/rethinkdb-reqs.md | 14 ++ .../source/production-nodes/setup-run-node.md | 122 ++++-------------- 4 files changed, 41 insertions(+), 121 deletions(-) delete mode 100755 docs/server/source/appendices/example-rethinkdb-storage-setups.md diff --git a/docs/server/source/appendices/example-rethinkdb-storage-setups.md b/docs/server/source/appendices/example-rethinkdb-storage-setups.md deleted file mode 100755 index 0fc4c273..00000000 --- a/docs/server/source/appendices/example-rethinkdb-storage-setups.md +++ /dev/null @@ -1,25 +0,0 @@ -# Example RethinkDB Storage Setups - -## Example Amazon EC2 Setups - -We have some scripts for [deploying a _test_ BigchainDB cluster on AWS](../clusters-feds/aws-testing-cluster.html). Those scripts include command sequences to set up storage for RethinkDB. -In particular, look in the file [/deploy-cluster-aws/fabfile.py](https://github.com/bigchaindb/bigchaindb/blob/master/deploy-cluster-aws/fabfile.py), under `def prep_rethinkdb_storage(USING_EBS)`. Note that there are two cases: - -1. **Using EBS ([Amazon Elastic Block Store](https://aws.amazon.com/ebs/)).** This is always an option, and for some instance types ("EBS-only"), it's the only option. -2. **Using an "instance store" volume provided with an Amazon EC2 instance.** Note that our scripts only use one of the (possibly many) volumes in the instance store. - -There's some explanation of the steps in the [Amazon EC2 documentation about making an Amazon EBS volume available for use](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-using-volumes.html). 
- -You shouldn't use an EC2 "instance store" to store RethinkDB data for a production node, because it's not replicated and it's only intended for temporary, ephemeral data. If the associated instance crashes, is stopped, or is terminated, the data in the instance store is lost forever. Amazon EBS storage is replicated, has incremental snapshots, and is low-latency. - - -## Example Using Amazon EFS - -TODO - - -## Other Examples? - -TODO - -Maybe RAID, ZFS, ... (over EBS volumes, i.e. a DIY Amazon EFS) diff --git a/docs/server/source/appendices/index.rst b/docs/server/source/appendices/index.rst index 7ff0cf9c..c34b752e 100755 --- a/docs/server/source/appendices/index.rst +++ b/docs/server/source/appendices/index.rst @@ -22,6 +22,5 @@ Appendices firewall-notes ntp-notes rethinkdb-reqs - example-rethinkdb-storage-setups licenses install-with-lxd diff --git a/docs/server/source/appendices/rethinkdb-reqs.md b/docs/server/source/appendices/rethinkdb-reqs.md index f5c41c92..f1351612 100644 --- a/docs/server/source/appendices/rethinkdb-reqs.md +++ b/docs/server/source/appendices/rethinkdb-reqs.md @@ -16,6 +16,20 @@ For RethinkDB's failover mechanisms to work, [every RethinkDB table must have at As for the read & write rates, what do you expect those to be for your situation? It's not enough for the storage system alone to handle those rates: the interconnects between the nodes must also be able to handle them. +**Storage Notes Specific to RethinkDB** + +* The RethinkDB storage engine has a number of SSD optimizations, so you _can_ benefit from using SSDs. ([source](https://www.rethinkdb.com/docs/architecture/)) + +* If you want a RethinkDB cluster to store an amount of data D, with a replication factor of R (on every table), and the cluster has N nodes, then each node will need to be able to store R×D/N data. + +* RethinkDB tables can have [at most 64 shards](https://rethinkdb.com/limitations/). 
For example, if you have only one table and more than 64 nodes, some nodes won't have the primary of any shard, i.e. they will have replicas only. In other words, once you pass 64 nodes, adding more nodes won't provide more storage space for new data. If the biggest single-node storage available is d, then the most you can store in a RethinkDB cluster is < 64×d: accomplished by putting one primary shard in each of 64 nodes, with all replica shards on other nodes. (This is assuming one table. If there are T tables, then the most you can store is < 64×d×T.) + +* When you set up storage for your RethinkDB data, you may have to select a filesystem. (Sometimes, the filesystem is already decided by the choice of storage.) We recommend using a filesystem that supports direct I/O (Input/Output). Many compressed or encrypted file systems don't support direct I/O. The ext4 filesystem supports direct I/O (but be careful: if you enable the data=journal mode, then direct I/O support will be disabled; the default is data=ordered). If your chosen filesystem supports direct I/O and you're using Linux, then you don't need to do anything to request or enable direct I/O. RethinkDB does that. + +

What is direct I/O? It allows RethinkDB to write directly to the storage device (or use its own in-memory caching mechanisms), rather than relying on the operating system's file read and write caching mechanisms. (If you're using Linux, a write-to-file normally writes to the in-memory Page Cache first; only later does that Page Cache get flushed to disk. The Page Cache is also used when reading files.)

+ +* RethinkDB stores its data in a specific directory. You can tell RethinkDB _which_ directory using the RethinkDB config file, as explained below. In this documentation, we assume the directory is `/data`. If you set up a separate device (partition, RAID array, or logical volume) to store the RethinkDB data, then mount that device on `/data`. + ## Memory (RAM) Requirements diff --git a/docs/server/source/production-nodes/setup-run-node.md b/docs/server/source/production-nodes/setup-run-node.md index cace5003..2d938dd0 100644 --- a/docs/server/source/production-nodes/setup-run-node.md +++ b/docs/server/source/production-nodes/setup-run-node.md @@ -1,17 +1,13 @@ # Set Up and Run a Cluster Node -This is a page of general guidelines for setting up a production node. It says nothing about how to upgrade software, storage, processing, etc. or other details of node management. It will be expanded more in the future. +This is a page of general guidelines for setting up a production BigchainDB node. Before continuing, please review the pages about production node [assumptions](node-assumptions.html), [components](node-components.html) and [requirements](node-requirements.html). + +Note: These are just guidelines. You can modify them to suit your needs. For example, if you want to initialize the MongoDB replica set before installing BigchainDB, you _can_ do that. We don't cover all possible setup procedures here. -## Get a Server - -The first step is to get a server (or equivalent) which meets [the requirements for a BigchainDB node](node-requirements.html). - - -## Secure Your Server - -The steps that you must take to secure your server depend on your server OS and where your server is physically located. There are many articles and books about how to secure a server. Here we just cover special considerations when securing a BigchainDB node. +## Security Guidelines +There are many articles, websites and books about securing servers, virtual machines, networks, etc. 
Consult those. There are some [notes on BigchainDB-specific firewall setup](../appendices/firewall-notes.html) in the Appendices. @@ -24,11 +20,9 @@ NTP is a standard protocol. There are many NTP daemons implementing it. We don't Please see the [notes on NTP daemon setup](../appendices/ntp-notes.html) in the Appendices. -## Set Up Storage for RethinkDB Data +## Set Up Storage for MongoDB -Below are some things to consider when setting up storage for the RethinkDB data. The Appendices have a [section with concrete examples](../appendices/example-rethinkdb-storage-setups.html). - -We suggest you set up a separate storage "device" (partition, RAID array, or logical volume) to store the RethinkDB data. Here are some questions to ask: +We suggest you set up a separate storage device (partition, RAID array, or logical volume) to store the data in the MongoDB database. Here are some questions to ask: * How easy will it be to add storage in the future? Will I have to shut down my server? * How big can the storage get? (Remember that [RAID](https://en.wikipedia.org/wiki/RAID) can be used to make several physical drives look like one.) @@ -39,67 +33,23 @@ We suggest you set up a separate storage "device" (partition, RAID array, or log * What's in the Service Level Agreement (SLA), if applicable? * What's the cost? -There are many options and tradeoffs. Don't forget to look into Amazon Elastic Block Store (EBS) and Amazon Elastic File System (EFS), or their equivalents from other providers. - -**Storage Notes Specific to RethinkDB** - -* The RethinkDB storage engine has a number of SSD optimizations, so you _can_ benefit from using SSDs. ([source](https://www.rethinkdb.com/docs/architecture/)) - -* If you want a RethinkDB cluster to store an amount of data D, with a replication factor of R (on every table), and the cluster has N nodes, then each node will need to be able to store R×D/N data. 
- -* RethinkDB tables can have [at most 64 shards](https://rethinkdb.com/limitations/). For example, if you have only one table and more than 64 nodes, some nodes won't have the primary of any shard, i.e. they will have replicas only. In other words, once you pass 64 nodes, adding more nodes won't provide more storage space for new data. If the biggest single-node storage available is d, then the most you can store in a RethinkDB cluster is < 64×d: accomplished by putting one primary shard in each of 64 nodes, with all replica shards on other nodes. (This is assuming one table. If there are T tables, then the most you can store is < 64×d×T.) - -* When you set up storage for your RethinkDB data, you may have to select a filesystem. (Sometimes, the filesystem is already decided by the choice of storage.) We recommend using a filesystem that supports direct I/O (Input/Output). Many compressed or encrypted file systems don't support direct I/O. The ext4 filesystem supports direct I/O (but be careful: if you enable the data=journal mode, then direct I/O support will be disabled; the default is data=ordered). If your chosen filesystem supports direct I/O and you're using Linux, then you don't need to do anything to request or enable direct I/O. RethinkDB does that. - -

What is direct I/O? It allows RethinkDB to write directly to the storage device (or use its own in-memory caching mechanisms), rather than relying on the operating system's file read and write caching mechanisms. (If you're using Linux, a write-to-file normally writes to the in-memory Page Cache first; only later does that Page Cache get flushed to disk. The Page Cache is also used when reading files.)

- -* RethinkDB stores its data in a specific directory. You can tell RethinkDB _which_ directory using the RethinkDB config file, as explained below. In this documentation, we assume the directory is `/data`. If you set up a separate device (partition, RAID array, or logical volume) to store the RethinkDB data, then mount that device on `/data`. +There are many options and tradeoffs. -## Install RethinkDB Server +## Install and Run MongoDB -If you don't already have RethinkDB Server installed, you must install it. The RethinkDB documentation has instructions for [how to install RethinkDB Server on a variety of operating systems](https://rethinkdb.com/docs/install/). - - -## Configure RethinkDB Server - -Create a RethinkDB configuration file (text file) named `instance1.conf` with the following contents (explained below): -```text -directory=/data -bind=all -direct-io -# Replace node?_hostname with actual node hostnames below, e.g. rdb.examples.com -join=node0_hostname:29015 -join=node1_hostname:29015 -join=node2_hostname:29015 -# continue until there's a join= line for each node in the cluster -``` - -* `directory=/data` tells the RethinkDB node to store its share of the database data in `/data`. -* `bind=all` binds RethinkDB to all local network interfaces (e.g. loopback, Ethernet, wireless, whatever is available), so it can communicate with the outside world. (The default is to bind only to local interfaces.) -* `direct-io` tells RethinkDB to use direct I/O (explained earlier). Only include this line if your file system supports direct I/O. -* `join=hostname:29015` lines: A cluster node needs to find out the hostnames of all the other nodes somehow. You _could_ designate one node to be the one that every other node asks, and put that node's hostname in the config file, but that wouldn't be very decentralized. Instead, we include _every_ node in the list of nodes-to-ask. 
- -If you're curious about the RethinkDB config file, there's [a RethinkDB documentation page about it](https://www.rethinkdb.com/docs/config-file/). The [explanations of the RethinkDB command-line options](https://rethinkdb.com/docs/cli-options/) are another useful reference. - -See the [RethinkDB documentation on securing your cluster](https://rethinkdb.com/docs/security/). - - -## Install Python 3.4+ - -If you don't already have it, then you should [install Python 3.4+](https://www.python.org/downloads/). - -If you're testing or developing BigchainDB on a stand-alone node, then you should probably create a Python 3.4+ virtual environment and activate it (e.g. using virtualenv or conda). Later we will install several Python packages and you probably only want those installed in the virtual environment. +* [Install MongoDB](https://docs.mongodb.com/manual/installation/) +* [Run MongoDB (mongod)](https://docs.mongodb.com/manual/reference/program/mongod/) ## Install BigchainDB Server -First, [install the OS-level dependencies of BigchainDB Server (link)](../appendices/install-os-level-deps.html). +### Install BigchainDB Server Dependencies -With OS-level dependencies installed, you can install BigchainDB Server with `pip` or from source. +* [Install OS-level dependencies](../appendices/install-os-level-deps.html) +* [Install Python 3.4+](https://www.python.org/downloads/) - -### How to Install BigchainDB with pip +### How to Install BigchainDB Server with pip BigchainDB (i.e. both the Server and the officially-supported drivers) is distributed as a Python package on PyPI so you can install it using `pip`. First, make sure you have an up-to-date Python 3.4+ version of `pip` installed: ```text @@ -131,7 +81,7 @@ pip3 install bigchaindb Note: You can use `pip3` to upgrade the `bigchaindb` package to the latest version using `pip3 install --upgrade bigchaindb`. 
-### How to Install BigchainDB from Source +### How to Install BigchainDB Server from Source If you want to install BitchainDB from source because you want to use the very latest bleeding-edge code, clone the public repository: ```text @@ -142,52 +92,34 @@ python setup.py install ## Configure BigchainDB Server -Start by creating a default BigchainDB config file: +Start by creating a default BigchainDB config file for a MongoDB backend: ```text -bigchaindb -y configure rethinkdb +bigchaindb -y configure mongodb ``` -(There's documentation for the `bigchaindb` command is in the section on [the BigchainDB Command Line Interface (CLI)](bigchaindb-cli.html).) +(There's documentation for the `bigchaindb` command is in the section on [the BigchainDB Command Line Interface (CLI)](../server-reference/bigchaindb-cli.html).) -Edit the created config file: +Edit the created config file by opening `$HOME/.bigchaindb` (the created config file) in your text editor: -* Open `$HOME/.bigchaindb` (the created config file) in your text editor. * Change `"server": {"bind": "localhost:9984", ... }` to `"server": {"bind": "0.0.0.0:9984", ... }`. This makes it so traffic can come from any IP address to port 9984 (the HTTP Client-Server API port). * Change `"keyring": []` to `"keyring": ["public_key_of_other_node_A", "public_key_of_other_node_B", "..."]` i.e. a list of the public keys of all the other nodes in the cluster. The keyring should _not_ include your node's public key. +* Ensure that `database.host` and `database.port` are set to the hostname and port of your MongoDB instance. (The port is usually 27017, unless you changed it.) -For more information about the BigchainDB config file, see [Configuring a BigchainDB Node](configuration.html). +For more information about the BigchainDB config file, see the page about the [BigchainDB configuration settings](../server-reference/configuration.html). 
-## Run RethinkDB Server +## Maybe Update the MongoDB Replica Set -Start RethinkDB using: +**If this isn't the first node in the BigchainDB cluster**, then you must add your MongoDB instance to the MongoDB replica set. You can do so using: ```text -rethinkdb --config-file path/to/instance1.conf +bigchaindb add-replicas your-mongod-hostname:27017 ``` -except replace the path with the actual path to `instance1.conf`. - -Note: It's possible to [make RethinkDB start at system startup](https://www.rethinkdb.com/docs/start-on-startup/). - -You can verify that RethinkDB is running by opening the RethinkDB web interface in your web browser. It should be at `http://rethinkdb-hostname:8080/`. If you're running RethinkDB on localhost, that would be [http://localhost:8080/](http://localhost:8080/). +where you must replace `your-mongod-hostname` with the actual hostname of your MongoDB instance, and you may have to replace `27017` with the actual port. -## Run BigchainDB Server +## Start BigchainDB -After all node operators have started RethinkDB, but before they start BigchainDB, one designated node operator must configure the RethinkDB database by running the following commands: -```text -bigchaindb init -bigchaindb set-shards numshards -bigchaindb set-replicas numreplicas -``` - -where: - -* `bigchaindb init` creates the database within RethinkDB, the tables, the indexes, and the genesis block. -* `numshards` should be set to the number of nodes in the initial cluster. -* `numreplicas` should be set to the database replication factor decided by the consortium. It must be 3 or more for [RethinkDB failover](https://rethinkdb.com/docs/failover/) to work. 
- -Once the RethinkDB database is configured, every node operator can start BigchainDB using: ```text bigchaindb start ``` From a673d9c6efcc2d37b72f545b2170ead2995762d3 Mon Sep 17 00:00:00 2001 From: vrde Date: Tue, 11 Apr 2017 16:34:50 +0200 Subject: [PATCH 068/150] Add more code coverage --- tests/web/test_websocket_server.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index b205fb25..ee0cfc6e 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -1,5 +1,6 @@ import json import random +from unittest.mock import patch import pytest import asyncio @@ -114,6 +115,19 @@ def test_capped_queue(loop): assert async_queue.qsize() == 0 +@patch('threading.Thread.start') +@patch('aiohttp.web.run_app') +@patch('bigchaindb.web.websocket_server.init_app') +@patch('asyncio.get_event_loop', return_value='event-loop') +@patch('asyncio.Queue', return_value='event-queue') +def test_start_creates_an_event_loop(queue_mock, get_event_loop_mock, init_app_mock, run_app_mock, thread_start_mock): + from bigchaindb.web.websocket_server import start + + start(None) + + init_app_mock.assert_called_with('event-queue', loop='event-loop') + + @asyncio.coroutine def test_websocket_string_event(test_client, loop): from bigchaindb.web.websocket_server import init_app, POISON_PILL, EVENTS_ENDPOINT From beace99fc0a1de9d8c69c34698b4463f33a7e2c7 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 11 Apr 2017 16:47:30 +0200 Subject: [PATCH 069/150] some edits to setup-run-node.md --- .../source/production-nodes/setup-run-node.md | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/docs/server/source/production-nodes/setup-run-node.md b/docs/server/source/production-nodes/setup-run-node.md index 2d938dd0..cee34508 100644 --- a/docs/server/source/production-nodes/setup-run-node.md +++ b/docs/server/source/production-nodes/setup-run-node.md @@ 
-1,8 +1,8 @@ # Set Up and Run a Cluster Node -This is a page of general guidelines for setting up a production BigchainDB node. Before continuing, please review the pages about production node [assumptions](node-assumptions.html), [components](node-components.html) and [requirements](node-requirements.html). +This is a page of general guidelines for setting up a production BigchainDB node. Before continuing, make sure you've read the pages about production node [assumptions](node-assumptions.html), [components](node-components.html) and [requirements](node-requirements.html). -Note: These are just guidelines. You can modify them to suit your needs. For example, if you want to initialize the MongoDB replica set before installing BigchainDB, you _can_ do that. We don't cover all possible setup procedures here. +Note: These are just guidelines. You can modify them to suit your needs. For example, if you want to initialize the MongoDB replica set before installing BigchainDB, you _can_ do that. If you'd prefer to use Docker and Kubernetes, you can (and [we have a template](../cloud-deployment-templates/node-on-kubernetes.html)). We don't cover all possible setup procedures here. ## Security Guidelines @@ -13,7 +13,9 @@ There are some [notes on BigchainDB-specific firewall setup](../appendices/firew ## Sync Your System Clock -A BigchainDB node uses its system clock to generate timestamps for blocks and votes, so that clock should be kept in sync with some standard clock(s). The standard way to do that is to run an NTP daemon (Network Time Protocol daemon) on the node. (You could also use tlsdate, which uses TLS timestamps rather than NTP, but don't: it's not very accurate and it will break with TLS 1.3, which removes the timestamp.) +A BigchainDB node uses its system clock to generate timestamps for blocks and votes, so that clock should be kept in sync with some standard clock(s). 
The standard way to do that is to run an NTP daemon (Network Time Protocol daemon) on the node. + +MongoDB also recommends having an NTP daemon running on all MongoDB nodes. NTP is a standard protocol. There are many NTP daemons implementing it. We don't recommend a particular one. On the contrary, we recommend that different nodes in a cluster run different NTP daemons, so that a problem with one daemon won't affect all nodes. @@ -35,6 +37,8 @@ We suggest you set up a separate storage device (partition, RAID array, or logic There are many options and tradeoffs. +Consult the MongoDB documentation for its recommendations regarding storage hardware, software and settings, e.g. in the [MongoDB Production Notes](https://docs.mongodb.com/manual/administration/production-notes/). + ## Install and Run MongoDB @@ -46,8 +50,7 @@ There are many options and tradeoffs. ### Install BigchainDB Server Dependencies -* [Install OS-level dependencies](../appendices/install-os-level-deps.html) -* [Install Python 3.4+](https://www.python.org/downloads/) +Before you can install BigchainDB Server, you must [install its OS-level dependencies](../appendices/install-os-level-deps.html) and you may have to [install Python 3.4+](https://www.python.org/downloads/). 
### How to Install BigchainDB Server with pip From 79997848cd469fe75c237c1f97312c34f5f1c2f5 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 11 Apr 2017 17:21:25 +0200 Subject: [PATCH 070/150] Refine test for the election pipeline process test that the process is started with the events_queue kwargs --- tests/test_processes.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/test_processes.py b/tests/test_processes.py index 00716010..e6503541 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -9,8 +9,9 @@ from bigchaindb.pipelines import vote, block, election, stale @patch.object(block, 'start') @patch.object(vote, 'start') @patch.object(Process, 'start') -def test_processes_start(mock_process, mock_vote, mock_block, mock_election, - mock_stale): +@patch('bigchaindb.events.setup_events_queue', spec_set=True, autospec=True) +def test_processes_start(mock_setup_events_queue, mock_process, mock_vote, + mock_block, mock_election, mock_stale): from bigchaindb import processes processes.start() @@ -19,5 +20,5 @@ def test_processes_start(mock_process, mock_vote, mock_block, mock_election, mock_block.assert_called_with() mock_stale.assert_called_with() mock_process.assert_called_with() - # the events queue is declared inside processes.start() - assert mock_election.call_count == 1 + mock_election.assert_called_once_with( + events_queue=mock_setup_events_queue.return_value) From e0e997755e8666dc495a1e0c15c831437baf7731 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 11 Apr 2017 18:31:56 +0200 Subject: [PATCH 071/150] Re-order imports (pep8) --- tests/web/test_websocket_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index ee0cfc6e..55564ec2 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -1,9 +1,9 @@ +import asyncio import json import random from unittest.mock 
import patch import pytest -import asyncio from bigchaindb.models import Transaction From 98e52e047e866027d0210c7d1d6749414afc4e35 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 11 Apr 2017 18:32:21 +0200 Subject: [PATCH 072/150] Make utility test function into a fixture --- tests/web/test_websocket_server.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index 55564ec2..403b037d 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -8,7 +8,9 @@ import pytest from bigchaindb.models import Transaction -def create_block(b, total=1): +@pytest.fixture +def _block(b, request): + total = getattr(request, 'param', 1) transactions = [ Transaction.create( [b.me], @@ -154,7 +156,8 @@ def test_websocket_string_event(test_client, loop): @asyncio.coroutine -def test_websocket_block_event(b, test_client, loop): +@pytest.mark.parametrize('_block', (10,), indirect=('_block',), ids=('block',)) +def test_websocket_block_event(b, _block, test_client, loop): from bigchaindb import events from bigchaindb.web.websocket_server import init_app, POISON_PILL, EVENTS_ENDPOINT @@ -162,7 +165,7 @@ def test_websocket_block_event(b, test_client, loop): app = init_app(event_source, loop=loop) client = yield from test_client(app) ws = yield from client.ws_connect(EVENTS_ENDPOINT) - block = create_block(b, 10).to_dict() + block = _block.to_dict() block_event = events.Event(events.EventTypes.BLOCK_VALID, block) yield from event_source.put(block_event) From 8361fae8159b3a6fd4b4baccffbe25a15a7e2d1c Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 11:16:49 +0200 Subject: [PATCH 073/150] clarified notes on RethinkDB storage based on @r-marques comments --- docs/server/source/appendices/rethinkdb-reqs.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/server/source/appendices/rethinkdb-reqs.md 
b/docs/server/source/appendices/rethinkdb-reqs.md index f1351612..0d3468b7 100644 --- a/docs/server/source/appendices/rethinkdb-reqs.md +++ b/docs/server/source/appendices/rethinkdb-reqs.md @@ -20,9 +20,9 @@ As for the read & write rates, what do you expect those to be for your situation * The RethinkDB storage engine has a number of SSD optimizations, so you _can_ benefit from using SSDs. ([source](https://www.rethinkdb.com/docs/architecture/)) -* If you want a RethinkDB cluster to store an amount of data D, with a replication factor of R (on every table), and the cluster has N nodes, then each node will need to be able to store R×D/N data. +* If you have an N-node RethinkDB cluster and 1) you want to use it to store an amount of data D (unique records, before replication), 2) you want the replication factor to be R (all tables), and 3) you want N shards (all tables), then each BigchainDB node must have storage space of at least R×D/N. -* RethinkDB tables can have [at most 64 shards](https://rethinkdb.com/limitations/). For example, if you have only one table and more than 64 nodes, some nodes won't have the primary of any shard, i.e. they will have replicas only. In other words, once you pass 64 nodes, adding more nodes won't provide more storage space for new data. If the biggest single-node storage available is d, then the most you can store in a RethinkDB cluster is < 64×d: accomplished by putting one primary shard in each of 64 nodes, with all replica shards on other nodes. (This is assuming one table. If there are T tables, then the most you can store is < 64×d×T.) +* RethinkDB tables can have [at most 64 shards](https://rethinkdb.com/limitations/). What does that imply? Suppose you only have one table, with 64 shards. How big could that table be? It depends on how much data can be stored in each node. If the maximum amount of data that a node can store is d, then the biggest-possible shard is d, and the biggest-possible table size is 64 times that. 
(All shard replicas would have to be stored on other nodes beyond the initial 64.) If there are two tables, the second table could also have 64 shards, stored on 64 other maxed-out nodes, so the total amount of unique data in the database would be (64 shards/table)×(2 tables)×d. In general, if you have T tables, the maximum amount of unique data that can be stored in the database (i.e. the amount of data before replication) is 64×T×d. * When you set up storage for your RethinkDB data, you may have to select a filesystem. (Sometimes, the filesystem is already decided by the choice of storage.) We recommend using a filesystem that supports direct I/O (Input/Output). Many compressed or encrypted file systems don't support direct I/O. The ext4 filesystem supports direct I/O (but be careful: if you enable the data=journal mode, then direct I/O support will be disabled; the default is data=ordered). If your chosen filesystem supports direct I/O and you're using Linux, then you don't need to do anything to request or enable direct I/O. RethinkDB does that. 
From 8b861131d87d80f3b3a72ca4e5bee1d24b98a4e4 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 11:18:20 +0200 Subject: [PATCH 074/150] docs: added that we require MongoDB 3.4+ --- docs/server/source/production-nodes/node-components.md | 2 +- docs/server/source/production-nodes/setup-run-node.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/server/source/production-nodes/node-components.md b/docs/server/source/production-nodes/node-components.md index 62c5c9a9..e95a26b7 100644 --- a/docs/server/source/production-nodes/node-components.md +++ b/docs/server/source/production-nodes/node-components.md @@ -3,7 +3,7 @@ A production BigchainDB node must include: * BigchainDB Server -* MongoDB Server (mongod) +* MongoDB Server 3.4+ (mongod) * Scalable storage for MongoDB It could also include several other components, including: diff --git a/docs/server/source/production-nodes/setup-run-node.md b/docs/server/source/production-nodes/setup-run-node.md index cee34508..42a11b04 100644 --- a/docs/server/source/production-nodes/setup-run-node.md +++ b/docs/server/source/production-nodes/setup-run-node.md @@ -42,7 +42,7 @@ Consult the MongoDB documentation for its recommendations regarding storage hard ## Install and Run MongoDB -* [Install MongoDB](https://docs.mongodb.com/manual/installation/) +* [Install MongoDB 3.4+](https://docs.mongodb.com/manual/installation/). (BigchainDB only works with MongoDB 3.4+.) 
* [Run MongoDB (mongod)](https://docs.mongodb.com/manual/reference/program/mongod/) From a6ce7b40aa1bd6915f12a2476bd737c307a7b1c1 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 11:23:34 +0200 Subject: [PATCH 075/150] docs fix: the bigchaindb package on PyPI no longer includes the Python driver --- docs/server/source/production-nodes/setup-run-node.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/server/source/production-nodes/setup-run-node.md b/docs/server/source/production-nodes/setup-run-node.md index 42a11b04..c1777cff 100644 --- a/docs/server/source/production-nodes/setup-run-node.md +++ b/docs/server/source/production-nodes/setup-run-node.md @@ -54,7 +54,7 @@ Before you can install BigchainDB Server, you must [install its OS-level depende ### How to Install BigchainDB Server with pip -BigchainDB (i.e. both the Server and the officially-supported drivers) is distributed as a Python package on PyPI so you can install it using `pip`. First, make sure you have an up-to-date Python 3.4+ version of `pip` installed: +BigchainDB is distributed as a Python package on PyPI so you can install it using `pip`. 
First, make sure you have an up-to-date Python 3.4+ version of `pip` installed: ```text pip -V ``` @@ -74,7 +74,7 @@ pip3 install --upgrade pip setuptools pip3 -V ``` -Now you can install BigchainDB Server (and officially-supported BigchainDB drivers) using: +Now you can install BigchainDB Server using: ```text pip3 install bigchaindb ``` From de445bb977a583c79f7c5a723e0ab6c86a82ae58 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 12 Apr 2017 11:58:43 +0200 Subject: [PATCH 076/150] simplifications and clarifications for review of federation tests --- tests/integration/test_federation.py | 36 +++++++++++----------------- 1 file changed, 14 insertions(+), 22 deletions(-) diff --git a/tests/integration/test_federation.py b/tests/integration/test_federation.py index c18c65de..00c59685 100644 --- a/tests/integration/test_federation.py +++ b/tests/integration/test_federation.py @@ -24,15 +24,26 @@ def federation(n): @contextmanager def make_nodes(i): + """ + make_nodes is a recursive context manager. Essentially it is doing: + + with f(a[0]) as b0: + with f(a[1]) as b1: + with f(a[2]) as b2: + yield [b0, b1, b2] + + with an arbitrary depth. It is also temporarily patching global + configuration to simulate nodes with separate identities. 
+ """ nonlocal keys if i == 0: yield [] else: config = deepcopy(config_orig) - keys = [keys[-1]] + keys[:-1] + keys = [keys[-1]] + keys[:-1] # Rotate keys + config['keyring'] = [pub for _, pub in keys[1:]] config['keypair']['private'] = keys[0][0] config['keypair']['public'] = keys[0][1] - config['keyring'] = list(list(zip(*keys[1:]))[1]) bigchaindb.config = config stepper = create_stepper() with stepper.start(): @@ -143,26 +154,6 @@ def test_elect_disagree_prev_block(federation_3): assert bx[i].get_transaction(tx.id, True)[1] is None -@pytest.mark.skip() # TODO: wait for #1309 -@pytest.mark.bdb -@pytest.mark.genesis -def test_elect_dupe_vote(federation_3): - from bigchaindb.exceptions import CriticalDuplicateVote - [bx, (s0, s1, s2)] = federation_3 - tx = input_single_create(bx[0]) - process_tx(s0) - process_tx(s1) - process_tx(s2) - vote = process_vote(s0, True) - # Drop the unique index and write the vote again - bx[0].connection.db.votes.drop_index('block_and_voter') - s0.queues['vote_write_vote'].append([vote]) - s0.vote_write_vote() - for i in range(3): - with pytest.raises(CriticalDuplicateVote): - bx[i].get_transaction(tx.id, True)[1] - - @pytest.mark.bdb @pytest.mark.genesis def test_elect_sybill(federation_3): @@ -172,6 +163,7 @@ def test_elect_sybill(federation_3): process_tx(s1) process_tx(s2) # What we need is some votes from unknown nodes! + # Incorrectly signed votes are ineligible. 
for s in [s0, s1, s2]: s.vote.bigchain.me_private = generate_key_pair()[0] process_vote(s0, True) From 3bf1f9fa176caae457486f46acdee64e353f4e80 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Wed, 12 Apr 2017 12:16:42 +0200 Subject: [PATCH 077/150] add additional test of negative amount to fix codecov --- tests/common/test_transaction.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/common/test_transaction.py b/tests/common/test_transaction.py index cdb270cf..4b72164c 100644 --- a/tests/common/test_transaction.py +++ b/tests/common/test_transaction.py @@ -237,6 +237,7 @@ def test_generate_output_single_owner_with_output(user_pub): def test_generate_output_invalid_parameters(user_pub, user2_pub, user3_pub): from bigchaindb.common.transaction import Output + from bigchaindb.common.exceptions import AmountError with raises(ValueError): Output.generate([], 1) @@ -246,6 +247,8 @@ def test_generate_output_invalid_parameters(user_pub, user2_pub, user3_pub): Output.generate([[user_pub, [user2_pub, [user3_pub]]]], 1) with raises(ValueError): Output.generate([[user_pub]], 1) + with raises(AmountError): + Output.generate([[user_pub]], -1) def test_invalid_transaction_initialization(asset_definition): From 75dd645ec9dfdc2b39918bce11c2a31c215b2b75 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Wed, 12 Apr 2017 13:47:58 +0200 Subject: [PATCH 078/150] Import stdlib pkgs at the top of the test module --- tests/web/test_websocket_server.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index 403b037d..3b3f2e39 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -1,6 +1,9 @@ import asyncio import json +import queue import random +import threading +import time from unittest.mock import patch import pytest @@ -32,8 +35,6 @@ class MockWebSocket: @asyncio.coroutine def test_bridge_sync_async_queue(loop): - import queue - import 
threading from bigchaindb.web.websocket_server import _multiprocessing_to_asyncio sync_queue = queue.Queue() @@ -87,9 +88,6 @@ def test_put_into_capped_queue(loop): @asyncio.coroutine def test_capped_queue(loop): - import queue - import threading - import time from bigchaindb.web.websocket_server import _multiprocessing_to_asyncio sync_queue = queue.Queue() From e614834a0360ce8e474fb5334972710a99e2821d Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Wed, 12 Apr 2017 13:49:10 +0200 Subject: [PATCH 079/150] Import Transaction class within fixture --- tests/web/test_websocket_server.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index 3b3f2e39..4323685a 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -8,11 +8,10 @@ from unittest.mock import patch import pytest -from bigchaindb.models import Transaction - @pytest.fixture def _block(b, request): + from bigchaindb.models import Transaction total = getattr(request, 'param', 1) transactions = [ Transaction.create( From 0347fbccf49d95c3add28ce9339d58025aad439d Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Wed, 12 Apr 2017 13:50:09 +0200 Subject: [PATCH 080/150] Add a few more checks to the test --- tests/web/test_websocket_server.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index 4323685a..13015dbb 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -114,17 +114,30 @@ def test_capped_queue(loop): assert async_queue.qsize() == 0 -@patch('threading.Thread.start') +@patch('threading.Thread') @patch('aiohttp.web.run_app') @patch('bigchaindb.web.websocket_server.init_app') @patch('asyncio.get_event_loop', return_value='event-loop') @patch('asyncio.Queue', return_value='event-queue') -def 
test_start_creates_an_event_loop(queue_mock, get_event_loop_mock, init_app_mock, run_app_mock, thread_start_mock): - from bigchaindb.web.websocket_server import start +def test_start_creates_an_event_loop(queue_mock, get_event_loop_mock, + init_app_mock, run_app_mock, + thread_mock): + from bigchaindb import config + from bigchaindb.web.websocket_server import start, _multiprocessing_to_asyncio start(None) - + thread_mock.assert_called_once_with( + target=_multiprocessing_to_asyncio, + args=(None, queue_mock.return_value, get_event_loop_mock.return_value), + daemon=True, + ) + thread_mock.return_value.start.assert_called_once_with() init_app_mock.assert_called_with('event-queue', loop='event-loop') + run_app_mock.assert_called_once_with( + init_app_mock.return_value, + host=config['wsserver']['host'], + port=config['wsserver']['port'], + ) @asyncio.coroutine From 40e3c78c9a725b7b2c6e6467fe09f709c80a7b60 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 13:55:07 +0200 Subject: [PATCH 081/150] modified the Bigchaindb node diagram & notes on it --- .../server/source/_static/Node-components.png | Bin 36249 -> 38521 bytes .../production-nodes/node-components.md | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/_static/Node-components.png b/docs/server/source/_static/Node-components.png index 427bc4c6cbdc8a10ed5d57d7712f113bd7646fd8..326b6af69c44b7c817275afbd33954af8d1f791d 100644 GIT binary patch literal 38521 zcmdSBWmMH)6gGGf6{Mw+Zlt9fL_)edE*;VhmzEX*k&-R}k?t-j>Fx&U?uI%1-+ABp zG_z)A&6oMD<+{sz;oK8@Kl?d*KbtT`c}Y~HH%Jf&1Xb#zm@)+N%m@5qL3{?@F*SH^ z3SQuyM5R;_5fSHB6qdn%37o|>omK42oZSo^O(EvCb~dI=PR5R=rnXKNcFu?JEkY0o z8AM9#y{dc4-h!{L%ETkWk+Hm1x&2m0KBc65CxjRY4hatK9S%IE*fUJ*Q@1||va)xf z{;%Hphh_P{@`w8j$BXUQ)&G`VQHeEAKJhafj_F9L65j2DgYd%PGqLL`rI@Zm% zx2uetH5`skbmh)ZNsRUs|H7*@nqEg5ufn{Fv01?@S)mfZ3^oB3-M7@3XBG)<X*frs*!h((QmEFj$!mx$&M2%{)dA5qwM@x>$U@o^V>RpZd306szB?S&fF7vH=*z 
zuqZ~8p40BedA^-IZ3B|r4-V7Wd(X?i#b9zzU!GvO{%Zq`Q@~5a^%YoH5E-n^sjBkV zUuLk+;BywR+Zjq2P2j~(;{Dl^UA=iR0%ioU!2Un^a4zJ*TFLuzpc!3oF_AsO^5Mqn z)I8Z5Rp)JkOThHXaW+Dq_Xe@Fd~3@>6_ZijP@_KMvd_Xo-oQYUy9gLq5Mg9a!=%R6 zoo{fKpz8Rz9l=%JY3cNW(1!IQWWHUfkvA_AZ}qmavQjlRHCn1OU#?7J9B`oy;Ir>pq>0`G$Gaq(64FW+y!Dwnav~+ShMwgV_+PPuUn|K%r z^6BFe>wMvL=5QNAG5K;J-A2^w)^p4*-A0$+{EgFtbqw#=4;j2>9hWj)Z*#Bk^yK@w zT*ghUl*`35hmAbUyOA8^B^)A_))vkBqXm7T4^O)VAs%cxoIHm$7?1=B)Ot?N!3R*FVhtN5oxI+2+># zup1ZWgKRd};C{oLAGa{rCyR=z?{zi3oli5Cy6Gw4J?nM9gR|*DvW5yhY={BQIjjcO zsNj10QuOaP!F*bTo~s*%nIgj1BgaBALVs%dPtT5>Q_g2-txjcFmUYs&xJyv(oyE6k z?sPs}5YBNI37@L;sK#uQGU|EX@5JqDlz5d|pS$BenbjHP@XEY~{utp*Odj64c}p($ zXrJBNGSjmkWyOacJ+=$q^dIA4=kwJayNGnq4rv3|@>TZzKPN^thrm-jyD*61SmL zTmc(f+s;y%byF)Lug6W5&Xjz+mqlp%cZj6LSNP2 z+x+v^#cgwl%m0xCoZ>D(<=lc1e)}h&5+U`s`hNmj$Z0a0;=3SxLbhIp>L2N%1eBR1 zfd4%qxD^e)e521f(u|tLp9p9H3N-zrE6)Ew%Z9F1rUK+maTATqS2(8Tg@r{u{}WoL zp0l9#_6baHsQb-sS~{Jg*CmoNp;9$q{O_o|#udda4SfW*7OnSjX%?$yqBHJtBYEua z{|F589Ho8bEqy71-4~a+OX`8vJ3IS+F9SdHyu#TGG&M&MZ{Y(dOfp~hi=PTJtboC8 zFK`Oa{Z@{heN)yuX73BkA2@k0lEZ8>OS&PGa6C49qHH+>K=A$NBXe@}arETxjz8M! zpU3t6P}jYg7|Bc;N~)qFBja%=5GT|*1(Q?|IlFjPr(3L?e{*mpAtAxZ$?5Ip6^Icb zVa$r}ee>iB_83j@t>4IxjGxAZqfGgK5S{}||qut;PAO2;#0f&=7 z%saC)#D&);2~wZk%<3MTGXwYNKb}@zTdu8j2|wPQ$`35?b5^-oeX{z=d49QW*JNsG zet9wHYBx7G_W=bS@)F}DyQaMS`_Sg2ec{nm!IasY4u5SUKn$*SzxTFk_r0p{6}#vK ze~YtxEpPTOPs+cXn)j9H{;DUhyH30H^lA1fE-uCfJZTgHS$^>`Tt7}a?%R)Vv9V{D zOGDO6iS0VeM!a*Pl9C~@MmM~ni-SFf!heU}-bJ&x+dsZHP*il+b95|8RyGgWOmv2Vt%m_@-qxH{&Mx(+7HfM@)7>_^EV`9^HpCa=5y@2CTWo45ffkpRmX?<*dn3p6m{iQzbiF!QY$7DD^trzwY1SQ5 zkfy^<6Y_@je5bbNx((uA6J|SSa^_2IGtku1a^N94-;axn3knLdxiBfyl$V!x=4
  • 0J|3f`11G zgC92G#OSd3`1oGFd`ZIR{EH@2ukp9HxBdP7 z_uaBbETp8QWa1gO^{W>ArigHGs$6!}Bbw-nhe86RsW9E5E@cPge0aREC6wS$lP%6} z{0o~TTl(DWou@JDs2Lf*?tKwyQPj}DBOsVDgq&4%baW7~n=M0Ys;f_~?>aifpwOjc zZo5^RrElLb1~ewfJw0!K{rZ()d4lOKu-Fwr7-o04&`{(1=p&SVxV5FJuP>iH5*qRh z9)3dQ;_mwN-Me>fZEY2118kg}`w=I%Rm~BLjoBV;eevF43)e4w(wi|LshU^*d3Ds> zUh6deBHxme3m$IK{elO6y!+7fdi&qM@!{cbiHTau%AYS$E*lH#YIt~foU3T#drYc5 zzC=Yu<>k?)4@HHChet-zR9DPcp9>Nb5p6l&^u{qbIXb@laCR{v`W-)5+%`uiANJyV7Ea(^+bxRHWv$L~nYTTrHkS7!K zfBiy4LUMF|dQebNfgN;=a9K}_5UEpPzE;RDF78Tab32${XmB&epKM0C^GoKkQGSea zK3Zu43ZhO>J19qN0MC|54y#W-hL(su~z0eDkK-dggP>Tyt}?Xr|)5 zQk`zIX&>(0c3N6m2s)|H%C@jVx>$*5=8r?JM|TIMwtPxfH^ein4{txz&`J!rmR47n zbv-8e`L9kE=~!A?W@cvU>+6HB!4Jm4#y)aylz%uoTx^;f9WBqu@M|*|wy32rO4@^o<-rd>35S7kRritO?=3Y~H9VQkZACH5Bqe@>@Ud~2EWpaJ8zJpL%QL%P< zElU$4pGQkWla!dKrmBiDBqt+-jD*zN-w%Ls$6=8drTzIcxH##mnwsvKf9p@r3X~yE z8V@hw=*T`uw9TjtoWHfTwX%vz81X;Hy1F{vX87cR;=;nSqoe6cODu;wd3kUY0K;x> zZ#6VDfU)tV>MJTPudKZHvi((9C@Uk=E^PRB{RCW~Z04$u-31b5gE24>pNHcC7>M#+ z;a!zm2@VeC%I=@#ft}Hb{v8-7YL`5I1~%Eq+&oVoqQn$z2#ykL62K!yT$vnYaWSzU z$;nuxbtY_yzzwXe8G}R(S@Fxt%4~{xK2l+NAp~NCcVdQjg5w8>q2gQMwm&PLH+Ebz zyR-zo_x+e~<`Yt(oL}iy%FUJh-F+)OGd8oy#|r|Z_%~K%+)cpKz=JH0^fe;l$qC@n zhgzJsF&7EL(7(-8StUe8DclM&(9?^Giq`5LgkjMhjR;OmOn~otczBp9)^l=kN#%3- z9Uc8Ts=GaeY{j=XN7>TIh+=W6K#`7-iD_(f)b(J#$AFcdfuX&2A%(|3>Q7N| zadkz->G^q+>;9Xv;3QT0qWpZYtkko@pLu!dLf&s4wV4xuU-Ye?^!N9pP5FmKMd5{s z0YCwGB!T!gPTJ1yaCOya^=3C1m5>TkN?ZGl&w^@>@<1v-Hy79B&=7h{>1dA>u^>#9 z3?nckh15Q}U6Nm|@SK`Nzb>z~J1gJTDIh`ucPl-5n$(UOr&;^zMJ`Q^)>5D`)y zdlBXYth^}0AKBT)pFVv85W_urw7ss*5^N|G?)^1a_UnTq!$0P&bguwhCTPw0OeX;> z4I#d@Rb-Vu1H`7XvUiK_Hg>>`5HkYy<<%6=8iY)w`t2Ea>QTrs-sjE;^T zh_U#--0ctqYD`Sb&>5E8++479fM#Nm6oC|!lrz>`5fWDHsSg%V^ZN$}LCB)%Vo=H-2MAJSHHEn_m6|f%5r$=KWHipg zL`X=OoRow)&~YIkAP|aN+11sBi;K&0Ez@DZ`b|5JPxK=lHs;|6W##XhjJadmAmEIU9esmdfMfGoOZEby>8aEu6AvV&B^dUD_*DjDP=rAe+a}J{n z7t8wc<;&A{JkMACBr5t$bZfaSU@JZC2%OSNlrmlH*RNl}I$i8URat6aA4ETp2>Be@ z^tLSuGD4dAxGkbr!~V35yw|0fJW>@Mwz0_#d6(F~smB8x`llkDP1o(RV7*b9`#=&W 
zu~5Ol7D?koD6PLHqXc4HZ0s+n0bpJdh);lHXkcV?@}~ljfa>y_r9UshDW9zOfe;S7 zUp-&dW@BP}Vxni=(bw1aJgx_9cA2!SiwkG4VMifVN>LG0^9#FPH%H~k{~kp0Ffqr? zLEM_VjnP?tC3%-zpJV^Q{}rAE=(Km zSkM-~cOfXR95Li})DWR{PfWO+{Oifj&j(m=F#46|OyQ>57x(x>cZ-Ee>CMB{YXIcn znI(GeoBjkjX##cLx2~CrntFP~goJ$ttN_lAtl;z}tcV9e?9{2Tl^dMS7M5v?2}c z$X|J%m3EIvj?&I|SV7&<*(okA?oc-mU;xN=XdM~XhR#|H!p;Zj6ARq5JF;T6}Y&#aIvuq^%`*7 zOaK1;dl2Dtkh%q6^g?T|g5PC|FeU=F?gz1GRqP#3?MbJlJ>+-U!Jet|TkKeORHMgP zl#y*^>2iSH`7&88p)MQKC<0RQ>4RoadFuvEMdeA?=%AR;Jo$QM=G(8 zz=47LaH)lLt{S!nH}&t)cv%+R-Q59TdqYU5LWhmgeq(g>c(FgHMz3aXe}t^lb5-YT zJdPRAW>7I>R8~8H@Y;(tAuQ}wLSGJWr3abi)P9~jlQHxrYdzVz^xB_A(KBS+c8-QDZWeUFY4qo)rCgd>dC$J$vmwxuQgW)sRf z%%oF|LXQ3BjWh=VAPPpt#(+-Ugl;7qYN4W`Ngx*K)I_>}(Pf4pY_l@-M|jEjPT;-BMnz02;=nT>^oV)}q+-q^zYJfH~jj;A;Z61}gx|rd3GWTVKyn z&ObRhiIq0}^of3PrQfK`fH*yviZ2b&{={-tld zxpa<;m6er(;?LmV;L;K)ITZ%tPvy4>eMNOGB#cVCc}mI=eTU( zzv~sa|17xc1rBcvBm*h~0>DQqM^{&HYxfdLm(RmOn6EI8(qzA_l0h$yi-g)oHFEE-48J!kS$b;6h>)E2q~W#4xBAZb?)LMl1rBrN{4< zvzd2s!~^+eai3?UTZG4`Q(ZU!`6z-5E@AliAHOc7a#$)G7%cykiW$FLId)I&Sd1j* zx3;kXwB^c6C{EhN$q5-5nX;G@xBoJK$zLk14ed9+nBTbSNaW+^$1?u;S`=`)jwx&;YHscta8<3X)yz@8 zzP)A9D*rd30i`zjI$)-urq3T_4mv2r4Ftg zpPZfoC{IaEWs3e-SX4CO*5ylK!#w$$lxQily~2te6aUe&dIoS|9ZgL}-P$9MwLg4s zA-;2Ov3q-BXwivy{7R&Np??q+1=onsbh4P9M(1s1a7A@+0Q=bXC}X|UoXV41Rpp|l zrsn0nznR|58~zCH$S?u3p% zBIxH^+9hHXJ%9gRxv%Xk{(cE5XFNMsyq_8EfgwXI^j%%6!M)&SI1e~IS>RFJyVll; zZ1s}4XEpg_ybPjz_{0#{>q;2sTAtC$1B(g+i%WGsBP59m8%a-Bmkt{#svF#vf$`AC z!8|-nO->eC zA16V50T&cMxWLT^TM!Uv`?}#FB110hFd1p-jp?Duk&*3%PY|+%$jHIQOTGqVMv&*# z<<912;NM-imS>kVj2Z%qcYQOs3k|RFA&|ze%#wEE;xEiksP2_+yp^6td)|IrUr$NT zV5r3a5lP>2JX>sVivpkwjeREa_|9}T7R1tMy%!J>eivTYy5qG4d{_7-yaAbKKzncy zCj{dCvu{L4L&Ll^aO;J3WM>)#)VsDQ^&%HooH%NKhdzg_OTb* z66QEH?CipEH)_C|v4avGb>V=#_G!M&z=wxpGFRgXgKK1ANHn0TkiGqY8 z(}4J4)E()P!8ZT2?8r33=l8ao@j-8I*$C4)zCYDv@4`2-E;mVEG#^xOTsT7Pr(X`b zcKyK;{$C%~ta{}%adLS0OLz>VUymKfJlXnxDaR(A+i%9tE5iIF z_xqql;Vp7(RZ zuD&!EsC44lO@#^{E{+|4{_r_<5`n*||3P9hy?W*kzBu~^>-|!VqYLY{m%_%6=xRHM 
z$>)5fsFH3cn5YV|b1i&ndYa6>(}5USiuz?~*AgAti(a%93HhLmB9gx2Hd64Z&7>+S z@g=M)=EoK8bvDg$=iF!ZDYh28gnJDCyppVKbgg}3HRQ=tjacEkB3ii*jRxXlq%3tU zg%|JC%zvEAQr_vTkHy#DbbaY_V~a|=gj%?4^@$*@$Dv(XTc}F^bOa+NE`<;Mvs*4j z=w5y=?DjM@nF9^|BDJc!i3cUMKIp{v+dt)mF6R+8pNt73b9W;wT6{a# z`N7*P4nCn)M(%96ff7Fob%S4NI16uD{|`?Z#D!Po8od)vDa&rA%?v~s95R6h`dBt(qeZ7$)C z97`Rs&vtpKmz2xH*V*8!){oT9!FOhci=@QU7~al*198L@$_p>bHp@-!Ei<{0S953V zI4eUfutdf4exopPC-K^_`*NERUBq6O^EKgPKK596k{?MCImFh!P_`*{%U-#2-rTgKU+t_w>4ogll> z6nnUxVc?KEOH)}^cqbmF$Ouc^G(F;$nc5}Ur(C~e&$9tD>iQA%A}ALtEHr(5BrAO5 zuuF6BmPj}Od|&1DcsaKmkIA1e?MvyBh=$V@Hqeu92VwHGa%Q&VD~y;l9WL-;fR;J! zoP_q^?ffvss)r#uC`#HHh?S^buyX7m2A%XKiG*B4fi13kfUe`{=@S0aOu~z=Gta*= zk~8}4Hx1Wv*Mp}UrYl^+!NSPz!5eh)rF+ynPQxn9yD^?rbI(@G>uT-ielvD20|Pe+ z4avE_1KIxHx~lh)pHqfM8%zJ5pPIO!hHK;qlx(Xr7Dh@5kd(!DKf2odyYV=YY+=%m zyJ3@BGhoLdS9Q?3Xe54 zX5RJ=V~Q{qc_lL&1O3B~&aYv2c2h`>q5;CCa(t?@W*xd$<{tg;DBc3&4A`4TR%S+3&ud8Buf@+2NlD0|85<)vL=m}&$9JNGBIr2Yd1V*zjI%+bJW z&^eed$5`+ABrK87l)A#jdTo+ z^^Us>C>hGGI;gB2o1btmXV(*1=dwzS{1NW!yUv_H^k(ql2Hwjpu5*mb6)b2RF{Z$; zjC&u(|L{xoLecB^qSvn`pRfHl_2s4c-JUh~zKJX1!>^d4ccOILira6apFh4K@8BZR z^SM(oA|e2K7Ww3o-Ov{cceHRaLyw{?J47#CAMAiNXQ8HeA zc)FQ8C_1Y)AFRuMyeqrA%6p|09?B$xd;gfNz9E4~u4Mx+`gh#@?vT4#I`ZDnUA0N? zs&VyrzH@~XFIJ(!zApAE={LLVy>}}*4vw#ElW=2`X|vNO1|#-MMpRvnWE-A(aBSAf zWr<_eF3~gY$H3D8K`25g)lhgZFnG+3wYedXx;2xlsQ*gWL`7ri;~nB?RBWd2Voi28 z3q_!4{G$~N$EWr4s&o~6QuOzRw88N2t~BA&Lqaye`nC|r^^Ijg%>bf#pkmn3@9t5P zA=4i2A&Hvpg4mDiUzIqz=#p+EjL~AQ-lP|eo5{ulE8JQ7O^US?^P^5|Y~uAjg&_6! zkEZC<%5`G&hhDK5$(`nwp7Rw+VdIroU>`Fojts~vHK=sJmnF4G7i<9nCQ zIEy}U6GB3;J0ojlhtW)BG|&DU16r@2nyPnYdbhsTr()*?bvtonf$@yxVKd$Wf;A|Q z2GQ5|9>!=`=^7DG6?C*m^?SK4WBq$SqU=aovGc0is&ir7S3dNwrG1)&I^9ng8JrFZ z3D}GJY4Iazb4VbqWK#;+HG&!lSS&S(iIPJ#hjmZT?;)c)Ge6o>`*RRrl3!Fy4>M!l z4hlObNmPlqlJ~vPV!|BxZ+Y$|HLrQL5vF-^v^g@W>9;1k(d|o*qe3vkG`etkwM1Eh zl83g>HavXRLE!~WVRcr{pW-dNOR*M9tAD(U<~94*l4ch$>-E|4bQFzC<>U(QvV*a> z)=N_Tmgi#Za>A{&>fdre<9NRBO9~l>8T9B(CAUupB+Rn6yF)F^&_a58p5vL^eIRc! 
z(;BEOWlI;bs-HoL0i^>I4I~IY_Dk6_inA6~izO#3K9^@u`y3GS1i8jFdJG67ZH0|# z1#7;(`*QK;MVg}z-4HY5<+1~Nv$piV%Xk)~25;P<0K=hC$6gH4gJO2Rr5GW{=b|5H z3O_x51F*|VWmDE*I@!F;g`W5($~=-FtY+$%o8ErJI{lKdUezRH$oh_+2MP){9zpml z5iRVy0jcAjv$yErj^a7yW}dg{bJn)wLa5FHt#Q7t&q2&g#@t><)?iL)`4Q*yZ*k$4 zoiW$RTy9glzqE3Db`FpUy-|gd^Xfcn+nzP3*rRT!+UAB)FoyIH6J_;ZQm%ZcQKfhI z*pZl2znCcE+oZJOd3PSJ{Hvf6hGxwG3~hp=YFLBTy<(H;2iY{$i{XH7??pDXz`Z|? zf!{h&TyH}db1MLY0=_&8#qKtzN|sOc!$>mbeMKJYapsjw94x0>#mv?alc3LIMF+>|D4>1$A{O;OU*Z3v$ajssp*kjjQXqh3uSgKL(IC{E!D?Tl%f{p z@4EXA?}?$EDLl9B+kLd~8qdiB(GKJ)I9*V5@xNm5xgTYSjk+e`g3G0%u+6!&DSBAT z8vcbWz2}ea7EO(%ElyvvTeN<_n506aDTN;OXXe?KdiLZO*8Z{}P0*eeLN~x%?|z52 zhhWBQ4tafeD7Cx}MgbtW!m#9RNbP=UHyh37nfz8Y)e!JX5D?bdT=R;P`xNMtbwVpt z?k*~e^_3V4i{t0b-)vDI9(gXN0j%0zTOBO`JeSuR{+yuxIUt34I*oTz-fLULEo`l6 zn-*;TQoi%kj66KNA} zeHr4*P~cB%IxS&mZ&$%#b}RTcRztvjKJb^gjYP!jqfE7x5f5;f zF=%m^bG+68Klj7K8stp&kuUU8>xI`g?+JvlIDb+oAug!JS@ml z)aQ-9e&T$ql71LQU$abUS$qvC_1i&VN+{yN#KRb zdOw@^$>8F$37%zMZse@;A#;y`*uj2~tmklwCn0j*bsX2EWChgc+|=36EW3!z zZne1gFMmc1zuRDin*GHO7Q~8^Dba6P0(dYsk;v(tRYaY4F9WUS%O{#fbtnF{kxqcTwF4evyG**-h_Cd!aGAsk#xsm~gKOlBcJ z@8k<}?j8!+P1d`;8I!q>z$?!aTrWBqKKp0IY;}59un2HBKyfk)mw1|uOvQ>!R`{Hz09=9G8toCn3lJRzbKZa_Xd=k*tKD7X( z(zGhdn5#ZU@6Fa>e6haKY4ya7so|y%uB3OFxZOW1Js4j?-@0QR$bh~_1cfevU`k3# z1_lN>Ik~Wx?~(@&Vs_rQWUA5!Jjnoh4H$!;LFAxwUyE`o0TB75{SVVt; zYn}BqllYj*EFv+WwKnQT&jQ9|Bv=mI-yfU5PQPV2)x)`oZhUyJkX3^kCMJ3&(`Uj4 zlE_(57^_$7l#wNZ$_LTM_KIrV!`)62;mAG5O8-5d`^c|*4N-*1NWR0X2Ne5Z`jFXO z@Sfr!N0-6( zH5tsGYggiI^-I)!t&f(by}iAt=;tnTeAb)M3$Q09BGEJ46fv=mJs%Ap>@?5|9@&ySQ*+xg5Si4Ci*`;AZ z=BvGs`;mdWySs^riKgasc>1LI(W?&k0L}j~uT**3veNe!NoB|Nmw4{8L!f=&{mS2- zQBr#5bWG4c@H3%po)E%UVLek0iYg%Y4pbkYs*fj}X}4Sv!p_D9O7*sjjdhOe(m|rH zad0@z`thBdPC+FoCofMX$L25*4wkA&cUK_(qORcG!9{gKiEeR$RYS*$2*L<*E3!RL z%E^hNii!$5dqq|jMS0(QSsJI6KS8fvy#mSUmdA(NG(pd4YuESh{d02GYaP}E1O-7M z96*8eM4JuudvszUuW9pQh8Z;!3cmJ`x|so9uPfG>pfk2m{fzZAxAZXPOhr0=w*ye9 z2d0mPin=eHFOj2cV`IZ@JO2aJ*rum7wY7IZWh(k3C~cgbpYQMO0a248*sS4U5!W1T 
zQ7X(Pn@_T?I051N&N1wH3Nd=(EMpdDM$_za%|sAQs{;S`B}~Pu|Ng0}sZqyC+gMxw zE-3KaEp7TcJ?##ZcAyvyBq9YRB~hWFK>H9D76z&^W4UrbmBVMhEb>_Bl>sE0#u3}g zX%rcUvZ+_~vNV(pU-Wb9wur4n5_;=B5(FXjoAGgBP}BX#L<^uzoIZFFkm2pVg^_c9;0UIe|6tqs`Nd@&SXmTaoJ}a zuuG;4cCYEw2DhH#tOQGD7}HXcL-k#Jmp&RK@Y@#Nwrqh`lh5?|?o>j2JPkFq2k@HE zW?fLPG&C#&azdcOyScfMNni%m8=y`Z9#-`B_Lh<98y+4`Ng>)N?0;4nP+pg~hY3sE z$eQZvdDn7P{@>V&K0R?W@{!&SCd>&s`uc+=Yy$%WLGU&kKxA}rd<=koC`Tp%6jqIm zjkld)+S;>~mSaF}28MS#T-4Lj3Il2CxMvxu9Nh1G{S$P&oJH3sX3^}*&_qfY80)@f zu5h`kdpmp4$0ent^sgOzd3(dd!vh%`!m2bL$D0A5Qt9b=^;lhAjtQM+d;3<3IbqEW z)~Yf7`Bewx40nJj-yh?~40_XZcXuir*xqy}0#)Ge(v7qK=rW(ThJLj*;n#4~tdT#%rsqy%&eHda>2DJg|zG+Si_ zDs7-(_i}Z*z_y(grDY6$kxZRFZpRHa{5cH*rk$o<4U-DjcR@ub)Yg93TQL3<^S{){ zH&w6eOLn4ms?-V;qq6bHsbYG7c>ukPghW?H(_FDE4ZrV0qb4IRQV?cX$Mp1cTN_-| z3LGB^YL>=nB^&IvbJHQ%p9@N~gCl6y1&3-}F=IZMGE;6E4$$f0{&rzup{F?X`4w@f zx0lzGVh=NF1&H84?QGP7!>ExXp^sSA8y?c?>jt?&3KE?uGY%;zuo4jwad0>uOy#f2 z=&qXMOA!J3g9%!UH7OLVQ2)t*>ng|@6Hq?@Uju9c=!trJrOW%aS%4Dl3l0MsF`x4( zs3CLP1qTKKy~vLrKOEM&fJSb`jV%5nl}Q4Sa`>sUK)fT{4E}9y0-;rFXD2lqo8$e> zxrxSj%Q9Bzv5-fcw3?EV!^7P*z(64N3dAT&i4O{T0V+)mnh@Cg7JRc8eB2YQD2q<0 zyVtA*Xlfc72+yCFY8HM7L9@KPn_B#=Kn6q;6w!#lHf?P7mzNDpO*>84IHBk$6{7-0 zL}o9D2O$=bK$E4VMG(^dj-8$1t-sPPORxeN&>45pVn~XMk62U!nH*s(DtntLtBIx7zzOW`~91ql~plQ z5vb&%KuLCf-U}!*czJo*62IAb10@E>OiWGc(IeN%_Am5|qpy0Rji`9N7UK|4^D>fsjA`VM6e$ z4nMKsofm#4y3CiOIWo?CK(V1qg&D0ES^W-(-DohT#>WE#0}b$T2?_aa=WBt&f(hD8 z1RQT50GPu+PzDBpNivG${On8@s5pQL8Sbe;4OsDafJN{NVq;^AiHU)b;pM%}>$Gt; z`Fs`3bY?A)oyG}CA^a1mz)v(%uBFLfK_8;YS(vV>YsCeL%N45zAw%TK^K(d{B+^05~v7f9aZhCr0pPOepL$U z`+yg_1f$=24M;W@4{oJz7YQ*RrnJE1TwBuIu^MVut94q`pF0ZY6T4KHO>)x|gcVa`4%SWvN7CgF zd1Av3OC5$Dz(SJ~UaD^X)F72w!YR+an!hG#Dq7|R4Y*rSI_zGcl|DT^1#lnF0xf=` zE*8UW04F3R(*-=rK_C$ zJFS1K78wc&lZzh^dk&^fhf|)NIAOhDuxhiBTX#zL=y2m<&r{fm{O3&oAsNEu zRaH&S+uxP*Ik>q)CXzmaYhe_LV7=`EpMt^=(5`bYd9hPd=M*5kdm}6#l`G@LudFd; zUkFE%!UN`i%rdB|{~mFm=2oM=qC1K`(ZpHFF<_d_7ztrWd>-H1J5aGN-IpS8P%ocr zIIjSDGm*VLn>f_k;xqm0+d@eVdwS->PHg4}U}oA6ce@Et>Z=!Ki`ES1&vXkJ>tVgi 
zrXpw0h80G(&k73*LqidPIv>dN%lilz>FMdK?UoLa&@+j;Xxd}(}{-1G57nib0bt9$l==yq_^y{cu1oYzz+vGf~DAy}zwD2Gd z*on@VUGK)l3k4#{KcX`Ma9^{YXFzqTtrMG?1ci>dDJcVDJzrV~*6I4}jOVXDJw|UL zy4H>-i}gJBrr*gif0QB?LLm=?R@vCarY*+g<_<<>NRAcRxzC$XPR@SuJ#Or`{bVbF zs1%IWd1>uE`TN1b%E~Ha=$YX2B7MjWKU#v`ojTVzzfm}QM z32p(~m|?t!?Lv}nd^rNXvy*}9g80=Eo-j_I9kCCAisvoJwJ@E@2`o4k_$)j>I6u+M zz4}yHhE6h*E>y|`Ou)K2>R1k(z=I7UsU!3sLAbEo03N_;ON(i z?U5BVY1rSO_=tOSVWWB8)n*H)(j*N6NT?CF_bn2F0}&eqA4F%Me|rD^Jy2c($r`w2 zn`m&b(#7=xo(`I0z&QXV4oTGJ#zwXI5D9L$xU1!(!ll}dIYWF*bP^|EPX>EF(>+P%9Paq0xSMaLA=-p^vS|5FR#bDewEpTn>tIPr+8 z?|3|Wo?*iu@5%h*T_R-zG(AOQdiG}Tl(X+RQzyMd>tV&W#xv0S3*M29lGLp_+ZGP! zB}bt`dASAzw+(KGKwuvZR9j_6FJ^Ggp+L0{1W3>PMW`?xKu1cH+tx5SFoTF9Y%`uo zgRc)Q@nK!&=K^+p6nNcAl;^9X=#OrX?*qRL9vLDpl`U0rKuB-I2>8bB3)_B0KfZa% z`90T)XVMR!=Kg#G6(JHAHwBmYewzdA<{q)qPM=$8cN9bD<6Xu0*}X^5Rop_2Y_5$K z$wx+ub=JPSxgoBn(Hs4=x>S@Yg6Fp-nK|(7pSIqC3O$Xy@|?Cf|qi(lQ6&D5uQi!q%#a!ObTp?G?A!g=Q}xTYy^H zdU){0^Z-;6e$vmbe|y!=TBg}(Gsgw^C*YSkxw+tmmd@jl)!h6rocTdWNC^G_O&kQS z_Oh3Hy?TL)(r`=8Bj5YDA6mk%c8Zb!*xMz_TUF$NjRbL!kD%T(L*bCNuAnHRoxsok zi*ybRE$ykhfUoakYilcz(!X~Z3#zZ@Gm^jvXj?vZ2N~y!i;FiTiSxdUj;{T5^jhUE zo~>SKAh;IPSUJi+{Jf>-?2oEkPiQ+u$%3jvD!(^0+!kt89F=K+7M5A>F+gTrp3;4` zrfi9zUm!Vo4sgiI+d}B**S9I)uz;H+IRCQ%fVtf)?Na&UnpBw7_+r;2;pHs&TM!1V z;O{K}rwfgW;s&h~($YI!Su*!2Vide!f6|9QPX>@(0tu++#a>xe6|rj3UBWxhdP@*1 zf!;c33O6QOZSQ#x~b8}iTRc*=l?r)2LgE^=PiuF%@>sr zAiDeBT#Dt6K2E((niod1c)CpvStUV1rGQV7L`a5 z5`91!-P4oE?`8`q9QXqdkB4#8@rt;J&sFIsW@fZbW$55Dr0(zCFd@UJ;N(E^3^X=f z$>H5hl*{p;w2M)^POAlu0`esSBzE9(LB+g|=-1WS2u8J$`xks?o>(0{EL9+fOZdSVUGH`joUZ`NAHd_bz7ujV52yY9JPhnIlmKJ)h*+QgEf1H{CMCq<};0Kxa*i<*^v{SOW$k5P`)c4N8!=p}-4%Apc6Nd@j94P=bq3a?k_Z7^U zAQW6YDsB=kdJus>xRN(nm%8!PR95QV<&^&mA+?jTI#_6$Ix5Y~Syw0z-)U8aXD^r8 z@{1g_4~8@Dt?WT#zBm z080Y>7r-^%l+-}873n-L_ix?Le~;a|?V^xMxbn{0NzShPeY72EE<5}10J<8A1r7gG z7vB7OBloFOyP`#~{IIVCRbsngR(sB#hJB0D1v*hpY7h0C{#EN!bMfn_Uv0Z6N=U!i z`TYA+IT>#P`Tvj`-r#_K7gbfU|IU4HXXoZbh7R=i`1wK1%{PJlX;NoL1f`Mq3i(vB ztP00w&K|WiElD#in$=0NMb60cZni1Wo$;tvRandP^Q5f=MI| 
zwl99#k-pZit+bue$Z|5(_F(8~!*9YHgz8q9+~1&k3$HfGxH4(Ku(G*lW2ZelOB|?;o>f&8!)gi>1Q7 z=iK+a?~Z3b``LS!Q-$*L1U!*Xlb|f{vg6>>AP(gkG2ztoRJ4v7cc6F1)^|o)jtP1Q_4M?>a6nGj z6+iY0fc~)#tnR#pdyTxdwL#>C<-;uWW%UBs#AYeV<>u`N= zZ!4Z$lr6Zo%B${=ylX(O7EvS-rzx2tRaP-hmQ>wW*{RDCB_%c0ei3D-N~tQZ{v<`( z%6De5b#Uo!G(9~&XBzuaXk2!wA46W>YCuSfT;GRKB)Gb1CDMf?h@j-+C$ux@i= zR7k)4*PlnxrPKcfa%_^iU#wQ6;y0&`6}v@ro;iTZ+AhM!f*{z`$mpnm;WH^_N2y`w z$?lw`sVOlzdA&h9dKt@Rl+2C)z4KZzf<{Kg%M}xWWVj9<01v%?9(H1c3VF_g$dR6< znb`xAdrC|MR8iG-t36&#{(qCWEx~@=$HnzoaL~uCdV{bLB3ALy$kF&X<&kn%k-x z*a?v*LwkGP0~LLJ-6@EjiXU|(MQCHXgOeA_G#*n$K}WB#t*wY?&|~NrTv;)KqX4~T zBqSu1l;1rIuhl`N0?0{JD!T z2wqN95vq%V?_uYw7iv6xw#WNcU;jNc3qbt`5J;%t92b}?AskVI4|WIwBhN(^(YXw* zyhp>}nlS#?K*h!73}U0ovM5TQG#@=Q1YM$1e>>>JfDY#?k&|74qP(pu#Is=uRNS~W zTqh$stvK2*zQL$9gx`PXBcAGx|9Vqtk;h&dG1x978v!6Xg+$Bn(H z8QUakT3>NC`oW8Fk=Fk1{`=@&&(_o2BV%H!t52bGOR~h@E^2veYin`wiS=|vXB0IG z3W}wL#cv%vgt_2*a}yXpPN8n_Z#~`H!^7kJc&i#*v;R<&=Y_D>B?%Q(lTkN**IaSN z=pSgqft(4Pd$rA6Ydr-*9p1+A-Jwt^C!nYdXG%s zJv3I?*w{?gsRKY5aK{zlRu`+aY++qO8oF&d8f9grQ2`#^t@5{1s$}lKo|hvYQ(#D_t{VoQpAdhWoU(?Zut4QpRT!D8O_!o$&;g4nyTm2hMAI+ z+_*1Qea$6UKyM#u4ZsKj&>0p9Fi)ATodF0rGl^wB{v8Re$vsQxI%`XiK#%>s!C^La z-tH7xRq{Qfi-epUI?`>XE4pWAFSn=4K{dk#lrONMbegbiSIdsC`%uu6i147Ip`vwa zynEPqC~#rI%q(wNC&`~SS_UrE7I&WX)Uac|(S4~y?}7OuBUNxt!#0iv3h$%qfDa)} zVFjaA&0dx7Nn{@2@3Ub&@c$ST6tq9f0!5?-mb;=3aHX@iyd{QNzYXb@@25Sqt9=>C z=GUR{ciz)QAOL-Wpq`E_f(Y&l7x#(F{-Wo_$<^iIAl#%+u;`I5)t;l>8-u>`$*!}b z3DUSKB6ux!I{#Y}<4|XvFFLpFVhQfh%K!E2y<`i-11N2}(LeMfB_~YZKhm@^YA8$z zhZ`J}Gr@6k`WPG2Ue7h84^cO}Zi7ap1!2{n+g`YN!aGBi58T9}TGkI@(wjpgLKTBu zq$rMC|E&^-DsD!#~lMuj#Wo#4nw z2)Uv67(T3Myw3)qR(O7rf5p*sAaVo7KdNnHr`N5$lU+6N**EhoOw}22s81Je&SSHS zLY>XOSejc{NJ&cWOqJhm#wSmPCMJmYLZ`nq6`gd?3F!0Mu6cB^KV~&nQG1`9w9l5` zIrKU!dUYin3rjlkBxz-f1$b^*x=kkea?~~gLzaIvLO*>?OrmV zT?PS)tllr(oDY#x_`iH%f@g%F2qpmF^)MpVvZ^X=I^NmdDi-uEf2M{LW=2w3yBCi8XBMTZVKe$ajaUu3N`(lC2yd`&_3sefSB0X zbC6odlV7r0TwPtD)YV$F>zKw5MJOWT{=`)YjhmLbO3NgXL!8V5s|0SH=*Q06Zj=Aq 
zaTeBz*tDecHXBsT2Ea3!IHt#5tzI_pz@%1E;O=zTJOeEAX;>h zhnrCK$u%nfjJlFn^35Q9p0NJ#dmQ|<`)L06lPC0Td2M#sQ8W7OuO4F^XX$2N;yI|j z$&dPSNS2zyaaf&-ZfDmFN_yE7H?HUP%e<|E@x>9(+z>9b+%LNA#d!CgX=x2ma@J); z-*-8d8tYX*^SpTz#F~+Ud-#}#L!q{~tbFUE@fI;oMRlNnuPC_*w9p%M;cj;2xm(viJ_8i znq3smeoRWGduG7;_FN#0b z>zH7cX&uPp12T6y(r=- zE8#+~qi)(wcJnl*vHvkjS5Nn%4#9?gu*7e0MbLk`y|>p4kpMPIwFt}d@84{CwI0`} zdJmlIr>l_csf68*{;tY3VEpkI+PY%SFDqMo%47OME_T|Y_i}{8|6{<1wEdPQbo4j* zW0~lIi!ve^_i6oCWa#~Id5GqQx}J_-dB=wZ-}5bv zx$x&W!dOmR@ud7K|8(x_Od;KYIYFH_DsNt^ywQGj`IrsN|JSc2pGE1xm4fQl?RPVC zGqVCqvwTal0+v(N>l4xsU+wJeDhP$6KM`kV0(z^@H9k z4)>10@q)Crw%7hA8M z)2&zkE5{ShcDo9id9x7gVIe=E`|SN=@r`uyYJXFgkUPOf`waLY4l7;Jcyv;I2~Q1A zwx?#QZTB``5(5BTd42O0eP}$~q2kbXwxvE)0eN@^E%;t5*oFS-I{U-n``-6bqv^U; z#>bUnV+X8fW(SaL_Uk5)v&g8R?Queu)b3r0a39A{!S z?t8JFX4pJTrQzpi8F$lQJuxua5+q(k10qwIx9aJ4kZx}72o_&7hjpz zw?!E*^Tg%Z_WL?P=+xWq>-3MjUWUmG^bg3raFL~r4wj(3x)~iFhJ*#Tdv3Hho;~Mf zvWcZ7IRU{KVZj5jqtG-BDR#_f&KPza9N=lkUVl4Rq$m)Llk4x9B00kmH=AZkkb6YS zid-ZY`a;KA5Xu3)y}eMd1zH>Alt5#k67fn&zx~3Ao)wy!o`&!ZB%Ff${V~b7P$6M4 zLz1wqE^(`~b=wVkPBT%$Qi)0ftCO~R9ZB@8kOaYh*bMT;lhx)$xI>~ud5BV*>{df_ za@K(b#A`JMe*1KgTp5cJF}X+lxwV-i(rtlj2=b8?o=#i6jFt1>lqWM@qwyJso{)c4mAQT zbY}Z7iPA6o%Fx>^G|d53ZE$c8n)e}1BkZNz5RHN?7$NP!C&{;yCHl7W4T&P&O{u9m zGBUxy`h<{P7IFbm=X=q!wUB><`o_ti$W2Xsy)AHt+CvE;rtuz^r4uG~l>S-T@0P>3qpHuI@vLp&_ zi!n&AauibQK;Wq6fH;`s^2Z(ctxrONf)I@X^x@av$ic(Y0k0K<*CO0R=gMRcn}LQQ z0Q7z+IN;SzA;UW;LX|;>YQm4|koWY(8l`uW-H*=>mH}*l(gWPoZydO({i`;*PoQ&g zt5E?JYpYQtMn*=8usfd+;D=T zLX=$+E4S{O&N@{2_1m|7wmS}VbAq3s%EC11KtoUOuaM*1UVrpJk}&*=n5{VRDPd-{ zy{y_150@Q0a2ItNF^J0vEUN3PQ0g*v?{@>v0Ww3k6`1mJ3`|U*K3=ulzq`+X7OIQI zdK@)AZpUHaLQE@`z+Fi;P3EG0$L)zWZ6Z9_`GzU*Tw14_UD0$Uf9`H*;}bO2A=eF- z+ih&(HfF{o#C*?Lpx$HlA;A};$06flVqh?aR337C8X6j44pFk&jbGE9m^^esl)o!p z)X2{Cm6-*i@<`H66_u7}Zroh|Jk|7&kZjjmys~bzlnU~Tc31lAZ(X>W`snz$$DEE3 z@(f1WG?jzR=PbWSvYn0bp;M#)PeOuON-NU~7kspPEu=+K5_4g4Z&fDD@?H}wP$a9q zO89)eyO?A=SuQ9n!o{(st1bShysX&QH>|SVDt)-s@B^&eGOtaeTRMX@tt+1ZnORHv 
zfR4Ctrt{}0_p{W_rk^PXY!beL;qFyl>&OiS2>4&^=`>|!AJNmV%+}iP?(Ra8Ia`up zDAK1+OS?c!aP4&Y8M31M!jwy47Kof4mH$0EegdhKswN+|03#0r$klzbRZ!~wdaOk< z&OPueF1zk3hGV{8c9PpEceL~+1Z9wcvEW6+oqY{=K7T_7pOM2(q%Hfa$%+>){Z{^! zbdiLFMc!*l*1p%}_8rClB$Z>6^xgz=o0+ZjP1kYKuRj@Vlq@ z!t*yjoRo->|5(70`U_zQ2mz8&QOysZz62BZ#PU4UJf@^Gt#{%I&%vobJkQePjMg8l zdfj>P?Hh#6OV*#NkB{q3|C@P2Zt0I3HK^Bc@#A~W`x6tywKEq3MtK=IkqVQ41gA@= z8T@KLb62s-U#>~KWqck=Pe#{ z-LE>9+Dl9#n6?GI%?~z_KwQ&>`0fw&O3qJUoJBm(38<+Th7VajuExoeMhR7MjpZ=2 zaKC#dmm~ipl4P{9QY8fj?pQFnCLpqJ#@alWv=6z$e|_p(IMf*)@lNM>NNP3jCT*{BI# zXAfg0@emgI=lKhwJPnYVKdR&+dP!D)=QigzEZ#yvaw(3JOPa zE;+0q4R&TV*p)c@nSXws$CWuVaFeag(3KknTFkd^E=k_w$uhz+lUg?m^VHUtsVWbkVcx954`tV4Iz5Etx8FlsD}y`&7vq zz~!wdkOMmx)P~f^YaMn{<}Qk)g!|c9{hIfhofr5_a*_XZMb`Cpipma5uY7H#eESv z+>eFkYOMqWs}na(CcD2ihw+~B?n-ReJ5<)x5bR~TQQiIUB|F=~!kWkC*NWdR8+s@! zJNF%gX+N0<6f-r`AXU!z{l?MX?|Wv7wuRHc#ih72%1prh&Z>1x_yFE#H#a2R2A(GBb zqefxx>)BG`qs`cEmr==aA2m{?{v~%d0|v=&MLblLS33v>#p=bn-*9Yx>qu@O=5j8#8e&+l%ebC!$mzo0OLQIkn6QH#A?RX{e;^VsrJ_)YJY@^;2*F#+-Ezt#d zO8>qY|Fr)8z7W0Q#Rv*$jEf4Mxw>k*;g;InEXm=}o);8+Tlehhx_S%Qk;oQfgB&93 ztpvQ!vsYQU64&=}J@%Adcg#Q%&o8-}qqHmH8gAxz@Ysj_cBlyMeQRlgKFF?D&fB(*g57a=W`JO#q@J{Gl2a}c{K5d&RR|i$RaA#JXySIa0_`~HNinyMS8TOH!GFYDI)!f~( za)<*TR_?Xn_1;-Q>r^NUJ}BwLoISkKoh=W?L1Iz=-q2{%c=qx;{;w@40aLn*73(I3 z^XjNznZ@FbQ#7s9s=Zd(v{)VS??Qb>?hIx=biZW;@)r`=*brsus?dNUrt_7kl!oEvT3;mf58){CXLl* zQy(h(S|YKZ&}%J@id9J__5inpPJ&iRRY!G5R6#+lwisVLSo|A+X(MB*u@}s}EFaA` zxm=N4ktAJ*Jz(a*e+CG2p9y-k1huwaX13x4AW@@Z2ouD{$CZ;$!r4cqdCBUCgybqI z>1xf&&8JC9Xv`o-&Pn?Lq&%Z{pMN`QR}dFXk6rB!MK{{+o!>Y#9mAMuDk{s5Yqz-^ z!|)Vu#1%JQrFQkWW*iIaT#x_!G+~vnU9+(}GU!AAFYOu}}GZ&?|7 zQeA$Lmp7I*R_gV19rD#nR^*{E0Vg|Hk%c>%)rPPlzb>}bWgq=1=G0}(i6w|5Bi%|& zCfnRH3oqm39a7^RQ)2AxS=3qc{!GyI0^NwRKdwH=uI43DyRoLE@e~pR9}+`+*0Nqs zQw&3Il4jDIsW(ZQiEpsE;-M<_%>-y? 
zBBZ5n)wI3{J#cFV^~4TI2EnEq`pscwM{dztyh>+|-ZQSWrh zw_mrmme)9~aKG#6HH2+C{npE#3vnazxbpJ8y2eoJvnLb22j}qK|M^l{|5rNJSO4x4 zv!&Fn2)Zm5%b92-k}T>dId?7}vIj_RW7N$S;Jp8H&hS&J1~>Q~yrn!?G@l~G0?mvs zbUD&E2YEg0GMGY6bnnPlY}|3t{1fj>e-nCE@7caB*E!&ZeGn2B+6e;*et7FPdo`L3 z(*jBXEFEj>#{aNw1Dur>_3-DmRu9jT<7^-G^SNHp<}BTH%%bb*L*R zao>;nxPd#3xnhQFCBlOc zARhxL)4zPs^b5kS+a@UBZe@l~EUUjui*SSIJEQ((`wSB^6$dRa{gJzcO56#6J>0$4L z$_@|CF0oHoXOAu_sp2+vmrBNElW-)_JPDIT%e z)$_ew&RpQ7$9?|X)#Y&S}Mazn1;|H6cl2$uo|kHTrYmeb{Wuavr6@B z$P12cm9G6o!D*xW+vrb%vy4~>!oc?*Na3SnV32%JQC1iMyuw?V;s1PXSR%I4^FlK6 z;%^xaHoNj3iz$!6IrSY=i_NzmMzQ5QldlfzbZnI4i&MEbfBbzf;VW??amS|~{5%z? zkV{Dgif5o=gW4%{P6BBVKcT$iFl zXdRd{__||YW|7HE-46F_piwBm#mzZj!L)c@Pe@T9Gyou0kl4R8_}EM=_xA zXupP;DBn>n=tJ*>WlERNciP@fgkm2?B3&eU%VJCEc*=k{jo zxupkhKb7OT(`EMDheTHfLqYA1T7f7E3!E*%oTmOJN<3#?4jSI($|TC zSsF+GVg~m=ET{CRqL5Xl)(pYuCZ8&?;?n}I+#b1{NDY3+d*y+S>*hUKxj6z+Dc|!f zOQ_8(3~e|j-F&jX^GRyr?Qsa)51UI!Ik_HB?rWQ{jd?v{X`zYOe-xOor8`spqr*Sa z*%4CjhhU#0G9}VDf4L55fJzZs*7 zfi7f%juwQoOyTNs}* zxKDmciM>-gd^asJ3r#W$H0nN9gB1|6V>MYJSOW^pIsvB~b3WN5g(MQ6B$6xT%cM-- zhEI1jrIlamD#VSbE6G(-e;6XCk@yssd8~|=BCYgc)&eC-Q)gyqf8p+f_Ek)(s3_du zha!aWoWg1o_z>cr?t;69G1Jl^&#>j{)%D-TSJ)k{?wH2GEXs(5}iwo=>Xu=%C? 
zF*B2X#Q~ATT`|AxzoZ$u=^}LgD@L-+Fa9TD869K`1fuG+EP-Ym2({Ez< zzSp4iE_@*(LKhc|Rybh86@>;igtji=lK}}H_*EpBfnSnmfWD@t*75cUG^|7I;5{qu zgL}6V01cL53$H#iYGh<^QbUTz;Gm~ZcJsL5s^~*B> z4oi)YJibYVAE-3u^{#9@Qh1#cS7(WL5b#@}hVttd4-L(dWlV}J{U?yKhXO`01|qBI zQI)Z8GL~lG(dv7hey`LEsXnr})fLvP)W`pMI15YAFNcVX39}b~(af5b`HBe|MxH%8 zOBOO58ejAl)p9CMc$}o;2bg(Z(oJ;(^J%**UIROVXy3=~m1^4s&;xFCIq>aVh7lul z{-Es)LiLi8?ch|QV`0AtPxQ#CDzs->eGff>`*A3~>z9zo{Lsw(a_-~pwlNA5tln2a zCgxuyKW+Nq8kE+y69T%VMwjy?JTH?;F)o&k$?((HbJXUP(Xo_-BxP--?*&tXE!sF$ zdq@Zsp^%b!o*zN&0nE79`4Pxxe;YP~*5cc)9boX*^ZURYNk(|v!<%Q6=k%KI{AD&8 zdj<_VKY5#S-P84I_}7J{pv-x~AQ|G}(QB2Sp~g!Wvu-UAfs5|>iQ=4|wEm;jGn1_S z%6ZOe172k9f;>}0J&RYde$H$p2F4HSqEHYvlt5`7zHr4Uu!AHlR$EDb;2ov#_h+WA zi>K4y)V-CTuB6ngc3u!_cp;ryc4~EY$m@!l0VWZWNUA?(C!2qHwl(j5c(NStoxoif z?e~)XvKE(y%dSMT?ZA-^CAsR^VP!D!wZOn1^Vf8KJVtZ#p00zLZ;0G`>au+4kr7~D z&h7+$nV2C0Z__N8n3Jb$RUb8-S(Qa#=ZH>=CekYQT3u;z8LoI( zPEFFL`P;!=J}v!_=JOz0PkY zYw_y@Pnnr51JeJJT5vn39R4Yp{8sOaCobhMeUDd5gk2hqaaRbQeDbpP~5aisz=>_E?3|kK>_7M?C*lPJtbmFcV7>?1pQT91{>pU@; z$g4-pu_3Go!JOXU_vUrd+j}Pv^W&wL<*A<2=|ULy1KH!LDzk>SpAO~IX>xuVTsfy` zasMzSk0|L1uxpr=3fG6EW8U*K>IOf5|F?!|}!&b|gQ6 z<3Z|G@I=%49o>1O(wh4zh6c^%;*q{ejxpiESvvAYzM=gmIg)WfVZA3WliJs^O$gTr zy~k8kt2oE4-#foD@p6?k-a!z;3dOG-51@}(SR;;8C)*sVJM4RG^*kr-8YZj!H_ifp z6i%@2P&eR3KcMfU9vI=^hM;-4!=xPM4ua4;5K+V*R+l}#gDXVx4jBQV6rYk($gl&G zC`?sP@8-`aqyH<=HUKjPtXUYk0&SS!(SBe8=z>cJg|hv>e+{0HKx>r7t^o}Cfk7=0 zH-kSrD8RnNxW%5@+AdD_;5s$hY*jC9F<`zA%-I6w!sDQyc%IgxB>&~HgxpVZ^Yejd zUCh1s?iuv{K{*%SSanhEzg!L!k=0aG#+=BB!k{E;I6Q{fgq*DjV$um_W@cw5$sjZa zEh2aXh|_=+iNU#}(`#5h5dLFvLT+H}#+wGD#oIB-%l9;=aCL+>jT(k0aJ%?P!(uBc8% zs~H-*f7XXa^0~7_wy*7K^FQ60j%3B~xFr@3{kwIM4{)b#Km?1^>(b>v_nUd@zJHe9 zG?$y)&2CKVAE!XBFOh>rRCNy`4t92SEiElq*MG&_uT)f^h5z6fsB!S|pDbRg4lb;; zH}$M=akF&yx?z|8_+?a-TiDKX%~D(#Av?g8IeM@6v2>%$>E%nRsKJTR%<~!zj9;cj zd8V+RG)XLs*W#rG<#;m-BAhE*PKd%~2Rx38e8#(I@KM2>{@juG$5`azUI>>TU^Vz? 
zEVDd^%>2oCv@FzrA$oMoWBt!o-zBiN{1WHix!+%6#Xu^#3+sy~u3@{?Bm_f7{JM-Y&cTWX`779p?k z=oROGz<&(=Tc*KQH%Sset>f2j3Kntj?{hHr46d5k)uc3BrYVP|1<{iNRc1c8`FW-S zx5+`hgi9w9rkc+_-fmco;B}61y`N%GJFUNpDgm705dZm$3HifxK?+McYhK&FCtEAk z{}{q8{D%%$%h$pkZ3a(?e!-d2drV{r93o&oKfn)xQ%8^A4Ks_Bln!9BTRgj-lbu~_ zy(j|W!1oZDzGvRNUSRwPm4eD`_Sig17CQW!uG)5HX#>-%>Efn18-w2Ir5qhPtsIC% z=sB+ZOKyPAt0qBN6fAhTxW|3(u6|$Wa}5*2s`Q$xnDs>z?_#vsm7E+&ow^Hg*y6B= zY8@1;9wp8DEneyucb;}9F15xnzZg6Lt z($m!P_FNbV?fOX=9i(WJnha#Czjax9UKGUSbg(!Yit!h}>D6xHevpHeWUQI`EJ4BS zQF3FwkOrm(N6{eL9*(s2tp;)}*5(yZ32f zb&zts!6}aMmsd@y?2q$w4XfQIdz^dy;TsiSzS{@RB=)60A=tVYIHLN!z7u@>UR9MK zUXlC*?u&kA3DkhjZ*WSAlZD8(79nEW*4o*!5D|S>g>j1if@J(cP4O7gHOhlSCvC3w z9awS*fEF%&AP9s7I5mTPlBM`*k3K zj}447VG$H;fMyA3VP{}i1;&d5KjmKLkLI^+&4hle$9Ur9-WW+7BVK*4EZ09s!SRGY z-+8<~9TCxRwT!EseHnaZSyp&7x^n7E$HsHBL*y%&wAh>E<|Jj){j=?eXq{rug3eN8h-;lW6#5m42W7Gwz|hlzGV3`pE(r5@F@II*vh3!mdZjA z>_NqM5QH#7BwFk54dF}JvQYFnBJ%#b^4ea5_+?bT6i!j38gg$=#J)q>S=oDIEDRc8 zArT78&>yKOeeSf$Y-j53Mdby1f)Xg$bj8KM+1!ww7J-YutHx7OtJfr<7sl+O{aPsk z*Dmp5J^mOq=wRZBr3NYu9c3j^r#UpRWEeSAz^bae`l8SER5o&lM6?Z!8NO zH{c+MI=!*cZ~goFtPgSyPuIUEr^@7HT6)LG+bxg@J^a*kQlq;^9D zFvs#xn%=~PUBu;D;WB>M5;Az)7rkt)RBs{9_2pQbK=v}!GlmdM;axe}RHMwheG{Xy z#ZtUf5RdKY8oj^y=V5a{vEZ-K%!kdr>Poa2&E{gHoW*BZF9dw;ebV^P7aWcCyZ8Q< zUZ3+0La?w{-@;X(Mp1WWHZuJqw`er_Edm1JtgM8X3h~}O>5>DZPY%>Acxubdr+u#c znz(3Mv|%jqV@MTjP1N}_}!u067Mx136BfKj~bnSH1Xf6 zsZBdNSvaXl1n#uwdD^}?WCY}uE;nIEw(RA`=DFXqq-xKZWL+HiX`j;3Rkw|MA@t=! zt&a_$SPp6BTk+|Q5gQe7Oi|bc>3d)7X$c(%HG9K4Pv0G$L5H1Q z^1PRqqOOi+k=dy&SfRebH2UJL@^?&omj`bt;aoP@$&e4&8tKicDp-*PV!UiZJ~9fEYx>#$P?Vb|vYqPSNW! 
zu495Aj;BC#^*CO_fQ72lOe>VyEmC$izp4N1ez;h+5CtmsQX((pJLesyxK0 z^4|cNY2Y~2%)VF;ZCPVThRkOCX==56a3gb3?fNZ?Xm6p)?6RX)cM^{%*FeoNBQ{ek6K5_GD`owoQ$m1LvKpic9DK(J*0xUTQD4PGD9hxMn5=;yxc#qefR z*(rcv!^9a zeyYC$g7b9*^5qu2_jqUUr5rD}td{&B003fd;{5YBTqdQpVi}58Jn1CO=ldN#mU)_z zzBv|~Qv$v=IVOawL{H(t9cp@+P3>afg^;oi;6^8uTAd<++4T~kQftj^Iy^d#{4&#u z)w(=Vg52w*S95se=q}uf%i`x)E@hvT4;OiDr$N z+ZiJM*-L|yX8tOclJz(M2U2L9lsd;hMT_CHpQg_{z|~Tk+Tb)?lDJpJwa>-FTQTTVgQekj5WKL1Htee+dY^Kh@Z#R=U1 zZ2r&cOyy@U`{6s-iz7|1;L)+me#NJ^X{>_?Ph|wpV;){%ZY^2elV0vjzyclENZ#3m zr4fyfypnV2O9Trmz;B$i{&vX(V#4mNyK*F|;E;m9IYM5x;wU|_-E8$W>*1uuzJtd0 zHMerS0_6wXo-Wq+sm-Q)wHGX=;qs)wi!oYoJV}bqNEb zAoN0BkE`8rVKYxc+!ms!U!ZX94Ia3NaQXga;AV(0#Ju^k@qBVP<&D)Z2z*b zM(dBQ?ZRYcVkq8eM@(asp98Q5X;^>z35p;EKA1FntHZ}u>%wrsAzS-En*Lj4i6*MWq!lG%gNu^8r@;B2FI~19Su2+RMZB!- zU-@G$f8IUiPw|zcqMcNxD{CaD~oNvd$ zTAehZq@I^&o9O!eHy3xWXQ^^y+FJAdPvn!@{zLKgo6Oz45BzHu>Snc+2O-lUA2x4A z+37#8pCr-0sm0;Bn)9CR{%)^nWfK;n1AF`yiLZY7uDqoq1uT$;RZ+Tn@A3DNuc00e# zdnIwS^q=%1r#*lCdBqs;oVRp?3D%uIT`m4O&gKE|VEe106+hrK;e`(spq%7TzYnQyRF$g^6RSfrv5>9R82L0jz2n=TiY~h$k#DC zGxlC0ww)L6^Vat|L|zV5aE-j^e+&-}GG_GimJ78DVec5;i-|86rFl@h@*U}sTMB_# z;{&-VYPxC1+I?S|UEiAs_4;WX)cMkqb^+}4>L8g1Z=)|&gvW!r!Y)YZPUmL5uV zFU^Z@O$U>>9%(=IY(nl8j#CxUI^^=Md|H^*oOr@E=DAnxovKf>3u-4xAi*~;?FXmq zqcHKmGjJlhNR;|(*%f~4w0mdjHpDQjZ&B^PAt{H);KuRC@j;n`%hZ(j1h68m}6iUN>$XEv(NHQInZXd+&@bJ%4-*oyCkxT-CXm@AsIOzaH$b+pD)~Rd_M)VOh)yVcT>!NQT|G}T7!G*?wE=m!{**+vrhjYti z9hLYluk`XsTS+mmJM#em3cd=;QV%LR9Q-D#<9Ds2^2+(ue#XA6i;9(zwuN1OY~FRQ z*hAgOY_R_>69G|>;I@^AqYTaqy>OwEq~)vU!y)Gzn;%xwzPW5|lWb2Ha(lXFj``p3 zJTct;{$TpwZ{8o1%ua{`Jj}w-4}PxiKrPO0sjS1|iDiix@!=sqnTl~k!4@;5@o?Lp zPm$Och_lz@Rte*@rV<2dYccJe@%BV&{pT9&Savi!+A-qy+3rCNmtU|yR4OZ$Al^yx z(cH(vl)|9u`k-n7ai$B;fs3O)-aOK%=l2)Br8D5W&1jJ%9Oq0WnUQY}IgBw0IhZAl zj%mMolc?S&)6eXds*{qWs-p3CxJzbYY`X7lNqN)_!hKG|gWbideCT16QvP{64AEut z-BJGVSVDMD5!1J1&L_WkpZq#aTnX^hgf;V>H2{FvQVaH}Pg-V^dcG67in2`j%YI z@wWnHhcOig?mYfYX2=B3)FNAGt4azvn!`0;bv$PJ-p5|R%w(wc#B$7xfM2I~a<=VP 
zulp}{M9c^*REfLu$G^k7-%cmYEZ@#_6m)#%R#{oG;Muhi`r3^Mm|FZ-m8IBEv%lt6 zd@cFS8#Tk1LiL$tL~@!C!nqn-V-=KUH$^LH{#@cI z7-dRI;sUOiqxDO063$*ULg_$g z3yeFD?*zUcNW4j!BPGvi=&0%_YpSX*eAIdKOGQOltp{Wh3i2&VT4#t)?#)pUXIT99ZP_BbOH~{y2H~HNp*7jyT8M>uM-x!nw5wI?wD z{MHDyDF_sR-@GY5d6kIW*~NX4BCaZM0xxW|69{^a3s{I3j-T;gyz&`iv=3V5Z)0};&IMe zKU-nDeed;a!5BuxCTGqDd$~Jb1Zs#+emIx0|L5MPoTc{8-rxUCyDU&mfbg!<=50=s z{Nc$-M?Egkg)as+xH&oLy8n;H&OI8+G>+pBrEIOs9>au0u}dQ=BpQv||L^u=t;@Iu>MS!OW)Y#-v`OWGad z2A&oAGiR_7H_UZRz;6wBY9?3RIj3BEdVaxwuZ2Zalpz+KFR`a`f`FEg2nq4#sSydc zc3>OorGi2&m(#93NxTH|Cp05dbZ2T7T7`R2kBDy`h>ZPZ2O1D})<&~fEr7mX9@mT9 zFi*8$*qPHZ-5Y;x0p$zPZhYR8&+Dyb#T|@QUslBnAqu8Az8VK}1(EoUXt&aU)VujL zqGu|I>hpiyQev+~n%Rb*q?^*}qU!**Kjom-8rQz!c9>^M(e@P4ot|3`k1AA=k)ERn z?x8!XzvU1&hCiV_v1&G_L=E|cSQVHLSHG1AsaG38!a9}~w6!fU3%}_+X<9%nI%A8- zbit53yM*}syzjvKGVFb>8WrhqV0U@?smUm6=m77#JO)4Yu11*+x&9U%^^z@9A zfq#lAU*z5n7O2S5lCb#C6D`?fpK%_4`KfkAx*XMwKt@%po|OxJeL>qsly#z*e1MZM zbslK2liqgmG^b#3cdarHh~qX^SP$jD_J?-#^6*%`E+h~v^*)L3msN0vcvE#}>L@ZdRA2hIDq&u5 zoOYM?)FtZ4->@czt*VDX!2yKlT&GfXV}i}-Z|?(w1#5#QT$6;JNEmY^AQG( zEB2i@G4b&s5WJZ?7q~|Y8Z*uM^*k;?|1}V4AB%`cnC3Z0TSqrxMOE|32Al$>um+rK zsa0>H#zcE&T_?y?r0EDsR$Ibi7t1)U?-|`yiPb^@pCgnyyyyedka&l_{`M;y)~F8H z*zRJh&*)9rclCLJ)Rz(BBxsF1Y}!UjIVJZC{JAM3qYa9sD~`^+r{F+vuhKB8?cw9o ztgDj%=)9&LhlXxeOYcSX=(DK+pLkcrtSU1ODNb{5CAS^as4uoU_j^u54wp;ni&X-D z<$FVpGJp|kwVBi=y9o!&APjknn+)7p{3rZuDyOnjns()A?Zc_*-zv^m>6T4QT(bC7E;2QuL z&QD|+cqZV$%&s7+!C{F?WV8=UNFuri<}UcE5&Hip=}Yg-)~S@ySqP0te^6oChhbA( N_%qQ9=()f42{L{GZ2CxM8=9%eR+E2cEntih(_@^-33&D1PcSx$+LP9=+3 z_OEtIs`e_oSn;3cavQx;$GFN!8%9RGrk_85I;I|xS@0a%3*Dii)4M zzpg|H<|tY=QaO|u2$+tM2k%Ul<9U04Ygx8%gG0TF{h+C?^th;PzCPzj=vq}%qeGq6 zxSn+wLW4?A(Yy}IOIAPin2Pvno(i8mEDw$jzK!pPR^Wo0(3ju|RHwn^=H#w;pzSfL z&>3BBgOPCkbhv6K(E<=Psvc=H?@x;cZe*EAUT0P@y^}e1-b>3KMFUjjrRVAKv z69cz!&ZwRIA|WBc&h*~TAfg5(Xvkpyu5QOuN-Dg84m?%1`?zrS@C{e&=FaTd)pQ-A z%sqw^C$s{bK43`p&NGyq8`8=M@%iUP*~Ub2O- zEdPx~c zAs*^6c+yuJzesz$U%y%~`C3B&Ci!Z+EznJ5F5-yYpRFq~MMu|lcVKaO_ua`8?7jad 
zhn@+~wbG<%jG0-oI>TxXvxPgo)}0h+>eOe~ShD^W4-#+>kVzT%gK?3&K}_d`wtMCI zJN{H&*MN$pl{X?uw$f_ODV4VZvboKuR13DoC&p>T43ejsj zMo*g>U?{VNYuDuIN-S1q?C@ver?{52L)Ulu9N%KR!<&>6&u=pPc++>3ri4o;prV6A z_VWIEysIvH96T!c^WH#g;ApvvAKGF=7epR*TGMDBF}<=5hS|g7d_vIsM}zsPYUyv0 z(1v?Ux%1uvzpT<%{Jysv$G>&^FFL9k(`Lea>)Abyzvhs(2a@@*-oWzFZ4w!N8<%g~ z8iZl~t{NYz0HsJZi!%gAPXXo%=l4?$M1K3oI0Sp`3&CWWR@EtV`LEp9P3f8PiQHj(z~{2 zHod=Ce{ngySNeE>Xa5RZV5NR<8ujAh?l1pOaHLl)IFdwf!5`Z8a8(kKav4*ysgT9n zKfDl5E_ps{rI6LIw88eH-9STS__|xom2nz`g*@i%zm$;G zhFQu-^cVznV!3vyh~*h|XCH?1ON9BmhenTnGU}#5Jl}8iHl^ZwFTnTfret*L-ipUS z+pi4&SR5`@T(a1vfI+A4VB+a7|LJ}eGblW&kmNf)(iEkj`vb!S?674u4FZe=)vY=f`BwzQ3xY+^Za^FmrV* z=N+1svEy2;39APmk42tOeEgSgvmFBdpQ+&gm3>j_z(D~>R&U1GIr8}YCrOW6rNYS{ z{#WifSP#SWe*D2F=(6+l;sugb%fI~Kh0lPXJ=w1^l2vhVb7!snkiw0IhGsVlW=ZfU zH$<>yEJN+5K+C^emS84!NT2w>(wtr`cm5@1V=S9B=KXn>^X8;jB%W2vzi|KWq=j>zqshZp^!!RSZ|*>cGrR=ZB>PFM+tfBh1K)x$&Ta+e2_ znCkVyIAkWh@0h<_pV7F~)`Wa7u%cld%P(>x_^ z1QNSh{D6vP*E9AYa*&~XKOH>_BClz=sxW9dnN6_a=;m7U-D>A#aFg#%8gyIHM9UtJXmi;N+7 z@p^;Py`f9@uc*qcjlyR^v?aM=%d?wWd~5$o9(COukNo4)`>C$3u7w3%*ah5*-c%_( zLqx7bMC=CHpUV4sC*F*|oJl(?ylK2%s(qD^K)isxS^MvJiDYyYW1b3qvs<-b04d^% z4y2zQW9+mAHdGxe4uxKbe^+8GYr=3&xeY$&aeN8V#|q;NS|kQ;5;5(?>3Kv<4Ano- z^%8RQd5OT%#@f0)Dn^-|Plt$z+pc%2LKhX_6l>B2|KqdRJFcd%^z`&j;jV0f*ATfq zy**1Yig6zf4FiEJ1^A@Dr2U7+-w`2othU!xYH`2K?z>c*PbgwT=+w);_M83#cD`B; z&+C+nUBnbu`Yzqrhjt`N{2V`-TepZq&dr*eo3j{owEysNx=b0Ws7xNf8uMsMz~hW@ zSP6^`Y@SxGty85buoL7M_?66cdA}+1M9j7TljBF)yf5f!Z+21n!~BOXqXvu(E*o>g zDZ9L=DA|_(rZ;0xq6Aev)PHGLTN4r>9w!ldyIdhx-9Mg%>`zKciXo>1X-;B?KVZD9 z2-tkabLrQ@e==y)6=qdU1qs+;VZi*p$;$s#TT3fKl)_E}IiOvH=O6Gi8%%nrvbcK8 zyW{@m4hI>7KS$o_E40_dTN^d}P*Yo5ub0s%;^xNgqmk{aJ*O&270;l%!SD29XRB-e z12LMQ1B*pqUU83vjYiqJQ`76$uMvc^pkDbz{iZdq=+q?(CR$s0?Pd|rt@##n%PT9h z?D>5E+U^a*aNZ2SjqyZ(BzL3wYaDUPo05=FU`R+vdAXgdD_er}w41A7 zj{ zJ8ejciXzlq|F*f%&3@oM*_+OJGvBdaZ8ox7odySKreJLf;Wr>^4}J-^{Rcyo*&NG3o1SYg-!{W*_XH6XS}R7#pLa zp|LPC6TAp*D1~2=PPH3HMF?UuvYDuDaJ7%;0R?ci$@ZkXpUlhCvyG=X4*@O^bPTdo 
zrRC)zG-%v%!GY?u3C-1bwvUV_4<}TqEohMJK2fUA7qgRE)y5$Wbh(Z+5kK>Shg$VQ&ei14EEG~tl~h%~H_%Pkfhe}K zwdLjIm6nltGZ1t*J=`H~#yo$zIq3K2F4Gi6j}fffxaT=C8NQ1P3rm+fKd+Y{M;_uB z*<6bYxpK0%UU7|g9^B>LOlXj8JFq^e@0r{-2NE1EtjXqxAm;&xw&M|^xxvIEM)pD+ z`Z$m=8@bx*WJ~GgCT9yv%c6$8)7}jyPY;iug5;b|8z`m>NQBcF4AAS(mFi^}0lblr zRvz`fvW4_kEvk3`N*s_bksy4*Icb{2xDlgjPsO zyvL20F4YM8{CQ(@^Bon{Wr5aY_UvwhU<;LtoTUrO5G&G(-a5|4h0SIqT%aKAl_wv# zSxd1i1Sk%*Jg*OZR-e;hV(Bx;5(UUvk3#LRMhDPtLW6TH!MIgcI*5z+#*pxwpP$$1 zy#={AQ;>-CCFzBlN24w(YaOtNXAsQd*E zN&*KB_xImeGW+JfZ_}}chj_@K6Y<;r7M|tvzO-zicnWBHCU`VHEqLd=GTSV$!w2XW$wbyiMnstV*`nK6JcAJ6Uo{~jB5L>7BmDccRE7ciEwATN-fdA0 zTnly>kBH%UkaZ z5=6OJl^ze&9CgO{eoE};elH%IClzsDMfSAO=6!MN-#k_C@RZv&%`Pu3eM=Ng5lcGV z_TT-upm4`i`FC%fu{oabH*f?f7lV|T8y+q-C`zWJq~zxIB+GVjdfI8w@iJT#`_-$E z4)%{9cQ(BR&htT$OG{36dBB&IaA6+9!^7+JF@Xq{{*dPoaNSjHG-x zeC@e&i5t>4~CDu5?jyk3_dHk3s zUBk!c97QCewgn6Yx9qQGl=!_bkC)nra_Qblc)mAqxHvVy{yCtap`ihwO*ZwI+|FV$ znKUK#&F!r+&G)md(W{cO9@LQBqN1X*vZ&vN^akzsatip7J+EH9s;;hJqr}9YG{c0^5vJZv6z_H^AO<>;g?ugzl^JY$fg95WTmC0#mB#m zlN=fzo-Nn0u(pmcC@CtUVPR3B$5Wxf#l*xE5D+};=~OFKw}aM&k8Y{ZeE;)D5*az{;qJ=9%uK34*~VsXIE`byh5C6&CxcGI ziYo>>dXI(xG6KTCyI<`Jvt`$zh#Rx4`|<^4+QED@laCk!x#cgo%-Y(TCVgUB8c`5A zO~l{7f1Nmq!$pGx;QVrBsbr+2R-KxVkdQJnGc&GBc*;0vR61jQxFE1fkZpHYv() zrKL80ety*Evv?Ui-8w(%5P#kj5sZq*$4lfEfan zS&;mbn;Qn06Ew6u=ReMAjqb=6Lc5`!&P!+pNxBtB{KdR<>@o}4WE@p-O8qa`rOtQwiK8{foI zDWn;#woe@Z$e`DRap8*-Cpnc+UydD4NlqRhNIo?+bsk#Q24 z0tSPbptq0o4-M5dH2j#{K?%CPzW&6S4&qT%RMh1jRl0!r!-ucpdDAm9gaicSDA+Gw z8kw1G?(OO5=%l5lYHMgv<3(N_E=H1n(WDp0LJ%8|#Ao`}Z;Bw4o{_=F#|N@?P*9Ku zuRfSGP0hKPa-Hf(9v+^%ySt<14zj1$LKJb<*4EIHC|yPcNy#s4I5t(%#d_4M|F&YS|J#;6O)-Ti89&3K=_j3F&OJt|hq7Fze7ot>S7V-^PU zp8O#voKsa(gH5!EiH^?3!m@X*u_y*Ikeyhh4u^du<{d4s+2Gs>PP2M-d+(AfX!Qp9 zG)QP3q;!1$3|l%M#(E0)G;ZM(?gYxv7s`GjHQ`kB7u0b71n1kESGfJ|BzV1*F3a1` z{^>aj1$)`#YpdQi2cL`GX?bMm{{H^l+}!r|Hdy_xhp3Cf#j2nJIdZ1g)G(ultnKbL zJ1p~0x6jDaF)<}cQi23Er-P4=j~)I!CFMpuu%VyOMlIkhb>Ko z%Gz2MKPD9wm9ma2utb%MzYm+QFa9xQ=AxveeDfx!y`A*VJ|iRJWWdDnWg1bG8g?u| 
zuy6SJNipldgaIgeffn`Ui=@2#$mrU2Q`{@79r@w|VsUdRAjpZ{rn+pA@-cOxKSq6`XVX5n;+PU<)h)hw8LWx@bd%r22M#XnYO6?(S`uKin zX=#ASJ$W)9fEB4T5YmegI<)$xMJg>ycw_eN_zPm!&ttQ2ru`y3oBg3J`;`cfg4 zRjM8fO-Z@9Zdx!>f}c%uxeQ=*>m1=6#fPNp)iAOK1G0O%&Naujh8+x6SL zUU>)htt2MoD;Hl^q9O>XviKVF(Ub@vf`^usldG2amodB&9QjhD%UJ$_{1|5Ig1zgf z-Adi&Pwx#MX7wu6vg#?XBcti=ec=+%k9jxD>pX`~zetfq%_V%Oq&`AhQubnXR;i454 z6_1aPcb5k$54B5rrp#JKMsMOI>FMcJ@4gJ1yYr42_v1D1MN(jgTj!wMI1G7Dstu-I zb6o$pc?T^PQFuh2N#R+v|0%~zJYI8;wN>#%x%m779W7^Ml%q;(SGrh?WW%iA>>_nR)YWSYv&L-* z5zaaGX#H31$nf9-vxC`oILnLyb)(GC4IMV=jk0>u1!2sXL>J$GY~=_N-*q7_s+b)f zK}7Dw#f7E-d$f30`|EV5PW0~3^@3dguzH_m(`Le7_OIVh*oG{ii6mU(@$TQ*2Ar<$ zTP9(j_=b0Y6c&-oP%Bz*Hj>T->h8|Yjt7rIj-siV*|=pL#{;3B zo?dm&fXDO1E$pNDLq-e?46vrkM*u)FWxlC*?VZ_OQ4>~Dt@i>{f^so#+F*45$VlSg z=E%qhO3=^d=2H(|ANyM2oIm~jJMDd5(eiSk&F@oqCaTSDg1xVzdX-wV^V;p{?e44Z zv%F4Yke&Tcv<3*chpT-_-D$SN0STQ~4z8Ep_eA2``LMM-t92gwrB9_ILL`J<-@MK@Vl4FMxlaRc_meSTHOp#U3Af0A- z@#4kA!~`5X{K5Wy6&W6YH}rTQW3@Lh%}z`(gC7P4IdZJzw6rr1-r|ASrtEBTj4&Jm zf-iC<*eD2KTpJo1goK1pf^6ri0=rf@xwyo{#j(S?hlYkSW$zYJ@D4I~lux(SZ*i8q z6_9VE4Q#f*yga?*OPy`G)yUk^)wD+BmQrYZDhZemyTorB*8+@|!ChF?=$1*X18W-x zD5`UHR`DP&fG7pCW@@?tf*6yO_e;6a^n$OB>*c-{QgGnIg@%ctAr=XVT;BNZ-rkN2 z_hSaQ>Bfr(fZJLD{1hY?m5`vOq#WBhPfaCkEhm2Ya&><{D#Q&GPo&@+ZXv1i{JJ_; zE-o!F*2u`U^R@uD_!zxPkv0E7a1b>v|EEyzaZS4qO zX;77IY_#arLDjakEh{K^mo{wwl+&!#k;KGv+Hxi!ng6oGpv+o}-)4Y%nN*bwhRh(w z;P@n`xMb2;n*&?qs!b6LFyqaRN5HqQ_(C_rS5vuOxX|p$W4O9yhaQ|mLx_*xxB3Z8 zBkT8i0wiO&>V*)fmMYa-pLE};D_mII{=Ju%*X89Uv;u15LS^dqaXF|g zM@YkeWhH0#DVWcX#=+}0%G5Mp{ZaMgdN%|O^JsaJgMc@YUH<^GS!;pXv=AUUMZ}3E z^fm12sLj%ge`rjx9H*tJ=|;#T@6iz6H~9Blbk%J6OipZp87Zq}c0(y-$sQ@VnYsrx zA{Vsv3NkW#mu=5PDPH0k?CtH5kdS~W5El=nQvVBKNM*5zsjYREl>F0R2m51MBSVVN+XUr}vMB&TV zCGQfEN_jrP@7s}F&62VEKHP*>+HN(Iqn!GpPd2{(pI!i;{T`OWHsP6}i*xn1>)_tr z`be$ALG_CuCdi zUq6>ouNaVdI)r!^UZs92JMKB#;#o7sLb#!L^ZbX)v#~a}!-W*UC#R?H9~?Z~=D`Y7 zX%*5r1JTF1BD<2rowA|V6$qt_@LVX@++U@ZDV0TARL2tct84-a<` zvq6F!48^4kYjZA)eV}8zIq90ghoJZgc*=P=j$kuQ(?q6NeoXkdJe=Kw#WWHisk_p+ 
z-iT3I)Xg$gS`LS$g|V@(adA1#b?XQ3 z@FJUZ85=AXS_H|lj&JT6^+k(SothRhhtlSv>l${ueE!n6cvfg4I}Iwl^Vbu@LdZHf z|KZ}|qW+Guy)f^rizB=Qr+7-X{?3ElCTGQ+G>9AoTGtpoWN}SYt6;AW%@uiDT|;Bi zvJNA&K#}J1X!&Sw&&Jla)p1=MH3TF@ugCkF?(S|<$;i2QKvbs4{;2DEPpk7TPVzW< z-n;4e_;^IEzM=v{K#4YiIYoB*MWn+^JNeNweH^!(!`QMZ)8-Jd!D)VL>!t)xp2pI| z?!$YdpE*W4eMHfDUOuGhNso)G`)!AVa4_x{X|DHcp7E#ilN@FI5W&T~uvm`10^PEX z?hVa-^sk+#oJYAs7Q)gJ=~}kqQ2Urg zf0-}GY*z?><8tA!iF{f(KmWA2FeERl*hT_bk$Z5gNo0csn@ZAhCJKz7vrPsd!u{@z zCSstcx3#qebuKS2kBE@)^z;-o_jhjJ#jrmAHtg=}ySnM?21O_&G}O8U51POES;)?n zJZ+4c0>$a(UaPg+Hq><}MsH5Q&6%gJFFht^B;vvm95F~p=r?zML4*2kyEGakp@pAL zHxO@&Br=o$_wh+;1ss$1@F(UIKwG~x?s7pXkL@(1b5^g=wF2{J+=B`iSml1Sl*Zw3 zI9J{E`}gXi)>8@874)YEfRM!wUszm}la=j_CF}6IunrLpBq>&;0a%(!KHbvT*x1}0 z4^U{nxTK^pb|j;w%qq&tSO`K&E}-X?rJ4j00e*UW&B3qNB%mfsNl8gdM*vU*R&Ykf zVmMCaB~}-9xF%pdl9OctQ3qO%>ltq80w?f~IW2Q@bCCD@1UlXBs;jDgjc7MHndxM# zmgx1ZuC9WD1iE$5q|ULqAKE@(Rsw!Pko=cd)ZpOYNg7zL&+mkjWvOEORJ>8k_`#`w zU2C5&Zw30qM06zB8$@Rw8f>C;85>sXm(Ds|LWl1^9e6+KZ%)p^!Qr2U1v}s0X}04z zN5XJ9xsg5QQE;utgk%RJ_-96H;qaW9>;;+Jl-uh701eO|AVQjSD*zCP?15Gpb+x49 zDY-I#*Q5sxUe^r$Kfc3z1c*o=i&CIqV__+R4(s)6z*~X10OhLN_~dsmGT;;u;Q}@3 zdvOlK!oms*3jqsocYSS0G>V9fOhiCnh?WGdAxa^H)jc_hBQ`!XBnuKV;1i7BZnw9$ zgTUeE=Z}er@!=PE|6Ym`8?c#}VoKE1)cjdw#Kg8%R^bPIzqZb*A9sN%3=lP6zhXH% zJBN#YOHUtKKLLPy@)Q|IUG2VJlp?O*6!b&jwQKeB=g-?aI}8Txx(DqbYZo%wS|RSOKsk77>s3{ns~Xm zUM~}l_V*W89cL|SaT3d$nwt9f@CK0sdJo_$&Ci!-XI;5$mvOp`7Pa$WM!l~dCv%U_ z&WPA;HO0gbGvA2jDC#yjA>KU#00e%3L=I@9zV(ySvonz0j$5$8yEisA08w>%>I8_r zgoF?~guSgTK)MnV6YsCDudl6ju71+h(*x~bgkF|#PJUrwWY5~HygyF3XnlSC91>*t2lNcit>9r7+ zjEoE>W-MT3-T5S>XMs>m@#IEc+S92p`&mGVrX&_`Ry{M>l*<=4grdX$X*cA|of5k( z5WY=h*6l+r!b~7B(Mlxd?@WOnrjh7orXR8=paLczc+YY z`1FmHH=6*xYboO073v@&j9I=)r8U@xUmQ#Har<@S-Yx~9Lo^=$dd z;x5OMR_kwh^}$#2qhlwo)R}Fmi2E^qGhflB>3_0e#0lHR344r3)AmqHRry3~H+v#I zqJ1XSTdqIvJ{)@^R_{WXVs!Q}+fOI!AGt-u%HlEhh)U;Y@XJGb}&4nnn`rv{CJ^ws`}bx%!Tq zoBOxp^UDgfk_b!#e(LMmW@(2N!GSxnxnbA(tPaENWBcHuZ?b(-&51k`*>&_AR*vZd8_XROq&Q{5=m>g@ZCuyV 
zRoX0mibRdUgWPkSRjiK7>8}DH2;#S9l83{jowc=8eS@fnm`czuA3QWpOP^ew{kb-4 zZ~J~ey(-4Lv;!37B_@}5`{*tA0Ufdh)qj?i@?w59&$5!W?aHHA4kJI~(Ge+3-5O># zHZ@p+FHe#EVhJq^%jEL10O$+e+)0P5Zrbcbp!}_CeBJqoy%mNMF~n;&9(}@cp_rUz8Ec zZzpCo8f#y9Y6cdwa??0Vsjr*W8aU8(uLcWBr=YDw`GT129g-&d{bFGo%F+tf<2~tBT z4|V%Ac^1h0dPMuJGZ!`-?e`SsQH$hKlRQE z1!lt=i>WVvP{dGdyZ~!PD(UT6gYn^X@u~R1d#`3K@Xw((IgJWZT!b7u(b$hP&Wbl zI!ygqhLBWu-9Omi0O2UN_R9UHMV(&H@I9_nTl=4Gvag(?0yb#^>_fVr&g=X&)m^?g zJ9)hhBDapV9qxblcocSY>2bs4d4v7@5+ja%p*qcOuXe%aE|WoTfe)uHUy%kdAwW^A zaoJI}H95=RaS#3c85b9~=pBsq{rlAu4=~VO8sjtx0~l1@*@`r%A;Po?Yrl0UvC9Di zz1Zx^N?2Y|F#>}T9oMgC(e5Vv9J#v9ohZCvayamF^u^x_Bi~sJOKTvrt8du%?a)$Sv*L`uy*_n+^Gw!$ zcysa2&*teF19hwjR=&Om5!1`%)-Zw7$1YcA-}ZP`-nrJLw&_Xy;WTTWj9!o>!!K?V z#+#Zt(n0-HLNRC|Y>lV;Vb(TzUtaJWUd zkHnaj-oqBs^IBLrIG9{?zg3%5#$e>^AD?W=jhcL$QF~7NtzE9lY$S-L^f^%eL8a4a z3;ykutcJg0Tx)%4N z%j093_wR+2zV{f1gmvt~bqi}ZD!ADc)|aEE$&ujWhb4h9snVpkMbO^M7Ar-JCF4uw zE5BeJNj=_hl!(`tA|m_9_sH@xN}TQ7Eo=PrPllFmUDM6VAvrw~lb&I?99k#;V&)B< zkOcCR3xKPPj#7`l9^jSL1+jnL^Gsry4) z{s$^!9shuE9^u;X kcJKl6KZtSp&O4`iURL?lql`AmTrKIaS=4SlG7YWqhYXw|3 z4g36=H!QJjM;w>!%UB2>vrLH_I-l9}(WU9qItj?>A4BX{bjl>qDyWBsaBA50TEX+?% zgNN(hjgKswluiXW>S2yWqCM1XzPU16Yf4+zL+W<|`xEctWxp{fP!NDQI|3nlzy7dzeT-1WMfiqAj zH_*udS4)GNnwB;@JL}AuzP)Wp7%g6)3_2)~Ie`!bqyb6Ffzi>%`g*;3>sNt-UqG^r zj;;sfo{Nj>f8qzw+h}NtygQx&zm16r4rp#19F|Ao?CtG=P!Cc74r&NMTiD?UcNG*A z+_k@QU_#l`L12@?{jG&(?Ba-HstR%}B@ssdogd=kWoH4*YTtw%?He7Yc+o|@lYfUT zf4^pZ!QPr-C^EF!*|=VTqb#M+t9f@?HS9xm0Z#l71dQH(ZKSwdrb|xkQq|FoZNN^l&ChO}L6T|%1*5!l|kL9*rfw9gy@~^|(afx0`T)5N@FqJGO+!trs zKLzC#Cde^WB}YdiN4~pKWfEg24oY}BLzg{lvagN{eQSAsqQR)|`PjPCTTe9IacaNV z>bv)s;X&xuB&sj!BdOeL9`mS^?T;pXQ9lTjqWbC*YT?LHjP30YkB(y59;;^Ud{g7% zjO^|8)z!gr6Qhb35f-MPps0GbxH*^<*<;L~RjEi*q-qV&CXf*SlW>eb5CHfBT5=zL zgJRX)ogL8j0^djJl32ZIUkpiNVj^f!feC<>5Zz!@gd7WlguBjSG!qcpz{EhCkP2vM zLc)j(N(Z1wi05sB9%gF`w7o#J5f>G$<1QVXpMQ;c11LBK0lU3OX<1oOCEyUEOOS?# zhp#VGr0X9Xbar$5`w3_nT$I@F-@JLl%37C1PaQ0XrawM;Fj|!o&|x+wo7{D6d0+N3 
zdKs^DWJ>H`zfcU7CYlb(2h*BwbntMsI3ro2NU2AX78^aj3*e0nm1-%z5(%pv+RTx3 zVOy%CFdJ#k?H^}?c*!nwijKdgWMx{Lqn4hCxDP}vaqoTW-ut?I9qj`J@$6x8a;`7P z=is{>O%VC3X?(S=H*WZKH(4{S&ID(l}`iT`3diyy=nK174*O) z3Q{ZJ-qUGZe718q-662r2j9XpgVqs#xBQfWQvoBeGU0$1`U?CF%gvs94f(4x(K<;X zxdSF@9uU7z28VllG6l+3tk9p1>tK#_oNbobz~Z>}BAuRPkU>EZwzRY?y+M195VUJe z26{I7K16#!WL`Vr`}i`*m*=-Y$^;+F%LbtOY znXk45Fs`g%@O+LUaba)Q2huZ_j!SP((z+f0*dtHi7uceMQ5E&SjtuDYzlBJ zx^MRh$j|uUTy^{wL6+zaO;JeWHKl1YGhYW|Hu~i8Cb9=R@}J*>7(;>{55zNexTxB* zX9TTixANJq^oqzn82$o1jJ|(S1pa7NB;!;pDTn>`@k@nQv6Ti*L2WaN@0PiR<5Z#7 zf2JL4w3}F*m7Tm&Bp1H8-{U2q9V;*tMy=y!B?@>&G%mSq-Q%Sx=Qm3>7Btt+0)YE% zmg!vodbFsjW+!AN2tk2n{K7&?p6M+ShStW!Qc$(PsGh_l>{Iu=eBmpNbb!{16Gz|% zvDxg0P)bfv01`J#z*mJo7@@ZpyXYa2%CvhJIW}Nsx%3a2N47!ve06Nmn0f05`cs_N zNDop*+y|KN4db2_mG#3hpTjMEX{P~ZCQa%6X=L(=i?*ok+}*m|RLpB;m*aqE@KxiY zYJi`m$1~{gBSHHGR;+O~Xg(eq+p zl*7l2XLj>8f$rJ{U9Bcbh1W`%Q7lTzfE+i|Z71YQg^uAXSW4Wk%tu0X zRYu9Bo052zKU?ddRMAmfndRSLhi%Vhv}HEz{mfewEphbjFsx!HZv3k(`V}YeIP-pd z@>Sg3Mr@`VtUG5g|I1O zruC|Gr}~Lb7V0)N?Z`~J(uOauFz3`Qz`G}@{$=yKI6)f40hx{)UC6xTJ_7y@V= zyH3`-5PP0QY7E`aakF}{d7c-|W{LqbCna_!0Sa}3^a%uzZB9;3Q*a;HIgrtQm7Ci_ zfZvrT&}{^}Qpkb-uv}p7m+yQ;b=^s7(S(kKo53nsVu7~sN5aHf6Rp9GUO-(HG?n^l z906e8vqU92)JH7>BBffT6*S9+rjPHeeg@{R2HsGv`z|xsJRP!@7?~bZg9`U{nBt2F zvNRDH7gX*X#LE_byY>3d;LjAr(Aqiq!gqc^M@#TALFgPAfg=uQyB}|vxZ1!ax?Ir*9fN4Aq-GfL7BUz6-VG6n#8?4P786Sz9ofjN%av%U*AvvLhYja#wiF zRp8ZI*a{V`P?hVOa|G5iS*k|ep!W&VVnFl@sIHR$(MX#BY>J5thA6+X-FL5m{h5Q$ zV*h9LE7x**yL7hAcn3^8H;MAP^|h2qzRBEZ?X}0WYzYgJVoXfwcHrz*nw5f4WOM#LPw*OH`q;L;H~W-iFKdFJTZl zXs~U|Ew4Fk2CYl|o|i}%E1n;L6%AT*6z-$pn%q@AxVc$l{_qKtwATbW)aln}nmowB z0OxU3u@BFMsP8AEUZ(Y@ zc!+CovtQi)du1nTK8yb3|JHVoaE!w+$O1PN@A==joxVuBa&Z~J+z{}iA>&KjZKE0r zkmUXI*xr5^`v~s>WD_Rsm-&FxJ$`IES~t=2y&p2yEJcLgMMoFhG~w@BgPWp25l%C0 zrupE38xk~K3uN216*VA)GGz)F5Vl&EEE3hRcv)ooz9s5l7-1?Y=c>VY66xS(7XYBR zD`m>9q5YXdt@@b;klncqLk2omFSHfjm+!`wsP2guO-|m$>_;3KTVWfrp)4APl4~xI?P1q4Tt8P(wo1%^kGqYETM_FkhKWF3J z-Zh%5|Io$hYb#Tin*}X-Bps)NUHs=#C_!I9>7ab7)d&O(_640rCcmF8$LK}R*^iR9 
zV#^hgVZQEdMK5X}|(ZO|SECSX$p-E&oaz3)TL)4K#|AiHc0 zrJO;RuGO>5Ci1~n@E;{1bL&NkUnMEMz3=49oO^cJZDz`55BsjPX(BF%+Lif8EMrOj zbU1E0&HYs^dv1BuY&bWq>B4wXot+D^Y@PJ@;pC$R+eI$~Azfd7oi@Xzlz3jB zw7|p?nb;)<+o<_!G}y#1w~E0JB3$h%byYzIyraG3x=h5Qyl&-~;Si zO+PI?gWg1o+uXRnmI)0&HssHWZ3A?}G-N?<9#cM(w>U8$ZvE&eD=seZVQ0D40yC2{ zck=$>p*nhO`5f^4+eb$RT3RKrndGWP;{1ukiCiPO@M1|VscXFP!tx|Y}CZI zv@NOz#06T~fti^G3qO=gm+Xh&yFD6IvHHB}>8;we^nEaNRf3V^e})va6%>ki6b+UQ zkNZsu#MgUMM$-!%KFm3*(RrsB^+sm}IB4>CEpQ?Tl@ZidH-aD=BeNYlFKg@m?2N}$ zUt@v6ac{9-4tyI9b+2z6WP9iqn1?#h0R5)9CJ51HGyD~>-WTiKv;ejg0htL1fb#b} zvovslW7ZJh>nnMd+_zr;;@_IHJ7}A12mTtLn|M)*`5H4B&}=j{aV2IN12p;qDbHqf z*3*TXM8;_D_qtjZXhMJk{T($mOUe)wsy)r74R-5X|8t8QTfIFvIA~d7_f>%lBxhl% z?RyN~-w}k-G;#Hx3ZPMgRq?^4`%BoB{=0YNucPfRbV*1uHphwP4E%c@xk-hvpp+_S zDGjoTcPlyN=Vt`UQyhm>twzX4fU(AWNcIGd1fE7pcS?>E-*v|>i4ujtXOJSJ4+Ly% zY@po%JLiOOQ+KMc}D}+m@~-`!J^)ImMe723ma%ZFK__#D4#N zY=$o*NmsX9IG=|WRp?>`xTmD0!N86YE&7@Jn=^PgxNfj(%x3B}P@ceYWn^S5RWFy8 zljHL^Hng)lSZr}`c^BUfnriS={H6r2UYQx6(xOX(HW3&R?k@H?JdRhuHUZ$cpPQdw z&H869?(ED3(DC8+0tjEFrKMd0!b+@EROZJkzrlMf?CeV*${*u2=`SxXNZ4%&+;US= z<{uyKs$+o{Jv{t5aK2#WXJ`NF?iL0A;{+F%k;K2hygM4ZfEOF^2Lb}1w`_178p{!h zYq(kyQ*r^WP2ge%C*ND{cv@(324D;NQIk)taa*aJDun%Uk}h?)KjL=4_O!nK7RTC1^A+%q z`&##l$*^Rx@5ub=17tA-?1Z|x(QEY>2k8a4uX%Y}-+B+Wz6IN=fP-F^3Jn9JSGo#U zdu|VGJOwU5+5~OzerD##>L+Y$Y|3x`mnG0P@5aJH>J(WpS)kWtPKoL_UEBH#{L1+F zg#`shcj&-!2yBiBLLdU;`+*xDxQ#2y%7C#CkS9P-)cnxJJz7lwRD3Y^@$onf%YwiT zmoy0Wy*=8Hf&2j&Zm=g$T>xyg0b8^vv5U=aKgfD`cxY-S0}ci>@aE<}fVINONf`~M z$yeP5rzTU?1=OZ*Ku{(i_$TFqtse3HAPbr@&wqcfQsrod7x@YgFO>WXFtgUy0+kV* z1P}+Hl?Qqi5yxY#rU-3M^EQbD?R{0C4v!BKLyV=t8-xN1B z%}1;rL!He)rhhBE<>|RDQk&OH@D^plU+~Id>f(B}3!n=0$AjnJn07Qcz%KW%Yh$slKr>i_?+f zGETWTDki3(z8-8!=#d7rGqd0QHCPiSIv8Ii^MM`FmT8w_wMwjEu*T6{%+wTNJ|I^(jwB`NQa2h z-67H~Esdmrbc1xaz@bY(y5Z2>-Q9Qbf1dl{j(f))cf8+T27@?f?|s%@@vAlG+#(9N zL@)mWa?fY5Kg>vQCMUAi23&UQOyP(L8{Q4DHa-Z&=cI?mCy|Ha@m; z@ZDy))LkBO5r*b)3AptN*iD%dkGlh^R#{$^FAlYD3Mr9+VBbZ-3hQOJezExMcPXC% 
zF+_+1VdSS@;o+F5s51cknV6^nHn)K~&1NocQxgb0t2>Cet`oaffolZZtEwi~ot+)f zGzSNcg(B%3MG%IR$7Kq?iRfDg^YjYP(Dx7K8@`8ONJvPyS8(9rm+P!NMHpAd26qeq z2dk&IC~KvoqgV3d-^XcC@3{R@y`H+SudhE9P6yyn0G8X^yD^mJwbmUOmo>7!z7EJI zZf>iTpkG{$mYzj^0@U*TIptR=vS6njASl3x0(c{+Qw3s>n7bq>7J$VAfJ^e|*8oin zFe@r5DpMC94B6_&6!5mMg=f9~dOOaJ_F-Y*t zz0+>}oC+eprHp^m#r~y&%~Na>AXr9xd|*l)Ot9hz3r;iQ3HZuA6wBkzqw7OfTqHDX zom#Z~8rA2`KS0HwWD-o<#w6ADfY#F7b}1PA+Prn=YHu}LoZw)X`+5!=KTI{egfu1t z4+1!jqGDp@--w8acK7$^*>?%u;PFCoc$bl%Fd%t<{;(h+A&p;uoh;Lnc(5W_ITD0e z+U^9BdFI~~m{S!yfz8jG8vq$GC<+F&t6L!lURI|7N9%CHb2@k@!2aCv{n_yq4K2jm z+Z*r)psuCn)QmrbzmKJcUtN8V{1g-Y^aYk$*B2I1QBloOj%djoK&S#+)SH_{9)97d z&bGQxyoclip)cM=mnNkPX;pWoz%cITsgI6~nC)th=Lnkjc~VKI`Y?N{@O?RwUjgW* zDT^#y<-00S0bUiw?D8iqG|}(-#-})>n^W-FqnQI@^I`f-)E+-?#MJHb;1%+7z?TH| zUyGt!=n!z8U_iB*E{B93mC>Qr@`|4nGg)wA;+ZlXP!$3Y7?8VH{gB(c6zStSwkRn5`!2c%09x>aK2Xd4 zrlF@V0z@{z8MSb%2oIO*d)0I`& z^WQyFr){$W?n=Y)Fuk}iPJ&5zZcyI^0T)gHvn3${L5U0+;@OW0u(TSnkgFs#sI*6$Loj76~=>uQTIyz z1ChDP>*?tMt-5+850to+9pB(RVuMiy@QNcNQJUaV?K-x1*-bJv3!j{wNa)i9cRP#D zRWJ#O&;v}XZPPogt94pgGYFc_Io?1T@$&W__l#$o9%`*uz$W0zN>p<46CfWTbkT{p z&fGWBNr&wwSOJf*x7VD&(P?jr92XUUGytXpNFC6{g3|V(>OgllaE=v!{s8nLaJfJP z0ATIQbY_T%iM7?$O#w_`d%HlH?maOE7dx6BC#=Jy_2YBcO6E+l!E+Hng>l}S8aeR+ z><8c_f#CtbwHJuMKY}v+_0<)CZslJ%4R1Suxe5q)k)J-~C<0ViNy+|Wjq|20>t;CF z-$jdn9*zQzTrJuhT=8xh%2$yQ2A}(QTdXH4zRq{?icJhu4?2p!4F-fVyI73+aI<;~ z4$k(&@Sz9eS#^mdLH2>&>!~imq@hjrG?#JsGQr7P#-<_*fXF*IH~`H7G3nRxD-duI z5)uNl2_s6B-e`0=<+7f;+89U%F#p9CuP37mjt0vQI5{vt08U0m!rg$eQ)$Xsr}^Hj zfb&q)2Xj3;&#da^4v3SOPHklfrs8DNL1L?o*J0Wx%a;I1WocyvB4M@DdgB3`O`PQM zwdLjGwVr5TC#R;=(a}QOsHh&#tV)o@8FO=A>Y5riHQV6M1lVPBMGR@+8(-D zB)o0tS-kF=jw&i=p?_LQ;fb#7HyIOOo%2}rWi+HM;-&{o(=!J*8-yow#C5)aiPeDn zVPIl97|wuq#K#PEA=DKW^>O%+2O?hrcH?){)RXQ`cx&1ocvhFB)gWFO;Xaa@@Tzy( zP!4aNcRYg%wU{-1EvmRAoxR_@mKclEV<^MoJj(NZ+sj;hlyI>*787N4%f*^-7A{nn zl~w6+djTM<0JY|{HBA2IO*+PqCry5qL^z-spKXtAHXJsOjgQ~2h6_!U>dgMdxo@k< z-|TWzwK)-Ja;|wpo#*L1%hUMzXL#-FtG?cAs5@_b*FF2u&-{Wom&dA>gJ0j1BBZ2V 
zMA&~lZa}itK;zP`w9_;h%{W~!XqYavtQWiYt?$?X2oZcH?JZcrSVxGYE$RT^x6{&Y z1Nt$Da{*$ze`qN0VC$K1DPDy;sEuQ4BH)5@1erSOiI7sxAfm{Z&I>yhhk(yOZ(qp_Y8#m zn3N@1thHAeYM$Q?gC|=y-NYuf4?VyQ4J{6dvpZhth^1EptVs~)=1!1QL+nJUD8xcv z?|W{+4jST_?|6G|Z>}rBg08s2eNP?viA^u>IoYHA_SJ|6N;x$%iAWzDj1PbU12M_M zh~JNbBGK&ZZ9A>hGgInl3eAG4ggpGAfVn!CJeHPpf+7ueRWYG14F%QG!i$1ue+}14 z&4E?0OLc&cy&N8mQKkgy}$j6xiC2FJ8ZM-!ZiZo?mEj2!R z`1U(E8Z?Z+%C8;4U|;eTK0d(k&@GGAya^<98L3{ewXZxTM)S4|6*U|z!0#crDe}{(3EGA2P-;Xg47e(Tigr!z{Gp7Tgvf*L?%m(uYz&+PNx(?#w zSd(0>2&B+P({rue7Fka*XH-Z88bB3$g5`@Tbhos=&grzL&hLPRo)w92wP{^l9-fLy z@8^K@4C2CSJy#1*p#X}?$HN0SuN2()mr_2KpydDz=t1l@kJ=#Pj0K6Hkfs6=aw4c& z0L)(sucKzOyNj(Y>wHG~i20hh6!P}&)UC1U^Lbi`@WGQtfO`2`LBZz6#<|f^JD^|i zeGAVp;~2jrI$6x{N+%LI`VZV+39#X4x!8OUR>Dx45H~wJpTiawX}$l=_*F<;NpCgO z`ts_THQRIVN5d}w(bgRaXqrn)UqGn@L^2~O$b{jYdQiWJ)>P|wc(E8<>NA~GD^87OkfW>f!i2x5Y&JkL)m05Mq1jQsi|6}JlU4J z6EtvSStbMMyF?bpJ8(T|IW&l4dS)gNfgvIzW5x>s879z02n@7dMx#|2QADbGNcPI( zLx#}M0S^K^s*uacJ5El=#bytOu4f&8v-yO5lh{l~g>PHSq8AbpsxtV{z_FmJ1&@R= zoC1d%fK4)jEvn8iu@Wo9m~c_~f#K;02o1tIou+7ax_+rKo>fihc^3Ex@L~=+v+RA1)u?;xJ3CKaEYmD=Y$fdvYeN#j>x~m`Pr9gxQL`NPB4x%*T zCid~9Zwa16LC+DyZ<+hJ8LZ}W&;Nmq_a<;-I|#wm)j1ZPqAY?Ys5x8 z|M?ZHZr-!GWlqKbOXXk!vRT)EU&0&T#|ucIWwrHKhu@9|xGXYCbdlFwYEjM)=j)Z@@LUTvb(v{?()-m;`I zN2E10^!3pZ?`v<#9&=aIHTagq_C@^gAa-32Cl;nSng`V1mJ5NBP)$8A2XQ>Jwdg;h zwU({Dz)SF9T8(kTviQO>UB6vsEPnLT*Qm2dBNbp|)T)7vj*l9$$-jG4Jg#N~AVzZf z&8_t!{~V)J_stiC8CHqdVRIhG%IuC!ImM|QTSOv4InOn5&zt6cX zaX5oIBkf%+`k2{!YX9-PrqULV4z7PS-i{qudEQeOEwdKN2V9AFnQvwZnQSRWYYjXK z`GJQ3D?eM#pFg!F8G_)tv0Se8bsBa5^Ehs>s7E=44*8*0lN*b9;c2lk5z@|YlUNC^ z##DwJ>NI(b;L^Vx$>|GJR#)+Qtk?EoZJMnbKd_l3vI$xDnNR#W0OkSAlnEik=;ik_ z6l=N>IaM9chd%Gd+qB68bzMm`ae8_NJPUBU@o(X@pz=hl-+n!A2;@*2IRwH%yaNI< zV@vRM%q#H*c<@~dq0s)4$zh>OvUZ*xa594UELJW50cc_x#(UC7e*N_OrS=x2r7b*& z@aJ?>qA*}P0#YmRC9}CHUS(MfT8Gi&mk{i89cX@gjVO%>F?E-RI(%gz5c`PH zjOA&(T;U>O;xq4xE^y~2tf2pLz7CkW1(t0Lh#%HyiQ1%fQ+;mJ=STZT*30I0t9@P! 
zGQQ$R@8%a&z6Yj2`BLWF2YQKLGf>@T{q6m!-S#dFKCcr|z1e4=n}WSkg`JrokbFMk zkzNwejcYqgYdiAF>JHm`>v|guKx?PH>je?79ZP&y4j+*7bteG_G7rQh;pB$3fVSN} z48S#i`S+J~tZS&=;ew561r9=kglaj6E%u8KgB~d4tRvxvbB)|%Cf2d}=HAH$UYpHM7b&)zxDan*(Gj|L ze7A$(|4pXE*w{!9dWNq<7xzIZJp#8*&abD+YQ)OGFghSvuF);1;>E@7*i|LGxU++Y zr2SCp3YJ-vx0%Z>mlz~P4r$(F`Lj4Mj9jBD20B!y&U7HBGQRR@bxiT-G~*Z@vNKA^ z3xnW89>SAp^$sj%k#=n)bbCraLnA|>+DjIyo8!aTOB8xP(3<@{KF5QJp3xbfg9I>P zki=(^w~2OK!Y+=8_eGFt8EM^KpVMqStfsTKn%S^fHaI}bcTKI$COTnL|BBJ)KR<&I z3gKE&Op5A+y3)mCy)DKP4EH$ZKUiP?8!`FW1}xe$^z#9R8BI+s;8|HKO^a;uCy!9d z#KEq}ThAHfaH}p^f$uLDE5t)ke02gYc0KMz`hWbDx&~x4V=J%3wSh5Mdi7BE>Y;%1 zv8`yP`rC3=3qB7&2`w=X^YJ9#XY<7jD#=B}$c<-Ie0ia%2e?DzO;&u8Lc8u)`}WH< zY$sRXI{0B^9%quKT4ML*Oz_zj%pX5c85vfMj1hcU9CsTQ-lm)iu8HN7O$l;8iNRV~ zXpoRP$3wq_hl3ylW~#ikRN>;B$a3h}&E~dIRK!`09!#sh;@x)CmDSzlD&d<_w{_9J zpK#@M<(*U2)J}~vmXevyaZNqhC>uUn>UIm5X%tMcEL{jmZPwpy${X^f$eO!7li$pL zc!if55l32K&rbf1irluJnO<2%`6JIcwcqas+ug4p){d)MoGL5c`8}6dXt+Fc5j9V8 zJ52T9oz`x2+}y|-H{mYHm?#ypgFTj=f>*zv?c$uD)$RTk_rt0oy-*$*Tv&5v;&024 z%6l1hAH~k*;e6QQ%6xM*vo4J&rmP}h_^`4DcJc?q7{|vS8r@}-RVW+CkRcpwG@3ca zUne@j06R8HAxHkCEQ?-jl=X~$prUxVE=ta*L2wtbb}IGO`@3$oT{AH`GH%_m3U*y& z7|}@tNIphW`C(PlX({aXlIr2YM0W4W%-t-BoK z_UyFr@&(YGM@{7KzhCl~LMhnxD%IsmPbMKZJo!UzUFlVLi%9~Bc@d2F(E5d~pJ66R z1A6~0)|gcwx<;N*aq^jz5Akf98K&lTf|=TKAaJ%M?miSY z;fF=Uu2g7*LT-hA?vg|ZVhX>AKIcqia3#-&)Mw79S0 z5sxP^kE=Yo^q9~5x_L{<9)k7N=%&oAb|(2XjwsvdNkP1_+~etj@c^onBno+=i@WGg zXd?=Fy>>|)<3lqniGpUy%)F3Gt>Aj0I^f1Oh}JK{Qsl>Zi;(inIs}sDziBF>|C90% znOwRv?_t<;92tAX8G8pU=)+0S08pM=U2G^(0j?>{I}^cQ-#lFc`SYjyN#r4zeyJ{f ziCoL1jmZ$*|HJ|aCxf>KyQ7$TUgE~Ysmbfrm=|cMYtj`KmR`qkPuJC#K#|G8@>=`V zpHviOzJ~-^8k2{nvlL6u9&vEWYNHo@*ZRy@@LkL0-IgR;nEB*IT$<-0a#yo7qCr;= z`zU2Hob0C-r^ETm6|40ii^&|RQlX!p2L^5T_2rOl_s!CNM>^7Ppj1gRM1EpEfP=W> zm5%tdePMg8wK0?gHE)l&4rxMaU?Y%`)ox1t9a&|xAgx-No906?09K5T~BRL?k%oDo$7$TYn1HNqdBdm%qQ9 zMuXYSPkq-C=mRQxUg<6PXcSz=;ePq#$;vEFkl1fWrIM877|3IZerM#&CVH`Dj9=?` zKNxM|^qWTpxt!IeVds%7n$pjqnx*XeqdCgqBN<#OpHp_MS+YI1wb9z~t;5ciKzGN2 
zNfMJ<2v(aRi>#G;-L1n?$Iov2<(FWzKZ_v^k+4VAi%V72_4=3WawGiaA;DuREF}E0 z=(zpvQ05u6xiKQ-ig~fg>+!*2acn04{9HoKFcOgvhgxeBDe!VQWyOYBaUt8#_`T!q z+oo_@a?W|%o4IiyEX?1ZvS6u~GaLTcaQrR?K%f5A?JhxK9Q8_&Va!QW5n5$cHMzw< zj+fh~FewF<^IRc07J?V?r1M+oh>%cHSrT8xKb0(G%Tq&#Yl$swZRk}lC}!JjHL*px zkLssAKy6$@c*Lh->io2kEC8ea@yx|YrOGWWl}VsJ!{}N1vvgCDpec=nS&f9J3PTK9 z=?IVu@5TxS$%}9Rh`k{YHor`eVMUPR0-0^^7a32FGB89I2MNy`F`6eaa{|%-^^`aC zUxWM(kVDZuvmwFYzzRi8Ras}hPAUoc^xV>qptNwsOQRVPt6EY^;No7d#6bssff(e! zBS{2}?TH2p*lD~&o~d%C?my>U-^86~xWE({cC_Yrb^A&@y&-xav~@Ta50Z-`M|tQQ z`X&-rs_Yyguls<5H=+NkZZW5K%;P5TK+5vlfDASt4VSSSPb=1AZSM!PV)RhW=I{ATnd9Mf-*TU7#kmVe zr3_dIK#uKQ|7DZ}1Pax3Ze1BAKfH8)SC}>Umq!+P#Ha2vg28jxjMD2N&`@Arg{L@- zYxLl=*(RPddyzie>;?+Qlg)Wm3cI`ErQL7QIjrACZVe8&af?xq&bdSk;X_R`It{x! z6f|925CXF#ol!-RUj`L^P*Ew-G&|o~H2C`_&sw0K=HN%M{bZ|Iff?xVbh0qhjAg)I z0Xf-ITILJeFx|AH*dPQ|m~`7V&i~W+{(qPf|35uNx*i?^OU`V|tXa3Yo_;_6_N5Tt zp;a9yBIVcq`J_&cs}g*5jQ}%Vd?||D)4L3?-#fcC>#yMXFyHw#Qh)Ftmrl+T_mCMG z8#iVlVARaFg=rmqpUb6V&=I@Or5n&RKZR-LSIMhcY#ri2a8ZNtaDM@cisaCs%uug; zbX-bgY+`CkVq{8cqLKnR%sMA$#3u`Uj}_+7HProJl1rX%Y*NuG%?+T_60b*spI4Rc z6!JQ$(k!%}8VHfCLj*zEpcmzOt}mj4Kg{85_oBh1ZNi^9W-0$%5pYNgHfo5BnWCNmTBGyg+I{AQWH z{?C2=QlI;zWj+Nrd-8Fzb91swsKm~9PW@?eL;J z>@4>xE4_Y8$_`kLb^eLgk)U9Y#&F`As8)q$BX}cTJXJh|TE<(dxXeB7M_y@UKg*$9 zLVMfx_F$pb{&9hV{*+6HSAEJEfFigbPTD1}spCHB=45v?Nr$;y>^wx{7bMm--OgM( z?2HZ7>18zz@6I`X(=>ltGFyR}2)rGg{7oc;w$yjB3L=pfLbt79Ag`%e|C2nHQbdj$ z9~Za~){J+&bTco37~tBJ;>uV5K2Zaj!>86HG6E+r4;6+FQn$M|zanc0S5^yd6!?ww z{=!sRqSO)Vl!2UMrIl<`^o7hXD(TaM{o#;<59bhZG+~huQD1CT>%fbiEMl|NX`96tuxht$9P-ipjp-E(ssC4ZvRx(HhJdF3+6=s z{i|=T7ugC9dGgBAh^rDdLzP`aW1t?p1H25I^SR>G&u0SLvKfCr)}7U1zZM0aCZ7V> z<==lqU%N5~9N?ldynCzp4toSRx~Egn^$#P*937RCDX7gb{ z+;MOjIewRy&X*k~ro|k!b#}Oo1PGWV)`1iFn6dR9ZKhRk$%TUZP7aAR_lC!#4n9DR zN?6!%IcO+!4koU3`=~Jmu^lFD`x%JnPmkNR5ifRg*9NniXDDWUtPt{X}518)LkAJTZb~gO(=U7 zjh|6NrXBiouy>j7a*xLZpve~!OrNEfgV}18@QX@!P^am41GZ>pT2nLjx!p>eMCP9ex{91jWLd?kPY-(a;ojKwo<~&+P zK||+q!}k8bND(x&{J5us(n!aOafV>~`-NGJ#e+OLC)fP5XTlpGM$m&+!CIX6Pns<7 
zcZ?(vg_tNeJ>6ZmQjp|QDOH0DX9$U2@9qM-LP)d)ekKua6A)~TdM;@(xhKaZMn}b^ zfS*Z;QujiXjFmaLt`bs8^h)<5@RnRdgvCQ++1%6+4t{V4oJwM;1>`p0y@+;|9;1?i0?WJvCJ^qFZ66ts`dRidhH}O$ZWsv3P;=+Ilsw0tY5WEnBS$ z2K689_wvhTgL}TX-`LZOtGHbIkH&Z874q`3yk0g!n`5Sr<=>;o3_ybTuck1Nb|K(`T$c!J1 zD%pD?Mc?;fMOwr^U-NgY8g6`qvb?gUys|2}J3Y3_`a1sI;|6eV6?TBHE|RZ!Y% zt*fXBJxX z^emd@-OTgLeXRw8e!858io30ST}vHX#tbV<*DakUgSzj?gBZA*Fc@#G?W8~~UWR~f zES%@1T}$m=$(hmAg7`;QO}6_F5QtAE0)mQ$$~_%tO}6yZpt^*XRMW%^KCMQFQJYg| zy*B!w*Zy+5frg0fbrFo1gwat+O!uA|5n|*IPNaxy)T&tR|6yN4S5#O)=@i!~F)5QX z!^*JJ0j=^fJPyfM37emJEWeSkVst#skQ%jn?uVs1*#jG?JVO)xq$|?XI6ytP_%ma{ z$iVu$!#or-FCGb!4?pMl!Y-qB&q^~aeZtFUvdtjuHGk7=x=b6uUi%4t|%v-?VLRYD5?`C$RGuw%Zr zQc**e%G!MWMv#fs;E;{1&^y$jj?eSKb9q9ES+Qb|A$0dq3jsn2BK|rSs8?RbJ@2@t z>DS?%cE<4HRA~ZJECx$r>^d;$5+Y zBRBjMDtGAsJ#~N<&2qy*-%Fqah7lg8Rx zu4fW*k?VTHFSEIwJXV30Uj^)68z#dbJq=2z>&u6w^=l$l_&gg=$Kx8gvwXT_ab#Nj zU>?gjhYaPX%C; zKBDhA@t%p)*cM5nyVwR1NPmpZxaUKdcU5G65KI(j@Z~s zC?GY1HIgSrX3961C@|sD=QA?u0haNjx`0dLp`g;GcnV{v65Evv0wt1~Y~}(OfYARv zmy!%~Vk#Rddnl`dox>hAXGi9SJLit9EwNkv#MI-cGq~W}ZPGgCPIfj{X&r`X+_lZJ z28GQt6qx_=yx=`KF6u2bm$iSjNvaA<)bGvtIaT}5!JO$wR_kScij=|g03kfwc-y3+ z7uzQEHojts7?0;4`#I~C+(|Y|ea95Xz}B;{5bzyyc_Dbs&x7%VZI=oEpR#S1F9_XL zy@fk=8J`9v?!rAUZsJo+*iM6AKsnnn8FE2r(Tz_$;&Uh2v*b)%cQ;1yst>+Qyy0oW zpKt$EAbPeW#*>L~9oxYy+4mAuDcGNPtb>j^?P{?6y(@r;W~(VLrY>xMImyD8ZAbgZ{sP`Pq;dH2)Pkoy6T)1Iljb<80spuhy- zz@X7g*U%LS+Pu>N&4x*VfX6GD+q^#B_pxBt;sWWVXeaZ$Np_#iiDV-fTt^Fg%2x9`-uF$+H+>WHw1-T3r=)m4>O4+TTL}V zuV-H1q2)W$yz5GnWtxMUNKI-2;R!}*0(5oL78o0wMUOg!99SyaTPNl;UJfcj$HT;L zqMwXZ&LiG?u8Y2`;<7x<9LvDW&MOG%^Rs5Jkz7oE4INPr=W{4cgU03xPz+Z< zdO^;@(YJ~zif_%5;|wKQy-om)_xr0nM=~wo`!*Env*T;+10YT@&$60>n!hRl0JjeW` zI2~bRlo#Qj`(Ey8ZHSfZTTEInvXPl(E2(8wUWq@mpi;5`4uqOpwhUM0@V$Md>UZ$r zKdPz7KZupoMKZ;=8(Z<5>u56pu7_}w&8TKV7Lh@@gUCq^>oNY-Izoi1lXFqWbs)jL1a``aIyJLVR) zi4m>^y~g9(Zm0T&ntSS+7 zGcYUMq>aUOM;pH1ML(HHWnntI(Y{q>aH(-@RhN_CD>N#25?k1xc_}J-z#q$0a9x#> zV+%UiIuxe^*ojm$r<=6;hwVfEqE^ND1NZi412RSIF?8hCb`IG^j!!HoPiExd;m14lAE{z 
zEQMXPk3QN<`zLQHEfzL#V5>1Kb#@1D~=~o3S z=>Vo4-c5aokk1=ybYu>YPdz$P|8m|k$IZ<*I8FTYS+Uc5v`>Z?RUb&&coj3HL5O6X zc0V$Yv28LtSvsF|FG4MPV4qZ}o3kNHJQ3JxD(XlLhP!_waiRHE;$cnh;cZ&};T~?dey5CQmabq2wRiFGJg4T$FD?xG^;vr0B@2NJZxYc2 z|L*~iINtLp080Ygv9el|P$-CVDE zn#yGbEWv+*3WXXBs?iy#o~2E^81=-R_!zW-Kqd{&b|qJ<+KNJ6h8=M*o;MedsEui_IFXbVf*S1WVdy_#Oy?^`U2Qv|86Ocpd~X^R**d=y~)W?stmdd z?0g1TShYPA{(9@elpNNocWc)B^aUWA0)uf&xh%x*Y?a-2Z^}wP6gxBbo8J)~9qb@< z``6=^RAVr6^T|obH7txlIDR#!3;=DAMvqJ86I=GBs$W5J^TgjeI_Lguj6shWD@H3F z9FJ;QAmok=npObxhRaC9V#Rnq+!UgEP{(3376cYzDPq@b(`4^NMT)yQER# zLQItt^zwa`x*&cL^{;@0R_Pluu(R;^IYJL?ilxJHOdTB@D%kCWT1X%?Mg3vv}pL&4c1BSY?-YR%^qiiKV6{1SZ`ncQ#fRSfX3 zdKMYaNsp3xMG=^NF1;m&eo}4}h(NH>U{pZBg6i-^3 z-l2l<``8#1o36%sl2h}?x}TVn2SQ#UnzKi(*&sUuuNS^ouE;OsrCI6ePP^-3nx;q@ z5vWODbLuZ@^ugXX2H7WY1bpEd*>J6l_8r?#PZJJEu2r%uv7 z>9vg;t!e4C{AUaHrHFw^kU(7~fnv9P+w&tZJ7bhDy-`CC0WkOK*r*QQylF04B{>R0g+^i0-@tLaqkfUJbzSUFb%`q zm4rp3ZV_!!Q@%^qfR-$Bq(d$H(ZQ4kA-n9^7KP4t-m~RzT}+(ZMa6}wEpCE>pD6=_+tGhz4gVSro+GTP@^o>tdvJ_ZxB~@@78H$#hYWwcH%-u=(?vX$Q83 zzS2|_t;oozc)^8oq~S=61oY7v>DcN$v%I5{mli^J-K3u^a~(UZifz_eTMsx@Bj3+}3mZFBr{`r_S zmGALc{|v~GgG5?_3JY@rOMnS($KPA|qrp5>Cih|{!O3wqU65qcnpf98Y9`#=CQ||# zWM-5)cFauMrbhxhEuiNyqP;9kirH;`OL^S+;dc7+h0kfvgB;=w^Nu>Soj<@oaISFj zUHV?*?}iC}H+QPNi~zz0xlOOmT~PB0)BZ^e%UcBj3rsx-gcZi&j?F~L?^3dL#6p`q zMlOlDLa^9Ivcf^VX52?#ff>Y}Ed!^SRPTm2%}B2tqa21Lm=cj03iu(wOEXLCqOVp87!OZIKo?eS3bfT8 z0B}ShV{~2$rNY3xyONcgpX4UO)#c%1|F=9QP9!qV$S6=n*&odk8*o;@oZ|m{<;Ghb zwh_R&_|}T2Fvw!UiO+y!$L|R;A{g6lJpS7+Fe#dP!)Pau9UnIyN7Q`b@A>SU0;paT zJc5K;_sn-Tb&Q)IfA21nOe72IkzYh9E9b|LXTThUaHH)|pHz@_IpS4u{g4Wq zL-+8XKJFb>RPVFB8~%Z$(|R0HD)2C1gzYrw4XV+@n{7~!)f0H?_QE;+icF9j!eL&M-%S6DCvDI z;@c20Nu@zm{Jr93rlYN`i-XX|cWw59@;2Ef;c|W-LJ6lzCN$}lJTQ_Sk0)A4pP>f- z1pDTd5B*k6gYzXQbmc2_@ekLo!=TkEI>Q)dCnq#EU=GtdITe z??9~t@qBx0*DDTdbb6GAGP3eaa21o(|PoKx2i6c-NamIsG?PfZpORwAEs*L+<>3CnMp@cfQ z!@eK~J9~7`UGC!2Gr~lho7=zZcA=rO|2_y+R~~BX{Scl8Cu&wQ^t9NlQ2Kr2Um=-$lZ?r1?uPC1CxOHx26Xij-Y zgOPkbot#Hlb5vCGJ0)eL0#fC^s~4o8n^mg&0&MyJuC%bRH1AHXS~k`G>dHqk59((W 
z_;2?aLLV)*KIHor;kO&hrip{~Qm1s;5Jn4Fp8!>z*Yvq8o&&;`Ia+K6O1@c$PH6S# zh64}jv#e!!_e*&7&XE9C$!b<*Wlhh#MX`cDG;lpOyPO&WC`5OeEBy>mO(5FZ z(c^WlYj#40oG^bmJSG&-kNKrM=BC^OLa6{Srh7|4 zfSLAml`2=hC)EAS%*FoL;aS?IU<)mTrWu|u46sAQBqW|N&vqrLWUg93cE4<)={FMJ z_LkY?VXBBNP)i#J)D)`|l6eta6$KYc-=1_naVwM|J)IWpuyzf=HyWYzac;~U4_ zM{fPnKcZLHB_i(u#2+JAHkZ#Ij*s}p&^sAtp7uffFMfY&?GoUp0zx6!M2v7g*M|CG z0sVtZk%MM?Ru#NEE7;X?OIovtGb%#$<62eZYQR{^3fU8^{GY#+m&VT8JH2zvH)%U9 zpw}!Wi(`K-JT@DpF*sqM<^hzdvrK}$6A(%~+#1aM(z~Dm^j-PJ2@ zVP(bpL<0o(8!SR4-e<@L9uVnv{?*j<$WddD40o_q$*sc@Jy4^8?L=yOFq0*5*_Cf6XCes&u^U+X?^FI+R-?&jX(wfL zM!3>sV+IV8HQHz?`rwHJb;?~#xQ@3krAiS3=TVy+ncwy{hNc#I+_p9q7fYXS4~~C( z+JwA#^PmaqWAN0s=@NuTe1GnIAmyAN9`TcMtZRUDYYlK?q(EV_V+>229xntuLQ3<5 zL)Gg3Rae!KLOH7(MVs|v9Vz(|8gYHqp|Sqdczx)9RSg^@isaTs6|}$JtoohZGLKq! z{Uai%Ip9n`gm6o+qg|8GtE-c1vlx{}{x}9gEL($c=@EpPpg+Erf4}*)Ew@5?$Xg*( z%Eb4h@#PVMLQPg$m!Onq;?s9LP4E>!Fz2a)e%hkB^;?UVxrrT}IW_SoNcm zu`yHh$OoO!l9HU#R32P{wyo&Me56XM&;JhdX@Q|KAWXqadi=okRI7b~dn>RNm;((3_gvWfST5?l0VTyB!NeQXq) z{$~vYa=#Aq+<|+OyM@a&J^>OY-yYM9g-mfYjQGhy4f|IDV!pYSTs5(AN%fj&=Xd}m z-8ld5j`7{so*haeHeHic0ed|5Q;0hsAu#2Ny1IhkQSFGk4UT-Ob^QbpI&xdL%)%`58^zXjH{2*bRU0^Z;xoU=9PknyK>B z!6(PZb#9y1AHP;hv_!@a>g002@Wzh%Nl6|4`(jJzq$&Tir6q$VUi!j_`v>}0L_`gS zJuCft#p3jved9DckWlsR4huf6gM(=xW&MhNwmNoEgdKvL)QHRY#3()87_i9b(YV^> z=eIz6!-shwFjHaP5Ky1H=p@Ad7ygaXi3&8P<>AJn;82mM0R!OpIL*hB0JkSp^6u1+ zWY-yg1K(=Ce!QnA=;_5Yemq9<4-9y>hZlg?<_4$-rKC@6gKgR8=~JMWee)$myby%K z-RjQbbAVHKpoWvjv(tpe0MasGSC5&<=H)hTuTIU5RC$FrU(~)QSn2}|0pR;3bX(l0 zS9FmqcFPS~$hfV41GNA^--!9Cjb9QY{H7K#`iFDU8=MauwyigpX3ARZ^`3Bzn{W|I z0LJoilA_(Gti?TU3CNQG6@caC*5z+og9z!raI;Kv=;-OobQ;X+FbXN_!(IqnLL}WPugDZur2DW4!KIs#Is@qWM0HhaOTwK5( zKoEEO{q%qSt#he!SHVbb{mXowJ_R1|@_}JuVg!7%a|vjIG%JiX#2rw*2qO-05w4TALaHq)$ diff --git a/docs/server/source/production-nodes/node-components.md b/docs/server/source/production-nodes/node-components.md index e95a26b7..d7d4e85b 100644 --- a/docs/server/source/production-nodes/node-components.md +++ 
b/docs/server/source/production-nodes/node-components.md @@ -17,6 +17,6 @@ It could also include several other components, including: * Monitoring software * Maybe more -The relationship between the main components is illustrated below. Note that BigchainDB Server must be able to communicate with all other mongod instances in the BigchainDB cluster (i.e. in other BigchainDB nodes). +The relationship between the main components is illustrated below. Note that BigchainDB Server must be able to communicate with the _primary_ MongoDB instance, and any of the MongoDB instances might be the primary, so BigchainDB Server must be able to communicate with all the MongoDB instances. Also, all MongoDB instances must be able to communicate with each other. ![Components of a production node](../_static/Node-components.png) From 7c3f912fe0a7f9ea61f06d97c470b20bf094c202 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 13:56:09 +0200 Subject: [PATCH 082/150] Addressed remaining comments on PR #1386 --- docs/server/source/production-nodes/setup-run-node.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/server/source/production-nodes/setup-run-node.md b/docs/server/source/production-nodes/setup-run-node.md index c1777cff..78b4d22c 100644 --- a/docs/server/source/production-nodes/setup-run-node.md +++ b/docs/server/source/production-nodes/setup-run-node.md @@ -89,6 +89,7 @@ Note: You can use `pip3` to upgrade the `bigchaindb` package to the latest versi If you want to install BitchainDB from source because you want to use the very latest bleeding-edge code, clone the public repository: ```text git clone git@github.com:bigchaindb/bigchaindb.git +cd bigchaindb python setup.py install ``` @@ -113,16 +114,19 @@ For more information about the BigchainDB config file, see the page about the [B ## Maybe Update the MongoDB Replica Set -**If this isn't the first node in the BigchainDB cluster**, then you must add your MongoDB instance to the 
MongoDB replica set. You can do so using: +**If this isn't the first node in the BigchainDB cluster**, then someone with an existing BigchainDB node (not you) must add your MongoDB instance to the MongoDB replica set. They can do so (on their node) using: ```text bigchaindb add-replicas your-mongod-hostname:27017 ``` -where you must replace `your-mongod-hostname` with the actual hostname of your MongoDB instance, and you may have to replace `27017` with the actual port. +where they must replace `your-mongod-hostname` with the actual hostname of your MongoDB instance, and they may have to replace `27017` with the actual port. ## Start BigchainDB +**Warning: If you're not deploying the first node in the BigchainDB cluster, then don't start BigchainDB before your MongoDB instance has been added to the MongoDB replica set (as outlined above).** + ```text +# See warning above bigchaindb start ``` From ad37441115b95ae800de7edd23a6c71d826c76b8 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 14:24:11 +0200 Subject: [PATCH 083/150] docs: moved rethinkdb backup notes to appendices --- docs/server/source/appendices/index.rst | 1 + .../backup.md => appendices/rethinkdb-backup.md} | 4 ++-- docs/server/source/clusters-feds/index.rst | 1 - 3 files changed, 3 insertions(+), 3 deletions(-) rename docs/server/source/{clusters-feds/backup.md => appendices/rethinkdb-backup.md} (96%) diff --git a/docs/server/source/appendices/index.rst b/docs/server/source/appendices/index.rst index 7beb27f5..a901b58a 100755 --- a/docs/server/source/appendices/index.rst +++ b/docs/server/source/appendices/index.rst @@ -22,5 +22,6 @@ Appendices firewall-notes ntp-notes example-rethinkdb-storage-setups + rethinkdb-backup licenses install-with-lxd diff --git a/docs/server/source/clusters-feds/backup.md b/docs/server/source/appendices/rethinkdb-backup.md similarity index 96% rename from docs/server/source/clusters-feds/backup.md rename to 
docs/server/source/appendices/rethinkdb-backup.md index 5faf3465..732323ed 100644 --- a/docs/server/source/clusters-feds/backup.md +++ b/docs/server/source/appendices/rethinkdb-backup.md @@ -1,6 +1,6 @@ -# Backing Up & Restoring Data +# Backing Up and Restoring Data -There are several ways to backup and restore the data in a BigchainDB cluster. +This page was written when BigchainDB only worked with RethinkDB, so its focus is on RethinkDB-based backup. BigchainDB now supports MongoDB as a backend database and we recommend that you use MongoDB in production. Nevertheless, some of the following backup ideas are still relevant regardless of the backend database being used, so we moved this page to the Appendices. ## RethinkDB's Replication as a form of Backup diff --git a/docs/server/source/clusters-feds/index.rst b/docs/server/source/clusters-feds/index.rst index 93258057..40e3b873 100644 --- a/docs/server/source/clusters-feds/index.rst +++ b/docs/server/source/clusters-feds/index.rst @@ -5,6 +5,5 @@ Clusters :maxdepth: 1 set-up-a-cluster - backup aws-testing-cluster From 2bedc9b059a3ff7d25e574978f927c977d05f4c1 Mon Sep 17 00:00:00 2001 From: vrde Date: Wed, 12 Apr 2017 14:39:15 +0200 Subject: [PATCH 084/150] Fix typos --- tests/pipelines/test_election.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pipelines/test_election.py b/tests/pipelines/test_election.py index c3254601..f0dd232d 100644 --- a/tests/pipelines/test_election.py +++ b/tests/pipelines/test_election.py @@ -210,7 +210,7 @@ def test_handle_block_events(): assert events_queue.qsize() == 0 - # no event should be emited in case a block is undecided + # no event should be emitted in case a block is undecided e.handle_block_events({'status': Bigchain.BLOCK_UNDECIDED}, block_id) assert events_queue.qsize() == 0 @@ -219,7 +219,7 @@ def test_handle_block_events(): event = e.event_handler.get_event() assert event.type == EventTypes.BLOCK_INVALID - # put an valid block event in the 
queue + # put a valid block event in the queue e.handle_block_events({'status': Bigchain.BLOCK_VALID}, block_id) event = e.event_handler.get_event() assert event.type == EventTypes.BLOCK_VALID From ee3c7f607c26b4ebdb782097729e56e664a067ea Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 14:55:56 +0200 Subject: [PATCH 085/150] updated set-up-a-cluster.md in server docs --- .../source/clusters-feds/set-up-a-cluster.md | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/docs/server/source/clusters-feds/set-up-a-cluster.md b/docs/server/source/clusters-feds/set-up-a-cluster.md index c8193dd2..277a4656 100644 --- a/docs/server/source/clusters-feds/set-up-a-cluster.md +++ b/docs/server/source/clusters-feds/set-up-a-cluster.md @@ -3,7 +3,9 @@ This section is about how to set up a BigchainDB cluster where each node is operated by a different operator. If you want to set up and run a testing cluster on AWS (where all nodes are operated by you), then see [the section about that](aws-testing-cluster.html). -## Initial Checklist +## Initial Questions + +There are many questions that must be answered before setting up a BigchainDB cluster. For example: * Do you have a governance process for making consortium-level decisions, such as how to admit new members? * What will you store in creation transactions (data payload)? Is there a data schema? @@ -15,14 +17,16 @@ This section is about how to set up a BigchainDB cluster where each node is oper The consortium must decide some things before setting up the initial cluster (initial set of BigchainDB nodes): -1. Who will operate a node in the initial cluster? -2. What will the replication factor be? (It must be 3 or more for [RethinkDB failover](https://rethinkdb.com/docs/failover/) to work.) -3. Which node will be responsible for sending the commands to configure the RethinkDB database? +1. Who will operate each node in the initial cluster? +2. 
What will the replication factor be? (It should be 3 or more.) +3. Who will deploy the first node? +4. Who will add subsequent nodes? (It must be one of the existing nodes.) -Once those things have been decided, each node operator can begin setting up their BigchainDB (production) node. +Once those things have been decided, the cluster deployment process can begin. The process for deploying a production node is outlined in the section on production nodes. -Each node operator will eventually need two pieces of information from all other nodes: +Each BigchainDB node operator will eventually need some information from all other nodes: -1. Their RethinkDB hostname, e.g. `rdb.farm2.organization.org` -2. Their BigchainDB public key, e.g. `Eky3nkbxDTMgkmiJC8i5hKyVFiAQNmPP4a2G4JdDxJCK` +1. Their BigchainDB public key, e.g. `Eky3nkbxDTMgkmiJC8i5hKyVFiAQNmPP4a2G4JdDxJCK` +1. Their MongoDB hostname and port, e.g. `mdb.farm2.organization.org:27017` +To secure communications, more information will be needed. From 8fa6b1685ef4d41b924c30378710b194211e0a13 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 14:57:36 +0200 Subject: [PATCH 086/150] Clarify that AWS depl. scripts deploy w/ RethinkDB --- docs/server/source/clusters-feds/aws-testing-cluster.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/server/source/clusters-feds/aws-testing-cluster.md b/docs/server/source/clusters-feds/aws-testing-cluster.md index d4b4c12e..497d20a2 100644 --- a/docs/server/source/clusters-feds/aws-testing-cluster.md +++ b/docs/server/source/clusters-feds/aws-testing-cluster.md @@ -1,6 +1,6 @@ -# Deploy a Testing Cluster on AWS +# Deploy a RethinkDB-Based Testing Cluster on AWS -This section explains a way to deploy a cluster of BigchainDB nodes on Amazon Web Services (AWS) for testing purposes. +This section explains a way to deploy a _RethinkDB-based_ cluster of BigchainDB nodes on Amazon Web Services (AWS) for testing purposes. ## Why? 
From da634c3892a4dbf4d1bcc63605216667a979a8a5 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 15:10:36 +0200 Subject: [PATCH 087/150] more edits to cluster setup docs --- docs/server/source/clusters-feds/set-up-a-cluster.md | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/docs/server/source/clusters-feds/set-up-a-cluster.md b/docs/server/source/clusters-feds/set-up-a-cluster.md index 277a4656..4b02bd9f 100644 --- a/docs/server/source/clusters-feds/set-up-a-cluster.md +++ b/docs/server/source/clusters-feds/set-up-a-cluster.md @@ -19,14 +19,10 @@ The consortium must decide some things before setting up the initial cluster (in 1. Who will operate each node in the initial cluster? 2. What will the replication factor be? (It should be 3 or more.) -3. Who will deploy the first node? -4. Who will add subsequent nodes? (It must be one of the existing nodes.) +3. Who will deploy the first node, second node, etc.? -Once those things have been decided, the cluster deployment process can begin. The process for deploying a production node is outlined in the section on production nodes. +Once those things have been decided, the cluster deployment process can begin. The process for deploying a production node is outlined in [the section on production nodes](../production-nodes/index.html). -Each BigchainDB node operator will eventually need some information from all other nodes: +Every time a new BigchainDB node is added, every other node must update their [BigchainDB keyring](../server-reference/configuration.html#keyring) (one of the BigchainDB configuration settings): they must add the public key of the new node. -1. Their BigchainDB public key, e.g. `Eky3nkbxDTMgkmiJC8i5hKyVFiAQNmPP4a2G4JdDxJCK` -1. Their MongoDB hostname and port, e.g. `mdb.farm2.organization.org:27017` - -To secure communications, more information will be needed. 
+To secure communications between BigchainDB nodes, each BigchainDB node can use a firewall or similar, and doing that will require additional coordination. From b741c51dbca4b4145e224bb58f09571a8390bf6c Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 15:14:44 +0200 Subject: [PATCH 088/150] docs: added step where other nodes update their keyring --- docs/server/source/production-nodes/setup-run-node.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/server/source/production-nodes/setup-run-node.md b/docs/server/source/production-nodes/setup-run-node.md index 78b4d22c..87b7d78c 100644 --- a/docs/server/source/production-nodes/setup-run-node.md +++ b/docs/server/source/production-nodes/setup-run-node.md @@ -112,6 +112,11 @@ Edit the created config file by opening `$HOME/.bigchaindb` (the created config For more information about the BigchainDB config file, see the page about the [BigchainDB configuration settings](../server-reference/configuration.html). +## Get All Other Nodes to Update Their Keyring + +All other BigchainDB nodes in the cluster must add your new node's public key to their BigchainDB keyring. Currently, that means they must shut down BigchainDB Server and start it again. + + ## Maybe Update the MongoDB Replica Set **If this isn't the first node in the BigchainDB cluster**, then someone with an existing BigchainDB node (not you) must add your MongoDB instance to the MongoDB replica set. 
They can do so (on their node) using: From 659ff0a8136adabf591951aaef3af8613913663c Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 12 Apr 2017 15:18:08 +0200 Subject: [PATCH 089/150] docs: clarify that the keyring update process --- docs/server/source/production-nodes/setup-run-node.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/production-nodes/setup-run-node.md b/docs/server/source/production-nodes/setup-run-node.md index 87b7d78c..6e7ddbea 100644 --- a/docs/server/source/production-nodes/setup-run-node.md +++ b/docs/server/source/production-nodes/setup-run-node.md @@ -114,7 +114,7 @@ For more information about the BigchainDB config file, see the page about the [B ## Get All Other Nodes to Update Their Keyring -All other BigchainDB nodes in the cluster must add your new node's public key to their BigchainDB keyring. Currently, that means they must shut down BigchainDB Server and start it again. +All other BigchainDB nodes in the cluster must add your new node's public key to their BigchainDB keyring. Currently, the only way to get BigchainDB Server to "notice" a changed keyring is to shut it down and start it back up again (with the new keyring). 
## Maybe Update the MongoDB Replica Set From 4c9adededd558a4c4d29965e11c60b86da4bdafe Mon Sep 17 00:00:00 2001 From: vrde Date: Wed, 12 Apr 2017 15:54:11 +0200 Subject: [PATCH 090/150] Remove TODO --- bigchaindb/web/views/info.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bigchaindb/web/views/info.py b/bigchaindb/web/views/info.py index 9b084ac5..51b59643 100644 --- a/bigchaindb/web/views/info.py +++ b/bigchaindb/web/views/info.py @@ -43,7 +43,6 @@ class ApiV1Index(Resource): 'self': api_root, 'statuses': api_root + 'statuses/', 'transactions': api_root + 'transactions/', - # TODO: The version should probably not be hardcoded 'streams_v1': websocket_root, }, }) From a7ed28e539a1ff605a5f5954efeabb078eb9bf26 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Wed, 12 Apr 2017 16:12:41 +0200 Subject: [PATCH 091/150] Test command helper _run_init --- tests/commands/test_commands.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py index 6fb424d6..fa3ecf42 100644 --- a/tests/commands/test_commands.py +++ b/tests/commands/test_commands.py @@ -130,6 +130,22 @@ def test_bigchain_run_init_when_db_exists(mock_db_init_with_existing_db): run_init(args) +def test__run_init(mocker): + from bigchaindb.commands.bigchaindb import _run_init + bigchain_mock = mocker.patch( + 'bigchaindb.commands.bigchaindb.bigchaindb.Bigchain') + init_db_mock = mocker.patch( + 'bigchaindb.commands.bigchaindb.schema.init_database', + autospec=True, + spec_set=True, + ) + _run_init() + bigchain_mock.assert_called_once_with() + init_db_mock.assert_called_once_with( + connection=bigchain_mock.return_value.connection) + bigchain_mock.return_value.create_genesis_block.assert_called_once_with() + + @patch('bigchaindb.backend.schema.drop_database') def test_drop_db_when_assumed_yes(mock_db_drop): from bigchaindb.commands.bigchaindb import run_drop From 303e12ee280befb2cbe0bc707c5b62f7ef896066 Mon Sep 17 00:00:00 2001 
From: Sylvain Bellemare Date: Wed, 12 Apr 2017 16:38:18 +0200 Subject: [PATCH 092/150] Test command run_init when db already exists --- tests/commands/test_commands.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py index fa3ecf42..087e1afe 100644 --- a/tests/commands/test_commands.py +++ b/tests/commands/test_commands.py @@ -124,10 +124,23 @@ def test_bigchain_export_my_pubkey_when_pubkey_not_set(monkeypatch): "This node's public key wasn't set anywhere so it can't be exported" -def test_bigchain_run_init_when_db_exists(mock_db_init_with_existing_db): +def test_bigchain_run_init_when_db_exists(mocker, capsys): from bigchaindb.commands.bigchaindb import run_init + from bigchaindb.common.exceptions import DatabaseAlreadyExists + init_db_mock = mocker.patch( + 'bigchaindb.commands.bigchaindb.schema.init_database', + autospec=True, + spec_set=True, + ) + init_db_mock.side_effect = DatabaseAlreadyExists args = Namespace(config=None) run_init(args) + output_message = capsys.readouterr()[1] + print(output_message) + assert output_message == ( + 'The database already exists.\n' + 'If you wish to re-initialize it, first drop it.\n' + ) def test__run_init(mocker): From 414d915033c9e37476a37449a3899abc6a69ba7d Mon Sep 17 00:00:00 2001 From: vrde Date: Thu, 13 Apr 2017 08:54:34 +0200 Subject: [PATCH 093/150] Snakecaseify keys --- bigchaindb/web/websocket_server.py | 6 +++--- docs/server/source/websocket-event-stream-api.rst | 6 +++--- tests/web/test_websocket_server.py | 8 ++++---- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/bigchaindb/web/websocket_server.py b/bigchaindb/web/websocket_server.py index ae7d6da2..5507f504 100644 --- a/bigchaindb/web/websocket_server.py +++ b/bigchaindb/web/websocket_server.py @@ -109,9 +109,9 @@ class Dispatcher: for tx in block['block']['transactions']: asset_id = tx['id'] if tx['operation'] == 'CREATE' else tx['asset']['id'] 
- data = {'blockid': block['id'], - 'assetid': asset_id, - 'txid': tx['id']} + data = {'block_id': block['id'], + 'asset_id': asset_id, + 'tx_id': tx['id']} str_buffer.append(json.dumps(data)) for _, websocket in self.subscribers.items(): diff --git a/docs/server/source/websocket-event-stream-api.rst b/docs/server/source/websocket-event-stream-api.rst index 1dedc45f..3ce86553 100644 --- a/docs/server/source/websocket-event-stream-api.rst +++ b/docs/server/source/websocket-event-stream-api.rst @@ -82,9 +82,9 @@ the transaction's ID, associated asset ID, and containing block's ID. Example message:: { - "txid": "", - "assetid": "", - "blockid": "" + "tx_id": "", + "asset_id": "", + "block_id": "" } diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index 13015dbb..6484ef4e 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -183,10 +183,10 @@ def test_websocket_block_event(b, _block, test_client, loop): for tx in block['block']['transactions']: result = yield from ws.receive() json_result = json.loads(result.data) - assert json_result['txid'] == tx['id'] + assert json_result['tx_id'] == tx['id'] # Since the transactions are all CREATEs, asset id == transaction id - assert json_result['assetid'] == tx['id'] - assert json_result['blockid'] == block['id'] + assert json_result['asset_id'] == tx['id'] + assert json_result['block_id'] == block['id'] yield from event_source.put(POISON_PILL) @@ -235,4 +235,4 @@ def test_integration_from_webapi_to_websocket(monkeypatch, client, loop): result = loop.run_until_complete(ws.receive()) json_result = json.loads(result.data) - assert json_result['txid'] == tx.id + assert json_result['tx_id'] == tx.id From 93baa922c8ec2d77867c801c12023a403ba0fd96 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 09:56:03 +0200 Subject: [PATCH 094/150] Made the consensus plugin an undocumented feature --- docs/server/source/appendices/consensus.rst | 5 ----- 
docs/server/source/appendices/index.rst | 1 - .../source/server-reference/configuration.md | 15 +-------------- 3 files changed, 1 insertion(+), 20 deletions(-) delete mode 100644 docs/server/source/appendices/consensus.rst diff --git a/docs/server/source/appendices/consensus.rst b/docs/server/source/appendices/consensus.rst deleted file mode 100644 index 34c0c032..00000000 --- a/docs/server/source/appendices/consensus.rst +++ /dev/null @@ -1,5 +0,0 @@ -######### -Consensus -######### - -.. automodule:: bigchaindb.consensus diff --git a/docs/server/source/appendices/index.rst b/docs/server/source/appendices/index.rst index 7beb27f5..4cfa7ed9 100755 --- a/docs/server/source/appendices/index.rst +++ b/docs/server/source/appendices/index.rst @@ -13,7 +13,6 @@ Appendices json-serialization cryptography the-Bigchain-class - consensus pipelines backend commands diff --git a/docs/server/source/server-reference/configuration.md b/docs/server/source/server-reference/configuration.md index 4cd9e9d4..91fa4efb 100644 --- a/docs/server/source/server-reference/configuration.md +++ b/docs/server/source/server-reference/configuration.md @@ -21,7 +21,6 @@ For convenience, here's a list of all the relevant environment variables (docume `BIGCHAINDB_SERVER_THREADS`
    `BIGCHAINDB_CONFIG_PATH`
    `BIGCHAINDB_BACKLOG_REASSIGN_DELAY`
    -`BIGCHAINDB_CONSENSUS_PLUGIN`
    `BIGCHAINDB_LOG`
    `BIGCHAINDB_LOG_FILE`
    `BIGCHAINDB_LOG_LEVEL_CONSOLE`
    @@ -169,21 +168,9 @@ export BIGCHAINDB_BACKLOG_REASSIGN_DELAY=30 "backlog_reassign_delay": 120 ``` -## consensus_plugin - -The [consensus plugin](../appendices/consensus.html) to use. - -**Example using an environment variable** -```text -export BIGCHAINDB_CONSENSUS_PLUGIN=default -``` - -**Example config file snippet: the default** -```js -"consensus_plugin": "default" -``` ## log + The `log` key is expected to point to a mapping (set of key/value pairs) holding the logging configuration. From 861bfa2aab3332c8d929482ee7314af1e7ec2d50 Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Thu, 13 Apr 2017 11:38:17 +0200 Subject: [PATCH 095/150] use patch instead of subclassing --- tests/test_voting.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/test_voting.py b/tests/test_voting.py index 07a60f24..06d56de1 100644 --- a/tests/test_voting.py +++ b/tests/test_voting.py @@ -1,4 +1,5 @@ import pytest +from unittest.mock import patch from collections import Counter from bigchaindb.core import Bigchain @@ -235,11 +236,8 @@ def test_block_election(b): } +@patch('bigchaindb.voting.Voting.verify_vote_signature', return_value=True) def test_duplicate_vote_throws_critical_error(b): - class TestVoting(Voting): - @classmethod - def verify_vote_signature(cls, vote): - return True keyring = 'abc' block = {'id': 'xyz', 'block': {'voters': 'ab'}} votes = [{ @@ -247,4 +245,4 @@ def test_duplicate_vote_throws_critical_error(b): 'vote': {'is_block_valid': True, 'previous_block': 'a'} } for c in 'aabc'] with pytest.raises(CriticalDuplicateVote): - TestVoting.block_election(block, votes, keyring) + Voting.block_election(block, votes, keyring) From 2d9ce8a9632c71aeeca122cc530cdfba462886c7 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 11:41:44 +0200 Subject: [PATCH 096/150] first draft of v0.10.0 CHANGELOG.md --- CHANGELOG.md | 56 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git 
a/CHANGELOG.md b/CHANGELOG.md index 2148903b..e24fb226 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,62 @@ For reference, the possible headings are: * **External Contributors** to list contributors outside of BigchainDB GmbH. * **Notes** +## [0.10.0] - 2017-04-18 +Tag name: v0.10.0 + +### Added +* More logging. Added `--log-level` option to `bigchaindb start` command. Added new logging configuration settings. Pull Requests +[#1285](https://github.com/bigchaindb/bigchaindb/pull/1285), +[#1307](https://github.com/bigchaindb/bigchaindb/pull/1307), +[#1324](https://github.com/bigchaindb/bigchaindb/pull/1324), +[#1326](https://github.com/bigchaindb/bigchaindb/pull/1326), +[#1327](https://github.com/bigchaindb/bigchaindb/pull/1327), +[#1330](https://github.com/bigchaindb/bigchaindb/pull/1330) and +[#1365](https://github.com/bigchaindb/bigchaindb/pull/1365) +* Events API using WebSocket protocol. Pull Requests +[#1086](https://github.com/bigchaindb/bigchaindb/pull/1086), +[#1347](https://github.com/bigchaindb/bigchaindb/pull/1347), +[#1349](https://github.com/bigchaindb/bigchaindb/pull/1349), +[#1356](https://github.com/bigchaindb/bigchaindb/pull/1356) and +[#1368](https://github.com/bigchaindb/bigchaindb/pull/1368) +* Initial support for using SSL with MongoDB (work in progress). Pull Requests +[#1299](https://github.com/bigchaindb/bigchaindb/pull/1299) and +[#1348](https://github.com/bigchaindb/bigchaindb/pull/1348) + +### Changed +* The main BigchainDB Dockerfile (and its generated Docker image) now contains only BigchainDB Server. (It used to contain both BigchainDB Server and RethinkDB.) You must now run MongoDB or RethinkDB in a separate Docker container. [Pull Request #1174](https://github.com/bigchaindb/bigchaindb/pull/1174) +* Made separate schemas for CREATE and TRANSFER transactions. 
[Pull Request #1257](https://github.com/bigchaindb/bigchaindb/pull/1257) +* When signing transactions with threshold conditions, we now sign all subconditions for a public key. [Pull Request #1294](https://github.com/bigchaindb/bigchaindb/pull/1294) +* Many changes to the voting-related code, including how we validate votes and prevent duplicate votes by the same node. Pull Requests [#1215](https://github.com/bigchaindb/bigchaindb/pull/1215) and [#1258](https://github.com/bigchaindb/bigchaindb/pull/1258) + +### Removed +* Removed the `bigchaindb load` command. Pull Requests +[#1261](https://github.com/bigchaindb/bigchaindb/pull/1261), +[#1273](https://github.com/bigchaindb/bigchaindb/pull/1273) and +[#1301](https://github.com/bigchaindb/bigchaindb/pull/1301) +* Removed old `/speed-tests` and `/benchmarking-tests` directories. [Pull Request #1359](https://github.com/bigchaindb/bigchaindb/pull/1359) + +### Fixed +* Fixed the URL of the BigchainDB docs returned by the HTTP API. [Pull Request #1178](https://github.com/bigchaindb/bigchaindb/pull/1178) +* Fixed the MongoDB changefeed: it wasn't reporting update operations. [Pull Request #1193](https://github.com/bigchaindb/bigchaindb/pull/1193) +* Fixed the block-creation process: it wasn't checking if the transaction was previously included in: + * a valid block. [Pull Request #1208](https://github.com/bigchaindb/bigchaindb/pull/1208) + * the block-under-construction. 
Pull Requests [#1237](https://github.com/bigchaindb/bigchaindb/issues/1237) and [#1377](https://github.com/bigchaindb/bigchaindb/issues/1377) + +### External Contributors +* @tymlez - Pull Requests [#1108](https://github.com/bigchaindb/bigchaindb/pull/1108) & [#1209](https://github.com/bigchaindb/bigchaindb/pull/1209) +* @anryko - [Pull Request #1277](https://github.com/bigchaindb/bigchaindb/pull/1277) +* @lavinasachdev3 - [Pull Request #1358](https://github.com/bigchaindb/bigchaindb/pull/1358) +* @jackric - [Pull Request #1365](https://github.com/bigchaindb/bigchaindb/pull/1365) +* @anujism - [Pull Request #1366](https://github.com/bigchaindb/bigchaindb/pull/1366) +* @tomconte - [Pull Request #1299](https://github.com/bigchaindb/bigchaindb/pull/1299) +* @morrme - [Pull Request #1340](https://github.com/bigchaindb/bigchaindb/pull/1340) + +### Notes +* We now recommend the used of MongoDB in production, not RethinkDB. +* Initial docs about how to deploy a BigchainDB node on Kubernetes (work in progress). 
+ + ## [0.9.5] - 2017-03-29 Tag name: v0.9.5 From d937933627bc885160165cba8e7019ca54a32aa8 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 11:46:01 +0200 Subject: [PATCH 097/150] updated link to python-rapidjson repo on github --- docs/server/source/appendices/json-serialization.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/appendices/json-serialization.md b/docs/server/source/appendices/json-serialization.md index c2d03f6e..8322b2de 100644 --- a/docs/server/source/appendices/json-serialization.md +++ b/docs/server/source/appendices/json-serialization.md @@ -24,7 +24,7 @@ deserialize(serialize(data)) == data True ``` -Since BigchainDB performs a lot of serialization we decided to use [python-rapidjson](https://github.com/kenrobbins/python-rapidjson) +Since BigchainDB performs a lot of serialization we decided to use [python-rapidjson](https://github.com/python-rapidjson/python-rapidjson) which is a python wrapper for [rapidjson](https://github.com/miloyip/rapidjson) a fast and fully RFC complient JSON parser. ```python From 6a7eeec23ae0c049a20bf8a5a373e97c824bb72e Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 12:04:49 +0200 Subject: [PATCH 098/150] Updated link to AWS docs re access keys --- docs/server/source/appendices/aws-setup.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/appendices/aws-setup.md b/docs/server/source/appendices/aws-setup.md index 38ce2c1c..793f4d36 100644 --- a/docs/server/source/appendices/aws-setup.md +++ b/docs/server/source/appendices/aws-setup.md @@ -18,7 +18,7 @@ pip install awscli ## Create an AWS Access Key -The next thing you'll need is an AWS access key. If you don't have one, you can create one using the [instructions in the AWS documentation](http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSGettingStartedGuide/AWSCredentials.html). You should get an access key ID (e.g. 
AKIAIOSFODNN7EXAMPLE) and a secret access key (e.g. wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY). +The next thing you'll need is AWS access keys (access key ID and secret access key). If you don't have those, see [the AWS documentation about access keys](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys). You should also pick a default AWS region name (e.g. `eu-central-1`). That's where your cluster will run. The AWS documentation has [a list of them](http://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region). From 24f7e2662bd8e6742113cc5b08d9ae4d2465fe6f Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 12:10:36 +0200 Subject: [PATCH 099/150] fixed 2 problematic hyperlinks in run-with-docker.md --- docs/server/source/appendices/run-with-docker.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/server/source/appendices/run-with-docker.md b/docs/server/source/appendices/run-with-docker.md index 516978dd..a44da2ea 100644 --- a/docs/server/source/appendices/run-with-docker.md +++ b/docs/server/source/appendices/run-with-docker.md @@ -45,7 +45,7 @@ Let's analyze that command: `$HOME/bigchaindb_docker` to the container directory `/data`; this allows us to have the data persisted on the host machine, you can read more in the [official Docker - documentation](https://docs.docker.com/engine/tutorials/dockervolumes/#/mount-a-host-directory-as-a-data-volume) + documentation](https://docs.docker.com/engine/tutorials/dockervolumes) * `bigchaindb/bigchaindb` the image to use. All the options after the container name are passed on to the entrypoint inside the container. 
* `-y configure` execute the `configure` sub-command (of the `bigchaindb` command) inside the container, with the `-y` option to automatically use all the default config values @@ -80,9 +80,9 @@ docker run \ rethinkdb:2.3 ``` + -You can also access the RethinkDB dashboard at -[http://172.17.0.1:58080/](http://172.17.0.1:58080/) +You can also access the RethinkDB dashboard at http://172.17.0.1:58080/ #### For MongoDB From 7f8ab60d3beec6bbdc5b7e3f67389ebc2995079a Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 12:29:53 +0200 Subject: [PATCH 100/150] repaired problematic kubernetes hyperlinks --- .../cloud-deployment-templates/node-on-kubernetes.rst | 11 +++++------ .../upgrade-on-kubernetes.rst | 8 ++++---- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/docs/server/source/cloud-deployment-templates/node-on-kubernetes.rst b/docs/server/source/cloud-deployment-templates/node-on-kubernetes.rst index 6a59c750..8c38e384 100644 --- a/docs/server/source/cloud-deployment-templates/node-on-kubernetes.rst +++ b/docs/server/source/cloud-deployment-templates/node-on-kubernetes.rst @@ -157,7 +157,7 @@ Step 5: Create the Config Map - Optional This step is required only if you are planning to set up multiple `BigchainDB nodes -`_. +`_. MongoDB reads the local ``/etc/hosts`` file while bootstrapping a replica set to resolve the hostname provided to the ``rs.initiate()`` command. It needs to @@ -268,7 +268,7 @@ Step 7: Initialize a MongoDB Replica Set - Optional This step is required only if you are planning to set up multiple `BigchainDB nodes -`_. +`_. Login to the running MongoDB instance and access the mongo shell using: @@ -315,7 +315,7 @@ Step 8: Create a DNS record - Optional This step is required only if you are planning to set up multiple `BigchainDB nodes -`_. +`_. **Azure.** Select the current Azure resource group and look for the ``Public IP`` resource. 
You should see at least 2 entries there - one for the Kubernetes @@ -426,9 +426,8 @@ on the cluster and query the internal DNS and IP endpoints. $ kubectl run -it toolbox -- image --restart=Never --rm There is a generic image based on alpine:3.5 with the required utilities -hosted at Docker Hub under ``bigchaindb/toolbox``. -The corresponding Dockerfile is `here -`_. +hosted at Docker Hub under `bigchaindb/toolbox `_. +The corresponding Dockerfile is in the bigchaindb/bigchaindb repository on GitHub, at `https://github.com/bigchaindb/bigchaindb/blob/master/k8s/toolbox/Dockerfile `_. You can use it as below to get started immediately: diff --git a/docs/server/source/cloud-deployment-templates/upgrade-on-kubernetes.rst b/docs/server/source/cloud-deployment-templates/upgrade-on-kubernetes.rst index 348abf22..ba109fbe 100644 --- a/docs/server/source/cloud-deployment-templates/upgrade-on-kubernetes.rst +++ b/docs/server/source/cloud-deployment-templates/upgrade-on-kubernetes.rst @@ -53,7 +53,7 @@ on the node and mark it as unscheduleable kubectl drain $NODENAME -There are `more details in the Kubernetes docs `_, +There are `more details in the Kubernetes docs `_, including instructions to make the node scheduleable again. To manually upgrade the host OS, @@ -82,13 +82,13 @@ A typical upgrade workflow for a single Deployment would be: $ KUBE_EDITOR=nano kubectl edit deployment/ -The `kubectl edit `_ -command opens the specified editor (nano in the above example), +The ``kubectl edit`` command +opens the specified editor (nano in the above example), allowing you to edit the specified Deployment *in the Kubernetes cluster*. You can change the version tag on the Docker image, for example. Don't forget to save your edits before exiting the editor. The Kubernetes docs have more information about -`updating a Deployment `_. +`Deployments `_ (including updating them). The upgrade story for the MongoDB StatefulSet is *different*. 
From efa20aea6673309c20d1957088252baab3378155 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 12:35:30 +0200 Subject: [PATCH 101/150] fixed problematic hyperlinks in setup-run-node.md --- docs/server/source/dev-and-test/setup-run-node.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/server/source/dev-and-test/setup-run-node.md b/docs/server/source/dev-and-test/setup-run-node.md index bb7285b4..d53c2112 100644 --- a/docs/server/source/dev-and-test/setup-run-node.md +++ b/docs/server/source/dev-and-test/setup-run-node.md @@ -23,7 +23,9 @@ Start RethinkDB using: $ rethinkdb ``` -You can verify that RethinkDB is running by opening the RethinkDB web interface in your web browser. It should be at [http://localhost:8080/](http://localhost:8080/). +You can verify that RethinkDB is running by opening the RethinkDB web interface in your web browser. It should be at http://localhost:8080/ + + To run BigchainDB Server, do: ```text @@ -87,11 +89,11 @@ Start RethinkDB: docker-compose up -d rdb ``` -The RethinkDB web interface should be accessible at . +The RethinkDB web interface should be accessible at http://localhost:58080/. Depending on which platform, and/or how you are running docker, you may need to change `localhost` for the `ip` of the machine that is running docker. As a dummy example, if the `ip` of that machine was `0.0.0.0`, you would access the -web interface at: . +web interface at: http://0.0.0.0:58080/. 
Start a BigchainDB node: From 9f474f51617140141a0fe9e8bb1c6c4029270f9a Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 12:40:48 +0200 Subject: [PATCH 102/150] fixed problematic hyperlink in http-client-server-api.rst --- docs/server/source/drivers-clients/http-client-server-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/drivers-clients/http-client-server-api.rst b/docs/server/source/drivers-clients/http-client-server-api.rst index 39e4395e..6acba3d2 100644 --- a/docs/server/source/drivers-clients/http-client-server-api.rst +++ b/docs/server/source/drivers-clients/http-client-server-api.rst @@ -406,7 +406,7 @@ Determining the API Root URL When you start BigchainDB Server using ``bigchaindb start``, an HTTP API is exposed at some address. The default is: -`http://localhost:9984/api/v1/ `_ +``http://localhost:9984/api/v1/`` It's bound to ``localhost``, so you can access it from the same machine, From 333dc9bb9385a3962d7529b1f3516279c548a065 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 12:43:35 +0200 Subject: [PATCH 103/150] Updated link to Haskell transaction builder in drivers-clients/index.rst --- docs/server/source/drivers-clients/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/drivers-clients/index.rst b/docs/server/source/drivers-clients/index.rst index 704832c0..39a1cbdc 100644 --- a/docs/server/source/drivers-clients/index.rst +++ b/docs/server/source/drivers-clients/index.rst @@ -26,6 +26,6 @@ Please note that some of these projects may be work in progress, but may nevertheless be very useful. 
* `Javascript transaction builder `_ -* `Haskell transaction builder `_ +* `Haskell transaction builder `_ * `Go driver `_ * `Java driver `_ From 183edb1081fb5ddf2a852bdc1569c936dd0a413e Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Thu, 13 Apr 2017 15:23:33 +0200 Subject: [PATCH 104/150] Revised Notes in CHANGELOG.md --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e24fb226..c47dbdb3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -67,8 +67,8 @@ Tag name: v0.10.0 * @morrme - [Pull Request #1340](https://github.com/bigchaindb/bigchaindb/pull/1340) ### Notes -* We now recommend the used of MongoDB in production, not RethinkDB. -* Initial docs about how to deploy a BigchainDB node on Kubernetes (work in progress). +* MongoDB is now the recommended database backend (not RethinkDB). +* There are some initial docs about how to deploy a BigchainDB node on Kubernetes. It's work in progress. ## [0.9.5] - 2017-03-29 From df02a77788b6d879fddb0321da6dd0319e4b265c Mon Sep 17 00:00:00 2001 From: Krish Date: Thu, 13 Apr 2017 16:17:18 +0200 Subject: [PATCH 105/150] Documentation for running BigchainDB in docker containers on mac (#1265) * Documentation for running bigchaindb docker image on mac --- .../server/source/appendices/docker-on-mac.md | 101 ++++++++++++++++++ docs/server/source/appendices/index.rst | 1 + .../source/appendices/run-with-docker.md | 7 +- 3 files changed, 106 insertions(+), 3 deletions(-) create mode 100644 docs/server/source/appendices/docker-on-mac.md diff --git a/docs/server/source/appendices/docker-on-mac.md b/docs/server/source/appendices/docker-on-mac.md new file mode 100644 index 00000000..7f87540f --- /dev/null +++ b/docs/server/source/appendices/docker-on-mac.md @@ -0,0 +1,101 @@ +# Run BigchainDB with Docker On Mac + +**NOT for Production Use** + +Those developing on Mac can follow this document to run BigchainDB in docker +containers for a quick dev setup. 
+Running BigchainDB on Mac (Docker or otherwise) is not officially supported. + +Support is very much limited as there are certain things that work differently +in Docker for Mac than Docker for other platforms. +Also, we do not use mac for our development and testing. :) + +This page may not be up to date with various settings and docker updates at +all the times. + +These steps work as of this writing (2017.Mar.09) and might break in the +future with updates to Docker for mac. +Community contribution to make BigchainDB run on Docker for Mac will always be +welcome. + + +## Prerequisite + +Install Docker for Mac. + +## (Optional) For a clean start + +1. Stop all BigchainDB and RethinkDB/MongoDB containers. +2. Delete all BigchainDB docker images. +3. Delete the ~/bigchaindb_docker folder. + + +## Pull the images + +Pull the bigchaindb and other required docker images from docker hub. + +```text +docker pull bigchaindb/bigchaindb:master +docker pull [rethinkdb:2.3|mongo:3.4.1] +``` + +## Create the BigchainDB configuration file on Mac +```text +docker run \ + --rm \ + --volume $HOME/bigchaindb_docker:/data \ + bigchaindb/bigchaindb:master \ + -y configure \ + [mongodb|rethinkdb] +``` + +To ensure that BigchainDB connects to the backend database bound to the virtual +interface `172.17.0.1`, you must edit the BigchainDB configuration file +(`~/bigchaindb_docker/.bigchaindb`) and change database.host from `localhost` +to `172.17.0.1`. + + +## Run the backend database on Mac + +From v0.9 onwards, you can run RethinkDB or MongoDB. + +We use the virtual interface created by the Docker daemon to allow +communication between the BigchainDB and database containers. +It has an IP address of 172.17.0.1 by default. + +You can also use docker host networking or bind to your primary (eth) +interface, if needed. 
+ +### For RethinkDB backend +```text +docker run \ + --name=rethinkdb \ + --publish=28015:28015 \ + --publish=8080:8080 \ + --restart=always \ + --volume $HOME/bigchaindb_docker:/data \ + rethinkdb:2.3 +``` + +### For MongoDB backend +```text +docker run \ + --name=mongodb \ + --publish=27017:27017 \ + --restart=always \ + --volume=$HOME/bigchaindb_docker/db:/data/db \ + --volume=$HOME/bigchaindb_docker/configdb:/data/configdb \ + mongo:3.4.1 --replSet=bigchain-rs +``` + +### Run BigchainDB on Mac +```text +docker run \ + --name=bigchaindb \ + --publish=9984:9984 \ + --restart=always \ + --volume=$HOME/bigchaindb_docker:/data \ + bigchaindb/bigchaindb \ + start +``` + diff --git a/docs/server/source/appendices/index.rst b/docs/server/source/appendices/index.rst index e0a3ee29..1c969c05 100755 --- a/docs/server/source/appendices/index.rst +++ b/docs/server/source/appendices/index.rst @@ -10,6 +10,7 @@ Appendices install-os-level-deps install-latest-pip run-with-docker + docker-on-mac json-serialization cryptography the-Bigchain-class diff --git a/docs/server/source/appendices/run-with-docker.md b/docs/server/source/appendices/run-with-docker.md index a44da2ea..fef0e638 100644 --- a/docs/server/source/appendices/run-with-docker.md +++ b/docs/server/source/appendices/run-with-docker.md @@ -25,7 +25,7 @@ docker run \ --interactive \ --rm \ --tty \ - --volume "$HOME/bigchaindb_docker:/data" \ + --volume $HOME/bigchaindb_docker:/data \ bigchaindb/bigchaindb \ -y configure \ [mongodb|rethinkdb] @@ -76,7 +76,7 @@ docker run \ --publish=172.17.0.1:28015:28015 \ --publish=172.17.0.1:58080:8080 \ --restart=always \ - --volume "$HOME/bigchaindb_docker:/data" \ + --volume $HOME/bigchaindb_docker:/data \ rethinkdb:2.3 ``` @@ -95,7 +95,7 @@ be owned by this user in the host. If there is no owner with UID 999, you can create the corresponding user and group. 
-`groupadd -r --gid 999 mongodb && useradd -r --uid 999 -g mongodb mongodb` +`useradd -r --uid 999 mongodb` OR `groupadd -r --gid 999 mongodb && useradd -r --uid 999 -g mongodb mongodb` should work. ```text @@ -156,3 +156,4 @@ docker build --tag local-bigchaindb . ``` Now you can use your own image to run BigchainDB containers. + From 7701963f5efab6bad906760d1679084d4f461169 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Thu, 13 Apr 2017 15:25:00 +0200 Subject: [PATCH 106/150] Use rotating file handler for logging closes #1204 --- bigchaindb/log/configs.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bigchaindb/log/configs.py b/bigchaindb/log/configs.py index 9dac0dcb..ae4edfb5 100644 --- a/bigchaindb/log/configs.py +++ b/bigchaindb/log/configs.py @@ -41,9 +41,11 @@ SUBSCRIBER_LOGGING_CONFIG = { 'level': logging.INFO, }, 'file': { - 'class': 'logging.FileHandler', + 'class': 'logging.RotatingFileHandler', 'filename': join(DEFAULT_LOG_DIR, 'bigchaindb.log'), 'mode': 'w', + 'maxBytes': 209715200, + 'backupCount': 5, 'formatter': 'file', 'level': logging.INFO, }, From 28042a7e83ff81750f41cababcae0af1859802dd Mon Sep 17 00:00:00 2001 From: Krish Date: Thu, 13 Apr 2017 17:34:25 +0200 Subject: [PATCH 107/150] Add NGINX integration with 3scale on Kubernetes (#1392) * Add NGINX integration with 3scale on Kubernetes --- k8s/nginx-3scale/nginx-3scale-cm.yaml | 13 +++ k8s/nginx-3scale/nginx-3scale-dep.yaml | 96 +++++++++++++++++++++++ k8s/nginx-3scale/nginx-3scale-secret.yaml | 13 +++ k8s/nginx-3scale/nginx-3scale-svc.yaml | 29 +++++++ 4 files changed, 151 insertions(+) create mode 100644 k8s/nginx-3scale/nginx-3scale-cm.yaml create mode 100644 k8s/nginx-3scale/nginx-3scale-dep.yaml create mode 100644 k8s/nginx-3scale/nginx-3scale-secret.yaml create mode 100644 k8s/nginx-3scale/nginx-3scale-svc.yaml diff --git a/k8s/nginx-3scale/nginx-3scale-cm.yaml b/k8s/nginx-3scale/nginx-3scale-cm.yaml new file mode 100644 index 00000000..6f87b494 --- 
/dev/null +++ b/k8s/nginx-3scale/nginx-3scale-cm.yaml @@ -0,0 +1,13 @@ +############################################################################ +# This YAML file describes a ConfigMap with a valid list of ':' separated # +# IP addresses (or 'all' for all IP addresses) that can connect to the # +# MongoDB instance. We only support the value 'all' currently. # +############################################################################ + +apiVersion: v1 +kind: ConfigMap +metadata: + name: mongodb-whitelist + namespace: default +data: + allowed-hosts: "all" diff --git a/k8s/nginx-3scale/nginx-3scale-dep.yaml b/k8s/nginx-3scale/nginx-3scale-dep.yaml new file mode 100644 index 00000000..49695315 --- /dev/null +++ b/k8s/nginx-3scale/nginx-3scale-dep.yaml @@ -0,0 +1,96 @@ +############################################################### +# This config file runs nginx as a k8s deployment and exposes # +# it using an external load balancer. # +# This deployment is used as a front end to both BigchainDB # +# and MongoDB. 
# +############################################################### + +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: ngx-instance-0-dep +spec: + replicas: 1 + template: + metadata: + labels: + app: ngx-instance-0-dep + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: nginx-3scale + image: bigchaindb/nginx_3scale:0.1 + # TODO(Krish): Change later to IfNotPresent + imagePullPolicy: Always + env: + - name: MONGODB_FRONTEND_PORT + value: $(NGX_INSTANCE_0_SERVICE_PORT_NGX_PUBLIC_MDB_PORT) + - name: MONGODB_BACKEND_HOST + value: mdb-instance-0.default.svc.cluster.local + - name: MONGODB_BACKEND_PORT + value: "27017" + - name: BIGCHAINDB_FRONTEND_PORT + value: $(NGX_INSTANCE_0_SERVICE_PORT_NGX_PUBLIC_BDB_PORT) + - name: BIGCHAINDB_BACKEND_HOST + value: bdb-instance-0.default.svc.cluster.local + - name: BIGCHAINDB_BACKEND_PORT + value: "9984" + - name: MONGODB_WHITELIST + valueFrom: + configMapKeyRef: + name: mongodb-whitelist + key: allowed-hosts + - name: DNS_SERVER + value: "10.0.0.10" + - name: NGINX_HEALTH_CHECK_PORT + value: "8888" + # TODO(Krish): use secrets for sensitive info + - name: THREESCALE_SECRET_TOKEN + value: "" + - name: THREESCALE_SERVICE_ID + value: "" + - name: THREESCALE_VERSION_HEADER + value: "" + - name: THREESCALE_PROVIDER_KEY + value: "" + - name: THREESCALE_FRONTEND_API_DNS_NAME + value: "" + - name: THREESCALE_UPSTREAM_API_PORT + value: "" + ports: + - containerPort: 27017 + hostPort: 27017 + name: public-mdb-port + protocol: TCP + - containerPort: 443 + hostPort: 443 + name: public-bdb-port + protocol: TCP + - containerPort: 8888 + hostPort: 8888 + name: health-check + protocol: TCP + - containerPort: 8080 + hostPort: 8080 + name: public-api-port + protocol: TCP + volumeMounts: + - name: https + mountPath: /usr/local/openresty/nginx/conf/ssl/ + readOnly: true + resources: + limits: + cpu: 200m + memory: 768Mi + livenessProbe: + httpGet: + path: / + port: 8888 + initialDelaySeconds: 15 + timeoutSeconds: 10 
+ restartPolicy: Always + volumes: + - name: https + secret: + secretName: certs + defaultMode: 0400 diff --git a/k8s/nginx-3scale/nginx-3scale-secret.yaml b/k8s/nginx-3scale/nginx-3scale-secret.yaml new file mode 100644 index 00000000..8f725313 --- /dev/null +++ b/k8s/nginx-3scale/nginx-3scale-secret.yaml @@ -0,0 +1,13 @@ +# Certificate data should be base64 encoded before embedding them here by using +# `cat cert.pem | base64 -w 0 > cert.pem.b64` and then copy the resulting +# value here. Same goes for cert.key. +# Ref: https://kubernetes.io/docs/concepts/configuration/secret/ + +apiVersion: v1 +kind: Secret +metadata: + name: certs +type: Opaque +data: + cert.pem: + cert.key: diff --git a/k8s/nginx-3scale/nginx-3scale-svc.yaml b/k8s/nginx-3scale/nginx-3scale-svc.yaml new file mode 100644 index 00000000..db212222 --- /dev/null +++ b/k8s/nginx-3scale/nginx-3scale-svc.yaml @@ -0,0 +1,29 @@ +apiVersion: v1 +kind: Service +metadata: + name: ngx-instance-0 + namespace: default + labels: + name: ngx-instance-0 + annotations: + # NOTE: the following annotation is a beta feature and + # only available in GCE/GKE and Azure as of now + # Ref: https://kubernetes.io/docs/tutorials/services/source-ip/ + service.beta.kubernetes.io/external-traffic: OnlyLocal +spec: + selector: + app: ngx-instance-0-dep + ports: + - port: 443 + targetPort: 443 + name: ngx-public-bdb-port + protocol: TCP + - port: 8080 + targetPort: 8080 + name: ngx-public-3scale-port + protocol: TCP + - port: 27017 + targetPort: 27017 + name: ngx-public-mdb-port + protocol: TCP + type: LoadBalancer From cae017eb2289d27b9c6b201cb6066760a123b37d Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Thu, 13 Apr 2017 17:23:12 +0200 Subject: [PATCH 108/150] Fix logging file handler --- bigchaindb/log/configs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigchaindb/log/configs.py b/bigchaindb/log/configs.py index ae4edfb5..14c6e319 100644 --- a/bigchaindb/log/configs.py +++ 
b/bigchaindb/log/configs.py @@ -41,7 +41,7 @@ SUBSCRIBER_LOGGING_CONFIG = { 'level': logging.INFO, }, 'file': { - 'class': 'logging.RotatingFileHandler', + 'class': 'logging.handlers.RotatingFileHandler', 'filename': join(DEFAULT_LOG_DIR, 'bigchaindb.log'), 'mode': 'w', 'maxBytes': 209715200, From aa4d532e47230ba01b1c72910e4b1cbf9bb8dd1d Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Sun, 16 Apr 2017 21:22:12 +0200 Subject: [PATCH 109/150] added docs re enforcing max tx size with a reverse proxy --- .../source/data-models/inputs-outputs.rst | 5 +- docs/server/source/production-nodes/index.rst | 2 + .../production-nodes/reverse-proxy-notes.md | 72 +++++++++++++++++++ 3 files changed, 78 insertions(+), 1 deletion(-) create mode 100644 docs/server/source/production-nodes/reverse-proxy-notes.md diff --git a/docs/server/source/data-models/inputs-outputs.rst b/docs/server/source/data-models/inputs-outputs.rst index e81aa3b2..5ad360ec 100644 --- a/docs/server/source/data-models/inputs-outputs.rst +++ b/docs/server/source/data-models/inputs-outputs.rst @@ -26,7 +26,10 @@ When one creates a condition, one can calculate its fulfillment length (e.g. 96). The more complex the condition, the larger its fulfillment length will be. A BigchainDB federation can put an upper limit on the complexity of the conditions, either directly by setting an allowed maximum fulfillment length, -or indirectly by setting a maximum allowed transaction size which would limit +or +`indirectly `_ +by :ref:`setting a maximum allowed transaction size ` +which would limit the overall complexity accross all inputs and outputs of a transaction. If someone tries to make a condition where the output of a threshold condition feeds into the input of another “earlier” threshold condition (i.e. in a closed logical circuit), then their computer will take forever to calculate the (infinite) “condition URI”, at least in theory. 
In practice, their computer will run out of memory or their client software will timeout after a while. diff --git a/docs/server/source/production-nodes/index.rst b/docs/server/source/production-nodes/index.rst index 7b42cbaa..4a9cb15b 100644 --- a/docs/server/source/production-nodes/index.rst +++ b/docs/server/source/production-nodes/index.rst @@ -8,3 +8,5 @@ Production Nodes node-components node-requirements setup-run-node + reverse-proxy-notes + \ No newline at end of file diff --git a/docs/server/source/production-nodes/reverse-proxy-notes.md b/docs/server/source/production-nodes/reverse-proxy-notes.md new file mode 100644 index 00000000..18930942 --- /dev/null +++ b/docs/server/source/production-nodes/reverse-proxy-notes.md @@ -0,0 +1,72 @@ +# Using a Reverse Proxy + +You may want to: + +* rate limit inbound HTTP requests, +* authenticate/authorize inbound HTTP requests, +* block requests with an HTTP request body that's too large, or +* enable HTTPS (TLS) between your users and your node. + +While we could have built all that into BigchainDB Server, +we didn't, because you can do all that (and more) +using a reverse proxy such as NGINX or HAProxy. +(You would put it in front of your BigchainDB Server, +so that all inbound HTTP requests would arrive +at the reverse proxy before *maybe* being proxied +onwards to your BigchainDB Server.) +For detailed instructions, see the documentation +for your reverse proxy. + +Below, we note how a reverse proxy can be used +to do some BigchainDB-specific things. + +You may also be interested in +[our NGINX configuration file template](https://github.com/bigchaindb/nginx_3scale/blob/master/nginx.conf.template) +(open source, on GitHub). + + +## Enforcing a Max Transaction Size + +The BigchainDB HTTP API has several endpoints, +but only one of them, the `POST /transactions` endpoint, +expects a non-empty HTTP request body: +the transaction (JSON) being submitted by the user. 
+ +If you want to enforce a maximum-allowed transaction size +(discarding any that are larger), +then you can do so by configuring a maximum request body size +in your reverse proxy. +For example, NGINX has the `client_max_body_size` +configuration setting. You could set it to 15 kB +with the following line in your NGINX config file: + +```text +client_max_body_size 15k; +``` + +For more information, see +[the NGINX docs about client_max_body_size](https://nginx.org/en/docs/http/ngx_http_core_module.html#client_max_body_size). + +Note: By enforcing a maximum transaction size, you +[indirectly enforce a maximum crypto-conditions complexity](https://github.com/bigchaindb/bigchaindb/issues/356#issuecomment-288085251). + + +**Aside: Why 15 kB?** + +Both [RethinkDB](https://rethinkdb.com/limitations/) and +[MongoDB have a maximum document size of 16 MB](https://docs.mongodb.com/manual/reference/limits/#limit-bson-document-size). +In BigchainDB, the biggest documents are the blocks. +A BigchainDB block can contain up to 1000 transactions, +plus some other data (e.g. the timestamp). +If we ignore the other data as negligible relative to all the transactions, +then a block of size 16 MB +will have an average transaction size of (16 MB)/1000 = 16 kB. +Therefore by limiting the max transaction size to 15 kB, +you can be fairly sure that no blocks will ever be +bigger than 16 MB. + +Note: Technically, the documents that MongoDB stores aren't the JSON +that BigchainDB users think of; they're JSON converted to BSON. +Moreover, [one can use GridFS with MongoDB to store larger documents](https://docs.mongodb.com/manual/core/gridfs/). +Therefore the above calculation should be seen as a rough guide, +not the last word. 
From 0ec29abd24f372f81c302aecedad0437247de6ba Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Sun, 16 Apr 2017 21:57:05 +0200 Subject: [PATCH 110/150] docs: added note re only real way to limit CC complexity today --- docs/server/source/data-models/inputs-outputs.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/server/source/data-models/inputs-outputs.rst b/docs/server/source/data-models/inputs-outputs.rst index 5ad360ec..4309a4c8 100644 --- a/docs/server/source/data-models/inputs-outputs.rst +++ b/docs/server/source/data-models/inputs-outputs.rst @@ -25,12 +25,16 @@ The (single) output of a threshold condition can be used as one of the inputs of When one creates a condition, one can calculate its fulfillment length (e.g. 96). The more complex the condition, the larger its fulfillment length will be. A BigchainDB federation can put an upper limit on the complexity of the -conditions, either directly by setting an allowed maximum fulfillment length, +conditions, either directly by setting a maximum allowed fulfillment length, or `indirectly `_ by :ref:`setting a maximum allowed transaction size ` which would limit the overall complexity accross all inputs and outputs of a transaction. +Note: At the time of writing, there was no configuration setting +to set a maximum allowed fulfillment length, +so the only real option was to +:ref:`set a maximum allowed transaction size `. If someone tries to make a condition where the output of a threshold condition feeds into the input of another “earlier” threshold condition (i.e. in a closed logical circuit), then their computer will take forever to calculate the (infinite) “condition URI”, at least in theory. In practice, their computer will run out of memory or their client software will timeout after a while. 
From a65c8799dd8b6c86a352d0ec28bddd41ea4908c9 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Thu, 13 Apr 2017 16:18:02 +0200 Subject: [PATCH 111/150] Document gunicorn loglevel setting --- docs/server/source/server-reference/configuration.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/server/source/server-reference/configuration.md b/docs/server/source/server-reference/configuration.md index 91fa4efb..15726659 100644 --- a/docs/server/source/server-reference/configuration.md +++ b/docs/server/source/server-reference/configuration.md @@ -17,6 +17,7 @@ For convenience, here's a list of all the relevant environment variables (docume `BIGCHAINDB_DATABASE_NAME`
    `BIGCHAINDB_DATABASE_REPLICASET`
    `BIGCHAINDB_SERVER_BIND`
    +`BIGCHAINDB_SERVER_LOGLEVEL`
    `BIGCHAINDB_SERVER_WORKERS`
    `BIGCHAINDB_SERVER_THREADS`
    `BIGCHAINDB_CONFIG_PATH`
    @@ -121,17 +122,22 @@ If you used `bigchaindb -y configure mongodb` to create a default local config f ``` -## server.bind, server.workers & server.threads +## server.bind, server.loglevel, server.workers & server.threads These settings are for the [Gunicorn HTTP server](http://gunicorn.org/), which is used to serve the [HTTP client-server API](../drivers-clients/http-client-server-api.html). `server.bind` is where to bind the Gunicorn HTTP server socket. It's a string. It can be any valid value for [Gunicorn's bind setting](http://docs.gunicorn.org/en/stable/settings.html#bind). If you want to allow IPv4 connections from anyone, on port 9984, use '0.0.0.0:9984'. In a production setting, we recommend you use Gunicorn behind a reverse proxy server. If Gunicorn and the reverse proxy are running on the same machine, then use 'localhost:PORT' where PORT is _not_ 9984 (because the reverse proxy needs to listen on port 9984). Maybe use PORT=9983 in that case because we know 9983 isn't used. If Gunicorn and the reverse proxy are running on different machines, then use 'A.B.C.D:9984' where A.B.C.D is the IP address of the reverse proxy. There's [more information about deploying behind a reverse proxy in the Gunicorn documentation](http://docs.gunicorn.org/en/stable/deploy.html). (They call it a proxy.) +`server.loglevel` sets the log level of Gunicorn's Error log outputs. See +[Gunicorn's documentation](http://docs.gunicorn.org/en/latest/settings.html#loglevel) +for more information. + `server.workers` is [the number of worker processes](http://docs.gunicorn.org/en/stable/settings.html#workers) for handling requests. If `None` (the default), the value will be (cpu_count * 2 + 1). `server.threads` is [the number of threads-per-worker](http://docs.gunicorn.org/en/stable/settings.html#threads) for handling requests. If `None` (the default), the value will be (cpu_count * 2 + 1). 
The HTTP server will be able to handle `server.workers` * `server.threads` requests simultaneously. **Example using environment variables** ```text export BIGCHAINDB_SERVER_BIND=0.0.0.0:9984 +export BIGCHAINDB_SERVER_LOGLEVEL=debug export BIGCHAINDB_SERVER_WORKERS=5 export BIGCHAINDB_SERVER_THREADS=5 ``` @@ -140,6 +146,7 @@ export BIGCHAINDB_SERVER_THREADS=5 ```js "server": { "bind": "0.0.0.0:9984", + "loglevel": "debug", "workers": 5, "threads": 5 } @@ -149,6 +156,7 @@ export BIGCHAINDB_SERVER_THREADS=5 ```js "server": { "bind": "localhost:9984", + "loglevel": "info", "workers": null, "threads": null } From 6921b1386c5c4dbfce72bd7cf693c1d2b5a5dc8d Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 18 Apr 2017 10:53:27 +0200 Subject: [PATCH 112/150] docs: noted that rethinkdb doesn't use database.connection_timeout setting yet --- docs/server/source/server-reference/configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/server-reference/configuration.md b/docs/server/source/server-reference/configuration.md index 42f22d4e..32672129 100644 --- a/docs/server/source/server-reference/configuration.md +++ b/docs/server/source/server-reference/configuration.md @@ -97,7 +97,7 @@ The settings with names of the form `database.*` are for the database backend * `database.port` is self-explanatory. * `database.name` is a user-chosen name for the database inside RethinkDB or MongoDB, e.g. `bigchain`. * `database.replicaset` is only relevant if using MongoDB; it's the name of the MongoDB replica set, e.g. `bigchain-rs`. -* `database.connection_timeout` is the maximum number of milliseconds that BigchainDB will wait before giving up on one attempt to connect to the database backend. +* `database.connection_timeout` is the maximum number of milliseconds that BigchainDB will wait before giving up on one attempt to connect to the database backend. 
Note: At the time of writing, this setting was only used by MongoDB; there was an open [issue to make RethinkDB use it as well](https://github.com/bigchaindb/bigchaindb/issues/1337). * `database.max_tries` is the maximum number of times that BigchainDB will try to establish a connection with the database backend. If 0, then it will try forever. **Example using environment variables** From 02db6d9827679a660861e2e2434e16031f5afd84 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 18 Apr 2017 12:03:05 +0200 Subject: [PATCH 113/150] reorder extern contributors in changelog in alpha order --- CHANGELOG.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c47dbdb3..4e9e4b50 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -58,13 +58,14 @@ Tag name: v0.10.0 * the block-under-construction. Pull Requests [#1237](https://github.com/bigchaindb/bigchaindb/issues/1237) and [#1377](https://github.com/bigchaindb/bigchaindb/issues/1377) ### External Contributors -* @tymlez - Pull Requests [#1108](https://github.com/bigchaindb/bigchaindb/pull/1108) & [#1209](https://github.com/bigchaindb/bigchaindb/pull/1209) +In alphabetical order by GitHub username: * @anryko - [Pull Request #1277](https://github.com/bigchaindb/bigchaindb/pull/1277) -* @lavinasachdev3 - [Pull Request #1358](https://github.com/bigchaindb/bigchaindb/pull/1358) -* @jackric - [Pull Request #1365](https://github.com/bigchaindb/bigchaindb/pull/1365) * @anujism - [Pull Request #1366](https://github.com/bigchaindb/bigchaindb/pull/1366) -* @tomconte - [Pull Request #1299](https://github.com/bigchaindb/bigchaindb/pull/1299) +* @jackric - [Pull Request #1365](https://github.com/bigchaindb/bigchaindb/pull/1365) +* @lavinasachdev3 - [Pull Request #1358](https://github.com/bigchaindb/bigchaindb/pull/1358) * @morrme - [Pull Request #1340](https://github.com/bigchaindb/bigchaindb/pull/1340) +* @tomconte - [Pull Request 
#1299](https://github.com/bigchaindb/bigchaindb/pull/1299) +* @tymlez - Pull Requests [#1108](https://github.com/bigchaindb/bigchaindb/pull/1108) & [#1209](https://github.com/bigchaindb/bigchaindb/pull/1209) ### Notes * MongoDB is now the recommended database backend (not RethinkDB). From 5831a6233cad48b12a4afa0a31121772b215fdc4 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 18 Apr 2017 12:07:23 +0200 Subject: [PATCH 114/150] Expanded notes about logging improvements in changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4e9e4b50..453d41ed 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,7 +19,7 @@ For reference, the possible headings are: Tag name: v0.10.0 ### Added -* More logging. Added `--log-level` option to `bigchaindb start` command. Added new logging configuration settings. Pull Requests +* Improved logging. Added logging to file. Added `--log-level` option to `bigchaindb start` command. Added new logging configuration settings. 
Pull Requests [#1285](https://github.com/bigchaindb/bigchaindb/pull/1285), [#1307](https://github.com/bigchaindb/bigchaindb/pull/1307), [#1324](https://github.com/bigchaindb/bigchaindb/pull/1324), From c04a62a1e87a0e91dd692b6667fb4bbdd75df957 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 18 Apr 2017 14:43:32 +0200 Subject: [PATCH 115/150] updated firewall setup docs re/ port 9985 for WebSocket API --- docs/server/source/appendices/firewall-notes.md | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/docs/server/source/appendices/firewall-notes.md b/docs/server/source/appendices/firewall-notes.md index cd440774..b7af6c22 100644 --- a/docs/server/source/appendices/firewall-notes.md +++ b/docs/server/source/appendices/firewall-notes.md @@ -8,9 +8,10 @@ This is a page of notes on the ports potentially used by BigchainDB nodes and th Assuming you aren't exposing the RethinkDB web interface on port 8080 (or any other port, because [there are more secure ways to access it](https://www.rethinkdb.com/docs/security/#binding-the-web-interface-port)), there are only three ports that should expect unsolicited inbound traffic: 1. **Port 22** can expect inbound SSH (TCP) traffic from the node administrator (i.e. a small set of IP addresses). -2. **Port 9984** can expect inbound HTTP (TCP) traffic from BigchainDB clients sending transactions to the BigchainDB HTTP API. -3. If you're using RethinkDB, **Port 29015** can expect inbound TCP traffic from other RethinkDB nodes in the RethinkDB cluster (for RethinkDB intracluster communications). -4. If you're using MongoDB, **Port 27017** can expect inbound TCP traffic from other nodes. +1. **Port 9984** can expect inbound HTTP (TCP) traffic from BigchainDB clients sending transactions to the BigchainDB HTTP API. +1. **Port 9985** can expect inbound WebSocket traffic from BigchainDB clients. +1. 
If you're using RethinkDB, **Port 29015** can expect inbound TCP traffic from other RethinkDB nodes in the RethinkDB cluster (for RethinkDB intracluster communications). +1. If you're using MongoDB, **Port 27017** can expect inbound TCP traffic from other nodes. All other ports should only get inbound traffic in response to specific requests from inside the node. @@ -59,6 +60,11 @@ If Gunicorn and the reverse proxy are running on the same server, then you'll ha You may want to have Gunicorn and the reverse proxy running on different servers, so that both can listen on port 9984. That would also help isolate the effects of a denial-of-service attack. +## Port 9985 + +Port 9985 is the default port for the [BigchainDB WebSocket Event Stream API](../websocket-event-stream-api.html). + + ## Port 28015 Port 28015 is the default port used by RethinkDB client driver connections (TCP). If your BigchainDB node is just one server, then Port 28015 only needs to listen on localhost, because all the client drivers will be running on localhost. Port 28015 doesn't need to accept inbound traffic from the outside world. From f94a1e020c2f77ef412efa0859414bdd48112828 Mon Sep 17 00:00:00 2001 From: vrde Date: Tue, 18 Apr 2017 15:57:50 +0200 Subject: [PATCH 116/150] Dispatcher is a consumer, no capped queue needed --- bigchaindb/web/websocket_server.py | 24 ++------------ tests/web/test_websocket_server.py | 51 ------------------------------ 2 files changed, 2 insertions(+), 73 deletions(-) diff --git a/bigchaindb/web/websocket_server.py b/bigchaindb/web/websocket_server.py index 5507f504..0aa51ecb 100644 --- a/bigchaindb/web/websocket_server.py +++ b/bigchaindb/web/websocket_server.py @@ -29,26 +29,6 @@ POISON_PILL = 'POISON_PILL' EVENTS_ENDPOINT = '/api/v1/streams/valid_tx' -def _put_into_capped_queue(queue, value): - """Put a new item in a capped queue. - - If the queue reached its limit, get the first element - ready and put the new one. 
Note that the first element - will be lost (that's the purpose of a capped queue). - - Args: - queue: a queue - value: the value to put - """ - while True: - try: - queue.put_nowait(value) - except asyncio.QueueFull: - queue.get_nowait() - else: - return - - def _multiprocessing_to_asyncio(in_queue, out_queue, loop): """Bridge between a synchronous multiprocessing queue and an asynchronous asyncio queue. @@ -60,7 +40,7 @@ def _multiprocessing_to_asyncio(in_queue, out_queue, loop): while True: value = in_queue.get() - loop.call_soon_threadsafe(_put_into_capped_queue, out_queue, value) + loop.call_soon_threadsafe(out_queue.put_nowait, value) class Dispatcher: @@ -161,7 +141,7 @@ def start(sync_event_source, loop=None): if not loop: loop = asyncio.get_event_loop() - event_source = asyncio.Queue(maxsize=1024, loop=loop) + event_source = asyncio.Queue(loop=loop) bridge = threading.Thread(target=_multiprocessing_to_asyncio, args=(sync_event_source, event_source, loop), diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index 6484ef4e..f25e183f 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -3,7 +3,6 @@ import json import queue import random import threading -import time from unittest.mock import patch import pytest @@ -64,56 +63,6 @@ def test_bridge_sync_async_queue(loop): assert async_queue.qsize() == 0 -@asyncio.coroutine -def test_put_into_capped_queue(loop): - from bigchaindb.web.websocket_server import _put_into_capped_queue - q = asyncio.Queue(maxsize=2, loop=loop) - - _put_into_capped_queue(q, 'Friday') - assert q._queue[0] == 'Friday' - - _put_into_capped_queue(q, "I'm") - assert q._queue[0] == 'Friday' - assert q._queue[1] == "I'm" - - _put_into_capped_queue(q, 'in') - assert q._queue[0] == "I'm" - assert q._queue[1] == 'in' - - _put_into_capped_queue(q, 'love') - assert q._queue[0] == 'in' - assert q._queue[1] == 'love' - - -@asyncio.coroutine -def test_capped_queue(loop): - from 
bigchaindb.web.websocket_server import _multiprocessing_to_asyncio - - sync_queue = queue.Queue() - async_queue = asyncio.Queue(maxsize=2, loop=loop) - - bridge = threading.Thread(target=_multiprocessing_to_asyncio, - args=(sync_queue, async_queue, loop), - daemon=True) - bridge.start() - - sync_queue.put('we') - sync_queue.put('are') - sync_queue.put('the') - sync_queue.put('robots') - - # Wait until the thread processes all the items - time.sleep(1) - - result = yield from async_queue.get() - assert result == 'the' - - result = yield from async_queue.get() - assert result == 'robots' - - assert async_queue.qsize() == 0 - - @patch('threading.Thread') @patch('aiohttp.web.run_app') @patch('bigchaindb.web.websocket_server.init_app') From b4eb75ede5c763f797012ca04b94b92e94be8bd7 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Thu, 13 Apr 2017 15:37:18 +0200 Subject: [PATCH 117/150] Use MongoDB as default db in docker-compose file --- docker-compose.yml | 4 +-- .../source/dev-and-test/setup-run-node.md | 12 +++---- tests/README.md | 32 +++++++++---------- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index f5dbcdc9..fbd35022 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -28,7 +28,7 @@ services: - /data command: "true" - bdb: + bdb-rdb: build: context: . dockerfile: Dockerfile-dev @@ -50,7 +50,7 @@ services: - "9984" command: bigchaindb start - bdb-mdb: + bdb: build: context: . dockerfile: Dockerfile-dev diff --git a/docs/server/source/dev-and-test/setup-run-node.md b/docs/server/source/dev-and-test/setup-run-node.md index d53c2112..1b60e3c3 100644 --- a/docs/server/source/dev-and-test/setup-run-node.md +++ b/docs/server/source/dev-and-test/setup-run-node.md @@ -98,19 +98,19 @@ web interface at: http://0.0.0.0:58080/. 
Start a BigchainDB node: ```bash -docker-compose up -d bdb +docker-compose up -d bdb-rdb ``` You can monitor the logs: ```bash -docker-compose logs -f bdb +docker-compose logs -f bdb-rdb ``` If you wish to run the tests: ```bash -docker-compose run --rm bdb py.test -v -n auto +docker-compose run --rm bdb-rdb py.test -v -n auto ``` ### Docker with MongoDB @@ -130,19 +130,19 @@ $ docker-compose port mdb 27017 Start a BigchainDB node: ```bash -docker-compose up -d bdb-mdb +docker-compose up -d bdb ``` You can monitor the logs: ```bash -docker-compose logs -f bdb-mdb +docker-compose logs -f bdb ``` If you wish to run the tests: ```bash -docker-compose run --rm bdb-mdb py.test -v --database-backend=mongodb +docker-compose run --rm bdb py.test -v --database-backend=mongodb ``` ### Accessing the HTTP API diff --git a/tests/README.md b/tests/README.md index ce4ac22c..d0e2da52 100644 --- a/tests/README.md +++ b/tests/README.md @@ -68,20 +68,6 @@ The `pytest` command has many options. If you want to learn about all the things You can also use [Docker Compose](https://docs.docker.com/compose/) to run all the tests. -#### With RethinkDB as the backend - -First, start `RethinkDB` in the background: - -```text -$ docker-compose up -d rdb -``` - -then run the tests using: - -```text -$ docker-compose run --rm bdb py.test -v -``` - #### With MongoDB as the backend First, start `MongoDB` in the background: @@ -93,7 +79,7 @@ $ docker-compose up -d mdb then run the tests using: ```text -$ docker-compose run --rm bdb-mdb py.test -v +$ docker-compose run --rm bdb py.test -v ``` If you've upgraded to a newer version of BigchainDB, you might have to rebuild @@ -103,8 +89,22 @@ the images before being able to run the tests. 
Run: $ docker-compose build ``` +#### With RethinkDB as the backend + +First, start `RethinkDB` in the background: + +```text +$ docker-compose up -d rdb +``` + +then run the tests using: + +```text +$ docker-compose run --rm bdb-rdb py.test -v +``` + to rebuild all the images (usually you only need to rebuild the `bdb` and - `bdb-mdb` images). + `bdb-rdb` images). ## Automated Testing of All Pull Requests From 82a170402e826e508443d72e3bc254c71d5376e8 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 18 Apr 2017 11:57:54 +0200 Subject: [PATCH 118/150] Upgrade MongoDB in docker-compose file --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index fbd35022..322cbcf6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,7 +2,7 @@ version: '2' services: mdb: - image: mongo:3.4.1 + image: mongo:3.4.3 ports: - "27017" command: mongod --replSet=bigchain-rs From 675d011a76337a105508faaa42a1a29e71ad4cce Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 18 Apr 2017 12:02:43 +0200 Subject: [PATCH 119/150] Set error logs file handler to a rotating one --- bigchaindb/log/configs.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bigchaindb/log/configs.py b/bigchaindb/log/configs.py index 14c6e319..034256a4 100644 --- a/bigchaindb/log/configs.py +++ b/bigchaindb/log/configs.py @@ -50,11 +50,13 @@ SUBSCRIBER_LOGGING_CONFIG = { 'level': logging.INFO, }, 'errors': { - 'class': 'logging.FileHandler', + 'class': 'logging.handlers.RotatingFileHandler', 'filename': join(DEFAULT_LOG_DIR, 'bigchaindb-errors.log'), 'mode': 'w', - 'level': logging.ERROR, + 'maxBytes': 209715200, + 'backupCount': 5, 'formatter': 'file', + 'level': logging.ERROR, }, }, 'loggers': {}, From 6d4245dfd91b02382fb229400cc6a6400d341480 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 18 Apr 2017 12:05:05 +0200 Subject: [PATCH 120/150] Set error log file according to user 
given setting --- bigchaindb/log/setup.py | 6 +++++- tests/log/test_setup.py | 4 ++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/bigchaindb/log/setup.py b/bigchaindb/log/setup.py index f3e8f7a3..b6b45b00 100644 --- a/bigchaindb/log/setup.py +++ b/bigchaindb/log/setup.py @@ -49,7 +49,7 @@ def setup_logging(*, user_log_config=None): setup_sub_logger(user_log_config=user_log_config) -def create_subscriber_logging_config(*, user_log_config=None): +def create_subscriber_logging_config(*, user_log_config=None): # noqa: C901 sub_log_config = deepcopy(SUBSCRIBER_LOGGING_CONFIG) if not user_log_config: @@ -59,6 +59,10 @@ def create_subscriber_logging_config(*, user_log_config=None): filename = user_log_config['file'] sub_log_config['handlers']['file']['filename'] = filename + if 'error_file' in user_log_config: + error_filename = user_log_config['error_file'] + sub_log_config['handlers']['errors']['filename'] = error_filename + if 'level_console' in user_log_config: level = _normalize_log_level(user_log_config['level_console']) sub_log_config['handlers']['console']['level'] = level diff --git a/tests/log/test_setup.py b/tests/log/test_setup.py index 39a55995..0e608d26 100644 --- a/tests/log/test_setup.py +++ b/tests/log/test_setup.py @@ -137,6 +137,7 @@ def test_create_subscriber_logging_config_with_user_given_config(): SUBSCRIBER_LOGGING_CONFIG as expected_log_config) user_log_config = { 'file': '/var/log/bigchaindb/bdb.log', + 'error_file': '/var/log/bigchaindb/bdb-err.log', 'level_console': 'warning', 'level_logfile': 'info', 'fmt_console': '[%(levelname)s] (%(name)s) %(message)s', @@ -167,7 +168,10 @@ def test_create_subscriber_logging_config_with_user_given_config(): user_log_config['level_console'].upper()) assert (config['handlers']['file']['level'] == user_log_config['level_logfile'].upper()) + assert config['handlers']['errors']['level'] == logging.ERROR assert config['handlers']['file']['filename'] == user_log_config['file'] + assert 
(config['handlers']['errors']['filename'] == + user_log_config['error_file']) del config['handlers']['console']['level'] del config['handlers']['file']['level'] del config['handlers']['file']['filename'] From 4c0fc52e9e3a8e6f37470ce10af7670f631efdb8 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 18 Apr 2017 12:05:36 +0200 Subject: [PATCH 121/150] Document error log file setting and log rotation --- .../source/server-reference/configuration.md | 38 ++++++++++++++++--- 1 file changed, 33 insertions(+), 5 deletions(-) diff --git a/docs/server/source/server-reference/configuration.md b/docs/server/source/server-reference/configuration.md index df508326..c15fec52 100644 --- a/docs/server/source/server-reference/configuration.md +++ b/docs/server/source/server-reference/configuration.md @@ -26,6 +26,7 @@ For convenience, here's a list of all the relevant environment variables (docume `BIGCHAINDB_BACKLOG_REASSIGN_DELAY`
    `BIGCHAINDB_LOG`
    `BIGCHAINDB_LOG_FILE`
    +`BIGCHAINDB_LOG_ERROR_FILE`
    `BIGCHAINDB_LOG_LEVEL_CONSOLE`
    `BIGCHAINDB_LOG_LEVEL_LOGFILE`
    `BIGCHAINDB_LOG_DATEFMT_CONSOLE`
    @@ -205,6 +206,7 @@ holding the logging configuration. { "log": { "file": "/var/log/bigchaindb.log", + "error_file": "/var/log/bigchaindb-errors.log", "level_console": "info", "level_logfile": "info", "datefmt_console": "%Y-%m-%d %H:%M:%S", @@ -240,8 +242,8 @@ internal defaults are used, such that the actual operational default is: The next subsections explain each field of the `log` configuration. -### log.file -The full path to the file where logs should be written to. +### log.file & log.error_file +The full paths to the files where logs and error logs should be written to. **Example**: @@ -249,15 +251,41 @@ The full path to the file where logs should be written to. { "log": { "file": "/var/log/bigchaindb/bigchaindb.log" + "error_file": "/var/log/bigchaindb/bigchaindb-errors.log" } } ``` -**Defaults to**: `"~/bigchaindb.log"`. +**Defaults to**: + + * `"~/bigchaindb.log"` + * `"~/bigchaindb-errors.log"` Please note that the user running `bigchaindb` must have write access to the -location. - +locations. + +#### Log rotation + +Log files have a size limit of 200 MB and will be rotated up to five times. + +For example if we consider the log file setting: + +``` +{ + "log": { + "file": "~/bigchain.log" + } +} +``` + +logs would always be written to `bigchain.log`. Each time the file +`bigchain.log` reaches 200 MB it would be closed and renamed +`bigchain.log.1`. If `bigchain.log.1` and `bigchain.log.2` already exist they +would be renamed `bigchain.log.2` and `bigchain.log.3`. This pattern would be +applied up to `bigchain.log.5` after which `bigchain.log.5` would be +overwritten by `bigchain.log.4`, thus ending the rotation cycle of whatever +logs were in `bigchain.log.5`. + ### log.level_console The log level used to log to the console. 
Possible allowed values are the ones From b3290f12d24aed895e5e5eb1c8ab89935adbd323 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 18 Apr 2017 14:51:12 +0200 Subject: [PATCH 122/150] Apply log level cmd line option to log file --- bigchaindb/__init__.py | 26 +++++++++++++------------- bigchaindb/commands/utils.py | 5 ++++- tests/commands/test_utils.py | 3 ++- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/bigchaindb/__init__.py b/bigchaindb/__init__.py index 98e6b27b..fc8142a0 100644 --- a/bigchaindb/__init__.py +++ b/bigchaindb/__init__.py @@ -1,6 +1,9 @@ import copy +import logging import os +from bigchaindb.log.configs import SUBSCRIBER_LOGGING_CONFIG as log_config + # from functools import reduce # PORT_NUMBER = reduce(lambda x, y: x * y, map(ord, 'BigchainDB')) % 2**16 # basically, the port number is 9984 @@ -73,19 +76,16 @@ config = { 'keyring': [], 'backlog_reassign_delay': 120, 'log': { - # TODO Document here or elsewhere. - # Example of config: - # 'file': '/var/log/bigchaindb.log', - # 'level_console': 'info', - # 'level_logfile': 'info', - # 'datefmt_console': '%Y-%m-%d %H:%M:%S', - # 'datefmt_logfile': '%Y-%m-%d %H:%M:%S', - # 'fmt_console': '%(asctime)s [%(levelname)s] (%(name)s) %(message)s', - # 'fmt_logfile': '%(asctime)s [%(levelname)s] (%(name)s) %(message)s', - # 'granular_levels': { - # 'bichaindb.backend': 'info', - # 'bichaindb.core': 'info', - # }, + 'file': log_config['handlers']['file']['filename'], + 'error_file': log_config['handlers']['errors']['filename'], + 'level_console': logging.getLevelName( + log_config['handlers']['console']['level']), + 'level_logfile': logging.getLevelName( + log_config['handlers']['file']['level']), + 'datefmt_console': log_config['formatters']['console']['datefmt'], + 'datefmt_logfile': log_config['formatters']['file']['datefmt'], + 'fmt_console': log_config['formatters']['console']['format'], + 'fmt_logfile': log_config['formatters']['file']['format'], }, } diff --git 
a/bigchaindb/commands/utils.py b/bigchaindb/commands/utils.py index cd59856c..d6840d68 100644 --- a/bigchaindb/commands/utils.py +++ b/bigchaindb/commands/utils.py @@ -36,7 +36,10 @@ def configure_bigchaindb(command): def configure(args): try: config_from_cmdline = { - 'log': {'level_console': args.log_level}, + 'log': { + 'level_console': args.log_level, + 'level_logfile': args.log_level, + }, 'server': {'loglevel': args.log_level}, } except AttributeError: diff --git a/tests/commands/test_utils.py b/tests/commands/test_utils.py index 85aa8de4..d361efcb 100644 --- a/tests/commands/test_utils.py +++ b/tests/commands/test_utils.py @@ -76,7 +76,8 @@ def test_configure_bigchaindb_logging(log_level): args = Namespace(config=None, log_level=log_level) test_configure_logger(args) from bigchaindb import config - assert config['log'] == {'level_console': log_level} + assert config['log']['level_console'] == log_level + assert config['log']['level_logfile'] == log_level def test_start_raises_if_command_not_implemented(): From 4d8f9dd777c81b90824eea1647b0598c339271aa Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 18 Apr 2017 14:57:11 +0200 Subject: [PATCH 123/150] Explicitly set default log config Closes #1318 --- bigchaindb/__init__.py | 3 +++ tests/commands/rethinkdb/test_commands.py | 3 ++- tests/commands/test_commands.py | 18 ++++++++++------ tests/commands/test_utils.py | 17 +++++++-------- tests/test_config_utils.py | 26 ++++++++++++++++++++--- 5 files changed, 48 insertions(+), 19 deletions(-) diff --git a/bigchaindb/__init__.py b/bigchaindb/__init__.py index fc8142a0..c8ea8185 100644 --- a/bigchaindb/__init__.py +++ b/bigchaindb/__init__.py @@ -59,6 +59,8 @@ config = { # Note: this section supports all the Gunicorn settings: # - http://docs.gunicorn.org/en/stable/settings.html 'bind': os.environ.get('BIGCHAINDB_SERVER_BIND') or 'localhost:9984', + 'loglevel': logging.getLevelName( + log_config['handlers']['console']['level']).lower(), 'workers': None, # 
if none, the value will be cpu_count * 2 + 1 'threads': None, # if none, the value will be cpu_count * 2 + 1 }, @@ -86,6 +88,7 @@ config = { 'datefmt_logfile': log_config['formatters']['file']['datefmt'], 'fmt_console': log_config['formatters']['console']['format'], 'fmt_logfile': log_config['formatters']['file']['format'], + 'granular_levels': {}, }, } diff --git a/tests/commands/rethinkdb/test_commands.py b/tests/commands/rethinkdb/test_commands.py index 0eab914c..e40b3ff2 100644 --- a/tests/commands/rethinkdb/test_commands.py +++ b/tests/commands/rethinkdb/test_commands.py @@ -11,12 +11,13 @@ def test_bigchain_run_start_with_rethinkdb(mock_start_rethinkdb, mock_processes_start, mock_db_init_with_existing_db, mocked_setup_logging): + from bigchaindb import config from bigchaindb.commands.bigchaindb import run_start args = Namespace(start_rethinkdb=True, allow_temp_keypair=False, config=None, yes=True) run_start(args) mock_start_rethinkdb.assert_called_with() - mocked_setup_logging.assert_called_once_with(user_log_config={}) + mocked_setup_logging.assert_called_once_with(user_log_config=config['log']) @patch('subprocess.Popen') diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py index 087e1afe..37079ddd 100644 --- a/tests/commands/test_commands.py +++ b/tests/commands/test_commands.py @@ -37,10 +37,11 @@ def test_bigchain_run_start(mock_run_configure, mock_processes_start, mock_db_init_with_existing_db, mocked_setup_logging): + from bigchaindb import config from bigchaindb.commands.bigchaindb import run_start args = Namespace(start_rethinkdb=False, allow_temp_keypair=False, config=None, yes=True) run_start(args) - mocked_setup_logging.assert_called_once_with(user_log_config={}) + mocked_setup_logging.assert_called_once_with(user_log_config=config['log']) @pytest.mark.skipif(reason="BigchainDB doesn't support the automatic creation of a config file anymore") @@ -288,7 +289,8 @@ def test_allow_temp_keypair_generates_one_on_the_fly( args = 
Namespace(allow_temp_keypair=True, start_rethinkdb=False, config=None, yes=True) run_start(args) - mocked_setup_logging.assert_called_once_with(user_log_config={}) + mocked_setup_logging.assert_called_once_with( + user_log_config=bigchaindb.config['log']) assert bigchaindb.config['keypair']['private'] == 'private_key' assert bigchaindb.config['keypair']['public'] == 'public_key' @@ -313,7 +315,8 @@ def test_allow_temp_keypair_doesnt_override_if_keypair_found(mock_gen_keypair, args = Namespace(allow_temp_keypair=True, start_rethinkdb=False, config=None, yes=True) run_start(args) - mocked_setup_logging.assert_called_once_with(user_log_config={}) + mocked_setup_logging.assert_called_once_with( + user_log_config=bigchaindb.config['log']) assert bigchaindb.config['keypair']['private'] == original_private_key assert bigchaindb.config['keypair']['public'] == original_public_key @@ -322,6 +325,7 @@ def test_run_start_when_db_already_exists(mocker, monkeypatch, run_start_args, mocked_setup_logging): + from bigchaindb import config from bigchaindb.commands.bigchaindb import run_start from bigchaindb.common.exceptions import DatabaseAlreadyExists mocked_start = mocker.patch('bigchaindb.processes.start') @@ -332,7 +336,7 @@ def test_run_start_when_db_already_exists(mocker, monkeypatch.setattr( 'bigchaindb.commands.bigchaindb._run_init', mock_run_init) run_start(run_start_args) - mocked_setup_logging.assert_called_once_with(user_log_config={}) + mocked_setup_logging.assert_called_once_with(user_log_config=config['log']) assert mocked_start.called @@ -340,6 +344,7 @@ def test_run_start_when_keypair_not_found(mocker, monkeypatch, run_start_args, mocked_setup_logging): + from bigchaindb import config from bigchaindb.commands.bigchaindb import run_start from bigchaindb.commands.messages import CANNOT_START_KEYPAIR_NOT_FOUND from bigchaindb.common.exceptions import KeypairNotFoundException @@ -354,7 +359,7 @@ def test_run_start_when_keypair_not_found(mocker, with 
pytest.raises(SystemExit) as exc: run_start(run_start_args) - mocked_setup_logging.assert_called_once_with(user_log_config={}) + mocked_setup_logging.assert_called_once_with(user_log_config=config['log']) assert len(exc.value.args) == 1 assert exc.value.args[0] == CANNOT_START_KEYPAIR_NOT_FOUND assert not mocked_start.called @@ -364,6 +369,7 @@ def test_run_start_when_start_rethinkdb_fails(mocker, monkeypatch, run_start_args, mocked_setup_logging): + from bigchaindb import config from bigchaindb.commands.bigchaindb import run_start from bigchaindb.commands.messages import RETHINKDB_STARTUP_ERROR from bigchaindb.common.exceptions import StartupError @@ -380,7 +386,7 @@ def test_run_start_when_start_rethinkdb_fails(mocker, with pytest.raises(SystemExit) as exc: run_start(run_start_args) - mocked_setup_logging.assert_called_once_with(user_log_config={}) + mocked_setup_logging.assert_called_once_with(user_log_config=config['log']) assert len(exc.value.args) == 1 assert exc.value.args[0] == RETHINKDB_STARTUP_ERROR.format(err_msg) assert not mocked_start.called diff --git a/tests/commands/test_utils.py b/tests/commands/test_utils.py index d361efcb..0ddec6ef 100644 --- a/tests/commands/test_utils.py +++ b/tests/commands/test_utils.py @@ -57,17 +57,16 @@ def test_configure_bigchaindb_configures_bigchaindb(): @pytest.mark.usefixtures('ignore_local_config_file', 'reset_bigchaindb_config', 'reset_logging_config') -@pytest.mark.parametrize('log_level', ( - logging.DEBUG, - logging.INFO, - logging.WARNING, - logging.ERROR, - logging.CRITICAL, -)) +@pytest.mark.parametrize('log_level', tuple(map( + logging.getLevelName, + (logging.DEBUG, + logging.INFO, + logging.WARNING, + logging.ERROR, + logging.CRITICAL) +))) def test_configure_bigchaindb_logging(log_level): from bigchaindb.commands.utils import configure_bigchaindb - from bigchaindb import config - assert not config['log'] @configure_bigchaindb def test_configure_logger(args): diff --git a/tests/test_config_utils.py 
b/tests/test_config_utils.py index 7ee74432..f93d0bd2 100644 --- a/tests/test_config_utils.py +++ b/tests/test_config_utils.py @@ -1,4 +1,5 @@ import copy +import logging from unittest.mock import mock_open, patch import pytest @@ -147,12 +148,16 @@ def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request): WSSERVER_HOST = '1.2.3.4' WSSERVER_PORT = 57 KEYRING = 'pubkey_0:pubkey_1:pubkey_2' + LOG_FILE = '/somewhere/something.log' file_config = { 'database': { 'host': DATABASE_HOST }, - 'backlog_reassign_delay': 5 + 'backlog_reassign_delay': 5, + 'log': { + 'level_console': 'debug', + }, } monkeypatch.setattr('bigchaindb.config_utils.file_config', lambda *args, **kwargs: file_config) monkeypatch.setattr('os.environ', {'BIGCHAINDB_DATABASE_NAME': DATABASE_NAME, @@ -161,10 +166,12 @@ def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request): 'BIGCHAINDB_SERVER_BIND': SERVER_BIND, 'BIGCHAINDB_WSSERVER_HOST': WSSERVER_HOST, 'BIGCHAINDB_WSSERVER_PORT': WSSERVER_PORT, - 'BIGCHAINDB_KEYRING': KEYRING}) + 'BIGCHAINDB_KEYRING': KEYRING, + 'BIGCHAINDB_LOG_FILE': LOG_FILE}) import bigchaindb from bigchaindb import config_utils + from bigchaindb.log.configs import SUBSCRIBER_LOGGING_CONFIG as log_config config_utils.autoconfigure() database_rethinkdb = { @@ -199,6 +206,8 @@ def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request): 'CONFIGURED': True, 'server': { 'bind': SERVER_BIND, + 'loglevel': logging.getLevelName( + log_config['handlers']['console']['level']).lower(), 'workers': None, 'threads': None, }, @@ -213,7 +222,18 @@ def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request): }, 'keyring': KEYRING.split(':'), 'backlog_reassign_delay': 5, - 'log': {}, + 'log': { + 'file': LOG_FILE, + 'error_file': log_config['handlers']['errors']['filename'], + 'level_console': 'debug', + 'level_logfile': logging.getLevelName( + log_config['handlers']['file']['level']), + 'datefmt_console': 
log_config['formatters']['console']['datefmt'], + 'datefmt_logfile': log_config['formatters']['file']['datefmt'], + 'fmt_console': log_config['formatters']['console']['format'], + 'fmt_logfile': log_config['formatters']['file']['format'], + 'granular_levels': {}, + }, } From 965334e9b65e345236fd385737975b087e1f0a2b Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 18 Apr 2017 15:58:14 +0200 Subject: [PATCH 124/150] Use lower case for default log level --- bigchaindb/__init__.py | 4 ++-- tests/test_config_utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigchaindb/__init__.py b/bigchaindb/__init__.py index c8ea8185..9c981385 100644 --- a/bigchaindb/__init__.py +++ b/bigchaindb/__init__.py @@ -81,9 +81,9 @@ config = { 'file': log_config['handlers']['file']['filename'], 'error_file': log_config['handlers']['errors']['filename'], 'level_console': logging.getLevelName( - log_config['handlers']['console']['level']), + log_config['handlers']['console']['level']).lower(), 'level_logfile': logging.getLevelName( - log_config['handlers']['file']['level']), + log_config['handlers']['file']['level']).lower(), 'datefmt_console': log_config['formatters']['console']['datefmt'], 'datefmt_logfile': log_config['formatters']['file']['datefmt'], 'fmt_console': log_config['formatters']['console']['format'], diff --git a/tests/test_config_utils.py b/tests/test_config_utils.py index f93d0bd2..bb445d83 100644 --- a/tests/test_config_utils.py +++ b/tests/test_config_utils.py @@ -227,7 +227,7 @@ def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request): 'error_file': log_config['handlers']['errors']['filename'], 'level_console': 'debug', 'level_logfile': logging.getLevelName( - log_config['handlers']['file']['level']), + log_config['handlers']['file']['level']).lower(), 'datefmt_console': log_config['formatters']['console']['datefmt'], 'datefmt_logfile': log_config['formatters']['file']['datefmt'], 'fmt_console': 
log_config['formatters']['console']['format'], From 8206a4a18c8aa98bdffd06a48951f54d524cba94 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Tue, 18 Apr 2017 15:59:31 +0200 Subject: [PATCH 125/150] Update docs --- docs/server/source/server-reference/configuration.md | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/docs/server/source/server-reference/configuration.md b/docs/server/source/server-reference/configuration.md index c15fec52..6f3c8878 100644 --- a/docs/server/source/server-reference/configuration.md +++ b/docs/server/source/server-reference/configuration.md @@ -220,21 +220,19 @@ holding the logging configuration. } ``` -**Defaults to**: `"{}"`. - -Please note that although the default is `"{}"` as per the configuration file, -internal defaults are used, such that the actual operational default is: +**Defaults to**: ``` { "log": { "file": "~/bigchaindb.log", + "error_file": "~/bigchaindb-errors.log", "level_console": "info", "level_logfile": "info", "datefmt_console": "%Y-%m-%d %H:%M:%S", "datefmt_logfile": "%Y-%m-%d %H:%M:%S", - "fmt_console": "%(asctime)s [%(levelname)s] (%(name)s) %(message)s", - "fmt_logfile": "%(asctime)s [%(levelname)s] (%(name)s) %(message)s", + "fmt_logfile": "[%(asctime)s] [%(levelname)s] (%(name)s) %(message)s (%(processName)-10s - pid: %(process)d)", + "fmt_console": "[%(asctime)s] [%(levelname)s] (%(name)s) %(message)s (%(processName)-10s - pid: %(process)d)", "granular_levels": {} } ``` From e5eb49952950decacffdc50639adef4fe3bead23 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 18 Apr 2017 16:22:17 +0200 Subject: [PATCH 126/150] more updates to changelog for 0.10.0 --- CHANGELOG.md | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 453d41ed..4ada804c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,14 +25,20 @@ Tag name: v0.10.0 [#1324](https://github.com/bigchaindb/bigchaindb/pull/1324), 
[#1326](https://github.com/bigchaindb/bigchaindb/pull/1326), [#1327](https://github.com/bigchaindb/bigchaindb/pull/1327), -[#1330](https://github.com/bigchaindb/bigchaindb/pull/1330) and -[#1365](https://github.com/bigchaindb/bigchaindb/pull/1365) +[#1330](https://github.com/bigchaindb/bigchaindb/pull/1330), +[#1365](https://github.com/bigchaindb/bigchaindb/pull/1365), +[#1394](https://github.com/bigchaindb/bigchaindb/pull/1394), +[#1396](https://github.com/bigchaindb/bigchaindb/pull/1396), +[#1398](https://github.com/bigchaindb/bigchaindb/pull/1398) and +[#1402](https://github.com/bigchaindb/bigchaindb/pull/1402) * Events API using WebSocket protocol. Pull Requests [#1086](https://github.com/bigchaindb/bigchaindb/pull/1086), [#1347](https://github.com/bigchaindb/bigchaindb/pull/1347), [#1349](https://github.com/bigchaindb/bigchaindb/pull/1349), -[#1356](https://github.com/bigchaindb/bigchaindb/pull/1356) and -[#1368](https://github.com/bigchaindb/bigchaindb/pull/1368) +[#1356](https://github.com/bigchaindb/bigchaindb/pull/1356), +[#1368](https://github.com/bigchaindb/bigchaindb/pull/1368), +[#1401](https://github.com/bigchaindb/bigchaindb/pull/1401) and +[#1403](https://github.com/bigchaindb/bigchaindb/pull/1403) * Initial support for using SSL with MongoDB (work in progress). 
Pull Requests [#1299](https://github.com/bigchaindb/bigchaindb/pull/1299) and [#1348](https://github.com/bigchaindb/bigchaindb/pull/1348) From 44aa52b71a53e1aca058f13754ba18840278d55a Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 18 Apr 2017 16:55:57 +0200 Subject: [PATCH 127/150] updated version.py for the 0.10.0 release --- bigchaindb/version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigchaindb/version.py b/bigchaindb/version.py index c59a3cbe..2b3026d5 100644 --- a/bigchaindb/version.py +++ b/bigchaindb/version.py @@ -1,2 +1,2 @@ -__version__ = '0.10.0.dev' -__short_version__ = '0.10.dev' +__version__ = '0.10.0' +__short_version__ = '0.10' From 3c9753221939cd7190775ab9b9c7cde8ef2ce475 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 18 Apr 2017 17:35:12 +0200 Subject: [PATCH 128/150] Update Makefile so 'release' depends on 'dist' --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 37bf6db8..a3012a03 100644 --- a/Makefile +++ b/Makefile @@ -70,7 +70,7 @@ docs: ## generate Sphinx HTML documentation, including API docs servedocs: docs ## compile the docs watching for changes watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 
-release: clean ## package and upload a release +release: dist ## package and upload a release twine upload dist/* dist: clean ## builds source (and not for now, wheel package) From 9e52537cd3e5f7043cbd35f9af86f414119bd220 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Tue, 18 Apr 2017 17:45:18 +0200 Subject: [PATCH 129/150] updated version in version.py to 0.11.0.dev --- bigchaindb/version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigchaindb/version.py b/bigchaindb/version.py index 2b3026d5..6bf027a0 100644 --- a/bigchaindb/version.py +++ b/bigchaindb/version.py @@ -1,2 +1,2 @@ -__version__ = '0.10.0' -__short_version__ = '0.10' +__version__ = '0.11.0.dev' +__short_version__ = '0.11.dev' From 411e682a6c62a0ee2eb300836026bf6e9bbe8552 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 19 Apr 2017 11:21:59 +0200 Subject: [PATCH 130/150] added docs for wssserver.host and .port --- .../source/server-reference/configuration.md | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/docs/server/source/server-reference/configuration.md b/docs/server/source/server-reference/configuration.md index 6f3c8878..053ed68b 100644 --- a/docs/server/source/server-reference/configuration.md +++ b/docs/server/source/server-reference/configuration.md @@ -22,6 +22,8 @@ For convenience, here's a list of all the relevant environment variables (docume `BIGCHAINDB_SERVER_LOGLEVEL`
    `BIGCHAINDB_SERVER_WORKERS`
    `BIGCHAINDB_SERVER_THREADS`
    +`BIGCHAINDB_WSSERVER_HOST`
    +`BIGCHAINDB_WSSERVER_PORT`
    `BIGCHAINDB_CONFIG_PATH`
    `BIGCHAINDB_BACKLOG_REASSIGN_DELAY`
    `BIGCHAINDB_LOG`
    @@ -180,6 +182,40 @@ export BIGCHAINDB_SERVER_THREADS=5 } ``` + +## wsserver.host and wsserver.port + +These settings are for the +[aiohttp server](https://aiohttp.readthedocs.io/en/stable/index.html), +which is used to serve the +[WebSocket Event Stream API](../websocket-event-stream-api.html). +`wsserver.host` is where to bind the aiohttp server socket and +`wsserver.port` is the corresponding port. +If you want to allow connections from anyone, on port 9985, +set `wsserver.host` to 0.0.0.0 and `wsserver.port` to 9985. + +**Example using environment variables** +```text +export BIGCHAINDB_WSSERVER_HOST=0.0.0.0 +export BIGCHAINDB_WSSERVER_PORT=9985 +``` + +**Example config file snippet** +```js +"wsserver": { + "host": "0.0.0.0", + "port": 65000 +} +``` + +**Default values (from a config file)** +```js +"wsserver": { + "host": "localhost", + "port": 9985 +} +``` + ## backlog_reassign_delay Specifies how long, in seconds, transactions can remain in the backlog before being reassigned. Long-waiting transactions must be reassigned because the assigned node may no longer be responsive. The default duration is 120 seconds. From ecbcf68de7ceefad6814679fa8004c6531074620 Mon Sep 17 00:00:00 2001 From: Krish Date: Wed, 19 Apr 2017 16:07:44 +0200 Subject: [PATCH 131/150] Refactor Dockerfile post `locale` errors (#1410) * Basing BigchainDB docker image from `python:3.6` in `Dockerfile` and `Dockerfile-dev`. * Added ENV var for websocket interface * Optimization to Dockerfile-dev to save build time during dev/test. Pre-installing the `pynacl` dependeny in separate command so that devs do not need to wait while testing. The trade-off is that devs will need to clear (`rmi`) the image to start from scratch. 
--- Dockerfile | 39 ++++++++------------------------------- Dockerfile-dev | 24 ++++++++++++++++-------- 2 files changed, 24 insertions(+), 39 deletions(-) diff --git a/Dockerfile b/Dockerfile index 021f6772..cdcb4289 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,40 +1,17 @@ -FROM ubuntu:xenial - -ENV LANG en_US.UTF-8 -ENV DEBIAN_FRONTEND noninteractive - +FROM python:3.6 +LABEL maintainer "dev@bigchaindb.com" RUN mkdir -p /usr/src/app COPY . /usr/src/app/ WORKDIR /usr/src/app - -RUN locale-gen en_US.UTF-8 && \ - apt-get -q update && \ - apt-get install -qy --no-install-recommends \ - python3 \ - python3-pip \ - libffi-dev \ - python3-dev \ - build-essential && \ - \ - pip3 install --upgrade --no-cache-dir pip setuptools && \ - \ - pip3 install --no-cache-dir -e . && \ - \ - apt-get remove -qy --purge gcc cpp binutils perl && \ - apt-get -qy autoremove && \ - apt-get -q clean all && \ - rm -rf /usr/share/perl /usr/share/perl5 /usr/share/man /usr/share/info /usr/share/doc && \ - rm -rf /var/lib/apt/lists/* - +RUN apt-get -qq update \ + && apt-get -y upgrade \ + && pip install --no-cache-dir -e . \ + && apt-get autoremove \ + && apt-get clean VOLUME ["/data"] WORKDIR /data - ENV BIGCHAINDB_CONFIG_PATH /data/.bigchaindb ENV BIGCHAINDB_SERVER_BIND 0.0.0.0:9984 -# BigchainDB Server doesn't need BIGCHAINDB_API_ENDPOINT any more -# but maybe our Docker or Docker Compose stuff does? 
-# ENV BIGCHAINDB_API_ENDPOINT http://bigchaindb:9984/api/v1 - +ENV BIGCHAINDB_WSSERVER_HOST 0.0.0.0 ENTRYPOINT ["bigchaindb"] - CMD ["start"] diff --git a/Dockerfile-dev b/Dockerfile-dev index 2ae4e2ba..17c8b073 100644 --- a/Dockerfile-dev +++ b/Dockerfile-dev @@ -1,13 +1,21 @@ -FROM python:3.5 +FROM python:3.6 +LABEL maintainer "dev@bigchaindb.com" -RUN apt-get update && apt-get install -y python3.4 vim +RUN apt-get update \ + && apt-get install -y vim \ + && pip install pynacl \ + && apt-get autoremove \ + && apt-get clean + +VOLUME ["/data"] +WORKDIR /data + +ENV BIGCHAINDB_CONFIG_PATH /data/.bigchaindb +ENV BIGCHAINDB_SERVER_BIND 0.0.0.0:9984 +ENV BIGCHAINDB_WSSERVER_HOST 0.0.0.0 RUN mkdir -p /usr/src/app -WORKDIR /usr/src/app - -RUN pip install --upgrade pip - COPY . /usr/src/app/ - +WORKDIR /usr/src/app RUN pip install --no-cache-dir -e .[dev] -RUN bigchaindb -y configure rethinkdb +RUN bigchaindb -y configure mongodb From e97ce7683676bc36095d93cbeee32ab70f938b80 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 19 Apr 2017 16:11:11 +0200 Subject: [PATCH 132/150] Updated CHANGELOG.md for v0.10.1 --- CHANGELOG.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4ada804c..3db903e3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,16 @@ For reference, the possible headings are: * **External Contributors** to list contributors outside of BigchainDB GmbH. * **Notes** +## [0.10.1] - 2017-04-19 +Tag name: v0.10.1 + +## Added +* Documentation for the BigchainDB settings `wsserver.host` and `wsserver.port`. [Pull Request #1408](https://github.com/bigchaindb/bigchaindb/pull/1408) + +## Fixed +* Fixed `Dockerfile`, which was failing to build. It now starts `FROM python:3.6` (instead of `FROM ubuntu:xenial`). [Pull Request #1410](https://github.com/bigchaindb/bigchaindb/pull/1410) +* Fixed the `Makefile` so that `release` depends on `dist`. 
[Pull Request #1405](https://github.com/bigchaindb/bigchaindb/pull/1405) + ## [0.10.0] - 2017-04-18 Tag name: v0.10.0 From 4f5ec32d2af65aac4bfe7d865c045ab38a63453e Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 19 Apr 2017 16:52:03 +0200 Subject: [PATCH 133/150] minor addition to Release_Process.md --- Release_Process.md | 1 + 1 file changed, 1 insertion(+) diff --git a/Release_Process.md b/Release_Process.md index 22572837..e4a988a1 100644 --- a/Release_Process.md +++ b/Release_Process.md @@ -27,6 +27,7 @@ A patch release is similar to a minor release, but piggybacks on an existing min 1. Apply the changes you want, e.g. using `git cherry-pick`. 1. Update the `CHANGELOG.md` file 1. Increment the patch version in `bigchaindb/version.py`, e.g. "0.9.1" +1. Commit that change, and push the updated branch to GitHub 1. Follow steps outlined in [Common Steps](#common-steps) 1. Cherry-pick the `CHANGELOG.md` update commit (made above) to the `master` branch From f2e14fb73cc680744699300bb74a20bbff85bb0a Mon Sep 17 00:00:00 2001 From: Krish Date: Thu, 20 Apr 2017 10:38:06 +0200 Subject: [PATCH 134/150] Remove `editable` flag in Dockerfile (#1416) From `pip install --help`: ``` -e, --editable Install a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url. ``` --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index cdcb4289..807761fe 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,7 +5,7 @@ COPY . /usr/src/app/ WORKDIR /usr/src/app RUN apt-get -qq update \ && apt-get -y upgrade \ - && pip install --no-cache-dir -e . \ + && pip install --no-cache-dir . 
\ && apt-get autoremove \ && apt-get clean VOLUME ["/data"] From cc66d5aaa5e83e48746bc17ceccb7d2f633f03d8 Mon Sep 17 00:00:00 2001 From: Krish Date: Fri, 21 Apr 2017 14:41:12 +0200 Subject: [PATCH 135/150] Single node setup (#1418) * Add more tools to the toolbox container * Add mongodb monitoring agent * Add a bigchaindb/mongodb-monitoring-agent container that includes the monitoring agent. * It makes use of an api key provided by MongoDB Cloud Manager. This is included in the configuration/config-map.yaml file. * Changes to mongodb StatefulSet configuration Changes to bump up mongodb version to v3.4.3. Add configuration settings for mongodb instance name in ConfigMap. Split the mongodb service to a new configuration file. * Modify bigchaindb deployment config * Bugfix to remove keyring field for the first node. * Split the mongodb service to a new configuration file. * Add mongodb backup agent * Add a bigchaindb/mongodb-backup-agent container that includes the backup agent. * It makes use of an api key provided by MongoDB Cloud Manager. This is included in the configuration/config-map.yaml file. * Changes to nginx deployment config * Allow 'all' by default for now. This is included in the configuration/config-map.yaml file. * Dynamically resolve DNS addresses of our backend services; cache DNS resolution for 20s. * Configure DNS based on user provided resolver. This helps in user deciding to provide 8.8.8.8 or a custom DNS for name resolution. For k8s deployments, we use the hardcoded k8s DNS IP of 10.0.0.10. * Changes to nginx-3scale deployment config * Use the common ConfigMap in configuration/config-map.yaml file. 
* Removing prefix `v` from the docker tag for mongodb-monitoring-agent and mongodb containers * Bumping up version for nginx-3scale container * Add small helper scripts for docker build and push of mongodb monitoring and backup agents * Documentation for setting up the first node with monitoring and backup agents --- .../cloud-deployment-templates/first-node.rst | 455 ++++++++++++++++++ .../cloud-deployment-templates/index.rst | 2 +- k8s/bigchaindb/bigchaindb-dep.yaml | 43 +- k8s/bigchaindb/bigchaindb-svc.yaml | 16 + k8s/configuration/config-map.yaml | 36 ++ k8s/mongodb-backup-agent/container/Dockerfile | 19 + .../container/docker_build_and_push.bash | 5 + .../mongodb_backup_agent_entrypoint.bash | 20 + .../mongo-backup-dep.yaml | 27 ++ .../container/Dockerfile | 54 +++ .../container/docker_build_and_push.bash | 5 + .../mongodb_mon_agent_entrypoint.bash | 30 ++ .../mongo-mon-dep.yaml | 38 ++ k8s/mongodb/container/Dockerfile | 2 +- k8s/mongodb/container/Makefile | 2 +- k8s/mongodb/mongo-cm.yaml | 13 - k8s/mongodb/mongo-ss.yaml | 32 +- k8s/mongodb/mongo-svc.yaml | 16 + k8s/nginx-3scale/nginx-3scale-cm.yaml | 13 - k8s/nginx-3scale/nginx-3scale-dep.yaml | 4 +- k8s/nginx/container/README.md | 11 +- k8s/nginx/container/nginx.conf.template | 29 +- k8s/nginx/container/nginx_entrypoint.bash | 5 +- k8s/nginx/nginx-cm.yaml | 13 - k8s/nginx/nginx-dep.yaml | 45 +- k8s/nginx/nginx-svc.yaml | 24 + k8s/toolbox/Dockerfile | 11 +- k8s/toolbox/README.md | 2 + 28 files changed, 817 insertions(+), 155 deletions(-) create mode 100644 docs/server/source/cloud-deployment-templates/first-node.rst create mode 100644 k8s/bigchaindb/bigchaindb-svc.yaml create mode 100644 k8s/configuration/config-map.yaml create mode 100644 k8s/mongodb-backup-agent/container/Dockerfile create mode 100755 k8s/mongodb-backup-agent/container/docker_build_and_push.bash create mode 100755 k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash create mode 100644 
k8s/mongodb-backup-agent/mongo-backup-dep.yaml create mode 100644 k8s/mongodb-monitoring-agent/container/Dockerfile create mode 100755 k8s/mongodb-monitoring-agent/container/docker_build_and_push.bash create mode 100755 k8s/mongodb-monitoring-agent/container/mongodb_mon_agent_entrypoint.bash create mode 100644 k8s/mongodb-monitoring-agent/mongo-mon-dep.yaml delete mode 100644 k8s/mongodb/mongo-cm.yaml create mode 100644 k8s/mongodb/mongo-svc.yaml delete mode 100644 k8s/nginx-3scale/nginx-3scale-cm.yaml delete mode 100644 k8s/nginx/nginx-cm.yaml create mode 100644 k8s/nginx/nginx-svc.yaml diff --git a/docs/server/source/cloud-deployment-templates/first-node.rst b/docs/server/source/cloud-deployment-templates/first-node.rst new file mode 100644 index 00000000..06b3843b --- /dev/null +++ b/docs/server/source/cloud-deployment-templates/first-node.rst @@ -0,0 +1,455 @@ +First Node or Bootstrap Node Setup +================================== + +This document is a work in progress and will evolve over time to include +security, websocket and other settings. + +Step 1: Set Up the Cluster +-------------------------- + + .. 
code:: bash + + az group create --name bdb-test-cluster-0 --location westeurope --debug -- output json + + az acs create --name k8s-bdb-test-cluster-0 \ + --resource-group bdb-test-cluster-0 \ + --master-count 3 \ + --agent-count 2 \ + --admin-username ubuntu \ + --agent-vm-size Standard_D2_v2 \ + --dns-prefix k8s-bdb-test-cluster-0 \ + --ssh-key-value ~/.ssh/ \ + --orchestrator-type kubernetes \ + --debug --output json + + az acs kubernetes get-credentials \ + --resource-group bdb-test-cluster-0 \ + --name k8s-bdb-test-cluster-0 \ + --debug --output json + + echo -e "Host k8s-bdb-test-cluster-0.westeurope.cloudapp.azure.com\n ForwardAgent yes" >> ~/.ssh/config + + ssh ubuntu@k8s-bdb-test-cluster-0.westeurope.cloudapp.azure.com + + +Step 2: Connect to the Cluster UI - (optional) +---------------------------------------------- + + * Get the kubectl context for this cluster using ``kubectl config view``. + + * For the above commands, the context would be ``k8s-bdb-test-cluster-0``. + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 proxy -p 8001 + +Step 3. Configure the Cluster +----------------------------- + + * Use the ConfigMap in ``configuration/config-map.yaml`` file for configuring + the cluster. + + * Log in to the MongoDB Cloud Manager and select the group that will monitor + and backup this cluster from the dropdown box. + + * Go to Settings, Group Settings and copy the ``Agent Api Key``. + + * Replace the ```` field with this key. + + * Since this is the first node of the cluster, ensure that the ``data.fqdn`` + field has the value ``mdb-instance-0``. + + * We only support the value ``all`` in the ``data.allowed-hosts`` field for now. + + * Create the ConfigMap + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f configuration/config-map.yaml + +Step 4. Start the NGINX Service +------------------------------- + + * This will give us a public IP for the cluster. 
+ + * Once you complete this step, you might need to wait up to 10 mins for the + public IP to be assigned. + + * You have the option to use vanilla NGINX or an OpenResty NGINX integrated + with 3scale API Gateway. + + +Step 4.1. Vanilla NGINX +^^^^^^^^^^^^^^^^^^^^^^^ + + * This configuration is located in the file ``nginx/nginx-svc.yaml``. + + * Since this is the first node, rename ``metadata.name`` and ``metadata.labels.name`` + to ``ngx-instance-0``, and ``spec.selector.app`` to ``ngx-instance-0-dep``. + + * Start the Kubernetes Service: + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f nginx/nginx-svc.yaml + + +Step 4.2. OpenResty NGINX + 3scale +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + * You have to enable HTTPS for this one and will need an HTTPS certificate + for your domain + + * Assuming that the public key chain is named ``cert.pem`` and private key is + ``cert.key``, run the following commands to encode the certificates into + single continuous string that can be embedded in yaml. + + .. code:: bash + + cat cert.pem | base64 -w 0 > cert.pem.b64 + + cat cert.key | base64 -w 0 > cert.key.b64 + + + * Copy the contents of ``cert.pem.b64`` in the ``cert.pem`` field, and the + contents of ``cert.key.b64`` in the ``cert.key`` field in the file + ``nginx-3scale/nginx-3scale-secret.yaml`` + + * Create the Kubernetes Secret: + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f nginx-3scale/nginx-3scale-secret.yaml + + * Since this is the first node, rename ``metadata.name`` and ``metadata.labels.name`` + to ``ngx-instance-0``, and ``spec.selector.app`` to ``ngx-instance-0-dep`` in + ``nginx-3scale/nginx-3scale-svc.yaml`` file. + + * Start the Kubernetes Service: + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f nginx-3scale/nginx-3scale-svc.yaml + + +Step 5. 
Assign DNS Name to the NGINX Public IP +---------------------------------------------- + + * The following command can help you find out if the nginx service started above + has been assigned a public IP or external IP address: + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 get svc -w + + * Once a public IP is assigned, you can log in to the Azure portal and map it to + a DNS name. + + * We usually start with bdb-test-cluster-0, bdb-test-cluster-1 and so on. + + * Let us assume that we assigned the unique name of ``bdb-test-cluster-0`` here. + + +Step 6. Start the Mongo Kubernetes Service +------------------------------------------ + + * Change ``metadata.name`` and ``metadata.labels.name`` to + ``mdb-instance-0``, and ``spec.selector.app`` to ``mdb-instance-0-ss``. + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f mongodb/mongo-svc.yaml + + +Step 7. Start the BigchainDB Kubernetes Service +----------------------------------------------- + + * Change ``metadata.name`` and ``metadata.labels.name`` to + ``bdb-instance-0``, and ``spec.selector.app`` to ``bdb-instance-0-dep``. + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f bigchaindb/bigchaindb-svc.yaml + + +Step 8. Start the NGINX Kubernetes Deployment +--------------------------------------------- + + * As in step 4, you have the option to use vanilla NGINX or an OpenResty NGINX + integrated with 3scale API Gateway. + +Step 8.1. Vanilla NGINX +^^^^^^^^^^^^^^^^^^^^^^^ + + * This configuration is located in the file ``nginx/nginx-dep.yaml``. + + * Since this is the first node, change the ``metadata.name`` and + ``spec.template.metadata.labels.app`` to ``ngx-instance-0-dep``. + + * Set ``MONGODB_BACKEND_HOST`` env var to + ``mdb-instance-0.default.svc.cluster.local``. + + * Set ``BIGCHAINDB_BACKEND_HOST`` env var to + ``bdb-instance-0.default.svc.cluster.local``. 
+ + * Set ``MONGODB_FRONTEND_PORT`` to + ``$(NGX_INSTANCE_0_SERVICE_PORT_NGX_PUBLIC_MDB_PORT)``. + + * Set ``BIGCHAINDB_FRONTEND_PORT`` to + ``$(NGX_INSTANCE_0_SERVICE_PORT_NGX_PUBLIC_BDB_PORT)``. + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f nginx/nginx-dep.yaml + +Step 8.2. OpenResty NGINX + 3scale +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + * This configuration is located in the file + ``nginx-3scale/nginx-3scale-dep.yaml``. + + * Since this is the first node, change the metadata.name and + spec.template.metadata.labels.app to ``ngx-instance-0-dep``. + + * Set ``MONGODB_BACKEND_HOST`` env var to + ``mdb-instance-0.default.svc.cluster.local``. + + * Set ``BIGCHAINDB_BACKEND_HOST`` env var to + ``bdb-instance-0.default.svc.cluster.local``. + + * Set ``MONGODB_FRONTEND_PORT`` to + ``$(NGX_INSTANCE_0_SERVICE_PORT_NGX_PUBLIC_MDB_PORT)``. + + * Set ``BIGCHAINDB_FRONTEND_PORT`` to + ``$(NGX_INSTANCE_0_SERVICE_PORT_NGX_PUBLIC_BDB_PORT)``. + + * Also, replace the placeholder strings for the env vars with the values + obtained from 3scale. You will need the Secret Token, Service ID, Version Header + and Provider Key from 3scale. + + * The ``THREESCALE_FRONTEND_API_DNS_NAME`` will be DNS name registered for your + HTTPS certificate. + + * You can set the ``THREESCALE_UPSTREAM_API_PORT`` to any port other than 9984, + 9985, 443, 8888 and 27017. We usually use port ``9999``. + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f nginx-3scale/nginx-3scale-dep.yaml + + +Step 9. Create a Kubernetes Storage Class for MongoDB +----------------------------------------------------- + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f mongodb/mongo-sc.yaml + + +Step 10. Create a Kubernetes PersistentVolumeClaim +-------------------------------------------------- + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f mongodb/mongo-pvc.yaml + + +Step 11. 
Start a Kubernetes StatefulSet for MongoDB +--------------------------------------------------- + + * Change ``spec.serviceName`` to ``mdb-instance-0``. + + * Change the ``metadata.name``, ``template.metadata.name`` and + ``template.metadata.labels.app`` to ``mdb-instance-0-ss``. + + * It might take up to 10 minutes for the disks to be created and attached to + the pod. + + * The UI might show that the pod has errored with the + message "timeout expired waiting for volumes to attach/mount". + + * Use the CLI below to check the status of the pod in this case, + instead of the UI. This happens due to a bug in Azure ACS. + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f mongodb/mongo-ss.yaml + + * You can check the status of the pod using the command: + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 get po -w + + +Step 12. Start a Kubernetes Deployment for Bigchaindb +----------------------------------------------------- + + * Change both ``metadata.name`` and ``spec.template.metadata.labels.app`` + to ``bdb-instance-0-dep``. + + * Set ``BIGCHAINDB_DATABASE_HOST`` to ``mdb-instance-0``. + + * Set the appropriate ``BIGCHAINDB_KEYPAIR_PUBLIC``, + ``BIGCHAINDB_KEYPAIR_PRIVATE`` values. + + * One way to generate BigchainDB keypair is to run a Python shell with + the command + ``from bigchaindb_driver import crypto; crypto.generate_keypair()``. + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f bigchaindb/bigchaindb-dep.yaml + + +Step 13. Start a Kubernetes Deployment for MongoDB Monitoring Agent +------------------------------------------------------------------- + + * Change both metadata.name and spec.template.metadata.labels.app to + ``mdb-mon-instance-0-dep``. + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f mongodb-monitoring-agent/mongo-mon-dep.yaml + + * Get the pod name and check its logs: + + .. 
code:: bash + + kubectl --context k8s-bdb-test-cluster-0 get po + + kubectl --context k8s-bdb-test-cluster-0 logs -f + + +Step 14. Configure MongoDB Cloud Manager for Monitoring +------------------------------------------------------- + + * Open `MongoDB Cloud Manager `_. + + * Click ``Login`` under ``MongoDB Cloud Manager`` and log in to the Cloud Manager. + + * Select the group from the dropdown box on the page. + + * Go to Settings, Group Settings and add a Preferred Hostnames regexp as + ``^mdb-instance-[0-9]{1,2}$``. It may take up to 5 mins till this setting + is in effect. You may refresh the browser window and verify whether the changes + have been saved or not. + + * Next, click the ``Deployment`` tab, and then the ``Manage Existing`` button. + + * On the ``Import your deployment for monitoring`` page, enter the hostname as + ``mdb-instance-0``, port number as ``27017``, with no authentication and no + TLS/SSL settings. + + * Once the deployment is found, click the ``Continue`` button. + This may take about a minute or two. + + * Do not add ``Automation Agent`` when given an option to add it. + + * Verify on the UI that data is being sent by the monitoring agent. + + +Step 15. Start a Kubernetes Deployment for MongoDB Backup Agent +--------------------------------------------------------------- + + * Change both ``metadata.name`` and ``spec.template.metadata.labels.app`` + to ``mdb-backup-instance-0-dep``. + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 apply -f mongodb-backup-agent/mongo-backup-dep.yaml + + * Get the pod name and check its logs: + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 get po + + kubectl --context k8s-bdb-test-cluster-0 logs -f + + +Step 16. Configure MongoDB Cloud Manager for Backup +--------------------------------------------------- + + * Open `MongoDB Cloud Manager `_. + + * Click ``Login`` under ``MongoDB Cloud Manager`` and log in to the Cloud + Manager. 
+ + * Select the group from the dropdown box on the page. + + * Click ``Backup`` tab. + + * Click on the ``Begin Setup`` after the replica set name at the bottom of + the page. + + * Click on ``Next``, select the replica set from the dropdown menu. + + * Verify the details of your MongoDB instance and click on ``Start`` again. + + * It might take up to 5 minutes to start the backup process. + + * Verify that data is being backed up on the UI. + + +Step 17. Verify that the Cluster is Correctly Set Up +---------------------------------------------------- + + * Start the toolbox container in the cluster + + .. code:: bash + + kubectl --context k8s-bdb-test-cluster-0 \ + run -it toolbox \ + --image bigchaindb/toolbox \ + --image-pull-policy=Always \ + --restart=Never --rm + + * Verify MongoDB instance + + .. code:: bash + + nslookup mdb-instance-0 + + dig +noall +answer _mdb-port._tcp.mdb-instance-0.default.svc.cluster.local SRV + + curl -X GET http://mdb-instance-0:27017 + + * Verify BigchainDB instance + + .. code:: bash + + nslookup bdb-instance-0 + + dig +noall +answer _bdb-port._tcp.bdb-instance-0.default.svc.cluster.local SRV + + curl -X GET http://bdb-instance-0:9984 + + * Verify NGINX instance + + .. code:: bash + + nslookup ngx-instance-0 + + dig +noall +answer _ngx-public-mdb-port._tcp.ngx-instance-0.default.svc.cluster.local SRV + + curl -X GET http://ngx-instance-0:27017 # results in curl: (56) Recv failure: Connection reset by peer + + dig +noall +answer _ngx-public-bdb-port._tcp.ngx-instance-0.default.svc.cluster.local SRV + + * If you have run the vanilla NGINX instance, run + + .. code:: bash + + curl -X GET http://ngx-instance-0:80 + + * If you have the OpenResty NGINX + 3scale instance, run + + .. code:: bash + + curl -X GET http://ngx-instance-0:443 + + * Check the MongoDB monitoring and backup agent on the MOngoDB Coud Manager portal to verify they are working fine. + + * Send some transactions to BigchainDB and verify it's up and running! 
+ diff --git a/docs/server/source/cloud-deployment-templates/index.rst b/docs/server/source/cloud-deployment-templates/index.rst index 28ac7923..d5b60a0e 100644 --- a/docs/server/source/cloud-deployment-templates/index.rst +++ b/docs/server/source/cloud-deployment-templates/index.rst @@ -17,4 +17,4 @@ If you find the cloud deployment templates for nodes helpful, then you may also node-on-kubernetes add-node-on-kubernetes upgrade-on-kubernetes - \ No newline at end of file + first-node diff --git a/k8s/bigchaindb/bigchaindb-dep.yaml b/k8s/bigchaindb/bigchaindb-dep.yaml index 83daaaaf..b8550249 100644 --- a/k8s/bigchaindb/bigchaindb-dep.yaml +++ b/k8s/bigchaindb/bigchaindb-dep.yaml @@ -1,49 +1,31 @@ ############################################################### -# This config file runs bigchaindb:master as a k8s Deployment # +# This config file runs bigchaindb:0.10.1 as a k8s Deployment # # and it connects to the mongodb backend running as a # # separate pod # ############################################################### -apiVersion: v1 -kind: Service -metadata: - name: bdb-svc - namespace: default - labels: - name: bdb-svc -spec: - selector: - app: bdb-dep - ports: - - port: 9984 - targetPort: 9984 - name: bdb-port - type: ClusterIP - clusterIP: None ---- apiVersion: extensions/v1beta1 kind: Deployment metadata: - name: bdb-dep + name: bdb-instance-0-dep spec: replicas: 1 template: metadata: labels: - app: bdb-dep + app: bdb-instance-0-dep spec: terminationGracePeriodSeconds: 10 containers: - name: bigchaindb - image: bigchaindb/bigchaindb:master + image: bigchaindb/bigchaindb:0.10.1 imagePullPolicy: IfNotPresent args: - start env: - name: BIGCHAINDB_DATABASE_HOST - value: mdb-svc + value: mdb-instance-0 - name: BIGCHAINDB_DATABASE_PORT - # TODO(Krish): remove hardcoded port value: "27017" - name: BIGCHAINDB_DATABASE_REPLICASET value: bigchain-rs @@ -54,13 +36,20 @@ spec: - name: BIGCHAINDB_SERVER_BIND value: 0.0.0.0:9984 - name: BIGCHAINDB_KEYPAIR_PUBLIC - 
value: EEWUAhsk94ZUHhVw7qx9oZiXYDAWc9cRz93eMrsTG4kZ + value: "" - name: BIGCHAINDB_KEYPAIR_PRIVATE - value: 3CjmRhu718gT1Wkba3LfdqX5pfYuBdaMPLd7ENUga5dm + value: "" - name: BIGCHAINDB_BACKLOG_REASSIGN_DELAY value: "120" - - name: BIGCHAINDB_KEYRING - value: "" + - name: BIGCHAINDB_DATABASE_MAXTRIES + value: "3" + - name: BIGCHAINDB_DATABASE_CONNECTION_TIMEOUT + value: "120" + - name: BIGCHAINDB_LOG_LEVEL_CONSOLE + value: debug + # The following env var is not required for the bootstrap/first node + #- name: BIGCHAINDB_KEYRING + # value: "" ports: - containerPort: 9984 hostPort: 9984 diff --git a/k8s/bigchaindb/bigchaindb-svc.yaml b/k8s/bigchaindb/bigchaindb-svc.yaml new file mode 100644 index 00000000..9927a92d --- /dev/null +++ b/k8s/bigchaindb/bigchaindb-svc.yaml @@ -0,0 +1,16 @@ +apiVersion: v1 +kind: Service +metadata: + name: bdb-instance-0 + namespace: default + labels: + name: bdb-instance-0 +spec: + selector: + app: bdb-instance-0-dep + ports: + - port: 9984 + targetPort: 9984 + name: bdb-port + type: ClusterIP + clusterIP: None diff --git a/k8s/configuration/config-map.yaml b/k8s/configuration/config-map.yaml new file mode 100644 index 00000000..1c04dbf7 --- /dev/null +++ b/k8s/configuration/config-map.yaml @@ -0,0 +1,36 @@ +####################################################### +# This YAML file desribes a ConfigMap for the cluster # +####################################################### + +apiVersion: v1 +kind: ConfigMap +metadata: + name: mdb-mon + namespace: default +data: + api-key: "" +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: mdb-backup + namespace: default +data: + api-key: "" +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: mdb-fqdn + namespace: default +data: + fqdn: mdb-instance-0 +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: mongodb-whitelist + namespace: default +data: + allowed-hosts: "all" + diff --git a/k8s/mongodb-backup-agent/container/Dockerfile b/k8s/mongodb-backup-agent/container/Dockerfile new file 
mode 100644 index 00000000..8407fb09 --- /dev/null +++ b/k8s/mongodb-backup-agent/container/Dockerfile @@ -0,0 +1,19 @@ +FROM ubuntu:xenial +LABEL maintainer "dev@bigchaindb.com" +ARG DEBIAN_FRONTEND=noninteractive +ARG DEB_FILE=mongodb-mms-backup-agent_latest_amd64.ubuntu1604.deb +ARG FILE_URL="https://cloud.mongodb.com/download/agent/backup/"$DEB_FILE +WORKDIR / +RUN apt update \ + && apt -y upgrade \ + && apt -y install --no-install-recommends curl ca-certificates logrotate \ + libsasl2-2 \ + && curl -OL $FILE_URL \ + && dpkg -i $DEB_FILE \ + && rm -f $DEB_FILE \ + && apt -y purge curl \ + && apt -y autoremove \ + && apt clean +COPY mongodb_backup_agent_entrypoint.bash / +RUN chown -R mongodb-mms-agent:mongodb-mms-agent /etc/mongodb-mms/ +ENTRYPOINT ["/mongodb_backup_agent_entrypoint.bash"] diff --git a/k8s/mongodb-backup-agent/container/docker_build_and_push.bash b/k8s/mongodb-backup-agent/container/docker_build_and_push.bash new file mode 100755 index 00000000..e57e58a1 --- /dev/null +++ b/k8s/mongodb-backup-agent/container/docker_build_and_push.bash @@ -0,0 +1,5 @@ +#!/bin/bash + +docker build -t bigchaindb/mongodb-backup-agent:1.0 . + +docker push bigchaindb/mongodb-backup-agent:1.0 diff --git a/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash b/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash new file mode 100755 index 00000000..3eb20633 --- /dev/null +++ b/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash @@ -0,0 +1,20 @@ +#!/bin/bash + +set -euo pipefail + +MONGODB_BACKUP_CONF_FILE=/etc/mongodb-mms/backup-agent.config + +mms_api_key=`printenv MMS_API_KEY` + +if [[ -z "${mms_api_key}" ]]; then + echo "Invalid environment settings detected. Exiting!" + exit 1 +fi + +sed -i '/mmsApiKey/d' $MONGODB_BACKUP_CONF_FILE + +echo "mmsApiKey="${mms_api_key} >> $MONGODB_BACKUP_CONF_FILE + +echo "INFO: starting mdb backup..." 
+exec mongodb-mms-backup-agent \ + -c $MONGODB_BACKUP_CONF_FILE diff --git a/k8s/mongodb-backup-agent/mongo-backup-dep.yaml b/k8s/mongodb-backup-agent/mongo-backup-dep.yaml new file mode 100644 index 00000000..b3d5a9ec --- /dev/null +++ b/k8s/mongodb-backup-agent/mongo-backup-dep.yaml @@ -0,0 +1,27 @@ +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: mdb-backup-instance-0-dep +spec: + replicas: 1 + template: + metadata: + labels: + app: mdb-backup-instance-0-dep + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: mdb-backup + image: bigchaindb/mongodb-backup-agent:1.0 + imagePullPolicy: Always + env: + - name: MMS_API_KEY + valueFrom: + configMapKeyRef: + name: mdb-backup + key: api-key + resources: + limits: + cpu: 200m + memory: 768Mi + restartPolicy: Always diff --git a/k8s/mongodb-monitoring-agent/container/Dockerfile b/k8s/mongodb-monitoring-agent/container/Dockerfile new file mode 100644 index 00000000..ec6496d8 --- /dev/null +++ b/k8s/mongodb-monitoring-agent/container/Dockerfile @@ -0,0 +1,54 @@ +# Dockerfile for MongoDB Monitoring Agent +# Use it to create bigchaindb/mongodb-monitoring-agent +# on Docker Hub. + +# "Never install the Monitoring Agent on the same server as a data bearing mongod instance." 
+# More help: +# https://docs.cloudmanager.mongodb.com/tutorial/install-monitoring-agent-with-deb-package/ + +FROM ubuntu:xenial +LABEL maintainer "dev@bigchaindb.com" +# Using ARG, one can set DEBIAN_FRONTEND=noninteractive and others +# just for the duration of the build: +ARG DEBIAN_FRONTEND=noninteractive +ARG DEB_FILE=mongodb-mms-monitoring-agent_latest_amd64.ubuntu1604.deb +ARG FILE_URL="https://cloud.mongodb.com/download/agent/monitoring/"$DEB_FILE + +# Download the Monitoring Agent as a .deb package and install it +WORKDIR / +RUN apt update \ + && apt -y upgrade \ + && apt -y install --no-install-recommends curl ca-certificates logrotate \ + libsasl2-2 \ + && curl -OL $FILE_URL \ + && dpkg -i $DEB_FILE \ + && rm -f $DEB_FILE \ + && apt -y purge curl \ + && apt -y autoremove \ + && apt clean + +# The above installation puts a default config file in +# /etc/mongodb-mms/monitoring-agent.config +# It should contain a line like: "mmsApiKey=" +# i.e. with no value specified. +# We need to set that value to the "agent API key" value from Cloud Manager, +# but of course that value varies from user to user, +# so we can't hard-code it into the Docker image. + +# Kubernetes can set an MMS_API_KEY environment variable +# in the container +# (including from Secrets or ConfigMaps) +# An entrypoint bash script can then use the value of MMS_API_KEY +# to write the mmsApiKey value in the config file +# /etc/mongodb-mms/monitoring-agent.config +# before running the MongoDB Monitoring Agent. + +# The MongoDB Monitoring Agent has other +# config settings besides mmsApiKey, +# but it's the only one that *must* be set. 
See: +# https://docs.cloudmanager.mongodb.com/reference/monitoring-agent/ + +COPY mongodb_mon_agent_entrypoint.bash / +RUN chown -R mongodb-mms-agent:mongodb-mms-agent /etc/mongodb-mms/ +#USER mongodb-mms-agent - BUG(Krish) Uncomment after tests are complete +ENTRYPOINT ["/mongodb_mon_agent_entrypoint.bash"] diff --git a/k8s/mongodb-monitoring-agent/container/docker_build_and_push.bash b/k8s/mongodb-monitoring-agent/container/docker_build_and_push.bash new file mode 100755 index 00000000..d2219b08 --- /dev/null +++ b/k8s/mongodb-monitoring-agent/container/docker_build_and_push.bash @@ -0,0 +1,5 @@ +#!/bin/bash + +docker build -t bigchaindb/mongodb-monitoring-agent:1.0 . + +docker push bigchaindb/mongodb-monitoring-agent:1.0 diff --git a/k8s/mongodb-monitoring-agent/container/mongodb_mon_agent_entrypoint.bash b/k8s/mongodb-monitoring-agent/container/mongodb_mon_agent_entrypoint.bash new file mode 100755 index 00000000..6454c729 --- /dev/null +++ b/k8s/mongodb-monitoring-agent/container/mongodb_mon_agent_entrypoint.bash @@ -0,0 +1,30 @@ +#!/bin/bash + +set -euo pipefail +# -e Abort at the first failed line (i.e. if exit status is not 0) +# -u Abort when undefined variable is used +# -o pipefail (Bash-only) Piped commands return the status +# of the last failed command, rather than the status of the last command + +MONGODB_MON_CONF_FILE=/etc/mongodb-mms/monitoring-agent.config + +mms_api_key=`printenv MMS_API_KEY` + +if [[ -z "${mms_api_key}" ]]; then + echo "Invalid environment settings detected. Exiting!" + exit 1 +fi + +# Delete all lines containing "mmsApiKey" in the MongoDB Monitoring Agent +# config file /etc/mongodb-mms/monitoring-agent.config +sed -i '/mmsApiKey/d' $MONGODB_MON_CONF_FILE + +# Append a new line of the form +# mmsApiKey=value_of_MMS_API_KEY +echo "mmsApiKey="${mms_api_key} >> $MONGODB_MON_CONF_FILE + +# start mdb monitoring agent +echo "INFO: starting mdb monitor..." 
+exec mongodb-mms-monitoring-agent \ + --conf $MONGODB_MON_CONF_FILE \ + --loglevel debug diff --git a/k8s/mongodb-monitoring-agent/mongo-mon-dep.yaml b/k8s/mongodb-monitoring-agent/mongo-mon-dep.yaml new file mode 100644 index 00000000..98abe92b --- /dev/null +++ b/k8s/mongodb-monitoring-agent/mongo-mon-dep.yaml @@ -0,0 +1,38 @@ +############################################################ +# This config file defines a k8s Deployment for the # +# bigchaindb/mongodb-monitoring-agent:latest Docker image # +# # +# It connects to a MongoDB instance in a separate pod, # +# all remote MongoDB instances in the cluster, # +# and also to MongoDB Cloud Manager (an external service). # +# Notes: # +# MongoDB agents connect to Cloud Manager on port 443. # +############################################################ + +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: mdb-mon-instance-0-dep +spec: + replicas: 1 + template: + metadata: + labels: + app: mdb-mon-instance-0-dep + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: mdb-mon + image: bigchaindb/mongodb-monitoring-agent:1.0 + imagePullPolicy: Always + env: + - name: MMS_API_KEY + valueFrom: + configMapKeyRef: + name: mdb-mon + key: api-key + resources: + limits: + cpu: 200m + memory: 768Mi + restartPolicy: Always diff --git a/k8s/mongodb/container/Dockerfile b/k8s/mongodb/container/Dockerfile index 11fc80cf..e9667f95 100644 --- a/k8s/mongodb/container/Dockerfile +++ b/k8s/mongodb/container/Dockerfile @@ -1,4 +1,4 @@ -FROM mongo:3.4.2 +FROM mongo:3.4.3 LABEL maintainer "dev@bigchaindb.com" WORKDIR / RUN apt-get update \ diff --git a/k8s/mongodb/container/Makefile b/k8s/mongodb/container/Makefile index 72ec4f79..0a3779af 100644 --- a/k8s/mongodb/container/Makefile +++ b/k8s/mongodb/container/Makefile @@ -12,7 +12,7 @@ GOINSTALL=$(GOCMD) install GOFMT=gofmt -s -w DOCKER_IMAGE_NAME?=bigchaindb/mongodb -DOCKER_IMAGE_TAG?=latest +DOCKER_IMAGE_TAG?=3.4.3 PWD=$(shell pwd) 
BINARY_PATH=$(PWD)/mongod_entrypoint/ diff --git a/k8s/mongodb/mongo-cm.yaml b/k8s/mongodb/mongo-cm.yaml deleted file mode 100644 index bf4b4f82..00000000 --- a/k8s/mongodb/mongo-cm.yaml +++ /dev/null @@ -1,13 +0,0 @@ -##################################################################### -# This YAML file desribes a ConfigMap with the FQDN of the mongo # -# instance to be started. MongoDB instance uses the value from this # -# ConfigMap to bootstrap itself during startup. # -##################################################################### - -apiVersion: v1 -kind: ConfigMap -metadata: - name: mdb-fqdn - namespace: default -data: - fqdn: mdb-instance-0.westeurope.cloudapp.azure.com diff --git a/k8s/mongodb/mongo-ss.yaml b/k8s/mongodb/mongo-ss.yaml index 089a0a96..2f180929 100644 --- a/k8s/mongodb/mongo-ss.yaml +++ b/k8s/mongodb/mongo-ss.yaml @@ -4,45 +4,25 @@ # It depends on the configdb and db k8s pvc. # ######################################################################## -apiVersion: v1 -kind: Service -metadata: - name: mdb-svc - namespace: default - labels: - name: mdb-svc -spec: - selector: - app: mdb-ss - ports: - - port: 27017 - targetPort: 27017 - name: mdb-port - type: ClusterIP - clusterIP: None ---- apiVersion: apps/v1beta1 kind: StatefulSet metadata: - name: mdb-ss + name: mdb-instance-0-ss namespace: default spec: - serviceName: mdb-svc + serviceName: mdb-instance-0 replicas: 1 template: metadata: - name: mdb-ss + name: mdb-instance-0-ss labels: - app: mdb-ss + app: mdb-instance-0-ss spec: terminationGracePeriodSeconds: 10 containers: - name: mongodb - # TODO(FIXME): Do not use latest in production as it is harder to track - # versions during updates and rollbacks. 
Also, once fixed, change the - # imagePullPolicy to IfNotPresent for faster bootup - image: bigchaindb/mongodb:latest - imagePullPolicy: Always + image: bigchaindb/mongodb:3.4.3 + imagePullPolicy: IfNotPresent env: - name: MONGODB_FQDN valueFrom: diff --git a/k8s/mongodb/mongo-svc.yaml b/k8s/mongodb/mongo-svc.yaml new file mode 100644 index 00000000..2c81797a --- /dev/null +++ b/k8s/mongodb/mongo-svc.yaml @@ -0,0 +1,16 @@ +apiVersion: v1 +kind: Service +metadata: + name: mdb-instance-0 + namespace: default + labels: + name: mdb-instance-0 +spec: + selector: + app: mdb-instance-0-ss + ports: + - port: 27017 + targetPort: 27017 + name: mdb-port + type: ClusterIP + clusterIP: None diff --git a/k8s/nginx-3scale/nginx-3scale-cm.yaml b/k8s/nginx-3scale/nginx-3scale-cm.yaml deleted file mode 100644 index 6f87b494..00000000 --- a/k8s/nginx-3scale/nginx-3scale-cm.yaml +++ /dev/null @@ -1,13 +0,0 @@ -############################################################################ -# This YAML file desribes a ConfigMap with a valid list of ':' separated # -# IP addresses (or 'all' for all IP addresses) that can connect to the # -# MongoDB instance. We only support the value 'all' currently. 
# -############################################################################ - -apiVersion: v1 -kind: ConfigMap -metadata: - name: mongodb-whitelist - namespace: default -data: - allowed-hosts: "all" diff --git a/k8s/nginx-3scale/nginx-3scale-dep.yaml b/k8s/nginx-3scale/nginx-3scale-dep.yaml index 49695315..8b1fa673 100644 --- a/k8s/nginx-3scale/nginx-3scale-dep.yaml +++ b/k8s/nginx-3scale/nginx-3scale-dep.yaml @@ -19,19 +19,21 @@ spec: terminationGracePeriodSeconds: 10 containers: - name: nginx-3scale - image: bigchaindb/nginx_3scale:0.1 + image: bigchaindb/nginx_3scale:1.0 # TODO(Krish): Change later to IfNotPresent imagePullPolicy: Always env: - name: MONGODB_FRONTEND_PORT value: $(NGX_INSTANCE_0_SERVICE_PORT_NGX_PUBLIC_MDB_PORT) - name: MONGODB_BACKEND_HOST + # NGINX requires FQDN to resolve names value: mdb-instance-0.default.svc.cluster.local - name: MONGODB_BACKEND_PORT value: "27017" - name: BIGCHAINDB_FRONTEND_PORT value: $(NGX_INSTANCE_0_SERVICE_PORT_NGX_PUBLIC_BDB_PORT) - name: BIGCHAINDB_BACKEND_HOST + # NGINX requires FQDN to resolve names value: bdb-instance-0.default.svc.cluster.local - name: BIGCHAINDB_BACKEND_PORT value: "9984" diff --git a/k8s/nginx/container/README.md b/k8s/nginx/container/README.md index 9cb44246..30f42bfe 100644 --- a/k8s/nginx/container/README.md +++ b/k8s/nginx/container/README.md @@ -22,7 +22,7 @@ ### Step 1: Build the Latest Container -Run `docker build -t bigchaindb/nginx .` from this folder. +Run `docker build -t bigchaindb/nginx: .` from this folder. Optional: Upload container to Docker Hub: `docker push bigchaindb/nginx:` @@ -38,11 +38,10 @@ docker run \ --env "MONGODB_BACKEND_HOST=" \ --env "MONGODB_BACKEND_PORT=" \ --env "BIGCHAINDB_FRONTEND_PORT=" \ ---env "BIGCHAINDB_BACKEND_HOST=" \ ---env "BIGCHAINDB_BACKEND_PORT=" \ +--env "BIGCHAINDB_BACKEND_HOST=" \ +--env "BIGCHAINDB_BACKEND_PORT=" \ --env "MONGODB_WHITELIST=
    " \ +--env "DNS_SERVER=" \ --name=ngx \ --publish=: \ --publish= Date: Mon, 24 Apr 2017 10:25:15 +0200 Subject: [PATCH 136/150] Updating docs (#1433) * Include the key generation step * Fix typos in CLIs --- .../cloud-deployment-templates/first-node.rst | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/docs/server/source/cloud-deployment-templates/first-node.rst b/docs/server/source/cloud-deployment-templates/first-node.rst index 06b3843b..9130696a 100644 --- a/docs/server/source/cloud-deployment-templates/first-node.rst +++ b/docs/server/source/cloud-deployment-templates/first-node.rst @@ -9,7 +9,9 @@ Step 1: Set Up the Cluster .. code:: bash - az group create --name bdb-test-cluster-0 --location westeurope --debug -- output json + az group create --name bdb-test-cluster-0 --location westeurope --debug --output json + + ssh-keygen -t rsa -C "k8s-bdb-test-cluster-0" -f ~/.ssh/k8s-bdb-test-cluster-0 az acs create --name k8s-bdb-test-cluster-0 \ --resource-group bdb-test-cluster-0 \ @@ -18,7 +20,7 @@ Step 1: Set Up the Cluster --admin-username ubuntu \ --agent-vm-size Standard_D2_v2 \ --dns-prefix k8s-bdb-test-cluster-0 \ - --ssh-key-value ~/.ssh/ \ + --ssh-key-value ~/.ssh/k8s-bdb-test-cluster-0.pub \ --orchestrator-type kubernetes \ --debug --output json @@ -29,8 +31,6 @@ Step 1: Set Up the Cluster echo -e "Host k8s-bdb-test-cluster-0.westeurope.cloudapp.azure.com\n ForwardAgent yes" >> ~/.ssh/config - ssh ubuntu@k8s-bdb-test-cluster-0.westeurope.cloudapp.azure.com - Step 2: Connect to the Cluster UI - (optional) ---------------------------------------------- @@ -380,8 +380,7 @@ Step 16. Configure MongoDB Cloud Manager for Backup * Click ``Backup`` tab. - * Click on the ``Begin Setup`` after the replica set name at the bottom of - the page. + * Click on the ``Begin Setup``. * Click on ``Next``, select the replica set from the dropdown menu. @@ -447,7 +446,7 @@ Step 17. Verify that the Cluster is Correctly Set Up .. 
code:: bash - curl -X GET http://ngx-instance-0:443 + curl -X GET https://ngx-instance-0 * Check the MongoDB monitoring and backup agent on the MOngoDB Coud Manager portal to verify they are working fine. From 1e866a120790b9565f53fcce662fdc40d8f42cbb Mon Sep 17 00:00:00 2001 From: Scott Sadler Date: Mon, 24 Apr 2017 13:37:50 +0200 Subject: [PATCH 137/150] signature payload is serialized transaction --- bigchaindb/common/transaction.py | 9 +++------ docs/server/source/data-models/transaction-model.rst | 2 +- tests/common/test_transaction.py | 8 +++----- 3 files changed, 7 insertions(+), 12 deletions(-) diff --git a/bigchaindb/common/transaction.py b/bigchaindb/common/transaction.py index 285272c5..d0b24dbb 100644 --- a/bigchaindb/common/transaction.py +++ b/bigchaindb/common/transaction.py @@ -690,8 +690,7 @@ class Transaction(object): tx_dict = Transaction._remove_signatures(tx_dict) tx_serialized = Transaction._to_str(tx_dict) for i, input_ in enumerate(self.inputs): - message = '%s:%s' % (i, tx_serialized) - self.inputs[i] = self._sign_input(input_, message, key_pairs) + self.inputs[i] = self._sign_input(input_, tx_serialized, key_pairs) return self @classmethod @@ -843,10 +842,8 @@ class Transaction(object): def validate(i, output_condition_uri=None): """ Validate input against output condition URI """ - message = '%s:%s' % (i, tx_serialized) - - return self._input_valid(self.inputs[i], self.operation, message, - output_condition_uri) + return self._input_valid(self.inputs[i], self.operation, + tx_serialized, output_condition_uri) return all(validate(i, cond) for i, cond in enumerate(output_condition_uris)) diff --git a/docs/server/source/data-models/transaction-model.rst b/docs/server/source/data-models/transaction-model.rst index 3610d1fe..cc548aa9 100644 --- a/docs/server/source/data-models/transaction-model.rst +++ b/docs/server/source/data-models/transaction-model.rst @@ -49,4 +49,4 @@ Here's some explanation of the contents of a :ref:`transaction `: Later, 
when we get to the models for the block and the vote, we'll see that both include a signature (from the node which created it). You may wonder why transactions don't have signatures... The answer is that they do! They're just hidden inside the ``fulfillment`` string of each input. A creation transaction is signed by whoever created it. A transfer transaction is signed by whoever currently controls or owns it. -What gets signed? For each input in the transaction, the "fullfillment message" that gets signed includes the JSON serialized body of the transaction, minus any fulfillment strings, and with "n:" prepended where n is the index of the input being signed. The computed signature goes into creating the ``fulfillment`` string of the input. +What gets signed? For each input in the transaction, the "fulfillment message" that gets signed includes the JSON serialized body of the transaction, minus any fulfillment strings. The computed signature goes into creating the ``fulfillment`` string of the input.
diff --git a/tests/common/test_transaction.py b/tests/common/test_transaction.py index adc6e60d..205009ac 100644 --- a/tests/common/test_transaction.py +++ b/tests/common/test_transaction.py @@ -510,7 +510,7 @@ def test_validate_tx_simple_create_signature(user_input, user_output, user_priv, tx = Transaction(Transaction.CREATE, asset_definition, [user_input], [user_output]) expected = deepcopy(user_output) - message = ('0:' + str(tx)).encode() + message = str(tx).encode() expected.fulfillment.sign(message, PrivateKey(user_priv)) tx.sign([user_priv]) @@ -574,7 +574,7 @@ def test_validate_tx_threshold_create_signature(user_user2_threshold_input, tx = Transaction(Transaction.CREATE, asset_definition, [user_user2_threshold_input], [user_user2_threshold_output]) - message = ('0:' + str(tx)).encode() + message = str(tx).encode() expected = deepcopy(user_user2_threshold_output) expected.fulfillment.subconditions[0]['body'].sign(message, PrivateKey(user_priv)) @@ -589,8 +589,6 @@ def test_validate_tx_threshold_create_signature(user_user2_threshold_input, validate_transaction_model(tx) -import pytest -@pytest.mark.skip() def test_validate_tx_threshold_duplicated_pk(user_pub, user_priv, asset_definition): from copy import deepcopy @@ -877,7 +875,7 @@ def test_create_transfer_transaction_single_io(tx, user_pub, user2_pub, expected_input = deepcopy(inputs[0]) expected['id'] = transfer_tx['id'] - expected_input.fulfillment.sign(('0:' + serialize(expected)).encode(), + expected_input.fulfillment.sign(serialize(expected).encode(), PrivateKey(user_priv)) expected_ffill = expected_input.fulfillment.serialize_uri() transfer_ffill = transfer_tx['inputs'][0]['fulfillment'] From 6f8f2e2f93dc4f8a3ec7ca3173ad6dcd77fa376d Mon Sep 17 00:00:00 2001 From: Leo Arias Date: Thu, 20 Apr 2017 03:25:30 +0000 Subject: [PATCH 138/150] Add the packaging metadata to build the bigchaindb snap --- snap/snapcraft.yaml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 
snap/snapcraft.yaml diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml new file mode 100644 index 00000000..aa3a9bca --- /dev/null +++ b/snap/snapcraft.yaml @@ -0,0 +1,23 @@ +name: bigchaindb +version: master +summary: a scalable blockchain database +description: | + With high throughput, sub-second latency and powerful functionality to + automate business processes, BigchainDB looks, acts and feels like a database + with added blockchain characteristics. + +# grade must be 'stable' to release into candidate/stable channels +grade: devel +# strict confinement requires https://github.com/snapcore/snapd/pull/2749 +confinement: devmode + +apps: + bigchaindb: + command: bigchaindb + plugs: [network, network-bind] + +parts: + bigchaindb: + source: . + plugin: python + build-packages: [g++, libffi-dev] From 3a3f73aeb271d2ed3c8f08489a5e839376ef67eb Mon Sep 17 00:00:00 2001 From: Leo Arias Date: Sat, 22 Apr 2017 19:19:46 +0000 Subject: [PATCH 139/150] Add the snap README --- snap/README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 snap/README.md diff --git a/snap/README.md b/snap/README.md new file mode 100644 index 00000000..14bfdbce --- /dev/null +++ b/snap/README.md @@ -0,0 +1,12 @@ +This is the packaging metadata for the BigchainDB snap. + +Snaps and the snap store allow for the secure installation of apps that work
For more information, go to https://snapcraft.io/ + +To build and install this snap in Ubuntu 16.04: + + $ sudo apt install git snapcraft + $ git clone https://github.com/bigchaindb/bigchaindb + $ cd bigchaindb + $ snapcraft + $ sudo snap install *.snap --dangerous --devmode From 96feb1860466e3288d94e54e644b95752a8bf8c5 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Mon, 24 Apr 2017 15:56:36 +0200 Subject: [PATCH 140/150] Changed from Alpha to Beta in setup.py I changed the classifier `'Development Status :: 3 - Alpha'` to `'Development Status :: 4 - Beta'` as per the list of classifiers here: https://pypi.python.org/pypi?%3Aaction=list_classifiers This is in preparation for the version 1.0 release but I think it's probably fine to have it in master now. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 45d6f04f..179b561f 100644 --- a/setup.py +++ b/setup.py @@ -101,7 +101,7 @@ setup( zip_safe=False, classifiers=[ - 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Database', 'Topic :: Database :: Database Engines/Servers', From 920d4aa181b7354554b8b91b4458876d5ddd740b Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Fri, 21 Apr 2017 18:13:20 +0200 Subject: [PATCH 141/150] Add log analytics oms deployment scripts --- k8s/log_analytics_oms.json | 49 +++++++++++++++++++++++++++ k8s/log_analytics_oms.parameters.json | 15 ++++++++ k8s/oms-daemonset.yaml | 30 ++++++++++++++++ 3 files changed, 94 insertions(+) create mode 100644 k8s/log_analytics_oms.json create mode 100644 k8s/log_analytics_oms.parameters.json create mode 100644 k8s/oms-daemonset.yaml diff --git a/k8s/log_analytics_oms.json b/k8s/log_analytics_oms.json new file mode 100644 index 00000000..caca7fab --- /dev/null +++ b/k8s/log_analytics_oms.json @@ -0,0 +1,49 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2014-04-01-preview/deploymentTemplate.json#", + 
"contentVersion": "1.0.0.0", + "parameters": { + "sku": { + "type": "String" + }, + "workspaceName": { + "type": "String" + }, + "solutionType": { + "type": "String" + }, + }, + "resources": [ + { + "apiVersion": "2015-03-20", + "type": "Microsoft.OperationalInsights/workspaces", + "name": "[parameters('workspaceName')]", + "location": "[resourceGroup().location]", + "properties": { + "sku": { + "name": "[parameters('sku')]" + } + }, + "resources": [ + { + "apiVersion": "2015-11-01-preview", + "location": "[resourceGroup().location]", + "name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "type": "Microsoft.OperationsManagement/solutions", + "id": "[Concat(resourceGroup().id, '/providers/Microsoft.OperationsManagement/solutions/', parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "dependsOn": [ + "[concat('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]" + ], + "properties": { + "workspaceResourceId": "[resourceId('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]" + }, + "plan": { + "publisher": "Microsoft", + "product": "[Concat('OMSGallery/', parameters('solutionType'))]", + "name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "promotionCode": "" + } + } + ] + } + ] +} diff --git a/k8s/log_analytics_oms.parameters.json b/k8s/log_analytics_oms.parameters.json new file mode 100644 index 00000000..c5d215e4 --- /dev/null +++ b/k8s/log_analytics_oms.parameters.json @@ -0,0 +1,15 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "sku": { + "value": "Free" + }, + "workspaceName": { + "value": "rg-abc-logs" + }, + "solutionType": { + "value": "Containers" + }, + } +} diff --git a/k8s/oms-daemonset.yaml b/k8s/oms-daemonset.yaml new file mode 100644 index 00000000..99cf14fe --- /dev/null +++ b/k8s/oms-daemonset.yaml @@ -0,0 
+1,30 @@ +apiVersion: extensions/v1beta1 +kind: DaemonSet +metadata: + name: omsagent +spec: + template: + metadata: + labels: + app: omsagent + spec: + containers: + - env: + - name: WSID + value: + - name: KEY + value: + image: microsoft/oms + name: omsagent + ports: + - containerPort: 25225 + protocol: TCP + securityContext: + privileged: true + volumeMounts: + - mountPath: /var/run/docker.sock + name: docker-sock + volumes: + - name: docker-sock + hostPath: + path: /var/run/docker.sock From e954668eaa3cb2d52cad5ab3ef6cb598c9e1ff61 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Fri, 21 Apr 2017 18:19:24 +0200 Subject: [PATCH 142/150] Add docs --- .../cloud-deployment-templates/index.rst | 1 + .../log-analytics.rst | 238 ++++++++++++++++++ 2 files changed, 239 insertions(+) create mode 100644 docs/server/source/cloud-deployment-templates/log-analytics.rst diff --git a/docs/server/source/cloud-deployment-templates/index.rst b/docs/server/source/cloud-deployment-templates/index.rst index d5b60a0e..41eec0ed 100644 --- a/docs/server/source/cloud-deployment-templates/index.rst +++ b/docs/server/source/cloud-deployment-templates/index.rst @@ -18,3 +18,4 @@ If you find the cloud deployment templates for nodes helpful, then you may also add-node-on-kubernetes upgrade-on-kubernetes first-node + log-analytics diff --git a/docs/server/source/cloud-deployment-templates/log-analytics.rst b/docs/server/source/cloud-deployment-templates/log-analytics.rst new file mode 100644 index 00000000..05587689 --- /dev/null +++ b/docs/server/source/cloud-deployment-templates/log-analytics.rst @@ -0,0 +1,238 @@ +Log Analytics on Azure +====================== + +This section documents how to create and configure a Log Analytics workspace on +Azure, for a Kubernetes-based deployment. + +The documented approach is based on an integration of Microsoft's Operations +Management Suite (OMS) with a Kubernetes-based Azure Container Service cluster. 
+ +The :ref:`oms-k8s-references` contains links to more detailed documentation on +Azure, and Kubernetes. + +There are three main steps involved: + +1. Create a workspace (``LogAnalyticsOMS``). +2. Create a ``ContainersOMS`` solution under the workspace. +3. Deploy the OMS agent(s). + +Steps 1 and 2 rely on `Azure Resource Manager templates`_ and can be done with +one template so we'll cover them together. Step 3 relies on a +`Kubernetes DaemonSet`_ and will be covered separately. + +Minimum Requirements +-------------------- +This document assumes that you have already deployed a Kubernetes cluster, and +that you have the Kubernetes command line ``kubectl`` installed. + +Creating a workspace and adding a containers solution +----------------------------------------------------- + +.. code-block:: bash + + $ az group deployment create --debug \ + --resource-group rg \ + --name "Microsoft.LogAnalyticsOMS" \ + --template-file log_analytics_oms.json \ + --parameters @log_analytics_oms.parameters.json + +An example of a simple template file (``--template-file``): + +.. 
code-block:: json + + { + "$schema": "http://schema.management.azure.com/schemas/2014-04-01-preview/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "sku": { + "type": "String" + }, + "workspaceName": { + "type": "String" + }, + "solutionType": { + "type": "String" + }, + }, + "resources": [ + { + "apiVersion": "2015-03-20", + "type": "Microsoft.OperationalInsights/workspaces", + "name": "[parameters('workspaceName')]", + "location": "[resourceGroup().location]", + "properties": { + "sku": { + "name": "[parameters('sku')]" + } + }, + "resources": [ + { + "apiVersion": "2015-11-01-preview", + "location": "[resourceGroup().location]", + "name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "type": "Microsoft.OperationsManagement/solutions", + "id": "[Concat(resourceGroup().id, '/providers/Microsoft.OperationsManagement/solutions/', parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "dependsOn": [ + "[concat('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]" + ], + "properties": { + "workspaceResourceId": "[resourceId('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]" + }, + "plan": { + "publisher": "Microsoft", + "product": "[Concat('OMSGallery/', parameters('solutionType'))]", + "name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "promotionCode": "" + } + } + ] + } + ] + } + +An example of the associated parameter file (``--parameters``): + +.. 
code-block:: json + + { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "sku": { + "value": "Free" + }, + "workspaceName": { + "value": "rg-abc-logs" + }, + "solutionType": { + "value": "Containers" + }, + } + } + +Deploying the OMS agent(s) +-------------------------- +In order to deploy an OMS agent two important pieces of information are needed: + +* workspace id +* workspace key + +Obtaining the workspace id: + +.. code-block:: bash + + $ az resource show + --resource-group rg + --resource-type Microsoft.OperationalInsights/workspaces + --name rg-abc-logs \ + | grep customerId + "customerId": "12345678-1234-1234-1234-123456789012", + +Obtaining the workspace key: + + +.. code-block:: yaml + + # oms-daemonset.yaml + apiVersion: extensions/v1beta1 + kind: DaemonSet + metadata: + name: omsagent + spec: + template: + metadata: + labels: + app: omsagent + spec: + containers: + - env: + - name: WSID + value: + - name: KEY + value: + image: microsoft/oms + name: omsagent + ports: + - containerPort: 25225 + protocol: TCP + securityContext: + privileged: true + volumeMounts: + - mountPath: /var/run/docker.sock + name: docker-sock + volumes: + - name: docker-sock + hostPath: + path: /var/run/docker.sock + + +.. code-block:: bash + + $ kubectl create -f oms-daemonset.yaml + + +Some useful management tasks +---------------------------- +List workspaces: + +.. code-block:: bash + + $ az resource list \ + --resource-group rg \ + --resource-type Microsoft.OperationalInsights/workspaces + +List solutions: + +.. code-block:: bash + + $ az resource list \ + --resource-group rg \ + --resource-type Microsoft.OperationsManagement/solutions + +Deleting the containers solution: + +.. code-block:: bash + + $ az group deployment delete --debug \ + --resource-group rg \ + --name Microsoft.ContainersOMS + +.. 
code-block:: bash + + $ az resource delete \ + --resource-group rg \ + --resource-type Microsoft.OperationsManagement/solutions \ + --name "Containers(rglogs)" + +Deleting the workspace: + +.. code-block:: bash + + $ az group deployment delete --debug \ + --resource-group rg \ + --name Microsoft.LogAnalyticsOMS + +.. code-block:: bash + + $ az resource delete \ + --resource-group rg \ + --resource-type Microsoft.OperationalInsights/workspaces \ + --name rglogs + + +.. _oms-k8s-references: + +References +---------- + +* `Monitor an Azure Container Service cluster with Microsoft Operations Management Suite (OMS) `_ +* `Manage Log Analytics using Azure Resource Manager templates `_ +* `azure commands for deployments `_ + (``az group deployment``) +* `Understand the structure and syntax of Azure Resource Manager templates `_ +* `Kubernetes DaemonSet`_ + + + +.. _Azure Resource Manager templates: https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-authoring-templates +.. 
_Kubernetes DaemonSet: https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/ From 43ac369e4529bd8f0a20d282bd1080760735a8bf Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Mon, 24 Apr 2017 16:25:58 +0200 Subject: [PATCH 143/150] Move oms related files under dedicated dir --- k8s/{ => logging-and-monitoring}/log_analytics_oms.json | 0 .../log_analytics_oms.parameters.json | 0 k8s/{ => logging-and-monitoring}/oms-daemonset.yaml | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename k8s/{ => logging-and-monitoring}/log_analytics_oms.json (100%) rename k8s/{ => logging-and-monitoring}/log_analytics_oms.parameters.json (100%) rename k8s/{ => logging-and-monitoring}/oms-daemonset.yaml (100%) diff --git a/k8s/log_analytics_oms.json b/k8s/logging-and-monitoring/log_analytics_oms.json similarity index 100% rename from k8s/log_analytics_oms.json rename to k8s/logging-and-monitoring/log_analytics_oms.json diff --git a/k8s/log_analytics_oms.parameters.json b/k8s/logging-and-monitoring/log_analytics_oms.parameters.json similarity index 100% rename from k8s/log_analytics_oms.parameters.json rename to k8s/logging-and-monitoring/log_analytics_oms.parameters.json diff --git a/k8s/oms-daemonset.yaml b/k8s/logging-and-monitoring/oms-daemonset.yaml similarity index 100% rename from k8s/oms-daemonset.yaml rename to k8s/logging-and-monitoring/oms-daemonset.yaml From 22f0e25c9aa81376cb4f06f0365f9c85a802637a Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Mon, 24 Apr 2017 16:30:20 +0200 Subject: [PATCH 144/150] Correct JSON --- k8s/logging-and-monitoring/log_analytics_oms.json | 2 +- k8s/logging-and-monitoring/log_analytics_oms.parameters.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/k8s/logging-and-monitoring/log_analytics_oms.json b/k8s/logging-and-monitoring/log_analytics_oms.json index caca7fab..44b9d05a 100644 --- a/k8s/logging-and-monitoring/log_analytics_oms.json +++ b/k8s/logging-and-monitoring/log_analytics_oms.json @@ 
-10,7 +10,7 @@ }, "solutionType": { "type": "String" - }, + } }, "resources": [ { diff --git a/k8s/logging-and-monitoring/log_analytics_oms.parameters.json b/k8s/logging-and-monitoring/log_analytics_oms.parameters.json index c5d215e4..895cbb15 100644 --- a/k8s/logging-and-monitoring/log_analytics_oms.parameters.json +++ b/k8s/logging-and-monitoring/log_analytics_oms.parameters.json @@ -10,6 +10,6 @@ }, "solutionType": { "value": "Containers" - }, + } } } From 7659290518e42449cf4bdd7e887f835847993956 Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Mon, 24 Apr 2017 16:31:47 +0200 Subject: [PATCH 145/150] Add missing backslash (\) --- docs/server/source/cloud-deployment-templates/log-analytics.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/server/source/cloud-deployment-templates/log-analytics.rst b/docs/server/source/cloud-deployment-templates/log-analytics.rst index 05587689..4705c6f7 100644 --- a/docs/server/source/cloud-deployment-templates/log-analytics.rst +++ b/docs/server/source/cloud-deployment-templates/log-analytics.rst @@ -121,7 +121,7 @@ Obtaining the workspace id: .. 
code-block:: bash - $ az resource show + $ az resource show \ --resource-group rg --resource-type Microsoft.OperationalInsights/workspaces --name rg-abc-logs \ From 8900276b1d959179a9dfd41c4a4825d1fa21a27c Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Mon, 24 Apr 2017 17:22:12 +0200 Subject: [PATCH 146/150] Mount k8s dir for docker-compose --- docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 322cbcf6..c7f3c584 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -37,6 +37,7 @@ services: - ./bigchaindb:/usr/src/app/bigchaindb - ./tests:/usr/src/app/tests - ./docs:/usr/src/app/docs + - ./k8s:/usr/src/app/k8s - ./setup.py:/usr/src/app/setup.py - ./setup.cfg:/usr/src/app/setup.cfg - ./pytest.ini:/usr/src/app/pytest.ini @@ -58,6 +59,7 @@ services: - ./bigchaindb:/usr/src/app/bigchaindb - ./tests:/usr/src/app/tests - ./docs:/usr/src/app/docs + - ./k8s:/usr/src/app/k8s - ./setup.py:/usr/src/app/setup.py - ./setup.cfg:/usr/src/app/setup.cfg - ./pytest.ini:/usr/src/app/pytest.ini From 8f750456d89bd63f691b1d1122f59104ddd30abf Mon Sep 17 00:00:00 2001 From: Sylvain Bellemare Date: Mon, 24 Apr 2017 17:22:49 +0200 Subject: [PATCH 147/150] Address remaining details --- .../log-analytics.rst | 42 +++++++++++++------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/docs/server/source/cloud-deployment-templates/log-analytics.rst b/docs/server/source/cloud-deployment-templates/log-analytics.rst index 4705c6f7..fbef70d2 100644 --- a/docs/server/source/cloud-deployment-templates/log-analytics.rst +++ b/docs/server/source/cloud-deployment-templates/log-analytics.rst @@ -27,11 +27,21 @@ that you have the Kubernetes command line ``kubectl`` installed. 
Creating a workspace and adding a containers solution ----------------------------------------------------- +For the sake of this document and example, we'll assume an existing resource +group named: + +* ``resource_group`` + +and the workspace we'll create will be named: + +* ``work_space`` + +If you feel creative you may replace these names by more interesting ones. .. code-block:: bash $ az group deployment create --debug \ - --resource-group rg \ + --resource-group resource_group \ --name "Microsoft.LogAnalyticsOMS" \ --template-file log_analytics_oms.json \ --parameters @log_analytics_oms.parameters.json @@ -102,7 +112,7 @@ An example of the associated parameter file (``--parameters``): "value": "Free" }, "workspaceName": { - "value": "rg-abc-logs" + "value": "work_space" }, "solutionType": { "value": "Containers" @@ -122,14 +132,21 @@ Obtaining the workspace id: .. code-block:: bash $ az resource show \ - --resource-group rg + --resource-group resource_group --resource-type Microsoft.OperationalInsights/workspaces - --name rg-abc-logs \ + --name work_space \ | grep customerId "customerId": "12345678-1234-1234-1234-123456789012", Obtaining the workspace key: +Until we figure out a way to this via the command line please see instructions +under `Obtain your workspace ID and key +`_. + +Once you have the workspace id and key you can include them in the following +YAML file (:download:`oms-daemonset.yaml +<../../../../k8s/logging-and-monitoring/oms-daemonset.yaml>`): .. code-block:: yaml @@ -165,6 +182,7 @@ Obtaining the workspace key: hostPath: path: /var/run/docker.sock +To deploy the agent simply run the following command: .. code-block:: bash @@ -178,7 +196,7 @@ List workspaces: .. code-block:: bash $ az resource list \ - --resource-group rg \ + --resource-group resource_group \ --resource-type Microsoft.OperationalInsights/workspaces List solutions: @@ -186,7 +204,7 @@ List solutions: .. 
code-block:: bash $ az resource list \ - --resource-group rg \ + --resource-group resource_group \ --resource-type Microsoft.OperationsManagement/solutions Deleting the containers solution: @@ -194,30 +212,30 @@ Deleting the containers solution: .. code-block:: bash $ az group deployment delete --debug \ - --resource-group rg \ + --resource-group resource_group \ --name Microsoft.ContainersOMS .. code-block:: bash $ az resource delete \ - --resource-group rg \ + --resource-group resource_group \ --resource-type Microsoft.OperationsManagement/solutions \ - --name "Containers(rglogs)" + --name "Containers(work_space)" Deleting the workspace: .. code-block:: bash $ az group deployment delete --debug \ - --resource-group rg \ + --resource-group resource_group \ --name Microsoft.LogAnalyticsOMS .. code-block:: bash $ az resource delete \ - --resource-group rg \ + --resource-group resource_group \ --resource-type Microsoft.OperationalInsights/workspaces \ - --name rglogs + --name work_space .. _oms-k8s-references: From ae9d6f1e6e71a74a3f1a81a82d675d9f93b9e8f8 Mon Sep 17 00:00:00 2001 From: Troy McConaghy Date: Wed, 26 Apr 2017 10:11:45 +0200 Subject: [PATCH 148/150] Updated Drivers & Clients docs page, including Ruby driver --- docs/server/source/drivers-clients/index.rst | 30 +++++++++----------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/docs/server/source/drivers-clients/index.rst b/docs/server/source/drivers-clients/index.rst index 127d6309..0bfde7ad 100644 --- a/docs/server/source/drivers-clients/index.rst +++ b/docs/server/source/drivers-clients/index.rst @@ -1,29 +1,27 @@ Drivers & Clients ================= -Currently, the only language-native driver is written in the Python language. +Libraries and Tools Maintained by the BigchainDB Team +----------------------------------------------------- -We also provide the Transaction CLI to be able to script the building of -transactions. 
You may be able to wrap this tool inside the language of -your choice, and then use the HTTP API directly to post transactions. - -If you use a language other than Python, you may want to look at the current -community projects listed below. - - -.. toctree:: - :maxdepth: 1 - - The Python Driver - Transaction CLI +* `The Python Driver `_ +* `The Transaction CLI `_ is + a command-line interface for building BigchainDB transactions. + You may be able to call it from inside the language of + your choice, and then use :ref:`the HTTP API ` + to post transactions. Community-Driven Libraries and Tools ------------------------------------ -Please note that some of these projects may be work in progress, but may -nevertheless be very useful. + +.. note:: + + Some of these projects are a work in progress, + but may still be useful. * `Javascript transaction builder `_ * `Haskell transaction builder `_ * `Go driver `_ * `Java driver `_ +* `Ruby driver `_ From 20270cdb9bc5e53cd75741591be61cb17f8aa589 Mon Sep 17 00:00:00 2001 From: Krish Date: Wed, 26 Apr 2017 18:34:22 +0200 Subject: [PATCH 149/150] Config settings for MongoDB Backup Agent (#1442) --- .../container/mongodb_backup_agent_entrypoint.bash | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash b/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash index 3eb20633..ef3895ea 100755 --- a/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash +++ b/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash @@ -12,9 +12,10 @@ if [[ -z "${mms_api_key}" ]]; then fi sed -i '/mmsApiKey/d' $MONGODB_BACKUP_CONF_FILE +sed -i '/mothership/d' $MONGODB_BACKUP_CONF_FILE echo "mmsApiKey="${mms_api_key} >> $MONGODB_BACKUP_CONF_FILE +echo "mothership=api-backup.eu-west-1.mongodb.com" >> $MONGODB_BACKUP_CONF_FILE echo "INFO: starting mdb backup..." 
-exec mongodb-mms-backup-agent \ - -c $MONGODB_BACKUP_CONF_FILE +exec mongodb-mms-backup-agent -c $MONGODB_BACKUP_CONF_FILE From 43f9f678675c339d6b2cf744bda164c7ff200a9a Mon Sep 17 00:00:00 2001 From: Krish Date: Fri, 28 Apr 2017 10:14:14 +0200 Subject: [PATCH 150/150] Publish port 80 publicly in nginx-3scale. (#1446) Upgrade docker image tag to `1.1` as the corresponding config changes for displaying error message are built in the `1.1` container image. --- k8s/nginx-3scale/nginx-3scale-dep.yaml | 6 +++++- k8s/nginx-3scale/nginx-3scale-svc.yaml | 4 ++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/k8s/nginx-3scale/nginx-3scale-dep.yaml b/k8s/nginx-3scale/nginx-3scale-dep.yaml index 8b1fa673..964cbf8b 100644 --- a/k8s/nginx-3scale/nginx-3scale-dep.yaml +++ b/k8s/nginx-3scale/nginx-3scale-dep.yaml @@ -19,7 +19,7 @@ spec: terminationGracePeriodSeconds: 10 containers: - name: nginx-3scale - image: bigchaindb/nginx_3scale:1.0 + image: bigchaindb/nginx_3scale:1.1 # TODO(Krish): Change later to IfNotPresent imagePullPolicy: Always env: @@ -68,6 +68,10 @@ spec: hostPort: 443 name: public-bdb-port protocol: TCP + - containerPort: 80 + hostPort: 80 + name: https-msg-port + protocol: TCP - containerPort: 8888 hostPort: 8888 name: health-check diff --git a/k8s/nginx-3scale/nginx-3scale-svc.yaml b/k8s/nginx-3scale/nginx-3scale-svc.yaml index db212222..9150c24d 100644 --- a/k8s/nginx-3scale/nginx-3scale-svc.yaml +++ b/k8s/nginx-3scale/nginx-3scale-svc.yaml @@ -14,6 +14,10 @@ spec: selector: app: ngx-instance-0-dep ports: + - port: 80 + targetPort: 80 + name: ngx-public-bdb-port-http + protocol: TCP - port: 443 targetPort: 443 name: ngx-public-bdb-port