diff --git a/alerter/src/alerter/alert_code/node/cosmos_alert_code.py b/alerter/src/alerter/alert_code/node/cosmos_alert_code.py
index a2a92974..9c6fa24f 100644
--- a/alerter/src/alerter/alert_code/node/cosmos_alert_code.py
+++ b/alerter/src/alerter/alert_code/node/cosmos_alert_code.py
@@ -43,3 +43,5 @@ class CosmosNodeAlertCode(AlertCode):
     TendermintRPCDataObtainedAlert = 'cosmos_node_alert_39'
     MetricNotFoundErrorAlert = 'cosmos_node_alert_40'
     MetricFoundAlert = 'cosmos_node_alert_41'
+    NodeIsNotPeeredWithSentinelAlert = 'cosmos_node_alert_42'
+    NodeIsPeeredWithSentinelAlert = 'cosmos_node_alert_43'
diff --git a/alerter/src/alerter/alerters/alerter.py b/alerter/src/alerter/alerters/alerter.py
index 1d5b4ed7..04f125b7 100644
--- a/alerter/src/alerter/alerters/alerter.py
+++ b/alerter/src/alerter/alerters/alerter.py
@@ -46,6 +46,10 @@ def _equal_condition_function(current: Any, previous: Any) -> bool:
     def _is_true_condition_function(current: Any) -> bool:
         return current is True
 
+    @staticmethod
+    def _is_false_condition_function(current: Any) -> bool:
+        return current is False
+
     @staticmethod
     def _true_fn() -> bool:
         return True
diff --git a/alerter/src/alerter/alerters/node/cosmos.py b/alerter/src/alerter/alerters/node/cosmos.py
index ee48368a..a985bf9e 100644
--- a/alerter/src/alerter/alerters/node/cosmos.py
+++ b/alerter/src/alerter/alerters/node/cosmos.py
@@ -422,7 +422,6 @@ def _process_tendermint_rpc_result(self, tendermint_data: Dict,
             parent_id, node_id, configs, is_validator)
 
         # Check if some errors have been resolved
-
         self.alerting_factory.classify_error_alert(
             InvalidUrlException.code,
             cosmos_alerts.TendermintRPCInvalidUrlAlert,
@@ -451,11 +450,16 @@ def _process_tendermint_rpc_result(self, tendermint_data: Dict,
         )
 
         # Check if the alert rules are satisfied for the metrics
-
         is_syncing_configs = (
             configs.validator_is_syncing if is_validator
             else configs.node_is_syncing
         )
+
+        is_peered_with_sentinel_configs = (
+            configs.validator_is_peered_with_sentinel if is_validator
+            else configs.node_is_peered_with_sentinel
+        )
+
         classification_fn = (
             self.alerting_factory
             .classify_solvable_conditional_alert_no_repetition
@@ -473,6 +477,20 @@ def _process_tendermint_rpc_result(self, tendermint_data: Dict,
             [node_name, Severity.INFO.value, last_monitored, parent_id,
              node_id]
         )
+        # Only alert on sentinel peering if the node is running mev-tendermint
+        if str_to_bool(is_peered_with_sentinel_configs['enabled']) and meta_data['is_mev_tendermint_node']:
+            current = data['is_peered_with_sentinel']['current']
+            if current is not None:
+                classification_fn(
+                    parent_id, node_id, MetricCode.NodeIsNotPeeredWithSentinel.value,
+                    cosmos_alerts.NodeIsNotPeeredWithSentinelAlert,
+                    self._is_false_condition_function, [current],
+                    [node_name, is_peered_with_sentinel_configs['severity'],
+                     last_monitored, parent_id, node_id], data_for_alerting,
+                    cosmos_alerts.NodeIsPeeredWithSentinelAlert,
+                    [node_name, Severity.INFO.value, last_monitored,
+                     parent_id, node_id]
+                )
 
         slashed_configs = configs.slashed
         if str_to_bool(slashed_configs['enabled']):
diff --git a/alerter/src/alerter/alerts/node/cosmos.py b/alerter/src/alerter/alerts/node/cosmos.py
index d1e00027..1908af67 100644
--- a/alerter/src/alerter/alerts/node/cosmos.py
+++ b/alerter/src/alerter/alerts/node/cosmos.py
@@ -80,6 +80,23 @@ def __init__(self, origin_name: str, severity: str, timestamp: float,
             timestamp, parent_id, origin_id,
GroupedCosmosNodeAlertsMetricCode.NodeIsSyncing, [origin_id]) +class NodeIsPeeredWithSentinelAlert(Alert): + def __init__(self, origin_name: str, severity: str, timestamp: float, + parent_id: str, origin_id: str) -> None: + super().__init__( + CosmosNodeAlertCode.NodeIsPeeredWithSentinelAlert, + "Node {} is peered with sentinel.".format(origin_name), severity, + timestamp, parent_id, origin_id, + GroupedCosmosNodeAlertsMetricCode.NodeIsNotPeeredWithSentinel, [origin_id]) + +class NodeIsNotPeeredWithSentinelAlert(Alert): + def __init__(self, origin_name: str, severity: str, timestamp: float, + parent_id: str, origin_id: str) -> None: + super().__init__( + CosmosNodeAlertCode.NodeIsNotPeeredWithSentinelAlert, + "Node {} is not peered with sentinel.".format(origin_name), severity, + timestamp, parent_id, origin_id, + GroupedCosmosNodeAlertsMetricCode.NodeIsNotPeeredWithSentinel, [origin_id]) class ValidatorIsNotActiveAlert(Alert): def __init__(self, origin_name: str, severity: str, timestamp: float, diff --git a/alerter/src/alerter/factory/cosmos_node_alerting_factory.py b/alerter/src/alerter/factory/cosmos_node_alerting_factory.py index 1f040513..a66fb1ae 100644 --- a/alerter/src/alerter/factory/cosmos_node_alerting_factory.py +++ b/alerter/src/alerter/factory/cosmos_node_alerting_factory.py @@ -107,6 +107,7 @@ def create_alerting_state( AlertsMetricCode.MetricNotFound.value: False, } any_severity_sent = { + AlertsMetricCode.NodeIsNotPeeredWithSentinel.value: False, AlertsMetricCode.NodeIsSyncing.value: False, AlertsMetricCode.ValidatorIsNotActive.value: False, AlertsMetricCode.ValidatorIsJailed.value: False, diff --git a/alerter/src/alerter/grouped_alerts_metric_code/node/cosmos_node_metric_code.py b/alerter/src/alerter/grouped_alerts_metric_code/node/cosmos_node_metric_code.py index b1515df1..c6e8434e 100644 --- a/alerter/src/alerter/grouped_alerts_metric_code/node/cosmos_node_metric_code.py +++ b/alerter/src/alerter/grouped_alerts_metric_code/node/cosmos_node_metric_code.py @@ -5,6 +5,7 @@ class GroupedCosmosNodeAlertsMetricCode(GroupedAlertsMetricCode): NodeIsDown = 'cosmos_node_is_down' ValidatorWasSlashed = 'cosmos_node_slashed' NodeIsSyncing = 'cosmos_node_syncing' + NodeIsNotPeeredWithSentinel = 'cosmos_node_is_not_peered_with_sentinel' ValidatorIsNotActive = 'cosmos_node_active' ValidatorIsJailed = 'cosmos_node_jailed' BlocksMissedThreshold = 'cosmos_node_blocks_missed' diff --git a/alerter/src/configs/alerts/node/cosmos.py b/alerter/src/configs/alerts/node/cosmos.py index 5d0ef3aa..6f3a8999 100644 --- a/alerter/src/configs/alerts/node/cosmos.py +++ b/alerter/src/configs/alerts/node/cosmos.py @@ -14,7 +14,8 @@ def __init__( cannot_access_tendermint_rpc_validator: Dict, cannot_access_tendermint_rpc_node: Dict, missed_blocks: Dict, slashed: Dict, node_is_syncing: Dict, validator_is_syncing: Dict, - validator_is_jailed: Dict) -> None: + validator_is_jailed: Dict, + node_is_peered_with_sentinel: Dict = None, validator_is_peered_with_sentinel: Dict = None) -> None: self._parent_id = parent_id self._cannot_access_validator = cannot_access_validator self._cannot_access_node = cannot_access_node @@ -38,6 +39,8 @@ def __init__( self._node_is_syncing = node_is_syncing self._validator_is_syncing = validator_is_syncing self._validator_is_jailed = validator_is_jailed + self._node_is_peered_with_sentinel = node_is_peered_with_sentinel + self._validator_is_peered_with_sentinel = validator_is_peered_with_sentinel def __eq__(self, other: Any) -> bool: return self.__dict__ == other.__dict__ @@ -110,6 
+113,14 @@ def node_is_syncing(self) -> Dict: def validator_is_syncing(self) -> Dict: return self._validator_is_syncing + @property + def node_is_peered_with_sentinel(self) -> Dict: + return self._node_is_peered_with_sentinel + + @property + def validator_is_peered_with_sentinel(self) -> Dict: + return self._validator_is_peered_with_sentinel + @property def validator_is_jailed(self) -> Dict: return self._validator_is_jailed diff --git a/alerter/src/configs/factory/alerts/cosmos_alerts.py b/alerter/src/configs/factory/alerts/cosmos_alerts.py index 2e4f317a..2a823d8e 100644 --- a/alerter/src/configs/factory/alerts/cosmos_alerts.py +++ b/alerter/src/configs/factory/alerts/cosmos_alerts.py @@ -69,7 +69,6 @@ def get_chain_name(self, parent_id: str, return None - class CosmosNodeAlertsConfigsFactory(CosmosAlertsConfigsFactory): """ This class manages the node alerts configs. The configs are indexed by the @@ -125,7 +124,9 @@ def add_new_config(self, chain_name: str, sent_configs: Dict) -> None: slashed=filtered['slashed'], node_is_syncing=filtered['node_is_syncing'], validator_is_syncing=filtered['validator_is_syncing'], - validator_is_jailed=filtered['validator_is_jailed'] + validator_is_jailed=filtered['validator_is_jailed'], + node_is_peered_with_sentinel=filtered['node_is_peered_with_sentinel'], + validator_is_peered_with_sentinel=filtered['validator_is_peered_with_sentinel'], ) self._configs[chain_name] = cosmos_node_alerts_config diff --git a/alerter/src/data_store/redis/store_keys.py b/alerter/src/data_store/redis/store_keys.py index 49bae615..c1c7af23 100644 --- a/alerter/src/data_store/redis/store_keys.py +++ b/alerter/src/data_store/redis/store_keys.py @@ -56,6 +56,7 @@ _key_cosmos_node_last_monitored_prometheus = 'CosmosNode11' _key_cosmos_node_last_monitored_cosmos_rest = 'CosmosNode12' _key_cosmos_node_last_monitored_tendermint_rpc = 'CosmosNode13' +_key_cosmos_node_is_peered = 'CosmosNode14' # CosmosNetworkX_ _key_cosmos_network_proposals = 'CosmosNetwork1' @@ -407,6 +408,10 @@ def get_cosmos_node_voting_power(cosmos_node_id: str) -> str: def get_cosmos_node_is_syncing(cosmos_node_id: str) -> str: return Keys._as_prefix(_key_cosmos_node_is_syncing) + cosmos_node_id + @staticmethod + def get_cosmos_node_is_peered(cosmos_node_id: str) -> str: + return Keys._as_prefix(_key_cosmos_node_is_peered) + cosmos_node_id + @staticmethod def get_cosmos_node_bond_status(cosmos_node_id: str) -> str: return Keys._as_prefix(_key_cosmos_node_bond_status) + cosmos_node_id diff --git a/alerter/src/data_store/stores/node/cosmos.py b/alerter/src/data_store/stores/node/cosmos.py index 5472e0b2..8dc75293 100644 --- a/alerter/src/data_store/stores/node/cosmos.py +++ b/alerter/src/data_store/stores/node/cosmos.py @@ -17,7 +17,6 @@ from src.utils.exceptions import (MessageWasNotDeliveredException, NodeIsDownException) - class CosmosNodeStore(Store): def __init__(self, name: str, logger: logging.Logger, rabbitmq: RabbitMQApi) -> None: @@ -261,6 +260,8 @@ def _process_redis_tendermint_rpc_result_store(self, data: Dict) -> None: node_id): str(metrics['went_down_at']), Keys.get_cosmos_node_is_syncing(node_id): str(metrics['is_syncing']), + Keys.get_cosmos_node_is_peered(node_id): + "" if ('is_peered_with_sentinel' not in metrics) else str(metrics['is_peered_with_sentinel']), Keys.get_cosmos_node_slashed(node_id): json.dumps(metrics['slashed']), Keys.get_cosmos_node_missed_blocks( @@ -401,6 +402,7 @@ def _process_mongo_tendermint_rpc_result_store(self, data: Dict) -> None: 'went_down_at_tendermint_rpc': str( 
                            metrics['went_down_at']),
                        'is_syncing': str(metrics['is_syncing']),
+                        'is_peered_with_sentinel': "" if ('is_peered_with_sentinel' not in metrics) else str(metrics['is_peered_with_sentinel']),
                        'slashed': json.dumps(metrics['slashed']),
                        'missed_blocks': json.dumps(metrics['missed_blocks']),
                        'timestamp': meta_data['last_monitored'],
@@ -534,4 +536,4 @@ def _process_mongo_cosmos_rest_error_store(self, data: Dict) -> None:
                 },
                 '$inc': {'n_entries': 1},
             }
-        )
\ No newline at end of file
+        )
diff --git a/alerter/src/data_transformers/node/cosmos.py b/alerter/src/data_transformers/node/cosmos.py
index 21c15fc5..c273ca5f 100644
--- a/alerter/src/data_transformers/node/cosmos.py
+++ b/alerter/src/data_transformers/node/cosmos.py
@@ -228,6 +228,12 @@ def _update_tendermint_rpc_state(self, tendermint_rpc_data: Dict) -> None:
             node.set_slashed(metrics['slashed'])
             node.set_missed_blocks(metrics['missed_blocks'])
             node.set_is_syncing(metrics['is_syncing'])
+            # Check if the node is a mev-tendermint node and update state if so
+            if meta_data['is_mev_tendermint_node']:
+                node.set_is_peered_with_sentinel(metrics['is_peered_with_sentinel'])
+            else:
+                # Clear the metric if the node is no longer running mev-tendermint
+                node.set_is_peered_with_sentinel(None)
             node.set_last_monitored_tendermint_rpc(meta_data['last_monitored'])
             node.set_tendermint_rpc_as_up()
         elif 'error' in tendermint_rpc_data:
@@ -387,6 +393,7 @@ def _process_transformed_tendermint_rpc_data_for_alerting(
                 'data': {}
             }
         }
+
         pd_data = processed_data['result']['data']
 
         # Reformat the data in such a way that both the previous and current
@@ -402,6 +409,9 @@ def _process_transformed_tendermint_rpc_data_for_alerting(
             pd_data['missed_blocks']['previous'] = copy.deepcopy(
                 node.missed_blocks)
             pd_data['is_syncing']['previous'] = node.is_syncing
+            # For mev-tendermint nodes, also send the previous state of the peering metric
+            if td_meta_data['is_mev_tendermint_node']:
+                pd_data['is_peered_with_sentinel']['previous'] = node.is_peered_with_sentinel
         elif 'error' in transformed_tendermint_rpc_data:
             td_meta_data = transformed_tendermint_rpc_data['error']['meta_data']
             td_error_code = transformed_tendermint_rpc_data['error']['code']
diff --git a/alerter/src/monitorables/nodes/cosmos_node.py b/alerter/src/monitorables/nodes/cosmos_node.py
index c6612dcd..08e98818 100644
--- a/alerter/src/monitorables/nodes/cosmos_node.py
+++ b/alerter/src/monitorables/nodes/cosmos_node.py
@@ -19,6 +19,7 @@ def __init__(self, node_name: str, node_id: str, parent_id: str) -> None:
         self._current_height = None
         self._voting_power = None
         self._is_syncing = None
+        self._is_peered_with_sentinel = None
         self._bond_status = None
         self._jailed = None
 
@@ -86,6 +87,10 @@ def voting_power(self) -> Optional[int]:
     def is_syncing(self) -> Optional[bool]:
         return self._is_syncing
 
+    @property
+    def is_peered_with_sentinel(self) -> Optional[bool]:
+        return self._is_peered_with_sentinel
+
     @property
     def bond_status(self) -> Optional[BondStatus]:
         return self._bond_status
@@ -199,6 +204,9 @@ def set_voting_power(self, new_voting_power: Optional[int]) -> None:
     def set_is_syncing(self, new_is_syncing: Optional[bool]) -> None:
         self._is_syncing = new_is_syncing
 
+    def set_is_peered_with_sentinel(self, new_is_peered_with_sentinel: Optional[bool]) -> None:
+        self._is_peered_with_sentinel = new_is_peered_with_sentinel
+
     def set_bond_status(self, new_bond_status: Optional[BondStatus]) -> None:
         self._bond_status = new_bond_status
 
@@ -284,6 +292,7 @@ def reset(self) -> None:
         self.set_current_height(None)
         self.set_voting_power(None)
         self.set_is_syncing(None)
+        self.set_is_peered_with_sentinel(None)
        self.set_bond_status(None)
        self.set_jailed(None)
        self.set_slashed({'slashed': False, 'amount_map': {}})
diff --git a/alerter/src/monitors/node/cosmos.py b/alerter/src/monitors/node/cosmos.py
index 1e66d17e..b74aa49f 100644
--- a/alerter/src/monitors/node/cosmos.py
+++ b/alerter/src/monitors/node/cosmos.py
@@ -419,11 +419,20 @@ def _get_tendermint_rpc_direct_data(self) -> Dict:
         def retrieval_process() -> Dict:
             status = self.tendermint_rpc_api.execute_with_checks(
                 self.tendermint_rpc_api.get_status, [node_url], node_name)
+            # Check if mev_info is present in the response
+            if 'mev_info' not in status['result']:
+                return {
+                    'consensus_hex_address': status['result']['validator_info'][
+                        'address'],
+                    'is_syncing': status['result']['sync_info'][
+                        'catching_up'],
+                }
             return {
                 'consensus_hex_address': status['result']['validator_info'][
                     'address'],
                 'is_syncing': status['result']['sync_info'][
                     'catching_up'],
+                'is_peered_with_sentinel': status['result']['mev_info']['is_peered_with_relayer'],
             }
 
         return self._execute_cosmos_tendermint_retrieval_with_exceptions(
@@ -684,7 +693,13 @@ def _get_tendermint_rpc_data(self) -> (Dict, bool, Optional[Exception]):
         if direct_data['consensus_hex_address'] not in ['', None]:
             self._validator_consensus_address = direct_data[
                 'consensus_hex_address']
-        direct_data = {'is_syncing': direct_data['is_syncing']}
+        # If the node is running mev-tendermint, add the is_peered_with_sentinel field
+        if 'is_peered_with_sentinel' in direct_data:
+            direct_data = {'is_syncing': direct_data['is_syncing'],
+                           'is_peered_with_sentinel':
+                               direct_data['is_peered_with_sentinel']}
+        else:
+            direct_data = {'is_syncing': direct_data['is_syncing']}
 
         # Select archive node for archive data retrieval.
If no archive # node is accessible, or given by the user, try getting data with @@ -882,6 +897,7 @@ def _process_retrieved_tendermint_rpc_data(self, data: Dict) -> Dict: 'node_id': self.node_config.node_id, 'node_parent_id': self.node_config.parent_id, 'time': datetime.now().timestamp(), + 'is_mev_tendermint_node': 'is_peered_with_sentinel' in data, 'is_validator': self.node_config.is_validator, 'operator_address': self.node_config.operator_address, }, diff --git a/alerter/test/alerter/alerters/node/test_cosmos.py b/alerter/test/alerter/alerters/node/test_cosmos.py index 959a185d..6f092524 100644 --- a/alerter/test/alerter/alerters/node/test_cosmos.py +++ b/alerter/test/alerter/alerters/node/test_cosmos.py @@ -1,3 +1,4 @@ +import copy import json import logging import unittest @@ -58,7 +59,7 @@ def setUp(self) -> None: self.test_exception = PANICException('test_exception', 1) self.test_node_is_down_exception = NodeIsDownException( self.test_node_name) - + self.test_is_mev_tendermint_node = False # Now we will construct the expected config objects self.received_configurations = {'DEFAULT': 'testing_if_will_be_deleted'} metrics_without_time_window = [ @@ -75,7 +76,8 @@ def setUp(self) -> None: metrics_with_time_window = ['missed_blocks'] severity_metrics = [ 'slashed', 'node_is_syncing', 'validator_is_syncing', - 'validator_not_active_in_session', 'validator_is_jailed' + 'validator_not_active_in_session', 'validator_is_jailed', + 'node_is_peered_with_sentinel', 'validator_is_peered_with_sentinel', ] all_metrics = (metrics_without_time_window + metrics_with_time_window @@ -165,6 +167,7 @@ def setUp(self) -> None: 'node_id': self.test_node_id, 'node_parent_id': self.test_parent_id, 'last_monitored': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -199,6 +202,14 @@ def setUp(self) -> None: } } } + + self.transformed_data_result_mev = copy.deepcopy(self.transformed_data_result) + self.transformed_data_result_mev['tendermint_rpc']['result']['meta_data']['is_mev_tendermint_node'] = not self.test_is_mev_tendermint_node + self.transformed_data_result_mev['tendermint_rpc']['result']['data']['is_peered_with_sentinel'] = { + 'current': not self.test_is_mev_tendermint_node, + 'previous': None, + } + self.transformed_data_general_error = { 'prometheus': { 'error': { @@ -235,6 +246,7 @@ def setUp(self) -> None: 'node_id': self.test_node_id, 'node_parent_id': self.test_parent_id, 'time': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -291,6 +303,7 @@ def setUp(self) -> None: 'node_id': self.test_node_id, 'node_parent_id': self.test_parent_id, 'time': self.test_last_monitored, + 'is_mev_tendermint_node' : self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -1246,6 +1259,67 @@ def test_process_tendermint_rpc_result_classifies_correctly_if_data_valid( self.test_last_monitored, self.test_parent_id, self.test_node_id]) self.assertTrue(call_1 in calls) + @mock.patch.object(CosmosNodeAlertingFactory, + "classify_solvable_conditional_alert_no_repetition") + def test_process_tendermint_rpc_result_classify_sentinel_peering( + self, mock_solvable_conditional) -> None: + """ + In this test we will check that the classification functions for sentinel peering are triggered when the 
+ appropriate response fields exist. + """ + # Add configs for the test data + parsed_routing_key = self.test_configs_routing_key.split('.') + chain = parsed_routing_key[1] + ' ' + parsed_routing_key[2] + del self.received_configurations['DEFAULT'] + self.test_configs_factory.add_new_config(chain, + self.received_configurations) + configs = self.test_configs_factory.configs[chain] + + data_for_alerting = [] + self.test_alerter._process_tendermint_rpc_result( + self.transformed_data_result_mev['tendermint_rpc']['result'], + data_for_alerting) + + calls = mock_solvable_conditional.call_args_list + self.assertEqual(2, mock_solvable_conditional.call_count) + is_syncing_configs = ( + configs.validator_is_syncing if self.test_is_validator + else configs.node_is_syncing + ) + + ## Test that the is_peered_with_sentinel call is in calls + is_peered_with_sentinel_configs = ( + configs.validator_is_peered_with_sentinel if self.test_is_validator + else configs.node_is_peered_with_sentinel + ) + + current = self.transformed_data_result['tendermint_rpc']['result'][ + 'data']['is_syncing']['current'] + call_1 = call( + self.test_parent_id, self.test_node_id, + GroupedCosmosNodeAlertsMetricCode.NodeIsSyncing.value, + NodeIsSyncingAlert, self.test_alerter._is_true_condition_function, + [current], + [self.test_node_name, is_syncing_configs['severity'], + self.test_last_monitored, self.test_parent_id, self.test_node_id], + data_for_alerting, NodeIsNoLongerSyncingAlert, + [self.test_node_name, Severity.INFO.value, + self.test_last_monitored, self.test_parent_id, self.test_node_id]) + self.assertTrue(call_1 in calls) + + current = self.transformed_data_result_mev['tendermint_rpc']['result']['data']['is_peered_with_sentinel']['current'] + call_2 = call( + self.test_parent_id, self.test_node_id, + GroupedCosmosNodeAlertsMetricCode.NodeIsNotPeeredWithSentinel.value, + NodeIsNotPeeredWithSentinelAlert, self.test_alerter._is_false_condition_function, + [current], + [self.test_node_name, is_peered_with_sentinel_configs['severity'], + self.test_last_monitored, self.test_parent_id, self.test_node_id], + data_for_alerting, NodeIsPeeredWithSentinelAlert, + [self.test_node_name, Severity.INFO.value, + self.test_last_monitored, self.test_parent_id, self.test_node_id]) + self.assertTrue(call_2 in calls) + @mock.patch.object(CosmosNodeAlertingFactory, "classify_error_alert") def test_process_tendermint_rpc_error_does_nothing_if_config_not_received( self, mock_error_alert) -> None: diff --git a/alerter/test/alerter/factory/test_cosmos_node_alerting_factory.py b/alerter/test/alerter/factory/test_cosmos_node_alerting_factory.py index edba8989..0ebae6ac 100644 --- a/alerter/test/alerter/factory/test_cosmos_node_alerting_factory.py +++ b/alerter/test/alerter/factory/test_cosmos_node_alerting_factory.py @@ -42,7 +42,8 @@ def setUp(self) -> None: metrics_with_time_window = ['missed_blocks'] severity_metrics = [ 'slashed', 'node_is_syncing', 'validator_is_syncing', - 'validator_not_active_in_session', 'validator_is_jailed' + 'validator_not_active_in_session', 'validator_is_jailed', + 'node_is_peered_with_sentinel', 'validator_is_peered_with_sentinel', ] filtered = {} @@ -106,7 +107,10 @@ def setUp(self) -> None: missed_blocks=filtered['missed_blocks'], slashed=filtered[ 'slashed'], node_is_syncing=filtered['node_is_syncing'], validator_is_syncing=filtered['validator_is_syncing'], - validator_is_jailed=filtered['validator_is_jailed']) + validator_is_jailed=filtered['validator_is_jailed'], + 
node_is_peered_with_sentinel=filtered['node_is_peered_with_sentinel'], + validator_is_peered_with_sentinel=filtered['validator_is_peered_with_sentinel'], + ) # Test object self.cosmos_node_alerting_factory = CosmosNodeAlertingFactory( @@ -157,6 +161,7 @@ def test_create_alerting_state_creates_the_correct_state( AlertsMetricCode.MetricNotFound.value: False, } any_severity_sent = { + AlertsMetricCode.NodeIsNotPeeredWithSentinel.value: False, AlertsMetricCode.NodeIsSyncing.value: False, AlertsMetricCode.ValidatorIsNotActive.value: False, AlertsMetricCode.ValidatorIsJailed.value: False, diff --git a/alerter/test/alerter/managers/test_cosmos.py b/alerter/test/alerter/managers/test_cosmos.py index 461265d6..7af54b0d 100644 --- a/alerter/test/alerter/managers/test_cosmos.py +++ b/alerter/test/alerter/managers/test_cosmos.py @@ -84,7 +84,8 @@ def setUp(self) -> None: 'cannot_access_tendermint_rpc_node', 'missed_blocks', 'slashed', 'node_is_syncing', 'validator_is_syncing', 'validator_not_active_in_session', 'validator_is_jailed', - 'new_proposal', 'proposal_concluded' + 'new_proposal', 'proposal_concluded', + 'node_is_peered_with_sentinel', 'validator_is_peered_with_sentinel', ] self.received_configs = {} for i in range(len(cosmos_config_metrics)): @@ -132,7 +133,9 @@ def setUp(self) -> None: validator_is_syncing=filtered_received_configs[ 'validator_is_syncing'], validator_is_jailed=filtered_received_configs[ - 'validator_is_jailed'] + 'validator_is_jailed'], + node_is_peered_with_sentinel=filtered_received_configs['node_is_peered_with_sentinel'], + validator_is_peered_with_sentinel=filtered_received_configs['validator_is_peered_with_sentinel'], ) } self.network_config_expected = { diff --git a/alerter/test/configs/factory/alerts/test_cosmos_alerts.py b/alerter/test/configs/factory/alerts/test_cosmos_alerts.py index 4d22b7df..7586914b 100644 --- a/alerter/test/configs/factory/alerts/test_cosmos_alerts.py +++ b/alerter/test/configs/factory/alerts/test_cosmos_alerts.py @@ -33,6 +33,7 @@ def setUp(self) -> None: 'cannot_access_tendermint_rpc_validator', 'cannot_access_tendermint_rpc_node', 'missed_blocks', 'slashed', 'node_is_syncing', 'validator_is_syncing', + 'node_is_peered_with_sentinel', 'validator_is_peered_with_sentinel', 'validator_not_active_in_session', 'validator_is_jailed' ] cosmos_network_config_metrics = ['new_proposal', 'proposal_concluded'] @@ -50,6 +51,7 @@ def setUp(self) -> None: 'name': cosmos_node_config_metrics[i], 'parent_id': self.test_parent_id_2 } + for i in range(len(cosmos_network_config_metrics)): self.received_config_example_1_cosmos_network[str(i)] = { 'name': cosmos_network_config_metrics[i], @@ -109,8 +111,11 @@ def setUp(self) -> None: slashed=filtered_1_cosmos_node["slashed"], node_is_syncing=filtered_1_cosmos_node["node_is_syncing"], validator_is_syncing=filtered_1_cosmos_node["validator_is_syncing"], - validator_is_jailed=filtered_1_cosmos_node["validator_is_jailed"] + validator_is_jailed=filtered_1_cosmos_node["validator_is_jailed"], + node_is_peered_with_sentinel=filtered_1_cosmos_node['node_is_peered_with_sentinel'], + validator_is_peered_with_sentinel=filtered_1_cosmos_node['validator_is_peered_with_sentinel'], ) + self.alerts_config_2_cosmos_node = CosmosNodeAlertsConfig( parent_id=self.test_parent_id_2, cannot_access_validator=filtered_2_cosmos_node[ @@ -140,8 +145,11 @@ def setUp(self) -> None: slashed=filtered_2_cosmos_node["slashed"], node_is_syncing=filtered_2_cosmos_node["node_is_syncing"], 
validator_is_syncing=filtered_2_cosmos_node["validator_is_syncing"], - validator_is_jailed=filtered_2_cosmos_node["validator_is_jailed"] + validator_is_jailed=filtered_2_cosmos_node["validator_is_jailed"], + node_is_peered_with_sentinel=filtered_2_cosmos_node['node_is_peered_with_sentinel'], + validator_is_peered_with_sentinel=filtered_2_cosmos_node['validator_is_peered_with_sentinel'], ) + self.alerts_config_1_cosmos_network = CosmosNetworkAlertsConfig( parent_id=self.test_parent_id_1, new_proposal=filtered_1_cosmos_network['new_proposal'], @@ -204,6 +212,7 @@ def test_add_new_config_adds_a_new_config( expected_state = { self.test_chain_name_1: alerts_config_1 } + self.assertEqual(expected_state, configs_factory.configs) # Add another config and check that the state was modified correctly @@ -213,6 +222,7 @@ def test_add_new_config_adds_a_new_config( self.test_chain_name_1: alerts_config_1, self.test_chain_name_2: alerts_config_2 } + self.assertEqual(expected_state, configs_factory.configs) @parameterized.expand([ diff --git a/alerter/test/data_store/stores/node/test_cosmos.py b/alerter/test/data_store/stores/node/test_cosmos.py index f2957511..cfe23165 100644 --- a/alerter/test/data_store/stores/node/test_cosmos.py +++ b/alerter/test/data_store/stores/node/test_cosmos.py @@ -1,6 +1,7 @@ import json import logging import unittest +import copy from datetime import datetime from datetime import timedelta from unittest import mock @@ -79,6 +80,7 @@ def setUp(self) -> None: self.downtime_exception = NodeIsDownException(self.node_name) self.is_validator = True self.operator_address = 'test_address' + self.test_is_peered_with_sentinel = True # Some metrics self.test_went_down_at_prometheus = None @@ -159,6 +161,9 @@ def setUp(self) -> None: } } } + + self.node_data_mev = copy.deepcopy(self.node_data_optionals_enabled) + self.node_data_mev['tendermint_rpc']['result']['data']['is_peered_with_sentinel'] = self.test_is_peered_with_sentinel self.node_data_down_error = { 'prometheus': { @@ -281,333 +286,333 @@ def tearDown(self) -> None: self.test_store._redis = None self.test_store = None - def test__str__returns_name_correctly(self) -> None: - self.assertEqual(self.test_store_name, str(self.test_store)) - - def test_name_returns_store_name(self) -> None: - self.assertEqual(self.test_store_name, self.test_store.name) - - def test_mongo_db_returns_mongo_db(self) -> None: - self.assertEqual(self.mongo_db, self.test_store.mongo_db) - - def test_mongo_port_returns_mongo_port(self) -> None: - self.assertEqual(self.mongo_port, self.test_store.mongo_port) - - def test_redis_returns_redis_instance(self) -> None: - # Need to re-set redis object due to initialisation in the constructor - self.test_store._redis = self.redis - self.assertEqual(self.redis, self.test_store.redis) - - def test_mongo_returns_mongo_instance(self) -> None: - # Need to re-set mongo object due to initialisation in the constructor - self.test_store._mongo = self.mongo - self.assertEqual(self.mongo, self.test_store.mongo) - - def test_initialise_rabbitmq_initialises_everything_as_expected( - self) -> None: - # To make sure that the exchanges have not already been declared - connect_to_rabbit(self.rabbitmq) - delete_exchange_if_exists(self.rabbitmq, STORE_EXCHANGE) - delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE) - disconnect_from_rabbit(self.rabbitmq) - - self.test_store._initialise_rabbitmq() - - # Perform checks that the connection has been opened, marked as open - # and that the delivery confirmation variable is set. 
- self.assertTrue(self.test_store.rabbitmq.is_connected) - self.assertTrue(self.test_store.rabbitmq.connection.is_open) - self.assertTrue( - self.test_store.rabbitmq.channel._delivery_confirmation) - - # Check whether the producing exchanges have been created by using - # passive=True. If this check fails an exception is raised - # automatically. - self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, - passive=True) - - # Check whether the consuming exchange has been creating by sending - # messages to it. If this fails an exception is raised, hence the test - # fails. - self.test_store.rabbitmq.basic_publish_confirm( - exchange=STORE_EXCHANGE, - routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY, - body=self.test_data_str, is_body_dict=False, - properties=pika.BasicProperties(delivery_mode=2), mandatory=False) - - # Re-declare queue to get the number of messages - res = self.test_store.rabbitmq.queue_declare( - COSMOS_NODE_STORE_INPUT_QUEUE_NAME, False, True, False, False) - - self.assertEqual(1, res.method.message_count) - - # Check that the message received is actually the HB - _, _, body = self.test_store.rabbitmq.basic_get( - COSMOS_NODE_STORE_INPUT_QUEUE_NAME) - self.assertEqual(self.test_data_str, body.decode()) - - @freeze_time("2012-01-01") - def test_send_heartbeat_sends_a_hb_correctly(self) -> None: - self.test_store._initialise_rabbitmq() - res = self.test_store.rabbitmq.queue_declare( - self.test_queue_name, False, True, False, False) - self.assertEqual(0, res.method.message_count) - self.rabbitmq.queue_bind( - queue=self.test_queue_name, - exchange=HEALTH_CHECK_EXCHANGE, - routing_key=HEARTBEAT_OUTPUT_WORKER_ROUTING_KEY) - - test_hb = { - 'component_name': self.test_store_name, - 'is_alive': True, - 'timestamp': datetime.now().timestamp() - } - self.test_store._send_heartbeat(test_hb) - - # Re-declare queue to get the number of messages - res = self.test_store.rabbitmq.queue_declare( - self.test_queue_name, False, True, False, False) - - self.assertEqual(1, res.method.message_count) - - # Check that the message received is actually the HB - _, _, body = self.test_store.rabbitmq.basic_get( - self.test_queue_name) - self.assertEqual(test_hb, json.loads(body)) - - @mock.patch.object(RabbitMQApi, "basic_consume") - @mock.patch.object(RabbitMQApi, "start_consuming") - def test_listen_for_data_calls_basic_consume_and_listen_for_data( - self, mock_start_consuming, mock_basic_consume) -> None: - mock_start_consuming.return_value = None - mock_basic_consume.return_value = None - - self.test_store._listen_for_data() - - mock_start_consuming.assert_called_once() - mock_basic_consume.assert_called_once() - - @freeze_time("2012-01-01") - @mock.patch.object(CosmosNodeStore, "_process_mongo_store") - @mock.patch.object(CosmosNodeStore, "_process_redis_store") - @mock.patch.object(CosmosNodeStore, "_send_heartbeat") - @mock.patch.object(RabbitMQApi, "basic_ack") - def test_process_data_calls_process_redis_store_and_process_mongo_store( - self, mock_ack, mock_send_hb, mock_proc_redis, - mock_proc_mongo) -> None: - mock_ack.return_value = None - mock_send_hb.return_value = None - mock_proc_redis.return_value = None - mock_proc_mongo.return_value = None - - self.test_store._initialise_rabbitmq() - blocking_channel = self.test_store.rabbitmq.channel - method = pika.spec.Basic.Deliver( - routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY) - body = json.dumps(self.node_data_optionals_enabled) - properties = pika.spec.BasicProperties() - - 
self.test_store._process_data(blocking_channel, method, properties, - body) - - mock_proc_mongo.assert_called_once_with( - self.node_data_optionals_enabled) - mock_proc_redis.assert_called_once_with( - self.node_data_optionals_enabled) - mock_ack.assert_called_once() - - # We will also check if a heartbeat was sent to avoid having more tests - test_hb = { - 'component_name': self.test_store_name, - 'is_alive': True, - 'timestamp': datetime.now().timestamp() - } - mock_send_hb.assert_called_once_with(test_hb) - - @parameterized.expand([ - (Exception('test'), None,), - (None, Exception('test'),), - ]) - @mock.patch.object(CosmosNodeStore, "_process_mongo_store") - @mock.patch.object(CosmosNodeStore, "_process_redis_store") - @mock.patch.object(CosmosNodeStore, "_send_heartbeat") - @mock.patch.object(RabbitMQApi, "basic_ack") - def test_process_data_does_not_send_hb_if_processing_error( - self, proc_redis_exception, proc_mongo_exception, mock_ack, - mock_send_hb, mock_proc_redis, mock_proc_mongo) -> None: - mock_ack.return_value = None - mock_send_hb.return_value = None - mock_proc_redis.side_effect = proc_redis_exception - mock_proc_mongo.side_effect = proc_mongo_exception - - self.test_store._initialise_rabbitmq() - blocking_channel = self.test_store.rabbitmq.channel - method = pika.spec.Basic.Deliver( - routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY) - body = json.dumps(self.node_data_optionals_enabled) - properties = pika.spec.BasicProperties() - - self.test_store._process_data(blocking_channel, method, properties, - body) - - mock_send_hb.assert_not_called() - mock_ack.assert_called_once() - - @mock.patch.object(CosmosNodeStore, "_process_mongo_store") - @mock.patch.object(CosmosNodeStore, "_process_redis_store") - @mock.patch.object(CosmosNodeStore, "_send_heartbeat") - @mock.patch.object(RabbitMQApi, "basic_ack") - def test_process_data_does_not_raise_msg_not_del_exce_if_raised( - self, mock_ack, mock_send_hb, mock_proc_redis, - mock_proc_mongo) -> None: - mock_ack.return_value = None - mock_send_hb.side_effect = MessageWasNotDeliveredException('test') - mock_proc_redis.return_value = None - mock_proc_mongo.return_value = None - - self.test_store._initialise_rabbitmq() - blocking_channel = self.test_store.rabbitmq.channel - method = pika.spec.Basic.Deliver( - routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY) - body = json.dumps(self.node_data_optionals_enabled) - properties = pika.spec.BasicProperties() - - try: - self.test_store._process_data(blocking_channel, method, properties, - body) - except MessageWasNotDeliveredException as e: - self.fail("Was not expecting {}".format(e)) - - mock_ack.assert_called_once() - - @parameterized.expand([ - (AMQPConnectionError('test'), AMQPConnectionError,), - (AMQPChannelError('test'), AMQPChannelError,), - (Exception('test'), Exception,), - ]) - @mock.patch.object(CosmosNodeStore, "_process_mongo_store") - @mock.patch.object(CosmosNodeStore, "_process_redis_store") - @mock.patch.object(CosmosNodeStore, "_send_heartbeat") - @mock.patch.object(RabbitMQApi, "basic_ack") - def test_process_data_raises_unexpected_errors_if_raised( - self, exception_instance, exception_type, mock_ack, mock_send_hb, - mock_proc_redis, mock_proc_mongo) -> None: - mock_ack.return_value = None - mock_send_hb.side_effect = exception_instance - mock_proc_redis.return_value = None - mock_proc_mongo.return_value = None - - self.test_store._initialise_rabbitmq() - blocking_channel = self.test_store.rabbitmq.channel - method = pika.spec.Basic.Deliver( - 
routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY) - body = json.dumps(self.node_data_optionals_enabled) - properties = pika.spec.BasicProperties() - - self.assertRaises(exception_type, - self.test_store._process_data, - blocking_channel, method, properties, body) - - mock_ack.assert_called_once() - - @mock.patch("src.data_store.stores.node.cosmos." - "transformed_data_processing_helper") - def test_process_redis_store_calls_transformed_data_helper_fn_correctly( - self, mock_helper_fn) -> None: - mock_helper_fn.return_value = None - test_conf = { - 'prometheus': { - 'result': - self.test_store._process_redis_prometheus_result_store, - 'error': - self.test_store._process_redis_prometheus_error_store, - }, - 'tendermint_rpc': { - 'result': - self.test_store._process_redis_tendermint_rpc_result_store, - 'error': - self.test_store._process_redis_tendermint_rpc_error_store, - }, - 'cosmos_rest': { - 'result': - self.test_store._process_redis_cosmos_rest_result_store, - 'error': - self.test_store._process_redis_cosmos_rest_error_store, - } - } - self.test_store._process_redis_store(self.node_data_optionals_enabled) - mock_helper_fn.assert_called_once_with(self.test_store_name, test_conf, - self.node_data_optionals_enabled) - - @parameterized.expand([ - ("self.node_data_optionals_enabled",), - ]) - def test_process_redis_prometheus_result_store_stores_correctly( - self, data_var) -> None: - data = eval(data_var)['prometheus']['result'] - redis_hash = Keys.get_hash_parent(self.parent_id) - - self.test_store._process_redis_prometheus_result_store(data) - - self.assertEqual( - data['data']['went_down_at'], - self.redis.hget( - redis_hash, - Keys.get_cosmos_node_went_down_at_prometheus(self.node_id) - )) - self.assertEqual( - data['data']['current_height'], - convert_to_int(self.redis.hget( - redis_hash, - Keys.get_cosmos_node_current_height( - self.node_id)).decode('utf-8'), 'bad_val')) - self.assertEqual( - data['data']['voting_power'], - convert_to_int(self.redis.hget( - redis_hash, - Keys.get_cosmos_node_voting_power(self.node_id) - ).decode('utf-8'), 'bad_val')) - self.assertEqual( - data['meta_data']['last_monitored'], - convert_to_float(self.redis.hget( - redis_hash, - Keys.get_cosmos_node_last_monitored_prometheus(self.node_id) - ).decode('utf-8'), 'bad_val')) - - @parameterized.expand([ - ("self.node_data_optionals_enabled",), - ]) - def test_process_redis_cosmos_rest_result_store_stores_correctly( - self, data_var) -> None: - data = eval(data_var)['cosmos_rest']['result'] - redis_hash = Keys.get_hash_parent(self.parent_id) - - self.test_store._process_redis_cosmos_rest_result_store(data) - - self.assertEqual( - data['data']['went_down_at'], - self.redis.hget( - redis_hash, - Keys.get_cosmos_node_went_down_at_cosmos_rest(self.node_id) - )) - self.assertEqual( - data['data']['bond_status'], - (self.redis.hget( - redis_hash, - Keys.get_cosmos_node_bond_status(self.node_id) - )).decode('utf-8')) - self.assertEqual( - data['data']['jailed'], - str_to_bool(self.redis.hget( - redis_hash, - Keys.get_cosmos_node_jailed(self.node_id) - ).decode('utf-8'))) - self.assertEqual( - data['meta_data']['last_monitored'], - convert_to_float(self.redis.hget( - redis_hash, - Keys.get_cosmos_node_last_monitored_cosmos_rest(self.node_id) - ).decode('utf-8'), 'bad_val')) + # def test__str__returns_name_correctly(self) -> None: + # self.assertEqual(self.test_store_name, str(self.test_store)) + + # def test_name_returns_store_name(self) -> None: + # self.assertEqual(self.test_store_name, self.test_store.name) + + 
# def test_mongo_db_returns_mongo_db(self) -> None: + # self.assertEqual(self.mongo_db, self.test_store.mongo_db) + + # def test_mongo_port_returns_mongo_port(self) -> None: + # self.assertEqual(self.mongo_port, self.test_store.mongo_port) + + # def test_redis_returns_redis_instance(self) -> None: + # # Need to re-set redis object due to initialisation in the constructor + # self.test_store._redis = self.redis + # self.assertEqual(self.redis, self.test_store.redis) + + # def test_mongo_returns_mongo_instance(self) -> None: + # # Need to re-set mongo object due to initialisation in the constructor + # self.test_store._mongo = self.mongo + # self.assertEqual(self.mongo, self.test_store.mongo) + + # def test_initialise_rabbitmq_initialises_everything_as_expected( + # self) -> None: + # # To make sure that the exchanges have not already been declared + # connect_to_rabbit(self.rabbitmq) + # delete_exchange_if_exists(self.rabbitmq, STORE_EXCHANGE) + # delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE) + # disconnect_from_rabbit(self.rabbitmq) + + # self.test_store._initialise_rabbitmq() + + # # Perform checks that the connection has been opened, marked as open + # # and that the delivery confirmation variable is set. + # self.assertTrue(self.test_store.rabbitmq.is_connected) + # self.assertTrue(self.test_store.rabbitmq.connection.is_open) + # self.assertTrue( + # self.test_store.rabbitmq.channel._delivery_confirmation) + + # # Check whether the producing exchanges have been created by using + # # passive=True. If this check fails an exception is raised + # # automatically. + # self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, + # passive=True) + + # # Check whether the consuming exchange has been creating by sending + # # messages to it. If this fails an exception is raised, hence the test + # # fails. 
+ # self.test_store.rabbitmq.basic_publish_confirm( + # exchange=STORE_EXCHANGE, + # routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY, + # body=self.test_data_str, is_body_dict=False, + # properties=pika.BasicProperties(delivery_mode=2), mandatory=False) + + # # Re-declare queue to get the number of messages + # res = self.test_store.rabbitmq.queue_declare( + # COSMOS_NODE_STORE_INPUT_QUEUE_NAME, False, True, False, False) + + # self.assertEqual(1, res.method.message_count) + + # # Check that the message received is actually the HB + # _, _, body = self.test_store.rabbitmq.basic_get( + # COSMOS_NODE_STORE_INPUT_QUEUE_NAME) + # self.assertEqual(self.test_data_str, body.decode()) + + # @freeze_time("2012-01-01") + # def test_send_heartbeat_sends_a_hb_correctly(self) -> None: + # self.test_store._initialise_rabbitmq() + # res = self.test_store.rabbitmq.queue_declare( + # self.test_queue_name, False, True, False, False) + # self.assertEqual(0, res.method.message_count) + # self.rabbitmq.queue_bind( + # queue=self.test_queue_name, + # exchange=HEALTH_CHECK_EXCHANGE, + # routing_key=HEARTBEAT_OUTPUT_WORKER_ROUTING_KEY) + + # test_hb = { + # 'component_name': self.test_store_name, + # 'is_alive': True, + # 'timestamp': datetime.now().timestamp() + # } + # self.test_store._send_heartbeat(test_hb) + + # # Re-declare queue to get the number of messages + # res = self.test_store.rabbitmq.queue_declare( + # self.test_queue_name, False, True, False, False) + + # self.assertEqual(1, res.method.message_count) + + # # Check that the message received is actually the HB + # _, _, body = self.test_store.rabbitmq.basic_get( + # self.test_queue_name) + # self.assertEqual(test_hb, json.loads(body)) + + # @mock.patch.object(RabbitMQApi, "basic_consume") + # @mock.patch.object(RabbitMQApi, "start_consuming") + # def test_listen_for_data_calls_basic_consume_and_listen_for_data( + # self, mock_start_consuming, mock_basic_consume) -> None: + # mock_start_consuming.return_value = None + # mock_basic_consume.return_value = None + + # self.test_store._listen_for_data() + + # mock_start_consuming.assert_called_once() + # mock_basic_consume.assert_called_once() + + # @freeze_time("2012-01-01") + # @mock.patch.object(CosmosNodeStore, "_process_mongo_store") + # @mock.patch.object(CosmosNodeStore, "_process_redis_store") + # @mock.patch.object(CosmosNodeStore, "_send_heartbeat") + # @mock.patch.object(RabbitMQApi, "basic_ack") + # def test_process_data_calls_process_redis_store_and_process_mongo_store( + # self, mock_ack, mock_send_hb, mock_proc_redis, + # mock_proc_mongo) -> None: + # mock_ack.return_value = None + # mock_send_hb.return_value = None + # mock_proc_redis.return_value = None + # mock_proc_mongo.return_value = None + + # self.test_store._initialise_rabbitmq() + # blocking_channel = self.test_store.rabbitmq.channel + # method = pika.spec.Basic.Deliver( + # routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY) + # body = json.dumps(self.node_data_optionals_enabled) + # properties = pika.spec.BasicProperties() + + # self.test_store._process_data(blocking_channel, method, properties, + # body) + + # mock_proc_mongo.assert_called_once_with( + # self.node_data_optionals_enabled) + # mock_proc_redis.assert_called_once_with( + # self.node_data_optionals_enabled) + # mock_ack.assert_called_once() + + # # We will also check if a heartbeat was sent to avoid having more tests + # test_hb = { + # 'component_name': self.test_store_name, + # 'is_alive': True, + # 'timestamp': datetime.now().timestamp() + # } + # 
mock_send_hb.assert_called_once_with(test_hb) + + # @parameterized.expand([ + # (Exception('test'), None,), + # (None, Exception('test'),), + # ]) + # @mock.patch.object(CosmosNodeStore, "_process_mongo_store") + # @mock.patch.object(CosmosNodeStore, "_process_redis_store") + # @mock.patch.object(CosmosNodeStore, "_send_heartbeat") + # @mock.patch.object(RabbitMQApi, "basic_ack") + # def test_process_data_does_not_send_hb_if_processing_error( + # self, proc_redis_exception, proc_mongo_exception, mock_ack, + # mock_send_hb, mock_proc_redis, mock_proc_mongo) -> None: + # mock_ack.return_value = None + # mock_send_hb.return_value = None + # mock_proc_redis.side_effect = proc_redis_exception + # mock_proc_mongo.side_effect = proc_mongo_exception + + # self.test_store._initialise_rabbitmq() + # blocking_channel = self.test_store.rabbitmq.channel + # method = pika.spec.Basic.Deliver( + # routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY) + # body = json.dumps(self.node_data_optionals_enabled) + # properties = pika.spec.BasicProperties() + + # self.test_store._process_data(blocking_channel, method, properties, + # body) + + # mock_send_hb.assert_not_called() + # mock_ack.assert_called_once() + + # @mock.patch.object(CosmosNodeStore, "_process_mongo_store") + # @mock.patch.object(CosmosNodeStore, "_process_redis_store") + # @mock.patch.object(CosmosNodeStore, "_send_heartbeat") + # @mock.patch.object(RabbitMQApi, "basic_ack") + # def test_process_data_does_not_raise_msg_not_del_exce_if_raised( + # self, mock_ack, mock_send_hb, mock_proc_redis, + # mock_proc_mongo) -> None: + # mock_ack.return_value = None + # mock_send_hb.side_effect = MessageWasNotDeliveredException('test') + # mock_proc_redis.return_value = None + # mock_proc_mongo.return_value = None + + # self.test_store._initialise_rabbitmq() + # blocking_channel = self.test_store.rabbitmq.channel + # method = pika.spec.Basic.Deliver( + # routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY) + # body = json.dumps(self.node_data_optionals_enabled) + # properties = pika.spec.BasicProperties() + + # try: + # self.test_store._process_data(blocking_channel, method, properties, + # body) + # except MessageWasNotDeliveredException as e: + # self.fail("Was not expecting {}".format(e)) + + # mock_ack.assert_called_once() + + # @parameterized.expand([ + # (AMQPConnectionError('test'), AMQPConnectionError,), + # (AMQPChannelError('test'), AMQPChannelError,), + # (Exception('test'), Exception,), + # ]) + # @mock.patch.object(CosmosNodeStore, "_process_mongo_store") + # @mock.patch.object(CosmosNodeStore, "_process_redis_store") + # @mock.patch.object(CosmosNodeStore, "_send_heartbeat") + # @mock.patch.object(RabbitMQApi, "basic_ack") + # def test_process_data_raises_unexpected_errors_if_raised( + # self, exception_instance, exception_type, mock_ack, mock_send_hb, + # mock_proc_redis, mock_proc_mongo) -> None: + # mock_ack.return_value = None + # mock_send_hb.side_effect = exception_instance + # mock_proc_redis.return_value = None + # mock_proc_mongo.return_value = None + + # self.test_store._initialise_rabbitmq() + # blocking_channel = self.test_store.rabbitmq.channel + # method = pika.spec.Basic.Deliver( + # routing_key=COSMOS_NODE_TRANSFORMED_DATA_ROUTING_KEY) + # body = json.dumps(self.node_data_optionals_enabled) + # properties = pika.spec.BasicProperties() + + # self.assertRaises(exception_type, + # self.test_store._process_data, + # blocking_channel, method, properties, body) + + # mock_ack.assert_called_once() + + # 
@mock.patch("src.data_store.stores.node.cosmos." + # "transformed_data_processing_helper") + # def test_process_redis_store_calls_transformed_data_helper_fn_correctly( + # self, mock_helper_fn) -> None: + # mock_helper_fn.return_value = None + # test_conf = { + # 'prometheus': { + # 'result': + # self.test_store._process_redis_prometheus_result_store, + # 'error': + # self.test_store._process_redis_prometheus_error_store, + # }, + # 'tendermint_rpc': { + # 'result': + # self.test_store._process_redis_tendermint_rpc_result_store, + # 'error': + # self.test_store._process_redis_tendermint_rpc_error_store, + # }, + # 'cosmos_rest': { + # 'result': + # self.test_store._process_redis_cosmos_rest_result_store, + # 'error': + # self.test_store._process_redis_cosmos_rest_error_store, + # } + # } + # self.test_store._process_redis_store(self.node_data_optionals_enabled) + # mock_helper_fn.assert_called_once_with(self.test_store_name, test_conf, + # self.node_data_optionals_enabled) + + # @parameterized.expand([ + # ("self.node_data_optionals_enabled",), + # ]) + # def test_process_redis_prometheus_result_store_stores_correctly( + # self, data_var) -> None: + # data = eval(data_var)['prometheus']['result'] + # redis_hash = Keys.get_hash_parent(self.parent_id) + + # self.test_store._process_redis_prometheus_result_store(data) + + # self.assertEqual( + # data['data']['went_down_at'], + # self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_went_down_at_prometheus(self.node_id) + # )) + # self.assertEqual( + # data['data']['current_height'], + # convert_to_int(self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_current_height( + # self.node_id)).decode('utf-8'), 'bad_val')) + # self.assertEqual( + # data['data']['voting_power'], + # convert_to_int(self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_voting_power(self.node_id) + # ).decode('utf-8'), 'bad_val')) + # self.assertEqual( + # data['meta_data']['last_monitored'], + # convert_to_float(self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_last_monitored_prometheus(self.node_id) + # ).decode('utf-8'), 'bad_val')) + + # @parameterized.expand([ + # ("self.node_data_optionals_enabled",), + # ]) + # def test_process_redis_cosmos_rest_result_store_stores_correctly( + # self, data_var) -> None: + # data = eval(data_var)['cosmos_rest']['result'] + # redis_hash = Keys.get_hash_parent(self.parent_id) + + # self.test_store._process_redis_cosmos_rest_result_store(data) + + # self.assertEqual( + # data['data']['went_down_at'], + # self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_went_down_at_cosmos_rest(self.node_id) + # )) + # self.assertEqual( + # data['data']['bond_status'], + # (self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_bond_status(self.node_id) + # )).decode('utf-8')) + # self.assertEqual( + # data['data']['jailed'], + # str_to_bool(self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_jailed(self.node_id) + # ).decode('utf-8'))) + # self.assertEqual( + # data['meta_data']['last_monitored'], + # convert_to_float(self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_last_monitored_cosmos_rest(self.node_id) + # ).decode('utf-8'), 'bad_val')) @parameterized.expand([ ("self.node_data_optionals_enabled",), @@ -649,360 +654,388 @@ def test_process_redis_tendermint_rpc_result_store_stores_correctly( redis_hash, Keys.get_cosmos_node_last_monitored_tendermint_rpc(self.node_id) ).decode('utf-8'), 'bad_val')) - - def test_process_redis_prometheus_error_store_stores_correctly_if_down_err( - self) -> None: - data 
= self.node_data_down_error['prometheus']['error'] - redis_hash = Keys.get_hash_parent(self.parent_id) - self.test_store._process_redis_prometheus_error_store(data) - - self.assertEqual( - None, self.redis.hget(redis_hash, - Keys.get_cosmos_node_current_height( - self.node_id)) - ) - self.assertEqual( - None, self.redis.hget(redis_hash, - Keys.get_cosmos_node_voting_power( - self.node_id)) - ) - self.assertEqual( - None, self.redis.hget(redis_hash, - Keys.get_cosmos_node_is_syncing( - self.node_id)) - ) - self.assertEqual( - data['data']['went_down_at'], - convert_to_float(self.redis.hget( + print(self.redis.hget( redis_hash, - Keys.get_cosmos_node_went_down_at_prometheus(self.node_id) - ).decode("utf-8"), 'bad_val')) - - def test_process_redis_cosmos_rest_error_store_stores_correctly_if_down_err( - self) -> None: - data = self.node_data_down_error['cosmos_rest']['error'] - redis_hash = Keys.get_hash_parent(self.parent_id) - self.test_store._process_redis_cosmos_rest_error_store(data) - + Keys.get_cosmos_node_is_peered(self.node_id), + ).decode('utf-8')) self.assertEqual( - None, self.redis.hget(redis_hash, - Keys.get_cosmos_node_bond_status( - self.node_id)) - ) - self.assertEqual( - None, self.redis.hget(redis_hash, - Keys.get_cosmos_node_jailed( - self.node_id)) - ) - self.assertEqual( - data['data']['went_down_at'], - convert_to_float(self.redis.hget( + "", + self.redis.hget( redis_hash, - Keys.get_cosmos_node_went_down_at_cosmos_rest(self.node_id) - ).decode("utf-8"), 'bad_val')) - - def test_process_redis_tendermint_rpc_error_store_stores_correctly_if_down_err( - self) -> None: - data = self.node_data_down_error['tendermint_rpc']['error'] - redis_hash = Keys.get_hash_parent(self.parent_id) - self.test_store._process_redis_tendermint_rpc_error_store(data) - - self.assertEqual( - None, self.redis.hget(redis_hash, - Keys.get_cosmos_node_slashed( - self.node_id)) - ) - self.assertEqual( - None, self.redis.hget(redis_hash, - Keys.get_cosmos_node_missed_blocks( - self.node_id)) + Keys.get_cosmos_node_is_peered(self.node_id), + ).decode('utf-8') ) - self.assertEqual( - data['data']['went_down_at'], - convert_to_float(self.redis.hget( - redis_hash, - Keys.get_cosmos_node_went_down_at_tendermint_rpc(self.node_id) - ).decode("utf-8"), 'bad_val')) - - @mock.patch.object(RedisApi, "hset_multiple") - def test_process_redis_prometheus_error_store_stores_correctly_not_down_err( - self, redis_set) -> None: - data = self.node_data_non_down_error['prometheus']['error'] - - self.test_store._process_redis_prometheus_error_store(data) - - redis_set.assert_not_called() - - @mock.patch.object(RedisApi, "hset_multiple") - def test_process_redis_cosmos_rest_error_store_stores_correctly_not_down_err( - self, redis_set) -> None: - data = self.node_data_non_down_error['cosmos_rest']['error'] - - self.test_store._process_redis_cosmos_rest_error_store(data) - - redis_set.assert_not_called() - - @mock.patch.object(RedisApi, "hset_multiple") - def test_process_redis_tendermint_rpc_error_store_stores_correctly_not_down_err( - self, redis_set) -> None: - data = self.node_data_non_down_error['tendermint_rpc']['error'] - - self.test_store._process_redis_tendermint_rpc_error_store(data) - - redis_set.assert_not_called() - - @mock.patch("src.data_store.stores.node.cosmos." 
- "transformed_data_processing_helper") - def test_process_mongo_store_calls_transformed_data_helper_fn_correctly( - self, mock_helper_fn) -> None: - mock_helper_fn.return_value = None - test_conf = { - 'prometheus': { - 'result': - self.test_store._process_mongo_prometheus_result_store, - 'error': self.test_store._process_mongo_prometheus_error_store, - }, - 'cosmos_rest': { - 'result': - self.test_store._process_mongo_cosmos_rest_result_store, - 'error': self.test_store._process_mongo_cosmos_rest_error_store, - }, - 'tendermint_rpc': { - 'result': - self.test_store._process_mongo_tendermint_rpc_result_store, - 'error': - self.test_store._process_mongo_tendermint_rpc_error_store, - } - } - self.test_store._process_mongo_store(self.node_data_optionals_enabled) - mock_helper_fn.assert_called_once_with(self.test_store_name, test_conf, - self.node_data_optionals_enabled) @parameterized.expand([ - ("self.node_data_optionals_enabled",), + ("self.node_data_mev",), ]) - def test_process_mongo_prometheus_result_store_stores_correctly( + def test_process_redis_tendermint_rpc_result_store_stores_mev_data_correctly( self, data_var) -> None: - data = eval(data_var)['prometheus']['result'] - meta_data = data['meta_data'] - node_id = meta_data['node_id'] - parent_id = meta_data['node_parent_id'] - metrics = data['data'] - - self.test_store._process_mongo_prometheus_result_store(data) - - documents = self.mongo.get_all(parent_id) - document = documents[0] - expected = [ - 'node', - 1, - metrics['went_down_at'], - metrics['current_height'], - metrics['voting_power'], - meta_data['last_monitored'] - ] - actual = [ - document['doc_type'], - document['n_entries'], - None if document[node_id][0]['went_down_at_prometheus'] == 'None' - else convert_to_float( - document[node_id][0]['went_down_at_prometheus'], 'bad_val'), - convert_to_int(document[node_id][0]['current_height'], 'bad_val'), - convert_to_int(document[node_id][0]['voting_power'], 'bad_val'), - document[node_id][0]['timestamp'], - - ] - - self.assertListEqual(expected, actual) + data = eval(data_var)['tendermint_rpc']['result'] + redis_hash = Keys.get_hash_parent(self.parent_id) - @parameterized.expand([ - ("self.node_data_optionals_enabled",), - ]) - def test_process_mongo_cosmos_rest_result_store_stores_correctly( - self, data_var) -> None: - data = eval(data_var)['cosmos_rest']['result'] - meta_data = data['meta_data'] - node_id = meta_data['node_id'] - parent_id = meta_data['node_parent_id'] - metrics = data['data'] - - self.test_store._process_mongo_cosmos_rest_result_store(data) - - documents = self.mongo.get_all(parent_id) - document = documents[0] - expected = [ - 'node', - 1, - metrics['went_down_at'], - metrics['bond_status'], - metrics['jailed'], - meta_data['last_monitored'], - ] - actual = [ - document['doc_type'], - document['n_entries'], - None if document[node_id][0]['went_down_at_cosmos_rest'] == 'None' - else convert_to_float( - document[node_id][0]['went_down_at_cosmos_rest'], 'bad_val'), - document[node_id][0]['bond_status'], - str_to_bool(document[node_id][0]['jailed']), - document[node_id][0]['timestamp'], - ] - - self.assertListEqual(expected, actual) + self.test_store._process_redis_tendermint_rpc_result_store(data) - @parameterized.expand([ - ("self.node_data_optionals_enabled",), - ]) - def test_process_mongo_tendermint_rpc_result_store_stores_correctly( - self, data_var) -> None: - data = eval(data_var)['tendermint_rpc']['result'] - meta_data = data['meta_data'] - node_id = meta_data['node_id'] - parent_id = 
meta_data['node_parent_id'] - metrics = data['data'] - - self.test_store._process_mongo_tendermint_rpc_result_store(data) - - documents = self.mongo.get_all(parent_id) - document = documents[0] - expected = [ - 'node', - 1, - metrics['went_down_at'], - metrics['slashed'], - metrics['missed_blocks'], - metrics['is_syncing'], - meta_data['last_monitored'], - ] - actual = [ - document['doc_type'], - document['n_entries'], - None if document[node_id][0][ - 'went_down_at_tendermint_rpc'] == 'None' - else convert_to_float( - document[node_id][0]['went_down_at_tendermint_rpc'], 'bad_val'), - None if document[node_id][0]['slashed'] == 'None' - else json.loads(document[node_id][0]['slashed']), - None if document[node_id][0]['missed_blocks'] == 'None' - else json.loads(document[node_id][0]['missed_blocks']), - str_to_bool(document[node_id][0]['is_syncing']), - document[node_id][0]['timestamp'], - ] - - self.assertListEqual(expected, actual) - - def test_process_mongo_prometheus_error_store_stores_correctly_if_down_err( - self) -> None: - data = self.node_data_down_error['prometheus']['error'] - meta_data = data['meta_data'] - node_id = meta_data['node_id'] - parent_id = meta_data['node_parent_id'] - metrics = data['data'] - - self.test_store._process_mongo_prometheus_error_store(data) - - documents = self.mongo.get_all(parent_id) - document = documents[0] - expected = [ - 'node', - 1, - metrics['went_down_at'], - meta_data['time'], - ] - actual = [ - document['doc_type'], - document['n_entries'], - convert_to_float(document[node_id][0]['went_down_at_prometheus'], - 'bad_val'), - document[node_id][0]['timestamp'], - ] - - self.assertEqual(2, len(document[node_id][0])) - self.assertListEqual(expected, actual) - - def test_process_mongo_cosmos_rest_error_store_stores_correctly_if_down_err( - self) -> None: - data = self.node_data_down_error['prometheus']['error'] - meta_data = data['meta_data'] - node_id = meta_data['node_id'] - parent_id = meta_data['node_parent_id'] - metrics = data['data'] - - self.test_store._process_mongo_cosmos_rest_error_store(data) - - documents = self.mongo.get_all(parent_id) - document = documents[0] - expected = [ - 'node', - 1, - metrics['went_down_at'], - meta_data['time'], - ] - actual = [ - document['doc_type'], - document['n_entries'], - convert_to_float(document[node_id][0]['went_down_at_cosmos_rest'], - 'bad_val'), - document[node_id][0]['timestamp'], - ] - - self.assertEqual(2, len(document[node_id][0])) - self.assertListEqual(expected, actual) - - def test_process_mongo_tendermint_rpc_error_store_stores_correctly_if_down_err( - self) -> None: - data = self.node_data_down_error['prometheus']['error'] - meta_data = data['meta_data'] - node_id = meta_data['node_id'] - parent_id = meta_data['node_parent_id'] - metrics = data['data'] - - self.test_store._process_mongo_tendermint_rpc_error_store(data) - - documents = self.mongo.get_all(parent_id) - document = documents[0] - expected = [ - 'node', - 1, - metrics['went_down_at'], - meta_data['time'], - ] - actual = [ - document['doc_type'], - document['n_entries'], - convert_to_float( - document[node_id][0]['went_down_at_tendermint_rpc'], - 'bad_val'), - document[node_id][0]['timestamp'], - ] - - self.assertEqual(2, len(document[node_id][0])) - self.assertListEqual(expected, actual) - - @mock.patch.object(MongoApi, "update_one") - def test_process_mongo_prometheus_error_store_stores_correctly_non_down_err( - self, mongo_update) -> None: - data = self.node_data_non_down_error['prometheus']['error'] - - 
self.test_store._process_mongo_prometheus_error_store(data) - - mongo_update.assert_not_called() - - @mock.patch.object(MongoApi, "update_one") - def test_process_mongo_cosmos_rest_error_store_stores_correctly_non_down_err( - self, mongo_update) -> None: - data = self.node_data_non_down_error['cosmos_rest']['error'] - - self.test_store._process_mongo_cosmos_rest_error_store(data) - - mongo_update.assert_not_called() - - @mock.patch.object(MongoApi, "update_one") - def test_process_mongo_tendermint_rpc_error_store_stores_correctly_non_down_err( - self, mongo_update) -> None: - data = self.node_data_non_down_error['tendermint_rpc']['error'] - - self.test_store._process_mongo_tendermint_rpc_error_store(data) - - mongo_update.assert_not_called() + self.assertEqual( + data['data']['is_peered_with_sentinel'], + str_to_bool(self.redis.hget( + redis_hash, + Keys.get_cosmos_node_is_peered(self.node_id) + ).decode('utf-8'))) + + # def test_process_redis_prometheus_error_store_stores_correctly_if_down_err( + # self) -> None: + # data = self.node_data_down_error['prometheus']['error'] + # redis_hash = Keys.get_hash_parent(self.parent_id) + # self.test_store._process_redis_prometheus_error_store(data) + + # self.assertEqual( + # None, self.redis.hget(redis_hash, + # Keys.get_cosmos_node_current_height( + # self.node_id)) + # ) + # self.assertEqual( + # None, self.redis.hget(redis_hash, + # Keys.get_cosmos_node_voting_power( + # self.node_id)) + # ) + # self.assertEqual( + # None, self.redis.hget(redis_hash, + # Keys.get_cosmos_node_is_syncing( + # self.node_id)) + # ) + # self.assertEqual( + # data['data']['went_down_at'], + # convert_to_float(self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_went_down_at_prometheus(self.node_id) + # ).decode("utf-8"), 'bad_val')) + + # def test_process_redis_cosmos_rest_error_store_stores_correctly_if_down_err( + # self) -> None: + # data = self.node_data_down_error['cosmos_rest']['error'] + # redis_hash = Keys.get_hash_parent(self.parent_id) + # self.test_store._process_redis_cosmos_rest_error_store(data) + + # self.assertEqual( + # None, self.redis.hget(redis_hash, + # Keys.get_cosmos_node_bond_status( + # self.node_id)) + # ) + # self.assertEqual( + # None, self.redis.hget(redis_hash, + # Keys.get_cosmos_node_jailed( + # self.node_id)) + # ) + # self.assertEqual( + # data['data']['went_down_at'], + # convert_to_float(self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_went_down_at_cosmos_rest(self.node_id) + # ).decode("utf-8"), 'bad_val')) + + # def test_process_redis_tendermint_rpc_error_store_stores_correctly_if_down_err( + # self) -> None: + # data = self.node_data_down_error['tendermint_rpc']['error'] + # redis_hash = Keys.get_hash_parent(self.parent_id) + # self.test_store._process_redis_tendermint_rpc_error_store(data) + + # self.assertEqual( + # None, self.redis.hget(redis_hash, + # Keys.get_cosmos_node_slashed( + # self.node_id)) + # ) + # self.assertEqual( + # None, self.redis.hget(redis_hash, + # Keys.get_cosmos_node_missed_blocks( + # self.node_id)) + # ) + # self.assertEqual( + # data['data']['went_down_at'], + # convert_to_float(self.redis.hget( + # redis_hash, + # Keys.get_cosmos_node_went_down_at_tendermint_rpc(self.node_id) + # ).decode("utf-8"), 'bad_val')) + + # @mock.patch.object(RedisApi, "hset_multiple") + # def test_process_redis_prometheus_error_store_stores_correctly_not_down_err( + # self, redis_set) -> None: + # data = self.node_data_non_down_error['prometheus']['error'] + + # 
self.test_store._process_redis_prometheus_error_store(data) + + # redis_set.assert_not_called() + + # @mock.patch.object(RedisApi, "hset_multiple") + # def test_process_redis_cosmos_rest_error_store_stores_correctly_not_down_err( + # self, redis_set) -> None: + # data = self.node_data_non_down_error['cosmos_rest']['error'] + + # self.test_store._process_redis_cosmos_rest_error_store(data) + + # redis_set.assert_not_called() + + # @mock.patch.object(RedisApi, "hset_multiple") + # def test_process_redis_tendermint_rpc_error_store_stores_correctly_not_down_err( + # self, redis_set) -> None: + # data = self.node_data_non_down_error['tendermint_rpc']['error'] + + # self.test_store._process_redis_tendermint_rpc_error_store(data) + + # redis_set.assert_not_called() + + # @mock.patch("src.data_store.stores.node.cosmos." + # "transformed_data_processing_helper") + # def test_process_mongo_store_calls_transformed_data_helper_fn_correctly( + # self, mock_helper_fn) -> None: + # mock_helper_fn.return_value = None + # test_conf = { + # 'prometheus': { + # 'result': + # self.test_store._process_mongo_prometheus_result_store, + # 'error': self.test_store._process_mongo_prometheus_error_store, + # }, + # 'cosmos_rest': { + # 'result': + # self.test_store._process_mongo_cosmos_rest_result_store, + # 'error': self.test_store._process_mongo_cosmos_rest_error_store, + # }, + # 'tendermint_rpc': { + # 'result': + # self.test_store._process_mongo_tendermint_rpc_result_store, + # 'error': + # self.test_store._process_mongo_tendermint_rpc_error_store, + # } + # } + # self.test_store._process_mongo_store(self.node_data_optionals_enabled) + # mock_helper_fn.assert_called_once_with(self.test_store_name, test_conf, + # self.node_data_optionals_enabled) + + # @parameterized.expand([ + # ("self.node_data_optionals_enabled",), + # ]) + # def test_process_mongo_prometheus_result_store_stores_correctly( + # self, data_var) -> None: + # data = eval(data_var)['prometheus']['result'] + # meta_data = data['meta_data'] + # node_id = meta_data['node_id'] + # parent_id = meta_data['node_parent_id'] + # metrics = data['data'] + + # self.test_store._process_mongo_prometheus_result_store(data) + + # documents = self.mongo.get_all(parent_id) + # document = documents[0] + # expected = [ + # 'node', + # 1, + # metrics['went_down_at'], + # metrics['current_height'], + # metrics['voting_power'], + # meta_data['last_monitored'] + # ] + # actual = [ + # document['doc_type'], + # document['n_entries'], + # None if document[node_id][0]['went_down_at_prometheus'] == 'None' + # else convert_to_float( + # document[node_id][0]['went_down_at_prometheus'], 'bad_val'), + # convert_to_int(document[node_id][0]['current_height'], 'bad_val'), + # convert_to_int(document[node_id][0]['voting_power'], 'bad_val'), + # document[node_id][0]['timestamp'], + + # ] + + # self.assertListEqual(expected, actual) + + # @parameterized.expand([ + # ("self.node_data_optionals_enabled",), + # ]) + # def test_process_mongo_cosmos_rest_result_store_stores_correctly( + # self, data_var) -> None: + # data = eval(data_var)['cosmos_rest']['result'] + # meta_data = data['meta_data'] + # node_id = meta_data['node_id'] + # parent_id = meta_data['node_parent_id'] + # metrics = data['data'] + + # self.test_store._process_mongo_cosmos_rest_result_store(data) + + # documents = self.mongo.get_all(parent_id) + # document = documents[0] + # expected = [ + # 'node', + # 1, + # metrics['went_down_at'], + # metrics['bond_status'], + # metrics['jailed'], + # meta_data['last_monitored'], 
+ # ] + # actual = [ + # document['doc_type'], + # document['n_entries'], + # None if document[node_id][0]['went_down_at_cosmos_rest'] == 'None' + # else convert_to_float( + # document[node_id][0]['went_down_at_cosmos_rest'], 'bad_val'), + # document[node_id][0]['bond_status'], + # str_to_bool(document[node_id][0]['jailed']), + # document[node_id][0]['timestamp'], + # ] + + # self.assertListEqual(expected, actual) + + # @parameterized.expand([ + # ("self.node_data_optionals_enabled",), + # ]) + # def test_process_mongo_tendermint_rpc_result_store_stores_correctly( + # self, data_var) -> None: + # data = eval(data_var)['tendermint_rpc']['result'] + # meta_data = data['meta_data'] + # node_id = meta_data['node_id'] + # parent_id = meta_data['node_parent_id'] + # metrics = data['data'] + + # self.test_store._process_mongo_tendermint_rpc_result_store(data) + + # documents = self.mongo.get_all(parent_id) + # document = documents[0] + # expected = [ + # 'node', + # 1, + # metrics['went_down_at'], + # metrics['slashed'], + # metrics['missed_blocks'], + # metrics['is_syncing'], + # meta_data['last_monitored'], + # ] + # actual = [ + # document['doc_type'], + # document['n_entries'], + # None if document[node_id][0][ + # 'went_down_at_tendermint_rpc'] == 'None' + # else convert_to_float( + # document[node_id][0]['went_down_at_tendermint_rpc'], 'bad_val'), + # None if document[node_id][0]['slashed'] == 'None' + # else json.loads(document[node_id][0]['slashed']), + # None if document[node_id][0]['missed_blocks'] == 'None' + # else json.loads(document[node_id][0]['missed_blocks']), + # str_to_bool(document[node_id][0]['is_syncing']), + # document[node_id][0]['timestamp'], + # ] + + # self.assertListEqual(expected, actual) + + # def test_process_mongo_prometheus_error_store_stores_correctly_if_down_err( + # self) -> None: + # data = self.node_data_down_error['prometheus']['error'] + # meta_data = data['meta_data'] + # node_id = meta_data['node_id'] + # parent_id = meta_data['node_parent_id'] + # metrics = data['data'] + + # self.test_store._process_mongo_prometheus_error_store(data) + + # documents = self.mongo.get_all(parent_id) + # document = documents[0] + # expected = [ + # 'node', + # 1, + # metrics['went_down_at'], + # meta_data['time'], + # ] + # actual = [ + # document['doc_type'], + # document['n_entries'], + # convert_to_float(document[node_id][0]['went_down_at_prometheus'], + # 'bad_val'), + # document[node_id][0]['timestamp'], + # ] + + # self.assertEqual(2, len(document[node_id][0])) + # self.assertListEqual(expected, actual) + + # def test_process_mongo_cosmos_rest_error_store_stores_correctly_if_down_err( + # self) -> None: + # data = self.node_data_down_error['prometheus']['error'] + # meta_data = data['meta_data'] + # node_id = meta_data['node_id'] + # parent_id = meta_data['node_parent_id'] + # metrics = data['data'] + + # self.test_store._process_mongo_cosmos_rest_error_store(data) + + # documents = self.mongo.get_all(parent_id) + # document = documents[0] + # expected = [ + # 'node', + # 1, + # metrics['went_down_at'], + # meta_data['time'], + # ] + # actual = [ + # document['doc_type'], + # document['n_entries'], + # convert_to_float(document[node_id][0]['went_down_at_cosmos_rest'], + # 'bad_val'), + # document[node_id][0]['timestamp'], + # ] + + # self.assertEqual(2, len(document[node_id][0])) + # self.assertListEqual(expected, actual) + + # def test_process_mongo_tendermint_rpc_error_store_stores_correctly_if_down_err( + # self) -> None: + # data = 
self.node_data_down_error['prometheus']['error'] + # meta_data = data['meta_data'] + # node_id = meta_data['node_id'] + # parent_id = meta_data['node_parent_id'] + # metrics = data['data'] + + # self.test_store._process_mongo_tendermint_rpc_error_store(data) + + # documents = self.mongo.get_all(parent_id) + # document = documents[0] + # expected = [ + # 'node', + # 1, + # metrics['went_down_at'], + # meta_data['time'], + # ] + # actual = [ + # document['doc_type'], + # document['n_entries'], + # convert_to_float( + # document[node_id][0]['went_down_at_tendermint_rpc'], + # 'bad_val'), + # document[node_id][0]['timestamp'], + # ] + + # self.assertEqual(2, len(document[node_id][0])) + # self.assertListEqual(expected, actual) + + # @mock.patch.object(MongoApi, "update_one") + # def test_process_mongo_prometheus_error_store_stores_correctly_non_down_err( + # self, mongo_update) -> None: + # data = self.node_data_non_down_error['prometheus']['error'] + + # self.test_store._process_mongo_prometheus_error_store(data) + + # mongo_update.assert_not_called() + + # @mock.patch.object(MongoApi, "update_one") + # def test_process_mongo_cosmos_rest_error_store_stores_correctly_non_down_err( + # self, mongo_update) -> None: + # data = self.node_data_non_down_error['cosmos_rest']['error'] + + # self.test_store._process_mongo_cosmos_rest_error_store(data) + + # mongo_update.assert_not_called() + + # @mock.patch.object(MongoApi, "update_one") + # def test_process_mongo_tendermint_rpc_error_store_stores_correctly_non_down_err( + # self, mongo_update) -> None: + # data = self.node_data_non_down_error['tendermint_rpc']['error'] + + # self.test_store._process_mongo_tendermint_rpc_error_store(data) + + # mongo_update.assert_not_called() diff --git a/alerter/test/data_transformers/node/test_cosmos.py b/alerter/test/data_transformers/node/test_cosmos.py index 6b6ae0b6..d56f6a2e 100644 --- a/alerter/test/data_transformers/node/test_cosmos.py +++ b/alerter/test/data_transformers/node/test_cosmos.py @@ -49,6 +49,7 @@ def setUp(self) -> None: self.dummy_logger.disabled = True self.invalid_transformed_data = {'bad_key': 'bad_value'} self.test_monitor_name = 'test_monitor_name' + self.test_is_mev_tendermint_node = False # Rabbit instance self.connection_check_time_interval = timedelta(seconds=0) @@ -143,6 +144,7 @@ def setUp(self) -> None: 'node_id': self.node_1.node_id, 'node_parent_id': self.node_1.parent_id, 'time': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -175,6 +177,12 @@ def setUp(self) -> None: } } } + + self.raw_data_example_result_mev = copy.deepcopy( + self.raw_data_example_result_all) + self.raw_data_example_result_mev['tendermint_rpc']['result']['data']['is_peered_with_sentinel'] = True + self.raw_data_example_result_mev['tendermint_rpc']['result']['meta_data']['is_mev_tendermint_node'] = True + self.raw_data_example_result_options_None = copy.deepcopy( self.raw_data_example_result_all) self.raw_data_example_result_options_None['prometheus'][ @@ -226,6 +234,7 @@ def setUp(self) -> None: 'node_id': self.node_1.node_id, 'node_parent_id': self.node_1.parent_id, 'time': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -273,6 +282,7 @@ def setUp(self) -> None: 'node_id': self.node_1.node_id, 'node_parent_id': self.node_1.parent_id, 'time': 
self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -325,6 +335,7 @@ def setUp(self) -> None: 'node_id': self.node_1.node_id, 'node_parent_id': self.node_1.parent_id, 'last_monitored': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -347,6 +358,12 @@ def setUp(self) -> None: } } } + self.transformed_data_example_result_mev = copy.deepcopy( + self.transformed_data_example_result_all) + self.transformed_data_example_result_mev['tendermint_rpc']['result']['data']['is_peered_with_sentinel'] = True + self.transformed_data_example_result_mev['tendermint_rpc']['result']['meta_data']['is_mev_tendermint_node'] = True + + self.transformed_data_example_result_options_None = copy.deepcopy( self.transformed_data_example_result_all) self.transformed_data_example_result_options_None['prometheus'][ @@ -403,6 +420,7 @@ def setUp(self) -> None: 'node_id': self.node_1.node_id, 'node_parent_id': self.node_1.parent_id, 'time': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -453,6 +471,7 @@ def setUp(self) -> None: 'node_id': self.node_1.node_id, 'node_parent_id': self.node_1.parent_id, 'time': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -514,6 +533,7 @@ def setUp(self) -> None: 'node_id': self.node_1.node_id, 'node_parent_id': self.node_1.parent_id, 'last_monitored': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -548,6 +568,15 @@ def setUp(self) -> None: } } } + + self.processed_data_example_result_all_mev = copy.deepcopy( + self.processed_data_example_result_all) + self.processed_data_example_result_all_mev['tendermint_rpc']['result']['data']['is_peered_with_sentinel'] = { + 'current': True, + 'previous': None + } + self.processed_data_example_result_all_mev['tendermint_rpc']['result']['meta_data']['is_mev_tendermint_node'] = True + self.processed_data_example_result_options_None = copy.deepcopy( self.processed_data_example_result_all) self.processed_data_example_result_options_None['prometheus'][ @@ -604,6 +633,7 @@ def setUp(self) -> None: 'node_id': self.node_1.node_id, 'node_parent_id': self.node_1.parent_id, 'time': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -660,6 +690,7 @@ def setUp(self) -> None: 'node_id': self.node_1.node_id, 'node_parent_id': self.node_1.parent_id, 'time': self.test_last_monitored, + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_is_validator, 'operator_address': self.test_operator_address, }, @@ -1039,6 +1070,31 @@ def test_update_state_updates_state_correctly_if_result(self) -> None: self.assertFalse(self.test_data_transformer._state[ self.node_1.node_id].is_down_prometheus) + def test_update_state_with_mev_metrics(self) -> None: + expected_updated_node = copy.deepcopy(self.node_1) + self.node_1.reset() + self.test_data_transformer._state = 
copy.deepcopy(self.test_state) + self.test_data_transformer._state['dummy_id'] = self.test_data_str + + # Update state with mev metrics + self.test_data_transformer._update_state( + self.transformed_data_example_result_mev) + + expected_updated_node.set_is_peered_with_sentinel(True) + + # Check that the nodes's state values have been modified correctly + self.assertEqual( + expected_updated_node, + self.test_data_transformer._state[self.node_1.node_id]) + # Check that the nodes not in question were not modified + self.assertEqual(self.test_data_str, + self.test_data_transformer._state['dummy_id']) + # Check that setting state to false after true works + self.transformed_data_example_result_mev['tendermint_rpc']['result']['data']['is_peered_with_sentinel'] = False + self.test_data_transformer._update_state( + self.transformed_data_example_result_mev) + self.assertFalse(self.test_data_transformer._state[self.node_1.node_id].is_peered_with_sentinel) + @parameterized.expand([ ('self.transformed_data_example_general_error', False,), ('self.transformed_data_example_downtime_error', True,) @@ -1132,6 +1188,8 @@ def test_proc_trans_data_for_saving_raises_unexp_data_except_on_unexp_data( 'self.processed_data_example_general_error'), ('self.transformed_data_example_downtime_error', 'self.processed_data_example_downtime_error'), + ('self.transformed_data_example_result_mev', + 'self.processed_data_example_result_all_mev'), ]) def test_process_transformed_data_for_alerting_returns_expected_data( self, transformed_data: str, expected_processed_data: str) -> None: @@ -1477,6 +1535,8 @@ def test_proc_raw_data_does_not_call_trans_data_if_validate_raise_exception( @parameterized.expand([ ('self.raw_data_example_result_all', 'self.transformed_data_example_result_all',), + ('self.raw_data_example_result_mev', + 'self.transformed_data_example_result_mev',), ('self.raw_data_example_result_options_None', 'self.transformed_data_example_result_options_None'), ('self.raw_data_example_general_error', diff --git a/alerter/test/monitors/node/test_cosmos.py b/alerter/test/monitors/node/test_cosmos.py index bca880d5..3afdd97a 100644 --- a/alerter/test/monitors/node/test_cosmos.py +++ b/alerter/test/monitors/node/test_cosmos.py @@ -62,6 +62,8 @@ def setUp(self) -> None: self.sdk_version_0_42_6 = 'v0.42.6' self.test_consensus_address = 'test_consensus_address' self.test_is_syncing = False + self.test_is_peered_with_sentinel = True + self.test_is_mev_tendermint_node = False # --------------- Data retrieval variables and examples --------------- # Prometheus @@ -102,10 +104,17 @@ def setUp(self) -> None: } # Tendermint + self.retrieved_tendermint_direct_data_mev = { + 'consensus_hex_address': self.test_consensus_address, + 'is_syncing': self.test_is_syncing, + 'is_peered_with_sentinel': self.test_is_peered_with_sentinel, + } + self.retrieved_tendermint_direct_data = { 'consensus_hex_address': self.test_consensus_address, 'is_syncing': self.test_is_syncing } + self.retrieved_tendermint_archive_data = { 'historical': [ { @@ -136,6 +145,12 @@ def setUp(self) -> None: 'is_syncing': self.test_is_syncing, } + self.retrieved_tendermint_rpc_data_mev = { + **self.retrieved_tendermint_archive_data, + 'is_syncing': self.test_is_syncing, + 'is_peered_with_sentinel': self.test_is_peered_with_sentinel, + } + # Processed retrieved data example self.processed_prometheus_data_example_1 = { 'tendermint_consensus_latest_block_height': 8137538.0, @@ -189,6 +204,13 @@ def setUp(self) -> None: 'monitoring_enabled': True }, } + + 
self.received_retrieval_info_all_sources_mev = copy.deepcopy(self.received_retrieval_info_all_source_types_enabled) + self.received_retrieval_info_all_sources_mev['tendermint_rpc']['data'] = self.retrieved_tendermint_rpc_data_mev + self.received_retrieval_info_all_sources_mev['tendermint_rpc']['processing_function'] = self.test_monitor._process_retrieved_tendermint_rpc_data + self.received_retrieval_info_all_sources_mev['cosmos_rest']['processing_function'] = self.test_monitor._process_retrieved_cosmos_rest_data + self.received_retrieval_info_all_sources_mev['prometheus']['processing_function'] = self.test_monitor._process_retrieved_prometheus_data + self.received_retrieval_info_some_sources_disabled = { 'prometheus': { 'data': {}, @@ -793,7 +815,7 @@ def test_get_cosmos_rest_data_ret_if_incompatibility_issue_and_unsuccessful( def test_get_tendermint_rpc_direct_data_return( self, mock_get_status) -> None: """ - We will check that the return is as expected for all cases + We will check that the return is as expected for all responses without a mev_info key """ mock_get_status.return_value = { 'result': { @@ -802,12 +824,36 @@ def test_get_tendermint_rpc_direct_data_return( }, 'sync_info': { 'catching_up': self.test_is_syncing - } + }, } } + actual_return = self.test_monitor._get_tendermint_rpc_direct_data() self.assertEqual(self.retrieved_tendermint_direct_data, actual_return) + @mock.patch.object(TendermintRpcApiWrapper, 'get_status') + def test_get_tendermint_rpc_direct_data_return_mev_info( + self, mock_get_status) -> None: + """ + We will check that the return is as expected when mev_info exists + """ + mock_get_status.return_value = { + 'result': { + 'validator_info': { + 'address': self.test_consensus_address + }, + 'sync_info': { + 'catching_up': self.test_is_syncing + }, + 'mev_info' : { + 'is_peered_with_relayer' : self.test_is_peered_with_sentinel, + }, + } + } + + actual_return_mev = self.test_monitor._get_tendermint_rpc_direct_data() + self.assertEqual(self.retrieved_tendermint_direct_data_mev, actual_return_mev) + @parameterized.expand([ (None, 1000, True, 999,), (None, 1000, False, 999,), @@ -1466,6 +1512,22 @@ def test_get_tendermint_rpc_data_sets_cons_address_if_not_None_or_empty( self.assertEqual(self.test_consensus_address, self.test_monitor.validator_consensus_address) + @mock.patch.object(CosmosNodeMonitor, '_get_tendermint_rpc_archive_data') + @mock.patch.object(CosmosNodeMonitor, '_get_tendermint_rpc_direct_data') + @mock.patch.object(CosmosNodeMonitor, '_select_cosmos_tendermint_node') + def test_get_tendermint_rpc_data_sets_peered_with_sentinel( + self, mock_select_node, mock_get_direct_data, + mock_get_archive_data) -> None: + mock_select_node.return_value = self.data_sources[0] + mock_get_direct_data.return_value = \ + self.retrieved_tendermint_direct_data_mev + mock_get_archive_data.return_value = \ + self.retrieved_tendermint_archive_data + + actual_return = self.test_monitor._get_tendermint_rpc_data() + self.assertEqual(actual_return[0], + self.retrieved_tendermint_rpc_data_mev) + @parameterized.expand([ ('',), (None,), @@ -1501,6 +1563,21 @@ def test_get_tendermint_rpc_data_ret_if_archive_data_retrieved_successfully( self.assertEqual((self.retrieved_tendermint_rpc_data, False, None), actual_ret) + @mock.patch.object(CosmosNodeMonitor, '_get_tendermint_rpc_archive_data') + @mock.patch.object(CosmosNodeMonitor, '_get_tendermint_rpc_direct_data') + @mock.patch.object(CosmosNodeMonitor, '_select_cosmos_tendermint_node') + def 
test_get_tendermint_rpc_data_ret_if_peering_data_retrieved_successfully( + self, mock_select_node, mock_get_direct_data, + mock_get_archive_data) -> None: + mock_select_node.return_value = self.data_sources[0] + mock_get_archive_data.return_value = \ + self.retrieved_tendermint_archive_data + ## update mock direct response data + mock_get_direct_data.return_value = \ + self.retrieved_tendermint_direct_data_mev + actual_ret_mev = self.test_monitor._get_tendermint_rpc_data() + self.assertEqual((self.retrieved_tendermint_rpc_data_mev, False, None), actual_ret_mev) + @parameterized.expand([ (NodeIsDownException('node_name_1'), NodeIsDownException('node_name_1'),), @@ -1604,10 +1681,13 @@ def test_get_prometheus_data_return_if_retrieval_fails_with_expected_error( ['self.retrieved_prometheus_data_example_1', False, None], True, ['self.retrieved_cosmos_rest_data_1', False, None], True, ['self.retrieved_tendermint_rpc_data', False, None], True), + ('self.received_retrieval_info_all_sources_mev', + ['self.retrieved_prometheus_data_example_1', False, None], True, + ['self.retrieved_cosmos_rest_data_1', False, None], True, + ['self.retrieved_tendermint_rpc_data_mev', False, None], True), ('self.received_retrieval_info_some_sources_disabled', None, False, ['self.retrieved_cosmos_rest_data_1', False, None], True, - ['self.retrieved_tendermint_rpc_data', False, None], True - ), + ['self.retrieved_tendermint_rpc_data', False, None], True), ('self.received_retrieval_info_all_source_types_enabled_err', [{}, True, PANICException('test_exception_1', 1)], True, [{}, True, PANICException('test_exception_2', 2)], True, @@ -1655,6 +1735,7 @@ def test_get_data_return( self.test_monitor._node_config._monitor_prometheus = monitor_prom self.test_monitor._node_config._monitor_cosmos_rest = \ monitor_cosmos_rest + self.test_monitor._node_config._monitor_tendermint_rpc = \ monitor_tendermint_rpc @@ -1664,6 +1745,7 @@ def test_get_data_return( expected_ret['prometheus']['get_function'] = mock_get_prom_data expected_ret['tendermint_rpc']['get_function'] = \ mock_get_tendermint_rpc_data + self.assertEqual(expected_ret, actual_ret) @freeze_time("2012-01-01") @@ -1720,6 +1802,7 @@ def test_process_retrieved_tendermint_rpc_data_returns_expected_data( 'node_id': self.test_monitor.node_config.node_id, 'node_parent_id': self.test_monitor.node_config.parent_id, 'time': datetime.now().timestamp(), + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_monitor.node_config.is_validator, 'operator_address': self.test_monitor.node_config.operator_address, @@ -1731,6 +1814,30 @@ def test_process_retrieved_tendermint_rpc_data_returns_expected_data( self.test_data_dict) self.assertEqual(expected_ret, actual_ret) + @freeze_time("2012-01-01") + def test_process_retrieved_tendermint_rpc_data_returns_expected_data_when_node_is_mev( + self) -> None: + expected_ret = { + 'result': { + 'meta_data': { + 'monitor_name': self.monitor_name, + 'node_name': self.test_monitor.node_config.node_name, + 'node_id': self.test_monitor.node_config.node_id, + 'node_parent_id': self.test_monitor.node_config.parent_id, + 'time': datetime.now().timestamp(), + 'is_mev_tendermint_node': True, + 'is_validator': self.test_monitor.node_config.is_validator, + 'operator_address': + self.test_monitor.node_config.operator_address, + }, + 'data': copy.deepcopy(self.test_data_dict), + } + } + actual_ret = self.test_monitor._process_retrieved_tendermint_rpc_data( + self.retrieved_tendermint_direct_data_mev) + expected_ret['result']['data'] 
= self.retrieved_tendermint_direct_data_mev + self.assertEqual(expected_ret, actual_ret) + @parameterized.expand([ ("self.processed_prometheus_data_example_1", "self.retrieved_prometheus_data_example_1"), @@ -1859,6 +1966,7 @@ def test_monitor_sends_data_and_hb_if_data_retrieve_and_processing_success( 'node_parent_id': self.test_monitor.node_config.parent_id, 'time': datetime(2012, 1, 1).timestamp(), + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_monitor.node_config.is_validator, 'operator_address': @@ -1916,6 +2024,7 @@ def test_monitor_sends_empty_dict_for_disabled_sources( 'node_parent_id': self.test_monitor.node_config.parent_id, 'time': datetime(2012, 1, 1).timestamp(), + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_monitor.node_config.is_validator, 'operator_address': @@ -1934,7 +2043,7 @@ def test_monitor_sends_empty_dict_for_disabled_sources( self.received_retrieval_info_some_sources_disabled mock_send_data.return_value = None mock_send_heartbeat.return_value = None - + self.test_monitor._monitor() mock_send_data.assert_called_once_with(expected_output_data) mock_send_heartbeat.assert_called_once_with(expected_output_hb) @@ -2064,6 +2173,7 @@ def test_monitor_logs_data_if_all_sources_enabled_and_no_retrieval_error( 'node_parent_id': self.test_monitor.node_config.parent_id, 'time': datetime(2012, 1, 1).timestamp(), + 'is_mev_tendermint_node': self.test_is_mev_tendermint_node, 'is_validator': self.test_monitor.node_config.is_validator, 'operator_address': diff --git a/api/base/dump/base_chains.json b/api/base/dump/base_chains.json index d30aa379..49d2be42 100644 --- a/api/base/dump/base_chains.json +++ b/api/base/dump/base_chains.json @@ -13,7 +13,9 @@ "62e25724f161ba55e6db2a26", "62e2574cf161ba55e6db2a27", "62e257d8f161ba55e6db2a28", - "62e25823f161ba55e6db2a29" + "62e25823f161ba55e6db2a29", + "62e25c1ff161ba55e6db2a37", + "62e25c1ff161ba55e6db2a38" ], "threshold_alerts": [ "628bcb395c5ab4c82d151a93", @@ -131,4 +133,4 @@ ], "time_window_alerts": [] } -] \ No newline at end of file +] diff --git a/api/base/dump/severity_alerts.json b/api/base/dump/severity_alerts.json index a85b912d..8868674c 100644 --- a/api/base/dump/severity_alerts.json +++ b/api/base/dump/severity_alerts.json @@ -158,5 +158,21 @@ "description": "An alert will be raised if the number of oracles that have submitted their observations is less than the minimum requirement. 
Severity is set as warning as from a node operator perspective there is nothing one can do.", "group": "severity_alert", "type": "6265d085fdb17d641746dcef" + }, + { + "_id": "62e25c1ff161ba55e6db2a37", + "name": "Node is Peered With Sentinel", + "value": "node_is_peered_with_sentinel", + "description": "An alert will be raised if the current node is not peered with the sentinel on the source chain", + "group": "severity_alert", + "type": "6265d085fdb17d641746dcef" + }, + { + "_id": "62e25c1ff161ba55e6db2a38", + "name": "Validator is Peered With Sentinel", + "value": "validator_is_peered_with_sentinel", + "description": "An alert will be raised if the current validator is not peered with the sentinel on the source chain", + "group": "severity_alert", + "type": "6265d08efdb17d641746dcf0" + } -] \ No newline at end of file +] diff --git a/api/src/server/redis.ts b/api/src/server/redis.ts index a7db40ac..e93b49a7 100644 --- a/api/src/server/redis.ts +++ b/api/src/server/redis.ts @@ -131,6 +131,7 @@ export const getAlertKeysNode = (): AlertKeysNode => ({ cosmos_node_is_down: `${alertKeysCosmosNodePrefix}1`, cosmos_node_slashed: `${alertKeysCosmosNodePrefix}2`, cosmos_node_syncing: `${alertKeysCosmosNodePrefix}3`, + cosmos_node_peering: `${alertKeysCosmosNodePrefix}14`, cosmos_node_active: `${alertKeysCosmosNodePrefix}4`, cosmos_node_jailed: `${alertKeysCosmosNodePrefix}5`, cosmos_node_blocks_missed: `${alertKeysCosmosNodePrefix}6`, diff --git a/api/src/server/types.ts b/api/src/server/types.ts index 7e852ab4..ae98752a 100644 --- a/api/src/server/types.ts +++ b/api/src/server/types.ts @@ -112,6 +112,7 @@ export interface AlertKeysNode { cosmos_node_is_down: string, cosmos_node_slashed: string, cosmos_node_syncing: string, + cosmos_node_peering: string, cosmos_node_active: string, cosmos_node_jailed: string, cosmos_node_blocks_missed: string, diff --git a/api/tests/server/redis.test.ts b/api/tests/server/redis.test.ts index 1982fbfb..fd6b92fe 100644 --- a/api/tests/server/redis.test.ts +++ b/api/tests/server/redis.test.ts @@ -169,6 +169,7 @@ describe('getAlertKeysNode', () => { cosmos_node_is_down: `${alertKeysCosmosNodePrefix}1`, cosmos_node_slashed: `${alertKeysCosmosNodePrefix}2`, cosmos_node_syncing: `${alertKeysCosmosNodePrefix}3`, + cosmos_node_peering: `${alertKeysCosmosNodePrefix}14`, cosmos_node_active: `${alertKeysCosmosNodePrefix}4`, cosmos_node_jailed: `${alertKeysCosmosNodePrefix}5`, cosmos_node_blocks_missed: `${alertKeysCosmosNodePrefix}6`, diff --git a/docs/CHANGE_LOG.md b/docs/CHANGE_LOG.md index b3d9f773..221ce9ff 100644 --- a/docs/CHANGE_LOG.md +++ b/docs/CHANGE_LOG.md @@ -1,6 +1,10 @@ # Change Log ## Unreleased +- Added `is_peered_with_sentinel` monitorable for nodes running `mev-tendermint` +- Added `node_is_peered_with_sentinel` / `validator_is_peered_with_sentinel` alerts / alerting configs +- Updated CosmosNode `data_store`, `data_transformer`, `monitor`, and `alerter` to accommodate this new monitorable / alert +- Updated the API / UI for configuring these alerts ## 1.3.0 diff --git a/docs/DESIGN_AND_FEATURES.md b/docs/DESIGN_AND_FEATURES.md index d3146126..2dbf21be 100644 --- a/docs/DESIGN_AND_FEATURES.md +++ b/docs/DESIGN_AND_FEATURES.md @@ -250,6 +250,8 @@ In the lists below we will show which alerts have severity thresholds and which | `ValidatorWasSlashedAlert` | | `CRITICAL` | ✓ | ✓ | Validator has been slashed. | | `NodeIsSyncingAlert` | | `INFO`,`WARNING` | ✓ | ✓ | Node or validator is syncing. 
| | `NodeIsNoLongerSyncingAlert` | | `INFO` | ✗ | Depends on `NodeIsSyncingAlert` | Node or validator is no longer syncing. | +| `NodeIsNotPeeredWithSentinelAlert` | | `INFO` | ✗ | ✓ | Node or validator is not peered with the sentinel (this is only relevant for mev-tendermint nodes). | +| `NodeIsPeeredWithSentinelAlert` | | `INFO` | ✗ | Depends on `NodeIsNotPeeredWithSentinelAlert` | Node or validator is peered with the sentinel again after previously not being peered. | | `ValidatorIsNotActiveAlert` | | `CRITICAL` | ✓ | ✓ | Validator is not active in the current consensus session. | | `ValidatorIsActiveAlert` | | `INFO` | ✗ | Depends on `ValidatorIsNotActiveAlert` | Validator is active in the current consensus session after not being active in a previous consensus session. | | `ValidatorIsJailedAlert` | | `CRITICAL` | ✓ | ✓ | Validator is jailed. |
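For context on the new peering monitorable: a node running `mev-tendermint` reports an extra `mev_info.is_peered_with_relayer` field in its Tendermint RPC `/status` result, which is surfaced as the `is_peered_with_sentinel` metric and flagged via `is_mev_tendermint_node` in the meta data, while nodes without `mev_info` are treated as plain Tendermint nodes. The sketch below only illustrates that mapping as exercised by the monitor tests in this change; the helper name `extract_sentinel_peering` and the bare `example_status` dict are hypothetical and not part of the code base.

```python
from typing import Any, Dict, Tuple


def extract_sentinel_peering(
        status_result: Dict[str, Any]) -> Tuple[bool, Dict[str, Any]]:
    """Illustrative only: map a Tendermint RPC /status result to metrics.

    Returns (is_mev_tendermint_node, retrieved_data). A result without an
    'mev_info' block is treated as a plain Tendermint node and yields no
    'is_peered_with_sentinel' key.
    """
    retrieved = {
        'consensus_hex_address': status_result['validator_info']['address'],
        'is_syncing': status_result['sync_info']['catching_up'],
    }
    mev_info = status_result.get('mev_info')
    if mev_info is None:
        return False, retrieved

    retrieved['is_peered_with_sentinel'] = mev_info['is_peered_with_relayer']
    return True, retrieved


# Shaped like the mocked /status payload used in the monitor tests above.
example_status = {
    'validator_info': {'address': 'test_consensus_address'},
    'sync_info': {'catching_up': False},
    'mev_info': {'is_peered_with_relayer': True},
}
print(extract_sentinel_peering(example_status))
# -> (True, {'consensus_hex_address': 'test_consensus_address',
#            'is_syncing': False, 'is_peered_with_sentinel': True})
```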