pylint: Enable more messages [BF-1560]
Samuel Giffard committed Mar 27, 2023
1 parent 30bcd0d commit 9dfaebe
Showing 8 changed files with 41 additions and 53 deletions.
5 changes: 2 additions & 3 deletions pglookout/cluster_monitor.py
@@ -178,9 +178,9 @@ def _fetch_observer_state(self, instance: str, uri: str) -> ObservedState | None
instance,
time_diff,
response.json(),
) # pylint: disable=no-member
)
return None
result.update(response.json()) # pylint: disable=no-member
result.update(response.json())
except requests.ConnectionError as ex:
self.log.warning(
"%s (%s) fetching state from observer: %r, %r",
@@ -347,7 +347,6 @@ def _get_statement_query_updating_transaction(server_version: int) -> str:
return "SELECT txid_current(), pg_current_wal_lsn() AS pg_last_xlog_replay_location"
return "SELECT txid_current(), pg_current_xlog_location() AS pg_last_xlog_replay_location"

# FIXME: Find a tighter input + return type
@staticmethod
def _parse_status_query_result(result: MemberState) -> MemberState:
if not result:
19 changes: 9 additions & 10 deletions pglookout/current_master.py
@@ -11,11 +11,11 @@
from __future__ import annotations

from . import version
from pathlib import Path
from pglookout.default import JSON_STATE_FILE_PATH

import argparse
import json
import os
import sys
import time

@@ -34,22 +34,21 @@ def main(args: list[str] | None = None) -> int:
help="show program version",
version=version.__version__,
)
parser.add_argument("state", help="pglookout state file")
parser.add_argument("state", type=Path, help="pglookout state file")
arg = parser.parse_args(args)

if not os.path.exists(arg.state):
print(f"pglookout_current_master: {arg.state!r} doesn't exist")
state_file: Path = arg.state
if not state_file.is_file():
print(f"pglookout_current_master: {arg.state!s} doesn't exist")
return 1

try:
with open(arg.state, "r") as fp:
config = json.load(fp)
state_file_path = config.get("json_state_file_path", JSON_STATE_FILE_PATH) # pylint: disable=no-member
if time.monotonic() - os.stat(state_file_path).st_mtime > 60.0:
config = json.loads(state_file.read_text(encoding="utf-8"))
state_file_path = Path(config.get("json_state_file_path", JSON_STATE_FILE_PATH))
if time.monotonic() - state_file_path.stat().st_mtime > 60.0:
# file older than one minute, pglookout probably dead, exit with minus one
return -1
with open(state_file_path, "r") as fp:
state_dict = json.load(fp)
state_dict = json.loads(state_file_path.read_text(encoding="utf-8"))
current_master = state_dict["current_master"]
print(current_master)
except: # pylint: disable=bare-except
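
The hunk above swaps os.path/open/json.load for pathlib throughout the helper. A minimal sketch of the adopted pattern, with a made-up state file path, might look like this:

    from pathlib import Path

    import argparse
    import json

    parser = argparse.ArgumentParser()
    # argparse hands the value over as a Path object, so no later conversion is needed.
    parser.add_argument("state", type=Path, help="pglookout state file")
    args = parser.parse_args(["/tmp/pglookout_state.json"])  # hypothetical path

    if args.state.is_file():
        # read_text() replaces the open()/json.load() pair and makes the encoding explicit.
        state = json.loads(args.state.read_text(encoding="utf-8"))
        print(state.get("current_master"))
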
2 changes: 1 addition & 1 deletion pglookout/logutil.py
@@ -15,7 +15,7 @@

with_systemd: bool = False
try:
from systemd import daemon # pylint: disable=no-name-in-module
from systemd import daemon

with_systemd = True
except ImportError:
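
The systemd import stays optional; only the suppression on it was dropped. A hedged sketch of the guarded-import pattern, assuming python-systemd's daemon.notify API (notify_ready is an illustrative helper, not pglookout's own function):

    with_systemd: bool = False
    try:
        from systemd import daemon  # provided by the python-systemd package

        with_systemd = True
    except ImportError:
        daemon = None

    def notify_ready() -> None:
        # Only talk to the service manager when the binding is actually importable.
        if with_systemd:
            daemon.notify("READY=1")
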
30 changes: 14 additions & 16 deletions pglookout/pglookout.py
@@ -190,7 +190,7 @@ def load_config(self) -> None:

previous_remote_conns = self.config.get("remote_conns")
try:
config = json.loads(self.config_path.read_text())
config = json.loads(self.config_path.read_text(encoding="utf-8"))
self.config = self.normalize_config(config)
except Exception as ex: # pylint: disable=broad-except
self.log.exception("Invalid JSON config, exiting")
@@ -282,7 +282,7 @@ def write_cluster_state_to_json_file(self) -> None:
)

state_file_path_tmp = state_file_path.with_name(f"{state_file_path.name}.tmp")
state_file_path_tmp.write_text(json_to_dump)
state_file_path_tmp.write_text(json_to_dump, encoding="utf-8")
state_file_path_tmp.rename(state_file_path)

self.log.debug(
@@ -452,7 +452,7 @@ def create_node_map(

return master_instance, master_node, standby_nodes

def is_restoring_or_catching_up_normally(self, state: MemberState) -> bool:
def _is_restoring_or_catching_up_normally(self, state: MemberState) -> bool:
"""
Return True if node is still in the replication catchup phase and
replication lag alerts/metrics should not yet be generated.
@@ -476,15 +476,15 @@ def is_restoring_or_catching_up_normally(self, state: MemberState) -> bool:
return False

def emit_stats(self, state: MemberState) -> None:
if self.is_restoring_or_catching_up_normally(state):
if self._is_restoring_or_catching_up_normally(state):
# do not emit misleading lag stats during catchup at restore
return

replication_time_lag = state.get("replication_time_lag")
if replication_time_lag is not None:
self.stats.gauge("pg.replication_lag", replication_time_lag)

def is_master_observer_new_enough(self, observer_state: dict[str, ObservedState]) -> bool:
def _is_master_observer_new_enough(self, observer_state: dict[str, ObservedState]) -> bool:
if not self.replication_lag_over_warning_limit:
return True

@@ -536,7 +536,7 @@ def check_cluster_state(self) -> None:
)
return

if self.config.get("poll_observers_on_warning_only") and not self.is_master_observer_new_enough(observer_state):
if self.config.get("poll_observers_on_warning_only") and not self._is_master_observer_new_enough(observer_state):
self.log.warning("observer data is not good enough, skipping check")
return

@@ -551,7 +551,7 @@ def check_cluster_state(self) -> None:
)
self.current_master = master_instance
if self.own_db and self.own_db != master_instance and self.config.get("autofollow"):
self.start_following_new_master(master_instance)
self._start_following_new_master(master_instance)

self._make_failover_decision(observer_state, standby_nodes, master_node)

@@ -685,7 +685,7 @@ def is_replication_lag_over_warning_limit(self) -> bool:
return self.replication_lag_over_warning_limit

def check_replication_lag(self, own_state: MemberState, standby_nodes: dict[str, MemberState]) -> None:
if self.is_restoring_or_catching_up_normally(own_state):
if self._is_restoring_or_catching_up_normally(own_state):
# do not raise alerts during catchup at restore
return

@@ -749,8 +749,7 @@ def get_replication_positions(self, standby_nodes: dict[str, MemberState]) -> di
node_state["connection"]
and now - parse_iso_datetime(node_state["fetch_time"]) < timedelta(seconds=20)
and instance not in self.never_promote_these_nodes
): # noqa # pylint: disable=line-too-long
# use pg_last_xlog_receive_location if it's available,
): # use pg_last_xlog_receive_location if it's available,
# otherwise fall back to pg_last_xlog_replay_location but
# note that both of them can be None. We prefer
# receive_location over replay_location as some nodes may
@@ -870,7 +869,7 @@ def do_failover_decision(self, own_state: MemberState, standby_nodes: dict[str,
furthest_along_instance,
)

def modify_recovery_conf_to_point_at_new_master(self, new_master_instance: str) -> bool:
def _modify_recovery_conf_to_point_at_new_master(self, new_master_instance: str) -> bool:
pg_data_directory = Path(self.config.get("pg_data_directory", PG_DATA_DIRECTORY))
pg_version = (pg_data_directory / "PG_VERSION").read_text().strip()

@@ -940,9 +939,9 @@ def modify_recovery_conf_to_point_at_new_master(self, new_master_instance: str)

return True

def start_following_new_master(self, new_master_instance: str) -> None:
def _start_following_new_master(self, new_master_instance: str) -> None:
start_time = time.monotonic()
updated_config = self.modify_recovery_conf_to_point_at_new_master(new_master_instance)
updated_config = self._modify_recovery_conf_to_point_at_new_master(new_master_instance)
if not updated_config:
self.log.info(
"Already following master %r, no need to start following it again",
@@ -979,7 +978,7 @@ def execute_external_command(self, command: list[str] | str) -> int:
err.output,
)
self.stats.unexpected_exception(err, where="execute_external_command")
return_code = err.returncode # pylint: disable=no-member
return_code = err.returncode
self.log.warning("Executed external command: %r, output: %r", return_code, output)
return return_code

@@ -1028,8 +1027,7 @@ def _get_health_timeout_seconds(self) -> float | None:
if "cluster_monitor_health_timeout_seconds" in self.config:
config_value = self.config.get("cluster_monitor_health_timeout_seconds")
return config_value if config_value is None else float(config_value)
else:
return self._get_check_interval() * 2
return self._get_check_interval() * 2

def _get_check_interval(self) -> float:
return float(self.config.get("replication_state_check_interval", 5.0))
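
The write_cluster_state_to_json_file hunk keeps the temp-file-then-rename pattern and only adds an explicit encoding. A standalone sketch of that pattern (function name, path and payload are illustrative):

    import json
    from pathlib import Path

    def write_state_atomically(state_file_path: Path, state: dict) -> None:
        json_to_dump = json.dumps(state, indent=4)
        # Write a sibling .tmp file first so readers never observe a half-written state file...
        state_file_path_tmp = state_file_path.with_name(f"{state_file_path.name}.tmp")
        state_file_path_tmp.write_text(json_to_dump, encoding="utf-8")
        # ...then rename over the real file, which is atomic when both paths are on the same filesystem.
        state_file_path_tmp.rename(state_file_path)

    write_state_atomically(Path("/tmp/pglookout_state.json"), {"current_master": "node-1"})
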
8 changes: 5 additions & 3 deletions pglookout/pgutil.py
@@ -8,7 +8,7 @@
from __future__ import annotations

from typing import cast, Literal, TypedDict, Union
from urllib.parse import parse_qs, urlparse # pylint: disable=no-name-in-module, import-error
from urllib.parse import parse_qs, urlparse

import psycopg2.extensions

@@ -131,7 +131,9 @@ def parse_connection_string_url(url: str) -> ConnectionParameterKeywords:
return cast(ConnectionParameterKeywords, fields)


def parse_connection_string_libpq(connection_string: str) -> ConnectionParameterKeywords:
def parse_connection_string_libpq(
connection_string: str,
) -> ConnectionParameterKeywords:
"""Parse a postgresql connection string.
See:
@@ -159,7 +161,7 @@ def parse_connection_string_libpq(connection_string: str) -> ConnectionParameter
value += rem[i]
else:
raise ValueError(f"invalid connection_string fragment {rem!r}")
connection_string = rem[i + 1 :] # pylint: disable=undefined-loop-variable
connection_string = rem[i + 1 :]
else:
res = rem.split(None, 1)
if len(res) > 1:
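
The reflowed signature and the quoting loop above parse libpq-style keyword/value strings. Assuming pglookout is importable, usage looks roughly like the sketch below; the shown result is an illustration of the expected shape, not captured output:

    from pglookout.pgutil import parse_connection_string_libpq

    # Keyword/value pairs, including a single-quoted value containing a space.
    info = parse_connection_string_libpq("host=db1.example.com port=5432 password='p word'")
    # Expected to resemble: {'host': 'db1.example.com', 'port': '5432', 'password': 'p word'}
    print(info)
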
26 changes: 8 additions & 18 deletions pyproject.toml
@@ -34,36 +34,22 @@ files = [
[[tool.mypy.overrides]]
module = "test.test_cluster_monitor"
# ignore no-untyped-call because conftest can only type hinted at the end. Remove at the end.
disallow_untyped_calls = false
#disallow_untyped_calls = false

[tool.pylint.'MESSAGES CONTROL']
enable = [
'useless-suppression',
]
disable = [
'bad-option-value',
'duplicate-code',
'fixme',
'import-outside-toplevel',
'invalid-name',
'len-as-condition',
'locally-disabled',
'missing-docstring',
'no-else-raise',
'no-else-return',
'no-self-use',
'raise-missing-from',
'too-few-public-methods',
'too-many-ancestors',
'too-many-arguments',
'too-many-boolean-expressions',
'too-many-branches',
'too-many-function-args',
'too-many-instance-attributes',
'too-many-locals',
'too-many-public-methods',
'too-many-statements',
'ungrouped-imports',
'unspecified-encoding',
'wrong-import-order',
'wrong-import-position',
]

[tool.pylint.'FORMAT']
@@ -76,3 +62,7 @@ reports = 'no'

[tool.pylint.'TYPECHECK']
extension-pkg-whitelist = 'pydantic'

[tool.pylint.'DESIGN']
max-args = 10
max-attributes = 10
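
Enabling useless-suppression is what makes the stale disable comments removed elsewhere in this commit visible: pylint now reports any suppression that no longer silences a real message. A hedged illustration (file and values are made up):

    # example.py -- nothing here actually triggers no-member, so with useless-suppression
    # enabled pylint reports roughly: I0021: Useless suppression of 'no-member'
    value = {"a": 1}.get("a")  # pylint: disable=no-member
    print(value)
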
2 changes: 1 addition & 1 deletion test/conftest.py
@@ -80,7 +80,7 @@ def run_cmd(self, cmd: str, *args: str) -> None:
subprocess.check_call(argv)

def run_pg(self) -> None:
self.pg = subprocess.Popen( # pylint: disable=bad-option-value,consider-using-with
self.pg = subprocess.Popen( # pylint: disable=consider-using-with
[
str(self.pgbin / "postgres"),
"-D",
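
The one suppression that survives is kept deliberately: the Popen handle is stored on the test fixture and outlives run_pg(), so pylint's consider-using-with suggestion does not apply. A small sketch of the distinction (commands are placeholders):

    import subprocess

    # Short-lived process: the context-manager form is what consider-using-with asks for.
    with subprocess.Popen(["echo", "short-lived"]) as proc:
        proc.wait()

    # Long-lived process stored for later shutdown: a with-block would wait for the process
    # when the block exits, so a targeted disable on this line is the appropriate fix.
    pg = subprocess.Popen(["sleep", "60"])  # pylint: disable=consider-using-with
    pg.terminate()
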
2 changes: 1 addition & 1 deletion test/test_pglookout.py
@@ -15,7 +15,7 @@
from pglookout.common_types import MemberState, ObservedState
from pglookout.pglookout import PgLookout
from pglookout.pgutil import ConnectionInfo, DsnDict, get_connection_info, get_connection_info_from_config_line
from psycopg2._psycopg import connection as pg_connection # pylint: disable=no-name-in-module
from psycopg2._psycopg import connection as pg_connection
from typing import cast
from unittest.mock import Mock, patch
