Skip to content
This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Add most missing type hints to synapse.util #11328

Merged
merged 14 commits into the base branch on Nov 16, 2021
1 change: 1 addition & 0 deletions changelog.d/11328.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add type hints to `synapse.util`.
87 changes: 3 additions & 84 deletions mypy.ini
Original file line number Diff line number Diff line change
Expand Up @@ -196,92 +196,11 @@ disallow_untyped_defs = True
[mypy-synapse.streams.*]
disallow_untyped_defs = True

[mypy-synapse.util.batching_queue]
[mypy-synapse.util.*]
disallow_untyped_defs = True

[mypy-synapse.util.caches.cached_call]
disallow_untyped_defs = True

[mypy-synapse.util.caches.dictionary_cache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.lrucache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.response_cache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.stream_change_cache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.ttl_cache]
disallow_untyped_defs = True

[mypy-synapse.util.daemonize]
disallow_untyped_defs = True

[mypy-synapse.util.file_consumer]
disallow_untyped_defs = True

[mypy-synapse.util.frozenutils]
disallow_untyped_defs = True

[mypy-synapse.util.hash]
disallow_untyped_defs = True

[mypy-synapse.util.httpresourcetree]
disallow_untyped_defs = True

[mypy-synapse.util.iterutils]
disallow_untyped_defs = True

[mypy-synapse.util.linked_list]
disallow_untyped_defs = True

[mypy-synapse.util.logcontext]
disallow_untyped_defs = True

[mypy-synapse.util.logformatter]
disallow_untyped_defs = True

[mypy-synapse.util.macaroons]
disallow_untyped_defs = True

[mypy-synapse.util.manhole]
disallow_untyped_defs = True

[mypy-synapse.util.module_loader]
disallow_untyped_defs = True

[mypy-synapse.util.msisdn]
disallow_untyped_defs = True

[mypy-synapse.util.patch_inline_callbacks]
disallow_untyped_defs = True

[mypy-synapse.util.ratelimitutils]
disallow_untyped_defs = True

[mypy-synapse.util.retryutils]
disallow_untyped_defs = True

[mypy-synapse.util.rlimit]
disallow_untyped_defs = True

[mypy-synapse.util.stringutils]
disallow_untyped_defs = True

[mypy-synapse.util.templates]
disallow_untyped_defs = True

[mypy-synapse.util.threepids]
disallow_untyped_defs = True

[mypy-synapse.util.wheel_timer]
disallow_untyped_defs = True

[mypy-synapse.util.versionstring]
disallow_untyped_defs = True
[mypy-synapse.util.caches.treecache]
disallow_untyped_defs = False

[mypy-tests.handlers.test_user_directory]
disallow_untyped_defs = True
Expand Down
32 changes: 16 additions & 16 deletions synapse/util/async_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
Generic,
Hashable,
Iterable,
Iterator,
Optional,
Set,
TypeVar,
Expand All @@ -40,7 +41,6 @@
from twisted.internet import defer
from twisted.internet.defer import CancelledError
from twisted.internet.interfaces import IReactorTime
from twisted.python import failure
from twisted.python.failure import Failure

from synapse.logging.context import (
Expand Down Expand Up @@ -78,7 +78,7 @@ def __init__(self, deferred: "defer.Deferred[_T]", consumeErrors: bool = False):
object.__setattr__(self, "_result", None)
object.__setattr__(self, "_observers", [])

def callback(r):
def callback(r: _T) -> _T:
object.__setattr__(self, "_result", (True, r))

# once we have set _result, no more entries will be added to _observers,
Expand All @@ -98,7 +98,7 @@ def callback(r):
)
return r

def errback(f):
def errback(f: Failure) -> Optional[Failure]:
object.__setattr__(self, "_result", (False, f))

# once we have set _result, no more entries will be added to _observers,
Expand All @@ -109,7 +109,7 @@ def errback(f):
for observer in observers:
# This is a little bit of magic to correctly propagate stack
# traces when we `await` on one of the observer deferreds.
f.value.__failure__ = f
f.value.__failure__ = f # type: ignore[union-attr]
try:
observer.errback(f)
except Exception as e:
Expand Down Expand Up @@ -314,7 +314,7 @@ def queue(self, key: Hashable) -> defer.Deferred:
# will release the lock.

@contextmanager
def _ctx_manager(_):
def _ctx_manager(_: None) -> Iterator[None]:
try:
yield
finally:
Expand Down Expand Up @@ -355,7 +355,7 @@ def _await_lock(self, key: Hashable) -> defer.Deferred:
new_defer = make_deferred_yieldable(defer.Deferred())
entry.deferreds[new_defer] = 1

def cb(_r):
def cb(_r: None) -> "defer.Deferred[None]":
logger.debug("Acquired linearizer lock %r for key %r", self.name, key)
entry.count += 1

Expand All @@ -371,7 +371,7 @@ def cb(_r):
# code must be synchronous, so this is the only sensible place.)
return self._clock.sleep(0)

def eb(e):
def eb(e: Failure) -> Failure:
logger.info("defer %r got err %r", new_defer, e)
if isinstance(e, CancelledError):
logger.debug(
Expand Down Expand Up @@ -435,7 +435,7 @@ async def read(self, key: str) -> ContextManager:
await make_deferred_yieldable(curr_writer)

@contextmanager
def _ctx_manager():
def _ctx_manager() -> Iterator[None]:
try:
yield
finally:
Expand Down Expand Up @@ -464,7 +464,7 @@ async def write(self, key: str) -> ContextManager:
await make_deferred_yieldable(defer.gatherResults(to_wait_on))

@contextmanager
def _ctx_manager():
def _ctx_manager() -> Iterator[None]:
try:
yield
finally:
Expand Down Expand Up @@ -524,7 +524,7 @@ def time_it_out() -> None:

delayed_call = reactor.callLater(timeout, time_it_out)

def convert_cancelled(value: failure.Failure):
def convert_cancelled(value: Failure) -> Failure:
# if the original deferred was cancelled, and our timeout has fired, then
# the reason it was cancelled was due to our timeout. Turn the CancelledError
# into a TimeoutError.
Expand All @@ -534,19 +534,19 @@ def convert_cancelled(value: failure.Failure):

deferred.addErrback(convert_cancelled)

def cancel_timeout(result):
def cancel_timeout(result: _T) -> _T:
    """Pass `result` through unchanged, cancelling the pending timeout call.

    Attached via addBoth, so `result` may be a success value or a Failure;
    either way, once the deferred has completed the timeout no longer needs
    to fire.
    """
    still_pending = delayed_call.active()
    if still_pending:
        delayed_call.cancel()
    return result

deferred.addBoth(cancel_timeout)

def success_cb(val):
def success_cb(val: _T) -> None:
    """Forward a successful result to new_d, unless new_d has already fired."""
    if new_d.called:
        return
    new_d.callback(val)

def failure_cb(val):
def failure_cb(val: Failure) -> None:
    """Forward a failure to new_d, unless new_d has already fired."""
    if new_d.called:
        return
    new_d.errback(val)

Expand All @@ -557,13 +557,13 @@ def failure_cb(val):

# This class can't be generic because it uses slots with attrs.
# See: https://github.com/python-attrs/attrs/issues/313
@attr.s(slots=True, frozen=True)
@attr.s(slots=True, frozen=True, auto_attribs=True)
class DoneAwaitable: # should be: Generic[R]
"""Simple awaitable that returns the provided value."""

value = attr.ib(type=Any) # should be: R
value: Any # should be: R

def __await__(self):
def __await__(self) -> Any:
return self

def __iter__(self) -> "DoneAwaitable":
Expand Down
32 changes: 17 additions & 15 deletions synapse/util/caches/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
import typing
from enum import Enum, auto
from sys import intern
from typing import Callable, Dict, Optional, Sized
from typing import Any, Callable, Dict, List, Optional, Sized

import attr
from prometheus_client.core import Gauge
Expand Down Expand Up @@ -58,20 +58,20 @@ class EvictionReason(Enum):
time = auto()


@attr.s(slots=True)
@attr.s(slots=True, auto_attribs=True)
class CacheMetric:

_cache = attr.ib()
_cache_type = attr.ib(type=str)
_cache_name = attr.ib(type=str)
_collect_callback = attr.ib(type=Optional[Callable])
_cache: Sized
_cache_type: str
_cache_name: str
_collect_callback: Optional[Callable]

hits = attr.ib(default=0)
misses = attr.ib(default=0)
hits: int = 0
misses: int = 0
eviction_size_by_reason: typing.Counter[EvictionReason] = attr.ib(
factory=collections.Counter
)
memory_usage = attr.ib(default=None)
DMRobertson marked this conversation as resolved.
Show resolved Hide resolved
memory_usage: Optional[int] = None

def inc_hits(self) -> None:
    """Record a single cache hit by incrementing the hit counter."""
    self.hits += 1
Expand All @@ -89,13 +89,14 @@ def inc_memory_usage(self, memory: int) -> None:
self.memory_usage += memory

def dec_memory_usage(self, memory: int) -> None:
    """Decrease the tracked memory usage of the cache by `memory`.

    `memory_usage` is Optional; it must already have been initialised
    (e.g. by a prior increment) before a decrement is meaningful, hence
    the assertion.  Units are presumably bytes — not shown here, confirm
    against inc_memory_usage's callers.
    """
    assert self.memory_usage is not None
    self.memory_usage -= memory

def clear_memory_usage(self) -> None:
    """Reset tracked memory usage to zero, but only if tracking has started.

    Leaves `memory_usage` as None when nothing has ever been recorded,
    preserving the "never tracked" state.
    """
    if self.memory_usage is not None:
        self.memory_usage = 0

def describe(self):
def describe(self) -> List[str]:
    """Return description strings for this metric; base implementation has none."""
    return []

def collect(self) -> None:
Expand All @@ -118,8 +119,9 @@ def collect(self) -> None:
self.eviction_size_by_reason[reason]
)
cache_total.labels(self._cache_name).set(self.hits + self.misses)
if getattr(self._cache, "max_size", None):
cache_max_size.labels(self._cache_name).set(self._cache.max_size)
max_size = getattr(self._cache, "max_size", None)
if max_size:
cache_max_size.labels(self._cache_name).set(max_size)

if TRACK_MEMORY_USAGE:
# self.memory_usage can be None if nothing has been inserted
Expand Down Expand Up @@ -193,7 +195,7 @@ def register_cache(
}


def intern_string(string):
def intern_string(string: Optional[str]) -> Optional[str]:
"""Takes a (potentially) unicode string and interns it if it's ascii"""
if string is None:
return None
Expand All @@ -204,15 +206,15 @@ def intern_string(string):
return string


def intern_dict(dictionary):
def intern_dict(dictionary: Dict[str, Any]) -> Dict[str, Any]:
    """Return a copy of `dictionary` with well-known keys and values interned.

    Keys present in KNOWN_KEYS are replaced by their canonical interned
    instances; each value is passed through _intern_known_values, which
    interns values for a fixed set of known keys.
    """
    interned: Dict[str, Any] = {}
    for key, value in dictionary.items():
        canonical_key = KNOWN_KEYS.get(key, key)
        interned[canonical_key] = _intern_known_values(key, value)
    return interned


def _intern_known_values(key, value):
def _intern_known_values(key: str, value: Any) -> Any:
DMRobertson marked this conversation as resolved.
Show resolved Hide resolved
intern_keys = ("event_id", "room_id", "sender", "user_id", "type", "state_key")

if key in intern_keys:
Expand Down
2 changes: 1 addition & 1 deletion synapse/util/caches/deferred_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ def prefill(
callbacks = [callback] if callback else []
self.cache.set(key, value, callbacks=callbacks)

def invalidate(self, key) -> None:
def invalidate(self, key: KT) -> None:
clokep marked this conversation as resolved.
Show resolved Hide resolved
"""Delete a key, or tree of entries

If the cache is backed by a regular dict, then "key" must be of
Expand Down
Loading