From 9f58c95ac8bc313ff0e074cd886467ed3debfd1c Mon Sep 17 00:00:00 2001
From: Sergei Kolesnikov
Date: Sat, 19 Mar 2022 17:33:02 +0300
Subject: [PATCH] Upgrade to Python 3.7+

---
 AUTHORS         |  1 +
 setup.py        | 14 +++++++----
 tox.ini         |  2 +-
 twtxt/cache.py  |  2 +-
 twtxt/cli.py    | 21 ++++++++++-------
 twtxt/helper.py | 11 ++++++++-
 twtxt/models.py |  8 +++++--
 twtxt/twhttp.py | 63 +++++++++++++++++++++++--------------------
 8 files changed, 70 insertions(+), 52 deletions(-)

diff --git a/AUTHORS b/AUTHORS
index e1e283f..f4319e1 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -9,6 +9,7 @@ Jan Schütze
 Jeremy Kitchen
 Matthew Brady
 Melvin Carvalho
+Sergei Kolesnikov
 tedder
 teddydestodes
 Ted Timmons
diff --git a/setup.py b/setup.py
index e82ee46..c08adbb 100644
--- a/setup.py
+++ b/setup.py
@@ -28,11 +28,13 @@
 
     platforms='any',
 
+    python_requires='>=3.7',
     install_requires=[
-        'aiohttp>=2.2.5,<3',
+        'aiohttp>=3.8.1,<4',
+        'nest_asyncio>=1.5.4,<2',
         'python-dateutil>=2.6.1,<3',
-        'humanize>=0.5.1,<1',
-        'click>=6.7,<7',
+        'humanize>=4.0.0,<5',
+        'click>=8.0.0,<9',
     ],
 
     extras_require={
@@ -56,8 +58,10 @@
     classifiers=[
         'Programming Language :: Python',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
+        'Programming Language :: Python :: 3.10',
         'Operating System :: OS Independent',
         'Development Status :: 5 - Production/Stable',
         'Environment :: Console',
diff --git a/tox.ini b/tox.ini
index c63301a..aa61e8c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py34, py35
+envlist = py37, py38, py39, py310
 
 [testenv]
 commands = py.test --tb=short -v --cov {envsitepackagesdir}/twtxt/ tests/
diff --git a/twtxt/cache.py b/twtxt/cache.py
index 0b87d19..2ea6839 100644
--- a/twtxt/cache.py
+++ b/twtxt/cache.py
@@ -22,7 +22,7 @@ class Cache:
     cache_dir = get_app_dir("twtxt")
     cache_name = "cache"
 
-    def __init__(self, cache_file, cache, update_interval):
+    def __init__(self, cache_file, cache, update_interval=0):
         """Initializes new :class:`Cache` object.
 
         :param str cache_file: full path to the loaded cache file.
diff --git a/twtxt/cli.py b/twtxt/cli.py
index 4d08491..a3c7ea5 100644
--- a/twtxt/cli.py
+++ b/twtxt/cli.py
@@ -15,9 +15,11 @@
 from itertools import chain
 
 import click
+import asyncio
 
 from twtxt.cache import Cache
 from twtxt.config import Config
+from twtxt.helper import coro
 from twtxt.helper import run_pre_tweet_hook, run_post_tweet_hook
 from twtxt.helper import sort_and_truncate_tweets
 from twtxt.helper import style_timeline, style_source, style_source_with_status
@@ -120,7 +122,8 @@ def tweet(ctx, created_at, twtfile, text):
               is_flag=True,
               help="Force update even if cache is up-to-date. (Default: False)")
(Default: False)") @click.pass_context -def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, cache, force_update): +@coro +async def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, cache, force_update): """Retrieve your personal timeline.""" if source: source_obj = ctx.obj["conf"].get_source_by_nick(source) @@ -138,7 +141,7 @@ def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, ca with Cache.discover(update_interval=ctx.obj["conf"].timeline_update_interval) as cache: force_update = force_update or not cache.is_valid if force_update: - tweets = get_remote_tweets(sources, limit, timeout, cache) + tweets = await get_remote_tweets(sources, limit, timeout, cache) else: logger.debug("Multiple calls to 'timeline' within {0} seconds. Skipping update".format( cache.update_interval)) @@ -146,9 +149,9 @@ def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, ca tweets = list(chain.from_iterable([cache.get_tweets(source.url) for source in sources])) except OSError as e: logger.debug(e) - tweets = get_remote_tweets(sources, limit, timeout) + tweets = await get_remote_tweets(sources, limit, timeout) else: - tweets = get_remote_tweets(sources, limit, timeout) + tweets = await get_remote_tweets(sources, limit, timeout) if twtfile and not source: source = Source(ctx.obj["conf"].nick, ctx.obj["conf"].twturl, file=twtfile) @@ -208,12 +211,13 @@ def view(ctx, **kwargs): is_flag=True, help="Style output in an easy-to-parse format. (Default: False)") @click.pass_context -def following(ctx, check, timeout, porcelain): +@coro +async def following(ctx, check, timeout, porcelain): """Return the list of sources you’re following.""" sources = ctx.obj['conf'].following if check: - sources = get_remote_status(sources, timeout) + sources = await get_remote_status(sources, timeout) for (source, status) in sources: click.echo(style_source_with_status(source, status, porcelain)) else: @@ -229,7 +233,8 @@ def following(ctx, check, timeout, porcelain): flag_value=True, help="Force adding and overwriting nick") @click.pass_context -def follow(ctx, nick, url, force): +@coro +async def follow(ctx, nick, url, force): """Add a new source to your followings.""" source = Source(nick, url) sources = ctx.obj['conf'].following @@ -239,7 +244,7 @@ def follow(ctx, nick, url, force): click.confirm("➤ You’re already following {0}. Overwrite?".format( click.style(source.nick, bold=True)), default=False, abort=True) - _, status = get_remote_status([source])[0] + _, status = (await get_remote_status([source]))[0] if not status or status.status_code != 200: click.confirm("➤ The feed of {0} at {1} is not available. 
Follow anyway?".format( click.style(source.nick, bold=True), diff --git a/twtxt/helper.py b/twtxt/helper.py index 148e96c..7276e20 100644 --- a/twtxt/helper.py +++ b/twtxt/helper.py @@ -12,7 +12,9 @@ import subprocess import sys import textwrap +from functools import wraps +import asyncio import click import pkg_resources @@ -69,7 +71,7 @@ def style_source_with_status(source, status, porcelain=False): content_length=status.content_length, last_modified=status.last_modified) else: - if status.status_code == 200: + if hasattr(status, "status_code") and status.status_code == 200: scolor, smessage = "green", str(status.status_code) elif status: scolor, smessage = "red", str(status.status_code) @@ -177,3 +179,10 @@ def generate_user_agent(): user_agent = "twtxt/{version}".format(version=version) return {"User-Agent": user_agent} + +def coro(f): + @wraps(f) + def wrapper(*args, **kwargs): + return asyncio.run(f(*args, **kwargs)) + + return wrapper diff --git a/twtxt/models.py b/twtxt/models.py index 9705c0c..a5beb26 100644 --- a/twtxt/models.py +++ b/twtxt/models.py @@ -75,8 +75,12 @@ def __str__(self): def relative_datetime(self): """Return human-readable relative time string.""" now = datetime.now(timezone.utc) - tense = "from now" if self.created_at > now else "ago" - return "{0} {1}".format(humanize.naturaldelta(now - self.created_at), tense) + created_at = self.created_at.astimezone(timezone.utc) + + delta = humanize.naturaldelta(abs(created_at - now)) + tense = "from now" if now < created_at else "ago" + + return f"{delta} {tense}" @property def absolute_datetime(self): diff --git a/twtxt/twhttp.py b/twtxt/twhttp.py index 358ef94..f3ef0f3 100644 --- a/twtxt/twhttp.py +++ b/twtxt/twhttp.py @@ -10,7 +10,9 @@ import asyncio import logging +import nest_asyncio from datetime import datetime, timezone +from itertools import chain from email.utils import parsedate_to_datetime from ssl import CertificateError @@ -22,7 +24,7 @@ from twtxt.parser import parse_tweets logger = logging.getLogger(__name__) - +nest_asyncio.apply() class SourceResponse: """A :class:`SourceResponse` contains information about a :class:`Source`’s HTTP request. @@ -49,11 +51,10 @@ def natural_last_modified(self): return "{0} {1}".format(humanize.naturaldelta(now - last_modified), tense) -@asyncio.coroutine -def retrieve_status(client, source): +async def retrieve_status(client, source): status = None try: - response = yield from client.head(source.url) + response = await client.head(source.url) if response.headers.get("Content-Length"): content_length = response.headers.get("Content-Length") else: @@ -61,7 +62,7 @@ def retrieve_status(client, source): status = SourceResponse(status_code=response.status, content_length=content_length, last_modified=response.headers.get("Last-Modified")) - yield from response.release() + await response.release() except CertificateError as e: click.echo("✗ SSL Certificate Error: The feed's ({0}) SSL certificate is untrusted. 
Try using HTTP, " "or contact the feed's owner to report this issue.".format(source.url)) @@ -72,14 +73,13 @@ def retrieve_status(client, source): return source, status -@asyncio.coroutine -def retrieve_file(client, source, limit, cache): +async def retrieve_file(client, source, limit, cache): is_cached = cache.is_cached(source.url) if cache else None headers = {"If-Modified-Since": cache.last_modified(source.url)} if is_cached else {} try: - response = yield from client.get(source.url, headers=headers) - content = yield from response.text() + response = await client.get(source.url, headers=headers) + content = await response.text() except Exception as e: if is_cached: logger.debug("{0}: {1} - using cached content".format(source.url, e)) @@ -120,44 +120,39 @@ def retrieve_file(client, source, limit, cache): return [] -@asyncio.coroutine -def process_sources_for_status(client, sources): - g_status = [] - coroutines = [retrieve_status(client, source) for source in sources] - for coroutine in asyncio.as_completed(coroutines): - status = yield from coroutine - g_status.append(status) - return sorted(g_status, key=lambda x: x[0].nick) +async def process_sources_for_status(client, sources): + tasks = [retrieve_status(client, source) for source in sources] + statuses = await asyncio.gather(*tasks) + + return sorted(statuses, key=lambda x: x[0].nick) + +async def process_sources_for_file(client, sources, limit, cache=None): + tasks = [retrieve_file(client, source, limit, cache) for source in sources] + tweets_by_source = await asyncio.gather(*tasks) -@asyncio.coroutine -def process_sources_for_file(client, sources, limit, cache=None): - g_tweets = [] - coroutines = [retrieve_file(client, source, limit, cache) for source in sources] - for coroutine in asyncio.as_completed(coroutines): - tweets = yield from coroutine - g_tweets.extend(tweets) - return sorted(g_tweets, reverse=True)[:limit] + all_tweets = list(chain.from_iterable(tweets_by_source)) + return sorted(all_tweets, reverse=True)[:limit] -def get_remote_tweets(sources, limit=None, timeout=5.0, cache=None): + +async def get_remote_tweets(sources, limit=None, timeout=5.0, cache=None): conn = aiohttp.TCPConnector(use_dns_cache=True) headers = generate_user_agent() - with aiohttp.ClientSession(connector=conn, headers=headers, conn_timeout=timeout) as client: - loop = asyncio.get_event_loop() - def start_loop(client, sources, limit, cache=None): - return loop.run_until_complete(process_sources_for_file(client, sources, limit, cache)) - - tweets = start_loop(client, sources, limit, cache) + async with aiohttp.ClientSession(connector=conn, headers=headers, conn_timeout=timeout) as client: + loop = asyncio.get_event_loop() + tweets = loop.run_until_complete(process_sources_for_file(client, sources, limit, cache)) return tweets -def get_remote_status(sources, timeout=5.0): +async def get_remote_status(sources, timeout=5.0): conn = aiohttp.TCPConnector(use_dns_cache=True) headers = generate_user_agent() - with aiohttp.ClientSession(connector=conn, headers=headers, conn_timeout=timeout) as client: + + async with aiohttp.ClientSession(connector=conn, headers=headers, conn_timeout=timeout) as client: loop = asyncio.get_event_loop() result = loop.run_until_complete(process_sources_for_status(client, sources)) + return result