Skip to content

Commit

Permalink
Upgrade to Python 3.7+
Browse files Browse the repository at this point in the history
  • Loading branch information
win0err committed Mar 19, 2022
1 parent 3fb7144 commit 9f58c95
Show file tree
Hide file tree
Showing 8 changed files with 70 additions and 52 deletions.
1 change: 1 addition & 0 deletions AUTHORS
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ Jan Schütze <[email protected]>
Jeremy Kitchen <[email protected]>
Matthew Brady <[email protected]>
Melvin Carvalho <[email protected]>
Sergei Kolesnikov <[email protected]>
tedder <[email protected]>
teddydestodes <[email protected]>
Ted Timmons <[email protected]>
Expand Down
14 changes: 9 additions & 5 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,13 @@

platforms='any',

python_requires='>=3.7',
install_requires=[
'aiohttp>=2.2.5,<3',
'aiohttp>=3.8.1,<4',
'nest_asyncio>=1.5.4,<2',
'python-dateutil>=2.6.1,<3',
'humanize>=0.5.1,<1',
'click>=6.7,<7',
'humanize>=4.0.0,<5',
'click>=8.0.0,<9',
],

extras_require={
Expand All @@ -56,8 +58,10 @@
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
Expand Down
2 changes: 1 addition & 1 deletion tox.ini
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[tox]
envlist = py34, py35
envlist = py37, py38, py39, py310

[testenv]
commands = py.test --tb=short -v --cov {envsitepackagesdir}/twtxt/ tests/
Expand Down
2 changes: 1 addition & 1 deletion twtxt/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class Cache:
cache_dir = get_app_dir("twtxt")
cache_name = "cache"

def __init__(self, cache_file, cache, update_interval):
def __init__(self, cache_file, cache, update_interval=0):
"""Initializes new :class:`Cache` object.
:param str cache_file: full path to the loaded cache file.
Expand Down
21 changes: 13 additions & 8 deletions twtxt/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,11 @@
from itertools import chain

import click
import asyncio

from twtxt.cache import Cache
from twtxt.config import Config
from twtxt.helper import coro
from twtxt.helper import run_pre_tweet_hook, run_post_tweet_hook
from twtxt.helper import sort_and_truncate_tweets
from twtxt.helper import style_timeline, style_source, style_source_with_status
Expand Down Expand Up @@ -120,7 +122,8 @@ def tweet(ctx, created_at, twtfile, text):
is_flag=True,
help="Force update even if cache is up-to-date. (Default: False)")
@click.pass_context
def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, cache, force_update):
@coro
async def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, cache, force_update):
"""Retrieve your personal timeline."""
if source:
source_obj = ctx.obj["conf"].get_source_by_nick(source)
Expand All @@ -138,17 +141,17 @@ def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, ca
with Cache.discover(update_interval=ctx.obj["conf"].timeline_update_interval) as cache:
force_update = force_update or not cache.is_valid
if force_update:
tweets = get_remote_tweets(sources, limit, timeout, cache)
tweets = await get_remote_tweets(sources, limit, timeout, cache)
else:
logger.debug("Multiple calls to 'timeline' within {0} seconds. Skipping update".format(
cache.update_interval))
# Behold, almighty list comprehensions! (I might have gone overboard here…)
tweets = list(chain.from_iterable([cache.get_tweets(source.url) for source in sources]))
except OSError as e:
logger.debug(e)
tweets = get_remote_tweets(sources, limit, timeout)
tweets = await get_remote_tweets(sources, limit, timeout)
else:
tweets = get_remote_tweets(sources, limit, timeout)
tweets = await get_remote_tweets(sources, limit, timeout)

if twtfile and not source:
source = Source(ctx.obj["conf"].nick, ctx.obj["conf"].twturl, file=twtfile)
Expand Down Expand Up @@ -208,12 +211,13 @@ def view(ctx, **kwargs):
is_flag=True,
help="Style output in an easy-to-parse format. (Default: False)")
@click.pass_context
def following(ctx, check, timeout, porcelain):
@coro
async def following(ctx, check, timeout, porcelain):
"""Return the list of sources you’re following."""
sources = ctx.obj['conf'].following

if check:
sources = get_remote_status(sources, timeout)
sources = await get_remote_status(sources, timeout)
for (source, status) in sources:
click.echo(style_source_with_status(source, status, porcelain))
else:
Expand All @@ -229,7 +233,8 @@ def following(ctx, check, timeout, porcelain):
flag_value=True,
help="Force adding and overwriting nick")
@click.pass_context
def follow(ctx, nick, url, force):
@coro
async def follow(ctx, nick, url, force):
"""Add a new source to your followings."""
source = Source(nick, url)
sources = ctx.obj['conf'].following
Expand All @@ -239,7 +244,7 @@ def follow(ctx, nick, url, force):
click.confirm("➤ You’re already following {0}. Overwrite?".format(
click.style(source.nick, bold=True)), default=False, abort=True)

_, status = get_remote_status([source])[0]
_, status = (await get_remote_status([source]))[0]
if not status or status.status_code != 200:
click.confirm("➤ The feed of {0} at {1} is not available. Follow anyway?".format(
click.style(source.nick, bold=True),
Expand Down
11 changes: 10 additions & 1 deletion twtxt/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,9 @@
import subprocess
import sys
import textwrap
from functools import wraps

import asyncio
import click
import pkg_resources

Expand Down Expand Up @@ -69,7 +71,7 @@ def style_source_with_status(source, status, porcelain=False):
content_length=status.content_length,
last_modified=status.last_modified)
else:
if status.status_code == 200:
if hasattr(status, "status_code") and status.status_code == 200:
scolor, smessage = "green", str(status.status_code)
elif status:
scolor, smessage = "red", str(status.status_code)
Expand Down Expand Up @@ -177,3 +179,10 @@ def generate_user_agent():
user_agent = "twtxt/{version}".format(version=version)

return {"User-Agent": user_agent}

def coro(f):
    """Decorate an async function so it can be called synchronously.

    Each invocation of the decorated callable builds the coroutine and
    runs it to completion with ``asyncio.run``, which lets click command
    callbacks be written as ``async def`` while click still calls them
    like ordinary functions.
    """
    @wraps(f)
    def sync_runner(*args, **kwargs):
        coroutine = f(*args, **kwargs)
        return asyncio.run(coroutine)

    return sync_runner
8 changes: 6 additions & 2 deletions twtxt/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,8 +75,12 @@ def __str__(self):
def relative_datetime(self):
    """Return human-readable relative time string.

    :returns: e.g. ``"2 hours ago"`` or ``"a minute from now"``.
    """
    now = datetime.now(timezone.utc)
    # Normalize to UTC so aware datetimes carrying other offsets
    # subtract cleanly against `now`.
    created_at = self.created_at.astimezone(timezone.utc)

    # abs() lets naturaldelta describe both past and future instants;
    # the tense word carries the direction.
    delta = humanize.naturaldelta(abs(created_at - now))
    tense = "from now" if now < created_at else "ago"

    return f"{delta} {tense}"

@property
def absolute_datetime(self):
Expand Down
63 changes: 29 additions & 34 deletions twtxt/twhttp.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,9 @@

import asyncio
import logging
import nest_asyncio
from datetime import datetime, timezone
from itertools import chain
from email.utils import parsedate_to_datetime
from ssl import CertificateError

Expand All @@ -22,7 +24,7 @@
from twtxt.parser import parse_tweets

logger = logging.getLogger(__name__)

nest_asyncio.apply()

class SourceResponse:
"""A :class:`SourceResponse` contains information about a :class:`Source`’s HTTP request.
Expand All @@ -49,19 +51,18 @@ def natural_last_modified(self):
return "{0} {1}".format(humanize.naturaldelta(now - last_modified), tense)


@asyncio.coroutine
def retrieve_status(client, source):
async def retrieve_status(client, source):
status = None
try:
response = yield from client.head(source.url)
response = await client.head(source.url)
if response.headers.get("Content-Length"):
content_length = response.headers.get("Content-Length")
else:
content_length = 0
status = SourceResponse(status_code=response.status,
content_length=content_length,
last_modified=response.headers.get("Last-Modified"))
yield from response.release()
await response.release()
except CertificateError as e:
click.echo("✗ SSL Certificate Error: The feed's ({0}) SSL certificate is untrusted. Try using HTTP, "
"or contact the feed's owner to report this issue.".format(source.url))
Expand All @@ -72,14 +73,13 @@ def retrieve_status(client, source):
return source, status


@asyncio.coroutine
def retrieve_file(client, source, limit, cache):
async def retrieve_file(client, source, limit, cache):
is_cached = cache.is_cached(source.url) if cache else None
headers = {"If-Modified-Since": cache.last_modified(source.url)} if is_cached else {}

try:
response = yield from client.get(source.url, headers=headers)
content = yield from response.text()
response = await client.get(source.url, headers=headers)
content = await response.text()
except Exception as e:
if is_cached:
logger.debug("{0}: {1} - using cached content".format(source.url, e))
Expand Down Expand Up @@ -120,44 +120,39 @@ def retrieve_file(client, source, limit, cache):
return []


async def process_sources_for_status(client, sources):
    """Fetch the HTTP status of every source concurrently.

    :param client: aiohttp client session used for the HEAD requests.
    :param list sources: Source objects to check.
    :returns: list of ``(source, status)`` tuples sorted by source nick.
    """
    tasks = [retrieve_status(client, source) for source in sources]
    # gather() preserves input order, but we sort by nick anyway for
    # stable, user-friendly output.
    statuses = await asyncio.gather(*tasks)

    return sorted(statuses, key=lambda x: x[0].nick)


async def process_sources_for_file(client, sources, limit, cache=None):
    """Download and parse every source's feed concurrently.

    :param client: aiohttp client session used for the GET requests.
    :param list sources: Source objects whose feeds are fetched.
    :param int limit: maximum number of tweets to return (None = all).
    :param cache: optional Cache enabling conditional requests.
    :returns: merged tweets of all sources, newest first, capped at *limit*.
    """
    tasks = [retrieve_file(client, source, limit, cache) for source in sources]
    tweets_by_source = await asyncio.gather(*tasks)

    # Flatten the per-source tweet lists into a single timeline.
    all_tweets = list(chain.from_iterable(tweets_by_source))

    return sorted(all_tweets, reverse=True)[:limit]

async def get_remote_tweets(sources, limit=None, timeout=5.0, cache=None):
    """Retrieve tweets from the given feeds concurrently.

    :param list sources: Source objects whose feeds are fetched.
    :param int limit: maximum number of tweets to return (None = all).
    :param float timeout: connection timeout in seconds.
    :param cache: optional Cache enabling conditional requests.
    :returns: tweets sorted newest-first, truncated to *limit*.
    """
    conn = aiohttp.TCPConnector(use_dns_cache=True)
    headers = generate_user_agent()
    # NOTE(review): `conn_timeout` is deprecated in aiohttp 3.x in favor
    # of ClientTimeout — kept as-is to preserve behavior.
    async with aiohttp.ClientSession(connector=conn, headers=headers, conn_timeout=timeout) as client:
        # nest_asyncio.apply() at module import permits re-entering the
        # already-running loop with run_until_complete() here.
        loop = asyncio.get_event_loop()
        tweets = loop.run_until_complete(process_sources_for_file(client, sources, limit, cache))

    return tweets


async def get_remote_status(sources, timeout=5.0):
    """Check the availability of the given feeds concurrently.

    :param list sources: Source objects to check.
    :param float timeout: connection timeout in seconds.
    :returns: list of ``(source, status)`` tuples sorted by nick.
    """
    conn = aiohttp.TCPConnector(use_dns_cache=True)
    headers = generate_user_agent()
    # NOTE(review): `conn_timeout` is deprecated in aiohttp 3.x in favor
    # of ClientTimeout — kept as-is to preserve behavior.
    async with aiohttp.ClientSession(connector=conn, headers=headers, conn_timeout=timeout) as client:
        # nest_asyncio.apply() at module import permits re-entering the
        # already-running loop with run_until_complete() here.
        loop = asyncio.get_event_loop()
        result = loop.run_until_complete(process_sources_for_status(client, sources))

    return result

0 comments on commit 9f58c95

Please sign in to comment.