chore: update requirements and fixtures
- remove dead plugin calc
- remove dead fixtures
- update all fixtures
- fix wiki plugin
- fix github plugin
llimllib committed Mar 9, 2024
1 parent 85dd7fc commit b498aad
Showing 39 changed files with 16,820 additions and 14,781 deletions.
2 changes: 1 addition & 1 deletion limbo/limbo.py
@@ -16,7 +16,7 @@
 from .server import LimboServer
 from .fakeserver import FakeServer
 
-VERSION = "8.6.1"
+VERSION = "8.7.0"
 
 CURDIR = os.path.abspath(os.path.dirname(__file__))
 DIR = functools.partial(os.path.join, CURDIR)
36 changes: 0 additions & 36 deletions limbo/plugins/calc.py

This file was deleted.

6 changes: 4 additions & 2 deletions limbo/plugins/gif.py
@@ -21,8 +21,10 @@ def gif(search, unsafe=False):
     searchb = quote(search.encode("utf8"))
 
     safe = "&safe=" if unsafe else "&safe=active"
-    searchurl = "https://www.google.com/search?tbs=itp:animated&tbm=isch&q={0}{1}".format(
-        searchb, safe
+    searchurl = (
+        "https://www.google.com/search?tbs=itp:animated&tbm=isch&q={0}{1}".format(
+            searchb, safe
+        )
     )
 
     # this is an old iphone user agent. Seems to make google return good results.
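For context on the gif plugin hunk above: the plugin builds a Google image-search URL restricted to animated results and scrapes the returned HTML, sending an old iPhone User-Agent because (per the comment) Google serves simpler, easier-to-parse markup to that client. A minimal sketch of that request pattern; the function name and UA string below are illustrative assumptions, not copied from the plugin:

    import requests
    from urllib.parse import quote

    def fetch_gif_search_html(search, unsafe=False):
        """Fetch Google's animated-image search results page for a query."""
        safe = "&safe=" if unsafe else "&safe=active"
        url = "https://www.google.com/search?tbs=itp:animated&tbm=isch&q={0}{1}".format(
            quote(search.encode("utf8")), safe
        )
        # Illustrative old-iPhone UA; the plugin's actual header value is not
        # shown in this hunk, but the idea is to look like a simple mobile client.
        headers = {"User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X)"}
        return requests.get(url, headers=headers).text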
22 changes: 15 additions & 7 deletions limbo/plugins/wiki.py
@@ -1,5 +1,7 @@
 """!wiki <topic> returns a wiki link for <topic>"""
 
+import re
+
 try:
     from urllib import quote
 except ImportError:
@@ -21,7 +23,7 @@ def wiki(searchterm):
     pages = result["query"]["search"]
 
     # try to reject disambiguation pages
-    pages = [p for p in pages if 'may refer to' not in p["snippet"]]
+    pages = [p for p in pages if "may refer to" not in p["snippet"]]
 
     if not pages:
         return ""
@@ -30,13 +32,19 @@ def wiki(searchterm):
     link = "http://en.wikipedia.org/wiki/{0}".format(page)
 
     r = requests.get(
-        "http://en.wikipedia.org/w/api.php?format=json&action=parse&page={0}".
-        format(page)).json()
+        "http://en.wikipedia.org/w/api.php?format=json&action=parse&page={0}".format(
+            page
+        )
+    ).json()
     soup = BeautifulSoup(r["parse"]["text"]["*"], "html5lib")
-    p = soup.find('p').get_text()
-    p = p[:8000]
-
-    return u"{0}\n{1}".format(p, link)
+    ps = soup.find_all("p")
+    first_para = None
+    for p in ps:
+        if p.get_text().strip():
+            first_para = str(p)[:8000]
+            break
+
+    return f"{first_para}\n{link}"
 
 
 def on_message(msg, server):
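The wiki fix above replaces soup.find('p'), which often landed on an empty leading paragraph in the parsed article HTML and produced blank responses, with a loop that keeps the first <p> containing visible text. A standalone sketch of the same selection logic, assuming BeautifulSoup with the html5lib parser as in the plugin; first_nonempty_paragraph is an illustrative name, not a function in the repository:

    from bs4 import BeautifulSoup

    def first_nonempty_paragraph(html):
        """Return the first <p> element with non-whitespace text, or None."""
        soup = BeautifulSoup(html, "html5lib")
        for p in soup.find_all("p"):
            if p.get_text().strip():
                # Cap the result, mirroring the plugin's 8000-character limit.
                return str(p)[:8000]
        return None

    # The empty leading paragraph is skipped:
    assert "Python" in first_nonempty_paragraph("<p> </p><p>Python is a language.</p>")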
35 changes: 0 additions & 35 deletions limbo/plugins/youtube.py

This file was deleted.

70 changes: 34 additions & 36 deletions requirements.txt
@@ -1,40 +1,38 @@
-attrs==21.4.0
-beautifulsoup4==4.10.0
-certifi==2021.10.8
-charset-normalizer==2.0.12
-coverage==6.3.2
-distlib==0.3.4
-filelock==3.6.0
-flake8==4.0.1
+beautifulsoup4==4.12.3
+cachetools==5.3.3
+certifi==2024.2.2
+chardet==5.2.0
+charset-normalizer==3.3.2
+colorama==0.4.6
+coverage==7.4.3
+distlib==0.3.8
+exceptiongroup==1.2.0
+filelock==3.13.1
+flake8==7.0.0
 html5lib==1.1
-idna==3.3
-importlib-metadata==4.2.0
-iniconfig==1.1.1
-mccabe==0.6.1
-multidict==6.0.2
-packaging==21.3
-platformdirs==2.5.1
-pluggy==1.0.0
-py==1.11.0
-pycodestyle==2.8.0
-pyfiglet==0.8.post1
-pyflakes==2.4.0
-pyparsing==3.0.7
-pytest==7.0.1
-pytest-cov==3.0.0
-PyYAML==6.0
-requests==2.27.1
+idna==3.6
+iniconfig==2.0.0
+mccabe==0.7.0
+multidict==6.0.5
+packaging==23.2
+platformdirs==4.2.0
+pluggy==1.4.0
+pycodestyle==2.11.1
+pyfiglet==1.0.2
+pyflakes==3.2.0
+pyproject-api==1.6.1
+pytest==8.1.1
+pytest-cov==4.1.0
+PyYAML==6.0.1
+requests==2.31.0
 six==1.16.0
-soupsieve==2.3.1
-toml==0.10.2
+soupsieve==2.5
+tomli==2.0.1
-tox==3.24.5
-typing-extensions==4.1.1
-urllib3==1.26.8
-vcrpy==4.1.1
-virtualenv==20.13.1
+tox==4.14.1
+urllib3==2.2.1
+vcrpy==6.0.1
+virtualenv==20.25.1
 webencodings==0.5.1
-websocket-client==1.2.3
-wrapt==1.13.3
-yarl==1.7.2
-zipp==3.7.0
+websocket-client==1.7.0
+wrapt==1.16.0
+yarl==1.9.4
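On the fixtures mentioned in the commit message: vcrpy stays pinned in requirements.txt, which suggests the test fixtures are recorded HTTP cassettes that were re-recorded against the current APIs. A hedged sketch of how such a cassette might be recorded and replayed with vcrpy; the cassette path and test body are assumptions for illustration, not taken from this repository:

    import requests
    import vcr

    # On the first run this records the HTTP exchange to a YAML cassette;
    # later runs replay it, so the test no longer depends on the live API.
    @vcr.use_cassette("test/fixtures/wikipedia_search.yaml")
    def test_wikipedia_search():
        r = requests.get(
            "http://en.wikipedia.org/w/api.php",
            params={"format": "json", "action": "query",
                    "list": "search", "srsearch": "python"},
        )
        assert r.status_code == 200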
