diff --git a/pywikibot/bot_choice.py b/pywikibot/bot_choice.py
index 035acd8133..2548a8b3ab 100644
--- a/pywikibot/bot_choice.py
+++ b/pywikibot/bot_choice.py
@@ -71,10 +71,9 @@ def __init__(self, stop: bool = True) -> None:
@staticmethod
def formatted(text: str, options: Iterable[Option],
default: str | None = None) -> str:
- """
- Create a text with the options formatted into it.
+ """Create a text with the options formatted into it.
- This static method is used by :py:meth:`pywikibot.input_choice`.
+ This static method is used by :meth:`pywikibot.input_choice`.
It calls :py:obj:`format` for all *options* to combine the
question for :py:meth:`pywikibot.input`.
@@ -84,9 +83,8 @@ def formatted(text: str, options: Iterable[Option],
:return: Text with the options formatted into it
"""
- formatted_options = []
- for option in options:
- formatted_options.append(option.format(default=default))
+ formatted_options = [option.format(default=default)
+ for option in options]
# remove color highlights before fill function
text = f"{text} ({', '.join(formatted_options)})"
pattern = '<<[a-z]+>>'
diff --git a/pywikibot/page/_collections.py b/pywikibot/page/_collections.py
index 8f4ac1a342..1abf43032f 100644
--- a/pywikibot/page/_collections.py
+++ b/pywikibot/page/_collections.py
@@ -1,6 +1,6 @@
"""Structures holding data for Wikibase entities."""
#
-# (C) Pywikibot team, 2019-2022
+# (C) Pywikibot team, 2019-2024
#
# Distributed under the terms of the MIT license.
#
@@ -471,14 +471,13 @@ def toJSON(self, diffto: dict | None = None) -> dict:
for dbname, sitelink in data.items():
if dbname not in diffto:
continue
+
diffto_link = diffto[dbname]
if diffto_link.get('title') == sitelink.get('title'):
# compare badges
- tmp_badges = []
diffto_badges = diffto_link.get('badges', [])
badges = sitelink.get('badges', [])
- for badge in set(diffto_badges) - set(badges):
- tmp_badges.append('')
+ tmp_badges = [''] * len(set(diffto_badges) - set(badges))
for badge in set(badges) - set(diffto_badges):
tmp_badges.append(badge)
if tmp_badges:
diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py
index 916d894233..abca600fe4 100644
--- a/pywikibot/site/_apisite.py
+++ b/pywikibot/site/_apisite.py
@@ -1651,7 +1651,7 @@ def getredirtarget(
if 'pages' not in result['query']:
# No "pages" element might indicate a circular redirect
# Check that a "to" link is also a "from" link in redirmap
- for _from, _to in redirmap.items():
+ for _to in redirmap.values():
if _to['title'] in redirmap:
raise CircularRedirectError(page)
diff --git a/scripts/dataextend.py b/scripts/dataextend.py
index 4ce122703b..4ae3dcc146 100755
--- a/scripts/dataextend.py
+++ b/scripts/dataextend.py
@@ -3190,12 +3190,11 @@ def findoccupations(self, html: str):
section = self.findbyre(
r'(?s)"description">\s*]*>(.*?)', html)
if section:
- result = []
texts = []
for subsection in section.split(' et '):
texts += self.findallbyre(r'(\w[\-\s\w&\']+)', subsection)
- for text in texts[:8]:
- result.append(self.findbyre(r'(.+)', text, 'occupation'))
+ result = [self.findbyre(r'(.+)', text, 'occupation')
+ for text in texts[:8]]
return result
return None
@@ -5261,24 +5260,20 @@ def findawards(self, html: str):
if section:
parts = self.findallbyre(r'(?s)( \s*.*? )',
section)
- result = []
- for part in parts:
- if '[nominee]' not in part:
- result.append(
- self.findbyre(r']*>(.*?)<', section, 'award'))
+ result = [self.findbyre(r']*>(.*?)<', section, 'award')
+ for part in parts if '[nominee]' not in part]
return result
+ return None

def findnominations(self, html: str):
section = self.findbyre(r'(?s)(.*?)', html)
if section:
parts = self.findallbyre(r'(?s)( \s*.*? )',
section)
- result = []
- for part in parts:
- if '[nominee]' in part:
- result.append(
- self.findbyre(r']*>(.*?)<', section, 'award'))
+ result = [self.findbyre(r']*>(.*?)<', section, 'award')
+ for part in parts if '[nominee]' in part]
return result
+ return None

def findspouses(self, html: str):
return self.findallbyre(
@@ -6305,14 +6300,15 @@ def findworkfields(self, html: str):
if section:
preresults = self.findallbyre(r'(?s) | (.*?) ',
section.replace('&nbsp;', ' '))[:5]
- results = []
- for preresult in preresults:
- if int(self.findbyre(r'">(\d+)', preresult) or 0) > 5:
- results.append(
- self.findbyre(
- r'(?s)"Mathematics Subject Classification">(.*?)<',
- preresult, 'subject'))
- return results
+ result = [
+ self.findbyre(
+ r'(?s)"Mathematics Subject Classification">(.*?)<',
+ preresult, 'subject')
+ for preresult in preresults
+ if int(self.findbyre(r'">(\d+)', preresult) or 0) > 5
+ ]
+ return result
+ return None

def findwebsite(self, html: str):
return self.findbyre(r'(?s)Homepage: | \s*]*>(.*?)<',
@@ -8340,15 +8336,16 @@ def findresidences(self, html: str):
r'(?s)Woon- en verblijfplaatsen\s* | \s* \s* (.*?)',
html)
if section:
- result = []
subsections = self.findallbyre(r'(?s)( )', section)
- for subsection in subsections:
- result.append(
- self.findbyre(
- r'([^<>]*) | ', subsection, 'city')
- or self.findbyre(
- r']*>(.*?)<', subsection, 'city'))
+ result = [
+ self.findbyre(
+ r'([^<>]*) | ', subsection, 'city')
+ or self.findbyre(
+ r']*>(.*?)<', subsection, 'city')
+ for subsection in subsections
+ ]
return result
+ return None

def findoccupations(self, html: str):
section = self.findbyre(
@@ -14882,10 +14879,9 @@ def findnationalities(self, html: str):
def findsources(self, html: str):
sources = self.getvalues('670', 'a', html)
- result = []
- for source in sources:
- if source and ' by ' not in source and ' / ' not in source:
- result.append(self.findbyre('(.*)', source, 'source'))
+ result = [self.findbyre('(.*)', source, 'source')
+ for source in sources
+ if source and ' by ' not in source and ' / ' not in source]
return result
diff --git a/scripts/patrol.py b/scripts/patrol.py
index 5712ba443b..ec0259b6e1 100755
--- a/scripts/patrol.py
+++ b/scripts/patrol.py
@@ -354,9 +354,8 @@ def match(self, page_title) -> bool:
if not self.linkedpages:
verbose_output('loading page links on ' + self.page_title)
p = pywikibot.Page(self.site, self.page_title)
- linkedpages = []
- for linkedpage in p.linkedPages():
- linkedpages.append(linkedpage.title())
+ linkedpages = [linkedpage.title()
+ for linkedpage in p.linkedPages()]
self.linkedpages = linkedpages
verbose_output(f'Loaded {len(linkedpages)} page links')
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 3a087db57b..0016b2c55f 100755
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -741,7 +741,7 @@ def test_revdel_file(self):
fp2 = pywikibot.FilePage(site, 'File:T276726.png')
site.loadimageinfo(fp2, history=True)
- for idx, v in fp2._file_revisions.items():
+ for v in fp2._file_revisions.values():
if v['timestamp'] in (ts1, ts2):
self.assertTrue(hasattr(v, 'commenthidden'))
@@ -753,7 +753,7 @@ def test_revdel_file(self):
fp3 = pywikibot.FilePage(site, 'File:T276726.png')
site.loadimageinfo(fp3, history=True)
- for idx, v in fp3._file_revisions.items():
+ for v in fp3._file_revisions.values():
if v['timestamp'] in (ts1, ts2):
self.assertFalse(hasattr(v, 'commenthidden'))
self.assertFalse(hasattr(v, 'userhidden'))
diff --git a/tests/utils.py b/tests/utils.py
index 31aea380ee..053743f365 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -357,11 +357,10 @@ def __init__(self, code, fam, user):
if self.family.name == 'wikisource':
extensions.append({'name': 'ProofreadPage'})
self._siteinfo._cache['extensions'] = (extensions, True)
- aliases = []
- for alias in ('PrefixIndex', ):
- # TODO: Not all follow that scheme (e.g. "BrokenRedirects")
- aliases.append(
- {'realname': alias.capitalize(), 'aliases': [alias]})
+
+ # TODO: Not all follow that scheme (e.g. "BrokenRedirects")
+ aliases = [{'realname': alias.capitalize(), 'aliases': [alias]}
+ for alias in ('PrefixIndex', )]
self._siteinfo._cache['specialpagealiases'] = (aliases, True)
self._msgcache = {'*': 'dummy entry', 'hello': 'world'}
diff --git a/tests/wikibase_edit_tests.py b/tests/wikibase_edit_tests.py
index 1ee33de808..d9069d6b15 100755
--- a/tests/wikibase_edit_tests.py
+++ b/tests/wikibase_edit_tests.py
@@ -91,9 +91,8 @@ def test_add_claim_with_qualifier(self):
item = pywikibot.PropertyPage(testsite, 'P115')
item.get()
if 'P115' in item.claims:
- to_remove = []
- for claim in item.claims['P115']:
- to_remove.append({'id': claim.toJSON()['id'], 'remove': ''})
+ to_remove = [{'id': claim.toJSON()['id'], 'remove': ''}
+ for claim in item.claims['P115']]
item.editEntity({'claims': to_remove})
claim = pywikibot.page.Claim(
|