Skip to content

Commit

Permalink
[IMPR] Code improvements
Browse files Browse the repository at this point in the history
- decrease nested statements
- simplify imports
- use maximum() function to determine maximum
- remove empty comments

Change-Id: I7f95850f83ab822e0448f0766e40c9ecab4995bb
  • Loading branch information
xqt committed Nov 2, 2024
1 parent 35b39b7 commit 11b979d
Show file tree
Hide file tree
Showing 20 changed files with 122 additions and 140 deletions.
45 changes: 22 additions & 23 deletions pywikibot/bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -2284,32 +2284,31 @@ def treat_page(self) -> None:
item = pywikibot.ItemPage.fromPage(page)
except NoPageError:
item = None
elif isinstance(page, pywikibot.ItemPage):
item = page
page = None
else:
if isinstance(page, pywikibot.ItemPage):
item = page
# FIXME: Hack because 'is_data_repository' doesn't work if
# site is the APISite. See T85483
assert page is not None
data_site = page.site.data_repository()
if (data_site.family == page.site.family
and data_site.code == page.site.code):
is_item = page.namespace() == data_site.item_namespace.id
else:
is_item = False
if is_item:
item = pywikibot.ItemPage(data_site, page.title())
page = None
else:
# FIXME: Hack because 'is_data_repository' doesn't work if
# site is the APISite. See T85483
assert page is not None
data_site = page.site.data_repository()
if (data_site.family == page.site.family
and data_site.code == page.site.code):
is_item = page.namespace() == data_site.item_namespace.id
else:
is_item = False
if is_item:
item = pywikibot.ItemPage(data_site, page.title())
page = None
else:
try:
item = pywikibot.ItemPage.fromPage(page)
except NoPageError:
item = None
if self.use_from_page is False:
_error(f'{page} is not in the item namespace but must'
' be an item.')
return
try:
item = pywikibot.ItemPage.fromPage(page)
except NoPageError:
item = None
if self.use_from_page is False:
_error(f'{page} is not in the item namespace but must'
' be an item.')
return

assert not (page is None and item is None)

Expand Down
13 changes: 6 additions & 7 deletions pywikibot/bot_choice.py
Original file line number Diff line number Diff line change
Expand Up @@ -350,14 +350,13 @@ def handle(self) -> Any:
kwargs['label'] += '#' + self.replacer._new.section
else:
kwargs['label'] = self.replacer._new.anchor
elif self.replacer.current_link.anchor is None:
kwargs['label'] = self.replacer.current_groups['title']
if self.replacer.current_groups['section']:
kwargs['label'] += '#' \
+ self.replacer.current_groups['section']
else:
if self.replacer.current_link.anchor is None:
kwargs['label'] = self.replacer.current_groups['title']
if self.replacer.current_groups['section']:
kwargs['label'] += '#' \
+ self.replacer.current_groups['section']
else:
kwargs['label'] = self.replacer.current_link.anchor
kwargs['label'] = self.replacer.current_link.anchor
return pywikibot.Link.create_separated(
self.replacer._new.canonical_title(), self.replacer._new.site,
**kwargs)
Expand Down
7 changes: 3 additions & 4 deletions pywikibot/diff.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,10 +189,9 @@ def color_line(self, line: str, line_ref: str | None = None) -> str:
apply_color = 'default;' + self.bg_colors[color]
char_tagged = f'<<{apply_color}>>{char}'
color_closed = False
else:
if char_ref == ' ':
char_tagged = f'<<default>>{char}'
color_closed = True
elif char_ref == ' ':
char_tagged = f'<<default>>{char}'
color_closed = True
colored_line += char_tagged

if not color_closed:
Expand Down
7 changes: 3 additions & 4 deletions pywikibot/pagegenerators/_filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,10 +222,9 @@ def SubpageFilterGenerator(generator: Iterable[pywikibot.page.BasePage],
for page in generator:
if page.depth <= max_depth:
yield page
else:
if show_filtered:
pywikibot.info(
f'Page {page} is a subpage that is too deep. Skipping.')
elif show_filtered:
pywikibot.info(
f'Page {page} is a subpage that is too deep. Skipping.')


class RegexFilter:
Expand Down
15 changes: 7 additions & 8 deletions pywikibot/site/_datasite.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,15 +221,14 @@ def preload_entities(
ident = p._defined_by()
for key in ident:
req[key].append(ident[key])
elif (p.site == self
and p.namespace() in self._entity_namespaces.values()):
req['ids'].append(p.title(with_ns=False))
else:
if p.site == self and p.namespace() in (
self._entity_namespaces.values()):
req['ids'].append(p.title(with_ns=False))
else:
assert p.site.has_data_repository, \
'Site must have a data repository'
req['sites'].append(p.site.dbName())
req['titles'].append(p._link._text)
assert p.site.has_data_repository, \
'Site must have a data repository'
req['sites'].append(p.site.dbName())
req['titles'].append(p._link._text)

req = self.simple_request(action='wbgetentities', **req)
data = req.submit()
Expand Down
21 changes: 10 additions & 11 deletions pywikibot/site/_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -413,17 +413,16 @@ def ignore_warnings(warnings):
raise Error('Unrecognized result: {result}'
.format_map(data))

else: # not chunked upload
if file_key:
final_request['filekey'] = file_key
else:
file_contents = f.read()
filetype = (mimetypes.guess_type(self.filename)[0]
or 'application/octet-stream')
final_request.mime = {
'file': (file_contents, filetype.split('/'),
{'filename': mime_filename})
}
elif file_key:
final_request['filekey'] = file_key
else:
file_contents = f.read()
filetype = (mimetypes.guess_type(self.filename)[0]
or 'application/octet-stream')
final_request.mime = {
'file': (file_contents, filetype.split('/'),
{'filename': mime_filename})
}
else:
# upload by URL
if not self.site.has_right('upload_by_url'):
Expand Down
27 changes: 13 additions & 14 deletions pywikibot/site_detect.py
Original file line number Diff line number Diff line change
Expand Up @@ -243,20 +243,19 @@ def set_api_url(self, url) -> None:
f'{new_parsed_url.netloc or self.url.netloc}'
f'{new_parsed_url.path}'
)
else:
if self._parsed_url:
# allow upgrades to https, but not downgrades
if self._parsed_url.scheme == 'https' \
and new_parsed_url.scheme != self._parsed_url.scheme:
return

# allow http://www.brickwiki.info/ vs http://brickwiki.info/
if (new_parsed_url.netloc in self._parsed_url.netloc
or self._parsed_url.netloc in new_parsed_url.netloc):
return

assert new_parsed_url == self._parsed_url, \
f'{self._parsed_url} != {new_parsed_url}'
elif self._parsed_url:
# allow upgrades to https, but not downgrades
if self._parsed_url.scheme == 'https' \
and new_parsed_url.scheme != self._parsed_url.scheme:
return

# allow http://www.brickwiki.info/ vs http://brickwiki.info/
if (new_parsed_url.netloc in self._parsed_url.netloc
or self._parsed_url.netloc in new_parsed_url.netloc):
return

assert new_parsed_url == self._parsed_url, \
f'{self._parsed_url} != {new_parsed_url}'

self._parsed_url = new_parsed_url
self.server = f'{self._parsed_url.scheme}://{self._parsed_url.netloc}'
Expand Down
2 changes: 1 addition & 1 deletion pywikibot/specialbots/_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,10 @@
import requests

import pywikibot
import pywikibot.comms.http as http
from pywikibot import config
from pywikibot.backports import Callable
from pywikibot.bot import BaseBot, QuitKeyboardInterrupt
from pywikibot.comms import http
from pywikibot.exceptions import APIError, FatalServerError, NoPageError


Expand Down
19 changes: 9 additions & 10 deletions scripts/category.py
Original file line number Diff line number Diff line change
Expand Up @@ -700,17 +700,16 @@ def __init__(self, oldcat,
elif deletion_comment == self.DELETION_COMMENT_SAME_AS_EDIT_COMMENT:
# Use the edit comment as the deletion comment.
self.deletion_comment = self.comment
# Deletion comment is set to internationalized default.
elif self.newcat:
# Category is moved.
self.deletion_comment = i18n.twtranslate(self.site,
'category-was-moved',
template_vars)
else:
# Deletion comment is set to internationalized default.
if self.newcat:
# Category is moved.
self.deletion_comment = i18n.twtranslate(self.site,
'category-was-moved',
template_vars)
else:
# Category is deleted.
self.deletion_comment = i18n.twtranslate(
self.site, 'category-was-disbanded')
# Category is deleted.
self.deletion_comment = i18n.twtranslate(
self.site, 'category-was-disbanded')
self.move_comment = move_comment if move_comment else self.comment

def run(self) -> None:
Expand Down
31 changes: 15 additions & 16 deletions scripts/checkimages.py
Original file line number Diff line number Diff line change
Expand Up @@ -1262,22 +1262,21 @@ def smart_detection(self) -> tuple[str, bool]:
else:
pywikibot.info('Skipping the file...')
self.some_problem = False
else:
if not self.seems_ok and self.license_found:
rep_text_license_fake = ((self.list_entry
+ "seems to have a ''fake license'',"
' license detected:'
' <nowiki>%s</nowiki>') %
(self.image_name, self.license_found))
print_with_time_zone(
f'{self.image_name} seems to have a fake license: '
f'{self.license_found}, reporting...')
self.report_image(self.image_name,
rep_text=rep_text_license_fake,
addings=False)
elif self.license_found:
pywikibot.info(f'[[{self.image_name}]] seems ok, license '
f'found: {{{{{self.license_found}}}}}...')
elif not self.seems_ok and self.license_found:
rep_text_license_fake = ((self.list_entry
+ "seems to have a ''fake license'',"
' license detected:'
' <nowiki>%s</nowiki>') %
(self.image_name, self.license_found))
print_with_time_zone(
f'{self.image_name} seems to have a fake license: '
f'{self.license_found}, reporting...')
self.report_image(self.image_name,
rep_text=rep_text_license_fake,
addings=False)
elif self.license_found:
pywikibot.info(f'[[{self.image_name}]] seems ok, license '
f'found: {{{{{self.license_found}}}}}...')
return (self.license_found, self.white_templates_found)

@staticmethod
Expand Down
5 changes: 2 additions & 3 deletions scripts/interwikidata.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,9 +106,8 @@ def treat_page(self) -> None:
item = self.try_to_add()
if self.opt.create and item is None:
item = self.create_item()
else:
if self.opt.merge:
item = self.try_to_merge(item)
elif self.opt.merge:
item = self.try_to_merge(item)

if item and self.opt.clean:
self.current_item = item
Expand Down
7 changes: 3 additions & 4 deletions scripts/maintenance/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -444,11 +444,10 @@ def main():
if output:
sys.exit('Only one output may be defined.')
output = ''
elif not cache_paths:
cache_paths = [arg]
else:
if not cache_paths:
cache_paths = [arg]
else:
cache_paths.append(arg)
cache_paths.append(arg)

if not cache_paths:
folders = ('apicache', 'apicache-py2', 'apicache-py3')
Expand Down
5 changes: 2 additions & 3 deletions scripts/pagefromfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,9 +169,8 @@ def treat_page(self) -> None:
else:
pywikibot.info(f'Page {title} already exists, not adding!')
return
else:
if self.opt.autosummary:
comment = config.default_edit_summary = ''
elif self.opt.autosummary:
comment = config.default_edit_summary = ''

self.put_current(contents, summary=comment,
minor=self.opt.minor,
Expand Down
3 changes: 1 addition & 2 deletions scripts/patrol.py
Original file line number Diff line number Diff line change
Expand Up @@ -323,8 +323,7 @@ def treat(self, page):
else:
verbose_output('Skipped')

if rcid > self.highest_rcid:
self.highest_rcid = rcid
self.highest_rcid = max(rcid, self.highest_rcid)
self.last_rcid = rcid


Expand Down
29 changes: 14 additions & 15 deletions scripts/solve_disambiguation.py
Original file line number Diff line number Diff line change
Expand Up @@ -1163,22 +1163,21 @@ def setSummaryMessage(
{'from': page.title(),
'to': targets,
'count': len(new_targets)})
elif unlink_counter and not new_targets:
self.summary = i18n.twtranslate(
self.site, 'solve_disambiguation-links-removed',
{'from': page.title(),
'count': unlink_counter})
elif dn and not new_targets:
self.summary = i18n.twtranslate(
self.site, 'solve_disambiguation-adding-dn-template',
{'from': page.title()})
else:
if unlink_counter and not new_targets:
self.summary = i18n.twtranslate(
self.site, 'solve_disambiguation-links-removed',
{'from': page.title(),
'count': unlink_counter})
elif dn and not new_targets:
self.summary = i18n.twtranslate(
self.site, 'solve_disambiguation-adding-dn-template',
{'from': page.title()})
else:
self.summary = i18n.twtranslate(
self.site, 'solve_disambiguation-links-resolved',
{'from': page.title(),
'to': targets,
'count': len(new_targets)})
self.summary = i18n.twtranslate(
self.site, 'solve_disambiguation-links-resolved',
{'from': page.title(),
'to': targets,
'count': len(new_targets)})

def teardown(self) -> None:
"""Write ignoring pages to a file."""
Expand Down
7 changes: 3 additions & 4 deletions scripts/template.py
Original file line number Diff line number Diff line change
Expand Up @@ -255,10 +255,9 @@ def main(*args: str) -> None:
skip = True
elif arg.startswith('-timestamp:'):
timestamp = arg[len('-timestamp:'):]
else:
if not gen_factory.handle_arg(arg):
template_name = pywikibot.Page(site, arg, ns=10)
template_names.append(template_name.title(with_ns=False))
elif not gen_factory.handle_arg(arg):
template_name = pywikibot.Page(site, arg, ns=10)
template_names.append(template_name.title(with_ns=False))

if not template_names:
pywikibot.bot.suggest_help(missing_parameters=['templates'])
Expand Down
13 changes: 6 additions & 7 deletions scripts/transwikiimport.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,13 +289,12 @@ def main(*args: str) -> None:
' exists)'
)
continue
else:
if not targetpage.botMayEdit():
pywikibot.warning(
f'Target page {targetpage.title(as_link=True)} is not'
' editable by bots'
)
continue
elif not targetpage.botMayEdit():
pywikibot.warning(
f'Target page {targetpage.title(as_link=True)} is not'
' editable by bots'
)
continue

params['interwikipage'] = fromtitle
api_query(tosite, params)
Expand Down
2 changes: 0 additions & 2 deletions scripts/welcome.py
Original file line number Diff line number Diff line change
Expand Up @@ -429,8 +429,6 @@
'zh': '<small>(via ~~~)</small>',
}

#
#
LOGPAGE_HEADER = {
'_default': '{|border="2" cellpadding="4" cellspacing="0" style="margin: '
'0.5em 0.5em 0.5em 1em; padding: 0.5em; background: #bfcda5; '
Expand Down
Loading

0 comments on commit 11b979d

Please sign in to comment.