
Commit

Update precommit
Uxio0 committed Feb 21, 2024
1 parent 6d55e42 commit 75fa2b4
Showing 7 changed files with 29 additions and 22 deletions.
15 changes: 8 additions & 7 deletions .pre-commit-config.yaml
@@ -2,27 +2,28 @@
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/PyCQA/isort
rev: 5.12.0
rev: 5.13.2
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 23.11.0
rev: 24.2.0
hooks:
- id: black
- repo: https://github.com/PyCQA/flake8
rev: 6.1.0
rev: 7.0.0
hooks:
- id: flake8
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-added-large-files
- id: check-docstring-first
- id: check-merge-conflict
- id: check-symlinks
- id: check-yaml
- id: debug-statements
- id: detect-private-key
- id: requirements-txt-fixer
- id: trailing-whitespace
- id: end-of-file-fixer
types: [python]
- id: check-yaml
- id: check-added-large-files
- id: requirements-txt-fixer
- id: trailing-whitespace
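
The hook version bumps above (isort 5.13.2, black 24.2.0, flake8 7.0.0) are what drive the reformatting in the Python files that follow. Below is a minimal sketch, assuming the pre-commit package is installed in the project environment, of how the refreshed hooks could be re-run across the whole repository; the subprocess wrapper is only for illustration, and the same two commands can be typed directly into a shell.

import subprocess

# Re-register the git hook so future commits use the new hook revisions.
subprocess.run(["pre-commit", "install"], check=True)

# Run every configured hook against all tracked files, not only staged ones.
subprocess.run(["pre-commit", "run", "--all-files"], check=True)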
1 change: 1 addition & 0 deletions config/gunicorn.py
@@ -1,6 +1,7 @@
"""
Store gunicorn variables in this file, so they can be read by Django
"""

import os

gunicorn_request_timeout = os.environ.get("WEB_WORKER_TIMEOUT", 60)
1 change: 1 addition & 0 deletions config/wsgi.py
@@ -13,6 +13,7 @@
framework.
"""

import os
import sys

6 changes: 3 additions & 3 deletions safe_transaction_service/contracts/tx_decoder.py
@@ -164,9 +164,9 @@ class SafeTxDecoder:

def __init__(self):
logger.info("%s: Loading contract ABIs for decoding", self.__class__.__name__)
self.fn_selectors_with_abis: Dict[
bytes, ABIFunction
] = self._generate_selectors_with_abis_from_abis(self.get_supported_abis())
self.fn_selectors_with_abis: Dict[bytes, ABIFunction] = (
self._generate_selectors_with_abis_from_abis(self.get_supported_abis())
)
logger.info(
"%s: Contract ABIs for decoding were loaded", self.__class__.__name__
)
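The rewrite of fn_selectors_with_abis above does not change behaviour; it matches the 2024 stable style applied by black 24.x, which parenthesizes a long right-hand side instead of splitting inside the annotated target and wraps multi-line conditional expressions in parentheses. The same patterns recur in the hunks below, and the blank lines added after the module docstrings in config/gunicorn.py and config/wsgi.py come from the same style update. A short illustrative sketch, loosely modelled on the hunks in this commit but with simplified, hypothetical types and helpers:

from typing import Dict


def load_function_selectors() -> Dict[bytes, str]:
    # Hypothetical stand-in for an expensive ABI-loading step.
    return {bytes([i]): f"function_{i}" for i in range(4)}


class ExampleDecoder:
    def __init__(self, data: bytes = b"") -> None:
        # 2024 style: when a statement is too long, the right-hand side is
        # wrapped in parentheses rather than splitting inside the annotation.
        self.fn_selectors: Dict[bytes, str] = (
            load_function_selectors()
        )
        # 2024 style: a conditional expression that spans several lines is
        # parenthesized as a unit.
        self.data_decoded: str = (
            data.hex() if data else ""
        )


print(ExampleDecoder(b"\x12\x34").data_decoded)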
@@ -60,9 +60,13 @@ def handle(self, *args, **options):
m.to,
str(m.failed),
m.origin,
json.dumps(decoder.get_data_decoded(m.data.tobytes()))
if m.data
else "",
(
json.dumps(
decoder.get_data_decoded(m.data.tobytes())
)
if m.data
else ""
),
]
)
+ "\n"
6 changes: 3 additions & 3 deletions safe_transaction_service/history/serializers.py
@@ -299,9 +299,9 @@ def save(self, **kwargs):
"safe": self.validated_data["safe"],
"to": self.validated_data["to"],
"value": self.validated_data["value"],
"data": self.validated_data["data"]
if self.validated_data["data"]
else None,
"data": (
self.validated_data["data"] if self.validated_data["data"] else None
),
"operation": self.validated_data["operation"],
"safe_tx_gas": self.validated_data["safe_tx_gas"],
"base_gas": self.validated_data["base_gas"],
@@ -489,9 +489,9 @@ def _get_collectibles_with_metadata(
# Creates a collectibles metadata keeping the initial order
for collectible_metadata_cached_index in range(len(collectibles_with_metadata)):
if collectibles_with_metadata[collectible_metadata_cached_index] is None:
collectibles_with_metadata[
collectible_metadata_cached_index
] = collectibles_with_metadata_not_cached.pop(0)
collectibles_with_metadata[collectible_metadata_cached_index] = (
collectibles_with_metadata_not_cached.pop(0)
)

return collectibles_with_metadata, count

@@ -607,9 +607,9 @@ def get_redis_key(address_with_token_id: Tuple[ChecksumAddress, int]) -> str:
if blockchain_token_uris:
pipe = self.redis.pipeline()
redis_map_to_store = {
get_redis_key(address_with_token_id): token_uri
if token_uri is not None
else ""
get_redis_key(address_with_token_id): (
token_uri if token_uri is not None else ""
)
for address_with_token_id, token_uri in blockchain_token_uris.items()
}
pipe.mset(redis_map_to_store)
