pre-commit - Update configuration and job versions (#4201)
* Update pre-commit config
  - Update versions of pre-commit jobs
  - Use flake8 from PyCQA

* Changes made by pyupgrade
* Changes made by black
* Use files and types rather than a regexp for dashboard validation
samdoran authored Mar 16, 2023
1 parent 5063a5d commit 514853b
Showing 7 changed files with 75 additions and 52 deletions.
90 changes: 51 additions & 39 deletions .pre-commit-config.yaml
@@ -1,42 +1,54 @@
 default_language_version:
   python: python3

 repos:
-  - repo: https://github.com/asottile/reorder_python_imports
-    rev: v1.6.0
-    hooks:
-      - id: reorder-python-imports
-        args: [--py3-plus, --application-directories, "koku"]
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v2.31.1
-    hooks:
-      - id: pyupgrade
-        args: [--py36-plus]
-  - repo: https://github.com/ambv/black
-    rev: 22.3.0
-    hooks:
-      - id: black
-        args: [--line-length, "119"]
-        require_serial: true
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v2.2.3
-    hooks:
-      - id: trailing-whitespace
-      - id: end-of-file-fixer
-        exclude: ".unleash/"
-      - id: debug-statements
-      - id: flake8
-  - repo: https://github.com/haizaar/check-pipfile-lock
-    rev: v0.0.3
-    hooks:
-      - id: check-pipfile-lock
-  - repo: local
-    hooks:
-      - id: validate-dashboards
-        name: Validate Grafana Dashboards
-        description: Ensures dashboard files are valid JSON
-        entry: dev/scripts/validate_dashboards.py
-        language: python
-        pass_filenames: false
-        files: dashboards/.*\.yaml
-        additional_dependencies:
-          - pyyaml
+  - repo: https://github.com/asottile/reorder_python_imports
+    rev: v3.9.0
+    hooks:
+      - id: reorder-python-imports
+        args: [--py3-plus, --application-directories, "koku"]
+
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.3.1
+    hooks:
+      - id: pyupgrade
+        args: [--py36-plus]
+
+  - repo: https://github.com/ambv/black
+    rev: 22.12.0
+    hooks:
+      - id: black
+        args: [--line-length, "119"]
+        require_serial: true
+
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: debug-statements
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+        exclude: ".unleash/"
+
+  - repo: https://github.com/PyCQA/flake8
+    rev: 6.0.0
+    hooks:
+      - id: flake8
+
+  - repo: https://github.com/haizaar/check-pipfile-lock
+    rev: v0.0.5
+    hooks:
+      - id: check-pipfile-lock
+
+  - repo: local
+    hooks:
+      - id: validate-dashboards
+        name: Validate Grafana Dashboards
+        description: Ensures dashboard files are valid JSON
+        entry: dev/scripts/validate_dashboards.py
+        language: python
+        # pass_filenames: true
+        files: dashboards/
+        types:
+          - yaml
+        additional_dependencies:
+          - pyyaml
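
The last commit-message bullet lands in the local validate-dashboards hook above: the old regexp files: dashboards/.*\.yaml is replaced by files: dashboards/ (still a regular expression, searched against each path) plus types: [yaml], and with pass_filenames no longer forced to false, pre-commit hands the matching paths to the hook's entry point. A rough illustration of that filtering with made-up file names; pre-commit itself detects types with the identify library, so a plain suffix check only stands in for it here:

import re

candidate_paths = [
    "dashboards/grafana-dashboard-koku.yaml",  # hypothetical names
    "dashboards/README.md",
    "koku/settings.py",
]

files_pattern = re.compile(r"dashboards/")  # the hook's `files:` value

selected = [
    p
    for p in candidate_paths
    if files_pattern.search(p) and p.endswith((".yaml", ".yml"))  # stand-in for types: [yaml]
]
print(selected)  # only the dashboard YAML path is passed to validate_dashboards.py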
2 changes: 1 addition & 1 deletion dev/scripts/log_timeline.py
@@ -271,7 +271,7 @@ def handle_log(self, log_file_name):
     def open_dump_file(self):
         # LOG.critical(f"DUMP = {self.dump}")
         if self.dump:
-            return open(self.dump[0], "wt")
+            return open(self.dump[0], "w")
         return sys.stdout

     def get_error_log_ts_start_end(self):
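This change, along with the ones in update_e2e_srr.py and test_migration_sql_helpers.py below, comes from pyupgrade dropping the redundant "t" from open() mode strings; text mode is the default, so the old and new spellings behave identically. A quick illustration using a throwaway scratch file:

import os
import tempfile

fd, path = tempfile.mkstemp()  # scratch file just for the demo
os.close(fd)

with open(path, "wt") as f:  # old spelling
    f.write("hello\n")
with open(path, "w") as f:  # pyupgrade's spelling, identical behavior
    f.write("hello\n")
with open(path, "r+") as f:  # same story for "rt+" vs "r+"
    assert f.read() == "hello\n"

os.remove(path)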
2 changes: 1 addition & 1 deletion dev/scripts/update_e2e_srr.py
@@ -37,7 +37,7 @@ def repl(match):

 target_file = os.path.join(os.environ["E2E_REPO"], "buildfactory", yaml_file_name)
 LOG.info(f'Processing "{target_file}"')
-with open(target_file, "rt+") as e2e_yaml:
+with open(target_file, "r+") as e2e_yaml:
     buff = e2e_yaml.read()
     e2e_yaml.seek(0)
     e2e_yaml.write(SRR.sub(repl, buff))
25 changes: 18 additions & 7 deletions dev/scripts/validate_dashboards.py
@@ -7,15 +7,26 @@


 def main() -> None:
-    dashboards_path = pathlib.Path(__file__).parent.parent.parent / "dashboards"
     errors = {}
-    for file in dashboards_path.glob("*.yaml"):
-        yaml_data = yaml.safe_load(file.read_text())
-        for item in yaml_data["data"]:
+    files = [pathlib.Path(file) for file in sys.argv[1:]]
+    for file in files:
+        try:
+            yaml_data = yaml.safe_load(file.read_text())
+        except yaml.error.YAMLError as yaml_exc:
+            errors[file.name] = f"Error loading YAML: {yaml_exc}"
+            continue
+
+        try:
+            dashboards = yaml_data["data"]
+        except (KeyError, TypeError) as exc:
+            errors[file.name] = f"Error getting 'data' field: {exc}"
+            continue
+
+        for dashboard in dashboards:
             try:
-                json.loads(yaml_data["data"][item])
-            except json.decoder.JSONDecodeError as exc:
-                errors[file.name] = exc
+                json.loads(yaml_data["data"][dashboard])
+            except (json.decoder.JSONDecodeError, TypeError) as exc:
+                errors[file.name] = f"Error loading JSON data from '{dashboard}': {exc}"

     if errors:
         sys.exit("\n".join(f"{k}: {v}" for k, v in errors.items()))
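The validator previously globbed dashboards/*.yaml on its own and caught only JSONDecodeError; it now takes its file list from pre-commit via sys.argv and also reports YAML that fails to parse and a missing or malformed data key. Each file is expected to be a YAML document whose data mapping holds dashboards as JSON strings. A hedged sketch with made-up content showing what the per-dashboard JSON check flags:

import json

import yaml

sample = """
data:
  sample-dashboard.json: '{"title": "Koku", "panels": []}'
  broken-dashboard.json: '{"title": "Koku",'
"""

yaml_data = yaml.safe_load(sample)
for name, payload in yaml_data["data"].items():
    try:
        json.loads(payload)
    except json.decoder.JSONDecodeError as exc:
        print(f"{name}: invalid JSON ({exc})")  # only broken-dashboard.json is reported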
2 changes: 1 addition & 1 deletion koku/api/report/test/test_query_filter.py
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: Apache-2.0
 #
 """Test the QueryFilter."""
-from collections import Iterable
+from collections.abc import Iterable

 from django.db.models import Q
 from django.test import TestCase
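The container ABCs such as Iterable have lived in collections.abc since Python 3.3; the aliases in the top-level collections namespace were deprecated and finally removed in Python 3.10, so the old import now raises ImportError on current interpreters. The corrected import behaves exactly as before:

from collections.abc import Iterable

assert isinstance([1, 2, 3], Iterable)
assert isinstance("abc", Iterable)
assert not isinstance(42, Iterable)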
4 changes: 2 additions & 2 deletions koku/koku/test_migration_sql_helpers.py
@@ -26,7 +26,7 @@ def test_apply_sql_file(self):
         """
         filename = "./___test_apply_sql_file.sql"
         try:
-            with open(filename, "wt") as f:
+            with open(filename, "w") as f:
                 print("select 1;", file=f)
             self.assertEqual(msh.apply_sql_file(conn.schema_editor(), filename), True)
         finally:
@@ -38,7 +38,7 @@ def test_no_apply_sql_file(self):
         """
         filename = "./___test_apply_sql_file.sql"
         try:
-            with open(filename, "wt") as f:
+            with open(filename, "w") as f:
                 print("select 1;", file=f)
             with self.assertRaises(TypeError):
                 msh.apply_sql_file(None, filename)
2 changes: 1 addition & 1 deletion koku/sources/test/test_kafka_message_processor.py
@@ -263,7 +263,7 @@ def test_create_msg_processor_various_headers_failures(self):
                "header_list": (
                    ("event_type", bytes(event, encoding="utf-8")),
                    ("x-rh-identity", bytes(header_missing_account_number, encoding="utf-8")),
-                   ("x-rh-sources-account-number", bytes()),
+                   ("x-rh-sources-account-number", b""),
                )
            },
        ]
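bytes() with no arguments and the literal b"" are equal empty bytes objects; pyupgrade simply prefers the literal form:

assert bytes() == b""
assert len(b"") == 0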
