Skip to content

Commit

Permalink
use Python 3.7 for integration tests
Browse files Browse the repository at this point in the history
- use 2to3 to convert code
- use pylint to clean-up
- close #2311
  • Loading branch information
guwirth committed Mar 4, 2022
1 parent 35272c3 commit b4f337b
Show file tree
Hide file tree
Showing 9 changed files with 189 additions and 167 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/cxx-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '2.7'
python-version: '3.7'

# setup Tidy
#
Expand Down Expand Up @@ -251,7 +251,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '2.7'
python-version: '3.7'
# cache not working with 2.7?
#cache: 'pip'
- run: pip install -r "$GITHUB_WORKSPACE/integration-tests/requirements.txt"
Expand Down Expand Up @@ -392,7 +392,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '2.7'
python-version: '3.7'
# cache not working with 2.7?
#cache: 'pip'
- run: pip install -r "$GITHUB_WORKSPACE/integration-tests/requirements.txt"
Expand Down
2 changes: 1 addition & 1 deletion cxx-sensors/src/tools/cpplint_createrules.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ def usage():
return 'Usage: %s <cpplint.py> .... txt file can be obtain by running python cpplint.py --filter=' % sys.argv[0]

if len(sys.argv) != 2:
print usage()
print(usage())
exit()

# open and parse cpplint parse file
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def usage():
return 'Usage: %s <IntelCsvFile> <OutputFile> <RootFolder> <Executable>' % sys.argv[0]

if len(sys.argv) != 5:
print usage()
print(usage())
exit()

absfilepath = os.path.abspath(sys.argv[1])
Expand Down
4 changes: 2 additions & 2 deletions cxx-sensors/src/tools/utils_createrules.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ def check_rules(path):
has_len_errors = True
description_dump_path = "/tmp/" + key + ".ruledump"
with open(description_dump_path, "w") as f:
html = u"""<!DOCTYPE html>
html = """<!DOCTYPE html>
<html>
<head>
<meta charset=\"utf-8\">
Expand All @@ -203,7 +203,7 @@ def check_rules(path):
<body>{description}</body>
</html>
""".format(name=escape(name_tag.text), description=description_tag.text)
f.write(html.encode("UTF-8"))
f.write(html)
is_tidy_error = call_tidy(description_dump_path)
has_tidy_errors = has_tidy_errors or is_tidy_error

Expand Down
14 changes: 7 additions & 7 deletions cxx-sensors/src/tools/vc_createrules.py
Original file line number Diff line number Diff line change
Expand Up @@ -340,7 +340,7 @@ def read_warning_pages(browser, warnings):
Iterate over all HTML warning pages and parse content.
"""
# read HTML pages of warnings
for _key, data in warnings.items():
for _key, data in list(warnings.items()):
page_source = read_page_source(browser, data['href'])
data = parse_warning_page(page_source, data)

Expand Down Expand Up @@ -390,7 +390,7 @@ def create_rules(warnings, rules):
- 'remediationFunction' to 'LINEAR'
- - 'remediationFunctionGapMultiplier' to '5min'
"""
for _key, data in warnings.items():
for _key, data in list(warnings.items()):
rule = et.Element('rule')

# mandatory
Expand Down Expand Up @@ -449,7 +449,7 @@ def assign_warning_properties(warning, defaults, override):
# set default values only once
assign = True
if assign:
for key, value in defaults.items():
for key, value in list(defaults.items()):
warning[key] = value


Expand All @@ -465,22 +465,22 @@ def read_warnings():

# read links to warning pages from menu of overview pages
warnings = {}
for url, properties in URLS.items():
for url, properties in list(URLS.items()):
page_source = read_page_source(browser, url)
parse_warning_hrefs(page_source, warnings)
for key, warning in warnings.items():
for key, warning in list(warnings.items()):
assign_warning_properties(warning, properties, False)

# warnings = dict(list(warnings.items())[:1]) # for testing only

# sort warnings ascending by message number
warnings = dict(sorted(warnings.items(), key=sorter))
warnings = dict(sorted(list(warnings.items()), key=sorter))

# read content of warning pages
read_warning_pages(browser, warnings)

# override defaults
for key, defaults in RULE_MAP.items():
for key, defaults in list(RULE_MAP.items()):
if key in warnings:
warning = warnings[key]
assign_warning_properties(warning, defaults, True)
Expand Down
10 changes: 5 additions & 5 deletions cxx-squid/dox/tools/grammar_parser/grammar_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ def use(self, search):
rules = {}
if search in self.rules:
rules[search] = self.rules[search]
for rulename, sequences in self.rules.items():
for rulename, sequences in list(self.rules.items()):
if self.__rule_use_expression(search, sequences):
rules[rulename] = sequences
self.rules = rules
Expand All @@ -165,7 +165,7 @@ def resolve_optionals(self):
"""
Replace rules with optional tokens with explicit rules.
"""
for rulename, sequences in self.rules.items():
for rulename, sequences in list(self.rules.items()):
sequences_without_opts = []
for i, sequence in enumerate(sequences, 1):
sequence.append('[[{}.{}]]'.format(rulename, i)) # add original rulename.sequence
Expand Down Expand Up @@ -213,7 +213,7 @@ def flatten_rules(self, max_sequences):
"""
Tokens of rules are replaced recursively.
"""
for rulename, sequences in self.rules.items():
for rulename, sequences in list(self.rules.items()):
already_replaced = { rulename }
while True:
expanded_sequences = []
Expand Down Expand Up @@ -308,7 +308,7 @@ def match(self, input):
tokens = input.split()
tokens.reverse()
branch = deque()
for rulename, sequences in self.rules.items():
for rulename, sequences in list(self.rules.items()):
logging.info("Match: '%s', Root: [[%s]]", input, rulename)
logging.info("")
for sequence in sequences:
Expand All @@ -334,7 +334,7 @@ def print_grammar(self):
"""
logging.info("*** Grammar ***")
logging.info("")
for rulename, sequences in self.rules.items():
for rulename, sequences in list(self.rules.items()):
logging.info("%s:", rulename)
for i, sequence in enumerate(sequences, 1):
tokens = " {}".format(i)
Expand Down
44 changes: 22 additions & 22 deletions integration-tests/features/common.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# -*- mode: python; coding: iso-8859-1 -*-
# -*- mode: python; coding: utf-8 -*-

# C++ Community Plugin (cxx plugin)
# Copyright (C) Waleri Enns
Expand All @@ -22,9 +22,9 @@
import re
import os
import sys
import requests
import json
import time
import requests

from requests.auth import HTTPBasicAuth

Expand Down Expand Up @@ -62,21 +62,21 @@ def get_sonar_log_folder(sonarhome):

def get_sonar_log_file(sonarhome):
if "sonarqube-7." in sonarhome:
SONAR_LOG_FILE = "sonar.log"
sonar_log_file = "sonar.log"
else:
SONAR_LOG_FILE = "sonar." + time.strftime("%Y%m%d") + ".log"
return os.path.join(get_sonar_log_folder(sonarhome), SONAR_LOG_FILE)
sonar_log_file = "sonar." + time.strftime("%Y%m%d") + ".log"
return os.path.join(get_sonar_log_folder(sonarhome), sonar_log_file)

def sonar_analysis_finished(logpath):
url = ""

print(BRIGHT + " Read Log : " + logpath + RESET_ALL)

try:
with open(logpath, "r") as log:
with open(logpath, "r", encoding="utf8") as log:
lines = log.readlines()
url = get_url_from_log(lines)
except IOError, e:
except IOError:
pass

print(BRIGHT + " Get Analysis In Background : " + url + RESET_ALL)
Expand Down Expand Up @@ -106,14 +106,14 @@ def sonar_analysis_finished(logpath):
break

serverlogurl = url.replace("task?id", "logs?taskId")
r = requests.get(serverlogurl, auth=HTTPBasicAuth(SONAR_LOGIN, SONAR_PASSWORD), timeout=10)
request = requests.get(serverlogurl,
auth=HTTPBasicAuth(SONAR_LOGIN, SONAR_PASSWORD),
timeout=10)

writepath = logpath + ".server"
f = open(writepath, 'w')
f.write(r.text)
f.close()
with open(logpath + ".server", "w", encoding="utf8") as serverlog:
serverlog.write(request.text)

# print(BRIGHT + " LOG: " + r.text + RESET_ALL)
# print(BRIGHT + " LOG: " + request.text + RESET_ALL)

return status

Expand All @@ -123,8 +123,8 @@ def cleanup_logs(sonarhome):
try:
logpath = get_sonar_log_folder(sonarhome)
filelist = [ f for f in os.listdir(logpath) if f.endswith(".log") ]
for f in filelist:
os.remove(os.path.join(logpath, f))
for filename in filelist:
os.remove(os.path.join(logpath, filename))
except OSError:
pass
sys.stdout.write(GREEN + "OK\n" + RESET)
Expand All @@ -135,10 +135,10 @@ def print_logs(sonarhome):
try:
logpath = get_sonar_log_folder(sonarhome)
filelist = [ f for f in os.listdir(logpath) if f.endswith(".log") ]
for f in filelist:
sys.stdout.write("\n--- " + f + " ---\n")
with open(os.path.join(logpath, f), 'r') as file:
sys.stdout.write(file.read());
for filename in filelist:
sys.stdout.write("\n--- " + filename + " ---\n")
with open(os.path.join(logpath, filename), "r", encoding="utf8") as file:
sys.stdout.write(file.read())
except OSError:
pass
sys.stdout.write("\n")
Expand All @@ -148,11 +148,11 @@ def analyse_log(logpath, toignore=None):
errors = warnings = 0

try:
with open(logpath, "r") as log:
with open(logpath, "r", encoding="utf8") as log:
lines = log.readlines()
badlines, errors, warnings = analyse_log_lines(lines, toignore)
except IOError, e:
badlines.append(str(e) + "\n")
except IOError as error:
badlines.append(str(error) + "\n")

return badlines, errors, warnings

Expand Down
Loading

0 comments on commit b4f337b

Please sign in to comment.