
Commit

Black everything.
PapyChacal committed Apr 23, 2023
1 parent ed2f156 commit c1d91c7
Showing 116 changed files with 7,703 additions and 5,782 deletions.
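The commit applies the Black formatter across the repository, replacing the previous paren-aligned wrapping style with Black's 88-column, trailing-comma style, as the hunks below show. As a minimal sketch (not part of the commit, which was presumably produced by running the `black` CLI over the tree), Black's programmatic API reproduces the same kind of rewrite; the input string is taken from the bench_lexer.py hunk below.

# Sketch only: reproduce the kind of rewrite seen in this commit with Black's
# programmatic API (equivalent in effect to running the `black` CLI on a file).
import black

# A line from bench_lexer.py in its pre-commit, multi-line form.
src = '''file_names = list(
    glob.iglob(args.root_directory + "/**/*.mlir", recursive=True))
'''

# format_str applies Black's rules to a source string; Mode() uses the defaults
# (88-column line length), matching the style this commit adopts.
print(black.format_str(src, mode=black.Mode()), end="")
# Expected: the call is collapsed onto one line, as in the hunk below:
# file_names = list(glob.iglob(args.root_directory + "/**/*.mlir", recursive=True))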
5 changes: 1 addition & 4 deletions .github/workflows/update_xdsl_pyodide_build.py
@@ -25,9 +25,6 @@
         sha256_hash.update(byte_block)

 # Make it build the local xDSL, not the PyPi release. The pyodide build still requires the SHA256 sum.
-yaml_doc["source"] = {
-    "url": f"file://{xdsl_sdist}",
-    "sha256": sha256_hash.hexdigest()
-}
+yaml_doc["source"] = {"url": f"file://{xdsl_sdist}", "sha256": sha256_hash.hexdigest()}
 with open(meta_yaml_path, "w") as f:
     yaml.dump(yaml_doc, f)
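The hunk above only reflows existing logic: the workflow script hashes the locally built sdist and points the pyodide recipe's source at it. A self-contained sketch of that flow, with hypothetical paths (the real script derives `xdsl_sdist` and `meta_yaml_path` elsewhere) and the common chunked-hashing loop assumed around the `sha256_hash.update` line shown above:

# Sketch only; paths are illustrative, not the ones used by the workflow.
import hashlib

import yaml

xdsl_sdist = "dist/xdsl-0.0.0.tar.gz"       # hypothetical sdist path
meta_yaml_path = "packages/xdsl/meta.yaml"  # hypothetical pyodide recipe path

# Hash the sdist in chunks.
sha256_hash = hashlib.sha256()
with open(xdsl_sdist, "rb") as f:
    for byte_block in iter(lambda: f.read(4096), b""):
        sha256_hash.update(byte_block)

with open(meta_yaml_path) as f:
    yaml_doc = yaml.safe_load(f)

# Make it build the local xDSL, not the PyPI release; pyodide still checks the SHA256.
yaml_doc["source"] = {"url": f"file://{xdsl_sdist}", "sha256": sha256_hash.hexdigest()}

with open(meta_yaml_path, "w") as f:
    yaml.dump(yaml_doc, f)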
29 changes: 16 additions & 13 deletions bench/parser/bench_lexer.py
@@ -29,8 +29,9 @@ def run_on_files(file_names: Iterable[str]):
         try:
             contents = open(file_name, "r").read()
             input = Input(contents, file_name)
-            file_time = timeit.timeit(lambda: lex_file(input),
-                                      number=args.num_iterations)
+            file_time = timeit.timeit(
+                lambda: lex_file(input), number=args.num_iterations
+            )
             total_time += file_time / args.num_iterations
             print("Time taken: " + str(file_time))
         except Exception as e:
@@ -45,20 +46,22 @@ def run_on_files(file_names: Iterable[str]):
 arg_parser.add_argument(
     "root_directory",
     type=str,
-    help="Path to the root directory containing MLIR files.")
-arg_parser.add_argument("--num_iterations",
-                        type=int,
-                        required=False,
-                        default=1,
-                        help="Number of times to lex each file.")
-arg_parser.add_argument("--profile",
-                        action="store_true",
-                        help="Enable profiling metrics.")
+    help="Path to the root directory containing MLIR files.",
+)
+arg_parser.add_argument(
+    "--num_iterations",
+    type=int,
+    required=False,
+    default=1,
+    help="Number of times to lex each file.",
+)
+arg_parser.add_argument(
+    "--profile", action="store_true", help="Enable profiling metrics."
+)

 args = arg_parser.parse_args()

-file_names = list(
-    glob.iglob(args.root_directory + "/**/*.mlir", recursive=True))
+file_names = list(glob.iglob(args.root_directory + "/**/*.mlir", recursive=True))
 print("Found " + str(len(file_names)) + " files to lex.")

 if args.profile:
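The bench_lexer.py changes above are purely formatting; the underlying benchmark pattern is to time a lexing callable with `timeit` and average over the iteration count. A stripped-down sketch of that pattern, using a placeholder workload instead of xDSL's real lexer and a hypothetical directory:

# Sketch only: the timeit pattern from bench_lexer.py, with a stand-in workload.
import glob
import timeit


def lex_file(text: str) -> int:
    # Placeholder workload; the benchmark script lexes MLIR with xDSL instead.
    return len(text.split())


num_iterations = 5  # corresponds to the --num_iterations flag
total_time = 0.0
file_names = list(glob.iglob("tests/**/*.mlir", recursive=True))  # hypothetical root

for file_name in file_names:
    contents = open(file_name, "r").read()
    # timeit runs the lambda `number` times and returns the total elapsed seconds.
    file_time = timeit.timeit(lambda: lex_file(contents), number=num_iterations)
    total_time += file_time / num_iterations
    print("Time taken: " + str(file_time))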
53 changes: 29 additions & 24 deletions bench/parser/bench_parser.py
@@ -54,19 +54,20 @@ def run_on_files(file_names: Iterable[str], mlir_path: str, ctx: MLContext):

         # Parse each sub-file separately.
         for sub_contents in splitted_contents:
-
             # First, parse the file with MLIR to check that it is valid, and
             # print it back in generic form.
-            res = subprocess.run([
-                mlir_path,
-                "--allow-unregistered-dialect",
-                "-mlir-print-op-generic",
-                "-mlir-print-local-scope",
-            ],
-                                 input=sub_contents,
-                                 text=True,
-                                 capture_output=True,
-                                 timeout=60)
+            res = subprocess.run(
+                [
+                    mlir_path,
+                    "--allow-unregistered-dialect",
+                    "-mlir-print-op-generic",
+                    "-mlir-print-local-scope",
+                ],
+                input=sub_contents,
+                text=True,
+                capture_output=True,
+                timeout=60,
+            )
             if res.returncode != 0:
                 continue
             n_total_files += 1
@@ -78,7 +79,8 @@ def run_on_files(file_names: Iterable[str], mlir_path: str, ctx: MLContext):
             try:
                 file_time = timeit.timeit(
                     lambda: parse_file(generic_sub_contents, ctx),
-                    number=args.num_iterations)
+                    number=args.num_iterations,
+                )
                 total_time += file_time / args.num_iterations
                 print("Time taken: " + str(file_time))
                 n_parsed_files += 1
@@ -96,27 +98,30 @@ def run_on_files(file_names: Iterable[str], mlir_path: str, ctx: MLContext):
 arg_parser.add_argument(
     "root_directory",
     type=str,
-    help="Path to the root directory containing MLIR files.")
+    help="Path to the root directory containing MLIR files.",
+)
 arg_parser.add_argument("--mlir-path", type=str, help="Path to mlir-opt.")
-arg_parser.add_argument("--num_iterations",
-                        type=int,
-                        required=False,
-                        default=1,
-                        help="Number of times to parse each file.")
-arg_parser.add_argument("--profile",
-                        action="store_true",
-                        help="Enable profiling metrics.")
+arg_parser.add_argument(
+    "--num_iterations",
+    type=int,
+    required=False,
+    default=1,
+    help="Number of times to parse each file.",
+)
+arg_parser.add_argument(
+    "--profile", action="store_true", help="Enable profiling metrics."
+)
 arg_parser.add_argument(
     "--timeout",
     type=int,
     required=False,
     default=60,
-    help="Timeout for processing each sub-program with MLIR. (in seconds)")
+    help="Timeout for processing each sub-program with MLIR. (in seconds)",
+)

 args = arg_parser.parse_args()

-file_names = list(
-    glob.iglob(args.root_directory + "/**/*.mlir", recursive=True))
+file_names = list(glob.iglob(args.root_directory + "/**/*.mlir", recursive=True))
 print("Found " + str(len(file_names)) + " files to parse.")

 ctx = MLContext()
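Likewise, bench_parser.py keeps its behaviour: each sub-program is first round-tripped through mlir-opt into generic form, and only programs MLIR accepts are then timed with xDSL's parser. A self-contained sketch of that round-trip step, with a hypothetical mlir-opt path and a tiny sample op:

# Sketch only: the mlir-opt round-trip from bench_parser.py, made standalone.
import subprocess

mlir_path = "/usr/local/bin/mlir-opt"    # hypothetical location of mlir-opt
sub_contents = '"test.op"() : () -> ()'  # hypothetical sub-program to check

res = subprocess.run(
    [
        mlir_path,
        "--allow-unregistered-dialect",
        "-mlir-print-op-generic",
        "-mlir-print-local-scope",
    ],
    input=sub_contents,
    text=True,
    capture_output=True,
    timeout=60,
)
if res.returncode == 0:
    # mlir-opt accepted the program; its generic form is what gets benchmarked.
    generic_sub_contents = res.stdout
    print(generic_sub_contents)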

0 comments on commit c1d91c7
