From b88965f7035b24003b9c899abfa2013654212ab4 Mon Sep 17 00:00:00 2001
From: Florian Bruggisser
Date: Fri, 31 May 2024 15:12:35 +0200
Subject: [PATCH] added explicit utf-8 encoding parameter to write methods

added explicit utf-8 encoding parameter to write methods

signed
---
 src/super_gradients/common/auto_logging/auto_logger.py   | 2 +-
 src/super_gradients/common/plugins/deci_client.py        | 2 +-
 src/super_gradients/common/sg_loggers/base_sg_logger.py  | 4 ++--
 src/super_gradients/convert_recipe_to_code.py            | 2 +-
 .../datasets/detection_datasets/pascal_voc_detection.py  | 2 +-
 .../training/datasets/samplers/class_balanced_sampler.py | 2 +-
 tests/unit_tests/export_detection_model_test.py          | 2 +-
 tests/unit_tests/pose_estimation_metrics_test.py         | 2 +-
 utils_script/create_sub_coco.py                          | 2 +-
 9 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/src/super_gradients/common/auto_logging/auto_logger.py b/src/super_gradients/common/auto_logging/auto_logger.py
index e0ef358a05..2a5fabc6a7 100755
--- a/src/super_gradients/common/auto_logging/auto_logger.py
+++ b/src/super_gradients/common/auto_logging/auto_logger.py
@@ -52,7 +52,7 @@ def _setup_logging(self, filename: str, copy_already_logged_messages: bool, file

         if copy_already_logged_messages and self.filename is not None and os.path.exists(self.filename):
             with open(self.filename, "r", encoding="utf-8") as src:
-                with open(filename, "w") as dst:
+                with open(filename, "w", encoding="utf-8") as dst:
                     dst.write(src.read())

         file_logging_level = log_level or env_variables.FILE_LOG_LEVEL
diff --git a/src/super_gradients/common/plugins/deci_client.py b/src/super_gradients/common/plugins/deci_client.py
index 0d80eb9239..aab22d404e 100644
--- a/src/super_gradients/common/plugins/deci_client.py
+++ b/src/super_gradients/common/plugins/deci_client.py
@@ -161,7 +161,7 @@ def load_code_from_zipfile(*, file: str, target_path: str, package_name: str = "
         zipfile.extractall(package_path)

     # add an init file that imports all code files
-    with open(os.path.join(package_path, "__init__.py"), "w") as init_file:
+    with open(os.path.join(package_path, "__init__.py"), "w", encoding="utf-8") as init_file:
         all_str = "\n\n__all__ = ["
         for code_file in os.listdir(path=package_path):
             if code_file.endswith(".py") and not code_file.startswith("__init__"):
diff --git a/src/super_gradients/common/sg_loggers/base_sg_logger.py b/src/super_gradients/common/sg_loggers/base_sg_logger.py
index e5091979f4..503e8d5ff3 100644
--- a/src/super_gradients/common/sg_loggers/base_sg_logger.py
+++ b/src/super_gradients/common/sg_loggers/base_sg_logger.py
@@ -156,7 +156,7 @@ def _init_log_file(self):

     @multi_process_safe
     def _write_to_log_file(self, lines: list):
-        with open(self.experiment_log_path, "a" if os.path.exists(self.experiment_log_path) else "w") as log_file:
+        with open(self.experiment_log_path, "a" if os.path.exists(self.experiment_log_path) else "w", encoding="utf-8") as log_file:
             for line in lines:
                 log_file.write(line + "\n")

@@ -345,7 +345,7 @@ def _save_code(self):
                 name = name + ".py"
             path = os.path.join(self._local_dir, name)

-            with open(path, "w") as f:
+            with open(path, "w", encoding="utf-8") as f:
                 f.write(code)
             self.add_file(name)
diff --git a/src/super_gradients/convert_recipe_to_code.py b/src/super_gradients/convert_recipe_to_code.py
index fd32c57a66..f5dde8f4c3 100644
--- a/src/super_gradients/convert_recipe_to_code.py
+++ b/src/super_gradients/convert_recipe_to_code.py
@@ -311,7 +311,7 @@ def main():
             key_to_replace_with = f"{key}"
             content = content.replace(key_to_search, key_to_replace_with)

-    with open(output_script_path, "w") as f:
+    with open(output_script_path, "w", encoding="utf-8") as f:
         black = try_import_black()
         if black is not None:
             content = black.format_str(content, mode=black.FileMode(line_length=160))
diff --git a/src/super_gradients/training/datasets/detection_datasets/pascal_voc_detection.py b/src/super_gradients/training/datasets/detection_datasets/pascal_voc_detection.py
index 3157f168dd..02e3aa2826 100755
--- a/src/super_gradients/training/datasets/detection_datasets/pascal_voc_detection.py
+++ b/src/super_gradients/training/datasets/detection_datasets/pascal_voc_detection.py
@@ -166,7 +166,7 @@ def get_coord(box_coord):
                 xmin, ymin, xmax, ymax = get_coord("xmin"), get_coord("ymin"), get_coord("xmax"), get_coord("ymax")
                 labels.append(" ".join([xmin, ymin, xmax, ymax, str(PASCAL_VOC_2012_CLASSES_LIST.index(cls))]))

-            with open(new_label_path, "w") as f:
+            with open(new_label_path, "w", encoding="utf-8") as f:
                 f.write("\n".join(labels))

         urls = [
diff --git a/src/super_gradients/training/datasets/samplers/class_balanced_sampler.py b/src/super_gradients/training/datasets/samplers/class_balanced_sampler.py
index bf16c2958d..523c619877 100644
--- a/src/super_gradients/training/datasets/samplers/class_balanced_sampler.py
+++ b/src/super_gradients/training/datasets/samplers/class_balanced_sampler.py
@@ -100,7 +100,7 @@ def precompute_sample_repeat_factors(

         str_repeat_factors = [np.format_float_positional(rf, trim="0", precision=4) for rf in repeat_factors]

-        with open(output_path, "w") as f:
+        with open(output_path, "w", encoding="utf-8") as f:
             json.dump(str_repeat_factors, f)

     @staticmethod
diff --git a/tests/unit_tests/export_detection_model_test.py b/tests/unit_tests/export_detection_model_test.py
index fdc12695ed..3d0936ab61 100644
--- a/tests/unit_tests/export_detection_model_test.py
+++ b/tests/unit_tests/export_detection_model_test.py
@@ -568,7 +568,7 @@ def manual_test_export_export_all_variants(self):
         os.makedirs(export_dir, exist_ok=True)

         benchmark_command_dir = "benchmark_command.sh"
-        with open(benchmark_command_dir, "w") as f:
+        with open(benchmark_command_dir, "w", encoding="utf-8") as f:
             pass

         for output_predictions_format in [DetectionOutputFormatMode.BATCH_FORMAT, DetectionOutputFormatMode.FLAT_FORMAT]:
diff --git a/tests/unit_tests/pose_estimation_metrics_test.py b/tests/unit_tests/pose_estimation_metrics_test.py
index 8f5800495e..e7f7caf7f7 100644
--- a/tests/unit_tests/pose_estimation_metrics_test.py
+++ b/tests/unit_tests/pose_estimation_metrics_test.py
@@ -87,7 +87,7 @@ def convert_predictions_to_target_format(preds) -> List[PoseEstimationPrediction

         with tempfile.TemporaryDirectory() as td:
             res_file = os.path.join(td, "keypoints_coco2017_results.json")
-            with open(res_file, "w") as f:
+            with open(res_file, "w", encoding="utf-8") as f:
                 json.dump(coco_pred, f, sort_keys=True, indent=4)

             coco_dt = self._load_coco_groundtruth(with_crowd, with_duplicates, with_invisible_keypoitns)
diff --git a/utils_script/create_sub_coco.py b/utils_script/create_sub_coco.py
index 312b66b218..67ae432d42 100644
--- a/utils_script/create_sub_coco.py
+++ b/utils_script/create_sub_coco.py
@@ -46,7 +46,7 @@ def _copy_to_new_dir(mode: str, n_images: int, input_data_dir: Path, dest_data_d
     dest_images_dir = dest_data_dir / "images" / f"{mode}2017"
     dest_images_dir.mkdir(exist_ok=True, parents=True)

-    with open(dest_instances_path, "w") as f:
+    with open(dest_instances_path, "w", encoding="utf-8") as f:
         json.dump(instances, f)

     for image_name in kept_images_name:
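
The change above is motivated by how Python's text-mode open() behaves when no encoding is given: it falls back to locale.getpreferredencoding(False), which varies by platform and environment (commonly cp1252 on Windows), so writing non-ASCII content can raise UnicodeEncodeError or produce files that other tools cannot reliably read back as UTF-8. The standalone sketch below is not part of the patch; the payload string, the temporary directory, and the file name are illustrative assumptions. It reproduces the failure mode and the explicit-encoding fix.

# Standalone illustration (not part of the patch): payload and path are made up.
import locale
import os
import tempfile

# Text-mode open() without an encoding argument uses the locale's preferred
# encoding, which differs between machines.
print("default text-mode encoding:", locale.getpreferredencoding(False))

payload = "größe 標準"  # non-ASCII content, e.g. class names or log lines
path = os.path.join(tempfile.mkdtemp(), "example.txt")

# Implicit encoding: may raise UnicodeEncodeError (e.g. under cp1252 or a
# POSIX/C locale) and silently depends on the machine running the code.
try:
    with open(path, "w") as f:
        f.write(payload)
except UnicodeEncodeError as exc:
    print("implicit encoding failed:", exc)

# Explicit UTF-8, as introduced by this patch: deterministic on every platform
# and round-trips the payload unchanged.
with open(path, "w", encoding="utf-8") as f:
    f.write(payload)

with open(path, "r", encoding="utf-8") as f:
    assert f.read() == payload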