diff --git a/changes/548.fixed b/changes/548.fixed
new file mode 100644
index 00000000..05924edd
--- /dev/null
+++ b/changes/548.fixed
@@ -0,0 +1 @@
+Fixed SSoT jobs not respecting DryRun variable.
\ No newline at end of file
diff --git a/development/app_config_schema.py b/development/app_config_schema.py
index e52e2478..a779b14e 100644
--- a/development/app_config_schema.py
+++ b/development/app_config_schema.py
@@ -40,9 +40,7 @@ def _main():
         **SchemaBuilder().to_json_schema(app_config),  # type: ignore
     }
     app_config = import_module(package_name).config
-    _enrich_object_schema(
-        schema, app_config.default_settings, app_config.required_settings
-    )
+    _enrich_object_schema(schema, app_config.default_settings, app_config.required_settings)
     schema_path.write_text(json.dumps(schema, indent=4) + "\n")
     print(f"\n==================\nGenerated schema:\n\n{schema_path}\n")
     print(
diff --git a/development/nautobot_config.py b/development/nautobot_config.py
index 33bb8028..123ad7ec 100644
--- a/development/nautobot_config.py
+++ b/development/nautobot_config.py
@@ -18,12 +18,8 @@
 if "debug_toolbar" not in INSTALLED_APPS:  # noqa: F405
     INSTALLED_APPS.append("debug_toolbar")  # noqa: F405
-    if (
-        "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE
-    ):  # noqa: F405
-        MIDDLEWARE.insert(
-            0, "debug_toolbar.middleware.DebugToolbarMiddleware"
-        )  # noqa: F405
+    if "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE:  # noqa: F405
+        MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware")  # noqa: F405

 #
 # Misc. settings
 #
@@ -55,9 +51,7 @@
             "NAUTOBOT_DB_PORT",
             default_db_settings[nautobot_db_engine]["NAUTOBOT_DB_PORT"],
         ),  # Database port, default to postgres
-        "CONN_MAX_AGE": int(
-            os.getenv("NAUTOBOT_DB_TIMEOUT", "300")
-        ),  # Database timeout
+        "CONN_MAX_AGE": int(os.getenv("NAUTOBOT_DB_TIMEOUT", "300")),  # Database timeout
         "ENGINE": nautobot_db_engine,
     }
 }
diff --git a/nautobot_ssot/jobs/base.py b/nautobot_ssot/jobs/base.py
index 9e087bae..972f8952 100644
--- a/nautobot_ssot/jobs/base.py
+++ b/nautobot_ssot/jobs/base.py
@@ -45,7 +45,10 @@ class DataSyncBaseJob(Job):  # pylint: disable=too-many-instance-attributes
     - `data_source_icon` and `data_target_icon`
     """

-    dryrun = DryRunVar(description="Perform a dry-run, making no actual changes to Nautobot data.", default=True)
+    dryrun = DryRunVar(
+        description="Perform a dry-run, making no actual changes to Nautobot data.",
+        default=True,
+    )
     memory_profiling = BooleanVar(description="Perform a memory profiling analysis.", default=False)

     def load_source_adapter(self):
@@ -96,7 +99,7 @@ def execute_sync(self):
         else:
             self.logger.warning("Not both adapters were properly initialized prior to synchronization.")

-    def sync_data(self, memory_profiling):
+    def sync_data(self, memory_profiling):  # pylint: disable=too-many-statements
         """Method to load data from adapters, calculate diffs and sync (if not dry-run).
         It is composed by 4 methods:
@@ -117,10 +120,16 @@ def format_size(size):  # pylint: disable=inconsistent-return-statements
             for unit in ("B", "KiB", "MiB", "GiB", "TiB"):
                 if abs(size) < 100 and unit != "B":
                     # 3 digits (xx.x UNIT)
-                    return "%.1f %s" % (size, unit)  # pylint: disable=consider-using-f-string
+                    return "%.1f %s" % (  # pylint: disable=consider-using-f-string
+                        size,
+                        unit,
+                    )
                 if abs(size) < 10 * 1024 or unit == "TiB":
                     # 4 or 5 digits (xxxx UNIT)
-                    return "%.0f %s" % (size, unit)  # pylint: disable=consider-using-f-string
+                    return "%.0f %s" % (  # pylint: disable=consider-using-f-string
+                        size,
+                        unit,
+                    )
                 size /= 1024

         def record_memory_trace(step: str):
@@ -150,7 +159,11 @@ def record_memory_trace(step: str):
         load_source_adapter_time = datetime.now()
         self.sync.source_load_time = load_source_adapter_time - start_time
         self.sync.save()
-        self.logger.info("Source Load Time from %s: %s", self.source_adapter, self.sync.source_load_time)
+        self.logger.info(
+            "Source Load Time from %s: %s",
+            self.source_adapter,
+            self.sync.source_load_time,
+        )
         if memory_profiling:
             record_memory_trace("source_load")

@@ -159,7 +172,11 @@ def record_memory_trace(step: str):
         load_target_adapter_time = datetime.now()
         self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time
         self.sync.save()
-        self.logger.info("Target Load Time from %s: %s", self.target_adapter, self.sync.target_load_time)
+        self.logger.info(
+            "Target Load Time from %s: %s",
+            self.target_adapter,
+            self.sync.target_load_time,
+        )
         if memory_profiling:
             record_memory_trace("target_load")

@@ -172,10 +189,10 @@ def record_memory_trace(step: str):
         if memory_profiling:
             record_memory_trace("diff")

-        if self.dryrun:
+        if self.sync.dry_run:
             self.logger.info("As `dryrun` is set, skipping the actual data sync.")
         else:
             self.logger.info("Syncing from %s to %s...", self.source_adapter, self.target_adapter)
             self.execute_sync()
         execute_sync_time = datetime.now()
         self.sync.sync_time = execute_sync_time - calculate_diff_time
@@ -185,7 +202,11 @@ def record_memory_trace(step: str):
         if memory_profiling:
             record_memory_trace("sync")

-    def lookup_object(self, model_name, unique_id) -> Optional[BaseModel]:  # pylint: disable=unused-argument
+    def lookup_object(  # pylint: disable=unused-argument
+        self,
+        model_name,
+        unique_id,
+    ) -> Optional[BaseModel]:
         """Look up the Nautobot record, if any, identified by the args.

         Optional helper method used to build more detailed/accurate SyncLogEntry records from DiffSync logs.
@@ -321,7 +342,10 @@ def run(self, dryrun, memory_profiling, *args, **kwargs):  # pylint:disable=argu

         # Add _structlog_to_sync_log_entry as a processor for structlog calls from DiffSync
         structlog.configure(
-            processors=[self._structlog_to_sync_log_entry, structlog.stdlib.render_to_log_kwargs],
+            processors=[
+                self._structlog_to_sync_log_entry,
+                structlog.stdlib.render_to_log_kwargs,
+            ],
             context_class=dict,
             logger_factory=structlog.stdlib.LoggerFactory(),
             wrapper_class=structlog.stdlib.BoundLogger,
diff --git a/nautobot_ssot/tests/test_basic.py b/nautobot_ssot/tests/test_basic.py
index 83142fac..d72f2d02 100644
--- a/nautobot_ssot/tests/test_basic.py
+++ b/nautobot_ssot/tests/test_basic.py
@@ -11,21 +11,11 @@ class TestDocsPackaging(unittest.TestCase):

     def test_version(self):
         """Verify that pyproject.toml dev dependencies have the same versions as in the docs requirements.txt."""
-        parent_path = os.path.dirname(
-            os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-        )
+        parent_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
         poetry_path = os.path.join(parent_path, "pyproject.toml")
-        poetry_details = toml.load(poetry_path)["tool"]["poetry"]["group"]["dev"][
-            "dependencies"
-        ]
-        with open(
-            f"{parent_path}/docs/requirements.txt", "r", encoding="utf-8"
-        ) as file:
-            requirements = [
-                line
-                for line in file.read().splitlines()
-                if (len(line) > 0 and not line.startswith("#"))
-            ]
+        poetry_details = toml.load(poetry_path)["tool"]["poetry"]["group"]["dev"]["dependencies"]
+        with open(f"{parent_path}/docs/requirements.txt", "r", encoding="utf-8") as file:
+            requirements = [line for line in file.read().splitlines() if (len(line) > 0 and not line.startswith("#"))]
         for pkg in requirements:
             package_name = pkg
             if len(pkg.split("==")) == 2:  # noqa: PLR2004
diff --git a/nautobot_ssot/tests/test_jobs.py b/nautobot_ssot/tests/test_jobs.py
index b4052b24..de2f09cc 100644
--- a/nautobot_ssot/tests/test_jobs.py
+++ b/nautobot_ssot/tests/test_jobs.py
@@ -1,7 +1,7 @@
 """Test the Job classes in nautobot_ssot."""

 import os.path
-from unittest.mock import Mock, call
+from unittest.mock import Mock, call, patch

 from django.db.utils import IntegrityError, OperationalError
 from django.test import override_settings
@@ -90,9 +90,53 @@ def test_run(self):
         self.assertIsNone(self.job.sync.sync_time)
         self.assertEqual(self.job.sync.source, self.job.data_source)
         self.assertEqual(self.job.sync.target, self.job.data_target)
-        self.assertTrue(self.job.dryrun)
+        self.assertTrue(self.job.sync.dry_run)
         self.assertEqual(self.job.job_result, self.job.sync.job_result)

+    def test_job_dryrun_false(self):
+        """Test the job is not run in dryrun mode."""
+        with patch.object(DataSyncBaseJob, "execute_sync") as mock_execute_sync:
+            isolated_job = DataSyncBaseJob()
+
+            isolated_job.job_result = JobResult.objects.create(
+                name="fake job no dryrun",
+                task_name="fake job no dryrun",
+                worker="default",
+            )
+            isolated_job.load_source_adapter = lambda *x, **y: None
+            isolated_job.load_target_adapter = lambda *x, **y: None
+            isolated_job.run(dryrun=False, memory_profiling=False)
+            self.assertFalse(isolated_job.sync.dry_run)
+            mock_execute_sync.assert_called()
+
+    def test_job_dryrun_true(self):
+        """Test the job is run in dryrun mode."""
+        with patch.object(DataSyncBaseJob, "execute_sync") as mock_execute_sync:
+            isolated_job = DataSyncBaseJob()
+
+            isolated_job.job_result = JobResult.objects.create(
+                name="fake job",
+                task_name="fake job",
+                worker="default",
+            )
+            isolated_job.load_source_adapter = lambda *x, **y: None
+            isolated_job.load_target_adapter = lambda *x, **y: None
+            isolated_job.run(dryrun=True, memory_profiling=False)
+            self.assertTrue(isolated_job.sync.dry_run)
+            mock_execute_sync.assert_not_called()
+
+    @patch("tracemalloc.start")
+    def test_job_memory_profiling_true(self, mock_malloc_start):
+        """Test the job is run with memory profiling enabled."""
+        self.job.run(dryrun=False, memory_profiling=True)
+        mock_malloc_start.assert_called()
+
+    @patch("tracemalloc.start")
+    def test_job_memory_profiling_false(self, mock_malloc_start):
+        """Test the job is run without memory profiling."""
+        self.job.run(dryrun=False, memory_profiling=False)
+        mock_malloc_start.assert_not_called()
+
     def test_calculate_diff(self):
         """Test calculate_diff() method."""
         self.job.sync = Mock()
diff --git a/tasks.py b/tasks.py
index 3006f401..58a4484f 100644
--- a/tasks.py
+++ b/tasks.py
@@ -72,9 +72,7 @@ def _is_compose_included(context, name):


 def _await_healthy_service(context, service):
-    container_id = docker_compose(
-        context, f"ps -q -- {service}", pty=False, echo=False, hide=True
-    ).stdout.strip()
+    container_id = docker_compose(context, f"ps -q -- {service}", pty=False, echo=False, hide=True).stdout.strip()
     _await_healthy_container(context, container_id)


@@ -166,9 +164,7 @@ def docker_compose(context, command, **kwargs):
     ]

     for compose_file in context.nautobot_ssot.compose_files:
-        compose_file_path = os.path.join(
-            context.nautobot_ssot.compose_dir, compose_file
-        )
+        compose_file_path = os.path.join(context.nautobot_ssot.compose_dir, compose_file)
         compose_command_tokens.append(f' -f "{compose_file_path}"')

     compose_command_tokens.append(command)
@@ -244,20 +240,10 @@ def _get_docker_nautobot_version(context, nautobot_ver=None, python_ver=None):
     if python_ver is None:
         python_ver = context.nautobot_ssot.python_ver
     dockerfile_path = os.path.join(context.nautobot_ssot.compose_dir, "Dockerfile")
-    base_image = (
-        context.run(f"grep --max-count=1 '^FROM ' {dockerfile_path}", hide=True)
-        .stdout.strip()
-        .split(" ")[1]
-    )
-    base_image = base_image.replace(r"${NAUTOBOT_VER}", nautobot_ver).replace(
-        r"${PYTHON_VER}", python_ver
-    )
-    pip_nautobot_ver = context.run(
-        f"docker run --rm --entrypoint '' {base_image} pip show nautobot", hide=True
-    )
-    match_version = re.search(
-        r"^Version: (.+)$", pip_nautobot_ver.stdout.strip(), flags=re.MULTILINE
-    )
+    base_image = context.run(f"grep --max-count=1 '^FROM ' {dockerfile_path}", hide=True).stdout.strip().split(" ")[1]
+    base_image = base_image.replace(r"${NAUTOBOT_VER}", nautobot_ver).replace(r"${PYTHON_VER}", python_ver)
+    pip_nautobot_ver = context.run(f"docker run --rm --entrypoint '' {base_image} pip show nautobot", hide=True)
+    match_version = re.search(r"^Version: (.+)$", pip_nautobot_ver.stdout.strip(), flags=re.MULTILINE)
     if match_version:
         return match_version.group(1)
     else:
@@ -282,9 +268,7 @@ def _get_docker_nautobot_version(context, nautobot_ver=None, python_ver=None):
         ),
     }
 )
-def lock(
-    context, check=False, constrain_nautobot_ver=False, constrain_python_ver=False
-):
+def lock(context, check=False, constrain_nautobot_ver=False, constrain_python_ver=False):
     """Generate poetry.lock file."""
     if constrain_nautobot_ver:
         docker_nautobot_version = _get_docker_nautobot_version(context)
@@ -324,9 +308,7 @@ def restart(context, service=""):
 def stop(context, service=""):
     """Stop specified or all services, if service is not specified, remove all containers."""
     print("Stopping Nautobot...")
-    docker_compose(
service else "down --remove-orphans", service=service - ) + docker_compose(context, "stop" if service else "down --remove-orphans", service=service) @task( @@ -345,9 +327,7 @@ def destroy(context, volumes=True, import_db_file=""): return if not volumes: - raise ValueError( - "Cannot specify `--no-volumes` and `--import-db-file` arguments at the same time." - ) + raise ValueError("Cannot specify `--no-volumes` and `--import-db-file` arguments at the same time.") print(f"Importing database file: {import_db_file}...") @@ -364,16 +344,12 @@ def destroy(context, volumes=True, import_db_file=""): "db", ] - container_id = docker_compose( - context, " ".join(command), pty=False, echo=False, hide=True - ).stdout.strip() + container_id = docker_compose(context, " ".join(command), pty=False, echo=False, hide=True).stdout.strip() _await_healthy_container(context, container_id) print("Stopping database container...") context.run(f"docker stop {container_id}", pty=False, echo=False, hide=True) - print( - "Database import complete, you can start Nautobot with the following command:" - ) + print("Database import complete, you can start Nautobot with the following command:") print("invoke start") @@ -549,9 +525,7 @@ def dbshell(context, db_name="", input_file="", output_file="", query=""): if input_file and query: raise ValueError("Cannot specify both, `input_file` and `query` arguments") if output_file and not (input_file or query): - raise ValueError( - "`output_file` argument requires `input_file` or `query` argument" - ) + raise ValueError("`output_file` argument requires `input_file` or `query` argument") env = {} if query: @@ -689,9 +663,7 @@ def backup_db(context, db_name="", output_file="dump.sql", readable=True): docker_compose(context, " ".join(command), pty=False) print(50 * "=") - print( - "The database backup has been successfully completed and saved to the following file:" - ) + print("The database backup has been successfully completed and saved to the following file:") print(output_file) print("You can import this database backup with the following command:") print(f"invoke import-db --input-file '{output_file}'") @@ -860,9 +832,7 @@ def unittest( # noqa: PLR0913 @task def unittest_coverage(context): """Report on code test coverage as measured by 'invoke unittest'.""" - command = ( - "coverage report --skip-covered --include 'nautobot_ssot/*' --omit *migrations*" - ) + command = "coverage report --skip-covered --include 'nautobot_ssot/*' --omit *migrations*" run_command(context, command)