Skip to content

Commit

Permalink
Merge pull request #549 from qduk/ab_bug_548
Browse files Browse the repository at this point in the history
Update variable used in Dryrun job decision
  • Loading branch information
jdrew82 authored Oct 1, 2024
2 parents 81093b2 + b6af591 commit f381da2
Show file tree
Hide file tree
Showing 7 changed files with 103 additions and 81 deletions.
1 change: 1 addition & 0 deletions changes/548.fixed
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fixed SSoT jobs not respecting DryRun variable.
4 changes: 1 addition & 3 deletions development/app_config_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,7 @@ def _main():
**SchemaBuilder().to_json_schema(app_config), # type: ignore
}
app_config = import_module(package_name).config
_enrich_object_schema(
schema, app_config.default_settings, app_config.required_settings
)
_enrich_object_schema(schema, app_config.default_settings, app_config.required_settings)
schema_path.write_text(json.dumps(schema, indent=4) + "\n")
print(f"\n==================\nGenerated schema:\n\n{schema_path}\n")
print(
Expand Down
12 changes: 3 additions & 9 deletions development/nautobot_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,8 @@

if "debug_toolbar" not in INSTALLED_APPS: # noqa: F405
INSTALLED_APPS.append("debug_toolbar") # noqa: F405
if (
"debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE
): # noqa: F405
MIDDLEWARE.insert(
0, "debug_toolbar.middleware.DebugToolbarMiddleware"
) # noqa: F405
if "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE: # noqa: F405
MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware") # noqa: F405

#
# Misc. settings
Expand Down Expand Up @@ -55,9 +51,7 @@
"NAUTOBOT_DB_PORT",
default_db_settings[nautobot_db_engine]["NAUTOBOT_DB_PORT"],
), # Database port, default to postgres
"CONN_MAX_AGE": int(
os.getenv("NAUTOBOT_DB_TIMEOUT", "300")
), # Database timeout
"CONN_MAX_AGE": int(os.getenv("NAUTOBOT_DB_TIMEOUT", "300")), # Database timeout
"ENGINE": nautobot_db_engine,
}
}
Expand Down
43 changes: 34 additions & 9 deletions nautobot_ssot/jobs/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,10 @@ class DataSyncBaseJob(Job): # pylint: disable=too-many-instance-attributes
- `data_source_icon` and `data_target_icon`
"""

dryrun = DryRunVar(description="Perform a dry-run, making no actual changes to Nautobot data.", default=True)
dryrun = DryRunVar(
description="Perform a dry-run, making no actual changes to Nautobot data.",
default=True,
)
memory_profiling = BooleanVar(description="Perform a memory profiling analysis.", default=False)

def load_source_adapter(self):
Expand Down Expand Up @@ -96,7 +99,7 @@ def execute_sync(self):
else:
self.logger.warning("Not both adapters were properly initialized prior to synchronization.")

def sync_data(self, memory_profiling):
def sync_data(self, memory_profiling): # pylint: disable=too-many-statements
"""Method to load data from adapters, calculate diffs and sync (if not dry-run).
It is composed by 4 methods:
Expand All @@ -117,10 +120,16 @@ def format_size(size): # pylint: disable=inconsistent-return-statements
for unit in ("B", "KiB", "MiB", "GiB", "TiB"):
if abs(size) < 100 and unit != "B":
# 3 digits (xx.x UNIT)
return "%.1f %s" % (size, unit) # pylint: disable=consider-using-f-string
return "%.1f %s" % ( # pylint: disable=consider-using-f-string
size,
unit,
)
if abs(size) < 10 * 1024 or unit == "TiB":
# 4 or 5 digits (xxxx UNIT)
return "%.0f %s" % (size, unit) # pylint: disable=consider-using-f-string
return "%.0f %s" % ( # pylint: disable=consider-using-f-string
size,
unit,
)
size /= 1024

def record_memory_trace(step: str):
Expand Down Expand Up @@ -150,7 +159,11 @@ def record_memory_trace(step: str):
load_source_adapter_time = datetime.now()
self.sync.source_load_time = load_source_adapter_time - start_time
self.sync.save()
self.logger.info("Source Load Time from %s: %s", self.source_adapter, self.sync.source_load_time)
self.logger.info(
"Source Load Time from %s: %s",
self.source_adapter,
self.sync.source_load_time,
)
if memory_profiling:
record_memory_trace("source_load")

Expand All @@ -159,7 +172,11 @@ def record_memory_trace(step: str):
load_target_adapter_time = datetime.now()
self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time
self.sync.save()
self.logger.info("Target Load Time from %s: %s", self.target_adapter, self.sync.target_load_time)
self.logger.info(
"Target Load Time from %s: %s",
self.target_adapter,
self.sync.target_load_time,
)
if memory_profiling:
record_memory_trace("target_load")

Expand All @@ -172,10 +189,11 @@ def record_memory_trace(step: str):
if memory_profiling:
record_memory_trace("diff")

if self.dryrun:
if self.sync.dry_run:
self.logger.info("As `dryrun` is set, skipping the actual data sync.")
else:
self.logger.info("Syncing from %s to %s...", self.source_adapter, self.target_adapter)
print("I'm executing the sync now")
self.execute_sync()
execute_sync_time = datetime.now()
self.sync.sync_time = execute_sync_time - calculate_diff_time
Expand All @@ -185,7 +203,11 @@ def record_memory_trace(step: str):
if memory_profiling:
record_memory_trace("sync")

def lookup_object(self, model_name, unique_id) -> Optional[BaseModel]: # pylint: disable=unused-argument
def lookup_object( # pylint: disable=unused-argument
self,
model_name,
unique_id,
) -> Optional[BaseModel]:
"""Look up the Nautobot record, if any, identified by the args.
Optional helper method used to build more detailed/accurate SyncLogEntry records from DiffSync logs.
Expand Down Expand Up @@ -321,7 +343,10 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu

# Add _structlog_to_sync_log_entry as a processor for structlog calls from DiffSync
structlog.configure(
processors=[self._structlog_to_sync_log_entry, structlog.stdlib.render_to_log_kwargs],
processors=[
self._structlog_to_sync_log_entry,
structlog.stdlib.render_to_log_kwargs,
],
context_class=dict,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
Expand Down
18 changes: 4 additions & 14 deletions nautobot_ssot/tests/test_basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,21 +11,11 @@ class TestDocsPackaging(unittest.TestCase):

def test_version(self):
"""Verify that pyproject.toml dev dependencies have the same versions as in the docs requirements.txt."""
parent_path = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
)
parent_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
poetry_path = os.path.join(parent_path, "pyproject.toml")
poetry_details = toml.load(poetry_path)["tool"]["poetry"]["group"]["dev"][
"dependencies"
]
with open(
f"{parent_path}/docs/requirements.txt", "r", encoding="utf-8"
) as file:
requirements = [
line
for line in file.read().splitlines()
if (len(line) > 0 and not line.startswith("#"))
]
poetry_details = toml.load(poetry_path)["tool"]["poetry"]["group"]["dev"]["dependencies"]
with open(f"{parent_path}/docs/requirements.txt", "r", encoding="utf-8") as file:
requirements = [line for line in file.read().splitlines() if (len(line) > 0 and not line.startswith("#"))]
for pkg in requirements:
package_name = pkg
if len(pkg.split("==")) == 2: # noqa: PLR2004
Expand Down
48 changes: 46 additions & 2 deletions nautobot_ssot/tests/test_jobs.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""Test the Job classes in nautobot_ssot."""

import os.path
from unittest.mock import Mock, call
from unittest.mock import Mock, call, patch

from django.db.utils import IntegrityError, OperationalError
from django.test import override_settings
Expand Down Expand Up @@ -90,9 +90,53 @@ def test_run(self):
self.assertIsNone(self.job.sync.sync_time)
self.assertEqual(self.job.sync.source, self.job.data_source)
self.assertEqual(self.job.sync.target, self.job.data_target)
self.assertTrue(self.job.dryrun)
self.assertTrue(self.job.sync.dry_run)
self.assertEqual(self.job.job_result, self.job.sync.job_result)

def test_job_dryrun_false(self):
    """Test that the job is not run in dryrun mode."""
    # Patch out the real sync so we only observe whether it gets invoked.
    with patch.object(DataSyncBaseJob, "execute_sync") as sync_mock:
        job = DataSyncBaseJob()
        job.job_result = JobResult.objects.create(
            name="fake job no dryrun",
            task_name="fake job no dryrun",
            worker="default",
        )
        # Stub out adapter loading; no real source/target data is needed.
        job.load_source_adapter = lambda *args, **kwargs: None
        job.load_target_adapter = lambda *args, **kwargs: None
        job.run(dryrun=False, memory_profiling=False)
        # With dryrun disabled, the Sync record reflects it and the sync executes.
        self.assertFalse(job.sync.dry_run)
        sync_mock.assert_called()

def test_job_dryrun_true(self):
    """Test that the job is run in dryrun mode."""
    # Patch out the real sync so we only observe whether it gets invoked.
    with patch.object(DataSyncBaseJob, "execute_sync") as sync_mock:
        job = DataSyncBaseJob()
        job.job_result = JobResult.objects.create(
            name="fake job",
            task_name="fake job",
            worker="default",
        )
        # Stub out adapter loading; no real source/target data is needed.
        job.load_source_adapter = lambda *args, **kwargs: None
        job.load_target_adapter = lambda *args, **kwargs: None
        job.run(dryrun=True, memory_profiling=False)
        # With dryrun enabled, the Sync record reflects it and the sync is skipped.
        self.assertTrue(job.sync.dry_run)
        sync_mock.assert_not_called()

@patch("tracemalloc.start")
def test_job_memory_profiling_true(self, mock_malloc_start):
    """Test that memory profiling is started when the job is run with memory_profiling=True."""
    # The original docstring said "dryrun mode" — a copy-paste error; this
    # test exercises the memory_profiling flag, not dryrun.
    self.job.run(dryrun=False, memory_profiling=True)
    # tracemalloc.start is the entry point of memory profiling; it must fire.
    mock_malloc_start.assert_called()

@patch("tracemalloc.start")
def test_job_memory_profiling_false(self, mock_malloc_start):
    """Test that memory profiling is not started when the job is run with memory_profiling=False."""
    # The original docstring said "dryrun mode" — a copy-paste error; this
    # test exercises the memory_profiling flag, not dryrun.
    self.job.run(dryrun=False, memory_profiling=False)
    # tracemalloc.start must never fire when profiling is disabled.
    mock_malloc_start.assert_not_called()

def test_calculate_diff(self):
"""Test calculate_diff() method."""
self.job.sync = Mock()
Expand Down
58 changes: 14 additions & 44 deletions tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,9 +72,7 @@ def _is_compose_included(context, name):


def _await_healthy_service(context, service):
container_id = docker_compose(
context, f"ps -q -- {service}", pty=False, echo=False, hide=True
).stdout.strip()
container_id = docker_compose(context, f"ps -q -- {service}", pty=False, echo=False, hide=True).stdout.strip()
_await_healthy_container(context, container_id)


Expand Down Expand Up @@ -166,9 +164,7 @@ def docker_compose(context, command, **kwargs):
]

for compose_file in context.nautobot_ssot.compose_files:
compose_file_path = os.path.join(
context.nautobot_ssot.compose_dir, compose_file
)
compose_file_path = os.path.join(context.nautobot_ssot.compose_dir, compose_file)
compose_command_tokens.append(f' -f "{compose_file_path}"')

compose_command_tokens.append(command)
Expand Down Expand Up @@ -244,20 +240,10 @@ def _get_docker_nautobot_version(context, nautobot_ver=None, python_ver=None):
if python_ver is None:
python_ver = context.nautobot_ssot.python_ver
dockerfile_path = os.path.join(context.nautobot_ssot.compose_dir, "Dockerfile")
base_image = (
context.run(f"grep --max-count=1 '^FROM ' {dockerfile_path}", hide=True)
.stdout.strip()
.split(" ")[1]
)
base_image = base_image.replace(r"${NAUTOBOT_VER}", nautobot_ver).replace(
r"${PYTHON_VER}", python_ver
)
pip_nautobot_ver = context.run(
f"docker run --rm --entrypoint '' {base_image} pip show nautobot", hide=True
)
match_version = re.search(
r"^Version: (.+)$", pip_nautobot_ver.stdout.strip(), flags=re.MULTILINE
)
base_image = context.run(f"grep --max-count=1 '^FROM ' {dockerfile_path}", hide=True).stdout.strip().split(" ")[1]
base_image = base_image.replace(r"${NAUTOBOT_VER}", nautobot_ver).replace(r"${PYTHON_VER}", python_ver)
pip_nautobot_ver = context.run(f"docker run --rm --entrypoint '' {base_image} pip show nautobot", hide=True)
match_version = re.search(r"^Version: (.+)$", pip_nautobot_ver.stdout.strip(), flags=re.MULTILINE)
if match_version:
return match_version.group(1)
else:
Expand All @@ -282,9 +268,7 @@ def _get_docker_nautobot_version(context, nautobot_ver=None, python_ver=None):
),
}
)
def lock(
context, check=False, constrain_nautobot_ver=False, constrain_python_ver=False
):
def lock(context, check=False, constrain_nautobot_ver=False, constrain_python_ver=False):
"""Generate poetry.lock file."""
if constrain_nautobot_ver:
docker_nautobot_version = _get_docker_nautobot_version(context)
Expand Down Expand Up @@ -324,9 +308,7 @@ def restart(context, service=""):
def stop(context, service=""):
"""Stop specified or all services, if service is not specified, remove all containers."""
print("Stopping Nautobot...")
docker_compose(
context, "stop" if service else "down --remove-orphans", service=service
)
docker_compose(context, "stop" if service else "down --remove-orphans", service=service)


@task(
Expand All @@ -345,9 +327,7 @@ def destroy(context, volumes=True, import_db_file=""):
return

if not volumes:
raise ValueError(
"Cannot specify `--no-volumes` and `--import-db-file` arguments at the same time."
)
raise ValueError("Cannot specify `--no-volumes` and `--import-db-file` arguments at the same time.")

print(f"Importing database file: {import_db_file}...")

Expand All @@ -364,16 +344,12 @@ def destroy(context, volumes=True, import_db_file=""):
"db",
]

container_id = docker_compose(
context, " ".join(command), pty=False, echo=False, hide=True
).stdout.strip()
container_id = docker_compose(context, " ".join(command), pty=False, echo=False, hide=True).stdout.strip()
_await_healthy_container(context, container_id)
print("Stopping database container...")
context.run(f"docker stop {container_id}", pty=False, echo=False, hide=True)

print(
"Database import complete, you can start Nautobot with the following command:"
)
print("Database import complete, you can start Nautobot with the following command:")
print("invoke start")


Expand Down Expand Up @@ -549,9 +525,7 @@ def dbshell(context, db_name="", input_file="", output_file="", query=""):
if input_file and query:
raise ValueError("Cannot specify both, `input_file` and `query` arguments")
if output_file and not (input_file or query):
raise ValueError(
"`output_file` argument requires `input_file` or `query` argument"
)
raise ValueError("`output_file` argument requires `input_file` or `query` argument")

env = {}
if query:
Expand Down Expand Up @@ -689,9 +663,7 @@ def backup_db(context, db_name="", output_file="dump.sql", readable=True):
docker_compose(context, " ".join(command), pty=False)

print(50 * "=")
print(
"The database backup has been successfully completed and saved to the following file:"
)
print("The database backup has been successfully completed and saved to the following file:")
print(output_file)
print("You can import this database backup with the following command:")
print(f"invoke import-db --input-file '{output_file}'")
Expand Down Expand Up @@ -860,9 +832,7 @@ def unittest( # noqa: PLR0913
@task
def unittest_coverage(context):
"""Report on code test coverage as measured by 'invoke unittest'."""
command = (
"coverage report --skip-covered --include 'nautobot_ssot/*' --omit *migrations*"
)
command = "coverage report --skip-covered --include 'nautobot_ssot/*' --omit *migrations*"

run_command(context, command)

Expand Down

0 comments on commit f381da2

Please sign in to comment.