fix nvd provider to not wipe out existing results on incremental update (#38)

Signed-off-by: Alex Goodman <[email protected]>
wagoodman authored Jan 11, 2023
1 parent 5e48f33 commit ad3c9b8
Showing 3 changed files with 57 additions and 27 deletions.
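
In short: the NVD provider's default existing_results policy changes from DELETE_BEFORE_WRITE (which, as the name suggests, clears previously written results before each run) to KEEP, so results accumulated by earlier runs survive an incremental update. A minimal sketch of the new default, using only names that appear in the diffs below:

    from vunnel import provider
    from vunnel.providers import nvd

    # the default runtime config now keeps existing results
    # instead of deleting them before each write
    config = nvd.Config()
    assert config.runtime.existing_results == provider.ResultStatePolicy.KEEP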
8 changes: 7 additions & 1 deletion src/vunnel/providers/nvd/__init__.py
@@ -11,7 +11,7 @@ class Config:
     runtime: provider.RuntimeConfig = field(
         default_factory=lambda: provider.RuntimeConfig(
             result_store=result.StoreStrategy.SQLITE,
-            existing_results=provider.ResultStatePolicy.DELETE_BEFORE_WRITE,
+            existing_results=provider.ResultStatePolicy.KEEP,
         )
     )
     request_timeout: int = 125
@@ -39,6 +39,12 @@ def __init__(self, root: str, config: Config) -> None:

         self.logger.debug(f"config: {config}")

+        if self.config.runtime.skip_if_exists and config.runtime.existing_results != provider.ResultStatePolicy.KEEP:
+            raise ValueError(
+                "if 'skip_if_exists' is set then 'runtime.existing_results' must be 'keep' "
+                + "(otherwise incremental updates will fail)"
+            )
+
         self.schema = schema.NVDSchema()
         self.manager = Manager(
             workspace=self.workspace,
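
The new guard in __init__ turns a silent data-loss combination into a construction-time error. A sketch of what it now rejects, assuming skip_if_exists is an ordinary RuntimeConfig field (its use above suggests so, but this commit does not show its definition):

    import pytest

    from vunnel import provider
    from vunnel.providers import nvd

    cfg = nvd.Config(
        runtime=provider.RuntimeConfig(
            skip_if_exists=True,  # assumed to be a plain RuntimeConfig field
            existing_results=provider.ResultStatePolicy.DELETE_BEFORE_WRITE,
        )
    )
    # the provider now refuses a policy that would wipe results mid-update
    with pytest.raises(ValueError):
        nvd.Provider("/tmp/doesntmatter", cfg)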
27 changes: 1 addition & 26 deletions tests/unit/providers/nvd/test_manager.py
@@ -2,8 +2,7 @@

 import pytest

-from vunnel import result, workspace
-from vunnel.providers import nvd
+from vunnel import workspace
 from vunnel.providers.nvd import manager


@@ -31,27 +30,3 @@ def test_parser(tmpdir, helpers, mock_data_path, mocker):
     actual_vulns = [v for v in subject.get(None)]

     assert expected_vulns == actual_vulns
-
-
-@pytest.mark.parametrize(
-    "mock_data_path,expected_written_entries",
-    [
-        ("test-fixtures/single-entry.json", 1),
-    ],
-)
-def test_provider_schema(helpers, mock_data_path, expected_written_entries, mocker):
-    workspace = helpers.provider_workspace_helper(name=nvd.Provider.name())
-    mock_data_path = helpers.local_dir(mock_data_path)
-
-    with open(mock_data_path) as f:
-        json_dict = json.load(f)
-
-    c = nvd.Config()
-    c.runtime.result_store = result.StoreStrategy.FLAT_FILE
-    p = nvd.Provider(root=workspace.root, config=c)
-    p.manager.api.cve = mocker.Mock(return_value=[json_dict])
-
-    p.update(None)
-
-    assert expected_written_entries == workspace.num_result_entries()
-    assert workspace.result_schemas_valid(require_entries=expected_written_entries > 0)
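
Note that the 24 deleted lines are not lost: test_provider_schema moves verbatim into the new tests/unit/providers/nvd/test_nvd.py shown next.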
49 changes: 49 additions & 0 deletions tests/unit/providers/nvd/test_nvd.py
@@ -0,0 +1,49 @@
+import json
+
+import pytest
+
+from vunnel import provider, result, workspace
+from vunnel.providers import nvd
+
+
+@pytest.mark.parametrize(
+    "policy,should_raise",
+    (
+        (provider.ResultStatePolicy.KEEP, False),
+        (provider.ResultStatePolicy.DELETE_BEFORE_WRITE, True),
+        (provider.ResultStatePolicy.DELETE, True),
+    ),
+)
+def test_incremental_update_with_existing_results(policy, should_raise):
+    def make():
+        nvd.Provider("/tmp/doesntmatter", nvd.Config(runtime=provider.RuntimeConfig(existing_results=policy)))
+
+    if should_raise:
+        with pytest.raises(Exception):
+            make()
+    else:
+        make()
+
+
+@pytest.mark.parametrize(
+    "mock_data_path,expected_written_entries",
+    [
+        ("test-fixtures/single-entry.json", 1),
+    ],
+)
+def test_provider_schema(helpers, mock_data_path, expected_written_entries, mocker):
+    workspace = helpers.provider_workspace_helper(name=nvd.Provider.name())
+    mock_data_path = helpers.local_dir(mock_data_path)
+
+    with open(mock_data_path) as f:
+        json_dict = json.load(f)
+
+    c = nvd.Config()
+    c.runtime.result_store = result.StoreStrategy.FLAT_FILE
+    p = nvd.Provider(root=workspace.root, config=c)
+    p.manager.api.cve = mocker.Mock(return_value=[json_dict])
+
+    p.update(None)
+
+    assert expected_written_entries == workspace.num_result_entries()
+    assert workspace.result_schemas_valid(require_entries=expected_written_entries > 0)
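
The first test pins the constructor guard across all three policies; the second is the relocated schema test. Both should run with a plain pytest invocation, e.g. pytest tests/unit/providers/nvd/test_nvd.py (the exact command depends on this repo's test setup).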
