Merge pull request #5364 from wazuh/enhancement/6203-adapt-workload-benchmar-plots-to-api-format

Fix Workload Benchmark Plots
juliamagan authored Jun 4, 2024
2 parents 90481dc + a218d4b commit b19b94a
Showing 10 changed files with 228 additions and 86 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -26,6 +26,7 @@ All notable changes to this project will be documented in this file.

### Changed

+- Fix workload benchmark plots ([#5364](https://github.com/wazuh/wazuh-qa/pull/5364)) \- (Framework)
- Increase feed update time in Vulnerability Detection E2E tests to 10h ([#5424](https://github.com/wazuh/wazuh-qa/pull/5424)) \- (Tests)
- Migrate E2E Vulnerability Detector test packages to S3 repository ([#5376](https://github.com/wazuh/wazuh-qa/pull/5376)) \- (Framework)
- Include "Agent key already in use" in the E2E Vulnerability Detection expected error list. ([#5409](https://github.com/wazuh/wazuh-qa/pull/5409)) \- (Tests)
3 changes: 2 additions & 1 deletion deps/wazuh_testing/setup.py
@@ -33,7 +33,8 @@
'data/vulnerability_parsed_packages.json',
'tools/migration_tool/delta_schema.json',
'end_to_end/vulnerability_detector_packages/vuln_packages.json',
- 'tools/migration_tool/CVE_JSON_5.0_bundled.json'
+ 'tools/migration_tool/CVE_JSON_5.0_bundled.json',
+ 'data/data_visualizer/*'
]

scripts_list = [
Expand Down
@@ -0,0 +1,14 @@
{
"number_of_events_and_messages": {
"title": "Number of events and messages",
"columns": [
"Number of generated events", "Number of messages", "Number of events buffered"
]
},
"ack_and_keepalive": {
"title": "Last ACK and KeepAlive",
"columns": [
"Last ACK", "Last Keepalive"
]
}
}
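Each of the new JSON files packaged under `data/data_visualizer/` follows this shape: every top-level key names one plot, pairing a human-readable `title` with the list of CSV `columns` to draw together. Below is a minimal sketch of how such a file could drive plotting, assuming pandas-readable CSVs; the `plot_groups` helper is an illustration of the format, not the actual `DataVisualizer` implementation.

```python
# Illustrative only: shows how a column-group JSON like the one above could
# select columns from a statistics CSV and render one figure per group.
import json
import os

import pandas as pd
import matplotlib.pyplot as plt


def plot_groups(csv_path, config_path, destination):
    """Draw one figure per column group defined in the JSON config."""
    data = pd.read_csv(csv_path)
    with open(config_path) as f:
        groups = json.load(f)

    for name, group in groups.items():
        # Tolerate columns absent from this particular CSV instead of
        # raising a KeyError, since not every run emits every metric.
        columns = [c for c in group['columns'] if c in data.columns]
        if not columns:
            continue
        ax = data[columns].plot(title=group['title'])
        ax.figure.savefig(os.path.join(destination, f'{name}.png'))
        plt.close(ax.figure)
```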
@@ -0,0 +1,40 @@
{
"decoded_events": {
"title": "Events decoded per queue",
"columns": [
"Decoded from azure", "Decoded from ciscat", "Decoded from command", "Decoded from docker",
"Decoded from logcollector eventchannel", "Decoded from logcollector eventlog",
"Decoded from logcollector macos", "Decoded from logcollector others", "Decoded from osquery",
"Decoded from rootcheck", "Decoded from sca", "Decoded from syscheck", "Decoded from syscollector",
"Decoded from vulnerability", "Decoded from agentd", "Decoded from dbsync", "Decoded from monitor",
"Decoded from remote"
]
},
"dropped_events": {
"title": "Events dropped per queue",
"columns": [
"Dropped from azure", "Dropped from ciscat", "Dropped from command", "Dropped from docker",
"Dropped from logcollector eventchannel", "Dropped from logcollector eventlog",
"Dropped from logcollector macos", "Dropped from logcollector others", "Dropped from osquery",
"Dropped from rootcheck", "Dropped from sca", "Dropped from syscheck", "Dropped from syscollector",
"Dropped from vulnerability", "Dropped from agentd", "Dropped from dbsync", "Dropped from monitor",
"Dropped from remote"
]
},
"events_decoded_per_second": {
"title": "Events decoded per second",
"columns": [
"EDPS from azure", "EDPS from ciscat", "EDPS from command", "EDPS from docker",
"EDPS from logcollector eventchannel", "EDPS from logcollector eventlog", "EDPS from logcollector macos",
"EDPS from logcollector others", "EDPS from osquery", "EDPS from rootcheck", "EDPS from sca",
"EDPS from syscheck", "EDPS from syscollector", "EDPS from vulnerability", "EDPS from agentd",
"EDPS from dbsync", "EDPS from monitor", "EDPS from remote"
]
},
"alerts_info": {
"title": "Alerts and events info.",
"columns": [
"Events processed", "Events received", "Written alerts", "Written firewall", "Written fts"
]
}
}
@@ -0,0 +1,10 @@
{
"binary_test": {
"title": "Usage during the test",
"columns": [
"PID",
"Daemon",
"Version"
]
}
}
@@ -0,0 +1,20 @@
{
"events": {
"title": "Events generated",
"columns": [
"Events"
]
},
"bytes_sent": {
"title": "Bytes sent",
"columns": [
"Bytes"
]
},
"drops": {
"title": "Events dropped",
"columns": [
"Target Drops"
]
}
}
@@ -0,0 +1,27 @@
{
"events_info": {
"title": "Events sent and count",
"columns": [
"Events count", "Control messages", "Discarded messages", "Queue usage",
"Metrics-Bytes sent", "Dequeued messages"
]
},
"queue_size": {
"title": "Queue status",
"columns": [
"Queue size", "Queue usage"
]
},
"tcp_sessions": {
"title": "TCP sessions",
"columns": [
"TCP sessions"
]
},
"recv_bytes": {
"title": "Bytes received",
"columns": [
"Metrics-Bytes received"
]
}
}
@@ -0,0 +1,65 @@
{
"database_queries_counts": {
"title": "Database queries counts",
"columns": [
"Received queries", "Agent queries", "Global queries"
]
},
"agent_queries_breakdown": {
"title": "Agent queries breakdown",
"columns": [
"db-begin", "db-close", "db-commit", "db-remove", "db-sql", "db-vacuum", "db-get_fragmentation"
]
},
"agent_tables_breakdown": {
"title": "Agent tables breakdown",
"columns": [
"Table CisCat", "Table Rootcheck", "Table SCA", "Table dbsync", "Table Syscheck",
"Table Syscheck file", "Table Syscheck registry", "Table Syscheck registry_key",
"Table Syscheck registry_value", "Table Syscollector hotfixes", "Table Syscollector hwinfo",
"Table Syscollector network_address", "Table Syscollector network_iface",
"Table Syscollector network_protocol", "Table Syscollector os_info",
"Table Syscollector packages", "Table Syscollector ports", "Table Syscollector processes",
"Table Vulnerability CVEs"
]
},
"global_queries_breakdown": {
"title": "Global queries breakdown",
"columns": [
"db-backup", "db-sql", "db-vacuum", "db-get_fragmentation"
]
},
"global_agent_queries": {
"title": "Global agent queries",
"columns": [
"agent-delete-agent", "agent-disconnect-agents", "agent-find-agent", "agent-get-agent-info",
"agent-get-agents-by-connection-status", "agent-get-all-agents", "agent-get-distinct-groups",
"agent-get-groups-integrity", "agent-insert-agent", "agent-reset-agents-connection",
"agent-select-agent-group", "agent-select-agent-name", "agent-set-agent-groups",
"agent-sync-agent-groups-get", "agent-sync-agent-info-get", "agent-sync-agent-info-set",
"agent-update-agent-data", "agent-update-agent-name", "agent-update-connection-status",
"agent-update-status-code", "agent-update-keepalive"
]
},
"global_group_queries": {
"title": "Global group queries",
"columns": [
"belongs-get-group-agents", "belongs-select-group-belong", "group-delete-group",
"group-find-group", "group-insert-agent-group", "group-select-groups"
]
},
"task_queries_breakdown": {
"title": "Task queries breakdown",
"columns": [
"tasks-delete old task", "tasks-set timeout", "tasks-upgrade", "tasks-upgrade cancel",
"tasks-upgrade custom", "tasks-upgrade get status", "tasks-upgrade results",
"tasks-upgrade update status"
]
},
"queries_time_metrics": {
"title": "Queries time metrics",
"columns": [
"Total Execution Time", "Agent ExecTime", "Global Queries ExecTime"
]
}
}
@@ -12,12 +12,14 @@ def get_script_arguments():
parser.add_argument('-s', '--sources', dest='csv_list', required=True, type=str, nargs='+', action='store',
help='Paths to the CSV files separated by whitespace.')
parser.add_argument('-t', '--target', dest='visualization_target', default='binary',
- choices=['binary', 'analysis', 'remote', 'agent', 'logcollector', 'cluster', 'api'],
+ choices=['binary', 'analysis', 'remote', 'agent', 'logcollector', 'cluster', 'api', 'wazuhdb'],
help='Generate data visualizations for a specific target. Default binary.')
parser.add_argument('-d', '--destination', dest='destination', default=gettempdir(),
help=f'Directory to store the images. Default {gettempdir()}')
parser.add_argument('-n', '--name', dest='name', default=None,
help=f'Base name for the images. Default {None}.')
+ parser.add_argument('-c', '--columns', dest='columns', default=None,
+                     help=f'Path to Json with Columns to Plot. Default {None}.')

return parser.parse_args()

@@ -29,7 +31,8 @@ def main():
if not exists(destination):
makedirs(destination)
dv = DataVisualizer(dataframes=options.csv_list, target=options.visualization_target,
- compare=False, store_path=options.destination, base_name=options.name)
+ compare=False, store_path=options.destination, base_name=options.name,
+ columns_path=options.columns)
dv.plot()


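Together with the `data/data_visualizer/*` packaging entry in `setup.py`, these script changes add a `wazuhdb` visualization target and a `-c`/`--columns` option, so a JSON file in the format shown above can override the default column groups. A hypothetical invocation (the script name and file paths are placeholders, not taken from this diff): `python data_visualizations.py -s wazuhdb_stats.csv -t wazuhdb -c custom_columns.json -d /tmp/plots`.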