refactor: Remove old network_trace and use a view
Remove the old tables `network_trace` and `time_mapping` and introduce a new
view `network_trace` that contains the combined information. I renamed some
columns in the view, since naming a column `from` in SQL is fraught with danger.

Also, instead of `at` we now use `recv_logical_time`, which in my mind is more
expressive of what this time represents.
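
For illustration, a query against the new view could look like the following
(a minimal sketch; the literal test_id and run_id values are placeholders):

-- Example query against the network_trace view, using the renamed columns
-- (sender/receiver instead of `from`/`to`, recv_logical_time instead of `at`).
SELECT message, sender, receiver, recv_logical_time, recv_simulated_time, dropped
FROM network_trace
WHERE test_id = 1
  AND run_id = 0;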
symbiont-daniel-gustafsson committed Jan 22, 2021
1 parent 3febbc3 commit eb79616
Showing 7 changed files with 63 additions and 64 deletions.
41 changes: 41 additions & 0 deletions src/db/migrations/1611237448_remove_network_trace.sql
@@ -0,0 +1,41 @@
-- +migrate Up
DROP TABLE IF EXISTS network_trace;
DROP TABLE IF EXISTS time_mapping;

CREATE VIEW IF NOT EXISTS network_trace AS
SELECT
json_extract(meta, '$.test-id') as test_id,
json_extract(meta, '$.run-id') as run_id,
json_extract(data, '$.message') as message,
json_extract(data, '$.args') as args,
json_extract(data, '$.from') as sender,
json_extract(data, '$.to') as receiver,
json_extract(data, '$.kind') as kind,
json_extract(data, '$.sent-logical-time') as sent_logical_time,
json_extract(data, '$.recv-logical-time') as recv_logical_time,
json_extract(data, '$.recv-simulated-time') as recv_simulated_time,
json_extract(data, '$.dropped') as dropped
FROM event_log
WHERE event like 'NetworkTrace';

-- +migrate Down

DROP VIEW IF EXISTS network_trace;
CREATE TABLE IF NOT EXISTS time_mapping (rowid INTEGER PRIMARY KEY) WITHOUT ROWID;

-- we need to create this table fully since we have migrations that actually do stuff with
-- this table...
CREATE TABLE IF NOT EXISTS network_trace (
test_id INTEGER NOT NULL,
run_id INTEGER NOT NULL,
id INTEGER NOT NULL,
message TEXT NOT NULL,
args JSON NOT NULL,
`from` TEXT NOT NULL,
`to` TEXT NOT NULL,
sent_logical_time INTEGER NOT NULL,
at INTEGER NOT NULL,
dropped INT2 NOT NULL,
PRIMARY KEY(test_id, run_id, id),
FOREIGN KEY(test_id) REFERENCES test(id),
FOREIGN KEY(run_id) REFERENCES run(id));
2 changes: 1 addition & 1 deletion src/debugger/default.nix
@@ -18,7 +18,7 @@ buildGoModule rec {
buildInputs = [ detsysLib ];
propagatedBuildInputs = [ plantuml ];

vendorSha256 = "0a6766499c6ahgsaq7b62rds2ygxwfmh28lbbf9629m8fsdkq5k5";
vendorSha256 = "19idzkfakvr58rs98ajsc9lnvrpjgn9z4h35ldgx5fdfja39nnzb";

buildFlagsArray =
[ "-ldflags=-X main.version=${lib.commitIdFromGitRepo ./../../.git}" ];
20 changes: 11 additions & 9 deletions src/debugger/internal/debugger.go
@@ -73,14 +73,16 @@ func GetNetworkTrace(testId lib.TestId, runId lib.RunId) []NetworkEvent {
db := lib.OpenDB()
defer db.Close()

rows, err := db.Query("SELECT message,args,`from`,`to`,dropped,at,simulated_time "+
"FROM network_trace "+
"LEFT JOIN time_mapping "+
"ON network_trace.test_id = time_mapping.test_id "+
"AND network_trace.run_id = time_mapping.run_id "+
"AND network_trace.AT = time_mapping.logical_time "+
"WHERE network_trace.test_id = ? "+
"AND network_trace.run_id = ?", testId.TestId, runId.RunId)
rows, err := db.Query(`SELECT message,
args,
sender,
receiver,
dropped,
recv_logical_time,
recv_simulated_time
FROM network_trace
WHERE test_id = ?
AND run_id = ?`, testId.TestId, runId.RunId)
if err != nil {
panic(err)
}
@@ -89,7 +91,7 @@ func GetNetworkTrace(testId lib.TestId, runId lib.RunId) []NetworkEvent {
var trace []NetworkEvent
for rows.Next() {
event := NetworkEvent{}
err := rows.Scan(&event.Message, &event.Args, &event.From, &event.To, &event.Dropped, &event.At, &event.Simulated)
err := rows.Scan(&event.Message, &event.Args, &event.From, &event.To, &event.Dropped, &event.At, (*lib.TimeFromString)(&event.Simulated))
if err != nil {
panic(err)
}
12 changes: 6 additions & 6 deletions src/ldfi/ldfi/__init__.py
@@ -76,11 +76,11 @@ def load_previous_faults(self, config: Config) -> List[List[Dict]]:

def load_potential_faults(self, config: Config) -> List[List[Dict]]:
potential_faults: List[List[Dict]] = [ [] for _ in range(len(config.run_ids)) ]
self.c.execute("""SELECT run_id,`from`,`to`,at,sent_logical_time FROM network_trace
self.c.execute("""SELECT run_id,sender,receiver,recv_logical_time,sent_logical_time FROM network_trace
WHERE test_id = %d
AND kind <> 'timer'
AND NOT (`from` LIKE 'client:%%')
AND NOT (`to` LIKE 'client:%%')
AND NOT (sender LIKE 'client:%%')
AND NOT (receiver LIKE 'client:%%')
ORDER BY run_id ASC""" % config.test_id)
i = 0
run_id = config.run_ids[0]
@@ -89,9 +89,9 @@ def load_potential_faults(self, config: Config) -> List[List[Dict]]:
run_id = row["run_id"]
i += 1
potential_faults[i].append(
{"from": row["from"],
"to": row["to"],
"at": int(row["at"]),
{"from": row["sender"],
"to": row["receiver"],
"at": int(row["recv_logical_time"]),
"sent_logical_time": int(row["sent_logical_time"])})

return potential_faults
6 changes: 3 additions & 3 deletions src/ldfi/tests/test_ldfi.py
@@ -54,9 +54,9 @@ def test_load_potential_faults(caplog):
test_id INT NOT NULL,
run_id INT NOT NULL,
kind TEXT NOT NULL,
`from` TEXT NOT NULL,
`to` TEXT NOT NULL,
at INT NOT NULL,
sender TEXT NOT NULL,
receiver TEXT NOT NULL,
recv_logical_time INT NOT NULL,
sent_logical_time INT NOT NULL)""")
storage.conn.commit()
config = ldfi.Config(1, [0, 1], 2, 0)
37 changes: 1 addition & 36 deletions src/scheduler/src/scheduler/db.clj
@@ -47,15 +47,6 @@
["SELECT MAX(id) as `run-id` FROM run WHERE test_id = ?" test-id]
{:return-keys true :builder-fn rs/as-unqualified-lower-maps}))

(defn append-history!
[test-id run-id kind event args process]
(jdbc/execute-one!
ds
["INSERT INTO history (test_id, run_id, id, kind, event, args, process)
VALUES (?, ?, (SELECT IFNULL(MAX(id), -1) + 1 FROM history WHERE run_id = ?), ?, ?, ?, ?)"
test-id run-id run-id (name kind) event args process]
{:return-keys true :builder-fn rs/as-unqualified-lower-maps}))

(comment
(setup-db "/tmp/test.sqlite3")
(destroy-db!)
@@ -67,24 +58,6 @@
(create-run! 0 123)
(append-history! 1 :invoke "a" "{\"id\": 1}" 0))

(defn append-trace!
[test-id run-id message args kind from to sent-logical-time at dropped?]
(jdbc/execute-one!
ds
["INSERT INTO network_trace (test_id, run_id, id, message, args, kind, `from`, `to`, sent_logical_time, at, dropped)
VALUES (?, ?, (SELECT IFNULL(MAX(id), -1) + 1 FROM network_trace WHERE test_id = ? AND run_id = ?), ?, ?, ?, ?, ?, ?, ?, ?)"
test-id run-id test-id run-id message args kind from to sent-logical-time at (if dropped? 1 0)]
{:return-keys true :builder-fn rs/as-unqualified-lower-maps}))

(defn append-time-mapping!
[test-id run-id logical-time simulated-time]
(jdbc/execute-one!
ds
["INSERT INTO time_mapping (test_id, run_id, logical_time, simulated_time)
VALUES (?, ?, ?, ?)"
test-id run-id logical-time simulated-time]
{:return-keys true :builder-fn rs/as-unqualified-lower-maps}))

(defn append-event!
[test-id run-id event data]
(jdbc/execute-one!
@@ -97,14 +70,6 @@
(json/write data)]
{:return-keys true :builder-fn rs/as-unqualified-lower-maps}))

;; Remove this when we no longer use the old events
(defn append-old-network-history-events!
[test-id run-id data]
(append-trace! test-id run-id (:message data) (json/write (:args data)) (:kind data) (:from data) (:to data) (:sent-logical-time data) (:recv-logical-time data) (:dropped data)))

(defn append-network-trace!
[test-id run-id data]
(append-event! test-id run-id "NetworkTrace" data)
;; This should be removed when everything has been refactored to new
;; events.
(append-old-network-history-events! test-id run-id data))
(append-event! test-id run-id "NetworkTrace" data))
9 changes: 0 additions & 9 deletions src/scheduler/src/scheduler/pure.clj
@@ -387,10 +387,6 @@
(db/append-network-trace! (:test-id data)
(:run-id data)
obj))
(db/append-time-mapping! (:test-id data)
(:run-id data)
(-> data' :logical-clock)
(-> data' :clock))
(if dropped?
(do
(log/debug :dropped? dropped? :clock (:clock data'))
@@ -421,11 +417,6 @@
(not (empty? client-responses)) (update :logical-clock inc)
true (remove-client-requests (map :to client-responses)))]
;; TODO(stevan): use seed to shuffle client-responses?
(if (not (empty? client-responses))
(db/append-time-mapping! (:test-id data'')
(:run-id data'')
(-> data'' :logical-clock)
(-> data'' :clock)))
(doseq [client-response client-responses]
(db/append-network-trace! (:test-id data)
(:run-id data)
