Skip to content

Commit

Permalink
Merge branch 'edge' into fix_RQA-3492
Browse files Browse the repository at this point in the history
  • Loading branch information
koji committed Nov 13, 2024
2 parents 8b68363 + 2f0ba2e commit a742c0d
Show file tree
Hide file tree
Showing 219 changed files with 6,045 additions and 2,019 deletions.
2 changes: 2 additions & 0 deletions abr-testing/abr_testing/automation/google_drive_tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,8 @@ def list_folder(self, delete: Any = False, folder: bool = False) -> Set[str]:
else "" # type: ignore
if self.parent_folder
else None,
supportsAllDrives=True,
includeItemsFromAllDrives=True,
pageSize=1000,
fields="nextPageToken, files(id, name, mimeType)",
pageToken=page_token,
Expand Down
28 changes: 24 additions & 4 deletions abr-testing/abr_testing/data_collection/abr_google_drive.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,12 +37,13 @@ def create_data_dictionary(
plate: str,
accuracy: Any,
hellma_plate_standards: List[Dict[str, Any]],
) -> Tuple[List[List[Any]], List[str], List[List[Any]], List[str]]:
) -> Tuple[List[List[Any]], List[str], List[List[Any]], List[str], List[List[Any]]]:
"""Pull data from run files and format into a dictionary."""
runs_and_robots: List[Any] = []
runs_and_lpc: List[Dict[str, Any]] = []
headers: List[str] = []
headers_lpc: List[str] = []
list_of_heights: List[List[Any]] = [[], [], [], [], [], [], [], []]
for filename in os.listdir(storage_directory):
file_path = os.path.join(storage_directory, filename)
if file_path.endswith(".json"):
Expand Down Expand Up @@ -120,6 +121,9 @@ def create_data_dictionary(
plate_reader_dict = read_robot_logs.plate_reader_commands(
file_results, hellma_plate_standards
)
list_of_heights = read_robot_logs.liquid_height_commands(
file_results, list_of_heights
)
notes = {"Note1": "", "Jira Link": issue_url}
plate_measure = {
"Plate Measured": plate,
Expand Down Expand Up @@ -155,7 +159,13 @@ def create_data_dictionary(
print(f"Number of runs read: {num_of_runs_read}")
transposed_runs_and_robots = list(map(list, zip(*runs_and_robots)))
transposed_runs_and_lpc = list(map(list, zip(*runs_and_lpc)))
return transposed_runs_and_robots, headers, transposed_runs_and_lpc, headers_lpc
return (
transposed_runs_and_robots,
headers,
transposed_runs_and_lpc,
headers_lpc,
list_of_heights,
)


def run(
Expand All @@ -173,7 +183,8 @@ def run(
credentials_path, google_sheet_name, 0
)
# Get run ids on google sheet
run_ids_on_gs = set(google_sheet.get_column(2))
run_ids_on_gs: Set[str] = set(google_sheet.get_column(2))

# Get robots on google sheet
# Uploads files that are not in google drive directory
google_drive.upload_missing_files(storage_directory)
Expand All @@ -191,6 +202,7 @@ def run(
headers,
transposed_runs_and_lpc,
headers_lpc,
list_of_heights,
) = create_data_dictionary(
missing_runs_from_gs,
storage_directory,
Expand All @@ -201,7 +213,15 @@ def run(
)
start_row = google_sheet.get_index_row() + 1
google_sheet.batch_update_cells(transposed_runs_and_robots, "A", start_row, "0")

# Record Liquid Heights Found
google_sheet_ldf = google_sheets_tool.google_sheet(
credentials_path, google_sheet_name, 2
)
google_sheet_ldf.get_row(1)
start_row_lhd = google_sheet_ldf.get_index_row() + 1
google_sheet_ldf.batch_update_cells(
list_of_heights, "A", start_row_lhd, "2075262446"
)
# Add LPC to google sheet
google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)
start_row_lpc = google_sheet_lpc.get_index_row() + 1
Expand Down
10 changes: 10 additions & 0 deletions abr-testing/abr_testing/data_collection/abr_robot_error.py
Original file line number Diff line number Diff line change
Expand Up @@ -602,6 +602,7 @@ def get_run_error_info_from_robot(
headers,
runs_and_lpc,
headers_lpc,
list_of_heights,
) = abr_google_drive.create_data_dictionary(
run_id,
error_folder_path,
Expand All @@ -614,6 +615,15 @@ def get_run_error_info_from_robot(
start_row = google_sheet.get_index_row() + 1
google_sheet.batch_update_cells(runs_and_robots, "A", start_row, "0")
print("Wrote run to ABR-run-data")
# Record Liquid Heights Found
google_sheet_ldf = google_sheets_tool.google_sheet(
credentials_path, google_sheet_name, 4
)
start_row_lhd = google_sheet_ldf.get_index_row() + 1
google_sheet_ldf.batch_update_cells(
list_of_heights, "A", start_row_lhd, "1795535088"
)
print("wrote liquid heights found.")
# Add LPC to google sheet
google_sheet_lpc = google_sheets_tool.google_sheet(
credentials_path, "ABR-LPC", 0
Expand Down
36 changes: 36 additions & 0 deletions abr-testing/abr_testing/data_collection/read_robot_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,42 @@ def instrument_commands(
return pipette_dict


def liquid_height_commands(
    file_results: Dict[str, Any], all_heights_list: List[List[Any]]
) -> List[List[Any]]:
    """Record found liquid heights during a protocol.

    Scans the run's command list for ``comment`` commands whose message
    embeds a ``result: {...}`` mapping of (labware, well-location) -> height,
    parses each entry, and appends one value per column to the eight parallel
    lists in *all_heights_list* (robot, run id, timestamp, labware type,
    labware name, slot, well, height). Comments that do not match the
    expected shape are skipped.

    :param file_results: Parsed run-log JSON for a single run.
    :param all_heights_list: Eight parallel column lists to append to.
    :return: The same *all_heights_list*, extended in place.
    """
    robot_name = file_results.get("robot_name", "")
    run = file_results.get("run_id", "")
    for cmd in file_results.get("commands", ""):
        if cmd["commandType"] != "comment":
            continue
        message = cmd["params"].get("message", "")
        try:
            # Keep everything after "result: {" and re-quote it so every
            # entry parses the same way regardless of position.
            payload = "'" + message.split("result: {")[1] + "'"
            pieces = payload.split(", (")
            finished_at = cmd["completedAt"]
            for piece in pieces:
                found_height = float(
                    piece.split(": ")[1].split("'")[0].split("}")[0]
                )
                labware_kind = str(
                    piece.split(",")[0].replace("'", "").replace("(", "")
                )
                well = str(piece.split(", ")[1].split(" ")[0])
                slot = str(piece.split("slot ")[1].split(")")[0])
                labware = str(piece.split("of ")[1].split(" on")[0])
                row = (
                    robot_name,
                    run,
                    finished_at,
                    labware_kind,
                    labware,
                    slot,
                    well,
                    found_height,
                )
                for idx, value in enumerate(row):
                    all_heights_list[idx].append(value)
        except (IndexError, ValueError):
            # Comment message did not contain a parseable height mapping.
            continue
    return all_heights_list


def plate_reader_commands(
file_results: Dict[str, Any], hellma_plate_standards: List[Dict[str, Any]]
) -> Dict[str, object]:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
header,
runs_and_lpc,
lpc_headers,
list_of_heights,
) = abr_google_drive.create_data_dictionary(
run_ids_in_storage,
run_log_file_path,
Expand All @@ -42,6 +43,7 @@
"",
hellma_plate_standards=file_values,
)
print("list_of_heights not recorded.")
transposed_list = list(zip(*runs_and_robots))
# Adds Run to local csv
sheet_location = os.path.join(run_log_file_path, "saved_data.csv")
Expand Down
110 changes: 92 additions & 18 deletions abr-testing/abr_testing/protocol_simulation/abr_sim_check.py
Original file line number Diff line number Diff line change
@@ -1,34 +1,108 @@
"""Check ABR Protocols Simulate Successfully."""
from abr_testing.protocol_simulation import simulation_metrics
import os
import traceback
from pathlib import Path
from typing import Dict, List, Tuple, Union
import traceback


def run(
    file_dict: Dict[str, Dict[str, Union[str, Path]]], labware_defs: List[Path]
) -> None:
    """Simulate protocol and raise errors.

    Iterates over the protocol mapping produced by ``get_files`` and runs
    each protocol through ``simulation_metrics.main``, passing the protocol's
    csv parameter file (when one was mapped) and any custom labware
    definitions. A failing simulation is printed, not raised, so one broken
    protocol does not stop the batch.

    :param file_dict: Mapping of protocol file name -> {"path": ..., "csv": ...}.
    :param labware_defs: Custom labware definition files to pass along.
    """
    for file in file_dict:
        path = file_dict[file]["path"]
        # `.get` replaces the previous try/except KeyError lookup; a missing
        # "csv" entry yields "" exactly as before.
        csv_params = str(file_dict[file].get("csv", ""))
        try:
            print(f"Simulating {file}")
            simulation_metrics.main(
                protocol_file_path=Path(path),
                save=False,
                parameters=csv_params,
                extra_files=labware_defs,
            )
        except Exception as e:
            # Best-effort batch: report the failure and move on.
            traceback.print_exc()
            print(str(e))
            print("\n")


def search(seq: str, dictionary: dict) -> str:
    """Return the first key whose leading underscore-prefix equals *seq*.

    A key matches when the text before its first "_" is exactly *seq*
    (a key with no "_" matches when it equals *seq*). Returns "" when no
    key matches.
    """
    return next(
        (key for key in dictionary if key.split("_")[0] == seq),
        "",
    )


def get_files() -> Tuple[Dict[str, Dict[str, Union[str, Path]]], List[Path]]:
    """Map protocols with corresponding csv files.

    Walks the module-level ``root_dir`` and collects:
      * ``active_protocols``/*.py  -> entries of the returned protocol dict
      * ``csv_parameters``/*.csv   -> attached to the matching protocol
        (matched by the file name's leading "<prefix>_") under key "csv"
      * ``custom_labware``/*.json  -> returned as a list of labware paths

    Files named in the module-level ``exclude`` list are skipped.

    :return: (protocol mapping, custom labware definition paths)
    """
    file_dict: Dict[str, Dict[str, Union[str, Path]]] = {}
    labware_defs: List[Path] = []
    for root, directories, _ in os.walk(root_dir):
        for directory in directories:
            # The directory lives under `root` (possibly nested below
            # root_dir), so all paths are built from active_dir — joining
            # against root_dir broke for nested layouts.
            active_dir = os.path.join(root, directory)
            if directory == "active_protocols":
                # Iterate over files in `active_protocols`
                for file in os.listdir(active_dir):
                    if file.endswith(".py") and file not in exclude:
                        file_dict[file] = {
                            "path": Path(
                                os.path.abspath(os.path.join(active_dir, file))
                            )
                        }
            elif directory == "csv_parameters":
                # Iterate over files in `csv_parameters`
                for file in os.listdir(active_dir):
                    if file.endswith(".csv") and file not in exclude:
                        search_str = file.split("_")[0]
                        protocol = search(search_str, file_dict)
                        if protocol:
                            file_dict[protocol]["csv"] = str(
                                os.path.abspath(os.path.join(active_dir, file))
                            )
            elif directory == "custom_labware":
                # Iterate over files in `custom_labware`
                for file in os.listdir(active_dir):
                    if file.endswith(".json") and file not in exclude:
                        labware_defs.append(
                            Path(os.path.abspath(os.path.join(active_dir, file)))
                        )
    return (file_dict, labware_defs)


if __name__ == "__main__":
    # Directory to search. (`global` is a no-op at module scope, so plain
    # assignments are used — these are the module globals get_files reads.)
    root_dir = "abr_testing/protocols"
    exclude = [
        "__init__.py",
        "helpers.py",
        "shared_vars_and_funcs.py",
    ]
    print("Simulating Protocols")
    file_dict, labware_defs = get_files()
    run(file_dict, labware_defs)
Loading

0 comments on commit a742c0d

Please sign in to comment.