diff --git a/hardware-testing/hardware_testing/abr_tools/abr_command_data.py b/hardware-testing/hardware_testing/abr_tools/abr_command_data.py
index 7616922cfdb..99fa68a0a51 100644
--- a/hardware-testing/hardware_testing/abr_tools/abr_command_data.py
+++ b/hardware-testing/hardware_testing/abr_tools/abr_command_data.py
@@ -5,13 +5,7 @@
 import sys
 import json
 from datetime import datetime, timedelta
-from .abr_run_logs import get_run_ids_from_storage, get_unseen_run_ids
-from .abr_read_logs import (
-    create_abr_data_sheet,
-    read_abr_data_sheet,
-    get_error_info,
-    write_to_abr_sheet,
-)
+from . import read_robot_logs


 def set_up_data_sheet(
@@ -26,7 +20,9 @@ def set_up_data_sheet(
     except FileNotFoundError:
         print("No google sheets credentials. Add credentials to storage notebook.")
     local_file_str = google_sheet_name + "-" + commandTypes
-    csv_name = create_abr_data_sheet(storage_directory, local_file_str, headers)
+    csv_name = read_robot_logs.create_abr_data_sheet(
+        storage_directory, local_file_str, headers
+    )
     return google_sheet, csv_name


@@ -309,7 +305,7 @@ def command_data_dictionary(
             error_code,
             error_instrument,
             error_level,
-        ) = get_error_info(file_results)
+        ) = read_robot_logs.get_error_info(file_results)
         all_pipette_commands_list = pipette_commands(file_results)
         all_module_commands_list = module_commands(file_results)

@@ -491,33 +487,33 @@ def command_data_dictionary(
     google_sheet_movement, csv_movement = set_up_data_sheet(
         3, google_sheet_name, "Movement", movement_headers
     )
-    runs_from_storage = get_run_ids_from_storage(storage_directory)
+    runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
     i = 0
     n = 0
     m = 0
     p = 0
-    runs_in_sheet = read_abr_data_sheet(
+    runs_in_sheet = read_robot_logs.read_abr_data_sheet(
         storage_directory, csv_instruments, google_sheet_instruments
     )
-    runs_to_save = get_unseen_run_ids(runs_from_storage, runs_in_sheet)
+    runs_to_save = read_robot_logs.get_unseen_run_ids(runs_from_storage, runs_in_sheet)
     (
         runs_and_instrument_commands,
         runs_and_module_commands,
         runs_and_setup_commands,
         runs_and_move_commands,
     ) = command_data_dictionary(runs_to_save, storage_directory, i, m, n, p)
-    write_to_abr_sheet(
+    read_robot_logs.write_to_abr_sheet(
         runs_and_instrument_commands,
         storage_directory,
         csv_instruments,
         google_sheet_instruments,
     )
-    write_to_abr_sheet(
+    read_robot_logs.write_to_abr_sheet(
         runs_and_module_commands, storage_directory, csv_modules, google_sheet_modules
     )
-    write_to_abr_sheet(
+    read_robot_logs.write_to_abr_sheet(
         runs_and_setup_commands, storage_directory, csv_setup, google_sheet_setup
     )
-    write_to_abr_sheet(
+    read_robot_logs.write_to_abr_sheet(
         runs_and_move_commands, storage_directory, csv_movement, google_sheet_movement
     )
diff --git a/hardware-testing/hardware_testing/abr_tools/abr_read_logs.py b/hardware-testing/hardware_testing/abr_tools/abr_read_logs.py
index 9c685e9e223..a0b43fad2c0 100644
--- a/hardware-testing/hardware_testing/abr_tools/abr_read_logs.py
+++ b/hardware-testing/hardware_testing/abr_tools/abr_read_logs.py
@@ -1,14 +1,11 @@
 """Read ABR run logs and save data to ABR testing csv and google sheet."""
-from .abr_run_logs import get_run_ids_from_storage, get_unseen_run_ids
-from .error_levels import ERROR_LEVELS_PATH
-from typing import Set, Dict, Tuple, Any, List
+from typing import Set, Dict, Any
 import argparse
 import os
-import csv
 import json
 import sys
 from datetime import datetime, timedelta
-import time as t
+from . import read_robot_logs


 def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
@@ -30,59 +27,6 @@ def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
     return all_modules


-def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, str]:
-    """Determines if errors exist in run log and documents them."""
-    error_levels = []
-    # Read error levels file
-    with open(ERROR_LEVELS_PATH, "r") as error_file:
-        error_levels = list(csv.reader(error_file))
-    num_of_errors = len(file_results["errors"])
-    if num_of_errors == 0:
-        error_type = ""
-        error_code = ""
-        error_instrument = ""
-        error_level = ""
-        return 0, error_type, error_code, error_instrument, error_level
-    commands_of_run: List[Dict[str, Any]] = file_results.get("commands", [])
-    run_command_error: Dict[str, Any] = commands_of_run[-1]
-    error_str: int = len(run_command_error.get("error", ""))
-    if error_str > 1:
-        error_type = run_command_error["error"].get("errorType", "")
-        error_code = run_command_error["error"].get("errorCode", "")
-        try:
-            # Instrument Error
-            error_instrument = run_command_error["error"]["errorInfo"]["node"]
-        except KeyError:
-            # Module Error
-            error_instrument = run_command_error["error"]["errorInfo"].get("port", "")
-    else:
-        error_type = file_results["errors"][0]["errorType"]
-        print(error_type)
-        error_code = file_results["errors"][0]["errorCode"]
-        error_instrument = file_results["errors"][0]["detail"]
-    for error in error_levels:
-        code_error = error[1]
-        if code_error == error_code:
-            error_level = error[4]
-
-    return num_of_errors, error_type, error_code, error_instrument, error_level
-
-
-def create_abr_data_sheet(storage_directory: str, file_name: str, headers: List) -> str:
-    """Creates csv file to log ABR data."""
-    file_name_csv = file_name + ".csv"
-    print(file_name_csv)
-    sheet_location = os.path.join(storage_directory, file_name_csv)
-    if os.path.exists(sheet_location):
-        print(f"File {sheet_location} located. Not overwriting.")
-    else:
-        with open(sheet_location, "w") as csvfile:
-            writer = csv.DictWriter(csvfile, fieldnames=headers)
-            writer.writeheader()
-        print(f"Created file. Located: {sheet_location}.")
-    return file_name_csv
-
-
 def create_data_dictionary(
     runs_to_save: Set[str], storage_directory: str
 ) -> Dict[Any, Dict[str, Any]]:
@@ -109,7 +53,7 @@ def create_data_dictionary(
             error_code,
             error_instrument,
             error_level,
-        ) = get_error_info(file_results)
+        ) = read_robot_logs.get_error_info(file_results)
         all_modules = get_modules(file_results)

         start_time_str, complete_time_str, start_date, run_time_min = (
@@ -162,52 +106,6 @@ def create_data_dictionary(
     return runs_and_robots


-def read_abr_data_sheet(
-    storage_directory: str, file_name_csv: str, google_sheet: Any
-) -> Set[str]:
-    """Reads current run sheet to determine what new run data should be added."""
-    print(file_name_csv)
-    sheet_location = os.path.join(storage_directory, file_name_csv)
-    runs_in_sheet = set()
-    # Read the CSV file
-    with open(sheet_location, "r") as csv_start:
-        data = csv.DictReader(csv_start)
-        headers = data.fieldnames
-        if headers is not None:
-            for row in data:
-                run_id = row[headers[1]]
-                runs_in_sheet.add(run_id)
-        print(f"There are {str(len(runs_in_sheet))} runs documented in the ABR sheet.")
-    # Read Google Sheet
-    if google_sheet.creditals.access_token_expired:
-        google_sheet.gc.login()
-    google_sheet.write_header(headers)
-    google_sheet.update_row_index()
-    return runs_in_sheet
-
-
-def write_to_abr_sheet(
-    runs_and_robots: Dict[Any, Dict[str, Any]],
-    storage_directory: str,
-    file_name_csv: str,
-    google_sheet: Any,
-) -> None:
-    """Write dict of data to abr csv."""
-    sheet_location = os.path.join(storage_directory, file_name_csv)
-    list_of_runs = list(runs_and_robots.keys())
-    with open(sheet_location, "a", newline="") as f:
-        writer = csv.writer(f)
-        for run in range(len(list_of_runs)):
-            row = runs_and_robots[list_of_runs[run]].values()
-            row_list = list(row)
-            writer.writerow(row_list)
-            if google_sheet.creditals.access_token_expired:
-                google_sheet.gc.login()
-            google_sheet.update_row_index()
-            google_sheet.write_to_row(row_list)
-            t.sleep(3)
-
-
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Pulls run logs from ABR robots.")
     parser.add_argument(
@@ -273,9 +171,15 @@ def write_to_abr_sheet(
         "magneticBlockV1",
         "thermocyclerModuleV2",
     ]
-    runs_from_storage = get_run_ids_from_storage(storage_directory)
-    file_name_csv = create_abr_data_sheet(storage_directory, file_name, headers)
-    runs_in_sheet = read_abr_data_sheet(storage_directory, file_name_csv, google_sheet)
-    runs_to_save = get_unseen_run_ids(runs_from_storage, runs_in_sheet)
+    runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
+    file_name_csv = read_robot_logs.create_abr_data_sheet(
+        storage_directory, file_name, headers
+    )
+    runs_in_sheet = read_robot_logs.read_abr_data_sheet(
+        storage_directory, file_name_csv, google_sheet
+    )
+    runs_to_save = read_robot_logs.get_unseen_run_ids(runs_from_storage, runs_in_sheet)
     runs_and_robots = create_data_dictionary(runs_to_save, storage_directory)
-    write_to_abr_sheet(runs_and_robots, storage_directory, file_name_csv, google_sheet)
+    read_robot_logs.write_to_abr_sheet(
+        runs_and_robots, storage_directory, file_name_csv, google_sheet
+    )
diff --git a/hardware-testing/hardware_testing/abr_tools/abr_run_logs.py b/hardware-testing/hardware_testing/abr_tools/abr_run_logs.py
index c73df9e20bd..c9362571f2b 100644
--- a/hardware-testing/hardware_testing/abr_tools/abr_run_logs.py
+++ b/hardware-testing/hardware_testing/abr_tools/abr_run_logs.py
@@ -5,28 +5,7 @@
 import json
 import traceback
 import requests
-
-
-def get_run_ids_from_storage(storage_directory: str) -> Set[str]:
-    """Read all files in storage directory, extracts run id, adds to set."""
-    os.makedirs(storage_directory, exist_ok=True)
-    list_of_files = os.listdir(storage_directory)
-    run_ids = set()
-    for this_file in list_of_files:
-        read_file = os.path.join(storage_directory, this_file)
-        if read_file.endswith(".json"):
-            file_results = json.load(open(read_file))
-            run_id = file_results.get("run_id", "")
-            if len(run_id) > 0:
-                run_ids.add(run_id)
-    return run_ids
-
-
-def get_unseen_run_ids(runs: Set[str], runs_from_storage: Set[str]) -> Set[str]:
-    """Subtracts runs from storage from current runs being read."""
-    runs_to_save = runs - runs_from_storage
-    print(f"There are {str(len(runs_to_save))} new run(s) to save.")
-    return runs_to_save
+from . import read_robot_logs


 def get_run_ids_from_robot(ip: str) -> Set[str]:
@@ -116,11 +95,11 @@ def get_all_run_logs(storage_directory: str) -> None:
         ip_address_list = ip_file["ip_address_list"]
     print(ip_address_list)

-    runs_from_storage = get_run_ids_from_storage(storage_directory)
+    runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
     for ip in ip_address_list:
         try:
             runs = get_run_ids_from_robot(ip)
-            runs_to_save = get_unseen_run_ids(runs, runs_from_storage)
+            runs_to_save = read_robot_logs.get_unseen_run_ids(runs, runs_from_storage)
             save_runs(runs_to_save, ip, storage_directory)
         except Exception:
             print(f"Failed to read IP address: {ip}.")
diff --git a/hardware-testing/hardware_testing/abr_tools/abr_scale.py b/hardware-testing/hardware_testing/abr_tools/abr_scale.py
new file mode 100644
index 00000000000..b9d5c35715f
--- /dev/null
+++ b/hardware-testing/hardware_testing/abr_tools/abr_scale.py
@@ -0,0 +1,122 @@
+"""ABR Scale Reader."""
+import os
+import sys
+import datetime
+from hardware_testing.drivers import find_port, list_ports_and_select
+from hardware_testing.drivers.radwag import RadwagScale
+from typing import Any, List
+import argparse
+import csv
+from . import read_robot_logs
+
+
+def write_to_sheets(file_name_csv: str, google_sheet: Any, row_list: List) -> None:
+    """Write list to google sheet and csv."""
+    sheet_location = os.path.join(storage_directory, file_name_csv)
+    with open(sheet_location, "a", newline="") as f:
+        writer = csv.writer(f)
+        writer.writerow(row_list)
+        print(f"Written {row_list} point to {file_name_csv}")
+    # Read Google Sheet
+    if google_sheet.creditals.access_token_expired:
+        google_sheet.gc.login()
+    google_sheet.write_header(headers)
+    google_sheet.update_row_index()
+    google_sheet.write_to_row(row_list)
+    print(f"Written {row_list} to google sheet.")
+
+
+if __name__ == "__main__":
+    # Adds Arguments
+    parser = argparse.ArgumentParser(description="Record stable mass for labware.")
+    parser.add_argument(
+        "storage_directory",
+        metavar="STORAGE_DIRECTORY",
+        type=str,
+        nargs=1,
+        help="Path to long term storage directory for scale .csvs.",
+    )
+    parser.add_argument(
+        "file_name",
+        metavar="FILE_NAME",
+        type=str,
+        nargs=1,
+        help="Name of google sheet and local csv to save data to.",
+    )
+    parser.add_argument("robot", metavar="ROBOT", type=str, nargs=1, help="Robot name.")
+    parser.add_argument(
+        "labware_name",
+        metavar="LABWARE_NAME",
+        type=str,
+        nargs=1,
+        help="Name of labware.",
+    )
+    parser.add_argument(
+        "protocol_step",
+        metavar="PROTOCOL_STEP",
+        type=str,
+        nargs=1,
+        help="1 for empty plate, 2 for filled plate, 3 for end of protocol.",
+    )
+    args = parser.parse_args()
+    robot = args.robot[0]
+    labware = args.labware_name[0]
+    protocol_step = args.protocol_step[0]
+    storage_directory = args.storage_directory[0]
+    file_name = args.file_name[0]
+    file_name_csv = file_name + ".csv"
+    # find port using known VID:PID, then connect
+    vid, pid = RadwagScale.vid_pid()
+    try:
+        scale = RadwagScale.create(port=find_port(vid=vid, pid=pid))
+    except RuntimeError:
+        device = list_ports_and_select()
+        scale = RadwagScale.create(device)
+    scale.connect()
+    grams = 0.0
+    is_stable = False
+    # Set up csv sheet
+    headers = ["Robot", "Date", "Timestamp", "Labware", "Mass (g)", "Measurement Step"]
+    all_data_csv = read_robot_logs.create_abr_data_sheet(
+        storage_directory, file_name, headers
+    )
+    # Set up google sheet
+    try:
+        sys.path.insert(0, storage_directory)
+        import google_sheets_tool  # type: ignore[import]
+
+        credentials_path = os.path.join(storage_directory, "credentials.json")
+    except ImportError:
+        raise ImportError(
+            "Check for google_sheets_tool.py and credentials.json in storage directory."
+        )
+    try:
+        google_sheet = google_sheets_tool.google_sheet(
+            credentials_path, file_name, tab_number=0
+        )
+        print("Connected to google sheet.")
+    except FileNotFoundError:
+        print("No google sheets credentials. Add credentials to storage notebook.")
+    # Scale Loop
+    break_all = False
+    while is_stable is False:
+        grams, is_stable = scale.read_mass()
+        print(f"Scale reading: grams={grams}, is_stable={is_stable}")
+        time_now = datetime.datetime.now()
+        date = str(time_now.date())
+        row = [robot, date, str(time_now), labware, grams, protocol_step]
+        row_list = list(row)
+        while is_stable is True:
+            print("is stable")
+            write_to_sheets(file_name_csv, google_sheet, row_list)
+            is_stable = False
+            y_or_no = input("Do you want to weigh another sample? (Y/N): ")
+            if y_or_no == "Y":
+                # Uses same storage directory and file.
+                robot = input("Robot: ")
+                labware = input("Labware: ")
+                protocol_step = input("Measurement Step (1,2,3): ")
+            elif y_or_no == "N":
+                break_all = True
+        if break_all:
+            break
diff --git a/hardware-testing/hardware_testing/abr_tools/read_robot_logs.py b/hardware-testing/hardware_testing/abr_tools/read_robot_logs.py
new file mode 100644
index 00000000000..8f28d392140
--- /dev/null
+++ b/hardware-testing/hardware_testing/abr_tools/read_robot_logs.py
@@ -0,0 +1,133 @@
+"""ABR Read Robot Logs.
+
+This library is downloading logs from robots, extracting wanted information,
+and uploading to a google sheet using credentials and google_sheets_tools module
+saved in a local directory.
+"""
+import csv
+import os
+from .error_levels import ERROR_LEVELS_PATH
+from typing import List, Dict, Any, Tuple, Set
+import time as t
+import json
+
+
+def create_abr_data_sheet(storage_directory: str, file_name: str, headers: List) -> str:
+    """Creates csv file to log ABR data."""
+    file_name_csv = file_name + ".csv"
+    print(file_name_csv)
+    sheet_location = os.path.join(storage_directory, file_name_csv)
+    if os.path.exists(sheet_location):
+        print(f"File {sheet_location} located. Not overwriting.")
+    else:
+        with open(sheet_location, "w") as csvfile:
+            writer = csv.DictWriter(csvfile, fieldnames=headers)
+            writer.writeheader()
+        print(f"Created file. Located: {sheet_location}.")
+    return file_name_csv
+
+
+def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, str]:
+    """Determines if errors exist in run log and documents them."""
+    error_levels = []
+    # Read error levels file
+    with open(ERROR_LEVELS_PATH, "r") as error_file:
+        error_levels = list(csv.reader(error_file))
+    num_of_errors = len(file_results["errors"])
+    if num_of_errors == 0:
+        error_type = ""
+        error_code = ""
+        error_instrument = ""
+        error_level = ""
+        return 0, error_type, error_code, error_instrument, error_level
+    commands_of_run: List[Dict[str, Any]] = file_results.get("commands", [])
+    run_command_error: Dict[str, Any] = commands_of_run[-1]
+    error_str: int = len(run_command_error.get("error", ""))
+    if error_str > 1:
+        error_type = run_command_error["error"].get("errorType", "")
+        error_code = run_command_error["error"].get("errorCode", "")
+        try:
+            # Instrument Error
+            error_instrument = run_command_error["error"]["errorInfo"]["node"]
+        except KeyError:
+            # Module Error
+            error_instrument = run_command_error["error"]["errorInfo"].get("port", "")
+    else:
+        error_type = file_results["errors"][0]["errorType"]
+        print(error_type)
+        error_code = file_results["errors"][0]["errorCode"]
+        error_instrument = file_results["errors"][0]["detail"]
+    for error in error_levels:
+        code_error = error[1]
+        if code_error == error_code:
+            error_level = error[4]
+
+    return num_of_errors, error_type, error_code, error_instrument, error_level
+
+
+def write_to_abr_sheet(
+    runs_and_robots: Dict[Any, Dict[str, Any]],
+    storage_directory: str,
+    file_name_csv: str,
+    google_sheet: Any,
+) -> None:
+    """Write dict of data to abr csv."""
+    sheet_location = os.path.join(storage_directory, file_name_csv)
+    list_of_runs = list(runs_and_robots.keys())
+    with open(sheet_location, "a", newline="") as f:
+        writer = csv.writer(f)
+        for run in range(len(list_of_runs)):
+            row = runs_and_robots[list_of_runs[run]].values()
+            row_list = list(row)
+            writer.writerow(row_list)
+            if google_sheet.creditals.access_token_expired:
+                google_sheet.gc.login()
+            google_sheet.update_row_index()
+            google_sheet.write_to_row(row_list)
+            t.sleep(3)
+
+
+def read_abr_data_sheet(
+    storage_directory: str, file_name_csv: str, google_sheet: Any
+) -> Set[str]:
+    """Reads current run sheet to determine what new run data should be added."""
+    print(file_name_csv)
+    sheet_location = os.path.join(storage_directory, file_name_csv)
+    runs_in_sheet = set()
+    # Read the CSV file
+    with open(sheet_location, "r") as csv_start:
+        data = csv.DictReader(csv_start)
+        headers = data.fieldnames
+        if headers is not None:
+            for row in data:
+                run_id = row[headers[1]]
+                runs_in_sheet.add(run_id)
+        print(f"There are {str(len(runs_in_sheet))} runs documented in the ABR sheet.")
+    # Read Google Sheet
+    if google_sheet.creditals.access_token_expired:
+        google_sheet.gc.login()
+    google_sheet.write_header(headers)
+    google_sheet.update_row_index()
+    return runs_in_sheet
+
+
+def get_run_ids_from_storage(storage_directory: str) -> Set[str]:
+    """Read all files in storage directory, extracts run id, adds to set."""
+    os.makedirs(storage_directory, exist_ok=True)
+    list_of_files = os.listdir(storage_directory)
+    run_ids = set()
+    for this_file in list_of_files:
+        read_file = os.path.join(storage_directory, this_file)
+        if read_file.endswith(".json"):
+            file_results = json.load(open(read_file))
+            run_id = file_results.get("run_id", "")
+            if len(run_id) > 0:
+                run_ids.add(run_id)
+    return run_ids
+
+
+def get_unseen_run_ids(runs: Set[str], runs_from_storage: Set[str]) -> Set[str]:
+    """Subtracts runs from storage from current runs being read."""
+    runs_to_save = runs - runs_from_storage
+    print(f"There are {str(len(runs_to_save))} new run(s) to save.")
+    return runs_to_save
diff --git a/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py b/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py
index 3d256169a58..aa66f230409 100644
--- a/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py
+++ b/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py
@@ -10,16 +10,6 @@
 import argparse


-def _get_user_input(lst: List[str], some_string: str) -> str:
-    variable = input(some_string)
-    while variable not in lst:
-        print(
-            f"Your input was {variable}. Expected input is one of the following: {lst}"
-        )
-        variable = input(some_string)
-    return variable
-
-
 class _ABRAsairSensor:
     def __init__(self, robot: str, duration: int, frequency: int) -> None:
         try:
@@ -79,6 +69,7 @@ def __init__(self, robot: str, duration: int, frequency: int) -> None:
                 temp,
                 rh,
             ]
+            results_list.append(row)

             # Check if duration elapsed
             elapsed_time = datetime.datetime.now() - start_time
@@ -86,6 +77,8 @@ def __init__(self, robot: str, duration: int, frequency: int) -> None:
                 break
            # write to google sheet
            try:
+                if google_sheet.creditals.access_token_expired:
+                    google_sheet.gc.login()
                google_sheet.write_header(header)
                google_sheet.update_row_index()
                google_sheet.write_to_row(row)
@@ -108,22 +101,6 @@ def __init__(self, robot: str, duration: int, frequency: int) -> None:


 if __name__ == "__main__":
-    robot_list: List = [
-        "DVT1ABR1",
-        "DVT1ABR2",
-        "DVT1ABR3",
-        "DVT1ABR4",
-        "DVT2ABR5",
-        "DVT2ABR6",
-        "PVT1ABR7",
-        "PVT1ABR8",
-        "PVT1ABR9",
-        "PVT1ABR10",
-        "PVT1ABR11",
-        "PVT1ABR12",
-        "ROOM_339",
-        "Room_340",
-    ]
     parser = argparse.ArgumentParser(description="Starts Temp/RH Sensor.")
     parser.add_argument(
         "robot", metavar="ROBOT", type=str, nargs=1, help="ABR Robot Name"
diff --git a/hardware-testing/hardware_testing/scripts/abr_scale.py b/hardware-testing/hardware_testing/scripts/abr_scale.py
deleted file mode 100644
index cf9763e135d..00000000000
--- a/hardware-testing/hardware_testing/scripts/abr_scale.py
+++ /dev/null
@@ -1,186 +0,0 @@
-"""ABR Scale Reader."""
-import os
-import datetime
-from hardware_testing import data
-from hardware_testing.drivers import find_port
-from hardware_testing.drivers.radwag import RadwagScale
-from typing import Dict
-from typing import List
-
-
-# Test Variables
-test_type_list = ["E", "P"]
-step_list = ["1", "2", "3"]
-robot_list = [
-    "DVT1ABR1",
-    "DVT1ABR2",
-    "DVT1ABR3",
-    "DVT1ABR4",
-    "DVT2ABR5",
-    "DVT2ABR6",
-    "PVT1ABR7",
-    "PVT1ABR8",
-    "PVT1ABR9",
-    "PVT1ABR10",
-    "PVT1ABR11",
-    "PVT1ABR12",
-    "ROOM_339",
-    "ROOM_340",
-]
-# Labware per Robot
-labware_DVT1ABR2 = ["Reagents", "Sample Plate"]
-labware_DVT1ABR4 = [
-    "Sample Plate",
-    "Reservoir",
-    "Reagent Plate",
-    "Plate1",
-    "Seal1",
-    "Plate2",
-    "Seal2",
-]
-labware_PVT1ABR9 = ["Waste", "Reservoir", "PCR Plate", "Deep Well Plate"]
-labware_PVT1ABR10 = ["Waste", "R1", "R2", "PCR Plate", "Deep Well Plate"]
-labware_PVT1ABR11 = [
-    "Waste",
-    "Reservoir",
-    "Sample Plate",
-    "Working Plate",
-    "Final Plate",
-    "Reagents",
-]
-labware_DVT1ABR3 = ["Plate1", "Seal1", "Plate2", "Seal2"]
-labware_PVT1ABR7 = ["Waste", "R1", "R2", "PCR Plate", "Deep Well Plate"]
-labware = [
-    labware_DVT1ABR2,
-    labware_DVT1ABR4,
-    labware_PVT1ABR9,
-    labware_PVT1ABR10,
-    labware_PVT1ABR11,
-    labware_DVT1ABR3,
-    labware_PVT1ABR7,
-]
-abr = [
-    "DVT1ABR2",
-    "DVT1ABR4",
-    "PVT1ABR9",
-    "PVT1ABR10",
-    "PVT1ABR11",
-    "DVT1ABR3",
-    "PVT1ABR7",
-]
-robot_labware: Dict[str, List[str]] = {"Robot": [], "Labware": []}
-for i in range(len(labware)):
-    robot_labware["Robot"].extend([abr[i]] * len(labware[i]))
-    robot_labware["Labware"].extend(labware[i])
-
-
-def _get_user_input(list: List, some_string: str) -> str:
-    variable = input(some_string)
-    while variable not in list:
-        print(
-            f"Your input was {variable}. Expected input is one of the following: {list}"
-        )
-        variable = input(some_string)
-    return variable
-
-
-if __name__ == "__main__":
-    try:
-        # find port using known VID:PID, then connect
-        vid, pid = RadwagScale.vid_pid()
-        # NOTE: using different scale in ABR than production
-        # and we found the PID is different
-        # TODO: maybe make this an argument that can be passed into script :shrug"
-        pid = 41207
-        scale = RadwagScale.create(port=find_port(vid=vid, pid=pid))
-        scale.connect()
-        grams, is_stable = scale.read_mass()
-        print(f"Scale reading: grams={grams}, is_stable={is_stable}")
-        grams, is_stable = scale.read_mass()
-        print(f"Scale reading: grams={grams}, is_stable={is_stable}")
-        grams, is_stable = scale.read_mass()
-        print(f"Scale reading: grams={grams}, is_stable={is_stable}")
-        grams, is_stable = scale.read_mass()
-        print(f"Scale reading: grams={grams}, is_stable={is_stable}")
-        # Get user input to label data entry correctly
-        scale_measurement = "ABR-Liquids-"
-        robot_to_filter = _get_user_input(robot_list, "Robot: ")
-        test_type = _get_user_input(test_type_list, "Test Type (E/P): ")
-        test_name = scale_measurement + robot_to_filter + "-" + test_type
-        run_id = data.create_run_id()
-        filtered_robot_labware = {
-            "Robot": [
-                robot
-                for robot in robot_labware["Robot"]
-                if robot.upper() == robot_to_filter.upper()
-            ],
-            "Labware": [
-                labware1
-                for i, labware1 in enumerate(robot_labware["Labware"])
-                if robot_labware["Robot"][i].upper() == robot_to_filter.upper()
-            ],
-        }
-        labware_list = filtered_robot_labware["Labware"]
-        labware_input = _get_user_input(
-            labware_list, f"Labware, Expected Values: {labware_list}: "
-        )
-        step = _get_user_input(step_list, "Testing Step (1, 2, 3): ")
-        # Set up .csv file
-        tag = labware_input + "-" + str(step)
-        file_name = data.create_file_name(test_name, run_id, tag)
-        header = ["Date", "Labware", "Step", "Robot", "Scale Reading", "Stable"]
-        header_str = ",".join(header) + "\n"
-        data.append_data_to_file(
-            test_name=test_name, run_id=run_id, file_name=file_name, data=header_str
-        )
-        results_list = []
-        while is_stable is False:
-            grams, is_stable = scale.read_mass()
-            print(f"Scale reading: grams={grams}, is_stable={is_stable}")
-            time_now = datetime.datetime.now()
-            row = [time_now, labware, step, robot_to_filter, grams, is_stable]
-            results_list.append(row)
-            if is_stable is True:
-                print("is stable")
-                break
-        result_string = ""
-        for sublist in results_list:
-            row_str = ", ".join(map(str, sublist)) + "\n"
-            result_string += row_str
-        file_path = data.append_data_to_file(
-            test_name, run_id, file_name, result_string
-        )
-        if os.path.exists(file_path):
-            print("File saved")
-            with open(file_path, "r") as file:
-                line_count = sum(1 for line in file)
-            if line_count < 2:
-                print(f"Line count is {line_count}. Re-weigh.")
-                grams, is_stable = scale.read_mass()
-                while is_stable is False:
-                    grams, is_stable = scale.read_mass()
-                    print(f"Scale reading: grams={grams}, is_stable={is_stable}")
-                    time_now = datetime.datetime.now()
-                    row = [
-                        time_now,
-                        labware_input,
-                        step,
-                        robot_to_filter,
-                        grams,
-                        is_stable,
-                    ]
-                    results_list.append(row)
-                    if is_stable is True:
-                        print("is stable")
-                        break
-                result_string = ""
-                for sublist in results_list:
-                    row_str = ", ".join(map(str, sublist)) + "\n"
-                    result_string += row_str
-                file_path = data.append_data_to_file(
-                    test_name, run_id, file_name, result_string
-                )
-        else:
-            print("File did not save.")
-    finally:
-        scale.disconnect()
diff --git a/hardware-testing/hardware_testing/scripts/analyze_abr.py b/hardware-testing/hardware_testing/scripts/analyze_abr.py
deleted file mode 100644
index f6e7ec0a9b7..00000000000
--- a/hardware-testing/hardware_testing/scripts/analyze_abr.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""ABR Scale Measurement Analyzer."""
-import os
-from datetime import datetime
-from hardware_testing import data
-import csv
-from typing import List
-
-
-def _get_user_input(list: List, some_string: str) -> str:
-    variable = input(some_string)
-    while variable not in list:
-        print(
-            f"Your input was {variable}. Expected input is one of the following: {list}"
-        )
-        variable = input(some_string)
-    return variable
-
-
-if __name__ == "__main__":
-    # Format Results Sheet
-    header = ["Date", "File Name", "Plate State", "Robot", "Mass (g)", "Sample"]
-    time_now = datetime.now().date()
-    # Get data folders
-    current_dir = data.get_testing_data_directory()
-    file_list = os.listdir(current_dir)
-    folder_of_interest = _get_user_input(
-        file_list, f"Folder List, Expected Values: {file_list}: "
-    )
-    robot = folder_of_interest.split("-")[2]
-    results_file_name = str(time_now) + "-" + str(robot) + "-Results.csv"
-    dir_2 = os.path.join(current_dir, folder_of_interest)
-    new_csv_file_path = os.path.join(current_dir, results_file_name)
-    file_list_2 = os.listdir(dir_2)  # LIST OF individual run folders
-    # WRITE HEADER
-    with open(new_csv_file_path, "w", newline="") as csv_file:
-        csv_writer = csv.writer(csv_file)
-        csv_writer.writerow(header)
-    for file2 in file_list_2:
-        raw_data_folder = os.path.join(dir_2, file2)
-        raw_data_file_csv = os.listdir(raw_data_folder)[0]
-        plate_state = raw_data_file_csv.split("_")[-1].split("-")[1].split(".")[0]
-        sample = raw_data_file_csv.split("_")[-1].split("-")[0]
-        raw_data_file_csv_path = os.path.join(raw_data_folder, raw_data_file_csv)
-        results_list = []
-        try:
-            with open(raw_data_file_csv_path, "r") as f:
-                csvreader = csv.reader(f)
-                rows = list(csvreader)
-        except Exception as e:
-            print(f"Error opening file: {e}")
-        last_row = rows[-1]
-        # Process the file here
-        stable_value = last_row[-2]
-        print(stable_value)
-        date_of_measurement = last_row[0]
-        date = str(date_of_measurement).split(" ")[0]
-        row_data = (
-            date,
-            raw_data_file_csv,
-            plate_state,
-            robot,
-            stable_value,
-            sample,
-        )
-        results_list.append(row_data)
-
-        with open(new_csv_file_path, "a", newline="") as csv_file:
-            csv_writer = csv.writer(csv_file)
-            # Write data
-            csv_writer.writerows([row_data])
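
Usage sketch: the snippet below shows how the helpers consolidated into `read_robot_logs.py` fit together, mirroring the call sequence in `abr_read_logs.py` above. The storage path, header list, and the hard-coded `runs_in_sheet` set are illustrative placeholders; in the real scripts `runs_in_sheet` comes from `read_abr_data_sheet(...)`, which also needs the `google_sheets_tool` module and `credentials.json` kept outside this repo.

```python
# Minimal sketch of the consolidated read_robot_logs helpers introduced in this diff.
# Placeholder values are marked; only the google-sheet-free calls are exercised.
from hardware_testing.abr_tools import read_robot_logs

storage_directory = "/data/abr_logs"  # placeholder path to the local run-log storage
headers = ["Robot", "Run_ID", "Protocol_Name"]  # placeholder column set

# Create the local CSV if it does not exist yet (returns "<file_name>.csv").
csv_name = read_robot_logs.create_abr_data_sheet(
    storage_directory, "ABR-run-data", headers
)

# Collect run ids already downloaded to disk, then keep only the ones that
# have not been written to the sheet yet (storage minus sheet).
runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
runs_in_sheet = {"run-aaa", "run-bbb"}  # placeholder; normally read_abr_data_sheet(...)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs_from_storage, runs_in_sheet)
print(f"Runs still to upload: {runs_to_save}")
```

Uploading the remaining runs then goes through `read_robot_logs.write_to_abr_sheet(runs_and_robots, storage_directory, csv_name, google_sheet)`, exactly as `abr_command_data.py` and `abr_read_logs.py` do above.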