From bc91c4af1ba602db7b5d23e6559dea01dab83229 Mon Sep 17 00:00:00 2001
From: Rhyann Clarke <146747548+rclarke0@users.noreply.github.com>
Date: Wed, 3 Apr 2024 11:21:57 -0400
Subject: [PATCH] Automated ABR Calibration Data Uploading (#14782)

# Overview

Pulls calibration data from ABR robots and uploads it to a Google Drive folder and a Google Sheet.

# Test Plan

Tested on ABR robots. The script successfully pulls calibration data, uploads it to Google Drive, and saves it to the Google Sheet.

# Changelog

- Adds abr_calibration_logs.py (example invocation below), which:
  1. Connects to the Google Drive folder.
  2. Connects to the Google Sheet.
  3. Pulls module, instrument, and deck calibration data via HTTP requests and compiles it into one .json file per robot.
  4. Uploads new files to the Google Drive folder.
  5. Adds new rows to the instrument, module, and deck calibration sheets if the serial and calibration lastModified timestamp pairing does not already exist.
- Splits jira_tool.py into a file containing just the JIRA tools and a new file, abr_robot_error.py, that uses those tools with the robots.
- For all scripts uploading to Google Drive, the folder_name argument now takes the folder ID so that the service account writes to the correct folder. Adds an email argument so the service account can share permissions on uploaded files.
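A hypothetical invocation of the new script (the path, folder ID, and sheet name are illustrative; the positional-argument order follows the script's argparse setup):

```bash
python abr_testing/data_collection/abr_calibration_logs.py \
    /path/to/storage_dir \
    1AbCdEfGhIjKlMnOp \
    "ABR Calibration Data" \
    user@opentrons.com \
    ALL
```

Passing ALL reads robot IPs from IPs.json in the storage directory; passing one IP address pulls calibration data from that robot only.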
"mimeType": str(mimetypes.guess_type(file_path)[0]), + "parents": [self.parent_folder], } - media = MediaFileUpload(file_path, resumable=True) uploaded_file = ( @@ -83,15 +85,27 @@ def upload_file(self, file_path: str) -> str: .create(body=file_metadata, media_body=media, fields="id") # type: ignore .execute() ) - return uploaded_file["id"] - def upload_missing_files(self, storage_directory: str, missing_files: set) -> None: + def upload_missing_files(self, storage_directory: str) -> None: """Upload missing files to Google Drive.""" + # Read Google Drive .json files. + google_drive_files = self.list_folder() + google_drive_files_json = [ + file for file in google_drive_files if file.endswith(".json") + ] + # Read local directory. + local_files_json = set( + file for file in os.listdir(storage_directory) if file.endswith(".json") + ) + missing_files = local_files_json - set(google_drive_files_json) + print(f"Missing files: {len(missing_files)}") + # Upload missing files. uploaded_files = [] for file in missing_files: file_path = os.path.join(storage_directory, file) uploaded_file_id = google_drive.upload_file(self, file_path) + self.share_permissions(uploaded_file_id) uploaded_files.append( {"name": os.path.basename(file_path), "id": uploaded_file_id} ) @@ -108,3 +122,31 @@ def upload_missing_files(self, storage_directory: str, missing_files: set) -> No print( f"File '{this_name}' was not found in the list of files after uploading." ) + + def open_folder(self) -> Optional[str]: + """Open folder in web browser.""" + folder_metadata = ( + self.drive_service.files() + .get(fileId=self.parent_folder, fields="webViewLink") + .execute() + ) + folder_link = folder_metadata.get("webViewLink") + if folder_link: + print(f"Folder link: {folder_link}") + webbrowser.open( + folder_link + ) # Open the folder link in the default web browser + else: + print("Folder link not found.") + return folder_link + + def share_permissions(self, file_id: str) -> None: + """Share permissions with self.""" + new_permission = { + "type": "user", + "role": "writer", + "emailAddress": self.email, + } + self.drive_service.permissions().create( + fileId=file_id, body=new_permission, transferOwnership=False # type: ignore + ).execute() diff --git a/abr-testing/abr_testing/automation/jira_tool.py b/abr-testing/abr_testing/automation/jira_tool.py index a98b023a44a..5ed521c0430 100644 --- a/abr-testing/abr_testing/automation/jira_tool.py +++ b/abr-testing/abr_testing/automation/jira_tool.py @@ -6,77 +6,6 @@ import webbrowser import argparse from typing import List, Tuple -from abr_testing.data_collection import read_robot_logs, abr_google_drive, get_run_logs - - -def get_error_runs_from_robot(ip: str) -> List[str]: - """Get runs that have errors from robot.""" - error_run_ids = [] - response = requests.get( - f"http://{ip}:31950/runs", headers={"opentrons-version": "3"} - ) - run_data = response.json() - run_list = run_data["data"] - for run in run_list: - run_id = run["id"] - num_of_errors = len(run["errors"]) - if not run["current"] and num_of_errors > 0: - error_run_ids.append(run_id) - return error_run_ids - - -def get_error_info_from_robot( - ip: str, one_run: str, storage_directory: str -) -> Tuple[str, str, str, List[str], str, str]: - """Get error information from robot to fill out ticket.""" - description = dict() - # get run information - results = get_run_logs.get_run_data(one_run, ip) - # save run information to local directory as .json file - saved_file_path = read_robot_logs.save_run_log_to_json( - ip, results, 
+    serials = google_sheet.get_column(col_1)
+    modify_dates = google_sheet.get_column(col_2)
+    for serial, modify_date in zip(serials, modify_dates):
+        if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
+            print(f"Skipped row {row}. Already on Google Sheet.")
+            return row
+    read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers)
+    return row
+
+
+def upload_calibration_offsets(
+    calibration: Dict[str, Any], storage_directory: str
+) -> None:
+    """Upload calibration data to google_sheet."""
+    # Common Headers
+    headers_beg = list(calibration.keys())[:4]
+    headers_end = list(["X", "Y", "Z", "lastModified"])
+    # INSTRUMENT SHEET
+    instrument_headers = (
+        headers_beg + list(calibration["Instruments"][0].keys())[:7] + headers_end
+    )
+    local_instrument_file = google_sheet_name + "-Instruments"
+    instrument_sheet_location = read_robot_logs.create_abr_data_sheet(
+        storage_directory, local_instrument_file, instrument_headers
+    )
+    # INSTRUMENTS DATA
+    instruments = calibration["Instruments"]
+    for instrument in range(len(instruments)):
+        one_instrument = instruments[instrument]
+        x = one_instrument["data"]["calibratedOffset"]["offset"].get("x", "")
+        y = one_instrument["data"]["calibratedOffset"]["offset"].get("y", "")
+        z = one_instrument["data"]["calibratedOffset"]["offset"].get("z", "")
+        modified = one_instrument["data"]["calibratedOffset"].get("last_modified", "")
+        instrument_row = (
+            list(calibration.values())[:4]
+            + list(one_instrument.values())[:7]
+            + list([x, y, z, modified])
+        )
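+        # Columns 8 and 15 are the serial/lastModified pair checked for duplicates.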
+        check_for_duplicates(
+            instrument_sheet_location,
+            google_sheet_instruments,
+            8,
+            15,
+            instrument_row,
+            instrument_headers,
+        )
+    # MODULE SHEET
+    if len(calibration.get("Modules", "")) > 0:
+        module_headers = (
+            headers_beg + list(calibration["Modules"][0].keys())[:7] + headers_end
+        )
+        local_modules_file = google_sheet_name + "-Modules"
+        modules_sheet_location = read_robot_logs.create_abr_data_sheet(
+            storage_directory, local_modules_file, module_headers
+        )
+        # MODULES DATA
+        modules = calibration["Modules"]
+        for module in range(len(modules)):
+            one_module = modules[module]
+            x = one_module["moduleOffset"]["offset"].get("x", "")
+            y = one_module["moduleOffset"]["offset"].get("y", "")
+            z = one_module["moduleOffset"]["offset"].get("z", "")
+            modified = one_module["moduleOffset"].get("last_modified", "")
+            module_row = (
+                list(calibration.values())[:4]
+                + list(one_module.values())[:7]
+                + list([x, y, z, modified])
+            )
+            check_for_duplicates(
+                modules_sheet_location,
+                google_sheet_modules,
+                8,
+                15,
+                module_row,
+                module_headers,
+            )
+    # DECK SHEET
+    local_deck_file = google_sheet_name + "-Deck"
+    deck_headers = headers_beg + list(["pipetteCalibratedWith", "Slot"]) + headers_end
+    deck_sheet_location = read_robot_logs.create_abr_data_sheet(
+        storage_directory, local_deck_file, deck_headers
+    )
+    # DECK DATA
+    deck = calibration["Deck"]
+    slots = ["D3", "D1", "A1"]
+    deck_modified = deck["data"].get("lastModified", "")
+    pipette_calibrated_with = deck["data"].get("pipetteCalibratedWith", "")
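+    # Each deck calibration matrix row is one (x, y, z) point, paired with a slot.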
+    for i in range(len(deck["data"]["matrix"])):
+        coords = deck["data"]["matrix"][i]
+        x = coords[0]
+        y = coords[1]
+        z = coords[2]
+        deck_row = list(calibration.values())[:4] + list(
+            [pipette_calibrated_with, slots[i], x, y, z, deck_modified]
+        )
+        check_for_duplicates(
+            deck_sheet_location, google_sheet_deck, 6, 10, deck_row, deck_headers
+        )
+
+
+if __name__ == "__main__":
+    """Get calibration logs."""
+    parser = argparse.ArgumentParser(
+        description="Pulls calibration logs from ABR robots."
+    )
+    parser.add_argument(
+        "storage_directory",
+        metavar="STORAGE_DIRECTORY",
+        type=str,
+        nargs=1,
+        help="Path to long term storage directory for run logs.",
+    )
+    parser.add_argument(
+        "folder_name",
+        metavar="FOLDER_NAME",
+        type=str,
+        nargs=1,
+        help="Google Drive folder name. Open desired folder and copy string after drive/folders/.",
+    )
+    parser.add_argument(
+        "google_sheet_name",
+        metavar="GOOGLE_SHEET_NAME",
+        type=str,
+        nargs=1,
+        help="Google sheet name.",
+    )
+    parser.add_argument(
+        "email", metavar="EMAIL", type=str, nargs=1, help="opentrons gmail."
+    )
+    parser.add_argument(
+        "ip_or_all",
+        metavar="IP_OR_ALL",
+        type=str,
+        nargs=1,
+        help="Enter 'ALL' to read IPs.json or type full IP address of 1 robot.",
+    )
+    args = parser.parse_args()
+    storage_directory = args.storage_directory[0]
+    folder_name = args.folder_name[0]
+    google_sheet_name = args.google_sheet_name[0]
+    ip_or_all = args.ip_or_all[0]
+    email = args.email[0]
+    # Connect to google drive.
+    try:
+        credentials_path = os.path.join(storage_directory, "credentials.json")
+    except FileNotFoundError:
+        print(f"Add credentials.json file to: {storage_directory}.")
+        sys.exit()
+    try:
+        google_drive = google_drive_tool.google_drive(
+            credentials_path, folder_name, email
+        )
+        # Upload calibration logs to google drive.
+        print("Connected to google drive.")
+    except json.decoder.JSONDecodeError:
+        print(
+            "Credential file is damaged. Get from https://console.cloud.google.com/apis/credentials"
+        )
+        sys.exit()
+    # Connect to google sheet
+    try:
+        google_sheet_instruments = google_sheets_tool.google_sheet(
+            credentials_path, google_sheet_name, 0
+        )
+        google_sheet_modules = google_sheets_tool.google_sheet(
+            credentials_path, google_sheet_name, 1
+        )
+        google_sheet_deck = google_sheets_tool.google_sheet(
+            credentials_path, google_sheet_name, 2
+        )
+        print(f"Connected to google sheet: {google_sheet_name}")
+    except gspread.exceptions.APIError:
+        print("ERROR: Check google sheet name. Check credentials file.")
+        sys.exit()
+    ip_json_file = os.path.join(storage_directory, "IPs.json")
+    try:
+        ip_file = json.load(open(ip_json_file))
+    except FileNotFoundError:
+        print(f"Add .json file with robot IPs to: {storage_directory}.")
+        sys.exit()
+    if ip_or_all == "ALL":
+        ip_address_list = ip_file["ip_address_list"]
+        for ip in ip_address_list:
+            saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
+                ip, storage_directory
+            )
+            upload_calibration_offsets(calibration, storage_directory)
+    else:
+        saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
+            ip_or_all, storage_directory
+        )
+        upload_calibration_offsets(calibration, storage_directory)
+
+    google_drive.upload_missing_files(storage_directory)
diff --git a/abr-testing/abr_testing/data_collection/abr_google_drive.py b/abr-testing/abr_testing/data_collection/abr_google_drive.py
index 6dfc5e8f284..1d79bbe2ca2 100644
--- a/abr-testing/abr_testing/data_collection/abr_google_drive.py
+++ b/abr-testing/abr_testing/data_collection/abr_google_drive.py
@@ -122,7 +122,7 @@ def create_data_dictionary(
         metavar="FOLDER_NAME",
         type=str,
         nargs=1,
-        help="Google Drive folder name.",
+        help="Google Drive folder name. Open desired folder and copy string after drive/folders/.",
     )
     parser.add_argument(
         "google_sheet_name",
@@ -131,11 +131,14 @@ def create_data_dictionary(
         nargs=1,
         help="Google sheet name.",
     )
+    parser.add_argument(
+        "email", metavar="EMAIL", type=str, nargs=1, help="opentrons gmail."
+    )
     args = parser.parse_args()
     folder_name = args.folder_name[0]
     storage_directory = args.storage_directory[0]
     google_sheet_name = args.google_sheet_name[0]
-    parent_folder = False
+    email = args.email[0]
     try:
         credentials_path = os.path.join(storage_directory, "credentials.json")
     except FileNotFoundError:
@@ -143,7 +146,7 @@ def create_data_dictionary(
         sys.exit()
     try:
         google_drive = google_drive_tool.google_drive(
-            credentials_path, folder_name, parent_folder
+            credentials_path, folder_name, email
         )
         print("Connected to google drive.")
     except json.decoder.JSONDecodeError:
@@ -162,21 +165,9 @@ def create_data_dictionary(
         sys.exit()
     run_ids_on_gs = google_sheet.get_column(2)
     run_ids_on_gs = set(run_ids_on_gs)
-    # Read Google Drive .json files
-    google_drive_files = google_drive.list_folder()
-    google_drive_files_json = [
-        file for file in google_drive_files if file.endswith(".json")
-    ]
-    # read local directory
-    list_of_files = os.listdir(storage_directory)
-    local_files_json = set(
-        file for file in os.listdir(storage_directory) if file.endswith(".json")
-    )
-    missing_files = local_files_json - set(google_drive_files_json)
-    print(f"Missing files: {len(missing_files)}")
 
     # Uploads files that are not in google drive directory
-    google_drive.upload_missing_files(storage_directory, missing_files)
+    google_drive.upload_missing_files(storage_directory)
 
     # Run ids in google_drive_folder
     run_ids_on_gd = read_robot_logs.get_run_ids_from_google_drive(google_drive)
diff --git a/abr-testing/abr_testing/data_collection/abr_robot_error.py b/abr-testing/abr_testing/data_collection/abr_robot_error.py
new file mode 100644
index 00000000000..9e9e2240a84
--- /dev/null
+++ b/abr-testing/abr_testing/data_collection/abr_robot_error.py
@@ -0,0 +1,166 @@
+"""Create ticket for robot with error."""
+from typing import List, Tuple
+from abr_testing.data_collection import read_robot_logs, abr_google_drive, get_run_logs
+import requests
+import argparse
+from abr_testing.automation import jira_tool
+
+
+def get_error_runs_from_robot(ip: str) -> List[str]:
+    """Get runs that have errors from robot."""
+    error_run_ids = []
+    response = requests.get(
+        f"http://{ip}:31950/runs", headers={"opentrons-version": "3"}
+    )
+    run_data = response.json()
+    run_list = run_data["data"]
+    for run in run_list:
+        run_id = run["id"]
+        num_of_errors = len(run["errors"])
+        if not run["current"] and num_of_errors > 0:
+            error_run_ids.append(run_id)
+    return error_run_ids
+
+
+def get_error_info_from_robot(
+    ip: str, one_run: str, storage_directory: str
+) -> Tuple[str, str, str, List[str], str, str]:
+    """Get error information from robot to fill out ticket."""
+    description = dict()
+    # get run information
+    results = get_run_logs.get_run_data(one_run, ip)
+    # save run information to local directory as .json file
+    saved_file_path = read_robot_logs.save_run_log_to_json(
+        ip, results, storage_directory
+    )
+    # Error Printout
+    (
+        num_of_errors,
+        error_type,
+        error_code,
+        error_instrument,
+        error_level,
+    ) = read_robot_logs.get_error_info(results)
+    # JIRA Ticket Fields
+    failure_level = "Level " + str(error_level) + " Failure"
+    components = [failure_level, "Flex-RABR"]
+    affects_version = results["API_Version"]
+    parent = results.get("robot_name", "")
+    print(parent)
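+    # Ticket summary format: <robot>_<run_id>_<error_code>_<error_type>.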
+    summary = parent + "_" + str(one_run) + "_" + str(error_code) + "_" + error_type
+    # Description of error
+    description["protocol_name"] = results["protocol"]["metadata"].get(
+        "protocolName", ""
+    )
+    description["error"] = " ".join([error_code, error_type, error_instrument])
+    description["protocol_step"] = list(results["commands"])[-1]
+    description["right_mount"] = results.get("right", "No attachment")
+    description["left_mount"] = results.get("left", "No attachment")
+    description["gripper"] = results.get("extension", "No attachment")
+    all_modules = abr_google_drive.get_modules(results)
+    whole_description = {**description, **all_modules}
+    whole_description_str = (
+        "{"
+        + "\n".join("{!r}: {!r},".format(k, v) for k, v in whole_description.items())
+        + "}"
+    )
+
+    return (
+        summary,
+        parent,
+        affects_version,
+        components,
+        whole_description_str,
+        saved_file_path,
+    )
+
+
+if __name__ == "__main__":
+    """Create ticket for specified robot."""
+    parser = argparse.ArgumentParser(description="Pulls run logs from ABR robots.")
+    parser.add_argument(
+        "storage_directory",
+        metavar="STORAGE_DIRECTORY",
+        type=str,
+        nargs=1,
+        help="Path to long term storage directory for run logs.",
+    )
+    parser.add_argument(
+        "robot_ip",
+        metavar="ROBOT_IP",
+        type=str,
+        nargs=1,
+        help="IP address of robot as string.",
+    )
+    parser.add_argument(
+        "jira_api_token",
+        metavar="JIRA_API_TOKEN",
+        type=str,
+        nargs=1,
+        help="JIRA API Token. Get from https://id.atlassian.com/manage-profile/security.",
+    )
+    parser.add_argument(
+        "email",
+        metavar="EMAIL",
+        type=str,
+        nargs=1,
+        help="Email connected to JIRA account.",
+    )
+    # TODO: write function to get reporter_id from email.
+    parser.add_argument(
+        "reporter_id",
+        metavar="REPORTER_ID",
+        type=str,
+        nargs=1,
+        help="JIRA Reporter ID.",
+    )
+    # TODO: improve help comment on jira board id.
+    parser.add_argument(
+        "board_id",
+        metavar="BOARD_ID",
+        type=str,
+        nargs=1,
+        help="JIRA Board ID. RABR is 217",
+    )
RABR is 217", + ) + args = parser.parse_args() + storage_directory = args.storage_directory[0] + ip = args.robot_ip[0] + url = "https://opentrons.atlassian.net" + api_token = args.jira_api_token[0] + email = args.email[0] + board_id = args.board_id[0] + reporter_id = args.reporter_id[0] + ticket = jira_tool.JiraTicket(url, api_token, email) + error_runs = get_error_runs_from_robot(ip) + one_run = error_runs[-1] # Most recent run with error. + ( + summary, + robot, + affects_version, + components, + whole_description_str, + saved_file_path, + ) = get_error_info_from_robot(ip, one_run, storage_directory) + print(f"Making ticket for run: {one_run} on robot {robot}.") + # TODO: make argument or see if I can get rid of with using board_id. + project_key = "RABR" + parent_key = project_key + "-" + robot[-1] + issue_url, issue_key = ticket.create_ticket( + summary, + whole_description_str, + project_key, + reporter_id, + "Bug", + "Medium", + components, + affects_version, + parent_key, + ) + ticket.open_issue(issue_key) + ticket.post_attachment_to_ticket(issue_key, saved_file_path) + # get calibration data + saved_file_path_calibration, calibration = read_robot_logs.get_calibration_offsets( + ip, storage_directory + ) + ticket.post_attachment_to_ticket(issue_key, saved_file_path_calibration) diff --git a/abr-testing/abr_testing/data_collection/get_run_logs.py b/abr-testing/abr_testing/data_collection/get_run_logs.py index 1511e3405e7..4034f076dc9 100644 --- a/abr-testing/abr_testing/data_collection/get_run_logs.py +++ b/abr-testing/abr_testing/data_collection/get_run_logs.py @@ -107,8 +107,8 @@ def get_all_run_logs(storage_directory: str) -> None: try: runs = get_run_ids_from_robot(ip) runs_to_save = read_robot_logs.get_unseen_run_ids(runs, runs_from_storage) - saved_file_paths = save_runs(runs_to_save, ip, storage_directory) - google_drive.upload_missing_files(storage_directory, saved_file_paths) + save_runs(runs_to_save, ip, storage_directory) + google_drive.upload_missing_files(storage_directory) except Exception: print(f"ERROR: Failed to read IP address: {ip}.") @@ -128,12 +128,15 @@ def get_all_run_logs(storage_directory: str) -> None: metavar="FOLDER_NAME", type=str, nargs=1, - help="Google Drive folder name.", + help="Google Drive folder name. Open desired folder and copy string after drive/folders/.", + ) + parser.add_argument( + "email", metavar="EMAIL", type=str, nargs=1, help="opentrons gmail." ) args = parser.parse_args() storage_directory = args.storage_directory[0] folder_name = args.folder_name[0] - parent_folder = False + email = args.email[0] try: credentials_path = os.path.join(storage_directory, "credentials.json") except FileNotFoundError: @@ -141,7 +144,7 @@ def get_all_run_logs(storage_directory: str) -> None: sys.exit() try: google_drive = google_drive_tool.google_drive( - credentials_path, folder_name, parent_folder + credentials_path, folder_name, email ) print("Connected to google drive.") except json.decoder.JSONDecodeError: diff --git a/abr-testing/abr_testing/data_collection/read_robot_logs.py b/abr-testing/abr_testing/data_collection/read_robot_logs.py index abc8efb095e..6a7276c142b 100644 --- a/abr-testing/abr_testing/data_collection/read_robot_logs.py +++ b/abr-testing/abr_testing/data_collection/read_robot_logs.py @@ -1,15 +1,17 @@ """ABR Read Robot Logs. 
diff --git a/abr-testing/abr_testing/data_collection/read_robot_logs.py b/abr-testing/abr_testing/data_collection/read_robot_logs.py
index abc8efb095e..6a7276c142b 100644
--- a/abr-testing/abr_testing/data_collection/read_robot_logs.py
+++ b/abr-testing/abr_testing/data_collection/read_robot_logs.py
@@ -1,15 +1,17 @@
 """ABR Read Robot Logs.
 
-This library is downloading logs from robots, extracting wanted information,
+This library has functions for downloading logs from robots, extracting wanted information,
 and uploading to a google sheet using credentials and google_sheets_tools module
 saved in a local directory.
 """
 import csv
+import datetime
 import os
 from abr_testing.data_collection.error_levels import ERROR_LEVELS_PATH
 from typing import List, Dict, Any, Tuple, Set
 import time as t
 import json
+import requests
 
 
 def create_abr_data_sheet(
@@ -26,7 +28,7 @@ def create_abr_data_sheet(
         writer = csv.DictWriter(csvfile, fieldnames=headers)
         writer.writeheader()
     print(f"Created file. Located: {sheet_location}.")
-    return file_name_csv
+    return sheet_location
 
 
 def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, str]:
@@ -158,3 +160,65 @@ def get_run_ids_from_google_drive(google_drive: Any) -> Set[str]:
             file_id = file.split(".json")[0].split("_")[1]
             run_ids_on_gd.add(file_id)
     return run_ids_on_gd
+
+
+def write_to_sheets(
+    sheet_location: str, google_sheet: Any, row_list: List[Any], headers: List[str]
+) -> None:
+    """Write list to google sheet and csv."""
+    with open(sheet_location, "a", newline="") as f:
+        writer = csv.writer(f)
+        writer.writerow(row_list)
+    # Read Google Sheet
+    google_sheet.token_check()
+    google_sheet.write_header(headers)
+    google_sheet.update_row_index()
+    google_sheet.write_to_row(row_list)
+    t.sleep(5)  # Sleep added to avoid API error.
+
+
+def get_calibration_offsets(
+    ip: str, storage_directory: str
+) -> Tuple[str, Dict[str, Any]]:
+    """Connect to robot via ip and get calibration data."""
+    calibration = dict()
+    # Robot Information [Name, Software Version]
+    response = requests.get(
+        f"http://{ip}:31950/health", headers={"opentrons-version": "3"}
+    )
+    health_data = response.json()
+    robot_name = health_data.get("name", "")
+    api_version = health_data.get("api_version", "")
+    pull_date_timestamp = datetime.datetime.now()
+    date = pull_date_timestamp.date().isoformat()
+    file_date = str(pull_date_timestamp).replace(":", "").split(".")[0]
+    calibration["Robot"] = robot_name
+    calibration["Software Version"] = api_version
+    calibration["Pull Date"] = date
+    calibration["Pull Timestamp"] = pull_date_timestamp.isoformat()
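+    # The timestamped pseudo run_id gives each calibration pull its own saved file.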
+    calibration["run_id"] = "calibration" + "_" + file_date
+    # Calibration [Instruments, modules, deck]
+    response = requests.get(
+        f"http://{ip}:31950/instruments",
+        headers={"opentrons-version": "3"},
+        params={"cursor": 0, "pageLength": 0},
+    )
+    instruments: Dict[str, Any] = response.json()
+    calibration["Instruments"] = instruments.get("data", "")
+    response = requests.get(
+        f"http://{ip}:31950/modules",
+        headers={"opentrons-version": "3"},
+        params={"cursor": 0, "pageLength": 0},
+    )
+    modules: Dict[str, Any] = response.json()
+    calibration["Modules"] = modules.get("data", "")
+    response = requests.get(
+        f"http://{ip}:31950/calibration/status",
+        headers={"opentrons-version": "3"},
+        params={"cursor": 0, "pageLength": 0},
+    )
+    deck: Dict[str, Any] = response.json()
+    calibration["Deck"] = deck.get("deckCalibration", "")
+    saved_file_path = save_run_log_to_json(ip, calibration, storage_directory)
+    return saved_file_path, calibration
diff --git a/abr-testing/abr_testing/tools/abr_scale.py b/abr-testing/abr_testing/tools/abr_scale.py
index 04ed34c3f8e..0947091fe4b 100644
--- a/abr-testing/abr_testing/tools/abr_scale.py
+++ b/abr-testing/abr_testing/tools/abr_scale.py
@@ -3,28 +3,11 @@
 import datetime
 from hardware_testing.drivers import find_port, list_ports_and_select  # type: ignore[import]
 from hardware_testing.drivers.radwag import RadwagScale  # type: ignore[import]
-from typing import Any, List
 import argparse
-import csv
 from abr_testing.data_collection import read_robot_logs
 from abr_testing.automation import google_sheets_tool
 
 
-def write_to_sheets(file_name_csv: str, google_sheet: Any, row_list: List) -> None:
-    """Write list to google sheet and csv."""
-    sheet_location = os.path.join(storage_directory, file_name_csv)
-    with open(sheet_location, "a", newline="") as f:
-        writer = csv.writer(f)
-        writer.writerow(row_list)
-    print(f"Written {row_list} point to {file_name_csv}")
-    # Read Google Sheet
-    google_sheet.token_check()
-    google_sheet.write_header(headers)
-    google_sheet.update_row_index()
-    google_sheet.write_to_row(row_list)
-    print(f"Written {row_list} to google sheet.")
-
-
 if __name__ == "__main__":
     # Adds Arguments
     parser = argparse.ArgumentParser(description="Record stable mass for labware.")
@@ -76,7 +59,7 @@ def write_to_sheets(file_name_csv: str, google_sheet: Any, row_list: List) -> No
     is_stable = False
     # Set up csv sheet
     headers = ["Robot", "Date", "Timestamp", "Labware", "Mass (g)", "Measurement Step"]
-    all_data_csv = read_robot_logs.create_abr_data_sheet(
+    sheet_location = read_robot_logs.create_abr_data_sheet(
         storage_directory, file_name, headers
     )
     # Set up google sheet
@@ -100,7 +83,9 @@ def write_to_sheets(file_name_csv: str, google_sheet: Any, row_list: List) -> No
     row_list = list(row)
     while is_stable is True:
         print("is stable")
-        write_to_sheets(file_name_csv, google_sheet, row_list)
+        read_robot_logs.write_to_sheets(
+            sheet_location, google_sheet, row_list, headers
+        )
         is_stable = False
         y_or_no = input("Do you want to weigh another sample? (Y/N): ")
         if y_or_no == "Y":