Skip to content

Commit

Permalink
Upload Scale Value to google sheet (#14691)
Browse files Browse the repository at this point in the history
<!--
Thanks for taking the time to open a pull request! Please make sure
you've read the "Opening Pull Requests" section of our Contributing
Guide:


https://github.com/Opentrons/opentrons/blob/edge/CONTRIBUTING.md#opening-pull-requests

To ensure your code is reviewed quickly and thoroughly, please fill out
the sections below to the best of your ability!
-->

# Overview

Reads scale value until it becomes stable and uploads value to google
sheet and local csv.

# Test Plan

Tested with ABR scale.

# Changelog

Moved abr_scale script to abr_tools folder
Added labware, robot, measurement step, storage directory, google sheet
name as arguments
Saves only the stable value, rather than all of the unstable values.
Asks the user whether they want to measure another sample, to reduce the
number of times all arguments need to be entered.
Deleted the analyze_abr script because it is no longer needed; analysis is
automated on the google sheet.

# Review requests

<!--
Describe any requests for your reviewers here.
-->

# Risk assessment

<!--
Carefully go over your pull request and look at the other parts of the
codebase it may affect. Look for the possibility, even if you think it's
small, that your change may affect some other part of the system - for
instance, changing return tip behavior in protocol may also change the
behavior of labware calibration.

Identify the other parts of the system your codebase may affect, so that
in addition to your own review and testing, other people who may not
have the system internalized as much as you can focus their attention
and testing there.
-->
  • Loading branch information
rclarke0 authored Mar 19, 2024
1 parent 8e4ffb9 commit eb4692a
Show file tree
Hide file tree
Showing 8 changed files with 287 additions and 432 deletions.
28 changes: 12 additions & 16 deletions hardware-testing/hardware_testing/abr_tools/abr_command_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,7 @@
import sys
import json
from datetime import datetime, timedelta
from .abr_run_logs import get_run_ids_from_storage, get_unseen_run_ids
from .abr_read_logs import (
create_abr_data_sheet,
read_abr_data_sheet,
get_error_info,
write_to_abr_sheet,
)
from . import read_robot_logs


def set_up_data_sheet(
Expand All @@ -26,7 +20,9 @@ def set_up_data_sheet(
except FileNotFoundError:
print("No google sheets credentials. Add credentials to storage notebook.")
local_file_str = google_sheet_name + "-" + commandTypes
csv_name = create_abr_data_sheet(storage_directory, local_file_str, headers)
csv_name = read_robot_logs.create_abr_data_sheet(
storage_directory, local_file_str, headers
)

return google_sheet, csv_name

Expand Down Expand Up @@ -309,7 +305,7 @@ def command_data_dictionary(
error_code,
error_instrument,
error_level,
) = get_error_info(file_results)
) = read_robot_logs.get_error_info(file_results)

all_pipette_commands_list = pipette_commands(file_results)
all_module_commands_list = module_commands(file_results)
Expand Down Expand Up @@ -491,33 +487,33 @@ def command_data_dictionary(
google_sheet_movement, csv_movement = set_up_data_sheet(
3, google_sheet_name, "Movement", movement_headers
)
runs_from_storage = get_run_ids_from_storage(storage_directory)
runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
i = 0
n = 0
m = 0
p = 0
runs_in_sheet = read_abr_data_sheet(
runs_in_sheet = read_robot_logs.read_abr_data_sheet(
storage_directory, csv_instruments, google_sheet_instruments
)
runs_to_save = get_unseen_run_ids(runs_from_storage, runs_in_sheet)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs_from_storage, runs_in_sheet)
(
runs_and_instrument_commands,
runs_and_module_commands,
runs_and_setup_commands,
runs_and_move_commands,
) = command_data_dictionary(runs_to_save, storage_directory, i, m, n, p)
write_to_abr_sheet(
read_robot_logs.write_to_abr_sheet(
runs_and_instrument_commands,
storage_directory,
csv_instruments,
google_sheet_instruments,
)
write_to_abr_sheet(
read_robot_logs.write_to_abr_sheet(
runs_and_module_commands, storage_directory, csv_modules, google_sheet_modules
)
write_to_abr_sheet(
read_robot_logs.write_to_abr_sheet(
runs_and_setup_commands, storage_directory, csv_setup, google_sheet_setup
)
write_to_abr_sheet(
read_robot_logs.write_to_abr_sheet(
runs_and_move_commands, storage_directory, csv_movement, google_sheet_movement
)
124 changes: 14 additions & 110 deletions hardware-testing/hardware_testing/abr_tools/abr_read_logs.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
"""Read ABR run logs and save data to ABR testing csv and google sheet."""
from .abr_run_logs import get_run_ids_from_storage, get_unseen_run_ids
from .error_levels import ERROR_LEVELS_PATH
from typing import Set, Dict, Tuple, Any, List
from typing import Set, Dict, Any
import argparse
import os
import csv
import json
import sys
from datetime import datetime, timedelta
import time as t
from . import read_robot_logs


def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
Expand All @@ -30,59 +27,6 @@ def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
return all_modules


def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, str]:
    """Determine whether errors exist in a run log and document them.

    Args:
        file_results: Parsed run-log JSON for a single run.

    Returns:
        Tuple of (error count, error type, error code, error instrument,
        error level). The string fields are empty when the run has no
        errors or when a value cannot be determined.
    """
    # Load the error-level lookup table. Column 1 holds the error code and
    # column 4 the severity level assigned to that code.
    with open(ERROR_LEVELS_PATH, "r") as error_file:
        error_levels = list(csv.reader(error_file))
    num_of_errors = len(file_results["errors"])
    if num_of_errors == 0:
        return 0, "", "", "", ""
    commands_of_run: List[Dict[str, Any]] = file_results.get("commands", [])
    # The final command of an errored run carries the detailed error info
    # when present; guard against an empty command list (the original
    # indexed [-1] unconditionally and would raise IndexError).
    run_command_error: Dict[str, Any] = commands_of_run[-1] if commands_of_run else {}
    if len(run_command_error.get("error", "")) > 1:
        error_type = run_command_error["error"].get("errorType", "")
        error_code = run_command_error["error"].get("errorCode", "")
        try:
            # Instrument error: the failing node is recorded.
            error_instrument = run_command_error["error"]["errorInfo"]["node"]
        except KeyError:
            # Module error: the failing port is recorded instead.
            error_instrument = run_command_error["error"]["errorInfo"].get("port", "")
    else:
        # Fall back to the run-level error list.
        error_type = file_results["errors"][0]["errorType"]
        print(error_type)
        error_code = file_results["errors"][0]["errorCode"]
        error_instrument = file_results["errors"][0]["detail"]
    # Fix: default the level so an unknown error code cannot leave
    # error_level unbound and raise NameError at the return below.
    error_level = ""
    for error in error_levels:
        if error[1] == error_code:
            error_level = error[4]
            break

    return num_of_errors, error_type, error_code, error_instrument, error_level


def create_abr_data_sheet(storage_directory: str, file_name: str, headers: List) -> str:
    """Create the csv file used to log ABR data, if it does not exist yet.

    Returns the csv file name (``file_name`` plus ``.csv``); the file itself
    lives inside ``storage_directory``. An existing sheet is left untouched.
    """
    csv_file_name = file_name + ".csv"
    print(csv_file_name)
    full_path = os.path.join(storage_directory, csv_file_name)
    if os.path.exists(full_path):
        # Never clobber an already-collected data sheet.
        print(f"File {full_path} located. Not overwriting.")
        return csv_file_name
    with open(full_path, "w") as new_sheet:
        # Write only the header row; data rows are appended later.
        csv.DictWriter(new_sheet, fieldnames=headers).writeheader()
    print(f"Created file. Located: {full_path}.")
    return csv_file_name


def create_data_dictionary(
runs_to_save: Set[str], storage_directory: str
) -> Dict[Any, Dict[str, Any]]:
Expand All @@ -109,7 +53,7 @@ def create_data_dictionary(
error_code,
error_instrument,
error_level,
) = get_error_info(file_results)
) = read_robot_logs.get_error_info(file_results)
all_modules = get_modules(file_results)

start_time_str, complete_time_str, start_date, run_time_min = (
Expand Down Expand Up @@ -162,52 +106,6 @@ def create_data_dictionary(
return runs_and_robots


def read_abr_data_sheet(
    storage_directory: str, file_name_csv: str, google_sheet: Any
) -> Set[str]:
    """Reads current run sheet to determine what new run data should be added.

    Collects every run id already recorded in the local csv, then refreshes
    the google sheet helper (header row and row index) so it is ready for
    subsequent writes. Returns the set of run ids found in the csv.
    """
    print(file_name_csv)
    sheet_location = os.path.join(storage_directory, file_name_csv)
    runs_in_sheet = set()
    # Read the CSV file and collect the run id from every data row.
    with open(sheet_location, "r") as csv_start:
        data = csv.DictReader(csv_start)
        headers = data.fieldnames
        if headers is not None:
            for row in data:
                # assumes headers[1] (second column) is the run-id column
                # — TODO confirm against the sheet header layout
                run_id = row[headers[1]]
                runs_in_sheet.add(run_id)
    print(f"There are {str(len(runs_in_sheet))} runs documented in the ABR sheet.")
    # Read Google Sheet: re-authenticate if the token has expired, then
    # sync the header row and cached row index.
    # NOTE(review): "creditals" looks like a typo for "credentials" on the
    # google-sheet helper — confirm the actual attribute name before renaming.
    if google_sheet.creditals.access_token_expired:
        google_sheet.gc.login()
    google_sheet.write_header(headers)
    google_sheet.update_row_index()
    return runs_in_sheet


def write_to_abr_sheet(
    runs_and_robots: Dict[Any, Dict[str, Any]],
    storage_directory: str,
    file_name_csv: str,
    google_sheet: Any,
) -> None:
    """Append each run's data row to the local ABR csv and the google sheet."""
    sheet_location = os.path.join(storage_directory, file_name_csv)
    with open(sheet_location, "a", newline="") as f:
        writer = csv.writer(f)
        # dict preserves insertion order, so this matches iterating the
        # key list by index.
        for run_data in runs_and_robots.values():
            row_list = list(run_data.values())
            writer.writerow(row_list)
            # Refresh expired credentials before each upload.
            if google_sheet.creditals.access_token_expired:
                google_sheet.gc.login()
            google_sheet.update_row_index()
            google_sheet.write_to_row(row_list)
            # Throttle uploads between rows.
            t.sleep(3)


if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Pulls run logs from ABR robots.")
parser.add_argument(
Expand Down Expand Up @@ -273,9 +171,15 @@ def write_to_abr_sheet(
"magneticBlockV1",
"thermocyclerModuleV2",
]
runs_from_storage = get_run_ids_from_storage(storage_directory)
file_name_csv = create_abr_data_sheet(storage_directory, file_name, headers)
runs_in_sheet = read_abr_data_sheet(storage_directory, file_name_csv, google_sheet)
runs_to_save = get_unseen_run_ids(runs_from_storage, runs_in_sheet)
runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
file_name_csv = read_robot_logs.create_abr_data_sheet(
storage_directory, file_name, headers
)
runs_in_sheet = read_robot_logs.read_abr_data_sheet(
storage_directory, file_name_csv, google_sheet
)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs_from_storage, runs_in_sheet)
runs_and_robots = create_data_dictionary(runs_to_save, storage_directory)
write_to_abr_sheet(runs_and_robots, storage_directory, file_name_csv, google_sheet)
read_robot_logs.write_to_abr_sheet(
runs_and_robots, storage_directory, file_name_csv, google_sheet
)
27 changes: 3 additions & 24 deletions hardware-testing/hardware_testing/abr_tools/abr_run_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,28 +5,7 @@
import json
import traceback
import requests


def get_run_ids_from_storage(storage_directory: str) -> Set[str]:
    """Read all JSON run logs in a storage directory and collect run ids.

    Args:
        storage_directory: Directory holding downloaded run-log ``.json``
            files. Created if it does not already exist.

    Returns:
        Set of every non-empty ``run_id`` found across the JSON files.
    """
    os.makedirs(storage_directory, exist_ok=True)
    run_ids: Set[str] = set()
    for this_file in os.listdir(storage_directory):
        read_file = os.path.join(storage_directory, this_file)
        if read_file.endswith(".json"):
            # Fix: use a context manager so every file handle is closed
            # promptly (the original `json.load(open(...))` leaked handles).
            with open(read_file) as json_file:
                file_results = json.load(json_file)
            run_id = file_results.get("run_id", "")
            if len(run_id) > 0:
                run_ids.add(run_id)
    return run_ids


def get_unseen_run_ids(runs: Set[str], runs_from_storage: Set[str]) -> Set[str]:
    """Return the run ids in ``runs`` that are absent from ``runs_from_storage``."""
    new_runs = runs.difference(runs_from_storage)
    print(f"There are {str(len(new_runs))} new run(s) to save.")
    return new_runs
from . import read_robot_logs


def get_run_ids_from_robot(ip: str) -> Set[str]:
Expand Down Expand Up @@ -116,11 +95,11 @@ def get_all_run_logs(storage_directory: str) -> None:
ip_address_list = ip_file["ip_address_list"]
print(ip_address_list)

runs_from_storage = get_run_ids_from_storage(storage_directory)
runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
for ip in ip_address_list:
try:
runs = get_run_ids_from_robot(ip)
runs_to_save = get_unseen_run_ids(runs, runs_from_storage)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs, runs_from_storage)
save_runs(runs_to_save, ip, storage_directory)
except Exception:
print(f"Failed to read IP address: {ip}.")
Expand Down
Loading

0 comments on commit eb4692a

Please sign in to comment.