Connect Robot Ambient Conditions and Run Data (#15504)

# Overview

Adds average temperature and relative humidity (RH) columns to the ABR run data sheet.
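
In outline, the new `abr_testing/tools/add_temprh_to_abr.py` script downloads both Google Sheets as CSV, averages the ambient readings recorded inside each run's start/end window, and writes the averages back to the run data sheet. A condensed sketch of the flow, lifted from the script's `__main__` block in the diff below (it assumes the script's own helpers and argparse defaults are in scope):

```python
# Condensed flow of add_temprh_to_abr.py (full file in the diff below).
file_paths, credentials_path = connect_and_download(
    google_sheets_to_download, storage_directory
)
abr_data = read_csv_as_dict(file_paths[0])   # ABR-run-data export
temp_data = read_csv_as_dict(file_paths[1])  # ABR Ambient Conditions export
abr_google_sheet = google_sheets_tool.google_sheet(credentials_path, "ABR-run-data", 0)
compare_run_to_temp_data(abr_data, temp_data, abr_google_sheet)
```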

# Test Plan

Ran the script against both Google Sheets; rows updated successfully.

# Changelog


# Review requests


# Risk assessment

rclarke0 authored Jun 25, 2024
1 parent ee62429 commit 2172685
Showing 9 changed files with 478 additions and 162 deletions.
2 changes: 2 additions & 0 deletions abr-testing/Pipfile
@@ -16,6 +16,8 @@ opentrons-hardware = {editable = true, path = "./../hardware", extras=['FLEX']}
 opentrons = {editable = true, path = "./../api", extras=['flex-hardware']}
 slackclient = "*"
 slack-sdk = "*"
+scikit-learn = "*"
+pandas = "*"

 [dev-packages]
 atomicwrites = "==1.4.1"
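
Neither `pandas` nor `scikit-learn` is exercised by the code in this PR; they appear to be groundwork for the analysis flagged by the `# TODO: automate data analysis` note in `abr_lpc.py`. A purely illustrative sketch of loading the downloaded run-data export with pandas (the path is the script's default storage directory, and the selected columns are assumed to exist in the live sheet):

```python
# Illustrative only: pandas is added as a dependency here but not yet used.
# The file below is what connect_and_download() saves for the run-data sheet.
import pandas as pd

run_data = pd.read_csv("C:/Users/Rhyann Clarke/test_folder/ABR-run-data")
print(run_data[["Robot", "Average Temp (oC)", "Average RH(%)"]].head())
```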
449 changes: 295 additions & 154 deletions abr-testing/Pipfile.lock

Large diffs are not rendered by default.

21 changes: 21 additions & 0 deletions abr-testing/abr_testing/automation/google_drive_tool.py
@@ -197,6 +197,27 @@ def share_permissions(self, file_id: str) -> None:
             fileId=file_id, body=new_permission, transferOwnership=False  # type: ignore
         ).execute()

+    def download_single_file(
+        self, save_directory: str, file_id: str, file_name: str, mime_type: str
+    ) -> str:
+        """Download single file."""
+        # google sheets: text/csv
+        file_path = ""
+        if mime_type:
+            request = self.drive_service.files().export_media(
+                fileId=file_id, mimeType=mime_type
+            )
+        else:
+            request = self.drive_service.files().get_media(fileId=file_id)
+        file_path = os.path.join(save_directory, file_name)
+        fh = io.FileIO(file_path, "wb")
+        downloader = MediaIoBaseDownload(fh, request)
+        done = False
+        while done is False:
+            status, done = downloader.next_chunk()
+            print(f"Downloading {file_name}... {int(status.progress() * 100)}%")
+        return file_path
+
     def download_files(
         self, files_to_download: List[Dict[str, Any]], save_directory: str
     ) -> None:
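
A minimal usage sketch for the new helper, constructing the drive client the same way `add_temprh_to_abr.py` does; the folder ID, file ID, and service-account email below are placeholders, not values from this PR:

```python
# Sketch: export a Google Sheet as CSV via the new download_single_file helper.
# IDs, email, and paths are placeholders.
from abr_testing.automation import google_drive_tool

google_drive = google_drive_tool.google_drive(
    "C:/Users/Rhyann Clarke/test_folder/credentials.json",
    "<drive-folder-id>",
    "<service-account-email>",
)
csv_path = google_drive.download_single_file(
    "C:/Users/Rhyann Clarke/test_folder",  # save_directory
    "<sheet-file-id>",                     # file_id
    "ABR-run-data",                        # file_name
    "text/csv",                            # mime_type: Sheets must be exported, not fetched raw
)
print(csv_path)
```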
12 changes: 9 additions & 3 deletions abr-testing/abr_testing/automation/google_sheets_tool.py
@@ -142,7 +142,7 @@ def column_letter_to_index(column_letter: str) -> int:
             try:
                 float_value = float(value)
                 user_entered_value = {"numberValue": float_value}
-            except ValueError:
+            except (ValueError, TypeError):
                 user_entered_value = {"stringValue": str(value)}
             requests.append(
                 {
@@ -172,14 +172,20 @@ def update_cell(
         self.spread_sheet.worksheet(sheet_title).update_cell(row, column, single_data)
         return row, column, single_data

-    def get_all_data(self) -> List[Dict[str, Any]]:
+    def get_all_data(
+        self, expected_headers: Optional[Set[str]]
+    ) -> List[Dict[str, Any]]:
         """Get all the Data recorded from worksheet."""
-        return self.worksheet.get_all_records()
+        return self.worksheet.get_all_records(expected_headers=expected_headers)

     def get_column(self, column_number: int) -> Set[str]:
         """Get all values in column."""
         return self.worksheet.col_values(column_number)

+    def get_row(self, row_number: int) -> Set[str]:
+        """Get all values in row."""
+        return self.worksheet.row_values(row_number)
+
     def get_cell(self, sheet_title: str, cell: str) -> Any:
         """Get cell value with location ex: 'A1'."""
         return self.spread_sheet.worksheet(sheet_title).acell(cell).value
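
The call sites updated in this PR all follow the same pattern: read the sheet's first row with the new `get_row` and pass it to `get_all_data` as `expected_headers`, which tells gspread's `get_all_records` which header names to trust when the sheet has blank or repeated header cells. A minimal sketch mirroring those call sites (the credentials path is a placeholder):

```python
from abr_testing.automation import google_sheets_tool

google_sheet = google_sheets_tool.google_sheet(
    "<path-to>/credentials.json", "ABR-run-data", 0
)
# Use the sheet's own header row so get_all_records() accepts duplicate or
# empty header cells instead of raising.
headers = google_sheet.get_row(1)
all_runs = google_sheet.get_all_data(headers)
print(f"{len(all_runs)} rows of run data")
```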
3 changes: 3 additions & 0 deletions abr-testing/abr_testing/data_collection/abr_google_drive.py
@@ -115,6 +115,8 @@ def create_data_dictionary(
             plate_measure = {
                 "Plate Measured": plate,
                 "End Volume Accuracy (%)": accuracy,
+                "Average Temp (oC)": "",
+                "Average RH(%)": "",
             }
             row_for_lpc = {**row, **all_modules, **notes}
             row_2 = {
@@ -203,6 +205,7 @@
     ) = create_data_dictionary(missing_runs_from_gs, storage_directory, "", "", "")

     start_row = google_sheet.get_index_row() + 1
+    print(start_row)
     google_sheet.batch_update_cells(transposed_runs_and_robots, "A", start_row, "0")

     # Add LPC to google sheet
3 changes: 2 additions & 1 deletion abr-testing/abr_testing/data_collection/abr_robot_error.py
@@ -255,7 +255,8 @@ def get_run_error_info_from_robot(
     print(f"Making ticket for {summary}.")
     # TODO: make argument or see if I can get rid of with using board_id.
     project_key = "RABR"
-    parent_key = project_key + "-" + robot[-1]
+    print(robot)
+    parent_key = project_key + "-" + robot.split("ABR")[1]
     # TODO: read board to see if ticket for run id already exists.
     # CREATE TICKET
     issue_key = ticket.create_ticket(
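
The `parent_key` change matters when the robot identifier carries more than one character after "ABR": `robot[-1]` keeps only the last character, while splitting on "ABR" keeps the whole suffix. A small illustration (the robot name is hypothetical):

```python
# Hypothetical robot name, used only to show the difference in behavior.
project_key = "RABR"
robot = "DVT1ABR10"

print(project_key + "-" + robot[-1])              # RABR-0   (old: drops a digit)
print(project_key + "-" + robot.split("ABR")[1])  # RABR-10  (new)
```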
9 changes: 6 additions & 3 deletions abr-testing/abr_testing/tools/abr_lpc.py
@@ -10,7 +10,8 @@ def remove_duplicate_data() -> None:
     seen = set()
     new_values = []
     row_indices = []
-    sheet_data = google_sheet_lpc.get_all_data()
+    headers = google_sheet_lpc.get_row(1)
+    sheet_data = google_sheet_lpc.get_all_data(headers)
     for i, row in enumerate(sheet_data):
         key = (
             row["Robot"],
@@ -49,8 +50,10 @@
     except FileNotFoundError:
         print(f"Add credentials.json file to: {storage_directory}.")
         sys.exit()
+
     google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)
-    print(len(google_sheet_lpc.get_all_data()))
+    headers = google_sheet_lpc.get_row(1)
+    print(len(google_sheet_lpc.get_all_data(headers)))
     remove_duplicate_data()
-    num_of_rows = print(len(google_sheet_lpc.get_all_data()))
+    num_of_rows = print(len(google_sheet_lpc.get_all_data(headers)))
     # TODO: automate data analysis
4 changes: 3 additions & 1 deletion abr-testing/abr_testing/tools/abr_scale.py
@@ -42,7 +42,9 @@ def get_all_plate_readings(
 ) -> float:
     """Calculate accuracy of liquid moved on final measurement step."""
     accuracy = 0.0
-    all_data = google_sheet.get_all_data()
+    header_list = google_sheet.get_row(1)
+    all_data = google_sheet.get_all_data(header_list)
+
     # Get mass of first reading
     mass_1_readings = []
     for row in all_data:
137 changes: 137 additions & 0 deletions abr-testing/abr_testing/tools/add_temprh_to_abr.py
@@ -0,0 +1,137 @@
"""Add temperature and humidity data to ABR-run-data sheet."""

from abr_testing.automation import google_sheets_tool
from abr_testing.automation import google_drive_tool
import argparse
import csv
import sys
import os
from typing import Dict, Tuple, Any, List
from statistics import mean, StatisticsError


def add_robot_lifetime(abr_data: List[Dict[str, Any]]) -> None:
    """Add % Robot Lifetime to each run."""
    # TODO: add robot lifetime to each run.


def compare_run_to_temp_data(
    abr_data: List[Dict[str, Any]], temp_data: List[Dict[str, Any]], google_sheet: Any
) -> None:
    """Read ABR Data and compare robot and timestamp columns to temp data."""
    row_update = 0
    for run in abr_data:
        run_id = run["Run_ID"]
        try:
            average_temp = float(run["Average Temp (oC)"])
        except ValueError:
            average_temp = 0
        if len(run_id) < 1 or average_temp > 0:
            continue
        else:
            # Determine which runs do not have average temp/rh data
            temps = []
            rel_hums = []
            for recording in temp_data:
                temp_robot = recording["Robot"]
                if len(recording["Timestamp"]) > 1:
                    timestamp = recording["Timestamp"]
                    if (
                        temp_robot == run["Robot"]
                        and timestamp >= run["Start_Time"]
                        and timestamp <= run["End_Time"]
                    ):
                        temps.append(float(recording["Temp (oC)"]))
                        rel_hums.append(float(recording["Relative Humidity (%)"]))
            try:
                avg_temps = mean(temps)
                avg_humidity = mean(rel_hums)
                row_num = google_sheet.get_row_index_with_value(run_id, 2)
                # Write average temperature
                google_sheet.update_cell("Sheet1", row_num, 46, avg_temps)
                # Write average humidity
                google_sheet.update_cell("Sheet1", row_num, 47, avg_humidity)
                row_update += 1
                print(f"Updated row {row_num}.")
            except StatisticsError:
                avg_temps = None
                avg_humidity = None
    print(f"Updated {row_update} rows with temp and RH data.")


def read_csv_as_dict(file_path: str) -> List[Dict[str, Any]]:
    """Read a CSV file and return its content as a list of dictionaries."""
    with open(file_path, mode="r", newline="", encoding="utf-8") as csvfile:
        reader = csv.DictReader(csvfile)
        data = [row for row in reader]
    return data


def connect_and_download(
    sheets: Dict[str, str], storage_directory: str
) -> Tuple[List[str], str]:
    """Connect to google sheet and download."""
    try:
        credentials_path = os.path.join(storage_directory, "credentials.json")
        google_drive = google_drive_tool.google_drive(
            credentials_path,
            "1W8S3EV3cIfC-ZoRF3km0ad5XqyVkO3Tu",
            "[email protected]",
        )
        print("connected to gd")
    except FileNotFoundError:
        print(f"Add credentials.json file to: {storage_directory}.")
        sys.exit()
    file_paths = []
    for sheet in sheets.items():
        file_name, file_id = sheet[0], sheet[1]
        print(file_name)
        file_path = google_drive.download_single_file(
            storage_directory, file_id, file_name, "text/csv"
        )
        file_paths.append(file_path)
    return file_paths, credentials_path


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Adds average robot ambient conditions to run sheet."
    )
    parser.add_argument(
        "--abr-data-sheet",
        type=str,
        default="1M6LSLNwvWuHQOwIwUpblF_Eyx4W5y5gXgdU3rjU2XFk",
        help="end of url of main data sheet.",
    )
    parser.add_argument(
        "--room-conditions-sheet",
        type=str,
        default="1cIjSvK_mPCq4IFqUPB7SgdDuuMKve5kJh0xyH4znAd0",
        help="end of url of ambient conditions data sheet.",
    )
    parser.add_argument(
        "--storage-directory",
        type=str,
        default="C:/Users/Rhyann Clarke/test_folder",
        help="Path to long term storage directory for run logs.",
    )
    args = parser.parse_args()
    google_sheets_to_download = {
        "ABR-run-data": args.abr_data_sheet,
        "ABR Ambient Conditions": args.room_conditions_sheet,
    }
    storage_directory = args.storage_directory
    # Download google sheets.
    file_paths, credentials_path = connect_and_download(
        google_sheets_to_download, storage_directory
    )
    # Read the downloaded csvs.
    abr_data = read_csv_as_dict(file_paths[0])
    temp_data = read_csv_as_dict(file_paths[1])
    # Compare robot and timestamps, then write averages for matching runs.
    abr_google_sheet = google_sheets_tool.google_sheet(
        credentials_path, "ABR-run-data", 0
    )

    compare_run_to_temp_data(abr_data, temp_data, abr_google_sheet)
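
To make the core step of `compare_run_to_temp_data` concrete, here is a self-contained toy version of its windowing-and-averaging logic. All values are fabricated for illustration, and the string comparison of timestamps only works if the sheets store timestamps in a lexicographically sortable format, which is an assumption about the live data:

```python
# Toy illustration of the windowing step: keep ambient readings whose timestamp
# falls inside the run's start/end window, then average them. Fabricated data.
from statistics import mean

run = {
    "Robot": "ABR-X",
    "Start_Time": "2024-06-20 09:00:00",
    "End_Time": "2024-06-20 11:00:00",
}
readings = [
    {"Robot": "ABR-X", "Timestamp": "2024-06-20 09:30:00", "Temp (oC)": "22.1", "Relative Humidity (%)": "41.0"},
    {"Robot": "ABR-X", "Timestamp": "2024-06-20 10:30:00", "Temp (oC)": "22.5", "Relative Humidity (%)": "43.0"},
    {"Robot": "ABR-X", "Timestamp": "2024-06-20 12:00:00", "Temp (oC)": "23.0", "Relative Humidity (%)": "45.0"},
]
in_window = [
    r
    for r in readings
    if r["Robot"] == run["Robot"]
    and run["Start_Time"] <= r["Timestamp"] <= run["End_Time"]
]
print(mean(float(r["Temp (oC)"]) for r in in_window))              # 22.3
print(mean(float(r["Relative Humidity (%)"]) for r in in_window))  # 42.0
```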
