Changes to print statements
rclarke0 committed Apr 24, 2024
1 parent 6e8c480 commit eeaccab
Showing 5 changed files with 47 additions and 35 deletions.
11 changes: 8 additions & 3 deletions abr-testing/abr_testing/automation/google_drive_tool.py
@@ -4,6 +4,7 @@
import webbrowser
import mimetypes
from oauth2client.service_account import ServiceAccountCredentials # type: ignore[import]
import googleapiclient # type: ignore[import]
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload

@@ -58,7 +59,6 @@ def list_folder(self, delete: Any = False) -> Set[str]:
break
if not file_names:
print("No folders or files found in Google Drive.")
print(f"{len(file_names)} item(s) in Google Drive")
return file_names

def delete_files(self, file_or_folder_id: str) -> None:
@@ -98,18 +98,22 @@ def upload_missing_files(self, storage_directory: str) -> None:
file for file in os.listdir(storage_directory) if file.endswith(".json")
)
missing_files = local_files_json - set(google_drive_files_json)
print(f"Missing files: {len(missing_files)}")
# Upload missing files.
uploaded_files = []
for file in missing_files:
file_path = os.path.join(storage_directory, file)
uploaded_file_id = google_drive.upload_file(self, file_path)
self.share_permissions(uploaded_file_id)
uploaded_files.append(
{"name": os.path.basename(file_path), "id": uploaded_file_id}
)
try:
self.share_permissions(uploaded_file_id)
except googleapiclient.errors.HttpError:
continue

# Fetch the updated file list after all files are uploaded
files = google_drive.list_folder(self)

file_names = [file for file in files]
for uploaded_file in uploaded_files:
this_name = uploaded_file["name"]
@@ -121,6 +125,7 @@ def upload_missing_files(self, storage_directory: str) -> None:
print(
f"File '{this_name}' was not found in the list of files after uploading."
)
print(f"{len(files)} item(s) in Google Drive")

def open_folder(self) -> Optional[str]:
"""Open folder in web browser."""
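The upload loop above now wraps `share_permissions` in a try/except so a single failed share no longer aborts the remaining uploads. A minimal sketch of that skip-on-error pattern, assuming a Drive v3 resource built with `googleapiclient`; the `share_files_best_effort` helper and the permission body are illustrative, not part of this commit:

```python
from typing import List

import googleapiclient.errors  # type: ignore[import]


def share_files_best_effort(drive_service, file_ids: List[str]) -> List[str]:
    """Share each uploaded file; skip, rather than abort, when a share call fails."""
    shared: List[str] = []
    for file_id in file_ids:
        try:
            # Illustrative permission body; the real share_permissions settings may differ.
            drive_service.permissions().create(
                fileId=file_id,
                body={"type": "anyone", "role": "reader"},
            ).execute()
            shared.append(file_id)
        except googleapiclient.errors.HttpError:
            # One failed share should not stop the remaining files.
            continue
    return shared
```
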
7 changes: 6 additions & 1 deletion abr-testing/abr_testing/automation/google_sheets_tool.py
@@ -2,6 +2,7 @@
import gspread # type: ignore[import]
import socket
import httplib2
import time as t
from datetime import datetime
from oauth2client.service_account import ServiceAccountCredentials # type: ignore[import]
from typing import Dict, List, Any, Set, Tuple
@@ -71,6 +72,10 @@ def write_to_row(self, data: List) -> None:
print("UNABLE TO CONNECT TO SERVER!!, CHECK CONNECTION")
except Exception as error:
print(error.__traceback__)
except gspread.exceptions.APIError:
print("Write quotes exceeded. Waiting 30 sec before writing.")
t.sleep(30)
self.worksheet.insert_row(data, index=self.row_index)

def delete_row(self, row_index: int) -> None:
"""Delete Row from google sheet."""
@@ -94,7 +99,7 @@ def get_column(self, column_number: int) -> Set[str]:
def get_index_row(self) -> int:
"""Check for the next available row to write too."""
row_index = len(self.get_column(1))
print("Row Index: ", row_index)
print(f"Row Index: {row_index} recorded on google sheet.")
return row_index

def update_row_index(self) -> None:
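The new `gspread.exceptions.APIError` branch above waits out the Sheets write quota and retries the insert. A rough sketch of that wait-and-retry behavior as a standalone helper; the 30-second wait mirrors the hunk, but `retry_insert_row` itself is illustrative, not part of the commit:

```python
import time
from typing import Any, List

import gspread  # type: ignore[import]


def retry_insert_row(
    worksheet: "gspread.Worksheet", data: List[Any], index: int, wait_s: int = 30
) -> None:
    """Insert a row, waiting once if the Sheets write quota is exceeded."""
    try:
        worksheet.insert_row(data, index=index)
    except gspread.exceptions.APIError:
        print(f"Write quota exceeded. Waiting {wait_s} sec before writing.")
        time.sleep(wait_s)
        worksheet.insert_row(data, index=index)
```
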
34 changes: 17 additions & 17 deletions abr-testing/abr_testing/data_collection/abr_calibration_logs.py
@@ -5,6 +5,7 @@
import json
import gspread # type: ignore[import]
import sys
import time as t
from abr_testing.data_collection import read_robot_logs
from abr_testing.automation import google_drive_tool, google_sheets_tool

@@ -18,16 +19,20 @@ def check_for_duplicates(
headers: List[str],
) -> Union[List[str], None]:
"""Check google sheet for duplicates."""
t.sleep(5)
serials = google_sheet.get_column(col_1)
modify_dates = google_sheet.get_column(col_2)
# check for complete calibration.
if len(row[-1]) > 0:
for serial, modify_date in zip(serials, modify_dates):
if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
print(f"Skipped row for instrument {serial}. Already on Google Sheet.")
return None
read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers)
print(f"Writing calibration for: {serial}")
# Check for calibration time stamp.
if row[-1] is not None:
if len(row[-1]) > 0:
for serial, modify_date in zip(serials, modify_dates):
if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
print(
f"Skipped row for instrument {serial}. Already on Google Sheet."
)
return None
read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers)
print(f"Writing calibration for: {row[7]}")
return row


@@ -206,15 +211,10 @@ def upload_calibration_offsets(
if ip_or_all == "ALL":
ip_address_list = ip_file["ip_address_list"]
for ip in ip_address_list:
print(ip)
try:
saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
ip, storage_directory
)
upload_calibration_offsets(calibration, storage_directory)
except Exception:
print(f"ERROR: Failed to read IP address: {ip}")
continue
saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
ip, storage_directory
)
upload_calibration_offsets(calibration, storage_directory)
else:
saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
ip_or_all, storage_directory
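`check_for_duplicates` above skips any calibration row whose serial and modify date already appear together on the Google Sheet, and only writes rows that carry a calibration timestamp. The comparison, pulled out as a standalone sketch; the `row_is_duplicate` helper is illustrative, not part of the commit:

```python
from typing import List


def row_is_duplicate(
    row: List[str], serials: List[str], modify_dates: List[str], col_1: int, col_2: int
) -> bool:
    """Return True if this row's (serial, modify date) pair is already on the sheet."""
    for serial, modify_date in zip(serials, modify_dates):
        if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
            return True
    return False
```
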
11 changes: 4 additions & 7 deletions abr-testing/abr_testing/data_collection/get_run_logs.py
@@ -104,13 +104,10 @@ def get_all_run_logs(storage_directory: str) -> None:
ip_address_list = ip_file["ip_address_list"]
runs_from_storage = read_robot_logs.get_run_ids_from_google_drive(google_drive)
for ip in ip_address_list:
try:
runs = get_run_ids_from_robot(ip)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs, runs_from_storage)
save_runs(runs_to_save, ip, storage_directory)
google_drive.upload_missing_files(storage_directory)
except Exception:
print(f"ERROR: Failed to read IP address: {ip}.")
runs = get_run_ids_from_robot(ip)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs, runs_from_storage)
save_runs(runs_to_save, ip, storage_directory)
google_drive.upload_missing_files(storage_directory)


if __name__ == "__main__":
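The loop above now runs without a blanket try/except, so a robot that cannot be reached surfaces an error (see the health-check change in read_robot_logs.py below) instead of being silently skipped. For context, a sketch of how the IP list it iterates over is shaped; the file name here is hypothetical, but the `"ip_address_list"` key matches the surrounding code:

```python
import json
import os
from typing import List


def load_ip_list(storage_directory: str, file_name: str = "ip_addresses.json") -> List[str]:
    """Load robot IPs from a JSON file shaped like {"ip_address_list": [...]}."""
    # file_name is an assumed example; the real file name is not shown in this commit.
    with open(os.path.join(storage_directory, file_name)) as ip_file:
        return json.load(ip_file)["ip_address_list"]
```
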
19 changes: 12 additions & 7 deletions abr-testing/abr_testing/data_collection/read_robot_logs.py
@@ -12,6 +12,7 @@
import time as t
import json
import requests
import sys


def lpc_data(file_results: Dict[str, Any], protocol_info: Dict) -> List[Dict[str, Any]]:
@@ -72,7 +73,7 @@ def hs_commands(file_results: Dict[str, Any]) -> Dict[str, float]:
hs_home_count: float = 0.0
hs_speed: float = 0.0
hs_rotations: Dict[str, float] = dict()
hs_temps: Dict[str, float] = dict()
hs_temps: Dict[float, float] = dict()
temp_time = None
shake_time = None
deactivate_time = None
@@ -266,7 +267,7 @@ def create_abr_data_sheet(
file_name_csv = file_name + ".csv"
sheet_location = os.path.join(storage_directory, file_name_csv)
if os.path.exists(sheet_location):
print(f"File {sheet_location} located. Not overwriting.")
return sheet_location
else:
with open(sheet_location, "w") as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=headers)
@@ -380,7 +381,6 @@ def get_run_ids_from_storage(storage_directory: str) -> Set[str]:
def get_unseen_run_ids(runs: Set[str], runs_from_storage: Set[str]) -> Set[str]:
"""Subtracts runs from storage from current runs being read."""
runs_to_save = runs - runs_from_storage
print(f"There are {str(len(runs_to_save))} new run(s) to save.")
return runs_to_save


@@ -418,7 +418,7 @@ def write_to_sheets(
google_sheet.write_header(headers)
google_sheet.update_row_index()
google_sheet.write_to_row(row_list)
t.sleep(5) # Sleep added to avoid API error.
t.sleep(5)


def get_calibration_offsets(
@@ -427,9 +427,14 @@ def get_calibration_offsets(
"""Connect to robot via ip and get calibration data."""
calibration = dict()
# Robot Information [Name, Software Version]
response = requests.get(
f"http://{ip}:31950/health", headers={"opentrons-version": "3"}
)
try:
response = requests.get(
f"http://{ip}:31950/health", headers={"opentrons-version": "3"}
)
print(f"Connected to {ip}")
except Exception:
print(f"ERROR: Failed to read IP address: {ip}")
sys.exit()
health_data = response.json()
robot_name = health_data.get("name", "")
api_version = health_data.get("api_version", "")
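`get_calibration_offsets` now fails fast when a robot is unreachable: the `/health` request is wrapped in a try/except that prints the failing IP and exits. A minimal sketch of that check in isolation; the endpoint and header come from the code above, while the helper name, the narrower exception class, and the timeout are illustrative choices:

```python
import sys
from typing import Any, Dict

import requests


def check_robot_health(ip: str) -> Dict[str, Any]:
    """Return the robot's /health payload, exiting if the robot is unreachable."""
    try:
        response = requests.get(
            f"http://{ip}:31950/health",
            headers={"opentrons-version": "3"},
            timeout=10,  # assumed value; the commit's request has no explicit timeout
        )
        print(f"Connected to {ip}")
    except requests.exceptions.RequestException:
        print(f"ERROR: Failed to read IP address: {ip}")
        sys.exit()
    return response.json()
```
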
