
fix (abr-testing): Checks that most recent run is completed in abr_scale script #15661

Merged · 3 commits · Jul 19, 2024
6 changes: 4 additions & 2 deletions abr-testing/abr_testing/automation/google_sheets_tool.py
@@ -136,7 +136,6 @@ def column_letter_to_index(column_letter: str) -> int:

for col_offset, col_values in enumerate(data):
column_index = start_column_index + col_offset
- # column_letter = index_to_column_letter(column_index)
for row_offset, value in enumerate(col_values):
row_index = start_row + row_offset
try:
@@ -163,7 +162,10 @@ def column_letter_to_index(column_letter: str) -> int:
)

body = {"requests": requests}
- self.spread_sheet.batch_update(body=body)
+ try:
+     self.spread_sheet.batch_update(body=body)
+ except gspread.exceptions.APIError as e:
+     print(f"ERROR MESSAGE: {e}")

def update_cell(
self, sheet_title: str, row: int, column: int, single_data: Any
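The added guard follows the usual gspread pattern: Spreadsheet.batch_update posts a raw Sheets API request body, and quota, permission, or malformed-request failures surface as gspread.exceptions.APIError. A minimal standalone sketch of the same guard; the credentials file, spreadsheet title, and request body here are placeholder assumptions, not taken from this repo:

import gspread

# Placeholder credentials and spreadsheet; adjust for a real environment.
gc = gspread.service_account(filename="credentials.json")
spread_sheet = gc.open("ABR-run-data")

# Rename the first tab; any valid Sheets API batchUpdate request works here.
body = {
    "requests": [
        {
            "updateSheetProperties": {
                "properties": {"sheetId": 0, "title": "Run Data"},
                "fields": "title",
            }
        }
    ]
}

try:
    spread_sheet.batch_update(body=body)
except gspread.exceptions.APIError as e:
    # Log and keep going rather than crashing the collection script.
    print(f"ERROR MESSAGE: {e}")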
20 changes: 15 additions & 5 deletions abr-testing/abr_testing/data_collection/abr_google_drive.py
@@ -39,6 +39,8 @@ def create_data_dictionary(
"""Pull data from run files and format into a dictionary."""
runs_and_robots: List[Any] = []
runs_and_lpc: List[Dict[str, Any]] = []
+ headers: List[str] = []
+ headers_lpc: List[str] = []
for filename in os.listdir(storage_directory):
file_path = os.path.join(storage_directory, filename)
if file_path.endswith(".json"):
@@ -49,7 +51,14 @@ def create_data_dictionary(
if not isinstance(file_results, dict):
continue
run_id = file_results.get("run_id", "NaN")
+ try:
+     start_time_test = file_results["startedAt"]
+     completed_time_test = file_results["completedAt"]
+ except KeyError:
+     print(f"Run {run_id} is incomplete. Skipping run.")
+     continue
if run_id in runs_to_save:
print("started reading run.")
robot = file_results.get("robot_name")
protocol_name = file_results["protocol"]["metadata"].get("protocolName", "")
software_version = file_results.get("API_Version", "")
@@ -74,13 +83,13 @@
)
try:
start_time = datetime.strptime(
file_results.get("startedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
start_time_test, "%Y-%m-%dT%H:%M:%S.%f%z"
)
adjusted_start_time = start_time - timedelta(hours=4)
start_date = str(adjusted_start_time.date())
start_time_str = str(adjusted_start_time).split("+")[0]
complete_time = datetime.strptime(
file_results.get("completedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
completed_time_test, "%Y-%m-%dT%H:%M:%S.%f%z"
)
adjusted_complete_time = complete_time - timedelta(hours=4)
complete_time_str = str(adjusted_complete_time).split("+")[0]
@@ -130,15 +139,16 @@
**pipette_dict,
**plate_measure,
}
- headers: List[str] = list(row_2.keys())
- # runs_and_robots[run_id] = row_2
+ headers = list(row_2.keys())
runs_and_robots.append(list(row_2.values()))
# LPC Data Recording
runs_and_lpc, headers_lpc = read_robot_logs.lpc_data(
file_results, row_for_lpc, runs_and_lpc
)
else:
continue
+ num_of_runs_read = len(runs_and_robots)
+ print(f"Number of runs read: {num_of_runs_read}")
transposed_runs_and_robots = list(map(list, zip(*runs_and_robots)))
transposed_runs_and_lpc = list(map(list, zip(*runs_and_lpc)))
return transposed_runs_and_robots, headers, transposed_runs_and_lpc, headers_lpc
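Taken together, create_data_dictionary now skips any run log that lacks startedAt or completedAt (a missing key raises KeyError), parses the surviving timestamps with the %Y-%m-%dT%H:%M:%S.%f%z format, and transposes the row records into per-column lists for the column-oriented sheet writer. A self-contained sketch of that flow with invented run data; the fixed 4-hour offset mirrors the script, while the column names and values are illustrative:

from datetime import datetime, timedelta

runs = [
    {"run_id": "a1", "startedAt": "2024-07-18T12:00:00.000000+00:00",
     "completedAt": "2024-07-18T13:30:00.000000+00:00"},
    {"run_id": "b2", "startedAt": "2024-07-18T14:00:00.000000+00:00"},  # incomplete
]

runs_and_robots = []
headers = []
for file_results in runs:
    run_id = file_results.get("run_id", "NaN")
    try:
        start_time_test = file_results["startedAt"]
        completed_time_test = file_results["completedAt"]
    except KeyError:
        print(f"Run {run_id} is incomplete. Skipping run.")
        continue
    start_time = datetime.strptime(start_time_test, "%Y-%m-%dT%H:%M:%S.%f%z")
    complete_time = datetime.strptime(completed_time_test, "%Y-%m-%dT%H:%M:%S.%f%z")
    adjusted_start_time = start_time - timedelta(hours=4)  # UTC -> US Eastern (DST)
    row_2 = {
        "Run_ID": run_id,
        "Start_Time": str(adjusted_start_time).split("+")[0],
        "Run_Time (min)": (complete_time - start_time).total_seconds() / 60,
    }
    headers = list(row_2.keys())
    runs_and_robots.append(list(row_2.values()))

print(f"Number of runs read: {len(runs_and_robots)}")
# Transpose row records into per-column lists for batch_update_cells.
transposed_runs_and_robots = list(map(list, zip(*runs_and_robots)))
print(headers)                     # ['Run_ID', 'Start_Time', 'Run_Time (min)']
print(transposed_runs_and_robots)  # [['a1'], ['2024-07-18 08:00:00'], [90.0]]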
@@ -207,7 +217,6 @@ def create_data_dictionary(
start_row = google_sheet.get_index_row() + 1
print(start_row)
google_sheet.batch_update_cells(transposed_runs_and_robots, "A", start_row, "0")
- # Calculate Robot Lifetimes

# Add LPC to google sheet
google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)
@@ -216,4 +225,5 @@
transposed_runs_and_lpc, "A", start_row_lpc, "0"
)
robots = list(set(google_sheet.get_column(1)))
+ # Calculate Robot Lifetimes
sync_abr_sheet.determine_lifetime(google_sheet)
18 changes: 17 additions & 1 deletion abr-testing/abr_testing/tools/abr_scale.py
@@ -123,9 +123,25 @@ def get_most_recent_run_and_record(
most_recent_run_id = run_list[-1]["id"]
results = get_run_logs.get_run_data(most_recent_run_id, ip)
# Save run information to local directory as .json file
- read_robot_logs.save_run_log_to_json(ip, results, storage_directory)
+ saved_file_path = read_robot_logs.save_run_log_to_json(
+     ip, results, storage_directory
+ )
+ # Check that last run is completed.
+ with open(saved_file_path) as file:
+     file_results = json.load(file)
+ try:
+     file_results["completedAt"]
+ except KeyError:
+     # no completedAt field, get run before the last run.
+     most_recent_run_id = run_list[-2]["id"]
+     results = get_run_logs.get_run_data(most_recent_run_id, ip)
+     # Save run information to local directory as .json file
+     saved_file_path = read_robot_logs.save_run_log_to_json(
+         ip, results, storage_directory
+     )
# Record run to google sheets.
+ print(most_recent_run_id)

(
runs_and_robots,
headers,
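The new logic in abr_scale saves the newest run log, re-reads the saved .json, and checks for a completedAt timestamp before recording, stepping back to run_list[-2] when the newest run is still in progress (a missing dict key raises KeyError, which is why that exception type matters here). The same fallback can be generalized to walk the run list backwards; the helper name and sample data below are hypothetical, not from the PR:

from typing import Any, Dict, List, Optional

def most_recent_completed_run_id(run_list: List[Dict[str, Any]]) -> Optional[str]:
    """Return the id of the newest run that has a completedAt timestamp.

    Hypothetical generalization: the script itself only steps back one run.
    """
    for run in reversed(run_list):
        if run.get("completedAt"):
            return run["id"]
    return None

run_list = [
    {"id": "run-001", "completedAt": "2024-07-18T13:30:00.000000+00:00"},
    {"id": "run-002"},  # still running, no completedAt yet
]
print(most_recent_completed_run_id(run_list))  # -> run-001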