Changed write row commands to batch write cells
rclarke0 committed May 21, 2024
1 parent 1a32d61 commit b6939b1
Showing 5 changed files with 92 additions and 55 deletions.
73 changes: 56 additions & 17 deletions abr-testing/abr_testing/automation/google_sheets_tool.py
@@ -6,7 +6,7 @@
import sys
from datetime import datetime
from oauth2client.service_account import ServiceAccountCredentials # type: ignore[import]
- from typing import Dict, List, Any, Set, Tuple
+ from typing import Dict, List, Any, Set, Tuple, Optional

"""Google Sheets Tool.
@@ -48,13 +48,14 @@ def open_worksheet(self, tab_number: int) -> Any:
"""Open individual worksheet within a googlesheet."""
return self.spread_sheet.get_worksheet(tab_number)

- def create_worksheet(self, title: str) -> None:
+ def create_worksheet(self, title: str) -> Optional[str]:
"""Create a worksheet with tab name. Existing spreadsheet needed."""
try:
- new_sheet = self.spread_sheet.add_worksheet(title, rows="2000", cols="26")
+ new_sheet = self.spread_sheet.add_worksheet(title, rows="2500", cols="40")
+ return new_sheet.id
except gspread.exceptions.APIError:
print("Sheet already exists.")
+ # Return the existing tab's id rather than referencing the unbound new_sheet.
+ return self.spread_sheet.worksheet(title).id
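# Hypothetical usage (gs being a google_sheet instance):
#   tab_id = gs.create_worksheet("ABR-run-data")
# The returned tab id can then be passed to batch_update_cells below as sheet_id.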

def write_header(self, header: List) -> None:
"""Write Header to first row if not present."""
@@ -108,17 +109,55 @@ def batch_delete_rows(self, row_indices: List[int]) -> None:
self.spread_sheet.batch_update(body=delete_body)

def batch_update_cells(
- self, sheet_title: str, data: List[List[str]], start_column: str, start_row: int
+ self,
+ data: List[List[Any]],
+ start_column: str,
+ start_row: int,
+ sheet_id: str,
) -> None:
"""Writes to multiple cells at once in a specific sheet."""
- sheet = self.spread_sheet.worksheet(sheet_title)
- for idx, values in enumerate(data):
- column = chr(ord(start_column) + idx)  # Convert index to column letter
- location = f"{column}{start_row}:{column}{start_row + len(values) - 1}"
- cells_to_update = sheet.range(location)
- for cell, value in zip(cells_to_update, values):
- cell.value = value
- sheet.update_cells(cells_to_update)

+ def column_letter_to_index(column_letter: str) -> int:
+ """Convert a column letter to a 1-based index: 'A' -> 1, 'Z' -> 26, 'AA' -> 27."""
+ index = 0
+ for char in column_letter.upper():
+ index = index * 26 + (ord(char) - ord("A") + 1)
+ return index

+ requests = []
+ user_entered_value: Dict[str, Any] = {}
+ start_column_index = column_letter_to_index(start_column) - 1

+ for col_offset, col_values in enumerate(data):
+ column_index = start_column_index + col_offset
+ for row_offset, value in enumerate(col_values):
+ row_index = start_row + row_offset
+ # Store numeric values as numbers so sheet formulas keep working.
+ try:
+ float_value = float(value)
+ user_entered_value = {"numberValue": float_value}
+ except (ValueError, TypeError):
+ user_entered_value = {"stringValue": str(value)}
+ requests.append(
+ {
+ "updateCells": {
+ # GridRange indices are zero-based and end-exclusive.
+ "range": {
+ "sheetId": sheet_id,
+ "startRowIndex": row_index - 1,
+ "endRowIndex": row_index,
+ "startColumnIndex": column_index,
+ "endColumnIndex": column_index + 1,
+ },
+ "rows": [
+ {"values": [{"userEnteredValue": user_entered_value}]}
+ ],
+ "fields": "userEnteredValue",
+ }
+ }
+ )

+ body = {"requests": requests}
+ self.spread_sheet.batch_update(body=body)
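# A hypothetical call, assuming a google_sheet instance gs writing to tab id 0:
#   gs.batch_update_cells([["run-1", "run-2"], [1.5, 2.5]], "A", 2, "0")
# fills A2:A3 with the strings and B2:B3 with the numbers in a single
# spreadsheets.batchUpdate round trip instead of one write per row.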

def update_cell(
self, sheet_title: str, row: int, column: int, single_data: Any
@@ -135,13 +174,13 @@ def get_column(self, column_number: int) -> Set[str]:
"""Get all values in column."""
return self.worksheet.col_values(column_number)

- def get_cell(self, cell: str) -> Any:
+ def get_cell(self, sheet_title: str, cell: str) -> Any:
"""Get cell value with location ex: 'A1'."""
- return self.worksheet.acell(cell).value
+ return self.spread_sheet.worksheet(sheet_title).acell(cell).value

- def get_single_col_range(self, range: str) -> List:
+ def get_single_col_range(self, sheet_name: str, range: str) -> List:
"""Get cell values from one column range."""
- values_range = self.worksheet.range(range)
+ values_range = self.spread_sheet.worksheet(sheet_name).range(range)
return [cell.value for cell in values_range]

def get_index_row(self) -> int:
@@ -214,7 +253,7 @@ def create_line_chart(
"overlayPosition": {
"anchorCell": {
"sheetId": sheet_id,
"rowIndex": 1,
"rowIndex": 15,
"columnIndex": col_position,
}
}
34 changes: 21 additions & 13 deletions abr-testing/abr_testing/data_collection/abr_google_drive.py
@@ -32,10 +32,10 @@ def create_data_dictionary(
runs_to_save: Union[Set[str], str],
storage_directory: str,
issue_url: str,
- ) -> Tuple[Dict[str, Dict[str, Any]], List[str], Dict[str, Dict[str, Any]], List[str]]:
+ ) -> Tuple[List[List[Any]], List[str], List[List[Any]], List[str]]:
"""Pull data from run files and format into a dictionary."""
- runs_and_robots: Dict[Any, Dict[str, Any]] = {}
- runs_and_lpc: Dict[Any, Dict[str, Any]] = {}
+ runs_and_robots: List[List[Any]] = []
+ runs_and_lpc: List[List[Any]] = []
for filename in os.listdir(storage_directory):
file_path = os.path.join(storage_directory, filename)
if file_path.endswith(".json"):
@@ -119,14 +119,17 @@ def create_data_dictionary(
**tc_dict,
}
headers: List[str] = list(row_2.keys())
- runs_and_robots[run_id] = row_2
+ runs_and_robots.append(list(row_2.values()))
# LPC Data Recording
runs_and_lpc, headers_lpc = read_robot_logs.lpc_data(
file_results, row_for_lpc, runs_and_lpc
)
else:
continue
- return runs_and_robots, headers, runs_and_lpc, headers_lpc
+ transposed_runs_and_robots = list(map(list, zip(*runs_and_robots)))
+ transposed_runs_and_lpc = list(map(list, zip(*runs_and_lpc)))
+ return transposed_runs_and_robots, headers, transposed_runs_and_lpc, headers_lpc
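# zip(*rows) flips row-per-run data to column-major lists, the
# one-list-per-column shape batch_update_cells expects, e.g.
# [[1, 2], [3, 4], [5, 6]] -> [[1, 3, 5], [2, 4, 6]].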


if __name__ == "__main__":
@@ -183,14 +186,19 @@ def create_data_dictionary(
run_ids_on_gd, run_ids_on_gs
)
# Add missing runs to google sheet
- runs_and_robots, headers, runs_and_lpc, headers_lpc = create_data_dictionary(
- missing_runs_from_gs, storage_directory, ""
- )
- read_robot_logs.write_to_local_and_google_sheet(
- runs_and_robots, storage_directory, google_sheet_name, google_sheet, headers
- )
+ (
+ transposed_runs_and_robots,
+ headers,
+ transposed_runs_and_lpc,
+ headers_lpc,
+ ) = create_data_dictionary(missing_runs_from_gs, storage_directory, "")

+ start_row = google_sheet.get_index_row() + 1
+ google_sheet.batch_update_cells(transposed_runs_and_robots, "A", start_row, "0")
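# "0" is presumably the sheetId of the spreadsheet's first tab (gid 0),
# passed as a string to match the sheet_id parameter.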

# Add LPC to google sheet
google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)
- read_robot_logs.write_to_local_and_google_sheet(
- runs_and_lpc, storage_directory, "ABR-LPC", google_sheet_lpc, headers_lpc
- )
+ start_row_lpc = google_sheet_lpc.get_index_row() + 1
+ google_sheet_lpc.batch_update_cells(
+ transposed_runs_and_lpc, "A", start_row_lpc, "0"
+ )
12 changes: 6 additions & 6 deletions abr-testing/abr_testing/data_collection/abr_robot_error.py
@@ -47,6 +47,7 @@ def get_error_info_from_robot(
) = read_robot_logs.get_error_info(results)
# JIRA Ticket Fields
failure_level = "Level " + str(error_level) + " Failure"

components = [failure_level, "Flex-RABR"]
affects_version = results["API_Version"]
parent = results.get("robot_name", "")
@@ -201,12 +202,11 @@ def get_error_info_from_robot(
runs_and_lpc,
headers_lpc,
) = abr_google_drive.create_data_dictionary(run_id, error_folder_path, issue_url)
- read_robot_logs.write_to_local_and_google_sheet(
- runs_and_robots, storage_directory, google_sheet_name, google_sheet, headers
- )

+ start_row = google_sheet.get_index_row() + 1
+ google_sheet.batch_update_cells(runs_and_robots, "A", start_row, "0")
print("Wrote run to ABR-run-data")
# Add LPC to google sheet
google_sheet_lpc = google_sheets_tool.google_sheet(credentials_path, "ABR-LPC", 0)
- read_robot_logs.write_to_local_and_google_sheet(
- runs_and_lpc, storage_directory, "ABR-LPC", google_sheet_lpc, headers_lpc
- )
+ start_row_lpc = google_sheet_lpc.get_index_row() + 1
+ google_sheet_lpc.batch_update_cells(runs_and_lpc, "A", start_row_lpc, "0")
12 changes: 6 additions & 6 deletions abr-testing/abr_testing/data_collection/read_robot_logs.py
@@ -18,11 +18,10 @@
def lpc_data(
file_results: Dict[str, Any],
protocol_info: Dict[str, Any],
- runs_and_lpc: Dict[str, Any],
- ) -> Tuple[Dict[str, Dict[str, Any]], List[str]]:
+ runs_and_lpc: List[List[Any]],
+ ) -> Tuple[List[List[Any]], List[str]]:
"""Get labware offsets from one run log."""
offsets = file_results.get("labwareOffsets", "")
- n = 0
# TODO: per UNIQUE slot AND LABWARE TYPE only keep the most recent LPC recording
if len(offsets) > 0:
unique_offsets: Dict[Any, Any] = {}
@@ -55,9 +54,7 @@ def lpc_data(
"Z": z_offset,
}
for item in unique_offsets:
- run_id = protocol_info["Run_ID"] + "_" + str(n)
- runs_and_lpc[run_id] = unique_offsets[item]
- n += 1
+ runs_and_lpc.append(list(unique_offsets[item].values()))
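# One appended row per unique (slot, labware) offset; run ids no longer
# need an "_n" suffix because rows, not dict keys, are written out.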
headers_lpc = list(unique_offsets[(slot, labware_type)].keys())

return runs_and_lpc, headers_lpc
@@ -298,6 +295,7 @@ def create_abr_data_sheet(
def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, str]:
"""Determines if errors exist in run log and documents them."""
error_levels = []
error_level = ""
# Read error levels file
with open(ERROR_LEVELS_PATH, "r") as error_file:
error_levels = list(csv.reader(error_file))
@@ -328,6 +326,8 @@ def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, str]:
code_error = error[1]
if code_error == error_code:
error_level = error[4]
+ if len(error_level) < 1:
+ # Fall back to a default level when the error code has no CSV mapping.
+ error_level = str(4)

return num_of_errors, error_type, error_code, error_instrument, error_level

16 changes: 3 additions & 13 deletions abr-testing/abr_testing/data_collection/single_run_log_reader.py
@@ -15,16 +15,8 @@
nargs=1,
help="Folder path that holds individual run logs of interest.",
)
- parser.add_argument(
- "google_sheet_name",
- metavar="GOOGLE_SHEET_NAME",
- type=str,
- nargs=1,
- help="Google sheet name.",
- )
args = parser.parse_args()
run_log_file_path = args.run_log_file_path[0]
- google_sheet_name = args.google_sheet_name[0]

try:
credentials_path = os.path.join(run_log_file_path, "credentials.json")
@@ -41,16 +33,14 @@
) = abr_google_drive.create_data_dictionary(
run_ids_in_storage, run_log_file_path, ""
)
- list_of_runs = list(runs_and_robots.keys())
+ transposed_list = list(zip(*runs_and_robots))
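# create_data_dictionary now returns column-major data, so zip(*...) restores
# one tuple per run for the CSV writer below.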
# Adds Run to local csv
sheet_location = os.path.join(run_log_file_path, "saved_data.csv")
file_exists = os.path.exists(sheet_location) and os.path.getsize(sheet_location) > 0
with open(sheet_location, "a", newline="") as f:
writer = csv.writer(f)
if not file_exists:
writer.writerow(header)
- for run in list_of_runs:
+ for run in transposed_list:
# Add new row
- row = runs_and_robots[run].values()
- row_list = list(row)
- writer.writerow(row_list)
+ writer.writerow(run)
