From 88d78acf87dc15f46189b23c7171329ca7768b94 Mon Sep 17 00:00:00 2001 From: MarianneAzzopardi-HMCTS <147049624+mazzopardi2@users.noreply.github.com> Date: Thu, 25 Jan 2024 15:42:10 +0000 Subject: [PATCH] 2188/data mapping (#88) Co-authored-by: Swagger V2 bot Co-authored-by: Jason Paige Co-authored-by: lucas-phillips28 <140058662+lucas-phillips28@users.noreply.github.com> --- .gitignore | 1 + bin/migration/README.md | 108 +++++++++++++ bin/migration/db_utils.py | 24 +++ bin/migration/main_script.py | 133 ++++++++++++++++ bin/migration/requirements.txt | 13 ++ bin/migration/summary.py | 104 +++++++++++++ bin/migration/tables/appaccess.py | 95 ++++++++++++ bin/migration/tables/audits.py | 54 +++++++ bin/migration/tables/bookingparticipants.py | 57 +++++++ bin/migration/tables/bookings.py | 116 ++++++++++++++ bin/migration/tables/capturesessions.py | 130 ++++++++++++++++ bin/migration/tables/cases.py | 81 ++++++++++ bin/migration/tables/courtregions.py | 69 +++++++++ bin/migration/tables/courtrooms.py | 70 +++++++++ bin/migration/tables/courts.py | 88 +++++++++++ bin/migration/tables/helpers.py | 115 ++++++++++++++ bin/migration/tables/participants.py | 88 +++++++++++ bin/migration/tables/portalaccess.py | 110 ++++++++++++++ bin/migration/tables/recordings.py | 142 ++++++++++++++++++ bin/migration/tables/regions.py | 50 ++++++ bin/migration/tables/roles.py | 46 ++++++ bin/migration/tables/rooms.py | 50 ++++++ bin/migration/tables/sharebookings.py | 85 +++++++++++ bin/migration/tables/users.py | 57 +++++++ pre-api-stg.yaml | 3 + .../reform/preapi/entities/AuditTest.java | 4 - .../gov/hmcts/reform/preapi/dto/RoleDTO.java | 4 + .../hmcts/reform/preapi/entities/Audit.java | 6 - .../reform/preapi/entities/Recording.java | 2 +- .../hmcts/reform/preapi/entities/Role.java | 3 + .../db/migration/V009__AddFieldRoles.sql | 1 + ...010__AmendConstraintsAndDroppingFields.sql | 7 + .../V011__RemoveConstraintOnField.sql | 1 + 33 files changed, 1906 insertions(+), 11 deletions(-) create mode 100644 bin/migration/README.md create mode 100644 bin/migration/db_utils.py create mode 100644 bin/migration/main_script.py create mode 100644 bin/migration/requirements.txt create mode 100644 bin/migration/summary.py create mode 100644 bin/migration/tables/appaccess.py create mode 100644 bin/migration/tables/audits.py create mode 100644 bin/migration/tables/bookingparticipants.py create mode 100644 bin/migration/tables/bookings.py create mode 100644 bin/migration/tables/capturesessions.py create mode 100644 bin/migration/tables/cases.py create mode 100644 bin/migration/tables/courtregions.py create mode 100644 bin/migration/tables/courtrooms.py create mode 100644 bin/migration/tables/courts.py create mode 100644 bin/migration/tables/helpers.py create mode 100644 bin/migration/tables/participants.py create mode 100644 bin/migration/tables/portalaccess.py create mode 100644 bin/migration/tables/recordings.py create mode 100644 bin/migration/tables/regions.py create mode 100644 bin/migration/tables/roles.py create mode 100644 bin/migration/tables/rooms.py create mode 100644 bin/migration/tables/sharebookings.py create mode 100644 bin/migration/tables/users.py create mode 100644 src/main/resources/db/migration/V009__AddFieldRoles.sql create mode 100644 src/main/resources/db/migration/V010__AmendConstraintsAndDroppingFields.sql create mode 100644 src/main/resources/db/migration/V011__RemoveConstraintOnField.sql diff --git a/.gitignore b/.gitignore index 7683d4e75..98a08ab10 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 
+23,4 @@ bin/main/application.yaml
 applicationinsights-agent-*.jar
 *.log
+bin/migration/failed_imports_log.txt
diff --git a/bin/migration/README.md b/bin/migration/README.md
new file mode 100644
index 000000000..83c4a5fbe
--- /dev/null
+++ b/bin/migration/README.md
@@ -0,0 +1,108 @@
+# Database Migration Script
+
+This script manages the migration of data from a source database to a destination database.
+
+
+## How to Run the Script
+
+1. **Set Environment Variables:** Export the database passwords:
+   ```
+   export SOURCE_DB_PASSWORD=
+   export DESTINATION_DB_PASSWORD=
+   ```
+
+2. **Install Dependencies:** Install the required Python packages if they are not already installed:
+   ```
+   pip install -r requirements.txt
+   ```
+
+3. **Execute the Script:** Run the migration script:
+   ```
+   python main_script.py
+   ```
+
+4. **Check the migrated counts:** Run the summary script:
+   ```
+   python summary.py
+   ```
+
+## Summary
+The `summary.py` file provides an overview of record counts in the source and destination databases, together with a count of failed imports.
+
+## DatabaseManager Class
+
+### Methods:
+- **`__init__(self, database, user, password, host, port)`:** Initializes the DatabaseManager class and establishes a connection to the database.
+- **`execute_query(self, query, params=None)`:** Executes a query and fetches all results.
+- **`close_connection(self)`:** Closes the cursor and the database connection.
+
+## Helper Functions
+
+### `parse_to_timestamp(input_text)`
+Parses date strings into UK timestamps, handling various date formats and returning `None` if the input is empty or cannot be parsed.
+
+### `check_existing_record(db_connection, table_name, field, record)`
+Checks whether a record already exists in the database.
+
+### `audit_entry_creation(db_connection, table_name, record_id, record, created_at=None, created_by="Data Entry")`
+Creates an audit entry in the database for a new record.
+
+### `log_failed_imports(failed_imports, filename='failed_imports_log.txt')`
+Appends the details of any failed record imports to the failed imports log.
+
+### `clear_migrations_file(filename='failed_imports_log.txt')`
+Clears the failed imports log before the migration runs, to avoid duplicate entries.
+
+## Main Logic
+
+1. Initializes database connections.
+2. Executes migration logic for each table manager.
+3. Closes database connections.
+
+## Table Managers
+
+### RoomManager
+Handles the migration of room data.
+
+### UserManager
+Manages the migration of user data.
+
+### RoleManager
+Manages the migration of user roles.
+
+### CourtManager
+Handles the migration of court-related data. A 'Default Court' is added for records that do not specify the court in which they are tried.
+
+### CourtRoomManager
+Manages the migration of courtroom data.
+
+### RegionManager
+Manages the migration of region-related data.
+
+### CourtRegionManager
+Handles associations between courts and regions.
+
+### PortalAccessManager
+Manages user access to the Portal. The assumption is that Level 3 users have access to the Portal.
+
+### AppAccessManager
+Handles user access to applications. The assumption is that all roles except Level 3 have this access.
+
+### CaseManager
+Manages the migration of case-related data.
+
+### BookingManager
+Handles the migration of booking-related data.
+
+### ParticipantManager
+Manages the migration of participant-related data.
+
+### BookingParticipantManager
+Handles associations between bookings and participants.
+
+### CaptureSessionManager
+Manages the migration of capture session data.
+
+### RecordingManager
+Handles the migration of recording data.
+
+### ShareBookingsManager
+Handles the migration of share bookings data.
+
+### AuditLogManager
+Handles the migration of existing audit log data.
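+
+## Example: Using DatabaseManager and the Helpers
+
+A minimal, illustrative sketch of how `DatabaseManager` and the helper functions fit together. The connection values below are placeholders (the real values live in `main_script.py`); the table and query are only examples:
+
+```
+from db_utils import DatabaseManager
+from tables.helpers import parse_to_timestamp, check_existing_record
+
+db = DatabaseManager(
+    database="dev-pre-copy",
+    user="psqladmin",
+    password="<password>",   # placeholder - read from an env variable in practice
+    host="<host>",           # placeholder
+    port="5432",
+)
+
+# execute_query runs on the shared cursor and returns all rows
+rows = db.execute_query(
+    "SELECT id FROM public.courts WHERE name = %s", ("Default Court",)
+)
+
+# Source dates arrive as strings in several formats; parse before inserting
+created_at = parse_to_timestamp("25/01/2024 15:42")
+
+# Skip records that have already been migrated
+if not check_existing_record(db.cursor, "courts", "name", "Default Court"):
+    pass  # insert the record here
+
+db.close_connection()
+```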
+
diff --git a/bin/migration/db_utils.py b/bin/migration/db_utils.py
new file mode 100644
index 000000000..63bd4fa88
--- /dev/null
+++ b/bin/migration/db_utils.py
@@ -0,0 +1,24 @@
+import psycopg2
+
+class DatabaseManager:
+    def __init__(self, database, user, password, host, port):
+        self.connection = psycopg2.connect(
+            database=database,
+            user=user,
+            password=password,
+            host=host,
+            port=port
+        )
+        # Create a real cursor rather than aliasing the connection itself,
+        # so execute_query can call execute/fetchall on it
+        self.cursor = self.connection.cursor()
+
+    def execute_query(self, query, params=None):
+        self.cursor.execute(query, params)
+        return self.cursor.fetchall()
+
+    def close_connection(self):
+        self.cursor.close()
+        self.connection.close()
+
+
diff --git a/bin/migration/main_script.py b/bin/migration/main_script.py
new file mode 100644
index 000000000..47af4ad99
--- /dev/null
+++ b/bin/migration/main_script.py
@@ -0,0 +1,133 @@
+import os
+import time
+
+from db_utils import DatabaseManager
+
+from tables.rooms import RoomManager
+from tables.users import UserManager
+from tables.roles import RoleManager
+from tables.courts import CourtManager
+from tables.courtrooms import CourtRoomManager
+from tables.regions import RegionManager
+from tables.courtregions import CourtRegionManager
+from tables.portalaccess import PortalAccessManager
+from tables.appaccess import AppAccessManager
+from tables.cases import CaseManager
+from tables.bookings import BookingManager
+from tables.participants import ParticipantManager
+from tables.bookingparticipants import BookingParticipantManager
+from tables.capturesessions import CaptureSessionManager
+from tables.recordings import RecordingManager
+from tables.sharebookings import ShareBookingsManager
+from tables.audits import AuditLogManager
+
+from tables.helpers import clear_migrations_file
+
+
+# get passwords from env variables
+source_db_password = os.environ.get('SOURCE_DB_PASSWORD')
+destination_db_password = os.environ.get('DESTINATION_DB_PASSWORD')
+test_db_password = os.environ.get('TEST_DB_PASSWORD')
+staging_db_password = os.environ.get('STAGING_DB_PASSWORD')
+
+
+# database connections
+# staging db
+# source_db = DatabaseManager(
+#     database="pre-pdb-stg",
+#     user="psqladmin",
+#     password=staging_db_password,
+#     host="pre-db-stg.postgres.database.azure.com",
+#     port="5432",
+# )
+
+
+# test db
+# source_db = DatabaseManager(
+#     database="pre-pdb-test",
+#     user="psqladmin",
+#     password=test_db_password,
+#     host="pre-db-test.postgres.database.azure.com",
+#     port="5432",
+# )
+
+# demo database
+source_db = DatabaseManager(
+    database="pre-pdb-demo",
+    user="psqladmin",
+    password=source_db_password,
+    host="pre-db-demo.postgres.database.azure.com",
+    port="5432",
+)
+
+
+# dummy database on dev server
+destination_db = DatabaseManager(
+    database="dev-pre-copy",
+    user="psqladmin",
+    password=destination_db_password,
+    host="pre-db-dev.postgres.database.azure.com",
+    port="5432",
+)
+
+# managers for different tables
+room_manager = RoomManager(source_db.connection.cursor())
+user_manager = UserManager(source_db.connection.cursor())
+role_manager = RoleManager(source_db.connection.cursor())
+court_manager = CourtManager(source_db.connection.cursor())
+courtroom_manager = CourtRoomManager()
+region_manager = RegionManager()
+court_region_manager = CourtRegionManager()
+portal_access_manager = PortalAccessManager(source_db.connection.cursor())
+app_access_manager = AppAccessManager(source_db.connection.cursor())
+case_manager = CaseManager(source_db.connection.cursor())
+booking_manager = BookingManager(source_db.connection.cursor())
+participant_manager = ParticipantManager(source_db.connection.cursor()) +booking_participant_manager = BookingParticipantManager(source_db.connection.cursor()) +capture_session_manager = CaptureSessionManager(source_db.connection.cursor()) +recording_manager = RecordingManager(source_db.connection.cursor()) +share_bookings_manager = ShareBookingsManager(source_db.connection.cursor()) +audit_log_manager = AuditLogManager(source_db.connection.cursor()) + +def migrate_manager_data(manager, destination_cursor): + start_time = time.time() + print(f"Migrating data for {manager.__class__.__name__}...") + + if hasattr(manager, 'get_data') and callable(getattr(manager, 'get_data')): + source_data = manager.get_data() + manager.migrate_data(destination_cursor, source_data) + else: + manager.migrate_data(destination_cursor) + + end_time = time.time() + time_taken = end_time - start_time + print(f"Data migration for {manager.__class__.__name__} complete in : {time_taken:.2f} seconds.\n") + +def main(): + clear_migrations_file() + + destination_db_cursor = destination_db.connection.cursor() + + migrate_manager_data(room_manager, destination_db_cursor) + migrate_manager_data(user_manager, destination_db_cursor) + migrate_manager_data(role_manager, destination_db_cursor) + migrate_manager_data(court_manager, destination_db_cursor) + migrate_manager_data(courtroom_manager, destination_db_cursor) + migrate_manager_data(region_manager, destination_db_cursor) + migrate_manager_data(court_region_manager, destination_db_cursor) + migrate_manager_data(portal_access_manager, destination_db_cursor) + migrate_manager_data(app_access_manager, destination_db_cursor) + migrate_manager_data(case_manager, destination_db_cursor) + migrate_manager_data(booking_manager, destination_db_cursor) + migrate_manager_data(participant_manager, destination_db_cursor) + migrate_manager_data(capture_session_manager, destination_db_cursor) + migrate_manager_data(recording_manager, destination_db_cursor) + migrate_manager_data(booking_participant_manager, destination_db_cursor) + migrate_manager_data(share_bookings_manager, destination_db_cursor) + migrate_manager_data(audit_log_manager, destination_db_cursor) + + source_db.close_connection() + destination_db.close_connection() + +if __name__ == "__main__": + main() diff --git a/bin/migration/requirements.txt b/bin/migration/requirements.txt new file mode 100644 index 000000000..09baa1c15 --- /dev/null +++ b/bin/migration/requirements.txt @@ -0,0 +1,13 @@ +attrs==22.2.0 +importlib-metadata==4.8.3 +iniconfig==1.1.1 +packaging==21.3 +pluggy==1.0.0 +psycopg2==2.9.8 +py==1.11.0 +pyparsing==3.1.1 +pytest==7.0.1 +pytz==2023.3.post1 +tomli==1.2.3 +typing_extensions==4.1.1 +zipp==3.6.0 diff --git a/bin/migration/summary.py b/bin/migration/summary.py new file mode 100644 index 000000000..a78ea2d3e --- /dev/null +++ b/bin/migration/summary.py @@ -0,0 +1,104 @@ +import psycopg2 +import os + +# Connection +source_db_password = os.environ.get('SOURCE_DB_PASSWORD') +destination_db_password = os.environ.get('DESTINATION_DB_PASSWORD') + +destination_conn = psycopg2.connect( + database="dev-pre-copy", + user="psqladmin", + password=destination_db_password, + host="pre-db-dev.postgres.database.azure.com", + port="5432", +) + +source_conn = psycopg2.connect( + database="pre-pdb-demo", + user="psqladmin", + password=source_db_password, + host="pre-db-demo.postgres.database.azure.com", + port="5432", +) + +# table mapping from old db table names to new +table_mapping = { + 'recordings': 'recordings', + 
    'share_recordings': 'share_recordings',
+    'portal_access': 'portal_access',
+    'audits': 'audits',
+    'courts': 'courts',
+    'court_region': 'court_region',
+    'regions': 'regions',
+    'courtrooms': 'courtrooms',
+    'rooms': 'rooms',
+    'contacts': 'participants',
+    'bookings': 'bookings',
+    'cases': 'cases',
+    'booking_participant': 'booking_participant',
+    'roles': 'roles',
+    'role_permission': 'role_permission',
+    'permissions': 'permissions',
+    'users': 'users',
+    'app_access': 'app_access',
+    'capture_sessions': 'capture_sessions'
+}
+
+# Counts the number of records in every table of the given database connection
+def count_records_in_all_tables(connection):
+    cursor = connection.cursor()
+
+    cursor.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_type = 'BASE TABLE'")
+    tables = cursor.fetchall()
+
+    table_counts = {}
+    for table in tables:
+        table_name = table[0]
+        cursor.execute(f"SELECT COUNT(*) FROM public.{table_name}")
+        count = cursor.fetchone()[0]
+        table_counts[table_name] = count
+
+    cursor.close()
+    return table_counts
+
+# Parses the failed imports log file to count the number of failed imports for each table.
+def count_failed_imports(file_path):
+    table_counts = {}
+
+    with open(file_path, 'r') as file:
+        for line in file:
+            split_line = line.split(', ')
+            if len(split_line) >= 2:
+                table_name = split_line[0].split(': ')[1].strip()
+
+                if table_name in table_counts:
+                    table_counts[table_name] += 1
+                else:
+                    table_counts[table_name] = 1
+    return table_counts
+
+source_table_counts = count_records_in_all_tables(source_conn)
+destination_table_counts = count_records_in_all_tables(destination_conn)
+
+file_path = 'failed_imports_log.txt'
+failed_imports = count_failed_imports(file_path)
+
+# Displays the record counts in both the source and destination databases alongside the failed import counts, to monitor for data loss.
+def print_summary(source_counts, destination_counts, failed_imports): + print(f"| {'Table Name'.ljust(20)} | {'Source DB Records'.ljust(18)} | {'Destination DB Records'.ljust(26)} | {'Failed Imports Logs'.ljust(19)} ") + print(f"| {'------------'.ljust(20)} | {'------------------'.ljust(18)} | {'------------------'.ljust(26)} | {'---------------'.ljust(19)} ") + + for source_table, destination_table in table_mapping.items(): + source_records = source_counts.get(source_table, '-') + destination_records = destination_counts.get(destination_table, '-') + failed_import_count = failed_imports.get(source_table, '-') + + print(f"| {destination_table.ljust(20)} | {str(source_records).ljust(18)} | {str(destination_records).ljust(26)} | {str(failed_import_count).ljust(19)}") + +print_summary(source_table_counts, destination_table_counts, failed_imports) + +source_conn.close() +destination_conn.close() + + + diff --git a/bin/migration/tables/appaccess.py b/bin/migration/tables/appaccess.py new file mode 100644 index 000000000..8b05759e7 --- /dev/null +++ b/bin/migration/tables/appaccess.py @@ -0,0 +1,95 @@ +from .helpers import check_existing_record, parse_to_timestamp, audit_entry_creation, log_failed_imports, get_user_id + +class AppAccessManager: + def __init__(self, source_cursor): + self.source_cursor = source_cursor + self.failed_imports = set() + + def get_data(self): + query = """ SELECT + u.userid, + MAX(CASE WHEN gl.grouptype = 'Security' THEN ga.groupid ELSE NULL END) AS role_id, + MAX(CASE WHEN gl.grouptype = 'Location' THEN ga.groupid ELSE NULL END) AS court_id, + MAX(u.status) as active, + MAX(ga.assigned) AS created, + MAX(ga.assignedby) AS createdby, + MAX(ga.gaid) AS app_access_id + FROM public.users u + JOIN public.groupassignments ga ON u.userid = ga.userid + JOIN public.grouplist gl ON ga.groupid = gl.groupid + WHERE gl.groupname != 'Level 3' AND (gl.grouptype = 'Security' OR gl.grouptype = 'Location') + GROUP BY u.userid ; + """ + self.source_cursor.execute(query) + return self.source_cursor.fetchall() + + + def migrate_data(self, destination_cursor, source_data): + destination_cursor.execute("SELECT id FROM public.courts WHERE name = 'Default Court'") + default_court_id = destination_cursor.fetchone()[0] + + batch_app_users_data = [] + id = None + + for user in source_data: + id=user[6] + user_id = user[0] + role_id = user[1] + + if role_id is None: + continue + + court_id = user[2] + active = True if user[3].lower() == "active" else False + created_at = parse_to_timestamp(user[4]) + modified_at = created_at + created_by = get_user_id(destination_cursor,user[5]) + + if not check_existing_record(destination_cursor,'users', 'id', user_id): + self.failed_imports.add(('app_access',user_id, f"User id not in users table: {user_id}")) + continue + + if not check_existing_record(destination_cursor,'roles', 'id', role_id): + self.failed_imports.add(('app_access',user_id, f"Role: {role_id} not found in roles table for user_id: {user_id}")) + continue + + if court_id is None: + court_id = default_court_id + + if not check_existing_record(destination_cursor,'courts', 'id', court_id): + self.failed_imports.add(('app_access',user_id, f"Court: {court_id} not found in courts table for user_id: {user_id}")) + continue + + if not check_existing_record(destination_cursor,'app_access',"user_id",user_id ): + + # last_access = + batch_app_users_data.append(( + id, user_id, court_id, role_id, active, created_at, modified_at,created_by, + )) + audit_entry_creation( + destination_cursor, + 
                    table_name='app_access',
+                    record_id=id,
+                    record=user_id,
+                    created_at=created_at,
+                    created_by=created_by,
+                )
+
+        try:
+            if batch_app_users_data:
+                destination_cursor.executemany(
+                    """
+                    INSERT INTO public.app_access
+                    (id, user_id, court_id, role_id, active, created_at, modified_at)
+                    VALUES (%s, %s, %s, %s, %s, %s, %s)
+                    """,
+                    # created_by is only needed for the audit entries, so it is
+                    # stripped from each tuple before the insert
+                    [entry[:-1] for entry in batch_app_users_data],
+                )
+                destination_cursor.connection.commit()
+
+        except Exception as e:
+            self.failed_imports.add(('app_access', user_id, e))
+
+        log_failed_imports(self.failed_imports)
\ No newline at end of file
diff --git a/bin/migration/tables/audits.py b/bin/migration/tables/audits.py
new file mode 100644
index 000000000..f08fe8888
--- /dev/null
+++ b/bin/migration/tables/audits.py
@@ -0,0 +1,54 @@
+from .helpers import check_existing_record, parse_to_timestamp, log_failed_imports, get_user_id
+import uuid
+
+class AuditLogManager:
+    def __init__(self, source_cursor):
+        self.source_cursor = source_cursor
+        self.failed_imports = set()
+
+    def get_data(self):
+        self.source_cursor.execute("SELECT * from public.audits")
+        return self.source_cursor.fetchall()
+
+    def migrate_data(self, destination_cursor, source_data):
+        batch_audit_data = []
+        id = None  # keep id bound for the except clause below
+
+        for audit_log in source_data:
+            table_name = "audits"
+            table_record_id = audit_log[0]
+
+            if not check_existing_record(destination_cursor, 'audits', 'table_record_id', table_record_id):
+                id = str(uuid.uuid4())
+                source = "AUTO"
+                type = "CREATE"
+                category = audit_log[20]
+                activity = audit_log[2]
+                functional_area = audit_log[17]
+                audit_details = f"{audit_log[5]}, {audit_log[18]}, {audit_log[19]}"
+                created_at = parse_to_timestamp(audit_log[12])
+
+                # Default to None so created_by_id is always bound, even when
+                # neither user field is populated
+                created_by_id = None
+                if audit_log[11]:
+                    created_by_id = get_user_id(destination_cursor, audit_log[11])
+                elif audit_log[10]:
+                    created_by_id = get_user_id(destination_cursor, audit_log[10])
+
+                created_by = created_by_id
+
+                batch_audit_data.append((
+                    id, table_name, table_record_id, source, type, category, activity, functional_area, audit_details, created_at, created_by))
+        try:
+            if batch_audit_data:
+                destination_cursor.executemany(
+                    """INSERT INTO public.audits (id, table_name, table_record_id, source, type, category, activity, functional_area, audit_details, created_at, created_by)
+                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""",
+                    batch_audit_data
+                )
+                destination_cursor.connection.commit()
+            else:
+                print("No data to insert.")
+
+        except Exception as e:
+            self.failed_imports.add(('audits', id, e))
+
+        log_failed_imports(self.failed_imports)
diff --git a/bin/migration/tables/bookingparticipants.py b/bin/migration/tables/bookingparticipants.py
new file mode 100644
index 000000000..4288b3d3c
--- /dev/null
+++ b/bin/migration/tables/bookingparticipants.py
@@ -0,0 +1,57 @@
+from .helpers import log_failed_imports, check_existing_record
+
+class BookingParticipantManager:
+    def __init__(self, source_cursor):
+        self.source_cursor = source_cursor
+        self.failed_imports = set()
+
+    def get_data(self):
+        self.source_cursor.execute("SELECT recordinguid, defendants, witnessnames FROM recordings")
+        return self.source_cursor.fetchall()
+
+
+    def migrate_data(self, destination_cursor, source_data):
+        destination_cursor.execute("SELECT id FROM public.participants")
+        participant_ids = [row[0] for row in destination_cursor.fetchall()]
+
+        for recording in source_data:
+            recording_id = recording[0]
+            defendants_list = recording[1].split(',') if recording[1] else []
+            witnesses_list =
recording[2].split(',') if recording[2] else [] + + destination_cursor.execute(""" + SELECT r.id, cs.booking_id + FROM recordings r + LEFT JOIN capture_sessions cs ON r.capture_session_id = cs.id + WHERE r.id = %s + """, (recording_id,)) + result = destination_cursor.fetchone() + + if result is not None and len(result) > 0: + booking_id = result[1] + + for participant_id in (defendants_list + witnesses_list): + if participant_id in participant_ids: + + try: + destination_cursor.execute( + """ + INSERT INTO public.booking_participant (participant_id, booking_id) + SELECT %s, %s + WHERE NOT EXISTS ( + SELECT 1 + FROM public.booking_participant + WHERE participant_id = %s AND booking_id = %s + ) + """, + (participant_id, booking_id, participant_id, booking_id), + ) + destination_cursor.connection.commit() + + except Exception as e: + self.failed_imports.add(('booking_participants', None, e)) + else: + self.failed_imports.add(('booking_participants', participant_id, "Participant ID not found")) + + log_failed_imports(self.failed_imports) + diff --git a/bin/migration/tables/bookings.py b/bin/migration/tables/bookings.py new file mode 100644 index 000000000..417376332 --- /dev/null +++ b/bin/migration/tables/bookings.py @@ -0,0 +1,116 @@ +from .helpers import check_existing_record, parse_to_timestamp, audit_entry_creation, log_failed_imports, get_user_id +import uuid + + +class BookingManager: + def __init__(self, source_cursor): + self.source_cursor = source_cursor + self.failed_imports = set() + + def get_data(self): + self.source_cursor.execute("SELECT * FROM public.recordings WHERE recordingversion = '1'") + return self.source_cursor.fetchall() + + def migrate_data(self, destination_cursor, source_data): + destination_cursor.execute("SELECT id FROM public.cases") + cases_data = destination_cursor.fetchall() + existing_case_ids = {case[0] for case in cases_data} + + destination_cursor.execute( + """CREATE TABLE IF NOT EXISTS public.temp_recordings ( + capture_session_id UUID, + recording_id UUID, + booking_id UUID, + parent_recording_id UUID, + case_id UUID, + scheduled_for TIMESTAMPTZ, + deleted_at TIMESTAMPTZ, + created_at TIMESTAMPTZ, + modified_at TIMESTAMPTZ, + created_by UUID, + started_by_user_id UUID, + ingest_address VARCHAR(255), + live_output_url VARCHAR(255), + status TEXT + ) + """ + ) + + for recording in source_data: + case_id = recording[1] + recording_id = recording[0] + booking_id = str(uuid.uuid4()) + scheduled_for = parse_to_timestamp(recording[10]) + + if scheduled_for is None: + self.failed_imports.add(('bookings', booking_id, f'Scheduled for date is NULL for case id: {case_id}')) + continue + + recording_status = recording[11] + deleted_at = parse_to_timestamp(recording[24]) if recording_status == 'Deleted' else None + created_at = parse_to_timestamp(recording[22]) + modified_at = parse_to_timestamp(recording[24]) if recording[24] is not None else created_at + created_by = get_user_id(destination_cursor,recording[21]) + + # Check if the case has been migrated into the cases table + if case_id not in existing_case_ids: + self.failed_imports.add(('bookings', booking_id, f'Case ID {case_id} not found in cases data')) + continue + + # Insert into temp table + if not check_existing_record(destination_cursor,'temp_recordings', 'recording_id', recording_id): + try: + destination_cursor.execute( + """ + INSERT INTO public.temp_recordings + (case_id, recording_id, booking_id,scheduled_for, deleted_at, created_at, created_by,modified_at ) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s) 
+ """, + ( case_id, recording_id, booking_id,scheduled_for,deleted_at, created_at, + created_by if created_by is not None else None, + modified_at if modified_at is not None else None + ), + ) + except Exception as e: + self.failed_imports.add(('temp_recordings', recording_id, f'Failed to insert into temp_recordings: {e}')) + continue + + # Fetch temp data + destination_cursor.execute("SELECT * FROM public.temp_recordings") + temp_recordings_data = destination_cursor.fetchall() + + for booking in temp_recordings_data: + id = booking[2] + case_id = booking[4] + + if not check_existing_record(destination_cursor,'bookings', 'id', id): + try: + scheduled_for = booking[5] + created_at = booking[7] + modified_at = booking[8] + created_by = booking[9] + deleted_at = booking[6] + + destination_cursor.execute( + """ + INSERT INTO public.bookings + (id, case_id, scheduled_for, created_at, modified_at, deleted_at) + VALUES (%s, %s, %s, %s, %s, %s ) + """, + (id, case_id, scheduled_for, created_at, modified_at, deleted_at), + ) + + audit_entry_creation( + destination_cursor, + table_name="bookings", + record_id=id, + record=case_id, + created_at=created_at, + created_by=created_by if created_by is not None else None, + ) + except Exception as e: + self.failed_imports.add(('bookings', id,e)) + + log_failed_imports(self.failed_imports) + + \ No newline at end of file diff --git a/bin/migration/tables/capturesessions.py b/bin/migration/tables/capturesessions.py new file mode 100644 index 000000000..9b0d78c6f --- /dev/null +++ b/bin/migration/tables/capturesessions.py @@ -0,0 +1,130 @@ +from .helpers import check_existing_record, audit_entry_creation, log_failed_imports, parse_to_timestamp, get_user_id +import uuid + +class CaptureSessionManager: + def __init__(self, source_cursor): + self.source_cursor = source_cursor + self.failed_imports = set() + + def get_data(self): + self.source_cursor.execute("SELECT DISTINCT ON (parentrecuid) * FROM public.recordings WHERE recordingversion = '1' and recordingstatus != 'No Recording'") + return self.source_cursor.fetchall() + + def map_recording_status(self, status): + status_lower = status.lower() + + if status_lower in ["assets created - waiting for event to start", "deleted", "error - failed to start", "no recording available", "no stream detected", "no recording"]: + result = "STANDBY" + elif status_lower in ["initiating request...","ready to record"]: + result = "INITIALISATION" + elif status_lower == "recording": + result = "RECORDING" + elif status_lower in ["edit requested","ready to stream", "mp4 ready for viewing", "stream ok"]: + result = "AVAILABLE" + elif status_lower == "checking stream...": + result = "PROCESSING" + else: + result = "STANDBY" + return result + + def get_recording_date(self, recording_id, activity): + query = """ + SELECT createdon + FROM public.audits + WHERE activity = %s + AND recordinguid = %s + """ + self.source_cursor.execute(query, (activity, recording_id)) + result = self.source_cursor.fetchone() + return parse_to_timestamp(result[0]) if result else None + + + def migrate_data(self, destination_cursor, source_data): + destination_cursor.execute("SELECT * FROM public.temp_recordings") + temp_recording_data = destination_cursor.fetchall() + + destination_cursor.execute("SELECT * FROM public.users") + user_data = destination_cursor.fetchall() + + for recording in source_data: + recording_id = recording[0] + capture_session_id = str(uuid.uuid4()) + booking_id = next((temp_rec[2] for temp_rec in temp_recording_data if temp_rec[1] 
== recording_id), None) + parent_recording_id = recording[9] + + if parent_recording_id is None: + self.failed_imports.add(('temp_recordings', recording_id, f'parent_recording_id blank for recording id: {recording_id}')) + continue + + ingest_address = recording[8] + live_output_url = recording[20] + started_by = recording[21] + started_by_user_id = next((user[0] for user in user_data if user[3] == started_by), None) + deleted_at = parse_to_timestamp(recording[24]) if str(recording[11]).lower() == 'deleted' else None + status = self.map_recording_status(recording[11]) + + try: + destination_cursor.execute( + """ + UPDATE public.temp_recordings + SET capture_session_id = %s, parent_recording_id = %s, deleted_at=%s, + started_by_user_id=%s, ingest_address=%s, live_output_url=%s, status=%s + WHERE recording_id = %s + """, + (capture_session_id, parent_recording_id, deleted_at, started_by_user_id, ingest_address, live_output_url, status, recording_id), + ) + destination_cursor.connection.commit() + + except Exception as e: + self.failed_imports.add(('temp_recordings', recording_id, f'Failed to insert into temp_recordings: {e}')) + continue + + # inserting only version 1 into capture sessions as this would be the parent recording + destination_cursor.execute("SELECT * FROM public.temp_recordings WHERE recording_id = parent_recording_id") + temp_recording_data = destination_cursor.fetchall() + + destination_cursor.execute("SELECT * FROM public.users") + user_data = destination_cursor.fetchall() + + for temp_recording in temp_recording_data: + id = temp_recording[0] + recording_id = temp_recording[1] + booking_id = temp_recording[2] + deleted_at = temp_recording[6] + started_by_user_id = temp_recording[10] + finished_by_user_id = temp_recording[10] + created_at = temp_recording[7] + ingest_address=temp_recording[11] + live_output_url=temp_recording[12] + status=temp_recording[13] + started_at = self.get_recording_date(recording_id, 'Start Recording Clicked') or created_at + finished_at = self.get_recording_date(recording_id, 'Finish Recording') or created_at + + if not check_existing_record(destination_cursor,'bookings', 'id', booking_id): + self.failed_imports.add(('capture_sessions', id, f"Booking id: {booking_id} not recorded in bookings table")) + continue + + if not check_existing_record(destination_cursor,'capture_sessions','id', id): + origin = 'PRE' + + try: + destination_cursor.execute( + """ + INSERT INTO public.capture_sessions (id, booking_id, origin, ingest_address, live_output_url, deleted_at, started_by_user_id, finished_by_user_id, status, started_at, finished_at) + VALUES (%s, %s, %s,%s,%s, %s,%s,%s, %s,%s, %s) + """, + ( id, booking_id, origin, ingest_address, live_output_url, deleted_at, started_by_user_id, finished_by_user_id, status, started_at, finished_at), + ) + destination_cursor.connection.commit() + + audit_entry_creation( + destination_cursor, + table_name="capture_sessions", + record_id=id, + record=booking_id, + created_at=created_at, + ) + except Exception as e: + self.failed_imports.add(('capture_sessions', id,e)) + + log_failed_imports(self.failed_imports) diff --git a/bin/migration/tables/cases.py b/bin/migration/tables/cases.py new file mode 100644 index 000000000..de13d4ef1 --- /dev/null +++ b/bin/migration/tables/cases.py @@ -0,0 +1,81 @@ +from .helpers import check_existing_record, parse_to_timestamp, audit_entry_creation, log_failed_imports, get_user_id + +class CaseManager: + def __init__(self, source_cursor): + self.source_cursor = source_cursor + 
self.failed_imports = set() + + def get_data(self): + self.source_cursor.execute("SELECT * FROM public.cases") + return self.source_cursor.fetchall() + + def get_case_deleted_date(self, case_id, modified_at_date): + self.source_cursor.execute(""" + SELECT createdon FROM audits + WHERE auditdetails = 'Case marked as Deleted.' AND caseuid = %s + """, (case_id,)) + + deleted_date = self.source_cursor.fetchall() + if deleted_date: + deleted_date_str =deleted_date[0][0] + return parse_to_timestamp(deleted_date_str) + else: + return modified_at_date + + def migrate_data(self, destination_cursor, source_data): + + destination_cursor.execute("SELECT id, name FROM public.courts") + courts_data = destination_cursor.fetchall() + court_name_to_id = {court[1]: court[0] for court in courts_data} + + default_court_name = "Default Court" + default_court_id = court_name_to_id.get(default_court_name) + + cases_data = [] + for case in source_data: + reference = case[1] + id = case[0] + + if reference is None: + self.failed_imports.add(('cases', id, 'Null value for reference')) + continue + + if not check_existing_record(destination_cursor,'cases', 'id', id): + court_id = court_name_to_id.get(case[2]) + if court_id is None: + court_id = default_court_id + + test = False + created_at = parse_to_timestamp(case[5]) + created_by = get_user_id(destination_cursor,case[4]) + modified_at = parse_to_timestamp(case[7]) if case[7] is not None else created_at + deleted_at = self.get_case_deleted_date(id, modified_at) if case[3] == "Deleted" else None + + + cases_data.append((id, court_id, reference, test, deleted_at, created_at, modified_at)) + + audit_entry_creation( + destination_cursor, + table_name="cases", + record_id=id, + record=reference, + created_at=created_at, + created_by=created_by if created_by is not None else None + ) + + try: + if cases_data: + destination_cursor.executemany( + """ + INSERT INTO public.cases + (id, court_id, reference, test,deleted_at, created_at, modified_at) + VALUES (%s, %s, %s, %s, %s,%s, %s) + """, + cases_data, + ) + destination_cursor.connection.commit() + + except Exception as e: + self.failed_imports.add(('cases', id,e)) + + log_failed_imports(self.failed_imports) diff --git a/bin/migration/tables/courtregions.py b/bin/migration/tables/courtregions.py new file mode 100644 index 000000000..124f1c85c --- /dev/null +++ b/bin/migration/tables/courtregions.py @@ -0,0 +1,69 @@ +from .helpers import check_existing_record, log_failed_imports +import re + + +class CourtRegionManager: + def __init__(self): + self.failed_imports = set() + + def migrate_data(self,destination_cursor): + batch_court_region_data = [] + + # Court regions data - https://cjscommonplatform-my.sharepoint.com/:x:/r/personal/lawrie_baber-scovell2_hmcts_net/_layouts/15/Doc.aspx?sourcedoc=%7B07C83A7F-EF01-4C78-9B02-AEDD443D15A1%7D&file=Courts%20PRE%20NRO.xlsx&wdOrigin=TEAMS-WEB.undefined_ns.rwc&action=default&mobileredirect=true + court_regions = [ + {"name": "Birmingham", "region": "West Midlands (England)"}, + {"name": "Birmingham Youth", "region": "West Midlands (England)"}, + {"name": "Mold Crown Court", "region": "Wales"}, + {"name": "Reading Crown Court", "region": "South East (England)"}, + {"name": "Leeds Crown Court", "region": "Yorkshire and The Humber"}, + {"name": "Durham Crown Court", "region": "North East (England)"}, + {"name": "Liverpool Crown Court", "region": "North West (England)"}, + {"name": "Nottingham Crown Court", "region": "East Midlands (England)"}, + {"name": "Kingston upon Thames Crown 
Court", "region": "London"}, + # {"name": "Kingston-upon-Thames Crown Court", "region": "London"}, + {"name": "Leeds Youth Court", "region": "Yorkshire and The Humber"}, + {"name": "Default Court", "region": "London"} + ] + court_regions_dict = {court["name"]: court["region"] for court in court_regions} + + destination_cursor.execute('SELECT id, name FROM public.courts') + courts_data = destination_cursor.fetchall() + + destination_cursor.execute('SELECT id, name FROM public.regions') + regions_data = destination_cursor.fetchall() + regions_dict = {region[1]: region[0] for region in regions_data} + + for court in courts_data: + court_id = court[0] + court_name = court[1] + region_id = None + + + for court_key, region_name in court_regions_dict.items(): + regex_pattern = re.compile(rf"{re.escape(court_key)}(?:\sCourt)?", re.IGNORECASE) + + if re.search(regex_pattern, court_name): + region_id = regions_dict.get(region_name) + break + + if region_id is None: + self.failed_imports.add(('court_region', court_id, f'Missing region_id for {court_name}')) + continue + + if not check_existing_record(destination_cursor,'court_region', 'court_id', court_id): + batch_court_region_data.append((court_id, region_id)) + + try: + if batch_court_region_data: + destination_cursor.executemany( + "INSERT INTO public.court_region (court_id, region_id) VALUES (%s, %s)", + batch_court_region_data + ) + + destination_cursor.connection.commit() + + except Exception as e: + self.failed_imports.add(('court_region', None, e)) + + log_failed_imports(self.failed_imports) + \ No newline at end of file diff --git a/bin/migration/tables/courtrooms.py b/bin/migration/tables/courtrooms.py new file mode 100644 index 000000000..b34d64efc --- /dev/null +++ b/bin/migration/tables/courtrooms.py @@ -0,0 +1,70 @@ +from .helpers import check_existing_record, audit_entry_creation, log_failed_imports +import uuid +import re + +class CourtRoomManager: + def __init__(self): + self.failed_imports = set() + + def migrate_data(self, destination_cursor): + # CVP room data - https://tools.hmcts.net/confluence/display/S28/CVP+Guides#CVPGuides-CVPRooms-EnvironmentandCourtAllocation + courtroom_data = { + "PRE001": "Leeds Youth Court", + "PRE002": "Leeds Youth Court", + "PRE003": "Leeds Youth Court", + "PRE004": "Mold Crown Court", + "PRE005": "Mold Crown Court", + "PRE006": "Leeds Crown Court", + "PRE007": "Leeds Crown Court", + "PRE008": "Default Court", + "PRE009": "Default Court", + "PRE010": "Default Court", + "PRE011": "Durham Crown Court", + "PRE012": "Durham Crown Court", + "PRE013": "Kingston upon Thames Crown Court", + "PRE014": "Kingston upon Thames Crown Court", + "PRE015": "Liverpool Crown Court", + "PRE016": "Liverpool Crown Court", + "PRE017": "Nottingham Crown Court", + "PRE018": "Nottingham Crown Court", + "PRE019": "Reading Crown Court", + "PRE020": "Reading Crown Court" + } + + batch_courtrooms_data = [] + + destination_cursor.execute("SELECT * FROM public.rooms") + dest_rooms_data = destination_cursor.fetchall() + rooms_dict = {role[1]: role[0] for role in dest_rooms_data} + + destination_cursor.execute("SELECT * FROM public.courts") + dest_courts_data = destination_cursor.fetchall() + + court_dict = {court[2]: court[0] for court in dest_courts_data} + + for room, court in courtroom_data.items(): + court_name_pattern = re.compile(rf"{re.escape(court)}", re.IGNORECASE) + + if room in rooms_dict: + room_id = rooms_dict[room] + + matched_court_ids = [court_id for court_name, court_id in court_dict.items() if 
re.search(court_name_pattern, court_name)] + if matched_court_ids: + court_id = matched_court_ids[0] + + if not check_existing_record(destination_cursor,'courtrooms', 'room_id', room_id): + batch_courtrooms_data.append((court_id, room_id)) + + try: + if batch_courtrooms_data: + destination_cursor.executemany( + "INSERT INTO public.courtrooms ( court_id, room_id) VALUES ( %s, %s)", + batch_courtrooms_data + ) + destination_cursor.connection.commit() + + except Exception as e: + self.failed_imports.add(('court_rooms', None, e)) + + log_failed_imports(self.failed_imports) + diff --git a/bin/migration/tables/courts.py b/bin/migration/tables/courts.py new file mode 100644 index 000000000..f40c80da6 --- /dev/null +++ b/bin/migration/tables/courts.py @@ -0,0 +1,88 @@ +import uuid +from .helpers import audit_entry_creation, check_existing_record, log_failed_imports +import re + +class CourtManager: + def __init__(self, source_cursor): + self.source_cursor = source_cursor + self.failed_imports = set() + + def get_data(self): + self.source_cursor.execute("SELECT groupid, groupname from public.grouplist where grouptype = 'Location'") + return self.source_cursor.fetchall() + + def migrate_data(self, destination_cursor, source_courts_data): + # Courts data - https://cjscommonplatform-my.sharepoint.com/:x:/r/personal/lawrie_baber-scovell2_hmcts_net/_layouts/15/Doc.aspx?sourcedoc=%7B07C83A7F-EF01-4C78-9B02-AEDD443D15A1%7D&file=Courts%20PRE%20NRO.xlsx&wdOrigin=TEAMS-WEB.undefined_ns.rwc&action=default&mobileredirect=true + court_types = { + 'Reading Crown Court': ('CROWN','449','UKJ-South East (England)'), + 'Nottingham Crown Court': ('CROWN','444','UKF-East Midlands (England)'), + 'Mold Crown': ('CROWN','438','UKL-Wales'), + 'Liverpool Crown Court': ('CROWN','433','UKD-North West (England)'), + 'Leeds Youth Court': ('MAGISTRATE','429','UKE-Yorkshire and The Humber'), + 'Leeds Crown Court': ('CROWN','429','UKE-Yorkshire and The Humber'), + 'Kingston upon Thames Crown Court': ('CROWN','427','UKI-London'), + 'Durham Crown Court': ('CROWN','422','UKC-North East (England)'), + 'Birmingham Crown Court': ('CROWN','404','UKG-West Midlands (England)'), + 'Birmingham Youth Court': ('CROWN','404','UKG-West Midlands (England)') + } + batch_courts_data = [] + + for court in source_courts_data: + id = court[0] + name = court[1] + court_info = None + + for court_pattern, info in court_types.items(): + regex_pattern = re.compile(rf"{re.escape(court_pattern)}(?:\sCourt)?", re.IGNORECASE) + if re.search(regex_pattern, name): + court_info = info + break + + if court_info is None: + court_info = ('CROWN', '', '') + + court_type, location_code, _ = court_info + + + if not check_existing_record(destination_cursor,'courts', 'id', id ): + batch_courts_data.append((id, court_type.upper(), name, location_code)) + + try: + if batch_courts_data: + destination_cursor.executemany( + 'INSERT INTO public.courts (id, court_type, name, location_code) VALUES (%s, %s, %s, %s)', + batch_courts_data + ) + + for court in batch_courts_data: + audit_entry_creation( + destination_cursor, + table_name='courts', + record_id=court[0], + record=court[2] + ) + except Exception as e: + self.failed_imports.add(('courts', id, e )) + + # Inserting an 'Unknown' court type for records missing this info + default_court_id = str(uuid.uuid4()) + + try: + if not check_existing_record(destination_cursor,'courts', 'name', 'Default Court'): + default_court_id =str(uuid.uuid4()) + destination_cursor.execute( + 'INSERT INTO public.courts (id, court_type, name, 
location_code) VALUES (%s, %s, %s, %s)', + (default_court_id, 'CROWN', 'Default Court', 'default'), + ) + destination_cursor.connection.commit() + + audit_entry_creation( + destination_cursor, + table_name='courts', + record_id=default_court_id, + record='Default Court' + ) + except Exception as e: + self.failed_imports.add(('courts', default_court_id, e)) + + log_failed_imports(self.failed_imports) diff --git a/bin/migration/tables/helpers.py b/bin/migration/tables/helpers.py new file mode 100644 index 000000000..1ca881a1e --- /dev/null +++ b/bin/migration/tables/helpers.py @@ -0,0 +1,115 @@ +from datetime import datetime +import pytz +import uuid + + +# Parses timestamp string to date format +def parse_to_timestamp(input_text): + failed_imports = set() + if input_text: + try: + parsed_datetime = None + formats_to_try = [ + "%d/%m/%Y", + "%d/%m/%Y %H:%M", + "%d/%m/%Y %H:%M:%S", + "%Y/%m/%d %H:%M:%S", + "%Y/%m/%d %H:%M", + "%d-%m-%Y %H:%M:%S", + "%d-%m-%Y %H:%M", + "%Y-%m-%d %H:%M:%S", + ] + for date_format in formats_to_try: + try: + parsed_datetime = datetime.strptime(input_text, date_format) + break + except ValueError: + pass + + if parsed_datetime: + uk_timezone = pytz.timezone('Europe/London') + parsed_datetime = uk_timezone.localize(parsed_datetime) + return parsed_datetime + + except (ValueError, TypeError) as e: + failed_imports.add(('date/time', input_text, e)) + log_failed_imports(failed_imports) + + +# Checks if record is already imported +def check_existing_record(db_connection, table_name, field, record): + query = f"SELECT EXISTS (SELECT 1 FROM public.{table_name} WHERE {field} = %s)" + db_connection.execute(query, (record,)) + return db_connection.fetchone()[0] + + +# Audit entry into database +def audit_entry_creation(db_connection, table_name, record_id, record, created_at=None, created_by="Data Entry"): + created_at = created_at or datetime.now() + + failed_imports = set() + + audit_entry = { + "id": str(uuid.uuid4()), + "table_name": table_name, + "table_record_id": record_id, + "source": "AUTO", + "type": "CREATE", + "category": "data_migration", + "activity": f"{table_name}_record_creation", + "functional_area": "data_processing", + "audit_details": f"Created {table_name}_record for: {record}", + "created_by": created_by, + "created_at": created_at, + } + + try: + db_connection.execute( + """ + INSERT INTO public.audits + (id, table_name, table_record_id, source, type, category, activity, functional_area, audit_details, created_by, created_at) + VALUES + (%(id)s, %(table_name)s, %(table_record_id)s, %(source)s, %(type)s, %(category)s, %(activity)s, %(functional_area)s, %(audit_details)s, %(created_by)s, %(created_at)s) + """, + audit_entry + ) + db_connection.connection.commit() + + except Exception as e: + failed_imports.add(('audit table', table_name, e)) + log_failed_imports(failed_imports) + +# Logs failed imports to file +def log_failed_imports(failed_imports, filename='failed_imports_log.txt'): + with open(filename, 'a') as file: + for entry in failed_imports: + if len(entry) == 2: + table_name, failed_id = entry + details = 'Import failed' + elif len(entry) == 3: + table_name, failed_id, details = entry + else: + raise ValueError("Each entry in failed_imports should have 2 or 3 elements") + + file.write(f"Table: {table_name}, ID: {failed_id}, Details: {details}\n") + + +# Clear the migration file - run before the migration script is run +def clear_migrations_file(filename='failed_imports_log.txt'): + with open(filename, 'w') as file: + file.write("") + +# Get 
the user_id associated with an email from the users table for the audits record. +def get_user_id(db_connection, email): + db_connection.execute(""" + SELECT id + FROM public.users + WHERE email = %s + """, (email,)) + result = db_connection.fetchone() + + if result is not None: + user_id = result[0] + return user_id + else: + return None \ No newline at end of file diff --git a/bin/migration/tables/participants.py b/bin/migration/tables/participants.py new file mode 100644 index 000000000..09b7edc1d --- /dev/null +++ b/bin/migration/tables/participants.py @@ -0,0 +1,88 @@ +from .helpers import check_existing_record, parse_to_timestamp, audit_entry_creation, log_failed_imports, get_user_id + +class ParticipantManager: + def __init__(self, source_cursor): + self.source_cursor = source_cursor + self.failed_imports = set() + + def get_data(self): + self.source_cursor.execute("SELECT * FROM public.contacts") + return self.source_cursor.fetchall() + + def migrate_data(self, destination_cursor, source_data): + batch_participant_data = [] + + created_by = None + + for participant in source_data: + id = participant[0] + p_type = participant[3] + case_id = participant[4] + + if case_id is None: + self.failed_imports.add(('contacts', id, 'No case id associated with this participant')) + continue + + if p_type is None: + self.failed_imports.add(('contacts', id, 'No participant type detail')) + continue + + + destination_cursor.execute( + "SELECT id FROM public.cases WHERE id = %s", (case_id,) + ) + case_id_exists = destination_cursor.fetchone() + + if case_id_exists is None: + self.failed_imports.add(('contacts', id, f'Invalid case id {case_id} associated with this participant')) + continue + + if not check_existing_record(destination_cursor,'participants','case_id', case_id): + participant_type = p_type.upper() + + if participant_type not in ('WITNESS', 'DEFENDANT'): + self.failed_imports.add(('contacts', id, f'Invalid participant type: {p_type}')) + continue + + first_name = participant[6] + last_name = participant[7] + if (first_name is None) or (last_name is None): + self.failed_imports.add(('contacts', id, 'no participant names')) + continue + + created_at = parse_to_timestamp(participant[9]) + modified_at = parse_to_timestamp(participant[11]) + created_by = get_user_id(destination_cursor,participant[8]) + + batch_participant_data.append((id, case_id, participant_type, first_name, last_name, created_at, modified_at, created_by)) + + try: + if batch_participant_data: + + destination_cursor.executemany( + """ + INSERT INTO public.participants + (id, case_id, participant_type, first_name, last_name, created_at, modified_at) + VALUES (%s, %s, %s, %s, %s, %s, %s) + """, + [entry[:-1] for entry in batch_participant_data], + ) + + destination_cursor.connection.commit() + + for entry in batch_participant_data: + audit_entry_creation( + destination_cursor, + table_name="participants", + record_id=entry[0], + record=entry[1], + created_at=entry[5], + created_by=entry[7] if entry[7] is not None else None + ) + + except Exception as e: + self.failed_imports.add(('contacts', id, e)) + + + log_failed_imports(self.failed_imports) + \ No newline at end of file diff --git a/bin/migration/tables/portalaccess.py b/bin/migration/tables/portalaccess.py new file mode 100644 index 000000000..8ca269f61 --- /dev/null +++ b/bin/migration/tables/portalaccess.py @@ -0,0 +1,110 @@ +from .helpers import check_existing_record, parse_to_timestamp, audit_entry_creation, log_failed_imports, get_user_id +from datetime import 
datetime
+
+class PortalAccessManager:
+    def __init__(self, source_cursor):
+        self.source_cursor = source_cursor
+        self.failed_imports = set()
+
+    def get_data(self):
+        query = """ SELECT
+            u.userid,
+            MAX(u.status) as active,
+            MAX(u.loginenabled) as loginenabled,
+            MAX(u.invited) as invited,
+            MAX(u.emailconfirmed) as emailconfirmed,
+            MAX(ga.assigned) AS created,
+            MAX(ga.assignedby) AS createdby,
+            MAX(ga.gaid) AS portal_access_id
+        FROM public.users u
+        JOIN public.groupassignments ga ON u.userid = ga.userid
+        JOIN public.grouplist gl ON ga.groupid = gl.groupid
+        WHERE gl.groupname = 'Level 3' OR gl.groupname = 'Super User'
+        GROUP BY u.userid """
+        self.source_cursor.execute(query)
+        return self.source_cursor.fetchall()
+
+    def migrate_data(self, destination_cursor, source_user_data):
+        batch_portal_user_data = []
+        user_id = None
+
+        for user in source_user_data:
+            user_id = user[0]
+
+            if not check_existing_record(destination_cursor, 'portal_access', 'user_id', user_id):
+                id = user[7]
+                password = 'password'  # temporary field - to be removed once B2C implemented
+                status = 'INVITATION_SENT'
+
+                login_enabled = str(user[2]).lower() == 'true'
+                login_disabled = str(user[2]).lower() == 'false'
+                email_confirmed = str(user[4]).lower() == 'true'
+                status_active = str(user[1]).lower() == 'active'
+                invited = True
+                status_inactive = str(user[1]).lower() == 'inactive'
+
+                login_enabled_and_invited = login_enabled and invited
+                email_confirmed_and_status_inactive = email_confirmed and status_inactive
+                email_confirmed_and_status_active = email_confirmed and status_active
+                login_disabled_and_status_inactive = login_disabled and status_inactive
+
+                if status_inactive or login_disabled_and_status_inactive:
+                    status = "INACTIVE"
+                elif login_enabled_and_invited and email_confirmed_and_status_inactive:
+                    status = 'REGISTERED'
+                elif login_enabled_and_invited and email_confirmed_and_status_active:
+                    status = 'ACTIVE'
+                elif login_enabled_and_invited:
+                    status = "INVITATION_SENT"
+                else:
+                    self.failed_imports.add(('portal_access', user_id, "Missing status details"))
+                    continue
+
+                # last_access = datetime.now() # this value is obtained from DV
+                # invitation_datetime = parse_to_timestamp(user[5]) # this value is obtained from DV
+                # registered_datetime = parse_to_timestamp(user[5]) # this value is obtained from DV
+                created_by = get_user_id(destination_cursor, user[6])
+
+                created_at = parse_to_timestamp(user[5])
+                modified_at = created_at
+
+                batch_portal_user_data.append((
+                    id, user_id, password, status, created_at, modified_at, created_by
+                ))
+
+                # Audit entries are written once here as each record is batched;
+                # created_by is stripped from the tuple before the insert below
+                audit_entry_creation(
+                    destination_cursor,
+                    table_name='portal_access',
+                    record_id=id,
+                    record=user_id,
+                    created_at=created_at,
+                    created_by=created_by
+                )
+
+        try:
+            if batch_portal_user_data:
+                destination_cursor.executemany(
+                    """
+                    INSERT INTO public.portal_access
+                    (id, user_id, password, status, created_at, modified_at)
+                    VALUES (%s, %s, %s, %s, %s, %s)
+                    """,
+                    [entry[:-1] for entry in batch_portal_user_data],
+                )
+
+                destination_cursor.connection.commit()
+        except Exception as e:
+            self.failed_imports.add(('portal_access', user_id, e))
+
+        log_failed_imports(self.failed_imports)
+
+
diff --git a/bin/migration/tables/recordings.py b/bin/migration/tables/recordings.py
new file mode 100644
index 000000000..6026eb6e3
--- /dev/null +++ b/bin/migration/tables/recordings.py @@ -0,0 +1,142 @@ +from .helpers import check_existing_record, parse_to_timestamp, audit_entry_creation, log_failed_imports, get_user_id + + +class RecordingManager: + def __init__(self, source_cursor): + self.source_cursor = source_cursor + self.failed_imports = set() + + def get_data(self): + self.source_cursor.execute(""" SELECT * + FROM public.recordings + WHERE (recordingavailable IS NULL OR + recordingavailable NOT ILIKE 'false' AND + recordingavailable NOT ILIKE 'no')""") + return self.source_cursor.fetchall() + + def migrate_data(self, destination_cursor, source_data): + # first inserting the recordings with multiple recordings versions - this is to satisfy the parent_recording_id FK constraint + parent_recording_ids = [recording[9] for recording in source_data] + seen = set() + duplicate_parent_ids = set() + + for recording_id in parent_recording_ids: + if recording_id in seen: + duplicate_parent_ids.add(recording_id) + else: + seen.add(recording_id) + + duplicate_parent_id_records = [recording for recording in source_data if recording[0] in duplicate_parent_ids] + non_duplicate_parent_id_records = [recording for recording in source_data if recording[0] not in duplicate_parent_ids] + + for recording in duplicate_parent_id_records: + id = recording[0] + parent_recording_id = recording[9] + + if parent_recording_id not in (rec[0] for rec in source_data): + self.failed_imports.add(('recordings', id, f'Parent recording id: {parent_recording_id} does not match a recording id')) + continue + + destination_cursor.execute("SELECT capture_session_id FROM public.temp_recordings WHERE parent_recording_id = %s", (parent_recording_id,)) + result = destination_cursor.fetchone() + + if result is None: + self.failed_imports.add(('recordings', id, f'No capture_session id found for parent recording id {parent_recording_id}')) + continue + + capture_session_id = result[0] + + if not check_existing_record(destination_cursor,'capture_sessions', 'id', capture_session_id): + self.failed_imports.add(('recordings', id, f'Recording not captured in capture sessions with capture_session_id {capture_session_id}')) + continue + + if not check_existing_record(destination_cursor,'recordings', 'id', id): + version = recording[12] + url = recording[20] if recording[20] is not None else None + filename = recording[14] + created_at = parse_to_timestamp(recording[22]) + created_by = get_user_id(destination_cursor,recording[21]) + recording_status = recording[11] if recording[11] is not None else None + deleted_at = parse_to_timestamp(recording[24]) if recording_status == 'Deleted' else None + + try: + destination_cursor.execute( + """ + INSERT INTO public.recordings (id, capture_session_id, parent_recording_id, version, url, filename, created_at, deleted_at) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s) + """, + (id, capture_session_id, parent_recording_id, version, url, filename, created_at, deleted_at), + ) + + audit_entry_creation( + destination_cursor, + table_name="recordings", + record_id=id, + record=capture_session_id, + created_at=created_at, + created_by=created_by if created_by is not None else None + ) + + except Exception as e: + self.failed_imports.add(('recordings', id, e)) + + # inserting remaining records + for recording in non_duplicate_parent_id_records: + recording_id = recording[0] + parent_recording_id = recording[9] + + try: + destination_cursor.execute("SELECT capture_session_id from public.temp_recordings where parent_recording_id = 
%s",(parent_recording_id,)) + result = destination_cursor.fetchone() + except Exception as e: + self.failed_imports.add(('recordings', id, e)) + + if result is None: + self.failed_imports.add(('recordings', id, f'No capture_session id found for parent recording id {parent_recording_id}')) + continue + + capture_session_id = result[0] + + try: + if not check_existing_record(destination_cursor,'capture_sessions', 'id', capture_session_id): + self.failed_imports.add(('recordings', id, f'Recording not captured in capture sessions with capture_session_id {capture_session_id}')) + continue + except Exception as e: + self.failed_imports.add(('recordings', id, e)) + + version = recording[12] + url = recording[20] if recording[20] is not None else None + filename = recording[14] + created_at = parse_to_timestamp(recording[22]) + recording_status = recording[11] if recording[11] is not None else None + created_by = get_user_id(destination_cursor,recording[21]) + deleted_at = parse_to_timestamp(recording[24]) if recording_status == 'Deleted' else None + # duration = ? - this info is in the asset files on AMS + # edit_instruction = ? + + try: + destination_cursor.execute( + """ + INSERT INTO public.recordings (id, capture_session_id, parent_recording_id, version, url, filename, created_at, deleted_at) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s) + """, + (recording_id, capture_session_id, parent_recording_id, version, url, filename, created_at, deleted_at), + ) + + audit_entry_creation( + destination_cursor, + table_name="recordings", + record_id=recording_id, + record=capture_session_id, + created_at=created_at, + created_by=created_by if created_by is not None else None, + ) + + except Exception as e: + self.failed_imports.add(('recordings', recording_id, e)) + + log_failed_imports(self.failed_imports) + + + + diff --git a/bin/migration/tables/regions.py b/bin/migration/tables/regions.py new file mode 100644 index 000000000..1a7f0b792 --- /dev/null +++ b/bin/migration/tables/regions.py @@ -0,0 +1,50 @@ +from .helpers import check_existing_record, audit_entry_creation, log_failed_imports +import uuid + +class RegionManager: + def __init__(self): + self.failed_imports = set() + + def migrate_data(self,destination_cursor): + batch_region_data = [] + # Regions data - https://cjscommonplatform-my.sharepoint.com/:x:/r/personal/lawrie_baber-scovell2_hmcts_net/_layouts/15/Doc.aspx?sourcedoc=%7B07C83A7F-EF01-4C78-9B02-AEDD443D15A1%7D&file=Courts%20PRE%20NRO.xlsx&wdOrigin=TEAMS-WEB.undefined_ns.rwc&action=default&mobileredirect=true + region_data = [ + 'London', + 'West Midlands (England)', + 'East Midlands (England)', + 'Yorkshire and The Humber', + 'North East (England)', + 'North West (England)', + 'South East (England)', + 'East of England', + 'South West (England)', + 'Wales' + ] + + for region in region_data: + if not check_existing_record(destination_cursor,'regions', 'name', region): + id = str(uuid.uuid4()) + batch_region_data.append((id, region)) + + try: + if batch_region_data: + destination_cursor.executemany( + "INSERT INTO public.regions (id, name) VALUES (%s, %s)", + batch_region_data + ) + + destination_cursor.connection.commit() + + for entry in batch_region_data: + audit_entry_creation( + destination_cursor, + table_name="regions", + record_id=entry[0], + record=entry[1], + ) + except Exception as e: + self.failed_imports.add(('regions', batch_region_data[0], e)) + + log_failed_imports(self.failed_imports) + + diff --git a/bin/migration/tables/roles.py b/bin/migration/tables/roles.py new file 
diff --git a/bin/migration/tables/roles.py b/bin/migration/tables/roles.py
new file mode 100644
index 000000000..04d876768
--- /dev/null
+++ b/bin/migration/tables/roles.py
@@ -0,0 +1,46 @@
+from .helpers import check_existing_record, audit_entry_creation, log_failed_imports
+
+class RoleManager:
+    def __init__(self, source_cursor):
+        self.source_cursor = source_cursor
+        self.failed_imports = set()
+
+    def get_data(self):
+        self.source_cursor.execute("SELECT groupid, groupname, groupdescription FROM public.grouplist WHERE grouptype = 'Security'")
+        return self.source_cursor.fetchall()
+
+    def migrate_data(self, destination_cursor, source_roles_data):
+        batch_roles_data = []
+
+        for role in source_roles_data:
+            id = role[0]
+            name = role[1]
+            description = role[2]
+
+            if not check_existing_record(destination_cursor, 'roles', 'id', id):
+                batch_roles_data.append((id, name, description))
+
+        try:
+            if batch_roles_data:
+                destination_cursor.executemany(
+                    'INSERT INTO public.roles (id, name, description) VALUES (%s, %s, %s)',
+                    batch_roles_data
+                )
+
+                destination_cursor.connection.commit()
+
+                for role in batch_roles_data:
+                    audit_entry_creation(
+                        destination_cursor,
+                        table_name='roles',
+                        record_id=role[0],
+                        record=role[1]
+                    )
+
+        except Exception as e:
+            self.failed_imports.add(('roles', id, e))
+
+        log_failed_imports(self.failed_imports)
diff --git a/bin/migration/tables/rooms.py b/bin/migration/tables/rooms.py
new file mode 100644
index 000000000..2d1024d68
--- /dev/null
+++ b/bin/migration/tables/rooms.py
@@ -0,0 +1,50 @@
+from .helpers import check_existing_record, audit_entry_creation, parse_to_timestamp, log_failed_imports, get_user_id
+import uuid
+
+class RoomManager:
+    def __init__(self, source_cursor):
+        self.source_cursor = source_cursor
+        self.failed_imports = set()
+
+    def get_data(self):
+        self.source_cursor.execute("SELECT * FROM public.rooms")
+        return self.source_cursor.fetchall()
+
+    def migrate_data(self, destination_cursor, source_data):
+        batch_rooms_data = []
+
+        for source_room in source_data:
+            room = source_room[0]
+
+            if not check_existing_record(destination_cursor, 'rooms', 'room', room):
+                id = str(uuid.uuid4())
+                created_by = source_room[1]
+
+                batch_rooms_data.append((id, room, created_by))
+
+        try:
+            if batch_rooms_data:
+                destination_cursor.executemany(
+                    "INSERT INTO public.rooms (id, room) VALUES (%s, %s)",
+                    # created_by is not a column on rooms; it is only kept for the audit entries below
+                    [entry[:-1] for entry in batch_rooms_data],
+                )
+
+                destination_cursor.connection.commit()
+
+                for room in batch_rooms_data:
+                    created_at = parse_to_timestamp(room[2])
+                    created_by = get_user_id(destination_cursor, room[2])
+
+                    audit_entry_creation(
+                        destination_cursor,
+                        table_name="rooms",
+                        record_id=room[0],
+                        record=room[1],
+                        created_at=created_at,
+                        created_by=created_by
+                    )
+
+        except Exception as e:
+            self.failed_imports.add(('rooms', id, e))
+
+        log_failed_imports(self.failed_imports)
diff --git a/bin/migration/tables/sharebookings.py b/bin/migration/tables/sharebookings.py
new file mode 100644
index 000000000..03402794e
--- /dev/null
+++ b/bin/migration/tables/sharebookings.py
@@ -0,0 +1,85 @@
+from .helpers import check_existing_record, parse_to_timestamp, audit_entry_creation, log_failed_imports, get_user_id
+
+class ShareBookingsManager:
+    def __init__(self, source_cursor):
+        self.source_cursor = source_cursor
+        self.failed_imports = set()
+
+    def get_data(self):
+        self.source_cursor.execute("SELECT * FROM public.videopermissions")
+        return self.source_cursor.fetchall()
+
+    def get_booking(self, bookings_data, recording_id):
+        booking_id = next((booking[0] for booking in bookings_data if booking[1] == recording_id), None)
+        return booking_id
+
+    def migrate_data(self, destination_cursor, source_data):
+        batch_share_bookings_data = []
+
+        destination_cursor.execute("""SELECT b.id AS booking_id, r.id AS recording_id
+            FROM bookings b
+            LEFT JOIN capture_sessions cs ON cs.booking_id = b.id
+            LEFT JOIN recordings r ON r.capture_session_id = cs.id
+            WHERE r.id IS NOT NULL""")
+        bookings_data = destination_cursor.fetchall()
+
+        for video_permission in source_data:
+            id = video_permission[0]
+            recording_id = video_permission[1]
+
+            booking_id = self.get_booking(bookings_data, recording_id)
+
+            shared_with_user_id = video_permission[4]
+
+            created_by = get_user_id(destination_cursor, video_permission[18])
+            shared_by_user_id = created_by
+
+            created_at = parse_to_timestamp(video_permission[19])
+            deleted_at = parse_to_timestamp(video_permission[21]) if video_permission[15] != "True" else None
+
+            if not booking_id:
+                self.failed_imports.add(('share_bookings', id, f"No booking id found for recordinguid: {recording_id}"))
+                continue
+
+            if not check_existing_record(destination_cursor, 'users', 'id', shared_with_user_id):
+                self.failed_imports.add(('share_bookings', id, f"Invalid shared_with_user_id value: {shared_with_user_id}"))
+                continue
+
+            if not shared_by_user_id:
+                self.failed_imports.add(('share_bookings', id, f"No user found for shared_by_user email: {video_permission[18]}"))
+                continue
+
+            batch_share_bookings_data.append((id, booking_id, shared_with_user_id, shared_by_user_id, created_at, deleted_at))
+
+        try:
+            if batch_share_bookings_data:
+                destination_cursor.executemany(
+                    """
+                    INSERT INTO public.share_bookings
+                        (id, booking_id, shared_with_user_id, shared_by_user_id, created_at, deleted_at)
+                    VALUES (%s, %s, %s, %s, %s, %s)
+                    """,
+                    batch_share_bookings_data,
+                )
+                destination_cursor.connection.commit()
+
+                for entry in batch_share_bookings_data:
+                    audit_entry_creation(
+                        destination_cursor,
+                        table_name="share_bookings",
+                        record_id=entry[0],
+                        record=entry[1],
+                        created_at=entry[4],
+                        created_by=entry[3],
+                    )
+
+        except Exception as e:
+            self.failed_imports.add(('share_bookings', id, e))
+
+        log_failed_imports(self.failed_imports)
diff --git a/bin/migration/tables/users.py b/bin/migration/tables/users.py
new file mode 100644
index 000000000..65261e8c2
--- /dev/null
+++ b/bin/migration/tables/users.py
@@ -0,0 +1,57 @@
+from .helpers import check_existing_record, parse_to_timestamp, audit_entry_creation, log_failed_imports, get_user_id
+
+class UserManager:
+    def __init__(self, source_cursor):
+        self.source_cursor = source_cursor
+        self.failed_imports = set()
+
+    def get_data(self):
+        self.source_cursor.execute("SELECT * FROM public.users")
+        return self.source_cursor.fetchall()
+
+    def migrate_data(self, destination_cursor, source_user_data):
+        batch_users_data = []
+
+        for user in source_user_data:
+            id = user[0]
+
+            if not check_existing_record(destination_cursor, 'users', 'id', id):
+                first_name = user[12]
+                last_name = user[13]
+                email = user[6]
+                organisation = user[8]
+                phone = user[7]
+                created_at = parse_to_timestamp(user[15])
+                modified_at = parse_to_timestamp(user[17])
+                created_by = get_user_id(destination_cursor, user[14])
+
+                batch_users_data.append((
+                    id, first_name, last_name, email, organisation, phone,
+                    created_at, modified_at, created_by
+                ))
+        try:
+            if batch_users_data:
+                destination_cursor.executemany(
+                    """
+                    INSERT INTO public.users
+                        (id, first_name, last_name, email, organisation, phone, created_at, modified_at)
+                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
+                    """,
+                    # created_by is not a column on users; it is only kept for the audit entries below
+                    [entry[:-1] for entry in batch_users_data],
+                )
+                destination_cursor.connection.commit()
+
+                for user in batch_users_data:
+                    audit_entry_creation(
+                        destination_cursor,
+                        table_name="users",
+                        record_id=user[0],
+                        record=f"{user[1]} {user[2]}",
+                        created_at=user[6],
+                        created_by=user[8]
+                    )
+        except Exception as e:
+            self.failed_imports.add(('users', id, e))
+
+        log_failed_imports(self.failed_imports)
diff --git a/pre-api-stg.yaml b/pre-api-stg.yaml
index d636fdf8c..c64a60209 100644
--- a/pre-api-stg.yaml
+++ b/pre-api-stg.yaml
@@ -1367,6 +1367,9 @@ definitions:
   RoleDTO:
     description: RoleDTO
    properties:
+      description:
+        description: RoleDescription
+        type: string
      id:
        description: RoleId
        format: uuid
diff --git a/src/integrationTest/java/uk/gov/hmcts/reform/preapi/entities/AuditTest.java b/src/integrationTest/java/uk/gov/hmcts/reform/preapi/entities/AuditTest.java
index 94f36f9ae..3760f5c88 100644
--- a/src/integrationTest/java/uk/gov/hmcts/reform/preapi/entities/AuditTest.java
+++ b/src/integrationTest/java/uk/gov/hmcts/reform/preapi/entities/AuditTest.java
@@ -9,7 +9,6 @@
 import uk.gov.hmcts.reform.preapi.enums.AuditLogSource;
 import uk.gov.hmcts.reform.preapi.enums.AuditLogType;
 
-import java.sql.Timestamp;
 import java.util.UUID;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -33,7 +32,6 @@ public void testSaveAndRetrieveAudit() { //NOPMD - suppressed JUnit5TestShouldBe
         audit.setFunctionalArea("TestFunctionalArea");
         audit.setAuditDetails("TestAuditDetails");
         audit.setCreatedBy(UUID.randomUUID());
-        audit.setUpdatedAt(new Timestamp(System.currentTimeMillis()));
 
         entityManager.persist(audit);
         entityManager.flush();
@@ -50,7 +48,5 @@ public void testSaveAndRetrieveAudit() { //NOPMD - suppressed JUnit5TestShouldBe
         assertEquals(audit.getAuditDetails(), retrievedAudit.getAuditDetails(), "Audit details should match");
         assertEquals(audit.getCreatedBy(), retrievedAudit.getCreatedBy(), "Created by should match");
         assertEquals(audit.getCreatedAt(), retrievedAudit.getCreatedAt(), "Created at should match");
-        assertEquals(audit.getUpdatedAt(), retrievedAudit.getUpdatedAt(), "Updated at should match");
-        assertEquals(audit.getDeletedAt(), retrievedAudit.getDeletedAt(), "Deleted at should match");
     }
 }
diff --git a/src/main/java/uk/gov/hmcts/reform/preapi/dto/RoleDTO.java b/src/main/java/uk/gov/hmcts/reform/preapi/dto/RoleDTO.java
index 4b3c9150e..7c85a1754 100644
--- a/src/main/java/uk/gov/hmcts/reform/preapi/dto/RoleDTO.java
+++ b/src/main/java/uk/gov/hmcts/reform/preapi/dto/RoleDTO.java
@@ -24,12 +24,16 @@ public class RoleDTO {
     @Schema(description = "RoleName")
     private String name;
 
+    @Schema(description = "RoleDescription")
+    private String description;
+
     @Schema(description = "RolePermissions")
     private Set<PermissionDTO> permissions;
 
     public RoleDTO(Role role) {
         id = role.getId();
         name = role.getName();
+        description = role.getDescription();
         permissions = Stream.ofNullable(role.getPermissions())
             .flatMap(permissions -> permissions.stream().map(PermissionDTO::new))
             .collect(Collectors.toSet());
diff --git a/src/main/java/uk/gov/hmcts/reform/preapi/entities/Audit.java b/src/main/java/uk/gov/hmcts/reform/preapi/entities/Audit.java
index 2b82ec12e..d18480d07 100644
--- a/src/main/java/uk/gov/hmcts/reform/preapi/entities/Audit.java
+++ b/src/main/java/uk/gov/hmcts/reform/preapi/entities/Audit.java
@@ -53,14 +53,8 @@ public class Audit extends BaseEntity {
     @Column(name = "created_by")
     private UUID createdBy;
 
-    @Column(name = "deleted_at")
-    private Timestamp deletedAt;
-
     @CreationTimestamp
     @Column(name = "created_at", nullable = false)
     private Timestamp createdAt;
-
-    @Column(name = "updated_at", nullable = false)
-    private Timestamp updatedAt;
 }
diff --git a/src/main/java/uk/gov/hmcts/reform/preapi/entities/Recording.java b/src/main/java/uk/gov/hmcts/reform/preapi/entities/Recording.java
index 27d08d3fc..3db89b8c6 100644
--- a/src/main/java/uk/gov/hmcts/reform/preapi/entities/Recording.java
+++ b/src/main/java/uk/gov/hmcts/reform/preapi/entities/Recording.java
@@ -36,7 +36,7 @@ public class Recording extends BaseEntity {
     @Column(name = "version", nullable = false)
     private int version;
 
-    @Column(name = "url", nullable = false)
+    @Column(name = "url")
     private String url;
 
     @Column(name = "filename", nullable = false)
diff --git a/src/main/java/uk/gov/hmcts/reform/preapi/entities/Role.java b/src/main/java/uk/gov/hmcts/reform/preapi/entities/Role.java
index f69c1e0cd..b6d17f476 100644
--- a/src/main/java/uk/gov/hmcts/reform/preapi/entities/Role.java
+++ b/src/main/java/uk/gov/hmcts/reform/preapi/entities/Role.java
@@ -19,6 +19,9 @@ public class Role extends BaseEntity {
     @Column(name = "name", nullable = false, length = 45)
     private String name;
 
+    @Column(name = "description")
+    private String description;
+
     @ManyToMany(mappedBy = "roles")
     private Set<Permission> permissions;
 }
diff --git a/src/main/resources/db/migration/V009__AddFieldRoles.sql b/src/main/resources/db/migration/V009__AddFieldRoles.sql
new file mode 100644
index 000000000..107be12eb
--- /dev/null
+++ b/src/main/resources/db/migration/V009__AddFieldRoles.sql
@@ -0,0 +1 @@
+ALTER TABLE public.roles ADD COLUMN description TEXT;
diff --git a/src/main/resources/db/migration/V010__AmendConstraintsAndDroppingFields.sql b/src/main/resources/db/migration/V010__AmendConstraintsAndDroppingFields.sql
new file mode 100644
index 000000000..ccf8f3da4
--- /dev/null
+++ b/src/main/resources/db/migration/V010__AmendConstraintsAndDroppingFields.sql
@@ -0,0 +1,7 @@
+-- These two defaults were dropped in V007 but we need them back in
+ALTER TABLE public.cases ALTER COLUMN modified_at SET DEFAULT NOW();
+ALTER TABLE public.bookings ALTER COLUMN modified_at SET DEFAULT NOW();
+
+-- Following discussions, these two fields are to be dropped
+ALTER TABLE public.audits DROP COLUMN updated_at;
+ALTER TABLE public.audits DROP COLUMN deleted_at;
\ No newline at end of file
diff --git a/src/main/resources/db/migration/V011__RemoveConstraintOnField.sql b/src/main/resources/db/migration/V011__RemoveConstraintOnField.sql
new file mode 100644
index 000000000..942de37b4
--- /dev/null
+++ b/src/main/resources/db/migration/V011__RemoveConstraintOnField.sql
@@ -0,0 +1 @@
+ALTER TABLE public.recordings ALTER COLUMN url DROP NOT NULL;