From d68cfd6ef2d27fce5cb260c78078da222e6f18a3 Mon Sep 17 00:00:00 2001
From: Theresa Kamerman
Date: Mon, 5 Feb 2024 15:24:32 -0800
Subject: [PATCH 01/36] Remove Distribute SQL steps - update .gitignore and GH Workflows

---
 .github/workflows/publish.yml  |  6 ++---
 .gitignore                     |  4 ----
 build.gradle                   | 40 ----------------------------------
 sequencing-server/build.gradle |  2 --
 4 files changed, 2 insertions(+), 50 deletions(-)

diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 9451b1e68c..a4e68db099 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -101,10 +101,8 @@ jobs:
           component: ${{ matrix.components.context }}
         run: |
           set -x
-          if [[ "$component" == "." ]]; then
-            # aerie-hasura and aerie-postgres don't need compiled java
-            ./gradlew distributeSql --no-daemon
-          else
+          # aerie-hasura and aerie-postgres don't need compiled java
+          if [[ "$component" != "." ]]; then
             ./gradlew ":$component:assemble" --no-daemon --parallel
           fi

diff --git a/.gitignore b/.gitignore
index a103054aed..53c0e25491 100644
--- a/.gitignore
+++ b/.gitignore
@@ -39,10 +39,6 @@
 /nbproject/private/
 node_modules

-# Ignore any SQL files copied from build tasks
-deployment/postgres-init-db/sql/**/*.sql
-!deployment/postgres-init-db/sql/ui
-
 # Ignore Gradle project-specific cache directory
 .gradle

diff --git a/build.gradle b/build.gradle
index 5e0676d0a3..02f546f92a 100644
--- a/build.gradle
+++ b/build.gradle
@@ -21,46 +21,6 @@ configure(subprojects) {
   }())
 }

-// Add `distributeSql` task to all subprojects with `sql/` child directory
-configure(subprojects.findAll { it.projectDir.toPath().resolve('sql').toFile().exists() }) { sp ->
-
-  task distributeSql(type: Copy) {
-    into "$rootDir/deployment/postgres-init-db/sql"
-    from fileTree(dir: "${sp.projectDir}/sql", include: '**/*.sql')
-  }
-
-  // Remove distributed SQL as part of `clean` task
-  task undoDistributeSql(type: Delete) {
-    doLast { // Explicitly do last to avoid running during configuration step
-      file("${sp.projectDir}/sql").list().each {
-        delete "$rootDir/deployment/postgres-init-db/sql/$it"
-      }
-    }
-  }
-
-  // For all Java subprojects
-  sp.plugins.withId('java') {
-    // Distribute SQL as part of resource processing
-    processResources.dependsOn distributeSql
-    // Remove distributed SQL as part of `clean` tasks
-    clean.dependsOn undoDistributeSql
-  }
-
-  // For all Node subprojects
-  sp.plugins.withId('com.github.node-gradle.node') {
-    // For all subprojects without resource processing already defined
-    if (!tasks.findByName('processResources')) {
-      // Distribute SQL as part of resource processing
-      tasks.create('processResources')
-      tasks.findAll { it.name == 'processResources' }.each { it.dependsOn distributeSql }
-      // 'clean' is not visible here, make sure to call `undoDistributeSql` as part of each subproject's `clean` task
-    }
-  }
-
-  // Distribute SQL prior to creating deployment archive
-  archiveDeployment.dependsOn distributeSql
-}
-
 subprojects {
   apply plugin: 'com.github.ben-manes.versions'

diff --git a/sequencing-server/build.gradle b/sequencing-server/build.gradle
index b0b40c2bf2..16b48e203c 100644
--- a/sequencing-server/build.gradle
+++ b/sequencing-server/build.gradle
@@ -8,7 +8,6 @@ node {
 }

 task assemble(type: NpmTask) {
-  dependsOn processResources
   dependsOn npmInstall
   args = ['run', 'build']
 }
@@ -24,7 +23,6 @@ task build {
 //}

 task clean(type: Delete) {
-  dependsOn undoDistributeSql
   delete 'build'
 }

From e769cfc7f7d4df248fecb9e93ff9e39b600d66f7 Mon Sep 17 00:00:00 2001
From: Theresa Kamerman
Date: Mon, 5 Feb 2024 14:48:04 -0800 Subject: [PATCH 02/36] Move Merlin DB Files --- .../sql/applied_migrations.sql | 5 + .../sql}/default_user_roles.sql | 0 .../hasura/activity_preset_functions.sql | 0 .../hasura/delete_anchor_functions.sql | 0 .../functions/hasura/hasura_functions.sql | 0 .../hasura/plan_branching_functions.sql | 0 .../functions/hasura/plan_merge_functions.sql | 0 .../functions/hasura/snapshot_functions.sql | 0 .../functions/merlin/merging}/begin_merge.sql | 0 .../merlin/merging}/commit_merge.sql | 0 .../merlin/merging}/duplicate_plan.sql | 0 .../merlin/merging}/get_merge_base.sql | 0 .../merge_request_state_functions.sql | 0 .../merlin/snapshots}/create_snapshot.sql | 0 .../snapshots}/plan_history_functions.sql | 0 .../snapshots}/restore_from_snapshot.sql | 0 .../check_general_permissions.sql | 0 .../permissions}/get_function_permissions.sql | 0 .../sql/functions/permissions}/get_role.sql | 0 .../permissions}/merge_permissions.sql | 0 .../sql/functions/tags}/get_tag_ids.sql | 0 deployment/postgres-init-db/sql/schemas.sql | 8 ++ .../activity_directive.sql | 0 .../activity_directive_changelog.sql | 0 .../activity_directive_metadata_schema.sql | 0 .../activity_directive_validations.sql | 0 .../activity_directive}/activity_presets.sql | 0 .../anchor_validation_status.sql | 0 .../sql/tables/merlin}/activity_type.sql | 0 .../constraints}/constraint_definition.sql | 0 .../constraints}/constraint_metadata.sql | 0 .../constraint_model_specification.sql | 0 .../merlin/constraints}/constraint_run.sql | 0 .../constraints}/constraint_specification.sql | 0 .../sql/tables/merlin/dataset}/dataset.sql | 0 .../sql/tables/merlin/dataset}/event.sql | 0 .../sql/tables/merlin/dataset}/profile.sql | 0 .../merlin/dataset}/profile_segment.sql | 0 .../sql/tables/merlin/dataset}/span.sql | 0 .../sql/tables/merlin/dataset}/topic.sql | 0 .../merging}/conflicting_activities.sql | 0 .../tables/merlin/merging}/merge_comments.sql | 0 .../tables/merlin/merging}/merge_request.sql | 0 .../merlin/merging}/merge_staging_area.sql | 0 .../sql/tables/merlin}/mission_model.sql | 0 .../merlin}/mission_model_parameters.sql | 0 .../sql/tables/merlin}/plan.sql | 0 .../sql/tables/merlin}/plan_collaborators.sql | 0 .../sql/tables/merlin}/plan_dataset.sql | 0 .../sql/tables/merlin}/resource_type.sql | 0 .../tables/merlin/simulation}/simulation.sql | 0 .../merlin/simulation}/simulation_dataset.sql | 0 .../merlin/simulation}/simulation_extent.sql | 0 .../simulation}/simulation_template.sql | 0 .../merlin/snapshot}/plan_latest_snapshot.sql | 0 .../tables/merlin/snapshot}/plan_snapshot.sql | 0 .../snapshot}/plan_snapshot_activities.sql | 0 .../merlin/snapshot}/plan_snapshot_parent.sql | 0 .../sql/tables/merlin}/uploaded_file.sql | 0 .../tables/migrations/schema_migrations.sql | 0 .../permissions}/user_role_permission.sql | 0 .../sql/tables/permissions}/user_roles.sql | 0 .../sql/tables/permissions}/users.sql | 0 .../permissions}/users_allowed_roles.sql | 0 .../tables/tags}/activity_directive_tags.sql | 0 .../tags}/constraint_definition_tags.sql | 0 .../sql/tables/tags}/constraint_tags.sql | 0 .../sql/tables/tags}/plan_snapshot_tags.sql | 0 .../sql/tables/tags}/plan_tags.sql | 0 .../tables/tags}/snapshot_activity_tags.sql | 0 .../sql/tables/tags}/tags.sql | 0 .../merlin-activity-directive-metadata.sql | 7 ++ .../sql/types/merlin}/merlin-arguments.sql | 0 .../sql/types/merlin}/plan-merge-types.sql | 0 .../sql/types/permissions}/permissions.sql | 0 .../merlin}/activity_directive_extended.sql | 0 
.../sql/views/merlin}/resource_profile.sql | 0 .../sql/views/merlin}/simulated_activity.sql | 0 .../views/permissions}/users_and_roles.sql | 0 .../sql/merlin/applied_migrations.sql | 44 ------- .../merlin-activity-directive-metadata.sql | 7 -- merlin-server/sql/merlin/init.sql | 118 ------------------ merlin-server/sql/merlin/schemas.sql | 3 - 83 files changed, 20 insertions(+), 172 deletions(-) create mode 100644 deployment/postgres-init-db/sql/applied_migrations.sql rename {merlin-server/sql/merlin => deployment/postgres-init-db/sql}/default_user_roles.sql (100%) rename {merlin-server/sql/merlin => deployment/postgres-init-db/sql}/functions/hasura/activity_preset_functions.sql (100%) rename {merlin-server/sql/merlin => deployment/postgres-init-db/sql}/functions/hasura/delete_anchor_functions.sql (100%) rename {merlin-server/sql/merlin => deployment/postgres-init-db/sql}/functions/hasura/hasura_functions.sql (100%) rename {merlin-server/sql/merlin => deployment/postgres-init-db/sql}/functions/hasura/plan_branching_functions.sql (100%) rename {merlin-server/sql/merlin => deployment/postgres-init-db/sql}/functions/hasura/plan_merge_functions.sql (100%) rename {merlin-server/sql/merlin => deployment/postgres-init-db/sql}/functions/hasura/snapshot_functions.sql (100%) rename {merlin-server/sql/merlin/functions/public => deployment/postgres-init-db/sql/functions/merlin/merging}/begin_merge.sql (100%) rename {merlin-server/sql/merlin/functions/public => deployment/postgres-init-db/sql/functions/merlin/merging}/commit_merge.sql (100%) rename {merlin-server/sql/merlin/functions/public => deployment/postgres-init-db/sql/functions/merlin/merging}/duplicate_plan.sql (100%) rename {merlin-server/sql/merlin/functions/public => deployment/postgres-init-db/sql/functions/merlin/merging}/get_merge_base.sql (100%) rename {merlin-server/sql/merlin/functions/public => deployment/postgres-init-db/sql/functions/merlin/merging}/merge_request_state_functions.sql (100%) rename {merlin-server/sql/merlin/functions/public => deployment/postgres-init-db/sql/functions/merlin/snapshots}/create_snapshot.sql (100%) rename {merlin-server/sql/merlin/functions/public => deployment/postgres-init-db/sql/functions/merlin/snapshots}/plan_history_functions.sql (100%) rename {merlin-server/sql/merlin/functions/public => deployment/postgres-init-db/sql/functions/merlin/snapshots}/restore_from_snapshot.sql (100%) rename {merlin-server/sql/merlin/functions/metadata => deployment/postgres-init-db/sql/functions/permissions}/check_general_permissions.sql (100%) rename {merlin-server/sql/merlin/functions/metadata => deployment/postgres-init-db/sql/functions/permissions}/get_function_permissions.sql (100%) rename {merlin-server/sql/merlin/functions/metadata => deployment/postgres-init-db/sql/functions/permissions}/get_role.sql (100%) rename {merlin-server/sql/merlin/functions/metadata => deployment/postgres-init-db/sql/functions/permissions}/merge_permissions.sql (100%) rename {merlin-server/sql/merlin/functions/metadata => deployment/postgres-init-db/sql/functions/tags}/get_tag_ids.sql (100%) create mode 100644 deployment/postgres-init-db/sql/schemas.sql rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/activity_directive}/activity_directive.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/activity_directive}/activity_directive_changelog.sql (100%) rename {merlin-server/sql/merlin/tables => 
deployment/postgres-init-db/sql/tables/merlin/activity_directive}/activity_directive_metadata_schema.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/activity_directive}/activity_directive_validations.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/activity_directive}/activity_presets.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/activity_directive}/anchor_validation_status.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin}/activity_type.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/constraints}/constraint_definition.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/constraints}/constraint_metadata.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/constraints}/constraint_model_specification.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/constraints}/constraint_run.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/constraints}/constraint_specification.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/dataset}/dataset.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/dataset}/event.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/dataset}/profile.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/dataset}/profile_segment.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/dataset}/span.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/dataset}/topic.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/merging}/conflicting_activities.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/merging}/merge_comments.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/merging}/merge_request.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/merging}/merge_staging_area.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin}/mission_model.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin}/mission_model_parameters.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin}/plan.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin}/plan_collaborators.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin}/plan_dataset.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin}/resource_type.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/simulation}/simulation.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/simulation}/simulation_dataset.sql (100%) rename {merlin-server/sql/merlin/tables => 
deployment/postgres-init-db/sql/tables/merlin/simulation}/simulation_extent.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/simulation}/simulation_template.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/snapshot}/plan_latest_snapshot.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/snapshot}/plan_snapshot.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/snapshot}/plan_snapshot_activities.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin/snapshot}/plan_snapshot_parent.sql (100%) rename {merlin-server/sql/merlin/tables => deployment/postgres-init-db/sql/tables/merlin}/uploaded_file.sql (100%) rename {merlin-server/sql/merlin => deployment/postgres-init-db/sql}/tables/migrations/schema_migrations.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/permissions}/user_role_permission.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/permissions}/user_roles.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/permissions}/users.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/permissions}/users_allowed_roles.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/activity_directive_tags.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/constraint_definition_tags.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/constraint_tags.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/plan_snapshot_tags.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/plan_tags.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/snapshot_activity_tags.sql (100%) rename {merlin-server/sql/merlin/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/tags.sql (100%) create mode 100644 deployment/postgres-init-db/sql/types/merlin/merlin-activity-directive-metadata.sql rename {merlin-server/sql/merlin/domain-types => deployment/postgres-init-db/sql/types/merlin}/merlin-arguments.sql (100%) rename {merlin-server/sql/merlin/domain-types => deployment/postgres-init-db/sql/types/merlin}/plan-merge-types.sql (100%) rename {merlin-server/sql/merlin/domain-types => deployment/postgres-init-db/sql/types/permissions}/permissions.sql (100%) rename {merlin-server/sql/merlin/views => deployment/postgres-init-db/sql/views/merlin}/activity_directive_extended.sql (100%) rename {merlin-server/sql/merlin/views => deployment/postgres-init-db/sql/views/merlin}/resource_profile.sql (100%) rename {merlin-server/sql/merlin/views => deployment/postgres-init-db/sql/views/merlin}/simulated_activity.sql (100%) rename {merlin-server/sql/merlin/views => deployment/postgres-init-db/sql/views/permissions}/users_and_roles.sql (100%) delete mode 100644 merlin-server/sql/merlin/applied_migrations.sql delete mode 100644 merlin-server/sql/merlin/domain-types/merlin-activity-directive-metadata.sql delete mode 100644 merlin-server/sql/merlin/init.sql delete mode 
100644 merlin-server/sql/merlin/schemas.sql diff --git a/deployment/postgres-init-db/sql/applied_migrations.sql b/deployment/postgres-init-db/sql/applied_migrations.sql new file mode 100644 index 0000000000..81b3adbfda --- /dev/null +++ b/deployment/postgres-init-db/sql/applied_migrations.sql @@ -0,0 +1,5 @@ +/* +This file denotes which migrations occur "before" this version of the schema. +*/ + +call migrations.mark_migration_applied('0'); diff --git a/merlin-server/sql/merlin/default_user_roles.sql b/deployment/postgres-init-db/sql/default_user_roles.sql similarity index 100% rename from merlin-server/sql/merlin/default_user_roles.sql rename to deployment/postgres-init-db/sql/default_user_roles.sql diff --git a/merlin-server/sql/merlin/functions/hasura/activity_preset_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/activity_preset_functions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/hasura/activity_preset_functions.sql rename to deployment/postgres-init-db/sql/functions/hasura/activity_preset_functions.sql diff --git a/merlin-server/sql/merlin/functions/hasura/delete_anchor_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/delete_anchor_functions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/hasura/delete_anchor_functions.sql rename to deployment/postgres-init-db/sql/functions/hasura/delete_anchor_functions.sql diff --git a/merlin-server/sql/merlin/functions/hasura/hasura_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/hasura_functions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/hasura/hasura_functions.sql rename to deployment/postgres-init-db/sql/functions/hasura/hasura_functions.sql diff --git a/merlin-server/sql/merlin/functions/hasura/plan_branching_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/plan_branching_functions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/hasura/plan_branching_functions.sql rename to deployment/postgres-init-db/sql/functions/hasura/plan_branching_functions.sql diff --git a/merlin-server/sql/merlin/functions/hasura/plan_merge_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/plan_merge_functions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/hasura/plan_merge_functions.sql rename to deployment/postgres-init-db/sql/functions/hasura/plan_merge_functions.sql diff --git a/merlin-server/sql/merlin/functions/hasura/snapshot_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/snapshot_functions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/hasura/snapshot_functions.sql rename to deployment/postgres-init-db/sql/functions/hasura/snapshot_functions.sql diff --git a/merlin-server/sql/merlin/functions/public/begin_merge.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/begin_merge.sql similarity index 100% rename from merlin-server/sql/merlin/functions/public/begin_merge.sql rename to deployment/postgres-init-db/sql/functions/merlin/merging/begin_merge.sql diff --git a/merlin-server/sql/merlin/functions/public/commit_merge.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/commit_merge.sql similarity index 100% rename from merlin-server/sql/merlin/functions/public/commit_merge.sql rename to deployment/postgres-init-db/sql/functions/merlin/merging/commit_merge.sql diff --git a/merlin-server/sql/merlin/functions/public/duplicate_plan.sql 
b/deployment/postgres-init-db/sql/functions/merlin/merging/duplicate_plan.sql similarity index 100% rename from merlin-server/sql/merlin/functions/public/duplicate_plan.sql rename to deployment/postgres-init-db/sql/functions/merlin/merging/duplicate_plan.sql diff --git a/merlin-server/sql/merlin/functions/public/get_merge_base.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/get_merge_base.sql similarity index 100% rename from merlin-server/sql/merlin/functions/public/get_merge_base.sql rename to deployment/postgres-init-db/sql/functions/merlin/merging/get_merge_base.sql diff --git a/merlin-server/sql/merlin/functions/public/merge_request_state_functions.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/merge_request_state_functions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/public/merge_request_state_functions.sql rename to deployment/postgres-init-db/sql/functions/merlin/merging/merge_request_state_functions.sql diff --git a/merlin-server/sql/merlin/functions/public/create_snapshot.sql b/deployment/postgres-init-db/sql/functions/merlin/snapshots/create_snapshot.sql similarity index 100% rename from merlin-server/sql/merlin/functions/public/create_snapshot.sql rename to deployment/postgres-init-db/sql/functions/merlin/snapshots/create_snapshot.sql diff --git a/merlin-server/sql/merlin/functions/public/plan_history_functions.sql b/deployment/postgres-init-db/sql/functions/merlin/snapshots/plan_history_functions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/public/plan_history_functions.sql rename to deployment/postgres-init-db/sql/functions/merlin/snapshots/plan_history_functions.sql diff --git a/merlin-server/sql/merlin/functions/public/restore_from_snapshot.sql b/deployment/postgres-init-db/sql/functions/merlin/snapshots/restore_from_snapshot.sql similarity index 100% rename from merlin-server/sql/merlin/functions/public/restore_from_snapshot.sql rename to deployment/postgres-init-db/sql/functions/merlin/snapshots/restore_from_snapshot.sql diff --git a/merlin-server/sql/merlin/functions/metadata/check_general_permissions.sql b/deployment/postgres-init-db/sql/functions/permissions/check_general_permissions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/metadata/check_general_permissions.sql rename to deployment/postgres-init-db/sql/functions/permissions/check_general_permissions.sql diff --git a/merlin-server/sql/merlin/functions/metadata/get_function_permissions.sql b/deployment/postgres-init-db/sql/functions/permissions/get_function_permissions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/metadata/get_function_permissions.sql rename to deployment/postgres-init-db/sql/functions/permissions/get_function_permissions.sql diff --git a/merlin-server/sql/merlin/functions/metadata/get_role.sql b/deployment/postgres-init-db/sql/functions/permissions/get_role.sql similarity index 100% rename from merlin-server/sql/merlin/functions/metadata/get_role.sql rename to deployment/postgres-init-db/sql/functions/permissions/get_role.sql diff --git a/merlin-server/sql/merlin/functions/metadata/merge_permissions.sql b/deployment/postgres-init-db/sql/functions/permissions/merge_permissions.sql similarity index 100% rename from merlin-server/sql/merlin/functions/metadata/merge_permissions.sql rename to deployment/postgres-init-db/sql/functions/permissions/merge_permissions.sql diff --git a/merlin-server/sql/merlin/functions/metadata/get_tag_ids.sql 
b/deployment/postgres-init-db/sql/functions/tags/get_tag_ids.sql similarity index 100% rename from merlin-server/sql/merlin/functions/metadata/get_tag_ids.sql rename to deployment/postgres-init-db/sql/functions/tags/get_tag_ids.sql diff --git a/deployment/postgres-init-db/sql/schemas.sql b/deployment/postgres-init-db/sql/schemas.sql new file mode 100644 index 0000000000..2394098f86 --- /dev/null +++ b/deployment/postgres-init-db/sql/schemas.sql @@ -0,0 +1,8 @@ +-- Services +create schema merlin; + +-- Cross Service +create schema migrations; +create schema hasura; +create schema permissions; +create schema tags; diff --git a/merlin-server/sql/merlin/tables/activity_directive.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive.sql similarity index 100% rename from merlin-server/sql/merlin/tables/activity_directive.sql rename to deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive.sql diff --git a/merlin-server/sql/merlin/tables/activity_directive_changelog.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_changelog.sql similarity index 100% rename from merlin-server/sql/merlin/tables/activity_directive_changelog.sql rename to deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_changelog.sql diff --git a/merlin-server/sql/merlin/tables/activity_directive_metadata_schema.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_metadata_schema.sql similarity index 100% rename from merlin-server/sql/merlin/tables/activity_directive_metadata_schema.sql rename to deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_metadata_schema.sql diff --git a/merlin-server/sql/merlin/tables/activity_directive_validations.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_validations.sql similarity index 100% rename from merlin-server/sql/merlin/tables/activity_directive_validations.sql rename to deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_validations.sql diff --git a/merlin-server/sql/merlin/tables/activity_presets.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_presets.sql similarity index 100% rename from merlin-server/sql/merlin/tables/activity_presets.sql rename to deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_presets.sql diff --git a/merlin-server/sql/merlin/tables/anchor_validation_status.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/anchor_validation_status.sql similarity index 100% rename from merlin-server/sql/merlin/tables/anchor_validation_status.sql rename to deployment/postgres-init-db/sql/tables/merlin/activity_directive/anchor_validation_status.sql diff --git a/merlin-server/sql/merlin/tables/activity_type.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_type.sql similarity index 100% rename from merlin-server/sql/merlin/tables/activity_type.sql rename to deployment/postgres-init-db/sql/tables/merlin/activity_type.sql diff --git a/merlin-server/sql/merlin/tables/constraint_definition.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_definition.sql similarity index 100% rename from merlin-server/sql/merlin/tables/constraint_definition.sql rename to deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_definition.sql diff --git 
a/merlin-server/sql/merlin/tables/constraint_metadata.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_metadata.sql similarity index 100% rename from merlin-server/sql/merlin/tables/constraint_metadata.sql rename to deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_metadata.sql diff --git a/merlin-server/sql/merlin/tables/constraint_model_specification.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_model_specification.sql similarity index 100% rename from merlin-server/sql/merlin/tables/constraint_model_specification.sql rename to deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_model_specification.sql diff --git a/merlin-server/sql/merlin/tables/constraint_run.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_run.sql similarity index 100% rename from merlin-server/sql/merlin/tables/constraint_run.sql rename to deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_run.sql diff --git a/merlin-server/sql/merlin/tables/constraint_specification.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_specification.sql similarity index 100% rename from merlin-server/sql/merlin/tables/constraint_specification.sql rename to deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_specification.sql diff --git a/merlin-server/sql/merlin/tables/dataset.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/dataset.sql similarity index 100% rename from merlin-server/sql/merlin/tables/dataset.sql rename to deployment/postgres-init-db/sql/tables/merlin/dataset/dataset.sql diff --git a/merlin-server/sql/merlin/tables/event.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/event.sql similarity index 100% rename from merlin-server/sql/merlin/tables/event.sql rename to deployment/postgres-init-db/sql/tables/merlin/dataset/event.sql diff --git a/merlin-server/sql/merlin/tables/profile.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/profile.sql similarity index 100% rename from merlin-server/sql/merlin/tables/profile.sql rename to deployment/postgres-init-db/sql/tables/merlin/dataset/profile.sql diff --git a/merlin-server/sql/merlin/tables/profile_segment.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/profile_segment.sql similarity index 100% rename from merlin-server/sql/merlin/tables/profile_segment.sql rename to deployment/postgres-init-db/sql/tables/merlin/dataset/profile_segment.sql diff --git a/merlin-server/sql/merlin/tables/span.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/span.sql similarity index 100% rename from merlin-server/sql/merlin/tables/span.sql rename to deployment/postgres-init-db/sql/tables/merlin/dataset/span.sql diff --git a/merlin-server/sql/merlin/tables/topic.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/topic.sql similarity index 100% rename from merlin-server/sql/merlin/tables/topic.sql rename to deployment/postgres-init-db/sql/tables/merlin/dataset/topic.sql diff --git a/merlin-server/sql/merlin/tables/conflicting_activities.sql b/deployment/postgres-init-db/sql/tables/merlin/merging/conflicting_activities.sql similarity index 100% rename from merlin-server/sql/merlin/tables/conflicting_activities.sql rename to deployment/postgres-init-db/sql/tables/merlin/merging/conflicting_activities.sql diff --git a/merlin-server/sql/merlin/tables/merge_comments.sql b/deployment/postgres-init-db/sql/tables/merlin/merging/merge_comments.sql similarity index 100% 
rename from merlin-server/sql/merlin/tables/merge_comments.sql rename to deployment/postgres-init-db/sql/tables/merlin/merging/merge_comments.sql diff --git a/merlin-server/sql/merlin/tables/merge_request.sql b/deployment/postgres-init-db/sql/tables/merlin/merging/merge_request.sql similarity index 100% rename from merlin-server/sql/merlin/tables/merge_request.sql rename to deployment/postgres-init-db/sql/tables/merlin/merging/merge_request.sql diff --git a/merlin-server/sql/merlin/tables/merge_staging_area.sql b/deployment/postgres-init-db/sql/tables/merlin/merging/merge_staging_area.sql similarity index 100% rename from merlin-server/sql/merlin/tables/merge_staging_area.sql rename to deployment/postgres-init-db/sql/tables/merlin/merging/merge_staging_area.sql diff --git a/merlin-server/sql/merlin/tables/mission_model.sql b/deployment/postgres-init-db/sql/tables/merlin/mission_model.sql similarity index 100% rename from merlin-server/sql/merlin/tables/mission_model.sql rename to deployment/postgres-init-db/sql/tables/merlin/mission_model.sql diff --git a/merlin-server/sql/merlin/tables/mission_model_parameters.sql b/deployment/postgres-init-db/sql/tables/merlin/mission_model_parameters.sql similarity index 100% rename from merlin-server/sql/merlin/tables/mission_model_parameters.sql rename to deployment/postgres-init-db/sql/tables/merlin/mission_model_parameters.sql diff --git a/merlin-server/sql/merlin/tables/plan.sql b/deployment/postgres-init-db/sql/tables/merlin/plan.sql similarity index 100% rename from merlin-server/sql/merlin/tables/plan.sql rename to deployment/postgres-init-db/sql/tables/merlin/plan.sql diff --git a/merlin-server/sql/merlin/tables/plan_collaborators.sql b/deployment/postgres-init-db/sql/tables/merlin/plan_collaborators.sql similarity index 100% rename from merlin-server/sql/merlin/tables/plan_collaborators.sql rename to deployment/postgres-init-db/sql/tables/merlin/plan_collaborators.sql diff --git a/merlin-server/sql/merlin/tables/plan_dataset.sql b/deployment/postgres-init-db/sql/tables/merlin/plan_dataset.sql similarity index 100% rename from merlin-server/sql/merlin/tables/plan_dataset.sql rename to deployment/postgres-init-db/sql/tables/merlin/plan_dataset.sql diff --git a/merlin-server/sql/merlin/tables/resource_type.sql b/deployment/postgres-init-db/sql/tables/merlin/resource_type.sql similarity index 100% rename from merlin-server/sql/merlin/tables/resource_type.sql rename to deployment/postgres-init-db/sql/tables/merlin/resource_type.sql diff --git a/merlin-server/sql/merlin/tables/simulation.sql b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation.sql similarity index 100% rename from merlin-server/sql/merlin/tables/simulation.sql rename to deployment/postgres-init-db/sql/tables/merlin/simulation/simulation.sql diff --git a/merlin-server/sql/merlin/tables/simulation_dataset.sql b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql similarity index 100% rename from merlin-server/sql/merlin/tables/simulation_dataset.sql rename to deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql diff --git a/merlin-server/sql/merlin/tables/simulation_extent.sql b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_extent.sql similarity index 100% rename from merlin-server/sql/merlin/tables/simulation_extent.sql rename to deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_extent.sql diff --git a/merlin-server/sql/merlin/tables/simulation_template.sql 
b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_template.sql similarity index 100% rename from merlin-server/sql/merlin/tables/simulation_template.sql rename to deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_template.sql diff --git a/merlin-server/sql/merlin/tables/plan_latest_snapshot.sql b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_latest_snapshot.sql similarity index 100% rename from merlin-server/sql/merlin/tables/plan_latest_snapshot.sql rename to deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_latest_snapshot.sql diff --git a/merlin-server/sql/merlin/tables/plan_snapshot.sql b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot.sql similarity index 100% rename from merlin-server/sql/merlin/tables/plan_snapshot.sql rename to deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot.sql diff --git a/merlin-server/sql/merlin/tables/plan_snapshot_activities.sql b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_activities.sql similarity index 100% rename from merlin-server/sql/merlin/tables/plan_snapshot_activities.sql rename to deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_activities.sql diff --git a/merlin-server/sql/merlin/tables/plan_snapshot_parent.sql b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_parent.sql similarity index 100% rename from merlin-server/sql/merlin/tables/plan_snapshot_parent.sql rename to deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_parent.sql diff --git a/merlin-server/sql/merlin/tables/uploaded_file.sql b/deployment/postgres-init-db/sql/tables/merlin/uploaded_file.sql similarity index 100% rename from merlin-server/sql/merlin/tables/uploaded_file.sql rename to deployment/postgres-init-db/sql/tables/merlin/uploaded_file.sql diff --git a/merlin-server/sql/merlin/tables/migrations/schema_migrations.sql b/deployment/postgres-init-db/sql/tables/migrations/schema_migrations.sql similarity index 100% rename from merlin-server/sql/merlin/tables/migrations/schema_migrations.sql rename to deployment/postgres-init-db/sql/tables/migrations/schema_migrations.sql diff --git a/merlin-server/sql/merlin/tables/metadata/user_role_permission.sql b/deployment/postgres-init-db/sql/tables/permissions/user_role_permission.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/user_role_permission.sql rename to deployment/postgres-init-db/sql/tables/permissions/user_role_permission.sql diff --git a/merlin-server/sql/merlin/tables/metadata/user_roles.sql b/deployment/postgres-init-db/sql/tables/permissions/user_roles.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/user_roles.sql rename to deployment/postgres-init-db/sql/tables/permissions/user_roles.sql diff --git a/merlin-server/sql/merlin/tables/metadata/users.sql b/deployment/postgres-init-db/sql/tables/permissions/users.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/users.sql rename to deployment/postgres-init-db/sql/tables/permissions/users.sql diff --git a/merlin-server/sql/merlin/tables/metadata/users_allowed_roles.sql b/deployment/postgres-init-db/sql/tables/permissions/users_allowed_roles.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/users_allowed_roles.sql rename to deployment/postgres-init-db/sql/tables/permissions/users_allowed_roles.sql diff --git a/merlin-server/sql/merlin/tables/metadata/activity_directive_tags.sql 
b/deployment/postgres-init-db/sql/tables/tags/activity_directive_tags.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/activity_directive_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/activity_directive_tags.sql diff --git a/merlin-server/sql/merlin/tables/metadata/constraint_definition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/constraint_definition_tags.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/constraint_definition_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/constraint_definition_tags.sql diff --git a/merlin-server/sql/merlin/tables/metadata/constraint_tags.sql b/deployment/postgres-init-db/sql/tables/tags/constraint_tags.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/constraint_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/constraint_tags.sql diff --git a/merlin-server/sql/merlin/tables/metadata/plan_snapshot_tags.sql b/deployment/postgres-init-db/sql/tables/tags/plan_snapshot_tags.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/plan_snapshot_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/plan_snapshot_tags.sql diff --git a/merlin-server/sql/merlin/tables/metadata/plan_tags.sql b/deployment/postgres-init-db/sql/tables/tags/plan_tags.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/plan_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/plan_tags.sql diff --git a/merlin-server/sql/merlin/tables/metadata/snapshot_activity_tags.sql b/deployment/postgres-init-db/sql/tables/tags/snapshot_activity_tags.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/snapshot_activity_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/snapshot_activity_tags.sql diff --git a/merlin-server/sql/merlin/tables/metadata/tags.sql b/deployment/postgres-init-db/sql/tables/tags/tags.sql similarity index 100% rename from merlin-server/sql/merlin/tables/metadata/tags.sql rename to deployment/postgres-init-db/sql/tables/tags/tags.sql diff --git a/deployment/postgres-init-db/sql/types/merlin/merlin-activity-directive-metadata.sql b/deployment/postgres-init-db/sql/types/merlin/merlin-activity-directive-metadata.sql new file mode 100644 index 0000000000..2cce75c676 --- /dev/null +++ b/deployment/postgres-init-db/sql/types/merlin/merlin-activity-directive-metadata.sql @@ -0,0 +1,7 @@ +create domain activity_directive_metadata_set as jsonb + constraint activity_directive_metadata_set_is_object + check(jsonb_typeof(value) = 'object'); + +comment on domain activity_directive_metadata_set is e'' + 'The set of mission defined metadata associated with an activity directive.'; + diff --git a/merlin-server/sql/merlin/domain-types/merlin-arguments.sql b/deployment/postgres-init-db/sql/types/merlin/merlin-arguments.sql similarity index 100% rename from merlin-server/sql/merlin/domain-types/merlin-arguments.sql rename to deployment/postgres-init-db/sql/types/merlin/merlin-arguments.sql diff --git a/merlin-server/sql/merlin/domain-types/plan-merge-types.sql b/deployment/postgres-init-db/sql/types/merlin/plan-merge-types.sql similarity index 100% rename from merlin-server/sql/merlin/domain-types/plan-merge-types.sql rename to deployment/postgres-init-db/sql/types/merlin/plan-merge-types.sql diff --git a/merlin-server/sql/merlin/domain-types/permissions.sql b/deployment/postgres-init-db/sql/types/permissions/permissions.sql similarity index 100% 
rename from merlin-server/sql/merlin/domain-types/permissions.sql rename to deployment/postgres-init-db/sql/types/permissions/permissions.sql diff --git a/merlin-server/sql/merlin/views/activity_directive_extended.sql b/deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql similarity index 100% rename from merlin-server/sql/merlin/views/activity_directive_extended.sql rename to deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql diff --git a/merlin-server/sql/merlin/views/resource_profile.sql b/deployment/postgres-init-db/sql/views/merlin/resource_profile.sql similarity index 100% rename from merlin-server/sql/merlin/views/resource_profile.sql rename to deployment/postgres-init-db/sql/views/merlin/resource_profile.sql diff --git a/merlin-server/sql/merlin/views/simulated_activity.sql b/deployment/postgres-init-db/sql/views/merlin/simulated_activity.sql similarity index 100% rename from merlin-server/sql/merlin/views/simulated_activity.sql rename to deployment/postgres-init-db/sql/views/merlin/simulated_activity.sql diff --git a/merlin-server/sql/merlin/views/users_and_roles.sql b/deployment/postgres-init-db/sql/views/permissions/users_and_roles.sql similarity index 100% rename from merlin-server/sql/merlin/views/users_and_roles.sql rename to deployment/postgres-init-db/sql/views/permissions/users_and_roles.sql diff --git a/merlin-server/sql/merlin/applied_migrations.sql b/merlin-server/sql/merlin/applied_migrations.sql deleted file mode 100644 index b6850b36a9..0000000000 --- a/merlin-server/sql/merlin/applied_migrations.sql +++ /dev/null @@ -1,44 +0,0 @@ -/* -This file denotes which migrations occur "before" this version of the schema. -*/ - -call migrations.mark_migration_applied('0'); -call migrations.mark_migration_applied('1'); -call migrations.mark_migration_applied('2'); -call migrations.mark_migration_applied('3'); -call migrations.mark_migration_applied('4'); -call migrations.mark_migration_applied('5'); -call migrations.mark_migration_applied('6'); -call migrations.mark_migration_applied('7'); -call migrations.mark_migration_applied('8'); -call migrations.mark_migration_applied('9'); -call migrations.mark_migration_applied('10'); -call migrations.mark_migration_applied('11'); -call migrations.mark_migration_applied('12'); -call migrations.mark_migration_applied('13'); -call migrations.mark_migration_applied('14'); -call migrations.mark_migration_applied('15'); -call migrations.mark_migration_applied('16'); -call migrations.mark_migration_applied('17'); -call migrations.mark_migration_applied('18'); -call migrations.mark_migration_applied('19'); -call migrations.mark_migration_applied('20'); -call migrations.mark_migration_applied('21'); -call migrations.mark_migration_applied('22'); -call migrations.mark_migration_applied('23'); -call migrations.mark_migration_applied('24'); -call migrations.mark_migration_applied('25'); -call migrations.mark_migration_applied('26'); -call migrations.mark_migration_applied('27'); -call migrations.mark_migration_applied('28'); -call migrations.mark_migration_applied('29'); -call migrations.mark_migration_applied('30'); -call migrations.mark_migration_applied('31'); -call migrations.mark_migration_applied('32'); -call migrations.mark_migration_applied('33'); -call migrations.mark_migration_applied('34'); -call migrations.mark_migration_applied('35'); -call migrations.mark_migration_applied('36'); -call migrations.mark_migration_applied('37'); -call migrations.mark_migration_applied('38'); -call 
migrations.mark_migration_applied('39'); diff --git a/merlin-server/sql/merlin/domain-types/merlin-activity-directive-metadata.sql b/merlin-server/sql/merlin/domain-types/merlin-activity-directive-metadata.sql deleted file mode 100644 index 587627bb2c..0000000000 --- a/merlin-server/sql/merlin/domain-types/merlin-activity-directive-metadata.sql +++ /dev/null @@ -1,7 +0,0 @@ -create domain merlin_activity_directive_metadata_set as jsonb - constraint merlin_activity_directive_metadata_set_is_object - check(jsonb_typeof(value) = 'object'); - -comment on domain merlin_activity_directive_metadata_set is e'' - 'The set of mission defined metadata associated with an activity directive.'; - diff --git a/merlin-server/sql/merlin/init.sql b/merlin-server/sql/merlin/init.sql deleted file mode 100644 index cc6b18bb77..0000000000 --- a/merlin-server/sql/merlin/init.sql +++ /dev/null @@ -1,118 +0,0 @@ --- The order of inclusion is important! Tables referenced by foreign keys must be loaded before their dependants. - -begin; - -- Non-Public Schemas - \ir schemas.sql - - -- Schema migrations - \ir tables/migrations/schema_migrations.sql - \ir applied_migrations.sql - - -- Domain types. - \ir domain-types/permissions.sql - \ir domain-types/merlin-arguments.sql - \ir domain-types/merlin-activity-directive-metadata.sql - \ir domain-types/plan-merge-types.sql - - -- Deployment-level Metadata - \ir tables/metadata/user_roles.sql - \ir tables/metadata/user_role_permission.sql - \ir tables/metadata/users.sql - \ir tables/metadata/users_allowed_roles.sql - \ir tables/metadata/tags.sql - - -- Activity Directive Metadata schema - \ir tables/activity_directive_metadata_schema.sql - - -- Tables. - -- Uploaded files (JARs or simulation input files). - \ir tables/uploaded_file.sql - - -- Planning intents. - \ir tables/mission_model.sql - \ir tables/activity_type.sql - \ir tables/resource_type.sql - \ir tables/plan.sql - \ir tables/plan_collaborators.sql - \ir tables/activity_directive.sql - \ir tables/activity_directive_changelog.sql - \ir tables/activity_directive_validations.sql - \ir tables/anchor_validation_status.sql - \ir tables/simulation_template.sql - \ir tables/simulation.sql - - -- Uploaded datasets (or datasets generated from simulation). 
- \ir tables/dataset.sql - \ir tables/span.sql - \ir tables/profile.sql - \ir tables/profile_segment.sql - \ir tables/topic.sql - \ir tables/event.sql - - -- Analysis intents - \ir tables/mission_model_parameters.sql - \ir tables/simulation_dataset.sql - \ir tables/simulation_extent.sql - \ir tables/plan_dataset.sql - - -- Constraints - \ir tables/constraint_metadata.sql - \ir tables/constraint_definition.sql - \ir tables/constraint_specification.sql - \ir tables/constraint_model_specification.sql - \ir tables/constraint_run.sql - - -- Plan Collaboration - \ir tables/plan_snapshot.sql - \ir tables/plan_latest_snapshot.sql - \ir tables/plan_snapshot_parent.sql - \ir tables/plan_snapshot_activities.sql - \ir tables/merge_request.sql - \ir tables/merge_comments.sql - \ir tables/merge_staging_area.sql - \ir tables/conflicting_activities.sql - \ir functions/public/duplicate_plan.sql - \ir functions/public/plan_history_functions.sql - \ir functions/public/get_merge_base.sql - \ir functions/public/merge_request_state_functions.sql - \ir functions/metadata/get_tag_ids.sql - \ir functions/public/begin_merge.sql - \ir functions/public/commit_merge.sql - \ir functions/public/create_snapshot.sql - \ir functions/public/restore_from_snapshot.sql - - -- Presets - \ir tables/activity_presets.sql - - -- Table-specific Metadata - \ir tables/metadata/activity_directive_tags.sql - \ir tables/metadata/constraint_tags.sql - \ir tables/metadata/constraint_definition_tags.sql - \ir tables/metadata/plan_snapshot_tags.sql - \ir tables/metadata/plan_tags.sql - \ir tables/metadata/snapshot_activity_tags.sql - - -- Views - \ir views/users_and_roles.sql - \ir views/simulated_activity.sql - \ir views/resource_profile.sql - \ir views/activity_directive_extended.sql - - -- Permission Functions - \ir functions/metadata/get_role.sql - \ir functions/metadata/get_function_permissions.sql - \ir functions/metadata/check_general_permissions.sql - \ir functions/metadata/merge_permissions.sql - - -- Hasura Functions - \ir functions/hasura/activity_preset_functions.sql - \ir functions/hasura/snapshot_functions.sql - \ir functions/hasura/delete_anchor_functions.sql - \ir functions/hasura/hasura_functions.sql - \ir functions/hasura/plan_branching_functions.sql - \ir functions/hasura/plan_merge_functions.sql - - -- Preload Data - \ir default_user_roles.sql; - -end; diff --git a/merlin-server/sql/merlin/schemas.sql b/merlin-server/sql/merlin/schemas.sql deleted file mode 100644 index 808adb9215..0000000000 --- a/merlin-server/sql/merlin/schemas.sql +++ /dev/null @@ -1,3 +0,0 @@ -create schema hasura_functions; -create schema metadata; -create schema migrations; From 6fe6d15b08bab4fa80e00cf08d092b6778bd796e Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 5 Feb 2024 14:55:22 -0800 Subject: [PATCH 03/36] Move Scheduler DB Files --- deployment/postgres-init-db/sql/schemas.sql | 1 + .../scheduling_condition_definition.sql | 0 .../scheduling_condition_metadata.sql | 0 .../scheduler}/scheduling_goal_definition.sql | 0 .../scheduler}/scheduling_goal_metadata.sql | 0 .../scheduling_goal_analysis.sql | 0 ...uling_goal_analysis_created_activities.sql | 0 ...ng_goal_analysis_satisfying_activities.sql | 0 .../scheduling_run}/scheduling_request.sql | 0 ...eduling_model_specification_conditions.sql | 0 .../scheduling_model_specification_goals.sql | 0 .../scheduling_specification.sql | 0 .../scheduling_specification_conditions.sql | 0 .../scheduling_specification_goals.sql | 0 .../scheduling_condition_definition_tags.sql | 0 
.../tags}/scheduling_condition_tags.sql | 0 .../tags}/scheduling_goal_definition_tags.sql | 0 .../sql/tables/tags}/scheduling_goal_tags.sql | 0 .../sql/scheduler/applied_migrations.sql | 19 ---------- scheduler-server/sql/scheduler/init.sql | 36 ------------------- scheduler-server/sql/scheduler/schemas.sql | 2 -- .../scheduler/tables/schema_migrations.sql | 30 ---------------- 22 files changed, 1 insertion(+), 87 deletions(-) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler}/scheduling_condition_definition.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler}/scheduling_condition_metadata.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler}/scheduling_goal_definition.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler}/scheduling_goal_metadata.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler/scheduling_run}/scheduling_goal_analysis.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler/scheduling_run}/scheduling_goal_analysis_created_activities.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler/scheduling_run}/scheduling_goal_analysis_satisfying_activities.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler/scheduling_run}/scheduling_request.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification}/scheduling_model_specification_conditions.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification}/scheduling_model_specification_goals.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification}/scheduling_specification.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification}/scheduling_specification_conditions.sql (100%) rename {scheduler-server/sql/scheduler/tables => deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification}/scheduling_specification_goals.sql (100%) rename {scheduler-server/sql/scheduler/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/scheduling_condition_definition_tags.sql (100%) rename {scheduler-server/sql/scheduler/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/scheduling_condition_tags.sql (100%) rename {scheduler-server/sql/scheduler/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/scheduling_goal_definition_tags.sql (100%) rename {scheduler-server/sql/scheduler/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/scheduling_goal_tags.sql (100%) delete mode 100644 scheduler-server/sql/scheduler/applied_migrations.sql delete mode 100644 scheduler-server/sql/scheduler/init.sql delete mode 100644 scheduler-server/sql/scheduler/schemas.sql delete mode 100644 scheduler-server/sql/scheduler/tables/schema_migrations.sql diff --git a/deployment/postgres-init-db/sql/schemas.sql b/deployment/postgres-init-db/sql/schemas.sql index 2394098f86..b4454326b4 100644 --- a/deployment/postgres-init-db/sql/schemas.sql +++ b/deployment/postgres-init-db/sql/schemas.sql @@ 
-1,5 +1,6 @@ -- Services create schema merlin; +create schema scheduler; -- Cross Service create schema migrations; diff --git a/scheduler-server/sql/scheduler/tables/scheduling_condition_definition.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_condition_definition.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_condition_metadata.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_condition_metadata.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_goal_definition.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_goal_definition.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_goal_metadata.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_goal_metadata.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_goal_analysis.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_goal_analysis.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_goal_analysis_created_activities.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_goal_analysis_created_activities.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_goal_analysis_satisfying_activities.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_goal_analysis_satisfying_activities.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_request.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_request.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_model_specification_conditions.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql similarity index 100% rename from 
scheduler-server/sql/scheduler/tables/scheduling_model_specification_conditions.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_model_specification_goals.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_model_specification_goals.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_specification.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_specification.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_specification_conditions.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_conditions.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_specification_conditions.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_conditions.sql diff --git a/scheduler-server/sql/scheduler/tables/scheduling_specification_goals.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_goals.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/scheduling_specification_goals.sql rename to deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_goals.sql diff --git a/scheduler-server/sql/scheduler/tables/metadata/scheduling_condition_definition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling_condition_definition_tags.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/metadata/scheduling_condition_definition_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/scheduling_condition_definition_tags.sql diff --git a/scheduler-server/sql/scheduler/tables/metadata/scheduling_condition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling_condition_tags.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/metadata/scheduling_condition_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/scheduling_condition_tags.sql diff --git a/scheduler-server/sql/scheduler/tables/metadata/scheduling_goal_definition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling_goal_definition_tags.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/metadata/scheduling_goal_definition_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/scheduling_goal_definition_tags.sql diff --git a/scheduler-server/sql/scheduler/tables/metadata/scheduling_goal_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling_goal_tags.sql similarity index 100% rename from scheduler-server/sql/scheduler/tables/metadata/scheduling_goal_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/scheduling_goal_tags.sql diff --git a/scheduler-server/sql/scheduler/applied_migrations.sql b/scheduler-server/sql/scheduler/applied_migrations.sql deleted file 
mode 100644 index ce58a07c17..0000000000 --- a/scheduler-server/sql/scheduler/applied_migrations.sql +++ /dev/null @@ -1,19 +0,0 @@ -/* -This file denotes which migrations occur "before" this version of the schema. -*/ - -call migrations.mark_migration_applied('0'); -call migrations.mark_migration_applied('1'); -call migrations.mark_migration_applied('2'); -call migrations.mark_migration_applied('3'); -call migrations.mark_migration_applied('4'); -call migrations.mark_migration_applied('5'); -call migrations.mark_migration_applied('6'); -call migrations.mark_migration_applied('7'); -call migrations.mark_migration_applied('8'); -call migrations.mark_migration_applied('9'); -call migrations.mark_migration_applied('10'); -call migrations.mark_migration_applied('11'); -call migrations.mark_migration_applied('12'); -call migrations.mark_migration_applied('13'); -call migrations.mark_migration_applied('14'); diff --git a/scheduler-server/sql/scheduler/init.sql b/scheduler-server/sql/scheduler/init.sql deleted file mode 100644 index 4eb1ac9463..0000000000 --- a/scheduler-server/sql/scheduler/init.sql +++ /dev/null @@ -1,36 +0,0 @@ --- The order of inclusion is important! Tables referenced by foreign keys must be loaded before their dependants. - -begin; - -- Schemas - \ir schemas.sql - -- Schema migrations - \ir tables/schema_migrations.sql - \ir applied_migrations.sql - - -- Scheduling Goals - \ir tables/scheduling_goal_metadata.sql - \ir tables/scheduling_goal_definition.sql - - -- Scheduling Conditions - \ir tables/scheduling_condition_metadata.sql - \ir tables/scheduling_condition_definition.sql - - -- Scheduling Specification - \ir tables/scheduling_specification.sql - \ir tables/scheduling_specification_goals.sql - \ir tables/scheduling_specification_conditions.sql - \ir tables/scheduling_model_specification_conditions.sql - \ir tables/scheduling_model_specification_goals.sql - - -- Scheduling Output - \ir tables/scheduling_request.sql - \ir tables/scheduling_goal_analysis.sql - \ir tables/scheduling_goal_analysis_created_activities.sql - \ir tables/scheduling_goal_analysis_satisfying_activities.sql - - -- Table-specific Metadata - \ir tables/metadata/scheduling_goal_tags.sql - \ir tables/metadata/scheduling_goal_definition_tags.sql - \ir tables/metadata/scheduling_condition_tags.sql - \ir tables/metadata/scheduling_condition_definition_tags.sql -end; diff --git a/scheduler-server/sql/scheduler/schemas.sql b/scheduler-server/sql/scheduler/schemas.sql deleted file mode 100644 index 16fb571826..0000000000 --- a/scheduler-server/sql/scheduler/schemas.sql +++ /dev/null @@ -1,2 +0,0 @@ -create schema metadata; -create schema migrations; diff --git a/scheduler-server/sql/scheduler/tables/schema_migrations.sql b/scheduler-server/sql/scheduler/tables/schema_migrations.sql deleted file mode 100644 index b1123b6267..0000000000 --- a/scheduler-server/sql/scheduler/tables/schema_migrations.sql +++ /dev/null @@ -1,30 +0,0 @@ -create table migrations.schema_migrations ( - migration_id varchar primary key -); - -create procedure migrations.mark_migration_applied(_migration_id varchar) -language plpgsql as $$ -begin - insert into migrations.schema_migrations (migration_id) - values (_migration_id); -end; -$$; - -create procedure migrations.mark_migration_rolled_back(_migration_id varchar) -language plpgsql as $$ -begin - delete from migrations.schema_migrations - where migration_id = _migration_id; -end; -$$; - -comment on schema migrations is e'' - 'Tables and procedures associated with tracking schema 
migrations'; -comment on table migrations.schema_migrations is e'' - 'Tracks what migrations have been applied'; -comment on column migrations.schema_migrations.migration_id is e'' - 'An identifier for a migration that has been applied'; -comment on procedure migrations.mark_migration_applied is e'' - 'Given an identifier for a migration, add that migration to the applied set'; -comment on procedure migrations.mark_migration_rolled_back is e'' - 'Given an identifier for a migration, remove that migration from the applied set'; From 9a611414cd2e190f93ab3ae1f8f11def00010505 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 5 Feb 2024 15:02:37 -0800 Subject: [PATCH 04/36] Move Sequencing DB Files - Remove the removed `sql` directory from the Sequencing Server's `.dockerignore` --- deployment/postgres-init-db/sql/schemas.sql | 1 + .../activity_instance_commands.sql | 0 .../tables/sequencing}/command_dictionary.sql | 0 .../tables/sequencing}/expanded_sequences.sql | 0 .../sql/tables/sequencing}/expansion_rule.sql | 0 .../sql/tables/sequencing}/expansion_run.sql | 0 .../sql/tables/sequencing}/expansion_set.sql | 0 .../sequencing}/expansion_set_to_rule.sql | 0 .../tables/sequencing}/schema_migrations.sql | 0 .../sql/tables/sequencing}/sequence.sql | 0 .../sequence_to_simulated_activity.sql | 0 .../sql/tables/sequencing}/user_sequence.sql | 0 .../sql/tables/tags}/expansion_rule_tags.sql | 0 sequencing-server/.dockerignore | 1 - .../sql/sequencing/applied_migrations.sql | 11 -------- sequencing-server/sql/sequencing/init.sql | 25 ------------------- sequencing-server/sql/sequencing/schemas.sql | 2 -- 17 files changed, 1 insertion(+), 39 deletions(-) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/activity_instance_commands.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/command_dictionary.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/expanded_sequences.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/expansion_rule.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/expansion_run.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/expansion_set.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/expansion_set_to_rule.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/schema_migrations.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/sequence.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/sequence_to_simulated_activity.sql (100%) rename {sequencing-server/sql/sequencing/tables => deployment/postgres-init-db/sql/tables/sequencing}/user_sequence.sql (100%) rename {sequencing-server/sql/sequencing/tables/metadata => deployment/postgres-init-db/sql/tables/tags}/expansion_rule_tags.sql (100%) delete mode 100644 sequencing-server/sql/sequencing/applied_migrations.sql delete mode 100644 sequencing-server/sql/sequencing/init.sql delete mode 100644 sequencing-server/sql/sequencing/schemas.sql diff --git a/deployment/postgres-init-db/sql/schemas.sql 
b/deployment/postgres-init-db/sql/schemas.sql index b4454326b4..1ed25a81c7 100644 --- a/deployment/postgres-init-db/sql/schemas.sql +++ b/deployment/postgres-init-db/sql/schemas.sql @@ -1,6 +1,7 @@ -- Services create schema merlin; create schema scheduler; +create schema sequencing; -- Cross Service create schema migrations; diff --git a/sequencing-server/sql/sequencing/tables/activity_instance_commands.sql b/deployment/postgres-init-db/sql/tables/sequencing/activity_instance_commands.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/activity_instance_commands.sql rename to deployment/postgres-init-db/sql/tables/sequencing/activity_instance_commands.sql diff --git a/sequencing-server/sql/sequencing/tables/command_dictionary.sql b/deployment/postgres-init-db/sql/tables/sequencing/command_dictionary.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/command_dictionary.sql rename to deployment/postgres-init-db/sql/tables/sequencing/command_dictionary.sql diff --git a/sequencing-server/sql/sequencing/tables/expanded_sequences.sql b/deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/expanded_sequences.sql rename to deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql diff --git a/sequencing-server/sql/sequencing/tables/expansion_rule.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/expansion_rule.sql rename to deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql diff --git a/sequencing-server/sql/sequencing/tables/expansion_run.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/expansion_run.sql rename to deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql diff --git a/sequencing-server/sql/sequencing/tables/expansion_set.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/expansion_set.sql rename to deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql diff --git a/sequencing-server/sql/sequencing/tables/expansion_set_to_rule.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_set_to_rule.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/expansion_set_to_rule.sql rename to deployment/postgres-init-db/sql/tables/sequencing/expansion_set_to_rule.sql diff --git a/sequencing-server/sql/sequencing/tables/schema_migrations.sql b/deployment/postgres-init-db/sql/tables/sequencing/schema_migrations.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/schema_migrations.sql rename to deployment/postgres-init-db/sql/tables/sequencing/schema_migrations.sql diff --git a/sequencing-server/sql/sequencing/tables/sequence.sql b/deployment/postgres-init-db/sql/tables/sequencing/sequence.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/sequence.sql rename to deployment/postgres-init-db/sql/tables/sequencing/sequence.sql diff --git a/sequencing-server/sql/sequencing/tables/sequence_to_simulated_activity.sql b/deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/sequence_to_simulated_activity.sql rename to 
deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql diff --git a/sequencing-server/sql/sequencing/tables/user_sequence.sql b/deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/user_sequence.sql rename to deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql diff --git a/sequencing-server/sql/sequencing/tables/metadata/expansion_rule_tags.sql b/deployment/postgres-init-db/sql/tables/tags/expansion_rule_tags.sql similarity index 100% rename from sequencing-server/sql/sequencing/tables/metadata/expansion_rule_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/expansion_rule_tags.sql diff --git a/sequencing-server/.dockerignore b/sequencing-server/.dockerignore index b8396c7073..80de2d5f69 100644 --- a/sequencing-server/.dockerignore +++ b/sequencing-server/.dockerignore @@ -1,3 +1,2 @@ .gradle -sql test diff --git a/sequencing-server/sql/sequencing/applied_migrations.sql b/sequencing-server/sql/sequencing/applied_migrations.sql deleted file mode 100644 index 5d2459096b..0000000000 --- a/sequencing-server/sql/sequencing/applied_migrations.sql +++ /dev/null @@ -1,11 +0,0 @@ -/* -This file denotes which migrations occur "before" this version of the schema. -*/ - -call migrations.mark_migration_applied('0'); -call migrations.mark_migration_applied('1'); -call migrations.mark_migration_applied('2'); -call migrations.mark_migration_applied('3'); -call migrations.mark_migration_applied('4'); -call migrations.mark_migration_applied('5'); -call migrations.mark_migration_applied('6'); diff --git a/sequencing-server/sql/sequencing/init.sql b/sequencing-server/sql/sequencing/init.sql deleted file mode 100644 index 5ca5f067a4..0000000000 --- a/sequencing-server/sql/sequencing/init.sql +++ /dev/null @@ -1,25 +0,0 @@ --- The order of inclusion is important! Tables referenced by foreign keys must be loaded before their dependants. - -begin; - --Schemas - \ir schemas.sql - - -- Schema migrations - \ir tables/schema_migrations.sql - \ir applied_migrations.sql - - -- Command Expansion Tables. 
- \ir tables/command_dictionary.sql - \ir tables/expansion_set.sql - \ir tables/expansion_rule.sql - \ir tables/expansion_set_to_rule.sql - \ir tables/expansion_run.sql - \ir tables/activity_instance_commands.sql - \ir tables/sequence.sql - \ir tables/sequence_to_simulated_activity.sql - \ir tables/user_sequence.sql - \ir tables/expanded_sequences.sql - - -- Table-specific Metadata - \ir tables/metadata/expansion_rule_tags.sql -end; diff --git a/sequencing-server/sql/sequencing/schemas.sql b/sequencing-server/sql/sequencing/schemas.sql deleted file mode 100644 index 16fb571826..0000000000 --- a/sequencing-server/sql/sequencing/schemas.sql +++ /dev/null @@ -1,2 +0,0 @@ -create schema metadata; -create schema migrations; From 6ae9b8acb234c5080491c00511c26ec993456fbe Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 5 Feb 2024 15:07:50 -0800 Subject: [PATCH 05/36] Move UI DB Files --- deployment/postgres-init-db/sql/schemas.sql | 1 + .../tables => tables/ui}/extension_roles.sql | 0 .../{ui/tables => tables/ui}/extensions.sql | 0 .../sql/{ui/tables => tables/ui}/view.sql | 0 .../sql/ui/applied_migrations.sql | 7 ----- deployment/postgres-init-db/sql/ui/init.sql | 10 ------ .../sql/ui/tables/schema_migrations.sql | 31 ------------------- 7 files changed, 1 insertion(+), 48 deletions(-) rename deployment/postgres-init-db/sql/{ui/tables => tables/ui}/extension_roles.sql (100%) rename deployment/postgres-init-db/sql/{ui/tables => tables/ui}/extensions.sql (100%) rename deployment/postgres-init-db/sql/{ui/tables => tables/ui}/view.sql (100%) delete mode 100644 deployment/postgres-init-db/sql/ui/applied_migrations.sql delete mode 100644 deployment/postgres-init-db/sql/ui/init.sql delete mode 100644 deployment/postgres-init-db/sql/ui/tables/schema_migrations.sql diff --git a/deployment/postgres-init-db/sql/schemas.sql b/deployment/postgres-init-db/sql/schemas.sql index 1ed25a81c7..e84be6f6ca 100644 --- a/deployment/postgres-init-db/sql/schemas.sql +++ b/deployment/postgres-init-db/sql/schemas.sql @@ -2,6 +2,7 @@ create schema merlin; create schema scheduler; create schema sequencing; +create schema ui; -- Cross Service create schema migrations; diff --git a/deployment/postgres-init-db/sql/ui/tables/extension_roles.sql b/deployment/postgres-init-db/sql/tables/ui/extension_roles.sql similarity index 100% rename from deployment/postgres-init-db/sql/ui/tables/extension_roles.sql rename to deployment/postgres-init-db/sql/tables/ui/extension_roles.sql diff --git a/deployment/postgres-init-db/sql/ui/tables/extensions.sql b/deployment/postgres-init-db/sql/tables/ui/extensions.sql similarity index 100% rename from deployment/postgres-init-db/sql/ui/tables/extensions.sql rename to deployment/postgres-init-db/sql/tables/ui/extensions.sql diff --git a/deployment/postgres-init-db/sql/ui/tables/view.sql b/deployment/postgres-init-db/sql/tables/ui/view.sql similarity index 100% rename from deployment/postgres-init-db/sql/ui/tables/view.sql rename to deployment/postgres-init-db/sql/tables/ui/view.sql diff --git a/deployment/postgres-init-db/sql/ui/applied_migrations.sql b/deployment/postgres-init-db/sql/ui/applied_migrations.sql deleted file mode 100644 index 83e3e220cc..0000000000 --- a/deployment/postgres-init-db/sql/ui/applied_migrations.sql +++ /dev/null @@ -1,7 +0,0 @@ -/* -This file denotes which migrations occur "before" this version of the schema. 
-*/ - -call migrations.mark_migration_applied('0'); -call migrations.mark_migration_applied('1'); -call migrations.mark_migration_applied('2'); diff --git a/deployment/postgres-init-db/sql/ui/init.sql b/deployment/postgres-init-db/sql/ui/init.sql deleted file mode 100644 index 8c462d7add..0000000000 --- a/deployment/postgres-init-db/sql/ui/init.sql +++ /dev/null @@ -1,10 +0,0 @@ -begin; - -- Schema migrations - \ir tables/schema_migrations.sql - \ir applied_migrations.sql - - -- Tables. - \ir tables/extensions.sql - \ir tables/extension_roles.sql - \ir tables/view.sql -end; diff --git a/deployment/postgres-init-db/sql/ui/tables/schema_migrations.sql b/deployment/postgres-init-db/sql/ui/tables/schema_migrations.sql deleted file mode 100644 index da8e239de3..0000000000 --- a/deployment/postgres-init-db/sql/ui/tables/schema_migrations.sql +++ /dev/null @@ -1,31 +0,0 @@ -create schema migrations; -create table migrations.schema_migrations ( - migration_id varchar primary key -); - -create procedure migrations.mark_migration_applied(_migration_id varchar) -language plpgsql as $$ -begin - insert into migrations.schema_migrations (migration_id) - values (_migration_id); -end; -$$; - -create procedure migrations.mark_migration_rolled_back(_migration_id varchar) -language plpgsql as $$ -begin - delete from migrations.schema_migrations - where migration_id = _migration_id; -end; -$$; - -comment on schema migrations is e'' - 'Tables and procedures associated with tracking schema migrations'; -comment on table migrations.schema_migrations is e'' - 'Tracks what migrations have been applied'; -comment on column migrations.schema_migrations.migration_id is e'' - 'An identifier for a migration that has been applied'; -comment on procedure migrations.mark_migration_applied is e'' - 'Given an identifier for a migration, add that migration to the applied set'; -comment on procedure migrations.mark_migration_rolled_back is e'' - 'Given an identifier for a migration, remove that migration from the applied set'; From 2134c69d42736486afced2dcb9071c4ebe6daa22 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 5 Feb 2024 16:54:59 -0800 Subject: [PATCH 06/36] Add Shared Utility Functions These are non-DB specific functions that are reused across the DB. 
Ie, incrementing a table's revision counter or setting when it was last updated --- .../shared_update_functions.sql | 23 +++++++++++++++++++ deployment/postgres-init-db/sql/schemas.sql | 10 ++++++++ 2 files changed, 33 insertions(+) create mode 100644 deployment/postgres-init-db/sql/functions/util_functions/shared_update_functions.sql diff --git a/deployment/postgres-init-db/sql/functions/util_functions/shared_update_functions.sql b/deployment/postgres-init-db/sql/functions/util_functions/shared_update_functions.sql new file mode 100644 index 0000000000..563a1564b1 --- /dev/null +++ b/deployment/postgres-init-db/sql/functions/util_functions/shared_update_functions.sql @@ -0,0 +1,23 @@ +create function util_functions.set_updated_at() +returns trigger +security invoker +language plpgsql as $$begin + new.updated_at = now(); + return new; +end$$; + +create function util_functions.increment_revision_update() +returns trigger +security invoker +language plpgsql as $$ +begin + new.revision = old.revision +1; + return new; +end$$; + +create function util_functions.raise_duration_is_negative() +returns trigger +security invoker +language plpgsql as $$begin + raise exception 'invalid duration, expected nonnegative duration but found: %', new.duration; +end$$; diff --git a/deployment/postgres-init-db/sql/schemas.sql b/deployment/postgres-init-db/sql/schemas.sql index e84be6f6ca..967b955687 100644 --- a/deployment/postgres-init-db/sql/schemas.sql +++ b/deployment/postgres-init-db/sql/schemas.sql @@ -1,11 +1,21 @@ -- Services create schema merlin; +comment on schema merlin is 'Merlin Service Schema'; create schema scheduler; +comment on schema scheduler is 'Scheduler Service Schema'; create schema sequencing; +comment on schema sequencing is 'Sequencing Service Schema'; create schema ui; +comment on schema ui is 'UI Service Schema'; -- Cross Service create schema migrations; +comment on schema migrations is 'DB Migrations Schema'; create schema hasura; +comment on schema hasura is 'Hasura Helper Function Schema'; create schema permissions; +comment on schema permissions is 'Aerie User and User Roles Schema'; create schema tags; +comment on schema tags is 'Tags Metadata Schema'; +create schema util_functions; +comment on schema util_functions is 'Cross-service Helper Function Schema'; From 5a481f617e23d588e8d9269240978ddb7cebe79c Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Fri, 1 Mar 2024 09:06:16 -0800 Subject: [PATCH 07/36] Add Gitkeep for new DB Delete Sample Migrations --- deployment/hasura/migrations/Aerie/.gitkeep | 0 deployment/sample_migrations/config.yaml | 10 ---------- .../1664478381304_rename_mission_column/down.sql | 12 ------------ .../1664478381304_rename_mission_column/up.sql | 12 ------------ .../1664478481307_add_column_expand_to/down.sql | 8 -------- .../1664478481307_add_column_expand_to/up.sql | 12 ------------ .../1667319761264_test_migration/down.sql | 6 ------ .../AerieMerlin/1667319761264_test_migration/up.sql | 5 ----- 8 files changed, 65 deletions(-) create mode 100644 deployment/hasura/migrations/Aerie/.gitkeep delete mode 100644 deployment/sample_migrations/config.yaml delete mode 100644 deployment/sample_migrations/migrations/AerieCommanding/1664478381304_rename_mission_column/down.sql delete mode 100644 deployment/sample_migrations/migrations/AerieCommanding/1664478381304_rename_mission_column/up.sql delete mode 100644 deployment/sample_migrations/migrations/AerieCommanding/1664478481307_add_column_expand_to/down.sql delete mode 100644 
deployment/sample_migrations/migrations/AerieCommanding/1664478481307_add_column_expand_to/up.sql delete mode 100644 deployment/sample_migrations/migrations/AerieMerlin/1667319761264_test_migration/down.sql delete mode 100644 deployment/sample_migrations/migrations/AerieMerlin/1667319761264_test_migration/up.sql diff --git a/deployment/hasura/migrations/Aerie/.gitkeep b/deployment/hasura/migrations/Aerie/.gitkeep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/deployment/sample_migrations/config.yaml b/deployment/sample_migrations/config.yaml deleted file mode 100644 index 9d1f7b2d19..0000000000 --- a/deployment/sample_migrations/config.yaml +++ /dev/null @@ -1,10 +0,0 @@ -# This file configures the Hasura CLI, a local development (and deployment) tool. -# It is required to run the Hasura CLI (even if empty). - -version: 3 -endpoint: http://localhost:8080/ -metadata_directory: metadata -migrations_directory: migrations -actions: - kind: synchronous - handler_webhook_baseurl: http://localhost:3000 diff --git a/deployment/sample_migrations/migrations/AerieCommanding/1664478381304_rename_mission_column/down.sql b/deployment/sample_migrations/migrations/AerieCommanding/1664478381304_rename_mission_column/down.sql deleted file mode 100644 index 9ec6920158..0000000000 --- a/deployment/sample_migrations/migrations/AerieCommanding/1664478381304_rename_mission_column/down.sql +++ /dev/null @@ -1,12 +0,0 @@ ------ NOTE: Test file, should not be used in real database migration - ---- Database Change - ---Verify column exists and rename column -select missions from "public"."command_dictionary"; -alter table "public"."command_dictionary" rename column "missions" to "mission"; - ---- Data migration logic - ---- Database Check -select mission from "public"."command_dictionary"; diff --git a/deployment/sample_migrations/migrations/AerieCommanding/1664478381304_rename_mission_column/up.sql b/deployment/sample_migrations/migrations/AerieCommanding/1664478381304_rename_mission_column/up.sql deleted file mode 100644 index a5a1485e20..0000000000 --- a/deployment/sample_migrations/migrations/AerieCommanding/1664478381304_rename_mission_column/up.sql +++ /dev/null @@ -1,12 +0,0 @@ ------ NOTE: Test file, should not be used in real database migration - ---- Database Change - ---Verify column exists and rename column -select mission from "public"."command_dictionary"; -alter table "public"."command_dictionary" rename column "mission" to "missions"; - ---- Data migration logic - ---- Database Check -select missions from "public"."command_dictionary"; diff --git a/deployment/sample_migrations/migrations/AerieCommanding/1664478481307_add_column_expand_to/down.sql b/deployment/sample_migrations/migrations/AerieCommanding/1664478481307_add_column_expand_to/down.sql deleted file mode 100644 index 72f7a14f9c..0000000000 --- a/deployment/sample_migrations/migrations/AerieCommanding/1664478481307_add_column_expand_to/down.sql +++ /dev/null @@ -1,8 +0,0 @@ ------ NOTE: Test file, should not be used in real database migration - ---- Database Change -alter table "public"."activity_instance_commands" drop column "expand_to" cascade; - ---- Data migration logic - ---- Database Check diff --git a/deployment/sample_migrations/migrations/AerieCommanding/1664478481307_add_column_expand_to/up.sql b/deployment/sample_migrations/migrations/AerieCommanding/1664478481307_add_column_expand_to/up.sql deleted file mode 100644 index 0da4b4523a..0000000000 --- 
a/deployment/sample_migrations/migrations/AerieCommanding/1664478481307_add_column_expand_to/up.sql +++ /dev/null @@ -1,12 +0,0 @@ ------ NOTE: Test file, should not be used in real database migration - ---- Database Change -alter table "public"."activity_instance_commands" add column "expand_to" int4; -comment on column "public"."activity_instance_commands"."expand_to" is E'how many commands that were expanded to'; -alter table "public"."activity_instance_commands" alter column "expand_to" set default 0; -alter table "public"."activity_instance_commands" alter column "expand_to" drop not null; - ---- Data migration logic - ---- Database Check -select expand_to from "public"."activity_instance_commands"; diff --git a/deployment/sample_migrations/migrations/AerieMerlin/1667319761264_test_migration/down.sql b/deployment/sample_migrations/migrations/AerieMerlin/1667319761264_test_migration/down.sql deleted file mode 100644 index 858a9cae8b..0000000000 --- a/deployment/sample_migrations/migrations/AerieMerlin/1667319761264_test_migration/down.sql +++ /dev/null @@ -1,6 +0,0 @@ -update plan -set parent_id = null -where id = 1; - -delete from plan -where name = 'New plan'; diff --git a/deployment/sample_migrations/migrations/AerieMerlin/1667319761264_test_migration/up.sql b/deployment/sample_migrations/migrations/AerieMerlin/1667319761264_test_migration/up.sql deleted file mode 100644 index 0a7ecd07dc..0000000000 --- a/deployment/sample_migrations/migrations/AerieMerlin/1667319761264_test_migration/up.sql +++ /dev/null @@ -1,5 +0,0 @@ -update plan -set parent_id = 2 -where id = 1; - -select duplicate_plan(1, 'New plan'); From f9c2a7980f792fddd2d318d46608faeabe893f1e Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 5 Feb 2024 16:39:29 -0800 Subject: [PATCH 08/36] Add Schema Qualifiers (Permissions) --- .../sql/default_user_roles.sql | 12 ++-- .../permissions/check_general_permissions.sql | 24 ++++---- .../permissions/get_function_permissions.sql | 46 ++++++++-------- .../sql/functions/permissions/get_role.sql | 4 +- .../permissions/merge_permissions.sql | 55 ++++++++++--------- .../permissions/user_role_permission.sql | 30 +++++----- .../sql/tables/permissions/user_roles.sql | 12 ++-- .../sql/tables/permissions/users.sql | 10 ++-- .../permissions/users_allowed_roles.sql | 8 +-- .../sql/types/permissions/permissions.sql | 6 +- .../sql/views/permissions/users_and_roles.sql | 8 +-- 11 files changed, 112 insertions(+), 103 deletions(-) diff --git a/deployment/postgres-init-db/sql/default_user_roles.sql b/deployment/postgres-init-db/sql/default_user_roles.sql index 213bd2fc90..a7c9f5e079 100644 --- a/deployment/postgres-init-db/sql/default_user_roles.sql +++ b/deployment/postgres-init-db/sql/default_user_roles.sql @@ -1,14 +1,14 @@ -- Default Roles: -insert into metadata.user_roles(role) values ('aerie_admin'), ('user'), ('viewer'); +insert into permissions.user_roles(role) values ('aerie_admin'), ('user'), ('viewer'); -- Permissions For Default Roles: -- 'aerie_admin' permissions aren't specified since 'aerie_admin' is always considered to have "NO_CHECK" permissions -update metadata.user_role_permission +update permissions.user_role_permission set action_permissions = '{}', function_permissions = '{}' -where role = 'admin'; +where role = 'aerie_admin'; -update metadata.user_role_permission +update permissions.user_role_permission set action_permissions = '{ "check_constraints": "PLAN_OWNER_COLLABORATOR", "create_expansion_rule": "NO_CHECK", @@ -46,7 +46,7 @@ set action_permissions = '{ }' 
where role = 'user'; -update metadata.user_role_permission +update permissions.user_role_permission set action_permissions = '{ "sequence_seq_json_bulk": "NO_CHECK", "resource_samples": "NO_CHECK" @@ -59,6 +59,6 @@ set action_permissions = '{ where role = 'viewer'; -- Default Users: -insert into metadata.users(username, default_role) +insert into permissions.users(username, default_role) values ('Mission Model', 'viewer'), ('Aerie Legacy', 'viewer'); diff --git a/deployment/postgres-init-db/sql/functions/permissions/check_general_permissions.sql b/deployment/postgres-init-db/sql/functions/permissions/check_general_permissions.sql index 5c4f9164c6..f4f7cfde7a 100644 --- a/deployment/postgres-init-db/sql/functions/permissions/check_general_permissions.sql +++ b/deployment/postgres-init-db/sql/functions/permissions/check_general_permissions.sql @@ -1,18 +1,22 @@ -create procedure metadata.check_general_permissions(_function metadata.function_permission_key, _permission metadata.permission, _plan_id integer, _user text) +create procedure permissions.check_general_permissions( + _function permissions.function_permission_key, + _permission permissions.permission, + _plan_id integer, + _user text) language plpgsql as $$ declare _mission_model_id integer; _plan_name text; begin - select name from public.plan where id = _plan_id into _plan_name; + select name from merlin.plan where id = _plan_id into _plan_name; -- MISSION_MODEL_OWNER: The user must own the relevant Mission Model if _permission = 'MISSION_MODEL_OWNER' then - select id from public.mission_model mm - where mm.id = (select model_id from plan p where p.id = _plan_id) + select id from merlin.mission_model mm + where mm.id = (select model_id from merlin.plan p where p.id = _plan_id) into _mission_model_id; - if not exists(select * from public.mission_model mm where mm.id = _mission_model_id and mm.owner =_user) then + if not exists(select * from merlin.mission_model mm where mm.id = _mission_model_id and mm.owner =_user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not MISSION_MODEL_OWNER on Model ' || _mission_model_id ||'.'; end if; @@ -20,29 +24,29 @@ begin -- OWNER: The user must be the owner of all relevant objects directly used by the KEY -- In most cases, OWNER is equivalent to PLAN_OWNER. Use a custom solution when that is not true. elseif _permission = 'OWNER' then - if not exists(select * from public.plan p where p.id = _plan_id and p.owner = _user) then + if not exists(select * from merlin.plan p where p.id = _plan_id and p.owner = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not OWNER on Plan ' || _plan_id ||' ('|| _plan_name ||').'; end if; -- PLAN_OWNER: The user must be the Owner of the relevant Plan elseif _permission = 'PLAN_OWNER' then - if not exists(select * from public.plan p where p.id = _plan_id and p.owner = _user) then + if not exists(select * from merlin.plan p where p.id = _plan_id and p.owner = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER on Plan '|| _plan_id ||' ('|| _plan_name ||').'; end if; -- PLAN_COLLABORATOR: The user must be a Collaborator of the relevant Plan. 
The Plan Owner is NOT considered a Collaborator of the Plan elseif _permission = 'PLAN_COLLABORATOR' then - if not exists(select * from public.plan_collaborators pc where pc.plan_id = _plan_id and pc.collaborator = _user) then + if not exists(select * from merlin.plan_collaborators pc where pc.plan_id = _plan_id and pc.collaborator = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_COLLABORATOR on Plan '|| _plan_id ||' ('|| _plan_name ||').'; end if; -- PLAN_OWNER_COLLABORATOR: The user must be either the Owner or a Collaborator of the relevant Plan elseif _permission = 'PLAN_OWNER_COLLABORATOR' then - if not exists(select * from public.plan p where p.id = _plan_id and p.owner = _user) then - if not exists(select * from public.plan_collaborators pc where pc.plan_id = _plan_id and pc.collaborator = _user) then + if not exists(select * from merlin.plan p where p.id = _plan_id and p.owner = _user) then + if not exists(select * from merlin.plan_collaborators pc where pc.plan_id = _plan_id and pc.collaborator = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER_COLLABORATOR on Plan '|| _plan_id ||' ('|| _plan_name ||').'; end if; diff --git a/deployment/postgres-init-db/sql/functions/permissions/get_function_permissions.sql b/deployment/postgres-init-db/sql/functions/permissions/get_function_permissions.sql index 6198334732..c48203a418 100644 --- a/deployment/postgres-init-db/sql/functions/permissions/get_function_permissions.sql +++ b/deployment/postgres-init-db/sql/functions/permissions/get_function_permissions.sql @@ -1,26 +1,26 @@ -create function metadata.get_function_permissions(_function metadata.function_permission_key, hasura_session json) - returns metadata.permission - stable - language plpgsql as $$ - declare - _role text; - _function_permission metadata.permission; - begin - _role := metadata.get_role(hasura_session); - -- The aerie_admin role is always treated as having NO_CHECK permissions on all functions. - if _role = 'aerie_admin' then return 'NO_CHECK'; end if; +create function permissions.get_function_permissions(_function permissions.function_permission_key, hasura_session json) +returns permissions.permission +stable +language plpgsql as $$ +declare + _role text; + _function_permission permissions.permission; +begin + _role := permissions.get_role(hasura_session); + -- The aerie_admin role is always treated as having NO_CHECK permissions on all functions. + if _role = 'aerie_admin' then return 'NO_CHECK'; end if; - select (function_permissions ->> _function::text)::metadata.permission - from metadata.user_role_permission urp - where urp.role = _role - into _function_permission; + select (function_permissions ->> _function::text)::permissions.permission + from permissions.user_role_permission urp + where urp.role = _role + into _function_permission; - -- The absence of the function key means that the role does not have permission to perform the function. - if _function_permission is null then - raise insufficient_privilege - using message = 'User with role '''|| _role ||''' is not permitted to run '''|| _function ||''''; - end if; + -- The absence of the function key means that the role does not have permission to perform the function. 
+ if _function_permission is null then + raise insufficient_privilege + using message = 'User with role '''|| _role ||''' is not permitted to run '''|| _function ||''''; + end if; - return _function_permission::metadata.permission; - end - $$; + return _function_permission::permissions.permission; +end +$$; diff --git a/deployment/postgres-init-db/sql/functions/permissions/get_role.sql b/deployment/postgres-init-db/sql/functions/permissions/get_role.sql index 1b6f985a16..a3d067486a 100644 --- a/deployment/postgres-init-db/sql/functions/permissions/get_role.sql +++ b/deployment/postgres-init-db/sql/functions/permissions/get_role.sql @@ -1,4 +1,4 @@ -create function metadata.get_role(hasura_session json) +create function permissions.get_role(hasura_session json) returns text stable language plpgsql as $$ @@ -11,7 +11,7 @@ begin return _role; end if; _username := hasura_session ->> 'x-hasura-user-id'; - select default_role from metadata.users u + select default_role from permissions.users u where u.username = _username into _role; if _role is null then raise exception 'Invalid username: %', _username; diff --git a/deployment/postgres-init-db/sql/functions/permissions/merge_permissions.sql b/deployment/postgres-init-db/sql/functions/permissions/merge_permissions.sql index b3e7688856..762c60f7ff 100644 --- a/deployment/postgres-init-db/sql/functions/permissions/merge_permissions.sql +++ b/deployment/postgres-init-db/sql/functions/permissions/merge_permissions.sql @@ -1,4 +1,4 @@ -create function metadata.raise_if_plan_merge_permission(_function metadata.function_permission_key, _permission metadata.permission) +create function permissions.raise_if_plan_merge_permission(_function permissions.function_permission_key, _permission permissions.permission) returns void immutable language plpgsql as $$ @@ -11,52 +11,57 @@ begin end $$; -create procedure metadata.check_merge_permissions(_function metadata.function_permission_key, _merge_request_id integer, hasura_session json) +create procedure permissions.check_merge_permissions(_function permissions.function_permission_key, _merge_request_id integer, hasura_session json) language plpgsql as $$ declare _plan_id_receiving_changes integer; _plan_id_supplying_changes integer; - _function_permission metadata.permission; + _function_permission permissions.permission; _user text; begin select plan_id_receiving_changes - from merge_request mr + from merlin.merge_request mr where mr.id = _merge_request_id into _plan_id_receiving_changes; select plan_id - from public.plan_snapshot ps, merge_request mr + from merlin.plan_snapshot ps, merlin.merge_request mr where mr.id = _merge_request_id and ps.snapshot_id = mr.snapshot_id_supplying_changes into _plan_id_supplying_changes; _user := (hasura_session ->> 'x-hasura-user-id'); - _function_permission := metadata.get_function_permissions('get_non_conflicting_activities', hasura_session); - call metadata.check_merge_permissions(_function, _function_permission, _plan_id_receiving_changes, + _function_permission := permissions.get_function_permissions('get_non_conflicting_activities', hasura_session); + call permissions.check_merge_permissions(_function, _function_permission, _plan_id_receiving_changes, _plan_id_supplying_changes, _user); end $$; -create procedure metadata.check_merge_permissions(_function metadata.function_permission_key, _permission metadata.permission, _plan_id_receiving integer, _plan_id_supplying integer, _user text) +create procedure permissions.check_merge_permissions( + _function 
permissions.function_permission_key, + _permission permissions.permission, + _plan_id_receiving integer, + _plan_id_supplying integer, + _user text) language plpgsql as $$ declare _supplying_plan_name text; _receiving_plan_name text; begin - select name from public.plan where id = _plan_id_supplying into _supplying_plan_name; - select name from public.plan where id = _plan_id_receiving into _receiving_plan_name; + select name from merlin.plan where id = _plan_id_supplying into _supplying_plan_name; + select name from merlin.plan where id = _plan_id_receiving into _receiving_plan_name; -- MISSION_MODEL_OWNER: The user must own the relevant Mission Model if _permission = 'MISSION_MODEL_OWNER' then - call metadata.check_general_permissions(_function, _permission, _plan_id_receiving, _user); + call permissions.check_general_permissions(_function, _permission, _plan_id_receiving, _user); -- OWNER: The user must be the Owner of both Plans elseif _permission = 'OWNER' then - if not (exists(select * from public.plan p where p.id = _plan_id_receiving and p.owner = _user)) then + if not (exists(select * from merlin.plan p where p.id = _plan_id_receiving and p.owner = _user)) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': ''' || _user ||''' is not OWNER on Plan '|| _plan_id_receiving ||' ('|| _receiving_plan_name ||').'; - elseif not (exists(select * from public.plan p2 where p2.id = _plan_id_supplying and p2.owner = _user)) then + elseif not (exists(select * from merlin.plan p2 where p2.id = _plan_id_supplying and p2.owner = _user)) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': ''' || _user ||''' is not OWNER on Plan '|| _plan_id_supplying @@ -66,7 +71,7 @@ begin -- PLAN_OWNER: The user must be the Owner of either Plan elseif _permission = 'PLAN_OWNER' then if not exists(select * - from public.plan p + from merlin.plan p where (p.id = _plan_id_receiving or p.id = _plan_id_supplying) and p.owner = _user) then raise insufficient_privilege @@ -78,7 +83,7 @@ begin -- PLAN_COLLABORATOR: The user must be a Collaborator of either Plan. The Plan Owner is NOT considered a Collaborator of the Plan elseif _permission = 'PLAN_COLLABORATOR' then if not exists(select * - from public.plan_collaborators pc + from merlin.plan_collaborators pc where (pc.plan_id = _plan_id_receiving or pc.plan_id = _plan_id_supplying) and pc.collaborator = _user) then raise insufficient_privilege @@ -90,11 +95,11 @@ begin -- PLAN_OWNER_COLLABORATOR: The user must be either the Owner or a Collaborator of either Plan elseif _permission = 'PLAN_OWNER_COLLABORATOR' then if not exists(select * - from public.plan p + from merlin.plan p where (p.id = _plan_id_receiving or p.id = _plan_id_supplying) and p.owner = _user) then if not exists(select * - from public.plan_collaborators pc + from merlin.plan_collaborators pc where (pc.plan_id = _plan_id_receiving or pc.plan_id = _plan_id_supplying) and pc.collaborator = _user) then raise insufficient_privilege @@ -108,7 +113,7 @@ begin -- PLAN_OWNER_SOURCE: The user must be the Owner of the Supplying Plan elseif _permission = 'PLAN_OWNER_SOURCE' then if not exists(select * - from public.plan p + from merlin.plan p where p.id = _plan_id_supplying and p.owner = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER on Source Plan ' @@ -118,7 +123,7 @@ begin -- PLAN_COLLABORATOR_SOURCE: The user must be a Collaborator of the Supplying Plan. 
elseif _permission = 'PLAN_COLLABORATOR_SOURCE' then if not exists(select * - from public.plan_collaborators pc + from merlin.plan_collaborators pc where pc.plan_id = _plan_id_supplying and pc.collaborator = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_COLLABORATOR on Source Plan ' @@ -128,10 +133,10 @@ begin -- PLAN_OWNER_COLLABORATOR_SOURCE: The user must be either the Owner or a Collaborator of the Supplying Plan. elseif _permission = 'PLAN_OWNER_COLLABORATOR_SOURCE' then if not exists(select * - from public.plan p + from merlin.plan p where p.id = _plan_id_supplying and p.owner = _user) then if not exists(select * - from public.plan_collaborators pc + from merlin.plan_collaborators pc where pc.plan_id = _plan_id_supplying and pc.collaborator = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER_COLLABORATOR on Source Plan ' @@ -142,7 +147,7 @@ begin -- PLAN_OWNER_TARGET: The user must be the Owner of the Receiving Plan. elseif _permission = 'PLAN_OWNER_TARGET' then if not exists(select * - from public.plan p + from merlin.plan p where p.id = _plan_id_receiving and p.owner = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER on Target Plan ' @@ -152,7 +157,7 @@ begin -- PLAN_COLLABORATOR_TARGET: The user must be a Collaborator of the Receiving Plan. elseif _permission = 'PLAN_COLLABORATOR_TARGET' then if not exists(select * - from public.plan_collaborators pc + from merlin.plan_collaborators pc where pc.plan_id = _plan_id_receiving and pc.collaborator = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_COLLABORATOR on Target Plan ' @@ -162,10 +167,10 @@ begin -- PLAN_OWNER_COLLABORATOR_TARGET: The user must be either the Owner or a Collaborator of the Receiving Plan. elseif _permission = 'PLAN_OWNER_COLLABORATOR_TARGET' then if not exists(select * - from public.plan p + from merlin.plan p where p.id = _plan_id_receiving and p.owner = _user) then if not exists(select * - from public.plan_collaborators pc + from merlin.plan_collaborators pc where pc.plan_id = _plan_id_receiving and pc.collaborator = _user) then raise insufficient_privilege using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER_COLLABORATOR on Target Plan ' diff --git a/deployment/postgres-init-db/sql/tables/permissions/user_role_permission.sql b/deployment/postgres-init-db/sql/tables/permissions/user_role_permission.sql index 46f66d78bc..9990fdeb97 100644 --- a/deployment/postgres-init-db/sql/tables/permissions/user_role_permission.sql +++ b/deployment/postgres-init-db/sql/tables/permissions/user_role_permission.sql @@ -1,24 +1,24 @@ -create table metadata.user_role_permission( +create table permissions.user_role_permission( role text not null primary key - references metadata.user_roles + references permissions.user_roles on update cascade on delete cascade, action_permissions jsonb not null default '{}', function_permissions jsonb not null default '{}' ); -comment on table metadata.user_role_permission is e'' +comment on table permissions.user_role_permission is e'' 'Permissions for a role that cannot be expressed in Hasura. Permissions take the form {KEY:PERMISSION}.' 
'A list of valid KEYs and PERMISSIONs can be found at https://github.com/NASA-AMMOS/aerie/discussions/983#discussioncomment-6257146'; -comment on column metadata.user_role_permission.role is e'' +comment on column permissions.user_role_permission.role is e'' 'The role these permissions apply to.'; -comment on column metadata.user_role_permission.action_permissions is '' +comment on column permissions.user_role_permission.action_permissions is '' 'The permissions the role has on Hasura Actions.'; -comment on column metadata.user_role_permission.function_permissions is '' +comment on column permissions.user_role_permission.function_permissions is '' 'The permissions the role has on Hasura Functions.'; -create function metadata.validate_permissions_json() +create function permissions.validate_permissions_json() returns trigger language plpgsql as $$ declare @@ -47,18 +47,18 @@ begin select jsonb_object_keys(new.function_permissions) as function_key, new.function_permissions ->> jsonb_object_keys(new.function_permissions) as function_permission, - jsonb_object_keys(new.function_permissions) = any(enum_range(null::metadata.function_permission_key)::text[]) as valid_function_key, - new.function_permissions ->> jsonb_object_keys(new.function_permissions) = any(enum_range(null::metadata.permission)::text[]) as valid_function_permission, + jsonb_object_keys(new.function_permissions) = any(enum_range(null::permissions.function_permission_key)::text[]) as valid_function_key, + new.function_permissions ->> jsonb_object_keys(new.function_permissions) = any(enum_range(null::permissions.permission)::text[]) as valid_function_permission, jsonb_object_keys(new.function_permissions) = any(plan_merge_fns) as is_plan_merge_key, - new.function_permissions ->> jsonb_object_keys(new.function_permissions) = any(enum_range('PLAN_OWNER_SOURCE'::metadata.permission, 'PLAN_OWNER_COLLABORATOR_TARGET'::metadata.permission)::text[]) as is_plan_merge_permission; + new.function_permissions ->> jsonb_object_keys(new.function_permissions) = any(enum_range('PLAN_OWNER_SOURCE'::permissions.permission, 'PLAN_OWNER_COLLABORATOR_TARGET'::permissions.permission)::text[]) as is_plan_merge_permission; create temp table _validate_actions_table as select jsonb_object_keys(new.action_permissions) as action_key, new.action_permissions ->> jsonb_object_keys(new.action_permissions) as action_permission, - jsonb_object_keys(new.action_permissions) = any(enum_range(null::metadata.action_permission_key)::text[]) as valid_action_key, - new.action_permissions ->> jsonb_object_keys(new.action_permissions) = any(enum_range(null::metadata.permission)::text[]) as valid_action_permission, - new.action_permissions ->> jsonb_object_keys(new.action_permissions) = any(enum_range('PLAN_OWNER_SOURCE'::metadata.permission, 'PLAN_OWNER_COLLABORATOR_TARGET'::metadata.permission)::text[]) as is_plan_merge_permission; + jsonb_object_keys(new.action_permissions) = any(enum_range(null::permissions.action_permission_key)::text[]) as valid_action_key, + new.action_permissions ->> jsonb_object_keys(new.action_permissions) = any(enum_range(null::permissions.permission)::text[]) as valid_action_permission, + new.action_permissions ->> jsonb_object_keys(new.action_permissions) = any(enum_range('PLAN_OWNER_SOURCE'::permissions.permission, 'PLAN_OWNER_COLLABORATOR_TARGET'::permissions.permission)::text[]) as is_plan_merge_permission; -- Get any invalid Action Keys @@ -158,6 +158,6 @@ end $$; create trigger validate_permissions_trigger - before insert or update 
on metadata.user_role_permission + before insert or update on permissions.user_role_permission for each row - execute function metadata.validate_permissions_json(); + execute function permissions.validate_permissions_json(); diff --git a/deployment/postgres-init-db/sql/tables/permissions/user_roles.sql b/deployment/postgres-init-db/sql/tables/permissions/user_roles.sql index dc27702435..a9da334877 100644 --- a/deployment/postgres-init-db/sql/tables/permissions/user_roles.sql +++ b/deployment/postgres-init-db/sql/tables/permissions/user_roles.sql @@ -1,24 +1,24 @@ -- This table is an enum-compatible table (https://hasura.io/docs/latest/schema/postgres/enums/#pg-create-enum-table) -create table metadata.user_roles( +create table permissions.user_roles( role text primary key, description text null ); -comment on table metadata.user_roles is e'' +comment on table permissions.user_roles is e'' 'A list of all the allowed Hasura roles, with an optional description per role'; -create function metadata.insert_permission_for_user_role() +create function permissions.insert_permission_for_user_role() returns trigger security definer language plpgsql as $$ begin - insert into metadata.user_role_permission(role) + insert into permissions.user_role_permission(role) values (new.role); return new; end $$; create trigger insert_permissions_when_user_role_created - after insert on metadata.user_roles + after insert on permissions.user_roles for each row - execute function metadata.insert_permission_for_user_role(); + execute function permissions.insert_permission_for_user_role(); diff --git a/deployment/postgres-init-db/sql/tables/permissions/users.sql b/deployment/postgres-init-db/sql/tables/permissions/users.sql index 1ab737cc4d..3332727fe8 100644 --- a/deployment/postgres-init-db/sql/tables/permissions/users.sql +++ b/deployment/postgres-init-db/sql/tables/permissions/users.sql @@ -1,13 +1,13 @@ -create table metadata.users( +create table permissions.users( username text not null primary key, - default_role text not null references metadata.user_roles + default_role text not null references permissions.user_roles on update cascade on delete restrict ); -comment on table metadata.users is e'' +comment on table permissions.users is e'' 'All users recognized by this deployment.'; -comment on column metadata.users.username is e'' +comment on column permissions.users.username is e'' 'The user''s username. 
A unique identifier for this user.'; -comment on column metadata.users.default_role is e'' +comment on column permissions.users.default_role is e'' 'The user''s default role for making Hasura requests.'; diff --git a/deployment/postgres-init-db/sql/tables/permissions/users_allowed_roles.sql b/deployment/postgres-init-db/sql/tables/permissions/users_allowed_roles.sql index 3e6ec439af..0c38f917d3 100644 --- a/deployment/postgres-init-db/sql/tables/permissions/users_allowed_roles.sql +++ b/deployment/postgres-init-db/sql/tables/permissions/users_allowed_roles.sql @@ -1,8 +1,8 @@ -create table metadata.users_allowed_roles( - username text references metadata.users +create table permissions.users_allowed_roles( + username text references permissions.users on update cascade on delete cascade, - allowed_role text not null references metadata.user_roles + allowed_role text not null references permissions.user_roles on update cascade on delete cascade, @@ -12,5 +12,5 @@ create table metadata.users_allowed_roles( check (username != 'Mission Model' and username != 'Aerie Legacy' ) ); -comment on table metadata.users_allowed_roles is e'' +comment on table permissions.users_allowed_roles is e'' 'An association between a user and all of the roles they are allowed to use for Hasura requests'; diff --git a/deployment/postgres-init-db/sql/types/permissions/permissions.sql b/deployment/postgres-init-db/sql/types/permissions/permissions.sql index 9a6fd62d40..7e46cd4a4e 100644 --- a/deployment/postgres-init-db/sql/types/permissions/permissions.sql +++ b/deployment/postgres-init-db/sql/types/permissions/permissions.sql @@ -1,6 +1,6 @@ -- User Role Permissions Validation assumes that the Plan Merge Permissions -- are covered by the range [PLAN_OWNER_SOURCE - PLAN_OWNER_COLLABORATOR_TARGET] -create type metadata.permission +create type permissions.permission as enum ( 'NO_CHECK', 'OWNER', @@ -16,7 +16,7 @@ create type metadata.permission 'PLAN_OWNER_COLLABORATOR_TARGET' ); -create type metadata.action_permission_key +create type permissions.action_permission_key as enum ( 'check_constraints', 'create_expansion_rule', @@ -29,7 +29,7 @@ create type metadata.action_permission_key 'simulate' ); -create type metadata.function_permission_key +create type permissions.function_permission_key as enum ( 'apply_preset', 'begin_merge', diff --git a/deployment/postgres-init-db/sql/views/permissions/users_and_roles.sql b/deployment/postgres-init-db/sql/views/permissions/users_and_roles.sql index e430c8be39..801337c7df 100644 --- a/deployment/postgres-init-db/sql/views/permissions/users_and_roles.sql +++ b/deployment/postgres-init-db/sql/views/permissions/users_and_roles.sql @@ -1,14 +1,14 @@ -create view metadata.users_and_roles as +create view permissions.users_and_roles as ( select u.username as username, -- Roles u.default_role as hasura_default_role, array_agg(r.allowed_role) filter (where r.allowed_role is not null) as hasura_allowed_roles - from metadata.users u - left join metadata.users_allowed_roles r using (username) + from permissions.users u + left join permissions.users_allowed_roles r using (username) group by u.username ); -comment on view metadata.users_and_roles is e'' +comment on view permissions.users_and_roles is e'' 'View a user''s information with their role information'; From 4e143651d495fd73d173cce7af1128eede895505 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 5 Feb 2024 17:11:09 -0800 Subject: [PATCH 09/36] Add Schema Qualifiers (Tags) - move get_tags function to tags schema - reorganize 
tags files --- .../sql/functions/tags/get_tag_ids.sql | 8 ++-- .../sql/functions/tags/get_tags.sql | 21 +++++++++ .../tables/tags/activity_directive_tags.sql | 44 ------------------ .../sql/tables/tags/constraint_tags.sql | 12 ----- .../sql/tables/tags/expansion_rule_tags.sql | 10 +++-- .../tags/merlin/activity_directive_tags.sql | 45 +++++++++++++++++++ .../constraint_definition_tags.sql | 8 ++-- .../tables/tags/merlin/constraint_tags.sql | 12 +++++ .../tables/tags/merlin/plan_snapshot_tags.sql | 13 ++++++ .../sql/tables/tags/merlin/plan_tags.sql | 12 +++++ .../tags/merlin/snapshot_activity_tags.sql | 40 +++++++++++++++++ .../sql/tables/tags/plan_snapshot_tags.sql | 13 ------ .../sql/tables/tags/plan_tags.sql | 12 ----- .../scheduling_condition_definition_tags.sql | 10 +++-- .../scheduling/scheduling_condition_tags.sql | 11 +++++ .../scheduling_goal_definition_tags.sql | 14 ++++++ .../tags/scheduling/scheduling_goal_tags.sql | 11 +++++ .../tables/tags/scheduling_condition_tags.sql | 9 ---- .../tags/scheduling_goal_definition_tags.sql | 12 ----- .../sql/tables/tags/scheduling_goal_tags.sql | 9 ---- .../tables/tags/snapshot_activity_tags.sql | 40 ----------------- .../postgres-init-db/sql/tables/tags/tags.sql | 16 +++---- .../merlin/activity_directive_extended.sql | 22 --------- 23 files changed, 207 insertions(+), 197 deletions(-) create mode 100644 deployment/postgres-init-db/sql/functions/tags/get_tags.sql delete mode 100644 deployment/postgres-init-db/sql/tables/tags/activity_directive_tags.sql delete mode 100644 deployment/postgres-init-db/sql/tables/tags/constraint_tags.sql create mode 100644 deployment/postgres-init-db/sql/tables/tags/merlin/activity_directive_tags.sql rename deployment/postgres-init-db/sql/tables/tags/{ => merlin}/constraint_definition_tags.sql (54%) create mode 100644 deployment/postgres-init-db/sql/tables/tags/merlin/constraint_tags.sql create mode 100644 deployment/postgres-init-db/sql/tables/tags/merlin/plan_snapshot_tags.sql create mode 100644 deployment/postgres-init-db/sql/tables/tags/merlin/plan_tags.sql create mode 100644 deployment/postgres-init-db/sql/tables/tags/merlin/snapshot_activity_tags.sql delete mode 100644 deployment/postgres-init-db/sql/tables/tags/plan_snapshot_tags.sql delete mode 100644 deployment/postgres-init-db/sql/tables/tags/plan_tags.sql rename deployment/postgres-init-db/sql/tables/tags/{ => scheduling}/scheduling_condition_definition_tags.sql (55%) create mode 100644 deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_condition_tags.sql create mode 100644 deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_goal_definition_tags.sql create mode 100644 deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_goal_tags.sql delete mode 100644 deployment/postgres-init-db/sql/tables/tags/scheduling_condition_tags.sql delete mode 100644 deployment/postgres-init-db/sql/tables/tags/scheduling_goal_definition_tags.sql delete mode 100644 deployment/postgres-init-db/sql/tables/tags/scheduling_goal_tags.sql delete mode 100644 deployment/postgres-init-db/sql/tables/tags/snapshot_activity_tags.sql diff --git a/deployment/postgres-init-db/sql/functions/tags/get_tag_ids.sql b/deployment/postgres-init-db/sql/functions/tags/get_tag_ids.sql index f86e1ec976..4ccdfca92b 100644 --- a/deployment/postgres-init-db/sql/functions/tags/get_tag_ids.sql +++ b/deployment/postgres-init-db/sql/functions/tags/get_tag_ids.sql @@ -1,11 +1,11 @@ -create function metadata.tag_ids_activity_snapshot(_directive_id integer, _snapshot_id 
integer) +create function tags.tag_ids_activity_snapshot(_directive_id integer, _snapshot_id integer) returns int[] language plpgsql as $$ declare tags int[]; begin select array_agg(tag_id) - from metadata.snapshot_activity_tags sat + from tags.snapshot_activity_tags sat where sat.snapshot_id = _snapshot_id and sat.directive_id = _directive_id into tags; @@ -13,14 +13,14 @@ begin end $$; -create function metadata.tag_ids_activity_directive(_directive_id integer, _plan_id integer) +create function tags.tag_ids_activity_directive(_directive_id integer, _plan_id integer) returns int[] language plpgsql as $$ declare tags int[]; begin select array_agg(tag_id) - from metadata.activity_directive_tags adt + from tags.activity_directive_tags adt where adt.plan_id = _plan_id and adt.directive_id = _directive_id into tags; diff --git a/deployment/postgres-init-db/sql/functions/tags/get_tags.sql b/deployment/postgres-init-db/sql/functions/tags/get_tags.sql new file mode 100644 index 0000000000..8357c27d42 --- /dev/null +++ b/deployment/postgres-init-db/sql/functions/tags/get_tags.sql @@ -0,0 +1,21 @@ +create function tags.get_tags(_activity_id int, _plan_id int) + returns jsonb + security invoker + language plpgsql as $$ + declare + tags jsonb; +begin + select jsonb_agg(json_build_object( + 'id', id, + 'name', name, + 'color', color, + 'owner', owner, + 'created_at', created_at + )) + from tags.tags tags, tags.activity_directive_tags adt + where tags.id = adt.tag_id + and (adt.directive_id, adt.plan_id) = (_activity_id, _plan_id) + into tags; + return tags; +end +$$; diff --git a/deployment/postgres-init-db/sql/tables/tags/activity_directive_tags.sql b/deployment/postgres-init-db/sql/tables/tags/activity_directive_tags.sql deleted file mode 100644 index 115eacf3e3..0000000000 --- a/deployment/postgres-init-db/sql/tables/tags/activity_directive_tags.sql +++ /dev/null @@ -1,44 +0,0 @@ -create table metadata.activity_directive_tags( - directive_id integer not null, - plan_id integer not null, - - tag_id integer not null references metadata.tags - on update cascade - on delete cascade, - - constraint tags_on_existing_activity_directive - foreign key (directive_id, plan_id) - references activity_directive - on update cascade - on delete cascade, - primary key (directive_id, plan_id, tag_id) -); - -comment on table metadata.activity_directive_tags is e'' - 'The tags associated with an activity directive.'; - -create function adt_check_locked_new() - returns trigger - security definer - language plpgsql as $$ - begin - call plan_locked_exception(new.plan_id); - return new; - end $$; -create function adt_check_locked_old() - returns trigger - security definer - language plpgsql as $$ - begin - call plan_locked_exception(old.plan_id); - return old; - end $$; - -create trigger adt_check_plan_locked_insert_update - before insert or update on metadata.activity_directive_tags - for each row - execute procedure adt_check_locked_new(); -create trigger adt_check_plan_locked_update_delete - before update or delete on metadata.activity_directive_tags - for each row - execute procedure adt_check_locked_old(); diff --git a/deployment/postgres-init-db/sql/tables/tags/constraint_tags.sql b/deployment/postgres-init-db/sql/tables/tags/constraint_tags.sql deleted file mode 100644 index 26065b0227..0000000000 --- a/deployment/postgres-init-db/sql/tables/tags/constraint_tags.sql +++ /dev/null @@ -1,12 +0,0 @@ -create table metadata.constraint_tags ( - constraint_id integer not null references public.constraint_metadata - on 
update cascade - on delete cascade, - tag_id integer not null references metadata.tags - on update cascade - on delete cascade, - primary key (constraint_id, tag_id) -); - -comment on table metadata.constraint_tags is e'' - 'The tags associated with a constraint.'; diff --git a/deployment/postgres-init-db/sql/tables/tags/expansion_rule_tags.sql b/deployment/postgres-init-db/sql/tables/tags/expansion_rule_tags.sql index 1b0e58eb95..a78cebbff1 100644 --- a/deployment/postgres-init-db/sql/tables/tags/expansion_rule_tags.sql +++ b/deployment/postgres-init-db/sql/tables/tags/expansion_rule_tags.sql @@ -1,9 +1,11 @@ -create table metadata.expansion_rule_tags ( - rule_id integer references public.expansion_rule +create table tags.expansion_rule_tags ( + rule_id integer references sequencing.expansion_rule + on update cascade + on delete cascade, + tag_id integer not null references tags.tags on update cascade on delete cascade, - tag_id integer not null, primary key (rule_id, tag_id) ); -comment on table metadata.expansion_rule_tags is e'' +comment on table tags.expansion_rule_tags is e'' 'The tags associated with an expansion rule.'; diff --git a/deployment/postgres-init-db/sql/tables/tags/merlin/activity_directive_tags.sql b/deployment/postgres-init-db/sql/tables/tags/merlin/activity_directive_tags.sql new file mode 100644 index 0000000000..b170b1a7b2 --- /dev/null +++ b/deployment/postgres-init-db/sql/tables/tags/merlin/activity_directive_tags.sql @@ -0,0 +1,45 @@ +create table tags.activity_directive_tags( + directive_id integer not null, + plan_id integer not null, + + tag_id integer not null references tags.tags + on update cascade + on delete cascade, + + constraint tags_on_existing_activity_directive + foreign key (directive_id, plan_id) + references merlin.activity_directive + on update cascade + on delete cascade, + primary key (directive_id, plan_id, tag_id) +); + +comment on table tags.activity_directive_tags is e'' + 'The tags associated with an activity directive.'; + +create function tags.adt_check_locked_new() + returns trigger + security definer + language plpgsql as $$ +begin + call merlin.plan_locked_exception(new.plan_id); + return new; +end $$; + +create function tags.adt_check_locked_old() + returns trigger + security definer + language plpgsql as $$ +begin + call merlin.plan_locked_exception(old.plan_id); + return old; +end $$; + +create trigger adt_check_plan_locked_insert_update + before insert or update on tags.activity_directive_tags + for each row + execute procedure tags.adt_check_locked_new(); +create trigger adt_check_plan_locked_update_delete + before update or delete on tags.activity_directive_tags + for each row + execute procedure tags.adt_check_locked_old(); diff --git a/deployment/postgres-init-db/sql/tables/tags/constraint_definition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/merlin/constraint_definition_tags.sql similarity index 54% rename from deployment/postgres-init-db/sql/tables/tags/constraint_definition_tags.sql rename to deployment/postgres-init-db/sql/tables/tags/merlin/constraint_definition_tags.sql index 9c631d9dcc..f400452688 100644 --- a/deployment/postgres-init-db/sql/tables/tags/constraint_definition_tags.sql +++ b/deployment/postgres-init-db/sql/tables/tags/merlin/constraint_definition_tags.sql @@ -1,14 +1,14 @@ -create table metadata.constraint_definition_tags ( +create table tags.constraint_definition_tags ( constraint_id integer not null, constraint_revision integer not null, - tag_id integer not null references metadata.tags + 
tag_id integer not null references tags.tags
     on update cascade
     on delete cascade,
   primary key (constraint_id, constraint_revision, tag_id),
-  foreign key (constraint_id, constraint_revision) references constraint_definition
+  foreign key (constraint_id, constraint_revision) references merlin.constraint_definition
     on update cascade
     on delete cascade
 );
 
-comment on table metadata.constraint_definition_tags is e''
+comment on table tags.constraint_definition_tags is e''
   'The tags associated with a specific constraint defintion.';
diff --git a/deployment/postgres-init-db/sql/tables/tags/merlin/constraint_tags.sql b/deployment/postgres-init-db/sql/tables/tags/merlin/constraint_tags.sql
new file mode 100644
index 0000000000..51657cd0c4
--- /dev/null
+++ b/deployment/postgres-init-db/sql/tables/tags/merlin/constraint_tags.sql
@@ -0,0 +1,12 @@
+create table tags.constraint_tags (
+  constraint_id integer not null references merlin.constraint_metadata
+    on update cascade
+    on delete cascade,
+  tag_id integer not null references tags.tags
+    on update cascade
+    on delete cascade,
+  primary key (constraint_id, tag_id)
+);
+
+comment on table tags.constraint_tags is e''
+  'The tags associated with a constraint.';
diff --git a/deployment/postgres-init-db/sql/tables/tags/merlin/plan_snapshot_tags.sql b/deployment/postgres-init-db/sql/tables/tags/merlin/plan_snapshot_tags.sql
new file mode 100644
index 0000000000..9b639af989
--- /dev/null
+++ b/deployment/postgres-init-db/sql/tables/tags/merlin/plan_snapshot_tags.sql
@@ -0,0 +1,13 @@
+create table tags.plan_snapshot_tags(
+  snapshot_id integer not null references merlin.plan_snapshot
+    on update cascade
+    on delete cascade,
+  tag_id integer not null references tags.tags
+    on update cascade
+    on delete cascade,
+  primary key (snapshot_id, tag_id)
+);
+
+comment on table tags.plan_snapshot_tags is e''
+  'The tags associated with a specific plan snapshot. Note: these tags will not be compared in a merge '
+  'and will not be applied to the plan if the snapshot is restored.';
diff --git a/deployment/postgres-init-db/sql/tables/tags/merlin/plan_tags.sql b/deployment/postgres-init-db/sql/tables/tags/merlin/plan_tags.sql
new file mode 100644
index 0000000000..cd6cb77e76
--- /dev/null
+++ b/deployment/postgres-init-db/sql/tables/tags/merlin/plan_tags.sql
@@ -0,0 +1,12 @@
+create table tags.plan_tags(
+  plan_id integer not null references merlin.plan
+    on update cascade
+    on delete cascade,
+  tag_id integer not null references tags.tags
+    on update cascade
+    on delete cascade,
+  primary key (plan_id, tag_id)
+);
+
+comment on table tags.plan_tags is e''
+  'The tags associated with a plan. 
Note: these tags will not be compared during a plan merge.'; diff --git a/deployment/postgres-init-db/sql/tables/tags/merlin/snapshot_activity_tags.sql b/deployment/postgres-init-db/sql/tables/tags/merlin/snapshot_activity_tags.sql new file mode 100644 index 0000000000..f5c0ec0c01 --- /dev/null +++ b/deployment/postgres-init-db/sql/tables/tags/merlin/snapshot_activity_tags.sql @@ -0,0 +1,40 @@ +create table tags.snapshot_activity_tags( + directive_id integer not null, + snapshot_id integer not null, + + tag_id integer not null references tags.tags + on update cascade + on delete cascade, + + constraint tags_on_existing_snapshot_directive + foreign key (directive_id, snapshot_id) + references merlin.plan_snapshot_activities + on update cascade + on delete cascade, + primary key (directive_id, snapshot_id, tag_id) +); + +comment on table tags.snapshot_activity_tags is e'' + 'The tags associated with an activity directive snapshot.'; + + +create function tags.snapshot_tags_in_review_delete() + returns trigger + security definer +language plpgsql as $$ +begin + if exists(select status from merlin.merge_request mr + where + (mr.snapshot_id_supplying_changes = old.snapshot_id + or mr.merge_base_snapshot_id = old.snapshot_id) + and mr.status = 'in-progress') then + raise exception 'Cannot delete. Snapshot is in use in an active merge review.'; + end if; + return old; +end +$$; + +create trigger snapshot_tags_in_review_delete_trigger + before delete on tags.snapshot_activity_tags + for each row + execute function tags.snapshot_tags_in_review_delete(); diff --git a/deployment/postgres-init-db/sql/tables/tags/plan_snapshot_tags.sql b/deployment/postgres-init-db/sql/tables/tags/plan_snapshot_tags.sql deleted file mode 100644 index 65980e07ca..0000000000 --- a/deployment/postgres-init-db/sql/tables/tags/plan_snapshot_tags.sql +++ /dev/null @@ -1,13 +0,0 @@ -create table metadata.plan_snapshot_tags( - snapshot_id integer not null references public.plan_snapshot - on update cascade - on delete cascade, - tag_id integer not null references metadata.tags - on update cascade - on delete cascade, - primary key (snapshot_id, tag_id) -); - -comment on table metadata.plan_snapshot_tags is e'' - 'The tags associated with a specific. Note: these tags will not be compared in a merge ' - 'and will not be applied to the plan if the snapshot is restored.'; diff --git a/deployment/postgres-init-db/sql/tables/tags/plan_tags.sql b/deployment/postgres-init-db/sql/tables/tags/plan_tags.sql deleted file mode 100644 index dbfb85b253..0000000000 --- a/deployment/postgres-init-db/sql/tables/tags/plan_tags.sql +++ /dev/null @@ -1,12 +0,0 @@ -create table metadata.plan_tags( - plan_id integer not null references public.plan - on update cascade - on delete cascade, - tag_id integer not null references metadata.tags - on update cascade - on delete cascade, - primary key (plan_id, tag_id) -); - -comment on table metadata.plan_tags is e'' - 'The tags associated with a plan. 
Note: these tags will not be compared during a plan merge.';
diff --git a/deployment/postgres-init-db/sql/tables/tags/scheduling_condition_definition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_condition_definition_tags.sql
similarity index 55%
rename from deployment/postgres-init-db/sql/tables/tags/scheduling_condition_definition_tags.sql
rename to deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_condition_definition_tags.sql
index f29d45981f..04f76058da 100644
--- a/deployment/postgres-init-db/sql/tables/tags/scheduling_condition_definition_tags.sql
+++ b/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_condition_definition_tags.sql
@@ -1,12 +1,14 @@
-create table metadata.scheduling_condition_definition_tags (
+create table tags.scheduling_condition_definition_tags (
   condition_id integer not null,
   condition_revision integer not null,
-  tag_id integer not null,
+  tag_id integer not null references tags.tags
+    on update cascade
+    on delete cascade,
   primary key (condition_id, condition_revision, tag_id),
-  foreign key (condition_id, condition_revision) references scheduling_condition_definition
+  foreign key (condition_id, condition_revision) references scheduler.scheduling_condition_definition
     on update cascade
     on delete cascade
 );
 
-comment on table metadata.scheduling_condition_definition_tags is e''
+comment on table tags.scheduling_condition_definition_tags is e''
   'The tags associated with a specific scheduling condition definition.';
diff --git a/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_condition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_condition_tags.sql
new file mode 100644
index 0000000000..17fd0e935b
--- /dev/null
+++ b/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_condition_tags.sql
@@ -0,0 +1,11 @@
+create table tags.scheduling_condition_tags (
+  condition_id integer references scheduler.scheduling_condition_metadata
+    on update cascade
+    on delete cascade,
+  tag_id integer not null references tags.tags
+    on update cascade
+    on delete cascade,
+  primary key (condition_id, tag_id)
+);
+comment on table tags.scheduling_condition_tags is e''
+  'The tags associated with a scheduling condition.';
diff --git a/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_goal_definition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_goal_definition_tags.sql
new file mode 100644
index 0000000000..0b0cfbff8b
--- /dev/null
+++ b/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_goal_definition_tags.sql
@@ -0,0 +1,14 @@
+create table tags.scheduling_goal_definition_tags (
+  goal_id integer not null,
+  goal_revision integer not null,
+  tag_id integer not null references tags.tags
+    on update cascade
+    on delete cascade,
+  primary key (goal_id, goal_revision, tag_id),
+  foreign key (goal_id, goal_revision) references scheduler.scheduling_goal_definition
+    on update cascade
+    on delete cascade
+);
+
+comment on table tags.scheduling_goal_definition_tags is e''
+  'The tags associated with a specific scheduling goal definition.';
diff --git a/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_goal_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_goal_tags.sql
new file mode 100644
index 0000000000..edeba3ecfb
--- /dev/null
+++ b/deployment/postgres-init-db/sql/tables/tags/scheduling/scheduling_goal_tags.sql
@@ -0,0 +1,11 @@
+create table 
tags.scheduling_goal_tags ( + goal_id integer references scheduler.scheduling_goal_metadata + on update cascade + on delete cascade, + tag_id integer not null references tags.tags + on update cascade + on delete cascade, + primary key (goal_id, tag_id) +); +comment on table tags.scheduling_goal_tags is e'' + 'The tags associated with a scheduling goal.'; diff --git a/deployment/postgres-init-db/sql/tables/tags/scheduling_condition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling_condition_tags.sql deleted file mode 100644 index f209b9854f..0000000000 --- a/deployment/postgres-init-db/sql/tables/tags/scheduling_condition_tags.sql +++ /dev/null @@ -1,9 +0,0 @@ -create table metadata.scheduling_condition_tags ( - condition_id integer references public.scheduling_condition_metadata - on update cascade - on delete cascade, - tag_id integer not null, - primary key (condition_id, tag_id) -); -comment on table metadata.scheduling_condition_tags is e'' - 'The tags associated with a scheduling condition.'; diff --git a/deployment/postgres-init-db/sql/tables/tags/scheduling_goal_definition_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling_goal_definition_tags.sql deleted file mode 100644 index 6c43be051e..0000000000 --- a/deployment/postgres-init-db/sql/tables/tags/scheduling_goal_definition_tags.sql +++ /dev/null @@ -1,12 +0,0 @@ -create table metadata.scheduling_goal_definition_tags ( - goal_id integer not null, - goal_revision integer not null, - tag_id integer not null, - primary key (goal_id, goal_revision, tag_id), - foreign key (goal_id, goal_revision) references scheduling_goal_definition - on update cascade - on delete cascade -); - -comment on table metadata.scheduling_goal_definition_tags is e'' - 'The tags associated with a specific scheduling condition definition.'; diff --git a/deployment/postgres-init-db/sql/tables/tags/scheduling_goal_tags.sql b/deployment/postgres-init-db/sql/tables/tags/scheduling_goal_tags.sql deleted file mode 100644 index 1effedabf2..0000000000 --- a/deployment/postgres-init-db/sql/tables/tags/scheduling_goal_tags.sql +++ /dev/null @@ -1,9 +0,0 @@ -create table metadata.scheduling_goal_tags ( - goal_id integer references public.scheduling_goal_metadata - on update cascade - on delete cascade, - tag_id integer not null, - primary key (goal_id, tag_id) -); -comment on table metadata.scheduling_goal_tags is e'' - 'The tags associated with a scheduling goal.'; diff --git a/deployment/postgres-init-db/sql/tables/tags/snapshot_activity_tags.sql b/deployment/postgres-init-db/sql/tables/tags/snapshot_activity_tags.sql deleted file mode 100644 index dc0f2d94f4..0000000000 --- a/deployment/postgres-init-db/sql/tables/tags/snapshot_activity_tags.sql +++ /dev/null @@ -1,40 +0,0 @@ -create table metadata.snapshot_activity_tags( - directive_id integer not null, - snapshot_id integer not null, - - tag_id integer not null references metadata.tags - on update cascade - on delete cascade, - - constraint tags_on_existing_snapshot_directive - foreign key (directive_id, snapshot_id) - references plan_snapshot_activities - on update cascade - on delete cascade, - primary key (directive_id, snapshot_id, tag_id) -); - -comment on table metadata.snapshot_activity_tags is e'' - 'The tags associated with an activity directive snapshot.'; - - -create function snapshot_tags_in_review_delete() - returns trigger - security definer -language plpgsql as $$ - begin - if exists(select status from merge_request mr - where - (mr.snapshot_id_supplying_changes = 
old.snapshot_id - or mr.merge_base_snapshot_id = old.snapshot_id) - and mr.status = 'in-progress') then - raise exception 'Cannot delete. Snapshot is in use in an active merge review.'; - end if; - return old; - end -$$; - -create trigger snapshot_tags_in_review_delete_trigger - before delete on metadata.snapshot_activity_tags - for each row - execute function snapshot_tags_in_review_delete(); diff --git a/deployment/postgres-init-db/sql/tables/tags/tags.sql b/deployment/postgres-init-db/sql/tables/tags/tags.sql index 5e882404ec..b458d26601 100644 --- a/deployment/postgres-init-db/sql/tables/tags/tags.sql +++ b/deployment/postgres-init-db/sql/tables/tags/tags.sql @@ -1,4 +1,4 @@ -create table metadata.tags( +create table tags.tags( id integer generated always as identity primary key, name text not null unique, @@ -9,22 +9,22 @@ create table metadata.tags( constraint color_is_hex_format check (color is null or color ~* '^#[a-f0-9]{6}$' ), constraint tags_owner_exists - foreign key (owner) references metadata.users + foreign key (owner) references permissions.users on update cascade on delete set null ); -comment on table metadata.tags is e'' +comment on table tags.tags is e'' 'All tags usable within an Aerie deployment.'; -comment on column metadata.tags.id is e'' +comment on column tags.tags.id is e'' 'The index of the tag.'; -comment on column metadata.tags.name is e'' +comment on column tags.tags.name is e'' 'The name of the tag. Unique within a deployment.'; -comment on column metadata.tags.color is e'' +comment on column tags.tags.color is e'' 'The color the tag should display as when using a GUI.'; -comment on column metadata.tags.owner is e'' +comment on column tags.tags.owner is e'' 'The user responsible for this tag. ' '''Mission Model'' is used to represent tags originating from an uploaded mission model' '''Aerie Legacy'' is used to represent tags originating from a version of Aerie prior to this table''s creation.'; -comment on column metadata.tags.created_at is e'' +comment on column tags.tags.created_at is e'' 'The date this tag was created.'; diff --git a/deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql b/deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql index c49f96f4ca..3d30e4fef6 100644 --- a/deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql +++ b/deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql @@ -43,28 +43,6 @@ begin end $$; -create function get_tags(_activity_id int, _plan_id int) - returns jsonb - security definer - language plpgsql as $$ - declare - tags jsonb; -begin - select jsonb_agg(json_build_object( - 'id', id, - 'name', name, - 'color', color, - 'owner', owner, - 'created_at', created_at - )) - from metadata.tags tags, metadata.activity_directive_tags adt - where tags.id = adt.tag_id - and (adt.directive_id, adt.plan_id) = (_activity_id, _plan_id) - into tags; - return tags; -end -$$; - create view activity_directive_extended as ( select From 70b17a28ce1495dbe2e31883774cd9db5f41c9dd Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Fri, 1 Mar 2024 09:56:03 -0800 Subject: [PATCH 10/36] Add Schema Qualifiers (Hasura) --- .../hasura/activity_preset_functions.sql | 32 +-- .../hasura/delete_anchor_functions.sql | 126 ++++++------ .../sql/functions/hasura/hasura_functions.sql | 28 +-- .../hasura/plan_branching_functions.sql | 32 +-- .../functions/hasura/plan_merge_functions.sql | 194 +++++++++--------- .../functions/hasura/snapshot_functions.sql | 34 +-- 
6 files changed, 223 insertions(+), 223 deletions(-) diff --git a/deployment/postgres-init-db/sql/functions/hasura/activity_preset_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/activity_preset_functions.sql index 697016360d..a21a16ddaa 100644 --- a/deployment/postgres-init-db/sql/functions/hasura/activity_preset_functions.sql +++ b/deployment/postgres-init-db/sql/functions/hasura/activity_preset_functions.sql @@ -1,56 +1,56 @@ -- Activity Presets -create function hasura_functions.apply_preset_to_activity(_preset_id int, _activity_id int, _plan_id int, hasura_session json) -returns activity_directive +create function hasura.apply_preset_to_activity(_preset_id int, _activity_id int, _plan_id int, hasura_session json) +returns merlin.activity_directive strict volatile language plpgsql as $$ declare - returning_directive activity_directive; + returning_directive merlin.activity_directive; ad_activity_type text; preset_activity_type text; - _function_permission metadata.permission; + _function_permission permissions.permission; _user text; begin - _function_permission := metadata.get_function_permissions('apply_preset', hasura_session); - perform metadata.raise_if_plan_merge_permission('apply_preset', _function_permission); + _function_permission := permissions.get_function_permissions('apply_preset', hasura_session); + perform permissions.raise_if_plan_merge_permission('apply_preset', _function_permission); -- Check valid permissions _user := hasura_session ->> 'x-hasura-user-id'; if not _function_permission = 'NO_CHECK' then if _function_permission = 'OWNER' then - if not exists(select * from public.activity_presets ap where ap.id = _preset_id and ap.owner = _user) then + if not exists(select * from merlin.activity_presets ap where ap.id = _preset_id and ap.owner = _user) then raise insufficient_privilege using message = 'Cannot run ''apply_preset'': '''|| _user ||''' is not OWNER on Activity Preset ' || _preset_id ||'.'; end if; end if; -- Additionally, the user needs to be OWNER of the plan - call metadata.check_general_permissions('apply_preset', _function_permission, _plan_id, _user); + call permissions.check_general_permissions('apply_preset', _function_permission, _plan_id, _user); end if; - if not exists(select id from public.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then raise exception 'Activity directive % does not exist in plan %', _activity_id, _plan_id; end if; - if not exists(select id from public.activity_presets where id = _preset_id) then + if not exists(select id from merlin.activity_presets where id = _preset_id) then raise exception 'Activity preset % does not exist', _preset_id; end if; - select type from activity_directive where (id, plan_id) = (_activity_id, _plan_id) into ad_activity_type; - select associated_activity_type from activity_presets where id = _preset_id into preset_activity_type; + select type from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id) into ad_activity_type; + select associated_activity_type from merlin.activity_presets where id = _preset_id into preset_activity_type; if (ad_activity_type != preset_activity_type) then raise exception 'Cannot apply preset for activity type "%" onto an activity of type "%".', preset_activity_type, ad_activity_type; end if; - update activity_directive - set arguments = (select arguments from activity_presets where id = _preset_id) + update 
merlin.activity_directive + set arguments = (select arguments from merlin.activity_presets where id = _preset_id) where (id, plan_id) = (_activity_id, _plan_id); - insert into preset_to_directive(preset_id, activity_id, plan_id) + insert into merlin.preset_to_directive(preset_id, activity_id, plan_id) select _preset_id, _activity_id, _plan_id on conflict (activity_id, plan_id) do update set preset_id = _preset_id; - select * from activity_directive + select * from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id) into returning_directive; diff --git a/deployment/postgres-init-db/sql/functions/hasura/delete_anchor_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/delete_anchor_functions.sql index 70a2876ffa..a5e66ba90d 100644 --- a/deployment/postgres-init-db/sql/functions/hasura/delete_anchor_functions.sql +++ b/deployment/postgres-init-db/sql/functions/hasura/delete_anchor_functions.sql @@ -1,92 +1,92 @@ -- Hasura functions for handling anchors during delete -create table hasura_functions.delete_anchor_return_value( - affected_row activity_directive, +create table hasura.delete_anchor_return_value( + affected_row merlin.activity_directive, change_type text ); -create function hasura_functions.delete_activity_by_pk_reanchor_plan_start(_activity_id int, _plan_id int, hasura_session json) - returns setof hasura_functions.delete_anchor_return_value +create function hasura.delete_activity_by_pk_reanchor_plan_start(_activity_id int, _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value strict volatile language plpgsql as $$ declare - _function_permission metadata.permission; + _function_permission permissions.permission; begin - _function_permission := metadata.get_function_permissions('delete_activity_reanchor_plan', hasura_session); - perform metadata.raise_if_plan_merge_permission('delete_activity_reanchor_plan', _function_permission); + _function_permission := permissions.get_function_permissions('delete_activity_reanchor_plan', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_reanchor_plan', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('delete_activity_reanchor_plan', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + call permissions.check_general_permissions('delete_activity_reanchor_plan', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); end if; - if not exists(select id from public.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; end if; return query with updated as ( - select public.anchor_direct_descendents_to_plan(_activity_id := _activity_id, _plan_id := _plan_id) + select merlin.anchor_direct_descendents_to_plan(_activity_id := _activity_id, _plan_id := _plan_id) ) select updated.*, 'updated' from updated; return query with deleted as ( - delete from activity_directive where (id, plan_id) = (_activity_id, _plan_id) returning * + delete from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id) returning * ) select (deleted.id, deleted.plan_id, deleted.name, deleted.source_scheduling_goal_id, deleted.created_at, deleted.created_by, deleted.last_modified_at, deleted.last_modified_by, deleted.start_offset, deleted.type, 
deleted.arguments, - deleted.last_modified_arguments_at, deleted.metadata, deleted.anchor_id, deleted.anchored_to_start)::activity_directive, 'deleted' from deleted; + deleted.last_modified_arguments_at, deleted.metadata, deleted.anchor_id, deleted.anchored_to_start)::merlin.activity_directive, 'deleted' from deleted; end $$; -create function hasura_functions.delete_activity_by_pk_reanchor_to_anchor(_activity_id int, _plan_id int, hasura_session json) - returns setof hasura_functions.delete_anchor_return_value +create function hasura.delete_activity_by_pk_reanchor_to_anchor(_activity_id int, _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value strict volatile language plpgsql as $$ declare - _function_permission metadata.permission; + _function_permission permissions.permission; begin - _function_permission := metadata.get_function_permissions('delete_activity_reanchor', hasura_session); - perform metadata.raise_if_plan_merge_permission('delete_activity_reanchor', _function_permission); + _function_permission := permissions.get_function_permissions('delete_activity_reanchor', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_reanchor', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('delete_activity_reanchor', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + call permissions.check_general_permissions('delete_activity_reanchor', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); end if; - if not exists(select id from public.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; end if; return query with updated as ( - select public.anchor_direct_descendents_to_ancestor(_activity_id := _activity_id, _plan_id := _plan_id) + select merlin.anchor_direct_descendents_to_ancestor(_activity_id := _activity_id, _plan_id := _plan_id) ) select updated.*, 'updated' from updated; return query with deleted as ( - delete from activity_directive where (id, plan_id) = (_activity_id, _plan_id) returning * + delete from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id) returning * ) select (deleted.id, deleted.plan_id, deleted.name, deleted.source_scheduling_goal_id, deleted.created_at, deleted.created_by, deleted.last_modified_at, deleted.last_modified_by, deleted.start_offset, deleted.type, deleted.arguments, - deleted.last_modified_arguments_at, deleted.metadata, deleted.anchor_id, deleted.anchored_to_start)::activity_directive, 'deleted' from deleted; + deleted.last_modified_arguments_at, deleted.metadata, deleted.anchor_id, deleted.anchored_to_start)::merlin.activity_directive, 'deleted' from deleted; end $$; -create function hasura_functions.delete_activity_by_pk_delete_subtree(_activity_id int, _plan_id int, hasura_session json) - returns setof hasura_functions.delete_anchor_return_value +create function hasura.delete_activity_by_pk_delete_subtree(_activity_id int, _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value strict volatile language plpgsql as $$ declare - _function_permission metadata.permission; + _function_permission permissions.permission; begin - _function_permission := metadata.get_function_permissions('delete_activity_subtree', 
hasura_session); - perform metadata.raise_if_plan_merge_permission('delete_activity_subtree', _function_permission); + _function_permission := permissions.get_function_permissions('delete_activity_subtree', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_subtree', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('delete_activity_subtree', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + call permissions.check_general_permissions('delete_activity_subtree', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); end if; - if not exists(select id from public.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; end if; @@ -94,102 +94,102 @@ begin with recursive descendents(activity_id, p_id) as ( select _activity_id, _plan_id - from activity_directive ad + from merlin.activity_directive ad where (ad.id, ad.plan_id) = (_activity_id, _plan_id) union select ad.id, ad.plan_id - from activity_directive ad, descendents d + from merlin.activity_directive ad, descendents d where (ad.anchor_id, ad.plan_id) = (d.activity_id, d.p_id) ), deleted as ( - delete from activity_directive ad + delete from merlin.activity_directive ad using descendents where (ad.plan_id, ad.id) = (_plan_id, descendents.activity_id) returning * ) select (deleted.id, deleted.plan_id, deleted.name, deleted.source_scheduling_goal_id, deleted.created_at, deleted.created_by, deleted.last_modified_at, deleted.last_modified_by, deleted.start_offset, deleted.type, deleted.arguments, - deleted.last_modified_arguments_at, deleted.metadata, deleted.anchor_id, deleted.anchored_to_start)::activity_directive, 'deleted' from deleted; + deleted.last_modified_arguments_at, deleted.metadata, deleted.anchor_id, deleted.anchored_to_start)::merlin.activity_directive, 'deleted' from deleted; end $$; -- Bulk versions of Anchor Deletion -create function hasura_functions.delete_activity_by_pk_reanchor_plan_start_bulk(_activity_ids int[], _plan_id int, hasura_session json) - returns setof hasura_functions.delete_anchor_return_value +create function hasura.delete_activity_by_pk_reanchor_plan_start_bulk(_activity_ids int[], _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value strict volatile language plpgsql as $$ declare activity_id int; - _function_permission metadata.permission; + _function_permission permissions.permission; begin - _function_permission := metadata.get_function_permissions('delete_activity_reanchor_plan_bulk', hasura_session); - perform metadata.raise_if_plan_merge_permission('delete_activity_reanchor_plan_bulk', _function_permission); + _function_permission := permissions.get_function_permissions('delete_activity_reanchor_plan_bulk', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_reanchor_plan_bulk', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('delete_activity_reanchor_plan_bulk', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + call permissions.check_general_permissions('delete_activity_reanchor_plan_bulk', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); end if; - set constraints 
public.validate_anchors_update_trigger immediate; + set constraints merlin.validate_anchors_update_trigger immediate; foreach activity_id in array _activity_ids loop -- An activity ID might've been deleted in a prior step, so validate that it exists first - if exists(select id from public.activity_directive where (id, plan_id) = (activity_id, _plan_id)) then + if exists(select id from merlin.activity_directive where (id, plan_id) = (activity_id, _plan_id)) then return query - select * from hasura_functions.delete_activity_by_pk_reanchor_plan_start(activity_id, _plan_id, hasura_session); + select * from hasura.delete_activity_by_pk_reanchor_plan_start(activity_id, _plan_id, hasura_session); end if; end loop; - set constraints public.validate_anchors_update_trigger deferred; + set constraints merlin.validate_anchors_update_trigger deferred; end $$; -create function hasura_functions.delete_activity_by_pk_reanchor_to_anchor_bulk(_activity_ids int[], _plan_id int, hasura_session json) - returns setof hasura_functions.delete_anchor_return_value +create function hasura.delete_activity_by_pk_reanchor_to_anchor_bulk(_activity_ids int[], _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value strict volatile language plpgsql as $$ declare activity_id int; - _function_permission metadata.permission; + _function_permission permissions.permission; begin - _function_permission := metadata.get_function_permissions('delete_activity_reanchor_bulk', hasura_session); - perform metadata.raise_if_plan_merge_permission('delete_activity_reanchor_bulk', _function_permission); + _function_permission := permissions.get_function_permissions('delete_activity_reanchor_bulk', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_reanchor_bulk', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('delete_activity_reanchor_bulk', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + call permissions.check_general_permissions('delete_activity_reanchor_bulk', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); end if; - set constraints public.validate_anchors_update_trigger immediate; + set constraints merlin.validate_anchors_update_trigger immediate; foreach activity_id in array _activity_ids loop -- An activity ID might've been deleted in a prior step, so validate that it exists first - if exists(select id from public.activity_directive where (id, plan_id) = (activity_id, _plan_id)) then + if exists(select id from merlin.activity_directive where (id, plan_id) = (activity_id, _plan_id)) then return query - select * from hasura_functions.delete_activity_by_pk_reanchor_to_anchor(activity_id, _plan_id, hasura_session); + select * from hasura.delete_activity_by_pk_reanchor_to_anchor(activity_id, _plan_id, hasura_session); end if; end loop; - set constraints public.validate_anchors_update_trigger deferred; + set constraints merlin.validate_anchors_update_trigger deferred; end $$; -create function hasura_functions.delete_activity_by_pk_delete_subtree_bulk(_activity_ids int[], _plan_id int, hasura_session json) - returns setof hasura_functions.delete_anchor_return_value +create function hasura.delete_activity_by_pk_delete_subtree_bulk(_activity_ids int[], _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value strict volatile language plpgsql as $$ declare activity_id int; - _function_permission metadata.permission; + _function_permission 
permissions.permission; begin - _function_permission := metadata.get_function_permissions('delete_activity_subtree_bulk', hasura_session); - perform metadata.raise_if_plan_merge_permission('delete_activity_subtree_bulk', _function_permission); + _function_permission := permissions.get_function_permissions('delete_activity_subtree_bulk', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_subtree_bulk', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('delete_activity_subtree_bulk', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + call permissions.check_general_permissions('delete_activity_subtree_bulk', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); end if; - set constraints public.validate_anchors_update_trigger immediate; + set constraints merlin.validate_anchors_update_trigger immediate; foreach activity_id in array _activity_ids loop - if exists(select id from public.activity_directive where (id, plan_id) = (activity_id, _plan_id)) then + if exists(select id from merlin.activity_directive where (id, plan_id) = (activity_id, _plan_id)) then return query - select * from hasura_functions.delete_activity_by_pk_delete_subtree(activity_id, _plan_id, hasura_session); + select * from hasura.delete_activity_by_pk_delete_subtree(activity_id, _plan_id, hasura_session); end if; end loop; - set constraints public.validate_anchors_update_trigger deferred; + set constraints merlin.validate_anchors_update_trigger deferred; end $$; diff --git a/deployment/postgres-init-db/sql/functions/hasura/hasura_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/hasura_functions.sql index d62c16e250..af205c4354 100644 --- a/deployment/postgres-init-db/sql/functions/hasura/hasura_functions.sql +++ b/deployment/postgres-init-db/sql/functions/hasura/hasura_functions.sql @@ -1,5 +1,5 @@ -- Simulation Resources -create table hasura_functions.resource_at_start_offset_return_value( +create table hasura.resource_at_start_offset_return_value( dataset_id integer not null, id integer not null, name text not null, @@ -9,8 +9,8 @@ create table hasura_functions.resource_at_start_offset_return_value( is_gap bool not null ); -create function hasura_functions.get_resources_at_start_offset(_dataset_id int, _start_offset interval) -returns setof hasura_functions.resource_at_start_offset_return_value +create function hasura.get_resources_at_start_offset(_dataset_id int, _start_offset interval) +returns setof hasura.resource_at_start_offset_return_value strict stable security invoker @@ -19,7 +19,7 @@ begin return query select distinct on (p.name) p.dataset_id, p.id, p.name, p.type, ps.start_offset, ps.dynamics, ps.is_gap - from profile p, profile_segment ps + from merlin.profile p, merlin.profile_segment ps where ps.profile_id = p.id and p.dataset_id = _dataset_id and ps.dataset_id = _dataset_id @@ -28,38 +28,38 @@ begin end $$; -create function hasura_functions.restore_activity_changelog( +create function hasura.restore_activity_changelog( _plan_id integer, _activity_directive_id integer, _revision integer, hasura_session json ) - returns setof activity_directive + returns setof merlin.activity_directive volatile language plpgsql as $$ declare - _function_permission metadata.permission; + _function_permission permissions.permission; begin _function_permission := - metadata.get_function_permissions('restore_activity_changelog', hasura_session); + 
permissions.get_function_permissions('restore_activity_changelog', hasura_session); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions( + call permissions.check_general_permissions( 'restore_activity_changelog', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id') ); end if; - if not exists(select id from public.plan where id = _plan_id) then + if not exists(select id from merlin.plan where id = _plan_id) then raise exception 'Plan % does not exist', _plan_id; end if; - if not exists(select id from public.activity_directive where (id, plan_id) = (_activity_directive_id, _plan_id)) then + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_directive_id, _plan_id)) then raise exception 'Activity Directive % does not exist in Plan %', _activity_directive_id, _plan_id; end if; if not exists(select revision - from public.activity_directive_changelog + from merlin.activity_directive_changelog where (plan_id, activity_directive_id, revision) = (_plan_id, _activity_directive_id, _revision)) then @@ -67,7 +67,7 @@ begin end if; return query - update activity_directive as ad + update merlin.activity_directive as ad set name = c.name, source_scheduling_goal_id = c.source_scheduling_goal_id, start_offset = c.start_offset, @@ -79,7 +79,7 @@ begin anchored_to_start = c.anchored_to_start, last_modified_at = c.changed_at, last_modified_by = c.changed_by - from activity_directive_changelog as c + from merlin.activity_directive_changelog as c where ad.id = _activity_directive_id and c.activity_directive_id = _activity_directive_id and ad.plan_id = _plan_id diff --git a/deployment/postgres-init-db/sql/functions/hasura/plan_branching_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/plan_branching_functions.sql index 4d89a2cec1..f9001e8c60 100644 --- a/deployment/postgres-init-db/sql/functions/hasura/plan_branching_functions.sql +++ b/deployment/postgres-init-db/sql/functions/hasura/plan_branching_functions.sql @@ -1,37 +1,37 @@ -create table hasura_functions.duplicate_plan_return_value(new_plan_id integer); -create function hasura_functions.duplicate_plan(plan_id integer, new_plan_name text, hasura_session json) - returns hasura_functions.duplicate_plan_return_value -- plan_id of the new plan +create table hasura.duplicate_plan_return_value(new_plan_id integer); +create function hasura.duplicate_plan(plan_id integer, new_plan_name text, hasura_session json) + returns hasura.duplicate_plan_return_value -- plan_id of the new plan volatile language plpgsql as $$ declare res integer; new_owner text; - _function_permission metadata.permission; + _function_permission permissions.permission; begin new_owner := (hasura_session ->> 'x-hasura-user-id'); - _function_permission := metadata.get_function_permissions('branch_plan', hasura_session); - perform metadata.raise_if_plan_merge_permission('branch_plan', _function_permission); + _function_permission := permissions.get_function_permissions('branch_plan', hasura_session); + perform permissions.raise_if_plan_merge_permission('branch_plan', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('branch_plan', _function_permission, plan_id, new_owner); + call permissions.check_general_permissions('branch_plan', _function_permission, plan_id, new_owner); end if; - select duplicate_plan(plan_id, new_plan_name, new_owner) into res; - return row(res)::hasura_functions.duplicate_plan_return_value; + select 
merlin.duplicate_plan(plan_id, new_plan_name, new_owner) into res; + return row(res)::hasura.duplicate_plan_return_value; end; $$; -create table hasura_functions.get_plan_history_return_value(plan_id integer); -create function hasura_functions.get_plan_history(_plan_id integer, hasura_session json) - returns setof hasura_functions.get_plan_history_return_value +create table hasura.get_plan_history_return_value(plan_id integer); +create function hasura.get_plan_history(_plan_id integer, hasura_session json) + returns setof hasura.get_plan_history_return_value stable language plpgsql as $$ declare - _function_permission metadata.permission; + _function_permission permissions.permission; begin - _function_permission := metadata.get_function_permissions('get_plan_history', hasura_session); - perform metadata.raise_if_plan_merge_permission('get_plan_history', _function_permission); + _function_permission := permissions.get_function_permissions('get_plan_history', hasura_session); + perform permissions.raise_if_plan_merge_permission('get_plan_history', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('get_plan_history', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + call permissions.check_general_permissions('get_plan_history', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); end if; return query select get_plan_history($1); diff --git a/deployment/postgres-init-db/sql/functions/hasura/plan_merge_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/plan_merge_functions.sql index 9d7c1a28c1..357956373a 100644 --- a/deployment/postgres-init-db/sql/functions/hasura/plan_merge_functions.sql +++ b/deployment/postgres-init-db/sql/functions/hasura/plan_merge_functions.sql @@ -1,33 +1,32 @@ -create table hasura_functions.create_merge_request_return_value(merge_request_id integer); -create function hasura_functions.create_merge_request(source_plan_id integer, target_plan_id integer, hasura_session json) - returns hasura_functions.create_merge_request_return_value -- plan_id of the new plan +create table hasura.create_merge_request_return_value(merge_request_id integer); +create function hasura.create_merge_request(source_plan_id integer, target_plan_id integer, hasura_session json) + returns hasura.create_merge_request_return_value -- plan_id of the new plan volatile language plpgsql as $$ declare res integer; requester_username text; - _function_permission metadata.permission; + _function_permission permissions.permission; begin requester_username := (hasura_session ->> 'x-hasura-user-id'); - _function_permission := metadata.get_function_permissions('create_merge_rq', hasura_session); - call metadata.check_merge_permissions('create_merge_rq', _function_permission, target_plan_id, source_plan_id, requester_username); + _function_permission := permissions.get_function_permissions('create_merge_rq', hasura_session); + call permissions.check_merge_permissions('create_merge_rq', _function_permission, target_plan_id, source_plan_id, requester_username); - select create_merge_request(source_plan_id, target_plan_id, requester_username) into res; - return row(res)::hasura_functions.create_merge_request_return_value; + select merlin.create_merge_request(source_plan_id, target_plan_id, requester_username) into res; + return row(res)::hasura.create_merge_request_return_value; end; $$; - -create table hasura_functions.get_non_conflicting_activities_return_value( +create table 
hasura.get_non_conflicting_activities_return_value( activity_id integer, - change_type activity_change_type, - source plan_snapshot_activities, - target activity_directive, + change_type merlin.activity_change_type, + source merlin.plan_snapshot_activities, + target merlin.activity_directive, source_tags jsonb, target_tags jsonb ); -create function hasura_functions.get_non_conflicting_activities(_merge_request_id integer, hasura_session json) - returns setof hasura_functions.get_non_conflicting_activities_return_value +create function hasura.get_non_conflicting_activities(_merge_request_id integer, hasura_session json) + returns setof hasura.get_non_conflicting_activities_return_value strict volatile language plpgsql as $$ @@ -35,10 +34,10 @@ declare _snapshot_id_supplying_changes integer; _plan_id_receiving_changes integer; begin - call metadata.check_merge_permissions('get_non_conflicting_activities', _merge_request_id, hasura_session); + call permissions.check_merge_permissions('get_non_conflicting_activities', _merge_request_id, hasura_session); select snapshot_id_supplying_changes, plan_id_receiving_changes - from merge_request + from merlin.merge_request where merge_request.id = $1 into _snapshot_id_supplying_changes, _plan_id_receiving_changes; @@ -51,7 +50,7 @@ begin 'owner', owner, 'created_at', created_at )) as tags, adt.directive_id - from metadata.tags tags, metadata.activity_directive_tags adt + from tags.tags tags, tags.activity_directive_tags adt where tags.id = adt.tag_id and adt.plan_id = _plan_id_receiving_changes group by adt.directive_id @@ -64,7 +63,7 @@ begin 'owner', owner, 'created_at', created_at )) as tags, sat.directive_id - from metadata.tags tags, metadata.snapshot_activity_tags sat + from tags.tags tags, tags.snapshot_activity_tags sat where tags.id = sat.tag_id and sat.snapshot_id = _snapshot_id_supplying_changes group by sat.directive_id @@ -78,12 +77,12 @@ begin coalesce(pt.tags, '[]') from (select msa.activity_id, msa.change_type - from merge_staging_area msa + from merlin.merge_staging_area msa where msa.merge_request_id = $1) c - left join plan_snapshot_activities snap_act + left join merlin.plan_snapshot_activities snap_act on _snapshot_id_supplying_changes = snap_act.snapshot_id and c.activity_id = snap_act.id - left join activity_directive act + left join merlin.activity_directive act on _plan_id_receiving_changes = act.plan_id and c.activity_id = act.id left join plan_tags pt @@ -93,21 +92,22 @@ begin end $$; -create type resolution_type as enum ('none','source', 'target'); -create table hasura_functions.get_conflicting_activities_return_value( +create type hasura.resolution_type as enum ('none','source', 'target'); + +create table hasura.get_conflicting_activities_return_value( activity_id integer, - change_type_source activity_change_type, - change_type_target activity_change_type, - resolution resolution_type, - source plan_snapshot_activities, - target activity_directive, - merge_base plan_snapshot_activities, + change_type_source merlin.activity_change_type, + change_type_target merlin.activity_change_type, + resolution hasura.resolution_type, + source merlin.plan_snapshot_activities, + target merlin.activity_directive, + merge_base merlin.plan_snapshot_activities, source_tags jsonb, target_tags jsonb, merge_base_tags jsonb ); -create function hasura_functions.get_conflicting_activities(_merge_request_id integer, hasura_session json) - returns setof hasura_functions.get_conflicting_activities_return_value +create function 
hasura.get_conflicting_activities(_merge_request_id integer, hasura_session json) + returns setof hasura.get_conflicting_activities_return_value strict volatile language plpgsql as $$ @@ -116,10 +116,10 @@ declare _plan_id_receiving_changes integer; _merge_base_snapshot_id integer; begin - call metadata.check_merge_permissions('get_conflicting_activities', _merge_request_id, hasura_session); + call permissions.check_merge_permissions('get_conflicting_activities', _merge_request_id, hasura_session); select snapshot_id_supplying_changes, plan_id_receiving_changes, merge_base_snapshot_id - from merge_request + from merlin.merge_request where merge_request.id = _merge_request_id into _snapshot_id_supplying_changes, _plan_id_receiving_changes, _merge_base_snapshot_id; @@ -132,7 +132,7 @@ begin 'owner', owner, 'created_at', created_at )) as tags, adt.directive_id - from metadata.tags tags, metadata.activity_directive_tags adt + from tags.tags tags, tags.activity_directive_tags adt where tags.id = adt.tag_id and _plan_id_receiving_changes = adt.plan_id group by adt.directive_id @@ -144,7 +144,7 @@ begin 'owner', owner, 'created_at', created_at )) as tags, sdt.directive_id, sdt.snapshot_id - from metadata.tags tags, metadata.snapshot_activity_tags sdt + from tags.tags tags, tags.snapshot_activity_tags sdt where tags.id = sdt.tag_id and (sdt.snapshot_id = _snapshot_id_supplying_changes or sdt.snapshot_id = _merge_base_snapshot_id) @@ -155,9 +155,9 @@ begin change_type_supplying, change_type_receiving, case - when c.resolution = 'supplying' then 'source'::resolution_type - when c.resolution = 'receiving' then 'target'::resolution_type - when c.resolution = 'none' then 'none'::resolution_type + when c.resolution = 'supplying' then 'source'::hasura.resolution_type + when c.resolution = 'receiving' then 'target'::hasura.resolution_type + when c.resolution = 'none' then 'none'::hasura.resolution_type end, snap_act, act, @@ -166,12 +166,12 @@ begin coalesce(pt.tags, '[]'), coalesce(mbt.tags, '[]') from - (select * from conflicting_activities c where c.merge_request_id = _merge_request_id) c - left join plan_snapshot_activities merge_base_act + (select * from merlin.conflicting_activities c where c.merge_request_id = _merge_request_id) c + left join merlin.plan_snapshot_activities merge_base_act on c.activity_id = merge_base_act.id and _merge_base_snapshot_id = merge_base_act.snapshot_id - left join plan_snapshot_activities snap_act + left join merlin.plan_snapshot_activities snap_act on c.activity_id = snap_act.id and _snapshot_id_supplying_changes = snap_act.snapshot_id - left join activity_directive act + left join merlin.activity_directive act on _plan_id_receiving_changes = act.plan_id and c.activity_id = act.id left join plan_tags pt on c.activity_id = pt.directive_id @@ -182,133 +182,133 @@ begin end; $$; -create table hasura_functions.begin_merge_return_value( +create table hasura.begin_merge_return_value( merge_request_id integer, - non_conflicting_activities hasura_functions.get_non_conflicting_activities_return_value[], - conflicting_activities hasura_functions.get_conflicting_activities_return_value[] + non_conflicting_activities hasura.get_non_conflicting_activities_return_value[], + conflicting_activities hasura.get_conflicting_activities_return_value[] ); -create function hasura_functions.begin_merge(_merge_request_id integer, hasura_session json) - returns hasura_functions.begin_merge_return_value -- plan_id of the new plan +create function hasura.begin_merge(_merge_request_id integer, 
hasura_session json) + returns hasura.begin_merge_return_value -- plan_id of the new plan strict volatile language plpgsql as $$ declare - non_conflicting_activities hasura_functions.get_non_conflicting_activities_return_value[]; - conflicting_activities hasura_functions.get_conflicting_activities_return_value[]; + non_conflicting_activities hasura.get_non_conflicting_activities_return_value[]; + conflicting_activities hasura.get_conflicting_activities_return_value[]; reviewer_username text; begin - call metadata.check_merge_permissions('begin_merge', _merge_request_id, hasura_session); + call permissions.check_merge_permissions('begin_merge', _merge_request_id, hasura_session); reviewer_username := (hasura_session ->> 'x-hasura-user-id'); - call public.begin_merge($1, reviewer_username); + call merlin.begin_merge($1, reviewer_username); - non_conflicting_activities := array(select hasura_functions.get_non_conflicting_activities($1, hasura_session)); - conflicting_activities := array(select hasura_functions.get_conflicting_activities($1, hasura_session)); + non_conflicting_activities := array(select hasura.get_non_conflicting_activities($1, hasura_session)); + conflicting_activities := array(select hasura.get_conflicting_activities($1, hasura_session)); - return row($1, non_conflicting_activities, conflicting_activities)::hasura_functions.begin_merge_return_value; + return row($1, non_conflicting_activities, conflicting_activities)::hasura.begin_merge_return_value; end; $$; -create table hasura_functions.commit_merge_return_value(merge_request_id integer); -create function hasura_functions.commit_merge(_merge_request_id integer, hasura_session json) - returns hasura_functions.commit_merge_return_value +create table hasura.commit_merge_return_value(merge_request_id integer); +create function hasura.commit_merge(_merge_request_id integer, hasura_session json) + returns hasura.commit_merge_return_value strict volatile language plpgsql as $$ begin - call metadata.check_merge_permissions('commit_merge', _merge_request_id, hasura_session); - call commit_merge(_merge_request_id); - return row(_merge_request_id)::hasura_functions.commit_merge_return_value; + call permissions.check_merge_permissions('commit_merge', _merge_request_id, hasura_session); + call merlin.commit_merge(_merge_request_id); + return row(_merge_request_id)::hasura.commit_merge_return_value; end; $$; -create table hasura_functions.deny_merge_return_value(merge_request_id integer); -create function hasura_functions.deny_merge(merge_request_id integer, hasura_session json) - returns hasura_functions.deny_merge_return_value +create table hasura.deny_merge_return_value(merge_request_id integer); +create function hasura.deny_merge(merge_request_id integer, hasura_session json) + returns hasura.deny_merge_return_value strict volatile language plpgsql as $$ begin - call metadata.check_merge_permissions('deny_merge', $1, hasura_session); - call deny_merge($1); - return row($1)::hasura_functions.deny_merge_return_value; + call permissions.check_merge_permissions('deny_merge', $1, hasura_session); + call merlin.deny_merge($1); + return row($1)::hasura.deny_merge_return_value; end; $$; -create table hasura_functions.withdraw_merge_request_return_value(merge_request_id integer); -create function hasura_functions.withdraw_merge_request(_merge_request_id integer, hasura_session json) - returns hasura_functions.withdraw_merge_request_return_value +create table hasura.withdraw_merge_request_return_value(merge_request_id integer); +create 
function hasura.withdraw_merge_request(_merge_request_id integer, hasura_session json) + returns hasura.withdraw_merge_request_return_value strict volatile language plpgsql as $$ begin - call metadata.check_merge_permissions('withdraw_merge_rq', _merge_request_id, hasura_session); - call withdraw_merge_request(_merge_request_id); - return row(_merge_request_id)::hasura_functions.withdraw_merge_request_return_value; + call permissions.check_merge_permissions('withdraw_merge_rq', _merge_request_id, hasura_session); + call merlin.withdraw_merge_request(_merge_request_id); + return row(_merge_request_id)::hasura.withdraw_merge_request_return_value; end; $$; -create table hasura_functions.cancel_merge_return_value(merge_request_id integer); -create function hasura_functions.cancel_merge(_merge_request_id integer, hasura_session json) - returns hasura_functions.cancel_merge_return_value +create table hasura.cancel_merge_return_value(merge_request_id integer); +create function hasura.cancel_merge(_merge_request_id integer, hasura_session json) + returns hasura.cancel_merge_return_value strict volatile language plpgsql as $$ begin - call metadata.check_merge_permissions('cancel_merge', _merge_request_id, hasura_session); - call cancel_merge(_merge_request_id); - return row(_merge_request_id)::hasura_functions.cancel_merge_return_value; + call permissions.check_merge_permissions('cancel_merge', _merge_request_id, hasura_session); + call merlin.cancel_merge(_merge_request_id); + return row(_merge_request_id)::hasura.cancel_merge_return_value; end; $$; -create function hasura_functions.set_resolution(_merge_request_id integer, _activity_id integer, _resolution resolution_type, hasura_session json) - returns setof hasura_functions.get_conflicting_activities_return_value +create function hasura.set_resolution(_merge_request_id integer, _activity_id integer, _resolution hasura.resolution_type, hasura_session json) + returns setof hasura.get_conflicting_activities_return_value strict volatile language plpgsql as $$ declare - _conflict_resolution conflict_resolution; + _conflict_resolution merlin.conflict_resolution; begin - call metadata.check_merge_permissions('set_resolution', _merge_request_id, hasura_session); + call permissions.check_merge_permissions('set_resolution', _merge_request_id, hasura_session); select into _conflict_resolution case - when _resolution = 'source' then 'supplying'::conflict_resolution - when _resolution = 'target' then 'receiving'::conflict_resolution - when _resolution = 'none' then 'none'::conflict_resolution + when _resolution = 'source' then 'supplying'::merlin.conflict_resolution + when _resolution = 'target' then 'receiving'::merlin.conflict_resolution + when _resolution = 'none' then 'none'::merlin.conflict_resolution end; - update conflicting_activities ca + update merlin.conflicting_activities ca set resolution = _conflict_resolution where ca.merge_request_id = _merge_request_id and ca.activity_id = _activity_id; return query - select * from hasura_functions.get_conflicting_activities(_merge_request_id, hasura_session) + select * from hasura.get_conflicting_activities(_merge_request_id, hasura_session) where activity_id = _activity_id limit 1; end $$; -create function hasura_functions.set_resolution_bulk(_merge_request_id integer, _resolution resolution_type, hasura_session json) - returns setof hasura_functions.get_conflicting_activities_return_value +create function hasura.set_resolution_bulk(_merge_request_id integer, _resolution hasura.resolution_type, 
hasura_session json) + returns setof hasura.get_conflicting_activities_return_value strict volatile language plpgsql as $$ declare - _conflict_resolution conflict_resolution; + _conflict_resolution merlin.conflict_resolution; begin - call metadata.check_merge_permissions('set_resolution_bulk', _merge_request_id, hasura_session); + call permissions.check_merge_permissions('set_resolution_bulk', _merge_request_id, hasura_session); select into _conflict_resolution case - when _resolution = 'source' then 'supplying'::conflict_resolution - when _resolution = 'target' then 'receiving'::conflict_resolution - when _resolution = 'none' then 'none'::conflict_resolution + when _resolution = 'source' then 'supplying'::merlin.conflict_resolution + when _resolution = 'target' then 'receiving'::merlin.conflict_resolution + when _resolution = 'none' then 'none'::merlin.conflict_resolution end; - update conflicting_activities ca + update merlin.conflicting_activities ca set resolution = _conflict_resolution where ca.merge_request_id = _merge_request_id; return query - select * from hasura_functions.get_conflicting_activities(_merge_request_id, hasura_session); + select * from hasura.get_conflicting_activities(_merge_request_id, hasura_session); end $$; diff --git a/deployment/postgres-init-db/sql/functions/hasura/snapshot_functions.sql b/deployment/postgres-init-db/sql/functions/hasura/snapshot_functions.sql index 9221e87a3c..dd8b1658e5 100644 --- a/deployment/postgres-init-db/sql/functions/hasura/snapshot_functions.sql +++ b/deployment/postgres-init-db/sql/functions/hasura/snapshot_functions.sql @@ -1,45 +1,45 @@ -create table hasura_functions.create_snapshot_return_value(snapshot_id integer); +create table hasura.create_snapshot_return_value(snapshot_id integer); -- Description must be the last parameter since it has a default value -create function hasura_functions.create_snapshot(_plan_id integer, _snapshot_name text, hasura_session json, _description text default null) - returns hasura_functions.create_snapshot_return_value +create function hasura.create_snapshot(_plan_id integer, _snapshot_name text, hasura_session json, _description text default null) + returns hasura.create_snapshot_return_value volatile language plpgsql as $$ declare _snapshot_id integer; _snapshotter text; - _function_permission metadata.permission; + _function_permission permissions.permission; begin _snapshotter := (hasura_session ->> 'x-hasura-user-id'); - _function_permission := metadata.get_function_permissions('create_snapshot', hasura_session); - perform metadata.raise_if_plan_merge_permission('create_snapshot', _function_permission); + _function_permission := permissions.get_function_permissions('create_snapshot', hasura_session); + perform permissions.raise_if_plan_merge_permission('create_snapshot', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('create_snapshot', _function_permission, _plan_id, _snapshotter); + call permissions.check_general_permissions('create_snapshot', _function_permission, _plan_id, _snapshotter); end if; if _snapshot_name is null then raise exception 'Snapshot name cannot be null.'; end if; - select create_snapshot(_plan_id, _snapshot_name, _description, _snapshotter) into _snapshot_id; - return row(_snapshot_id)::hasura_functions.create_snapshot_return_value; + select merlin.create_snapshot(_plan_id, _snapshot_name, _description, _snapshotter) into _snapshot_id; + return row(_snapshot_id)::hasura.create_snapshot_return_value; end; 
$$; -create function hasura_functions.restore_from_snapshot(_plan_id integer, _snapshot_id integer, hasura_session json) - returns hasura_functions.create_snapshot_return_value +create function hasura.restore_from_snapshot(_plan_id integer, _snapshot_id integer, hasura_session json) + returns hasura.create_snapshot_return_value volatile language plpgsql as $$ declare _user text; - _function_permission metadata.permission; + _function_permission permissions.permission; begin _user := (hasura_session ->> 'x-hasura-user-id'); - _function_permission := metadata.get_function_permissions('restore_snapshot', hasura_session); - perform metadata.raise_if_plan_merge_permission('restore_snapshot', _function_permission); + _function_permission := permissions.get_function_permissions('restore_snapshot', hasura_session); + perform permissions.raise_if_plan_merge_permission('restore_snapshot', _function_permission); if not _function_permission = 'NO_CHECK' then - call metadata.check_general_permissions('restore_snapshot', _function_permission, _plan_id, _user); + call permissions.check_general_permissions('restore_snapshot', _function_permission, _plan_id, _user); end if; - call restore_from_snapshot(_plan_id, _snapshot_id); - return row(_snapshot_id)::hasura_functions.create_snapshot_return_value; + call merlin.restore_from_snapshot(_plan_id, _snapshot_id); + return row(_snapshot_id)::hasura.create_snapshot_return_value; end $$; From ea142c2913aa72c8cfb6dab0c06a5ab6c57ab94a Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Thu, 29 Feb 2024 17:24:42 -0800 Subject: [PATCH 11/36] Add Schema Qualifiers (Merlin) - Extract Plan Locked check to its own file - move reanchor functions - split three tables into three files --- .../functions/merlin/merging/begin_merge.sql | 114 ++++---- .../functions/merlin/merging/commit_merge.sql | 86 +++--- .../merlin/merging/duplicate_plan.sql | 34 +-- .../merlin/merging/get_merge_base.sql | 6 +- .../merging/merge_request_state_functions.sql | 93 ++++--- .../merlin/merging/plan_locked_exception.sql | 11 + .../merlin/reanchoring_functions.sql | 76 ++++++ .../merlin/snapshots/create_snapshot.sql | 36 +-- .../snapshots/plan_history_functions.sql | 23 +- .../snapshots/restore_from_snapshot.sql | 36 +-- .../activity_directive/activity_directive.sql | 249 ++++++------------ .../activity_directive_changelog.sql | 32 +-- .../activity_directive_metadata_schema.sql | 33 +-- .../activity_directive_validations.sql | 14 +- .../activity_directive/activity_presets.sql | 67 +---- .../anchor_validation_status.sql | 100 +++---- .../preset_to_directive.sql | 21 ++ .../sql/tables/merlin/activity_type.sql | 28 +- .../constraints/constraint_definition.sql | 24 +- .../constraints/constraint_metadata.sql | 38 ++- .../constraint_model_specification.sql | 16 +- .../merlin/constraints/constraint_run.sql | 24 +- .../constraints/constraint_specification.sql | 18 +- .../sql/tables/merlin/dataset/dataset.sql | 78 +++--- .../sql/tables/merlin/dataset/event.sql | 33 ++- .../sql/tables/merlin/dataset/profile.sql | 38 +-- .../tables/merlin/dataset/profile_segment.sql | 30 +-- .../sql/tables/merlin/dataset/span.sql | 32 +-- .../sql/tables/merlin/dataset/topic.sql | 46 ++-- .../merlin/merging/conflicting_activities.sql | 39 ++- .../tables/merlin/merging/merge_comments.sql | 16 +- .../tables/merlin/merging/merge_request.sql | 36 +-- .../merlin/merging/merge_staging_area.sql | 38 +-- .../sql/tables/merlin/mission_model.sql | 58 ++-- .../merlin/mission_model_parameters.sql | 14 +- .../sql/tables/merlin/plan.sql 
| 114 +++----- .../sql/tables/merlin/plan_collaborators.sql | 12 +- .../sql/tables/merlin/plan_dataset.sql | 67 ++--- .../sql/tables/merlin/resource_type.sql | 12 +- .../tables/merlin/simulation/simulation.sql | 37 +-- .../merlin/simulation/simulation_dataset.sql | 112 ++++---- .../merlin/simulation/simulation_extent.sql | 10 +- .../merlin/simulation/simulation_template.sql | 38 +-- .../merlin/snapshot/plan_latest_snapshot.sql | 8 +- .../tables/merlin/snapshot/plan_snapshot.sql | 20 +- .../snapshot/plan_snapshot_activities.sql | 10 +- .../merlin/snapshot/plan_snapshot_parent.sql | 10 +- .../snapshot/preset_to_snapshot_directive.sql | 22 ++ .../sql/tables/merlin/uploaded_file.sql | 16 +- ...ta.sql => activity-directive-metadata.sql} | 4 +- .../sql/types/merlin/merlin-arguments.sql | 12 +- .../sql/types/merlin/plan-merge-types.sql | 6 +- .../merlin/activity_directive_extended.sql | 22 +- .../sql/views/merlin/resource_profile.sql | 30 +-- .../sql/views/merlin/simulated_activity.sql | 38 +-- 55 files changed, 1060 insertions(+), 1177 deletions(-) create mode 100644 deployment/postgres-init-db/sql/functions/merlin/merging/plan_locked_exception.sql create mode 100644 deployment/postgres-init-db/sql/functions/merlin/reanchoring_functions.sql create mode 100644 deployment/postgres-init-db/sql/tables/merlin/activity_directive/preset_to_directive.sql create mode 100644 deployment/postgres-init-db/sql/tables/merlin/snapshot/preset_to_snapshot_directive.sql rename deployment/postgres-init-db/sql/types/merlin/{merlin-activity-directive-metadata.sql => activity-directive-metadata.sql} (58%) diff --git a/deployment/postgres-init-db/sql/functions/merlin/merging/begin_merge.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/begin_merge.sql index 1501d2be1a..af6fce63a8 100644 --- a/deployment/postgres-init-db/sql/functions/merlin/merging/begin_merge.sql +++ b/deployment/postgres-init-db/sql/functions/merlin/merging/begin_merge.sql @@ -20,19 +20,19 @@ Modify | Delete | Into CA Delete | Delete | Dropped */ -create procedure begin_merge(_merge_request_id integer, review_username text) +create procedure merlin.begin_merge(_merge_request_id integer, review_username text) language plpgsql as $$ declare validate_id integer; - validate_status merge_request_status; - validate_non_no_op_status activity_change_type; + validate_status merlin.merge_request_status; + validate_non_no_op_status merlin.activity_change_type; snapshot_id_supplying integer; plan_id_receiving integer; merge_base_id integer; begin -- validate id and status select id, status - from merge_request + from merlin.merge_request where _merge_request_id = id into validate_id, validate_status; @@ -46,26 +46,26 @@ begin -- select from merge-request the snapshot_sc (s_sc) and plan_rc (p_rc) ids select plan_id_receiving_changes, snapshot_id_supplying_changes - from merge_request + from merlin.merge_request where id = _merge_request_id into plan_id_receiving, snapshot_id_supplying; -- ensure the plan receiving changes isn't locked - if (select is_locked from plan where plan.id=plan_id_receiving) then + if (select is_locked from merlin.plan where plan.id=plan_id_receiving) then raise exception 'Cannot begin merge request. 
Plan to receive changes is locked.'; end if; -- lock plan_rc - update plan + update merlin.plan set is_locked = true where plan.id = plan_id_receiving; -- get merge base (mb) - select get_merge_base(plan_id_receiving, snapshot_id_supplying) + select merlin.get_merge_base(plan_id_receiving, snapshot_id_supplying) into merge_base_id; -- update the status to "in progress" - update merge_request + update merlin.merge_request set status = 'in-progress', merge_base_snapshot_id = merge_base_id, reviewer_username = review_username @@ -79,50 +79,50 @@ begin -- A minus B on everything except everything currently in the table is modify create temp table supplying_diff( activity_id integer, - change_type activity_change_type not null + change_type merlin.activity_change_type not null ); insert into supplying_diff (activity_id, change_type) select activity_id, 'delete' from( select id as activity_id - from plan_snapshot_activities + from merlin.plan_snapshot_activities where snapshot_id = merge_base_id except select id as activity_id - from plan_snapshot_activities + from merlin.plan_snapshot_activities where snapshot_id = snapshot_id_supplying) a; insert into supplying_diff (activity_id, change_type) select activity_id, 'add' from( select id as activity_id - from plan_snapshot_activities + from merlin.plan_snapshot_activities where snapshot_id = snapshot_id_supplying except select id as activity_id - from plan_snapshot_activities + from merlin.plan_snapshot_activities where snapshot_id = merge_base_id) a; insert into supplying_diff (activity_id, change_type) select activity_id, 'none' from( - select psa.id as activity_id, name, metadata.tag_ids_activity_snapshot(psa.id, merge_base_id), + select psa.id as activity_id, name, tags.tag_ids_activity_snapshot(psa.id, merge_base_id), source_scheduling_goal_id, created_at, start_offset, type, arguments, metadata, anchor_id, anchored_to_start - from plan_snapshot_activities psa + from merlin.plan_snapshot_activities psa where psa.snapshot_id = merge_base_id intersect - select id as activity_id, name, metadata.tag_ids_activity_snapshot(psa.id, snapshot_id_supplying), + select id as activity_id, name, tags.tag_ids_activity_snapshot(psa.id, snapshot_id_supplying), source_scheduling_goal_id, created_at, start_offset, type, arguments, metadata, anchor_id, anchored_to_start - from plan_snapshot_activities psa + from merlin.plan_snapshot_activities psa where psa.snapshot_id = snapshot_id_supplying) a; insert into supplying_diff (activity_id, change_type) select activity_id, 'modify' from( - select id as activity_id from plan_snapshot_activities + select id as activity_id from merlin.plan_snapshot_activities where snapshot_id = merge_base_id or snapshot_id = snapshot_id_supplying except select activity_id from supplying_diff) a; @@ -130,55 +130,55 @@ begin -- perform diff between mb and p_rc (r_diff) create temp table receiving_diff( activity_id integer, - change_type activity_change_type not null + change_type merlin.activity_change_type not null ); insert into receiving_diff (activity_id, change_type) select activity_id, 'delete' from( select id as activity_id - from plan_snapshot_activities + from merlin.plan_snapshot_activities where snapshot_id = merge_base_id except select id as activity_id - from activity_directive + from merlin.activity_directive where plan_id = plan_id_receiving) a; insert into receiving_diff (activity_id, change_type) select activity_id, 'add' from( select id as activity_id - from activity_directive + from merlin.activity_directive where 
plan_id = plan_id_receiving except select id as activity_id - from plan_snapshot_activities + from merlin.plan_snapshot_activities where snapshot_id = merge_base_id) a; insert into receiving_diff (activity_id, change_type) select activity_id, 'none' from( - select id as activity_id, name, metadata.tag_ids_activity_snapshot(id, merge_base_id), + select id as activity_id, name, tags.tag_ids_activity_snapshot(id, merge_base_id), source_scheduling_goal_id, created_at, start_offset, type, arguments, metadata, anchor_id, anchored_to_start - from plan_snapshot_activities psa + from merlin.plan_snapshot_activities psa where psa.snapshot_id = merge_base_id intersect - select id as activity_id, name, metadata.tag_ids_activity_directive(id, plan_id_receiving), + select id as activity_id, name, tags.tag_ids_activity_directive(id, plan_id_receiving), source_scheduling_goal_id, created_at, start_offset, type, arguments, metadata, anchor_id, anchored_to_start - from activity_directive ad + from merlin.activity_directive ad where ad.plan_id = plan_id_receiving) a; insert into receiving_diff (activity_id, change_type) select activity_id, 'modify' from ( (select id as activity_id - from plan_snapshot_activities + from merlin.plan_snapshot_activities where snapshot_id = merge_base_id union select id as activity_id - from activity_directive + from merlin.activity_directive where plan_id = plan_id_receiving) except select activity_id @@ -190,8 +190,8 @@ begin -- upload conflict into conflicting_activities create temp table diff_diff( activity_id integer, - change_type_supplying activity_change_type not null, - change_type_receiving activity_change_type not null + change_type_supplying merlin.activity_change_type not null, + change_type_receiving merlin.activity_change_type not null ); -- this is going to require us to do the "none" operation again on the remaining modifies @@ -200,23 +200,23 @@ begin -- 'delete' against a 'delete' does not enter the merge staging area table -- receiving 'delete' against supplying 'none' does not enter the merge staging area table - insert into merge_staging_area ( + insert into merlin.merge_staging_area ( merge_request_id, activity_id, name, tags, source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type ) -- 'adds' can go directly into the merge staging area table - select _merge_request_id, activity_id, name, metadata.tag_ids_activity_snapshot(s_diff.activity_id, psa.snapshot_id), source_scheduling_goal_id, created_at, + select _merge_request_id, activity_id, name, tags.tag_ids_activity_snapshot(s_diff.activity_id, psa.snapshot_id), source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type from supplying_diff as s_diff - join plan_snapshot_activities psa + join merlin.plan_snapshot_activities psa on s_diff.activity_id = psa.id where snapshot_id = snapshot_id_supplying and change_type = 'add' union -- an 'add' between the receiving plan and merge base is actually a 'none' - select _merge_request_id, activity_id, name, metadata.tag_ids_activity_directive(r_diff.activity_id, ad.plan_id), source_scheduling_goal_id, created_at, - created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, 'none'::activity_change_type + select _merge_request_id, activity_id, name, tags.tag_ids_activity_directive(r_diff.activity_id, ad.plan_id), source_scheduling_goal_id, 
created_at, + created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, 'none'::merlin.activity_change_type from receiving_diff as r_diff - join activity_directive ad + join merlin.activity_directive ad on r_diff.activity_id = ad.id where plan_id = plan_id_receiving and change_type = 'add'; @@ -232,83 +232,85 @@ begin where (change_type_receiving = 'delete' and change_type_supplying = 'delete') or (change_type_receiving = 'delete' and change_type_supplying = 'none'); - insert into merge_staging_area ( + insert into merlin.merge_staging_area ( merge_request_id, activity_id, name, tags, source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type ) -- receiving 'none' and 'modify' against 'none' in the supplying side go into the merge staging area as 'none' - select _merge_request_id, activity_id, name, metadata.tag_ids_activity_directive(diff_diff.activity_id, plan_id), source_scheduling_goal_id, created_at, + select _merge_request_id, activity_id, name, tags.tag_ids_activity_directive(diff_diff.activity_id, plan_id), source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, 'none' from diff_diff - join activity_directive + join merlin.activity_directive on activity_id=id where plan_id = plan_id_receiving and change_type_supplying = 'none' and (change_type_receiving = 'modify' or change_type_receiving = 'none') union -- supplying 'modify' against receiving 'none' go into the merge staging area as 'modify' - select _merge_request_id, activity_id, name, metadata.tag_ids_activity_snapshot(diff_diff.activity_id, snapshot_id), source_scheduling_goal_id, created_at, + select _merge_request_id, activity_id, name, tags.tag_ids_activity_snapshot(diff_diff.activity_id, snapshot_id), source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type_supplying from diff_diff - join plan_snapshot_activities p + join merlin.plan_snapshot_activities p on diff_diff.activity_id = p.id where snapshot_id = snapshot_id_supplying and (change_type_receiving = 'none' and diff_diff.change_type_supplying = 'modify') union -- supplying 'delete' against receiving 'none' go into the merge staging area as 'delete' - select _merge_request_id, activity_id, name, metadata.tag_ids_activity_directive(diff_diff.activity_id, plan_id), source_scheduling_goal_id, created_at, + select _merge_request_id, activity_id, name, tags.tag_ids_activity_directive(diff_diff.activity_id, plan_id), source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type_supplying from diff_diff - join activity_directive p + join merlin.activity_directive p on diff_diff.activity_id = p.id where plan_id = plan_id_receiving and (change_type_receiving = 'none' and diff_diff.change_type_supplying = 'delete'); -- 'modify' against a 'modify' must be checked for equality first. 
with false_modify as ( - select activity_id, name, metadata.tag_ids_activity_directive(dd.activity_id, psa.snapshot_id) as tags, + select activity_id, name, tags.tag_ids_activity_directive(dd.activity_id, psa.snapshot_id) as tags, source_scheduling_goal_id, created_at, start_offset, type, arguments, metadata, anchor_id, anchored_to_start - from plan_snapshot_activities psa + from merlin.plan_snapshot_activities psa join diff_diff dd on dd.activity_id = psa.id where psa.snapshot_id = snapshot_id_supplying and (dd.change_type_receiving = 'modify' and dd.change_type_supplying = 'modify') intersect - select activity_id, name, metadata.tag_ids_activity_directive(dd.activity_id, ad.plan_id) as tags, + select activity_id, name, tags.tag_ids_activity_directive(dd.activity_id, ad.plan_id) as tags, source_scheduling_goal_id, created_at, start_offset, type, arguments, metadata, anchor_id, anchored_to_start from diff_diff dd - join activity_directive ad + join merlin.activity_directive ad on dd.activity_id = ad.id where ad.plan_id = plan_id_receiving and (dd.change_type_supplying = 'modify' and dd.change_type_receiving = 'modify')) - insert into merge_staging_area ( + insert into merlin.merge_staging_area ( merge_request_id, activity_id, name, tags, source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type) - select _merge_request_id, ad.id, ad.name, tags, ad.source_scheduling_goal_id, ad.created_at, ad.created_by, + select _merge_request_id, ad.id, ad.name, tags, ad.source_scheduling_goal_id, ad.created_at, ad.created_by, ad.last_modified_by, ad.start_offset, ad.type, ad.arguments, ad.metadata, ad.anchor_id, ad.anchored_to_start, 'none' - from false_modify fm left join activity_directive ad on (ad.plan_id, ad.id) = (plan_id_receiving, fm.activity_id); + from false_modify fm + left join merlin.activity_directive ad + on (ad.plan_id, ad.id) = (plan_id_receiving, fm.activity_id); -- 'modify' against 'delete' and inequal 'modify' against 'modify' goes into conflict table (aka everything left in diff_diff) - insert into conflicting_activities (merge_request_id, activity_id, change_type_supplying, change_type_receiving) + insert into merlin.conflicting_activities (merge_request_id, activity_id, change_type_supplying, change_type_receiving) select begin_merge._merge_request_id, activity_id, change_type_supplying, change_type_receiving from (select begin_merge._merge_request_id, activity_id from diff_diff except select msa.merge_request_id, activity_id - from merge_staging_area msa) a + from merlin.merge_staging_area msa) a join diff_diff using (activity_id); -- Fail if there are no differences between the snapshot and the plan getting merged validate_non_no_op_status := null; select change_type_receiving - from conflicting_activities + from merlin.conflicting_activities where merge_request_id = _merge_request_id limit 1 into validate_non_no_op_status; if validate_non_no_op_status is null then select change_type - from merge_staging_area msa + from merlin.merge_staging_area msa where merge_request_id = _merge_request_id and msa.change_type != 'none' limit 1 diff --git a/deployment/postgres-init-db/sql/functions/merlin/merging/commit_merge.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/commit_merge.sql index b2a73042ef..0bb075b44b 100644 --- a/deployment/postgres-init-db/sql/functions/merlin/merging/commit_merge.sql +++ b/deployment/postgres-init-db/sql/functions/merlin/merging/commit_merge.sql @@ 
-2,24 +2,24 @@ Commit merge takes all of the contents of the staging area and all of the resolved conflicts and applies the changes to the plan getting merged into. */ -create procedure commit_merge(_request_id integer) +create procedure merlin.commit_merge(_request_id integer) language plpgsql as $$ declare validate_noConflicts integer; plan_id_R integer; snapshot_id_S integer; begin - if(select id from merge_request where id = _request_id) is null then + if(select id from merlin.merge_request where id = _request_id) is null then raise exception 'Invalid merge request id %.', _request_id; end if; -- Stop if this merge is not 'in-progress' - if (select status from merge_request where id = _request_id) != 'in-progress' then + if (select status from merlin.merge_request where id = _request_id) != 'in-progress' then raise exception 'Cannot commit a merge request that is not in-progress.'; end if; -- Stop if any conflicts have not been resolved - select * from conflicting_activities + select * from merlin.conflicting_activities where merge_request_id = _request_id and resolution = 'none' limit 1 into validate_noConflicts; @@ -28,51 +28,51 @@ begin raise exception 'There are unresolved conflicts in merge request %. Cannot commit merge.', _request_id; end if; - select plan_id_receiving_changes from merge_request mr where mr.id = _request_id into plan_id_R; - select snapshot_id_supplying_changes from merge_request mr where mr.id = _request_id into snapshot_id_S; + select plan_id_receiving_changes from merlin.merge_request mr where mr.id = _request_id into plan_id_R; + select snapshot_id_supplying_changes from merlin.merge_request mr where mr.id = _request_id into snapshot_id_S; - insert into merge_staging_area( + insert into merlin.merge_staging_area( merge_request_id, activity_id, name, tags, source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type) -- gather delete data from the opposite tables - select _request_id, activity_id, name, metadata.tag_ids_activity_directive(ca.activity_id, ad.plan_id), + select _request_id, activity_id, name, tags.tag_ids_activity_directive(ca.activity_id, ad.plan_id), source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, - 'delete'::activity_change_type - from conflicting_activities ca - join activity_directive ad + 'delete'::merlin.activity_change_type + from merlin.conflicting_activities ca + join merlin.activity_directive ad on ca.activity_id = ad.id where ca.resolution = 'supplying' and ca.merge_request_id = _request_id and plan_id = plan_id_R and ca.change_type_supplying = 'delete' union - select _request_id, activity_id, name, metadata.tag_ids_activity_snapshot(ca.activity_id, psa.snapshot_id), + select _request_id, activity_id, name, tags.tag_ids_activity_snapshot(ca.activity_id, psa.snapshot_id), source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, - 'delete'::activity_change_type - from conflicting_activities ca - join plan_snapshot_activities psa + 'delete'::merlin.activity_change_type + from merlin.conflicting_activities ca + join merlin.plan_snapshot_activities psa on ca.activity_id = psa.id where ca.resolution = 'receiving' and ca.merge_request_id = _request_id and snapshot_id = snapshot_id_S and ca.change_type_receiving = 'delete' union - select _request_id, activity_id, name, 
metadata.tag_ids_activity_directive(ca.activity_id, ad.plan_id), + select _request_id, activity_id, name, tags.tag_ids_activity_directive(ca.activity_id, ad.plan_id), source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, - 'none'::activity_change_type - from conflicting_activities ca - join activity_directive ad + 'none'::merlin.activity_change_type + from merlin.conflicting_activities ca + join merlin.activity_directive ad on ca.activity_id = ad.id where ca.resolution = 'receiving' and ca.merge_request_id = _request_id and plan_id = plan_id_R and ca.change_type_receiving = 'modify' union - select _request_id, activity_id, name, metadata.tag_ids_activity_snapshot(ca.activity_id, psa.snapshot_id), + select _request_id, activity_id, name, tags.tag_ids_activity_snapshot(ca.activity_id, psa.snapshot_id), source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, - 'modify'::activity_change_type - from conflicting_activities ca - join plan_snapshot_activities psa + 'modify'::merlin.activity_change_type + from merlin.conflicting_activities ca + join merlin.plan_snapshot_activities psa on ca.activity_id = psa.id where ca.resolution = 'supplying' and ca.merge_request_id = _request_id @@ -80,28 +80,28 @@ begin and ca.change_type_supplying = 'modify'; -- Unlock so that updates can be written - update plan + update merlin.plan set is_locked = false where id = plan_id_R; -- Update the plan's activities to match merge-staging-area's activities -- Add - insert into activity_directive( + insert into merlin.activity_directive( id, plan_id, name, source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start ) select activity_id, plan_id_R, name, source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start - from merge_staging_area + from merlin.merge_staging_area where merge_staging_area.merge_request_id = _request_id and change_type = 'add'; -- Modify - insert into activity_directive( + insert into merlin.activity_directive( id, plan_id, "name", source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, "type", arguments, metadata, anchor_id, anchored_to_start ) select activity_id, plan_id_R, "name", source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, "type", arguments, metadata, anchor_id, anchored_to_start - from merge_staging_area + from merlin.merge_staging_area where merge_staging_area.merge_request_id = _request_id and change_type = 'modify' on conflict (id, plan_id) @@ -119,27 +119,27 @@ begin anchored_to_start = excluded.anchored_to_start; -- Tags - delete from metadata.activity_directive_tags adt - using merge_staging_area msa + delete from tags.activity_directive_tags adt + using merlin.merge_staging_area msa where adt.directive_id = msa.activity_id and adt.plan_id = plan_id_R and msa.merge_request_id = _request_id and msa.change_type = 'modify'; - insert into metadata.activity_directive_tags(plan_id, directive_id, tag_id) + insert into tags.activity_directive_tags(plan_id, directive_id, tag_id) select plan_id_R, activity_id, t.id - from merge_staging_area msa - inner join metadata.tags t -- Inner join because it's specifically inserting into a tags-association table, so if there are no valid tags we do not want a null value 
for t.id + from merlin.merge_staging_area msa + inner join tags.tags t -- Inner join because it's specifically inserting into a tags-association table, so if there are no valid tags we do not want a null value for t.id on t.id = any(msa.tags) where msa.merge_request_id = _request_id and (change_type = 'modify' or change_type = 'add') on conflict (directive_id, plan_id, tag_id) do nothing; -- Presets - insert into preset_to_directive(preset_id, activity_id, plan_id) + insert into merlin.preset_to_directive(preset_id, activity_id, plan_id) select pts.preset_id, pts.activity_id, plan_id_R - from merge_staging_area msa - inner join preset_to_snapshot_directive pts using (activity_id) + from merlin.merge_staging_area msa + inner join merlin.preset_to_snapshot_directive pts using (activity_id) where pts.snapshot_id = snapshot_id_S and msa.merge_request_id = _request_id and (msa.change_type = 'add' @@ -149,25 +149,25 @@ begin set preset_id = excluded.preset_id; -- Delete - delete from activity_directive ad - using merge_staging_area msa + delete from merlin.activity_directive ad + using merlin.merge_staging_area msa where ad.id = msa.activity_id and ad.plan_id = plan_id_R and msa.merge_request_id = _request_id and msa.change_type = 'delete'; -- Clean up - delete from conflicting_activities where merge_request_id = _request_id; - delete from merge_staging_area where merge_staging_area.merge_request_id = _request_id; + delete from merlin.conflicting_activities where merge_request_id = _request_id; + delete from merlin.merge_staging_area where merge_staging_area.merge_request_id = _request_id; - update merge_request + update merlin.merge_request set status = 'accepted' where id = _request_id; -- Attach snapshot history - insert into plan_latest_snapshot(plan_id, snapshot_id) + insert into merlin.plan_latest_snapshot(plan_id, snapshot_id) select plan_id_receiving_changes, snapshot_id_supplying_changes - from merge_request + from merlin.merge_request where id = _request_id; end $$; diff --git a/deployment/postgres-init-db/sql/functions/merlin/merging/duplicate_plan.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/duplicate_plan.sql index 5df1c781b9..e254f64b95 100644 --- a/deployment/postgres-init-db/sql/functions/merlin/merging/duplicate_plan.sql +++ b/deployment/postgres-init-db/sql/functions/merlin/merging/duplicate_plan.sql @@ -1,7 +1,7 @@ -- TODO list: -- - duplicate temporal subset of plan -create function duplicate_plan(_plan_id integer, new_plan_name text, new_owner text) +create function merlin.duplicate_plan(_plan_id integer, new_plan_name text, new_owner text) returns integer -- plan_id of the new plan security definer language plpgsql as $$ @@ -10,32 +10,32 @@ create function duplicate_plan(_plan_id integer, new_plan_name text, new_owner t new_plan_id integer; created_snapshot_id integer; begin - select id from plan where plan.id = _plan_id into validate_plan_id; + select id from merlin.plan where plan.id = _plan_id into validate_plan_id; if(validate_plan_id is null) then raise exception 'Plan % does not exist.', _plan_id; end if; - select create_snapshot(_plan_id) into created_snapshot_id; + select merlin.create_snapshot(_plan_id) into created_snapshot_id; - insert into plan(revision, name, model_id, duration, start_time, parent_id, owner, updated_by) + insert into merlin.plan(revision, name, model_id, duration, start_time, parent_id, owner, updated_by) select 0, new_plan_name, model_id, duration, start_time, _plan_id, new_owner, new_owner - from plan where id = _plan_id + 
from merlin.plan where id = _plan_id returning id into new_plan_id; - insert into activity_directive( + insert into merlin.activity_directive( id, plan_id, name, source_scheduling_goal_id, created_at, created_by, last_modified_at, last_modified_by, start_offset, type, arguments, last_modified_arguments_at, metadata, anchor_id, anchored_to_start) select id, new_plan_id, name, source_scheduling_goal_id, created_at, created_by, last_modified_at, last_modified_by, start_offset, type, arguments, last_modified_arguments_at, metadata, anchor_id, anchored_to_start - from activity_directive where activity_directive.plan_id = _plan_id; + from merlin.activity_directive where activity_directive.plan_id = _plan_id; with source_plan as ( select simulation_template_id, arguments, simulation_start_time, simulation_end_time - from simulation + from merlin.simulation where simulation.plan_id = _plan_id ) - update simulation s + update merlin.simulation s set simulation_template_id = source_plan.simulation_template_id, arguments = source_plan.arguments, simulation_start_time = source_plan.simulation_start_time, @@ -43,23 +43,23 @@ begin from source_plan where s.plan_id = new_plan_id; - insert into preset_to_directive(preset_id, activity_id, plan_id) + insert into merlin.preset_to_directive(preset_id, activity_id, plan_id) select preset_id, activity_id, new_plan_id - from preset_to_directive ptd where ptd.plan_id = _plan_id; + from merlin.preset_to_directive ptd where ptd.plan_id = _plan_id; - insert into metadata.plan_tags(plan_id, tag_id) + insert into tags.plan_tags(plan_id, tag_id) select new_plan_id, tag_id - from metadata.plan_tags pt where pt.plan_id = _plan_id; - insert into metadata.activity_directive_tags(plan_id, directive_id, tag_id) + from tags.plan_tags pt where pt.plan_id = _plan_id; + insert into tags.activity_directive_tags(plan_id, directive_id, tag_id) select new_plan_id, directive_id, tag_id - from metadata.activity_directive_tags adt where adt.plan_id = _plan_id; + from tags.activity_directive_tags adt where adt.plan_id = _plan_id; - insert into plan_latest_snapshot(plan_id, snapshot_id) values(new_plan_id, created_snapshot_id); + insert into merlin.plan_latest_snapshot(plan_id, snapshot_id) values(new_plan_id, created_snapshot_id); return new_plan_id; end $$; -comment on function duplicate_plan(plan_id integer, new_plan_name text, new_owner text) is e'' +comment on function merlin.duplicate_plan(plan_id integer, new_plan_name text, new_owner text) is e'' 'Copies all of a given plan''s properties and activities into a new plan with the specified name. When duplicating a plan, a snapshot is created of the original plan. 
Additionally, that snapshot becomes the latest snapshot of the new plan.'; diff --git a/deployment/postgres-init-db/sql/functions/merlin/merging/get_merge_base.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/get_merge_base.sql index 0d1797441e..4fd1a2fd99 100644 --- a/deployment/postgres-init-db/sql/functions/merlin/merging/get_merge_base.sql +++ b/deployment/postgres-init-db/sql/functions/merlin/merging/get_merge_base.sql @@ -1,4 +1,4 @@ -create function get_merge_base(plan_id_receiving_changes integer, snapshot_id_supplying_changes integer) +create function merlin.get_merge_base(plan_id_receiving_changes integer, snapshot_id_supplying_changes integer) returns integer language plpgsql as $$ declare @@ -6,9 +6,9 @@ create function get_merge_base(plan_id_receiving_changes integer, snapshot_id_su begin select * from ( - select get_snapshot_history_from_plan(plan_id_receiving_changes) as ids + select merlin.get_snapshot_history_from_plan(plan_id_receiving_changes) as ids intersect - select get_snapshot_history(snapshot_id_supplying_changes) as ids + select merlin.get_snapshot_history(snapshot_id_supplying_changes) as ids ) as ids order by ids desc diff --git a/deployment/postgres-init-db/sql/functions/merlin/merging/merge_request_state_functions.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/merge_request_state_functions.sql index c93a956292..151dbddf84 100644 --- a/deployment/postgres-init-db/sql/functions/merlin/merging/merge_request_state_functions.sql +++ b/deployment/postgres-init-db/sql/functions/merlin/merging/merge_request_state_functions.sql @@ -1,4 +1,4 @@ -create function create_merge_request(plan_id_supplying integer, plan_id_receiving integer, request_username text) +create function merlin.create_merge_request(plan_id_supplying integer, plan_id_receiving integer, request_username text) returns integer language plpgsql as $$ declare @@ -10,44 +10,43 @@ begin if plan_id_receiving = plan_id_supplying then raise exception 'Cannot create a merge request between a plan and itself.'; end if; - select id from plan where plan.id = plan_id_receiving into validate_planIds; + select id from merlin.plan where plan.id = plan_id_receiving into validate_planIds; if validate_planIds is null then raise exception 'Plan receiving changes (Plan %) does not exist.', plan_id_receiving; end if; - select id from plan where plan.id = plan_id_supplying into validate_planIds; + select id from merlin.plan where plan.id = plan_id_supplying into validate_planIds; if validate_planIds is null then raise exception 'Plan supplying changes (Plan %) does not exist.', plan_id_supplying; end if; - select create_snapshot(plan_id_supplying) into supplying_snapshot_id; + select merlin.create_snapshot(plan_id_supplying) into supplying_snapshot_id; - select get_merge_base(plan_id_receiving, supplying_snapshot_id) into merge_base_snapshot_id; + select merlin.get_merge_base(plan_id_receiving, supplying_snapshot_id) into merge_base_snapshot_id; if merge_base_snapshot_id is null then raise exception 'Cannot create merge request between unrelated plans.'; end if; - - insert into merge_request(plan_id_receiving_changes, snapshot_id_supplying_changes, merge_base_snapshot_id, requester_username) + insert into merlin.merge_request(plan_id_receiving_changes, snapshot_id_supplying_changes, merge_base_snapshot_id, requester_username) values(plan_id_receiving, supplying_snapshot_id, merge_base_snapshot_id, request_username) returning id into merge_request_id; return merge_request_id; end $$; -create 
procedure withdraw_merge_request(request_id integer) +create procedure merlin.withdraw_merge_request(request_id integer) language plpgsql as $$ declare - validate_status merge_request_status; + validate_status merlin.merge_request_status; begin - select status from merge_request where id = request_id into validate_status; + select status from merlin.merge_request where id = request_id into validate_status; if validate_status is null then raise exception 'Merge request % does not exist. Cannot withdraw request.', request_id; elsif validate_status != 'pending' and validate_status != 'withdrawn' then raise exception 'Cannot withdraw request.'; end if; - update merge_request + update merlin.merge_request set status = 'withdrawn' where id = request_id; end @@ -58,58 +57,58 @@ $$; - Then, unlock the to-be-edited plan - Then, change the merge request's status to 'rejected' */ -create procedure deny_merge(request_id integer) +create procedure merlin.deny_merge(request_id integer) language plpgsql as $$ - begin - if(select id from merge_request where id = request_id) is null then - raise exception 'Invalid merge request id %.', request_id; - end if; +begin + if(select id from merlin.merge_request where id = request_id) is null then + raise exception 'Invalid merge request id %.', request_id; + end if; - if (select status from merge_request where id = request_id) != 'in-progress' then - raise exception 'Cannot reject merge not in progress.'; - end if; + if (select status from merlin.merge_request where id = request_id) != 'in-progress' then + raise exception 'Cannot reject merge not in progress.'; + end if; - delete from conflicting_activities where merge_request_id = request_id; - delete from merge_staging_area where merge_staging_area.merge_request_id = deny_merge.request_id; + delete from merlin.conflicting_activities where merge_request_id = request_id; + delete from merlin.merge_staging_area where merge_staging_area.merge_request_id = deny_merge.request_id; - update merge_request - set status = 'rejected' - where merge_request.id = request_id; + update merlin.merge_request + set status = 'rejected' + where merge_request.id = request_id; - update plan - set is_locked = false - where plan.id = (select plan_id_receiving_changes from merge_request where id = request_id); - end - $$; + update merlin.plan + set is_locked = false + where plan.id = (select plan_id_receiving_changes from merlin.merge_request where id = request_id); +end +$$; /* - Discard everything that was in the staging area - Then, unlock the to-be-edited plan - Then, change the merge request's status to 'pending' */ -create procedure cancel_merge(request_id integer) +create procedure merlin.cancel_merge(request_id integer) language plpgsql as $$ - declare - verify_status merge_request_status; +declare + verify_status merlin.merge_request_status; begin - if(select id from merge_request where id = request_id) is null then - raise exception 'Invalid merge request id %.', request_id; - end if; + if(select id from merlin.merge_request where id = request_id) is null then + raise exception 'Invalid merge request id %.', request_id; + end if; - select status from merge_request where id = request_id into verify_status; - if not (verify_status = 'in-progress' or verify_status = 'pending') then - raise exception 'Cannot cancel merge.'; - end if; + select status from merlin.merge_request where id = request_id into verify_status; + if not (verify_status = 'in-progress' or verify_status = 'pending') then + raise exception 'Cannot cancel merge.'; 
+ end if; - delete from conflicting_activities where merge_request_id = request_id; - delete from merge_staging_area where merge_staging_area.merge_request_id = cancel_merge.request_id; + delete from merlin.conflicting_activities where merge_request_id = request_id; + delete from merlin.merge_staging_area where merge_staging_area.merge_request_id = cancel_merge.request_id; - update merge_request - set status = 'pending' - where merge_request.id = request_id; + update merlin.merge_request + set status = 'pending' + where merge_request.id = request_id; - update plan - set is_locked = false - where plan.id = (select plan_id_receiving_changes from merge_request where id = request_id); + update merlin.plan + set is_locked = false + where plan.id = (select plan_id_receiving_changes from merlin.merge_request where id = request_id); end $$; diff --git a/deployment/postgres-init-db/sql/functions/merlin/merging/plan_locked_exception.sql b/deployment/postgres-init-db/sql/functions/merlin/merging/plan_locked_exception.sql new file mode 100644 index 0000000000..996b725265 --- /dev/null +++ b/deployment/postgres-init-db/sql/functions/merlin/merging/plan_locked_exception.sql @@ -0,0 +1,11 @@ +create procedure merlin.plan_locked_exception(plan_id integer) +language plpgsql as $$ + begin + if(select is_locked from merlin.plan p where p.id = plan_id limit 1) then + raise exception 'Plan % is locked.', plan_id; + end if; + end +$$; + +comment on procedure merlin.plan_locked_exception(plan_id integer) is e'' + 'Verify that the specified plan is unlocked, throwing an exception if not.'; diff --git a/deployment/postgres-init-db/sql/functions/merlin/reanchoring_functions.sql b/deployment/postgres-init-db/sql/functions/merlin/reanchoring_functions.sql new file mode 100644 index 0000000000..979a96770f --- /dev/null +++ b/deployment/postgres-init-db/sql/functions/merlin/reanchoring_functions.sql @@ -0,0 +1,76 @@ +create function merlin.anchor_direct_descendents_to_plan(_activity_id int, _plan_id int) + returns setof merlin.activity_directive + language plpgsql as $$ +declare + _total_offset interval; +begin + if _plan_id is null then + raise exception 'Plan ID cannot be null.'; + end if; + if _activity_id is null then + raise exception 'Activity ID cannot be null.'; + end if; + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; + end if; + + with recursive history(activity_id, anchor_id, total_offset) as ( + select ad.id, ad.anchor_id, ad.start_offset + from merlin.activity_directive ad + where (ad.id, ad.plan_id) = (_activity_id, _plan_id) + union + select ad.id, ad.anchor_id, h.total_offset + ad.start_offset + from merlin.activity_directive ad, history h + where (ad.id, ad.plan_id) = (h.anchor_id, _plan_id) + and h.anchor_id is not null + ) select total_offset + from history + where history.anchor_id is null + into _total_offset; + + return query update merlin.activity_directive + set start_offset = start_offset + _total_offset, + anchor_id = null, + anchored_to_start = true + where (anchor_id, plan_id) = (_activity_id, _plan_id) + returning *; +end +$$; +comment on function merlin.anchor_direct_descendents_to_plan(_activity_id integer, _plan_id integer) is e'' +'Given the primary key of an activity, reanchor all anchor chains attached to the activity to the plan.\n' +'In the event of an end-time anchor, this function assumes all simulated activities have a duration 
of 0.'; + +create function merlin.anchor_direct_descendents_to_ancestor(_activity_id int, _plan_id int) + returns setof merlin.activity_directive + language plpgsql as $$ +declare + _current_offset interval; + _current_anchor_id int; +begin + if _plan_id is null then + raise exception 'Plan ID cannot be null.'; + end if; + if _activity_id is null then + raise exception 'Activity ID cannot be null.'; + end if; + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; + end if; + + select start_offset, anchor_id + from merlin.activity_directive + where (id, plan_id) = (_activity_id, _plan_id) + into _current_offset, _current_anchor_id; + + return query + update merlin.activity_directive + set start_offset = start_offset + _current_offset, + anchor_id = _current_anchor_id + where (anchor_id, plan_id) = (_activity_id, _plan_id) + returning *; +end +$$; +comment on function merlin.anchor_direct_descendents_to_ancestor(_activity_id integer, _plan_id integer) is e'' + 'Given the primary key of an activity, reanchor all anchor chains attached to the activity to the anchor of said activity.\n' + 'In the event of an end-time anchor, this function assumes all simulated activities have a duration of 0.'; + diff --git a/deployment/postgres-init-db/sql/functions/merlin/snapshots/create_snapshot.sql b/deployment/postgres-init-db/sql/functions/merlin/snapshots/create_snapshot.sql index 9d54669e5d..3ef5a8386d 100644 --- a/deployment/postgres-init-db/sql/functions/merlin/snapshots/create_snapshot.sql +++ b/deployment/postgres-init-db/sql/functions/merlin/snapshots/create_snapshot.sql @@ -1,29 +1,29 @@ -- Captures the state of a plan and all of its activities -create function create_snapshot(_plan_id integer) +create function merlin.create_snapshot(_plan_id integer) returns integer language plpgsql as $$ begin - return create_snapshot(_plan_id, null, null, null); + return merlin.create_snapshot(_plan_id, null, null, null); end $$; -create function create_snapshot(_plan_id integer, _snapshot_name text, _description text, _user text) +create function merlin.create_snapshot(_plan_id integer, _snapshot_name text, _description text, _user text) returns integer -- snapshot id inserted into the table language plpgsql as $$ declare validate_plan_id integer; inserted_snapshot_id integer; begin - select id from plan where plan.id = _plan_id into validate_plan_id; + select id from merlin.plan where plan.id = _plan_id into validate_plan_id; if validate_plan_id is null then raise exception 'Plan % does not exist.', _plan_id; end if; - insert into plan_snapshot(plan_id, revision, snapshot_name, description, taken_by) + insert into merlin.plan_snapshot(plan_id, revision, snapshot_name, description, taken_by) select id, revision, _snapshot_name, _description, _user - from plan where id = _plan_id + from merlin.plan where id = _plan_id returning snapshot_id into inserted_snapshot_id; - insert into plan_snapshot_activities( + insert into merlin.plan_snapshot_activities( snapshot_id, id, name, source_scheduling_goal_id, created_at, created_by, last_modified_at, last_modified_by, start_offset, type, arguments, last_modified_arguments_at, metadata, anchor_id, anchored_to_start) @@ -32,33 +32,33 @@ begin id, name, source_scheduling_goal_id, created_at, created_by, -- these are the rest of the data for an activity row last_modified_at, last_modified_by, start_offset, type, arguments, 
last_modified_arguments_at, metadata, anchor_id, anchored_to_start - from activity_directive where activity_directive.plan_id = _plan_id; - insert into preset_to_snapshot_directive(preset_id, activity_id, snapshot_id) + from merlin.activity_directive where activity_directive.plan_id = _plan_id; + insert into merlin.preset_to_snapshot_directive(preset_id, activity_id, snapshot_id) select ptd.preset_id, ptd.activity_id, inserted_snapshot_id - from preset_to_directive ptd + from merlin.preset_to_directive ptd where ptd.plan_id = _plan_id; - insert into metadata.snapshot_activity_tags(snapshot_id, directive_id, tag_id) + insert into tags.snapshot_activity_tags(snapshot_id, directive_id, tag_id) select inserted_snapshot_id, directive_id, tag_id - from metadata.activity_directive_tags adt + from tags.activity_directive_tags adt where adt.plan_id = _plan_id; --all snapshots in plan_latest_snapshot for plan plan_id become the parent of the current snapshot - insert into plan_snapshot_parent(snapshot_id, parent_snapshot_id) + insert into merlin.plan_snapshot_parent(snapshot_id, parent_snapshot_id) select inserted_snapshot_id, snapshot_id - from plan_latest_snapshot where plan_latest_snapshot.plan_id = _plan_id; + from merlin.plan_latest_snapshot where plan_latest_snapshot.plan_id = _plan_id; --remove all of those entries from plan_latest_snapshot and add this new snapshot. - delete from plan_latest_snapshot where plan_latest_snapshot.plan_id = _plan_id; - insert into plan_latest_snapshot(plan_id, snapshot_id) values (_plan_id, inserted_snapshot_id); + delete from merlin.plan_latest_snapshot where plan_latest_snapshot.plan_id = _plan_id; + insert into merlin.plan_latest_snapshot(plan_id, snapshot_id) values (_plan_id, inserted_snapshot_id); return inserted_snapshot_id; end; $$; -comment on function create_snapshot(integer) is e'' +comment on function merlin.create_snapshot(integer) is e'' 'See comment on create_snapshot(integer, text, text, text)'; -comment on function create_snapshot(integer, text, text, text) is e'' +comment on function merlin.create_snapshot(integer, text, text, text) is e'' 'Create a snapshot of the specified plan. 
A snapshot consists of:' ' - The plan''s id and revision' ' - All the activities in the plan' diff --git a/deployment/postgres-init-db/sql/functions/merlin/snapshots/plan_history_functions.sql b/deployment/postgres-init-db/sql/functions/merlin/snapshots/plan_history_functions.sql index 7a7f7afed5..1103a58bf6 100644 --- a/deployment/postgres-init-db/sql/functions/merlin/snapshots/plan_history_functions.sql +++ b/deployment/postgres-init-db/sql/functions/merlin/snapshots/plan_history_functions.sql @@ -1,12 +1,12 @@ -- History of Plans notes (planid to planid): -- - Grab all non-null parents -create function get_plan_history(starting_plan_id integer) +create function merlin.get_plan_history(starting_plan_id integer) returns setof integer language plpgsql as $$ declare validate_id integer; begin - select plan.id from plan where plan.id = starting_plan_id into validate_id; + select plan.id from merlin.plan where plan.id = starting_plan_id into validate_id; if validate_id is null then raise exception 'Plan ID % is not present in plan table.', starting_plan_id; end if; @@ -14,33 +14,32 @@ create function get_plan_history(starting_plan_id integer) return query with recursive history(id) as ( values(starting_plan_id) -- base case union - select parent_id from plan - join history on history.id = plan.id and plan.parent_id is not null-- recursive case + select parent_id from merlin.plan p + join history on history.id = p.id and p.parent_id is not null-- recursive case ) select * from history; end $$; - -- History of Snapshot notes (planid to snapshotid(s)): -- - Get the whole history of both -- - Get the max snapshot id of the intersection -create function get_snapshot_history_from_plan(starting_plan_id integer) +create function merlin.get_snapshot_history_from_plan(starting_plan_id integer) returns setof integer language plpgsql as $$ begin return query - select get_snapshot_history(snapshot_id) --runs the recursion - from plan_latest_snapshot where plan_id = starting_plan_id; --supplies input for get_snapshot_history + select merlin.get_snapshot_history(snapshot_id) --runs the recursion + from merlin.plan_latest_snapshot where plan_id = starting_plan_id; --supplies input for get_snapshot_history end $$; -create function get_snapshot_history(starting_snapshot_id integer) +create function merlin.get_snapshot_history(starting_snapshot_id integer) returns setof integer language plpgsql as $$ declare validate_id integer; begin - select plan_snapshot.snapshot_id from plan_snapshot where plan_snapshot.snapshot_id = starting_snapshot_id into validate_id; + select plan_snapshot.snapshot_id from merlin.plan_snapshot where plan_snapshot.snapshot_id = starting_snapshot_id into validate_id; if validate_id is null then raise exception 'Snapshot ID % is not present in plan_snapshot table.', starting_snapshot_id; end if; @@ -48,8 +47,8 @@ begin return query with recursive history(id) as ( values(starting_snapshot_id) --base case union - select parent_snapshot_id from plan_snapshot_parent - join history on id = plan_snapshot_parent.snapshot_id --recursive case + select parent_snapshot_id from merlin.plan_snapshot_parent psp + join history on id = psp.snapshot_id --recursive case ) select * from history; end $$; diff --git a/deployment/postgres-init-db/sql/functions/merlin/snapshots/restore_from_snapshot.sql b/deployment/postgres-init-db/sql/functions/merlin/snapshots/restore_from_snapshot.sql index 219f2a8f99..b607e8465c 100644 --- 
a/deployment/postgres-init-db/sql/functions/merlin/snapshots/restore_from_snapshot.sql +++ b/deployment/postgres-init-db/sql/functions/merlin/snapshots/restore_from_snapshot.sql @@ -1,19 +1,19 @@ -create procedure restore_from_snapshot(_plan_id integer, _snapshot_id integer) +create procedure merlin.restore_from_snapshot(_plan_id integer, _snapshot_id integer) language plpgsql as $$ declare _snapshot_name text; _plan_name text; begin -- Input Validation - select name from plan where id = _plan_id into _plan_name; + select name from merlin.plan where id = _plan_id into _plan_name; if _plan_name is null then raise exception 'Cannot Restore: Plan with ID % does not exist.', _plan_id; end if; - if not exists(select snapshot_id from plan_snapshot where snapshot_id = _snapshot_id) then + if not exists(select snapshot_id from merlin.plan_snapshot where snapshot_id = _snapshot_id) then raise exception 'Cannot Restore: Snapshot with ID % does not exist.', _snapshot_id; end if; - if not exists(select snapshot_id from plan_snapshot where _snapshot_id = snapshot_id and _plan_id = plan_id ) then - select snapshot_name from plan_snapshot where snapshot_id = _snapshot_id into _snapshot_name; + if not exists(select snapshot_id from merlin.plan_snapshot where _snapshot_id = snapshot_id and _plan_id = plan_id ) then + select snapshot_name from merlin.plan_snapshot where snapshot_id = _snapshot_id into _snapshot_name; if _snapshot_name is not null then raise exception 'Cannot Restore: Snapshot ''%'' (ID %) is not a snapshot of Plan ''%'' (ID %)', _snapshot_name, _snapshot_id, _plan_name, _plan_id; @@ -24,7 +24,7 @@ create procedure restore_from_snapshot(_plan_id integer, _snapshot_id integer) end if; -- Catch Plan_Locked - call plan_locked_exception(_plan_id); + call merlin.plan_locked_exception(_plan_id); -- Record the Union of Activities in Plan and Snapshot -- and note which ones have been added since the Snapshot was taken (in_snapshot = false) @@ -34,33 +34,33 @@ create procedure restore_from_snapshot(_plan_id integer, _snapshot_id integer) ); insert into diff(activity_id, in_snapshot) select id as activity_id, true - from plan_snapshot_activities where snapshot_id = _snapshot_id; + from merlin.plan_snapshot_activities where snapshot_id = _snapshot_id; insert into diff (activity_id, in_snapshot) select activity_id, false from( select id as activity_id - from activity_directive + from merlin.activity_directive where plan_id = _plan_id except select activity_id from diff) a; -- Remove any added activities - delete from activity_directive ad + delete from merlin.activity_directive ad using diff d where (ad.id, ad.plan_id) = (d.activity_id, _plan_id) and d.in_snapshot is false; -- Upsert the rest - insert into activity_directive ( + insert into merlin.activity_directive ( id, plan_id, name, source_scheduling_goal_id, created_at, created_by, last_modified_at, last_modified_by, start_offset, type, arguments, last_modified_arguments_at, metadata, anchor_id, anchored_to_start) select psa.id, _plan_id, psa.name, psa.source_scheduling_goal_id, psa.created_at, psa.created_by, psa.last_modified_at, psa.last_modified_by, psa.start_offset, psa.type, psa.arguments, psa.last_modified_arguments_at, psa.metadata, psa.anchor_id, psa.anchored_to_start - from plan_snapshot_activities psa + from merlin.plan_snapshot_activities psa where psa.snapshot_id = _snapshot_id on conflict (id, plan_id) do update -- 'last_modified_at' and 'last_modified_arguments_at' are skipped during update, as triggers will overwrite them to now() 
@@ -77,22 +77,22 @@ create procedure restore_from_snapshot(_plan_id integer, _snapshot_id integer) anchored_to_start = excluded.anchored_to_start; -- Tags - delete from metadata.activity_directive_tags adt + delete from tags.activity_directive_tags adt using diff d where (adt.directive_id, adt.plan_id) = (d.activity_id, _plan_id); - insert into metadata.activity_directive_tags(directive_id, plan_id, tag_id) + insert into tags.activity_directive_tags(directive_id, plan_id, tag_id) select sat.directive_id, _plan_id, sat.tag_id - from metadata.snapshot_activity_tags sat + from tags.snapshot_activity_tags sat where sat.snapshot_id = _snapshot_id on conflict (directive_id, plan_id, tag_id) do nothing; -- Presets - delete from preset_to_directive + delete from merlin.preset_to_directive where plan_id = _plan_id; - insert into preset_to_directive(preset_id, activity_id, plan_id) + insert into merlin.preset_to_directive(preset_id, activity_id, plan_id) select pts.preset_id, pts.activity_id, _plan_id - from preset_to_snapshot_directive pts + from merlin.preset_to_snapshot_directive pts where pts.snapshot_id = _snapshot_id on conflict (activity_id, plan_id) do update set preset_id = excluded.preset_id; @@ -102,5 +102,5 @@ create procedure restore_from_snapshot(_plan_id integer, _snapshot_id integer) end $$; -comment on procedure restore_from_snapshot(_plan_id integer, _snapshot_id integer) is e'' +comment on procedure merlin.restore_from_snapshot(_plan_id integer, _snapshot_id integer) is e'' 'Restore a plan to its state described in the given snapshot.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive.sql index fe6cf8ff7d..9509621387 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive.sql @@ -1,4 +1,4 @@ -create table activity_directive ( +create table merlin.activity_directive ( id integer generated by default as identity, plan_id integer not null, @@ -10,9 +10,9 @@ create table activity_directive ( last_modified_by text, start_offset interval not null, type text not null, - arguments merlin_argument_set not null, + arguments merlin.argument_set not null, last_modified_arguments_at timestamptz not null default now(), - metadata merlin_activity_directive_metadata_set default '{}'::jsonb, + metadata merlin.activity_directive_metadata_set default '{}'::jsonb, anchor_id integer default null, anchored_to_start boolean default true not null, @@ -20,100 +20,92 @@ create table activity_directive ( primary key (id, plan_id), constraint activity_directive_owned_by_plan foreign key (plan_id) - references plan + references merlin.plan on update cascade on delete cascade, -- An activity cannot anchor to an activity in another plan constraint anchor_in_plan foreign key (anchor_id, plan_id) - references activity_directive + references merlin.activity_directive on update cascade on delete restrict, constraint activity_directive_last_modified_by_exists foreign key (last_modified_by) - references metadata.users + references permissions.users on update cascade on delete set null, constraint activity_directive_created_by_exists foreign key (created_by) - references metadata.users + references permissions.users on update cascade on delete set null ); -create index activity_directive_plan_id_index on activity_directive (plan_id); +create index 
activity_directive_plan_id_index on merlin.activity_directive (plan_id); -comment on table activity_directive is e'' +comment on table merlin.activity_directive is e'' 'A single activity_directive within a plan.'; -comment on column activity_directive.id is e'' +comment on column merlin.activity_directive.id is e'' 'The synthetic identifier for this activity_directive.\n' 'Unique within a given plan.'; -comment on column activity_directive.plan_id is e'' +comment on column merlin.activity_directive.plan_id is e'' 'The plan within which this activity_directive is located.'; -comment on column activity_directive.name is e'' +comment on column merlin.activity_directive.name is e'' 'The name of this activity_directive.'; -comment on column activity_directive.source_scheduling_goal_id is e'' +comment on column merlin.activity_directive.source_scheduling_goal_id is e'' 'The scheduling goal that this activity_directive was generated by.'; -comment on column activity_directive.created_at is e'' +comment on column merlin.activity_directive.created_at is e'' 'The time at which this activity_directive was created.'; -comment on column activity_directive.created_by is e'' +comment on column merlin.activity_directive.created_by is e'' 'The user who originally created this activity_directive.'; -comment on column activity_directive.last_modified_at is e'' +comment on column merlin.activity_directive.last_modified_at is e'' 'The time at which this activity_directive was last modified.'; -comment on column activity_directive.last_modified_by is e'' +comment on column merlin.activity_directive.last_modified_by is e'' 'The user who last modified this activity_directive.'; -comment on column activity_directive.last_modified_arguments_at is e'' +comment on column merlin.activity_directive.last_modified_arguments_at is e'' 'The time at which this activity_directive.arguments was last modified.'; -comment on column activity_directive.start_offset is e'' +comment on column merlin.activity_directive.start_offset is e'' 'The non-negative time offset from the start of the plan at which this activity_directive is scheduled.'; -comment on column activity_directive.type is e'' +comment on column merlin.activity_directive.type is e'' 'The type of the activity_directive, as defined in the mission model associated with the plan.'; -comment on column activity_directive.arguments is e'' +comment on column merlin.activity_directive.arguments is e'' 'The set of arguments to this activity_directive, corresponding to the parameters of the associated activity type.'; -comment on column activity_directive.metadata is e'' +comment on column merlin.activity_directive.metadata is e'' 'The metadata associated with this activity_directive.'; -comment on column activity_directive.anchor_id is e'' +comment on column merlin.activity_directive.anchor_id is e'' 'The id of the activity_directive this activity_directive is anchored to. ' 'The value null indicates that this activity_directive is anchored to the plan.'; -comment on column activity_directive.anchored_to_start is e'' +comment on column merlin.activity_directive.anchored_to_start is e'' 'If true, this activity_directive is anchored to the start time of its anchor. 
' 'If false, this activity_directive is anchored to the end time of its anchor.'; -create procedure plan_locked_exception(plan_id integer) -language plpgsql as $$ - begin - if(select is_locked from plan where plan.id = plan_id limit 1) then - raise exception 'Plan % is locked.', plan_id; - end if; - end -$$; - -comment on procedure plan_locked_exception(plan_id integer) is e'' - 'Verify that the plan corresponding to the activity being updated is unlocked, throwing an exception if not.'; +-- Insert Triggers -create function increment_revision_on_insert_activity_directive() +create function merlin.increment_plan_revision_on_directive_insert() returns trigger security definer language plpgsql as $$begin - update plan + update merlin.plan set revision = revision + 1 where id = new.plan_id; return new; end$$; -create trigger increment_revision_on_insert_activity_directive_trigger -after insert on activity_directive +create trigger increment_plan_revision_on_directive_insert_trigger +after insert on merlin.activity_directive for each row -execute function increment_revision_on_insert_activity_directive(); +execute function merlin.increment_plan_revision_on_directive_insert(); -create function increment_revision_on_update_activity_directive() +-- Update Triggers + +create function merlin.increment_plan_revision_on_directive_update() returns trigger security definer language plpgsql as $$begin - update plan + update merlin.plan set revision = revision + 1 where id = new.plan_id or id = old.plan_id; @@ -121,78 +113,74 @@ language plpgsql as $$begin return new; end$$; -create trigger increment_revision_on_update_activity_directive_trigger -after update on activity_directive +create trigger increment_plan_revision_on_directive_update_trigger +after update on merlin.activity_directive for each row -execute function increment_revision_on_update_activity_directive(); +execute function merlin.increment_plan_revision_on_directive_update(); -create function increment_revision_on_delete_activity_directive() +create function merlin.increment_plan_revision_on_directive_delete() returns trigger -security definer +security invoker language plpgsql as $$begin - update plan + update merlin.plan set revision = revision + 1 where id = old.plan_id; return old; end$$; -create trigger increment_revision_on_delete_activity_directive_trigger -after delete on activity_directive +create trigger increment_plan_revision_on_directive_delete_trigger +after delete on merlin.activity_directive for each row -execute function increment_revision_on_delete_activity_directive(); +execute function merlin.increment_plan_revision_on_directive_delete(); -create function generate_activity_directive_name() +create function merlin.generate_activity_directive_name() returns trigger -security definer +security invoker language plpgsql as $$begin - call plan_locked_exception(new.plan_id); - if new.name is null - then new.name = new.type || ' ' || new.id; + call merlin.plan_locked_exception(new.plan_id); + if new.name is null then + new.name = new.type || ' ' || new.id; end if; return new; end$$; -comment on function generate_activity_directive_name() is e'' +comment on function merlin.generate_activity_directive_name() is e'' 'Generates a name for an activity_directive as the activity type + activity id.'; create trigger generate_name_trigger -before insert on activity_directive -for each row execute function generate_activity_directive_name(); +before insert on merlin.activity_directive +for each row execute function 
merlin.generate_activity_directive_name(); -comment on trigger generate_name_trigger on activity_directive is e'' +comment on trigger generate_name_trigger on merlin.activity_directive is e'' 'Generates a name for an activity_directive as the activity type + activity id.'; -create function activity_directive_set_updated_at() - returns trigger - security definer - language plpgsql as $$begin - call plan_locked_exception(new.plan_id); - new.last_modified_at = now(); +create function merlin.set_last_modified_at() +returns trigger +security invoker +language plpgsql as $$begin + new.last_modified_at = now(); return new; end$$; -comment on function activity_directive_set_updated_at() is e'' - 'Sets the last_modified_at field of an activity_directive to the current time.'; - create trigger set_timestamp - before update on activity_directive + before update on merlin.activity_directive for each row -execute function activity_directive_set_updated_at(); +execute function merlin.set_last_modified_at(); -comment on trigger set_timestamp on activity_directive is e'' +comment on trigger set_timestamp on merlin.activity_directive is e'' 'Sets the last_modified_at field of an activity_directive to the current time.'; -create function activity_directive_set_arguments_updated_at() +create function merlin.activity_directive_set_arguments_updated_at() returns trigger security definer language plpgsql as $$ begin - call plan_locked_exception(new.plan_id); + call merlin.plan_locked_exception(new.plan_id); new.last_modified_arguments_at = now(); -- request new validation - update activity_directive_validations + update merlin.activity_directive_validations set last_modified_arguments_at = new.last_modified_arguments_at, status = 'pending' where (directive_id, plan_id) = (new.id, new.plan_id); @@ -200,34 +188,34 @@ $$ begin return new; end $$; -comment on function activity_directive_set_arguments_updated_at() is e'' +comment on function merlin.activity_directive_set_arguments_updated_at() is e'' 'Sets the last_modified_arguments_at field of an activity_directive to the current time.'; create trigger set_arguments_timestamp - before update of arguments on activity_directive + before update of arguments on merlin.activity_directive for each row -execute function activity_directive_set_arguments_updated_at(); +execute function merlin.activity_directive_set_arguments_updated_at(); -comment on trigger set_arguments_timestamp on activity_directive is e'' +comment on trigger set_arguments_timestamp on merlin.activity_directive is e'' 'Sets the last_modified_arguments_at field of an activity_directive to the current time.'; -create function activity_directive_validation_entry() +create function merlin.activity_directive_validation_entry() returns trigger security definer language plpgsql as $$ begin - insert into activity_directive_validations + insert into merlin.activity_directive_validations (directive_id, plan_id, last_modified_arguments_at) values (new.id, new.plan_id, new.last_modified_arguments_at); return new; end $$; create trigger validation_entry_on_insert - after insert on activity_directive + after insert on merlin.activity_directive for each row -execute function activity_directive_validation_entry(); +execute function merlin.activity_directive_validation_entry(); -create function check_activity_directive_metadata() +create function merlin.check_activity_directive_metadata() returns trigger security definer language plpgsql as $$ @@ -238,11 +226,11 @@ language plpgsql as $$ _type text; _subValue jsonb; 
begin - call plan_locked_exception(new.plan_id); + call merlin.plan_locked_exception(new.plan_id); for _key, _value in select * from jsonb_each(new.metadata::jsonb) loop - select schema into _schema from activity_directive_metadata_schema where key = _key; + select schema into _schema from merlin.activity_directive_metadata_schema where key = _key; _type := _schema->>'type'; if _type = 'string' then if jsonb_typeof(_value) != 'string' then @@ -280,97 +268,20 @@ language plpgsql as $$ end$$; create trigger check_activity_directive_metadata_trigger -before insert or update on activity_directive +before insert or update on merlin.activity_directive for each row -execute function check_activity_directive_metadata(); +execute function merlin.check_activity_directive_metadata(); -create function check_locked_on_delete() +create function merlin.check_locked_on_delete() returns trigger security definer language plpgsql as $$ begin - call plan_locked_exception(old.plan_id); + call merlin.plan_locked_exception(old.plan_id); return old; end $$; create trigger check_locked_on_delete_trigger -before delete on activity_directive +before delete on merlin.activity_directive for each row -execute procedure check_locked_on_delete(); - -create function anchor_direct_descendents_to_plan(_activity_id int, _plan_id int) - returns setof activity_directive - language plpgsql as $$ - declare - _total_offset interval; - begin - if _plan_id is null then - raise exception 'Plan ID cannot be null.'; - end if; - if _activity_id is null then - raise exception 'Activity ID cannot be null.'; - end if; - if not exists(select id from activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then - raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; - end if; - - with recursive history(activity_id, anchor_id, total_offset) as ( - select ad.id, ad.anchor_id, ad.start_offset - from activity_directive ad - where (ad.id, ad.plan_id) = (_activity_id, _plan_id) - union - select ad.id, ad.anchor_id, h.total_offset + ad.start_offset - from activity_directive ad, history h - where (ad.id, ad.plan_id) = (h.anchor_id, _plan_id) - and h.anchor_id is not null - ) select total_offset - from history - where history.anchor_id is null - into _total_offset; - - return query update activity_directive - set start_offset = start_offset + _total_offset, - anchor_id = null, - anchored_to_start = true - where (anchor_id, plan_id) = (_activity_id, _plan_id) - returning *; - end - $$; -comment on function anchor_direct_descendents_to_plan(_activity_id integer, _plan_id integer) is e'' -'Given the primary key of an activity, reanchor all anchor chains attached to the activity to the plan.\n' -'In the event of an end-time anchor, this function assumes all simulated activities have a duration of 0.'; - -create function anchor_direct_descendents_to_ancestor(_activity_id int, _plan_id int) - returns setof activity_directive - language plpgsql as $$ -declare - _current_offset interval; - _current_anchor_id int; -begin - if _plan_id is null then - raise exception 'Plan ID cannot be null.'; - end if; - if _activity_id is null then - raise exception 'Activity ID cannot be null.'; - end if; - if not exists(select id from activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then - raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; - end if; - - select start_offset, anchor_id - from activity_directive - where (id, plan_id) = (_activity_id, _plan_id) - into 
_current_offset, _current_anchor_id; - - return query - update activity_directive - set start_offset = start_offset + _current_offset, - anchor_id = _current_anchor_id - where (anchor_id, plan_id) = (_activity_id, _plan_id) - returning *; -end -$$; -comment on function anchor_direct_descendents_to_ancestor(_activity_id integer, _plan_id integer) is e'' - 'Given the primary key of an activity, reanchor all anchor chains attached to the activity to the anchor of said activity.\n' - 'In the event of an end-time anchor, this function assumes all simulated activities have a duration of 0.'; - +execute procedure merlin.check_locked_on_delete(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_changelog.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_changelog.sql index 3bfecfc4e0..574ed16e91 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_changelog.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_changelog.sql @@ -1,4 +1,4 @@ -create table activity_directive_changelog ( +create table merlin.activity_directive_changelog ( revision integer not null, plan_id integer not null, activity_directive_id integer not null, @@ -9,9 +9,9 @@ create table activity_directive_changelog ( changed_by text, start_offset interval not null, type text not null, - arguments merlin_argument_set not null, + arguments merlin.argument_set not null, changed_arguments_at timestamptz not null default now(), - metadata merlin_activity_directive_metadata_set default '{}'::jsonb, + metadata merlin.activity_directive_metadata_set default '{}'::jsonb, anchor_id integer default null, anchored_to_start boolean default true not null, @@ -20,25 +20,25 @@ create table activity_directive_changelog ( primary key (plan_id, activity_directive_id, revision), constraint changelog_references_activity_directive foreign key (activity_directive_id, plan_id) - references activity_directive + references merlin.activity_directive on update cascade on delete cascade, constraint changed_by_exists foreign key (changed_by) - references metadata.users + references permissions.users on update cascade on delete set null ); -comment on table activity_directive_changelog is e'' +comment on table merlin.activity_directive_changelog is e'' 'A changelog that captures the 10 most recent revisions for each activity directive\n' 'See activity_directive comments for descriptions of shared fields'; -create function store_activity_directive_change() +create function merlin.store_activity_directive_change() returns trigger language plpgsql as $$ begin - insert into activity_directive_changelog ( + insert into merlin.activity_directive_changelog ( revision, plan_id, activity_directive_id, @@ -53,7 +53,7 @@ begin anchored_to_start) values ( (select coalesce(max(revision), -1) + 1 - from activity_directive_changelog + from merlin.activity_directive_changelog where plan_id = new.plan_id and activity_directive_id = new.id), new.plan_id, @@ -73,26 +73,26 @@ end $$; create trigger store_activity_directive_change_trigger - after update or insert on activity_directive + after update or insert on merlin.activity_directive for each row - execute function store_activity_directive_change(); + execute function merlin.store_activity_directive_change(); -create function delete_min_activity_directive_revision() +create function merlin.delete_min_activity_directive_revision() returns trigger language 
plpgsql as $$ begin - delete from activity_directive_changelog + delete from merlin.activity_directive_changelog where activity_directive_id = new.activity_directive_id and plan_id = new.plan_id and revision = (select min(revision) - from activity_directive_changelog + from merlin.activity_directive_changelog where activity_directive_id = new.activity_directive_id and plan_id = new.plan_id); return new; end$$; create trigger delete_min_activity_directive_revision_trigger - after insert on activity_directive_changelog + after insert on merlin.activity_directive_changelog for each row when (new.revision > 10) - execute function delete_min_activity_directive_revision(); + execute function merlin.delete_min_activity_directive_revision(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_metadata_schema.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_metadata_schema.sql index 69bad91298..ed3fb93138 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_metadata_schema.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_metadata_schema.sql @@ -1,22 +1,22 @@ -create table activity_directive_metadata_schema ( +create table merlin.activity_directive_metadata_schema ( key text not null primary key, schema jsonb not null, created_at timestamptz not null default now(), updated_at timestamptz not null default now() ); -comment on table activity_directive_metadata_schema is 'e' +comment on table merlin.activity_directive_metadata_schema is 'e' 'Schema for the activity directive metadata.'; -comment on column activity_directive_metadata_schema.key is 'e' +comment on column merlin.activity_directive_metadata_schema.key is 'e' 'Key of the metadata.'; -comment on column activity_directive_metadata_schema.schema is 'e' +comment on column merlin.activity_directive_metadata_schema.schema is 'e' 'Schema of the metadata field.'; -comment on column activity_directive_metadata_schema.created_at is 'e' +comment on column merlin.activity_directive_metadata_schema.created_at is 'e' 'Timestamp when the metadata field was created.'; -comment on column activity_directive_metadata_schema.updated_at is 'e' +comment on column merlin.activity_directive_metadata_schema.updated_at is 'e' 'Timestamp when the metadata field was last updated.'; -create or replace function validate_activity_directive_metadata_schema() +create function merlin.validate_activity_directive_metadata_schema() returns trigger security definer language plpgsql as $$ @@ -50,23 +50,14 @@ create or replace function validate_activity_directive_metadata_schema() $$; create trigger validate_activity_directive_metadata_schema_trigger -before insert or update on activity_directive_metadata_schema +before insert or update on merlin.activity_directive_metadata_schema for each row -execute function validate_activity_directive_metadata_schema(); +execute function merlin.validate_activity_directive_metadata_schema(); -comment on trigger validate_activity_directive_metadata_schema_trigger on activity_directive_metadata_schema is 'e' +comment on trigger validate_activity_directive_metadata_schema_trigger on merlin.activity_directive_metadata_schema is 'e' 'Trigger to validate the metadata schema entries for the activity directive metadata.'; -create or replace function activity_directive_metadata_schema_updated_at() -returns trigger -security definer -language plpgsql as $$begin - new.updated_at = 
now(); - return new; -end$$; - create trigger activity_directive_metadata_schema_updated_at_trigger -before update -on activity_directive_metadata_schema +before update on merlin.activity_directive_metadata_schema for each row -execute procedure activity_directive_metadata_schema_updated_at(); +execute procedure util_functions.set_updated_at(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_validations.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_validations.sql index 0feb40b104..1c0459ee5d 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_validations.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_directive_validations.sql @@ -1,4 +1,4 @@ -create table activity_directive_validations ( +create table merlin.activity_directive_validations ( directive_id integer not null, plan_id integer not null, last_modified_arguments_at timestamptz not null, @@ -9,19 +9,19 @@ create table activity_directive_validations ( primary key (directive_id, plan_id), constraint activity_directive_validations_owned_by_activity_directive foreign key (directive_id, plan_id) - references activity_directive + references merlin.activity_directive on update cascade on delete cascade ); -comment on table activity_directive_validations is e'' +comment on table merlin.activity_directive_validations is e'' 'The activity validations extracted from an activity directive.'; -comment on column activity_directive_validations.directive_id is e'' +comment on column merlin.activity_directive_validations.directive_id is e'' 'The activity directive these validations are extracted from.'; -comment on column activity_directive_validations.plan_id is '' +comment on column merlin.activity_directive_validations.plan_id is '' 'The plan associated with the activity directive these validations are extracted from.'; -comment on column activity_directive_validations.last_modified_arguments_at is e'' +comment on column merlin.activity_directive_validations.last_modified_arguments_at is e'' 'The time at which these argument validations were last modified.'; -comment on column activity_directive_validations.validations is e'' +comment on column merlin.activity_directive_validations.validations is e'' 'The argument validations extracted from an activity directive.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_presets.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_presets.sql index 9d1426ad23..9f691266be 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_presets.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/activity_presets.sql @@ -1,78 +1,33 @@ -create table activity_presets( +create table merlin.activity_presets( id integer generated always as identity primary key, model_id integer not null, name text not null, associated_activity_type text not null, - arguments merlin_argument_set not null, + arguments merlin.argument_set not null, owner text, foreign key (model_id, associated_activity_type) - references activity_type + references merlin.activity_type on delete cascade, unique (model_id, associated_activity_type, name), constraint activity_presets_owner_exists - foreign key (owner) references metadata.users + foreign key (owner) references permissions.users on update cascade on delete set null ); -comment on table activity_presets is 
e'' +comment on table merlin.activity_presets is e'' 'A set of arguments that can be applied to an activity of a given type.'; -comment on column activity_presets.id is e'' +comment on column merlin.activity_presets.id is e'' 'The unique identifier for this activity preset'; -comment on column activity_presets.model_id is e'' +comment on column merlin.activity_presets.model_id is e'' 'The model defining this activity preset is associated with.'; -comment on column activity_presets.name is e'' +comment on column merlin.activity_presets.name is e'' 'The name of this activity preset, unique for an activity type within a mission model.'; -comment on column activity_presets.associated_activity_type is e'' +comment on column merlin.activity_presets.associated_activity_type is e'' 'The activity type with which this activity preset is associated.'; -comment on column activity_presets.arguments is e'' +comment on column merlin.activity_presets.arguments is e'' 'The set of arguments to be applied when this preset is applied.'; -comment on column activity_presets.owner is e'' +comment on column merlin.activity_presets.owner is e'' 'The owner of this activity preset'; - -create table preset_to_directive( - preset_id integer - references activity_presets - on update cascade - on delete cascade, - - activity_id integer, - plan_id integer, - foreign key (activity_id, plan_id) - references activity_directive - on update cascade - on delete cascade, - - constraint one_preset_per_activity_directive - unique (activity_id, plan_id), - - primary key (preset_id, activity_id, plan_id) -); - -comment on table preset_to_directive is e'' - 'Associates presets with activity directives that have been assigned presets.'; - -create table preset_to_snapshot_directive( - preset_id integer - references activity_presets - on update cascade - on delete cascade, - - activity_id integer, - snapshot_id integer, - - foreign key (activity_id, snapshot_id) - references plan_snapshot_activities - on update cascade - on delete cascade, - - constraint one_preset_per_snapshot_directive - unique (activity_id, snapshot_id), - - primary key (preset_id, activity_id, snapshot_id) -); - -comment on table preset_to_snapshot_directive is e'' - 'Associates presets with snapshot activity directives that have been assigned presets.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/anchor_validation_status.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/anchor_validation_status.sql index f5e1a0ba0c..260d5b625e 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/anchor_validation_status.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/anchor_validation_status.sql @@ -1,15 +1,30 @@ -create table anchor_validation_status( +create table merlin.anchor_validation_status( activity_id integer not null, plan_id integer not null, reason_invalid text default null, primary key (activity_id, plan_id), foreign key (activity_id, plan_id) - references activity_directive + references merlin.activity_directive on update cascade on delete cascade ); -create function get_dependent_activities(_activity_id int, _plan_id int) +create index anchor_validation_plan_id_index on merlin.anchor_validation_status (plan_id); + +comment on index merlin.anchor_validation_plan_id_index is e'' + 'A similar index to that on activity_directive, as we often want to filter by plan_id'; + +comment on table merlin.anchor_validation_status is e'' + 'The validation status of the anchor 
of a single activity_directive within a plan.'; +comment on column merlin.anchor_validation_status.activity_id is e'' + 'The synthetic identifier for the activity_directive.\n' + 'Unique within a given plan.'; +comment on column merlin.anchor_validation_status.plan_id is e'' + 'The plan within which the activity_directive is located'; +comment on column merlin.anchor_validation_status.reason_invalid is e'' + 'If null, the anchor is valid. If not null, this contains a reason why the anchor is invalid.'; + +create function merlin.get_dependent_activities(_activity_id int, _plan_id int) returns table(activity_id int, total_offset interval) stable language plpgsql as $$ @@ -17,11 +32,11 @@ begin return query with recursive d_activities(activity_id, anchor_id, anchored_to_start, start_offset, total_offset) as ( select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, ad.start_offset - from activity_directive ad + from merlin.activity_directive ad where (ad.anchor_id, ad.plan_id) = (_activity_id, _plan_id) -- select all activities anchored to this one union select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, da.total_offset + ad.start_offset - from activity_directive ad, d_activities da + from merlin.activity_directive ad, d_activities da where (ad.anchor_id, ad.plan_id) = (da.activity_id, _plan_id) -- select all activities anchored to those in the selection and ad.anchored_to_start -- stop at next end-time anchor ) select da.activity_id, da.total_offset @@ -29,30 +44,15 @@ begin end; $$; -comment on function get_dependent_activities(_activity_id int, _plan_id int) is e'' +comment on function merlin.get_dependent_activities(_activity_id int, _plan_id int) is e'' 'Get the collection of activities that depend on the given activity, with offset relative to the specified activity'; -create index anchor_validation_plan_id_index on anchor_validation_status (plan_id); - -comment on index anchor_validation_plan_id_index is e'' - 'A similar index to that on activity_directive, as we often want to filter by plan_id'; - -comment on table anchor_validation_status is e'' - 'The validation status of the anchor of a single activity_directive within a plan.'; -comment on column anchor_validation_status.activity_id is e'' - 'The synthetic identifier for the activity_directive.\n' - 'Unique within a given plan.'; -comment on column anchor_validation_status.plan_id is e'' - 'The plan within which the activity_directive is located'; -comment on column anchor_validation_status.reason_invalid is e'' - 'If null, the anchor is valid. If not null, this contains a reason why the anchor is invalid.'; - /* An activity directive may have a negative offset from its anchor's start time. If its anchor is anchored to the end time of another activity (or so on up the chain), the activity with a negative offset must come out to have a positive offset relative to that end time anchor. 
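   For illustration, using hypothetical directives and offsets: suppose directive A has start_offset '-30 minutes'
   and is anchored to the start of directive B, and B has start_offset '45 minutes' and is anchored to the end of
   directive C. A's net offset relative to the end-time anchor on C is +15 minutes, which is valid. If B's offset
   were '10 minutes' instead, A's net offset would be -20 minutes, and validate_nonnegative_net_end_offset below
   would record a reason_invalid entry for A in anchor_validation_status.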
*/ -create procedure validate_nonnegative_net_end_offset(_activity_id integer, _plan_id integer) +create procedure merlin.validate_nonnegative_net_end_offset(_activity_id integer, _plan_id integer) security definer language plpgsql as $$ declare @@ -63,7 +63,7 @@ declare _anchored_to_start boolean; begin select anchor_id, start_offset, anchored_to_start - from activity_directive + from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id) into _anchor_id, _start_offset, _anchored_to_start; @@ -77,7 +77,7 @@ begin select _activity_id, _anchor_id, _anchored_to_start, _start_offset, _start_offset union select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, eta.total_offset + ad.start_offset - from activity_directive ad, end_time_anchor eta + from merlin.activity_directive ad, end_time_anchor eta where (ad.id, ad.plan_id) = (eta.anchor_id, _plan_id) and eta.anchor_id is not null -- stop at plan and eta.anchored_to_start -- or stop at end time anchor @@ -89,7 +89,7 @@ begin if end_anchor_id is not null and offset_from_end_anchor < '0' then raise notice 'Activity Directive % has a net negative offset relative to an end-time anchor on Activity Directive %.', _activity_id, end_anchor_id; - insert into anchor_validation_status (activity_id, plan_id, reason_invalid) + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) values (_activity_id, _plan_id, 'Activity Directive ' || _activity_id || ' has a net negative offset relative to an end-time' || ' anchor on Activity Directive ' || end_anchor_id ||'.') on conflict (activity_id, plan_id) do update @@ -99,12 +99,12 @@ begin end if; end $$; -comment on procedure validate_nonnegative_net_end_offset(_activity_id integer, _plan_id integer) is e'' +comment on procedure merlin.validate_nonnegative_net_end_offset(_activity_id integer, _plan_id integer) is e'' 'Returns true if the specified activity has a net negative offset from a non-plan activity end-time anchor. 
Otherwise, returns false.\n' 'If true, writes to anchor_validation_status.'; -- An activity may not have a start time before the plan -create procedure validate_nonegative_net_plan_start(_activity_id integer, _plan_id integer) +create procedure merlin.validate_nonegative_net_plan_start(_activity_id integer, _plan_id integer) security definer language plpgsql as $$ declare @@ -114,7 +114,7 @@ create procedure validate_nonegative_net_plan_start(_activity_id integer, _plan_ _anchored_to_start boolean; begin select anchor_id, start_offset, anchored_to_start - from activity_directive + from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id) into _anchor_id, _start_offset, _anchored_to_start; @@ -123,7 +123,7 @@ create procedure validate_nonegative_net_plan_start(_activity_id integer, _plan_ select _activity_id, _anchor_id, _anchored_to_start, _start_offset, _start_offset union select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, anchors.total_offset + ad.start_offset - from activity_directive ad, anchors + from merlin.activity_directive ad, anchors where anchors.anchor_id is not null -- stop at plan and (ad.id, ad.plan_id) = (anchors.anchor_id, _plan_id) and anchors.anchored_to_start -- or, stop at end-time offset @@ -138,7 +138,7 @@ create procedure validate_nonegative_net_plan_start(_activity_id integer, _plan_ if(net_offset < '0') then raise notice 'Activity Directive % has a net negative offset relative to Plan Start.', _activity_id; - insert into anchor_validation_status (activity_id, plan_id, reason_invalid) + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) values (_activity_id, _plan_id, 'Activity Directive ' || _activity_id || ' has a net negative offset relative to Plan Start.') on conflict (activity_id, plan_id) do update set reason_invalid = 'Activity Directive ' || excluded.activity_id || ' has a net negative offset relative to Plan Start.'; @@ -146,7 +146,7 @@ create procedure validate_nonegative_net_plan_start(_activity_id integer, _plan_ end if; end $$; -comment on procedure validate_nonegative_net_plan_start(_activity_id integer, _plan_id integer) is e'' +comment on procedure merlin.validate_nonegative_net_plan_start(_activity_id integer, _plan_id integer) is e'' 'Returns true if the specified activity has a net negative offset from plan start. Otherwise, returns false.\n' 'If true, writes to anchor_validation_status.'; @@ -161,7 +161,7 @@ comment on procedure validate_nonegative_net_plan_start(_activity_id integer, _p For all other invalid states, it writes to 'anchor_validation_status's 'reason_invalid' field and then returns. If the activity's anchor is valid, then the 'reason_invalid' field on the activity's entry in 'anchor_validation_status' is set to ''. 
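   For illustration, with a hypothetical directive: if directive 5 is updated to anchor to plan start with a
   start_offset of '-2 hours', this deferred trigger calls validate_nonegative_net_plan_start, which records
   'Activity Directive 5 has a net negative offset relative to Plan Start.' as directive 5's reason_invalid in
   anchor_validation_status; a later update that restores a non-negative net offset resets the entry to ''.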
*/ -create function validate_anchors() +create function merlin.validate_anchors() returns trigger security definer language plpgsql as $$ @@ -172,7 +172,7 @@ declare offset_from_plan_start interval; begin -- Clear the reason invalid field (if an exception is thrown, this will be rolled back) - insert into anchor_validation_status (activity_id, plan_id, reason_invalid) + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) values (new.id, new.plan_id, '') on conflict (activity_id, plan_id) do update set reason_invalid = ''; @@ -190,7 +190,7 @@ begin select ad.id, ad.anchor_id, ad.id = any(path), path || ad.id - from activity_directive ad, history h + from merlin.activity_directive ad, history h where (ad.id, ad.plan_id) = (h.anchor_id, new.plan_id) and not is_cycle ) select * from history @@ -205,8 +205,8 @@ begin If its anchor is anchored to the end time of another activity (or so on up the chain), the activity with a negative offset must come out to have a positive offset relative to that end time anchor. */ - call validate_nonnegative_net_end_offset(new.id, new.plan_id); - call validate_nonegative_net_plan_start(new.id, new.plan_id); + call merlin.validate_nonnegative_net_end_offset(new.id, new.plan_id); + call merlin.validate_nonegative_net_plan_start(new.id, new.plan_id); /* Everything below validates that the activities anchored to this one did not become invalid as a result of these changes. @@ -219,7 +219,7 @@ begin select new.id, new.anchor_id, new.anchored_to_start, new.start_offset, new.start_offset union select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, eta.total_offset + ad.start_offset - from activity_directive ad, end_time_anchor eta + from merlin.activity_directive ad, end_time_anchor eta where (ad.id, ad.plan_id) = (eta.anchor_id, new.plan_id) and eta.anchor_id is not null -- stop at plan and eta.anchored_to_start -- or stop at end time anchor @@ -231,7 +231,7 @@ begin -- Not null iff the activity being looked at has some end anchor to another activity in its chain if offset_from_end_anchor is not null then select array_agg(activity_id) - from get_dependent_activities(new.id, new.plan_id) + from merlin.get_dependent_activities(new.id, new.plan_id) where total_offset + offset_from_end_anchor < '0' into invalid_descendant_act_ids; @@ -240,7 +240,7 @@ begin 'There may be additional activities that are invalid relative to this activity.', end_anchor_id, array_to_string(invalid_descendant_act_ids, ','); - insert into anchor_validation_status (activity_id, plan_id, reason_invalid) + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) select id, new.plan_id, 'Activity Directive ' || id || ' has a net negative offset relative to an end-time' || ' anchor on Activity Directive ' || end_anchor_id ||'.' 
from unnest(invalid_descendant_act_ids) as id @@ -255,7 +255,7 @@ begin select new.id, new.anchor_id, new.anchored_to_start, new.start_offset, new.start_offset union select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, anchors.total_offset + ad.start_offset - from activity_directive ad, anchors + from merlin.activity_directive ad, anchors where anchors.anchor_id is not null -- stop at plan and (ad.id, ad.plan_id) = (anchors.anchor_id, new.plan_id) and anchors.anchored_to_start -- or, stop at end-time offset @@ -272,7 +272,7 @@ begin -- Validate descendents invalid_descendant_act_ids := null; select array_agg(activity_id) - from get_dependent_activities(new.id, new.plan_id) + from merlin.get_dependent_activities(new.id, new.plan_id) where total_offset + offset_from_plan_start < '0' into invalid_descendant_act_ids; -- grab all and split @@ -281,7 +281,7 @@ begin 'There may be additional activities that are invalid relative to this activity.', array_to_string(invalid_descendant_act_ids, ','); - insert into anchor_validation_status (activity_id, plan_id, reason_invalid) + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) select id, new.plan_id, 'Activity Directive ' || id || ' has a net negative offset relative to Plan Start.' from unnest(invalid_descendant_act_ids) as id on conflict (activity_id, plan_id) do update @@ -292,17 +292,17 @@ begin -- These are both null iff the activity is anchored to plan end if(offset_from_plan_start is null and offset_from_end_anchor is null) then -- All dependent activities should have no errors, as Plan End can have an offset of any value. - insert into anchor_validation_status (activity_id, plan_id, reason_invalid) + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) select da.activity_id, new.plan_id, '' - from get_dependent_activities(new.id, new.plan_id) as da + from merlin.get_dependent_activities(new.id, new.plan_id) as da on conflict (activity_id, plan_id) do update set reason_invalid = ''; end if; -- Remove the error from the dependent activities that wouldn't have been flagged by the earlier checks. 
- insert into anchor_validation_status (activity_id, plan_id, reason_invalid) + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) select da.activity_id, new.plan_id, '' - from get_dependent_activities(new.id, new.plan_id) as da + from merlin.get_dependent_activities(new.id, new.plan_id) as da where total_offset + offset_from_plan_start >= '0' or total_offset + offset_from_end_anchor >= '0' -- only one of these checks will run depending on which one has `null` behind the offset on conflict (activity_id, plan_id) do update @@ -313,20 +313,20 @@ end $$; create constraint trigger validate_anchors_update_trigger after update - on activity_directive + on merlin.activity_directive deferrable initially deferred for each row when (old.anchor_id is distinct from new.anchor_id -- != but allows for one side to be null or old.anchored_to_start != new.anchored_to_start or old.start_offset != new.start_offset) -execute procedure validate_anchors(); +execute procedure merlin.validate_anchors(); -- The insert trigger is separate in order to allow the update trigger to have a 'when' clause create constraint trigger validate_anchors_insert_trigger after insert - on activity_directive + on merlin.activity_directive deferrable initially deferred for each row -execute procedure validate_anchors(); +execute procedure merlin.validate_anchors(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/activity_directive/preset_to_directive.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/preset_to_directive.sql new file mode 100644 index 0000000000..aee5e2bcca --- /dev/null +++ b/deployment/postgres-init-db/sql/tables/merlin/activity_directive/preset_to_directive.sql @@ -0,0 +1,21 @@ +create table merlin.preset_to_directive( + preset_id integer + references merlin.activity_presets + on update cascade + on delete cascade, + + activity_id integer, + plan_id integer, + foreign key (activity_id, plan_id) + references merlin.activity_directive + on update cascade + on delete cascade, + + constraint one_preset_per_activity_directive + unique (activity_id, plan_id), + + primary key (preset_id, activity_id, plan_id) +); + +comment on table merlin.preset_to_directive is e'' + 'Associates presets with activity directives that have been assigned presets.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/activity_type.sql b/deployment/postgres-init-db/sql/tables/merlin/activity_type.sql index 475927f1ea..25a5fe368e 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/activity_type.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/activity_type.sql @@ -1,33 +1,33 @@ -create table activity_type ( +create table merlin.activity_type ( model_id integer not null, name text not null, - parameters merlin_parameter_set not null, - required_parameters merlin_required_parameter_set not null, + parameters merlin.parameter_set not null, + required_parameters merlin.required_parameter_set not null, computed_attributes_value_schema jsonb, - subsystem integer references metadata.tags + subsystem integer references tags.tags on update cascade on delete restrict, - constraint activity_type_natural_key + constraint activity_type_pkey primary key (model_id, name), - constraint activity_type_owned_by_mission_model + constraint activity_type_mission_model_exists foreign key (model_id) - references mission_model + references merlin.mission_model on delete cascade ); -comment on table activity_type is e'' +comment on table merlin.activity_type is e'' 'A description of a 
parametric activity type supported by the associated mission model.'; -comment on column activity_type.name is e'' +comment on column merlin.activity_type.name is e'' 'The name of this activity type, unique within a mission model.'; -comment on column activity_type.model_id is e'' +comment on column merlin.activity_type.model_id is e'' 'The model defining this activity type.'; -comment on column activity_type.parameters is e'' +comment on column merlin.activity_type.parameters is e'' 'The set of parameters accepted by this activity type.'; -comment on column activity_type.required_parameters is e'' +comment on column merlin.activity_type.required_parameters is e'' 'A description of which parameters are required to be provided to instantiate this activity type'; -comment on column activity_type.computed_attributes_value_schema is e'' +comment on column merlin.activity_type.computed_attributes_value_schema is e'' 'The type of value returned by the effect model of this activity type'; -comment on column activity_type.subsystem is e'' +comment on column merlin.activity_type.subsystem is e'' 'The subsystem this activity type belongs to.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_definition.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_definition.sql index 00c79bff89..92bcfb4d2c 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_definition.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_definition.sql @@ -1,4 +1,4 @@ -create table constraint_definition( +create table merlin.constraint_definition( constraint_id integer not null, revision integer not null default 0, definition text not null, @@ -9,28 +9,28 @@ create table constraint_definition( primary key (constraint_id, revision), constraint constraint_definition_constraint_exists foreign key (constraint_id) - references constraint_metadata + references merlin.constraint_metadata on update cascade on delete cascade, constraint constraint_definition_author_exists foreign key (author) - references metadata.users + references permissions.users on update cascade on delete set null ); -comment on table constraint_definition is e'' +comment on table merlin.constraint_definition is e'' 'The specific revisions of a constraint''s definition'; -comment on column constraint_definition.revision is e'' +comment on column merlin.constraint_definition.revision is e'' 'An identifier of this definition.'; -comment on column constraint_definition.definition is e'' +comment on column merlin.constraint_definition.definition is e'' 'An executable expression in the Merlin constraint language.'; -comment on column constraint_definition.author is e'' +comment on column merlin.constraint_definition.author is e'' 'The user who authored this revision.'; -comment on column constraint_definition.created_at is e'' +comment on column merlin.constraint_definition.created_at is e'' 'When this revision was created.'; -create function constraint_definition_set_revision() +create function merlin.constraint_definition_set_revision() returns trigger volatile language plpgsql as $$ @@ -39,7 +39,7 @@ declare begin -- Grab the current max value of revision, or -1, if this is the first revision select coalesce((select revision - from constraint_definition + from merlin.constraint_definition where constraint_id = new.constraint_id order by revision desc limit 1), -1) @@ -51,6 +51,6 @@ end $$; create trigger constraint_definition_set_revision - before insert 
on constraint_definition + before insert on merlin.constraint_definition for each row - execute function constraint_definition_set_revision(); + execute function merlin.constraint_definition_set_revision(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_metadata.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_metadata.sql index d3d6bd5950..ffd58982ff 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_metadata.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_metadata.sql @@ -1,4 +1,4 @@ -create table constraint_metadata( +create table merlin.constraint_metadata( id integer generated always as identity, name text not null, @@ -15,49 +15,41 @@ create table constraint_metadata( primary key (id), constraint constraint_owner_exists foreign key (owner) - references metadata.users + references permissions.users on update cascade on delete set null, constraint constraint_updated_by_exists foreign key (updated_by) - references metadata.users + references permissions.users on update cascade on delete set null ); -- A partial index is used to enforce name uniqueness only on constraints visible to other users -create unique index name_unique_if_published on constraint_metadata (name) where public; +create unique index name_unique_if_published on merlin.constraint_metadata (name) where public; -comment on table constraint_metadata is e'' +comment on table merlin.constraint_metadata is e'' 'The metadata for a constraint'; -comment on column constraint_metadata.id is e'' +comment on column merlin.constraint_metadata.id is e'' 'The unique identifier of the constraint'; -comment on column constraint_metadata.name is e'' +comment on column merlin.constraint_metadata.name is e'' 'A human-meaningful name.'; -comment on column constraint_metadata.description is e'' +comment on column merlin.constraint_metadata.description is e'' 'A detailed description suitable for long-form documentation.'; -comment on column constraint_metadata.public is e'' +comment on column merlin.constraint_metadata.public is e'' 'Whether this constraint is visible to all users.'; -comment on column constraint_metadata.owner is e'' +comment on column merlin.constraint_metadata.owner is e'' 'The user responsible for this constraint.'; -comment on column constraint_metadata.updated_by is e'' +comment on column merlin.constraint_metadata.updated_by is e'' 'The user who last modified this constraint''s metadata.'; -comment on column constraint_metadata.created_at is e'' +comment on column merlin.constraint_metadata.created_at is e'' 'The time at which this constraint was created.'; -comment on column constraint_metadata.updated_at is e'' +comment on column merlin.constraint_metadata.updated_at is e'' 'The time at which this constraint''s metadata was last modified.'; -create function constraint_metadata_set_updated_at() -returns trigger -security definer -language plpgsql as $$begin - new.updated_at = now(); - return new; -end$$; - create trigger set_timestamp -before update on constraint_metadata +before update on merlin.constraint_metadata for each row -execute function constraint_metadata_set_updated_at(); +execute function util_functions.set_updated_at(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_model_specification.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_model_specification.sql index 101c762058..e5de75bcbd 100644 --- 
a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_model_specification.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_model_specification.sql @@ -1,6 +1,6 @@ -create table constraint_model_specification( +create table merlin.constraint_model_specification( model_id integer not null - references mission_model + references merlin.mission_model on update cascade on delete cascade, constraint_id integer not null, @@ -10,21 +10,21 @@ create table constraint_model_specification( primary key (model_id, constraint_id), constraint model_spec_constraint_exists foreign key (constraint_id) - references constraint_metadata(id) + references merlin.constraint_metadata(id) on update cascade on delete restrict, constraint model_spec_constraint_definition_exists foreign key (constraint_id, constraint_revision) - references constraint_definition(constraint_id, revision) + references merlin.constraint_definition(constraint_id, revision) on update cascade on delete restrict ); -comment on table constraint_model_specification is e'' +comment on table merlin.constraint_model_specification is e'' 'The set of constraints that all plans using the model should include in their constraint specification.'; -comment on column constraint_model_specification.model_id is e'' +comment on column merlin.constraint_model_specification.model_id is e'' 'The model which this specification is for. Half of the primary key.'; -comment on column constraint_model_specification.constraint_id is e'' +comment on column merlin.constraint_model_specification.constraint_id is e'' 'The id of a specific constraint in the specification. Half of the primary key.'; -comment on column constraint_model_specification.constraint_revision is e'' +comment on column merlin.constraint_model_specification.constraint_revision is e'' 'The version of the constraint definition to use. 
Leave NULL to use the latest version.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_run.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_run.sql index d57c50db71..3cf066a5b4 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_run.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_run.sql @@ -1,4 +1,4 @@ -create table constraint_run ( +create table merlin.constraint_run ( constraint_id integer not null, constraint_revision integer not null, simulation_dataset_id integer not null, @@ -13,34 +13,34 @@ create table constraint_run ( primary key (constraint_id, constraint_revision, simulation_dataset_id), constraint constraint_run_to_constraint_definition foreign key (constraint_id, constraint_revision) - references constraint_definition + references merlin.constraint_definition on delete cascade, constraint constraint_run_to_simulation_dataset foreign key (simulation_dataset_id) - references simulation_dataset + references merlin.simulation_dataset on delete cascade, constraint constraint_run_requested_by foreign key (requested_by) - references metadata.users + references permissions.users on update cascade on delete set null ); create index constraint_run_simulation_dataset_id_index - on constraint_run (simulation_dataset_id); + on merlin.constraint_run (simulation_dataset_id); -comment on table constraint_run is e'' +comment on table merlin.constraint_run is e'' 'A single constraint run, used to cache violation results to be reused if the constraint definition is not stale.'; -comment on column constraint_run.constraint_id is e'' +comment on column merlin.constraint_run.constraint_id is e'' 'The constraint that we are evaluating during the run.'; -comment on column constraint_run.constraint_revision is e'' +comment on column merlin.constraint_run.constraint_revision is e'' 'The version of the constraint definition that was checked.'; -comment on column constraint_run.simulation_dataset_id is e'' +comment on column merlin.constraint_run.simulation_dataset_id is e'' 'The simulation dataset id from when the constraint was checked.'; -comment on column constraint_run.results is e'' +comment on column merlin.constraint_run.results is e'' 'Results that were computed during the constraint check.'; -comment on column constraint_run.requested_by is e'' +comment on column merlin.constraint_run.requested_by is e'' 'The user who requested the constraint run.'; -comment on column constraint_run.requested_at is e'' +comment on column merlin.constraint_run.requested_at is e'' 'When the constraint run was created.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_specification.sql b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_specification.sql index 44f17d2893..def39d9239 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_specification.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/constraints/constraint_specification.sql @@ -1,6 +1,6 @@ -create table constraint_specification( +create table merlin.constraint_specification( plan_id integer not null - references plan + references merlin.plan on update cascade on delete cascade, constraint_id integer not null, @@ -11,23 +11,23 @@ create table constraint_specification( primary key (plan_id, constraint_id), constraint plan_spec_constraint_exists foreign key (constraint_id) - references constraint_metadata(id) + references 
merlin.constraint_metadata(id) on update cascade on delete restrict, constraint plan_spec_constraint_definition_exists foreign key (constraint_id, constraint_revision) - references constraint_definition(constraint_id, revision) + references merlin.constraint_definition(constraint_id, revision) on update cascade on delete restrict ); -comment on table constraint_specification is e'' +comment on table merlin.constraint_specification is e'' 'The set of constraints to be checked for a given plan.'; -comment on column constraint_specification.plan_id is e'' +comment on column merlin.constraint_specification.plan_id is e'' 'The plan which this specification is for. Half of the primary key.'; -comment on column constraint_specification.constraint_id is e'' +comment on column merlin.constraint_specification.constraint_id is e'' 'The id of a specific constraint in the specification. Half of the primary key.'; -comment on column constraint_specification.constraint_revision is e'' +comment on column merlin.constraint_specification.constraint_revision is e'' 'The version of the constraint definition to use. Leave NULL to use the latest version.'; -comment on column constraint_specification.enabled is e'' +comment on column merlin.constraint_specification.enabled is e'' 'Whether to run a given constraint. Defaults to TRUE.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/dataset/dataset.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/dataset.sql index 44e745dbce..9f80a7dd4d 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/dataset/dataset.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/dataset/dataset.sql @@ -1,4 +1,4 @@ -create table dataset ( +create table merlin.dataset ( id integer generated always as identity, revision integer not null default 0, @@ -6,15 +6,15 @@ create table dataset ( primary key (id) ); -comment on table dataset is e'' +comment on table merlin.dataset is e'' 'A time-series dataset consisting of profiles and spans.' '\n' 'The actual data this dataset contains is stored in dedicated' 'partitions of their corresponding tables.'; -comment on column dataset.id is e'' +comment on column merlin.dataset.id is e'' 'The synthetic identifier for this dataset.'; -create or replace function delete_partitions() +create function merlin.delete_partitions() returns trigger security definer language plpgsql as $$begin @@ -24,30 +24,26 @@ language plpgsql as $$begin return old; end$$; -do $$ begin create trigger delete_partitions_trigger - before delete on dataset + before delete on merlin.dataset for each row - execute function delete_partitions(); -exception - when duplicate_object then null; -end $$; + execute function merlin.delete_partitions(); -create function delete_dataset_cascade() +create function merlin.delete_dataset_cascade() returns trigger security definer language plpgsql as $$begin - delete from span where span.dataset_id = old.id; + delete from merlin.span s where s.dataset_id = old.id; return old; end$$; create trigger delete_dataset_trigger - after delete on dataset + after delete on merlin.dataset for each row -execute function delete_dataset_cascade(); +execute function merlin.delete_dataset_cascade(); -comment on trigger delete_dataset_trigger on dataset is e'' +comment on trigger delete_dataset_trigger on merlin.dataset is e'' 'Trigger to simulate an ON DELETE CASCADE foreign key constraint between span and dataset. 
The reason to' 'implement this as a trigger is that this single trigger can cascade deletes to any partitions of span.' 'If we used a foreign key, every new partition of span would need to add a new trigger to the dataset' @@ -55,55 +51,57 @@ comment on trigger delete_dataset_trigger on dataset is e'' 'partitions concurrently with inserts to referenced tables, we have chosen to forego foreign keys from partitions' 'to other tables in favor of these hand-written triggers'; -create function allocate_dataset_partitions(dataset_id integer) - returns dataset +create function merlin.allocate_dataset_partitions(dataset_id integer) + returns merlin.dataset security definer language plpgsql as $$ declare - dataset_ref dataset; + dataset_ref merlin.dataset; begin - select * from dataset where id = dataset_id into dataset_ref; + select * from merlin.dataset d where d.id = dataset_id into dataset_ref; if dataset_id is null then raise exception 'Cannot allocate partitions for non-existent dataset id %', dataset_id; end if; - execute 'create table profile_segment_' || dataset_id || ' ( - like profile_segment including defaults including constraints + execute 'create table merlin.profile_segment_' || dataset_id || ' ( + like merlin.profile_segment including defaults including constraints );'; - execute 'alter table profile_segment - attach partition profile_segment_' || dataset_id || ' for values in ('|| dataset_id ||');'; + execute 'alter table merlin.profile_segment + attach partition merlin.profile_segment_' || dataset_id || ' for values in ('|| dataset_id ||');'; - execute 'create table event_' || dataset_id || ' ( - like event including defaults including constraints + execute 'create table merlin.event_' || dataset_id || ' ( + like merlin.event including defaults including constraints );'; - execute 'alter table event - attach partition event_' || dataset_id || ' for values in (' || dataset_id || ');'; + execute 'alter table merlin.event + attach partition merlin.event_' || dataset_id || ' for values in (' || dataset_id || ');'; - execute 'create table span_' || dataset_id || ' ( - like span including defaults including constraints + execute 'create table merlin.span_' || dataset_id || ' ( + like merlin.span including defaults including constraints );'; - execute 'alter table span - attach partition span_' || dataset_id || ' for values in (' || dataset_id || ');'; + execute 'alter table merlin.span + attach partition merlin.span_' || dataset_id || ' for values in (' || dataset_id || ');'; -- Create a self-referencing foreign key on the span partition table. 
We avoid referring to the top level span table -- in order to avoid lock contention with concurrent inserts - call span_add_foreign_key_to_partition('span_' || dataset_id); + call merlin.span_add_foreign_key_to_partition('merlin.span_' || dataset_id); return dataset_ref; end$$; -comment on function allocate_dataset_partitions is e'' +comment on function merlin.allocate_dataset_partitions is e'' 'Creates partition tables for the components of a dataset and attaches them to their partitioned tables.'; -create function call_create_partition() +create function merlin.call_create_partition() returns trigger security invoker - language plpgsql as $$ begin - perform allocate_dataset_partitions(new.id); -return new; -end $$; + language plpgsql as $$ +begin + perform merlin.allocate_dataset_partitions(new.id); + return new; +end +$$; create trigger create_partition_on_simulation - after insert on dataset + after insert on merlin.dataset for each row - execute function call_create_partition(); + execute function merlin.call_create_partition(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/dataset/event.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/event.sql index 6e92ec6e13..25918edf25 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/dataset/event.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/dataset/event.sql @@ -1,5 +1,4 @@ -create table event -( +create table merlin.event ( dataset_id integer not null, real_time interval not null, transaction_index integer not null, @@ -13,30 +12,30 @@ create table event ) partition by list (dataset_id); -comment on table event is e'' +comment on table merlin.event is e'' 'A representation of all events that occurred at a single time point'; -comment on column event.dataset_id is e'' +comment on column merlin.event.dataset_id is e'' 'The dataset this event is part of.'; -comment on column event.real_time is e'' +comment on column merlin.event.real_time is e'' 'The simulation time at which this event takes place'; -comment on column event.transaction_index is e'' +comment on column merlin.event.transaction_index is e'' 'When multiple transactions occur at the same real_time, the transaction index will disambiguate them'; -comment on column event.causal_time is e'' +comment on column merlin.event.causal_time is e'' 'A string that allows any two events at the same real time to be compared for causal relationships.'; -comment on column event.value is e'' +comment on column merlin.event.value is e'' 'The value of this event as a json blob'; -comment on column event.topic_index is e'' +comment on column merlin.event.topic_index is e'' 'The topic of this event'; -create function event_integrity_function() +create function merlin.event_integrity_function() returns trigger security invoker language plpgsql as $$begin if not exists( - select from topic - where topic.dataset_id = new.dataset_id - and topic.topic_index = new.topic_index - for key share of topic) + select from merlin.topic t + where t.dataset_id = new.dataset_id + and t.topic_index = new.topic_index + for key share of t) -- for key share is important: it makes sure that concurrent transactions cannot update -- the columns that compose the topic's key until after this transaction commits. 
then @@ -45,13 +44,13 @@ create function event_integrity_function() return new; end$$; -comment on function event_integrity_function is e'' +comment on function merlin.event_integrity_function is e'' 'Used to simulate a foreign key constraint between event and topic, to avoid acquiring a lock on the' 'topic table when creating a new partition of event. This function checks that a corresponding topic' 'exists for every inserted or updated event. A trigger that calls this function is added separately to each' 'new partition of event.'; create constraint trigger insert_update_event_trigger - after insert or update on event + after insert or update on merlin.event for each row -execute function event_integrity_function(); +execute function merlin.event_integrity_function(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/dataset/profile.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/profile.sql index 17ebf284e9..4ba6907d3f 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/dataset/profile.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/dataset/profile.sql @@ -1,4 +1,4 @@ -create table profile ( +create table merlin.profile ( id integer generated always as identity, dataset_id integer not null, @@ -12,38 +12,38 @@ create table profile ( unique (dataset_id, name), constraint profile_owned_by_dataset foreign key (dataset_id) - references dataset + references merlin.dataset on update cascade on delete cascade ); -comment on table profile is e'' +comment on table merlin.profile is e'' 'The behavior of a resource over time, in the context of a dataset.'; -comment on column profile.dataset_id is e'' +comment on column merlin.profile.dataset_id is e'' 'The dataset this profile is part of.'; -comment on column profile.name is e'' +comment on column merlin.profile.name is e'' 'A human-readable name for this profile, unique within its containing dataset.'; -comment on column profile.type is e'' +comment on column merlin.profile.type is e'' 'The type of behavior this profile expresses. The segments of this profile must abide by this type.'; -comment on column profile.duration is e'' +comment on column merlin.profile.duration is e'' 'The duration of the profile after the start time stored in the dataset.'; -create function delete_profile_cascade() +create function merlin.delete_profile_cascade() returns trigger security invoker language plpgsql as $$begin - delete from profile_segment - where profile_segment.dataset_id = old.dataset_id and profile_segment.profile_id = old.id; + delete from merlin.profile_segment ps + where ps.dataset_id = old.dataset_id and ps.profile_id = old.id; return old; end$$; create trigger delete_profile_trigger - after delete on profile + after delete on merlin.profile for each row -execute function delete_profile_cascade(); +execute function merlin.delete_profile_cascade(); -comment on trigger delete_profile_trigger on profile is e'' +comment on trigger delete_profile_trigger on merlin.profile is e'' 'Trigger to simulate an ON DELETE CASCADE foreign key constraint between profile_segment and profile. The reason to' 'implement this as a trigger is that this single trigger can cascade deletes to any partitions of profile_segment.' 
'If we used a foreign key, every new partition of profile_segment would need to add a new cascade delete trigger to' @@ -51,26 +51,26 @@ comment on trigger delete_profile_trigger on profile is e'' 'new partitions concurrently with inserts to referenced tables, we have chosen to forego foreign keys from partitions' 'to other tables in favor of these hand-written triggers'; -create function update_profile_cascade() +create function merlin.update_profile_cascade() returns trigger security invoker language plpgsql as $$begin if old.id != new.id or old.dataset_id != new.dataset_id then - update profile_segment + update merlin.profile_segment ps set profile_id = new.id, dataset_id = new.dataset_id - where profile_segment.dataset_id = old.dataset_id and profile_segment.profile_id = old.id; + where ps.dataset_id = old.dataset_id and ps.profile_id = old.id; end if; return new; end$$; create trigger update_profile_trigger - after update on profile + after update on merlin.profile for each row -execute function update_profile_cascade(); +execute function merlin.update_profile_cascade(); -comment on trigger update_profile_trigger on profile is e'' +comment on trigger update_profile_trigger on merlin.profile is e'' 'Trigger to simulate an ON UPDATE CASCADE foreign key constraint between profile_segment and profile. The reason to' 'implement this as a trigger is that this single trigger can propagate updates to any partitions of profile_segment.' 'If we used a foreign key, every new partition of profile_segment would need to add a new trigger to the profile' diff --git a/deployment/postgres-init-db/sql/tables/merlin/dataset/profile_segment.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/profile_segment.sql index 4bd7263d42..71896a4618 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/dataset/profile_segment.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/dataset/profile_segment.sql @@ -1,4 +1,4 @@ -create table profile_segment ( +create table merlin.profile_segment ( dataset_id integer not null, profile_id integer not null, @@ -13,7 +13,7 @@ partition by list (dataset_id); -- TODO: Add a database range index for start_offset for efficient time searching. -comment on table profile_segment is e'' +comment on table merlin.profile_segment is e'' 'A piece of a profile associated with a dataset, starting at a particular offset from the dataset basis. ' 'The profile is governed at any time T by the latest profile whose start_offset is no later than T.' '\n' @@ -23,30 +23,30 @@ comment on table profile_segment is e'' 'so defining the segment duration implicitly by whenever the next segment begins avoids redundancy. ' 'In exchange, a trailing NULL segment is necessary if the effective end of a profile must be identified.'; -comment on column profile_segment.dataset_id is e'' +comment on column merlin.profile_segment.dataset_id is e'' 'The dataset this segment''s profile is a part of.' '\n' 'Denormalized for partitioning. 
Should always match ''profile.dataset_id''.'; -comment on column profile_segment.profile_id is e'' +comment on column merlin.profile_segment.profile_id is e'' 'The profile this segment is a part of.'; -comment on column profile_segment.start_offset is e'' +comment on column merlin.profile_segment.start_offset is e'' 'The offset from the dataset start time at which this profile segment takes over the profile''s behavior.'; -comment on column profile_segment.dynamics is e'' +comment on column merlin.profile_segment.dynamics is e'' 'A formal description of the behavior of the resource between this segment and the next.' '\n' 'May be NULL if no behavior is known, thereby canceling any prior behavior.'; -comment on column profile_segment.is_gap is e'' +comment on column merlin.profile_segment.is_gap is e'' 'Whether this segment has a value. If not, the value is not used, and is treated as unknown.'; -create function profile_segment_integrity_function() +create function merlin.profile_segment_integrity_function() returns trigger security invoker language plpgsql as $$begin if not exists( - select from profile - where profile.dataset_id = new.dataset_id - and profile.id = new.profile_id - for key share of profile) + select from merlin.profile p + where p.dataset_id = new.dataset_id + and p.id = new.profile_id + for key share of p) -- for key share is important: it makes sure that concurrent transactions cannot update -- the columns that compose the profile's key until after this transaction commits. then @@ -55,13 +55,13 @@ create function profile_segment_integrity_function() return new; end$$; -comment on function profile_segment_integrity_function is e'' +comment on function merlin.profile_segment_integrity_function is e'' 'Used to simulate a foreign key constraint between profile_segment and profile, to avoid acquiring a lock on the' 'profile table when creating a new partition of profile_segment. This function checks that a corresponding profile' 'exists for every inserted or updated profile_segment. A trigger that calls this function is added separately to each' 'new partition of profile_segment.'; create constraint trigger insert_update_profile_segment_trigger - after insert or update on profile_segment + after insert or update on merlin.profile_segment for each row -execute function profile_segment_integrity_function(); +execute function merlin.profile_segment_integrity_function(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/dataset/span.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/span.sql index 9f6d26eb42..944c459643 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/dataset/span.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/dataset/span.sql @@ -1,4 +1,4 @@ -create table span ( +create table merlin.span ( id integer generated always as identity, dataset_id integer not null, @@ -14,48 +14,48 @@ create table span ( ) partition by list (dataset_id); -comment on table span is e'' +comment on table merlin.span is e'' 'A temporal window of interest. 
A span may be refined by its children, providing additional information over ' 'more specific windows.'; -comment on column span.id is e'' +comment on column merlin.span.id is e'' 'The synthetic identifier for this span.'; -comment on column span.dataset_id is e'' +comment on column merlin.span.dataset_id is e'' 'The dataset this span is part of.'; -comment on column span.parent_id is e'' +comment on column merlin.span.parent_id is e'' 'The span this span refines.'; -comment on column span.start_offset is e'' +comment on column merlin.span.start_offset is e'' 'The offset from the dataset start at which this span begins.'; -comment on column span.duration is e'' +comment on column merlin.span.duration is e'' 'The amount of time this span extends for.'; -comment on column span.type is e'' +comment on column merlin.span.type is e'' 'The type of span, implying the shape of its attributes.'; -comment on column span.attributes is e'' +comment on column merlin.span.attributes is e'' 'A set of named values annotating this span as a whole.'; -create function span_integrity_function() +create function merlin.span_integrity_function() returns trigger security invoker language plpgsql as $$begin - if not exists(select from dataset where dataset.id = new.dataset_id for key share of dataset) + if not exists(select from merlin.dataset d where d.id = new.dataset_id for key share of d) then raise exception 'foreign key violation: there is no dataset with id %', new.dataset_id; end if; return new; end$$; -comment on function span_integrity_function is e'' +comment on function merlin.span_integrity_function() is e'' 'Used to simulate a foreign key constraint between span and dataset, to avoid acquiring a lock on the' 'dataset table when creating a new partition of span. This function checks that a corresponding dataset' 'exists for every inserted or updated span. A trigger that calls this function is added separately to each' 'new partition of span.'; create constraint trigger insert_update_span_trigger - after insert or update on span + after insert or update on merlin.span for each row -execute function span_integrity_function(); +execute function merlin.span_integrity_function(); -create procedure span_add_foreign_key_to_partition(table_name varchar) +create procedure merlin.span_add_foreign_key_to_partition(table_name varchar) security invoker language plpgsql as $$begin execute 'alter table ' || table_name || ' add constraint span_has_parent_span @@ -65,6 +65,6 @@ create procedure span_add_foreign_key_to_partition(table_name varchar) on delete cascade;'; end$$; -comment on procedure span_add_foreign_key_to_partition is e'' +comment on procedure merlin.span_add_foreign_key_to_partition is e'' 'Creates a self-referencing foreign key on a particular partition of the span table. This should be called' 'on every partition as soon as it is created'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/dataset/topic.sql b/deployment/postgres-init-db/sql/tables/merlin/dataset/topic.sql index 33a3c67f25..934fa972a0 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/dataset/topic.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/dataset/topic.sql @@ -1,12 +1,10 @@ -create table topic -( +create table merlin.topic ( dataset_id integer not null, topic_index integer not null, name text, value_schema jsonb, - -- It would make sense for name to be part of the topic's key. This requires enforcing that topics -- have unique names per simulation. 
Since, as of writing, this is not the case, topics instead use -- an integer topic_index. @@ -18,37 +16,39 @@ create table topic -- primary key (dataset_id, name), constraint topic_owned_by_dataset foreign key (dataset_id) - references dataset + references merlin.dataset on update cascade on delete cascade ); -comment on table topic is e'' +comment on table merlin.topic is e'' 'A representation of all topics that occurred at a single time point'; -comment on column topic.dataset_id is e'' +comment on column merlin.topic.dataset_id is e'' 'The dataset this topic is part of.'; -comment on column topic.topic_index is e'' +comment on column merlin.topic.topic_index is e'' 'A unique number per simulation run that identifies this topic'; -comment on column topic.value_schema is e'' +comment on column merlin.topic.value_schema is e'' 'The value schema describing the value of this topic'; -comment on column topic.name is e'' +comment on column merlin.topic.name is e'' 'The human readable name of this topic'; -create function delete_topic_cascade() +create function merlin.delete_topic_cascade() returns trigger security invoker - language plpgsql as $$begin - delete from event - where event.topic_index = old.topic_index and event.dataset_id = old.dataset_id; + language plpgsql as $$ +begin + delete from merlin.event e + where e.topic_index = old.topic_index and e.dataset_id = old.dataset_id; return old; -end$$; +end +$$; create trigger delete_topic_trigger - after delete on topic + after delete on merlin.topic for each row -execute function delete_topic_cascade(); +execute function merlin.delete_topic_cascade(); -comment on trigger delete_topic_trigger on topic is e'' +comment on trigger delete_topic_trigger on merlin.topic is e'' 'Trigger to simulate an ON DELETE CASCADE foreign key constraint between event and topic. The reason to' 'implement this as a trigger is that this single trigger can cascade deletes to any partitions of event.' 'If we used a foreign key, every new partition of event would need to add a new trigger to the topic' @@ -56,26 +56,26 @@ comment on trigger delete_topic_trigger on topic is e'' 'partitions concurrently with inserts to referenced tables, we have chosen to forego foreign keys from partitions' 'to other tables in favor of these hand-written triggers'; -create function update_topic_cascade() +create function merlin.update_topic_cascade() returns trigger security invoker language plpgsql as $$begin if old.topic_index != new.topic_index or old.dataset_id != new.dataset_id then - update event + update merlin.event e set topic_index = new.topic_index, dataset_id = new.dataset_id - where event.dataset_id = old.dataset_id and event.topic_index = old.topic_index; + where e.dataset_id = old.dataset_id and e.topic_index = old.topic_index; end if; return new; end$$; create trigger update_topic_trigger - after update on topic + after update on merlin.topic for each row -execute function update_topic_cascade(); +execute function merlin.update_topic_cascade(); -comment on trigger update_topic_trigger on topic is e'' +comment on trigger update_topic_trigger on merlin.topic is e'' 'Trigger to simulate an ON UPDATE CASCADE foreign key constraint between event and topic. The reason to' 'implement this as a trigger is that this single trigger can propagate updates to any partitions of event.' 
'If we used a foreign key, every new partition of event would need to add a new trigger to the topic' diff --git a/deployment/postgres-init-db/sql/tables/merlin/merging/conflicting_activities.sql b/deployment/postgres-init-db/sql/tables/merlin/merging/conflicting_activities.sql index 4d6b624a86..c977748154 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/merging/conflicting_activities.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/merging/conflicting_activities.sql @@ -1,38 +1,37 @@ -- Stores a list of all activities in conflict in a merge -create table conflicting_activities( - merge_request_id integer - references merge_request - on update cascade - on delete cascade, - activity_id integer, - primary key (activity_id, merge_request_id), +create table merlin.conflicting_activities( + merge_request_id integer + references merlin.merge_request + on update cascade + on delete cascade, + activity_id integer, - change_type_supplying activity_change_type - not null - check ( change_type_supplying = 'delete' or change_type_supplying = 'modify' ), - change_type_receiving activity_change_type - not null - check ( change_type_receiving = 'delete' or change_type_receiving = 'modify' ), - resolution conflict_resolution default 'none' + primary key (activity_id, merge_request_id), + + change_type_supplying merlin.activity_change_type not null + check ( change_type_supplying = 'delete' or change_type_supplying = 'modify' ), + change_type_receiving merlin.activity_change_type not null + check ( change_type_receiving = 'delete' or change_type_receiving = 'modify' ), + resolution merlin.conflict_resolution default 'none' ); -comment on table conflicting_activities is e'' +comment on table merlin.conflicting_activities is e'' 'An activity directive in an in-progress merge ' 'where the supplying, receiving, and merge base versions of this activity directive are all different.'; -comment on column conflicting_activities.merge_request_id is e'' +comment on column merlin.conflicting_activities.merge_request_id is e'' 'The merge request associated with this conflicting activity.\n' 'Half of the natural key associated with this table, alongside activity_id.'; -comment on column conflicting_activities.activity_id is e'' +comment on column merlin.conflicting_activities.activity_id is e'' 'The activity directive that is in conflict.\n' 'Half of the natural key associated with this table, alongside merge_request_id.'; -comment on column conflicting_activities.change_type_supplying is e'' +comment on column merlin.conflicting_activities.change_type_supplying is e'' 'The type of change that has occurred between the merge base and the version of this activity' ' in the supplying plan.\n' 'Must be either "delete" or "modify".'; -comment on column conflicting_activities.change_type_receiving is e'' +comment on column merlin.conflicting_activities.change_type_receiving is e'' 'The type of change that has occurred between the merge base and the version of this activity' ' in the receiving plan.\n' 'Must be either "delete" or "modify".'; -comment on column conflicting_activities.resolution is e'' +comment on column merlin.conflicting_activities.resolution is e'' 'The version of this activity to be used when committing this merge.\n' 'Can be either "none", "receiving" or "supplying".'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/merging/merge_comments.sql b/deployment/postgres-init-db/sql/tables/merlin/merging/merge_comments.sql index 5ae00765fc..a31b01616f 100644 --- 
a/deployment/postgres-init-db/sql/tables/merlin/merging/merge_comments.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/merging/merge_comments.sql @@ -1,4 +1,4 @@ -create table merge_request_comment( +create table merlin.merge_request_comment( comment_id integer generated always as identity primary key, merge_request_id integer, commenter_username text, @@ -6,23 +6,23 @@ create table merge_request_comment( constraint comment_owned_by_merge_request foreign key (merge_request_id) - references merge_request + references merlin.merge_request on delete cascade, constraint merge_request_commenter_exists foreign key (commenter_username) - references metadata.users + references permissions.users on update cascade on delete set null ); -comment on table merge_request_comment is e'' +comment on table merlin.merge_request_comment is e'' 'A comment left on a given merge request.'; -comment on column merge_request_comment.comment_id is e'' +comment on column merlin.merge_request_comment.comment_id is e'' 'The synthetic identifier for this comment.'; -comment on column merge_request_comment.merge_request_id is e'' +comment on column merlin.merge_request_comment.merge_request_id is e'' 'The id of the merge request associated with this comment.'; -comment on column merge_request_comment.commenter_username is e'' +comment on column merlin.merge_request_comment.commenter_username is e'' 'The user who left this comment.'; -comment on column merge_request_comment.comment_text is e'' +comment on column merlin.merge_request_comment.comment_text is e'' 'The contents of this comment.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/merging/merge_request.sql b/deployment/postgres-init-db/sql/tables/merlin/merging/merge_request.sql index 6049c2d5dd..98abbbdb83 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/merging/merge_request.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/merging/merge_request.sql @@ -1,55 +1,47 @@ -create table merge_request( +create table merlin.merge_request( id integer generated always as identity primary key, plan_id_receiving_changes integer, snapshot_id_supplying_changes integer, merge_base_snapshot_id integer not null, - status merge_request_status default 'pending', + status merlin.merge_request_status default 'pending', requester_username text, reviewer_username text, updated_at timestamptz not null default now(), constraint merge_request_requester_exists foreign key (requester_username) - references metadata.users + references permissions.users on update cascade on delete set null, constraint merge_request_reviewer_exists foreign key (reviewer_username) - references metadata.users + references permissions.users on update cascade on delete set null ); -comment on table merge_request is e'' +comment on table merlin.merge_request is e'' 'A request to merge the state of the activities from one plan onto another.'; -comment on column merge_request.id is e'' +comment on column merlin.merge_request.id is e'' 'The synthetic identifier for this merge request.'; -comment on column merge_request.plan_id_receiving_changes is e'' +comment on column merlin.merge_request.plan_id_receiving_changes is e'' 'The plan id of the plan to receive changes as a result of this merge request being processed and committed.' '\nAlso known as "Target".'; -comment on column merge_request.snapshot_id_supplying_changes is e'' +comment on column merlin.merge_request.snapshot_id_supplying_changes is e'' 'The snapshot id used to supply changes when this merge request is processed.' 
'\nAlso known as "Source".'; -comment on column merge_request.merge_base_snapshot_id is e'' +comment on column merlin.merge_request.merge_base_snapshot_id is e'' 'The snapshot id that is the nearest common ancestor between the ' 'plan_id_receiving_changes and the snapshot_id_supplying_changes of this merge request.'; -comment on column merge_request.status is e'' +comment on column merlin.merge_request.status is e'' 'The current status of this merge request.'; -comment on column merge_request.requester_username is e'' +comment on column merlin.merge_request.requester_username is e'' 'The user who created this merge request.'; -comment on column merge_request.reviewer_username is e'' +comment on column merlin.merge_request.reviewer_username is e'' 'The user who reviews this merge request. Is empty until the request enters review.'; -create function merge_request_set_updated_at() -returns trigger -security definer -language plpgsql as $$begin - new.updated_at = now(); - return new; -end$$; - create trigger set_timestamp - before update or insert on merge_request + before update or insert on merlin.merge_request for each row -execute function merge_request_set_updated_at(); +execute function util_functions.set_updated_at(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/merging/merge_staging_area.sql b/deployment/postgres-init-db/sql/tables/merlin/merging/merge_staging_area.sql index 8bde0b8971..820f46554c 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/merging/merge_staging_area.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/merging/merge_staging_area.sql @@ -11,9 +11,9 @@ Gets added to by: */ -- TODO: Consider removing some of the defaults from this table? Decision depends on if we can have an activity -- that doesn't have a field in this area somehow. 
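-- A minimal illustrative query, not part of this migration (merge request id 1 is hypothetical),
-- showing how the staged changes for a merge can be reviewed together with any recorded conflicts,
-- using the table defined below alongside merlin.conflicting_activities:
--
--   select activity_id, msa.change_type, ca.resolution
--   from merlin.merge_staging_area msa
--   left join merlin.conflicting_activities ca using (merge_request_id, activity_id)
--   where msa.merge_request_id = 1;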
-create table merge_staging_area( +create table merlin.merge_staging_area( merge_request_id integer - references merge_request + references merlin.merge_request on update cascade on delete cascade, activity_id integer generated by default as identity, @@ -26,46 +26,46 @@ create table merge_staging_area( last_modified_by text, start_offset interval not null, type text not null, - arguments merlin_argument_set not null, - metadata merlin_activity_directive_metadata_set default '{}'::jsonb, + arguments merlin.argument_set not null, + metadata merlin.activity_directive_metadata_set default '{}'::jsonb, anchor_id integer default null, anchored_to_start boolean default true not null, - change_type activity_change_type not null, + change_type merlin.activity_change_type not null, primary key (activity_id, merge_request_id) ); -comment on table merge_staging_area is e'' +comment on table merlin.merge_staging_area is e'' 'The staged version of an activity directive in an in-progress merge to be committed onto the plan receiving changes.'; -comment on column merge_staging_area.merge_request_id is e'' +comment on column merlin.merge_staging_area.merge_request_id is e'' 'The merge request associated with this staged activity.\n' 'Half of the natural key associated with this table, alongside activity_id.'; -comment on column merge_staging_area.activity_id is e'' +comment on column merlin.merge_staging_area.activity_id is e'' 'The identifier of the staged activity directive.\n' 'Half of the natural key associated with this table, alongside merge_request_id.'; -comment on column merge_staging_area.name is e'' +comment on column merlin.merge_staging_area.name is e'' 'The name of this activity directive to be committed.'; -comment on column merge_staging_area.tags is e'' +comment on column merlin.merge_staging_area.tags is e'' 'The tags of this activity directive to be committed.'; -comment on column merge_staging_area.source_scheduling_goal_id is e'' +comment on column merlin.merge_staging_area.source_scheduling_goal_id is e'' 'The id of the scheduling goal that generated this activity directive to be committed.'; -comment on column merge_staging_area.created_at is e'' +comment on column merlin.merge_staging_area.created_at is e'' 'The creation time of this activity directive to be committed.'; -comment on column merge_staging_area.start_offset is e'' +comment on column merlin.merge_staging_area.start_offset is e'' 'The start offset of this activity directive to be committed.'; -comment on column merge_staging_area.type is e'' +comment on column merlin.merge_staging_area.type is e'' 'The type of this activity directive to be committed.'; -comment on column merge_staging_area.arguments is e'' +comment on column merlin.merge_staging_area.arguments is e'' 'The set of arguments to this activity directive to be committed.'; -comment on column merge_staging_area.metadata is e'' +comment on column merlin.merge_staging_area.metadata is e'' 'The metadata of this activity directive to be committed.'; -comment on column merge_staging_area.anchor_id is e'' +comment on column merlin.merge_staging_area.anchor_id is e'' 'The identifier of the anchor of this activity directive to be committed.'; -comment on column merge_staging_area.anchored_to_start is e'' +comment on column merlin.merge_staging_area.anchored_to_start is e'' 'The status of whether this activity directive is anchored to its anchor''s start time to be committed.'; -comment on column merge_staging_area.change_type is e'' +comment on column 
merlin.merge_staging_area.change_type is e'' 'The type of change that has occurred between the version of this activity in the supplying plan' ' and the version in the receiving plan, from the perspective of the receiving plan.\n' 'Can be either "none", "add", "delete", or "modify".'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/mission_model.sql b/deployment/postgres-init-db/sql/tables/merlin/mission_model.sql index ae860a33b3..34b936e84f 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/mission_model.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/mission_model.sql @@ -1,4 +1,4 @@ -create table mission_model ( +create table merlin.mission_model ( id integer generated always as identity, revision integer not null default 0, @@ -18,54 +18,48 @@ create table mission_model ( unique (mission, name, version), constraint mission_model_references_jar foreign key (jar_id) - references uploaded_file + references merlin.uploaded_file on update cascade on delete restrict, constraint mission_model_owner_exists - foreign key (owner) references metadata.users + foreign key (owner) references permissions.users on update cascade on delete set null ); -comment on table mission_model is e'' +comment on table merlin.mission_model is e'' 'A Merlin simulation model for a mission.'; -comment on column mission_model.id is e'' +comment on column merlin.mission_model.id is e'' 'The synthetic identifier for this mission model.'; -comment on column mission_model.revision is e'' +comment on column merlin.mission_model.revision is e'' 'A monotonic clock that ticks for every change to this mission model.'; -comment on column mission_model.mission is e'' +comment on column merlin.mission_model.mission is e'' 'A human-meaningful identifier for the mission described by this model.'; -comment on column mission_model.name is e'' +comment on column merlin.mission_model.name is e'' 'A human-meaningful model name.'; -comment on column mission_model.version is e'' +comment on column merlin.mission_model.version is e'' 'A human-meaningful version qualifier.'; -comment on column mission_model.owner is e'' +comment on column merlin.mission_model.owner is e'' 'A human-meaningful identifier for the user responsible for this model.'; -comment on column mission_model.jar_id is e'' +comment on column merlin.mission_model.jar_id is e'' 'An uploaded JAR file defining the mission model.'; -comment on column mission_model.created_at is e'' +comment on column merlin.mission_model.created_at is e'' 'The time this mission model was uploaded into Aerie.'; -comment on column mission_model.description is e'' +comment on column merlin.mission_model.description is e'' 'A human-meaningful description of the mission model.'; +create trigger increment_revision_mission_model_update +before update on merlin.mission_model +for each row +when (pg_trigger_depth() < 1) +execute function util_functions.increment_revision_update(); -create function increment_revision_on_update_mission_model() -returns trigger -security definer -language plpgsql as $$begin - update mission_model - set revision = revision + 1 - where id = new.id; - - return new; -end$$; - -create function increment_revision_on_update_mission_model_jar() +create function merlin.increment_revision_mission_model_jar_update() returns trigger security definer language plpgsql as $$begin - update mission_model + update merlin.mission_model set revision = revision + 1 where jar_id = new.id or jar_id = old.id; @@ -73,13 +67,7 @@ language plpgsql as $$begin return new; 
end$$; -create trigger increment_revision_on_update_mission_model_trigger -after update on mission_model -for each row -when (pg_trigger_depth() < 1) -execute function increment_revision_on_update_mission_model(); - -create trigger increment_revision_on_update_mission_model_jar_trigger -after update on uploaded_file +create trigger increment_revision_mission_model_jar_update_trigger +after update on merlin.uploaded_file for each row -execute function increment_revision_on_update_mission_model_jar(); +execute function merlin.increment_revision_mission_model_jar_update(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/mission_model_parameters.sql b/deployment/postgres-init-db/sql/tables/merlin/mission_model_parameters.sql index 1a57849653..c8ef7a9ca3 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/mission_model_parameters.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/mission_model_parameters.sql @@ -1,24 +1,24 @@ -create table mission_model_parameters ( +create table merlin.mission_model_parameters ( model_id integer not null, revision integer not null default 0, - parameters merlin_parameter_set not null, + parameters merlin.parameter_set not null, constraint mission_model_parameter_natural_key primary key (model_id), constraint mission_model_parameter_owned_by_mission_model foreign key (model_id) - references mission_model + references merlin.mission_model on update cascade on delete cascade ); -comment on table mission_model_parameters is e'' +comment on table merlin.mission_model_parameters is e'' 'The model parameters extracted from a mission model.'; -comment on column mission_model_parameters.model_id is e'' +comment on column merlin.mission_model_parameters.model_id is e'' 'The model these parameters are extracted from.'; -comment on column mission_model_parameters.revision is e'' +comment on column merlin.mission_model_parameters.revision is e'' 'The revision of the model these parameters are extracted from.'; -comment on column mission_model_parameters.parameters is e'' +comment on column merlin.mission_model_parameters.parameters is e'' 'The Merlin parameter definitions extracted from a mission model.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/plan.sql b/deployment/postgres-init-db/sql/tables/merlin/plan.sql index c84082cfec..ea7ef68164 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/plan.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/plan.sql @@ -1,4 +1,4 @@ -create table plan ( +create table merlin.plan ( id integer generated always as identity check ( id > 0 ), revision integer not null default 0, @@ -8,7 +8,7 @@ create table plan ( start_time timestamptz not null, parent_id integer - references plan + references merlin.plan on update cascade, is_locked boolean not null default false, @@ -26,144 +26,116 @@ create table plan ( unique (name), constraint plan_uses_model foreign key (model_id) - references mission_model + references merlin.mission_model on update cascade on delete set null, constraint plan_owner_exists foreign key (owner) - references metadata.users + references permissions.users on update cascade on delete set null, constraint plan_updated_by_exists foreign key (updated_by) - references metadata.users + references permissions.users on update cascade on delete set null ); -create index plan_model_id_index on plan (model_id); +create index plan_model_id_index on merlin.plan (model_id); - -comment on table plan is e'' +comment on table merlin.plan is e'' 'A set of activities scheduled against a 
mission model.'; -comment on column plan.id is e'' +comment on column merlin.plan.id is e'' 'The synthetic identifier for this plan.'; -comment on column plan.revision is e'' +comment on column merlin.plan.revision is e'' 'A monotonic clock that ticks for every change to this plan.'; -comment on column plan.name is e'' +comment on column merlin.plan.name is e'' 'A human-readable name for this plan. Unique amongst all plans.'; -comment on column plan.model_id is e'' +comment on column merlin.plan.model_id is e'' 'The mission model used to simulate and validate the plan.' '\n' 'May be NULL if the mission model the plan references has been deleted.'; -comment on column plan.duration is e'' +comment on column merlin.plan.duration is e'' 'The duration over which this plan extends.'; -comment on column plan.start_time is e'' +comment on column merlin.plan.start_time is e'' 'The time at which the plan''s effective span begins.'; -comment on column plan.parent_id is e'' +comment on column merlin.plan.parent_id is e'' 'The plan id of the parent of this plan. May be NULL if this plan does not have a parent.'; -comment on column plan.is_locked is e'' +comment on column merlin.plan.is_locked is e'' 'A boolean representing whether this plan can be deleted and if changes can happen to the activities of this plan.'; -comment on column plan.created_at is e'' +comment on column merlin.plan.created_at is e'' 'The time at which this plan was created.'; -comment on column plan.updated_at is e'' +comment on column merlin.plan.updated_at is e'' 'The time at which this plan was last updated.'; -comment on column plan.owner is e'' +comment on column merlin.plan.owner is e'' 'The user who owns the plan.'; -comment on column plan.updated_by is e'' +comment on column merlin.plan.updated_by is e'' 'The user who last updated the plan.'; -comment on column plan.description is e'' +comment on column merlin.plan.description is e'' 'A human-readable description for this plan and its contents.'; -- Insert Triggers -create function create_simulation_row_for_new_plan() +create function merlin.create_simulation_row_for_new_plan() returns trigger security definer language plpgsql as $$begin - insert into simulation (revision, simulation_template_id, plan_id, arguments, simulation_start_time, simulation_end_time) + insert into merlin.simulation (revision, simulation_template_id, plan_id, arguments, simulation_start_time, simulation_end_time) values (0, null, new.id, '{}', new.start_time, new.start_time+new.duration); return new; end $$; create trigger simulation_row_for_new_plan_trigger -after insert on plan +after insert on merlin.plan for each row -execute function create_simulation_row_for_new_plan(); +execute function merlin.create_simulation_row_for_new_plan(); -create function populate_constraint_spec_new_plan() +create function merlin.populate_constraint_spec_new_plan() returns trigger language plpgsql as $$ begin - insert into constraint_specification (plan_id, constraint_id, constraint_revision) + insert into merlin.constraint_specification (plan_id, constraint_id, constraint_revision) select new.id, cms.constraint_id, cms.constraint_revision - from constraint_model_specification cms + from merlin.constraint_model_specification cms where cms.model_id = new.model_id; return new; end; $$; -comment on function populate_constraint_spec_new_plan() is e'' +comment on function merlin.populate_constraint_spec_new_plan() is e'' 'Populates the plan''s constraint specification with the contents of its model''s specification.'; 
create trigger populate_constraint_spec_new_plan_trigger -after insert on plan +after insert on merlin.plan for each row -execute function populate_constraint_spec_new_plan(); +execute function merlin.populate_constraint_spec_new_plan(); -- Insert or Update Triggers -create function plan_set_updated_at() -returns trigger -security definer -language plpgsql as $$begin - new.updated_at = now(); - return new; -end$$; - create trigger set_timestamp - before update or insert on plan - for each row -execute function plan_set_updated_at(); - -create function raise_duration_is_negative() -returns trigger -security definer -language plpgsql as $$begin - raise exception 'invalid plan duration, expected nonnegative duration but found: %', new.duration; -end$$; +before update or insert on merlin.plan +for each row +execute function util_functions.set_updated_at(); create trigger check_plan_duration_is_nonnegative_trigger -before insert or update on plan +before insert or update on merlin.plan for each row when (new.duration < '0') -execute function raise_duration_is_negative(); +execute function util_functions.raise_duration_is_negative(); -- Update Triggers -create function increment_revision_on_update_plan() -returns trigger -security definer -language plpgsql as $$begin - update plan - set revision = revision + 1 - where id = new.id - or id = old.id; - - return new; -end$$; - -create trigger increment_revision_on_update_plan_trigger -after update on plan +create trigger increment_revision_plan_update +before update on merlin.plan for each row when (pg_trigger_depth() < 1) -execute function increment_revision_on_update_plan(); +execute function util_functions.increment_revision_update(); -- Delete Triggers -create function cleanup_on_delete() +create function merlin.cleanup_on_delete() returns trigger language plpgsql as $$ begin @@ -173,13 +145,13 @@ begin end if; -- withdraw pending rqs - update merge_request + update merlin.merge_request set status='withdrawn' where plan_id_receiving_changes = old.id and status = 'pending'; -- have the children be 'adopted' by this plan's parent - update plan + update merlin.plan set parent_id = old.parent_id where parent_id = old.id; @@ -188,6 +160,6 @@ end $$; create trigger cleanup_on_delete_trigger - before delete on plan + before delete on merlin.plan for each row -execute function cleanup_on_delete(); +execute function merlin.cleanup_on_delete(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/plan_collaborators.sql b/deployment/postgres-init-db/sql/tables/merlin/plan_collaborators.sql index 4ef6d78e8f..f36d26c1f9 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/plan_collaborators.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/plan_collaborators.sql @@ -1,22 +1,22 @@ -create table plan_collaborators( +create table merlin.plan_collaborators( plan_id int not null, collaborator text not null, constraint plan_collaborators_pkey primary key (plan_id, collaborator), constraint plan_collaborators_plan_id_fkey - foreign key (plan_id) references plan + foreign key (plan_id) references merlin.plan on update cascade on delete cascade, constraint plan_collaborator_collaborator_fkey - foreign key (collaborator) references metadata.users + foreign key (collaborator) references permissions.users on update cascade on delete cascade ); -comment on table plan_collaborators is e'' +comment on table merlin.plan_collaborators is e'' 'A collection of users who collaborate on the plan alongside the plan''s owner.'; -comment on column 
plan_collaborators.plan_id is e'' +comment on column merlin.plan_collaborators.plan_id is e'' 'The plan the user is a collaborator on.'; -comment on column plan_collaborators.collaborator is e'' +comment on column merlin.plan_collaborators.collaborator is e'' 'The username of the collaborator'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/plan_dataset.sql b/deployment/postgres-init-db/sql/tables/merlin/plan_dataset.sql index 57fa0a9e24..d4281ac494 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/plan_dataset.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/plan_dataset.sql @@ -1,4 +1,4 @@ -create table plan_dataset ( +create table merlin.plan_dataset ( plan_id integer not null, dataset_id integer not null, simulation_dataset_id integer, @@ -9,60 +9,61 @@ create table plan_dataset ( primary key (plan_id, dataset_id), constraint plan_dataset_references_plan foreign key (plan_id) - references plan + references merlin.plan on update cascade on delete cascade, constraint associated_sim_dataset_exists foreign key (simulation_dataset_id) - references simulation_dataset + references merlin.simulation_dataset on update cascade on delete cascade, constraint plan_dataset_references_dataset foreign key (dataset_id) - references dataset + references merlin.dataset on update cascade on delete cascade ); -comment on column plan_dataset.plan_id is e'' +comment on column merlin.plan_dataset.plan_id is e'' 'The ID of the plan to which the dataset is associated.'; -comment on column plan_dataset.dataset_id is e'' +comment on column merlin.plan_dataset.dataset_id is e'' 'The ID of the dataset associated with the plan.'; -comment on column plan_dataset.simulation_dataset_id is e'' +comment on column merlin.plan_dataset.simulation_dataset_id is e'' 'The ID of the simulation dataset optionally associated with the dataset.' 'If null, the dataset is associated with all simulation runs for the plan.'; -comment on column plan_dataset.offset_from_plan_start is e'' +comment on column merlin.plan_dataset.offset_from_plan_start is e'' 'The time to judge dataset items against relative to the plan start.' 
'\n' 'If the dataset as a whole begins one day before the planning period begins, ' 'then this column should contain the interval ''1 day ago''.'; -create or replace function create_dataset() +create function merlin.plan_dataset_create_dataset() returns trigger security definer -language plpgsql as $$begin - insert into dataset +language plpgsql as $$ +begin + insert into merlin.dataset default values returning id into new.dataset_id; -return new; + return new; end$$; -- To calculate this offset, we are going to grab any existing plan dataset with -- the same associated dataset, add the offset to the plan start time to find the -- start time of the dataset, and then subtract out the NEW plan start time to -- determine the offset in the NEW plan dataset -create or replace function calculate_offset() +create function merlin.calculate_offset() returns trigger security definer language plpgsql as $$ declare - reference plan_dataset; + reference merlin.plan_dataset; reference_plan_start timestamptz; dataset_start timestamptz; new_plan_start timestamptz; begin -- Get an existing association with this dataset for reference - select into reference * from plan_dataset + select into reference * from merlin.plan_dataset where dataset_id = new.dataset_id; -- If no reference exists, raise an exception @@ -73,56 +74,44 @@ begin end if; -- Get the plan start times - select start_time into reference_plan_start from plan where id = reference.plan_id; - select start_time into new_plan_start from plan where id = new.plan_id; + select start_time into reference_plan_start from merlin.plan where id = reference.plan_id; + select start_time into new_plan_start from merlin.plan where id = new.plan_id; -- calculate and assign the new offset from plan start dataset_start := reference_plan_start + reference.offset_from_plan_start; new.offset_from_plan_start = dataset_start - new_plan_start; -return new; + return new; end$$; -create or replace function process_delete() +create function merlin.plan_dataset_process_delete() returns trigger security definer language plpgsql as $$begin - if (select count(*) from plan_dataset where dataset_id = old.dataset_id) = 0 + if (select count(*) from merlin.plan_dataset where dataset_id = old.dataset_id) = 0 then - delete from dataset + delete from merlin.dataset where id = old.dataset_id; end if; return old; end$$; -- If a new row is created with no dataset in mind, create the dataset -do $$ begin create trigger create_dataset_trigger - before insert on plan_dataset + before insert on merlin.plan_dataset for each row when (new.dataset_id is null) - execute function create_dataset(); -exception - when duplicate_object then null; -end $$; + execute function merlin.plan_dataset_create_dataset(); -- If a new row is created for an existing dataset, calculate the offset -do $$ begin create trigger calculate_offset_trigger - before insert on plan_dataset + before insert on merlin.plan_dataset for each row when (new.dataset_id is not null) - execute function calculate_offset(); -exception - when duplicate_object then null; -end $$; + execute function merlin.calculate_offset(); -- When a row is deleted, check if any rows for the dataset remain -- If not, delete the dataset -do $$ begin create trigger delete_dataset_trigger - after delete on plan_dataset + after delete on merlin.plan_dataset for each row - execute function process_delete(); -exception - when duplicate_object then null; -end $$; + execute function merlin.plan_dataset_process_delete(); diff --git 
a/deployment/postgres-init-db/sql/tables/merlin/resource_type.sql b/deployment/postgres-init-db/sql/tables/merlin/resource_type.sql index cb9940893e..cfd22c2c8d 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/resource_type.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/resource_type.sql @@ -1,4 +1,4 @@ -create table resource_type ( +create table merlin.resource_type ( model_id integer not null, name text not null, schema jsonb not null, @@ -7,16 +7,16 @@ create table resource_type ( primary key (model_id, name), constraint resource_type_from_mission_model foreign key (model_id) - references mission_model + references merlin.mission_model on delete cascade ); -comment on table resource_type is e'' +comment on table merlin.resource_type is e'' 'A description of a parametric activity type supported by the associated mission model.'; -comment on column resource_type.name is e'' +comment on column merlin.resource_type.name is e'' 'The name of this resource type, unique within a mission model.'; -comment on column resource_type.model_id is e'' +comment on column merlin.resource_type.model_id is e'' 'The model defining this resource type.'; -comment on column resource_type.schema is e'' +comment on column merlin.resource_type.schema is e'' 'The structure of this resource type.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation.sql b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation.sql index cd5143f234..cf3222e8b0 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation.sql @@ -1,10 +1,10 @@ -create table simulation ( +create table merlin.simulation ( id integer generated always as identity, revision integer not null default 0, simulation_template_id integer null, plan_id integer not null, - arguments merlin_argument_set not null, + arguments merlin.argument_set not null, simulation_start_time timestamptz not null, simulation_end_time timestamptz not null, @@ -13,12 +13,12 @@ create table simulation ( primary key (id), constraint simulation_has_simulation_template foreign key (simulation_template_id) - references simulation_template + references merlin.simulation_template on update cascade on delete set null, constraint simulation_owned_by_plan foreign key (plan_id) - references plan + references merlin.plan on update cascade on delete cascade, constraint one_simulation_per_plan @@ -28,35 +28,22 @@ create table simulation ( ); -comment on table simulation is e'' +comment on table merlin.simulation is e'' 'A specification for simulating an activity plan.'; -comment on column simulation.id is e'' +comment on column merlin.simulation.id is e'' 'The synthetic identifier for this simulation.'; -comment on column simulation.revision is e'' +comment on column merlin.simulation.revision is e'' 'A monotonic clock that ticks for every change to this simulation.'; -comment on column simulation.simulation_template_id is e'' +comment on column merlin.simulation.simulation_template_id is e'' 'A simulation template specification to inherit.'; -comment on column simulation.plan_id is e'' +comment on column merlin.simulation.plan_id is e'' 'The plan whose contents drive this simulation.'; -comment on column simulation.arguments is e'' +comment on column merlin.simulation.arguments is e'' 'The set of arguments to this simulation, corresponding to the parameters of the associated mission model.'; - -create function 
increment_revision_for_update_simulation() -returns trigger -security definer -language plpgsql as $$begin - update simulation - set revision = revision + 1 - where id = new.id - or id = old.id; - - return new; -end$$; - create trigger increment_revision_for_update_simulation_trigger -after update on simulation +before update on merlin.simulation for each row when (pg_trigger_depth() < 1) -execute function increment_revision_for_update_simulation(); +execute function util_functions.increment_revision_update(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql index 0e6e4c48cc..6f106e0504 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql @@ -1,6 +1,6 @@ -create type status_t as enum('pending', 'incomplete', 'failed', 'success'); +create type merlin.status_t as enum('pending', 'incomplete', 'failed', 'success'); -create table simulation_dataset ( +create table merlin.simulation_dataset ( id integer generated always as identity, simulation_id integer not null, dataset_id integer null, @@ -23,7 +23,7 @@ create table simulation_dataset ( simulation_end_time timestamptz not null, -- Simulation state - status status_t not null default 'pending', + status merlin.status_t not null default 'pending', reason jsonb null, canceled boolean not null default false, @@ -37,16 +37,16 @@ create table simulation_dataset ( unique (dataset_id), constraint simulation_dataset_references_simulation foreign key (simulation_id) - references simulation + references merlin.simulation on update cascade on delete cascade, constraint simulation_dataset_references_dataset foreign key (dataset_id) - references dataset + references merlin.dataset on update cascade on delete cascade, constraint simulation_dataset_requested_by_exists - foreign key (requested_by) references metadata.users + foreign key (requested_by) references permissions.users on update cascade on delete set null, constraint start_before_end @@ -54,68 +54,68 @@ create table simulation_dataset ( ); create index simulation_dataset_simulation_has_many_datasets - on simulation_dataset (simulation_id); + on merlin.simulation_dataset (simulation_id); -comment on table simulation_dataset is e'' +comment on table merlin.simulation_dataset is e'' 'A description of the upstream simulation inputs that determined a given dataset.' '\n' 'A new row should be created by providing a simulation_id and offset_from_plan_start only ' 'as the remaining data will be filled in during the insertion'; -comment on column simulation_dataset.simulation_id is e'' +comment on column merlin.simulation_dataset.simulation_id is e'' 'The simulation determining the contents of the associated dataset.'; -comment on column simulation_dataset.dataset_id is e'' +comment on column merlin.simulation_dataset.dataset_id is e'' 'The dataset containing simulated results for the simulation. 
NULL if the dataset has not been constructed yet.'; -comment on column simulation_dataset.plan_revision is e'' +comment on column merlin.simulation_dataset.plan_revision is e'' 'The revision of the plan corresponding to the given revision of the dataset.'; -comment on column simulation_dataset.model_revision is e'' +comment on column merlin.simulation_dataset.model_revision is e'' 'The revision of the mission model corresponding to the given revision of the dataset.'; -comment on column simulation_dataset.simulation_revision is e'' +comment on column merlin.simulation_dataset.simulation_revision is e'' 'The revision of the simulation corresponding to the given revision of the dataset.'; -comment on column simulation_dataset.dataset_revision is e'' +comment on column merlin.simulation_dataset.dataset_revision is e'' 'The revision of the dataset corresponding to the given revisions of the input entities.'; -comment on column simulation_dataset.status is e'' +comment on column merlin.simulation_dataset.status is e'' 'The status of the simulation for which the dataset is associated.'; -comment on column simulation_dataset.reason is e'' +comment on column merlin.simulation_dataset.reason is e'' 'The reason for failure in the event that simulation fails.'; -comment on column simulation_dataset.canceled is e'' +comment on column merlin.simulation_dataset.canceled is e'' 'Whether the simulation has been marked as canceled.'; -comment on column simulation_dataset.offset_from_plan_start is e'' +comment on column merlin.simulation_dataset.offset_from_plan_start is e'' 'The time to judge dataset items against relative to the plan start.' '\n' 'If the dataset as a whole begins one day before the planning period begins, ' 'then this column should contain the interval ''1 day ago''.'; -comment on column simulation_dataset.requested_by is e'' +comment on column merlin.simulation_dataset.requested_by is e'' 'The user who requested the simulation.'; -comment on column simulation_dataset.requested_at is e'' +comment on column merlin.simulation_dataset.requested_at is e'' 'When this simulation dataset was created.'; -- Dataset management triggers -- These triggers create and delete datasets along with the insert/delete of a simulation_dataset -create function set_revisions_and_initialize_dataset_on_insert() +create function merlin.set_revisions_and_initialize_dataset_on_insert() returns trigger security definer language plpgsql as $$ declare - simulation_ref simulation; - plan_ref plan; - model_ref mission_model; - template_ref simulation_template; - dataset_ref dataset; + simulation_ref merlin.simulation; + plan_ref merlin.plan; + model_ref merlin.mission_model; + template_ref merlin.simulation_template; + dataset_ref merlin.dataset; begin -- Set the revisions - select into simulation_ref * from simulation where id = new.simulation_id; - select into plan_ref * from plan where id = simulation_ref.plan_id; - select into template_ref * from simulation_template where id = simulation_ref.simulation_template_id; - select into model_ref * from mission_model where id = plan_ref.model_id; + select into simulation_ref * from merlin.simulation where id = new.simulation_id; + select into plan_ref * from merlin.plan where id = simulation_ref.plan_id; + select into template_ref * from merlin.simulation_template where id = simulation_ref.simulation_template_id; + select into model_ref * from merlin.mission_model where id = plan_ref.model_id; new.model_revision = model_ref.revision; new.plan_revision = plan_ref.revision; 
new.simulation_template_revision = template_ref.revision; new.simulation_revision = simulation_ref.revision; -- Create the dataset - insert into dataset + insert into merlin.dataset default values returning * into dataset_ref; new.dataset_id = dataset_ref.id; @@ -123,44 +123,36 @@ begin return new; end$$; -create function delete_dataset_on_delete() +create trigger set_revisions_and_initialize_dataset_on_insert_trigger + before insert on merlin.simulation_dataset + for each row + execute function merlin.set_revisions_and_initialize_dataset_on_insert(); + +create function merlin.delete_dataset_on_delete() returns trigger security definer language plpgsql as $$begin - delete from dataset + delete from merlin.dataset where id = old.dataset_id; return old; end$$; -do $$ begin -create trigger set_revisions_and_initialize_dataset_on_insert_trigger - before insert on simulation_dataset - for each row - execute function set_revisions_and_initialize_dataset_on_insert(); -exception - when duplicate_object then null; -end $$; - -do $$ begin create trigger delete_dataset_on_delete_trigger - after delete on simulation_dataset + after delete on merlin.simulation_dataset for each row - execute function delete_dataset_on_delete(); -exception - when duplicate_object then null; -end $$; + execute function merlin.delete_dataset_on_delete(); -- Simulation dataset NOTIFY triggers -- These triggers NOTIFY LISTEN(ing) merlin worker clients of pending simulation requests -create function notify_simulation_workers () +create function merlin.notify_simulation_workers() returns trigger security definer language plpgsql as $$ declare - simulation_ref simulation; + simulation_ref merlin.simulation; begin - select into simulation_ref * from simulation where id = new.simulation_id; + select into simulation_ref * from merlin.simulation where id = new.simulation_id; perform ( with payload(model_revision, @@ -186,11 +178,11 @@ begin end$$; create trigger notify_simulation_workers - after insert on simulation_dataset + after insert on merlin.simulation_dataset for each row - execute function notify_simulation_workers(); + execute function merlin.notify_simulation_workers(); -create function notify_simulation_workers_cancel() +create function merlin.notify_simulation_workers_cancel() returns trigger security definer language plpgsql as $$ @@ -201,12 +193,12 @@ end $$; create trigger notify_simulation_workers_cancel -after update of canceled on simulation_dataset +after update of canceled on merlin.simulation_dataset for each row when ((old.status != 'success' or old.status != 'failed') and new.canceled) -execute function notify_simulation_workers_cancel(); +execute function merlin.notify_simulation_workers_cancel(); -create function update_offset_from_plan_start() +create function merlin.update_offset_from_plan_start() returns trigger security invoker language plpgsql as $$ @@ -214,7 +206,7 @@ declare plan_start timestamptz; begin select p.start_time - from simulation s, plan p + from merlin.simulation s, merlin.plan p where s.plan_id = p.id and new.simulation_id = s.id into plan_start; @@ -225,6 +217,6 @@ end $$; create trigger update_offset_from_plan_start_trigger -before insert or update on simulation_dataset +before insert or update on merlin.simulation_dataset for each row -execute function update_offset_from_plan_start(); +execute function merlin.update_offset_from_plan_start(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_extent.sql 
b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_extent.sql index ed16a21780..93f7dc818c 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_extent.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_extent.sql @@ -1,20 +1,20 @@ -create table simulation_extent ( +create table merlin.simulation_extent ( simulation_dataset_id integer not null primary key, extent interval not null, constraint simulation_dataset_exists foreign key (simulation_dataset_id) - references simulation_dataset + references merlin.simulation_dataset on update cascade on delete cascade ); -comment on table simulation_extent is e'' +comment on table merlin.simulation_extent is e'' 'Tracks the progress of a simulation as the latest achieved offset from the simulation start time. \n' 'This is expected to be an update-heavy table, so it is to be kept compact to maximize the likelihood of HOT updates and minimize bloat \n' 'The data in this table is not particularly valuable once a simulation has completed, and can be cleared out periodically'; -comment on column simulation_extent.simulation_dataset_id is e'' +comment on column merlin.simulation_extent.simulation_dataset_id is e'' 'The simulation dataset to which this extent pertains'; -comment on column simulation_extent.extent is e'' +comment on column merlin.simulation_extent.extent is e'' 'The latest achieved offset from the simulation start time'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_template.sql b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_template.sql index 921cf46281..91a60c900f 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_template.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_template.sql @@ -1,56 +1,44 @@ -create table simulation_template ( +create table merlin.simulation_template ( id integer generated always as identity, revision integer not null default 0, model_id integer not null, description text not null default '', - arguments merlin_argument_set not null, + arguments merlin.argument_set not null, owner text, constraint simulation_template_synthetic_key primary key (id), constraint simulation_template_owned_by_model foreign key (model_id) - references mission_model + references merlin.mission_model on update cascade on delete cascade, constraint simulation_template_owner_exists foreign key (owner) - references metadata.users + references permissions.users on update cascade on delete set null ); -comment on table simulation_template is e'' +comment on table merlin.simulation_template is e'' 'A template specification for simulating an activity plan with a base set of arguments.'; -comment on column simulation_template.id is e'' +comment on column merlin.simulation_template.id is e'' 'The synthetic identifier for this simulation template.'; -comment on column simulation_template.revision is e'' +comment on column merlin.simulation_template.revision is e'' 'A monotonic clock that ticks for every change to this simulation template.'; -comment on column simulation_template.model_id is e'' +comment on column merlin.simulation_template.model_id is e'' 'The mission model used to obtain simulation configuration parameters.'; -comment on column simulation_template.description is e'' +comment on column merlin.simulation_template.description is e'' 'A brief description to offer the planner information about the name or intent of this simulation template.'; 
-comment on column simulation_template.arguments is e'' +comment on column merlin.simulation_template.arguments is e'' 'A subset of simulation arguments corresponding to the parameters of the associated mission model.'; -comment on column simulation_template.owner is e'' +comment on column merlin.simulation_template.owner is e'' 'The user responsible for this simulation template'; -create function increment_revision_for_update_simulation_template() -returns trigger -security definer -language plpgsql as $$begin - update simulation_template - set revision = revision + 1 - where id = new.id - or id = old.id; - - return new; -end$$; - create trigger increment_revision_for_update_simulation_template_trigger -after update on simulation_template +before update on merlin.simulation_template for each row when (pg_trigger_depth() < 1) -execute function increment_revision_for_update_simulation_template(); +execute function util_functions.increment_revision_update(); diff --git a/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_latest_snapshot.sql b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_latest_snapshot.sql index b5b28cd15b..c4d769f2a3 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_latest_snapshot.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_latest_snapshot.sql @@ -1,17 +1,17 @@ -create table plan_latest_snapshot( +create table merlin.plan_latest_snapshot( plan_id integer, snapshot_id integer, primary key (plan_id, snapshot_id), foreign key (plan_id) - references plan + references merlin.plan on update cascade on delete cascade, foreign key (snapshot_id) - references plan_snapshot + references merlin.plan_snapshot on update cascade on delete cascade ); -comment on table plan_latest_snapshot is e'' +comment on table merlin.plan_latest_snapshot is e'' 'An association table between a plan and the most recent snapshot taken of the plan.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot.sql b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot.sql index c521ae427c..e549186471 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot.sql @@ -1,12 +1,12 @@ -- Snapshot is a collection of the state of all the activities as they were at the time of the snapshot -- as well as other of the plan metadata -create table plan_snapshot( +create table merlin.plan_snapshot( snapshot_id integer generated always as identity primary key, plan_id integer - references plan + references merlin.plan on delete set null, revision integer not null, @@ -18,19 +18,19 @@ create table plan_snapshot( unique (plan_id, snapshot_name) ); -comment on table plan_snapshot is e'' +comment on table merlin.plan_snapshot is e'' 'A record of the state of a plan at a given time.'; -comment on column plan_snapshot.snapshot_id is e'' +comment on column merlin.plan_snapshot.snapshot_id is e'' 'The identifier of the snapshot.'; -comment on column plan_snapshot.plan_id is e'' +comment on column merlin.plan_snapshot.plan_id is e'' 'The plan that this is a snapshot of.'; -comment on column plan_snapshot.revision is e'' +comment on column merlin.plan_snapshot.revision is e'' 'The revision of the plan at the time the snapshot was taken.'; -comment on column plan_snapshot.snapshot_name is e'' +comment on column merlin.plan_snapshot.snapshot_name is e'' 'A human-readable name for the snapshot.'; -comment on column 
plan_snapshot.description is e'' +comment on column merlin.plan_snapshot.description is e'' 'A human-readable description of the snapshot and its contents.'; -comment on column plan_snapshot.taken_by is e'' +comment on column merlin.plan_snapshot.taken_by is e'' 'The user who took the snapshot.'; -comment on column plan_snapshot.taken_at is e'' +comment on column merlin.plan_snapshot.taken_at is e'' 'The time that the snapshot was taken.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_activities.sql b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_activities.sql index 6084b479de..65c91d710e 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_activities.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_activities.sql @@ -1,6 +1,6 @@ -create table plan_snapshot_activities( +create table merlin.plan_snapshot_activities( snapshot_id integer - references plan_snapshot + references merlin.plan_snapshot on delete cascade, id integer, @@ -12,9 +12,9 @@ create table plan_snapshot_activities( last_modified_by text, start_offset interval not null, type text not null, - arguments merlin_argument_set not null, + arguments merlin.argument_set not null, last_modified_arguments_at timestamptz not null, - metadata merlin_activity_directive_metadata_set, + metadata merlin.activity_directive_metadata_set, anchor_id integer default null, anchored_to_start boolean default true not null, @@ -22,5 +22,5 @@ create table plan_snapshot_activities( primary key (id, snapshot_id) ); -comment on table plan_snapshot_activities is e'' +comment on table merlin.plan_snapshot_activities is e'' 'A record of the state of an activity at the time a snapshot was taken.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_parent.sql b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_parent.sql index 550e5ba25d..96b151a859 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_parent.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/snapshot/plan_snapshot_parent.sql @@ -1,15 +1,15 @@ -create table plan_snapshot_parent( +create table merlin.plan_snapshot_parent( snapshot_id integer - references plan_snapshot, + references merlin.plan_snapshot, parent_snapshot_id integer - references plan_snapshot, + references merlin.plan_snapshot, primary key (snapshot_id, parent_snapshot_id), constraint snapshot_cannot_be_own_parent check ( snapshot_id != parent_snapshot_id ) ); -comment on table plan_snapshot_parent is e'' +comment on table merlin.plan_snapshot_parent is e'' 'An association table that tracks the history of snapshots taken on a plan.'; -comment on column plan_snapshot_parent.parent_snapshot_id is e'' +comment on column merlin.plan_snapshot_parent.parent_snapshot_id is e'' 'The snapshot that was considered the latest snapshot for a plan when the id in snapshot_id was taken.' 
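The hunks in this patch repeatedly replace per-table trigger functions such as increment_revision_on_update_plan() and plan_set_updated_at() with shared helpers in a util_functions schema, but those helpers are defined elsewhere in the series. A minimal sketch of what util_functions.increment_revision_update() and util_functions.set_updated_at() are assumed to look like, inferred from the per-table bodies removed above (the actual definitions may differ):

-- Sketch only: the real util_functions schema and helpers are created elsewhere in this deployment.
create schema if not exists util_functions;

create function util_functions.increment_revision_update()
returns trigger
security definer
language plpgsql as $$begin
  -- Assumes the target table has a 'revision' column; bump it in place on every update.
  new.revision = old.revision + 1;
  return new;
end$$;

create function util_functions.set_updated_at()
returns trigger
security definer
language plpgsql as $$begin
  -- Assumes the target table has an 'updated_at' column; stamp it with the current time.
  new.updated_at = now();
  return new;
end$$;

Because these run as before-update triggers and modify NEW directly, the self-referencing UPDATE issued by the removed after-update functions is no longer needed; the when (pg_trigger_depth() < 1) guard is kept on the new triggers, presumably so a row updated from inside another trigger (for example, by increment_revision_mission_model_jar_update()) is not bumped a second time.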
diff --git a/deployment/postgres-init-db/sql/tables/merlin/snapshot/preset_to_snapshot_directive.sql b/deployment/postgres-init-db/sql/tables/merlin/snapshot/preset_to_snapshot_directive.sql new file mode 100644 index 0000000000..3e1ba66d91 --- /dev/null +++ b/deployment/postgres-init-db/sql/tables/merlin/snapshot/preset_to_snapshot_directive.sql @@ -0,0 +1,22 @@ +create table merlin.preset_to_snapshot_directive( + preset_id integer + references merlin.activity_presets + on update cascade + on delete cascade, + + activity_id integer, + snapshot_id integer, + + foreign key (activity_id, snapshot_id) + references merlin.plan_snapshot_activities + on update cascade + on delete cascade, + + constraint one_preset_per_snapshot_directive + unique (activity_id, snapshot_id), + + primary key (preset_id, activity_id, snapshot_id) +); + +comment on table merlin.preset_to_snapshot_directive is e'' + 'Associates presets with snapshot activity directives that have been assigned presets.'; diff --git a/deployment/postgres-init-db/sql/tables/merlin/uploaded_file.sql b/deployment/postgres-init-db/sql/tables/merlin/uploaded_file.sql index f72b164462..2f888fbdcc 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/uploaded_file.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/uploaded_file.sql @@ -1,4 +1,4 @@ -create table uploaded_file ( +create table merlin.uploaded_file ( id integer generated always as identity, path bytea not null, @@ -13,22 +13,22 @@ create table uploaded_file ( unique (name) ); -comment on table uploaded_file is e'' +comment on table merlin.uploaded_file is e'' 'A file stored physically in an external filesystem.'; -comment on column uploaded_file.id is e'' +comment on column merlin.uploaded_file.id is e'' 'An opaque internal reference to this file.'; -comment on column uploaded_file.path is e'' +comment on column merlin.uploaded_file.path is e'' 'An opaque external reference to this file in an external filesystem.' '\n' 'This is of type bytea since OS paths do not have a set encoding.'; -comment on column uploaded_file.name is e'' +comment on column merlin.uploaded_file.name is e'' 'A human-readable identifier for this file.'; -comment on column uploaded_file.created_date is e'' +comment on column merlin.uploaded_file.created_date is e'' 'The instant at which this file was added to the datastore.'; -comment on column uploaded_file.modified_date is e'' +comment on column merlin.uploaded_file.modified_date is e'' 'The instant at which this file was last updated.'; -comment on column uploaded_file.deleted_date is e'' +comment on column merlin.uploaded_file.deleted_date is e'' 'The instant at which this file was removed from use.' 
'\n' 'Deletion does not remove the file from the external filesystem, ' diff --git a/deployment/postgres-init-db/sql/types/merlin/merlin-activity-directive-metadata.sql b/deployment/postgres-init-db/sql/types/merlin/activity-directive-metadata.sql similarity index 58% rename from deployment/postgres-init-db/sql/types/merlin/merlin-activity-directive-metadata.sql rename to deployment/postgres-init-db/sql/types/merlin/activity-directive-metadata.sql index 2cce75c676..d184a5beb1 100644 --- a/deployment/postgres-init-db/sql/types/merlin/merlin-activity-directive-metadata.sql +++ b/deployment/postgres-init-db/sql/types/merlin/activity-directive-metadata.sql @@ -1,7 +1,7 @@ -create domain activity_directive_metadata_set as jsonb +create domain merlin.activity_directive_metadata_set as jsonb constraint activity_directive_metadata_set_is_object check(jsonb_typeof(value) = 'object'); -comment on domain activity_directive_metadata_set is e'' +comment on domain merlin.activity_directive_metadata_set is e'' 'The set of mission defined metadata associated with an activity directive.'; diff --git a/deployment/postgres-init-db/sql/types/merlin/merlin-arguments.sql b/deployment/postgres-init-db/sql/types/merlin/merlin-arguments.sql index 5832446315..5f604d980b 100644 --- a/deployment/postgres-init-db/sql/types/merlin/merlin-arguments.sql +++ b/deployment/postgres-init-db/sql/types/merlin/merlin-arguments.sql @@ -1,20 +1,20 @@ -create domain merlin_parameter_set as jsonb +create domain merlin.parameter_set as jsonb constraint merlin_parameter_set_is_object check(jsonb_typeof(value) = 'object'); -comment on domain merlin_parameter_set is e'' +comment on domain merlin.parameter_set is e'' 'A set of parameters accepted by a Merlin modeling entity, like an activity type or a mission model.'; -create domain merlin_argument_set as jsonb +create domain merlin.argument_set as jsonb constraint merlin_argument_set_is_object check(jsonb_typeof(value) = 'object'); -comment on domain merlin_argument_set is e'' +comment on domain merlin.argument_set is e'' 'A set of arguments provided to a Merlin modeling entity, like an activity type or a mission model.'; -create domain merlin_required_parameter_set as jsonb +create domain merlin.required_parameter_set as jsonb constraint merlin_required_parameter_set_is_array check(jsonb_typeof(value) = 'array'); -comment on domain merlin_required_parameter_set is e'' +comment on domain merlin.required_parameter_set is e'' 'A set of parameters required by a Merlin modeling entity, like an activity type or a mission model.'; diff --git a/deployment/postgres-init-db/sql/types/merlin/plan-merge-types.sql b/deployment/postgres-init-db/sql/types/merlin/plan-merge-types.sql index 9dbfd205c5..1730e8370b 100644 --- a/deployment/postgres-init-db/sql/types/merlin/plan-merge-types.sql +++ b/deployment/postgres-init-db/sql/types/merlin/plan-merge-types.sql @@ -1,3 +1,3 @@ -create type merge_request_status as enum ('pending', 'in-progress','accepted', 'rejected', 'withdrawn'); -create type activity_change_type as enum ('none', 'add', 'delete','modify'); -create type conflict_resolution as enum ('none','supplying', 'receiving'); +create type merlin.merge_request_status as enum ('pending', 'in-progress','accepted', 'rejected', 'withdrawn'); +create type merlin.activity_change_type as enum ('none', 'add', 'delete','modify'); +create type merlin.conflict_resolution as enum ('none','supplying', 'receiving'); diff --git a/deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql 
b/deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql index 3d30e4fef6..7f65604bce 100644 --- a/deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql +++ b/deployment/postgres-init-db/sql/views/merlin/activity_directive_extended.sql @@ -1,4 +1,4 @@ -create function get_approximate_start_time(_activity_id int, _plan_id int) +create function merlin.get_approximate_start_time(_activity_id int, _plan_id int) returns timestamptz security definer language plpgsql as $$ @@ -12,11 +12,11 @@ begin -- Sum up all the activities from here until the plan with recursive get_net_offset(activity_id, plan_id, anchor_id, net_offset) as ( select id, plan_id, anchor_id, start_offset - from activity_directive ad + from merlin.activity_directive ad where (ad.id, ad.plan_id) = (_activity_id, _plan_id) union select ad.id, ad.plan_id, ad.anchor_id, ad.start_offset+gno.net_offset - from activity_directive ad, get_net_offset gno + from merlin.activity_directive ad, get_net_offset gno where (ad.id, ad.plan_id) = (gno.anchor_id, gno.plan_id) ) select gno.net_offset, activity_id from get_net_offset gno @@ -25,12 +25,12 @@ begin -- Get the plan start time and duration select start_time, duration - from plan + from merlin.plan where id = _plan_id into _plan_start_time, _plan_duration; select anchored_to_start - from activity_directive + from merlin.activity_directive where (id, plan_id) = (_root_activity_id, _plan_id) into _root_anchored_to_start; @@ -43,7 +43,7 @@ begin end $$; -create view activity_directive_extended as +create view merlin.activity_directive_extended as ( select -- Activity Directive Properties @@ -51,7 +51,7 @@ create view activity_directive_extended as ad.plan_id as plan_id, -- Additional Properties ad.name as name, - get_tags(ad.id, ad.plan_id) as tags, + tags.get_tags(ad.id, ad.plan_id) as tags, ad.source_scheduling_goal_id as source_scheduling_goal_id, ad.created_at as created_at, ad.created_by as created_by, @@ -65,10 +65,10 @@ create view activity_directive_extended as ad.anchor_id as anchor_id, ad.anchored_to_start as anchored_to_start, -- Derived Properties - get_approximate_start_time(ad.id, ad.plan_id) as approximate_start_time, + merlin.get_approximate_start_time(ad.id, ad.plan_id) as approximate_start_time, ptd.preset_id as preset_id, ap.arguments as preset_arguments - from activity_directive ad - left join preset_to_directive ptd on ad.id = ptd.activity_id and ad.plan_id = ptd.plan_id - left join activity_presets ap on ptd.preset_id = ap.id + from merlin.activity_directive ad + left join merlin.preset_to_directive ptd on ad.id = ptd.activity_id and ad.plan_id = ptd.plan_id + left join merlin.activity_presets ap on ptd.preset_id = ap.id ); diff --git a/deployment/postgres-init-db/sql/views/merlin/resource_profile.sql b/deployment/postgres-init-db/sql/views/merlin/resource_profile.sql index 0cfe0c3289..32333b939a 100644 --- a/deployment/postgres-init-db/sql/views/merlin/resource_profile.sql +++ b/deployment/postgres-init-db/sql/views/merlin/resource_profile.sql @@ -1,4 +1,4 @@ -create view resource_profile as +create view merlin.resource_profile as ( select profile_segment.dataset_id as dataset_id, profile_segment.profile_id as profile_id, @@ -8,7 +8,7 @@ select profile_segment.dataset_id as dataset_id, coalesce( plan.start_time + ( select p.start_offset - from profile_segment p + from merlin.profile_segment p where p.start_offset > profile_segment.start_offset and p.profile_id = profile_segment.profile_id and p.dataset_id = 
profile_segment.dataset_id @@ -18,37 +18,37 @@ select profile_segment.dataset_id as dataset_id, plan.start_time + plan.duration ) as end_time -from profile_segment - join dataset on profile_segment.dataset_id = dataset.id - left join plan_dataset pd on dataset.id = pd.dataset_id - left join simulation_dataset sd on dataset.id = sd.dataset_id - left join simulation s on sd.simulation_id = s.id - join plan on plan.id = s.plan_id or plan.id = pd.plan_id +from merlin.profile_segment + join merlin.dataset on profile_segment.dataset_id = dataset.id + left join merlin.plan_dataset pd on dataset.id = pd.dataset_id + left join merlin.simulation_dataset sd on dataset.id = sd.dataset_id + left join merlin.simulation s on sd.simulation_id = s.id + join merlin.plan on plan.id = s.plan_id or plan.id = pd.plan_id ); -comment on view resource_profile is e'' +comment on view merlin.resource_profile is e'' 'A piece of a profile associated with a dataset, starting at a particular offset from the dataset basis. ' 'The profile is governed at any time T by the latest profile whose start_offset is no later than T.' 'This view adds in absolute start and end times to the profile segment.'; -comment on column resource_profile.dataset_id is e'' +comment on column merlin.resource_profile.dataset_id is e'' 'The dataset this segment''s profile is a part of.' '\n' 'Denormalized for partitioning. Should always match ''profile.dataset_id''.'; -comment on column resource_profile.profile_id is e'' +comment on column merlin.resource_profile.profile_id is e'' 'The profile this segment is a part of.'; -comment on column resource_profile.start_offset is e'' +comment on column merlin.resource_profile.start_offset is e'' 'The offset from the start of the plan at which this profile segment takes over the profile''s behavior.'; -comment on column resource_profile.dynamics is e'' +comment on column merlin.resource_profile.dynamics is e'' 'A formal description of the behavior of the resource between this segment and the next.' 
'\n' 'May be NULL if no behavior is known, thereby canceling any prior behavior.'; -comment on column resource_profile.start_time is e'' +comment on column merlin.resource_profile.start_time is e'' 'The absolute time this profile segment takes over the profile''s behavior.'; -comment on column resource_profile.end_time is e'' +comment on column merlin.resource_profile.end_time is e'' 'The absolute time this profile segment ends influencing the profile''s behavior.'; diff --git a/deployment/postgres-init-db/sql/views/merlin/simulated_activity.sql b/deployment/postgres-init-db/sql/views/merlin/simulated_activity.sql index f821b18888..e268a2cd11 100644 --- a/deployment/postgres-init-db/sql/views/merlin/simulated_activity.sql +++ b/deployment/postgres-init-db/sql/views/merlin/simulated_activity.sql @@ -1,50 +1,50 @@ -create view simulated_activity as +create view merlin.simulated_activity as ( select span.id as id, - simulation_dataset.id as simulation_dataset_id, + sd.id as simulation_dataset_id, span.parent_id as parent_id, span.start_offset as start_offset, span.duration as duration, span.attributes as attributes, span.type as activity_type_name, (span.attributes#>>'{directiveId}')::integer as directive_id, - simulation_dataset.simulation_start_time + span.start_offset as start_time, - simulation_dataset.simulation_start_time + span.start_offset + span.duration as end_time - from span - join dataset on span.dataset_id = dataset.id - join simulation_dataset on dataset.id = simulation_dataset.dataset_id - join simulation on simulation.id = simulation_dataset.simulation_id + sd.simulation_start_time + span.start_offset as start_time, + sd.simulation_start_time + span.start_offset + span.duration as end_time + from merlin.span span + join merlin.dataset d on span.dataset_id = d.id + join merlin.simulation_dataset sd on d.id = sd.dataset_id + join merlin.simulation s on s.id = sd.simulation_id ); -comment on view simulated_activity is e'' +comment on view merlin.simulated_activity is e'' 'Concrete activity instance created via simulation.'; -comment on column simulated_activity.id is e'' +comment on column merlin.simulated_activity.id is e'' 'Unique identifier for the activity instance span.'; -comment on column simulated_activity.simulation_dataset_id is e'' +comment on column merlin.simulated_activity.simulation_dataset_id is e'' 'The simulation dataset this activity is part of.'; -comment on column simulated_activity.parent_id is e'' +comment on column merlin.simulated_activity.parent_id is e'' 'The parent activity of this activity.'; -comment on column simulated_activity.start_offset is e'' +comment on column merlin.simulated_activity.start_offset is e'' 'The offset from the dataset start at which this activity begins.'; -comment on column simulated_activity.duration is e'' +comment on column merlin.simulated_activity.duration is e'' 'The amount of time this activity extends for.'; -comment on column simulated_activity.attributes is e'' +comment on column merlin.simulated_activity.attributes is e'' 'A set of named values annotating this activity.'; -comment on column simulated_activity.activity_type_name is e'' +comment on column merlin.simulated_activity.activity_type_name is e'' 'The activity type of this activity.'; -comment on column simulated_activity.directive_id is e'' +comment on column merlin.simulated_activity.directive_id is e'' 'The id of the activity directive that created this activity.'; -comment on column simulated_activity.start_time is e'' +comment on column 
merlin.simulated_activity.start_time is e'' 'The absolute start time of this activity.'; -comment on column simulated_activity.end_time is e'' +comment on column merlin.simulated_activity.end_time is e'' 'The absolute end time of this activity.'; From 81b16f2bb2ca72a598ec08d891eb76044671f16b Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Fri, 1 Mar 2024 12:47:32 -0800 Subject: [PATCH 12/36] Add Schema Qualifiers (Scheduler) --- .../scheduling_condition_definition.sql | 22 +++--- .../scheduling_condition_metadata.sql | 34 ++++---- .../scheduler/scheduling_goal_definition.sql | 22 +++--- .../scheduler/scheduling_goal_metadata.sql | 34 ++++---- .../scheduling_goal_analysis.sql | 16 ++-- ...uling_goal_analysis_created_activities.sql | 16 ++-- ...ng_goal_analysis_satisfying_activities.sql | 16 ++-- .../scheduling_run/scheduling_request.sql | 56 +++++++------- ...eduling_model_specification_conditions.sql | 14 ++-- .../scheduling_model_specification_goals.sql | 55 +++++++------ .../scheduling_specification.sql | 30 +++----- .../scheduling_specification_conditions.sql | 34 ++++---- .../scheduling_specification_goals.sql | 77 +++++++++---------- 13 files changed, 198 insertions(+), 228 deletions(-) diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql index 2501ac11a4..a2159372c6 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql @@ -1,4 +1,4 @@ -create table scheduling_condition_definition( +create table scheduler.scheduling_condition_definition( condition_id integer not null, revision integer not null default 0, definition text not null, @@ -9,23 +9,23 @@ create table scheduling_condition_definition( primary key (condition_id, revision), constraint scheduling_condition_definition_condition_exists foreign key (condition_id) - references scheduling_condition_metadata + references scheduler.scheduling_condition_metadata on update cascade on delete cascade ); -comment on table scheduling_condition_definition is e'' +comment on table scheduler.scheduling_condition_definition is e'' 'The specific revisions of a scheduling condition''s definition'; -comment on column scheduling_condition_definition.revision is e'' +comment on column scheduler.scheduling_condition_definition.revision is e'' 'An identifier of this definition.'; -comment on column scheduling_condition_definition.definition is e'' +comment on column scheduler.scheduling_condition_definition.definition is e'' 'An executable expression in the Merlin scheduling language.'; -comment on column scheduling_condition_definition.author is e'' +comment on column scheduler.scheduling_condition_definition.author is e'' 'The user who authored this revision.'; -comment on column scheduling_condition_definition.created_at is e'' +comment on column scheduler.scheduling_condition_definition.created_at is e'' 'When this revision was created.'; -create function scheduling_condition_definition_set_revision() +create function scheduler.scheduling_condition_definition_set_revision() returns trigger volatile language plpgsql as $$ @@ -34,7 +34,7 @@ declare begin -- Grab the current max value of revision, or -1, if this is the first revision select coalesce((select revision - from scheduling_condition_definition + from scheduler.scheduling_condition_definition where condition_id = 
new.condition_id order by revision desc limit 1), -1) @@ -46,6 +46,6 @@ end $$; create trigger scheduling_goal_definition_set_revision - before insert on scheduling_condition_definition + before insert on scheduler.scheduling_condition_definition for each row - execute function scheduling_condition_definition_set_revision(); + execute function scheduler.scheduling_condition_definition_set_revision(); diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql index 60a23d880b..2adf7d29e9 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql @@ -1,4 +1,4 @@ -create table scheduling_condition_metadata ( +create table scheduler.scheduling_condition_metadata ( id integer generated always as identity, name text not null, @@ -16,36 +16,28 @@ create table scheduling_condition_metadata ( ); -- A partial index is used to enforce name uniqueness only on conditions visible to other users -create unique index condition_name_unique_if_published on scheduling_condition_metadata (name) where public; +create unique index condition_name_unique_if_published on scheduler.scheduling_condition_metadata (name) where public; -comment on table scheduling_condition_metadata is e'' +comment on table scheduler.scheduling_condition_metadata is e'' 'A condition restricting scheduling of a plan.'; -comment on column scheduling_condition_metadata.id is e'' +comment on column scheduler.scheduling_condition_metadata.id is e'' 'The unique identifier for this scheduling condition.'; -comment on column scheduling_condition_metadata.name is e'' +comment on column scheduler.scheduling_condition_metadata.name is e'' 'A short human readable name for this condition'; -comment on column scheduling_condition_metadata.description is e'' +comment on column scheduler.scheduling_condition_metadata.description is e'' 'A longer text description of this scheduling condition.'; -comment on column scheduling_condition_metadata.public is e'' +comment on column scheduler.scheduling_condition_metadata.public is e'' 'Whether this goal is visible to all users.'; -comment on column scheduling_condition_metadata.owner is e'' +comment on column scheduler.scheduling_condition_metadata.owner is e'' 'The user responsible for this condition.'; -comment on column scheduling_condition_metadata.updated_by is e'' +comment on column scheduler.scheduling_condition_metadata.updated_by is e'' 'The user who last modified this condition''s metadata.'; -comment on column scheduling_condition_metadata.created_at is e'' +comment on column scheduler.scheduling_condition_metadata.created_at is e'' 'The time at which this condition was created.'; -comment on column scheduling_condition_metadata.updated_at is e'' +comment on column scheduler.scheduling_condition_metadata.updated_at is e'' 'The time at which this condition''s metadata was last modified.'; -create function scheduling_condition_metadata_set_updated_at() -returns trigger -security definer -language plpgsql as $$begin - new.updated_at = now(); - return new; -end$$; - create trigger set_timestamp -before update on scheduling_condition_metadata +before update on scheduler.scheduling_condition_metadata for each row -execute function scheduling_condition_metadata_set_updated_at(); +execute function util_functions.set_updated_at(); diff --git 
a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql index 62588d6c29..dc3bfcb507 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql @@ -1,4 +1,4 @@ -create table scheduling_goal_definition( +create table scheduler.scheduling_goal_definition( goal_id integer not null, revision integer not null default 0, @@ -10,23 +10,23 @@ create table scheduling_goal_definition( primary key (goal_id, revision), constraint scheduling_goal_definition_goal_exists foreign key (goal_id) - references scheduling_goal_metadata + references scheduler.scheduling_goal_metadata on update cascade on delete cascade ); -comment on table scheduling_goal_definition is e'' +comment on table scheduler.scheduling_goal_definition is e'' 'The specific revisions of a scheduling goal''s definition'; -comment on column scheduling_goal_definition.revision is e'' +comment on column scheduler.scheduling_goal_definition.revision is e'' 'An identifier of this definition.'; -comment on column scheduling_goal_definition.definition is e'' +comment on column scheduler.scheduling_goal_definition.definition is e'' 'An executable expression in the Merlin scheduling language.'; -comment on column scheduling_goal_definition.author is e'' +comment on column scheduler.scheduling_goal_definition.author is e'' 'The user who authored this revision.'; -comment on column scheduling_goal_definition.created_at is e'' +comment on column scheduler.scheduling_goal_definition.created_at is e'' 'When this revision was created.'; -create function scheduling_goal_definition_set_revision() +create function scheduler.scheduling_goal_definition_set_revision() returns trigger volatile language plpgsql as $$ @@ -35,7 +35,7 @@ declare begin -- Grab the current max value of revision, or -1, if this is the first revision select coalesce((select revision - from scheduling_goal_definition + from scheduler.scheduling_goal_definition where goal_id = new.goal_id order by revision desc limit 1), -1) @@ -47,6 +47,6 @@ end $$; create trigger scheduling_goal_definition_set_revision - before insert on scheduling_goal_definition + before insert on scheduler.scheduling_goal_definition for each row - execute function scheduling_goal_definition_set_revision(); + execute function scheduler.scheduling_goal_definition_set_revision(); diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql index e968d901c3..57ca2a2e4a 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql @@ -1,4 +1,4 @@ -create table scheduling_goal_metadata ( +create table scheduler.scheduling_goal_metadata ( id integer generated always as identity, name text not null, @@ -16,36 +16,28 @@ create table scheduling_goal_metadata ( ); -- A partial index is used to enforce name uniqueness only on goals visible to other users -create unique index goal_name_unique_if_published on scheduling_goal_metadata (name) where public; +create unique index goal_name_unique_if_published on scheduler.scheduling_goal_metadata (name) where public; -comment on table scheduling_goal_metadata is e'' +comment on table scheduler.scheduling_goal_metadata is e'' 'A goal for 
scheduling a plan.'; -comment on column scheduling_goal_metadata.id is e'' +comment on column scheduler.scheduling_goal_metadata.id is e'' 'The unique identifier of the goal'; -comment on column scheduling_goal_metadata.name is e'' +comment on column scheduler.scheduling_goal_metadata.name is e'' 'A human-meaningful name.'; -comment on column scheduling_goal_metadata.description is e'' +comment on column scheduler.scheduling_goal_metadata.description is e'' 'A detailed description suitable for long-form documentation.'; -comment on column scheduling_goal_metadata.public is e'' +comment on column scheduler.scheduling_goal_metadata.public is e'' 'Whether this goal is visible to all users.'; -comment on column scheduling_goal_metadata.owner is e'' +comment on column scheduler.scheduling_goal_metadata.owner is e'' 'The user responsible for this goal.'; -comment on column scheduling_goal_metadata.updated_by is e'' +comment on column scheduler.scheduling_goal_metadata.updated_by is e'' 'The user who last modified this goal''s metadata.'; -comment on column scheduling_goal_metadata.created_at is e'' +comment on column scheduler.scheduling_goal_metadata.created_at is e'' 'The time at which this goal was created.'; -comment on column scheduling_goal_metadata.updated_at is e'' +comment on column scheduler.scheduling_goal_metadata.updated_at is e'' 'The time at which this goal''s metadata was last modified.'; -create function scheduling_goal_metadata_set_updated_at() -returns trigger -security definer -language plpgsql as $$begin - new.updated_at = now(); - return new; -end$$; - create trigger set_timestamp -before update on scheduling_goal_metadata +before update on scheduler.scheduling_goal_metadata for each row -execute function scheduling_goal_metadata_set_updated_at(); +execute function util_functions.set_updated_at(); diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis.sql index fd4039e465..c10ec48727 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis.sql @@ -1,4 +1,4 @@ -create table scheduling_goal_analysis ( +create table scheduler.scheduling_goal_analysis ( analysis_id integer not null, goal_id integer not null, goal_revision integer not null, @@ -8,23 +8,23 @@ create table scheduling_goal_analysis ( primary key (analysis_id, goal_id, goal_revision), constraint scheduling_goal_analysis_references_scheduling_request foreign key (analysis_id) - references scheduling_request (analysis_id) + references scheduler.scheduling_request (analysis_id) on update cascade on delete cascade, constraint scheduling_goal_analysis_references_scheduling_goal foreign key (goal_id, goal_revision) - references scheduling_goal_definition + references scheduler.scheduling_goal_definition on update cascade on delete cascade ); -comment on table scheduling_goal_analysis is e'' +comment on table scheduler.scheduling_goal_analysis is e'' 'The analysis of single goal from a scheduling run.'; -comment on column scheduling_goal_analysis.analysis_id is e'' +comment on column scheduler.scheduling_goal_analysis.analysis_id is e'' 'The associated analysis ID.'; -comment on column scheduling_goal_analysis.goal_id is e'' +comment on column scheduler.scheduling_goal_analysis.goal_id is e'' 'The associated goal ID.'; -comment on column 
scheduling_goal_analysis.goal_revision is e'' +comment on column scheduler.scheduling_goal_analysis.goal_revision is e'' 'The associated version of the goal definition used.'; -comment on column scheduling_goal_analysis.satisfied is e'' +comment on column scheduler.scheduling_goal_analysis.satisfied is e'' 'Whether the associated goal was satisfied by the scheduling run.'; diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.sql index 4d231c9cc2..d8a04fd1d8 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.sql @@ -1,4 +1,4 @@ -create table scheduling_goal_analysis_created_activities ( +create table scheduler.scheduling_goal_analysis_created_activities ( analysis_id integer not null, goal_id integer not null, goal_revision integer not null, @@ -8,23 +8,23 @@ create table scheduling_goal_analysis_created_activities ( primary key (analysis_id, goal_id, goal_revision, activity_id), constraint created_activities_references_scheduling_request foreign key (analysis_id) - references scheduling_request (analysis_id) + references scheduler.scheduling_request (analysis_id) on update cascade on delete cascade, constraint created_activities_references_scheduling_goal foreign key (goal_id, goal_revision) - references scheduling_goal_definition + references scheduler.scheduling_goal_definition on update cascade on delete cascade ); -comment on table scheduling_goal_analysis_created_activities is e'' +comment on table scheduler.scheduling_goal_analysis_created_activities is e'' 'The activity instances created by a scheduling run to satisfy a goal.'; -comment on column scheduling_goal_analysis_created_activities.analysis_id is e'' +comment on column scheduler.scheduling_goal_analysis_created_activities.analysis_id is e'' 'The associated analysis ID.'; -comment on column scheduling_goal_analysis_created_activities.goal_id is e'' +comment on column scheduler.scheduling_goal_analysis_created_activities.goal_id is e'' 'The associated goal ID.'; -comment on column scheduling_goal_analysis_created_activities.goal_revision is e'' +comment on column scheduler.scheduling_goal_analysis_created_activities.goal_revision is e'' 'The associated version of the goal definition used.'; -comment on column scheduling_goal_analysis_created_activities.activity_id is e'' +comment on column scheduler.scheduling_goal_analysis_created_activities.activity_id is e'' 'The ID of an activity instance created to satisfy the associated goal.'; diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.sql index 2b88ae2069..6e6754b972 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.sql @@ -1,4 +1,4 @@ -create table scheduling_goal_analysis_satisfying_activities ( +create table scheduler.scheduling_goal_analysis_satisfying_activities ( analysis_id integer not null, goal_id integer not null, goal_revision 
integer not null, @@ -8,23 +8,23 @@ create table scheduling_goal_analysis_satisfying_activities ( primary key (analysis_id, goal_id, goal_revision, activity_id), constraint satisfying_activities_references_scheduling_request foreign key (analysis_id) - references scheduling_request (analysis_id) + references scheduler.scheduling_request (analysis_id) on update cascade on delete cascade, constraint satisfying_activities_references_scheduling_goal foreign key (goal_id, goal_revision) - references scheduling_goal_definition + references scheduler.scheduling_goal_definition on update cascade on delete cascade ); -comment on table scheduling_goal_analysis_satisfying_activities is e'' +comment on table scheduler.scheduling_goal_analysis_satisfying_activities is e'' 'The activity instances satisfying a scheduling goal.'; -comment on column scheduling_goal_analysis_satisfying_activities.analysis_id is e'' +comment on column scheduler.scheduling_goal_analysis_satisfying_activities.analysis_id is e'' 'The associated analysis ID.'; -comment on column scheduling_goal_analysis_satisfying_activities.goal_id is e'' +comment on column scheduler.scheduling_goal_analysis_satisfying_activities.goal_id is e'' 'The associated goal ID.'; -comment on column scheduling_goal_analysis_satisfying_activities.goal_revision is e'' +comment on column scheduler.scheduling_goal_analysis_satisfying_activities.goal_revision is e'' 'The associated version of the goal definition used.'; -comment on column scheduling_goal_analysis_satisfying_activities.activity_id is e'' +comment on column scheduler.scheduling_goal_analysis_satisfying_activities.activity_id is e'' 'The ID of an activity instance satisfying the associated goal.'; diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql index 431b1bb5d8..a1a0237ae3 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql @@ -1,6 +1,6 @@ -create type status_t as enum('pending', 'incomplete', 'failed', 'success'); +create type scheduler.status_t as enum('pending', 'incomplete', 'failed', 'success'); -create table scheduling_request ( +create table scheduler.scheduling_request ( analysis_id integer generated always as identity, specification_id integer not null, dataset_id integer default null, @@ -9,7 +9,7 @@ create table scheduling_request ( plan_revision integer not null, -- Scheduling State - status status_t not null default 'pending', + status scheduler.status_t not null default 'pending', reason jsonb null, canceled boolean not null default false, @@ -28,46 +28,46 @@ create table scheduling_request ( unique (specification_id, specification_revision, plan_revision), constraint scheduling_request_references_scheduling_specification foreign key(specification_id) - references scheduling_specification + references scheduler.scheduling_specification on update cascade on delete cascade, constraint start_before_end check (horizon_start <= horizon_end) ); -comment on table scheduling_request is e'' +comment on table scheduler.scheduling_request is e'' 'The status of a scheduling run that is to be performed (or has been performed).'; -comment on column scheduling_request.analysis_id is e'' +comment on column scheduler.scheduling_request.analysis_id is e'' 'The ID associated with the analysis of this scheduling run.'; 
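-- Illustrative aside (not part of the patch above): with these tables moved into a
-- dedicated "scheduler" schema, callers can no longer rely on unqualified names
-- resolving via the default search_path. A minimal sketch of the two usual options,
-- assuming a client role named "aerie_scheduler" (hypothetical name; the real role
-- names are not shown in this patch):
set search_path to scheduler, util_functions, public;           -- per session
alter role aerie_scheduler set search_path = scheduler, public;  -- or per role
-- After either of the above a worker could poll with unqualified names; otherwise
-- every reference must be schema-qualified, as the patch does throughout:
select analysis_id, specification_id, status
from scheduler.scheduling_request
where status = 'pending' and not canceled;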
-comment on column scheduling_request.specification_id is e'' +comment on column scheduler.scheduling_request.specification_id is e'' 'The ID of scheduling specification for this scheduling run.'; -comment on column scheduling_request.dataset_id is e'' +comment on column scheduler.scheduling_request.dataset_id is e'' 'The dataset containing the final simulation results for the simulation. NULL if no simulations were run during scheduling.'; -comment on column scheduling_request.specification_revision is e'' +comment on column scheduler.scheduling_request.specification_revision is e'' 'The revision of the scheduling_specification associated with this request.'; -comment on column scheduling_request.plan_revision is e'' +comment on column scheduler.scheduling_request.plan_revision is e'' 'The revision of the plan corresponding to the given revision of the dataset.'; -comment on column scheduling_request.status is e'' +comment on column scheduler.scheduling_request.status is e'' 'The state of the the scheduling request.'; -comment on column scheduling_request.reason is e'' +comment on column scheduler.scheduling_request.reason is e'' 'The reason for failure in the event a scheduling request fails.'; -comment on column scheduling_request.canceled is e'' +comment on column scheduler.scheduling_request.canceled is e'' 'Whether the scheduling run has been marked as canceled.'; -comment on column scheduling_request.horizon_start is e'' +comment on column scheduler.scheduling_request.horizon_start is e'' 'The start of the scheduling and simulation horizon for this scheduling run.'; -comment on column scheduling_request.horizon_end is e'' +comment on column scheduler.scheduling_request.horizon_end is e'' 'The end of the scheduling and simulation horizon for this scheduling run.'; -comment on column scheduling_request.simulation_arguments is e'' +comment on column scheduler.scheduling_request.simulation_arguments is e'' 'The arguments simulations run during the scheduling run will use.'; -comment on column scheduling_request.requested_by is e'' +comment on column scheduler.scheduling_request.requested_by is e'' 'The user who made the scheduling request.'; -comment on column scheduling_request.requested_at is e'' +comment on column scheduler.scheduling_request.requested_at is e'' 'When this scheduling request was made.'; -- Scheduling request NOTIFY triggers -- These triggers NOTIFY LISTEN(ing) scheduler worker clients of pending scheduling requests -create function notify_scheduler_workers () +create function scheduler.notify_scheduler_workers () returns trigger security definer language plpgsql as $$ @@ -90,16 +90,16 @@ begin end$$; create trigger notify_scheduler_workers - after insert on scheduling_request + after insert on scheduler.scheduling_request for each row - execute function notify_scheduler_workers(); + execute function scheduler.notify_scheduler_workers(); -create function cancel_pending_scheduling_rqs() +create function scheduler.cancel_pending_scheduling_rqs() returns trigger security definer language plpgsql as $$ begin - update scheduling_request + update scheduler.scheduling_request set canceled = true where status = 'pending' and specification_id = new.specification_id; @@ -108,11 +108,11 @@ end $$; create trigger cancel_pending_scheduling_rqs - before insert on scheduling_request + before insert on scheduler.scheduling_request for each row - execute function cancel_pending_scheduling_rqs(); + execute function scheduler.cancel_pending_scheduling_rqs(); -create function 
notify_scheduling_workers_cancel() +create function scheduler.notify_scheduling_workers_cancel() returns trigger security definer language plpgsql as $$ @@ -123,7 +123,7 @@ end $$; create trigger notify_scheduling_workers_cancel -after update of canceled on scheduling_request +after update of canceled on scheduler.scheduling_request for each row when ((old.status != 'success' or old.status != 'failed') and new.canceled) -execute function notify_scheduling_workers_cancel(); +execute function scheduler.notify_scheduling_workers_cancel(); diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql index 1230483f20..30fa994642 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql @@ -1,24 +1,24 @@ -create table scheduling_model_specification_conditions( +create table scheduler.scheduling_model_specification_conditions( model_id integer not null, condition_id integer not null, condition_revision integer, -- latest is NULL primary key (model_id, condition_id), foreign key (condition_id) - references scheduling_condition_metadata + references scheduler.scheduling_condition_metadata on update cascade on delete restrict, foreign key (condition_id, condition_revision) - references scheduling_condition_definition + references scheduler.scheduling_condition_definition on update cascade on delete restrict ); -comment on table scheduling_model_specification_conditions is e'' +comment on table scheduler.scheduling_model_specification_conditions is e'' 'The set of scheduling conditions that all plans using the model should include in their scheduling specification.'; -comment on column scheduling_model_specification_conditions.model_id is e'' +comment on column scheduler.scheduling_model_specification_conditions.model_id is e'' 'The model which this specification is for. Half of the primary key.'; -comment on column scheduling_model_specification_conditions.condition_id is e'' +comment on column scheduler.scheduling_model_specification_conditions.condition_id is e'' 'The id of a specific scheduling condition in the specification. Half of the primary key.'; -comment on column scheduling_model_specification_conditions.condition_revision is e'' +comment on column scheduler.scheduling_model_specification_conditions.condition_revision is e'' 'The version of the scheduling condition definition to use. 
Leave NULL to use the latest version.'; diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql index 65738a481d..28b09d5412 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql @@ -1,4 +1,4 @@ -create table scheduling_model_specification_goals( +create table scheduler.scheduling_model_specification_goals( model_id integer not null, goal_id integer not null, goal_revision integer, -- latest is NULL @@ -6,11 +6,11 @@ create table scheduling_model_specification_goals( primary key (model_id, goal_id), foreign key (goal_id) - references scheduling_goal_metadata + references scheduler.scheduling_goal_metadata on update cascade on delete restrict, foreign key (goal_id, goal_revision) - references scheduling_goal_definition + references scheduler.scheduling_goal_definition on update cascade on delete restrict, constraint model_spec_unique_goal_priorities @@ -19,18 +19,18 @@ create table scheduling_model_specification_goals( check (priority >= 0) ); -comment on table scheduling_model_specification_goals is e'' +comment on table scheduler.scheduling_model_specification_goals is e'' 'The set of scheduling goals that all plans using the model should include in their scheduling specification.'; -comment on column scheduling_model_specification_goals.model_id is e'' +comment on column scheduler.scheduling_model_specification_goals.model_id is e'' 'The model which this specification is for. Half of the primary key.'; -comment on column scheduling_model_specification_goals.goal_id is e'' +comment on column scheduler.scheduling_model_specification_goals.goal_id is e'' 'The id of a specific scheduling goal in the specification. Half of the primary key.'; -comment on column scheduling_model_specification_goals.goal_revision is e'' +comment on column scheduler.scheduling_model_specification_goals.goal_revision is e'' 'The version of the scheduling goal definition to use. 
Leave NULL to use the latest version.'; -comment on column scheduling_model_specification_goals.priority is e'' +comment on column scheduler.scheduling_model_specification_goals.priority is e'' 'The relative priority of the scheduling goal in relation to other goals on the same specification.'; -create function insert_scheduling_model_specification_goal_func() +create function scheduler.insert_scheduling_model_specification_goal_func() returns trigger language plpgsql as $$ declare @@ -38,7 +38,7 @@ create function insert_scheduling_model_specification_goal_func() begin select coalesce( (select priority - from scheduling_model_specification_goals smg + from scheduler.scheduling_model_specification_goals smg where smg.model_id = new.model_id order by priority desc limit 1), -1) + 1 @@ -54,7 +54,7 @@ begin new.priority = next_priority; end if; - update scheduling_model_specification_goals + update scheduler.scheduling_model_specification_goals set priority = priority + 1 where model_id = new.model_id and priority >= new.priority; @@ -62,16 +62,15 @@ begin end; $$; -comment on function insert_scheduling_model_specification_goal_func() is e'' +comment on function scheduler.insert_scheduling_model_specification_goal_func() is e'' 'Checks that the inserted priority is consecutive, and reorders (increments) higher or equal priorities to make room.'; create trigger insert_scheduling_model_specification_goal - before insert - on scheduling_model_specification_goals + before insert on scheduler.scheduling_model_specification_goals for each row -execute function insert_scheduling_model_specification_goal_func(); +execute function scheduler.insert_scheduling_model_specification_goal_func(); -create function update_scheduling_model_specification_goal_func() +create function scheduler.update_scheduling_model_specification_goal_func() returns trigger language plpgsql as $$ declare @@ -79,7 +78,7 @@ create function update_scheduling_model_specification_goal_func() begin select coalesce( (select priority - from scheduling_model_specification_goals smg + from scheduler.scheduling_model_specification_goals smg where smg.model_id = new.model_id order by priority desc limit 1), -1) + 1 @@ -92,13 +91,13 @@ begin end if; if new.priority > old.priority then - update scheduling_model_specification_goals + update scheduler.scheduling_model_specification_goals set priority = priority - 1 where model_id = new.model_id and priority between old.priority + 1 and new.priority and goal_id != new.goal_id; else - update scheduling_model_specification_goals + update scheduler.scheduling_model_specification_goals set priority = priority + 1 where model_id = new.model_id and priority between new.priority and old.priority - 1 @@ -108,21 +107,20 @@ begin end; $$; -comment on function update_scheduling_model_specification_goal_func() is e'' +comment on function scheduler.update_scheduling_model_specification_goal_func() is e'' 'Checks that the updated priority is consecutive, and reorders priorities to make room.'; create trigger update_scheduling_model_specification_goal - before update - on scheduling_model_specification_goals + before update on scheduler.scheduling_model_specification_goals for each row when (OLD.priority is distinct from NEW.priority and pg_trigger_depth() < 1) -execute function update_scheduling_model_specification_goal_func(); +execute function scheduler.update_scheduling_model_specification_goal_func(); -create function delete_scheduling_model_specification_goal_func() +create function 
scheduler.delete_scheduling_model_specification_goal_func() returns trigger language plpgsql as $$ begin - update scheduling_model_specification_goals + update scheduler.scheduling_model_specification_goals set priority = priority - 1 where model_id = old.model_id and priority > old.priority; @@ -130,11 +128,10 @@ begin end; $$; -comment on function delete_scheduling_model_specification_goal_func() is e'' +comment on function scheduler.delete_scheduling_model_specification_goal_func() is e'' 'Reorders (decrements) priorities to fill the gap from deleted priority.'; create trigger delete_scheduling_model_specification_goal - after delete - on scheduling_model_specification_goals + after delete on scheduler.scheduling_model_specification_goals for each row -execute function delete_scheduling_model_specification_goal_func(); +execute function scheduler.delete_scheduling_model_specification_goal_func(); diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql index 7e82f023c7..4806b25249 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql @@ -1,4 +1,4 @@ -create table scheduling_specification ( +create table scheduler.scheduling_specification ( id integer generated always as identity, revision integer not null default 0, @@ -14,33 +14,25 @@ create table scheduling_specification ( unique (plan_id) ); -comment on table scheduling_specification is e'' +comment on table scheduler.scheduling_specification is e'' 'The specification for a scheduling run.'; -comment on column scheduling_specification.id is e'' +comment on column scheduler.scheduling_specification.id is e'' 'The synthetic identifier for this scheduling specification.'; -comment on column scheduling_specification.revision is e'' +comment on column scheduler.scheduling_specification.revision is e'' 'A monotonic clock that ticks for every change to this scheduling specification.'; -comment on column scheduling_specification.plan_id is e'' +comment on column scheduler.scheduling_specification.plan_id is e'' 'The ID of the plan to be scheduled.'; -comment on column scheduling_specification.horizon_start is e'' +comment on column scheduler.scheduling_specification.horizon_start is e'' 'The start of the scheduling horizon within which the scheduler may place activities.'; -comment on column scheduling_specification.horizon_end is e'' +comment on column scheduler.scheduling_specification.horizon_end is e'' 'The end of the scheduling horizon within which the scheduler may place activities.'; -comment on column scheduling_specification.simulation_arguments is e'' +comment on column scheduler.scheduling_specification.simulation_arguments is e'' 'The arguments to use for simulation during scheduling.'; -comment on column scheduling_specification.analysis_only is e'' +comment on column scheduler.scheduling_specification.analysis_only is e'' 'The boolean stating whether this is an analysis run only'; -create function increment_revision_on_update() - returns trigger - security definer -language plpgsql as $$begin - new.revision = old.revision + 1; -return new; -end$$; - create trigger increment_revision_on_update_trigger - before update on scheduling_specification + before update on scheduler.scheduling_specification 
for each row when (pg_trigger_depth() < 1) - execute function increment_revision_on_update(); + execute function util_functions.increment_revision_update(); diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_conditions.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_conditions.sql index 4a9657ef08..a6ae969464 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_conditions.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_conditions.sql @@ -1,4 +1,4 @@ -create table scheduling_specification_conditions ( +create table scheduler.scheduling_specification_conditions ( specification_id integer not null, condition_id integer not null, condition_revision integer, -- latest is NULL @@ -8,38 +8,38 @@ create table scheduling_specification_conditions ( primary key (specification_id, condition_id), constraint scheduling_specification_conditions_specification_exists foreign key (specification_id) - references scheduling_specification + references scheduler.scheduling_specification on update cascade on delete cascade, constraint scheduling_specification_condition_exists foreign key (condition_id) - references scheduling_condition_metadata + references scheduler.scheduling_condition_metadata on update cascade on delete restrict, constraint scheduling_specification_condition_definition_exists foreign key (condition_id, condition_revision) - references scheduling_condition_definition + references scheduler.scheduling_condition_definition on update cascade on delete restrict ); -comment on table scheduling_specification_conditions is e'' +comment on table scheduler.scheduling_specification_conditions is e'' 'The set of scheduling conditions to be used on a given plan.'; -comment on column scheduling_specification_conditions.specification_id is e'' +comment on column scheduler.scheduling_specification_conditions.specification_id is e'' 'The plan scheduling specification which this condition is on. Half of the primary key.'; -comment on column scheduling_specification_conditions.condition_id is e'' +comment on column scheduler.scheduling_specification_conditions.condition_id is e'' 'The ID of a specific condition in the specification. Half of the primary key.'; -comment on column scheduling_specification_conditions.condition_revision is e'' +comment on column scheduler.scheduling_specification_conditions.condition_revision is e'' 'The version of the condition definition to use. Leave NULL to use the latest version.'; -comment on column scheduling_specification_conditions.enabled is e'' +comment on column scheduler.scheduling_specification_conditions.enabled is e'' 'Whether to use a given condition. 
Defaults to TRUE.'; -create function increment_spec_revision_on_conditions_spec_update() +create function scheduler.increment_spec_revision_on_conditions_spec_update() returns trigger security definer language plpgsql as $$ begin - update scheduling_specification + update scheduler.scheduling_specification set revision = revision + 1 where id = new.specification_id; return new; @@ -47,16 +47,16 @@ end; $$; create trigger increment_revision_on_condition_update - before insert or update on scheduling_specification_conditions + before insert or update on scheduler.scheduling_specification_conditions for each row - execute function increment_spec_revision_on_conditions_spec_update(); + execute function scheduler.increment_spec_revision_on_conditions_spec_update(); -create function increment_spec_revision_on_conditions_spec_delete() +create function scheduler.increment_spec_revision_on_conditions_spec_delete() returns trigger security definer language plpgsql as $$ begin - update scheduling_specification + update scheduler.scheduling_specification set revision = revision + 1 where id = new.specification_id; return new; @@ -64,6 +64,6 @@ end; $$; create trigger increment_revision_on_condition_delete - before delete on scheduling_specification_conditions + before delete on scheduler.scheduling_specification_conditions for each row - execute function increment_spec_revision_on_conditions_spec_delete(); + execute function scheduler.increment_spec_revision_on_conditions_spec_delete(); diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_goals.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_goals.sql index c39013dbbb..2929319cb1 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_goals.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification_goals.sql @@ -1,4 +1,4 @@ -create table scheduling_specification_goals ( +create table scheduler.scheduling_specification_goals ( specification_id integer not null, goal_id integer not null, goal_revision integer, -- latest is null @@ -13,39 +13,39 @@ create table scheduling_specification_goals ( unique (specification_id, priority) deferrable initially deferred, constraint scheduling_specification_goals_specification_exists foreign key (specification_id) - references scheduling_specification + references scheduler.scheduling_specification on update cascade on delete cascade, constraint non_negative_specification_goal_priority check (priority >= 0), constraint scheduling_spec_goal_exists foreign key (goal_id) - references scheduling_goal_metadata + references scheduler.scheduling_goal_metadata on update cascade on delete restrict, constraint scheduling_spec_goal_definition_exists foreign key (goal_id, goal_revision) - references scheduling_goal_definition + references scheduler.scheduling_goal_definition on update cascade on delete restrict ); -comment on table scheduling_specification_goals is e'' +comment on table scheduler.scheduling_specification_goals is e'' 'The scheduling goals to be executed against a given plan.'; -comment on column scheduling_specification_goals.specification_id is e'' +comment on column scheduler.scheduling_specification_goals.specification_id is e'' 'The plan scheduling specification this goal is on. 
Half of the primary key.'; -comment on column scheduling_specification_goals.goal_id is e'' +comment on column scheduler.scheduling_specification_goals.goal_id is e'' 'The id of a specific goal in the specification. Half of the primary key.'; -comment on column scheduling_specification_goals.goal_revision is e'' +comment on column scheduler.scheduling_specification_goals.goal_revision is e'' 'The version of the goal definition to use. Leave NULL to use the latest version.'; -comment on column scheduling_specification_goals.priority is e'' +comment on column scheduler.scheduling_specification_goals.priority is e'' 'The relative priority of a scheduling goal in relation to other ' 'scheduling goals within the same specification.'; -comment on column scheduling_specification_goals.enabled is e'' +comment on column scheduler.scheduling_specification_goals.enabled is e'' 'Whether to run a given goal. Defaults to TRUE.'; -comment on column scheduling_specification_goals.simulate_after is e'' +comment on column scheduler.scheduling_specification_goals.simulate_after is e'' 'Whether to re-simulate after evaluating this goal and before the next goal.'; -create function insert_scheduling_specification_goal_func() +create function scheduler.insert_scheduling_specification_goal_func() returns trigger language plpgsql as $$ declare @@ -53,7 +53,7 @@ create function insert_scheduling_specification_goal_func() begin select coalesce( (select priority - from scheduling_specification_goals ssg + from scheduler.scheduling_specification_goals ssg where ssg.specification_id = new.specification_id order by priority desc limit 1), -1) + 1 @@ -69,7 +69,7 @@ begin new.priority = next_priority; end if; - update scheduling_specification_goals + update scheduler.scheduling_specification_goals set priority = priority + 1 where specification_id = new.specification_id and priority >= new.priority; @@ -77,16 +77,15 @@ begin end; $$; -comment on function insert_scheduling_specification_goal_func is e'' +comment on function scheduler.insert_scheduling_specification_goal_func is e'' 'Checks that the inserted priority is consecutive, and reorders (increments) higher or equal priorities to make room.'; create trigger insert_scheduling_specification_goal - before insert - on scheduling_specification_goals + before insert on scheduler.scheduling_specification_goals for each row -execute function insert_scheduling_specification_goal_func(); +execute function scheduler.insert_scheduling_specification_goal_func(); -create function update_scheduling_specification_goal_func() +create function scheduler.update_scheduling_specification_goal_func() returns trigger language plpgsql as $$ declare @@ -94,7 +93,7 @@ create function update_scheduling_specification_goal_func() begin select coalesce( (select priority - from scheduling_specification_goals ssg + from scheduler.scheduling_specification_goals ssg where ssg.specification_id = new.specification_id order by priority desc limit 1), -1) + 1 @@ -107,13 +106,13 @@ begin end if; if new.priority > old.priority then - update scheduling_specification_goals + update scheduler.scheduling_specification_goals set priority = priority - 1 where specification_id = new.specification_id and priority between old.priority + 1 and new.priority and goal_id != new.goal_id; else - update scheduling_specification_goals + update scheduler.scheduling_specification_goals set priority = priority + 1 where specification_id = new.specification_id and priority between new.priority and old.priority - 1 @@ -123,21 
+122,20 @@ begin end; $$; -comment on function update_scheduling_specification_goal_func is e'' +comment on function scheduler.update_scheduling_specification_goal_func is e'' 'Checks that the updated priority is consecutive, and reorders priorities to make room.'; create trigger update_scheduling_specification_goal - before update - on scheduling_specification_goals + before update on scheduler.scheduling_specification_goals for each row when (OLD.priority is distinct from NEW.priority and pg_trigger_depth() < 1) -execute function update_scheduling_specification_goal_func(); +execute function scheduler.update_scheduling_specification_goal_func(); -create function delete_scheduling_specification_goal_func() +create function scheduler.delete_scheduling_specification_goal_func() returns trigger language plpgsql as $$ begin - update scheduling_specification_goals + update scheduler.scheduling_specification_goals set priority = priority - 1 where specification_id = old.specification_id and priority > old.priority; @@ -145,41 +143,40 @@ begin end; $$; -comment on function delete_scheduling_specification_goal_func() is e'' +comment on function scheduler.delete_scheduling_specification_goal_func() is e'' 'Reorders (decrements) priorities to fill the gap from deleted priority.'; create trigger delete_scheduling_specification_goal - after delete - on scheduling_specification_goals + after delete on scheduler.scheduling_specification_goals for each row -execute function delete_scheduling_specification_goal_func(); +execute function scheduler.delete_scheduling_specification_goal_func(); -create function increment_spec_revision_on_goal_spec_update() +create function scheduler.increment_spec_revision_on_goal_spec_update() returns trigger security definer language plpgsql as $$begin - update scheduling_specification + update scheduler.scheduling_specification set revision = revision + 1 where id = new.specification_id; return new; end$$; create trigger increment_revision_on_goal_update - before insert or update on scheduling_specification_goals + before insert or update on scheduler.scheduling_specification_goals for each row - execute function increment_spec_revision_on_goal_spec_update(); + execute function scheduler.increment_spec_revision_on_goal_spec_update(); -create function increment_spec_revision_on_goal_spec_delete() +create function scheduler.increment_spec_revision_on_goal_spec_delete() returns trigger security definer language plpgsql as $$begin - update scheduling_specification + update scheduler.scheduling_specification set revision = revision + 1 where id = old.specification_id; return old; end$$; create trigger increment_revision_on_goal_delete - before delete on scheduling_specification_goals + before delete on scheduler.scheduling_specification_goals for each row - execute function increment_spec_revision_on_goal_spec_delete(); + execute function scheduler.increment_spec_revision_on_goal_spec_delete(); From 9e7266705b560c0325ab0729bc1c3f2fbc126747 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Fri, 1 Mar 2024 13:29:21 -0800 Subject: [PATCH 13/36] Add Schema Qualifiers (Sequencing) - Split views into their own files --- .../sequencing/activity_instance_commands.sql | 16 +++--- .../tables/sequencing/command_dictionary.sql | 16 +++--- .../tables/sequencing/expanded_sequences.sql | 9 ++-- .../sql/tables/sequencing/expansion_rule.sql | 51 +++++++------------ .../sql/tables/sequencing/expansion_run.sql | 12 ++--- .../sql/tables/sequencing/expansion_set.sql | 30 +++++------ 
.../sequencing/expansion_set_to_rule.sql | 46 +++-------------- .../tables/sequencing/schema_migrations.sql | 30 ----------- .../sql/tables/sequencing/sequence.sql | 8 +-- .../sequence_to_simulated_activity.sql | 16 +++--- .../sql/tables/sequencing/user_sequence.sql | 28 ++++------ .../sequencing/expansion_set_rule_view.sql | 16 ++++++ .../sequencing/rule_expansion_set_view.sql | 13 +++++ 13 files changed, 121 insertions(+), 170 deletions(-) delete mode 100644 deployment/postgres-init-db/sql/tables/sequencing/schema_migrations.sql create mode 100644 deployment/postgres-init-db/sql/views/sequencing/expansion_set_rule_view.sql create mode 100644 deployment/postgres-init-db/sql/views/sequencing/rule_expansion_set_view.sql diff --git a/deployment/postgres-init-db/sql/tables/sequencing/activity_instance_commands.sql b/deployment/postgres-init-db/sql/tables/sequencing/activity_instance_commands.sql index 2c35a6a1e5..3534f1dfb5 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/activity_instance_commands.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/activity_instance_commands.sql @@ -1,4 +1,4 @@ -create table activity_instance_commands ( +create table sequencing.activity_instance_commands ( id integer generated always as identity, activity_instance_id integer not null, @@ -12,19 +12,19 @@ create table activity_instance_commands ( unique (activity_instance_id,expansion_run_id), foreign key (expansion_run_id) - references expansion_run (id) + references sequencing.expansion_run (id) on delete cascade ); -comment on table activity_instance_commands is e'' +comment on table sequencing.activity_instance_commands is e'' 'The commands generated from activities instances in the plan.'; -comment on column activity_instance_commands.id is e'' +comment on column sequencing.activity_instance_commands.id is e'' 'The synthetic identifier for this activity instance command result.'; -comment on column activity_instance_commands.activity_instance_id is e'' +comment on column sequencing.activity_instance_commands.activity_instance_id is e'' 'The activity_instance in the plan.'; -comment on column activity_instance_commands.commands is e'' +comment on column sequencing.activity_instance_commands.commands is e'' 'Commands generated for the activity_instance.'; -comment on column activity_instance_commands.errors is e'' +comment on column sequencing.activity_instance_commands.errors is e'' 'Errors encountered while attempting to expand the activity_instance.'; -comment on column activity_instance_commands.expansion_run_id is e'' +comment on column sequencing.activity_instance_commands.expansion_run_id is e'' 'The configuration used during command generation'; diff --git a/deployment/postgres-init-db/sql/tables/sequencing/command_dictionary.sql b/deployment/postgres-init-db/sql/tables/sequencing/command_dictionary.sql index f01f03b44e..e1cf7de72e 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/command_dictionary.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/command_dictionary.sql @@ -1,4 +1,4 @@ -create table command_dictionary ( +create table sequencing.command_dictionary ( id integer generated always as identity, command_types_typescript_path text not null, @@ -14,17 +14,17 @@ create table command_dictionary ( unique (mission,version) ); -comment on table command_dictionary is e'' +comment on table sequencing.command_dictionary is e'' 'A Command Dictionary for a mission.'; -comment on column command_dictionary.id is e'' +comment on column 
sequencing.command_dictionary.id is e'' 'The synthetic identifier for this command dictionary.'; -comment on column command_dictionary.command_types_typescript_path is e'' +comment on column sequencing.command_dictionary.command_types_typescript_path is e'' 'The location of command dictionary types (.ts) on the filesystem'; -comment on column command_dictionary.mission is e'' +comment on column sequencing.command_dictionary.mission is e'' 'A human-meaningful identifier for the mission described by the command dictionary'; -comment on column command_dictionary.version is e'' +comment on column sequencing.command_dictionary.version is e'' 'A human-meaningful version qualifier.'; -comment on column command_dictionary.parsed_json is e'' +comment on column sequencing.command_dictionary.parsed_json is e'' 'The XML that has been parsed and converted to JSON'; -comment on constraint command_dictionary_natural_key on command_dictionary is e'' +comment on constraint command_dictionary_natural_key on sequencing.command_dictionary is e'' 'There an only be one command dictionary of a given version for a given mission.'; diff --git a/deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql b/deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql index 61c0828ab3..fdc0a22f7d 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql @@ -1,4 +1,4 @@ -create table expanded_sequences ( +create table sequencing.expanded_sequences ( id integer generated always as identity, expansion_run_id integer not null, @@ -14,11 +14,14 @@ create table expanded_sequences ( constraint expanded_sequences_to_expansion_run_id foreign key (expansion_run_id) - references expansion_run + references sequencing.expansion_run on delete cascade, constraint expanded_sequences_to_seq_id foreign key (seq_id, simulation_dataset_id) - references sequence (seq_id, simulation_dataset_id) + references sequencing.sequence (seq_id, simulation_dataset_id) on delete cascade ); + +comment on table sequencing.expanded_sequences is e'' + 'A cache of sequences that have already been expanded.'; diff --git a/deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql index 1b4706931b..31763799ae 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql @@ -1,4 +1,4 @@ -create table expansion_rule ( +create table sequencing.expansion_rule ( id integer generated always as identity, name text not null, @@ -26,53 +26,40 @@ create table expansion_rule ( unique (id, activity_type), foreign key (authoring_command_dict_id) - references command_dictionary (id) + references sequencing.command_dictionary (id) on delete set null ); -comment on table expansion_rule is e'' +comment on table sequencing.expansion_rule is e'' 'The user defined logic to expand an activity type.'; -comment on column expansion_rule.id is e'' +comment on column sequencing.expansion_rule.id is e'' 'The synthetic identifier for this expansion rule.'; -comment on column expansion_rule.activity_type is e'' +comment on column sequencing.expansion_rule.activity_type is e'' 'The user selected activity type.'; -comment on column expansion_rule.expansion_logic is e'' +comment on column sequencing.expansion_rule.expansion_logic is e'' 'The expansion logic used to generate commands.'; 
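-- Illustrative aside (not part of the patch): the per-table *_set_updated_at trigger
-- functions removed in this series are replaced by a shared util_functions.set_updated_at().
-- Its definition is not included in these hunks; judging from the removed per-table
-- versions, it is presumably a plain timestamp bump along these lines (a sketch,
-- assuming the util_functions schema already exists and the shared function only
-- refreshes updated_at and nothing else):
create function util_functions.set_updated_at()
returns trigger
security definer
language plpgsql as $$begin
  new.updated_at = now();
  return new;
end$$;
-- The set_timestamp triggers added in these hunks then point each table that has an
-- updated_at column at this one function instead of a bespoke per-table copy.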
-comment on column expansion_rule.authoring_command_dict_id is e'' +comment on column sequencing.expansion_rule.authoring_command_dict_id is e'' 'The id of the command dictionary to be used for authoring of this expansion.'; -comment on column expansion_rule.authoring_mission_model_id is e'' +comment on column sequencing.expansion_rule.authoring_mission_model_id is e'' 'The id of the mission model to be used for authoring of this expansion.'; -comment on column expansion_rule.owner is e'' +comment on column sequencing.expansion_rule.owner is e'' 'The user responsible for this expansion rule.'; -comment on column expansion_rule.updated_by is e'' +comment on column sequencing.expansion_rule.updated_by is e'' 'The user who last updated this expansion rule.'; -comment on column expansion_rule.description is e'' +comment on column sequencing.expansion_rule.description is e'' 'A description of this expansion rule.'; -comment on column expansion_rule.created_at is e'' +comment on column sequencing.expansion_rule.created_at is e'' 'The time this expansion rule was created'; -comment on column expansion_rule.updated_at is e'' +comment on column sequencing.expansion_rule.updated_at is e'' 'The time this expansion rule was last updated.'; -comment on constraint expansion_rule_activity_type_foreign_key on expansion_rule is e'' +comment on constraint expansion_rule_activity_type_foreign_key on sequencing.expansion_rule is e'' 'This enables us to have a foreign key on expansion_set_to_rule which is necessary for building the unique constraint `max_one_expansion_of_each_activity_type_per_expansion_set`.'; -create function expansion_rule_set_updated_at() -returns trigger -security definer -language plpgsql as $$begin - new.updated_at = now(); - update expansion_set es - set updated_at = new.updated_at - from expansion_set_to_rule esr - where esr.rule_id = new.id - and esr.set_id = es.id; - return new; -end$$; - create trigger set_timestamp -before update on expansion_rule +before update on sequencing.expansion_rule for each row -execute function expansion_rule_set_updated_at(); +execute function util_functions.set_updated_at(); -create function expansion_rule_default_name() +create function sequencing.expansion_rule_default_name() returns trigger security invoker language plpgsql as $$begin @@ -82,8 +69,8 @@ end $$; create trigger set_default_name -before insert on expansion_rule +before insert on sequencing.expansion_rule for each row when ( new.name is null ) -execute function expansion_rule_default_name(); +execute function sequencing.expansion_rule_default_name(); diff --git a/deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql index 60c60124a4..79a1dfb0fa 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql @@ -1,4 +1,4 @@ -create table expansion_run ( +create table sequencing.expansion_run ( id integer generated always as identity, simulation_dataset_id integer not null, @@ -10,14 +10,14 @@ create table expansion_run ( primary key (id), foreign key (expansion_set_id) - references expansion_set (id) + references sequencing.expansion_set (id) on delete cascade ); -comment on table expansion_run is e'' +comment on table sequencing.expansion_run is e'' 'The configuration for an expansion run for a plan.'; -comment on column expansion_run.id is e'' +comment on column sequencing.expansion_run.id is e'' 'The synthetic 
identifier for this expansion run.'; -comment on column expansion_run.simulation_dataset_id is e'' +comment on column sequencing.expansion_run.simulation_dataset_id is e'' 'The simulation dataset id used to generate this expansion run.'; -comment on column expansion_run.expansion_set_id is e'' +comment on column sequencing.expansion_run.expansion_set_id is e'' 'The command dictionary, mission model, and expansion set id.'; diff --git a/deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql index c95cd40517..e4a4b0c062 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql @@ -1,4 +1,4 @@ -create table expansion_set ( +create table sequencing.expansion_set ( id integer generated always as identity, name text not null, description text not null default '', @@ -19,32 +19,32 @@ create table expansion_set ( primary key (id), foreign key (command_dict_id) - references command_dictionary (id) + references sequencing.command_dictionary (id) on delete cascade ); -comment on table expansion_set is e'' +comment on table sequencing.expansion_set is e'' 'A binding of a command dictionary to a mission model.'; -comment on column expansion_set.id is e'' +comment on column sequencing.expansion_set.id is e'' 'The synthetic identifier for the set.'; -comment on column expansion_set.command_dict_id is e'' +comment on column sequencing.expansion_set.command_dict_id is e'' 'The ID of a command dictionary.'; -comment on column expansion_set.mission_model_id is e'' +comment on column sequencing.expansion_set.mission_model_id is e'' 'The ID of a mission model.'; -comment on column expansion_set.name is e'' +comment on column sequencing.expansion_set.name is e'' 'The human-readable name of the expansion set.'; -comment on column expansion_set.owner is e'' +comment on column sequencing.expansion_set.owner is e'' 'The user responsible for the expansion set.'; -comment on column expansion_set.updated_by is e'' +comment on column sequencing.expansion_set.updated_by is e'' 'The user who last updated this expansion set.'; -comment on column expansion_set.description is e'' +comment on column sequencing.expansion_set.description is e'' 'A description of this expansion set.'; -comment on column expansion_set.created_at is e'' +comment on column sequencing.expansion_set.created_at is e'' 'The time this expansion set was created'; -comment on column expansion_set.updated_at is e'' +comment on column sequencing.expansion_set.updated_at is e'' 'The time this expansion set or one of its expansion rules was last updated.'; -create function expansion_set_default_name() +create function sequencing.expansion_set_default_name() returns trigger security invoker language plpgsql as $$begin @@ -54,7 +54,7 @@ end $$; create trigger set_default_name -before insert on expansion_set +before insert on sequencing.expansion_set for each row when ( new.name is null ) -execute function expansion_set_default_name(); +execute function sequencing.expansion_set_default_name(); diff --git a/deployment/postgres-init-db/sql/tables/sequencing/expansion_set_to_rule.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_set_to_rule.sql index ca38efae75..d22718c7cc 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/expansion_set_to_rule.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/expansion_set_to_rule.sql @@ -1,4 +1,4 @@ -create table 
expansion_set_to_rule ( +create table sequencing.expansion_set_to_rule ( set_id integer not null, rule_id integer not null, activity_type text not null, @@ -7,53 +7,23 @@ create table expansion_set_to_rule ( primary key (set_id,rule_id), foreign key (set_id) - references expansion_set (id) + references sequencing.expansion_set (id) on delete cascade, foreign key (rule_id, activity_type) - references expansion_rule (id, activity_type) + references sequencing.expansion_rule (id, activity_type) on delete cascade, CONSTRAINT max_one_expansion_of_each_activity_type_per_expansion_set UNIQUE (set_id, activity_type) ); -comment on table expansion_set_to_rule is e'' +comment on table sequencing.expansion_set_to_rule is e'' 'The join table between expansion_set and expansion_rule.'; -comment on column expansion_set_to_rule.set_id is e'' +comment on column sequencing.expansion_set_to_rule.set_id is e'' 'The id for an expansion_set.'; -comment on column expansion_set_to_rule.rule_id is e'' +comment on column sequencing.expansion_set_to_rule.rule_id is e'' 'The id for an expansion_rule.'; -comment on column expansion_set_to_rule.activity_type is e'' +comment on column sequencing.expansion_set_to_rule.activity_type is e'' 'The activity type of the expansion rule. To be used exclusively for the uniqueness check.'; -comment on constraint max_one_expansion_of_each_activity_type_per_expansion_set on expansion_set_to_rule is e'' +comment on constraint max_one_expansion_of_each_activity_type_per_expansion_set on sequencing.expansion_set_to_rule is e'' 'Ensures that there is maximum one expansion of each activity type per expansion set.'; - -create view expansion_set_rule_view as -select expansion_set_to_rule.set_id, - rule.id, - rule.activity_type, - rule.expansion_logic, - rule.authoring_command_dict_id, - rule.authoring_mission_model_id, - rule.created_at, - rule.updated_at, - rule.name, - rule.owner, - rule.updated_by, - rule.description -from expansion_set_to_rule left join expansion_rule rule - on expansion_set_to_rule.rule_id = rule.id; - -create view rule_expansion_set_view as -select expansion_set_to_rule.rule_id, - set.id, - set.name, - set.owner, - set.description, - set.command_dict_id, - set.mission_model_id, - set.created_at, - set.updated_at, - set.updated_by -from expansion_set_to_rule left join expansion_set set - on expansion_set_to_rule.set_id = set.id; diff --git a/deployment/postgres-init-db/sql/tables/sequencing/schema_migrations.sql b/deployment/postgres-init-db/sql/tables/sequencing/schema_migrations.sql deleted file mode 100644 index b1123b6267..0000000000 --- a/deployment/postgres-init-db/sql/tables/sequencing/schema_migrations.sql +++ /dev/null @@ -1,30 +0,0 @@ -create table migrations.schema_migrations ( - migration_id varchar primary key -); - -create procedure migrations.mark_migration_applied(_migration_id varchar) -language plpgsql as $$ -begin - insert into migrations.schema_migrations (migration_id) - values (_migration_id); -end; -$$; - -create procedure migrations.mark_migration_rolled_back(_migration_id varchar) -language plpgsql as $$ -begin - delete from migrations.schema_migrations - where migration_id = _migration_id; -end; -$$; - -comment on schema migrations is e'' - 'Tables and procedures associated with tracking schema migrations'; -comment on table migrations.schema_migrations is e'' - 'Tracks what migrations have been applied'; -comment on column migrations.schema_migrations.migration_id is e'' - 'An identifier for a migration that has been applied'; -comment on 
procedure migrations.mark_migration_applied is e'' - 'Given an identifier for a migration, add that migration to the applied set'; -comment on procedure migrations.mark_migration_rolled_back is e'' - 'Given an identifier for a migration, remove that migration from the applied set'; diff --git a/deployment/postgres-init-db/sql/tables/sequencing/sequence.sql b/deployment/postgres-init-db/sql/tables/sequencing/sequence.sql index 42749e61dc..bf1430d1f6 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/sequence.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/sequence.sql @@ -1,4 +1,4 @@ -create table sequence ( +create table sequencing.sequence ( seq_id text not null, simulation_dataset_id int not null, metadata jsonb, @@ -8,9 +8,9 @@ create table sequence ( constraint sequence_primary_key primary key (seq_id, simulation_dataset_id) ); -comment on table sequence is e'' +comment on table sequencing.sequence is e'' 'A sequence product'; -comment on column sequence.seq_id is e'' +comment on column sequencing.sequence.seq_id is e'' 'The FSW sequence specifier'; -comment on column sequence.simulation_dataset_id is e'' +comment on column sequencing.sequence.simulation_dataset_id is e'' 'The simulation dataset id whose outputs are associated with this sequence'; diff --git a/deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql b/deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql index 3e5ecea00c..e6b9dfb592 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql @@ -1,4 +1,4 @@ -create table sequence_to_simulated_activity ( +create table sequencing.sequence_to_simulated_activity ( simulated_activity_id int not null, simulation_dataset_id int not null, seq_id text not null, @@ -8,19 +8,19 @@ create table sequence_to_simulated_activity ( constraint sequence_to_simulated_activity_activity_instance_id_fkey foreign key (seq_id, simulation_dataset_id) - references sequence (seq_id, simulation_dataset_id) + references sequencing.sequence (seq_id, simulation_dataset_id) on delete cascade ); -comment on table sequence_to_simulated_activity is e'' +comment on table sequencing.sequence_to_simulated_activity is e'' 'Join table for sequences and simulated activities.'; -comment on column sequence_to_simulated_activity.simulated_activity_id is e'' +comment on column sequencing.sequence_to_simulated_activity.simulated_activity_id is e'' 'ID of the joining simulated activity.'; -comment on column sequence_to_simulated_activity.simulation_dataset_id is e'' +comment on column sequencing.sequence_to_simulated_activity.simulation_dataset_id is e'' 'ID of the simulation dataset.'; -comment on column sequence_to_simulated_activity.seq_id is e'' +comment on column sequencing.sequence_to_simulated_activity.seq_id is e'' 'ID of the joining sequence.'; -comment on constraint sequence_to_simulated_activity_primary_key on sequence_to_simulated_activity is e'' +comment on constraint sequence_to_simulated_activity_primary_key on sequencing.sequence_to_simulated_activity is e'' 'Primary key constrains one simulated activity id per simulation dataset.'; -comment on constraint sequence_to_simulated_activity_activity_instance_id_fkey on sequence_to_simulated_activity is e'' +comment on constraint sequence_to_simulated_activity_activity_instance_id_fkey on sequencing.sequence_to_simulated_activity is e'' 'Foreign 
key constrains that this join table relates to a sequence id that exists for the simulation dataset.'; diff --git a/deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql b/deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql index 58541daea6..e099a31b92 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql @@ -1,4 +1,4 @@ -create table user_sequence ( +create table sequencing.user_sequence ( authoring_command_dict_id integer not null, created_at timestamptz not null default now(), definition text not null, @@ -10,30 +10,22 @@ create table user_sequence ( constraint user_sequence_primary_key primary key (id) ); -comment on column user_sequence.authoring_command_dict_id is e'' +comment on column sequencing.user_sequence.authoring_command_dict_id is e'' 'Command dictionary the user sequence was created with.'; -comment on column user_sequence.created_at is e'' +comment on column sequencing.user_sequence.created_at is e'' 'Time the user sequence was created.'; -comment on column user_sequence.definition is e'' +comment on column sequencing.user_sequence.definition is e'' 'The user sequence definition string.'; -comment on column user_sequence.id is e'' +comment on column sequencing.user_sequence.id is e'' 'ID of the user sequence.'; -comment on column user_sequence.name is e'' +comment on column sequencing.user_sequence.name is e'' 'Human-readable name of the user sequence.'; -comment on column user_sequence.owner is e'' +comment on column sequencing.user_sequence.owner is e'' 'The user responsible for this sequence.'; -comment on column user_sequence.updated_at is e'' +comment on column sequencing.user_sequence.updated_at is e'' 'Time the user sequence was last updated.'; -create or replace function user_sequence_set_updated_at() -returns trigger -security definer -language plpgsql as $$begin - new.updated_at = now(); - return new; -end$$; - create trigger set_timestamp -before update on user_sequence +before update on sequencing.user_sequence for each row -execute function user_sequence_set_updated_at(); +execute function util_functions.set_updated_at(); diff --git a/deployment/postgres-init-db/sql/views/sequencing/expansion_set_rule_view.sql b/deployment/postgres-init-db/sql/views/sequencing/expansion_set_rule_view.sql new file mode 100644 index 0000000000..545202d0c8 --- /dev/null +++ b/deployment/postgres-init-db/sql/views/sequencing/expansion_set_rule_view.sql @@ -0,0 +1,16 @@ +create view sequencing.expansion_set_rule_view as +select str.set_id, + rule.id, + rule.activity_type, + rule.expansion_logic, + rule.authoring_command_dict_id, + rule.authoring_mission_model_id, + rule.created_at, + rule.updated_at, + rule.name, + rule.owner, + rule.updated_by, + rule.description +from sequencing.expansion_set_to_rule str left join sequencing.expansion_rule rule + on str.rule_id = rule.id; + diff --git a/deployment/postgres-init-db/sql/views/sequencing/rule_expansion_set_view.sql b/deployment/postgres-init-db/sql/views/sequencing/rule_expansion_set_view.sql new file mode 100644 index 0000000000..d5c6813b9f --- /dev/null +++ b/deployment/postgres-init-db/sql/views/sequencing/rule_expansion_set_view.sql @@ -0,0 +1,13 @@ +create view sequencing.rule_expansion_set_view as +select str.rule_id, + set.id, + set.name, + set.owner, + set.description, + set.command_dict_id, + set.mission_model_id, + set.created_at, + set.updated_at, + set.updated_by +from 
sequencing.expansion_set_to_rule str left join sequencing.expansion_set set + on str.set_id = set.id; From 51f66e22ca47217a59e3b3ca01e6a57a7cc0cd96 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Fri, 1 Mar 2024 13:26:43 -0800 Subject: [PATCH 14/36] Add Schema Qualifiers (UI) --- .../sql/tables/ui/extension_roles.sql | 10 +++---- .../sql/tables/ui/extensions.sql | 25 ++++++----------- .../postgres-init-db/sql/tables/ui/view.sql | 28 +++++++------------ 3 files changed, 24 insertions(+), 39 deletions(-) diff --git a/deployment/postgres-init-db/sql/tables/ui/extension_roles.sql b/deployment/postgres-init-db/sql/tables/ui/extension_roles.sql index 552febacd8..128ba2f25b 100644 --- a/deployment/postgres-init-db/sql/tables/ui/extension_roles.sql +++ b/deployment/postgres-init-db/sql/tables/ui/extension_roles.sql @@ -1,14 +1,14 @@ -create table extension_roles ( - extension_id integer not null references extensions(id) +create table ui.extension_roles ( + extension_id integer not null references ui.extensions(id) on update cascade on delete cascade, role text not null, primary key (extension_id, role) ); -comment on table extension_roles is e'' +comment on table ui.extension_roles is e'' 'A mapping of extensions to what roles can access them.'; -comment on column extension_roles.extension_id is e'' +comment on column ui.extension_roles.extension_id is e'' 'The extension that the role is defined for.'; -comment on column extension_roles.role is e'' +comment on column ui.extension_roles.role is e'' 'The role that is allowed to access the extension.'; diff --git a/deployment/postgres-init-db/sql/tables/ui/extensions.sql b/deployment/postgres-init-db/sql/tables/ui/extensions.sql index 8753df55c6..1ab1beb374 100644 --- a/deployment/postgres-init-db/sql/tables/ui/extensions.sql +++ b/deployment/postgres-init-db/sql/tables/ui/extensions.sql @@ -1,4 +1,4 @@ -create table extensions ( +create table ui.extensions ( id integer generated always as identity, description text, label text not null, @@ -9,27 +9,20 @@ create table extensions ( constraint extensions_primary_key primary key (id) ); -comment on table extensions is e'' +comment on table ui.extensions is e'' 'External extension APIs the user can call from within Aerie UI.'; -comment on column extensions.description is e'' +comment on column ui.extensions.description is e'' 'An optional description of the external extension.'; -comment on column extensions.label is e'' +comment on column ui.extensions.label is e'' 'The name of the extension that is displayed in the UI.'; -comment on column extensions.owner is e'' +comment on column ui.extensions.owner is e'' 'The user who owns the extension.'; -comment on column extensions.url is e'' +comment on column ui.extensions.url is e'' 'The URL of the API to be called.'; -comment on column extensions.updated_at is e'' +comment on column ui.extensions.updated_at is e'' 'The time the extension was last updated.'; -create function extensions_set_updated_at() -returns trigger -language plpgsql as $$begin - new.updated_at = now(); - return new; -end$$; - create trigger extensions_set_timestamp - before update on extensions + before update on ui.extensions for each row -execute function extensions_set_updated_at(); +execute function util_functions.set_updated_at(); diff --git a/deployment/postgres-init-db/sql/tables/ui/view.sql b/deployment/postgres-init-db/sql/tables/ui/view.sql index f8746b5d19..71a9dc8701 100644 --- a/deployment/postgres-init-db/sql/tables/ui/view.sql +++ 
b/deployment/postgres-init-db/sql/tables/ui/view.sql @@ -1,4 +1,4 @@ -create table view ( +create table ui.view ( created_at timestamptz not null default now(), definition jsonb not null, id integer generated always as identity, @@ -9,30 +9,22 @@ create table view ( constraint view_primary_key primary key (id) ); -comment on table view is e'' +comment on table ui.view is e'' 'View configuration for Aerie UI.'; -comment on column view.created_at is e'' +comment on column ui.view.created_at is e'' 'Time the view was created.'; -comment on column view.definition is e'' +comment on column ui.view.definition is e'' 'JSON blob of the view definition that implements the view JSON schema.'; -comment on column view.id is e'' +comment on column ui.view.id is e'' 'Integer primary key of the view.'; -comment on column view.name is e'' +comment on column ui.view.name is e'' 'Human-readable name of the view.'; -comment on column view.owner is e'' +comment on column ui.view.owner is e'' 'The user who owns the view.'; -comment on column view.updated_at is e'' +comment on column ui.view.updated_at is e'' 'Time the view was last updated.'; -create or replace function view_set_updated_at() -returns trigger -security definer -language plpgsql as $$begin - new.updated_at = now(); - return new; -end$$; - create trigger set_timestamp -before update on view +before update on ui.view for each row -execute function view_set_updated_at(); +execute function util_functions.set_updated_at(); From bdbef835f8235cbad1697bbf642a2166e27a9982 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 4 Mar 2024 09:31:53 -0800 Subject: [PATCH 15/36] Create Init Files --- deployment/postgres-init-db/sql/init.sql | 46 +++++++++ .../postgres-init-db/sql/init_hasura.sql | 16 +++ .../postgres-init-db/sql/init_merlin.sql | 97 +++++++++++++++++++ .../postgres-init-db/sql/init_permissions.sql | 26 +++++ .../postgres-init-db/sql/init_scheduler.sql | 30 ++++++ .../postgres-init-db/sql/init_sequencing.sql | 24 +++++ deployment/postgres-init-db/sql/init_tags.sql | 29 ++++++ deployment/postgres-init-db/sql/init_ui.sql | 13 +++ 8 files changed, 281 insertions(+) create mode 100644 deployment/postgres-init-db/sql/init.sql create mode 100644 deployment/postgres-init-db/sql/init_hasura.sql create mode 100644 deployment/postgres-init-db/sql/init_merlin.sql create mode 100644 deployment/postgres-init-db/sql/init_permissions.sql create mode 100644 deployment/postgres-init-db/sql/init_scheduler.sql create mode 100644 deployment/postgres-init-db/sql/init_sequencing.sql create mode 100644 deployment/postgres-init-db/sql/init_tags.sql create mode 100644 deployment/postgres-init-db/sql/init_ui.sql diff --git a/deployment/postgres-init-db/sql/init.sql b/deployment/postgres-init-db/sql/init.sql new file mode 100644 index 0000000000..29a6b4dc11 --- /dev/null +++ b/deployment/postgres-init-db/sql/init.sql @@ -0,0 +1,46 @@ +/* + The order of inclusion is important! + - Types must be loaded before usage in tables or function returns + - Tables must be loaded before being referenced by foreign keys. + - Functions must be loaded before they're used in triggers, but can be loaded after any functions that call them. 
+ - Views must be loaded after all their dependent tables and functions + */ +begin; + -- Create Non-Public Schemas + \ir schemas.sql + + -- Migrations + \ir tables/migrations/schema_migrations.sql + \ir applied_migrations.sql + + -- Util Functions + \ir functions/util_functions/shared_update_functions.sql + + -- Permissions + \ir init_permissions.sql + + -- Tags Part 1 (Objects created here due to dependency in Merlin schema) + \ir tables/tags/tags.sql + \ir functions/tags/get_tags.sql + + -- Merlin + \ir init_merlin.sql + + -- Scheduling + \ir init_scheduler.sql + + -- Sequencing + \ir init_sequencing.sql + + -- UI + \ir init_ui.sql + + -- Tags + \ir init_tags.sql + + -- Hasura + \ir init_hasura.sql + + -- Preload Data + \ir default_user_roles.sql; +end; diff --git a/deployment/postgres-init-db/sql/init_hasura.sql b/deployment/postgres-init-db/sql/init_hasura.sql new file mode 100644 index 0000000000..b9d4150570 --- /dev/null +++ b/deployment/postgres-init-db/sql/init_hasura.sql @@ -0,0 +1,16 @@ +/* + The order of inclusion is important! + - Types must be loaded before usage in tables or function returns + - Tables must be loaded before being referenced by foreign keys. + - Functions must be loaded before they're used in triggers, but can be loaded after any functions that call them. + - Views must be loaded after all their dependent tables and functions + */ +begin; + -- Functions + \ir functions/hasura/activity_preset_functions.sql + \ir functions/hasura/delete_anchor_functions.sql + \ir functions/hasura/hasura_functions.sql + \ir functions/hasura/plan_branching_functions.sql + \ir functions/hasura/plan_merge_functions.sql + \ir functions/hasura/snapshot_functions.sql +end; diff --git a/deployment/postgres-init-db/sql/init_merlin.sql b/deployment/postgres-init-db/sql/init_merlin.sql new file mode 100644 index 0000000000..e0fdb3835f --- /dev/null +++ b/deployment/postgres-init-db/sql/init_merlin.sql @@ -0,0 +1,97 @@ +/* + The order of inclusion is important! + - Types must be loaded before usage in tables or function returns + - Tables must be loaded before being referenced by foreign keys. + - Functions must be loaded before they're used in triggers, but can be loaded after any functions that call them. 
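A side note on the consolidated layout this init script sets up: each subsystem's objects end up in a dedicated schema (sequencing, ui, migrations, util_functions, and others created by schemas.sql, which is not shown in this patch) rather than in separate databases. A quick, illustrative sanity check after init.sql has run is a generic PostgreSQL query like the one below; it is not part of the patch itself:

-- Illustrative check, not part of the patch: list the non-system schemas
-- created by schemas.sql / init.sql in the consolidated database.
select schema_name
from information_schema.schemata
where schema_name not in ('public', 'information_schema')
  and schema_name not like 'pg\_%'
order by schema_name;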
+ - Views must be loaded after all their dependent tables and functions + */ +begin; + -- Domain Types + \ir types/merlin/merlin-arguments.sql + \ir types/merlin/activity-directive-metadata.sql + \ir types/merlin/plan-merge-types.sql + + ------------ + -- Tables + -- Uploaded files (JARs or simulation input files) + \ir tables/merlin/uploaded_file.sql + + -- Mission Model + \ir tables/merlin/mission_model.sql + \ir tables/merlin/mission_model_parameters.sql + \ir tables/merlin/activity_type.sql + \ir tables/merlin/resource_type.sql + + -- Plan + \ir tables/merlin/plan.sql + \ir tables/merlin/plan_collaborators.sql + + -- Activity Directives + \ir tables/merlin/activity_directive/activity_directive_metadata_schema.sql + \ir tables/merlin/activity_directive/activity_directive.sql + \ir tables/merlin/activity_directive/activity_directive_changelog.sql + \ir tables/merlin/activity_directive/activity_directive_validations.sql + \ir tables/merlin/activity_directive/anchor_validation_status.sql + \ir tables/merlin/activity_directive/activity_presets.sql + \ir tables/merlin/activity_directive/preset_to_directive.sql + + -- Datasets + \ir tables/merlin/dataset/dataset.sql + \ir tables/merlin/dataset/event.sql + \ir tables/merlin/dataset/topic.sql + \ir tables/merlin/dataset/span.sql + \ir tables/merlin/dataset/profile.sql + \ir tables/merlin/dataset/profile_segment.sql + + -- Simulation + \ir tables/merlin/simulation/simulation_template.sql + \ir tables/merlin/simulation/simulation.sql + \ir tables/merlin/simulation/simulation_dataset.sql + \ir tables/merlin/simulation/simulation_extent.sql + + -- External Datasets + \ir tables/merlin/plan_dataset.sql + + -- Constraints + \ir tables/merlin/constraints/constraint_metadata.sql + \ir tables/merlin/constraints/constraint_definition.sql + \ir tables/merlin/constraints/constraint_model_specification.sql + \ir tables/merlin/constraints/constraint_specification.sql + \ir tables/merlin/constraints/constraint_run.sql + + -- Snapshots + \ir tables/merlin/snapshot/plan_snapshot.sql + \ir tables/merlin/snapshot/plan_snapshot_parent.sql + \ir tables/merlin/snapshot/plan_latest_snapshot.sql + \ir tables/merlin/snapshot/plan_snapshot_activities.sql + \ir tables/merlin/snapshot/preset_to_snapshot_directive.sql + + -- Merging + \ir tables/merlin/merging/merge_request.sql + \ir tables/merlin/merging/merge_comments.sql + \ir tables/merlin/merging/merge_staging_area.sql + \ir tables/merlin/merging/conflicting_activities.sql + + ------------ + -- Functions + \ir functions/merlin/reanchoring_functions.sql + + -- Snapshots + \ir functions/merlin/snapshots/create_snapshot.sql + \ir functions/merlin/snapshots/plan_history_functions.sql + \ir functions/merlin/snapshots/restore_from_snapshot.sql + + -- Merging + \ir functions/merlin/merging/plan_locked_exception.sql + \ir functions/merlin/merging/duplicate_plan.sql + \ir functions/merlin/merging/get_merge_base.sql + \ir functions/merlin/merging/merge_request_state_functions.sql + \ir functions/merlin/merging/begin_merge.sql + \ir functions/merlin/merging/commit_merge.sql + + ------------ + -- Views + \ir views/merlin/activity_directive_extended.sql + \ir views/merlin/simulated_activity.sql + \ir views/merlin/resource_profile.sql +end; diff --git a/deployment/postgres-init-db/sql/init_permissions.sql b/deployment/postgres-init-db/sql/init_permissions.sql new file mode 100644 index 0000000000..5d412e1562 --- /dev/null +++ b/deployment/postgres-init-db/sql/init_permissions.sql @@ -0,0 +1,26 @@ +/* + The order of inclusion is 
important! + - Types must be loaded before usage in tables or function returns + - Tables must be loaded before being referenced by foreign keys. + - Functions must be loaded before they're used in triggers, but can be loaded after any functions that call them. + - Views must be loaded after all their dependent tables and functions + */ +begin; + -- Domain types + \ir types/permissions/permissions.sql + + -- Tables + \ir tables/permissions/user_roles.sql + \ir tables/permissions/user_role_permission.sql + \ir tables/permissions/users.sql + \ir tables/permissions/users_allowed_roles.sql + + -- Views + \ir views/permissions/users_and_roles.sql + + -- Functions + \ir functions/permissions/get_role.sql + \ir functions/permissions/get_function_permissions.sql + \ir functions/permissions/check_general_permissions.sql + \ir functions/permissions/merge_permissions.sql +end; diff --git a/deployment/postgres-init-db/sql/init_scheduler.sql b/deployment/postgres-init-db/sql/init_scheduler.sql new file mode 100644 index 0000000000..a28d050b0a --- /dev/null +++ b/deployment/postgres-init-db/sql/init_scheduler.sql @@ -0,0 +1,30 @@ +/* + The order of inclusion is important! + - Types must be loaded before usage in tables or function returns + - Tables must be loaded before being referenced by foreign keys. + - Functions must be loaded before they're used in triggers, but can be loaded after any functions that call them. + - Views must be loaded after all their dependent tables and functions + */ +begin; + -- Tables + -- Scheduling Goals + \ir tables/scheduler/scheduling_goal_metadata.sql + \ir tables/scheduler/scheduling_goal_definition.sql + + -- Scheduling Conditions + \ir tables/scheduler/scheduling_condition_metadata.sql + \ir tables/scheduler/scheduling_condition_definition.sql + + -- Scheduling Specification + \ir tables/scheduler/scheduling_specification/scheduling_specification.sql + \ir tables/scheduler/scheduling_specification/scheduling_specification_goals.sql + \ir tables/scheduler/scheduling_specification/scheduling_specification_conditions.sql + \ir tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql + \ir tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql + + -- Scheduling Output + \ir tables/scheduler/scheduling_run/scheduling_request.sql + \ir tables/scheduler/scheduling_run/scheduling_goal_analysis.sql + \ir tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.sql + \ir tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.sql +end; diff --git a/deployment/postgres-init-db/sql/init_sequencing.sql b/deployment/postgres-init-db/sql/init_sequencing.sql new file mode 100644 index 0000000000..81f18bfbaa --- /dev/null +++ b/deployment/postgres-init-db/sql/init_sequencing.sql @@ -0,0 +1,24 @@ +/* + The order of inclusion is important! + - Types must be loaded before usage in tables or function returns + - Tables must be loaded before being referenced by foreign keys. + - Functions must be loaded before they're used in triggers, but can be loaded after any functions that call them. 
+ - Views must be loaded after all their dependent tables and functions + */ +begin; + -- Tables + \ir tables/sequencing/command_dictionary.sql + \ir tables/sequencing/expansion_set.sql + \ir tables/sequencing/expansion_rule.sql + \ir tables/sequencing/expansion_set_to_rule.sql + \ir tables/sequencing/expansion_run.sql + \ir tables/sequencing/activity_instance_commands.sql + \ir tables/sequencing/sequence.sql + \ir tables/sequencing/sequence_to_simulated_activity.sql + \ir tables/sequencing/user_sequence.sql + \ir tables/sequencing/expanded_sequences.sql + + -- Views + \ir views/sequencing/expansion_set_rule_view.sql + \ir views/sequencing/rule_expansion_set_view.sql +end; diff --git a/deployment/postgres-init-db/sql/init_tags.sql b/deployment/postgres-init-db/sql/init_tags.sql new file mode 100644 index 0000000000..f8a29ff9cc --- /dev/null +++ b/deployment/postgres-init-db/sql/init_tags.sql @@ -0,0 +1,29 @@ +/* + The order of inclusion is important! + - Types must be loaded before usage in tables or function returns + - Tables must be loaded before being referenced by foreign keys. + - Functions must be loaded before they're used in triggers, but can be loaded after any functions that call them. + - Views must be loaded after all their dependent tables and functions + */ +begin; + -- Tables (Tags table must be created before the rest of the tags schema due to a reference in the Merlin schema) + -- Merlin-associated tags + \ir tables/tags/merlin/activity_directive_tags.sql + \ir tables/tags/merlin/constraint_tags.sql + \ir tables/tags/merlin/constraint_definition_tags.sql + \ir tables/tags/merlin/plan_tags.sql + \ir tables/tags/merlin/plan_snapshot_tags.sql + \ir tables/tags/merlin/snapshot_activity_tags.sql + + -- Scheduler-associated tags + \ir tables/tags/scheduling/scheduling_goal_tags.sql + \ir tables/tags/scheduling/scheduling_goal_definition_tags.sql + \ir tables/tags/scheduling/scheduling_condition_tags.sql + \ir tables/tags/scheduling/scheduling_condition_definition_tags.sql + + -- Sequencing-associated tags + \ir tables/tags/expansion_rule_tags.sql + + -- Functions + \ir functions/tags/get_tag_ids.sql +end; diff --git a/deployment/postgres-init-db/sql/init_ui.sql b/deployment/postgres-init-db/sql/init_ui.sql new file mode 100644 index 0000000000..2acbb28845 --- /dev/null +++ b/deployment/postgres-init-db/sql/init_ui.sql @@ -0,0 +1,13 @@ +/* + The order of inclusion is important! + - Types must be loaded before usage in tables or function returns + - Tables must be loaded before being referenced by foreign keys. + - Functions must be loaded before they're used in triggers, but can be loaded after any functions that call them. 
+ - Views must be loaded after all their dependent tables and functions + */ +begin; + -- Tables + \ir tables/ui/extensions.sql + \ir tables/ui/extension_roles.sql + \ir tables/ui/view.sql +end; From d61bf09f58c61af2e75d9b88cf4086246040fad8 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 4 Mar 2024 11:09:41 -0800 Subject: [PATCH 16/36] Update DB Init script --- deployment/postgres-init-db/init-aerie.sh | 51 +++++------------------ 1 file changed, 10 insertions(+), 41 deletions(-) diff --git a/deployment/postgres-init-db/init-aerie.sh b/deployment/postgres-init-db/init-aerie.sh index c5caac6567..c480b7669b 100755 --- a/deployment/postgres-init-db/init-aerie.sh +++ b/deployment/postgres-init-db/init-aerie.sh @@ -7,54 +7,23 @@ psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname postgres <<-EOSQL CREATE USER "$AERIE_USERNAME" WITH PASSWORD '$AERIE_PASSWORD'; \echo 'Done!' + \echo 'Initializing aerie database...' + CREATE DATABASE aerie OWNER "$AERIE_USERNAME"; + \connect aerie + ALTER SCHEMA public OWNER TO "$AERIE_USERNAME"; + \connect postgres + \echo 'Done!' + \echo 'Initializing aerie_hasura database...' CREATE DATABASE aerie_hasura; GRANT ALL PRIVILEGES ON DATABASE aerie_hasura TO "$AERIE_USERNAME"; \echo 'Done!' - - \echo 'Initializing aerie_merlin database...' - CREATE DATABASE aerie_merlin; - GRANT ALL PRIVILEGES ON DATABASE aerie_merlin TO "$AERIE_USERNAME"; - \echo 'Done!' - - \echo 'Initializing aerie_scheduler database...' - CREATE DATABASE aerie_scheduler; - GRANT ALL PRIVILEGES ON DATABASE aerie_scheduler TO "$AERIE_USERNAME"; - \echo 'Done!' - - \echo 'Initializing aerie_sequencing database...' - CREATE DATABASE aerie_sequencing; - GRANT ALL PRIVILEGES ON DATABASE aerie_sequencing TO "$AERIE_USERNAME"; - \echo 'Done!' - - \echo 'Initializing aerie_ui database...' - CREATE DATABASE aerie_ui; - GRANT ALL PRIVILEGES ON DATABASE aerie_ui TO "$AERIE_USERNAME"; - \echo 'Done!' EOSQL export PGPASSWORD="$AERIE_PASSWORD" -psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie_merlin" <<-EOSQL - \echo 'Initializing aerie_merlin database objects...' - \ir /docker-entrypoint-initdb.d/sql/merlin/init.sql - \echo 'Done!' -EOSQL - -psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie_scheduler" <<-EOSQL - \echo 'Initializing aerie_scheduler database objects...' - \ir /docker-entrypoint-initdb.d/sql/scheduler/init.sql - \echo 'Done!' -EOSQL - -psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie_ui" <<-EOSQL - \echo 'Initializing aerie_ui database objects...' - \ir /docker-entrypoint-initdb.d/sql/ui/init.sql - \echo 'Done!' -EOSQL - -psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie_sequencing" <<-EOSQL - \echo 'Initializing aerie_sequencing database objects...' - \ir /docker-entrypoint-initdb.d/sql/sequencing/init.sql +psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie" <<-EOSQL + \echo 'Initializing aerie database objects...' + \ir /docker-entrypoint-initdb.d/sql/init.sql \echo 'Done!' 
EOSQL From 39ce38c3b91fdbe479335fbd951fa9eb065c2c91 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 4 Mar 2024 11:57:20 -0800 Subject: [PATCH 17/36] Reorganize Hasura Metadata - Update databases.yaml - Reorder contents of schema-level tables.yamls to better resemble schema-level init.sqls --- .../databases/AerieMerlin/tables/tables.yaml | 66 -------- .../AerieScheduler/tables/tables.yaml | 18 -- .../AerieSequencing/tables/tables.yaml | 14 -- .../databases/AerieUI/tables/tables.yaml | 3 - .../hasura/metadata/databases/databases.yaml | 35 +--- .../functions/functions.yaml | 0 .../hasura}/begin_merge_return_value.yaml | 0 .../hasura}/cancel_merge_return_value.yaml | 0 .../hasura}/commit_merge_return_value.yaml | 0 .../hasura}/create_merge_return_value.yaml | 0 .../hasura}/create_snapshot_return_value.yaml | 0 .../hasura}/delete_anchor_return_value.yaml | 0 .../hasura}/deny_merge_return_value.yaml | 0 .../hasura}/duplicate_plan_return_value.yaml | 0 ...t_conflicting_activities_return_value.yaml | 0 ...n_conflicting_activities_return_value.yaml | 0 .../get_plan_history_return_value.yaml | 0 ...resource_at_start_offset_return_value.yaml | 0 .../withdraw_merge_request_return_value.yaml | 0 .../activity_directive.yaml} | 0 .../activity_directive_changelog.yaml} | 0 .../activity_directive_extended.yaml} | 0 .../activity_directive_metadata_schema.yaml} | 0 .../activity_directive_validations.yaml} | 0 .../activity_directive/activity_presets.yaml} | 0 .../anchor_validation_status.yaml} | 0 .../preset_to_directive.yaml} | 0 .../merlin/activity_type.yaml} | 0 .../constraints/constraint_definition.yaml} | 0 .../constraints/constraint_metadata.yaml} | 0 .../constraint_model_specification.yaml} | 0 .../merlin/constraints/constraint_run.yaml} | 0 .../constraint_specification.yaml} | 0 .../merlin/dataset/dataset.yaml} | 0 .../merlin/dataset/event.yaml} | 0 .../merlin/dataset/profile.yaml} | 0 .../merlin/dataset/profile_segment.yaml} | 0 .../dataset/resource_profile_view.yaml} | 0 .../merlin/dataset/span.yaml} | 0 .../merlin/dataset/topic.yaml} | 0 .../merging/conflicting_activities.yaml} | 0 .../merlin/merging/merge_request.yaml} | 0 .../merging/merge_request_comment.yaml} | 0 .../merlin/merging/merge_staging_area.yaml} | 0 .../merlin/mission_model.yaml} | 0 .../merlin/mission_model_parameters.yaml} | 0 .../merlin/plan.yaml} | 0 .../merlin/plan_collaborators.yaml} | 0 .../merlin/plan_dataset.yaml} | 0 .../merlin/resource_type.yaml} | 0 .../simulation/simulated_activity_view.yaml} | 0 .../merlin/simulation/simulation.yaml} | 0 .../simulation/simulation_dataset.yaml} | 0 .../merlin/simulation/simulation_extent.yaml} | 0 .../simulation/simulation_template.yaml} | 0 .../merlin/snapshot/plan_snapshot.yaml} | 0 .../snapshot/plan_snapshot_activities.yaml} | 0 .../merlin/uploaded_file.yaml} | 0 .../permissions}/user_role_permission.yaml | 0 .../permissions}/user_roles.yaml | 0 .../permissions}/users.yaml | 0 .../permissions}/users_allowed_roles.yaml | 0 .../permissions}/users_and_roles_view.yaml | 0 .../scheduling_condition_definition.yaml} | 0 .../scheduling_condition_metadata.yaml} | 0 .../scheduling_goal_definition.yaml} | 0 .../scheduler/scheduling_goal_metadata.yaml} | 0 .../scheduling_goal_analysis.yaml} | 0 ...ing_goal_analysis_created_activities.yaml} | 0 ..._goal_analysis_satisfying_activities.yaml} | 0 .../scheduling_run/scheduling_request.yaml} | 0 ...uling_model_specification_conditions.yaml} | 0 ...scheduling_model_specification_goals.yaml} | 0 .../scheduling_specification.yaml} | 0 
.../scheduling_specification_conditions.yaml} | 0 .../scheduling_specification_goals.yaml} | 0 .../activity_instance_commands.yaml} | 0 .../sequencing/command_dictionary.yaml} | 0 .../sequencing/expanded_sequences.yaml} | 0 .../sequencing/expansion_rule.yaml} | 0 .../sequencing/expansion_run.yaml} | 0 .../sequencing/expansion_set.yaml} | 0 .../sequencing/expansion_set_rule_view.yaml} | 0 .../sequencing/expansion_set_to_rule.yaml} | 0 .../sequencing/rule_expansion_set_view.yaml} | 0 .../sequencing/sequence.yaml} | 0 .../sequence_to_simulated_activity.yaml} | 0 .../sequencing/user_sequence.yaml} | 0 .../metadata/databases/tables/tables.yaml | 160 ++++++++++++++++++ .../tags}/activity_directive_tags.yaml | 0 .../tags}/constraint_definition_tags.yaml | 0 .../tags}/constraint_tags.yaml | 0 .../tags}/expansion_rule_tags.yaml | 0 .../tags}/plan_snapshot_tags.yaml | 0 .../metadata => tables/tags}/plan_tags.yaml | 0 .../scheduling_condition_definition_tags.yaml | 0 .../tags}/scheduling_condition_tags.yaml | 0 .../scheduling_goal_definition_tags.yaml | 0 .../tags}/scheduling_goal_tags.yaml | 0 .../tags}/snapshot_activity_tags.yaml | 0 .../tables/metadata => tables/tags}/tags.yaml | 0 .../ui/extension_roles.yaml} | 0 .../ui/extensions.yaml} | 0 .../public_view.yaml => tables/ui/view.yaml} | 0 104 files changed, 164 insertions(+), 132 deletions(-) delete mode 100644 deployment/hasura/metadata/databases/AerieMerlin/tables/tables.yaml delete mode 100644 deployment/hasura/metadata/databases/AerieScheduler/tables/tables.yaml delete mode 100644 deployment/hasura/metadata/databases/AerieSequencing/tables/tables.yaml delete mode 100644 deployment/hasura/metadata/databases/AerieUI/tables/tables.yaml rename deployment/hasura/metadata/databases/{AerieMerlin => }/functions/functions.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/begin_merge_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/cancel_merge_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/commit_merge_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/create_merge_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/create_snapshot_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/delete_anchor_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/deny_merge_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/duplicate_plan_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/get_conflicting_activities_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/get_non_conflicting_activities_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/get_plan_history_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/resource_at_start_offset_return_value.yaml (100%) rename 
deployment/hasura/metadata/databases/{AerieMerlin/tables/hasura_functions => tables/hasura}/withdraw_merge_request_return_value.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_activity_directive.yaml => tables/merlin/activity_directive/activity_directive.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_activity_directive_changelog.yaml => tables/merlin/activity_directive/activity_directive_changelog.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_activity_directive_extended.yaml => tables/merlin/activity_directive/activity_directive_extended.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_activity_directive_metadata_schema.yaml => tables/merlin/activity_directive/activity_directive_metadata_schema.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_activity_directive_validations.yaml => tables/merlin/activity_directive/activity_directive_validations.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_activity_presets.yaml => tables/merlin/activity_directive/activity_presets.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_anchor_validation_status.yaml => tables/merlin/activity_directive/anchor_validation_status.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_preset_to_directive.yaml => tables/merlin/activity_directive/preset_to_directive.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_activity_type.yaml => tables/merlin/activity_type.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_constraint_definition.yaml => tables/merlin/constraints/constraint_definition.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_constraint_metadata.yaml => tables/merlin/constraints/constraint_metadata.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_constraint_model_specification.yaml => tables/merlin/constraints/constraint_model_specification.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_constraint_run.yaml => tables/merlin/constraints/constraint_run.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_constraint_specification.yaml => tables/merlin/constraints/constraint_specification.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_dataset.yaml => tables/merlin/dataset/dataset.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_event.yaml => tables/merlin/dataset/event.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_profile.yaml => tables/merlin/dataset/profile.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_profile_segment.yaml => tables/merlin/dataset/profile_segment.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_resource_profile_view.yaml => tables/merlin/dataset/resource_profile_view.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_span.yaml => tables/merlin/dataset/span.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_topic.yaml => tables/merlin/dataset/topic.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_conflicting_activities.yaml => 
tables/merlin/merging/conflicting_activities.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_merge_request.yaml => tables/merlin/merging/merge_request.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_merge_request_comment.yaml => tables/merlin/merging/merge_request_comment.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_merge_staging_area.yaml => tables/merlin/merging/merge_staging_area.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_mission_model.yaml => tables/merlin/mission_model.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_mission_model_parameters.yaml => tables/merlin/mission_model_parameters.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_plan.yaml => tables/merlin/plan.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_plan_collaborators.yaml => tables/merlin/plan_collaborators.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_plan_dataset.yaml => tables/merlin/plan_dataset.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_resource_type.yaml => tables/merlin/resource_type.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_simulated_activity_view.yaml => tables/merlin/simulation/simulated_activity_view.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_simulation.yaml => tables/merlin/simulation/simulation.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_simulation_dataset.yaml => tables/merlin/simulation/simulation_dataset.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_simulation_extent.yaml => tables/merlin/simulation/simulation_extent.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_simulation_template.yaml => tables/merlin/simulation/simulation_template.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_plan_snapshot.yaml => tables/merlin/snapshot/plan_snapshot.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_plan_snapshot_activities.yaml => tables/merlin/snapshot/plan_snapshot_activities.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/public_uploaded_file.yaml => tables/merlin/uploaded_file.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/permissions}/user_role_permission.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/permissions}/user_roles.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/permissions}/users.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/permissions}/users_allowed_roles.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/permissions}/users_and_roles_view.yaml (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_condition_definition.yaml => tables/scheduler/scheduling_condition_definition.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_condition_metadata.yaml => tables/scheduler/scheduling_condition_metadata.yaml} (100%) rename 
deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_goal_definition.yaml => tables/scheduler/scheduling_goal_definition.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_goal_metadata.yaml => tables/scheduler/scheduling_goal_metadata.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_goal_analysis.yaml => tables/scheduler/scheduling_run/scheduling_goal_analysis.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_goal_analysis_created_activities.yaml => tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_goal_analysis_satisfying_activities.yaml => tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_request.yaml => tables/scheduler/scheduling_run/scheduling_request.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_model_specification_conditions.yaml => tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_model_specification_goals.yaml => tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_specification.yaml => tables/scheduler/scheduling_specification/scheduling_specification.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_specification_conditions.yaml => tables/scheduler/scheduling_specification/scheduling_specification_conditions.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/public_scheduling_specification_goals.yaml => tables/scheduler/scheduling_specification/scheduling_specification_goals.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_activity_instance_commands.yaml => tables/sequencing/activity_instance_commands.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_command_dictionary.yaml => tables/sequencing/command_dictionary.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_expanded_sequences.yaml => tables/sequencing/expanded_sequences.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_expansion_rule.yaml => tables/sequencing/expansion_rule.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_expansion_run.yaml => tables/sequencing/expansion_run.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_expansion_set.yaml => tables/sequencing/expansion_set.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_expansion_set_rule_view.yaml => tables/sequencing/expansion_set_rule_view.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_expansion_set_to_rule.yaml => tables/sequencing/expansion_set_to_rule.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_rule_expansion_set_view.yaml => tables/sequencing/rule_expansion_set_view.yaml} (100%) rename 
deployment/hasura/metadata/databases/{AerieSequencing/tables/public_sequence.yaml => tables/sequencing/sequence.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_sequence_to_simulated_activity.yaml => tables/sequencing/sequence_to_simulated_activity.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/public_user_sequence.yaml => tables/sequencing/user_sequence.yaml} (100%) create mode 100644 deployment/hasura/metadata/databases/tables/tables.yaml rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/tags}/activity_directive_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/tags}/constraint_definition_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/tags}/constraint_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieSequencing/tables/metadata => tables/tags}/expansion_rule_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/tags}/plan_snapshot_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/tags}/plan_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/metadata => tables/tags}/scheduling_condition_definition_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/metadata => tables/tags}/scheduling_condition_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/metadata => tables/tags}/scheduling_goal_definition_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieScheduler/tables/metadata => tables/tags}/scheduling_goal_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/tags}/snapshot_activity_tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieMerlin/tables/metadata => tables/tags}/tags.yaml (100%) rename deployment/hasura/metadata/databases/{AerieUI/tables/public_extension_roles.yaml => tables/ui/extension_roles.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieUI/tables/public_extensions.yaml => tables/ui/extensions.yaml} (100%) rename deployment/hasura/metadata/databases/{AerieUI/tables/public_view.yaml => tables/ui/view.yaml} (100%) diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/tables.yaml b/deployment/hasura/metadata/databases/AerieMerlin/tables/tables.yaml deleted file mode 100644 index 2dd25b6456..0000000000 --- a/deployment/hasura/metadata/databases/AerieMerlin/tables/tables.yaml +++ /dev/null @@ -1,66 +0,0 @@ -- "!include public_activity_directive.yaml" -- "!include public_activity_directive_changelog.yaml" -- "!include public_activity_directive_extended.yaml" -- "!include public_activity_directive_metadata_schema.yaml" -- "!include public_activity_directive_validations.yaml" -- "!include public_activity_presets.yaml" -- "!include public_activity_type.yaml" -- "!include public_anchor_validation_status.yaml" -- "!include public_conflicting_activities.yaml" -- "!include public_constraint_definition.yaml" -- "!include public_constraint_metadata.yaml" -- "!include public_constraint_model_specification.yaml" -- "!include public_constraint_run.yaml" -- "!include public_constraint_specification.yaml" -- "!include public_dataset.yaml" -- "!include public_event.yaml" -- "!include public_merge_request.yaml" -- "!include public_merge_request_comment.yaml" -- "!include 
public_merge_staging_area.yaml" -- "!include public_mission_model.yaml" -- "!include public_mission_model_parameters.yaml" -- "!include public_plan.yaml" -- "!include public_plan_collaborators.yaml" -- "!include public_plan_dataset.yaml" -- "!include public_plan_snapshot.yaml" -- "!include public_plan_snapshot_activities.yaml" -- "!include public_preset_to_directive.yaml" -- "!include public_profile.yaml" -- "!include public_profile_segment.yaml" -- "!include public_resource_profile_view.yaml" -- "!include public_resource_type.yaml" -- "!include public_simulation.yaml" -- "!include public_simulated_activity_view.yaml" -- "!include public_simulation_dataset.yaml" -- "!include public_simulation_extent.yaml" -- "!include public_simulation_template.yaml" -- "!include public_span.yaml" -- "!include public_topic.yaml" -- "!include public_uploaded_file.yaml" -# Function return values: -- "!include hasura_functions/begin_merge_return_value.yaml" -- "!include hasura_functions/cancel_merge_return_value.yaml" -- "!include hasura_functions/commit_merge_return_value.yaml" -- "!include hasura_functions/create_merge_return_value.yaml" -- "!include hasura_functions/create_snapshot_return_value.yaml" -- "!include hasura_functions/delete_anchor_return_value.yaml" -- "!include hasura_functions/deny_merge_return_value.yaml" -- "!include hasura_functions/duplicate_plan_return_value.yaml" -- "!include hasura_functions/get_conflicting_activities_return_value.yaml" -- "!include hasura_functions/get_non_conflicting_activities_return_value.yaml" -- "!include hasura_functions/get_plan_history_return_value.yaml" -- "!include hasura_functions/resource_at_start_offset_return_value.yaml" -- "!include hasura_functions/withdraw_merge_request_return_value.yaml" -# Metadata -- "!include metadata/tags.yaml" -- "!include metadata/activity_directive_tags.yaml" -- "!include metadata/constraint_definition_tags.yaml" -- "!include metadata/constraint_tags.yaml" -- "!include metadata/plan_snapshot_tags.yaml" -- "!include metadata/plan_tags.yaml" -- "!include metadata/snapshot_activity_tags.yaml" -- "!include metadata/users_allowed_roles.yaml" -- "!include metadata/user_roles.yaml" -- "!include metadata/user_role_permission.yaml" -- "!include metadata/users.yaml" -- "!include metadata/users_and_roles_view.yaml" diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/tables.yaml b/deployment/hasura/metadata/databases/AerieScheduler/tables/tables.yaml deleted file mode 100644 index ec6669c746..0000000000 --- a/deployment/hasura/metadata/databases/AerieScheduler/tables/tables.yaml +++ /dev/null @@ -1,18 +0,0 @@ -- "!include public_scheduling_condition_definition.yaml" -- "!include public_scheduling_condition_metadata.yaml" -- "!include public_scheduling_goal_definition.yaml" -- "!include public_scheduling_goal_metadata.yaml" -- "!include public_scheduling_goal_analysis.yaml" -- "!include public_scheduling_goal_analysis_created_activities.yaml" -- "!include public_scheduling_goal_analysis_satisfying_activities.yaml" -- "!include public_scheduling_model_specification_goals.yaml" -- "!include public_scheduling_model_specification_conditions.yaml" -- "!include public_scheduling_request.yaml" -- "!include public_scheduling_specification.yaml" -- "!include public_scheduling_specification_goals.yaml" -- "!include public_scheduling_specification_conditions.yaml" -# Metadata -- "!include metadata/scheduling_condition_tags.yaml" -- "!include metadata/scheduling_condition_definition_tags.yaml" -- "!include 
metadata/scheduling_goal_tags.yaml" -- "!include metadata/scheduling_goal_definition_tags.yaml" diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/tables.yaml b/deployment/hasura/metadata/databases/AerieSequencing/tables/tables.yaml deleted file mode 100644 index 89960d3875..0000000000 --- a/deployment/hasura/metadata/databases/AerieSequencing/tables/tables.yaml +++ /dev/null @@ -1,14 +0,0 @@ -- "!include public_activity_instance_commands.yaml" -- "!include public_command_dictionary.yaml" -- "!include public_expansion_rule.yaml" -- "!include public_expansion_run.yaml" -- "!include public_expanded_sequences.yaml" -- "!include public_expansion_set_to_rule.yaml" -- "!include public_expansion_set.yaml" -- "!include public_expansion_set_rule_view.yaml" -- "!include public_rule_expansion_set_view.yaml" -- "!include public_sequence.yaml" -- "!include public_sequence_to_simulated_activity.yaml" -- "!include public_user_sequence.yaml" -# Metadata -- "!include metadata/expansion_rule_tags.yaml" diff --git a/deployment/hasura/metadata/databases/AerieUI/tables/tables.yaml b/deployment/hasura/metadata/databases/AerieUI/tables/tables.yaml deleted file mode 100644 index 5adc44d7f2..0000000000 --- a/deployment/hasura/metadata/databases/AerieUI/tables/tables.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- "!include public_extension_roles.yaml" -- "!include public_extensions.yaml" -- "!include public_view.yaml" diff --git a/deployment/hasura/metadata/databases/databases.yaml b/deployment/hasura/metadata/databases/databases.yaml index 18b3b65d8a..ad82a1961d 100644 --- a/deployment/hasura/metadata/databases/databases.yaml +++ b/deployment/hasura/metadata/databases/databases.yaml @@ -1,37 +1,10 @@ -- name: AerieMerlin +- name: Aerie kind: postgres configuration: connection_info: database_url: - from_env: AERIE_MERLIN_DATABASE_URL + from_env: AERIE_DATABASE_URL isolation_level: read-committed use_prepared_statements: false - tables: "!include AerieMerlin/tables/tables.yaml" - functions: "!include AerieMerlin/functions/functions.yaml" -- name: AerieScheduler - kind: postgres - configuration: - connection_info: - database_url: - from_env: AERIE_SCHEDULER_DATABASE_URL - isolation_level: read-committed - use_prepared_statements: false - tables: "!include AerieScheduler/tables/tables.yaml" -- name: AerieSequencing - kind: postgres - configuration: - connection_info: - database_url: - from_env: AERIE_SEQUENCING_DATABASE_URL - isolation_level: read-committed - use_prepared_statements: false - tables: "!include AerieSequencing/tables/tables.yaml" -- name: AerieUI - kind: postgres - configuration: - connection_info: - database_url: - from_env: AERIE_UI_DATABASE_URL - isolation_level: read-committed - use_prepared_statements: false - tables: "!include AerieUI/tables/tables.yaml" + tables: "!include tables/tables.yaml" + functions: "!include functions/functions.yaml" diff --git a/deployment/hasura/metadata/databases/AerieMerlin/functions/functions.yaml b/deployment/hasura/metadata/databases/functions/functions.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/functions/functions.yaml rename to deployment/hasura/metadata/databases/functions/functions.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/begin_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/begin_merge_return_value.yaml similarity index 100% rename from 
deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/begin_merge_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/begin_merge_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/cancel_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/cancel_merge_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/cancel_merge_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/cancel_merge_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/commit_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/commit_merge_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/commit_merge_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/commit_merge_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/create_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/create_merge_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/create_merge_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/create_merge_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/create_snapshot_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/create_snapshot_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/create_snapshot_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/create_snapshot_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/delete_anchor_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/delete_anchor_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/delete_anchor_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/delete_anchor_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/deny_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/deny_merge_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/deny_merge_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/deny_merge_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/duplicate_plan_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/duplicate_plan_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/duplicate_plan_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/duplicate_plan_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/get_conflicting_activities_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/get_conflicting_activities_return_value.yaml similarity index 100% rename from 
deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/get_conflicting_activities_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/get_conflicting_activities_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/get_non_conflicting_activities_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/get_non_conflicting_activities_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/get_non_conflicting_activities_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/get_non_conflicting_activities_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/get_plan_history_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/get_plan_history_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/get_plan_history_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/get_plan_history_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/resource_at_start_offset_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/resource_at_start_offset_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/resource_at_start_offset_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/resource_at_start_offset_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/withdraw_merge_request_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/withdraw_merge_request_return_value.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/hasura_functions/withdraw_merge_request_return_value.yaml rename to deployment/hasura/metadata/databases/tables/hasura/withdraw_merge_request_return_value.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive.yaml rename to deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive_changelog.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive_changelog.yaml rename to deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive_extended.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_extended.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive_extended.yaml rename to deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_extended.yaml diff --git 
a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive_metadata_schema.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_metadata_schema.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive_metadata_schema.yaml rename to deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_metadata_schema.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive_validations.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_validations.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_directive_validations.yaml rename to deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_validations.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_presets.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_presets.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_presets.yaml rename to deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_presets.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_anchor_validation_status.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/anchor_validation_status.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_anchor_validation_status.yaml rename to deployment/hasura/metadata/databases/tables/merlin/activity_directive/anchor_validation_status.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_preset_to_directive.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/preset_to_directive.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_preset_to_directive.yaml rename to deployment/hasura/metadata/databases/tables/merlin/activity_directive/preset_to_directive.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_type.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_activity_type.yaml rename to deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_definition.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_definition.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_definition.yaml rename to deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_definition.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_metadata.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_metadata.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_metadata.yaml rename to deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_metadata.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_model_specification.yaml 
b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_model_specification.yaml rename to deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_run.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_run.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_run.yaml rename to deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_run.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_specification.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_specification.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_constraint_specification.yaml rename to deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_specification.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_dataset.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/dataset.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_dataset.yaml rename to deployment/hasura/metadata/databases/tables/merlin/dataset/dataset.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_event.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/event.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_event.yaml rename to deployment/hasura/metadata/databases/tables/merlin/dataset/event.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_profile.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/profile.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_profile.yaml rename to deployment/hasura/metadata/databases/tables/merlin/dataset/profile.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_profile_segment.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/profile_segment.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_profile_segment.yaml rename to deployment/hasura/metadata/databases/tables/merlin/dataset/profile_segment.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_resource_profile_view.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/resource_profile_view.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_resource_profile_view.yaml rename to deployment/hasura/metadata/databases/tables/merlin/dataset/resource_profile_view.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_span.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/span.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_span.yaml rename to deployment/hasura/metadata/databases/tables/merlin/dataset/span.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_topic.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/topic.yaml similarity index 100% rename 
from deployment/hasura/metadata/databases/AerieMerlin/tables/public_topic.yaml rename to deployment/hasura/metadata/databases/tables/merlin/dataset/topic.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_conflicting_activities.yaml b/deployment/hasura/metadata/databases/tables/merlin/merging/conflicting_activities.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_conflicting_activities.yaml rename to deployment/hasura/metadata/databases/tables/merlin/merging/conflicting_activities.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_merge_request.yaml b/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_merge_request.yaml rename to deployment/hasura/metadata/databases/tables/merlin/merging/merge_request.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_merge_request_comment.yaml b/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request_comment.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_merge_request_comment.yaml rename to deployment/hasura/metadata/databases/tables/merlin/merging/merge_request_comment.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_merge_staging_area.yaml b/deployment/hasura/metadata/databases/tables/merlin/merging/merge_staging_area.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_merge_staging_area.yaml rename to deployment/hasura/metadata/databases/tables/merlin/merging/merge_staging_area.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_mission_model.yaml b/deployment/hasura/metadata/databases/tables/merlin/mission_model.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_mission_model.yaml rename to deployment/hasura/metadata/databases/tables/merlin/mission_model.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_mission_model_parameters.yaml b/deployment/hasura/metadata/databases/tables/merlin/mission_model_parameters.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_mission_model_parameters.yaml rename to deployment/hasura/metadata/databases/tables/merlin/mission_model_parameters.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan.yaml b/deployment/hasura/metadata/databases/tables/merlin/plan.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan.yaml rename to deployment/hasura/metadata/databases/tables/merlin/plan.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan_collaborators.yaml b/deployment/hasura/metadata/databases/tables/merlin/plan_collaborators.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan_collaborators.yaml rename to deployment/hasura/metadata/databases/tables/merlin/plan_collaborators.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan_dataset.yaml b/deployment/hasura/metadata/databases/tables/merlin/plan_dataset.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan_dataset.yaml rename to 
deployment/hasura/metadata/databases/tables/merlin/plan_dataset.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_resource_type.yaml b/deployment/hasura/metadata/databases/tables/merlin/resource_type.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_resource_type.yaml rename to deployment/hasura/metadata/databases/tables/merlin/resource_type.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulated_activity_view.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulated_activity_view.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulated_activity_view.yaml rename to deployment/hasura/metadata/databases/tables/merlin/simulation/simulated_activity_view.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulation.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulation.yaml rename to deployment/hasura/metadata/databases/tables/merlin/simulation/simulation.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulation_dataset.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_dataset.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulation_dataset.yaml rename to deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_dataset.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulation_extent.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_extent.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulation_extent.yaml rename to deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_extent.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulation_template.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_template.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_simulation_template.yaml rename to deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_template.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan_snapshot.yaml b/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan_snapshot.yaml rename to deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan_snapshot_activities.yaml b/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_plan_snapshot_activities.yaml rename to deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/public_uploaded_file.yaml b/deployment/hasura/metadata/databases/tables/merlin/uploaded_file.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/public_uploaded_file.yaml rename to 
deployment/hasura/metadata/databases/tables/merlin/uploaded_file.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/user_role_permission.yaml b/deployment/hasura/metadata/databases/tables/permissions/user_role_permission.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/user_role_permission.yaml rename to deployment/hasura/metadata/databases/tables/permissions/user_role_permission.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/user_roles.yaml b/deployment/hasura/metadata/databases/tables/permissions/user_roles.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/user_roles.yaml rename to deployment/hasura/metadata/databases/tables/permissions/user_roles.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/users.yaml b/deployment/hasura/metadata/databases/tables/permissions/users.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/users.yaml rename to deployment/hasura/metadata/databases/tables/permissions/users.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/users_allowed_roles.yaml b/deployment/hasura/metadata/databases/tables/permissions/users_allowed_roles.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/users_allowed_roles.yaml rename to deployment/hasura/metadata/databases/tables/permissions/users_allowed_roles.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/users_and_roles_view.yaml b/deployment/hasura/metadata/databases/tables/permissions/users_and_roles_view.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/users_and_roles_view.yaml rename to deployment/hasura/metadata/databases/tables/permissions/users_and_roles_view.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_condition_definition.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_definition.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_condition_definition.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_definition.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_condition_metadata.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_metadata.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_condition_metadata.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_metadata.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_definition.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_definition.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_definition.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_definition.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_metadata.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_metadata.yaml similarity index 100% rename from 
deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_metadata.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_metadata.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_analysis.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_analysis.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_analysis_created_activities.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_analysis_created_activities.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_analysis_satisfying_activities.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_goal_analysis_satisfying_activities.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_request.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_request.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_request.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_request.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_model_specification_conditions.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_model_specification_conditions.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_model_specification_goals.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_model_specification_goals.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_specification.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_specification.yaml rename to 
deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_specification_conditions.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_conditions.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_specification_conditions.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_conditions.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_specification_goals.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_goals.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/public_scheduling_specification_goals.yaml rename to deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_goals.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_activity_instance_commands.yaml b/deployment/hasura/metadata/databases/tables/sequencing/activity_instance_commands.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_activity_instance_commands.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/activity_instance_commands.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_command_dictionary.yaml b/deployment/hasura/metadata/databases/tables/sequencing/command_dictionary.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_command_dictionary.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/command_dictionary.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_expanded_sequences.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expanded_sequences.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_expanded_sequences.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/expanded_sequences.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_rule.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_rule.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_run.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_run.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_set.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_set.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml diff --git 
a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_set_rule_view.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_rule_view.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_set_rule_view.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/expansion_set_rule_view.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_set_to_rule.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_to_rule.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_expansion_set_to_rule.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/expansion_set_to_rule.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_rule_expansion_set_view.yaml b/deployment/hasura/metadata/databases/tables/sequencing/rule_expansion_set_view.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_rule_expansion_set_view.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/rule_expansion_set_view.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_sequence.yaml b/deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_sequence.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_sequence_to_simulated_activity.yaml b/deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_sequence_to_simulated_activity.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/public_user_sequence.yaml b/deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/public_user_sequence.yaml rename to deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml diff --git a/deployment/hasura/metadata/databases/tables/tables.yaml b/deployment/hasura/metadata/databases/tables/tables.yaml new file mode 100644 index 0000000000..cca48af2fb --- /dev/null +++ b/deployment/hasura/metadata/databases/tables/tables.yaml @@ -0,0 +1,160 @@ +# Would prefer to do this as one file that delegates to others, as was done with init.sql +# but doing so currently throws an error: "parse-failed: parsing Object failed, expected Object, but encountered Array" + +##################### +#### PERMISSIONS #### +##################### +- "!include permissions/users_allowed_roles.yaml" +- "!include permissions/user_roles.yaml" +- "!include permissions/user_role_permission.yaml" +- "!include permissions/users.yaml" +- "!include permissions/users_and_roles_view.yaml" + +############## +#### TAGS #### +############## +- "!include tags/tags.yaml" + +# Merlin-associated tags +- "!include tags/activity_directive_tags.yaml" +- "!include tags/constraint_definition_tags.yaml" +- "!include tags/constraint_tags.yaml" +- "!include tags/plan_snapshot_tags.yaml" +- "!include tags/plan_tags.yaml" +- "!include 
tags/snapshot_activity_tags.yaml" + +# Scheduler-associated tags +- "!include tags/scheduling_condition_tags.yaml" +- "!include tags/scheduling_condition_definition_tags.yaml" +- "!include tags/scheduling_goal_tags.yaml" +- "!include tags/scheduling_goal_definition_tags.yaml" + +# Sequencing-associated tags +- "!include tags/expansion_rule_tags.yaml" + +################ +#### HASURA #### +################ +- "!include hasura/begin_merge_return_value.yaml" +- "!include hasura/cancel_merge_return_value.yaml" +- "!include hasura/commit_merge_return_value.yaml" +- "!include hasura/create_merge_return_value.yaml" +- "!include hasura/create_snapshot_return_value.yaml" +- "!include hasura/delete_anchor_return_value.yaml" +- "!include hasura/deny_merge_return_value.yaml" +- "!include hasura/duplicate_plan_return_value.yaml" +- "!include hasura/get_conflicting_activities_return_value.yaml" +- "!include hasura/get_non_conflicting_activities_return_value.yaml" +- "!include hasura/get_plan_history_return_value.yaml" +- "!include hasura/resource_at_start_offset_return_value.yaml" +- "!include hasura/withdraw_merge_request_return_value.yaml" + +################ +#### MERLIN #### +################ +# Uploaded files (JARs or simulation input files) +- "!include merlin/uploaded_file.yaml" + +# Mission Model +- "!include merlin/mission_model.yaml" +- "!include merlin/mission_model_parameters.yaml" +- "!include merlin/activity_type.yaml" +- "!include merlin/resource_type.yaml" + +# Plan +- "!include merlin/plan.yaml" +- "!include merlin/plan_collaborators.yaml" + +# Activity Directives +- "!include merlin/activity_directive/activity_directive_metadata_schema.yaml" +- "!include merlin/activity_directive/activity_directive.yaml" +- "!include merlin/activity_directive/activity_directive_changelog.yaml" +- "!include merlin/activity_directive/activity_directive_extended.yaml" +- "!include merlin/activity_directive/activity_directive_validations.yaml" +- "!include merlin/activity_directive/anchor_validation_status.yaml" +- "!include merlin/activity_directive/activity_presets.yaml" +- "!include merlin/activity_directive/preset_to_directive.yaml" + +# Datasets +- "!include merlin/dataset/dataset.yaml" +- "!include merlin/dataset/event.yaml" +- "!include merlin/dataset/topic.yaml" +- "!include merlin/dataset/span.yaml" +- "!include merlin/dataset/profile.yaml" +- "!include merlin/dataset/profile_segment.yaml" +- "!include merlin/dataset/resource_profile_view.yaml" + +# Simulation +- "!include merlin/simulation/simulation_template.yaml" +- "!include merlin/simulation/simulation.yaml" +- "!include merlin/simulation/simulated_activity_view.yaml" +- "!include merlin/simulation/simulation_dataset.yaml" +- "!include merlin/simulation/simulation_extent.yaml" + +# External Datasets +- "!include merlin/plan_dataset.yaml" + +# Constraints +- "!include merlin/constraints/constraint_metadata.yaml" +- "!include merlin/constraints/constraint_definition.yaml" +- "!include merlin/constraints/constraint_model_specification.yaml" +- "!include merlin/constraints/constraint_specification.yaml" +- "!include merlin/constraints/constraint_run.yaml" + +# Snapshots +- "!include merlin/snapshot/plan_snapshot.yaml" +- "!include merlin/snapshot/plan_snapshot_activities.yaml" +- "!include merlin/snapshot/preset_to_snapshot_directive.yaml" + +# Merging +- "!include merlin/merging/merge_request.yaml" +- "!include merlin/merging/merge_request_comment.yaml" +- "!include merlin/merging/merge_staging_area.yaml" +- "!include 
merlin/merging/conflicting_activities.yaml" + +################### +#### SCHEDULER #### +################### +# Scheduling Goals +- "!include scheduler/scheduling_goal_metadata.yaml" +- "!include scheduler/scheduling_goal_definition.yaml" + +# Scheduling Conditions +- "!include scheduler/scheduling_condition_metadata.yaml" +- "!include scheduler/scheduling_condition_definition.yaml" + +# Scheduling Specification +- "!include scheduler/scheduling_specification/scheduling_specification.yaml" +- "!include scheduler/scheduling_specification/scheduling_specification_goals.yaml" +- "!include scheduler/scheduling_specification/scheduling_specification_conditions.yaml" +- "!include scheduler/scheduling_specification/scheduling_model_specification_goals.yaml" +- "!include scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml" + +# Scheduling Output +- "!include scheduler/scheduling_run/scheduling_request.yaml" +- "!include scheduler/scheduling_run/scheduling_goal_analysis.yaml" +- "!include scheduler/scheduling_run/scheduling_goal_analysis_created_activities.yaml" +- "!include scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.yaml" + +#################### +#### SEQUENCING #### +#################### +- "!include sequencing/activity_instance_commands.yaml" +- "!include sequencing/command_dictionary.yaml" +- "!include sequencing/expansion_rule.yaml" +- "!include sequencing/expansion_run.yaml" +- "!include sequencing/expanded_sequences.yaml" +- "!include sequencing/expansion_set_to_rule.yaml" +- "!include sequencing/expansion_set.yaml" +- "!include sequencing/expansion_set_rule_view.yaml" +- "!include sequencing/rule_expansion_set_view.yaml" +- "!include sequencing/sequence.yaml" +- "!include sequencing/sequence_to_simulated_activity.yaml" +- "!include sequencing/user_sequence.yaml" + +############ +#### UI #### +############ +- "!include ui/extension_roles.yaml" +- "!include ui/extensions.yaml" +- "!include ui/view.yaml" diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/activity_directive_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/activity_directive_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/activity_directive_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/activity_directive_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/constraint_definition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/constraint_definition_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/constraint_definition_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/constraint_definition_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/constraint_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/constraint_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/constraint_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/constraint_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieSequencing/tables/metadata/expansion_rule_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieSequencing/tables/metadata/expansion_rule_tags.yaml rename to 
deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/plan_snapshot_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/plan_snapshot_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/plan_snapshot_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/plan_snapshot_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/plan_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/plan_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/plan_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/plan_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/metadata/scheduling_condition_definition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/metadata/scheduling_condition_definition_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/metadata/scheduling_condition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/metadata/scheduling_condition_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/metadata/scheduling_goal_definition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/metadata/scheduling_goal_definition_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieScheduler/tables/metadata/scheduling_goal_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieScheduler/tables/metadata/scheduling_goal_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/snapshot_activity_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/snapshot_activity_tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/snapshot_activity_tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/snapshot_activity_tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/tags.yaml b/deployment/hasura/metadata/databases/tables/tags/tags.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieMerlin/tables/metadata/tags.yaml rename to deployment/hasura/metadata/databases/tables/tags/tags.yaml diff --git a/deployment/hasura/metadata/databases/AerieUI/tables/public_extension_roles.yaml b/deployment/hasura/metadata/databases/tables/ui/extension_roles.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieUI/tables/public_extension_roles.yaml rename to 
deployment/hasura/metadata/databases/tables/ui/extension_roles.yaml diff --git a/deployment/hasura/metadata/databases/AerieUI/tables/public_extensions.yaml b/deployment/hasura/metadata/databases/tables/ui/extensions.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieUI/tables/public_extensions.yaml rename to deployment/hasura/metadata/databases/tables/ui/extensions.yaml diff --git a/deployment/hasura/metadata/databases/AerieUI/tables/public_view.yaml b/deployment/hasura/metadata/databases/tables/ui/view.yaml similarity index 100% rename from deployment/hasura/metadata/databases/AerieUI/tables/public_view.yaml rename to deployment/hasura/metadata/databases/tables/ui/view.yaml From 18262f70c10af098b420f6476db6c30bfb7918f6 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 4 Mar 2024 14:21:20 -0800 Subject: [PATCH 18/36] Update Hasura Metadata - Track `preset_to_snapshot_directive` - Translate manual configurations to use fkeys where possible - Update remote relationships to non-remote relationships - Fixed array relationship `expansion_sets` on `expansion_set_to_rule` that should be an object relationship (BREAKING CHANGE) --- .../databases/functions/functions.yaml | 46 +++++++-------- .../hasura/begin_merge_return_value.yaml | 2 +- .../hasura/cancel_merge_return_value.yaml | 2 +- .../hasura/commit_merge_return_value.yaml | 2 +- .../hasura/create_merge_return_value.yaml | 2 +- .../hasura/create_snapshot_return_value.yaml | 2 +- .../hasura/delete_anchor_return_value.yaml | 2 +- .../hasura/deny_merge_return_value.yaml | 2 +- .../hasura/duplicate_plan_return_value.yaml | 2 +- ...t_conflicting_activities_return_value.yaml | 2 +- ...n_conflicting_activities_return_value.yaml | 2 +- .../hasura/get_plan_history_return_value.yaml | 2 +- ...resource_at_start_offset_return_value.yaml | 2 +- .../withdraw_merge_request_return_value.yaml | 2 +- .../activity_directive.yaml | 57 +++++++++---------- .../activity_directive_changelog.yaml | 6 +- .../activity_directive_extended.yaml | 8 ++- .../activity_directive_metadata_schema.yaml | 4 +- .../activity_directive_validations.yaml | 4 +- .../activity_directive/activity_presets.yaml | 4 +- .../anchor_validation_status.yaml | 4 +- .../preset_to_directive.yaml | 15 ++--- .../tables/merlin/activity_type.yaml | 25 ++++---- .../constraints/constraint_definition.yaml | 10 ++-- .../constraints/constraint_metadata.yaml | 21 ++++--- .../constraint_model_specification.yaml | 8 ++- .../merlin/constraints/constraint_run.yaml | 6 +- .../constraints/constraint_specification.yaml | 4 +- .../tables/merlin/dataset/dataset.yaml | 10 ++-- .../tables/merlin/dataset/event.yaml | 4 +- .../tables/merlin/dataset/profile.yaml | 6 +- .../merlin/dataset/profile_segment.yaml | 4 +- .../merlin/dataset/resource_profile_view.yaml | 8 ++- .../databases/tables/merlin/dataset/span.yaml | 8 ++- .../tables/merlin/dataset/topic.yaml | 6 +- .../merging/conflicting_activities.yaml | 4 +- .../tables/merlin/merging/merge_request.yaml | 22 +++---- .../merlin/merging/merge_request_comment.yaml | 4 +- .../merlin/merging/merge_staging_area.yaml | 4 +- .../tables/merlin/mission_model.yaml | 37 ++++++------ .../merlin/mission_model_parameters.yaml | 4 +- .../databases/tables/merlin/plan.yaml | 39 +++++++------ .../tables/merlin/plan_collaborators.yaml | 4 +- .../databases/tables/merlin/plan_dataset.yaml | 4 +- .../tables/merlin/resource_type.yaml | 4 +- .../simulation/simulated_activity_view.yaml | 10 ++-- .../tables/merlin/simulation/simulation.yaml | 8 ++- 
.../merlin/simulation/simulation_dataset.yaml | 26 ++++----- .../merlin/simulation/simulation_extent.yaml | 4 +- .../simulation/simulation_template.yaml | 4 +- .../tables/merlin/snapshot/plan_snapshot.yaml | 16 ++---- .../snapshot/plan_snapshot_activities.yaml | 6 +- .../preset_to_snapshot_directive.yaml | 34 +++++++++++ .../tables/merlin/uploaded_file.yaml | 4 +- .../permissions/user_role_permission.yaml | 2 +- .../tables/permissions/user_roles.yaml | 2 +- .../databases/tables/permissions/users.yaml | 2 +- .../permissions/users_allowed_roles.yaml | 2 +- .../permissions/users_and_roles_view.yaml | 2 +- .../scheduling_condition_definition.yaml | 10 ++-- .../scheduling_condition_metadata.yaml | 12 ++-- .../scheduler/scheduling_goal_definition.yaml | 12 ++-- .../scheduler/scheduling_goal_metadata.yaml | 14 +++-- .../scheduling_goal_analysis.yaml | 21 +++---- ...ling_goal_analysis_created_activities.yaml | 4 +- ...g_goal_analysis_satisfying_activities.yaml | 4 +- .../scheduling_run/scheduling_request.yaml | 25 ++++---- ...duling_model_specification_conditions.yaml | 23 ++++---- .../scheduling_model_specification_goals.yaml | 23 ++++---- .../scheduling_specification.yaml | 30 +++++----- .../scheduling_specification_conditions.yaml | 4 +- .../scheduling_specification_goals.yaml | 4 +- .../activity_instance_commands.yaml | 23 ++++---- .../tables/sequencing/command_dictionary.yaml | 6 +- .../tables/sequencing/expanded_sequences.yaml | 15 ++--- .../tables/sequencing/expansion_rule.yaml | 8 ++- .../tables/sequencing/expansion_run.yaml | 27 +++++---- .../tables/sequencing/expansion_set.yaml | 27 +++++---- .../sequencing/expansion_set_rule_view.yaml | 6 +- .../sequencing/expansion_set_to_rule.yaml | 24 ++++---- .../sequencing/rule_expansion_set_view.yaml | 6 +- .../databases/tables/sequencing/sequence.yaml | 36 ++++++------ .../sequence_to_simulated_activity.yaml | 34 ++++++----- .../tables/sequencing/user_sequence.yaml | 6 +- .../tables/tags/activity_directive_tags.yaml | 2 +- .../tags/constraint_definition_tags.yaml | 2 +- .../tables/tags/constraint_tags.yaml | 2 +- .../tables/tags/expansion_rule_tags.yaml | 17 +++--- .../tables/tags/plan_snapshot_tags.yaml | 2 +- .../databases/tables/tags/plan_tags.yaml | 2 +- .../scheduling_condition_definition_tags.yaml | 17 +++--- .../tags/scheduling_condition_tags.yaml | 17 +++--- .../tags/scheduling_goal_definition_tags.yaml | 17 +++--- .../tables/tags/scheduling_goal_tags.yaml | 17 +++--- .../tables/tags/snapshot_activity_tags.yaml | 2 +- .../metadata/databases/tables/tags/tags.yaml | 2 +- .../databases/tables/ui/extension_roles.yaml | 4 +- .../databases/tables/ui/extensions.yaml | 6 +- .../metadata/databases/tables/ui/view.yaml | 4 +- 99 files changed, 568 insertions(+), 491 deletions(-) create mode 100644 deployment/hasura/metadata/databases/tables/merlin/snapshot/preset_to_snapshot_directive.yaml diff --git a/deployment/hasura/metadata/databases/functions/functions.yaml b/deployment/hasura/metadata/databases/functions/functions.yaml index 4e47c7e642..5980d9246a 100644 --- a/deployment/hasura/metadata/databases/functions/functions.yaml +++ b/deployment/hasura/metadata/databases/functions/functions.yaml @@ -1,6 +1,6 @@ - function: name: apply_preset_to_activity - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: apply_preset_to_activity @@ -10,7 +10,7 @@ - role: user - function: name: begin_merge - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: begin_merge @@ -20,7 +20,7 @@ - role: 
user - function: name: cancel_merge - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: cancel_merge @@ -30,7 +30,7 @@ - role: user - function: name: create_merge_request - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: create_merge_request @@ -40,7 +40,7 @@ - role: user - function: name: create_snapshot - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: create_snapshot @@ -50,7 +50,7 @@ - role: user - function: name: commit_merge - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: commit_merge @@ -60,7 +60,7 @@ - role: user - function: name: delete_activity_by_pk_reanchor_plan_start - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: delete_activity_by_pk_reanchor_plan_start @@ -70,7 +70,7 @@ - role: user - function: name: delete_activity_by_pk_reanchor_plan_start_bulk - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: delete_activity_by_pk_reanchor_plan_start_bulk @@ -80,7 +80,7 @@ - role: user - function: name: delete_activity_by_pk_reanchor_to_anchor - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: delete_activity_by_pk_reanchor_to_anchor @@ -90,7 +90,7 @@ - role: user - function: name: delete_activity_by_pk_reanchor_to_anchor_bulk - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: delete_activity_by_pk_reanchor_to_anchor_bulk @@ -100,7 +100,7 @@ - role: user - function: name: delete_activity_by_pk_delete_subtree - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: delete_activity_by_pk_delete_subtree @@ -110,7 +110,7 @@ - role: user - function: name: delete_activity_by_pk_delete_subtree_bulk - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: delete_activity_by_pk_delete_subtree_bulk @@ -120,7 +120,7 @@ - role: user - function: name: deny_merge - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: deny_merge @@ -130,7 +130,7 @@ - role: user - function: name: duplicate_plan - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: duplicate_plan @@ -140,7 +140,7 @@ - role: user - function: name: get_conflicting_activities - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: get_conflicting_activities @@ -152,7 +152,7 @@ - role: viewer - function: name: get_non_conflicting_activities - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: get_non_conflicting_activities @@ -164,7 +164,7 @@ - role: viewer - function: name: get_plan_history - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: get_plan_history @@ -174,7 +174,7 @@ - role: user - function: name: get_resources_at_start_offset - schema: hasura_functions + schema: hasura configuration: custom_name: getResourcesAtStartOffset permissions: @@ -183,7 +183,7 @@ - role: viewer - function: name: restore_activity_changelog - schema: hasura_functions + schema: hasura configuration: custom_name: restoreActivityFromChangelog session_argument: hasura_session @@ -192,7 +192,7 @@ - role: user - function: name: restore_from_snapshot - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: restore_from_snapshot @@ -202,7 +202,7 @@ - role: user - function: name: 
set_resolution - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: set_resolution @@ -212,7 +212,7 @@ - role: user - function: name: set_resolution_bulk - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: set_resolution_bulk @@ -222,7 +222,7 @@ - role: user - function: name: withdraw_merge_request - schema: hasura_functions + schema: hasura configuration: custom_root_fields: function: withdraw_merge_request diff --git a/deployment/hasura/metadata/databases/tables/hasura/begin_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/begin_merge_return_value.yaml index 0c5c08ee6a..6487bb1640 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/begin_merge_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/begin_merge_return_value.yaml @@ -1,6 +1,6 @@ table: name: begin_merge_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/cancel_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/cancel_merge_return_value.yaml index 07d22e571a..e7c4e90f2a 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/cancel_merge_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/cancel_merge_return_value.yaml @@ -1,6 +1,6 @@ table: name: cancel_merge_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/commit_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/commit_merge_return_value.yaml index 9f6a5bc1ed..e90327f847 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/commit_merge_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/commit_merge_return_value.yaml @@ -1,6 +1,6 @@ table: name: commit_merge_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/create_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/create_merge_return_value.yaml index 3126574cc0..aa2a9689bd 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/create_merge_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/create_merge_return_value.yaml @@ -1,6 +1,6 @@ table: name: create_merge_request_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/create_snapshot_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/create_snapshot_return_value.yaml index 0d1547f26e..58c33454e1 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/create_snapshot_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/create_snapshot_return_value.yaml @@ -1,6 +1,6 @@ table: name: create_snapshot_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/delete_anchor_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/delete_anchor_return_value.yaml index c5a33d4a39..0824fcbdd2 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/delete_anchor_return_value.yaml +++ 
b/deployment/hasura/metadata/databases/tables/hasura/delete_anchor_return_value.yaml @@ -1,6 +1,6 @@ table: name: delete_anchor_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/deny_merge_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/deny_merge_return_value.yaml index af7d959883..7d929113af 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/deny_merge_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/deny_merge_return_value.yaml @@ -1,6 +1,6 @@ table: name: deny_merge_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/duplicate_plan_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/duplicate_plan_return_value.yaml index 97f26c01c2..52a63f53a9 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/duplicate_plan_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/duplicate_plan_return_value.yaml @@ -1,6 +1,6 @@ table: name: duplicate_plan_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/get_conflicting_activities_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/get_conflicting_activities_return_value.yaml index f00c9f5ac6..d06f90a646 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/get_conflicting_activities_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/get_conflicting_activities_return_value.yaml @@ -1,6 +1,6 @@ table: name: get_conflicting_activities_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/get_non_conflicting_activities_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/get_non_conflicting_activities_return_value.yaml index 3573f18720..6d4185dfb7 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/get_non_conflicting_activities_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/get_non_conflicting_activities_return_value.yaml @@ -1,6 +1,6 @@ table: name: get_non_conflicting_activities_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/get_plan_history_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/get_plan_history_return_value.yaml index d5979f4f49..1e6d79fa9e 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/get_plan_history_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/get_plan_history_return_value.yaml @@ -1,6 +1,6 @@ table: name: get_plan_history_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/resource_at_start_offset_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/resource_at_start_offset_return_value.yaml index 86f0a8148d..a66a43320c 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/resource_at_start_offset_return_value.yaml +++ 
b/deployment/hasura/metadata/databases/tables/hasura/resource_at_start_offset_return_value.yaml @@ -1,6 +1,6 @@ table: name: resource_at_start_offset_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/hasura/withdraw_merge_request_return_value.yaml b/deployment/hasura/metadata/databases/tables/hasura/withdraw_merge_request_return_value.yaml index 5518bdeb35..451e744986 100644 --- a/deployment/hasura/metadata/databases/tables/hasura/withdraw_merge_request_return_value.yaml +++ b/deployment/hasura/metadata/databases/tables/hasura/withdraw_merge_request_return_value.yaml @@ -1,6 +1,6 @@ table: name: withdraw_merge_request_return_value - schema: hasura_functions + schema: hasura select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive.yaml index c4e2bf5cf1..252e00d129 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive.yaml @@ -1,6 +1,8 @@ table: name: activity_directive - schema: public + schema: merlin +configuration: + custom_name: "activity_directive" object_relationships: - name: plan using: @@ -13,7 +15,7 @@ object_relationships: - plan_id table: name: activity_directive_validations - schema: public + schema: merlin - name: anchor_validations using: foreign_key_constraint_on: @@ -22,7 +24,7 @@ object_relationships: - plan_id table: name: anchor_validation_status - schema: public + schema: merlin - name: applied_preset using: foreign_key_constraint_on: @@ -31,48 +33,43 @@ object_relationships: - plan_id table: name: preset_to_directive - schema: public + schema: merlin +- name: source_scheduling_goal + using: + manual_configuration: + column_mapping: + source_scheduling_goal_id: id + remote_table: + name: scheduling_goal_metadata + schema: scheduler array_relationships: - name: simulated_activities using: manual_configuration: remote_table: - schema: public + schema: merlin name: simulated_activity insertion_order: null column_mapping: id: directive_id - name: tags using: - manual_configuration: - insertion_order: null - remote_table: + foreign_key_constraint_on: + columns: + - directive_id + - plan_id + table: name: activity_directive_tags - schema: metadata - column_mapping: - id: directive_id - plan_id: plan_id + schema: tags - name: activity_directive_changelog using: - manual_configuration: - insertion_order: null - remote_table: - name: activity_directive_changelog - schema: public - column_mapping: - id: activity_directive_id - plan_id: plan_id -remote_relationships: -- name: source_scheduling_goal - definition: - to_source: - relationship_type: object - source: AerieScheduler + foreign_key_constraint_on: + columns: + - activity_directive_id + - plan_id table: - schema: public - name: scheduling_goal_metadata - field_mapping: - source_scheduling_goal_id: id + name: activity_directive_changelog + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml index a4d4a518d6..97020bc737 100644 --- 
a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml @@ -1,13 +1,15 @@ table: name: activity_directive_changelog - schema: public + schema: merlin +configuration: + custom_name: "activity_directive_changelog" object_relationships: - name: activity_directive using: manual_configuration: remote_table: name: activity_directive - schema: public + schema: merlin column_mapping: plan_id: plan_id activity_directive_id: id diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_extended.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_extended.yaml index 840f249789..b917e31299 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_extended.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_extended.yaml @@ -1,20 +1,22 @@ table: name: activity_directive_extended - schema: public + schema: merlin +configuration: + custom_name: "activity_directive_extended" object_relationships: - name: anchoring_activity using: manual_configuration: remote_table: name: activity_directive_extended - schema: public + schema: merlin column_mapping: id: anchor_id - name: activity_directive using: manual_configuration: remote_table: - schema: public + schema: merlin name: activity_directive insertion_order: null column_mapping: diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_metadata_schema.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_metadata_schema.yaml index 7b2a7b5df6..9b4ab68f95 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_metadata_schema.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_metadata_schema.yaml @@ -1,6 +1,8 @@ table: name: activity_directive_metadata_schema - schema: public + schema: merlin +configuration: + custom_name: "activity_directive_metadata_schema" select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_validations.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_validations.yaml index bd10086cb6..2912899fb9 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_validations.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_validations.yaml @@ -1,6 +1,8 @@ table: name: activity_directive_validations - schema: public + schema: merlin +configuration: + custom_name: "activity_directive_validations" select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_presets.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_presets.yaml index 3572d302ea..ea9740fd75 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_presets.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_presets.yaml @@ -1,6 +1,8 @@ table: name: activity_presets - schema: public + schema: merlin +configuration: + custom_name: "activity_presets" 
select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/anchor_validation_status.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/anchor_validation_status.yaml index 10e6900545..ee30421f82 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/anchor_validation_status.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/anchor_validation_status.yaml @@ -1,6 +1,8 @@ table: name: anchor_validation_status - schema: public + schema: merlin +configuration: + custom_name: "anchor_validation_status" select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/preset_to_directive.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/preset_to_directive.yaml index d66c1463cd..5b797047be 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/preset_to_directive.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/preset_to_directive.yaml @@ -1,17 +1,14 @@ table: name: preset_to_directive - schema: public + schema: merlin +configuration: + custom_name: "preset_to_directive" object_relationships: - name: directive_applied_to using: - manual_configuration: - column_mapping: - activity_id: id - plan_id: plan_id - insertion_order: null - remote_table: - name: activity_directive - schema: public + foreign_key_constraint_on: + - activity_id + - plan_id - name: preset_applied using: foreign_key_constraint_on: preset_id diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml index 9414983078..51c079c238 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml @@ -1,6 +1,8 @@ table: name: activity_type - schema: public + schema: merlin +configuration: + custom_name: "activity_type" object_relationships: - name: subsystem_tag using: @@ -10,23 +12,20 @@ array_relationships: using: manual_configuration: remote_table: - schema: public + schema: merlin name: activity_presets insertion_order: null column_mapping: model_id: model_id name: associated_activity_type -remote_relationships: -- name: expansion_rules - definition: - to_source: - relationship_type: array - source: AerieSequencing - table: - schema: public - name: expansion_rule - field_mapping: - name: activity_type + - name: expansion_rules + using: + manual_configuration: + column_mapping: + name: activity_type + remote_table: + name: expansion_rule + schema: sequencing select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_definition.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_definition.yaml index 8ada554efc..036c67d448 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_definition.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_definition.yaml @@ -1,6 +1,8 @@ table: name: constraint_definition - schema: public + schema: merlin +configuration: + custom_name: "constraint_definition" object_relationships: - name: metadata using: @@ -14,7 +16,7 @@ array_relationships: - constraint_revision table: name: constraint_model_specification - schema: public + schema: 
merlin - name: plans_using using: foreign_key_constraint_on: @@ -23,7 +25,7 @@ array_relationships: - constraint_revision table: name: constraint_specification - schema: public + schema: merlin - name: tags using: foreign_key_constraint_on: @@ -32,7 +34,7 @@ array_relationships: - constraint_revision table: name: constraint_definition_tags - schema: metadata + schema: tags select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_metadata.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_metadata.yaml index ef244a6b12..f147f89668 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_metadata.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_metadata.yaml @@ -1,6 +1,8 @@ table: name: constraint_metadata - schema: public + schema: merlin +configuration: + custom_name: "constraint_metadata" array_relationships: - name: tags using: @@ -8,25 +10,28 @@ array_relationships: column: constraint_id table: name: constraint_tags - schema: metadata + schema: tags - name: versions using: foreign_key_constraint_on: column: constraint_id - table: constraint_definition - schema: public + table: + name: constraint_definition + schema: merlin - name: models_using using: foreign_key_constraint_on: column: constraint_id - table: constraint_model_specification - schema: public + table: + name: constraint_model_specification + schema: merlin - name: plans_using using: foreign_key_constraint_on: column: constraint_id - table: constraint_specification - schema: public + table: + name: constraint_specification + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml index 36a98060ca..050d01f135 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml @@ -1,6 +1,8 @@ table: name: constraint_model_specification - schema: public + schema: merlin +configuration: + custom_name: "constraint_model_specification" object_relationships: - name: model using: @@ -13,7 +15,7 @@ object_relationships: insertion_order: null remote_table: name: constraint_metadata - schema: public + schema: merlin - name: constraint_definition using: manual_configuration: @@ -23,7 +25,7 @@ object_relationships: insertion_order: null remote_table: name: constraint_definition - schema: public + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_run.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_run.yaml index dba2b08d88..750c40296c 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_run.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_run.yaml @@ -1,6 +1,8 @@ table: name: constraint_run - schema: public + schema: merlin +configuration: + custom_name: "constraint_run" object_relationships: - name: constraint_definition using: @@ -15,7 +17,7 @@ object_relationships: insertion_order: null remote_table: name: constraint_metadata - schema: public + schema: merlin - name: simulation_dataset using: 
foreign_key_constraint_on: simulation_dataset_id diff --git a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_specification.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_specification.yaml index 5635e7cce4..ec6b9a392e 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_specification.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_specification.yaml @@ -1,6 +1,8 @@ table: name: constraint_specification - schema: public + schema: merlin +configuration: + custom_name: "constraint_specification" object_relationships: - name: plan using: diff --git a/deployment/hasura/metadata/databases/tables/merlin/dataset/dataset.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/dataset.yaml index 15306edf1b..5e77a14ee6 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/dataset/dataset.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/dataset/dataset.yaml @@ -1,6 +1,8 @@ table: name: dataset - schema: public + schema: merlin +configuration: + custom_name: "dataset" array_relationships: - name: profiles using: @@ -8,7 +10,7 @@ array_relationships: column: dataset_id table: name: profile - schema: public + schema: merlin - name: spans using: manual_configuration: @@ -17,14 +19,14 @@ array_relationships: insertion_order: null remote_table: name: span - schema: public + schema: merlin - name: topics using: foreign_key_constraint_on: column: dataset_id table: name: topic - schema: public + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/dataset/event.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/event.yaml index d04296ef15..9d590fadc2 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/dataset/event.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/dataset/event.yaml @@ -1,6 +1,8 @@ table: name: event - schema: public + schema: merlin +configuration: + custom_name: "event" select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/dataset/profile.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/profile.yaml index 60fda96edd..5a3d7e7669 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/dataset/profile.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/dataset/profile.yaml @@ -1,12 +1,14 @@ table: name: profile - schema: public + schema: merlin +configuration: + custom_name: "profile" array_relationships: - name: profile_segments using: manual_configuration: remote_table: - schema: public + schema: merlin name: profile_segment column_mapping: id: profile_id diff --git a/deployment/hasura/metadata/databases/tables/merlin/dataset/profile_segment.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/profile_segment.yaml index d667a7159d..922353a11e 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/dataset/profile_segment.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/dataset/profile_segment.yaml @@ -1,6 +1,8 @@ table: name: profile_segment - schema: public + schema: merlin +configuration: + custom_name: "profile_segment" select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/dataset/resource_profile_view.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/resource_profile_view.yaml index aba2f82d29..481cb08615 
100644 --- a/deployment/hasura/metadata/databases/tables/merlin/dataset/resource_profile_view.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/dataset/resource_profile_view.yaml @@ -1,13 +1,15 @@ table: name: resource_profile - schema: public + schema: merlin +configuration: + custom_name: "resource_profile" object_relationships: - name: dataset using: manual_configuration: remote_table: name: dataset - schema: public + schema: merlin column_mapping: dataset_id: id - name: profile @@ -15,7 +17,7 @@ object_relationships: manual_configuration: remote_table: name: profile - schema: public + schema: merlin column_mapping: profile_id: id select_permissions: diff --git a/deployment/hasura/metadata/databases/tables/merlin/dataset/span.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/span.yaml index 6e05c63910..7d286e08c8 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/dataset/span.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/dataset/span.yaml @@ -1,6 +1,8 @@ table: name: span - schema: public + schema: merlin +configuration: + custom_name: "span" object_relationships: - name: span using: @@ -11,7 +13,7 @@ object_relationships: insertion_order: null remote_table: name: span - schema: public + schema: merlin array_relationships: - name: spans using: @@ -22,7 +24,7 @@ array_relationships: insertion_order: null remote_table: name: span - schema: public + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/dataset/topic.yaml b/deployment/hasura/metadata/databases/tables/merlin/dataset/topic.yaml index 5f94781b10..93289e072b 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/dataset/topic.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/dataset/topic.yaml @@ -1,12 +1,14 @@ table: name: topic - schema: public + schema: merlin +configuration: + custom_name: "topic" array_relationships: - name: events using: manual_configuration: remote_table: - schema: public + schema: merlin name: event column_mapping: dataset_id: dataset_id diff --git a/deployment/hasura/metadata/databases/tables/merlin/merging/conflicting_activities.yaml b/deployment/hasura/metadata/databases/tables/merlin/merging/conflicting_activities.yaml index 00068db3bb..a6bcec71ea 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/merging/conflicting_activities.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/merging/conflicting_activities.yaml @@ -1,6 +1,8 @@ table: name: conflicting_activities - schema: public + schema: merlin +configuration: + custom_name: "conflicting_activities" object_relationships: - name: merge_request using: diff --git a/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request.yaml b/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request.yaml index ae24d01e2a..fc3717d342 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request.yaml @@ -1,6 +1,8 @@ table: name: merge_request - schema: public + schema: merlin +configuration: + custom_name: "merge_request" object_relationships: - name: plan_receiving_changes using: @@ -10,7 +12,7 @@ object_relationships: insertion_order: null remote_table: name: plan - schema: public + schema: merlin - name: plan_snapshot_supplying_changes using: manual_configuration: @@ -19,31 +21,29 @@ object_relationships: insertion_order: null remote_table: name: 
plan_snapshot - schema: public + schema: merlin array_relationships: - name: comment using: - manual_configuration: - column_mapping: - id: merge_request_id - insertion_order: null - remote_table: + foreign_key_constraint_on: + column: merge_request_id + table: name: merge_request_comment - schema: public + schema: merlin - name: conflicting_activities using: foreign_key_constraint_on: column: merge_request_id table: name: conflicting_activities - schema: public + schema: merlin - name: staged_activities using: foreign_key_constraint_on: column: merge_request_id table: name: merge_staging_area - schema: public + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request_comment.yaml b/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request_comment.yaml index 7a4d4d0a77..0eeea2fe4f 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request_comment.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/merging/merge_request_comment.yaml @@ -1,6 +1,8 @@ table: name: merge_request_comment - schema: public + schema: merlin +configuration: + custom_name: "merge_request_comment" object_relationships: - name: merge_request using: diff --git a/deployment/hasura/metadata/databases/tables/merlin/merging/merge_staging_area.yaml b/deployment/hasura/metadata/databases/tables/merlin/merging/merge_staging_area.yaml index 4f227942c6..cbe6b75734 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/merging/merge_staging_area.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/merging/merge_staging_area.yaml @@ -1,6 +1,8 @@ table: name: merge_staging_area - schema: public + schema: merlin +configuration: + custom_name: "merge_staging_area" object_relationships: - name: merge_request using: diff --git a/deployment/hasura/metadata/databases/tables/merlin/mission_model.yaml b/deployment/hasura/metadata/databases/tables/merlin/mission_model.yaml index 736ca51e21..cdbb5b3115 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/mission_model.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/mission_model.yaml @@ -1,6 +1,8 @@ table: name: mission_model - schema: public + schema: merlin +configuration: + custom_name: "mission_model" object_relationships: - name: parameters using: @@ -8,7 +10,7 @@ object_relationships: column: model_id table: name: mission_model_parameters - schema: public + schema: merlin - name: uploaded_file using: foreign_key_constraint_on: jar_id @@ -19,49 +21,42 @@ array_relationships: column: model_id table: name: activity_type - schema: public + schema: merlin - name: constraint_specification using: foreign_key_constraint_on: column: model_id table: name: constraint_model_specification - schema: public + schema: merlin - name: plans using: foreign_key_constraint_on: column: model_id table: name: plan - schema: public + schema: merlin - name: resource_types using: foreign_key_constraint_on: column: model_id table: name: resource_type - schema: public -remote_relationships: + schema: merlin - name: scheduling_specification_conditions - definition: - to_source: - relationship_type: array - source: AerieScheduler + using: + foreign_key_constraint_on: + column: model_id table: - schema: public name: scheduling_model_specification_conditions - field_mapping: - id: model_id + schema: scheduler - name: scheduling_specification_goals - definition: - to_source: - relationship_type: array - source: AerieScheduler + 
using: + foreign_key_constraint_on: + column: model_id table: - schema: public name: scheduling_model_specification_goals - field_mapping: - id: model_id + schema: scheduler select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/mission_model_parameters.yaml b/deployment/hasura/metadata/databases/tables/merlin/mission_model_parameters.yaml index d57fcae606..0391723e78 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/mission_model_parameters.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/mission_model_parameters.yaml @@ -1,6 +1,8 @@ table: name: mission_model_parameters - schema: public + schema: merlin +configuration: + custom_name: "mission_model_parameters" select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/plan.yaml b/deployment/hasura/metadata/databases/tables/merlin/plan.yaml index 3cb415ff5a..24fe1348eb 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/plan.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/plan.yaml @@ -1,6 +1,8 @@ table: name: plan - schema: public + schema: merlin +configuration: + custom_name: "plan" object_relationships: - name: mission_model using: @@ -13,7 +15,15 @@ object_relationships: insertion_order: null remote_table: name: plan - schema: public + schema: merlin +- name: scheduling_specification + using: + manual_configuration: + column_mapping: + id: plan_id + remote_table: + name: scheduling_specification + schema: scheduler array_relationships: - name: activity_directives using: @@ -21,42 +31,42 @@ array_relationships: column: plan_id table: name: activity_directive - schema: public + schema: merlin - name: constraint_specification using: foreign_key_constraint_on: column: plan_id table: name: constraint_specification - schema: public + schema: merlin - name: collaborators using: foreign_key_constraint_on: column: plan_id table: name: plan_collaborators - schema: public + schema: merlin - name: datasets using: foreign_key_constraint_on: column: plan_id table: name: plan_dataset - schema: public + schema: merlin - name: simulations using: foreign_key_constraint_on: column: plan_id table: name: simulation - schema: public + schema: merlin - name: tags using: foreign_key_constraint_on: column: plan_id table: name: plan_tags - schema: metadata + schema: tags - name: child_plans using: manual_configuration: @@ -65,18 +75,7 @@ array_relationships: insertion_order: null remote_table: name: plan - schema: public -remote_relationships: -- name: scheduling_specification - definition: - to_source: - relationship_type: object - source: AerieScheduler - table: - schema: public - name: scheduling_specification - field_mapping: - id: plan_id + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/plan_collaborators.yaml b/deployment/hasura/metadata/databases/tables/merlin/plan_collaborators.yaml index 7da55b0d85..b5b9db95c1 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/plan_collaborators.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/plan_collaborators.yaml @@ -1,6 +1,8 @@ table: name: plan_collaborators - schema: public + schema: merlin +configuration: + custom_name: "plan_collaborators" object_relationships: - name: plan using: diff --git a/deployment/hasura/metadata/databases/tables/merlin/plan_dataset.yaml b/deployment/hasura/metadata/databases/tables/merlin/plan_dataset.yaml 
index 487917a952..5b178046c4 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/plan_dataset.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/plan_dataset.yaml @@ -1,6 +1,8 @@ table: name: plan_dataset - schema: public + schema: merlin +configuration: + custom_name: "plan_dataset" object_relationships: - name: dataset using: diff --git a/deployment/hasura/metadata/databases/tables/merlin/resource_type.yaml b/deployment/hasura/metadata/databases/tables/merlin/resource_type.yaml index 0a7ede53f6..1c6326de79 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/resource_type.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/resource_type.yaml @@ -1,6 +1,8 @@ table: name: resource_type - schema: public + schema: merlin +configuration: + custom_name: "resource_type" select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulated_activity_view.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulated_activity_view.yaml index 9e8b874131..bf0bd9179e 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulated_activity_view.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulated_activity_view.yaml @@ -1,13 +1,15 @@ table: name: simulated_activity - schema: public + schema: merlin +configuration: + custom_name: "simulated_activity" object_relationships: - name: simulation_dataset using: manual_configuration: remote_table: name: simulation_dataset - schema: public + schema: merlin column_mapping: simulation_dataset_id: id - name: parent_simulated_activity @@ -15,14 +17,14 @@ object_relationships: manual_configuration: remote_table: name: simulated_activity - schema: public + schema: merlin column_mapping: parent_id: id - name: activity_directive using: manual_configuration: remote_table: - schema: public + schema: merlin name: activity_directive insertion_order: null column_mapping: diff --git a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation.yaml index 5f53de140a..028f9215d9 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation.yaml @@ -1,6 +1,8 @@ table: name: simulation - schema: public + schema: merlin +configuration: + custom_name: "simulation" object_relationships: - name: simulation_dataset using: @@ -8,7 +10,7 @@ object_relationships: column: simulation_id table: name: simulation_dataset - schema: public + schema: merlin - name: simulation_template using: foreign_key_constraint_on: simulation_template_id @@ -22,7 +24,7 @@ array_relationships: column: simulation_id table: name: simulation_dataset - schema: public + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_dataset.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_dataset.yaml index bb2d4dda62..5ec86043fb 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_dataset.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_dataset.yaml @@ -1,6 +1,8 @@ table: name: simulation_dataset - schema: public + schema: merlin +configuration: + custom_name: "simulation_dataset" object_relationships: - name: dataset using: @@ -10,32 +12,28 @@ 
object_relationships: foreign_key_constraint_on: simulation_id - name: extent using: - manual_configuration: - remote_table: - schema: public + foreign_key_constraint_on: + column: simulation_dataset_id + table: name: simulation_extent - insertion_order: null - column_mapping: - id: simulation_dataset_id + schema: merlin array_relationships: - name: simulated_activities using: manual_configuration: remote_table: - schema: public + schema: merlin name: simulated_activity insertion_order: null column_mapping: id: simulation_dataset_id - name: constraint_runs using: - manual_configuration: - remote_table: - schema: public + foreign_key_constraint_on: + column: simulation_dataset_id + table: name: constraint_run - insertion_order: null - column_mapping: - id: simulation_dataset_id + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_extent.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_extent.yaml index f87aa8ed8d..8d1ac095d9 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_extent.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_extent.yaml @@ -1,6 +1,8 @@ table: name: simulation_extent - schema: public + schema: merlin +configuration: + custom_name: "simulation_extent" select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_template.yaml b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_template.yaml index 0387ad7756..635290e7f7 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_template.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/simulation/simulation_template.yaml @@ -1,6 +1,8 @@ table: name: simulation_template - schema: public + schema: merlin +configuration: + custom_name: "simulation_template" object_relationships: - name: mission_model using: diff --git a/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot.yaml b/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot.yaml index 9ac1b2213c..0f5f90eb1e 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot.yaml @@ -1,16 +1,12 @@ table: name: plan_snapshot - schema: public + schema: merlin +configuration: + custom_name: "plan_snapshot" object_relationships: - name: plan using: - manual_configuration: - column_mapping: - plan_id: id - insertion_order: null - remote_table: - name: plan - schema: public + foreign_key_constraint_on: plan_id array_relationships: - name: activities using: @@ -18,14 +14,14 @@ array_relationships: column: snapshot_id table: name: plan_snapshot_activities - schema: public + schema: merlin - name: tags using: foreign_key_constraint_on: column: snapshot_id table: name: plan_snapshot_tags - schema: metadata + schema: tags select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml b/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml index 4aeaf79ce7..ed90b16b58 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml @@ -1,6 +1,8 
@@ table: name: plan_snapshot_activities - schema: public + schema: merlin +configuration: + custom_name: "plan_snapshot_activities" object_relationships: - name: snapshot using: @@ -12,7 +14,7 @@ array_relationships: insertion_order: null remote_table: name: snapshot_activity_tags - schema: metadata + schema: tags column_mapping: id: directive_id snapshot_id: snapshot_id diff --git a/deployment/hasura/metadata/databases/tables/merlin/snapshot/preset_to_snapshot_directive.yaml b/deployment/hasura/metadata/databases/tables/merlin/snapshot/preset_to_snapshot_directive.yaml new file mode 100644 index 0000000000..fdad54a195 --- /dev/null +++ b/deployment/hasura/metadata/databases/tables/merlin/snapshot/preset_to_snapshot_directive.yaml @@ -0,0 +1,34 @@ +table: + name: preset_to_snapshot_directive + schema: merlin +configuration: + custom_name: "preset_to_snapshot_directive" +object_relationships: + - name: directive_applied_to + using: + foreign_key_constraint_on: + - activity_id + - snapshot_id + - name: preset_applied + using: + foreign_key_constraint_on: preset_id +select_permissions: + - role: aerie_admin + permission: + columns: '*' + filter: {} + allow_aggregations: true + - role: user + permission: + columns: '*' + filter: {} + allow_aggregations: true + - role: viewer + permission: + columns: '*' + filter: {} + allow_aggregations: true +delete_permissions: + - role: aerie_admin + permission: + filter: {} diff --git a/deployment/hasura/metadata/databases/tables/merlin/uploaded_file.yaml b/deployment/hasura/metadata/databases/tables/merlin/uploaded_file.yaml index e638b2eb9d..12a675b2b4 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/uploaded_file.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/uploaded_file.yaml @@ -1,6 +1,8 @@ table: name: uploaded_file - schema: public + schema: merlin +configuration: + custom_name: "uploaded_file" select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/permissions/user_role_permission.yaml b/deployment/hasura/metadata/databases/tables/permissions/user_role_permission.yaml index 5fe02d7009..2d44b641bd 100644 --- a/deployment/hasura/metadata/databases/tables/permissions/user_role_permission.yaml +++ b/deployment/hasura/metadata/databases/tables/permissions/user_role_permission.yaml @@ -1,6 +1,6 @@ table: name: user_role_permission - schema: metadata + schema: permissions configuration: custom_name: "user_role_permission" select_permissions: diff --git a/deployment/hasura/metadata/databases/tables/permissions/user_roles.yaml b/deployment/hasura/metadata/databases/tables/permissions/user_roles.yaml index 11c1b436a1..d7bcbdbfc9 100644 --- a/deployment/hasura/metadata/databases/tables/permissions/user_roles.yaml +++ b/deployment/hasura/metadata/databases/tables/permissions/user_roles.yaml @@ -1,6 +1,6 @@ table: name: user_roles - schema: metadata + schema: permissions configuration: custom_name: "user_roles" is_enum: true diff --git a/deployment/hasura/metadata/databases/tables/permissions/users.yaml b/deployment/hasura/metadata/databases/tables/permissions/users.yaml index ac5bde541f..c6e6469cf9 100644 --- a/deployment/hasura/metadata/databases/tables/permissions/users.yaml +++ b/deployment/hasura/metadata/databases/tables/permissions/users.yaml @@ -1,6 +1,6 @@ table: name: users - schema: metadata + schema: permissions configuration: custom_name: "users" select_permissions: diff --git a/deployment/hasura/metadata/databases/tables/permissions/users_allowed_roles.yaml 
b/deployment/hasura/metadata/databases/tables/permissions/users_allowed_roles.yaml index 3facef90d4..8d983b8b34 100644 --- a/deployment/hasura/metadata/databases/tables/permissions/users_allowed_roles.yaml +++ b/deployment/hasura/metadata/databases/tables/permissions/users_allowed_roles.yaml @@ -1,6 +1,6 @@ table: name: users_allowed_roles - schema: metadata + schema: permissions configuration: custom_name: "users_allowed_roles" select_permissions: diff --git a/deployment/hasura/metadata/databases/tables/permissions/users_and_roles_view.yaml b/deployment/hasura/metadata/databases/tables/permissions/users_and_roles_view.yaml index 553a95c347..f586c33fb9 100644 --- a/deployment/hasura/metadata/databases/tables/permissions/users_and_roles_view.yaml +++ b/deployment/hasura/metadata/databases/tables/permissions/users_and_roles_view.yaml @@ -1,6 +1,6 @@ table: name: users_and_roles - schema: metadata + schema: permissions configuration: custom_name: "users_and_roles" select_permissions: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_definition.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_definition.yaml index 47fc8119c6..de59117702 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_definition.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_definition.yaml @@ -1,6 +1,8 @@ table: name: scheduling_condition_definition - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_condition_definition" object_relationships: - name: metadata using: @@ -14,7 +16,7 @@ array_relationships: - condition_revision table: name: scheduling_condition_definition_tags - schema: metadata + schema: tags - name: models_using using: foreign_key_constraint_on: @@ -23,7 +25,7 @@ array_relationships: - condition_revision table: name: scheduling_model_specification_conditions - schema: public + schema: scheduler - name: plans_using using: foreign_key_constraint_on: @@ -32,7 +34,7 @@ array_relationships: - condition_revision table: name: scheduling_specification_conditions - schema: public + schema: scheduler select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_metadata.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_metadata.yaml index 163317aa07..ed57446b04 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_metadata.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_condition_metadata.yaml @@ -1,6 +1,8 @@ table: name: scheduling_condition_metadata - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_condition_metadata" array_relationships: - name: tags using: @@ -8,28 +10,28 @@ array_relationships: column: condition_id table: name: scheduling_condition_tags - schema: metadata + schema: tags - name: versions using: foreign_key_constraint_on: column: condition_id table: name: scheduling_condition_definition - schema: public + schema: scheduler - name: models_using using: foreign_key_constraint_on: column: condition_id table: name: scheduling_model_specification_conditions - schema: public + schema: scheduler - name: plans_using using: foreign_key_constraint_on: column: condition_id table: name: scheduling_specification_conditions - schema: public + schema: scheduler select_permissions: - role: aerie_admin permission: diff --git 
a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_definition.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_definition.yaml index d6cd74353e..5d1185a607 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_definition.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_definition.yaml @@ -1,6 +1,8 @@ table: name: scheduling_goal_definition - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_goal_definition" object_relationships: - name: metadata using: @@ -14,7 +16,7 @@ array_relationships: - goal_revision table: name: scheduling_goal_analysis - schema: public + schema: scheduler - name: tags using: foreign_key_constraint_on: @@ -23,7 +25,7 @@ array_relationships: - goal_revision table: name: scheduling_goal_definition_tags - schema: metadata + schema: tags - name: models_using using: foreign_key_constraint_on: @@ -32,7 +34,7 @@ array_relationships: - goal_revision table: name: scheduling_model_specification_goals - schema: public + schema: scheduler - name: plans_using using: foreign_key_constraint_on: @@ -41,7 +43,7 @@ array_relationships: - goal_revision table: name: scheduling_specification_goals - schema: public + schema: scheduler select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_metadata.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_metadata.yaml index cbd9c47f25..72c843b18e 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_metadata.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_goal_metadata.yaml @@ -1,6 +1,8 @@ table: name: scheduling_goal_metadata - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_goal_metadata" array_relationships: - name: analyses using: @@ -9,35 +11,35 @@ array_relationships: id: goal_id remote_table: name: scheduling_goal_analysis - schema: public + schema: scheduler - name: tags using: foreign_key_constraint_on: column: goal_id table: name: scheduling_goal_tags - schema: metadata + schema: tags - name: versions using: foreign_key_constraint_on: column: goal_id table: name: scheduling_goal_definition - schema: public + schema: scheduler - name: models_using using: foreign_key_constraint_on: column: goal_id table: name: scheduling_model_specification_goals - schema: public + schema: scheduler - name: plans_using using: foreign_key_constraint_on: column: goal_id table: name: scheduling_specification_goals - schema: public + schema: scheduler select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis.yaml index c22c697185..a878d8638c 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis.yaml @@ -1,16 +1,12 @@ table: name: scheduling_goal_analysis - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_goal_analysis" object_relationships: - name: request using: - manual_configuration: - remote_table: - schema: public - name: scheduling_request - insertion_order: null - column_mapping: - analysis_id: analysis_id + foreign_key_constraint_on: 
analysis_id - name: goal_metadata using: manual_configuration: @@ -18,7 +14,7 @@ object_relationships: goal_id: id remote_table: name: scheduling_goal_metadata - schema: public + schema: scheduler - name: goal_definition using: foreign_key_constraint_on: @@ -28,13 +24,12 @@ array_relationships: - name: satisfying_activities using: manual_configuration: - remote_table: - schema: public - name: scheduling_goal_analysis_satisfying_activities - insertion_order: null column_mapping: goal_id: goal_id analysis_id: analysis_id + remote_table: + name: scheduling_goal_analysis_satisfying_activities + schema: scheduler select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.yaml index b947f998b3..6369561464 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_created_activities.yaml @@ -1,6 +1,8 @@ table: name: scheduling_goal_analysis_created_activities - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_goal_analysis_created_activities" object_relationships: - name: analysis using: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.yaml index 2880149ef3..a96afb84bd 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_goal_analysis_satisfying_activities.yaml @@ -1,6 +1,8 @@ table: name: scheduling_goal_analysis_satisfying_activities - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_goal_analysis_satisfying_activities" object_relationships: - name: analysis using: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_request.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_request.yaml index d613f1c75d..e119d75b0e 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_request.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_run/scheduling_request.yaml @@ -1,6 +1,8 @@ table: name: scheduling_request - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_request" object_relationships: - name: scheduling_specification using: @@ -12,32 +14,29 @@ array_relationships: column: analysis_id table: name: scheduling_goal_analysis - schema: public + schema: scheduler - name: satisfying_activities using: foreign_key_constraint_on: column: analysis_id table: name: scheduling_goal_analysis_satisfying_activities - schema: public + schema: scheduler - name: created_activities using: foreign_key_constraint_on: column: analysis_id table: name: scheduling_goal_analysis_created_activities - schema: public -remote_relationships: + schema: scheduler - name: simulation_dataset - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: public - name: simulation_dataset - field_mapping: + using: 
+ manual_configuration: + column_mapping: dataset_id: dataset_id + remote_table: + name: simulation_dataset + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml index 034c84ef91..406c15d96b 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml @@ -1,6 +1,8 @@ table: name: scheduling_model_specification_conditions - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_model_specification_conditions" object_relationships: - name: condition_metadata using: @@ -10,17 +12,14 @@ object_relationships: foreign_key_constraint_on: - condition_id - condition_revision -remote_relationships: -- name: model - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: public - name: mission_model - field_mapping: - model_id: id + - name: model + using: + manual_configuration: + column_mapping: + model_id: id + remote_table: + name: mission_model + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml index 608eef2b06..5501895864 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml @@ -1,6 +1,8 @@ table: name: scheduling_model_specification_goals - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_model_specification_goals" object_relationships: - name: goal_metadata using: @@ -10,17 +12,14 @@ object_relationships: foreign_key_constraint_on: - goal_id - goal_revision -remote_relationships: -- name: model - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: public - name: mission_model - field_mapping: - model_id: id + - name: model + using: + manual_configuration: + column_mapping: + model_id: id + remote_table: + name: mission_model + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml index 5d52fc8fce..661c1fdb5f 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml @@ -1,6 +1,17 @@ table: name: scheduling_specification - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_specification" +object_relationships: +- name: plan + using: + manual_configuration: + column_mapping: + plan_id: id + remote_table: + name: plan + schema: merlin array_relationships: - name: goals 
using: @@ -8,32 +19,21 @@ array_relationships: column: specification_id table: name: scheduling_specification_goals - schema: public + schema: scheduler - name: conditions using: foreign_key_constraint_on: column: specification_id table: name: scheduling_specification_conditions - schema: public + schema: scheduler - name: requests using: foreign_key_constraint_on: column: specification_id table: name: scheduling_request - schema: public -remote_relationships: -- name: plan - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: public - name: plan - field_mapping: - plan_id: id + schema: scheduler select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_conditions.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_conditions.yaml index 2f60212f94..fa3a24863b 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_conditions.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_conditions.yaml @@ -1,6 +1,8 @@ table: name: scheduling_specification_conditions - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_specification_conditions" object_relationships: - name: condition_metadata using: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_goals.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_goals.yaml index 8af1f31c22..20a1b09c00 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_goals.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification_goals.yaml @@ -1,6 +1,8 @@ table: name: scheduling_specification_goals - schema: public + schema: scheduler +configuration: + custom_name: "scheduling_specification_goals" object_relationships: - name: goal_metadata using: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/activity_instance_commands.yaml b/deployment/hasura/metadata/databases/tables/sequencing/activity_instance_commands.yaml index 866349fcb2..01f9d33795 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/activity_instance_commands.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/activity_instance_commands.yaml @@ -1,21 +1,20 @@ table: name: activity_instance_commands - schema: public + schema: sequencing +configuration: + custom_name: "activity_instance_commands" object_relationships: - name: expansion_run using: foreign_key_constraint_on: expansion_run_id -remote_relationships: - - name: activity_instance - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: public - name: span - field_mapping: - activity_instance_id: id +- name: activity_instance + using: + manual_configuration: + column_mapping: + activity_instance_id: id + remote_table: + name: span + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/command_dictionary.yaml b/deployment/hasura/metadata/databases/tables/sequencing/command_dictionary.yaml index 64dd79b562..eabc8512d7 100644 --- 
a/deployment/hasura/metadata/databases/tables/sequencing/command_dictionary.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/command_dictionary.yaml @@ -1,6 +1,8 @@ table: name: command_dictionary - schema: public + schema: sequencing +configuration: + custom_name: "command_dictionary" array_relationships: - name: expansion_sets using: @@ -8,7 +10,7 @@ array_relationships: column: command_dict_id table: name: expansion_set - schema: public + schema: sequencing select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/expanded_sequences.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expanded_sequences.yaml index d6627a35d5..1baed0eab7 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/expanded_sequences.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/expanded_sequences.yaml @@ -1,20 +1,17 @@ table: name: expanded_sequences - schema: public + schema: sequencing +configuration: + custom_name: "expanded_sequences" object_relationships: - name: expansion_run using: foreign_key_constraint_on: expansion_run_id - name: sequence using: - manual_configuration: - remote_table: - name: sequence - schema: public - insertion_order: null - column_mapping: - seq_id: seq_id - simulation_dataset_id: simulation_dataset_id + foreign_key_constraint_on: + - seq_id + - simulation_dataset_id select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml index 1740b72b89..9a8ebdd235 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml @@ -1,13 +1,15 @@ table: name: expansion_rule - schema: public + schema: sequencing +configuration: + custom_name: "expansion_rule" array_relationships: - name: expansion_sets using: manual_configuration: remote_table: name: rule_expansion_set_view - schema: public + schema: sequencing insertion_order: null column_mapping: id: rule_id @@ -16,7 +18,7 @@ array_relationships: manual_configuration: remote_table: name: expansion_rule_tags - schema: metadata + schema: tags insertion_order: null column_mapping: id: rule_id diff --git a/deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml index 2e0a5b4dcc..56d6411ed8 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml @@ -1,10 +1,20 @@ table: name: expansion_run - schema: public + schema: sequencing +configuration: + custom_name: "expansion_run" object_relationships: - name: expansion_set using: foreign_key_constraint_on: expansion_set_id + - name: simulation_dataset + using: + manual_configuration: + column_mapping: + simulation_dataset_id: id + remote_table: + name: simulation_dataset + schema: merlin array_relationships: - name: activity_instance_commands using: @@ -12,25 +22,14 @@ array_relationships: column: expansion_run_id table: name: activity_instance_commands - schema: public + schema: sequencing - name: expanded_sequences using: foreign_key_constraint_on: column: expansion_run_id table: name: expanded_sequences - schema: public -remote_relationships: - - name: simulation_dataset - definition: - to_source: - relationship_type: 
object - source: AerieMerlin - table: - schema: public - name: simulation_dataset - field_mapping: - simulation_dataset_id: id + schema: sequencing select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml index 311c6c0344..fa616a530c 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml @@ -1,10 +1,20 @@ table: name: expansion_set - schema: public + schema: sequencing +configuration: + custom_name: "expansion_set" object_relationships: - name: command_dictionary using: foreign_key_constraint_on: command_dict_id +- name: mission_model + using: + manual_configuration: + column_mapping: + mission_model_id: id + remote_table: + name: mission_model + schema: merlin array_relationships: - name: expansion_runs using: @@ -12,27 +22,16 @@ array_relationships: column: expansion_set_id table: name: expansion_run - schema: public + schema: sequencing - name: expansion_rules using: manual_configuration: remote_table: name: expansion_set_rule_view - schema: public + schema: sequencing insertion_order: null column_mapping: id: set_id -remote_relationships: -- name: mission_model - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: public - name: mission_model - field_mapping: - mission_model_id: id select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_rule_view.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_rule_view.yaml index 4705dd7b5d..9c7ebcb46d 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_rule_view.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_rule_view.yaml @@ -1,13 +1,15 @@ table: name: expansion_set_rule_view - schema: public + schema: sequencing +configuration: + custom_name: "expansion_set_rule_view" array_relationships: - name: expansion_sets using: manual_configuration: remote_table: name: expansion_set - schema: public + schema: sequencing insertion_order: null column_mapping: set_id: id diff --git a/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_to_rule.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_to_rule.yaml index 60f4216e1e..56e257e8b3 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_to_rule.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set_to_rule.yaml @@ -1,25 +1,21 @@ table: name: expansion_set_to_rule - schema: public -array_relationships: -- name: expansion_sets + schema: sequencing +configuration: + custom_name: "expansion_set_to_rule" +object_relationships: +- name: expansion_set using: - manual_configuration: - remote_table: - name: expansion_set - schema: public - insertion_order: null - column_mapping: - set_id: id + foreign_key_constraint_on: set_id +array_relationships: - name: expansion_rules using: manual_configuration: - remote_table: - name: expansion_rule - schema: public - insertion_order: null column_mapping: rule_id: id + remote_table: + name: expansion_rule + schema: sequencing select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/rule_expansion_set_view.yaml 
b/deployment/hasura/metadata/databases/tables/sequencing/rule_expansion_set_view.yaml index 70ebec2876..13bd1efbf3 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/rule_expansion_set_view.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/rule_expansion_set_view.yaml @@ -1,13 +1,15 @@ table: name: rule_expansion_set_view - schema: public + schema: sequencing +configuration: + custom_name: "rule_expansion_set_view" array_relationships: - name: expansion_rules using: manual_configuration: remote_table: name: expansion_rule - schema: public + schema: sequencing insertion_order: null column_mapping: rule_id: id diff --git a/deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml b/deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml index 2e33a62486..dc86292006 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml @@ -1,27 +1,27 @@ table: name: sequence - schema: public -array_relationships: -- name: activity_instance_joins + schema: sequencing +configuration: + custom_name: "sequence" +object_relationships: +- name: simulation_dataset using: manual_configuration: - remote_table: - name: sequence_to_simulated_activity - schema: public column_mapping: - seq_id: seq_id - simulation_dataset_id: simulation_dataset_id -remote_relationships: -- name: simulation_dataset - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: public - name: simulation_dataset - field_mapping: simulation_dataset_id: id + remote_table: + name: simulation_dataset + schema: merlin +array_relationships: +- name: activity_instance_joins + using: + foreign_key_constraint_on: + columns: + - seq_id + - simulation_dataset_id + table: + name: sequence_to_simulated_activity + schema: sequencing select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml b/deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml index ef95a7f4dd..3f68b0cda8 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml @@ -1,27 +1,25 @@ table: name: sequence_to_simulated_activity - schema: public -remote_relationships: + schema: sequencing +configuration: + custom_name: "sequence_to_simulated_activity" +object_relationships: - name: simulation_dataset - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: public - name: simulation_dataset - field_mapping: + using: + manual_configuration: + column_mapping: simulation_dataset_id: id + remote_table: + name: simulation_dataset + schema: merlin - name: simulated_activity - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: public - name: simulated_activity - field_mapping: + using: + manual_configuration: + column_mapping: simulated_activity_id: id + remote_table: + name: simulated_activity + schema: merlin select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml b/deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml index 8890d5da72..af2c6306df 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml +++ 
b/deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml @@ -1,12 +1,14 @@ table: name: user_sequence - schema: public + schema: sequencing +configuration: + custom_name: "user_sequence" object_relationships: - name: command_dictionary using: manual_configuration: remote_table: - schema: public + schema: sequencing name: command_dictionary insertion_order: null column_mapping: diff --git a/deployment/hasura/metadata/databases/tables/tags/activity_directive_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/activity_directive_tags.yaml index b39cc2789c..c506e2bd50 100644 --- a/deployment/hasura/metadata/databases/tables/tags/activity_directive_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/activity_directive_tags.yaml @@ -1,6 +1,6 @@ table: name: activity_directive_tags - schema: metadata + schema: tags configuration: custom_name: "activity_directive_tags" object_relationships: diff --git a/deployment/hasura/metadata/databases/tables/tags/constraint_definition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/constraint_definition_tags.yaml index cefc7b2a8b..291fe991a7 100644 --- a/deployment/hasura/metadata/databases/tables/tags/constraint_definition_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/constraint_definition_tags.yaml @@ -1,6 +1,6 @@ table: name: constraint_definition_tags - schema: metadata + schema: tags configuration: custom_name: "constraint_definition_tags" object_relationships: diff --git a/deployment/hasura/metadata/databases/tables/tags/constraint_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/constraint_tags.yaml index bab682fd06..933d735e11 100644 --- a/deployment/hasura/metadata/databases/tables/tags/constraint_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/constraint_tags.yaml @@ -1,6 +1,6 @@ table: name: constraint_tags - schema: metadata + schema: tags configuration: custom_name: "constraint_tags" object_relationships: diff --git a/deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml index eafd3f5392..7867c51d3e 100644 --- a/deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml @@ -1,23 +1,20 @@ table: name: expansion_rule_tags - schema: metadata + schema: tags configuration: custom_name: "expansion_rule_tags" object_relationships: - name: expansion_rule using: foreign_key_constraint_on: rule_id -remote_relationships: - name: tag - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: metadata - name: tags - field_mapping: + using: + manual_configuration: + column_mapping: tag_id: id + remote_table: + name: tags + schema: tags select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/tags/plan_snapshot_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/plan_snapshot_tags.yaml index 71588a9e73..2db49da21c 100644 --- a/deployment/hasura/metadata/databases/tables/tags/plan_snapshot_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/plan_snapshot_tags.yaml @@ -1,6 +1,6 @@ table: name: plan_snapshot_tags - schema: metadata + schema: tags configuration: custom_name: "plan_snapshot_tags" object_relationships: diff --git a/deployment/hasura/metadata/databases/tables/tags/plan_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/plan_tags.yaml index 
dca7ae1afd..c9ab4b62ce 100644 --- a/deployment/hasura/metadata/databases/tables/tags/plan_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/plan_tags.yaml @@ -1,6 +1,6 @@ table: name: plan_tags - schema: metadata + schema: tags configuration: custom_name: "plan_tags" object_relationships: diff --git a/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml index 142301412f..5dc15bc0b4 100644 --- a/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml @@ -1,6 +1,6 @@ table: name: scheduling_condition_definition_tags - schema: metadata + schema: tags configuration: custom_name: "scheduling_condition_definition_tags" object_relationships: @@ -9,17 +9,14 @@ object_relationships: foreign_key_constraint_on: - condition_id - condition_revision -remote_relationships: - name: tag - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: metadata - name: tags - field_mapping: + using: + manual_configuration: + column_mapping: tag_id: id + remote_table: + name: tags + schema: tags select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml index eab069b8eb..7875b248ec 100644 --- a/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml @@ -1,23 +1,20 @@ table: name: scheduling_condition_tags - schema: metadata + schema: tags configuration: custom_name: "scheduling_condition_tags" object_relationships: - name: condition_metadata using: foreign_key_constraint_on: condition_id -remote_relationships: - name: tag - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: metadata - name: tags - field_mapping: + using: + manual_configuration: + column_mapping: tag_id: id + remote_table: + name: tags + schema: tags select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml index a75ff73f3e..98f7d1a3d2 100644 --- a/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml @@ -1,6 +1,6 @@ table: name: scheduling_goal_definition_tags - schema: metadata + schema: tags configuration: custom_name: "scheduling_goal_definition_tags" object_relationships: @@ -9,17 +9,14 @@ object_relationships: foreign_key_constraint_on: - goal_id - goal_revision -remote_relationships: - name: tag - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: metadata - name: tags - field_mapping: + using: + manual_configuration: + column_mapping: tag_id: id + remote_table: + name: tags + schema: tags select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml index 1a1c49c8c6..ef1da104e4 100644 --- 
a/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml @@ -1,23 +1,20 @@ table: name: scheduling_goal_tags - schema: metadata + schema: tags configuration: custom_name: "scheduling_goal_tags" object_relationships: - name: goal_metadata using: foreign_key_constraint_on: goal_id -remote_relationships: - name: tag - definition: - to_source: - relationship_type: object - source: AerieMerlin - table: - schema: metadata - name: tags - field_mapping: + using: + manual_configuration: + column_mapping: tag_id: id + remote_table: + name: tags + schema: tags select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/tags/snapshot_activity_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/snapshot_activity_tags.yaml index b54dc901df..29a331e901 100644 --- a/deployment/hasura/metadata/databases/tables/tags/snapshot_activity_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/snapshot_activity_tags.yaml @@ -1,6 +1,6 @@ table: name: snapshot_activity_tags - schema: metadata + schema: tags configuration: custom_name: "snapshot_activity_tags" object_relationships: diff --git a/deployment/hasura/metadata/databases/tables/tags/tags.yaml b/deployment/hasura/metadata/databases/tables/tags/tags.yaml index eb89459b47..a37bf51012 100644 --- a/deployment/hasura/metadata/databases/tables/tags/tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/tags.yaml @@ -1,6 +1,6 @@ table: name: tags - schema: metadata + schema: tags configuration: custom_name: "tags" select_permissions: diff --git a/deployment/hasura/metadata/databases/tables/ui/extension_roles.yaml b/deployment/hasura/metadata/databases/tables/ui/extension_roles.yaml index 04ea56de64..cd85072e13 100644 --- a/deployment/hasura/metadata/databases/tables/ui/extension_roles.yaml +++ b/deployment/hasura/metadata/databases/tables/ui/extension_roles.yaml @@ -1,6 +1,8 @@ table: name: extension_roles - schema: public + schema: ui +configuration: + custom_name: "extension_roles" object_relationships: - name: extension using: diff --git a/deployment/hasura/metadata/databases/tables/ui/extensions.yaml b/deployment/hasura/metadata/databases/tables/ui/extensions.yaml index 6932ed610b..d376ac71d0 100644 --- a/deployment/hasura/metadata/databases/tables/ui/extensions.yaml +++ b/deployment/hasura/metadata/databases/tables/ui/extensions.yaml @@ -1,6 +1,8 @@ table: name: extensions - schema: public + schema: ui +configuration: + custom_name: "extensions" array_relationships: - name: extension_roles using: @@ -8,7 +10,7 @@ array_relationships: column: extension_id table: name: extension_roles - schema: public + schema: ui select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/ui/view.yaml b/deployment/hasura/metadata/databases/tables/ui/view.yaml index 590be69780..162c6f86ee 100644 --- a/deployment/hasura/metadata/databases/tables/ui/view.yaml +++ b/deployment/hasura/metadata/databases/tables/ui/view.yaml @@ -1,6 +1,8 @@ table: name: view - schema: public + schema: ui +configuration: + custom_name: "view" select_permissions: - role: aerie_admin permission: From 7fd73a4b106704f4511e058e22f67ff867f4ee36 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 4 Mar 2024 15:07:08 -0800 Subject: [PATCH 19/36] Update Docker-Compose --- docker-compose.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docker-compose.yml 
b/docker-compose.yml index 9e1f6a80cc..72aed01b71 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -224,13 +224,10 @@ services: container_name: aerie_hasura depends_on: ["postgres"] environment: - AERIE_MERLIN_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_merlin" + AERIE_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie" AERIE_MERLIN_URL: "http://aerie_merlin:27183" - AERIE_SCHEDULER_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_scheduler" AERIE_SCHEDULER_URL: "http://aerie_scheduler:27185" - AERIE_SEQUENCING_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_sequencing" AERIE_SEQUENCING_URL: "http://aerie_sequencing:27184" - AERIE_UI_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_ui" HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" HASURA_GRAPHQL_DEV_MODE: "true" HASURA_GRAPHQL_ENABLE_CONSOLE: "true" From e1f4cf8a0e337ab101f23248cee99d62a4afb604 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Thu, 7 Mar 2024 13:09:59 -0800 Subject: [PATCH 20/36] Add foreign keys (Scheduler) Adds fkeys that were previously unexpressable due to the tables being in separate DBs Adds a trigger to create a Scheduling Spec when a plan is created (this is breaking for the UI) --- .../scheduling_condition_definition.sql | 7 ++- .../scheduling_condition_metadata.sql | 12 ++++- .../scheduler/scheduling_goal_definition.sql | 7 ++- .../scheduler/scheduling_goal_metadata.sql | 12 ++++- .../scheduling_run/scheduling_request.sql | 10 +++++ ...eduling_model_specification_conditions.sql | 6 ++- .../scheduling_model_specification_goals.sql | 4 ++ .../scheduling_specification.sql | 44 ++++++++++++++++++- 8 files changed, 96 insertions(+), 6 deletions(-) diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql index a2159372c6..66449f6ed2 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_definition.sql @@ -11,7 +11,12 @@ create table scheduler.scheduling_condition_definition( foreign key (condition_id) references scheduler.scheduling_condition_metadata on update cascade - on delete cascade + on delete cascade, + constraint condition_definition_author_exists + foreign key (author) + references permissions.users + on update cascade + on delete set null ); comment on table scheduler.scheduling_condition_definition is e'' diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql index 2adf7d29e9..bca6b68bf9 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_condition_metadata.sql @@ -12,7 +12,17 @@ create table scheduler.scheduling_condition_metadata ( updated_at timestamptz not null default now(), constraint scheduling_condition_metadata_pkey - primary key (id) + primary key (id), + constraint condition_owner_exists + foreign key (owner) + references permissions.users + on update cascade + on delete set null, + constraint condition_updated_by_exists + foreign key (updated_by) + references permissions.users + on update cascade + on delete set null ); -- A 
partial index is used to enforce name uniqueness only on conditions visible to other users diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql index dc3bfcb507..61dc4efdf2 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_definition.sql @@ -12,7 +12,12 @@ create table scheduler.scheduling_goal_definition( foreign key (goal_id) references scheduler.scheduling_goal_metadata on update cascade - on delete cascade + on delete cascade, + constraint goal_definition_author_exists + foreign key (author) + references permissions.users + on update cascade + on delete set null ); comment on table scheduler.scheduling_goal_definition is e'' diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql index 57ca2a2e4a..b9a8f2162c 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_goal_metadata.sql @@ -12,7 +12,17 @@ create table scheduler.scheduling_goal_metadata ( updated_at timestamptz not null default now(), constraint scheduling_goal_metadata_pkey - primary key (id) + primary key (id), + constraint goal_owner_exists + foreign key (owner) + references permissions.users + on update cascade + on delete set null, + constraint goal_updated_by_exists + foreign key (updated_by) + references permissions.users + on update cascade + on delete set null ); -- A partial index is used to enforce name uniqueness only on goals visible to other users diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql index a1a0237ae3..c96eada7a5 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql @@ -31,6 +31,16 @@ create table scheduler.scheduling_request ( references scheduler.scheduling_specification on update cascade on delete cascade, + constraint scheduling_request_requester_exists + foreign key (requested_by) + references permissions.users + on update cascade + on delete set null, + constraint scheduling_request_references_dataset + foreign key (dataset_id) + references merlin.dataset + on update cascade + on delete set null, constraint start_before_end check (horizon_start <= horizon_end) ); diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql index 30fa994642..505e69302b 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.sql @@ -11,7 +11,11 @@ create table scheduler.scheduling_model_specification_conditions( foreign key (condition_id, condition_revision) references scheduler.scheduling_condition_definition on update cascade - on delete restrict + on delete restrict, + foreign key (model_id) + references 
merlin.mission_model + on update cascade + on delete cascade ); comment on table scheduler.scheduling_model_specification_conditions is e'' diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql index 28b09d5412..35c3127712 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.sql @@ -5,6 +5,10 @@ create table scheduler.scheduling_model_specification_goals( priority integer not null, primary key (model_id, goal_id), + foreign key (model_id) + references merlin.mission_model + on update cascade + on delete cascade, foreign key (goal_id) references scheduler.scheduling_goal_metadata on update cascade diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql index 4806b25249..f84078cfff 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql +++ b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_specification/scheduling_specification.sql @@ -11,7 +11,12 @@ create table scheduler.scheduling_specification ( constraint scheduling_specification_synthetic_key primary key(id), constraint scheduling_specification_unique_plan_id - unique (plan_id) + unique (plan_id), + constraint scheduling_spec_plan_id_fkey + foreign key (plan_id) + references merlin.plan + on update cascade + on delete cascade ); comment on table scheduler.scheduling_specification is e'' @@ -31,6 +36,43 @@ comment on column scheduler.scheduling_specification.simulation_arguments is e'' comment on column scheduler.scheduling_specification.analysis_only is e'' 'The boolean stating whether this is an analysis run only'; +create function scheduler.create_scheduling_spec_for_new_plan() +returns trigger +security definer +language plpgsql as $$ +declare + spec_id integer; +begin + -- Create a new scheduling specification + insert into scheduler.scheduling_specification (revision, plan_id, plan_revision, horizon_start, horizon_end, + simulation_arguments, analysis_only) + values (0, new.id, new.revision, new.start_time, new.start_time+new.duration, '{}', false) + returning id into spec_id; + + -- Populate the scheduling specification + insert into scheduler.scheduling_specification_goals (specification_id, goal_id, goal_revision, priority) + select spec_id, msg.goal_id, msg.goal_revision, msg.priority + from scheduler.scheduling_model_specification_goals msg + where msg.model_id = new.model_id; + + insert into scheduler.scheduling_specification_conditions (specification_id, condition_id, condition_revision) + select spec_id, msc.condition_id, msc.condition_revision + from scheduler.scheduling_model_specification_conditions msc + where msc.model_id = new.model_id; + + return new; +end +$$; + +comment on function scheduler.create_scheduling_spec_for_new_plan() is e'' +'Creates a scheduling specification for a new plan + and populates it with the contents of the plan''s model''s specification.'; + +create trigger scheduling_spec_for_new_plan_trigger +after insert on merlin.plan +for each row +execute function 
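A sketch of the trigger's effect (merlin.plan column list abbreviated and values hypothetical; start_time, duration and model_id are the columns the trigger body above reads):

-- Inserting a plan now auto-creates and populates its scheduling specification.
insert into merlin.plan (name, model_id, start_time, duration)
values ('demo-plan', 1, '2024-01-01T00:00:00+00', '30 days');

-- One row per plan, created by scheduling_spec_for_new_plan_trigger:
select id, plan_id, plan_revision
from scheduler.scheduling_specification
where plan_id = (select id from merlin.plan where name = 'demo-plan');

Because the specification is now created automatically, and plan_id is unique on scheduler.scheduling_specification, clients that previously inserted their own specification per plan will conflict with it, which is why the commit message flags this change as breaking for the UI.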
scheduler.create_scheduling_spec_for_new_plan(); + create trigger increment_revision_on_update_trigger before update on scheduler.scheduling_specification for each row From 1a79f400b486f46d467fb053cb1abd730ff70cad Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Thu, 7 Mar 2024 13:46:35 -0800 Subject: [PATCH 21/36] Add foreign keys (UI) Adds fkeys that were previously unexpressable due to the tables being in separate DBs --- deployment/postgres-init-db/sql/tables/ui/extension_roles.sql | 4 +++- deployment/postgres-init-db/sql/tables/ui/extensions.sql | 4 +++- deployment/postgres-init-db/sql/tables/ui/view.sql | 4 +++- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/deployment/postgres-init-db/sql/tables/ui/extension_roles.sql b/deployment/postgres-init-db/sql/tables/ui/extension_roles.sql index 128ba2f25b..a6fd7b970d 100644 --- a/deployment/postgres-init-db/sql/tables/ui/extension_roles.sql +++ b/deployment/postgres-init-db/sql/tables/ui/extension_roles.sql @@ -2,7 +2,9 @@ create table ui.extension_roles ( extension_id integer not null references ui.extensions(id) on update cascade on delete cascade, - role text not null, + role text not null references permissions.user_roles (role) + on update cascade + on delete cascade, primary key (extension_id, role) ); diff --git a/deployment/postgres-init-db/sql/tables/ui/extensions.sql b/deployment/postgres-init-db/sql/tables/ui/extensions.sql index 1ab1beb374..9d1623d275 100644 --- a/deployment/postgres-init-db/sql/tables/ui/extensions.sql +++ b/deployment/postgres-init-db/sql/tables/ui/extensions.sql @@ -2,7 +2,9 @@ create table ui.extensions ( id integer generated always as identity, description text, label text not null, - owner text, + owner text references permissions.users (username) + on update cascade + on delete set null, url text not null, updated_at timestamptz not null default now(), diff --git a/deployment/postgres-init-db/sql/tables/ui/view.sql b/deployment/postgres-init-db/sql/tables/ui/view.sql index 71a9dc8701..b39748955f 100644 --- a/deployment/postgres-init-db/sql/tables/ui/view.sql +++ b/deployment/postgres-init-db/sql/tables/ui/view.sql @@ -3,7 +3,9 @@ create table ui.view ( definition jsonb not null, id integer generated always as identity, name text not null, - owner text, + owner text references permissions.users (username) + on update cascade + on delete set null, updated_at timestamptz not null default now(), constraint view_primary_key primary key (id) From 665efd121d78ef8523a090748418a7e49a35823c Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Thu, 7 Mar 2024 14:18:12 -0800 Subject: [PATCH 22/36] Add foreign keys (Sequencing) Adds fkeys that were previously unexpressable due to the tables being in separate DBs Additionally refines the comment on `expansion_rule.activity_type` --- .../sql/tables/sequencing/expanded_sequences.sql | 6 ++++-- .../sql/tables/sequencing/expansion_rule.sql | 14 +++++++++++++- .../sql/tables/sequencing/expansion_run.sql | 3 +++ .../sql/tables/sequencing/expansion_set.sql | 13 ++++++++++++- .../sql/tables/sequencing/sequence.sql | 5 ++++- .../sequencing/sequence_to_simulated_activity.sql | 12 ++++++++---- .../sql/tables/sequencing/user_sequence.sql | 9 ++++++++- 7 files changed, 52 insertions(+), 10 deletions(-) diff --git a/deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql b/deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql index fdc0a22f7d..12c797d754 100644 --- 
a/deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/expanded_sequences.sql @@ -11,15 +11,17 @@ create table sequencing.expanded_sequences ( constraint expanded_sequences_primary_key primary key (id), - constraint expanded_sequences_to_expansion_run_id foreign key (expansion_run_id) references sequencing.expansion_run on delete cascade, - constraint expanded_sequences_to_seq_id foreign key (seq_id, simulation_dataset_id) references sequencing.sequence (seq_id, simulation_dataset_id) + on delete cascade, + constraint expanded_sequences_to_sim_run + foreign key (simulation_dataset_id) + references merlin.simulation_dataset on delete cascade ); diff --git a/deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql index 31763799ae..3e1892c8c2 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/expansion_rule.sql @@ -27,6 +27,18 @@ create table sequencing.expansion_rule ( foreign key (authoring_command_dict_id) references sequencing.command_dictionary (id) + on delete set null, + foreign key (authoring_mission_model_id) + references merlin.mission_model + on update cascade + on delete set null, + foreign key (owner) + references permissions.users + on update cascade + on delete set null, + foreign key (updated_by) + references permissions.users + on update cascade on delete set null ); comment on table sequencing.expansion_rule is e'' @@ -34,7 +46,7 @@ comment on table sequencing.expansion_rule is e'' comment on column sequencing.expansion_rule.id is e'' 'The synthetic identifier for this expansion rule.'; comment on column sequencing.expansion_rule.activity_type is e'' - 'The user selected activity type.'; + 'The activity type this expansion rule applies to. 
This type is not model-specific.'; comment on column sequencing.expansion_rule.expansion_logic is e'' 'The expansion logic used to generate commands.'; comment on column sequencing.expansion_rule.authoring_command_dict_id is e'' diff --git a/deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql index 79a1dfb0fa..f4b2fc0902 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/expansion_run.sql @@ -11,6 +11,9 @@ create table sequencing.expansion_run ( foreign key (expansion_set_id) references sequencing.expansion_set (id) + on delete cascade, + foreign key (simulation_dataset_id) + references merlin.simulation_dataset on delete cascade ); comment on table sequencing.expansion_run is e'' diff --git a/deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql b/deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql index e4a4b0c062..502e3dc290 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/expansion_set.sql @@ -20,7 +20,18 @@ create table sequencing.expansion_set ( foreign key (command_dict_id) references sequencing.command_dictionary (id) - on delete cascade + on delete cascade, + foreign key (mission_model_id) + references merlin.mission_model + on delete cascade, + foreign key (owner) + references permissions.users + on update cascade + on delete set null, + foreign key (updated_by) + references permissions.users + on update cascade + on delete set null ); comment on table sequencing.expansion_set is e'' diff --git a/deployment/postgres-init-db/sql/tables/sequencing/sequence.sql b/deployment/postgres-init-db/sql/tables/sequencing/sequence.sql index bf1430d1f6..1546462fbf 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/sequence.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/sequence.sql @@ -6,7 +6,10 @@ create table sequencing.sequence ( created_at timestamptz not null default now(), constraint sequence_primary_key - primary key (seq_id, simulation_dataset_id) + primary key (seq_id, simulation_dataset_id), + foreign key (simulation_dataset_id) + references merlin.simulation_dataset + on delete cascade ); comment on table sequencing.sequence is e'' 'A sequence product'; diff --git a/deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql b/deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql index e6b9dfb592..c6092ee7ad 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/sequence_to_simulated_activity.sql @@ -4,12 +4,16 @@ create table sequencing.sequence_to_simulated_activity ( seq_id text not null, constraint sequence_to_simulated_activity_primary_key - primary key (simulated_activity_id, simulation_dataset_id), + primary key (simulated_activity_id, simulation_dataset_id), constraint sequence_to_simulated_activity_activity_instance_id_fkey - foreign key (seq_id, simulation_dataset_id) - references sequencing.sequence (seq_id, simulation_dataset_id) - on delete cascade + foreign key (seq_id, simulation_dataset_id) + references sequencing.sequence (seq_id, simulation_dataset_id) + on delete cascade, + constraint sequence_to_sim_run + foreign key (simulation_dataset_id) + references merlin.simulation_dataset + on delete cascade ); 
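Taken together, the sequencing-side foreign keys added in this patch make cleanup follow the Merlin data automatically. A sketch with a hypothetical dataset id:

-- Illustration only: deleting a simulation dataset now cascades into the
-- sequencing schema (sequence, expanded_sequences, expansion_run and
-- sequence_to_simulated_activity) through the "on delete cascade" clauses above.
delete from merlin.simulation_dataset where id = 42;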
comment on table sequencing.sequence_to_simulated_activity is e'' diff --git a/deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql b/deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql index e099a31b92..2f6e594a8c 100644 --- a/deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql +++ b/deployment/postgres-init-db/sql/tables/sequencing/user_sequence.sql @@ -7,7 +7,14 @@ create table sequencing.user_sequence ( owner text, updated_at timestamptz not null default now(), - constraint user_sequence_primary_key primary key (id) + constraint user_sequence_primary_key primary key (id), + foreign key (authoring_command_dict_id) + references sequencing.command_dictionary + on delete cascade, + foreign key (owner) + references permissions.users + on update cascade + on delete cascade ); comment on column sequencing.user_sequence.authoring_command_dict_id is e'' From 52ef1bf6c29b26b45fcc6c20850f78b76d2271d0 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Mon, 11 Mar 2024 14:17:26 -0700 Subject: [PATCH 23/36] Replace Manual Configurations in Hasura Metadata --- .../activity_directive_changelog.yaml | 10 +++----- .../tables/merlin/activity_type.yaml | 13 +++++------ .../constraint_model_specification.yaml | 19 ++++----------- .../databases/tables/merlin/plan.yaml | 23 ++++++------------- .../snapshot/plan_snapshot_activities.yaml | 11 ++++----- ...duling_model_specification_conditions.yaml | 7 +----- .../scheduling_model_specification_goals.yaml | 7 +----- .../scheduling_specification.yaml | 7 +----- .../tables/sequencing/expansion_rule.yaml | 8 +++---- .../tables/sequencing/expansion_run.yaml | 7 +----- .../tables/sequencing/expansion_set.yaml | 7 +----- .../databases/tables/sequencing/sequence.yaml | 7 +----- .../sequence_to_simulated_activity.yaml | 7 +----- .../tables/sequencing/user_sequence.yaml | 8 +------ .../tables/tags/expansion_rule_tags.yaml | 7 +----- .../scheduling_condition_definition_tags.yaml | 7 +----- .../tags/scheduling_condition_tags.yaml | 7 +----- .../tags/scheduling_goal_definition_tags.yaml | 7 +----- .../tables/tags/scheduling_goal_tags.yaml | 7 +----- 19 files changed, 41 insertions(+), 135 deletions(-) diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml index 97020bc737..67bc16c29e 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_directive/activity_directive_changelog.yaml @@ -6,13 +6,9 @@ configuration: object_relationships: - name: activity_directive using: - manual_configuration: - remote_table: - name: activity_directive - schema: merlin - column_mapping: - plan_id: plan_id - activity_directive_id: id + foreign_key_constraint_on: + - plan_id + - activity_directive_id select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml b/deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml index 51c079c238..01681ca599 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/activity_type.yaml @@ -10,14 +10,13 @@ object_relationships: array_relationships: - name: presets using: - manual_configuration: - remote_table: - schema: merlin + foreign_key_constraint_on: 
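Dropping these manual_configuration blocks is possible because the corresponding composite foreign keys now live in the same database as the tables they relate. For example, the activity_directive relationship above is assumed to be backed by a constraint shaped roughly as follows (constraint name and referenced columns illustrative, not taken from the patch):

-- Hypothetical shape of the composite key behind
-- foreign_key_constraint_on (plan_id, activity_directive_id).
alter table merlin.activity_directive_changelog
  add constraint activity_directive_changelog_references_directive
    foreign key (activity_directive_id, plan_id)
    references merlin.activity_directive (id, plan_id)
    on update cascade
    on delete cascade;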
+ columns: + - model_id + - associated_activity_type + table: name: activity_presets - insertion_order: null - column_mapping: - model_id: model_id - name: associated_activity_type + schema: merlin - name: expansion_rules using: manual_configuration: diff --git a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml index 050d01f135..65dcb013a1 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/constraints/constraint_model_specification.yaml @@ -9,23 +9,12 @@ object_relationships: foreign_key_constraint_on: model_id - name: constraint_metadata using: - manual_configuration: - column_mapping: - constraint_id: id - insertion_order: null - remote_table: - name: constraint_metadata - schema: merlin + foreign_key_constraint_on: constraint_id - name: constraint_definition using: - manual_configuration: - column_mapping: - constraint_id: constraint_id - constraint_revision: revision - insertion_order: null - remote_table: - name: constraint_definition - schema: merlin + foreign_key_constraint_on: + - constraint_id + - constraint_revision select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/merlin/plan.yaml b/deployment/hasura/metadata/databases/tables/merlin/plan.yaml index 24fe1348eb..a29f047350 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/plan.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/plan.yaml @@ -9,19 +9,12 @@ object_relationships: foreign_key_constraint_on: model_id - name: parent_plan using: - manual_configuration: - column_mapping: - parent_id: id - insertion_order: null - remote_table: - name: plan - schema: merlin + foreign_key_constraint_on: parent_id - name: scheduling_specification using: - manual_configuration: - column_mapping: - id: plan_id - remote_table: + foreign_key_constraint_on: + column: plan_id + table: name: scheduling_specification schema: scheduler array_relationships: @@ -69,11 +62,9 @@ array_relationships: schema: tags - name: child_plans using: - manual_configuration: - column_mapping: - id: parent_id - insertion_order: null - remote_table: + foreign_key_constraint_on: + column: parent_id + table: name: plan schema: merlin select_permissions: diff --git a/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml b/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml index ed90b16b58..cd33ee83d6 100644 --- a/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml +++ b/deployment/hasura/metadata/databases/tables/merlin/snapshot/plan_snapshot_activities.yaml @@ -10,14 +10,13 @@ object_relationships: array_relationships: - name: tags using: - manual_configuration: - insertion_order: null - remote_table: + foreign_key_constraint_on: + columns: + - directive_id + - snapshot_id + table: name: snapshot_activity_tags schema: tags - column_mapping: - id: directive_id - snapshot_id: snapshot_id select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml index 
406c15d96b..d509dfd8f0 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_conditions.yaml @@ -14,12 +14,7 @@ object_relationships: - condition_revision - name: model using: - manual_configuration: - column_mapping: - model_id: id - remote_table: - name: mission_model - schema: merlin + foreign_key_constraint_on: model_id select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml index 5501895864..c2bf56e564 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_model_specification_goals.yaml @@ -14,12 +14,7 @@ object_relationships: - goal_revision - name: model using: - manual_configuration: - column_mapping: - model_id: id - remote_table: - name: mission_model - schema: merlin + foreign_key_constraint_on: model_id select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml index 661c1fdb5f..d9936c75aa 100644 --- a/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml +++ b/deployment/hasura/metadata/databases/tables/scheduler/scheduling_specification/scheduling_specification.yaml @@ -6,12 +6,7 @@ configuration: object_relationships: - name: plan using: - manual_configuration: - column_mapping: - plan_id: id - remote_table: - name: plan - schema: merlin + foreign_key_constraint_on: plan_id array_relationships: - name: goals using: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml index 9a8ebdd235..cbcd8a087e 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/expansion_rule.yaml @@ -15,13 +15,11 @@ array_relationships: id: rule_id - name: tags using: - manual_configuration: - remote_table: + foreign_key_constraint_on: + column: rule_id + table: name: expansion_rule_tags schema: tags - insertion_order: null - column_mapping: - id: rule_id select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml index 56d6411ed8..3070f9495c 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/expansion_run.yaml @@ -9,12 +9,7 @@ object_relationships: foreign_key_constraint_on: expansion_set_id - name: simulation_dataset using: - manual_configuration: - column_mapping: - simulation_dataset_id: id - remote_table: - name: simulation_dataset - schema: merlin + foreign_key_constraint_on: simulation_dataset_id array_relationships: - name: activity_instance_commands 
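A quick way to confirm which constraints these simplified relationships rely on is to ask the PostgreSQL catalogs directly; the query below uses only standard catalog tables plus the schema names introduced in this patch series:

-- List foreign keys that point from the sequencing schema into the merlin schema.
select con.conname as constraint_name,
       src.relname as referencing_table,
       tgt.relname as referenced_table
from pg_constraint con
join pg_class src on src.oid = con.conrelid
join pg_class tgt on tgt.oid = con.confrelid
join pg_namespace sns on sns.oid = src.relnamespace
join pg_namespace tns on tns.oid = tgt.relnamespace
where con.contype = 'f'
  and sns.nspname = 'sequencing'
  and tns.nspname = 'merlin'
order by src.relname;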
using: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml index fa616a530c..28c0010c6c 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/expansion_set.yaml @@ -9,12 +9,7 @@ object_relationships: foreign_key_constraint_on: command_dict_id - name: mission_model using: - manual_configuration: - column_mapping: - mission_model_id: id - remote_table: - name: mission_model - schema: merlin + foreign_key_constraint_on: mission_model_id array_relationships: - name: expansion_runs using: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml b/deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml index dc86292006..ecbda13a14 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/sequence.yaml @@ -6,12 +6,7 @@ configuration: object_relationships: - name: simulation_dataset using: - manual_configuration: - column_mapping: - simulation_dataset_id: id - remote_table: - name: simulation_dataset - schema: merlin + foreign_key_constraint_on: simulation_dataset_id array_relationships: - name: activity_instance_joins using: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml b/deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml index 3f68b0cda8..d66f49426e 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/sequence_to_simulated_activity.yaml @@ -6,12 +6,7 @@ configuration: object_relationships: - name: simulation_dataset using: - manual_configuration: - column_mapping: - simulation_dataset_id: id - remote_table: - name: simulation_dataset - schema: merlin + foreign_key_constraint_on: simulation_dataset_id - name: simulated_activity using: manual_configuration: diff --git a/deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml b/deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml index af2c6306df..259fb47c7c 100644 --- a/deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml +++ b/deployment/hasura/metadata/databases/tables/sequencing/user_sequence.yaml @@ -6,13 +6,7 @@ configuration: object_relationships: - name: command_dictionary using: - manual_configuration: - remote_table: - schema: sequencing - name: command_dictionary - insertion_order: null - column_mapping: - authoring_command_dict_id: id + foreign_key_constraint_on: authoring_command_dict_id select_permissions: - role: aerie_admin permission: diff --git a/deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml index 7867c51d3e..5de9ea4251 100644 --- a/deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml +++ b/deployment/hasura/metadata/databases/tables/tags/expansion_rule_tags.yaml @@ -9,12 +9,7 @@ object_relationships: foreign_key_constraint_on: rule_id - name: tag using: - manual_configuration: - column_mapping: - tag_id: id - remote_table: - name: tags - schema: tags + foreign_key_constraint_on: tag_id select_permissions: - role: aerie_admin permission: diff --git 
a/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml
index 5dc15bc0b4..6e49a2efbe 100644
--- a/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml
+++ b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_definition_tags.yaml
@@ -11,12 +11,7 @@ object_relationships:
         - condition_revision
   - name: tag
     using:
-      manual_configuration:
-        column_mapping:
-          tag_id: id
-        remote_table:
-          name: tags
-          schema: tags
+      foreign_key_constraint_on: tag_id
 select_permissions:
   - role: aerie_admin
     permission:
diff --git a/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml
index 7875b248ec..de03b68035 100644
--- a/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml
+++ b/deployment/hasura/metadata/databases/tables/tags/scheduling_condition_tags.yaml
@@ -9,12 +9,7 @@ object_relationships:
       foreign_key_constraint_on: condition_id
   - name: tag
     using:
-      manual_configuration:
-        column_mapping:
-          tag_id: id
-        remote_table:
-          name: tags
-          schema: tags
+      foreign_key_constraint_on: tag_id
 select_permissions:
   - role: aerie_admin
     permission:
diff --git a/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml
index 98f7d1a3d2..1e0cd51906 100644
--- a/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml
+++ b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_definition_tags.yaml
@@ -11,12 +11,7 @@ object_relationships:
         - goal_revision
   - name: tag
     using:
-      manual_configuration:
-        column_mapping:
-          tag_id: id
-        remote_table:
-          name: tags
-          schema: tags
+      foreign_key_constraint_on: tag_id
 select_permissions:
   - role: aerie_admin
     permission:
diff --git a/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml
index ef1da104e4..000d8d8202 100644
--- a/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml
+++ b/deployment/hasura/metadata/databases/tables/tags/scheduling_goal_tags.yaml
@@ -9,12 +9,7 @@ object_relationships:
       foreign_key_constraint_on: goal_id
   - name: tag
     using:
-      manual_configuration:
-        column_mapping:
-          tag_id: id
-        remote_table:
-          name: tags
-          schema: tags
+      foreign_key_constraint_on: tag_id
 select_permissions:
   - role: aerie_admin
     permission:

From 67d96ef6b42ca2dcc9e91303a6782f8a5bbf9082 Mon Sep 17 00:00:00 2001
From: Theresa Kamerman
Date: Mon, 11 Mar 2024 16:37:02 -0700
Subject: [PATCH 24/36] Update DB Actions in Backend (Merlin)

---
 .../remotes/postgres/AppendProfileSegmentsAction.java  | 4 ++--
 .../remotes/postgres/CancelSimulationAction.java       | 2 +-
 .../remotes/postgres/CheckPlanDatasetExistsAction.java | 2 +-
 .../server/remotes/postgres/ClaimSimulationAction.java | 2 +-
 .../remotes/postgres/CreateModelParametersAction.java  | 4 ++--
 .../remotes/postgres/CreatePlanDatasetAction.java      | 4 ++--
 .../postgres/CreateSimulationDatasetAction.java        | 4 ++--
 .../remotes/postgres/DeleteSimulationExtentAction.java | 8 ++++----
 .../remotes/postgres/GetActivityDirectivesAction.java  | 2 +-
 .../remotes/postgres/GetActivityTypesAction.java       | 2 +-
 .../server/remotes/postgres/GetAllModelsAction.java    | 4 ++--
.../server/remotes/postgres/GetAllPlansAction.java | 2 +- .../merlin/server/remotes/postgres/GetModelAction.java | 4 ++-- .../merlin/server/remotes/postgres/GetPlanAction.java | 2 +- .../remotes/postgres/GetPlanConstraintsAction.java | 10 +++++----- .../server/remotes/postgres/GetPlanDatasetsAction.java | 2 +- .../remotes/postgres/GetPlanRevisionDataAction.java | 8 ++++---- .../remotes/postgres/GetProfileSegmentsAction.java | 2 +- .../server/remotes/postgres/GetProfilesAction.java | 2 +- .../remotes/postgres/GetProfilesByNameAction.java | 2 +- .../server/remotes/postgres/GetSimulationAction.java | 2 +- .../remotes/postgres/GetSimulationDatasetAction.java | 2 +- .../postgres/GetSimulationDatasetByIdAction.java | 2 +- .../remotes/postgres/GetSimulationEventsAction.java | 3 +-- .../remotes/postgres/GetSimulationTemplateAction.java | 2 +- .../remotes/postgres/GetSimulationTopicsAction.java | 2 +- .../merlin/server/remotes/postgres/GetSpanRecords.java | 2 +- .../postgres/GetUnvalidatedDirectivesAction.java | 6 +++--- .../remotes/postgres/GetValidConstraintRunsAction.java | 2 +- .../remotes/postgres/InsertActivityTypesAction.java | 6 +++--- .../remotes/postgres/InsertConstraintRunsAction.java | 2 +- .../remotes/postgres/InsertResourceTypesAction.java | 6 +++--- .../remotes/postgres/InsertSimulationEventsAction.java | 2 +- .../remotes/postgres/InsertSimulationTopicsAction.java | 2 +- .../postgres/LookupSimulationDatasetAction.java | 10 +++++----- .../remotes/postgres/PostProfileSegmentsAction.java | 4 ++-- .../server/remotes/postgres/PostProfilesAction.java | 2 +- .../server/remotes/postgres/PostSpansAction.java | 2 +- .../remotes/postgres/SetSimulationStateAction.java | 2 +- .../UpdateActivityDirectiveValidationsAction.java | 2 +- .../remotes/postgres/UpdateProfileDurationAction.java | 2 +- .../postgres/UpdateSimulatedActivityParentsAction.java | 4 ++-- .../remotes/postgres/UpdateSimulationExtentAction.java | 4 ++-- 43 files changed, 72 insertions(+), 73 deletions(-) diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/AppendProfileSegmentsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/AppendProfileSegmentsAction.java index 72614c3430..b36ed5420b 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/AppendProfileSegmentsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/AppendProfileSegmentsAction.java @@ -14,7 +14,7 @@ public final class AppendProfileSegmentsAction implements AutoCloseable { private final @Language("SQL") String sql = """ - insert into profile_segment (dataset_id, profile_id, start_offset, dynamics, is_gap) + insert into merlin.profile_segment (dataset_id, profile_id, start_offset, dynamics, is_gap) values (?, ?, ?::interval, ?::json, ?) 
"""; private final PreparedStatement statement; @@ -52,7 +52,7 @@ public Duration apply( final var results = this.statement.executeBatch(); for (final var result : results) { - if (result == Statement.EXECUTE_FAILED) throw new FailedInsertException("profile_segment"); + if (result == Statement.EXECUTE_FAILED) throw new FailedInsertException("merlin.profile_segment"); } return accumulatedOffset; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CancelSimulationAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CancelSimulationAction.java index 609e62945b..e6b7eb9ace 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CancelSimulationAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CancelSimulationAction.java @@ -8,7 +8,7 @@ /*package local*/ final class CancelSimulationAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - update simulation_dataset + update merlin.simulation_dataset set canceled = true where dataset_id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CheckPlanDatasetExistsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CheckPlanDatasetExistsAction.java index 87bb4353a6..261220da9e 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CheckPlanDatasetExistsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CheckPlanDatasetExistsAction.java @@ -10,7 +10,7 @@ /*package-local*/ final class CheckPlanDatasetExistsAction implements AutoCloseable { private final @Language("SQL") String sql = """ select 1 - from plan_dataset as p + from merlin.plan_dataset as p where p.dataset_id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/ClaimSimulationAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/ClaimSimulationAction.java index 1ac2965aec..58d6619a79 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/ClaimSimulationAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/ClaimSimulationAction.java @@ -8,7 +8,7 @@ /*package local*/ public class ClaimSimulationAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - update simulation_dataset + update merlin.simulation_dataset set status = 'incomplete' where (dataset_id = ? 
and status = 'pending' and not canceled); diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreateModelParametersAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreateModelParametersAction.java index 38e01c3b3c..036a0488f5 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreateModelParametersAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreateModelParametersAction.java @@ -10,7 +10,7 @@ /*package-local*/ final class CreateModelParametersAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - insert into mission_model_parameters (model_id, parameters) + insert into merlin.mission_model_parameters (model_id, parameters) values (?, ?::json) on conflict (model_id) do update set parameters = ?::json returning model_id @@ -28,7 +28,7 @@ public long apply(final long modelId, final List parameters) throws S PreparedStatements.setParameters(this.statement, 3, parameters); try (final var results = statement.executeQuery()) { - if (!results.next()) throw new FailedInsertException("mission_model_parameters"); + if (!results.next()) throw new FailedInsertException("merlin.mission_model_parameters"); return results.getLong(1); } diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreatePlanDatasetAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreatePlanDatasetAction.java index 481ef6ad2f..2a8fbd3e73 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreatePlanDatasetAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreatePlanDatasetAction.java @@ -15,7 +15,7 @@ /*package-local*/ final class CreatePlanDatasetAction implements AutoCloseable { private final @Language("SQL") String sql = """ - insert into plan_dataset (plan_id, offset_from_plan_start, simulation_dataset_id) + insert into merlin.plan_dataset (plan_id, offset_from_plan_start, simulation_dataset_id) values (?, ?::timestamptz - ?::timestamptz, ?) 
returning dataset_id """; @@ -44,7 +44,7 @@ public PlanDatasetRecord apply( ); final var results = this.statement.executeQuery(); - if (!results.next()) throw new FailedInsertException("plan_dataset"); + if (!results.next()) throw new FailedInsertException("merlin.plan_dataset"); final var datasetId = results.getLong(1); return new PlanDatasetRecord(planId, datasetId, simulationDatasetId, offsetFromPlanStart); diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreateSimulationDatasetAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreateSimulationDatasetAction.java index 84fb27aa4c..88003d00b3 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreateSimulationDatasetAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/CreateSimulationDatasetAction.java @@ -14,7 +14,7 @@ /*package local*/ final class CreateSimulationDatasetAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - insert into simulation_dataset + insert into merlin.simulation_dataset ( simulation_id, simulation_start_time, @@ -51,7 +51,7 @@ public SimulationDatasetRecord apply( this.statement.setString(5, requestedBy); try (final var results = this.statement.executeQuery()) { - if (!results.next()) throw new FailedInsertException("simulation_dataset"); + if (!results.next()) throw new FailedInsertException("merlin.simulation_dataset"); final Status status; try { status = Status.fromString(results.getString(2)); diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/DeleteSimulationExtentAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/DeleteSimulationExtentAction.java index 0bc43869c6..d65220a65e 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/DeleteSimulationExtentAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/DeleteSimulationExtentAction.java @@ -8,10 +8,10 @@ /*package-local*/ final class DeleteSimulationExtentAction implements AutoCloseable { private final @Language("SQL") String sql = """ - delete from simulation_extent - using simulation_dataset - where simulation_dataset.id = simulation_extent.simulation_dataset_id - and simulation_dataset.dataset_id = ?; + delete from merlin.simulation_extent se + using merlin.simulation_dataset sd + where sd.id = se.simulation_dataset_id + and sd.dataset_id = ?; """; private final PreparedStatement statement; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetActivityDirectivesAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetActivityDirectivesAction.java index 7a7094ceaa..588ee724c4 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetActivityDirectivesAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetActivityDirectivesAction.java @@ -20,7 +20,7 @@ a.arguments, a.anchor_id, a.anchored_to_start - from activity_directive as a + from merlin.activity_directive as a where a.plan_id = ? 
"""; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetActivityTypesAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetActivityTypesAction.java index cdea5104d3..caafe47b1e 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetActivityTypesAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetActivityTypesAction.java @@ -25,7 +25,7 @@ a.parameters, a.required_parameters, a.computed_attributes_value_schema - from activity_type as a + from merlin.activity_type as a where a.model_id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetAllModelsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetAllModelsAction.java index ac9b97be4d..a9a769835b 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetAllModelsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetAllModelsAction.java @@ -12,8 +12,8 @@ /*package-local*/ final class GetAllModelsAction implements AutoCloseable { private static final @Language("SQL") String sql = """ select m.id, m.mission, m.name, m.version, m.owner, f.path - from mission_model as m - inner join uploaded_file as f on m.jar_id = f.id + from merlin.mission_model as m + inner join merlin.uploaded_file as f on m.jar_id = f.id """; private final PreparedStatement statement; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetAllPlansAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetAllPlansAction.java index f0deb12704..b3d7ebd123 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetAllPlansAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetAllPlansAction.java @@ -18,7 +18,7 @@ p.model_id, to_char(p.start_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as start_time, to_char(p.start_time + p.duration, 'YYYY-DDD"T"HH24:MI:SS.FF6') as end_time - from plan as p + from merlin.plan as p """; private final PreparedStatement statement; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetModelAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetModelAction.java index a5eab27146..6d46833b61 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetModelAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetModelAction.java @@ -11,8 +11,8 @@ /*package-local*/ final class GetModelAction implements AutoCloseable { private static final @Language("SQL") String sql = """ select m.mission, m.name, m.version, m.owner, encode(f.path, 'escape') - from mission_model AS m - inner join uploaded_file AS f + from merlin.mission_model AS m + inner join merlin.uploaded_file AS f on m.jar_id = f.id where m.id = ? 
"""; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanAction.java index 7d1ce7671b..900908249e 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanAction.java @@ -16,7 +16,7 @@ p.model_id, to_char(p.start_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as start_time, to_char(p.start_time + p.duration, 'YYYY-DDD"T"HH24:MI:SS.FF6') as end_time - from plan as p + from merlin.plan as p where p.id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanConstraintsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanConstraintsAction.java index b106d97567..4acd8ce212 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanConstraintsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanConstraintsAction.java @@ -15,17 +15,17 @@ // A plan without any enabled constraints will produce a placeholder row with nulls. private static final @Language("SQL") String sql = """ select c.constraint_id, c.revision, c.name, c.description, c.definition - from plan p + from merlin.plan p left join (select cs.plan_id, cs.constraint_id, cd.revision, cm.name, cm.description, cd.definition - from constraint_specification cs - left join constraint_definition cd using (constraint_id) - left join public.constraint_metadata cm on cs.constraint_id = cm.id + from merlin.constraint_specification cs + left join merlin.constraint_definition cd using (constraint_id) + left join merlin.constraint_metadata cm on cs.constraint_id = cm.id where cs.enabled and ((cs.constraint_revision is not null and cs.constraint_revision = cd.revision) or (cs.constraint_revision is null and cd.revision = (select def.revision - from constraint_definition def + from merlin.constraint_definition def where def.constraint_id = cs.constraint_id order by def.revision desc limit 1))) ) c diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanDatasetsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanDatasetsAction.java index dacdd048b7..69da8e8221 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanDatasetsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanDatasetsAction.java @@ -20,7 +20,7 @@ p.dataset_id, p.offset_from_plan_start, p.simulation_dataset_id - from plan_dataset as p + from merlin.plan_dataset as p where p.plan_id = ? and (p.simulation_dataset_id is null or p.simulation_dataset_id = ?) 
"""; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanRevisionDataAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanRevisionDataAction.java index 831b258c4a..f6709c80e9 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanRevisionDataAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetPlanRevisionDataAction.java @@ -14,12 +14,12 @@ p.revision as plan_revision, s.revision as sim_revision, t.revision as template_revision - from plan as p - left join mission_model as m + from merlin.plan as p + left join merlin.mission_model as m on p.model_id = m.id - left join simulation as s + left join merlin.simulation as s on p.id = s.plan_id - left join simulation_template as t + left join merlin.simulation_template as t on s.simulation_template_id = t.id where p.id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfileSegmentsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfileSegmentsAction.java index d421f45004..d6c4ec3d23 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfileSegmentsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfileSegmentsAction.java @@ -20,7 +20,7 @@ seg.start_offset, seg.dynamics, seg.is_gap - from profile_segment as seg + from merlin.profile_segment as seg where seg.dataset_id = ? and seg.profile_id = ? diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfilesAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfilesAction.java index f8ac158afd..2aed1bde58 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfilesAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfilesAction.java @@ -18,7 +18,7 @@ p.name, p.type, p.duration - from profile as p + from merlin.profile as p where p.dataset_id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfilesByNameAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfilesByNameAction.java index 3fccb1e1ca..2f9a52c907 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfilesByNameAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetProfilesByNameAction.java @@ -18,7 +18,7 @@ p.name, p.type, p.duration - from profile as p + from merlin.profile as p where p.dataset_id = ? and p.name = any(?) 
diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationAction.java index 3fe00511c8..1bc4a828ea 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationAction.java @@ -21,7 +21,7 @@ s.arguments, to_char(s.simulation_start_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as simulation_start_time, to_char(s.simulation_end_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as simulation_end_time - from simulation as s + from merlin.simulation as s where s.plan_id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationDatasetAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationDatasetAction.java index 27725f4e80..f5071d8ba4 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationDatasetAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationDatasetAction.java @@ -19,7 +19,7 @@ public final class GetSimulationDatasetAction implements AutoCloseable { to_char(d.simulation_start_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as simulation_start_time, to_char(d.simulation_end_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as simulation_end_time, d.id as id - from simulation_dataset as d + from merlin.simulation_dataset as d where d.dataset_id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationDatasetByIdAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationDatasetByIdAction.java index 364f810f3a..cd32f9f5e1 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationDatasetByIdAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationDatasetByIdAction.java @@ -18,7 +18,7 @@ public class GetSimulationDatasetByIdAction implements AutoCloseable { to_char(d.simulation_start_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as simulation_start_time, to_char(d.simulation_end_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as simulation_end_time, d.dataset_id as dataset_id - from simulation_dataset as d + from merlin.simulation_dataset as d where d.id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationEventsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationEventsAction.java index 09eb15a618..8ddb50b30c 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationEventsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationEventsAction.java @@ -3,7 +3,6 @@ import gov.nasa.jpl.aerie.merlin.driver.timeline.EventGraph; import gov.nasa.jpl.aerie.merlin.protocol.types.Duration; import gov.nasa.jpl.aerie.merlin.protocol.types.SerializedValue; -import gov.nasa.jpl.aerie.merlin.server.models.Timestamp; import org.apache.commons.lang3.tuple.Pair; import org.intellij.lang.annotations.Language; @@ -31,7 +30,7 @@ e.causal_time, e.topic_index, e.value - from event as e + from merlin.event as e where e.dataset_id = ? 
"""; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationTemplateAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationTemplateAction.java index ff4b938a09..4953bd929d 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationTemplateAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationTemplateAction.java @@ -18,7 +18,7 @@ t.revision, t.description, t.arguments - from simulation_template as t + from merlin.simulation_template as t where t.id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationTopicsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationTopicsAction.java index e203acbb9f..420b388997 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationTopicsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSimulationTopicsAction.java @@ -19,7 +19,7 @@ e.topic_index, e.name, e.value_schema - from topic as e + from merlin.topic as e where e.dataset_id = ? """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSpanRecords.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSpanRecords.java index c858b0b54a..303a4f1e98 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSpanRecords.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetSpanRecords.java @@ -27,7 +27,7 @@ a.start_offset, a.duration, a.attributes - from span as a + from merlin.span as a where a.dataset_id = ? 
"""; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetUnvalidatedDirectivesAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetUnvalidatedDirectivesAction.java index 1439ac1e74..2f69e85c46 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetUnvalidatedDirectivesAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetUnvalidatedDirectivesAction.java @@ -20,10 +20,10 @@ public class GetUnvalidatedDirectivesAction implements AutoCloseable { private static final String sql = """ select ad.id, ad.plan_id, ad.type, ad.arguments, p.model_id, adv.last_modified_arguments_at - from activity_directive ad - join activity_directive_validations adv + from merlin.activity_directive ad + join merlin.activity_directive_validations adv on ad.id = adv.directive_id and ad.plan_id = adv.plan_id - join plan p + join merlin.plan p on ad.plan_id = p.id where adv.status = 'pending'; """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetValidConstraintRunsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetValidConstraintRunsAction.java index b38c9cb156..5d2b910ba1 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetValidConstraintRunsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/GetValidConstraintRunsAction.java @@ -21,7 +21,7 @@ final class GetValidConstraintRunsAction implements AutoCloseable { cr.constraint_revision, cr.simulation_dataset_id, cr.results - from constraint_run as cr + from merlin.constraint_run as cr where cr.constraint_id = any(?) 
and cr.simulation_dataset_id = ?; """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertActivityTypesAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertActivityTypesAction.java index 4c4f8b4076..4bc01e3649 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertActivityTypesAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertActivityTypesAction.java @@ -14,7 +14,7 @@ /*package-local*/ final class InsertActivityTypesAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - insert into activity_type (model_id, name, parameters, required_parameters, computed_attributes_value_schema) + insert into merlin.activity_type (model_id, name, parameters, required_parameters, computed_attributes_value_schema) values (?, ?, ?::json, ?::json, ?::json) on conflict (model_id, name) do update set parameters = excluded.parameters, @@ -53,12 +53,12 @@ public void apply(final int modelId, Collection activityTypes) for (int i : results) { if (i == Statement.EXECUTE_FAILED) { connection.rollback(); - throw new FailedInsertException("activity_type"); + throw new FailedInsertException("merlin.activity_type"); } connection.commit(); } } catch (BatchUpdateException bue){ - throw new FailedInsertException("activity_type"); + throw new FailedInsertException("merlin.activity_type"); } finally { this.statement.getConnection().setAutoCommit(true); } diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertConstraintRunsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertConstraintRunsAction.java index fd2d536a86..3546fd8c25 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertConstraintRunsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertConstraintRunsAction.java @@ -13,7 +13,7 @@ /* package local */ class InsertConstraintRunsAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - insert into constraint_run (constraint_id, constraint_revision, simulation_dataset_id, results) + insert into merlin.constraint_run (constraint_id, constraint_revision, simulation_dataset_id, results) values (?, ?, ?, ?::json) """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertResourceTypesAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertResourceTypesAction.java index d6bc9d5e21..56794a8540 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertResourceTypesAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertResourceTypesAction.java @@ -14,7 +14,7 @@ /*package-private*/ final class InsertResourceTypesAction implements AutoCloseable{ private static final @Language("SQL") String sql = """ - insert into resource_type (model_id, name, schema) + insert into merlin.resource_type (model_id, name, schema) values (?, ?, ?::json) on conflict (model_id, name) do update set schema = excluded.schema @@ -46,12 +46,12 @@ public void apply(final int modelId, Map resourceTypes) for (int i : results) { if (i == Statement.EXECUTE_FAILED) { connection.rollback(); - throw new FailedInsertException("resource_type"); + throw new 
FailedInsertException("merlin.resource_type"); } connection.commit(); } } catch (BatchUpdateException bue){ - throw new FailedInsertException("resource_type"); + throw new FailedInsertException("merlin.resource_type"); } finally { this.statement.getConnection().setAutoCommit(true); } diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertSimulationEventsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertSimulationEventsAction.java index 087c2963fc..fe61567ef1 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertSimulationEventsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertSimulationEventsAction.java @@ -19,7 +19,7 @@ /*package-local*/ final class InsertSimulationEventsAction implements AutoCloseable { @Language("SQL") private static final String sql = """ - insert into event (dataset_id, real_time, transaction_index, causal_time, topic_index, value) + insert into merlin.event (dataset_id, real_time, transaction_index, causal_time, topic_index, value) values (?, ?::timestamptz - ?::timestamptz, ?, ?, ?, ?::jsonb) """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertSimulationTopicsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertSimulationTopicsAction.java index 820c5350de..275165fa2f 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertSimulationTopicsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/InsertSimulationTopicsAction.java @@ -13,7 +13,7 @@ /*package-local*/ final class InsertSimulationTopicsAction implements AutoCloseable { @Language("SQL") private static final String sql = """ - insert into topic (dataset_id, topic_index, name, value_schema) + insert into merlin.topic (dataset_id, topic_index, name, value_schema) values (?, ?, ?, ?::jsonb) """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/LookupSimulationDatasetAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/LookupSimulationDatasetAction.java index 0cb8e795b0..6faf4162d0 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/LookupSimulationDatasetAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/LookupSimulationDatasetAction.java @@ -19,12 +19,12 @@ t.revision as template_revision, p.revision as plan_revision, m.revision as model_revision - from simulation as s - left join simulation_template as t + from merlin.simulation as s + left join merlin.simulation_template as t on s.simulation_template_id = t.id - left join plan as p + left join merlin.plan as p on p.id = s.plan_id - left join mission_model as m + left join merlin.mission_model as m on m.id = p.model_id) select d.dataset_id as dataset_id, @@ -34,7 +34,7 @@ to_char(d.simulation_start_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as simulation_start_time, to_char(d.simulation_end_time, 'YYYY-DDD"T"HH24:MI:SS.FF6') as simulation_end_time, d.id as id - from simulation_dataset as d + from merlin.simulation_dataset as d left join revisions as r on d.simulation_id = r.sim_id where diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostProfileSegmentsAction.java 
b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostProfileSegmentsAction.java index d5fe90e161..5d52283c78 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostProfileSegmentsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostProfileSegmentsAction.java @@ -14,7 +14,7 @@ public final class PostProfileSegmentsAction implements AutoCloseable { private final @Language("SQL") String sql = """ - insert into profile_segment (dataset_id, profile_id, start_offset, dynamics, is_gap) + insert into merlin.profile_segment (dataset_id, profile_id, start_offset, dynamics, is_gap) values (?, ?, ?::interval, ?::jsonb, ?) """; private final PreparedStatement statement; @@ -56,7 +56,7 @@ public void apply( final var results = this.statement.executeBatch(); for (final var result : results) { - if (result == Statement.EXECUTE_FAILED) throw new FailedInsertException("profile_segment"); + if (result == Statement.EXECUTE_FAILED) throw new FailedInsertException("merlin.profile_segment"); } } diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostProfilesAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostProfilesAction.java index 3a87284ffa..1438e9b14c 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostProfilesAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostProfilesAction.java @@ -22,7 +22,7 @@ import static gov.nasa.jpl.aerie.merlin.server.remotes.postgres.PostgresParsers.realProfileTypeP; /*package-local*/ final class PostProfilesAction implements AutoCloseable { private final @Language("SQL") String sql = """ - insert into profile (dataset_id, name, type, duration) + insert into merlin.profile (dataset_id, name, type, duration) values (?, ?, ?::jsonb, ?::interval) """; private final PreparedStatement statement; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostSpansAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostSpansAction.java index ea48007761..269d24b425 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostSpansAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/PostSpansAction.java @@ -20,7 +20,7 @@ /*package-local*/ final class PostSpansAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - insert into span (dataset_id, start_offset, duration, type, attributes) + insert into merlin.span (dataset_id, start_offset, duration, type, attributes) values (?, ?::timestamptz - ?::timestamptz, ?::timestamptz - ?::timestamptz, ?, ?::jsonb) """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/SetSimulationStateAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/SetSimulationStateAction.java index 8c575ee263..9774b52749 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/SetSimulationStateAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/SetSimulationStateAction.java @@ -8,7 +8,7 @@ /*package-local*/ final class SetSimulationStateAction implements AutoCloseable { private final @Language("SQL") String sql = """ - update simulation_dataset + 
update merlin.simulation_dataset set status = ?::status_t, reason = ?::json diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateActivityDirectiveValidationsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateActivityDirectiveValidationsAction.java index 4033597a19..408ca8bf47 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateActivityDirectiveValidationsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateActivityDirectiveValidationsAction.java @@ -13,7 +13,7 @@ /*package-local*/ final class UpdateActivityDirectiveValidationsAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - update activity_directive_validations + update merlin.activity_directive_validations set validations = ?::jsonb, status = 'complete' where (directive_id, plan_id, last_modified_arguments_at) = (?, ?, ?) diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateProfileDurationAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateProfileDurationAction.java index 0351ac1809..a23ea6a8ef 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateProfileDurationAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateProfileDurationAction.java @@ -10,7 +10,7 @@ /*package-local*/ final class UpdateProfileDurationAction implements AutoCloseable { private final @Language("SQL") String sql = """ - update profile + update merlin.profile set duration = ?::interval where dataset_id=? and id=?; """; diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateSimulatedActivityParentsAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateSimulatedActivityParentsAction.java index 831ca043a5..ff806194b1 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateSimulatedActivityParentsAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateSimulatedActivityParentsAction.java @@ -9,7 +9,7 @@ /*package-local*/ final class UpdateSimulatedActivityParentsAction implements AutoCloseable { private final @Language("SQL") String sql = """ - update span + update merlin.span set parent_id = ? where dataset_id = ? and id = ? 
@@ -40,7 +40,7 @@ public void apply(
     try {
       final var results = this.statement.executeBatch();
       for (final var result : results) {
-        if (result != 1) throw new FailedUpdateException("span");
+        if (result != 1) throw new FailedUpdateException("merlin.span");
       }
     } finally {
       this.statement.clearBatch();
diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateSimulationExtentAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateSimulationExtentAction.java
index a81979b6df..149be46bfc 100644
--- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateSimulationExtentAction.java
+++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/UpdateSimulationExtentAction.java
@@ -9,9 +9,9 @@ /*package-local*/ final class UpdateSimulationExtentAction implements AutoCloseable {
   private final @Language("SQL") String sql = """
-      insert into simulation_extent (simulation_dataset_id, extent)
+      insert into merlin.simulation_extent (simulation_dataset_id, extent)
       select id, ?::interval
-      from simulation_dataset
+      from merlin.simulation_dataset
       where dataset_id = ?
       on conflict (simulation_dataset_id) do update

From 434236846cc29dbfd7da1e82312e0ca73fc45124 Mon Sep 17 00:00:00 2001
From: Theresa Kamerman
Date: Tue, 12 Mar 2024 10:49:29 -0700
Subject: [PATCH 25/36] Update DB Actions in Backend (Scheduler)

- Update `deallocate` to remove unneeded reference to the Spec ID and to
  properly pass analysisID instead of spec revision
---
 .../remotes/postgres/CancelSchedulingRequestAction.java | 2 +-
 .../server/remotes/postgres/ClaimRequestAction.java     | 2 +-
 .../server/remotes/postgres/CreateRequestAction.java    | 4 ++--
 .../server/remotes/postgres/DeleteRequestAction.java    | 8 +++-----
 .../remotes/postgres/GetCreatedActivitiesAction.java    | 2 +-
 .../remotes/postgres/GetGoalSatisfactionAction.java     | 2 +-
 .../server/remotes/postgres/GetRequestAction.java       | 2 +-
 .../remotes/postgres/GetSatisfyingActivitiesAction.java | 2 +-
 .../server/remotes/postgres/GetSpecificationAction.java | 2 +-
 .../postgres/GetSpecificationConditionsAction.java      | 8 ++++----
 .../remotes/postgres/GetSpecificationGoalsAction.java   | 8 ++++----
 .../remotes/postgres/InsertCreatedActivitiesAction.java | 2 +-
 .../remotes/postgres/InsertGoalSatisfactionAction.java  | 2 +-
 .../postgres/InsertSatisfyingActivitiesAction.java      | 4 ++--
 .../remotes/postgres/PostgresResultsCellRepository.java | 7 +++----
 .../server/remotes/postgres/SetRequestStateAction.java  | 4 ++--
 16 files changed, 29 insertions(+), 32 deletions(-)

diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/CancelSchedulingRequestAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/CancelSchedulingRequestAction.java
index 24a77f3483..da61525c66 100644
--- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/CancelSchedulingRequestAction.java
+++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/CancelSchedulingRequestAction.java
@@ -8,7 +8,7 @@ /*package-local*/ final class CancelSchedulingRequestAction implements AutoCloseable {
   private static final @Language("SQL") String sql = """
-    update scheduling_request
+    update scheduler.scheduling_request
     set canceled = true
     where
diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/ClaimRequestAction.java
b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/ClaimRequestAction.java index 9b1b75bf42..ac0971e763 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/ClaimRequestAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/ClaimRequestAction.java @@ -7,7 +7,7 @@ /*package local*/ public class ClaimRequestAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - update scheduling_request + update scheduler.scheduling_request set status = 'incomplete' where (analysis_id = ? and status = 'pending' and not canceled) returning diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/CreateRequestAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/CreateRequestAction.java index c95320ec4d..7a9c8a18df 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/CreateRequestAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/CreateRequestAction.java @@ -20,7 +20,7 @@ .appendOffset("+HH:mm:ss", "+00") .toFormatter(); private final @Language("SQL") String sql = """ - insert into scheduling_request ( + insert into scheduler.scheduling_request ( specification_id, specification_revision, plan_revision, @@ -54,7 +54,7 @@ public RequestRecord apply(final SpecificationRecord specification, final String this.statement.setString(7, requestedBy); final var result = this.statement.executeQuery(); - if (!result.next()) throw new FailedInsertException("scheduling_request"); + if (!result.next()) throw new FailedInsertException("scheduler.scheduling_request"); final RequestRecord.Status status; try { diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/DeleteRequestAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/DeleteRequestAction.java index cc2d50da7e..42c58fac6c 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/DeleteRequestAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/DeleteRequestAction.java @@ -8,9 +8,8 @@ /*package-local*/ final class DeleteRequestAction implements AutoCloseable { private final @Language("SQL") String sql = """ - delete from scheduling_request + delete from scheduler.scheduling_request where - spec_id = ? and analysis_id = ? 
"""; @@ -20,9 +19,8 @@ public DeleteRequestAction(final Connection connection) throws SQLException { this.statement = connection.prepareStatement(sql); } - public void apply(final long specId, final long analysisId) throws SQLException { - this.statement.setLong(1, specId); - this.statement.setLong(2, analysisId); + public void apply(final long analysisId) throws SQLException { + this.statement.setLong(1, analysisId); this.statement.execute(); } diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetCreatedActivitiesAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetCreatedActivitiesAction.java index 7622976f3b..7bbd88b621 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetCreatedActivitiesAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetCreatedActivitiesAction.java @@ -18,7 +18,7 @@ c.goal_id, c.goal_revision, c.activity_id - from scheduling_goal_analysis_created_activities as c + from scheduler.scheduling_goal_analysis_created_activities as c where c.analysis_id = ? """; diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetGoalSatisfactionAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetGoalSatisfactionAction.java index 2294bb0ece..f06adcabfd 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetGoalSatisfactionAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetGoalSatisfactionAction.java @@ -14,7 +14,7 @@ goal.goal_id, goal.goal_revision, goal.satisfied - from scheduling_goal_analysis as goal + from scheduler.scheduling_goal_analysis as goal where goal.analysis_id = ? """; diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetRequestAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetRequestAction.java index 8ee5e8a575..2079a85164 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetRequestAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetRequestAction.java @@ -15,7 +15,7 @@ r.reason, r.canceled, r.dataset_id - from scheduling_request as r + from scheduler.scheduling_request as r where r.specification_id = ? and r.specification_revision = ? and r.plan_revision = ? diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSatisfyingActivitiesAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSatisfyingActivitiesAction.java index e049fc4593..19240ee2af 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSatisfyingActivitiesAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSatisfyingActivitiesAction.java @@ -18,7 +18,7 @@ s.goal_id, s.goal_revision, s.activity_id - from scheduling_goal_analysis_satisfying_activities as s + from scheduler.scheduling_goal_analysis_satisfying_activities as s where s.analysis_id = ? 
"""; diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationAction.java index 1d016faeb9..5ed47ae8d1 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationAction.java @@ -22,7 +22,7 @@ to_char(spec.horizon_end, 'YYYY-DDD"T"HH24:MI:SS.FF6') as horizon_end, spec.simulation_arguments, spec.analysis_only - from scheduling_specification as spec + from scheduler.scheduling_specification as spec where spec.id = ? """; diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationConditionsAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationConditionsAction.java index 61434da968..ab457f3afe 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationConditionsAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationConditionsAction.java @@ -14,14 +14,14 @@ /*package-local*/ final class GetSpecificationConditionsAction implements AutoCloseable { private final @Language("SQL") String sql = """ select s.condition_id, cd.revision, cm.name, cd.definition - from scheduling_specification_conditions s - left join scheduling_condition_definition cd using (condition_id) - left join scheduling_condition_metadata cm on s.condition_id = cm.id + from scheduler.scheduling_specification_conditions s + left join scheduler.scheduling_condition_definition cd using (condition_id) + left join scheduler.scheduling_condition_metadata cm on s.condition_id = cm.id where s.specification_id = ? and s.enabled and ((s.condition_revision is not null and s.condition_revision = cd.revision) or (s.condition_revision is null and cd.revision = (select def.revision - from scheduling_condition_definition def + from scheduler.scheduling_condition_definition def where def.condition_id = s.condition_id order by def.revision desc limit 1))); """; diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationGoalsAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationGoalsAction.java index 73ef450634..4753859b2a 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationGoalsAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/GetSpecificationGoalsAction.java @@ -14,14 +14,14 @@ /*package-local*/ final class GetSpecificationGoalsAction implements AutoCloseable { private final @Language("SQL") String sql = """ select s.goal_id, gd.revision, gm.name, gd.definition, s.simulate_after - from scheduling_specification_goals s - left join scheduling_goal_definition gd using (goal_id) - left join scheduling_goal_metadata gm on s.goal_id = gm.id + from scheduler.scheduling_specification_goals s + left join scheduler.scheduling_goal_definition gd using (goal_id) + left join scheduler.scheduling_goal_metadata gm on s.goal_id = gm.id where s.specification_id = ? 
and s.enabled and ((s.goal_revision is not null and s.goal_revision = gd.revision) or (s.goal_revision is null and gd.revision = (select def.revision - from scheduling_goal_definition def + from scheduler.scheduling_goal_definition def where def.goal_id = s.goal_id order by def.revision desc limit 1))) order by s.priority; diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertCreatedActivitiesAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertCreatedActivitiesAction.java index 84647e1ca0..8a17bc5279 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertCreatedActivitiesAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertCreatedActivitiesAction.java @@ -13,7 +13,7 @@ /*package-local*/ final class InsertCreatedActivitiesAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - insert into scheduling_goal_analysis_created_activities (analysis_id, goal_id, goal_revision, activity_id) + insert into scheduler.scheduling_goal_analysis_created_activities (analysis_id, goal_id, goal_revision, activity_id) values (?, ?, ?, ?) """; diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertGoalSatisfactionAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertGoalSatisfactionAction.java index 873992e088..76615d7fef 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertGoalSatisfactionAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertGoalSatisfactionAction.java @@ -11,7 +11,7 @@ /*package-local*/ final class InsertGoalSatisfactionAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - insert into scheduling_goal_analysis (analysis_id, goal_id, goal_revision, satisfied) + insert into scheduler.scheduling_goal_analysis (analysis_id, goal_id, goal_revision, satisfied) values (?, ?, ?, ?) """; diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertSatisfyingActivitiesAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertSatisfyingActivitiesAction.java index d08f7870a4..1282c5891d 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertSatisfyingActivitiesAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/InsertSatisfyingActivitiesAction.java @@ -13,7 +13,7 @@ /*package-local*/ final class InsertSatisfyingActivitiesAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - insert into scheduling_goal_analysis_satisfying_activities (analysis_id, goal_id, goal_revision, activity_id) + insert into scheduler.scheduling_goal_analysis_satisfying_activities (analysis_id, goal_id, goal_revision, activity_id) values (?, ?, ?, ?) 
"""; @@ -40,7 +40,7 @@ public void apply( final var resultSet = this.statement.executeBatch(); for (final var result : resultSet) { - if (result == Statement.EXECUTE_FAILED) throw new FailedInsertException("scheduling_goal_analysis_satisfying_activities"); + if (result == Statement.EXECUTE_FAILED) throw new FailedInsertException("scheduler.scheduling_goal_analysis_satisfying_activities"); } } diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/PostgresResultsCellRepository.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/PostgresResultsCellRepository.java index 4e80ce37cd..11455b06c2 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/PostgresResultsCellRepository.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/PostgresResultsCellRepository.java @@ -101,7 +101,7 @@ public void deallocate(final ResultsProtocol.OwnerRole resultsCell) throw new Error("Unable to deallocate results cell of unknown type"); } try (final var connection = this.dataSource.getConnection()) { - deleteRequest(connection, cell.specId, cell.specRevision); + deleteRequest(connection, cell.analysisId); } catch (final SQLException ex) { throw new DatabaseException("Failed to delete scheduling request", ex); } @@ -151,11 +151,10 @@ private static RequestRecord createRequest( private static void deleteRequest( final Connection connection, - final SpecificationId specId, - final long specRevision + final long analysisId ) throws SQLException { try (final var deleteRequestAction = new DeleteRequestAction(connection)) { - deleteRequestAction.apply(specId.id(), specRevision); + deleteRequestAction.apply(analysisId); } } diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/SetRequestStateAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/SetRequestStateAction.java index 5db879e3a3..0cc70b5920 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/SetRequestStateAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/SetRequestStateAction.java @@ -12,7 +12,7 @@ /*package-local*/ final class SetRequestStateAction implements AutoCloseable { private static final @Language("SQL") String sql = """ - update scheduling_request + update scheduler.scheduling_request set status = ?::status_t, reason = ?::json, @@ -46,7 +46,7 @@ public void apply( this.statement.setLong(5, specificationRevision); final var count = this.statement.executeUpdate(); - if (count < 1) throw new FailedUpdateException("scheduling_request"); + if (count < 1) throw new FailedUpdateException("scheduler.scheduling_request"); if (count > 1) throw new Error("More than one row affected by scheduling_request update by primary key. 
Is the database corrupted?"); } From 2fd7462ccd6489cde0be2b9f2e9fc387813925e1 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Tue, 12 Mar 2024 11:34:51 -0700 Subject: [PATCH 26/36] Update DB Actions in Backend (Sequencing) --- sequencing-server/src/app.ts | 2 +- .../src/routes/command-expansion.ts | 24 +++++++++---------- sequencing-server/src/routes/seqjson.ts | 16 ++++++------- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/sequencing-server/src/app.ts b/sequencing-server/src/app.ts index 975259333f..7c21945d11 100644 --- a/sequencing-server/src/app.ts +++ b/sequencing-server/src/app.ts @@ -146,7 +146,7 @@ app.post('/put-dictionary', async (req, res, next) => { logger.info(`command-lib generated - path: ${commandDictionaryPath}`); const sqlExpression = ` - insert into command_dictionary (command_types_typescript_path, mission, version, parsed_json) + insert into sequencing.command_dictionary (command_types_typescript_path, mission, version, parsed_json) values ($1, $2, $3, $4) on conflict (mission, version) do update set command_types_typescript_path = $1, parsed_json = $4 diff --git a/sequencing-server/src/routes/command-expansion.ts b/sequencing-server/src/routes/command-expansion.ts index e53e2eb39b..473d2d97e7 100644 --- a/sequencing-server/src/routes/command-expansion.ts +++ b/sequencing-server/src/routes/command-expansion.ts @@ -29,7 +29,7 @@ commandExpansionRouter.post('/put-expansion', async (req, res, next) => { const { rows } = await db.query( ` - insert into expansion_rule (activity_type, expansion_logic, authoring_command_dict_id, + insert into sequencing.expansion_rule (activity_type, expansion_logic, authoring_command_dict_id, authoring_mission_model_id) values ($1, $2, $3, $4) returning id; @@ -170,12 +170,12 @@ commandExpansionRouter.post('/put-expansion-set', async (req, res, next) => { const { rows } = await db.query( ` with expansion_set_id as ( - insert into expansion_set (command_dict_id, mission_model_id, description, owner, name) + insert into sequencing.expansion_set (command_dict_id, mission_model_id, description, owner, name) values ($1, $2, $3, $4, $5) returning id), - rules as (select id, activity_type from expansion_rule where id = any ($6::int[]) order by id) + rules as (select id, activity_type from sequencing.expansion_rule where id = any ($6::int[]) order by id) insert - into expansion_set_to_rule (set_id, rule_id, activity_type) + into sequencing.expansion_set_to_rule (set_id, rule_id, activity_type) select a.id, b.id, b.activity_type from (select id from expansion_set_id) a, (select id, activity_type from rules) b @@ -321,11 +321,11 @@ commandExpansionRouter.post('/expand-all-activity-instances', async (req, res, n const { rows } = await db.query( ` with expansion_run_id as ( - insert into expansion_run (simulation_dataset_id, expansion_set_id) + insert into sequencing.expansion_run (simulation_dataset_id, expansion_set_id) values ($1, $2) returning id) insert - into activity_instance_commands (expansion_run_id, + into sequencing.activity_instance_commands (expansion_run_id, activity_instance_id, commands, errors) @@ -361,8 +361,8 @@ commandExpansionRouter.post('/expand-all-activity-instances', async (req, res, n const seqToSimulatedActivity = await db.query( ` select seq_id, simulated_activity_id - from sequence_to_simulated_activity - where sequence_to_simulated_activity.simulated_activity_id in (${pgFormat( + from sequencing.sequence_to_simulated_activity + where sequencing.sequence_to_simulated_activity.simulated_activity_id 
in (${pgFormat(
          '%L',
          expandedActivityInstances.map(eai => eai.id),
        )})
      `,
    );
@@ -375,12 +375,12 @@ commandExpansionRouter.post('/expand-all-activity-instances', async (req, res, n
    const seqRows = await db.query(
      `
        select metadata, seq_id, simulation_dataset_id
-        from sequence
-        where sequence.seq_id in (${pgFormat(
+        from sequencing.sequence s
+        where s.seq_id in (${pgFormat(
          '%L',
          seqToSimulatedActivity.rows.map(row => row.seq_id),
        )})
-        and sequence.simulation_dataset_id = $1;
+        and s.simulation_dataset_id = $1;
      `,
      [simulationDatasetId],
    );
@@ -462,7 +462,7 @@ commandExpansionRouter.post('/expand-all-activity-instances', async (req, res, n
      const { rows } = await db.query(
        `
-          insert into expanded_sequences (expansion_run_id, seq_id, simulation_dataset_id, expanded_sequence, edsl_string)
+          insert into sequencing.expanded_sequences (expansion_run_id, seq_id, simulation_dataset_id, expanded_sequence, edsl_string)
          values ($1, $2, $3, $4, $5)
          returning id
        `,
diff --git a/sequencing-server/src/routes/seqjson.ts b/sequencing-server/src/routes/seqjson.ts
index 3aeedd93f2..66cd4ef4fc 100644
--- a/sequencing-server/src/routes/seqjson.ts
+++ b/sequencing-server/src/routes/seqjson.ts
@@ -142,8 +142,8 @@ seqjsonRouter.post('/get-seqjson-for-seqid-and-simulation-dataset', async (req,
          aic.activity_instance_id,
          aic.errors,
          aic.expansion_run_id
-        from sequence_to_simulated_activity ssa
-        join activity_instance_commands aic
+        from sequencing.sequence_to_simulated_activity ssa
+        join sequencing.activity_instance_commands aic
          on ssa.simulated_activity_id = aic.activity_instance_id
        where (ssa.simulation_dataset_id, ssa.seq_id) = ($1, $2)),
      max_values as (
@@ -168,9 +168,9 @@ seqjsonRouter.post('/get-seqjson-for-seqid-and-simulation-dataset', async (req,
      }>(
        `
          select metadata
-          from sequence
-          where sequence.seq_id = $2
-            and sequence.simulation_dataset_id = $1;
+          from sequencing.sequence s
+          where s.seq_id = $2
+            and s.simulation_dataset_id = $1;
        `,
        [simulationDatasetId, seqId],
      ),
@@ -286,8 +286,8 @@ seqjsonRouter.post('/bulk-get-seqjson-for-seqid-and-simulation-dataset', async (
          aic.expansion_run_id,
          ssa.seq_id,
          ssa.simulation_dataset_id
-        from sequence_to_simulated_activity ssa
-        join activity_instance_commands aic
+        from sequencing.sequence_to_simulated_activity ssa
+        join sequencing.activity_instance_commands aic
          on ssa.simulated_activity_id = aic.activity_instance_id
        where (ssa.seq_id, ssa.simulation_dataset_id) in (${pgFormat('%L', inputTuples)})
      ),
@@ -318,7 +318,7 @@ seqjsonRouter.post('/bulk-get-seqjson-for-seqid-and-simulation-dataset', async (
      }>(
        `
          select metadata, seq_id, simulation_dataset_id
-          from sequence s
+          from sequencing.sequence s
          where (s.seq_id, s.simulation_dataset_id) in (${pgFormat('%L', inputTuples)});
        `,
      ),

From c5fec881bb23585b96b974dbaa5cbfce6724ecf9 Mon Sep 17 00:00:00 2001
From: Theresa Kamerman
Date: Wed, 20 Mar 2024 17:37:42 -0700
Subject: [PATCH 27/36] Update DB Actions in Backend (Timeline Library)

---
 .../aerie/timeline/plan/AeriePostgresPlan.kt | 20 +++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/procedural/timeline/src/main/kotlin/gov/nasa/jpl/aerie/timeline/plan/AeriePostgresPlan.kt b/procedural/timeline/src/main/kotlin/gov/nasa/jpl/aerie/timeline/plan/AeriePostgresPlan.kt
index f6fca17785..3bd88f2118 100644
--- a/procedural/timeline/src/main/kotlin/gov/nasa/jpl/aerie/timeline/plan/AeriePostgresPlan.kt
+++ b/procedural/timeline/src/main/kotlin/gov/nasa/jpl/aerie/timeline/plan/AeriePostgresPlan.kt
@@ -33,19 +33,19 @@ data class
AeriePostgresPlan( ): Plan { private val datasetId by lazy { - val statement = c.prepareStatement("select dataset_id from simulation_dataset where id = ?;") + val statement = c.prepareStatement("select dataset_id from merlin.simulation_dataset where id = ?;") statement.setInt(1, simDatasetId) getSingleIntQueryResult(statement) } private val simulationId by lazy { - val statement = c.prepareStatement("select simulation_id from simulation_dataset where id = ?;") + val statement = c.prepareStatement("select simulation_id from merlin.simulation_dataset where id = ?;") statement.setInt(1, simDatasetId) getSingleIntQueryResult(statement) } private val simulationInfo by lazy { - val statement = c.prepareStatement("select plan_id, simulation_start_time, simulation_end_time from simulation where id = ?;") + val statement = c.prepareStatement("select plan_id, simulation_start_time, simulation_end_time from merlin.simulation where id = ?;") statement.setInt(1, simulationId) val response = statement.executeQuery() if (!response.next()) throw DatabaseError("Expected exactly one result for query, found none: $statement") @@ -67,7 +67,7 @@ data class AeriePostgresPlan( } private val planInfo by lazy { - val statement = c.prepareStatement("select start_time, duration from plan where id = ?;") + val statement = c.prepareStatement("select start_time, duration from merlin.plan where id = ?;") statement.setInt(1, simulationInfo.planId) intervalStyleStatement.execute() val response = statement.executeQuery() @@ -92,12 +92,12 @@ data class AeriePostgresPlan( private val intervalStyleStatement = c.prepareStatement("set intervalstyle = 'iso_8601';") private val profileInfoStatement = c.prepareStatement( - "select id, duration from profile where dataset_id = ? and name = ?;" + "select id, duration from merlin.profile where dataset_id = ? and name = ?;" ) private data class ProfileInfo(val id: Int, val duration: Duration) private val segmentsStatement = c.prepareStatement( - "select start_offset, dynamics, is_gap from profile_segment where profile_id = ? and dataset_id = ? order by start_offset asc;" + "select start_offset, dynamics, is_gap from merlin.profile_segment where profile_id = ? and dataset_id = ? order by start_offset asc;" ) /***/ class DatabaseError(message: String): Error(message) @@ -167,11 +167,11 @@ data class AeriePostgresPlan( } private val allInstancesStatement = c.prepareStatement( - "select start_offset, duration, attributes, activity_type_name, id from simulated_activity" + + "select start_offset, duration, attributes, activity_type_name, id from merlin.simulated_activity" + " where simulation_dataset_id = ?;" ) private val filteredInstancesStatement = c.prepareStatement( - "select start_offset, duration, attributes, activity_type_name, id from simulated_activity" + + "select start_offset, duration, attributes, activity_type_name, id from merlin.simulated_activity" + " where simulation_dataset_id = ? and activity_type_name = ?;" ) override fun instances(type: String?, deserializer: (SerializedValue) -> A): Instances { @@ -200,11 +200,11 @@ data class AeriePostgresPlan( } private val allDirectivesStatement = c.prepareStatement( - "select name, start_offset, type, arguments, id from activity_directive where plan_id = ?" + + "select name, start_offset, type, arguments, id from merlin.activity_directive where plan_id = ?" 
+ " and start_offset > ?::interval and start_offset < ?::interval;" ) private val filteredDirectivesStatement = c.prepareStatement( - "select name, start_offset, type, arguments, id from activity_directive where plan_id = ?" + + "select name, start_offset, type, arguments, id from merlin.activity_directive where plan_id = ?" + " and start_offset > ?::interval and start_offset < ?::interval and type = ?;" ) override fun directives(type: String?, deserializer: (SerializedValue) -> A) = BaseTimeline(::Directives) { opts -> From 57dfc327f364f168dc1006128b4d2c307eeeb528 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Wed, 13 Mar 2024 15:36:37 -0700 Subject: [PATCH 28/36] Merge merlin.status_t and scheduer.status_t into util_functions.request_status [Hasura is currently unable to properly use custom types not in the public schema] (https://github.com/hasura/graphql-engine/issues/4630). While this can be partially circumvented by specifying a search_path in the connection string (see change to Docker Compose), because these two types share a name, Hasura would always match to the type specified in the first listed schema in the search path. Because these types are identical sans the originating DB, they were combined into a singular type, `request_status`, rather than renaming each one. --- deployment/postgres-init-db/sql/init.sql | 1 + .../sql/tables/merlin/simulation/simulation_dataset.sql | 4 +--- .../tables/scheduler/scheduling_run/scheduling_request.sql | 4 +--- .../sql/types/util_functions/request_status.sql | 1 + docker-compose.yml | 2 +- .../server/remotes/postgres/SetSimulationStateAction.java | 2 +- .../server/remotes/postgres/SetRequestStateAction.java | 2 +- 7 files changed, 7 insertions(+), 9 deletions(-) create mode 100644 deployment/postgres-init-db/sql/types/util_functions/request_status.sql diff --git a/deployment/postgres-init-db/sql/init.sql b/deployment/postgres-init-db/sql/init.sql index 29a6b4dc11..dc22512bd1 100644 --- a/deployment/postgres-init-db/sql/init.sql +++ b/deployment/postgres-init-db/sql/init.sql @@ -15,6 +15,7 @@ begin; -- Util Functions \ir functions/util_functions/shared_update_functions.sql + \ir types/util_functions/request_status.sql -- Permissions \ir init_permissions.sql diff --git a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql index 6f106e0504..9ee3faf212 100644 --- a/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql +++ b/deployment/postgres-init-db/sql/tables/merlin/simulation/simulation_dataset.sql @@ -1,5 +1,3 @@ -create type merlin.status_t as enum('pending', 'incomplete', 'failed', 'success'); - create table merlin.simulation_dataset ( id integer generated always as identity, simulation_id integer not null, @@ -23,7 +21,7 @@ create table merlin.simulation_dataset ( simulation_end_time timestamptz not null, -- Simulation state - status merlin.status_t not null default 'pending', + status util_functions.request_status not null default 'pending', reason jsonb null, canceled boolean not null default false, diff --git a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql index c96eada7a5..c6439dd914 100644 --- a/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql +++ 
b/deployment/postgres-init-db/sql/tables/scheduler/scheduling_run/scheduling_request.sql @@ -1,5 +1,3 @@ -create type scheduler.status_t as enum('pending', 'incomplete', 'failed', 'success'); - create table scheduler.scheduling_request ( analysis_id integer generated always as identity, specification_id integer not null, @@ -9,7 +7,7 @@ create table scheduler.scheduling_request ( plan_revision integer not null, -- Scheduling State - status scheduler.status_t not null default 'pending', + status util_functions.request_status not null default 'pending', reason jsonb null, canceled boolean not null default false, diff --git a/deployment/postgres-init-db/sql/types/util_functions/request_status.sql b/deployment/postgres-init-db/sql/types/util_functions/request_status.sql new file mode 100644 index 0000000000..764de8e525 --- /dev/null +++ b/deployment/postgres-init-db/sql/types/util_functions/request_status.sql @@ -0,0 +1 @@ +create type util_functions.request_status as enum('pending', 'incomplete', 'failed', 'success'); diff --git a/docker-compose.yml b/docker-compose.yml index 72aed01b71..e84bff083b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -224,7 +224,7 @@ services: container_name: aerie_hasura depends_on: ["postgres"] environment: - AERIE_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie" + AERIE_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie?options=-c%20search_path%3Dutil_functions%2Chasura%2Cpermissions%2Ctags%2Cmerlin%2Cscheduler%2Csequencing%2Cpublic" AERIE_MERLIN_URL: "http://aerie_merlin:27183" AERIE_SCHEDULER_URL: "http://aerie_scheduler:27185" AERIE_SEQUENCING_URL: "http://aerie_sequencing:27184" diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/SetSimulationStateAction.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/SetSimulationStateAction.java index 9774b52749..172707b88d 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/SetSimulationStateAction.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/remotes/postgres/SetSimulationStateAction.java @@ -10,7 +10,7 @@ private final @Language("SQL") String sql = """ update merlin.simulation_dataset set - status = ?::status_t, + status = ?::util_functions.request_status, reason = ?::json where dataset_id = ? """; diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/SetRequestStateAction.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/SetRequestStateAction.java index 0cc70b5920..a8c4d540a2 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/SetRequestStateAction.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/remotes/postgres/SetRequestStateAction.java @@ -14,7 +14,7 @@ private static final @Language("SQL") String sql = """ update scheduler.scheduling_request set - status = ?::status_t, + status = ?::util_functions.request_status, reason = ?::json, dataset_id = ? 
where From bdbc42350306b5541b82c7b49cde352cdb9c8894 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Wed, 13 Mar 2024 10:08:54 -0700 Subject: [PATCH 29/36] Update DB Tests - Add schema info and language tags to SQL statements - Suppress `SqlSourceToSinkFlow` warnings, which are not significant since this is test code - Extract duplicate SQL code into methods - Close the connection and datasource when stopping DB in `DatabaseTestHelper`. Method renamed to `close` to reflect this. - Create helper method `DBTestHelper::clearSchema` to simplify test cleanup code - Move `PlanCollaborationTests::UpdateActivityName` and `DeleteActivityDirective` to `MerlinDBTestHelper` - Wrap created statements in "try-with-resources" - Fix Plan Collaboration Test `mergeBaseBetweenSelf`, which wasn't calling `get_merge_base` before --- db-tests/build.gradle | 1 + .../ActivityDirectiveChangelogTests.java | 75 +- .../nasa/jpl/aerie/database/AnchorTests.java | 149 ++- .../CommandExpansionDatabaseTests.java | 43 +- .../aerie/database/DatabaseTestHelper.java | 38 +- .../database/MerlinDatabaseTestHelper.java | 141 ++- .../aerie/database/MerlinDatabaseTests.java | 778 +++++++--------- .../jpl/aerie/database/PermissionsTest.java | 120 +-- .../database/PlanCollaborationTests.java | 875 +++++++----------- .../nasa/jpl/aerie/database/PresetTests.java | 97 +- .../database/SchedulerDatabaseTests.java | 252 ++--- .../nasa/jpl/aerie/database/TagsTests.java | 153 ++- 12 files changed, 1204 insertions(+), 1518 deletions(-) diff --git a/db-tests/build.gradle b/db-tests/build.gradle index c4420085f9..b6c927cfc3 100644 --- a/db-tests/build.gradle +++ b/db-tests/build.gradle @@ -24,6 +24,7 @@ task e2eTest(type: Test) { } dependencies { + testImplementation 'org.jetbrains:annotations:16.0.2' testImplementation 'org.postgresql:postgresql:42.6.1' testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.10.0' testImplementation 'org.junit.jupiter:junit-jupiter-params:5.10.0' diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/ActivityDirectiveChangelogTests.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/ActivityDirectiveChangelogTests.java index 769e2fefea..e50ee02716 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/ActivityDirectiveChangelogTests.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/ActivityDirectiveChangelogTests.java @@ -8,7 +8,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; @@ -16,9 +15,9 @@ import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertEquals; +@SuppressWarnings("SqlSourceToSinkFlow") @TestInstance(TestInstance.Lifecycle.PER_CLASS) public class ActivityDirectiveChangelogTests { - private static final File initSqlScriptFile = new File("../merlin-server/sql/merlin/init.sql"); private DatabaseTestHelper helper; private MerlinDatabaseTestHelper merlinHelper; private Connection connection; @@ -33,28 +32,19 @@ void beforeEach() throws SQLException { @AfterEach void afterEach() throws SQLException { - helper.clearTable("uploaded_file"); - helper.clearTable("mission_model"); - helper.clearTable("plan"); - helper.clearTable("activity_directive_changelog"); - helper.clearTable("activity_directive"); + helper.clearSchema("merlin"); } @BeforeAll void beforeAll() throws SQLException, IOException, InterruptedException { - helper = new DatabaseTestHelper( - 
"aerie_merlin_test", - "Merlin Database Tests", - initSqlScriptFile - ); - helper.startDatabase(); + helper = new DatabaseTestHelper("aerie_activity_changelog_test", "Activity Directive Changelog Tests"); connection = helper.connection(); merlinHelper = new MerlinDatabaseTestHelper(connection); } @AfterAll void afterAll() throws SQLException, IOException, InterruptedException { - helper.stopDatabase(); + helper.close(); connection = null; helper = null; } @@ -66,7 +56,7 @@ private Activity getActivity(final int planId, final int activityId) throws SQLE // language=sql """ SELECT * - FROM activity_directive + FROM merlin.activity_directive WHERE id = %d AND plan_id = %d; """.formatted(activityId, planId)); @@ -92,13 +82,26 @@ private Activity getActivity(final int planId, final int activityId) throws SQLE ); } } - + private void updateActivityStartOffset(int planId, int activityDirectiveId, String newOffset) throws SQLException { + try (final var statement = connection.createStatement()) { + final var updatedRows = statement.executeQuery( + // language=sql + """ + UPDATE merlin.activity_directive + SET start_offset = '%s' + WHERE plan_id = %d and id = %d + RETURNING id; + """.formatted(newOffset, planId, activityDirectiveId)); + updatedRows.next(); + assertEquals(activityDirectiveId, updatedRows.getInt(1)); + } + } private void revertActivityDirectiveToChangelog(int planId, int activityDirectiveId, int revision) throws SQLException { try (final var statement = connection.createStatement()) { statement.executeQuery( // language=sql """ - SELECT hasura_functions.restore_activity_changelog( + SELECT hasura.restore_activity_changelog( _plan_id => %d, _activity_directive_id => %d, _revision => %d, @@ -113,7 +116,7 @@ private int getChangelogRevisionCount(int planId, int activityDirectiveId) throw // language=sql """ SELECT count(revision) - FROM activity_directive_changelog + FROM merlin.activity_directive_changelog WHERE plan_id = %d and activity_directive_id = %d; """.formatted(planId, activityDirectiveId) ); @@ -130,7 +133,7 @@ void shouldHaveNoChangelogsForEmptyPlan() throws SQLException { // language=sql """ SELECT count(revision) - FROM activity_directive_changelog + FROM merlin.activity_directive_changelog WHERE plan_id = %d; """.formatted(planId) ); @@ -149,22 +152,8 @@ void shouldCreateChangelogForInsertedActDir() throws SQLException { @Test void shouldCreateChangelogForUpdatedActDir() throws SQLException { final var activityId = merlinHelper.insertActivity(planId); - assertEquals(1, getChangelogRevisionCount(planId, activityId)); - - try (final var statement = connection.createStatement()) { - final var updatedRows = statement.executeQuery( - // language=sql - """ - UPDATE activity_directive - SET start_offset = '%s' - WHERE plan_id = %d and id = %d - RETURNING id; - """.formatted("01:01:01", planId, activityId)); - updatedRows.next(); - assertEquals(activityId, updatedRows.getInt(1)); - } - + updateActivityStartOffset(planId, activityId, "01:01:01"); assertEquals(2, getChangelogRevisionCount(planId, activityId)); } @Test @@ -176,7 +165,7 @@ void changelogRevisionHasCorrectValues() throws SQLException { // language=sql """ SELECT * - FROM activity_directive_changelog + FROM merlin.activity_directive_changelog WHERE plan_id = %d and activity_directive_id = %d; """.formatted(planId, activityId)); @@ -203,22 +192,10 @@ void changelogRevisionHasCorrectValues() throws SQLException { void shouldDeleteChangelogsOverRevisionLimit() throws SQLException { final var maxRevisionsLimit = 11; final 
var activityId = merlinHelper.insertActivity(planId); - // randomly update activity directive > maxRevisionsLimit times for (int i = 0; i < maxRevisionsLimit * 2; i++) { - connection.createStatement() - .executeQuery( - // language=sql - """ - UPDATE activity_directive - SET start_offset = '%02d' - WHERE plan_id = %d and id = %d - RETURNING id; - """.formatted(i, planId, activityId) - ) - .close(); + updateActivityStartOffset(planId, activityId, "%02d"); } - assertEquals(maxRevisionsLimit, getChangelogRevisionCount(planId, activityId)); } @@ -266,7 +243,7 @@ void shouldRevertActDirToChangelogEntry() throws SQLException { statement.executeQuery( // language=sql """ - UPDATE activity_directive + UPDATE merlin.activity_directive SET name = 'changed', start_offset = '01:01:01', diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/AnchorTests.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/AnchorTests.java index e0c54352d8..e52dff6b0a 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/AnchorTests.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/AnchorTests.java @@ -9,7 +9,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; @@ -22,9 +21,9 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +@SuppressWarnings("SqlSourceToSinkFlow") @TestInstance(TestInstance.Lifecycle.PER_CLASS) public class AnchorTests { - private static final File initSqlScriptFile = new File("../merlin-server/sql/merlin/init.sql"); private DatabaseTestHelper helper; private MerlinDatabaseTestHelper merlinHelper; @@ -40,42 +39,19 @@ void beforeEach() throws SQLException { @AfterEach void afterEach() throws SQLException { - helper.clearTable("uploaded_file"); - helper.clearTable("mission_model"); - helper.clearTable("plan"); - helper.clearTable("activity_directive"); - helper.clearTable("simulation_template"); - helper.clearTable("simulation"); - helper.clearTable("dataset"); - helper.clearTable("plan_dataset"); - helper.clearTable("simulation_dataset"); - helper.clearTable("plan_snapshot"); - helper.clearTable("plan_latest_snapshot"); - helper.clearTable("plan_snapshot_activities"); - helper.clearTable("plan_snapshot_parent"); - helper.clearTable("merge_request"); - helper.clearTable("merge_staging_area"); - helper.clearTable("conflicting_activities"); - helper.clearTable("anchor_validation_status"); + helper.clearSchema("merlin"); } @BeforeAll void beforeAll() throws SQLException, IOException, InterruptedException { - helper = new DatabaseTestHelper( - "aerie_merlin_test", - "Merlin Database Tests", - initSqlScriptFile - ); - helper.startDatabase(); + helper = new DatabaseTestHelper("aerie_anchor_test", "Anchor Tests"); connection = helper.connection(); merlinHelper = new MerlinDatabaseTestHelper(connection); } @AfterAll void afterAll() throws SQLException, IOException, InterruptedException { - helper.stopDatabase(); - connection = null; - helper = null; + helper.close(); } //region Helper Methods @@ -83,8 +59,9 @@ void afterAll() throws SQLException, IOException, InterruptedException { private void updateOffsetFromAnchor(PGInterval newOffset, int activityId, int planId) throws SQLException { try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - update activity_directive + update merlin.activity_directive set start_offset = '%s' where id 
= %d and plan_id = %d; """.formatted(newOffset.toString(), activityId, planId)); @@ -93,12 +70,14 @@ private void updateOffsetFromAnchor(PGInterval newOffset, int activityId, int pl private Activity getActivity(final int planId, final int activityId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" + final var res = statement.executeQuery( + //language=sql + """ SELECT id, plan_id, start_offset, anchor_id, anchored_to_start, approximate_start_time - FROM activity_directive_extended + FROM merlin.activity_directive_extended WHERE id = %d AND plan_id = %d; - """.formatted(activityId, planId)); + """.formatted(activityId, planId)); res.next(); return new Activity( res.getInt("id"), @@ -113,12 +92,14 @@ private Activity getActivity(final int planId, final int activityId) throws SQLE private ArrayList getActivities(final int planId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" + final var res = statement.executeQuery( + //language=sql + """ SELECT * - FROM activity_directive_extended + FROM merlin.activity_directive_extended WHERE plan_id = %d ORDER BY id; - """.formatted(planId)); + """.formatted(planId)); final var activities = new ArrayList(); while (res.next()){ @@ -135,22 +116,16 @@ private ArrayList getActivities(final int planId) throws SQLException } } - private void deleteActivityDirective(final int planId, final int activityId) throws SQLException { - try (final var statement = connection.createStatement()) { - statement.executeUpdate(""" - delete from activity_directive where id = %s and plan_id = %s - """.formatted(activityId, planId)); - } - } - private AnchorValidationStatus getValidationStatus(final int planId, final int activityId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT * - FROM anchor_validation_status - WHERE activity_id = %d - AND plan_id = %d; - """.formatted(activityId, planId)); + final var res = statement.executeQuery( + //language=sql + """ + SELECT * + FROM merlin.anchor_validation_status + WHERE activity_id = %d + AND plan_id = %d; + """.formatted(activityId, planId)); res.next(); return new AnchorValidationStatus( res.getInt("activity_id"), @@ -166,15 +141,13 @@ private AnchorValidationStatus refresh(AnchorValidationStatus original) throws S int insertActivityWithAnchor(final int planId, final PGInterval startOffset, final int anchorId, final boolean anchoredToStart) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement - .executeQuery( + final var res = statement.executeQuery( + //language=sql """ - INSERT INTO activity_directive (type, plan_id, start_offset, arguments, anchor_id, anchored_to_start) - VALUES ('test-activity', '%s', '%s', '{}', %d, %b) - RETURNING id;""" - .formatted(planId, startOffset.toString(), anchorId, anchoredToStart) - ); - + INSERT INTO merlin.activity_directive (type, plan_id, start_offset, arguments, anchor_id, anchored_to_start) + VALUES ('test-activity', '%s', '%s', '{}', %d, %b) + RETURNING id; + """.formatted(planId, startOffset.toString(), anchorId, anchoredToStart)); res.next(); return res.getInt("id"); } @@ -857,7 +830,7 @@ void cantDeleteActivityWithAnchors() throws SQLException { insertActivityWithAnchor(planId, new PGInterval("0 seconds"), anchorId, true); try { - deleteActivityDirective(planId, anchorId); + 
merlinHelper.deleteActivityDirective(planId, anchorId); fail(); } catch (SQLException ex){ if(!ex.getMessage().contains( @@ -876,51 +849,57 @@ void rebasesDoNotRunOnNullParameters() throws SQLException { try (final var statement = connection.createStatement()) { // Reanchor to Plan Start var results = statement.executeQuery( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_plan_start(%d, null, '%s'::json) - """.formatted(activityId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_reanchor_plan_start(%d, null, '%s'::json) + """.formatted(activityId, merlinHelper.admin.session())); if (results.next()) { fail(); } results = statement.executeQuery( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_plan_start(null, %d, '%s'::json) - """.formatted(planId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_reanchor_plan_start(null, %d, '%s'::json) + """.formatted(planId, merlinHelper.admin.session())); if (results.next()) { fail(); } // Reanchor to ascendant anchor results = statement.executeQuery( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_to_anchor(%d, null, '%s'::json) - """.formatted(activityId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_reanchor_to_anchor(%d, null, '%s'::json) + """.formatted(activityId, merlinHelper.admin.session())); if (results.next()) { fail(); } results = statement.executeQuery( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_to_anchor(null, %d, '%s'::json) - """.formatted(planId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_reanchor_to_anchor(null, %d, '%s'::json) + """.formatted(planId, merlinHelper.admin.session())); if (results.next()) { fail(); } // Delete Remaining Chain results = statement.executeQuery( + //language=sql """ - select hasura_functions.delete_activity_by_pk_delete_subtree(%d, null, '%s'::json) - """.formatted(activityId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_delete_subtree(%d, null, '%s'::json) + """.formatted(activityId, merlinHelper.admin.session())); if (results.next()) { fail(); } results = statement.executeQuery( + //language=sql """ - select hasura_functions.delete_activity_by_pk_delete_subtree(null, %d, '%s'::json) - """.formatted(planId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_delete_subtree(null, %d, '%s'::json) + """.formatted(planId, merlinHelper.admin.session())); if (results.next()) { fail(); } @@ -933,9 +912,10 @@ void cannotRebaseActivityThatDoesNotExist() throws SQLException{ try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_plan_start(-1, %d, '%s'::json) - """.formatted(planId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_reanchor_plan_start(-1, %d, '%s'::json) + """.formatted(planId, merlinHelper.admin.session())); fail(); } catch (SQLException ex){ if(!ex.getMessage().contains("Activity Directive -1 does not exist in Plan "+planId)){ @@ -945,9 +925,10 @@ void cannotRebaseActivityThatDoesNotExist() throws SQLException{ try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_to_anchor(-1, %d, '%s'::json) - """.formatted(planId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_reanchor_to_anchor(-1, %d, '%s'::json) + 
""".formatted(planId, merlinHelper.admin.session())); fail(); } catch (SQLException ex){ if(!ex.getMessage().contains("Activity Directive -1 does not exist in Plan "+planId)){ @@ -957,9 +938,10 @@ void cannotRebaseActivityThatDoesNotExist() throws SQLException{ try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select hasura_functions.delete_activity_by_pk_delete_subtree(-1, %d, '%s'::json) - """.formatted(planId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_delete_subtree(-1, %d, '%s'::json) + """.formatted(planId, merlinHelper.admin.session())); fail(); } catch (SQLException ex){ if(!ex.getMessage().contains("Activity Directive -1 does not exist in Plan "+planId)){ @@ -1001,9 +983,10 @@ void rebaseToAscendantAnchor() throws SQLException{ try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_to_anchor(%d, %d, '%s'::json) - """.formatted(baseId, planId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_reanchor_to_anchor(%d, %d, '%s'::json) + """.formatted(baseId, planId, merlinHelper.admin.session())); } final var remainingActivities = getActivities(planId); @@ -1059,9 +1042,10 @@ void rebaseChainsToPlanStart() throws SQLException{ try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_plan_start(%d, %d, '%s'::json) - """.formatted(baseId, planId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_reanchor_plan_start(%d, %d, '%s'::json) + """.formatted(baseId, planId, merlinHelper.admin.session())); } final var remainingActivities = getActivities(planId); @@ -1107,9 +1091,10 @@ void deleteChain() throws SQLException{ try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select hasura_functions.delete_activity_by_pk_delete_subtree(%d, %d, '%s'::json) - """.formatted(baseId, planId, merlinHelper.admin.session())); + select hasura.delete_activity_by_pk_delete_subtree(%d, %d, '%s'::json) + """.formatted(baseId, planId, merlinHelper.admin.session())); } final var remainingActivities = getActivities(planId); diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/CommandExpansionDatabaseTests.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/CommandExpansionDatabaseTests.java index 9d37045338..179e83c3b8 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/CommandExpansionDatabaseTests.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/CommandExpansionDatabaseTests.java @@ -7,7 +7,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; @@ -17,34 +16,23 @@ @TestInstance(TestInstance.Lifecycle.PER_CLASS) class CommandExpansionDatabaseTests { - private static final File initSqlScriptFile = new File("../sequencing-server/sql/sequencing/init.sql"); private DatabaseTestHelper helper; - private Connection connection; @BeforeAll void beforeAll() throws SQLException, IOException, InterruptedException { - helper = new DatabaseTestHelper( - "command_expansion_test", - "Command Expansion Database Tests", - initSqlScriptFile - ); - helper.startDatabase(); + helper = new DatabaseTestHelper("command_expansion_test", "Command Expansion Database Tests"); connection = helper.connection(); 
} @AfterAll void afterAll() throws SQLException, IOException, InterruptedException { - helper.stopDatabase(); - connection = null; - helper = null; + helper.close(); } @AfterEach void afterEach() throws SQLException { - helper.clearTable("expansion_rule"); - helper.clearTable("sequence"); - helper.clearTable("sequence_to_simulated_activity"); + helper.clearSchema("sequencing"); } @Nested @@ -52,11 +40,13 @@ class ExpansionRuleTriggers { @Test void shouldModifyUpdatedAtTimeOnUpdate() throws SQLException { try (final var statement = connection.createStatement()) { - final var insertRes = statement.executeQuery(""" - insert into expansion_rule (activity_type, expansion_logic) - values ('test-activity-type', 'test-activity-logic') - returning id, created_at, updated_at - """); + final var insertRes = statement.executeQuery( + //language=sql + """ + insert into sequencing.expansion_rule (activity_type, expansion_logic) + values ('test-activity-type', 'test-activity-logic') + returning id, created_at, updated_at + """); insertRes.next(); final var id = insertRes.getInt("id"); final var created_at = insertRes.getTimestamp("created_at"); @@ -64,11 +54,14 @@ insert into expansion_rule (activity_type, expansion_logic) assertEquals(created_at, updated_at); - final var updateRes = statement.executeQuery(""" - update expansion_rule set expansion_logic = 'updated-logic' - where id = %d - returning created_at, updated_at - """.formatted(id)); + final var updateRes = statement.executeQuery( + //language=sql + """ + update sequencing.expansion_rule + set expansion_logic = 'updated-logic' + where id = %d + returning created_at, updated_at + """.formatted(id)); updateRes.next(); final var created_at2 = updateRes.getTimestamp("created_at"); final var updated_at2 = updateRes.getTimestamp("updated_at"); diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/DatabaseTestHelper.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/DatabaseTestHelper.java index 2f99b12be7..64e0549291 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/DatabaseTestHelper.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/DatabaseTestHelper.java @@ -2,6 +2,7 @@ import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; +import org.intellij.lang.annotations.Language; import org.junit.jupiter.api.Assumptions; import java.io.File; @@ -13,23 +14,26 @@ /** * Manages the test database. 
*/ +@SuppressWarnings("SqlSourceToSinkFlow") public class DatabaseTestHelper { - private Connection connection; + private final Connection connection; + private final HikariDataSource hikariDataSource; private final String dbName; private final String appName; - private final File initSqlScriptFile; + private final File initSqlScriptFile = new File("../deployment/postgres-init-db/sql/init.sql"); - public DatabaseTestHelper(String dbName, String appName, File initSqlScriptFile) { + public DatabaseTestHelper(String dbName, String appName) throws SQLException, IOException, InterruptedException { this.dbName = dbName; this.appName = appName; - this.initSqlScriptFile = initSqlScriptFile; + this.hikariDataSource = startDatabase(); + this.connection = hikariDataSource.getConnection(); } /** * Sets up the test database */ - public void startDatabase() throws SQLException, IOException, InterruptedException { + private HikariDataSource startDatabase() throws IOException, InterruptedException { // Create test database and grant privileges { final var pb = new ProcessBuilder("psql", @@ -78,16 +82,13 @@ public void startDatabase() throws SQLException, IOException, InterruptedExcepti hikariConfig.setConnectionInitSql("set time zone 'UTC'"); - final var hikariDataSource = new HikariDataSource(hikariConfig); - - connection = hikariDataSource.getConnection(); + return new HikariDataSource(hikariConfig); } /** * Tears down the test database */ - public void stopDatabase() throws SQLException, IOException, InterruptedException { - + public void close() throws SQLException, IOException, InterruptedException { Assumptions.assumeTrue(connection != null); connection.close(); @@ -105,15 +106,30 @@ public void stopDatabase() throws SQLException, IOException, InterruptedExceptio final var proc = pb.start(); proc.waitFor(); proc.destroy(); + connection.close(); + hikariDataSource.close(); } public Connection connection() { return connection; } - public void clearTable(String table) throws SQLException { + public void clearTable(@Language(value="SQL", prefix="SELECT * FROM ") String table) throws SQLException { try (final var statement = connection.createStatement()) { statement.executeUpdate("TRUNCATE " + table + " CASCADE;"); } } + + public void clearSchema(@Language(value="SQL", prefix="DROP SCHEMA ") String schema) throws SQLException { + try (final var statement = connection.createStatement()) { + final var res = statement.executeQuery( + //language=sql + """ + select tablename from pg_tables where schemaname = '%s'; + """.formatted(schema)); + while(res.next()) { + clearTable(schema+"."+res.getString("tablename")); + } + } + } } diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/MerlinDatabaseTestHelper.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/MerlinDatabaseTestHelper.java index 0623c96d2d..2fea35cf53 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/MerlinDatabaseTestHelper.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/MerlinDatabaseTestHelper.java @@ -4,6 +4,7 @@ import java.sql.SQLException; import java.util.UUID; +@SuppressWarnings("SqlSourceToSinkFlow") final class MerlinDatabaseTestHelper { private final Connection connection; final User admin; @@ -26,10 +27,11 @@ User insertUser(final String username) throws SQLException { User insertUser(final String username, final String defaultRole) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( - """ - INSERT INTO metadata.users (username, default_role) - 
VALUES ('%s', '%s'); - """.formatted(username, defaultRole) + //language=sql + """ + INSERT INTO permissions.users (username, default_role) + VALUES ('%s', '%s'); + """.formatted(username, defaultRole) ); } return new User( @@ -43,11 +45,12 @@ int insertFileUpload() throws SQLException { try (final var statement = connection.createStatement()) { final var res = statement .executeQuery( + //language=sql """ - INSERT INTO uploaded_file (path, name) - VALUES ('test-path', 'test-name-%s') - RETURNING id;""" - .formatted(UUID.randomUUID().toString()) + INSERT INTO merlin.uploaded_file (path, name) + VALUES ('test-path', 'test-name-%s') + RETURNING id; + """.formatted(UUID.randomUUID().toString()) ); res.next(); return res.getInt("id"); @@ -62,11 +65,12 @@ int insertMissionModel(final int fileId, final String username) throws SQLExcept try (final var statement = connection.createStatement()) { final var res = statement .executeQuery( + //language=sql """ - INSERT INTO mission_model (name, mission, owner, version, jar_id) - VALUES ('test-mission-model-%s', 'test-mission', '%s', '0', %s) - RETURNING id;""" - .formatted(UUID.randomUUID().toString(), username, fileId) + INSERT INTO merlin.mission_model (name, mission, owner, version, jar_id) + VALUES ('test-mission-model-%s', 'test-mission', '%s', '0', %s) + RETURNING id; + """.formatted(UUID.randomUUID().toString(), username, fileId) ); res.next(); return res.getInt("id"); @@ -87,26 +91,37 @@ int insertPlan(final int missionModelId, final String username, final String pl int insertPlan(final int missionModelId, final String username, final String planName, final String start_time) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement - .executeQuery( - """ - INSERT INTO plan (name, model_id, duration, start_time, owner) - VALUES ('%s', '%s', '0', '%s', '%s') - RETURNING id;""" - .formatted(planName, missionModelId, start_time, username) - ); + final var res = statement.executeQuery( + //language=sql + """ + INSERT INTO merlin.plan (name, model_id, duration, start_time, owner) + VALUES ('%s', '%s', '0', '%s', '%s') + RETURNING id; + """.formatted(planName, missionModelId, start_time, username)); res.next(); return res.getInt("id"); } } + void deletePlan(final int planId) throws SQLException { + try (final var statement = connection.createStatement()) { + statement.execute( + //language=sql + """ + DELETE FROM merlin.plan + WHERE id = %d; + """.formatted(planId)); + } + } + void insertPlanCollaborator(final int planId, final String username) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( - """ - INSERT INTO plan_collaborators (plan_id, collaborator) - VALUES (%d, '%s'); - """.formatted(planId, username) + //language=sql + """ + INSERT INTO merlin.plan_collaborators (plan_id, collaborator) + VALUES (%d, '%s'); + """.formatted(planId, username) ); } } @@ -127,11 +142,12 @@ int insertActivity(final int planId, final String startOffset, final String argu try (final var statement = connection.createStatement()) { final var res = statement .executeQuery( + //language=sql """ - INSERT INTO activity_directive (type, plan_id, start_offset, arguments, last_modified_by, created_by) - VALUES ('test-activity', '%s', '%s', '%s', '%s', '%s') - RETURNING id;""" - .formatted(planId, startOffset, arguments, user.name, user.name) + INSERT INTO merlin.activity_directive (type, plan_id, start_offset, arguments, last_modified_by, created_by) + VALUES 
('test-activity', '%s', '%s', '%s', '%s', '%s') + RETURNING id; + """.formatted(planId, startOffset, arguments, user.name, user.name) ); res.next(); @@ -139,6 +155,28 @@ INSERT INTO activity_directive (type, plan_id, start_offset, arguments, last_mod } } + void updateActivityName(String newName, int activityId, int planId) throws SQLException { + try(final var statement = connection.createStatement()) { + statement.execute( + //language=sql + """ + update merlin.activity_directive + set name = '%s' + where id = %d and plan_id = %d; + """.formatted(newName, activityId, planId)); + } + } + + void deleteActivityDirective(final int planId, final int activityId) throws SQLException { + try (final var statement = connection.createStatement()) { + statement.executeUpdate( + //language=sql + """ + delete from merlin.activity_directive where id = %s and plan_id = %s + """.formatted(activityId, planId)); + } + } + /** * To anchor an activity to the plan, set "anchorId" equal to -1. */ @@ -146,20 +184,22 @@ void setAnchor(int anchorId, boolean anchoredToStart, int activityId, int planId try (final var statement = connection.createStatement()) { if (anchorId == -1) { statement.execute( + //language=sql """ - update activity_directive - set anchor_id = null, - anchored_to_start = %b - where id = %d and plan_id = %d; - """.formatted(anchoredToStart, activityId, planId)); + update merlin.activity_directive + set anchor_id = null, + anchored_to_start = %b + where id = %d and plan_id = %d; + """.formatted(anchoredToStart, activityId, planId)); } else { statement.execute( + //language=sql """ - update activity_directive - set anchor_id = %d, - anchored_to_start = %b - where id = %d and plan_id = %d; - """.formatted(anchorId, anchoredToStart, activityId, planId)); + update merlin.activity_directive + set anchor_id = %d, + anchored_to_start = %b + where id = %d and plan_id = %d; + """.formatted(anchorId, anchoredToStart, activityId, planId)); } } } @@ -167,8 +207,9 @@ void setAnchor(int anchorId, boolean anchoredToStart, int activityId, int planId void insertActivityType(final int modelId, final String name) throws SQLException { try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - INSERT INTO activity_type (model_id, name, parameters, required_parameters, computed_attributes_value_schema) + INSERT INTO merlin.activity_type (model_id, name, parameters, required_parameters, computed_attributes_value_schema) VALUES (%d, '%s', '{}', '[]', '{}'); """.formatted(modelId, name) ); @@ -191,11 +232,12 @@ int insertPreset(int modelId, String name, String associatedActivityType, String try (final var statement = connection.createStatement()) { final var res = statement .executeQuery( + //language=sql """ - INSERT INTO activity_presets (model_id, name, associated_activity_type, arguments, owner) - VALUES (%d, '%s', '%s', '%s', '%s') - RETURNING id;""" - .formatted(modelId, name, associatedActivityType, arguments, username) + INSERT INTO merlin.activity_presets (model_id, name, associated_activity_type, arguments, owner) + VALUES (%d, '%s', '%s', '%s', '%s') + RETURNING id; + """.formatted(modelId, name, associatedActivityType, arguments, username) ); res.next(); return res.getInt("id"); @@ -204,8 +246,10 @@ INSERT INTO activity_presets (model_id, name, associated_activity_type, argument void assignPreset(int presetId, int activityId, int planId, String userSession) throws SQLException { try(final var statement = connection.createStatement()){ - statement.execute(""" - select 
hasura_functions.apply_preset_to_activity(%d, %d, %d, '%s'::json); + statement.execute( + //language=sql + """ + select hasura.apply_preset_to_activity(%d, %d, %d, '%s'::json); """.formatted(presetId, activityId, planId, userSession)); } } @@ -215,7 +259,7 @@ void unassignPreset(int presetId, int activityId, int planId) throws SQLExceptio statement.execute( //language=sql """ - delete from preset_to_directive + delete from merlin.preset_to_directive where (preset_id, activity_id, plan_id) = (%d, %d, %d); """.formatted(presetId, activityId, planId)); } @@ -225,13 +269,14 @@ void unassignPreset(int presetId, int activityId, int planId) throws SQLExceptio int insertConstraint(String name, String definition, User user) throws SQLException { try(final var statement = connection.createStatement()) { final var res = statement.executeQuery( + //language=sql """ WITH metadata(id, owner) AS ( - INSERT INTO public.constraint_metadata(name, description, owner, updated_by) + INSERT INTO merlin.constraint_metadata(name, description, owner, updated_by) VALUES ('%s', 'Merlin DB Test Constraint', '%s', '%s') RETURNING id, owner ) - INSERT INTO public.constraint_definition(constraint_id, definition, author) + INSERT INTO merlin.constraint_definition(constraint_id, definition, author) SELECT m.id, '%s', m.owner FROM metadata m RETURNING constraint_id; diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/MerlinDatabaseTests.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/MerlinDatabaseTests.java index bb084e46c1..3ee14c35e9 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/MerlinDatabaseTests.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/MerlinDatabaseTests.java @@ -9,7 +9,6 @@ import org.junit.jupiter.api.TestInstance; import org.postgresql.util.PGInterval; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; @@ -18,7 +17,6 @@ import java.util.UUID; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; @@ -30,7 +28,6 @@ record ProfileSegmentAtATimeRecord(int datasetId, int profileId, String name, St @TestInstance(TestInstance.Lifecycle.PER_CLASS) class MerlinDatabaseTests { - private static final File initSqlScriptFile = new File("../merlin-server/sql/merlin/init.sql"); private DatabaseTestHelper helper; private MerlinDatabaseTestHelper merlinHelper; @@ -38,32 +35,80 @@ class MerlinDatabaseTests { @BeforeAll void beforeAll() throws SQLException, IOException, InterruptedException { - helper = new DatabaseTestHelper( - "aerie_merlin_test", - "Merlin Database Tests", - initSqlScriptFile - ); - helper.startDatabase(); + helper = new DatabaseTestHelper("aerie_merlin_test", "Merlin Database Tests"); connection = helper.connection(); merlinHelper = new MerlinDatabaseTestHelper(connection); } @AfterAll void afterAll() throws SQLException, IOException, InterruptedException { - helper.stopDatabase(); - connection = null; - helper = null; + helper.close(); + } + + //region Helper Methods + int getMissionModelRevision(final int modelId) throws SQLException { + try(final var statement = connection.createStatement()) { + final var res = statement.executeQuery( + //language=sql + """ + SELECT revision + FROM merlin.mission_model + WHERE id = %d; + """.formatted(modelId)); + 
res.next(); + return res.getInt("revision"); + } + } + int getPlanRevision(final int planId) throws SQLException { + try(final var statement = connection.createStatement()) { + final var res = statement.executeQuery( + //language=sql + """ + SELECT revision + FROM merlin.plan + WHERE id = %d; + """.formatted(planId)); + res.next(); + return res.getInt("revision"); + } + } + int getSimulationTemplateRevision(final int simulationTemplateId) throws SQLException { + try(final var statement = connection.createStatement()) { + final var res = statement.executeQuery( + //language=sql + """ + SELECT revision + FROM merlin.simulation_template + WHERE id = %d; + """.formatted(simulationTemplateId)); + res.next(); + return res.getInt("revision"); + } + } + int getSimulationRevision(final int simulationId) throws SQLException { + try(final var statement = connection.createStatement()) { + final var res = statement.executeQuery( + //language=sql + """ + SELECT revision + FROM merlin.simulation + WHERE id = %d; + """.formatted(simulationId)); + res.next(); + return res.getInt("revision"); + } } int insertSimulationTemplate(final int modelId) throws SQLException { try (final var statement = connection.createStatement()) { final var res = statement .executeQuery( + //language=sql """ - INSERT INTO simulation_template (model_id, description, arguments) - VALUES ('%s', 'test-description', '{}') - RETURNING id;""" - .formatted(modelId) + INSERT INTO merlin.simulation_template (model_id, description, arguments) + VALUES ('%s', 'test-description', '{}') + RETURNING id; + """.formatted(modelId) ); res.next(); return res.getInt("id"); @@ -73,11 +118,12 @@ INSERT INTO simulation_template (model_id, description, arguments) int getSimulationId(final int planId) throws SQLException { try (final var statement = connection.createStatement()) { final var res = statement.executeQuery( + //language=sql """ - SELECT id - FROM simulation - WHERE simulation.plan_id = '%s'; - """.formatted(planId) + SELECT id + FROM merlin.simulation s + WHERE s.plan_id = '%s'; + """.formatted(planId) ); res.next(); return res.getInt("id"); @@ -87,23 +133,25 @@ int getSimulationId(final int planId) throws SQLException { void addTemplateIdToSimulation(final int simulationTemplateId, final int simulationId) throws SQLException { try (final var statement = connection.createStatement()) { statement.executeUpdate( + //language=sql """ - UPDATE simulation - SET simulation_template_id = '%s', - arguments = '{}' - WHERE id = '%s'; - """.formatted(simulationTemplateId, simulationId)); + UPDATE merlin.simulation + SET simulation_template_id = %d, + arguments = '{}' + WHERE id = %d; + """.formatted(simulationTemplateId, simulationId)); } } int getDatasetId(final int planId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement - .executeQuery( - """ - SELECT dataset_id from plan_dataset - WHERE plan_id = '%s'""".formatted(planId) - ); + final var res = statement.executeQuery( + //language=sql + """ + SELECT dataset_id + FROM merlin.plan_dataset + WHERE plan_id = %d + """.formatted(planId)); res.next(); return res.getInt("dataset_id"); } @@ -111,34 +159,51 @@ int getDatasetId(final int planId) throws SQLException { PlanDatasetRecord insertPlanDataset(final int planId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement - .executeQuery( - """ - INSERT INTO plan_dataset (plan_id, offset_from_plan_start) - VALUES ('%s', '0') - RETURNING plan_id, 
dataset_id;""" - .formatted(planId) - ); + final var res = statement.executeQuery( + //language=sql + """ + INSERT INTO merlin.plan_dataset (plan_id, offset_from_plan_start) + VALUES ('%s', '0') + RETURNING plan_id, dataset_id; + """.formatted(planId)); res.next(); return new PlanDatasetRecord(res.getInt("plan_id"), res.getInt("dataset_id")); } } + int getDatasetCount(final int datasetId) throws SQLException { + try (final var statement = connection.createStatement(); + final var res = statement.executeQuery( + //language=sql + """ + SELECT COUNT(*) + FROM merlin.dataset + WHERE id = %s; + """.formatted(datasetId) + )) { + res.next(); + return res.getInt("count"); + } + } + SimulationDatasetRecord insertSimulationDataset(final int simulationId, final int datasetId) throws SQLException { try (final var statement = connection.createStatement()) { final var res = statement .executeQuery( + //language=sql """ - INSERT INTO simulation_dataset (simulation_id, dataset_id, arguments, simulation_start_time, simulation_end_time) - VALUES ('%s', '%s', '{}', '2020-1-1 00:00:00', '2020-1-2 00:00:00') - RETURNING simulation_id, dataset_id;""" - .formatted(simulationId, datasetId) + INSERT INTO merlin.simulation_dataset (simulation_id, dataset_id, arguments, simulation_start_time, simulation_end_time) + VALUES ('%s', '%s', '{}', '2020-1-1 00:00:00', '2020-1-2 00:00:00') + RETURNING simulation_id, dataset_id; + """.formatted(simulationId, datasetId) ); res.next(); return new SimulationDatasetRecord(res.getInt("simulation_id"), res.getInt("dataset_id")); } } + //endregion + int fileId; int missionModelId; int planId; @@ -165,92 +230,43 @@ void beforeEach() throws SQLException { @AfterEach void afterEach() throws SQLException { - helper.clearTable("uploaded_file"); - helper.clearTable("mission_model"); - helper.clearTable("plan"); - helper.clearTable("activity_directive"); - helper.clearTable("simulation_template"); - helper.clearTable("simulation"); - helper.clearTable("dataset"); - helper.clearTable("plan_dataset"); - helper.clearTable("simulation_dataset"); + helper.clearSchema("merlin"); } @Nested class MissionModelTriggers { @Test void shouldIncrementMissionModelRevisionOnMissionModelUpdate() throws SQLException { - final var res = connection.createStatement() - .executeQuery( - """ - SELECT revision - FROM mission_model - WHERE id = %s;""" - .formatted(missionModelId) - ); - res.next(); - final var revision = res.getInt("revision"); - res.close(); - - connection.createStatement() - .executeUpdate( - """ - UPDATE mission_model - SET name = 'updated-name-%s' - WHERE id = %s;""" - .formatted(UUID.randomUUID().toString(), missionModelId) - ); - - final var updatedRes = connection.createStatement() - .executeQuery( - """ - SELECT revision - FROM mission_model - WHERE id = %s;""" - .formatted(missionModelId) - ); - updatedRes.next(); - final var updatedRevision = updatedRes.getInt("revision"); - updatedRes.close(); + final var revision = getMissionModelRevision(missionModelId); + try(final var statement = connection.createStatement()){ + statement.executeUpdate( + //language=sql + """ + UPDATE merlin.mission_model + SET name = 'updated-name-%s' + WHERE id = %d; + """.formatted(UUID.randomUUID().toString(), missionModelId)); + } + final var updatedRevision = getMissionModelRevision(missionModelId); assertEquals(revision + 1, updatedRevision); } @Test void shouldIncrementMissionModelRevisionOnMissionModelJarIdUpdate() throws SQLException { - final var res = connection.createStatement() - .executeQuery( - """ 
- SELECT revision - FROM mission_model - WHERE id = %s;""" - .formatted(missionModelId) - ); - res.next(); - final var revision = res.getInt("revision"); - res.close(); - - connection.createStatement() - .executeUpdate( - """ - UPDATE uploaded_file - SET path = 'test-path-updated' - WHERE id = %s;""" - .formatted(fileId) - ); - - final var updatedRes = connection.createStatement() - .executeQuery( - """ - SELECT revision - FROM mission_model - WHERE id = %s;""" - .formatted(missionModelId) - ); - updatedRes.next(); - final var updatedRevision = updatedRes.getInt("revision"); - updatedRes.close(); + final var revision = getMissionModelRevision(missionModelId); + try(final var statement = connection.createStatement()) { + statement.executeUpdate( + //language=sql + """ + UPDATE merlin.uploaded_file + SET path = 'test-path-updated' + WHERE id = %d; + """.formatted(fileId)); + } + + final var updatedRevision = getMissionModelRevision(missionModelId); assertEquals(revision + 1, updatedRevision); } } @@ -259,140 +275,45 @@ void shouldIncrementMissionModelRevisionOnMissionModelJarIdUpdate() throws SQLEx class PlanTriggers { @Test void shouldIncrementPlanRevisionOnPlanUpdate() throws SQLException { - final var initialRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM plan - WHERE id = %s;""" - .formatted(planId) - ); - initialRes.next(); - final var initialRevision = initialRes.getInt("revision"); - initialRes.close(); - - connection.createStatement() - .executeUpdate( - """ - UPDATE plan SET name = 'test-plan-updated-%s' - WHERE id = %s;""" - .formatted(UUID.randomUUID().toString(), planId) - ); - - final var updatedRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM plan - WHERE id = %s;""" - .formatted(planId) - ); - updatedRes.next(); - final var updatedRevision = updatedRes.getInt("revision"); - updatedRes.close(); + final var initialRevision = getPlanRevision(planId); + + try(final var statement = connection.createStatement()) { + statement.executeUpdate( + //language=sql + """ + UPDATE merlin.plan + SET name = 'test-plan-updated-%s' + WHERE id = %d; + """.formatted(UUID.randomUUID().toString(), planId)); + } + final var updatedRevision = getPlanRevision(planId); assertEquals(initialRevision + 1, updatedRevision); } @Test void shouldIncrementPlanRevisionOnActivityInsert() throws SQLException { - final var initialRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM plan - WHERE id = %s;""" - .formatted(planId) - ); - initialRes.next(); - final var initialRevision = initialRes.getInt("revision"); - initialRes.close(); - + final var initialRevision = getPlanRevision(planId); merlinHelper.insertActivity(planId); - - final var updatedRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM plan - WHERE id = %s;""" - .formatted(planId) - ); - updatedRes.next(); - final var updatedRevision = updatedRes.getInt("revision"); - updatedRes.close(); - + final var updatedRevision = getPlanRevision(planId); assertEquals(initialRevision + 1, updatedRevision); } @Test void shouldIncrementPlanRevisionOnActivityUpdate() throws SQLException { - final var activityId = merlinHelper.insertActivity(planId); - - final var initialRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM plan - WHERE id = %s;""" - .formatted(planId) - ); - initialRes.next(); - final var initialRevision = initialRes.getInt("revision"); - - connection.createStatement() - .executeUpdate( - """ - UPDATE 
activity_directive SET type = 'test-activity-updated' - WHERE id = %s;""" - .formatted(activityId) - ); - - final var updatedRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM plan - WHERE id = %s;""" - .formatted(planId) - ); - updatedRes.next(); - final var updatedRevision = updatedRes.getInt("revision"); - updatedRes.close(); - + final var initialRevision = getPlanRevision(planId); + merlinHelper.updateActivityName("test-activity-updated", activityId, planId); + final var updatedRevision = getPlanRevision(planId); assertEquals(initialRevision + 1, updatedRevision); } @Test void shouldIncrementPlanRevisionOnActivityDelete() throws SQLException { - final var activityId = merlinHelper.insertActivity(planId); - - final var initialRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM plan - WHERE id = %s;""" - .formatted(planId) - ); - initialRes.next(); - final var initialRevision = initialRes.getInt("revision"); - initialRes.close(); - - connection.createStatement() - .executeUpdate( - """ - DELETE FROM activity_directive - WHERE id = %s;""" - .formatted(activityId) - ); - - final var updatedRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM plan - WHERE id = %s;""" - .formatted(planId) - ); - updatedRes.next(); - final var updatedRevision = updatedRes.getInt("revision"); - updatedRes.close(); - + final var initialRevision = getPlanRevision(planId); + merlinHelper.deleteActivityDirective(planId, activityId); + final var updatedRevision = getPlanRevision(planId); assertEquals(initialRevision + 1, updatedRevision); } } @@ -401,38 +322,19 @@ void shouldIncrementPlanRevisionOnActivityDelete() throws SQLException { class SimulationTemplateTriggers { @Test void shouldIncrementSimulationTemplateRevisionOnSimulationTemplateUpdate() throws SQLException { + final var initialRevision = getSimulationTemplateRevision(simulationTemplateId); - final var initialRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM simulation_template - WHERE id = %s;""" - .formatted(simulationTemplateId) - ); - initialRes.next(); - final var initialRevision = initialRes.getInt("revision"); - initialRes.close(); - - connection.createStatement() - .executeUpdate( - """ - UPDATE simulation_template - SET description = 'test-description-updated' - WHERE id = %s;""" - .formatted(simulationTemplateId) - ); - - final var updatedRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM simulation_template - WHERE id = %s;""" - .formatted(simulationTemplateId) - ); - updatedRes.next(); - final var updatedRevision = updatedRes.getInt("revision"); - updatedRes.close(); + try(final var statement = connection.createStatement()) { + statement.executeUpdate( + //language=sql + """ + UPDATE merlin.simulation_template + SET description = 'test-description-updated' + WHERE id = %s; + """.formatted(simulationTemplateId)); + } + final var updatedRevision = getSimulationTemplateRevision(simulationTemplateId); assertEquals(initialRevision + 1, updatedRevision); } } @@ -441,37 +343,19 @@ void shouldIncrementSimulationTemplateRevisionOnSimulationTemplateUpdate() throw class SimulationTriggers { @Test void shouldIncrementSimulationRevisionOnSimulationUpdate() throws SQLException { + final var initialRevision = getSimulationRevision(simulationId); - final var initialRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM simulation - WHERE id = %s;""" - .formatted(simulationId) - ); - 
initialRes.next(); - final var initialRevision = initialRes.getInt("revision"); - initialRes.close(); - - connection.createStatement() - .executeUpdate( - """ - UPDATE simulation SET arguments = '{}' - WHERE id = %s;""" - .formatted(simulationId) - ); - - final var updatedRes = connection.createStatement() - .executeQuery( - """ - SELECT revision FROM simulation - WHERE id = %s;""" - .formatted(simulationId) - ); - updatedRes.next(); - final var updatedRevision = updatedRes.getInt("revision"); - updatedRes.close(); + try(final var statement = connection.createStatement()) { + statement.executeUpdate( + //language=sql + """ + UPDATE merlin.simulation + SET arguments = '{}' + WHERE id = %s; + """.formatted(simulationId)); + } + final var updatedRevision = getSimulationRevision(simulationId); assertEquals(initialRevision + 1, updatedRevision); } } @@ -480,32 +364,20 @@ void shouldIncrementSimulationRevisionOnSimulationUpdate() throws SQLException { class PlanDatasetTriggers { @Test void shouldCreateDefaultDatasetOnPlanDatasetInsertWithNullDatasetId() throws SQLException { - final var res = connection.createStatement() - .executeQuery( - """ - INSERT INTO plan_dataset (plan_id, offset_from_plan_start) - VALUES (%s, '0') - RETURNING dataset_id;""" - .formatted(planId) - ); - res.next(); - final var newDatasetId = res.getInt("dataset_id"); - assertFalse(res.wasNull()); - res.close(); - - - final var datasetRes = connection.createStatement() - .executeQuery( - """ - SELECT * FROM dataset - WHERE id = %s;""" - .formatted(newDatasetId) - ); - - datasetRes.next(); - assertEquals(newDatasetId, datasetRes.getInt("id")); - assertEquals(0, datasetRes.getInt("revision")); - datasetRes.close(); + final var newDatasetId = insertPlanDataset(planId).dataset_id(); + try(final var statement = connection.createStatement()) { + final var datasetRes = statement.executeQuery( + //language=sql + """ + SELECT * + FROM merlin.dataset + WHERE id = %d; + """.formatted(newDatasetId)); + + datasetRes.next(); + assertEquals(newDatasetId, datasetRes.getInt("id")); + assertEquals(0, datasetRes.getInt("revision")); + } } @Test @@ -514,51 +386,34 @@ void shouldCalculatePlanDatasetOffsetOnPlanDatasetInsertWithNonNullDatasetId() t // this new plan starts exactly 1 hour later. 
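       // planDatasetRecord's dataset was registered against the original plan with
       // offset_from_plan_start = '0' (see insertPlanDataset above). Attaching that same
       // dataset to a plan that starts one hour later should cause the trigger to recompute
       // the offset relative to the new plan's start, hence the '-1 hours' asserted below.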
final var newPlanId = merlinHelper.insertPlan(missionModelId, merlinHelper.admin.name(), "test-plan-"+UUID.randomUUID(), "2020-1-1 01:00:00+00"); - final var planDatasetInsertRes = connection.createStatement() - .executeQuery( - """ - INSERT INTO plan_dataset (plan_id, dataset_id) - VALUES (%s, %s) - RETURNING *;""" - .formatted(newPlanId, planDatasetRecord.dataset_id()) - ); - planDatasetInsertRes.next(); - final var newOffsetFromPlanStart = new PGInterval(planDatasetInsertRes.getString("offset_from_plan_start")); - planDatasetInsertRes.close(); - - assertEquals(new PGInterval("-1 hours"), newOffsetFromPlanStart); + try(final var statement = connection.createStatement()) { + final var planDatasetInsertRes = statement.executeQuery( + //language=sql + """ + INSERT INTO merlin.plan_dataset (plan_id, dataset_id) + VALUES (%s, %s) + RETURNING *; + """.formatted(newPlanId, planDatasetRecord.dataset_id())); + planDatasetInsertRes.next(); + final var newOffsetFromPlanStart = new PGInterval(planDatasetInsertRes.getString("offset_from_plan_start")); + assertEquals(new PGInterval("-1 hours"), newOffsetFromPlanStart); + } } @Test void shouldDeleteDatasetWithNoAssociatedPlansOnPlanDatasetDelete() throws SQLException { - try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery( - """ - SELECT COUNT(*) FROM dataset - WHERE id = %s;""" - .formatted(planDatasetRecord.dataset_id()) - ); - res.next(); - assertEquals(1, res.getInt(1)); - } + assertEquals(1, getDatasetCount(planDatasetRecord.dataset_id())); + try (final var statement = connection.createStatement()) { statement.executeUpdate( + //language=sql """ - DELETE FROM plan_dataset - WHERE plan_id = %s and dataset_id = %s;""" - .formatted(planDatasetRecord.plan_id(), planDatasetRecord.dataset_id()) - ); - } - try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery( - """ - SELECT COUNT(*) FROM dataset - WHERE id = %s;""" - .formatted(planDatasetRecord.dataset_id()) - ); - res.next(); - assertEquals(0, res.getInt(1)); + DELETE FROM merlin.plan_dataset + WHERE plan_id = %d and dataset_id = %d; + """.formatted(planDatasetRecord.plan_id(), planDatasetRecord.dataset_id())); } + + assertEquals(0, getDatasetCount(planDatasetRecord.dataset_id())); } } @@ -566,21 +421,20 @@ SELECT COUNT(*) FROM dataset class SimulationDatasetTriggers { @Test void shouldInitializeDatasetOnInsert() throws SQLException { - try (final var statement = connection.createStatement()) { - try (final var res = statement.executeQuery( + try (final var statement = connection.createStatement(); + final var res = statement.executeQuery( + //language=sql """ - SELECT plan_revision, model_revision, simulation_revision, simulation_template_revision - FROM simulation_dataset - WHERE simulation_id = %s AND dataset_id = %s;""" - .formatted(simulationDatasetRecord.simulation_id(), simulationDatasetRecord.dataset_id()) - ) - ) { - res.next(); - assertEquals(1, res.getInt("plan_revision")); - assertEquals(0, res.getInt("model_revision")); - assertEquals(1, res.getInt("simulation_revision")); //1, as we add a template in the BeforeEach - assertEquals(0, res.getInt("simulation_template_revision")); - } + SELECT plan_revision, model_revision, simulation_revision, simulation_template_revision + FROM merlin.simulation_dataset + WHERE simulation_id = %d AND dataset_id = %d; + """.formatted(simulationDatasetRecord.simulation_id(), simulationDatasetRecord.dataset_id())) + ) { + res.next(); + assertEquals(1, 
res.getInt("plan_revision")); + assertEquals(0, res.getInt("model_revision")); + assertEquals(1, res.getInt("simulation_revision")); //1, as we add a template in the BeforeEach + assertEquals(0, res.getInt("simulation_template_revision")); } } @@ -588,23 +442,13 @@ void shouldInitializeDatasetOnInsert() throws SQLException { void shouldDeleteDatasetOnSimulationDatasetDelete() throws SQLException { try (final var statement = connection.createStatement()) { statement.executeUpdate( + //language=sql """ - DELETE FROM simulation_dataset - WHERE simulation_id = %s AND dataset_id = %s;""" - .formatted(simulationDatasetRecord.simulation_id(), simulationDatasetRecord.dataset_id()) - ); - try (final var res = statement.executeQuery( - """ - SELECT COUNT(*) - FROM dataset - WHERE id = %s;""" - .formatted(simulationDatasetRecord.dataset_id()) - ) - ) { - res.next(); - assertEquals(0, res.getInt("count")); - } + DELETE FROM merlin.simulation_dataset + WHERE simulation_id = %d AND dataset_id = %d; + """.formatted(simulationDatasetRecord.simulation_id(), simulationDatasetRecord.dataset_id())); } + assertEquals(0, getDatasetCount(simulationDatasetRecord.dataset_id())); } } @@ -626,13 +470,12 @@ void shouldRejectInsertProfileSegmentWithNonExistentProfile() throws SQLExceptio void shouldRejectInsertEventWithNonExistentTopic() throws SQLException { final var datasetId = allocateDataset(); try (final var statement = connection.createStatement()) { - statement - .executeUpdate( - """ - INSERT INTO event (dataset_id, real_time, transaction_index, causal_time, value, topic_index) - VALUES (%d, '0 seconds', 0, '.1', '{}', 1234); - """.formatted(datasetId) - ); + statement.executeUpdate( + //language=sql + """ + INSERT INTO merlin.event (dataset_id, real_time, transaction_index, causal_time, value, topic_index) + VALUES (%d, '0 seconds', 0, '.1', '{}', 1234); + """.formatted(datasetId)); fail(); } catch (SQLException e) { if (!e @@ -651,14 +494,13 @@ void shouldRejectUpdateProfileSegmentToNonExistentProfile() throws SQLException insertProfileSegment(datasetId, profileId); try (final var statement = connection.createStatement()) { - statement - .executeUpdate( - """ - UPDATE profile_segment - set profile_id=%d - where dataset_id=%d and profile_id=%d; - """.formatted(profileId + 1, datasetId, profileId) - ); + statement.executeUpdate( + //language=sql + """ + update merlin.profile_segment + set profile_id = %d + where dataset_id=%d and profile_id=%d; + """.formatted(profileId + 1, datasetId, profileId)); fail(); } catch (SQLException e) { if (!e.getMessage().contains("foreign key violation: there is no profile with id %d in dataset %d".formatted( @@ -680,12 +522,12 @@ void shouldRejectUpdateEventToNonExistentTopic() throws SQLException { try (final var statement = connection.createStatement()) { statement.executeUpdate( + //language=sql """ - UPDATE event - set topic_index=%d - where dataset_id=%d and topic_index=%d; - """.formatted(topicIndex + 1, datasetId, topicIndex) - ); + update merlin.event + set topic_index = %d + where dataset_id=%d and topic_index=%d; + """.formatted(topicIndex + 1, datasetId, topicIndex)); fail(); } catch (SQLException e) { if (!e @@ -706,13 +548,12 @@ void shouldCascadeWhenDeletingProfile() throws SQLException { insertProfileSegment(datasetId, profileId); try (final var statement = connection.createStatement()) { - statement - .executeUpdate( - """ - DELETE FROM profile - WHERE dataset_id=%d and id=%d - """.formatted(datasetId, profileId) - ); + statement.executeUpdate( + //language=sql 
+ """ + DELETE FROM merlin.profile + WHERE dataset_id=%d and id=%d + """.formatted(datasetId, profileId)); } assertEquals(0, getProfileSegmentCount(datasetId, profileId)); @@ -727,19 +568,18 @@ void shouldCascadeWhenDeletingTopic() throws SQLException { insertEvent(datasetId, topicIndex); try (final var statement = connection.createStatement()) { - statement - .executeUpdate( - """ - DELETE FROM topic - WHERE dataset_id=%d and topic_index=%d - """.formatted(datasetId, topicIndex) - ); + statement.executeUpdate( + //language=sql + """ + DELETE FROM merlin.topic + WHERE dataset_id=%d and topic_index=%d + """.formatted(datasetId, topicIndex)); try (final var res = statement.executeQuery( + //language=sql """ - SELECT count(1) FROM event WHERE dataset_id=%d and topic_index=%d - """.formatted(datasetId, topicIndex) - ) + SELECT count(1) FROM merlin.event WHERE dataset_id=%d and topic_index=%d + """.formatted(datasetId, topicIndex)) ) { res.next(); assertEquals(0, res.getInt("count")); @@ -756,13 +596,13 @@ void shouldCascadeWhenUpdatingProfile() throws SQLException { final int newProfileId; try (final var statement = connection.createStatement(); final var res = statement.executeQuery( + //language=sql """ - UPDATE profile - SET id=default - WHERE dataset_id=%d and id=%d - RETURNING id; - """.formatted(datasetId, profileId) - ) + UPDATE merlin.profile + SET id=default + WHERE dataset_id=%d and id=%d + RETURNING id; + """.formatted(datasetId, profileId)) ) { res.next(); newProfileId = res.getInt("id"); @@ -781,20 +621,19 @@ void shouldCascadeWhenUpdatingTopicIndex() throws SQLException { insertEvent(datasetId, topicIndex); try (final var statement = connection.createStatement()) { - statement - .executeUpdate( - """ - UPDATE topic - SET topic_index=%d - WHERE dataset_id=%d and topic_index=%d - """.formatted(topicIndex + 1, datasetId, topicIndex) - ); + statement.executeUpdate( + //language=sql + """ + UPDATE merlin.topic + SET topic_index = %d + WHERE dataset_id = %d and topic_index = %d + """.formatted(topicIndex + 1, datasetId, topicIndex)); try (final var res = statement.executeQuery( + //language=sql """ - SELECT count(1) FROM event WHERE dataset_id=%d and topic_index=%d - """.formatted(datasetId, topicIndex + 1) - ) + SELECT count(1) FROM merlin.event WHERE dataset_id = %d and topic_index = %d + """.formatted(datasetId, topicIndex + 1)) ) { res.next(); assertEquals(1, res.getInt("count")); @@ -894,11 +733,12 @@ private int insertProfile(final int datasetId, final String name, final String t { try (final var statement = connection.createStatement()) { final var results = statement.executeQuery( + //language=sql """ - INSERT INTO profile(dataset_id, name, type, duration) - VALUES (%d, '%s', '%s', '%s') - RETURNING id; - """.formatted(datasetId, name, type, duration)); + INSERT INTO merlin.profile(dataset_id, name, type, duration) + VALUES (%d, '%s', '%s', '%s') + RETURNING id; + """.formatted(datasetId, name, type, duration)); assertTrue(results.next()); return results.getInt("id"); } @@ -913,23 +753,25 @@ private void insertProfileSegment( final int profileId, final String startOffset, final String dynamics, - final boolean isGap) throws SQLException - { + final boolean isGap + ) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - INSERT INTO profile_segment(dataset_id, profile_id, start_offset, dynamics, is_gap) - VALUES (%d, %d, '%s'::interval, '%s'::jsonb, %b); - """.formatted(datasetId, profileId, startOffset, 
dynamics, isGap)); + INSERT INTO merlin.profile_segment(dataset_id, profile_id, start_offset, dynamics, is_gap) + VALUES (%d, %d, '%s'::interval, '%s'::jsonb, %b); + """.formatted(datasetId, profileId, startOffset, dynamics, isGap)); } } private int getProfileSegmentCount(final int datasetId, final int profileId) throws SQLException { try (final Statement statement = connection.createStatement()) { final var res = statement.executeQuery( + //language=sql """ - SELECT count(1) FROM profile_segment WHERE dataset_id=%d and profile_id=%d - """.formatted(datasetId, profileId)); + SELECT count(1) FROM merlin.profile_segment WHERE dataset_id=%d and profile_id=%d + """.formatted(datasetId, profileId)); assertTrue(res.next()); return res.getInt("count"); } @@ -937,24 +779,23 @@ SELECT count(1) FROM profile_segment WHERE dataset_id=%d and profile_id=%d private void insertTopic(final int datasetId, final int topicIndex) throws SQLException { try (final var statement = connection.createStatement()) { - statement - .executeUpdate( - """ - INSERT INTO topic (dataset_id, topic_index, name, value_schema) - VALUES (%d, %d, 'fred', '{}'); - """.formatted(datasetId, topicIndex)); + statement.executeUpdate( + //language=sql + """ + INSERT INTO merlin.topic (dataset_id, topic_index, name, value_schema) + VALUES (%d, %d, 'fred', '{}'); + """.formatted(datasetId, topicIndex)); } } private void insertEvent(final int datasetId, final int topicIndex) throws SQLException { try (final var statement = connection.createStatement()) { - statement - .executeUpdate( - """ - INSERT INTO event (dataset_id, real_time, transaction_index, causal_time, value, topic_index) - VALUES (%d, '0 seconds', 0, '.1', '{}', %d); - """.formatted(datasetId, topicIndex) - ); + statement.executeUpdate( + //language=sql + """ + INSERT INTO merlin.event (dataset_id, real_time, transaction_index, causal_time, value, topic_index) + VALUES (%d, '0 seconds', 0, '.1', '{}', %d); + """.formatted(datasetId, topicIndex)); } } @@ -963,9 +804,11 @@ private ArrayList getResourcesAtStartOffset( final String startOffset) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - select * from hasura_functions.get_resources_at_start_offset(%d, '%s'); - """.formatted(datasetId, startOffset)); + final var res = statement.executeQuery( + //language=sql + """ + select * from hasura.get_resources_at_start_offset(%d, '%s'); + """.formatted(datasetId, startOffset)); final var segments = new ArrayList(); while (res.next()) { @@ -987,11 +830,12 @@ private int allocateDataset() throws SQLException { final int datasetId; try (final Statement statement = connection.createStatement()) { try (final var res = statement.executeQuery( + //language=sql """ - INSERT INTO dataset - DEFAULT VALUES - RETURNING id; - """)) { + INSERT INTO merlin.dataset + DEFAULT VALUES + RETURNING id; + """)) { res.next(); datasetId = res.getInt("id"); } diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PermissionsTest.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PermissionsTest.java index bca8c0c554..ba2c93987e 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PermissionsTest.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PermissionsTest.java @@ -10,7 +10,6 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; @@ 
-21,7 +20,6 @@ @TestInstance(TestInstance.Lifecycle.PER_CLASS) public class PermissionsTest { - private static final File initSqlScriptFile = new File("../merlin-server/sql/merlin/init.sql"); private static final String testRole = "testRole"; private enum FunctionPermissionKey { apply_preset, @@ -70,24 +68,12 @@ void beforeEach() throws SQLException { @AfterEach void afterEach() throws SQLException { - helper.clearTable("uploaded_file"); - helper.clearTable("mission_model"); - helper.clearTable("activity_type"); - helper.clearTable("plan"); - helper.clearTable("plan_collaborators"); - helper.clearTable("activity_directive"); - helper.clearTable("simulation"); - helper.clearTable("dataset"); + helper.clearSchema("merlin"); } @BeforeAll void beforeAll() throws SQLException, IOException, InterruptedException { - helper = new DatabaseTestHelper( - "aerie_merlin_test", - "Merlin Database Tests", - initSqlScriptFile - ); - helper.startDatabase(); + helper = new DatabaseTestHelper("aerie_permissions_test", "Database Permissions Tests"); connection = helper.connection(); merlinHelper = new MerlinDatabaseTestHelper(connection); insertUserRole(testRole); @@ -95,17 +81,17 @@ void beforeAll() throws SQLException, IOException, InterruptedException { @AfterAll void afterAll() throws SQLException, IOException, InterruptedException { - helper.stopDatabase(); - connection = null; - helper = null; + helper.close(); } //region Helper Methods private String getRole(String session) throws SQLException { try(final var statement = connection.createStatement()){ - final var resp = statement.executeQuery(""" - select metadata.get_role('%s'::json) - """.formatted(session)); + final var resp = statement.executeQuery( + //language=sql + """ + select permissions.get_role('%s'::json) + """.formatted(session)); resp.next(); return resp.getString(1); } @@ -113,9 +99,11 @@ private String getRole(String session) throws SQLException { private String getFunctionPermission(String function, String session) throws SQLException { try(final var statement = connection.createStatement()){ - final var resp = statement.executeQuery(""" - select metadata.get_function_permissions('%s', '%s'::json) - """.formatted(function, session)); + final var resp = statement.executeQuery( + //language=sql + """ + select permissions.get_function_permissions('%s', '%s'::json) + """.formatted(function, session)); resp.next(); return resp.getString(1); } @@ -127,17 +115,21 @@ private void checkGeneralPermissions(String permission, int planId, String usern private void checkGeneralPermissions(String function, String permission, int planId, String username) throws SQLException { try(final var statement = connection.createStatement()){ - statement.execute(""" - call metadata.check_general_permissions('%s', '%s', %d, '%s') - """.formatted(function, permission, planId, username)); + statement.execute( + //language=sql + """ + call permissions.check_general_permissions('%s', '%s', %d, '%s') + """.formatted(function, permission, planId, username)); } } private void raiseIfPlanMergePermission(String permission) throws SQLException { try(final var statement = connection.createStatement()){ - statement.execute(""" - select metadata.raise_if_plan_merge_permission('get_plan_history', '%s'); - """.formatted(permission)); + statement.execute( + //language=sql + """ + select permissions.raise_if_plan_merge_permission('get_plan_history', '%s'); + """.formatted(permission)); } } @@ -148,9 +140,11 @@ private void checkMergePermissions( String username) throws 
SQLException { try(final var statement = connection.createStatement()){ - statement.execute(""" - call metadata.check_merge_permissions('create_merge_rq', '%s', %d, %d, '%s') - """.formatted(permission, planIdReceiving, planIdSupplying, username)); + statement.execute( + //language=sql + """ + call permissions.check_merge_permissions('create_merge_rq', '%s', %d, %d, '%s') + """.formatted(permission, planIdReceiving, planIdSupplying, username)); } } @@ -159,7 +153,7 @@ private void insertUserRole(String role) throws SQLException { statement.execute( //language=sql """ - insert into metadata.user_roles(role, description) + insert into permissions.user_roles(role, description) values ('%s', 'A role created during DBTests'); """.formatted(role)); } @@ -175,7 +169,7 @@ private void updateUserRolePermissions( statement.execute( //language=sql """ - update metadata.user_role_permission + update permissions.user_role_permission set function_permissions = '%s'::jsonb where role = '%s'; """.formatted(functionPermissionsJson, role)); @@ -183,7 +177,7 @@ private void updateUserRolePermissions( statement.execute( //language=sql """ - update metadata.user_role_permission + update permissions.user_role_permission set action_permissions = '%s'::jsonb where role = '%s'; """.formatted(actionPermissionsJson, role)); @@ -192,7 +186,7 @@ private void updateUserRolePermissions( statement.execute( //language=sql """ - update metadata.user_role_permission + update permissions.user_role_permission set action_permissions = '%s'::jsonb, function_permissions = '%s'::jsonb where role = '%s'; @@ -277,7 +271,7 @@ void getFunctionReturnsAssignedValuesViewer(FunctionPermissionKey function) thro @Test void invalidFunctionThrowsException() throws SQLException { final SQLException ex = assertThrows(SQLException.class, () -> getFunctionPermission("any", merlinHelper.viewer.session())); - if(!ex.getMessage().contains("invalid input value for enum metadata.function_permission_key: \"any\"")) + if(!ex.getMessage().contains("invalid input value for enum permissions.function_permission_key: \"any\"")) throw ex; } } @@ -288,7 +282,7 @@ class CheckGeneralPermissions { void invalidPermissionFails() throws SQLException { final int planId = merlinHelper.insertPlan(missionModelId, merlinHelper.admin.name()); final SQLException ex = assertThrows(SQLException.class, () -> checkGeneralPermissions("any", planId, merlinHelper.admin.name())); - if(!ex.getMessage().contains("invalid input value for enum metadata.permission: \"any\"")) + if(!ex.getMessage().contains("invalid input value for enum permissions.permission: \"any\"")) throw ex; } @@ -347,14 +341,18 @@ void sufficientPrivilege(GeneralPermission permission) throws SQLException { void specialApplyPresetOwnerPermission() throws SQLException { // Set up a custom role and two users with it try (final var statement = connection.createStatement()) { - statement.execute(""" - insert into metadata.user_roles(role) values ('applyPresetOwner'); - """); - statement.execute(""" - update metadata.user_role_permission - set function_permissions = '{"apply_preset": "OWNER"}' - where role = 'applyPresetOwner'; - """); + statement.execute( + //language=sql + """ + insert into permissions.user_roles(role) values ('applyPresetOwner'); + """); + statement.execute( + //language=sql + """ + update permissions.user_role_permission + set function_permissions = '{"apply_preset": "OWNER"}' + where role = 'applyPresetOwner'; + """); } final var userMerlin = merlinHelper.insertUser("Merlin", "applyPresetOwner"); 
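       // A second user with the same restricted role: since apply_preset is limited to OWNER
       // for this role, the checks below should hinge on ownership of the objects involved
       // rather than on the role itself.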
final var userFalcon = merlinHelper.insertUser("Falcon", "applyPresetOwner"); @@ -388,15 +386,19 @@ void specialApplyPresetOwnerPermission() throws SQLException { // Cleanup added role and users try (final var statement = connection.createStatement()) { - statement.execute(""" - delete from metadata.users - where username = 'Merlin' - or username = 'Falcon'; - """); - statement.execute(""" - delete from metadata.user_roles - where role = 'applyPresetOwner'; - """); + statement.execute( + //language=sql + """ + delete from permissions.users + where username = 'Merlin' + or username = 'Falcon'; + """); + statement.execute( + //language=sql + """ + delete from permissions.user_roles + where role = 'applyPresetOwner'; + """); } } } @@ -406,7 +408,7 @@ class RaiseIfPlanMergePermission { @Test void invalidValueThrows() throws SQLException { final SQLException exception = assertThrows(SQLException.class, () -> raiseIfPlanMergePermission("any")); - if(!exception.getMessage().contains("invalid input value for enum metadata.permission: \"any\"")) + if(!exception.getMessage().contains("invalid input value for enum permissions.permission: \"any\"")) throw exception; } @@ -440,7 +442,7 @@ void invalidPermissionFails() throws SQLException { final SQLException exception = assertThrows( SQLException.class, () -> checkMergePermissions("any", basePlan, viewerPlan, merlinHelper.user.name())); - if(!exception.getMessage().contains("invalid input value for enum metadata.permission: \"any\"")) + if(!exception.getMessage().contains("invalid input value for enum permissions.permission: \"any\"")) throw exception; } diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PlanCollaborationTests.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PlanCollaborationTests.java index 4fb9c33754..0395ba48ae 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PlanCollaborationTests.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PlanCollaborationTests.java @@ -8,13 +8,13 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; import java.util.ArrayList; import java.util.Comparator; import java.util.HashSet; +import java.util.List; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -26,9 +26,9 @@ import gov.nasa.jpl.aerie.database.TagsTests.Tag; +@SuppressWarnings("SqlSourceToSinkFlow") @TestInstance(TestInstance.Lifecycle.PER_CLASS) public class PlanCollaborationTests { - private static final File initSqlScriptFile = new File("../merlin-server/sql/merlin/init.sql"); private DatabaseTestHelper helper; private MerlinDatabaseTestHelper merlinHelper; @@ -44,40 +44,12 @@ void beforeEach() throws SQLException { @AfterEach void afterEach() throws SQLException { - helper.clearTable("uploaded_file"); - helper.clearTable("mission_model"); - helper.clearTable("plan"); - helper.clearTable("activity_directive"); - helper.clearTable("simulation_template"); - helper.clearTable("simulation"); - helper.clearTable("dataset"); - helper.clearTable("plan_dataset"); - helper.clearTable("simulation_dataset"); - helper.clearTable("plan_snapshot"); - helper.clearTable("plan_latest_snapshot"); - helper.clearTable("plan_snapshot_activities"); - helper.clearTable("plan_snapshot_parent"); - helper.clearTable("merge_request"); - helper.clearTable("merge_staging_area"); - 
helper.clearTable("conflicting_activities"); - helper.clearTable("anchor_validation_status"); - helper.clearTable("activity_presets"); - helper.clearTable("preset_to_directive"); - helper.clearTable("preset_to_snapshot_directive"); - helper.clearTable("metadata.tags"); - helper.clearTable("metadata.activity_directive_tags"); - helper.clearTable("metadata.constraint_tags"); - helper.clearTable("metadata.snapshot_activity_tags"); + helper.clearSchema("merlin"); } @BeforeAll void beforeAll() throws SQLException, IOException, InterruptedException { - helper = new DatabaseTestHelper( - "aerie_merlin_test", - "Merlin Database Tests", - initSqlScriptFile - ); - helper.startDatabase(); + helper = new DatabaseTestHelper("aerie_merlin_test", "Plan Collaboration Tests"); connection = helper.connection(); merlinHelper = new MerlinDatabaseTestHelper(connection); merlinHelper.insertUser("PlanCollaborationTests"); @@ -87,29 +59,16 @@ void beforeAll() throws SQLException, IOException, InterruptedException { @AfterAll void afterAll() throws SQLException, IOException, InterruptedException { - helper.stopDatabase(); - connection = null; - helper = null; + helper.close(); } //region Helper Methods - private void updateActivityName(String newName, int activityId, int planId) throws SQLException { - try(final var statement = connection.createStatement()) { - statement.execute( - """ - update activity_directive - set name = '%s' - where id = %d and plan_id = %d; - """.formatted(newName, activityId, planId)); - } - } - private void updateActivityCreatedBy(String newCreator, int activityId, int planId) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( // language=sql """ - update activity_directive + update merlin.activity_directive set created_by = '%s' where id = %d and plan_id = %d; """.formatted(newCreator, activityId, planId)); @@ -122,7 +81,7 @@ private String getActivityCreatedBy(int activityId, int planId) throws SQLExcept // language=sql """ select created_by - from activity_directive + from merlin.activity_directive where id = %d and plan_id = %d; """.formatted(activityId, planId)); res.next(); @@ -133,8 +92,9 @@ private String getActivityCreatedBy(int activityId, int planId) throws SQLExcept private void updateActivityLastModifiedBy(String newModifier, int activityId, int planId) throws SQLException { try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - update activity_directive + update merlin.activity_directive set last_modified_by = '%s' where id = %d and plan_id = %d; """.formatted(newModifier, activityId, planId)); @@ -147,7 +107,7 @@ private String getActivityLastModifiedBy(int activityId, int planId) throws SQLE // language=sql """ select last_modified_by - from activity_directive + from merlin.activity_directive where id = %d and plan_id = %d; """.formatted(activityId, planId)); res.next(); @@ -157,9 +117,11 @@ private String getActivityLastModifiedBy(int activityId, int planId) throws SQLE int duplicatePlan(final int planId, final String newPlanName) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - select duplicate_plan(%s, '%s', 'PlanCollaborationTests') as id; - """.formatted(planId, newPlanName)); + final var res = statement.executeQuery( + //language=sql + """ + select merlin.duplicate_plan(%s, '%s', 'PlanCollaborationTests') as id; + """.formatted(planId, newPlanName)); res.next(); return res.getInt("id"); } @@ 
-167,9 +129,11 @@ int duplicatePlan(final int planId, final String newPlanName) throws SQLExceptio
   int createSnapshot(final int planId) throws SQLException {
     try (final var statement = connection.createStatement()) {
-      final var res = statement.executeQuery("""
-        select create_snapshot(%s) as id;
-      """.formatted(planId));
+      final var res = statement.executeQuery(
+          //language=sql
+          """
+          select merlin.create_snapshot(%s) as id;
+          """.formatted(planId));
       res.next();
       return res.getInt("id");
     }
@@ -180,20 +144,70 @@ int createSnapshot(final int planId, final String snapshot_name, final MerlinDat
       final var res = statement.executeQuery(
         //language=sql
         """
-        select * from hasura_functions.create_snapshot(%s, '%s', '%s'::json);
+        select * from hasura.create_snapshot(%s, '%s', '%s'::json);
         """.formatted(planId, snapshot_name, user.session()));
       res.next();
       return res.getInt(1);
     }
   }
 
+  void insertPlanLatestSnapshot(final int planId, final int snapshotId) throws SQLException {
+    try(final var statement = connection.createStatement()){
+      statement.execute(
+          //language=sql
+          """
+          insert into merlin.plan_latest_snapshot(plan_id, snapshot_id) VALUES (%d, %d);
+          """.formatted(planId, snapshotId));
+    }
+  }
+
+  private int getLatestSnapshot(final int planId) throws SQLException {
+    final var snapshots = getLatestSnapshots(planId);
+    assertEquals(1, snapshots.size());
+    return snapshots.get(0);
+  }
+
+  private List<Integer> getLatestSnapshots(final int planId) throws SQLException {
+    try(final var statement = connection.createStatement()){
+      final var results = statement.executeQuery(
+          //language=sql
+          """
+          SELECT snapshot_id
+          FROM merlin.plan_latest_snapshot
+          WHERE plan_id = %d
+          ORDER BY snapshot_id DESC
+          """.formatted(planId)
+      );
+      final List<Integer> latestSnapshots = new ArrayList<>();
+      while (results.next()) {
+        latestSnapshots.add(results.getInt(1));
+      }
+      return latestSnapshots;
+    }
+  }
+
+  private List<Integer> getPlanHistory(final int planId) throws SQLException{
+    try (final var statement = connection.createStatement()) {
+      final var res = statement.executeQuery(
+          //language=sql
+          """
+          SELECT merlin.get_plan_history(%d);
+          """.formatted(planId));
+      final ArrayList<Integer> ancestorPlanIds = new ArrayList<>();
+      while(res.next()) {
+        ancestorPlanIds.add(res.getInt(1));
+      }
+      return ancestorPlanIds;
+    }
+  }
+
   private SnapshotMetadata getSnapshotMetadata(final int snapshotId) throws SQLException {
     try (final var statement = connection.createStatement()) {
       final var res = statement.executeQuery(
         //language=sql
         """
         SELECT *
-        FROM plan_snapshot
+        FROM merlin.plan_snapshot
         WHERE snapshot_id = %d;
         """.formatted(snapshotId));
       res.next();
@@ -211,19 +225,22 @@ private SnapshotMetadata getSnapshotMetadata(final int snapshotId) throws SQLExc
   void restoreFromSnapshot(final int planId, final int snapshotId) throws SQLException {
     try (final var statement = connection.createStatement()) {
       statement.execute(
+        //language=sql
         """
-        call restore_from_snapshot(%d, %d)
+        call merlin.restore_from_snapshot(%d, %d);
         """.formatted(planId, snapshotId));
     }
   }
 
   int getParentPlanId(final int planId) throws SQLException{
     try (final var statement = connection.createStatement()) {
-      final var res = statement.executeQuery("""
-        select parent_id
-        from plan
-        where plan.id = %d;
-      """.formatted(planId));
+      final var res = statement.executeQuery(
+          //language=sql
+          """
+          select parent_id
+          from merlin.plan p
+          where p.id = %d;
+          """.formatted(planId));
       res.next();
       return res.getInt("parent_id");
     }
@@ -231,8 +248,10 @@ int getParentPlanId(final int planId)
throws SQLException{ private void lockPlan(final int planId) throws SQLException{ try(final var statement = connection.createStatement()){ - statement.execute(""" - update plan + statement.execute( + //language=sql + """ + update merlin.plan set is_locked = true where id = %d; """.formatted(planId)); @@ -242,45 +261,51 @@ private void lockPlan(final int planId) throws SQLException{ private void unlockPlan(final int planId) throws SQLException{ //Unlock first to allow for after tasks try(final var statement = connection.createStatement()){ - statement.execute(""" - update plan + statement.execute( + //language=sql + """ + update merlin.plan set is_locked = false where id = %d; """.formatted(planId)); } } - int getMergeBaseFromPlanIds(final int planIdReceivingChanges, final int planIdSupplyingChanges) throws SQLException{ - try(final var statement = connection.createStatement()){ - final var snapshotRes = statement.executeQuery( + private boolean isPlanLocked(final int planId) throws SQLException { + try (final var statement = connection.createStatement()) { + final var res = statement.executeQuery( + //language=sql """ - select snapshot_id - from plan_latest_snapshot - where plan_id = %d - order by snapshot_id desc - limit 1; - """.formatted(planIdSupplyingChanges)); - snapshotRes.next(); - final int snapshotIdSupplyingChanges = snapshotRes.getInt(1); + select is_locked + from merlin.plan + where id = %d; + """.formatted(planId)); + assertTrue(res.next()); + return res.getBoolean(1); + } + } + int getMergeBaseFromPlanIds(final int planIdReceivingChanges, final int planIdSupplyingChanges) throws SQLException{ + try(final var statement = connection.createStatement()){ + final var snapshotIdSupplyingChanges = getLatestSnapshots(planIdSupplyingChanges).get(0); final var res = statement.executeQuery( + //language=sql """ - select get_merge_base(%d, %d); - """.formatted(planIdReceivingChanges, snapshotIdSupplyingChanges)); + select merlin.get_merge_base(%d, %d); + """.formatted(planIdReceivingChanges, snapshotIdSupplyingChanges)); res.next(); - return res.getInt(1); } - } private int createMergeRequest(final int planId_receiving, final int planId_supplying) throws SQLException{ try(final var statement = connection.createStatement()){ final var res = statement.executeQuery( + //language=sql """ - select create_merge_request(%d, %d, 'PlanCollaborationTests Requester'); - """.formatted(planId_supplying, planId_receiving) + select merlin.create_merge_request(%d, %d, 'PlanCollaborationTests Requester'); + """.formatted(planId_supplying, planId_receiving) ); res.next(); return res.getInt(1); @@ -290,8 +315,9 @@ private int createMergeRequest(final int planId_receiving, final int planId_supp private void beginMerge(final int mergeRequestId) throws SQLException{ try(final var statement = connection.createStatement()){ statement.execute( + //language=sql """ - call begin_merge(%d, 'PlanCollaborationTests Reviewer') + call merlin.begin_merge(%d, 'PlanCollaborationTests Reviewer') """.formatted(mergeRequestId) ); } @@ -300,8 +326,9 @@ call begin_merge(%d, 'PlanCollaborationTests Reviewer') private void commitMerge(final int mergeRequestId) throws SQLException{ try(final var statement = connection.createStatement()){ statement.execute( + //language=sql """ - call commit_merge(%d) + call merlin.commit_merge(%d) """.formatted(mergeRequestId) ); } @@ -310,8 +337,9 @@ call commit_merge(%d) private void withdrawMergeRequest(final int mergeRequestId) throws SQLException{ try(final var statement = 
connection.createStatement()){ statement.execute( + //language=sql """ - call withdraw_merge_request(%d) + call merlin.withdraw_merge_request(%d) """.formatted(mergeRequestId) ); } @@ -320,8 +348,9 @@ call withdraw_merge_request(%d) private void denyMerge(final int mergeRequestId) throws SQLException{ try(final var statement = connection.createStatement()){ statement.execute( + //language=sql """ - call deny_merge(%d) + call merlin.deny_merge(%d) """.formatted(mergeRequestId) ); } @@ -330,8 +359,9 @@ call deny_merge(%d) private void cancelMerge(final int mergeRequestId) throws SQLException{ try(final var statement = connection.createStatement()){ statement.execute( + //language=sql """ - call cancel_merge(%d) + call merlin.cancel_merge(%d) """.formatted(mergeRequestId) ); } @@ -340,9 +370,10 @@ call cancel_merge(%d) private void setResolution(final int mergeRequestId, final int activityId, final String status) throws SQLException { try(final var statement = connection.createStatement()){ statement.execute( + //language=sql """ - update conflicting_activities - set resolution = '%s'::conflict_resolution + update merlin.conflicting_activities + set resolution = '%s'::merlin.conflict_resolution where merge_request_id = %d and activity_id = %d; """.formatted(status, mergeRequestId, activityId) ); @@ -352,12 +383,14 @@ private void setResolution(final int mergeRequestId, final int activityId, final ArrayList getConflictingActivities(final int mergeRequestId) throws SQLException { final var conflicts = new ArrayList(); try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - select activity_id, change_type_supplying, change_type_receiving - from conflicting_activities - where merge_request_id = %s - order by activity_id asc; - """.formatted(mergeRequestId)); + final var res = statement.executeQuery( + //language=sql + """ + select activity_id, change_type_supplying, change_type_receiving + from merlin.conflicting_activities + where merge_request_id = %d + order by activity_id; + """.formatted(mergeRequestId)); while (res.next()) { conflicts.add(new ConflictingActivity( res.getInt("activity_id"), @@ -372,12 +405,14 @@ ArrayList getConflictingActivities(final int mergeRequestId ArrayList getStagingAreaActivities(final int mergeRequestId) throws SQLException{ final var activities = new ArrayList(); try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - select activity_id, change_type - from merge_staging_area - where merge_request_id = %s - order by activity_id asc; - """.formatted(mergeRequestId)); + final var res = statement.executeQuery( + //language=sql + """ + select activity_id, change_type + from merlin.merge_staging_area + where merge_request_id = %d + order by activity_id; + """.formatted(mergeRequestId)); while (res.next()) { activities.add(new StagingAreaActivity( res.getInt("activity_id"), @@ -388,22 +423,16 @@ ArrayList getStagingAreaActivities(final int mergeRequestId return activities; } - private void deleteActivityDirective(final int planId, final int activityId) throws SQLException { - try (final var statement = connection.createStatement()) { - statement.executeUpdate(""" - delete from activity_directive where id = %s and plan_id = %s - """.formatted(activityId, planId)); - } - } - private Activity getActivity(final int planId, final int activityId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT 
* - FROM activity_directive - WHERE id = %d - AND plan_id = %d; - """.formatted(activityId, planId)); + final var res = statement.executeQuery( + //language=sql + """ + SELECT * + FROM merlin.activity_directive + WHERE id = %d + AND plan_id = %d; + """.formatted(activityId, planId)); res.next(); return new Activity( res.getInt("id"), @@ -427,12 +456,14 @@ private Activity getActivity(final int planId, final int activityId) throws SQLE private ArrayList getActivities(final int planId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT * - FROM activity_directive - WHERE plan_id = %d - ORDER BY id; - """.formatted(planId)); + final var res = statement.executeQuery( + //language=sql + """ + SELECT * + FROM merlin.activity_directive + WHERE plan_id = %d + ORDER BY id; + """.formatted(planId)); final var activities = new ArrayList(); while (res.next()){ @@ -460,12 +491,14 @@ private ArrayList getActivities(final int planId) throws SQLException private ArrayList getSnapshotActivities(final int snapshotId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT * - FROM plan_snapshot_activities - WHERE snapshot_id = %d - ORDER BY id; - """.formatted(snapshotId)); + final var res = statement.executeQuery( + //language=sql + """ + SELECT * + FROM merlin.plan_snapshot_activities + WHERE snapshot_id = %d + ORDER BY id; + """.formatted(snapshotId)); final var activities = new ArrayList(); while (res.next()){ @@ -493,11 +526,13 @@ private ArrayList getSnapshotActivities(final int snapshotId) private MergeRequest getMergeRequest(final int requestId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT * - FROM merge_request - WHERE id = %d; - """.formatted(requestId)); + final var res = statement.executeQuery( + //language=sql + """ + SELECT * + FROM merlin.merge_request + WHERE id = %d; + """.formatted(requestId)); res.next(); return new MergeRequest( res.getInt("id"), @@ -512,8 +547,9 @@ private MergeRequest getMergeRequest(final int requestId) throws SQLException { private void setMergeRequestStatus(final int requestId, final String newStatus) throws SQLException { try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - UPDATE merge_request + UPDATE merlin.merge_request SET status = '%s' WHERE id = %d; """.formatted(newStatus, requestId) @@ -642,62 +678,48 @@ void snapshotInheritsAllLatestAsParents() throws SQLException{ snapshotIds[i] = createSnapshot(planId); } + //assert that there is exactly one entry for this plan in plan_latest_snapshot + getLatestSnapshot(planId); try(final var statement = connection.createStatement()) { - { - //assert that there is exactly one entry for this plan in plan_latest_snapshot - final var res = statement.executeQuery( - """ - select snapshot_id from plan_latest_snapshot where plan_id = %d; - """.formatted(planId)); - assertTrue(res.next()); - assertFalse(res.next()); - } - //delete the current entry of plan_latest_snapshot for this plan to avoid any confusion when it is readded below - statement.execute(""" - delete from plan_latest_snapshot where plan_id = %d; - """.formatted(planId)); - - for (final int snapshotId : snapshotIds) { - statement.execute(""" - insert into plan_latest_snapshot(plan_id, snapshot_id) VALUES (%d, %d); - """.formatted(planId, snapshotId)); - } - - final 
int finalSnapshotId = createSnapshot(planId); - - { - //assert that there is now only one entry for this plan in plan_latest_snapshot - final var res = statement.executeQuery( + statement.execute( + //language=sql """ - select snapshot_id from plan_latest_snapshot where plan_id = %d; + delete from merlin.plan_latest_snapshot where plan_id = %d; """.formatted(planId)); - assertTrue(res.next()); - assertFalse(res.next()); - } + } - final var snapshotHistory = new ArrayList(); - { - final var res = statement.executeQuery( - """ - select get_snapshot_history(%d); - """.formatted(finalSnapshotId)); + for (final int snapshotId : snapshotIds) { + insertPlanLatestSnapshot(planId, snapshotId); + } - while (res.next()) { - snapshotHistory.add(res.getInt(1)); - } - } + final int finalSnapshotId = createSnapshot(planId); - //assert that the snapshot history is n+1 long - assertEquals(snapshotHistory.size(), numberOfSnapshots + 1); + //assert that there is now only one entry for this plan in plan_latest_snapshot + getLatestSnapshot(planId); - //assert that res contains, in order: finalSnapshotId, snapshotId[0,1,...,n] - assertEquals(finalSnapshotId, snapshotHistory.get(0)); + final var snapshotHistory = new ArrayList(); + try(final var statement = connection.createStatement()) { + final var res = statement.executeQuery( + //language=sql + """ + select merlin.get_snapshot_history(%d); + """.formatted(finalSnapshotId)); - for (var i = 1; i < snapshotHistory.size(); i++) { - assertEquals(snapshotIds[i - 1], snapshotHistory.get(i)); + while (res.next()) { + snapshotHistory.add(res.getInt(1)); } } + + //assert that the snapshot history is n+1 long + assertEquals(snapshotHistory.size(), numberOfSnapshots + 1); + + //assert that res contains, in order: finalSnapshotId, snapshotId[0,1,...,n] + assertEquals(finalSnapshotId, snapshotHistory.get(0)); + + for (var i = 1; i < snapshotHistory.size(); i++) { + assertEquals(snapshotIds[i - 1], snapshotHistory.get(i)); + } } @Test @@ -821,7 +843,7 @@ void restoresDeletedActivities() throws SQLException { final int snapshotId = createSnapshot(planId); // Empty Plan - deleteActivityDirective(planId, deletedDirective.activityId); + merlinHelper.deleteActivityDirective(planId, deletedDirective.activityId); assertEquals(0, getActivities(planId).size()); // Restore Plan from Snapshot @@ -858,12 +880,12 @@ void restoreDeletesAddedActivities() throws SQLException { void restoresChangedActivities() throws SQLException { final int planId = merlinHelper.insertPlan(missionModelId); final int oldDirectiveId = merlinHelper.insertActivity(planId); - updateActivityName("old name", oldDirectiveId, planId); + merlinHelper.updateActivityName("old name", oldDirectiveId, planId); final Activity oldDirective = getActivity(planId, oldDirectiveId); final int snapshotId = createSnapshot(planId); // Modify Directive - updateActivityName("new name", oldDirective.activityId, planId); + merlinHelper.updateActivityName("new name", oldDirective.activityId, planId); // Restore Plan from Snapshot restoreFromSnapshot(planId, snapshotId); @@ -933,24 +955,12 @@ void duplicateSetsLatestSnapshot() throws SQLException{ final int parentOldSnapshot = createSnapshot(parentPlanId); final int childPlanId = duplicatePlan(parentPlanId, "Child Plan"); - try(final var statement = connection.createStatement()){ - var res = statement.executeQuery(""" - select snapshot_id from plan_latest_snapshot - where plan_id = %s; - """.formatted(parentPlanId)); - assertTrue(res.next()); - final int parentLatestSnapshot = 
res.getInt(1); - assertFalse(res.next()); // Should only be 1 latest snapshot - res = statement.executeQuery(""" - select snapshot_id from plan_latest_snapshot - where plan_id = %s; - """.formatted(childPlanId)); - assertTrue(res.next()); - final int childLatestSnapshot = res.getInt(1); - assertFalse(res.next()); - assertEquals(childLatestSnapshot, parentLatestSnapshot); - assertNotEquals(parentOldSnapshot, parentLatestSnapshot); - } + // There should only be 1 latest snapshot + final int parentLatestSnapshot = getLatestSnapshot(parentPlanId); + final int childLatestSnapshot = getLatestSnapshot(childPlanId); + + assertEquals(childLatestSnapshot, parentLatestSnapshot); + assertNotEquals(parentOldSnapshot, parentLatestSnapshot); } @Test @@ -965,12 +975,14 @@ void duplicateAttachesParentHistoryToChild() throws SQLException{ try(final var statementParent = connection.createStatement(); final var statementChild = connection.createStatement()) { final var parentRes = statementParent.executeQuery( + //language=sql """ - select get_snapshot_history_from_plan(%d); + select merlin.get_snapshot_history_from_plan(%d); """.formatted(parentPlanId)); final var childRes = statementChild.executeQuery( + //language=sql """ - select get_snapshot_history_from_plan(%d); + select merlin.get_snapshot_history_from_plan(%d); """.formatted(childPlanId)); final var parentHistory = new ArrayList(); @@ -1001,7 +1013,6 @@ void duplicateNonexistentPlanFails() throws SQLException { throw sqlEx; } } - } @Nested @@ -1014,18 +1025,9 @@ void getPlanHistoryCapturesAllAncestors() throws SQLException { plans[i] = duplicatePlan(plans[i-1], "Child of "+(i-1)); } - try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT get_plan_history(%d); - """.formatted(plans[9]) - ); - assertTrue(res.next()); - assertEquals(plans[9], res.getInt(1)); - - for(int i = plans.length-2; i >= 0; --i){ - assertTrue(res.next()); - assertEquals(plans[i], res.getInt(1)); - } + final var history = getPlanHistory(plans[9]); + for(int i = plans.length-1; i >= 0; --i) { + assertEquals(plans[i], history.get(plans.length-1-i)); } } @@ -1034,24 +1036,15 @@ void getPlanHistoryNoAncestors() throws SQLException { final int planId = merlinHelper.insertPlan(missionModelId); //The history of a plan with no ancestors is itself. 
- try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT get_plan_history(%d); - """.formatted(planId) - ); - assertTrue(res.next()); - assertTrue(res.isLast()); - assertEquals(planId, res.getInt(1)); - } + final var history = getPlanHistory(planId); + assertEquals(1, history.size()); + assertEquals(planId, history.get(0)); } @Test void getPlanHistoryInvalidId() throws SQLException { - try (final var statement = connection.createStatement()) { - statement.execute(""" - SELECT get_plan_history(-1); - """ - ); + try { + getPlanHistory(-1); fail(); } catch (SQLException sqlException) { @@ -1080,12 +1073,7 @@ void grandparentAdoptsChildrenOnDelete() throws SQLException{ assertEquals(unrelatedPlan, getParentPlanId(childOfUnrelatedPlan)); // Delete Parent Plan - try(final var statement = connection.createStatement()){ - statement.execute(""" - delete from plan - where id = %d; - """.formatted(parentPlan)); - } + merlinHelper.deletePlan(parentPlan); // Assert that sibling1 and sibling2 now have grandparentPlan set as their parent assertEquals(0, getParentPlanId(grandparentPlan)); @@ -1106,11 +1094,11 @@ void updateActivityShouldFailOnLockedPlan() throws SQLException { final String newName = "Test :-)"; final String oldName = "oldName"; - updateActivityName(oldName, activityId, planId); + merlinHelper.updateActivityName(oldName, activityId, planId); try { lockPlan(planId); - updateActivityName(newName, activityId, planId); + merlinHelper.updateActivityName(newName, activityId, planId); } catch (SQLException sqlEx) { if (!sqlEx.getMessage().contains("Plan " + planId + " is locked.")) throw sqlEx; @@ -1124,7 +1112,7 @@ void updateActivityShouldFailOnLockedPlan() throws SQLException { assertEquals(activityId, activitiesBefore.get(0).activityId); assertEquals(oldName, activitiesBefore.get(0).name); - updateActivityName(newName, activityId, planId); + merlinHelper.updateActivityName(newName, activityId, planId); final var activitiesAfter = getActivities(planId); assertEquals(1, activitiesAfter.size()); assertEquals(activityId, activitiesAfter.get(0).activityId); @@ -1138,7 +1126,7 @@ void deleteActivityShouldFailOnLockedPlan() throws SQLException { try { lockPlan(planId); - deleteActivityDirective(planId, activityId); + merlinHelper.deleteActivityDirective(planId, activityId); } catch (SQLException sqlEx) { if (!sqlEx.getMessage().contains("Plan " + planId + " is locked.")) throw sqlEx; @@ -1151,7 +1139,7 @@ void deleteActivityShouldFailOnLockedPlan() throws SQLException { assertEquals(1, activitiesBefore.size()); assertEquals(activityId, activitiesBefore.get(0).activityId); - deleteActivityDirective(planId, activityId); + merlinHelper.deleteActivityDirective(planId, activityId); final var activitiesAfter = getActivities(planId); assertTrue(activitiesAfter.isEmpty()); } @@ -1204,12 +1192,9 @@ void beginReviewFailsOnLockedPlan() throws SQLException { void deletePlanFailsWhileLocked() throws SQLException { final var planId = merlinHelper.insertPlan(missionModelId); - try (final var statement = connection.createStatement()) { + try { lockPlan(planId); - statement.execute(""" - delete from plan - where id = %d - """.formatted(planId)); + merlinHelper.deletePlan(planId); fail(); } catch (SQLException sqlEx) { if (!sqlEx.getMessage().contains("Cannot delete locked plan.")) @@ -1237,8 +1222,8 @@ void lockingPlanDoesNotAffectOtherPlans() throws SQLException { //Update the activity in the unlocked plans final String newName = "Test"; - 
updateActivityName(newName, activityId, relatedPlanId); - updateActivityName(newName, unrelatedActivityId, unrelatedPlanId); + merlinHelper.updateActivityName(newName, activityId, relatedPlanId); + merlinHelper.updateActivityName(newName, unrelatedActivityId, unrelatedPlanId); var relatedActivities = getActivities(relatedPlanId); var unrelatedActivities = getActivities(unrelatedPlanId); @@ -1266,8 +1251,8 @@ void lockingPlanDoesNotAffectOtherPlans() throws SQLException { assertEquals(newActivityUnrelated, unrelatedActivities.get(1).activityId); //Delete the first activity in the unlocked plans - deleteActivityDirective(relatedPlanId, activityId); - deleteActivityDirective(unrelatedPlanId, unrelatedActivityId); + merlinHelper.deleteActivityDirective(relatedPlanId, activityId); + merlinHelper.deleteActivityDirective(unrelatedPlanId, unrelatedActivityId); relatedActivities = getActivities(relatedPlanId); unrelatedActivities = getActivities(unrelatedPlanId); @@ -1298,17 +1283,7 @@ void mergeBaseBetweenSelf() throws SQLException { createSnapshot(planId); final int mostRecentSnapshotId = createSnapshot(planId); - try(final var statement = connection.createStatement()){ - final var results = statement.executeQuery( - """ - SELECT snapshot_id - FROM plan_latest_snapshot - WHERE plan_id = %d; - """.formatted(planId) - ); - assertTrue(results.next()); - assertEquals(mostRecentSnapshotId, results.getInt(1)); - } + assertEquals(mostRecentSnapshotId, getMergeBaseFromPlanIds(planId,planId)); } /** @@ -1319,19 +1294,7 @@ void mergeBaseBetweenSelf() throws SQLException { void mergeBaseParentChild() throws SQLException { final int parentPlanId = merlinHelper.insertPlan(missionModelId); final int childPlanId = duplicatePlan(parentPlanId, "New Plan"); - final int childCreationSnapshotId; - - try(final var statement = connection.createStatement()){ - final var results = statement.executeQuery( - """ - SELECT snapshot_id - FROM plan_latest_snapshot - WHERE plan_id = %d; - """.formatted(childPlanId) - ); - assertTrue(results.next()); - childCreationSnapshotId = results.getInt(1); - } + final int childCreationSnapshotId = getLatestSnapshot(childPlanId); createSnapshot(childPlanId); createSnapshot(childPlanId); @@ -1350,19 +1313,7 @@ void mergeBaseParentChild() throws SQLException { void mergeBaseSiblings() throws SQLException { final int parentPlan = merlinHelper.insertPlan(missionModelId); final int olderSibling = duplicatePlan(parentPlan, "Older"); - final int olderSibCreationId; - - try(final var statement = connection.createStatement()){ - final var results = statement.executeQuery( - """ - SELECT snapshot_id - FROM plan_latest_snapshot - WHERE plan_id = %d; - """.formatted(olderSibling) - ); - assertTrue(results.next()); - olderSibCreationId = results.getInt(1); - } + final int olderSibCreationId = getLatestSnapshot(olderSibling); final int youngerSibling = duplicatePlan(parentPlan, "Younger"); @@ -1382,19 +1333,7 @@ void mergeBase10thGrandchild() throws SQLException { final int ancestor = merlinHelper.insertPlan(missionModelId); int priorAncestor = duplicatePlan(ancestor, "Child of " + ancestor); - final int ninthGrandparentCreation; - //get creation snapshot of the 9th grandparent - try (final var statement = connection.createStatement()) { - final var results = statement.executeQuery( - """ - SELECT snapshot_id - FROM plan_latest_snapshot - WHERE plan_id = %d; - """.formatted(priorAncestor) - ); - assertTrue(results.next()); - ninthGrandparentCreation = results.getInt(1); - } + final int 
ninthGrandparentCreation = getLatestSnapshot(priorAncestor); for (int i = 0; i < 8; ++i) { priorAncestor = duplicatePlan(priorAncestor, "Child of " + priorAncestor); @@ -1415,20 +1354,7 @@ void mergeBase10thGrandchild() throws SQLException { void mergeBase10thCousin() throws SQLException{ final int commonAncestor = merlinHelper.insertPlan(missionModelId); final int olderSibling = duplicatePlan(commonAncestor, "Older Sibling"); - - final int olderSiblingCreation; - try (final var statement = connection.createStatement()) { - final var results = statement.executeQuery( - """ - SELECT snapshot_id - FROM plan_latest_snapshot - WHERE plan_id = %d; - """.formatted(olderSibling) - ); - assertTrue(results.next()); - olderSiblingCreation = results.getInt(1); - } - + final int olderSiblingCreation = getLatestSnapshot(olderSibling); final int youngerSibling = duplicatePlan(commonAncestor, "Younger Sibling"); int olderDescendant = olderSibling; @@ -1455,21 +1381,9 @@ void mergeBase10thCousin() throws SQLException{ void mergeBasePreviouslyMerged() throws SQLException { final int basePlan = merlinHelper.insertPlan(missionModelId); final int newPlan = duplicatePlan(basePlan, "New Plan"); - final int creationSnapshot; + final int creationSnapshot = getLatestSnapshot(newPlan); final int postMergeSnapshot; - try (final var statement = connection.createStatement()) { - final var results = statement.executeQuery( - """ - SELECT snapshot_id - FROM plan_latest_snapshot - WHERE plan_id = %d; - """.formatted(newPlan) - ); - assertTrue(results.next()); - creationSnapshot = results.getInt(1); - } - merlinHelper.insertActivity(newPlan); final int mergeRequest = createMergeRequest(basePlan, newPlan); @@ -1478,11 +1392,12 @@ void mergeBasePreviouslyMerged() throws SQLException { try (final var statement = connection.createStatement()) { final var results = statement.executeQuery( + //language=sql """ - SELECT snapshot_id_supplying_changes - FROM merge_request mr - WHERE mr.id = %d; - """.formatted(mergeRequest) + SELECT snapshot_id_supplying_changes + FROM merlin.merge_request mr + WHERE mr.id = %d; + """.formatted(mergeRequest) ); assertTrue(results.next()); postMergeSnapshot = results.getInt(1); @@ -1505,10 +1420,10 @@ void mergeBaseFailsForInvalidPlanIds() throws SQLException { try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select get_merge_base(%d, -1); - """.formatted(planId) - ); + select merlin.get_merge_base(%d, -1); + """.formatted(planId)); } catch (SQLException sqlEx){ if(!sqlEx.getMessage().contains("Snapshot ID "+-1 +" is not present in plan_snapshot table.")) @@ -1517,10 +1432,10 @@ void mergeBaseFailsForInvalidPlanIds() throws SQLException { try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select get_merge_base(-2, %d); - """.formatted(snapshotId) - ); + select merlin.get_merge_base(-2, %d); + """.formatted(snapshotId)); } catch (SQLException sqlEx){ if(!sqlEx.getMessage().contains("Snapshot ID "+-2 +" is not present in plan_snapshot table.")) @@ -1540,27 +1455,19 @@ void multipleValidMergeBases() throws SQLException { final int plan2Snapshot = createSnapshot(plan2); //Create artificial Merge Bases - try(final var statement = connection.createStatement()){ - statement.execute( - """ - insert into plan_latest_snapshot(plan_id, snapshot_id) VALUES (%d, %d); - """.formatted(plan2, plan1Snapshot) - ); - statement.execute( - """ - insert into plan_latest_snapshot(plan_id, snapshot_id) VALUES (%d, %d); - 
""".formatted(plan1, plan2Snapshot) - ); + insertPlanLatestSnapshot(plan2, plan1Snapshot); + insertPlanLatestSnapshot(plan1, plan2Snapshot); - //Plan2Snapshot is created after Plan1Snapshot, therefore it must have a higher id - assertEquals(plan2Snapshot, getMergeBaseFromPlanIds(plan1, plan2)); + //Plan2Snapshot is created after Plan1Snapshot, therefore it must have a higher id + assertEquals(plan2Snapshot, getMergeBaseFromPlanIds(plan1, plan2)); + try(final var statement = connection.createStatement()){ statement.execute( + //language=sql """ - delete from plan_latest_snapshot + delete from merlin.plan_latest_snapshot where snapshot_id = %d; - """.formatted(plan2Snapshot) - ); + """.formatted(plan2Snapshot)); assertEquals(plan1Snapshot, getMergeBaseFromPlanIds(plan1, plan2)); } @@ -1579,8 +1486,9 @@ void noValidMergeBases() throws SQLException{ try(final var statement = connection.createStatement()){ final var res = statement.executeQuery( + //language=sql """ - select get_merge_base(%d, %d); + select merlin.get_merge_base(%d, %d); """.formatted(plan1, plan2Snapshot) ); assertTrue(res.next()); @@ -1691,9 +1599,10 @@ void beginMergeUpdatesMergeBase() throws SQLException { final int newMB = createSnapshot(planId); try(final var statement = connection.createStatement()){ statement.execute( + //language=sql """ - insert into plan_snapshot_parent(snapshot_id, parent_snapshot_id) - VALUES (%d, %d); + insert into merlin.plan_snapshot_parent(snapshot_id, parent_snapshot_id) + VALUES (%d, %d); """.formatted(mergeRQ.supplyingSnapshot, newMB) ); } @@ -1721,17 +1630,7 @@ void beginMergeNoChangesThrowsError() throws SQLException { } } // Assert that the plan was not locked - try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery( - """ - select is_locked - from plan - where id = %d; - """.formatted(planId) - ); - assertTrue(res.next()); - assertFalse(res.getBoolean(1)); - } + assertFalse(isPlanLocked(planId)); } @Test @@ -1807,7 +1706,7 @@ void noneModifyResolvesAsModify() throws SQLException { final int childPlan = duplicatePlan(basePlan, "Child Plan"); final String newName = "Test"; - updateActivityName(newName, activityId, childPlan); + merlinHelper.updateActivityName(newName, activityId, childPlan); final int mergeRQ = createMergeRequest(basePlan, childPlan); beginMerge(mergeRQ); @@ -1829,7 +1728,7 @@ void noneDeleteResolvesAsDelete() throws SQLException { final int activityId = merlinHelper.insertActivity(basePlan); final int childPlan = duplicatePlan(basePlan, "Child Plan"); - deleteActivityDirective(childPlan, activityId); + merlinHelper.deleteActivityDirective(childPlan, activityId); final int mergeRQ = createMergeRequest(basePlan, childPlan); beginMerge(mergeRQ); @@ -1852,7 +1751,7 @@ void modifyNoneResolvesAsNone() throws SQLException { final int childPlan = duplicatePlan(basePlan, "Child Plan"); final String newName = "Test"; - updateActivityName(newName, activityId, basePlan); + merlinHelper.updateActivityName(newName, activityId, basePlan); // Insert to avoid NO-OP case in begin_merge final int noopDodger = merlinHelper.insertActivity(childPlan); @@ -1880,9 +1779,9 @@ void identicalModifyModifyResolvesAsNone() throws SQLException { final int childPlan = duplicatePlan(basePlan, "Child Plan"); final String newName = "Test"; - updateActivityName(newName, activityId, basePlan); - updateActivityName("Different Revision Proof", activityId, childPlan); - updateActivityName(newName, activityId, childPlan); + merlinHelper.updateActivityName(newName, 
activityId, basePlan); + merlinHelper.updateActivityName("Different Revision Proof", activityId, childPlan); + merlinHelper.updateActivityName(newName, activityId, childPlan); // Insert to avoid NO-OP case in begin_merge final int noopDodger = merlinHelper.insertActivity(childPlan); @@ -1988,8 +1887,8 @@ void differentModifyModifyResolvesAsConflict() throws SQLException { final int childPlan = duplicatePlan(basePlan, "Child Plan"); final String newName = "Test"; - updateActivityName(newName, activityId, basePlan); - updateActivityName("Different", activityId, childPlan); + merlinHelper.updateActivityName(newName, activityId, basePlan); + merlinHelper.updateActivityName("Different", activityId, childPlan); final int mergeRQ = createMergeRequest(basePlan, childPlan); beginMerge(mergeRQ); @@ -2013,8 +1912,8 @@ void modifyDeleteResolvesAsConflict() throws SQLException { final int childPlan = duplicatePlan(basePlan, "Child Plan"); final String newName = "Test"; - updateActivityName(newName, activityId, basePlan); - deleteActivityDirective(childPlan, activityId); + merlinHelper.updateActivityName(newName, activityId, basePlan); + merlinHelper.deleteActivityDirective(childPlan, activityId); final int mergeRQ = createMergeRequest(basePlan, childPlan); beginMerge(mergeRQ); @@ -2037,7 +1936,7 @@ void deleteNoneIsExcludedFromStageAndConflict() throws SQLException { final int activityId = merlinHelper.insertActivity(basePlan); final int childPlan = duplicatePlan(basePlan, "Child Plan"); - deleteActivityDirective(basePlan, activityId); + merlinHelper.deleteActivityDirective(basePlan, activityId); // Insert to avoid NO-OP case in begin_merge final int noopDodger = merlinHelper.insertActivity(childPlan); @@ -2062,8 +1961,8 @@ void deleteModifyIsAConflict() throws SQLException { final int childPlan = duplicatePlan(basePlan, "Child Plan"); final String newName = "Test"; - deleteActivityDirective(basePlan, activityId); - updateActivityName(newName, activityId, childPlan); + merlinHelper.deleteActivityDirective(basePlan, activityId); + merlinHelper.updateActivityName(newName, activityId, childPlan); final int mergeRQ = createMergeRequest(basePlan, childPlan); beginMerge(mergeRQ); @@ -2086,8 +1985,8 @@ void deleteDeleteIsExcludedFromStageAndConflict() throws SQLException { final int activityId = merlinHelper.insertActivity(basePlan); final int childPlan = duplicatePlan(basePlan, "Child Plan"); - deleteActivityDirective(basePlan, activityId); - deleteActivityDirective(childPlan, activityId); + merlinHelper.deleteActivityDirective(basePlan, activityId); + merlinHelper.deleteActivityDirective(childPlan, activityId); // Insert to avoid NO-OP case in begin_merge final int noopDodger = merlinHelper.insertActivity(childPlan); @@ -2126,8 +2025,8 @@ void commitMergeFailsIfConflictsExist() throws SQLException { final int activityId = merlinHelper.insertActivity(basePlan); final int childPlan = duplicatePlan(basePlan, "Child"); - updateActivityName("BasePlan", activityId, basePlan); - updateActivityName("ChildPlan", activityId, childPlan); + merlinHelper.updateActivityName("BasePlan", activityId, basePlan); + merlinHelper.updateActivityName("ChildPlan", activityId, childPlan); final int mergeRQ = createMergeRequest(basePlan, childPlan); beginMerge(mergeRQ); @@ -2148,14 +2047,14 @@ void commitMergeSucceedsIfAllConflictsAreResolved() throws SQLException{ final int deleteModifyActivityId = merlinHelper.insertActivity(basePlan); final int childPlan = duplicatePlan(basePlan, "Child"); - updateActivityName("BaseActivity1", 
modifyModifyActivityId, basePlan); - updateActivityName("ChildActivity1", modifyModifyActivityId, childPlan); + merlinHelper.updateActivityName("BaseActivity1", modifyModifyActivityId, basePlan); + merlinHelper.updateActivityName("ChildActivity1", modifyModifyActivityId, childPlan); - updateActivityName("BaseActivity2", modifyDeleteActivityId, basePlan); - deleteActivityDirective(childPlan, modifyDeleteActivityId); + merlinHelper.updateActivityName("BaseActivity2", modifyDeleteActivityId, basePlan); + merlinHelper.deleteActivityDirective(childPlan, modifyDeleteActivityId); - deleteActivityDirective(basePlan, deleteModifyActivityId); - updateActivityName("ChildActivity2", deleteModifyActivityId, childPlan); + merlinHelper.deleteActivityDirective(basePlan, deleteModifyActivityId); + merlinHelper.updateActivityName("ChildActivity2", deleteModifyActivityId, childPlan); final int mergeRQ = createMergeRequest(basePlan, childPlan); beginMerge(mergeRQ); @@ -2199,10 +2098,10 @@ void commitMergeSucceedsIfNoConflictsExist() throws SQLException { -- modify the next 50 in the child -- add 25 activities to the parent */ - for(int i = 50; i < 75; ++i) { deleteActivityDirective(basePlan, baseActivities[i]); } - for(int i = 75; i < 100; ++i) { deleteActivityDirective(childPlan, baseActivities[i]); } - for(int i = 100; i < 150; ++i) { updateActivityName("Renamed Activity " + i, baseActivities[i], basePlan); } - for(int i = 150; i < 200; ++i) { updateActivityName("Renamed Activity " + i, baseActivities[i], childPlan); } + for(int i = 50; i < 75; ++i) { merlinHelper.deleteActivityDirective(basePlan, baseActivities[i]); } + for(int i = 75; i < 100; ++i) { merlinHelper.deleteActivityDirective(childPlan, baseActivities[i]); } + for(int i = 100; i < 150; ++i) { merlinHelper.updateActivityName("Renamed Activity " + i, baseActivities[i], basePlan); } + for(int i = 150; i < 200; ++i) { merlinHelper.updateActivityName("Renamed Activity " + i, baseActivities[i], childPlan); } for(int i = 0; i < 25; ++i) { merlinHelper.insertActivity(basePlan); } final int mergeRQ = createMergeRequest(basePlan, childPlan); @@ -2238,27 +2137,27 @@ void modifyAndDeletesApplyCorrectly() throws SQLException { assertEquals(8, getActivities(basePlan).size()); assertEquals(8, getActivities(childPlan).size()); - updateActivityName("Test", modifyUncontestedActId, childPlan); + merlinHelper.updateActivityName("Test", modifyUncontestedActId, childPlan); - deleteActivityDirective(childPlan, deleteUncontestedActId); + merlinHelper.deleteActivityDirective(childPlan, deleteUncontestedActId); - updateActivityName("Modify Contested Supplying Parent", modifyContestedSupplyingActId, basePlan); - updateActivityName("Modify Contested Supplying Child", modifyContestedSupplyingActId, childPlan); + merlinHelper.updateActivityName("Modify Contested Supplying Parent", modifyContestedSupplyingActId, basePlan); + merlinHelper.updateActivityName("Modify Contested Supplying Child", modifyContestedSupplyingActId, childPlan); - updateActivityName("Modify Contested Receiving Parent", modifyContestedReceivingActId, basePlan); - updateActivityName("Modify Contested Receiving Child", modifyContestedReceivingActId, childPlan); + merlinHelper.updateActivityName("Modify Contested Receiving Parent", modifyContestedReceivingActId, basePlan); + merlinHelper.updateActivityName("Modify Contested Receiving Child", modifyContestedReceivingActId, childPlan); - updateActivityName("Delete Contested Supplying Parent Resolve Supplying", deleteContestedSupplyingResolveSupplyingActId, 
basePlan); - deleteActivityDirective(childPlan, deleteContestedSupplyingResolveSupplyingActId); + merlinHelper.updateActivityName("Delete Contested Supplying Parent Resolve Supplying", deleteContestedSupplyingResolveSupplyingActId, basePlan); + merlinHelper.deleteActivityDirective(childPlan, deleteContestedSupplyingResolveSupplyingActId); - updateActivityName("Delete Contested Supplying Parent Resolve Receiving", deleteContestedSupplyingResolveReceivingActId, basePlan); - deleteActivityDirective(childPlan, deleteContestedSupplyingResolveReceivingActId); + merlinHelper.updateActivityName("Delete Contested Supplying Parent Resolve Receiving", deleteContestedSupplyingResolveReceivingActId, basePlan); + merlinHelper.deleteActivityDirective(childPlan, deleteContestedSupplyingResolveReceivingActId); - deleteActivityDirective(basePlan, deleteContestedReceivingResolveReceivingActId); - updateActivityName("Delete Contested Receiving Child Resolve Receiving", deleteContestedReceivingResolveReceivingActId, childPlan); + merlinHelper.deleteActivityDirective(basePlan, deleteContestedReceivingResolveReceivingActId); + merlinHelper.updateActivityName("Delete Contested Receiving Child Resolve Receiving", deleteContestedReceivingResolveReceivingActId, childPlan); - deleteActivityDirective(basePlan, deleteContestedReceivingResolveSupplyingActId); - updateActivityName("Delete Contested Receiving Child Resolve Supplying", deleteContestedReceivingResolveSupplyingActId, childPlan); + merlinHelper.deleteActivityDirective(basePlan, deleteContestedReceivingResolveSupplyingActId); + merlinHelper.updateActivityName("Delete Contested Receiving Child Resolve Supplying", deleteContestedReceivingResolveSupplyingActId, childPlan); final int mergeRQ = createMergeRequest(basePlan, childPlan); beginMerge(mergeRQ); @@ -2313,8 +2212,8 @@ void commitMergeCleansUpSuccessfully() throws SQLException{ for(int i = 0; i < 5; ++i){ merlinHelper.insertActivity(basePlan, "00:00:"+(i%60)); } for(int i = 0; i < 5; ++i){ merlinHelper.insertActivity(basePlan, "00:00:"+(i%60)); } - updateActivityName("Conflict!", conflictActivityId, basePlan); - updateActivityName("Conflict >:-)", conflictActivityId, childPlan); + merlinHelper.updateActivityName("Conflict!", conflictActivityId, basePlan); + merlinHelper.updateActivityName("Conflict >:-)", conflictActivityId, childPlan); final int mergeRQ = createMergeRequest(basePlan, childPlan); beginMerge(mergeRQ); @@ -2331,20 +2230,8 @@ void commitMergeCleansUpSuccessfully() throws SQLException{ assertTrue(getConflictingActivities(mergeRQ).isEmpty()); assertTrue(getConflictingActivities(mergeRQ).isEmpty()); - try(final var statement = connection.createStatement()){ - final var res = statement.executeQuery(""" - SELECT plan.id as plan_id, is_locked, status - FROM plan - JOIN merge_request - ON plan_id_receiving_changes = plan.id - WHERE plan.id = %d; - """.formatted(basePlan) - ); - assertTrue(res.next()); - assertEquals(basePlan, res.getInt("plan_id")); - assertFalse(res.getBoolean("is_locked")); - assertEquals("accepted", res.getString("status")); - } + assertFalse(isPlanLocked(basePlan)); + assertEquals("accepted", getMergeRequest(mergeRQ).status); } } @@ -2654,11 +2541,11 @@ void denyCleansUpSuccessfully() throws SQLException { final int childActivity1 = merlinHelper.insertActivity(childPlan1); final int childActivity2 = merlinHelper.insertActivity(childPlan2); - updateActivityName("Conflict 1 Base", baseActivity1, basePlan1); - updateActivityName("Conflict 2 Base", baseActivity2, basePlan2); + 
merlinHelper.updateActivityName("Conflict 1 Base", baseActivity1, basePlan1); + merlinHelper.updateActivityName("Conflict 2 Base", baseActivity2, basePlan2); - updateActivityName("Conflict 1 Child", baseActivity1, childPlan1); - updateActivityName("Conflict 2 Child", baseActivity2, childPlan2); + merlinHelper.updateActivityName("Conflict 1 Child", baseActivity1, childPlan1); + merlinHelper.updateActivityName("Conflict 2 Child", baseActivity2, childPlan2); final int mergeRQ1 = createMergeRequest(basePlan1, childPlan1); final int mergeRQ2 = createMergeRequest(basePlan2, childPlan2); @@ -2683,27 +2570,8 @@ void denyCleansUpSuccessfully() throws SQLException { assertEquals(baseActivity2, getConflictingActivities(mergeRQ2).get(0).activityId); //Assert both plans are locked - try(final var statement = connection.createStatement()){ - var res = statement.executeQuery( - """ - SELECT is_locked - FROM plan - WHERE id = %d; - """.formatted(basePlan1) - ); - assertTrue(res.next()); - assertTrue(res.getBoolean(1)); - - res = statement.executeQuery( - """ - SELECT is_locked - FROM plan - WHERE id = %d; - """.formatted(basePlan2) - ); - assertTrue(res.next()); - assertTrue(res.getBoolean(1)); - } + assertTrue(isPlanLocked(basePlan1)); + assertTrue(isPlanLocked(basePlan2)); denyMerge(mergeRQ1); @@ -2722,27 +2590,8 @@ void denyCleansUpSuccessfully() throws SQLException { assertEquals(baseActivity2, getConflictingActivities(mergeRQ2).get(0).activityId); //Assert only the in-progress merge is now locked - try(final var statement = connection.createStatement()){ - var res = statement.executeQuery( - """ - SELECT is_locked - FROM plan - WHERE id = %d; - """.formatted(basePlan1) - ); - assertTrue(res.next()); - assertFalse(res.getBoolean(1)); - - res = statement.executeQuery( - """ - SELECT is_locked - FROM plan - WHERE id = %d; - """.formatted(basePlan2) - ); - assertTrue(res.next()); - assertTrue(res.getBoolean(1)); - } + assertFalse(isPlanLocked(basePlan1)); + assertTrue(isPlanLocked(basePlan2)); } /** @@ -2762,11 +2611,11 @@ void cancelCleansUpSuccessfully() throws SQLException { final int childActivity1 = merlinHelper.insertActivity(childPlan1); final int childActivity2 = merlinHelper.insertActivity(childPlan2); - updateActivityName("Conflict 1 Base", baseActivity1, basePlan1); - updateActivityName("Conflict 2 Base", baseActivity2, basePlan2); + merlinHelper.updateActivityName("Conflict 1 Base", baseActivity1, basePlan1); + merlinHelper.updateActivityName("Conflict 2 Base", baseActivity2, basePlan2); - updateActivityName("Conflict 1 Child", baseActivity1, childPlan1); - updateActivityName("Conflict 2 Child", baseActivity2, childPlan2); + merlinHelper.updateActivityName("Conflict 1 Child", baseActivity1, childPlan1); + merlinHelper.updateActivityName("Conflict 2 Child", baseActivity2, childPlan2); final int mergeRQ1 = createMergeRequest(basePlan1, childPlan1); final int mergeRQ2 = createMergeRequest(basePlan2, childPlan2); @@ -2791,27 +2640,8 @@ void cancelCleansUpSuccessfully() throws SQLException { assertEquals(baseActivity2, getConflictingActivities(mergeRQ2).get(0).activityId); //Assert both plans are locked - try(final var statement = connection.createStatement()){ - var res = statement.executeQuery( - """ - SELECT is_locked - FROM plan - WHERE id = %d; - """.formatted(basePlan1) - ); - assertTrue(res.next()); - assertTrue(res.getBoolean(1)); - - res = statement.executeQuery( - """ - SELECT is_locked - FROM plan - WHERE id = %d; - """.formatted(basePlan2) - ); - assertTrue(res.next()); - 
assertTrue(res.getBoolean(1)); - } + assertTrue(isPlanLocked(basePlan1)); + assertTrue(isPlanLocked(basePlan2)); cancelMerge(mergeRQ1); @@ -2830,27 +2660,8 @@ void cancelCleansUpSuccessfully() throws SQLException { assertEquals(baseActivity2, getConflictingActivities(mergeRQ2).get(0).activityId); //Assert only the in-progress merge is now locked - try(final var statement = connection.createStatement()){ - var res = statement.executeQuery( - """ - SELECT is_locked - FROM plan - WHERE id = %d; - """.formatted(basePlan1) - ); - assertTrue(res.next()); - assertFalse(res.getBoolean(1)); - - res = statement.executeQuery( - """ - SELECT is_locked - FROM plan - WHERE id = %d; - """.formatted(basePlan2) - ); - assertTrue(res.next()); - assertTrue(res.getBoolean(1)); - } + assertFalse(isPlanLocked(basePlan1)); + assertTrue(isPlanLocked(basePlan2)); } } @@ -2892,7 +2703,7 @@ void anchorMustBeInTargetPlanAtEndOfMerge() throws SQLException{ final int childPlan = duplicatePlan(planId, "Anchor Delete Test"); merlinHelper.setAnchor(activityA, true, activityB, childPlan); - deleteActivityDirective(planId, activityA); + merlinHelper.deleteActivityDirective(planId, activityA); final int mergeRQ = createMergeRequest(planId, childPlan); beginMerge(mergeRQ); @@ -2920,8 +2731,9 @@ void deleteSubtreeDoesNotImpactRelatedPlans() throws SQLException{ try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select hasura_functions.delete_activity_by_pk_delete_subtree(%d, %d, '%s'::json) + select hasura.delete_activity_by_pk_delete_subtree(%d, %d, '%s'::json) """.formatted(activityAId, planId, merlinHelper.admin.session())); } assertEquals(0, getActivities(planId).size()); @@ -2943,8 +2755,9 @@ void deletePlanReanchorDoesNotImpactRelatedPlans() throws SQLException{ try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_plan_start(%d, %d, '%s'::json) + select hasura.delete_activity_by_pk_reanchor_plan_start(%d, %d, '%s'::json) """.formatted(activityAId, planId, merlinHelper.admin.session())); } assertEquals(1, getActivities(planId).size()); @@ -2968,8 +2781,9 @@ void deleteActivityReanchorDoesNotImpactRelatedPlans() throws SQLException{ try(final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - select hasura_functions.delete_activity_by_pk_reanchor_to_anchor(%d, %d, '%s'::json) + select hasura.delete_activity_by_pk_reanchor_to_anchor(%d, %d, '%s'::json) """.formatted(activityAId, planId, merlinHelper.admin.session())); } assertEquals(2, getActivities(planId).size()); @@ -3067,7 +2881,7 @@ void presetOnlyPullsFromSourceSnapshot() throws SQLException { // Remove preset from plan before branching merlinHelper.unassignPreset(presetId, activityId, planId); final int branchId = duplicatePlan(planId, "Delete Preset Branch"); - updateActivityName("new name", activityId, branchId); + merlinHelper.updateActivityName("new name", activityId, branchId); // Merge final int mergeRQId = createMergeRequest(planId, branchId); @@ -3094,7 +2908,7 @@ void presetUnaffectedByUnrelatedSnapshot() throws SQLException { final int planId = merlinHelper.insertPlan(missionModelId); final int activityId = merlinHelper.insertActivity(planId); final int branchId = duplicatePlan(planId, "Delete Preset Branch"); - updateActivityName("new name", activityId, branchId); + merlinHelper.updateActivityName("new name", activityId, branchId); // Merge final int mergeRQId = 
createMergeRequest(planId, branchId); @@ -3113,6 +2927,11 @@ class TagsTests { private final gov.nasa.jpl.aerie.database.TagsTests tagsHelper = new gov.nasa.jpl.aerie.database.TagsTests(); { tagsHelper.setConnection(helper);} + @AfterEach + void afterEach() throws SQLException { + helper.clearSchema("tags"); + } + // Checks that both activity directive and plan tags are copied @Test void duplicateCopiesTags() throws SQLException { @@ -3230,7 +3049,7 @@ void tagsPersistWithModify() throws SQLException { final int tagId = tagsHelper.insertTag("Farm", merlinHelper.admin.name()); tagsHelper.assignTagToActivity(activityId, planId, tagId); final int branchId = duplicatePlan(planId, "Modify Tags Branch"); - updateActivityName("New Name", activityId, branchId); + merlinHelper.updateActivityName("New Name", activityId, branchId); // Merge final int mergeRQId = createMergeRequest(planId, branchId); @@ -3288,7 +3107,7 @@ void tagsPersistWithModifyDeleteConflict() throws SQLException { tagsHelper.assignTagToActivity(activityId, planId, tractorTagId); tagsHelper.assignTagToActivity(activityId, planId, barnTagId); tagsHelper.removeTagFromActivity(activityId, planId, farmTagId); - deleteActivityDirective(branchId, activityId); + merlinHelper.deleteActivityDirective(branchId, activityId); final int mergeRQ = createMergeRequest(planId, branchId); beginMerge(mergeRQ); @@ -3318,7 +3137,7 @@ void tagsPersistWithDeleteModifyConflict() throws SQLException { tagsHelper.assignTagToActivity(activityId, branchId, tractorTagId); tagsHelper.assignTagToActivity(activityId, branchId, barnTagId); tagsHelper.removeTagFromActivity(activityId, branchId, farmTagId); - deleteActivityDirective(planId, activityId); + merlinHelper.deleteActivityDirective(planId, activityId); final int mergeRQ = createMergeRequest(planId, branchId); beginMerge(mergeRQ); @@ -3463,8 +3282,8 @@ void tagsAreNotShuffledDuringMerge() throws SQLException { final int case7Id = merlinHelper.insertActivity(branchId); final int case8Id = merlinHelper.insertActivity(branchId); tagsHelper.assignTagToActivity(case8Id, branchId, barnTagId); - deleteActivityDirective(branchId, case5Id); - deleteActivityDirective(branchId, case6Id); + merlinHelper.deleteActivityDirective(branchId, case5Id); + merlinHelper.deleteActivityDirective(branchId, case6Id); tagsHelper.assignTagToActivity(case2Id, planId, barnTagId); tagsHelper.assignTagToActivity(case3Id, branchId, barnTagId); diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PresetTests.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PresetTests.java index 380148c7b3..01121e7ef5 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PresetTests.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/PresetTests.java @@ -7,7 +7,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; @@ -20,7 +19,6 @@ @TestInstance(TestInstance.Lifecycle.PER_CLASS) public class PresetTests { - private static final File initSqlScriptFile = new File("../merlin-server/sql/merlin/init.sql"); private DatabaseTestHelper helper; private MerlinDatabaseTestHelper merlinHelper; @@ -42,42 +40,19 @@ void beforeEach() throws SQLException { @AfterEach void afterEach() throws SQLException { - helper.clearTable("uploaded_file"); - helper.clearTable("mission_model"); - helper.clearTable("plan"); - helper.clearTable("activity_directive"); - helper.clearTable("simulation_template"); - 
helper.clearTable("simulation"); - helper.clearTable("dataset"); - helper.clearTable("plan_dataset"); - helper.clearTable("simulation_dataset"); - helper.clearTable("plan_snapshot"); - helper.clearTable("plan_latest_snapshot"); - helper.clearTable("plan_snapshot_activities"); - helper.clearTable("plan_snapshot_parent"); - helper.clearTable("anchor_validation_status"); - helper.clearTable("activity_presets"); - helper.clearTable("preset_to_directive"); - helper.clearTable("preset_to_snapshot_directive"); + helper.clearSchema("merlin"); } @BeforeAll void beforeAll() throws SQLException, IOException, InterruptedException { - helper = new DatabaseTestHelper( - "aerie_merlin_test", - "Merlin Database Tests", - initSqlScriptFile - ); - helper.startDatabase(); + helper = new DatabaseTestHelper("aerie_preset_test", "Aerie Preset Tests"); setConnection(helper); merlinHelper = new MerlinDatabaseTestHelper(connection); } @AfterAll void afterAll() throws SQLException, IOException, InterruptedException { - helper.stopDatabase(); - connection = null; - helper = null; + helper.close(); } //region Helper Methods @@ -88,12 +63,14 @@ Activity assignPreset(int presetId, int activityId, int planId) throws SQLExcept Activity getActivity(final int planId, final int activityId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT * - FROM activity_directive - WHERE id = %d - AND plan_id = %d; - """.formatted(activityId, planId)); + final var res = statement.executeQuery( + //language=sql + """ + SELECT * + FROM merlin.activity_directive + WHERE id = %d + AND plan_id = %d; + """.formatted(activityId, planId)); res.next(); return new Activity( res.getInt("id"), @@ -107,12 +84,14 @@ Activity getActivity(final int planId, final int activityId) throws SQLException ArrayList getActivities(final int planId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT * - FROM activity_directive - WHERE plan_id = %d - ORDER BY id; - """.formatted(planId)); + final var res = statement.executeQuery( + //language=sql + """ + SELECT * + FROM merlin.activity_directive + WHERE plan_id = %d + ORDER BY id; + """.formatted(planId)); final var activities = new ArrayList(); while (res.next()){ @@ -138,24 +117,27 @@ static void assertActivityEqualsAsideFromArgs(final Activity expected, final Act void deletePreset(int presetId) throws SQLException { try (final var statement = connection.createStatement()){ statement.execute( - """ - DELETE FROM activity_presets - WHERE id = %d - """.formatted(presetId)); + //language=sql + """ + DELETE FROM merlin.activity_presets + WHERE id = %d + """.formatted(presetId)); } } ArrayList getActivitiesWithPreset(final int presetId) throws SQLException{ try (final var statement = connection.createStatement()) { // Select from act dirs using the list of ids gotten from the join table preset to dirs - final var res = statement.executeQuery(""" - SELECT ad.id, ad.plan_id, ad.name, ad.type, ad.arguments - FROM activity_directive ad, + final var res = statement.executeQuery( + //language=sql + """ + SELECT ad.id, ad.plan_id, ad.name, ad.type, ad.arguments + FROM merlin.activity_directive ad, (SELECT activity_id, plan_id - FROM preset_to_directive + FROM merlin.preset_to_directive WHERE preset_id = %d) presets - WHERE (ad.id, ad.plan_id) = (presets.activity_id, presets.plan_id); - """.formatted(presetId)); + WHERE (ad.id, ad.plan_id) = 
(presets.activity_id, presets.plan_id); + """.formatted(presetId)); final var activities = new ArrayList(); while (res.next()){ @@ -173,11 +155,16 @@ ArrayList getActivitiesWithPreset(final int presetId) throws SQLExcept Preset getPresetAssignedToActivity(final int activityId, final int planId) throws SQLException{ try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - SELECT ap.id, ap.model_id, ap.name, ap.associated_activity_type, ap.arguments - FROM activity_presets ap, (SELECT preset_id from preset_to_directive WHERE (activity_id, plan_id) = (%d, %d)) o - WHERE ap.id = o.preset_id; - """.formatted(activityId, planId)); + final var res = statement.executeQuery( + //language=sql + """ + SELECT ap.id, ap.model_id, ap.name, ap.associated_activity_type, ap.arguments + FROM merlin.activity_presets ap, + (SELECT preset_id + FROM merlin.preset_to_directive + WHERE (activity_id, plan_id) = (%d, %d)) o + WHERE ap.id = o.preset_id; + """.formatted(activityId, planId)); return res.next() ? new Preset( res.getInt("id"), diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/SchedulerDatabaseTests.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/SchedulerDatabaseTests.java index 0368bcbe6e..8bfe76be56 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/SchedulerDatabaseTests.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/SchedulerDatabaseTests.java @@ -2,7 +2,6 @@ import org.junit.jupiter.api.*; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; @@ -11,36 +10,32 @@ @TestInstance(TestInstance.Lifecycle.PER_CLASS) class SchedulerDatabaseTests { - private static final File initSqlScriptFile = new File("../scheduler-server/sql/scheduler/init.sql"); private DatabaseTestHelper helper; + private MerlinDatabaseTestHelper merlinHelper; private Connection connection; @BeforeAll void beforeAll() throws SQLException, IOException, InterruptedException { - helper = new DatabaseTestHelper( - "aerie_scheduler_test", - "Scheduler Database Tests", - initSqlScriptFile - ); - helper.startDatabase(); + helper = new DatabaseTestHelper("aerie_scheduler_test", "Scheduler Database Tests"); connection = helper.connection(); + merlinHelper = new MerlinDatabaseTestHelper(connection); + merlinHelper.insertUser("scheduler db tests"); } @AfterAll void afterAll() throws SQLException, IOException, InterruptedException { - helper.stopDatabase(); - connection = null; - helper = null; + helper.close(); } - int insertSpecification(final long planId) throws SQLException { + int getSpecification(final long planId) throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - insert into scheduling_specification( - revision, plan_id, plan_revision, horizon_start, horizon_end, simulation_arguments, analysis_only - ) values (0, %d, 0, now(), now(), '{}', false) returning id; - """.formatted(planId)); + final var res = statement.executeQuery( + //language=sql + """ + select id from scheduler.scheduling_specification + where plan_id = %d; + """.formatted(planId)); res.next(); return res.getInt("id"); } @@ -48,17 +43,19 @@ insert into scheduling_specification( int insertGoal() throws SQLException { try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - with metadata(id, owner) as ( - insert into scheduling_goal_metadata(name, description, owner, updated_by) - values ('test 
goal', 'no-op', 'scheduler db tests', 'scheduler db tests') - returning id, owner - ) - insert into scheduling_goal_definition(goal_id, definition, author) - select m.id, 'nothing', m.owner - from metadata m - returning goal_id as id; - """); + final var res = statement.executeQuery( + //language=sql + """ + with metadata(id, owner) as ( + insert into scheduler.scheduling_goal_metadata(name, description, owner, updated_by) + values ('test goal', 'no-op', 'scheduler db tests', 'scheduler db tests') + returning id, owner + ) + insert into scheduler.scheduling_goal_definition(goal_id, definition, author) + select m.id, 'nothing', m.owner + from metadata m + returning goal_id as id; + """); res.next(); return res.getInt("id"); } @@ -71,41 +68,40 @@ class TestSpecificationAndTemplateGoalTriggers { @BeforeEach void beforeEach() throws SQLException { - specificationIds = new int[]{insertSpecification(0), insertSpecification(1)}; + final int modelId = merlinHelper.insertMissionModel(merlinHelper.insertFileUpload()); + specificationIds = new int[]{ + getSpecification(merlinHelper.insertPlan(modelId)), + getSpecification(merlinHelper.insertPlan(modelId)) + }; goalIds = new int[]{insertGoal(), insertGoal(), insertGoal(), insertGoal(), insertGoal(), insertGoal()}; } @AfterEach void afterEach() throws SQLException { - helper.clearTable("scheduling_specification"); - helper.clearTable("scheduling_goal_metadata"); - helper.clearTable("scheduling_goal_definition"); - helper.clearTable("scheduling_specification_goals"); + helper.clearSchema("merlin"); + helper.clearSchema("scheduler"); } void insertGoalPriorities(int specOrTemplateIndex, final int[] goalIndices, int[] priorities) throws SQLException { for (int i = 0; i < priorities.length; i++) { - connection.createStatement().executeUpdate(""" - insert into scheduling_specification_goals(specification_id, goal_id, priority) - values (%d, %d, %d); - """.formatted( - specificationIds[specOrTemplateIndex], - goalIds[goalIndices[i]], - priorities[i] - )); + connection.createStatement().executeUpdate( + //language=sql + """ + insert into scheduler.scheduling_specification_goals(specification_id, goal_id, priority) + values (%d, %d, %d); + """.formatted(specificationIds[specOrTemplateIndex], goalIds[goalIndices[i]], priorities[i])); } } void checkPriorities(int specOrTemplateIndex, int[] goalIdIndices, int[] priorities) throws SQLException { assertEquals(goalIdIndices.length, priorities.length); for (int i = 0; i < priorities.length; i++) { - final var res = connection.createStatement().executeQuery(""" - select priority from scheduling_specification_goals - where goal_id = %d and specification_id = %d; - """.formatted( - goalIds[goalIdIndices[i]], - specificationIds[specOrTemplateIndex]) - ); + final var res = connection.createStatement().executeQuery( + //language=sql + """ + select priority from scheduler.scheduling_specification_goals + where goal_id = %d and specification_id = %d; + """.formatted(goalIds[goalIdIndices[i]], specificationIds[specOrTemplateIndex])); res.next(); assertEquals(priorities[i], res.getInt("priority")); res.close(); @@ -118,7 +114,7 @@ void shouldIncrementPrioritiesOnCollision() throws SQLException { insertGoalPriorities(1, new int[] {0, 1, 2}, new int[]{0, 1, 2}); checkPriorities(1, new int[]{0, 1, 2}, new int[]{0, 1, 2}); - helper.clearTable("scheduling_specification_goals"); + helper.clearTable("scheduler.scheduling_specification_goals"); // should cause increments insertGoalPriorities(0, new int[] {0, 1, 2}, new int[]{0, 0, 0}); 
checkPriorities(0, new int[]{0, 1, 2}, new int[]{2, 1, 0}); @@ -140,8 +136,10 @@ void shouldErrorWhenInsertingNonConsecutivePriority() throws SQLException { } private int getSpecificationRevision(int specificationId) throws SQLException { - final var res = connection.createStatement().executeQuery(""" - select revision from scheduling_specification + final var res = connection.createStatement().executeQuery( + //language=sql + """ + select revision from scheduler.scheduling_specification where id = %d; """.formatted(specificationId)); res.next(); @@ -152,11 +150,15 @@ private int getSpecificationRevision(int specificationId) throws SQLException { void shouldIncrementSpecRevisionAfterModifyingGoalSpec() throws SQLException { insertGoalPriorities(0, new int[] {0, 1, 2, 3, 4}, new int[]{0, 1, 2, 3, 4}); final var revisionBefore = getSpecificationRevision(specificationIds[0]); - connection.createStatement().executeUpdate(""" - update scheduling_specification_goals - set goal_revision = 0 - where goal_id = %d; - """.formatted(goalIds[3])); + try(final var statement = connection.createStatement()){ + statement.executeUpdate( + //language=sql + """ + update scheduler.scheduling_specification_goals + set goal_revision = 0 + where goal_id = %d; + """.formatted(goalIds[3])); + } final var revisionAfter = getSpecificationRevision(specificationIds[0]); assertEquals(revisionBefore + 1, revisionAfter); } @@ -166,21 +168,31 @@ void shouldReorderPrioritiesOnUpdate() throws SQLException { insertGoalPriorities(0, new int[] {0, 1, 2}, new int[]{0, 1, 2}); insertGoalPriorities(1, new int[] {3, 4, 5}, new int[]{0, 1, 2}); - // First test lowering a priority - connection.createStatement().executeUpdate(""" - update scheduling_specification_goals - set priority = 0 where specification_id = %d and goal_id = %d; - """.formatted(specificationIds[0], goalIds[2])); - checkPriorities( 0, new int[]{0, 1, 2}, new int[]{1, 2, 0}); - checkPriorities( 1, new int[]{3, 4, 5}, new int[]{0, 1, 2}); + try(final var statement = connection.createStatement()) { + // First test lowering a priority + statement.executeUpdate( + //language=sql + """ + update scheduler.scheduling_specification_goals + set priority = 0 + where specification_id = %d + and goal_id = %d; + """.formatted(specificationIds[0], goalIds[2])); + checkPriorities(0, new int[]{0, 1, 2}, new int[]{1, 2, 0}); + checkPriorities(1, new int[]{3, 4, 5}, new int[]{0, 1, 2}); - /// Next test raising a priority - connection.createStatement().executeUpdate(""" - update scheduling_specification_goals - set priority = 2 where specification_id = %d and goal_id = %d; - """.formatted(specificationIds[0], goalIds[2])); - checkPriorities( 0, new int[] {0, 1, 2}, new int[] {0, 1, 2}); - checkPriorities( 1, new int[] {3, 4, 5}, new int[] {0, 1, 2}); + /// Next test raising a priority + statement.executeUpdate( + //language=sql + """ + update scheduler.scheduling_specification_goals + set priority = 2 + where specification_id = %d + and goal_id = %d; + """.formatted(specificationIds[0], goalIds[2])); + } + checkPriorities(0, new int[] {0, 1, 2}, new int[] {0, 1, 2}); + checkPriorities(1, new int[] {3, 4, 5}, new int[] {0, 1, 2}); } @Test @@ -188,10 +200,15 @@ void shouldDecrementPrioritiesOnDelete() throws SQLException { insertGoalPriorities(0, new int[] {0, 1, 2}, new int[]{0, 1, 2}); insertGoalPriorities(1, new int[] {3, 4, 5}, new int[]{0, 1, 2}); - connection.createStatement().executeUpdate(""" - delete from scheduling_specification_goals - where specification_id = %d and goal_id 
= %d; - """.formatted(specificationIds[0], goalIds[1])); + try(final var statement = connection.createStatement()) { + statement.executeUpdate( + //language=sql + """ + delete from scheduler.scheduling_specification_goals + where specification_id = %d + and goal_id = %d; + """.formatted(specificationIds[0], goalIds[1])); + } checkPriorities(0, new int[]{0, 2}, new int[]{0, 1}); checkPriorities(1, new int[]{3, 4, 5}, new int[]{0, 1, 2}); } @@ -201,14 +218,20 @@ void shouldTriggerMultipleReorders() throws SQLException { insertGoalPriorities(0, new int[] {0, 1, 2}, new int[]{0, 1, 2}); insertGoalPriorities(1, new int[] {3, 4, 5}, new int[]{0, 1, 2}); - connection.createStatement().executeUpdate(""" - delete from scheduling_specification_goals - where goal_id = %d; - """.formatted(goalIds[1])); - connection.createStatement().executeUpdate(""" - delete from scheduling_specification_goals - where goal_id = %d; - """.formatted(goalIds[4])); + try(final var statement=connection.createStatement()) { + statement.executeUpdate( + //language=sql + """ + delete from scheduler.scheduling_specification_goals + where goal_id = %d; + """.formatted(goalIds[1])); + statement.executeUpdate( + //language=sql + """ + delete from scheduler.scheduling_specification_goals + where goal_id = %d; + """.formatted(goalIds[4])); + } checkPriorities( 0, new int[] {0, 2}, new int[] {0, 1}); checkPriorities(1, new int[] {3, 5}, new int[] {0, 1}); } @@ -216,44 +239,43 @@ void shouldTriggerMultipleReorders() throws SQLException { @Test void shouldNotTriggerWhenPriorityIsUnchanged() throws SQLException { insertGoalPriorities(1, new int[] {0, 1, 2}, new int[]{0, 1, 2}); - connection.createStatement().executeUpdate(""" - update scheduling_specification_goals - set specification_id = %d - where specification_id = %d; - """.formatted( - specificationIds[0], - specificationIds[1] - )); + try(final var statement = connection.createStatement()) { + statement.executeUpdate( + //language=sql + """ + update scheduler.scheduling_specification_goals + set specification_id = %d + where specification_id = %d; + """.formatted(specificationIds[0], specificationIds[1])); + } checkPriorities(0, new int[]{0, 1, 2}, new int[]{0, 1, 2}); } @Test void shouldGeneratePriorityWhenNull() throws SQLException { - connection.createStatement().executeUpdate(""" - insert into scheduling_specification_goals(specification_id, goal_id) - values (%d, %d); - """.formatted( - specificationIds[0], - goalIds[0] - )); - checkPriorities(0, new int[]{0}, new int[]{0}); - connection.createStatement().executeUpdate(""" - insert into scheduling_specification_goals(specification_id, goal_id, priority) - values (%d, %d, null); - """.formatted( - specificationIds[0], - goalIds[2] - )); - checkPriorities(0, new int[]{0, 2}, new int[]{0, 1}); - connection.createStatement().executeUpdate(""" - insert into scheduling_specification_goals(specification_id, goal_id) - values (%d, %d); - """.formatted( - specificationIds[0], - goalIds[1] - )); - checkPriorities(0, new int[]{0, 2, 1}, new int[]{0, 1, 2}); + try(final var statement = connection.createStatement()) { + statement.executeUpdate( + //language=sql + """ + insert into scheduler.scheduling_specification_goals(specification_id, goal_id) + values (%d, %d); + """.formatted(specificationIds[0], goalIds[0])); + checkPriorities(0, new int[] {0}, new int[] {0}); + statement.executeUpdate( + //language=sql + """ + insert into scheduler.scheduling_specification_goals(specification_id, goal_id, priority) + values (%d, %d, null); + 
""".formatted(specificationIds[0], goalIds[2])); + checkPriorities(0, new int[] {0, 2}, new int[] {0, 1}); + statement.executeUpdate( + //language=sql + """ + insert into scheduler.scheduling_specification_goals(specification_id, goal_id) + values (%d, %d); + """.formatted(specificationIds[0], goalIds[1])); + checkPriorities(0, new int[] {0, 2, 1}, new int[] {0, 1, 2}); + } } } - } diff --git a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/TagsTests.java b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/TagsTests.java index d74d359644..bf4492a2ca 100644 --- a/db-tests/src/test/java/gov/nasa/jpl/aerie/database/TagsTests.java +++ b/db-tests/src/test/java/gov/nasa/jpl/aerie/database/TagsTests.java @@ -8,7 +8,6 @@ import org.junit.jupiter.api.TestInstance; import java.awt.*; -import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; @@ -20,9 +19,9 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; +@SuppressWarnings("SqlSourceToSinkFlow") @TestInstance(TestInstance.Lifecycle.PER_CLASS) public class TagsTests { - private static final File initSqlScriptFile = new File("../merlin-server/sql/merlin/init.sql"); private final String constraintDefinition = "export default (): Constraint => Real.Resource(\"/fruit\").equal(Real.Resource(\"/peel\"))"; private DatabaseTestHelper helper; private MerlinDatabaseTestHelper merlinHelper; @@ -46,35 +45,13 @@ void beforeEach() throws SQLException { @AfterEach void afterEach() throws SQLException { - helper.clearTable("uploaded_file"); - helper.clearTable("mission_model"); - helper.clearTable("plan"); - helper.clearTable("activity_directive"); - helper.clearTable("simulation_template"); - helper.clearTable("simulation"); - helper.clearTable("dataset"); - helper.clearTable("plan_dataset"); - helper.clearTable("simulation_dataset"); - helper.clearTable("plan_snapshot"); - helper.clearTable("plan_latest_snapshot"); - helper.clearTable("plan_snapshot_activities"); - helper.clearTable("plan_snapshot_parent"); - helper.clearTable("anchor_validation_status"); - helper.clearTable("metadata.tags"); - helper.clearTable("metadata.activity_directive_tags"); - helper.clearTable("metadata.constraint_tags"); - helper.clearTable("metadata.constraint_definition_tags"); - helper.clearTable("metadata.snapshot_activity_tags"); + helper.clearSchema("merlin"); + helper.clearSchema("tags"); } @BeforeAll void beforeAll() throws SQLException, IOException, InterruptedException { - helper = new DatabaseTestHelper( - "aerie_merlin_test", - "Merlin Database Tests", - initSqlScriptFile - ); - helper.startDatabase(); + helper = new DatabaseTestHelper("aerie_tags_test", "Tags Tests"); setConnection(helper); merlinHelper = new MerlinDatabaseTestHelper(connection); tagsUser = merlinHelper.insertUser("TagsTest"); @@ -82,9 +59,7 @@ void beforeAll() throws SQLException, IOException, InterruptedException { @AfterAll void afterAll() throws SQLException, IOException, InterruptedException { - helper.stopDatabase(); - connection = null; - helper = null; + helper.close(); } //region Helper Functions @@ -94,11 +69,12 @@ private int insertTag(String name) throws SQLException { int insertTag(String name, String username) throws SQLException { try (final var statement = connection.createStatement()) { final var res = statement.executeQuery( + //language=sql """ - INSERT INTO metadata.tags (name, owner) - VALUES ('%s', '%s') - RETURNING id; - """.formatted(name, username) + INSERT 
INTO tags.tags (name, owner) + VALUES ('%s', '%s') + RETURNING id; + """.formatted(name, username) ); res.next(); return res.getInt("id"); @@ -108,11 +84,12 @@ int insertTag(String name, String username) throws SQLException { int insertTag(String name, String username, String color) throws SQLException { try (final var statement = connection.createStatement()) { final var res = statement.executeQuery( + //language=sql """ - INSERT INTO metadata.tags (name, color, owner) - VALUES ('%s', '%s', '%s') - RETURNING id; - """.formatted(name, color, username) + INSERT INTO tags.tags (name, color, owner) + VALUES ('%s', '%s', '%s') + RETURNING id; + """.formatted(name, color, username) ); res.next(); return res.getInt("id"); @@ -122,8 +99,9 @@ int insertTag(String name, String username, String color) throws SQLException { Tag updateTagColor(int tagId, String color) throws SQLException { try (final var statement = connection.createStatement()) { final var res = statement.executeQuery( + //language=sql """ - UPDATE metadata.tags + UPDATE tags.tags SET color = '%s' WHERE id = %d RETURNING id, name, color, owner @@ -142,29 +120,29 @@ Tag updateTagColor(int tagId, String color) throws SQLException { Tag updateTagName(int tagId, String name) throws SQLException { try(final var statement = connection.createStatement()) { final var res = statement.executeQuery( + //language=sql """ - UPDATE metadata.tags + UPDATE tags.tags SET name = '%s' WHERE id = %d RETURNING id, name, color, owner - """.formatted(name, tagId) - ); + """.formatted(name, tagId)); res.next(); return new Tag( res.getInt("id"), res.getString("name"), res.getString("color"), - res.getString("owner") - ); + res.getString("owner")); } } Tag getTag(int id) throws SQLException { try(final var statement = connection.createStatement()) { final var res = statement.executeQuery( + //language=sql """ SELECT id, name, color, owner - FROM metadata.tags + FROM tags.tags WHERE id = %d; """.formatted(id) ); @@ -181,9 +159,10 @@ ArrayList getAllTags() throws SQLException { try(final var statement = connection.createStatement()) { final var tags = new ArrayList(); final var res = statement.executeQuery( + //language=sql """ SELECT id, name, color, owner - FROM metadata.tags; + FROM tags.tags; """ ); while(res.next()) { @@ -200,8 +179,9 @@ ArrayList getAllTags() throws SQLException { void deleteTag(int id) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - DELETE FROM metadata.tags + DELETE FROM tags.tags WHERE id = %d; """.formatted(id)); } @@ -210,8 +190,9 @@ void deleteTag(int id) throws SQLException { void assignTagToActivity(int directive_id, int plan_id, int tag_id) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - INSERT INTO metadata.activity_directive_tags (plan_id, directive_id, tag_id) + INSERT INTO tags.activity_directive_tags (plan_id, directive_id, tag_id) VALUES (%d, %d, %d) """.formatted(plan_id,directive_id,tag_id)); } @@ -220,8 +201,9 @@ void assignTagToActivity(int directive_id, int plan_id, int tag_id) throws SQLEx void removeTagFromActivity(int directive_id, int plan_id, int tag_id) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - DELETE FROM metadata.activity_directive_tags + DELETE FROM tags.activity_directive_tags WHERE plan_id = %d AND directive_id = %d AND tag_id = %d; @@ -233,14 +215,15 @@ ArrayList 
getTagsOnActivity(int directive_id, int plan_id) throws SQLExcept try (final var statement = connection.createStatement()) { final var tags = new ArrayList(); final var res = statement.executeQuery( + //language=sql """ - SELECT id, name, color, owner - FROM metadata.tags t, metadata.activity_directive_tags adt - WHERE adt.tag_id = t.id - AND adt.plan_id = %d - AND adt.directive_id = %d - ORDER BY id; - """.formatted(plan_id, directive_id)); + SELECT id, name, color, owner + FROM tags.tags t, tags.activity_directive_tags adt + WHERE adt.tag_id = t.id + AND adt.plan_id = %d + AND adt.directive_id = %d + ORDER BY id; + """.formatted(plan_id, directive_id)); while (res.next()) { tags.add(new Tag( res.getInt("id"), @@ -256,9 +239,10 @@ ArrayList getTagsOnActivitySnapshot(int directive_id, int snapshot_id) thro try (final var statement = connection.createStatement()) { final var tags = new ArrayList(); final var res = statement.executeQuery( + //language=sql """ SELECT id, name, color, owner - FROM metadata.tags t, metadata.snapshot_activity_tags sat + FROM tags.tags t, tags.snapshot_activity_tags sat WHERE sat.tag_id = t.id AND sat.snapshot_id = %d AND sat.directive_id = %d @@ -278,8 +262,9 @@ ArrayList getTagsOnActivitySnapshot(int directive_id, int snapshot_id) thro void assignTagToPlan(int plan_id, int tag_id) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - INSERT INTO metadata.plan_tags (plan_id, tag_id) + INSERT INTO tags.plan_tags (plan_id, tag_id) VALUES (%d, %d) """.formatted(plan_id,tag_id)); } @@ -288,8 +273,9 @@ void assignTagToPlan(int plan_id, int tag_id) throws SQLException { void removeTagFromPlan(int plan_id, int tag_id) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - DELETE FROM metadata.plan_tags + DELETE FROM tags.plan_tags WHERE plan_id = %d AND tag_id = %d; """.formatted(plan_id, tag_id)); @@ -300,9 +286,10 @@ ArrayList getTagsOnPlan(int plan_id) throws SQLException { try(final var statement = connection.createStatement()) { final var tags = new ArrayList(); final var res = statement.executeQuery( + //language=sql """ SELECT id, name, color, owner - FROM metadata.tags t, metadata.plan_tags pt + FROM tags.tags t, tags.plan_tags pt WHERE pt.tag_id = t.id AND pt.plan_id = %d ORDER BY id; @@ -321,8 +308,9 @@ ArrayList getTagsOnPlan(int plan_id) throws SQLException { void assignTagToConstraint(int constraint_id, int tag_id) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - INSERT INTO metadata.constraint_tags (constraint_id, tag_id) + INSERT INTO tags.constraint_tags (constraint_id, tag_id) VALUES (%d, %d) """.formatted(constraint_id, tag_id)); } @@ -331,8 +319,9 @@ void assignTagToConstraint(int constraint_id, int tag_id) throws SQLException { void assignTagToConstraintRevision(int constraint_id, int constraint_revision, int tag_id) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - INSERT INTO metadata.constraint_definition_tags (constraint_id, constraint_revision, tag_id) + INSERT INTO tags.constraint_definition_tags (constraint_id, constraint_revision, tag_id) VALUES (%d, %d, %d) """.formatted(constraint_id, constraint_revision, tag_id)); } @@ -341,8 +330,9 @@ void assignTagToConstraintRevision(int constraint_id, int constraint_revision, i void removeTagFromConstraint(int 
constraint_id, int tag_id) throws SQLException { try (final var statement = connection.createStatement()) { statement.execute( + //language=sql """ - DELETE FROM metadata.constraint_tags + DELETE FROM tags.constraint_tags WHERE constraint_id = %d AND tag_id = %d; """.formatted(constraint_id, tag_id)); @@ -353,9 +343,10 @@ ArrayList getTagsOnConstraint(int constraint_id) throws SQLException { try (final var statement = connection.createStatement()) { final var tags = new ArrayList(); final var res = statement.executeQuery( + //language=sql """ SELECT id, name, color, owner - FROM metadata.tags t, metadata.constraint_tags ct + FROM tags.tags t, tags.constraint_tags ct WHERE ct.tag_id = t.id AND ct.constraint_id = %d ORDER BY id; @@ -375,9 +366,10 @@ ArrayList getTagsOnConstraintRevision(int constraint_id, int constraint_rev try (final var statement = connection.createStatement()) { final var tags = new ArrayList(); final var res = statement.executeQuery( + //language=sql """ SELECT id, name, color, owner - FROM metadata.tags t, metadata.constraint_definition_tags ct + FROM tags.tags t, tags.constraint_definition_tags ct WHERE ct.tag_id = t.id AND ct.constraint_id = %d AND ct.constraint_revision = %d @@ -396,13 +388,15 @@ ArrayList getTagsOnConstraintRevision(int constraint_id, int constraint_rev int assignTagToActivityType(int modelId, String name, int tagId) throws SQLException{ try (final var statement = connection.createStatement()) { - final var res = statement.executeQuery(""" - UPDATE public.activity_type at - SET subsystem = %d - WHERE at.model_id = %d - AND at.name = '%s' - RETURNING subsystem; - """.formatted(tagId, modelId, name)); + final var res = statement.executeQuery( + //language=sql + """ + UPDATE merlin.activity_type at + SET subsystem = %d + WHERE at.model_id = %d + AND at.name = '%s' + RETURNING subsystem; + """.formatted(tagId, modelId, name)); assertTrue(res.next()); final int subsystem = res.getInt("subsystem"); assertFalse(res.next()); @@ -413,13 +407,14 @@ int assignTagToActivityType(int modelId, String name, int tagId) throws SQLExcep void removeTagFromActivityType(int modelId, String name) throws SQLException { try (final var statement = connection.createStatement()) { final var res = statement.executeQuery( - """ - UPDATE activity_type - SET subsystem = null - WHERE model_id = %d - AND name = '%s' - RETURNING subsystem - """.formatted(modelId, name)); + //language=sql + """ + UPDATE merlin.activity_type + SET subsystem = null + WHERE model_id = %d + AND name = '%s' + RETURNING subsystem + """.formatted(modelId, name)); assertTrue(res.next()); assertNull(res.getObject("subsystem")); assertFalse(res.next()); From b79c1a9b1c5d3773c3f46e371c850198df78df7b Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Wed, 13 Mar 2024 15:29:09 -0700 Subject: [PATCH 30/36] Update E2ETests - Replace inserting a scheduling spec with fetching the automatically generated one's id --- .../gov/nasa/jpl/aerie/e2e/BindingsTests.java | 10 +--- .../nasa/jpl/aerie/e2e/SchedulingTests.java | 30 ++---------- .../jpl/aerie/e2e/TimelineRemoteTests.java | 46 +++++++++---------- .../gov/nasa/jpl/aerie/e2e/utils/GQL.java | 12 ++--- .../jpl/aerie/e2e/utils/HasuraRequests.java | 22 ++------- 5 files changed, 37 insertions(+), 83 deletions(-) diff --git a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/BindingsTests.java b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/BindingsTests.java index 7d652eedc5..36e9244035 100644 --- a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/BindingsTests.java +++ 
b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/BindingsTests.java @@ -1041,15 +1041,7 @@ void beforeEach() throws IOException, InterruptedException { duration, plan_start_timestamp, admin.session()); - - // Insert the Scheduling Spec - schedulingSpecId = hasura.insertSchedulingSpecification( - planId, - hasura.getPlanRevision(planId), - plan_start_timestamp, - plan_end_timestamp, - JsonValue.EMPTY_JSON_OBJECT, - true); + schedulingSpecId = hasura.getSchedulingSpecId(planId); } @AfterEach diff --git a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/SchedulingTests.java b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/SchedulingTests.java index ada11145be..fceb29b69a 100644 --- a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/SchedulingTests.java +++ b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/SchedulingTests.java @@ -114,15 +114,7 @@ void beforeEach() throws IOException { "Test Plan - Scheduling Tests", "24:00:00", planStartTimestamp); - - // Insert Scheduling Spec - schedulingSpecId = hasura.insertSchedulingSpecification( - planId, - hasura.getPlanRevision(planId), - planStartTimestamp, - planEndTimestamp, - JsonValue.EMPTY_JSON_OBJECT, - false); + schedulingSpecId = hasura.getSchedulingSpecId(planId); } @AfterEach @@ -536,15 +528,7 @@ void beforeEach() throws IOException { "2136:00:00", planStartTimestamp); - // Insert Scheduling Spec - schedulingSpecId = hasura.insertSchedulingSpecification( - longPlanId, - hasura.getPlanRevision(longPlanId), - planStartTimestamp, - "2023-090T00:00:00.000", - JsonValue.EMPTY_JSON_OBJECT, - false); - + schedulingSpecId = hasura.getSchedulingSpecId(longPlanId); // Add Goal cardinalityGoalId = hasura.createSchedulingSpecGoal( "Cardinality and Decomposition Scheduling Test Goal", @@ -739,15 +723,7 @@ void beforeEach() throws IOException, InterruptedException { "Foo Plan - Scheduling Tests", "720:00:00", planStartTimestamp); - - // Insert Scheduling Spec - fooSchedulingSpecId = hasura.insertSchedulingSpecification( - fooPlan, - hasura.getPlanRevision(fooPlan), - planStartTimestamp, - "2023-01-31T00:00:00+00:00", - JsonValue.EMPTY_JSON_OBJECT, - false); + fooSchedulingSpecId = hasura.getSchedulingSpecId(fooPlan); // Add Goal fooGoalId = hasura.createSchedulingSpecGoal( diff --git a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/TimelineRemoteTests.java b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/TimelineRemoteTests.java index dfea0dddc3..3ffe3431cd 100644 --- a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/TimelineRemoteTests.java +++ b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/TimelineRemoteTests.java @@ -44,21 +44,39 @@ public class TimelineRemoteTests { private Connection connection; private HikariDataSource dataSource; @BeforeAll - void beforeAll() { + void beforeAll() throws SQLException { // Setup Requests playwright = Playwright.create(); hasura = new HasuraRequests(playwright); + + // Connect to the database + final var hikariConfig = new HikariConfig(); + + hikariConfig.setDataSourceClassName("org.postgresql.ds.PGSimpleDataSource"); + + hikariConfig.addDataSourceProperty("serverName", "localhost"); + hikariConfig.addDataSourceProperty("portNumber", 5432); + hikariConfig.addDataSourceProperty("databaseName", "aerie"); + hikariConfig.addDataSourceProperty("applicationName", "Merlin Server"); + hikariConfig.setUsername(System.getenv("AERIE_USERNAME")); + hikariConfig.setPassword(System.getenv("AERIE_PASSWORD")); + + hikariConfig.setConnectionInitSql("set time zone 'UTC'"); + dataSource = new HikariDataSource(hikariConfig); + connection = 
dataSource.getConnection(); } @AfterAll - void afterAll() { + void afterAll() throws SQLException { // Cleanup Requests hasura.close(); playwright.close(); + connection.close(); + dataSource.close(); } @BeforeEach - void beforeEach() throws IOException, InterruptedException, SQLException { + void beforeEach() throws IOException, InterruptedException { // Insert the Mission Model try (final var gateway = new GatewayRequests(playwright)) { modelId = hasura.createMissionModel( @@ -81,33 +99,13 @@ void beforeEach() throws IOException, InterruptedException, SQLException { Json.createObjectBuilder().add("biteSize", 1).build()); simDatasetId = hasura.awaitSimulation(planId).simDatasetId(); - - // Connect to the database - - final var hikariConfig = new HikariConfig(); - - hikariConfig.setDataSourceClassName("org.postgresql.ds.PGSimpleDataSource"); - - hikariConfig.addDataSourceProperty("serverName", "localhost"); - hikariConfig.addDataSourceProperty("portNumber", 5432); - hikariConfig.addDataSourceProperty("databaseName", "aerie_merlin"); - hikariConfig.addDataSourceProperty("applicationName", "Merlin Server"); - hikariConfig.setUsername(System.getenv("AERIE_USERNAME")); - hikariConfig.setPassword(System.getenv("AERIE_PASSWORD")); - - hikariConfig.setConnectionInitSql("set time zone 'UTC'"); - dataSource = new HikariDataSource(hikariConfig); - connection = dataSource.getConnection(); - plan = new AeriePostgresPlan(connection, simDatasetId); } @AfterEach - void afterEach() throws IOException, SQLException { + void afterEach() throws IOException { hasura.deletePlan(planId); hasura.deleteMissionModel(modelId); - connection.close(); - dataSource.close(); } @Test diff --git a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/utils/GQL.java b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/utils/GQL.java index 39cc716fa0..e8ce00eafb 100644 --- a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/utils/GQL.java +++ b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/utils/GQL.java @@ -369,6 +369,12 @@ query GetSchedulingRequest($analysisId: Int!) { status } }"""), + GET_SCHEDULING_SPECIFICATION_ID(""" + query GetSchedulingSpec($planId: Int!) { + scheduling_spec: scheduling_specification(where: {plan_id: {_eq: $planId}}) { + id + } + }"""), GET_SIMULATION_CONFIGURATION(""" query GetSimConfig($planId: Int!) { sim_config: simulation(where: {plan_id: {_eq:$planId}}) { @@ -457,12 +463,6 @@ mutation insertProfileSegment($segments: [profile_segment_insert_input!]!){ affected_rows } }"""), - INSERT_SCHEDULING_SPECIFICATION(""" - mutation MakeSchedulingSpec($scheduling_spec: scheduling_specification_insert_input!) 
{ - scheduling_spec: insert_scheduling_specification_one(object: $scheduling_spec) { - id - } - }"""), INSERT_SIMULATION_DATASET(""" mutation InsertSimulationDataset($simulationDataset:simulation_dataset_insert_input!){ simulation_dataset: insert_simulation_dataset_one(object: $simulationDataset) { diff --git a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/utils/HasuraRequests.java b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/utils/HasuraRequests.java index 9e9e116500..2e8c0e5f6e 100644 --- a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/utils/HasuraRequests.java +++ b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/utils/HasuraRequests.java @@ -591,23 +591,11 @@ public void deleteSchedulingGoal(int goalId) throws IOException { makeRequest(GQL.DELETE_SCHEDULING_GOAL, variables); } - public int insertSchedulingSpecification( - int planId, - int planRevision, - String horizonStart, - String horizonEnd, - JsonObject simArguments, - boolean analysisOnly - ) throws IOException { - final var schedulingSpecInputBuilder = Json.createObjectBuilder() - .add("plan_id", planId) - .add("plan_revision", planRevision) - .add("horizon_start", horizonStart) - .add("horizon_end", horizonEnd) - .add("simulation_arguments", simArguments) - .add("analysis_only", analysisOnly); - final var variables = Json.createObjectBuilder().add("scheduling_spec", schedulingSpecInputBuilder).build(); - return makeRequest(GQL.INSERT_SCHEDULING_SPECIFICATION, variables).getJsonObject("scheduling_spec").getInt("id"); + public int getSchedulingSpecId(int planId) throws IOException { + final var variables = Json.createObjectBuilder().add("planId", planId).build(); + final var spec = makeRequest(GQL.GET_SCHEDULING_SPECIFICATION_ID, variables).getJsonArray("scheduling_spec"); + assertEquals(1, spec.size()); + return spec.getJsonObject(0).getInt("id"); } public void updatePlanRevisionSchedulingSpec(int planId) throws IOException { From cdaa8921022f0488171e252c49d8df74b1cacbd1 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Wed, 13 Mar 2024 15:57:24 -0700 Subject: [PATCH 31/36] Support one DB user per Aerie service Update DB Envvars --- .env.template | 18 ++- deployment/.env | 14 +- deployment/Environment.md | 125 +++++++++--------- deployment/docker-compose.yml | 67 +++++----- .../kubernetes/aerie-gateway-deployment.yaml | 14 +- .../kubernetes/aerie-merlin-deployment.yaml | 10 +- .../aerie-merlin-worker-deployment.yaml | 14 +- .../aerie-scheduler-deployment.yaml | 10 +- .../aerie-scheduler-worker-deployment.yaml | 14 +- .../aerie-sequencing-deployment.yaml | 10 +- deployment/kubernetes/hasura-deployment.yaml | 10 +- deployment/postgres-init-db/init-aerie.sh | 21 +++ deployment/postgres-init-db/sql/init.sql | 3 + .../postgres-init-db/sql/init_db_users.sql | 93 +++++++++++++ docker-compose.yml | 80 +++++------ e2e-tests/docker-compose-many-workers.yml | 72 +++++----- e2e-tests/docker-compose-test.yml | 89 +++++++------ .../jpl/aerie/e2e/TimelineRemoteTests.java | 4 +- .../aerie/merlin/server/AerieAppDriver.java | 6 +- .../merlin/worker/MerlinWorkerAppDriver.java | 10 +- .../scheduler/server/SchedulerAppDriver.java | 6 +- .../worker/SchedulerWorkerAppDriver.java | 10 +- sequencing-server/src/db.ts | 11 +- sequencing-server/src/env.ts | 36 +++-- 24 files changed, 438 insertions(+), 309 deletions(-) create mode 100644 deployment/postgres-init-db/sql/init_db_users.sql diff --git a/.env.template b/.env.template index 9222e8d712..c33cfd3fae 100644 --- a/.env.template +++ b/.env.template @@ -1,6 +1,20 @@ AERIE_USERNAME= 
AERIE_PASSWORD= + +GATEWAY_USERNAME= +GATEWAY_PASSWORD= + +MERLIN_USERNAME= +MERLIN_PASSWORD= + +SCHEDULER_USERNAME= +SCHEDULER_PASSWORD= + +SEQUENCING_USERNAME= +SEQUENCING_PASSWORD= + +POSTGRES_PASSWORD= +POSTGRES_USER= + HASURA_GRAPHQL_ADMIN_SECRET= HASURA_GRAPHQL_JWT_SECRET= -POSTGRES_USER= -POSTGRES_PASSWORD= diff --git a/deployment/.env b/deployment/.env index 525b1379af..d0296bb1ee 100644 --- a/deployment/.env +++ b/deployment/.env @@ -3,9 +3,21 @@ DOCKER_TAG=latest # Provide Usernames and Passwords Below -AERIE_USERNAME= +AERIE_USERNAME=aerie AERIE_PASSWORD= +GATEWAY_USERNAME=gateway +GATEWAY_PASSWORD= + +MERLIN_USERNAME=merlin +MERLIN_PASSWORD= + +SCHEDULER_USERNAME=scheduler +SCHEDULER_PASSWORD= + +SEQUENCING_USERNAME=sequencing +SEQUENCING_PASSWORD= + HASURA_GRAPHQL_ADMIN_SECRET= HASURA_GRAPHQL_JWT_SECRET= diff --git a/deployment/Environment.md b/deployment/Environment.md index e9018dd9ac..f47f379e38 100644 --- a/deployment/Environment.md +++ b/deployment/Environment.md @@ -16,77 +16,72 @@ See the [environment variables document](https://github.com/NASA-AMMOS/aerie-gat ## Aerie Merlin -| Name | Description | Type | Default | -| ------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- | -------- | ------------------------------- | -| `JAVA_OPTS` | Configuration for Merlin's logging level and output file | `string` | log level: warn. output: stderr | -| `MERLIN_PORT` | Port number for the Merlin server | `number` | 27183 | -| `MERLIN_LOCAL_STORE` | Local storage for Merlin in the container | `string` | /usr/src/app/merlin_file_store | -| `MERLIN_DB_SERVER` | The DB instance that Merlin will connect with | `string` | | -| `MERLIN_DB_PORT` | The DB instance port number that Merlin will connect with | `number` | 5432 | -| `MERLIN_DB_USER` | Username of the DB instance | `string` | | -| `MERLIN_DB_PASSWORD` | Password of the DB instance | `string` | | -| `MERLIN_DB` | The DB for Merlin. | `string` | aerie_merlin | -| `UNTRUE_PLAN_START` | Temporary solution to provide plan start time to models, should be set to a time that models will not fail to initialize on | `string` | | -| `ENABLE_CONTINUOUS_VALIDATION_THREAD` | Flag to enable a worker thread that continously computes and caches activity directive validation results | `boolean`| true | -| `VALIDATION_THREAD_POLLING_PERIOD` | Number of milliseconds the above worker thread should wait before querying the database for new, unvalidated directives | `string` | 500 | +| Name | Description | Type | Default | +|---------------------------------------|-----------------------------------------------------------------------------------------------------------------------------|-----------|---------------------------------| +| `AERIE_DB_HOST` | The DB instance that Merlin will connect with | `string` | postgres | +| `AERIE_DB_PORT` | The DB instance port number that Merlin will connect with | `number` | 5432 | +| `JAVA_OPTS` | Configuration for Merlin's logging level and output file | `string` | log level: warn. 
output: stderr | +| `MERLIN_PORT` | Port number for the Merlin server | `number` | 27183 | +| `MERLIN_LOCAL_STORE` | Local storage for Merlin in the container | `string` | /usr/src/app/merlin_file_store | +| `MERLIN_DB_USER` | Username of the Merlin DB User | `string` | merlin | +| `MERLIN_DB_PASSWORD` | Password of the Merlin DB User | `string` | | +| `UNTRUE_PLAN_START` | Temporary solution to provide plan start time to models, should be set to a time that models will not fail to initialize on | `string` | | +| `ENABLE_CONTINUOUS_VALIDATION_THREAD` | Flag to enable a worker thread that continuously computes and caches activity directive validation results | `boolean` | true | +| `VALIDATION_THREAD_POLLING_PERIOD` | Number of milliseconds the above worker thread should wait before querying the database for new, unvalidated directives | `string` | 500 | ## Aerie Merlin Worker -| Name | Description | Type | Default | -| --------------------------- | --------------------------------------------------------------------------------------------------------------------------- | -------- | -------------------------------------------- | -| `JAVA_OPTS` | Configuration for Merlin's logging level and output file | `string` | log level: warn. output: stderr | -| `MERLIN_WORKER_LOCAL_STORE` | The local storage as for the Merlin container | `string` | /usr/src/app/merlin_file_store | -| `MERLIN_WORKER_DB_SERVER` | The DB instance that Merlin will connect with | `string` | (this must the same as the Merlin container) | -| `MERLIN_WORKER_DB_PORT` | The DB instance port number that Merlin will connect with | `number` | (this must the same as the Merlin container) | -| `MERLIN_WORKER_DB_USER` | Username of the DB instance | `string` | (this must the same as the Merlin container) | -| `MERLIN_WORKER_DB_PASSWORD` | Password of the DB instance | `string` | (this must the same as the Merlin container) | -| `MERLIN_WORKER_DB` | The DB for Merlin. | `string` | (this must the same as the Merlin container) | -| `SIMULATION_PROGRESS_POLL_PERIOD_MILLIS` | Cadence at which the worker will report simulation progress to the database. | `number` | 5000 | -| `UNTRUE_PLAN_START` | Temporary solution to provide plan start time to models, should be set to a time that models will not fail to initialize on | `string` | | +| Name | Description | Type | Default | +|------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------|----------|----------------------------------------------| +| `AERIE_DB_HOST` | The DB instance that Merlin will connect with | `string` | postgres | +| `AERIE_DB_PORT` | The DB instance port number that Merlin will connect with | `number` | 5432 | +| `JAVA_OPTS` | Configuration for Merlin's logging level and output file | `string` | log level: warn. output: stderr | +| `MERLIN_WORKER_LOCAL_STORE` | The local storage as for the Merlin container | `string` | /usr/src/app/merlin_file_store | +| `MERLIN_DB_USER` | Username of the Merlin DB User | `string` | merlin | +| `MERLIN_DB_PASSWORD` | Password of the Merlin DB User | `string` | (this must the same as the Merlin container) | +| `SIMULATION_PROGRESS_POLL_PERIOD_MILLIS` | Cadence at which the worker will report simulation progress to the database. 
| `number` | 5000 | +| `UNTRUE_PLAN_START` | Temporary solution to provide plan start time to models, should be set to a time that models will not fail to initialize on | `string` | | ## Aerie Scheduler -| Name | Description | Type | Default | -| ----------------------------- | --------------------------------------------------------------- | -------- | ------------------------------- | -| `HASURA_GRAPHQL_ADMIN_SECRET` | The admin secret for Hasura which gives admin access if used. | `string` | | -| `JAVA_OPTS` | Configuration for the scheduler's logging level and output file | `string` | log level: warn. output: stderr | -| `MERLIN_GRAPHQL_URL` | URI of the Merlin graphql interface to call | `string` | http://hasura:8080/v1/graphql | -| `SCHEDULER_DB` | The DB for scheduler | `string` | aerie_scheduler | -| `SCHEDULER_DB_PASSWORD` | Password of the DB instance | `string` | | -| `SCHEDULER_DB_PORT` | The DB instance port number that scheduler will connect with | `number` | 5432 | -| `SCHEDULER_DB_SERVER` | The DB instance that scheduler will connect with | `string` | | -| `SCHEDULER_DB_USER` | Username of the DB instance | `string` | | -| `SCHEDULER_PORT` | Port number for the scheduler server | `number` | 27185 | +| Name | Description | Type | Default | +|-------------------------------|------------------------------------------------------------------|----------|---------------------------------| +| `AERIE_DB_HOST` | The DB instance that the Scheduler will connect with | `string` | postgres | +| `AERIE_DB_PORT` | The DB instance port number that the Scheduler will connect with | `number` | 5432 | +| `HASURA_GRAPHQL_ADMIN_SECRET` | The admin secret for Hasura which gives admin access if used. | `string` | | +| `JAVA_OPTS` | Configuration for the scheduler's logging level and output file | `string` | log level: warn. output: stderr | +| `MERLIN_GRAPHQL_URL` | URI of the Merlin graphql interface to call | `string` | http://hasura:8080/v1/graphql | +| `SCHEDULER_DB_USER` | Username of the Scheduler DB User | `string` | scheduler | +| `SCHEDULER_DB_PASSWORD` | Password of the Scheduler DB User | `string` | | +| `SCHEDULER_PORT` | Port number for the scheduler server | `number` | 27185 | ## Aerie Scheduler Worker | Name | Description | Type | Default | -| ----------------------------- | --------------------------------------------------------------------- | -------- | -------------------------------------------------- | +|-------------------------------|-----------------------------------------------------------------------|----------|----------------------------------------------------| +| `AERIE_DB_HOST` | The DB instance that the Scheduler will connect with | `string` | postgres | +| `AERIE_DB_PORT` | The DB instance port number that the Scheduler will connect with | `number` | 5432 | | `HASURA_GRAPHQL_ADMIN_SECRET` | The admin secret for Hasura which gives admin access if used. | `string` | | | `JAVA_OPTS` | Configuration for the scheduler's logging level and output file | `string` | log level: warn. 
output: stderr | | `MERLIN_GRAPHQL_URL` | URI of the Merlin graphql interface to call | `string` | http://hasura:8080/v1/graphql | | `MERLIN_LOCAL_STORE` | Local storage for Merlin in the container (for backdoor jar access) | `string` | /usr/src/app/merlin_file_store | -| `SCHEDULER_DB` | The DB for scheduler | `string` | aerie_scheduler | -| `SCHEDULER_DB_PASSWORD` | Password of the DB instance | `string` | | -| `SCHEDULER_DB_PORT` | The DB instance port number that scheduler will connect with | `number` | 5432 | -| `SCHEDULER_DB_SERVER` | The DB instance that scheduler will connect with | `string` | | -| `SCHEDULER_DB_USER` | Username of the DB instance | `string` | | -| `SCHEDULER_OUTPUT_MODE` | how scheduler output is sent back to aerie | `string` | UpdateInputPlanWithNewActivities | +| `SCHEDULER_DB_USER` | Username of the Scheduler DB User | `string` | scheduler | +| `SCHEDULER_DB_PASSWORD` | Password of the Scheduler DB User | `string` | | +| `SCHEDULER_OUTPUT_MODE` | How scheduler output is sent back to Aerie | `string` | UpdateInputPlanWithNewActivities | | `SCHEDULER_RULES_JAR` | Jar file to load scheduling rules from (until user input to database) | `string` | /usr/src/app/merlin_file_store/scheduler_rules.jar | ## Aerie Sequencing | Name | Description | Type | Default | -| ----------------------------- | ------------------------------------------------------------- | -------- | ---------------------------------- | +|-------------------------------|---------------------------------------------------------------|----------|------------------------------------| +| `AERIE_DB_HOST` | Hostname of Postgres instance | `string` | postgres | +| `AERIE_DB_PORT` | Port of Postgres instance | `number` | 5432 | | `HASURA_GRAPHQL_ADMIN_SECRET` | The admin secret for Hasura which gives admin access if used. | `string` | | | `LOG_FILE` | Either an output filepath to log to, or 'console' | `string` | console | | `LOG_LEVEL` | Logging level for filtering logs | `string` | warn | | `MERLIN_GRAPHQL_URL` | URI of the Aerie GraphQL API | `string` | http://hasura:8080/v1/graphql | -| `SEQUENCING_DB` | Name of sequencing Postgres database | `string` | aerie_sequencing | -| `SEQUENCING_DB_SERVER` | Hostname of Postgres instance | `string` | | -| `SEQUENCING_DB_PASSWORD` | Password of Postgres instance | `string` | | -| `SEQUENCING_DB_PORT` | Port of Postgres instance | `number` | 5432 | -| `SEQUENCING_DB_USER` | User of Postgres instance | `string` | | +| `SEQUENCING_DB_USER` | Username of the Sequencing DB User | `string` | sequencing | +| `SEQUENCING_DB_PASSWORD` | Password of the Sequencing DB User | `string` | | | `SEQUENCING_LOCAL_STORE` | Local storage file storage in the container | `string` | /usr/src/app/sequencing_file_store | | `SEQUENCING_SERVER_PORT` | Port the server listens on | `number` | 27184 | @@ -96,25 +91,35 @@ See the [environment variables document](https://github.com/NASA-AMMOS/aerie-ui/ ## Hasura -| Name | Description | Type | -| ------------------------------- | ------------------------------------------------------------- | -------- | -| `AERIE_MERLIN_DATABASE_URL` | Url of the Merlin Postgres database. | `string` | -| `AERIE_MERLIN_URL` | Url of the Merlin service. | `string` | -| `AERIE_SCHEDULER_DATABASE_URL` | Url of the scheduler Postgres database. | `string` | -| `AERIE_SCHEDULER_URL` | Url of the scheduler service. | `string` | -| `AERIE_SEQUENCING_DATABASE_URL` | Url of the sequencing Postgres database. 
| `string` |
-| `AERIE_SEQUENCING_URL` | Url of the sequencing service. | `string` |
-| `AERIE_UI_DATABASE_URL` | Url of the UI Postgres database | `string` |
-| `HASURA_GRAPHQL_ADMIN_SECRET` | The admin secret for Hasura which gives admin access if used. | `string` |
-| `HASURA_GRAPHQL_JWT_SECRET` | The JWT secret for JSON web token auth. Also in Gateway. | `string` |
+| Name | Description | Type |
+|-------------------------------|---------------------------------------------------------------|----------|
+| `AERIE_DATABASE_URL` | Url of the Aerie Postgres database. | `string` |
+| `AERIE_MERLIN_URL` | Url of the Merlin service. | `string` |
+| `AERIE_SCHEDULER_URL` | Url of the scheduler service. | `string` |
+| `AERIE_SEQUENCING_URL` | Url of the sequencing service. | `string` |
+| `HASURA_GRAPHQL_ADMIN_SECRET` | The admin secret for Hasura which gives admin access if used. | `string` |
+| `HASURA_GRAPHQL_JWT_SECRET` | The JWT secret for JSON web token auth. Also in Gateway. | `string` |
 Additionally, Hasura provides documentation on its own environment variables you can use to fine-tune your deployment:
 1. [graphql-engine](https://hasura.io/docs/latest/graphql/core/deployment/graphql-engine-flags/reference.html#server-flag-reference)
-1. [metadata and migrations](https://hasura.io/docs/latest/graphql/core/migrations/advanced/auto-apply-migrations.html#applying-migrations)
+2. [metadata and migrations](https://hasura.io/docs/latest/graphql/core/migrations/advanced/auto-apply-migrations.html#applying-migrations)
 ## Postgres
 The default Aerie deployment uses the default Postgres environment. See the [Docker Postgres documentation](https://hub.docker.com/_/postgres) for more complete information on those environment variables and how to use them.
+| Name | Description | Type |
+|--------------------------|--------------------------------------------------------|----------|
+| `AERIE_USERNAME` | Username of the Aerie DB User, which owns the Aerie DB | `string` |
+| `AERIE_PASSWORD` | Password of the Aerie DB User, which owns the Aerie DB | `string` |
+| `GATEWAY_DB_USER` | Username of the Gateway DB User | `string` |
+| `GATEWAY_DB_PASSWORD` | Password of the Gateway DB User | `string` |
+| `MERLIN_DB_USER` | Username of the Merlin DB User | `string` |
+| `MERLIN_DB_PASSWORD` | Password of the Merlin DB User | `string` |
+| `SCHEDULER_DB_USER` | Username of the Scheduler DB User. | `string` |
+| `SCHEDULER_DB_PASSWORD` | Password of the Scheduler DB User | `string` |
+| `SEQUENCING_DB_USER` | Username of the Sequencing DB User.
| `string` | +| `SEQUENCING_DB_PASSWORD` | Password of the Sequencing DB User | `string` | + [svelte-kit-adapter-node-docs]: https://github.com/sveltejs/kit/blob/master/packages/adapter-node/README.md diff --git a/deployment/docker-compose.yml b/deployment/docker-compose.yml index 18f0d4748e..2a7e74b564 100755 --- a/deployment/docker-compose.yml +++ b/deployment/docker-compose.yml @@ -10,11 +10,10 @@ services: LOG_FILE: console LOG_LEVEL: warn PORT: 9000 - POSTGRES_AERIE_MERLIN_DB: aerie_merlin - POSTGRES_HOST: postgres - POSTGRES_PASSWORD: "${AERIE_PASSWORD}" - POSTGRES_PORT: 5432 - POSTGRES_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + GATEWAY_DB_USER: "${GATEWAY_USERNAME}" + GATEWAY_DB_PASSWORD: "${GATEWAY_PASSWORD}" image: "${REPOSITORY_DOCKER_URL}/aerie-gateway:${DOCKER_TAG}" ports: ["9000:9000"] restart: always @@ -28,11 +27,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" HASURA_GRAPHQL_URL: http://hasura:8080/v1/graphql - MERLIN_DB: "aerie_merlin" - MERLIN_DB_PASSWORD: "${AERIE_PASSWORD}" - MERLIN_DB_PORT: 5432 - MERLIN_DB_SERVER: postgres - MERLIN_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_LOCAL_STORE: /usr/src/app/merlin_file_store MERLIN_PORT: 27183 JAVA_OPTS: > @@ -50,11 +48,10 @@ services: container_name: aerie_merlin_worker depends_on: ["postgres"] environment: - MERLIN_WORKER_DB: "aerie_merlin" - MERLIN_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - MERLIN_WORKER_DB_PORT: 5432 - MERLIN_WORKER_DB_SERVER: postgres - MERLIN_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_WORKER_LOCAL_STORE: /usr/src/app/merlin_file_store SIMULATION_PROGRESS_POLL_PERIOD_MILLIS: 2000 JAVA_OPTS: > @@ -75,11 +72,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_DB: "aerie_scheduler" - SCHEDULER_DB_PASSWORD: "${AERIE_PASSWORD}" - SCHEDULER_DB_PORT: 5432 - SCHEDULER_DB_SERVER: postgres - SCHEDULER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_PORT: 27185 JAVA_OPTS: > -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN @@ -97,11 +93,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_WORKER_DB: "aerie_scheduler" - SCHEDULER_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - SCHEDULER_WORKER_DB_PORT: 5432 - SCHEDULER_WORKER_DB_SERVER: postgres - SCHEDULER_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_OUTPUT_MODE: UpdateInputPlanWithNewActivities MERLIN_LOCAL_STORE: /usr/src/app/merlin_file_store SCHEDULER_RULES_JAR: /usr/src/app/merlin_file_store/scheduler_rules.jar @@ -124,11 +119,10 @@ services: LOG_LEVEL: warn MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql SEQUENCING_SERVER_PORT: 27184 - SEQUENCING_DB: aerie_sequencing - SEQUENCING_DB_PASSWORD: "${AERIE_PASSWORD}" - SEQUENCING_DB_PORT: 5432 - SEQUENCING_DB_SERVER: postgres - SEQUENCING_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + 
SEQUENCING_DB_USER: "${SEQUENCING_USERNAME}" + SEQUENCING_DB_PASSWORD: "${SEQUENCING_PASSWORD}" SEQUENCING_LOCAL_STORE: /usr/src/app/sequencing_file_store image: "${REPOSITORY_DOCKER_URL}/aerie-sequencing:${DOCKER_TAG}" ports: ["27184:27184"] @@ -157,13 +151,10 @@ services: container_name: hasura depends_on: ["postgres"] environment: - AERIE_MERLIN_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_merlin" + AERIE_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie?options=-c%20search_path%3Dutil_functions%2Chasura%2Cpermissions%2Ctags%2Cmerlin%2Cscheduler%2Csequencing%2Cpublic" AERIE_MERLIN_URL: "http://aerie_merlin:27183" - AERIE_SCHEDULER_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_scheduler" AERIE_SCHEDULER_URL: "http://aerie_scheduler:27185" - AERIE_SEQUENCING_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_sequencing" AERIE_SEQUENCING_URL: "http://aerie_sequencing:27184" - AERIE_UI_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_ui" HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" HASURA_GRAPHQL_DEV_MODE: "true" HASURA_GRAPHQL_ENABLE_CONSOLE: "true" @@ -187,6 +178,14 @@ services: POSTGRES_USER: "${POSTGRES_USER}" AERIE_USERNAME: "${AERIE_USERNAME}" AERIE_PASSWORD: "${AERIE_PASSWORD}" + GATEWAY_DB_USER: "${GATEWAY_USERNAME}" + GATEWAY_DB_PASSWORD: "${GATEWAY_PASSWORD}" + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" + SEQUENCING_DB_USER: "${SEQUENCING_USERNAME}" + SEQUENCING_DB_PASSWORD: "${SEQUENCING_PASSWORD}" image: postgres:14.8 ports: ["5432:5432"] restart: always diff --git a/deployment/kubernetes/aerie-gateway-deployment.yaml b/deployment/kubernetes/aerie-gateway-deployment.yaml index ce69ddd30f..9f3cefc68f 100644 --- a/deployment/kubernetes/aerie-gateway-deployment.yaml +++ b/deployment/kubernetes/aerie-gateway-deployment.yaml @@ -37,22 +37,20 @@ spec: value: warn - name: PORT value: "9000" - - name: POSTGRES_AERIE_MERLIN_DB - value: aerie_merlin - - name: POSTGRES_HOST + - name: AERIE_DB_HOST value: postgres - - name: POSTGRES_PORT + - name: AERIE_DB_PORT value: "5432" - - name: POSTGRES_USER + - name: GATEWAY_DB_USER valueFrom: secretKeyRef: name: dev-env - key: AERIE_USERNAME - - name: POSTGRES_PASSWORD + key: GATEWAY_USERNAME + - name: GATEWAY_DB_PASSWORD valueFrom: secretKeyRef: name: dev-env - key: AERIE_PASSWORD + key: GATEWAY_PASSWORD volumes: - name: aerie-file-store persistentVolumeClaim: diff --git a/deployment/kubernetes/aerie-merlin-deployment.yaml b/deployment/kubernetes/aerie-merlin-deployment.yaml index 4149e67e0e..727e56fda8 100644 --- a/deployment/kubernetes/aerie-merlin-deployment.yaml +++ b/deployment/kubernetes/aerie-merlin-deployment.yaml @@ -25,11 +25,9 @@ spec: - name: JAVA_OPTS value: | -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN -Dorg.slf4j.simpleLogger.logFile=System.err - - name: MERLIN_DB - value: aerie_merlin - - name: MERLIN_DB_PORT + - name: AERIE_DB_PORT value: "5432" - - name: MERLIN_DB_SERVER + - name: AERIE_DB_HOST value: postgres - name: MERLIN_LOCAL_STORE value: /usr/src/app/merlin_file_store @@ -41,12 +39,12 @@ spec: valueFrom: secretKeyRef: name: dev-env - key: AERIE_USERNAME + key: MERLIN_USERNAME - name: MERLIN_DB_PASSWORD valueFrom: secretKeyRef: name: dev-env - key: AERIE_PASSWORD + key: MERLIN_PASSWORD restartPolicy: Always 
volumes: - name: aerie-file-store diff --git a/deployment/kubernetes/aerie-merlin-worker-deployment.yaml b/deployment/kubernetes/aerie-merlin-worker-deployment.yaml index 1bf466f27e..a899b1b6ba 100644 --- a/deployment/kubernetes/aerie-merlin-worker-deployment.yaml +++ b/deployment/kubernetes/aerie-merlin-worker-deployment.yaml @@ -27,26 +27,24 @@ spec: - name: JAVA_OPTS value: | -Dorg.slf4j.simpleLogger.defaultLogLevel=INFO -Dorg.slf4j.simpleLogger.log.com.zaxxer.hikari=WARN -Dorg.slf4j.simpleLogger.logFile=System.err - - name: MERLIN_WORKER_DB - value: aerie_merlin - - name: MERLIN_WORKER_DB_PORT + - name: AERIE_DB_PORT value: "5432" - - name: MERLIN_WORKER_DB_SERVER + - name: AERIE_DB_HOST value: postgres - name: MERLIN_WORKER_LOCAL_STORE value: /usr/src/app/merlin_file_store - name: UNTRUE_PLAN_START value: "2000-01-01T11:58:55.816Z" - - name: MERLIN_WORKER_DB_USER + - name: MERLIN_DB_USER valueFrom: secretKeyRef: name: dev-env - key: AERIE_USERNAME - - name: MERLIN_WORKER_DB_PASSWORD + key: MERLIN_USERNAME + - name: MERLIN_DB_PASSWORD valueFrom: secretKeyRef: name: dev-env - key: AERIE_PASSWORD + key: MERLIN_PASSWORD restartPolicy: Always volumes: - name: aerie-file-store diff --git a/deployment/kubernetes/aerie-scheduler-deployment.yaml b/deployment/kubernetes/aerie-scheduler-deployment.yaml index 085870bf6e..1c5d4646a9 100644 --- a/deployment/kubernetes/aerie-scheduler-deployment.yaml +++ b/deployment/kubernetes/aerie-scheduler-deployment.yaml @@ -32,11 +32,9 @@ spec: -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN -Dorg.slf4j.simpleLogger.logFile=System.err - name: MERLIN_GRAPHQL_URL value: http://hasura:8080/v1/graphql - - name: SCHEDULER_DB - value: aerie_scheduler - - name: SCHEDULER_DB_PORT + - name: AERIE_DB_PORT value: "5432" - - name: SCHEDULER_DB_SERVER + - name: AERIE_DB_HOST value: postgres - name: SCHEDULER_PORT value: "27185" @@ -44,12 +42,12 @@ spec: valueFrom: secretKeyRef: name: dev-env - key: AERIE_USERNAME + key: SCHEDULER_USERNAME - name: SCHEDULER_DB_PASSWORD valueFrom: secretKeyRef: name: dev-env - key: AERIE_PASSWORD + key: SCHEDULER_PASSWORD restartPolicy: Always volumes: - name: aerie-file-store diff --git a/deployment/kubernetes/aerie-scheduler-worker-deployment.yaml b/deployment/kubernetes/aerie-scheduler-worker-deployment.yaml index 18bd402d55..ffac8bb87b 100644 --- a/deployment/kubernetes/aerie-scheduler-worker-deployment.yaml +++ b/deployment/kubernetes/aerie-scheduler-worker-deployment.yaml @@ -40,22 +40,20 @@ spec: value: UpdateInputPlanWithNewActivities - name: SCHEDULER_RULES_JAR value: /usr/src/app/merlin_file_store/scheduler_rules.jar - - name: SCHEDULER_WORKER_DB - value: aerie_scheduler - - name: SCHEDULER_WORKER_DB_PORT + - name: AERIE_DB_PORT value: "5432" - - name: SCHEDULER_WORKER_DB_SERVER + - name: AERIE_DB_HOST value: postgres - - name: SCHEDULER_WORKER_DB_USER + - name: SCHEDULER_DB_USER valueFrom: secretKeyRef: name: dev-env - key: AERIE_USERNAME - - name: SCHEDULER_WORKER_DB_PASSWORD + key: SCHEDULER_USERNAME + - name: SCHEDULER_DB_PASSWORD valueFrom: secretKeyRef: name: dev-env - key: AERIE_PASSWORD + key: SCHEDULER_PASSWORD restartPolicy: Always volumes: - name: aerie-file-store diff --git a/deployment/kubernetes/aerie-sequencing-deployment.yaml b/deployment/kubernetes/aerie-sequencing-deployment.yaml index 0b6c40012a..2397398b3a 100644 --- a/deployment/kubernetes/aerie-sequencing-deployment.yaml +++ b/deployment/kubernetes/aerie-sequencing-deployment.yaml @@ -34,11 +34,9 @@ spec: value: warn - name: MERLIN_GRAPHQL_URL value: 
http://hasura:8080/v1/graphql - - name: SEQUENCING_DB - value: aerie_sequencing - - name: SEQUENCING_DB_PORT + - name: AERIE_DB_PORT value: "5432" - - name: SEQUENCING_DB_SERVER + - name: AERIE_DB_HOST value: postgres - name: SEQUENCING_LOCAL_STORE value: /usr/src/app/sequencing_file_store @@ -48,12 +46,12 @@ spec: valueFrom: secretKeyRef: name: dev-env - key: AERIE_USERNAME + key: SEQUENCING_USERNAME - name: SEQUENCING_DB_PASSWORD valueFrom: secretKeyRef: name: dev-env - key: AERIE_PASSWORD + key: SEQUENCING_PASSWORD restartPolicy: Always volumes: - name: aerie-file-store diff --git a/deployment/kubernetes/hasura-deployment.yaml b/deployment/kubernetes/hasura-deployment.yaml index 99e6d96982..3cfd7f73d4 100644 --- a/deployment/kubernetes/hasura-deployment.yaml +++ b/deployment/kubernetes/hasura-deployment.yaml @@ -19,20 +19,14 @@ spec: ports: - containerPort: 8080 env: - - name: AERIE_MERLIN_DATABASE_URL - value: postgres://aerie:aerie@postgres:5432/aerie_merlin + - name: AERIE_DATABASE_URL + value: postgres://aerie:aerie@postgres:5432/aerie - name: AERIE_MERLIN_URL value: http://aerie_merlin:27183 - - name: AERIE_SCHEDULER_DATABASE_URL - value: postgres://aerie:aerie@postgres:5432/aerie_scheduler - name: AERIE_SCHEDULER_URL value: http://aerie_scheduler:27185 - - name: AERIE_SEQUENCING_DATABASE_URL - value: postgres://aerie:aerie@postgres:5432/aerie_sequencing - name: AERIE_SEQUENCING_URL value: http://aerie_sequencing:27184 - - name: AERIE_UI_DATABASE_URL - value: postgres://aerie:aerie@postgres:5432/aerie_ui - name: HASURA_GRAPHQL_ADMIN_SECRET valueFrom: secretKeyRef: diff --git a/deployment/postgres-init-db/init-aerie.sh b/deployment/postgres-init-db/init-aerie.sh index c480b7669b..38e6a437c6 100755 --- a/deployment/postgres-init-db/init-aerie.sh +++ b/deployment/postgres-init-db/init-aerie.sh @@ -7,6 +7,22 @@ psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname postgres <<-EOSQL CREATE USER "$AERIE_USERNAME" WITH PASSWORD '$AERIE_PASSWORD'; \echo 'Done!' + \echo 'Initializing gateway user...' + CREATE USER "$GATEWAY_DB_USER" WITH PASSWORD '$GATEWAY_DB_PASSWORD'; + \echo 'Done!' + + \echo 'Initializing merlin user...' + CREATE USER "$MERLIN_DB_USER" WITH PASSWORD '$MERLIN_DB_PASSWORD'; + \echo 'Done!' + + \echo 'Initializing scheduler user...' + CREATE USER "$SCHEDULER_DB_USER" WITH PASSWORD '$SCHEDULER_DB_PASSWORD'; + \echo 'Done!' + + \echo 'Initializing sequencing user...' + CREATE USER "$SEQUENCING_DB_USER" WITH PASSWORD '$SEQUENCING_DB_PASSWORD'; + \echo 'Done!' + \echo 'Initializing aerie database...' CREATE DATABASE aerie OWNER "$AERIE_USERNAME"; \connect aerie @@ -23,6 +39,11 @@ EOSQL export PGPASSWORD="$AERIE_PASSWORD" psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie" <<-EOSQL + \set aerie_user $AERIE_USERNAME + \set gateway_user $GATEWAY_DB_USER + \set merlin_user $MERLIN_DB_USER + \set scheduler_user $SCHEDULER_DB_USER + \set sequencing_user $SEQUENCING_DB_USER \echo 'Initializing aerie database objects...' \ir /docker-entrypoint-initdb.d/sql/init.sql \echo 'Done!' 
diff --git a/deployment/postgres-init-db/sql/init.sql b/deployment/postgres-init-db/sql/init.sql index dc22512bd1..175c0ffba7 100644 --- a/deployment/postgres-init-db/sql/init.sql +++ b/deployment/postgres-init-db/sql/init.sql @@ -44,4 +44,7 @@ begin; -- Preload Data \ir default_user_roles.sql; + + -- Initialize DB User permissions + \ir init_db_users.sql end; diff --git a/deployment/postgres-init-db/sql/init_db_users.sql b/deployment/postgres-init-db/sql/init_db_users.sql new file mode 100644 index 0000000000..ec901ffdf3 --- /dev/null +++ b/deployment/postgres-init-db/sql/init_db_users.sql @@ -0,0 +1,93 @@ +/* + This file grants permissions to each of the DB users to the schemas. + It is executed by the AERIE user after the DB schema has been created. + + If granting a certain privilege to 'all X in schema' to a user, include an 'alter default privilege' statement + 'grant all' affects current DB objects, 'alter default' affects future DB objects +*/ +begin; + -- All services may execute functions in the `util_functions` schema + -- 'routines' includes both functions and procedures + grant usage on schema util_functions to public; + grant execute on all routines in schema util_functions to public; + alter default privileges in schema util_functions grant execute on routines to public; + + -- All services must be able to view the user role permissions table + grant usage on schema permissions to public; + grant select on table permissions.user_role_permission to public; + + -- All services can create temp tables + grant temp on database aerie to public; + + -- All services can read merlin data + grant usage on schema merlin to public; + grant select on all tables in schema merlin to public; + alter default privileges in schema merlin grant select on tables to public; + + -- Revoke create from public in the public schema + revoke create on schema public from public; + + ------------------------------ + -- Gateway User Permissions -- + ------------------------------ + -- The Gateway is in charge of managing user permissions + grant select on all tables in schema permissions to :"gateway_user"; + grant insert, update, delete on permissions.users, permissions.users_allowed_roles to :"gateway_user"; + grant execute on all routines in schema permissions to :"gateway_user"; + + alter default privileges in schema permissions grant select on tables to :"gateway_user"; + alter default privileges in schema permissions grant execute on routines to :"gateway_user"; + -- The Gateway is in charge of managing uploaded files + grant select, insert, update, delete on merlin.uploaded_file to :"gateway_user"; + + ----------------------------- + -- Merlin User Permissions -- + ----------------------------- + -- Merlin has control of all tables in the merlin schema + grant select, insert, update, delete on all tables in schema merlin to :"merlin_user"; + grant execute on all routines in schema merlin to :"merlin_user"; + + alter default privileges in schema merlin grant select, insert, update, delete on tables to :"merlin_user"; + alter default privileges in schema merlin grant execute on routines to :"merlin_user"; + + -------------------------------- + -- Scheduler User Permissions -- + -------------------------------- + -- The Scheduler has control of all tables in the scheduler schema + grant usage on schema scheduler to :"scheduler_user"; + grant select, insert, update, delete on all tables in schema scheduler to :"scheduler_user"; + grant execute on all routines in schema scheduler to :"scheduler_user"; + 
+ alter default privileges in schema scheduler grant select, insert, update, delete on tables to :"scheduler_user"; + alter default privileges in schema scheduler grant execute on routines to :"scheduler_user"; + + -- The Scheduler needs to be able to Add/Update Activity Directives in a Plan + grant insert, update on table merlin.activity_directive to :"scheduler_user"; + grant insert on table merlin.plan to :"scheduler_user"; + + -- The Scheduler can write simulation data + grant insert, update on table merlin.span, merlin.simulation_dataset to :"scheduler_user"; + grant insert on table merlin.profile, merlin.profile_segment, merlin.topic, merlin.event to :"scheduler_user"; + + --------------------------------- + -- Sequencing User Permissions -- + --------------------------------- + -- The Sequencing Server has control of all tables in the sequencing schema + grant usage on schema sequencing to :"sequencing_user"; + grant select, insert, update, delete on all tables in schema sequencing to :"sequencing_user"; + grant execute on all routines in schema sequencing to :"sequencing_user"; + + alter default privileges in schema sequencing grant select, insert, update, delete on tables to :"sequencing_user"; + alter default privileges in schema sequencing grant execute on routines to :"sequencing_user"; + + ----------------------- + -- UI DB Permissions -- + ----------------------- + -- The Aerie User currently has control of all tables in the UI schema + grant create, usage on schema ui to :"aerie_user"; + grant select, insert, update, delete on all tables in schema ui to :"aerie_user"; + grant execute on all routines in schema ui to :"aerie_user"; + + alter default privileges in schema ui grant select, insert, update, delete on tables to :"aerie_user"; + alter default privileges in schema ui grant execute on routines to :"aerie_user"; +end; diff --git a/docker-compose.yml b/docker-compose.yml index e84bff083b..52691e81a3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,11 +12,10 @@ services: LOG_LEVEL: debug NODE_TLS_REJECT_UNAUTHORIZED: "0" PORT: 9000 - POSTGRES_AERIE_MERLIN_DB: aerie_merlin - POSTGRES_HOST: postgres - POSTGRES_PASSWORD: "${AERIE_PASSWORD}" - POSTGRES_PORT: 5432 - POSTGRES_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + GATEWAY_DB_USER: "${GATEWAY_USERNAME}" + GATEWAY_DB_PASSWORD: "${GATEWAY_PASSWORD}" image: "ghcr.io/nasa-ammos/aerie-gateway:develop" ports: ["9000:9000"] restart: always @@ -31,11 +30,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" HASURA_GRAPHQL_URL: http://hasura:8080/v1/graphql - MERLIN_DB: "aerie_merlin" - MERLIN_DB_PASSWORD: "${AERIE_PASSWORD}" - MERLIN_DB_PORT: 5432 - MERLIN_DB_SERVER: postgres - MERLIN_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_LOCAL_STORE: /usr/src/app/merlin_file_store MERLIN_PORT: 27183 JAVA_OPTS: > @@ -58,11 +56,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_DB: "aerie_scheduler" - SCHEDULER_DB_PASSWORD: "${AERIE_PASSWORD}" - SCHEDULER_DB_PORT: 5432 - SCHEDULER_DB_SERVER: postgres - SCHEDULER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_PORT: 27185 JAVA_OPTS: > 
-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005 @@ -86,11 +83,10 @@ services: LOG_LEVEL: debug MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql SEQUENCING_SERVER_PORT: 27184 - SEQUENCING_DB: aerie_sequencing - SEQUENCING_DB_PASSWORD: "${AERIE_PASSWORD}" - SEQUENCING_DB_PORT: 5432 - SEQUENCING_DB_SERVER: postgres - SEQUENCING_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SEQUENCING_DB_USER: "${SEQUENCING_USERNAME}" + SEQUENCING_DB_PASSWORD: "${SEQUENCING_PASSWORD}" SEQUENCING_LOCAL_STORE: /usr/src/app/sequencing_file_store SEQUENCING_WORKER_NUM: 8 SEQUENCING_MAX_WORKER_HEAP_MB: 1000 @@ -123,11 +119,10 @@ services: container_name: aerie_merlin_worker_1 depends_on: ["postgres"] environment: - MERLIN_WORKER_DB: "aerie_merlin" - MERLIN_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - MERLIN_WORKER_DB_PORT: 5432 - MERLIN_WORKER_DB_SERVER: postgres - MERLIN_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_WORKER_LOCAL_STORE: /usr/src/app/merlin_file_store SIMULATION_PROGRESS_POLL_PERIOD_MILLIS: 2000 JAVA_OPTS: > @@ -148,11 +143,10 @@ services: container_name: aerie_merlin_worker_2 depends_on: ["postgres"] environment: - MERLIN_WORKER_DB: "aerie_merlin" - MERLIN_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - MERLIN_WORKER_DB_PORT: 5432 - MERLIN_WORKER_DB_SERVER: postgres - MERLIN_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_WORKER_LOCAL_STORE: /usr/src/app/merlin_file_store SIMULATION_PROGRESS_POLL_PERIOD_MILLIS: 2000 JAVA_OPTS: > @@ -175,11 +169,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_WORKER_DB: "aerie_scheduler" - SCHEDULER_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - SCHEDULER_WORKER_DB_PORT: 5432 - SCHEDULER_WORKER_DB_SERVER: postgres - SCHEDULER_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_OUTPUT_MODE: UpdateInputPlanWithNewActivities MERLIN_LOCAL_STORE: /usr/src/app/merlin_file_store SCHEDULER_RULES_JAR: /usr/src/app/merlin_file_store/scheduler_rules.jar @@ -202,11 +195,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_WORKER_DB: "aerie_scheduler" - SCHEDULER_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - SCHEDULER_WORKER_DB_PORT: 5432 - SCHEDULER_WORKER_DB_SERVER: postgres - SCHEDULER_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_OUTPUT_MODE: UpdateInputPlanWithNewActivities MERLIN_LOCAL_STORE: /usr/src/app/merlin_file_store SCHEDULER_RULES_JAR: /usr/src/app/merlin_file_store/scheduler_rules.jar @@ -250,6 +242,14 @@ services: POSTGRES_USER: "${POSTGRES_USER}" AERIE_USERNAME: "${AERIE_USERNAME}" AERIE_PASSWORD: "${AERIE_PASSWORD}" + GATEWAY_DB_USER: "${GATEWAY_USERNAME}" + GATEWAY_DB_PASSWORD: "${GATEWAY_PASSWORD}" + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" + SEQUENCING_DB_USER: 
"${SEQUENCING_USERNAME}" + SEQUENCING_DB_PASSWORD: "${SEQUENCING_PASSWORD}" image: postgres:14.8 ports: ["5432:5432"] restart: always diff --git a/e2e-tests/docker-compose-many-workers.yml b/e2e-tests/docker-compose-many-workers.yml index a936866e93..802ce2379c 100644 --- a/e2e-tests/docker-compose-many-workers.yml +++ b/e2e-tests/docker-compose-many-workers.yml @@ -12,11 +12,10 @@ services: LOG_LEVEL: debug NODE_TLS_REJECT_UNAUTHORIZED: "0" PORT: 9000 - POSTGRES_AERIE_MERLIN_DB: aerie_merlin - POSTGRES_HOST: postgres - POSTGRES_PASSWORD: "${AERIE_PASSWORD}" - POSTGRES_PORT: 5432 - POSTGRES_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + GATEWAY_DB_USER: "${GATEWAY_USERNAME}" + GATEWAY_DB_PASSWORD: "${GATEWAY_PASSWORD}" image: "ghcr.io/nasa-ammos/aerie-gateway:develop" ports: ["9000:9000"] restart: always @@ -31,11 +30,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" HASURA_GRAPHQL_URL: http://hasura:8080/v1/graphql - MERLIN_DB: "aerie_merlin" - MERLIN_DB_PASSWORD: "${AERIE_PASSWORD}" - MERLIN_DB_PORT: 5432 - MERLIN_DB_SERVER: postgres - MERLIN_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_LOCAL_STORE: /usr/src/app/merlin_file_store MERLIN_PORT: 27183 JAVA_OPTS: > @@ -44,6 +42,7 @@ services: -Dorg.slf4j.simpleLogger.log.com.zaxxer.hikari=INFO -Dorg.slf4j.simpleLogger.logFile=System.err UNTRUE_PLAN_START: "2000-01-01T11:58:55.816Z" + ENABLE_CONTINUOUS_VALIDATION_THREAD: true image: aerie_merlin ports: ["27183:27183", "5005:5005"] restart: always @@ -58,11 +57,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_DB: "aerie_scheduler" - SCHEDULER_DB_PASSWORD: "${AERIE_PASSWORD}" - SCHEDULER_DB_PORT: 5432 - SCHEDULER_DB_SERVER: postgres - SCHEDULER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_PORT: 27185 JAVA_OPTS: > -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005 @@ -86,12 +84,14 @@ services: LOG_LEVEL: debug MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql SEQUENCING_SERVER_PORT: 27184 - SEQUENCING_DB: aerie_sequencing - SEQUENCING_DB_PASSWORD: "${AERIE_PASSWORD}" - SEQUENCING_DB_PORT: 5432 - SEQUENCING_DB_SERVER: postgres - SEQUENCING_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SEQUENCING_DB_USER: "${SEQUENCING_USERNAME}" + SEQUENCING_DB_PASSWORD: "${SEQUENCING_PASSWORD}" SEQUENCING_LOCAL_STORE: /usr/src/app/sequencing_file_store + SEQUENCING_WORKER_NUM: 8 + SEQUENCING_MAX_WORKER_HEAP_MB: 1000 + TRANSPILER_ENABLED : "true" image: aerie_sequencing ports: ["27184:27184"] restart: always @@ -121,11 +121,10 @@ services: replicas: 8 depends_on: [ "postgres" ] environment: - MERLIN_WORKER_DB: "aerie_merlin" - MERLIN_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - MERLIN_WORKER_DB_PORT: 5432 - MERLIN_WORKER_DB_SERVER: postgres - MERLIN_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_WORKER_LOCAL_STORE: /usr/src/app/merlin_file_store SIMULATION_PROGRESS_POLL_PERIOD_MILLIS: 2000 JAVA_OPTS: > @@ -147,15 +146,15 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" 
MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_WORKER_DB: "aerie_scheduler" - SCHEDULER_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - SCHEDULER_WORKER_DB_PORT: 5432 - SCHEDULER_WORKER_DB_SERVER: postgres - SCHEDULER_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_OUTPUT_MODE: UpdateInputPlanWithNewActivities MERLIN_LOCAL_STORE: /usr/src/app/merlin_file_store SCHEDULER_RULES_JAR: /usr/src/app/merlin_file_store/scheduler_rules.jar JAVA_OPTS: > + -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005 -Dorg.slf4j.simpleLogger.defaultLogLevel=DEBUG -Dorg.slf4j.simpleLogger.log.com.zaxxer.hikari=INFO -Dorg.slf4j.simpleLogger.logFile=System.err @@ -167,13 +166,10 @@ services: container_name: aerie_hasura depends_on: ["postgres"] environment: - AERIE_MERLIN_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_merlin" + AERIE_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie?options=-c%20search_path%3Dutil_functions%2Chasura%2Cpermissions%2Ctags%2Cmerlin%2Cscheduler%2Csequencing%2Cpublic" AERIE_MERLIN_URL: "http://aerie_merlin:27183" - AERIE_SCHEDULER_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_scheduler" AERIE_SCHEDULER_URL: "http://aerie_scheduler:27185" - AERIE_SEQUENCING_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_sequencing" AERIE_SEQUENCING_URL: "http://aerie_sequencing:27184" - AERIE_UI_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_ui" HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" HASURA_GRAPHQL_DEV_MODE: "true" HASURA_GRAPHQL_ENABLE_CONSOLE: "true" @@ -196,6 +192,14 @@ services: POSTGRES_USER: "${POSTGRES_USER}" AERIE_USERNAME: "${AERIE_USERNAME}" AERIE_PASSWORD: "${AERIE_PASSWORD}" + GATEWAY_DB_USER: "${GATEWAY_USERNAME}" + GATEWAY_DB_PASSWORD: "${GATEWAY_PASSWORD}" + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" + SEQUENCING_DB_USER: "${SEQUENCING_USERNAME}" + SEQUENCING_DB_PASSWORD: "${SEQUENCING_PASSWORD}" image: postgres:14.8 ports: ["5432:5432"] restart: always diff --git a/e2e-tests/docker-compose-test.yml b/e2e-tests/docker-compose-test.yml index 4d2c8f1867..80da4d2cff 100644 --- a/e2e-tests/docker-compose-test.yml +++ b/e2e-tests/docker-compose-test.yml @@ -14,11 +14,10 @@ services: LOG_LEVEL: debug NODE_TLS_REJECT_UNAUTHORIZED: '0' PORT: 9000 - POSTGRES_AERIE_MERLIN_DB: aerie_merlin - POSTGRES_HOST: postgres - POSTGRES_PASSWORD: '${AERIE_PASSWORD}' - POSTGRES_PORT: 5432 - POSTGRES_USER: '${AERIE_USERNAME}' + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + GATEWAY_DB_USER: "${GATEWAY_USERNAME}" + GATEWAY_DB_PASSWORD: "${GATEWAY_PASSWORD}" image: 'ghcr.io/nasa-ammos/aerie-gateway:develop' ports: ['9000:9000'] restart: always @@ -33,11 +32,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" HASURA_GRAPHQL_URL: http://hasura:8080/v1/graphql - MERLIN_DB: 'aerie_merlin' - MERLIN_DB_PASSWORD: '${AERIE_PASSWORD}' - MERLIN_DB_PORT: 5432 - MERLIN_DB_SERVER: postgres - MERLIN_DB_USER: '${AERIE_USERNAME}' + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_LOCAL_STORE: 
/usr/src/app/merlin_file_store MERLIN_PORT: 27183 JAVA_OPTS: > @@ -60,11 +58,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_DB: 'aerie_scheduler' - SCHEDULER_DB_PASSWORD: '${AERIE_PASSWORD}' - SCHEDULER_DB_PORT: 5432 - SCHEDULER_DB_SERVER: postgres - SCHEDULER_DB_USER: '${AERIE_USERNAME}' + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_PORT: 27185 JAVA_OPTS: > -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005 @@ -88,11 +85,11 @@ services: LOG_LEVEL: debug MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql SEQUENCING_SERVER_PORT: 27184 - SEQUENCING_DB: aerie_sequencing - SEQUENCING_DB_PASSWORD: '${AERIE_PASSWORD}' - SEQUENCING_DB_PORT: 5432 - SEQUENCING_DB_SERVER: postgres - SEQUENCING_DB_USER: '${AERIE_USERNAME}' + AERIE_DB_NAME: aerie + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SEQUENCING_DB_USER: "${SEQUENCING_USERNAME}" + SEQUENCING_DB_PASSWORD: "${SEQUENCING_PASSWORD}" SEQUENCING_LOCAL_STORE: /usr/src/app/sequencing_file_store image: aerie_sequencing ports: ['27184:27184'] @@ -121,11 +118,11 @@ services: container_name: aerie_merlin_worker_1 depends_on: ['postgres'] environment: - MERLIN_WORKER_DB: 'aerie_merlin' - MERLIN_WORKER_DB_PASSWORD: '${AERIE_PASSWORD}' - MERLIN_WORKER_DB_PORT: 5432 - MERLIN_WORKER_DB_SERVER: postgres - MERLIN_WORKER_DB_USER: '${AERIE_USERNAME}' + AERIE_DB_NAME: aerie + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_WORKER_LOCAL_STORE: /usr/src/app/merlin_file_store JAVA_OPTS: > -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005 @@ -144,11 +141,11 @@ services: container_name: aerie_merlin_worker_2 depends_on: ['postgres'] environment: - MERLIN_WORKER_DB: 'aerie_merlin' - MERLIN_WORKER_DB_PASSWORD: '${AERIE_PASSWORD}' - MERLIN_WORKER_DB_PORT: 5432 - MERLIN_WORKER_DB_SERVER: postgres - MERLIN_WORKER_DB_USER: '${AERIE_USERNAME}' + AERIE_DB_NAME: aerie + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" MERLIN_WORKER_LOCAL_STORE: /usr/src/app/merlin_file_store JAVA_OPTS: > -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005 @@ -169,11 +166,11 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_WORKER_DB: "aerie_scheduler" - SCHEDULER_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - SCHEDULER_WORKER_DB_PORT: 5432 - SCHEDULER_WORKER_DB_SERVER: postgres - SCHEDULER_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_NAME: aerie + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_OUTPUT_MODE: UpdateInputPlanWithNewActivities MERLIN_LOCAL_STORE: /usr/src/app/merlin_file_store SCHEDULER_RULES_JAR: /usr/src/app/merlin_file_store/scheduler_rules.jar @@ -195,11 +192,10 @@ services: environment: HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" MERLIN_GRAPHQL_URL: http://hasura:8080/v1/graphql - SCHEDULER_WORKER_DB: "aerie_scheduler" - SCHEDULER_WORKER_DB_PASSWORD: "${AERIE_PASSWORD}" - SCHEDULER_WORKER_DB_PORT: 5432 - SCHEDULER_WORKER_DB_SERVER: postgres - SCHEDULER_WORKER_DB_USER: "${AERIE_USERNAME}" + AERIE_DB_HOST: postgres + AERIE_DB_PORT: 5432 + 
SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" SCHEDULER_OUTPUT_MODE: UpdateInputPlanWithNewActivities MERLIN_LOCAL_STORE: /usr/src/app/merlin_file_store SCHEDULER_RULES_JAR: /usr/src/app/merlin_file_store/scheduler_rules.jar @@ -216,13 +212,10 @@ services: container_name: hasura depends_on: ['postgres'] environment: - AERIE_MERLIN_DATABASE_URL: 'postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_merlin' + AERIE_DATABASE_URL: "postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie?options=-c%20search_path%3Dutil_functions%2Chasura%2Cpermissions%2Ctags%2Cmerlin%2Cscheduler%2Csequencing%2Cpublic" AERIE_MERLIN_URL: "http://aerie_merlin:27183" - AERIE_SCHEDULER_DATABASE_URL: 'postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_scheduler' AERIE_SCHEDULER_URL: "http://aerie_scheduler:27185" - AERIE_SEQUENCING_DATABASE_URL: 'postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_sequencing' AERIE_SEQUENCING_URL: "http://aerie_sequencing:27184" - AERIE_UI_DATABASE_URL: 'postgres://${AERIE_USERNAME}:${AERIE_PASSWORD}@postgres:5432/aerie_ui' HASURA_GRAPHQL_ADMIN_SECRET: "${HASURA_GRAPHQL_ADMIN_SECRET}" HASURA_GRAPHQL_DEV_MODE: 'true' HASURA_GRAPHQL_ENABLE_CONSOLE: 'true' @@ -244,6 +237,14 @@ services: POSTGRES_USER: '${POSTGRES_USER}' AERIE_USERNAME: '${AERIE_USERNAME}' AERIE_PASSWORD: '${AERIE_PASSWORD}' + GATEWAY_DB_USER: "${GATEWAY_USERNAME}" + GATEWAY_DB_PASSWORD: "${GATEWAY_PASSWORD}" + MERLIN_DB_USER: "${MERLIN_USERNAME}" + MERLIN_DB_PASSWORD: "${MERLIN_PASSWORD}" + SCHEDULER_DB_USER: "${SCHEDULER_USERNAME}" + SCHEDULER_DB_PASSWORD: "${SCHEDULER_PASSWORD}" + SEQUENCING_DB_USER: "${SEQUENCING_USERNAME}" + SEQUENCING_DB_PASSWORD: "${SEQUENCING_PASSWORD}" image: postgres:14.8 ports: ['5432:5432'] restart: always diff --git a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/TimelineRemoteTests.java b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/TimelineRemoteTests.java index 3ffe3431cd..0abb6e651b 100644 --- a/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/TimelineRemoteTests.java +++ b/e2e-tests/src/test/java/gov/nasa/jpl/aerie/e2e/TimelineRemoteTests.java @@ -58,8 +58,8 @@ void beforeAll() throws SQLException { hikariConfig.addDataSourceProperty("portNumber", 5432); hikariConfig.addDataSourceProperty("databaseName", "aerie"); hikariConfig.addDataSourceProperty("applicationName", "Merlin Server"); - hikariConfig.setUsername(System.getenv("AERIE_USERNAME")); - hikariConfig.setPassword(System.getenv("AERIE_PASSWORD")); + hikariConfig.setUsername(System.getenv("MERLIN_USERNAME")); + hikariConfig.setPassword(System.getenv("MERLIN_PASSWORD")); hikariConfig.setConnectionInitSql("set time zone 'UTC'"); dataSource = new HikariDataSource(hikariConfig); diff --git a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/AerieAppDriver.java b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/AerieAppDriver.java index 81b0289bab..edcf80c4a2 100644 --- a/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/AerieAppDriver.java +++ b/merlin-server/src/main/java/gov/nasa/jpl/aerie/merlin/server/AerieAppDriver.java @@ -172,11 +172,11 @@ private static AppConfiguration loadConfiguration() { Integer.parseInt(getEnv("MERLIN_PORT", "27183")), logger.isDebugEnabled(), Path.of(getEnv("MERLIN_LOCAL_STORE", "/usr/src/app/merlin_file_store")), - new PostgresStore(getEnv("MERLIN_DB_SERVER", "postgres"), + new PostgresStore(getEnv("AERIE_DB_HOST", "postgres"), getEnv("MERLIN_DB_USER", ""), - 
Integer.parseInt(getEnv("MERLIN_DB_PORT", "5432")), + Integer.parseInt(getEnv("AERIE_DB_PORT", "5432")), getEnv("MERLIN_DB_PASSWORD", ""), - getEnv("MERLIN_DB", "aerie_merlin")), + "aerie"), Instant.parse(getEnv("UNTRUE_PLAN_START", "")), URI.create(getEnv("HASURA_GRAPHQL_URL", "http://localhost:8080/v1/graphql")), getEnv("HASURA_GRAPHQL_ADMIN_SECRET", ""), diff --git a/merlin-worker/src/main/java/gov/nasa/jpl/aerie/merlin/worker/MerlinWorkerAppDriver.java b/merlin-worker/src/main/java/gov/nasa/jpl/aerie/merlin/worker/MerlinWorkerAppDriver.java index eabb961f10..9d4f66b77c 100644 --- a/merlin-worker/src/main/java/gov/nasa/jpl/aerie/merlin/worker/MerlinWorkerAppDriver.java +++ b/merlin-worker/src/main/java/gov/nasa/jpl/aerie/merlin/worker/MerlinWorkerAppDriver.java @@ -118,11 +118,11 @@ private static String getEnv(final String key, final String fallback){ private static WorkerAppConfiguration loadConfiguration() { return new WorkerAppConfiguration( Path.of(getEnv("MERLIN_WORKER_LOCAL_STORE", "/usr/src/app/merlin_file_store")), - new PostgresStore(getEnv("MERLIN_WORKER_DB_SERVER", "postgres"), - getEnv("MERLIN_WORKER_DB_USER", ""), - Integer.parseInt(getEnv("MERLIN_WORKER_DB_PORT", "5432")), - getEnv("MERLIN_WORKER_DB_PASSWORD", ""), - getEnv("MERLIN_WORKER_DB", "aerie_merlin")), + new PostgresStore(getEnv("AERIE_DB_HOST", "postgres"), + getEnv("MERLIN_DB_USER", ""), + Integer.parseInt(getEnv("AERIE_DB_PORT", "5432")), + getEnv("MERLIN_DB_PASSWORD", ""), + "aerie"), Integer.parseInt(getEnv("SIMULATION_PROGRESS_POLL_PERIOD_MILLIS", "5000")), Instant.parse(getEnv("UNTRUE_PLAN_START", "")) ); diff --git a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/SchedulerAppDriver.java b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/SchedulerAppDriver.java index 6fa88c4f91..dfffd3f8fe 100644 --- a/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/SchedulerAppDriver.java +++ b/scheduler-server/src/main/java/gov/nasa/jpl/aerie/scheduler/server/SchedulerAppDriver.java @@ -142,11 +142,11 @@ private static AppConfiguration loadConfiguration() { return new AppConfiguration( Integer.parseInt(getEnv("SCHEDULER_PORT", "27185")), logger.isDebugEnabled(), - new PostgresStore(getEnv("SCHEDULER_DB_SERVER", "postgres"), + new PostgresStore(getEnv("AERIE_DB_SERVER", "postgres"), getEnv("SCHEDULER_DB_USER", ""), - Integer.parseInt(getEnv("SCHEDULER_DB_PORT", "5432")), + Integer.parseInt(getEnv("AERIE_DB_PORT", "5432")), getEnv("SCHEDULER_DB_PASSWORD", ""), - getEnv("SCHEDULER_DB", "aerie_scheduler")), + "aerie"), URI.create(getEnv("MERLIN_GRAPHQL_URL", "http://localhost:8080/v1/graphql")), getEnv("HASURA_GRAPHQL_ADMIN_SECRET", "") ); diff --git a/scheduler-worker/src/main/java/gov/nasa/jpl/aerie/scheduler/worker/SchedulerWorkerAppDriver.java b/scheduler-worker/src/main/java/gov/nasa/jpl/aerie/scheduler/worker/SchedulerWorkerAppDriver.java index 253105dcaa..a782e46611 100644 --- a/scheduler-worker/src/main/java/gov/nasa/jpl/aerie/scheduler/worker/SchedulerWorkerAppDriver.java +++ b/scheduler-worker/src/main/java/gov/nasa/jpl/aerie/scheduler/worker/SchedulerWorkerAppDriver.java @@ -124,11 +124,11 @@ private static String getEnv(final String key, final String fallback){ private static WorkerAppConfiguration loadConfiguration() { return new WorkerAppConfiguration( - new PostgresStore(getEnv("SCHEDULER_WORKER_DB_SERVER", "postgres"), - getEnv("SCHEDULER_WORKER_DB_USER", ""), - Integer.parseInt(getEnv("SCHEDULER_WORKER_DB_PORT", "5432")), - 
getEnv("SCHEDULER_WORKER_DB_PASSWORD", ""), - getEnv("SCHEDULER_WORKER_DB", "aerie_scheduler")), + new PostgresStore(getEnv("AERIE_DB_SERVER", "postgres"), + getEnv("SCHEDULER_DB_USER", ""), + Integer.parseInt(getEnv("AERIE_DB_PORT", "5432")), + getEnv("SCHEDULER_DB_PASSWORD", ""), + "aerie"), URI.create(getEnv("MERLIN_GRAPHQL_URL", "http://localhost:8080/v1/graphql")), Path.of(getEnv("MERLIN_LOCAL_STORE", "/usr/src/app/merlin_file_store")), Path.of(getEnv("SCHEDULER_RULES_JAR", "/usr/src/app/merlin_file_store/scheduler_rules.jar")), diff --git a/sequencing-server/src/db.ts b/sequencing-server/src/db.ts index 743731b723..97d746af49 100644 --- a/sequencing-server/src/db.ts +++ b/sequencing-server/src/db.ts @@ -6,11 +6,10 @@ import { getEnv } from './env.js'; const { Pool: DbPool } = pg; const { - POSTGRES_AERIE_SEQUENCING_DB, - POSTGRES_HOST: host, - POSTGRES_PASSWORD: password, - POSTGRES_PORT: port, - POSTGRES_USER: user, + AERIE_DB_HOST: host, + AERIE_DB_PORT: port, + SEQUENCING_DB_USER: user, + SEQUENCING_DB_PASSWORD: password, } = getEnv(); const logger = getLogger('packages/db/db'); @@ -25,7 +24,7 @@ export class DbExpansion { static init() { try { const config: PoolConfig = { - database: POSTGRES_AERIE_SEQUENCING_DB, + database: 'aerie', host, password, port: parseInt(port, 10), diff --git a/sequencing-server/src/env.ts b/sequencing-server/src/env.ts index 0f2d5e8c06..91a65b6818 100644 --- a/sequencing-server/src/env.ts +++ b/sequencing-server/src/env.ts @@ -4,11 +4,10 @@ export type Env = { LOG_LEVEL: string; MERLIN_GRAPHQL_URL: string; PORT: string; - POSTGRES_AERIE_SEQUENCING_DB: string; - POSTGRES_HOST: string; - POSTGRES_PASSWORD: string; - POSTGRES_PORT: string; - POSTGRES_USER: string; + AERIE_DB_HOST: string; + AERIE_DB_PORT: string; + SEQUENCING_DB_USER: string; + SEQUENCING_DB_PASSWORD: string; STORAGE: string; SEQUENCING_WORKER_NUM: string; SEQUENCING_MAX_WORKER_HEAP_MB: string; @@ -21,11 +20,10 @@ export const defaultEnv: Env = { LOG_LEVEL: 'info', MERLIN_GRAPHQL_URL: 'http://hasura:8080/v1/graphql', PORT: '27184', - POSTGRES_AERIE_SEQUENCING_DB: 'aerie_sequencing', - POSTGRES_HOST: 'localhost', - POSTGRES_PASSWORD: '', - POSTGRES_PORT: '5432', - POSTGRES_USER: '', + AERIE_DB_HOST: 'localhost', + SEQUENCING_DB_PASSWORD: '', + AERIE_DB_PORT: '5432', + SEQUENCING_DB_USER: '', STORAGE: 'sequencing_file_store', SEQUENCING_WORKER_NUM: '8', SEQUENCING_MAX_WORKER_HEAP_MB: '1000', @@ -40,11 +38,10 @@ export function getEnv(): Env { const LOG_LEVEL = env['LOG_LEVEL'] ?? defaultEnv.LOG_LEVEL; const MERLIN_GRAPHQL_URL = env['MERLIN_GRAPHQL_URL'] ?? defaultEnv.MERLIN_GRAPHQL_URL; const PORT = env['SEQUENCING_SERVER_PORT'] ?? defaultEnv.PORT; - const POSTGRES_AERIE_SEQUENCING_DB = env['SEQUENCING_DB'] ?? defaultEnv.POSTGRES_AERIE_SEQUENCING_DB; - const POSTGRES_HOST = env['SEQUENCING_DB_SERVER'] ?? defaultEnv.POSTGRES_HOST; - const POSTGRES_PASSWORD = env['SEQUENCING_DB_PASSWORD'] ?? defaultEnv.POSTGRES_PASSWORD; - const POSTGRES_PORT = env['SEQUENCING_DB_PORT'] ?? defaultEnv.POSTGRES_PORT; - const POSTGRES_USER = env['SEQUENCING_DB_USER'] ?? defaultEnv.POSTGRES_USER; + const AERIE_DB_HOST = env['AERIE_DB_HOST'] ?? defaultEnv.AERIE_DB_HOST; + const AERIE_DB_PORT = env['AERIE_DB_PORT'] ?? defaultEnv.AERIE_DB_PORT; + const SEQUENCING_DB_USER = env['SEQUENCING_DB_USER'] ?? defaultEnv.SEQUENCING_DB_USER; + const SEQUENCING_DB_PASSWORD = env['SEQUENCING_DB_PASSWORD'] ?? defaultEnv.SEQUENCING_DB_PASSWORD; const STORAGE = env['SEQUENCING_LOCAL_STORE'] ?? 
defaultEnv.STORAGE; const SEQUENCING_WORKER_NUM = env['SEQUENCING_WORKER_NUM'] ?? defaultEnv.SEQUENCING_WORKER_NUM; const SEQUENCING_MAX_WORKER_HEAP_MB = @@ -56,11 +53,10 @@ export function getEnv(): Env { LOG_LEVEL, MERLIN_GRAPHQL_URL, PORT, - POSTGRES_AERIE_SEQUENCING_DB, - POSTGRES_HOST, - POSTGRES_PASSWORD, - POSTGRES_PORT, - POSTGRES_USER, + AERIE_DB_HOST, + AERIE_DB_PORT, + SEQUENCING_DB_USER, + SEQUENCING_DB_PASSWORD, STORAGE, SEQUENCING_WORKER_NUM, SEQUENCING_MAX_WORKER_HEAP_MB, From a756a88e101a7e9df7199ee8457e64c493257922 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Wed, 20 Mar 2024 17:17:45 -0700 Subject: [PATCH 32/36] Update DB Comparison Workflow - Remove outdated explanations - Take v2.8.0 as base - Exclude the HDB_Catalog schema from throwing comparison failures, as this schema is controlled by Hasura --- .github/scripts/compareDatabasesDown.sh | 105 ++++------------------- .github/scripts/compareDatabasesUp.sh | 105 ++++------------------- .github/scripts/explanations | 24 ------ .github/scripts/explanations_merlin_down | 16 ---- .github/workflows/pgcmp.yml | 89 ++++++++----------- 5 files changed, 70 insertions(+), 269 deletions(-) delete mode 100644 .github/scripts/explanations delete mode 100644 .github/scripts/explanations_merlin_down diff --git a/.github/scripts/compareDatabasesDown.sh b/.github/scripts/compareDatabasesDown.sh index e870c1d3f1..be003b16f6 100755 --- a/.github/scripts/compareDatabasesDown.sh +++ b/.github/scripts/compareDatabasesDown.sh @@ -3,96 +3,27 @@ mkdir results mkdir comparison -PGCMPINPUT1=./pgdumpv1_0_1/AerieMerlinV1_0_1 PGCMPINPUT2=./pgdumpmigrateddown/AerieMerlinMigratedDown PGCLABEL1=AerieMerlinV1_0_1 PGCLABEL2=AerieMerlinMigratedDown PGCFULLOUTPUT=./comparison/fulloutputMerlin.txt PGCUNEXPLAINED=./comparison/unexplainedMerlin.txt PGCBADEXPLAIN=./comparison/badexplanationsMerlin.txt PGDB=postgres PGBINDIR=/usr/bin PGCEXPLANATIONS=./explanations_merlin ./pgcmp +PGCMPINPUT1=./pgdumpV2_8_0/AerieV2_8_0 \ +PGCMPINPUT2=./pgdumpmigrateddown/AerieMigratedDown \ +PGCLABEL1=AerieV2_8_0 \ +PGCLABEL2=AerieMigratedDown \ +PGCFULLOUTPUT=./comparison/fulloutput.txt \ +PGCUNEXPLAINED=./comparison/unexplained.txt \ +PGCBADEXPLAIN=./comparison/badexplanations.txt \ +PGDB=postgres \ +PGBINDIR=/usr/bin \ +PGCOMITSCHEMAS="('hdb_catalog'),('pg_catalog'),('information_schema')" \ +./pgcmp return_code=$? 
-if [ $return_code -ne 0 ]; then - echo "AerieMerlin comparison failed - return code=$return_code" - touch results/fulloutput - touch results/unexplained - touch results/badexplanations - touch results/perform-comparison.log - printf "==================\nAerieMerlin Results\n==================\n" >> results/fulloutput - printf "==================\nAerieMerlin Results\n==================\n" >> results/unexplained - printf "==================\nAerieMerlin Results\n==================\n" >> results/badexplanations - printf "==================\nAerieMerlin Results\n==================\n" >> results/perform-comparison.log - cat ./comparison/fulloutputMerlin.txt >> results/fulloutput - cat ./comparison/unexplainedMerlin.txt >> results/unexplained - cat ./comparison/badexplanationsMerlin.txt >> results/badexplanations - cat /tmp/perform-comparison.log >> results/perform-comparison.log -else - echo "AerieMerlin comparison succeeded" -fi - -PGCMPINPUT1=./pgdumpv1_0_1/AerieSchedulerV1_0_1 PGCMPINPUT2=./pgdumpmigrateddown/AerieSchedulerMigratedDown PGCLABEL1=AerieSchedulerV1_0_1 PGCLABEL2=AerieSchedulerMigratedDown PGCFULLOUTPUT=./comparison/fulloutputScheduler.txt PGCUNEXPLAINED=./comparison/unexplainedScheduler.txt PGCBADEXPLAIN=./comparison/badexplanationsScheduler.txt PGDB=postgres PGBINDIR=/usr/bin PGCEXPLANATIONS=./explanations ./pgcmp -retcode=$? -if [ $retcode -gt $return_code ]; then -return_code=$retcode -fi - -if [ $retcode -ne 0 ]; then - echo "AerieScheduler comparison failed - return code=$retcode" - touch results/fulloutput - touch results/unexplained - touch results/badexplanations - touch results/perform-comparison.log - printf "\n==================\nAerieScheduler Results\n==================\n" >> results/fulloutput - printf "\n==================\nAerieScheduler Results\n==================\n" >> results/unexplained - printf "\n==================\nAerieScheduler Results\n==================\n" >> results/badexplanations - printf "\n==================\nAerieScheduler Results\n==================\n" >> results/perform-comparison.log - cat ./comparison/fulloutputScheduler.txt >> results/fulloutput - cat ./comparison/unexplainedScheduler.txt >> results/unexplained - cat ./comparison/badexplanationsScheduler.txt >> results/badexplanations - cat /tmp/perform-comparison.log >> results/perform-comparison.log -else - echo "AerieScheduler comparison succeeded" -fi -PGCMPINPUT1=./pgdumpv1_0_1/AerieSequencingV1_0_1 PGCMPINPUT2=./pgdumpmigrateddown/AerieSequencingMigratedDown PGCLABEL1=AerieSequencingV1_0_1 PGCLABEL2=AerieSequencingMigratedDown PGCFULLOUTPUT=./comparison/fulloutputSequencing.txt PGCUNEXPLAINED=./comparison/unexplainedSequencing.txt PGCBADEXPLAIN=./comparison/badexplanationsSequencing.txt PGDB=postgres PGBINDIR=/usr/bin PGCEXPLANATIONS=./explanations ./pgcmp -retcode=$? 
-if [ $retcode -gt $return_code ]; then -return_code=$retcode -fi - -if [ $retcode -ne 0 ]; then - echo "AerieSequencing comparison failed - return code=$retcode" - touch results/fulloutput - touch results/unexplained - touch results/badexplanations - touch results/perform-comparison.log - printf "\n==================\nAerieSequencing Results\n==================\n" >> results/fulloutput - printf "\n==================\nAerieSequencing Results\n==================\n" >> results/unexplained - printf "\n==================\nAerieSequencing Results\n==================\n" >> results/badexplanations - printf "\n==================\nAerieSequencing Results\n==================\n" >> results/perform-comparison.log - cat ./comparison/fulloutputSequencing.txt >> results/fulloutput - cat ./comparison/unexplainedSequencing.txt >> results/unexplained - cat ./comparison/badexplanationsSequencing.txt >> results/badexplanations - cat /tmp/perform-comparison.log >> results/perform-comparison.log -else - echo "AerieSequencing comparison succeeded" -fi - -PGCMPINPUT1=./pgdumpv1_0_1/AerieUIV1_0_1 PGCMPINPUT2=./pgdumpmigrateddown/AerieUIMigratedDown PGCLABEL1=AerieUIV1_0_1 PGCLABEL2=AerieUIMigratedDown PGCFULLOUTPUT=./comparison/fulloutputUI.txt PGCUNEXPLAINED=./comparison/unexplainedUI.txt PGCBADEXPLAIN=./comparison/badexplanationsUI.txt PGDB=postgres PGBINDIR=/usr/bin PGCEXPLANATIONS=./explanations ./pgcmp -retcode=$? -if [ $retcode -gt $return_code ]; then -return_code=$retcode -fi - -if [ $retcode -ne 0 ]; then - echo "AerieUI comparison failed - return code=$retcode" - touch results/fulloutput - touch results/unexplained - touch results/badexplanations - touch results/perform-comparison.log - printf "\n==================\nAerieUI Results\n==================\n" >> results/fulloutput - printf "\n==================\nAerieUI Results\n==================\n" >> results/unexplained - printf "\n==================\nAerieUI Results\n==================\n" >> results/badexplanations - printf "\n==================\nAerieUI Results\n==================\n" >> results/perform-comparison.log - cat ./comparison/fulloutputUI.txt >> results/fulloutput - cat ./comparison/unexplainedUI.txt >> results/unexplained - cat ./comparison/badexplanationsUI.txt >> results/badexplanations - cat /tmp/perform-comparison.log >> results/perform-comparison.log +if [ $return_code -ne 0 ]; then + echo "Database schema comparison failed - return code=$return_code" + mv ./comparison/fulloutput.txt results/fulloutput + mv ./comparison/unexplained.txt results/unexplained + mv ./comparison/badexplanations.txt results/badexplanations + mv /tmp/perform-comparison.log results/perform-comparison.log else - echo "AerieUI comparison succeeded" + echo "Database schema comparison succeeded" fi exit $return_code diff --git a/.github/scripts/compareDatabasesUp.sh b/.github/scripts/compareDatabasesUp.sh index d936abaa8b..2cf967f46d 100755 --- a/.github/scripts/compareDatabasesUp.sh +++ b/.github/scripts/compareDatabasesUp.sh @@ -3,96 +3,27 @@ mkdir results mkdir comparison -PGCMPINPUT1=./pgdumpmigrated/AerieMerlinMigrated PGCMPINPUT2=./pgdumpraw/AerieMerlinRaw PGCLABEL1=AerieMerlinMigrated PGCLABEL2=AerieMerlinRaw PGCFULLOUTPUT=./comparison/fulloutputMerlin.txt PGCUNEXPLAINED=./comparison/unexplainedMerlin.txt PGCBADEXPLAIN=./comparison/badexplanationsMerlin.txt PGDB=postgres PGBINDIR=/usr/bin ./pgcmp +PGCMPINPUT1=./pgdumpmigrated/AerieMigrated \ +PGCMPINPUT2=./pgdumpraw/AerieRaw \ +PGCLABEL1=AerieMigrated \ +PGCLABEL2=AerieRaw \ 
+PGCFULLOUTPUT=./comparison/fulloutput.txt \ +PGCUNEXPLAINED=./comparison/unexplained.txt \ +PGCBADEXPLAIN=./comparison/badexplanations.txt \ +PGDB=postgres \ +PGBINDIR=/usr/bin \ +PGCOMITSCHEMAS="('hdb_catalog'),('pg_catalog'),('information_schema')" \ +./pgcmp return_code=$? -if [ $return_code -ne 0 ]; then - echo "AerieMerlin comparison failed - return code=$return_code" - touch results/fulloutput - touch results/unexplained - touch results/badexplanations - touch results/perform-comparison.log - printf "==================\nAerieMerlin Results\n==================\n" >> results/fulloutput - printf "==================\nAerieMerlin Results\n==================\n" >> results/unexplained - printf "==================\nAerieMerlin Results\n==================\n" >> results/badexplanations - printf "==================\nAerieMerlin Results\n==================\n" >> results/perform-comparison.log - cat ./comparison/fulloutputMerlin.txt >> results/fulloutput - cat ./comparison/unexplainedMerlin.txt >> results/unexplained - cat ./comparison/badexplanationsMerlin.txt >> results/badexplanations - cat /tmp/perform-comparison.log >> results/perform-comparison.log -else - echo "AerieMerlin comparison succeeded" -fi - -PGCMPINPUT1=./pgdumpmigrated/AerieSchedulerMigrated PGCMPINPUT2=./pgdumpraw/AerieSchedulerRaw PGCLABEL1=AerieSchedulerMigrated PGCLABEL2=AerieSchedulerRaw PGCFULLOUTPUT=./comparison/fulloutputScheduler.txt PGCUNEXPLAINED=./comparison/unexplainedScheduler.txt PGCBADEXPLAIN=./comparison/badexplanationsScheduler.txt PGDB=postgres PGBINDIR=/usr/bin ./pgcmp -retcode=$? -if [ $retcode -gt $return_code ]; then -return_code=$retcode -fi - -if [ $retcode -ne 0 ]; then - echo "AerieScheduler comparison failed - return code=$retcode" - touch results/fulloutput - touch results/unexplained - touch results/badexplanations - touch results/perform-comparison.log - printf "\n==================\nAerieScheduler Results\n==================\n" >> results/fulloutput - printf "\n==================\nAerieScheduler Results\n==================\n" >> results/unexplained - printf "\n==================\nAerieScheduler Results\n==================\n" >> results/badexplanations - printf "\n==================\nAerieScheduler Results\n==================\n" >> results/perform-comparison.log - cat ./comparison/fulloutputScheduler.txt >> results/fulloutput - cat ./comparison/unexplainedScheduler.txt >> results/unexplained - cat ./comparison/badexplanationsScheduler.txt >> results/badexplanations - cat /tmp/perform-comparison.log >> results/perform-comparison.log -else - echo "AerieScheduler comparison succeeded" -fi -PGCMPINPUT1=./pgdumpmigrated/AerieSequencingMigrated PGCMPINPUT2=./pgdumpraw/AerieSequencingRaw PGCLABEL1=AerieSequencingMigrated PGCLABEL2=AerieSequencingRaw PGCFULLOUTPUT=./comparison/fulloutputSequencing.txt PGCUNEXPLAINED=./comparison/unexplainedSequencing.txt PGCBADEXPLAIN=./comparison/badexplanationsSequencing.txt PGDB=postgres PGBINDIR=/usr/bin ./pgcmp -retcode=$? 
-if [ $retcode -gt $return_code ]; then -return_code=$retcode -fi - -if [ $retcode -ne 0 ]; then - echo "AerieSequencing comparison failed - return code=$retcode" - touch results/fulloutput - touch results/unexplained - touch results/badexplanations - touch results/perform-comparison.log - printf "\n==================\nAerieSequencing Results\n==================\n" >> results/fulloutput - printf "\n==================\nAerieSequencing Results\n==================\n" >> results/unexplained - printf "\n==================\nAerieSequencing Results\n==================\n" >> results/badexplanations - printf "\n==================\nAerieSequencing Results\n==================\n" >> results/perform-comparison.log - cat ./comparison/fulloutputSequencing.txt >> results/fulloutput - cat ./comparison/unexplainedSequencing.txt >> results/unexplained - cat ./comparison/badexplanationsSequencing.txt >> results/badexplanations - cat /tmp/perform-comparison.log >> results/perform-comparison.log -else - echo "AerieSequencing comparison succeeded" -fi - -PGCMPINPUT1=./pgdumpmigrated/AerieUIMigrated PGCMPINPUT2=./pgdumpraw/AerieUIRaw PGCLABEL1=AerieUIMigrated PGCLABEL2=AerieUIRaw PGCFULLOUTPUT=./comparison/fulloutputUI.txt PGCUNEXPLAINED=./comparison/unexplainedUI.txt PGCBADEXPLAIN=./comparison/badexplanationsUI.txt PGDB=postgres PGBINDIR=/usr/bin ./pgcmp -retcode=$? -if [ $retcode -gt $return_code ]; then -return_code=$retcode -fi - -if [ $retcode -ne 0 ]; then - echo "AerieUI comparison failed - return code=$retcode" - touch results/fulloutput - touch results/unexplained - touch results/badexplanations - touch results/perform-comparison.log - printf "\n==================\nAerieUI Results\n==================\n" >> results/fulloutput - printf "\n==================\nAerieUI Results\n==================\n" >> results/unexplained - printf "\n==================\nAerieUI Results\n==================\n" >> results/badexplanations - printf "\n==================\nAerieUI Results\n==================\n" >> results/perform-comparison.log - cat ./comparison/fulloutputUI.txt >> results/fulloutput - cat ./comparison/unexplainedUI.txt >> results/unexplained - cat ./comparison/badexplanationsUI.txt >> results/badexplanations - cat /tmp/perform-comparison.log >> results/perform-comparison.log +if [ $return_code -ne 0 ]; then + echo "Database schema comparison failed - return code=$return_code" + mv ./comparison/fulloutput.txt results/fulloutput + mv ./comparison/unexplained.txt results/unexplained + mv ./comparison/badexplanations.txt results/badexplanations + mv /tmp/perform-comparison.log results/perform-comparison.log else - echo "AerieUI comparison succeeded" + echo "Database schema comparison succeeded" fi exit $return_code diff --git a/.github/scripts/explanations b/.github/scripts/explanations deleted file mode 100644 index 2040ce85fe..0000000000 --- a/.github/scripts/explanations +++ /dev/null @@ -1,24 +0,0 @@ -schema migrations migrations missing in 1st DB From Migration 0 2 -function definition migrations migrations.migrations.mark_migration_applied(character varying) missing in 1st DB From Migration 0 3 -function definition migrations migrations.migrations.mark_migration_rolled_back(character varying) missing in 1st DB From Migration 0 3 -function permissions migrations migrations.mark_migration_applied(character varying)-role:aerie missing in 1st DB From Migration 0 3 -function permissions migrations migrations.mark_migration_rolled_back(character varying)-role:aerie missing in 1st DB From Migration 0 3 -function 
permissions migrations migrations.mark_migration_applied(character varying)-role:PUBLIC missing in 1st DB From Migration 0 3 -function permissions migrations migrations.mark_migration_rolled_back(character varying)-role:PUBLIC missing in 1st DB From Migration 0 3 -function owner migrations migrations.mark_migration_applied(character varying) missing in 1st DB From Migration 0 3 -function owner migrations migrations.mark_migration_rolled_back(character varying) missing in 1st DB From Migration 0 3 -function language migrations migrations.migrations.mark_migration_applied(character varying) missing in 1st DB From Migration 0 3 -table owner migrations migrations.schema_migrations missing in 1st DB From Migration 0 4 -column migrations migrations.schema_migrations.migration_id missing in 1st DB From Migration 0 4 -function language migrations migrations.migrations.mark_migration_rolled_back(character varying) missing in 1st DB From Migration 0 3 -function security type migrations migrations.migrations.mark_migration_applied(character varying) missing in 1st DB From Migration 0 3 -function security type migrations migrations.migrations.mark_migration_rolled_back(character varying) missing in 1st DB From Migration 0 3 -check constraint migrations migrations.schema_migrations:schema_migrations_pkey missing in 1st DB From Migration 0 3 -index migrations migrations.schema_migrations:schema_migrations_pkey missing in 1st DB From Migration 0 3 -data type migrations migrations.schema_migrations missing in 1st DB From Migration 0 3 -data type migrations migrations._schema_migrations missing in 1st DB From Migration 0 3 -table migrations migrations.schema_migrations missing in 1st DB From Migration 0 3 -function config migrations migrations.mark_migration_applied(character varying) missing in 1st DB From Migration 0 3 -function config migrations migrations.mark_migration_rolled_back(character varying) missing in 1st DB From Migration 0 3 -data type owner migrations migrations.schema_migrations missing in 1st DB From Migration 0 4 -data type owner migrations migrations._schema_migrations missing in 1st DB From Migration 0 4 diff --git a/.github/scripts/explanations_merlin_down b/.github/scripts/explanations_merlin_down deleted file mode 100644 index ae5219102a..0000000000 --- a/.github/scripts/explanations_merlin_down +++ /dev/null @@ -1,16 +0,0 @@ -function definition hdb_catalog hdb_catalog.hdb_catalog."notify_hasura_refreshResourceTypes_INSERT"() missing in 1st DB From_RefreshResourceTypes_Action 2 -function definition hdb_catalog hdb_catalog.hdb_catalog."notify_hasura_refreshResourceTypes_UPDATE"() missing in 1st DB From_RefreshResourceTypes_Action 2 -function permissions hdb_catalog hdb_catalog."notify_hasura_refreshResourceTypes_UPDATE"()-role:aerie missing in 1st DB From_RefreshResourceTypes_Action 2 -function permissions hdb_catalog hdb_catalog."notify_hasura_refreshResourceTypes_INSERT"()-role:aerie missing in 1st DB From_RefreshResourceTypes_Action 2 -function permissions hdb_catalog hdb_catalog."notify_hasura_refreshResourceTypes_UPDATE"()-role:PUBLIC missing in 1st DB From_RefreshResourceTypes_Action 2 -function permissions hdb_catalog hdb_catalog."notify_hasura_refreshResourceTypes_INSERT"()-role:PUBLIC missing in 1st DB From_RefreshResourceTypes_Action 2 -function owner hdb_catalog hdb_catalog."notify_hasura_refreshResourceTypes_UPDATE"() missing in 1st DB From_RefreshResourceTypes_Action 2 -function owner hdb_catalog hdb_catalog."notify_hasura_refreshResourceTypes_INSERT"() missing in 1st 
DB From_RefreshResourceTypes_Action 2 -function language hdb_catalog hdb_catalog.hdb_catalog."notify_hasura_refreshResourceTypes_INSERT"() missing in 1st DB From_RefreshResourceTypes_Action 2 -function language hdb_catalog hdb_catalog.hdb_catalog."notify_hasura_refreshResourceTypes_UPDATE"() missing in 1st DB From_RefreshResourceTypes_Action 2 -function security type hdb_catalog hdb_catalog.hdb_catalog."notify_hasura_refreshResourceTypes_INSERT"() missing in 1st DB From_RefreshResourceTypes_Action 2 -function security type hdb_catalog hdb_catalog.hdb_catalog."notify_hasura_refreshResourceTypes_UPDATE"() missing in 1st DB From_RefreshResourceTypes_Action 2 -trigger public public.mission_model."notify_hasura_refreshResourceTypes_INSERT"/"INSERT" missing in 1st DB From_RefreshResourceTypes_Action 2 -trigger public public.mission_model."notify_hasura_refreshResourceTypes_UPDATE"/"UPDATE" missing in 1st DB From_RefreshResourceTypes_Action 2 -function config hdb_catalog hdb_catalog."notify_hasura_refreshResourceTypes_UPDATE"() missing in 1st DB From_RefreshResourceTypes_Action 2 -function config hdb_catalog hdb_catalog."notify_hasura_refreshResourceTypes_INSERT"() missing in 1st DB From_RefreshResourceTypes_Action 2 diff --git a/.github/workflows/pgcmp.yml b/.github/workflows/pgcmp.yml index 1d2cf88740..3d41329565 100644 --- a/.github/workflows/pgcmp.yml +++ b/.github/workflows/pgcmp.yml @@ -4,16 +4,10 @@ on: pull_request: paths: - "deployment/hasura/migrations/**" - - "merlin-server/sql/**" - - "sequencing-server/sql/**" - - "scheduler-server/sql/**" - "deployment/postgres-init-db/sql/**" push: paths: - "deployment/hasura/migrations/**" - - "merlin-server/sql/**" - - "sequencing-server/sql/**" - - "scheduler-server/sql/**" - "deployment/postgres-init-db/sql/**" branches: - develop @@ -27,10 +21,10 @@ jobs: runs-on: ubuntu-latest environment: e2e-test steps: - - name: Checkout v1.0.1 + - name: Checkout v2.8.0 uses: actions/checkout@v4 with: - ref: "v1.0.1" + ref: "v2.8.0" - name: Clone PGCMP uses: actions/checkout@v4 with: @@ -46,12 +40,6 @@ jobs: sudo apt-get install --yes postgresql-client - name: Setup Hasura CLI run: sudo curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | bash - - name: Validate Gradle Wrapper - uses: gradle/wrapper-validation-action@v2 - - name: Distribute SQL and Assemble Java - uses: gradle/actions/setup-gradle@v3 - with: - arguments: distributeSQL - name: Start Postgres run: | docker compose up -d postgres hasura @@ -67,22 +55,23 @@ jobs: - name: Sleep for 1 Minute run: sleep 60s shell: bash - - name: Dump v1.0.1 Database + - name: Dump v2.8.0 Database env: AERIE_USERNAME: "${{secrets.AERIE_USERNAME}}" AERIE_PASSWORD: "${{secrets.AERIE_PASSWORD}}" run: | - mkdir pgdumpv1_0_1 - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_merlin PGCMPOUTPUT=./pgdumpv1_0_1/AerieMerlinV1_0_1 PGCLABEL=AerieMerlinV1_0_1 PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_scheduler PGCMPOUTPUT=./pgdumpv1_0_1/AerieSchedulerV1_0_1 PGCLABEL=AerieSchedulerV1_0_1 PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_sequencing PGCMPOUTPUT=./pgdumpv1_0_1/AerieSequencingV1_0_1 PGCLABEL=AerieSequencingV1_0_1 PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_ui PGCMPOUTPUT=./pgdumpv1_0_1/AerieUIV1_0_1 PGCLABEL=AerieUIV1_0_1 PGBINDIR=/usr/bin 
./pgcmp/pgcmp-dump + mkdir pgdumpV2_8_0 + PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie \ + PGCMPOUTPUT=./pgdumpV2_8_0/AerieV2_8_0 \ + PGCLABEL=AerieV2_8_0 \ + PGBINDIR=/usr/bin \ + ./pgcmp/pgcmp-dump shell: bash - name: Share Database Dump uses: actions/upload-artifact@v4 with: - name: v1_0_1-db-dump - path: pgdumpv1_0_1 + name: v2_8_0-db-dump + path: pgdumpV2_8_0 retention-days: 1 - name: Checkout Latest uses: actions/checkout@v4 @@ -115,21 +104,17 @@ jobs: env: AERIE_USERNAME: "${{secrets.AERIE_USERNAME}}" AERIE_PASSWORD: "${{secrets.AERIE_PASSWORD}}" - - name: Clone PGCMP - uses: actions/checkout@v4 - with: - repository: cbbrowne/pgcmp - path: pgcmp - name: Dump Databases env: AERIE_USERNAME: "${{secrets.AERIE_USERNAME}}" AERIE_PASSWORD: "${{secrets.AERIE_PASSWORD}}" run: | mkdir pgdumpmigrated - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_merlin PGCMPOUTPUT=./pgdumpmigrated/AerieMerlinMigrated PGCLABEL=AerieMerlinMigrated PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_scheduler PGCMPOUTPUT=./pgdumpmigrated/AerieSchedulerMigrated PGCLABEL=AerieSchedulerMigrated PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_sequencing PGCMPOUTPUT=./pgdumpmigrated/AerieSequencingMigrated PGCLABEL=AerieSequencingMigrated PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_ui PGCMPOUTPUT=./pgdumpmigrated/AerieUIMigrated PGCLABEL=AerieUIMigrated PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump + PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie \ + PGCMPOUTPUT=./pgdumpmigrated/AerieMigratedUp \ + PGCLABEL=AerieMigratedUp \ + PGBINDIR=/usr/bin \ + ./pgcmp/pgcmp-dump shell: bash - name: Share Database Dump uses: actions/upload-artifact@v4 @@ -158,12 +143,6 @@ jobs: run: | sudo apt-get update sudo apt-get install --yes postgresql-client - - name: Validate Gradle Wrapper - uses: gradle/wrapper-validation-action@v2 - - name: Distribute SQL - uses: gradle/actions/setup-gradle@v3 - with: - arguments: distributeSQL - name: Setup Python uses: actions/setup-python@v5 with: @@ -195,17 +174,18 @@ jobs: AERIE_USERNAME: "${{secrets.AERIE_USERNAME}}" AERIE_PASSWORD: "${{secrets.AERIE_PASSWORD}}" run: | - mkdir pgdumpraw - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_merlin PGCMPOUTPUT=./pgdumpraw/AerieMerlinRaw PGCLABEL=AerieMerlinRaw PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_scheduler PGCMPOUTPUT=./pgdumpraw/AerieSchedulerRaw PGCLABEL=AerieSchedulerRaw PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_sequencing PGCMPOUTPUT=./pgdumpraw/AerieSequencingRaw PGCLABEL=AerieSequencingRaw PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_ui PGCMPOUTPUT=./pgdumpraw/AerieUIRaw PGCLABEL=AerieUIRaw PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump + mkdir pgdumpcurrent + PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie \ + PGCMPOUTPUT=./pgdumpcurrent/AerieCurrent \ + PGCLABEL=AerieCurrent \ + PGBINDIR=/usr/bin \ + ./pgcmp/pgcmp-dump shell: bash - name: Share Database Dump uses: actions/upload-artifact@v4 with: - name: raw-sql-db-dump - path: pgdumpraw + name: current-sql-db-dump + path: 
pgdumpcurrent retention-days: 1 - name: Migrate Latest to Base run: | @@ -226,10 +206,11 @@ jobs: AERIE_PASSWORD: "${{secrets.AERIE_PASSWORD}}" run: | mkdir pgdumpmigrateddown - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_merlin PGCMPOUTPUT=./pgdumpmigrateddown/AerieMerlinMigratedDown PGCLABEL=AerieMerlinMigratedDown PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_scheduler PGCMPOUTPUT=./pgdumpmigrateddown/AerieSchedulerMigratedDown PGCLABEL=AerieSchedulerMigratedDown PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_sequencing PGCMPOUTPUT=./pgdumpmigrateddown/AerieSequencingMigratedDown PGCLABEL=AerieSequencingMigratedDown PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump - PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie_ui PGCMPOUTPUT=./pgdumpmigrateddown/AerieUIMigratedDown PGCLABEL=AerieUIMigratedDown PGBINDIR=/usr/bin ./pgcmp/pgcmp-dump + PGURI=postgres://"${AERIE_USERNAME}":"${AERIE_PASSWORD}"@localhost:5432/aerie \ + PGCMPOUTPUT=./pgdumpmigrateddown/AerieMigratedDown \ + PGCLABEL=AerieMigratedDown \ + PGBINDIR=/usr/bin \ + ./pgcmp/pgcmp-dump shell: bash - name: Share Database Dump uses: actions/upload-artifact@v4 @@ -263,7 +244,7 @@ jobs: sudo apt-get update sudo apt-get install --yes postgresql-client - name: Start Postgres - run: docker run -d -p 5432:5432 -e POSTGRES_HOST_AUTH_METHOD=trust --name=postgres postgres:14.1 + run: docker run -d -p 5432:5432 -e POSTGRES_HOST_AUTH_METHOD=trust --name=postgres postgres:14.8 - name: Sleep for 5 Seconds run: sleep 5s shell: bash @@ -274,8 +255,8 @@ jobs: path: pgcmp/pgdumpmigrated - uses: actions/download-artifact@v4 with: - name: raw-sql-db-dump - path: pgcmp/pgdumpraw + name: current-sql-db-dump + path: pgcmp/pgdumpcurrent - name: Compare Databases id: dbcmp run: | @@ -323,8 +304,8 @@ jobs: - name: Download Shared Dumps uses: actions/download-artifact@v4 with: - name: v1_0_1-db-dump - path: pgcmp/pgdumpv1_0_1 + name: v2_8_0-db-dump + path: pgcmp/pgdumpV2_8_0 - uses: actions/download-artifact@v4 with: name: migrated-down-db-dump @@ -333,8 +314,6 @@ jobs: id: dbcmp run: | cp ./.github/scripts/compareDatabasesDown.sh pgcmp/compareDatabases.sh - cp ./.github/scripts/explanations pgcmp/explanations - cat ./.github/scripts/explanations ./.github/scripts/explanations_merlin_down > pgcmp/explanations_merlin cd pgcmp ./compareDatabases.sh shell: bash From 162141c9212680333f922da9ec5e2dfaf80f1898 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Thu, 21 Mar 2024 14:21:52 -0700 Subject: [PATCH 33/36] Update Aerie Migration Script --- deployment/aerie_db_migration.py | 251 ++++++++++++------------------- 1 file changed, 94 insertions(+), 157 deletions(-) diff --git a/deployment/aerie_db_migration.py b/deployment/aerie_db_migration.py index 3fef2414ab..9341e7aada 100755 --- a/deployment/aerie_db_migration.py +++ b/deployment/aerie_db_migration.py @@ -21,19 +21,14 @@ def __init__(self, db_name): def add_migration_step(self, _migration_step): self.steps = sorted(_migration_step, key=lambda x:int(x.split('_')[0])) -def step_by_step_migration(database, apply): - clear_screen() - print('#' * len(database.db_name)) - print(database.db_name) - print('#' * len(database.db_name)) - +def step_by_step_migration(db_migration, apply): display_string = "\n\033[4mMIGRATION STEPS AVAILABLE:\033[0m\n" - _output = subprocess.getoutput(f'hasura migrate status --database-name 
{database.db_name}').split("\n") + _output = subprocess.getoutput(f'hasura migrate status --database-name {db_migration.db_name}').split("\n") del _output[0:3] display_string += _output[0] + "\n" # Filter out the steps that can't be applied given the current mode and currently applied steps - available_steps = database.steps.copy() + available_steps = db_migration.steps.copy() for i in range(1, len(_output)): split = list(filter(None, _output[i].split(" "))) @@ -44,13 +39,13 @@ def step_by_step_migration(database, apply): return if apply: - if (len(split) == 4) or (not os.path.isfile(f'migrations/{database.db_name}/{split[0]}_{split[1]}/up.sql')): + if (len(split) == 4) or (not os.path.isfile(f'migrations/{db_migration.db_name}/{split[0]}_{split[1]}/up.sql')): available_steps.remove(f'{split[0]}_{split[1]}') else: display_string += _output[i] + "\n" else: if (len(split) == 5 and "Not Present" == (split[3] + " " + split[4])) \ - or (not os.path.isfile(f'migrations/{database.db_name}/{split[0]}_{split[1]}/down.sql')): + or (not os.path.isfile(f'migrations/{db_migration.db_name}/{split[0]}_{split[1]}/down.sql')): available_steps.remove(f'{split[0]}_{split[1]}') else: display_string += _output[i] + "\n" @@ -65,27 +60,27 @@ def step_by_step_migration(database, apply): timestamp = step.split("_")[0] if apply: - os.system(f'hasura migrate apply --version {timestamp} --database-name {database.db_name} --dry-run --log-level WARN') + os.system(f'hasura migrate apply --version {timestamp} --database-name {db_migration.db_name} --dry-run --log-level WARN') else: - os.system(f'hasura migrate apply --version {timestamp} --type down --database-name {database.db_name} --dry-run --log-level WARN') + os.system(f'hasura migrate apply --version {timestamp} --type down --database-name {db_migration.db_name} --dry-run --log-level WARN') print() _value = '' - while _value != "y" and _value != "n" and _value != "q": + while _value != "y" and _value != "n" and _value != "q" and _value != "quit": if apply: - _value = input(f'Apply {step}? (y/n): ').lower() + _value = input(f'Apply {step}? (y/n/\033[4mq\033[0muit): ').lower() else: - _value = input(f'Revert {step}? (y/n): ').lower() + _value = input(f'Revert {step}? 
(y/n/\033[4mq\033[0muit): ').lower() - if _value == "q": + if _value == "q" or _value == "quit": sys.exit() if _value == "y": if apply: print('Applying...') - exit_code = os.system(f'hasura migrate apply --version {timestamp} --type up --database-name {database.db_name}') + exit_code = os.system(f'hasura migrate apply --version {timestamp} --type up --database-name {db_migration.db_name}') else: print('Reverting...') - exit_code = os.system(f'hasura migrate apply --version {timestamp} --type down --database-name {database.db_name}') + exit_code = os.system(f'hasura migrate apply --version {timestamp} --type down --database-name {db_migration.db_name}') os.system('hasura metadata reload') print() if exit_code != 0: @@ -94,59 +89,48 @@ def step_by_step_migration(database, apply): return input("Press Enter to continue...") -def bulk_migration(migration_db, apply): - clear_screen() +def bulk_migration(db_migration, apply): # Migrate each database exit_with = 0 - for database in migration_db: - exit_with = exit_with << 1 - print('#' * len(database.db_name)) - print(database.db_name) - print('#' * len(database.db_name)) - - if apply: - os.system(f'hasura migrate apply --database-name {database.db_name} --dry-run --log-level WARN') - exit_code = os.system(f'hasura migrate apply --database-name {database.db_name}') - if exit_code != 0: - exit_with += 1 - else: - os.system(f'hasura migrate apply --goto 0 --database-name {database.db_name} --dry-run --log-level WARN') - exit_code = os.system(f'hasura migrate apply --goto 0 --database-name {database.db_name}') - if exit_code != 0: - exit_with += 1 + if apply: + os.system(f'hasura migrate apply --database-name {db_migration.db_name} --dry-run --log-level WARN') + exit_code = os.system(f'hasura migrate apply --database-name {db_migration.db_name}') + if exit_code != 0: + exit_with = 1 + else: + os.system(f'hasura migrate apply --goto 1 --database-name {db_migration.db_name} --dry-run --log-level WARN &&' + f'hasura migrate apply --down 1 --database-name {db_migration.db_name} --dry-run --log-level WARN') + exit_code = os.system(f'hasura migrate apply --goto 1 --database-name {db_migration.db_name} &&' + f'hasura migrate apply --down 1 --database-name {db_migration.db_name}') + if exit_code != 0: + exit_with = 1 - os.system('hasura metadata reload') + os.system('hasura metadata reload') # Show the result after the migration - print(f'\n' - f'\n###############' + print(f'\n###############' f'\nDatabase Status' f'\n###############') - for database in migration_db: - os.system(f'hasura migrate status --database-name {database.db_name}') + _output = subprocess.getoutput(f'hasura migrate status --database-name {db_migration.db_name}').split("\n") + del _output[0:3] + print("\n".join(_output)) exit(exit_with) -def mark_current_version(dbs_to_apply, username, password, netloc): - for db in dbs_to_apply: - # Convert db.name to the actual format of the db name: aerie_dbSuffix - name = "aerie_"+db.db_name.removeprefix("Aerie").lower() - connectionString = "postgres://"+username+":"+password+"@"+netloc+":5432/"+name - current_schema = 0 - - # Connect to DB - with psycopg.connect(connectionString) as connection: - # Open a cursor to perform database operations - with connection.cursor() as cursor: - # Get the current schema version - try: - cursor.execute("SELECT migration_id FROM migrations.schema_migrations ORDER BY migration_id::int DESC LIMIT 1") - except psycopg.errors.UndefinedTable: - return - current_schema = int(cursor.fetchone()[0]) - - # Mark everything 
up to that as applied - for i in range(0, current_schema+1): - os.system('hasura migrate apply --skip-execution --version '+str(i)+' --database-name '+db.db_name+' >/dev/null 2>&1') +def mark_current_version(username, password, netloc): + # Convert db.name to the actual format of the db name: aerie_dbSuffix + connectionString = "postgres://"+username+":"+password+"@"+netloc+":5432/aerie" + + # Connect to DB + with psycopg.connect(connectionString) as connection: + # Open a cursor to perform database operations + with connection.cursor() as cursor: + # Get the current schema version + cursor.execute("SELECT migration_id FROM migrations.schema_migrations ORDER BY migration_id::int DESC LIMIT 1") + current_schema = int(cursor.fetchone()[0]) + + # Mark everything up to that as applied + for i in range(0, current_schema+1): + os.system('hasura migrate apply --skip-execution --version '+str(i)+' --database-name aerie >/dev/null 2>&1') def main(): # Create a cli parser @@ -157,33 +141,26 @@ def main(): # Add arguments exclusive_args.add_argument( '-a', '--apply', - help="apply migration steps to specified databases", + help="apply migration steps to the database", action='store_true') exclusive_args.add_argument( '-r', '--revert', - help="revert migration steps to specified databases", + help="revert migration steps to the databases", action='store_true') parser.add_argument( '--all', - help="apply[revert] ALL unapplied[applied] migration steps to all databases if none are provided", + help="apply[revert] ALL unapplied[applied] migration steps to the database", action='store_true') - parser.add_argument( - '-db', '--db-names', - help="list of databases to migrate. migrates all if unspecified", - nargs='+', - default=[]) - parser.add_argument( '-p', '--hasura-path', help="the path to the directory containing the config.yaml for Aerie. defaults to ./hasura") parser.add_argument( '-e', '--env-path', - help="the path to the .env file used to deploy aerie. must define AERIE_USERNAME and AERIE_PASSWORD. defaults to .env", - default='.env') + help="the path to the .env file used to deploy aerie. 
must define AERIE_USERNAME and AERIE_PASSWORD") parser.add_argument( '-n', '--network-location', @@ -196,46 +173,25 @@ def main(): HASURA_PATH = "./hasura" if args.hasura_path: HASURA_PATH = args.hasura_path - MIGRATION_PATH = HASURA_PATH+"/migrations/" - - # find all migration folders for each Aerie database - migration_db = [] - to_migrate_set = set(args.db_names) - dbs_specified = True - if not to_migrate_set: - dbs_specified = False + MIGRATION_PATH = HASURA_PATH+"/migrations/Aerie" + # find all migration folders for the database + migration = DB_Migration("Aerie") try: - os.listdir(MIGRATION_PATH) + for root,dirs,files in os.walk(MIGRATION_PATH): + if dirs: + migration.add_migration_step(dirs) except FileNotFoundError as fne: print("\033[91mError\033[0m:"+ str(fne).split("]")[1]) sys.exit(1) - for db in os.listdir(MIGRATION_PATH): - # ignore hidden folders - if db.startswith('.'): - continue - # Only process if the folder is on the list of databases or if we don't have a list of databases - if not dbs_specified or db in to_migrate_set: - migration = DB_Migration(db) - for root,dirs,files in os.walk(MIGRATION_PATH+db): - if dirs: - migration.add_migration_step(dirs) - if len(migration.steps) > 0: - # If reverting, reverse the list - if args.revert: - migration.steps.reverse() - migration_db.append(migration) - to_migrate_set.discard(db) - - if to_migrate_set: - print("\033[91mError\033[0m: The following Database(s) do not contain migrations:\n\t" - +"\n\t".join(to_migrate_set)) - sys.exit(1) - - if not migration_db: + if len(migration.steps) <= 0: print("\033[91mError\033[0m: No database migrations found.") sys.exit(1) + # If reverting, reverse the list + if args.revert: + migration.steps.reverse() + # Check that hasura cli is installed if not shutil.which('hasura'): sys.exit(f'Hasura CLI is not installed. 
Exiting...') @@ -243,67 +199,48 @@ def main(): os.system('hasura version') # Get the Username/Password - username = "" - password = "" - usernameFound = False - passwordFound = False - with open(args.env_path) as envFile: - for line in envFile: - if usernameFound and passwordFound: - break - line = line.strip() - if line.startswith("AERIE_USERNAME"): - username = line.removeprefix("AERIE_USERNAME=") - usernameFound = True - continue - if line.startswith("AERIE_PASSWORD"): - password = line.removeprefix("AERIE_PASSWORD=") - passwordFound = True - continue - if not usernameFound: - print("\033[91mError\033[0m: AERIE_USERNAME environment variable is not defined in "+args.env_path+".") - sys.exit(1) - if not passwordFound: - print("\033[91mError\033[0m: AERIE_PASSWORD environment variable is not defined in "+args.env_path+".") - sys.exit(1) + username = os.environ.get('AERIE_USERNAME', "") + password = os.environ.get('AERIE_PASSWORD', "") + + if args.env_path: + usernameFound = False + passwordFound = False + with open(args.env_path) as envFile: + for line in envFile: + if usernameFound and passwordFound: + break + line = line.strip() + if line.startswith("AERIE_USERNAME"): + username = line.removeprefix("AERIE_USERNAME=") + usernameFound = True + continue + if line.startswith("AERIE_PASSWORD"): + password = line.removeprefix("AERIE_PASSWORD=") + passwordFound = True + continue + if not usernameFound: + print("\033[91mError\033[0m: AERIE_USERNAME environment variable is not defined in "+args.env_path+".") + sys.exit(1) + if not passwordFound: + print("\033[91mError\033[0m: AERIE_PASSWORD environment variable is not defined in "+args.env_path+".") + sys.exit(1) # Navigate to the hasura directory os.chdir(HASURA_PATH) # Mark all migrations previously applied to the databases to be updated as such - mark_current_version(migration_db, username, password, args.network_location) + mark_current_version(username, password, args.network_location) + clear_screen() + print(f'\n###############################' + f'\nAERIE DATABASE MIGRATION HELPER' + f'\n###############################') # Enter step-by-step mode if not otherwise specified if not args.all: - while True: - clear_screen() - print(f'\n###############################' - f'\nAERIE DATABASE MIGRATION HELPER' - f'\n###############################') - - print(f'\n0) \033[4mQ\033[0muit the migration helper') - for migration_number in range(0,len(migration_db)): - print(f'\n{migration_number+1}) Database: {migration_db[migration_number].db_name}') - output = subprocess.getoutput(f'hasura migrate status --database-name {migration_db[migration_number].db_name}').split("\n") - del output[0:3] - print("\n".join(output)) - - value = -1 - while value < 0 or value > len(migration_db): - _input = input(f"\nSelect a database to migrate (0-{len(migration_db)}): ").lower() - if _input == 'q' or _input == '0': - sys.exit() - - try: - value = int(_input) - except ValueError: - value = -1 - - # Go step-by-step through the migrations available for the selected database - step_by_step_migration(migration_db[value-1], args.apply) + # Go step-by-step through the migrations available for the selected database + step_by_step_migration(migration, args.apply) else: - bulk_migration(migration_db, args.apply) - print() + bulk_migration(migration, args.apply) if __name__ == "__main__": main() From 3ca416ae6f6bcabad8a68a4b5910f0b904f971c3 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Tue, 26 Mar 2024 10:01:18 -0700 Subject: [PATCH 34/36] Add new envvars to Workflows 
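These workflows stand up a full Aerie deployment, so the per-service database credentials introduced for the merged database (gateway, merlin, scheduler, and sequencing users) now have to be supplied as repository secrets and exported as environment variables alongside the existing AERIE_USERNAME/AERIE_PASSWORD and Postgres credentials. In CI the values come from ${{secrets.*}} as shown in the hunks below; as a rough illustration only (placeholder values, not taken from this patch), the matching entries in a local .env consumed by the deployment and merge scripts might look like:

    GATEWAY_USERNAME=gateway
    GATEWAY_PASSWORD=<placeholder>
    MERLIN_USERNAME=merlin
    MERLIN_PASSWORD=<placeholder>
    SCHEDULER_USERNAME=scheduler
    SCHEDULER_PASSWORD=<placeholder>
    SEQUENCING_USERNAME=sequencing
    SEQUENCING_PASSWORD=<placeholder>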
--- .github/workflows/load-test.yml | 8 ++++++++ .github/workflows/pgcmp.yml | 24 ++++++++++++++++++++++++ .github/workflows/test.yml | 8 ++++++++ 3 files changed, 40 insertions(+) diff --git a/.github/workflows/load-test.yml b/.github/workflows/load-test.yml index d6ac773bcc..8db3ddf3ac 100644 --- a/.github/workflows/load-test.yml +++ b/.github/workflows/load-test.yml @@ -10,6 +10,14 @@ env: HASURA_GRAPHQL_JWT_SECRET: '{ "type": "HS256", "key": "oursupersecretsupersecurekey1234567890" }' POSTGRES_USER: "postgres" POSTGRES_PASSWORD: "postgres" + GATEWAY_USERNAME: "${{secrets.GATEWAY_USERNAME}}" + GATEWAY_PASSWORD: "${{secrets.GATEWAY_PASSWORD}}" + MERLIN_USERNAME: "${{secrets.MERLIN_USERNAME}}" + MERLIN_PASSWORD: "${{secrets.MERLIN_PASSWORD}}" + SCHEDULER_USERNAME: "${{secrets.SCHEDULER_USERNAME}}" + SCHEDULER_PASSWORD: "${{secrets.SCHEDULER_PASSWORD}}" + SEQUENCING_USERNAME: "${{secrets.SEQUENCING_USERNAME}}" + SEQUENCING_PASSWORD: "${{secrets.SEQUENCING_PASSWORD}}" jobs: load-test: diff --git a/.github/workflows/pgcmp.yml b/.github/workflows/pgcmp.yml index 3d41329565..3a37372ccf 100644 --- a/.github/workflows/pgcmp.yml +++ b/.github/workflows/pgcmp.yml @@ -50,6 +50,14 @@ jobs: AERIE_PASSWORD: "${{secrets.AERIE_PASSWORD}}" POSTGRES_USER: "${{secrets.POSTGRES_USER}}" POSTGRES_PASSWORD: "${{secrets.POSTGRES_PASSWORD}}" + GATEWAY_USERNAME: "${{secrets.GATEWAY_USERNAME}}" + GATEWAY_PASSWORD: "${{secrets.GATEWAY_PASSWORD}}" + MERLIN_USERNAME: "${{secrets.MERLIN_USERNAME}}" + MERLIN_PASSWORD: "${{secrets.MERLIN_PASSWORD}}" + SCHEDULER_USERNAME: "${{secrets.SCHEDULER_USERNAME}}" + SCHEDULER_PASSWORD: "${{secrets.SCHEDULER_PASSWORD}}" + SEQUENCING_USERNAME: "${{secrets.SEQUENCING_USERNAME}}" + SEQUENCING_PASSWORD: "${{secrets.SEQUENCING_PASSWORD}}" HASURA_GRAPHQL_ADMIN_SECRET: "${{secrets.HASURA_GRAPHQL_ADMIN_SECRET}}" HASURA_GRAPHQL_JWT_SECRET: "${{secrets.HASURA_GRAPHQL_JWT_SECRET}}" - name: Sleep for 1 Minute @@ -86,6 +94,14 @@ jobs: AERIE_PASSWORD: "${{secrets.AERIE_PASSWORD}}" POSTGRES_USER: "${{secrets.POSTGRES_USER}}" POSTGRES_PASSWORD: "${{secrets.POSTGRES_PASSWORD}}" + GATEWAY_USERNAME: "${{secrets.GATEWAY_USERNAME}}" + GATEWAY_PASSWORD: "${{secrets.GATEWAY_PASSWORD}}" + MERLIN_USERNAME: "${{secrets.MERLIN_USERNAME}}" + MERLIN_PASSWORD: "${{secrets.MERLIN_PASSWORD}}" + SCHEDULER_USERNAME: "${{secrets.SCHEDULER_USERNAME}}" + SCHEDULER_PASSWORD: "${{secrets.SCHEDULER_PASSWORD}}" + SEQUENCING_USERNAME: "${{secrets.SEQUENCING_USERNAME}}" + SEQUENCING_PASSWORD: "${{secrets.SEQUENCING_PASSWORD}}" HASURA_GRAPHQL_ADMIN_SECRET: "${{secrets.HASURA_GRAPHQL_ADMIN_SECRET}}" HASURA_GRAPHQL_JWT_SECRET: "${{secrets.HASURA_GRAPHQL_JWT_SECRET}}" - name: Sleep for 30 seconds @@ -159,6 +175,14 @@ jobs: AERIE_PASSWORD: "${{secrets.AERIE_PASSWORD}}" POSTGRES_USER: "${{secrets.POSTGRES_USER}}" POSTGRES_PASSWORD: "${{secrets.POSTGRES_PASSWORD}}" + GATEWAY_USERNAME: "${{secrets.GATEWAY_USERNAME}}" + GATEWAY_PASSWORD: "${{secrets.GATEWAY_PASSWORD}}" + MERLIN_USERNAME: "${{secrets.MERLIN_USERNAME}}" + MERLIN_PASSWORD: "${{secrets.MERLIN_PASSWORD}}" + SCHEDULER_USERNAME: "${{secrets.SCHEDULER_USERNAME}}" + SCHEDULER_PASSWORD: "${{secrets.SCHEDULER_PASSWORD}}" + SEQUENCING_USERNAME: "${{secrets.SEQUENCING_USERNAME}}" + SEQUENCING_PASSWORD: "${{secrets.SEQUENCING_PASSWORD}}" HASURA_GRAPHQL_ADMIN_SECRET: "${{secrets.HASURA_GRAPHQL_ADMIN_SECRET}}" HASURA_GRAPHQL_JWT_SECRET: "${{secrets.HASURA_GRAPHQL_JWT_SECRET}}" - name: Sleep for 1 Minute diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 
277f96079b..82a0640073 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -19,6 +19,14 @@ env: HASURA_GRAPHQL_JWT_SECRET: "${{secrets.HASURA_GRAPHQL_JWT_SECRET}}" POSTGRES_USER: "${{secrets.POSTGRES_USER}}" POSTGRES_PASSWORD: "${{secrets.POSTGRES_PASSWORD}}" + GATEWAY_USERNAME: "${{secrets.GATEWAY_USERNAME}}" + GATEWAY_PASSWORD: "${{secrets.GATEWAY_PASSWORD}}" + MERLIN_USERNAME: "${{secrets.MERLIN_USERNAME}}" + MERLIN_PASSWORD: "${{secrets.MERLIN_PASSWORD}}" + SCHEDULER_USERNAME: "${{secrets.SCHEDULER_USERNAME}}" + SCHEDULER_PASSWORD: "${{secrets.SCHEDULER_PASSWORD}}" + SEQUENCING_USERNAME: "${{secrets.SEQUENCING_USERNAME}}" + SEQUENCING_PASSWORD: "${{secrets.SEQUENCING_PASSWORD}}" jobs: unit-test: From 3c188527bffe96920c4f3c73dc47a4cf120df680 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Tue, 26 Mar 2024 11:42:10 -0700 Subject: [PATCH 35/36] Merge Aerie DB Script --- deployment/aerie_db_migration.py | 10 +- .../aerie_db_migration_preMerge.py | 309 +++ deployment/merge_aerie_db/drop_old_dbs.sh | 54 + deployment/merge_aerie_db/merge_db.sh | 155 ++ .../merge_db/database_permissions.sql | 108 + .../merge_db/migrate_hasura_functions.sql | 736 +++++++ .../merge_db/migrate_merlin.sql | 161 ++ .../merge_db/migrate_merlin_functions.sql | 1937 +++++++++++++++++ .../migrate_permissions_functions.sql | 438 ++++ .../merge_db/migrate_scheduler.sql | 118 + .../merge_db/migrate_scheduler_functions.sql | 323 +++ .../merge_db/migrate_sequencing.sql | 119 + .../merge_db/migrate_tags_functions.sql | 85 + .../merge_aerie_db/merge_db/migrate_ui.sql | 42 + 14 files changed, 4589 insertions(+), 6 deletions(-) create mode 100755 deployment/merge_aerie_db/aerie_db_migration_preMerge.py create mode 100644 deployment/merge_aerie_db/drop_old_dbs.sh create mode 100644 deployment/merge_aerie_db/merge_db.sh create mode 100644 deployment/merge_aerie_db/merge_db/database_permissions.sql create mode 100644 deployment/merge_aerie_db/merge_db/migrate_hasura_functions.sql create mode 100644 deployment/merge_aerie_db/merge_db/migrate_merlin.sql create mode 100644 deployment/merge_aerie_db/merge_db/migrate_merlin_functions.sql create mode 100644 deployment/merge_aerie_db/merge_db/migrate_permissions_functions.sql create mode 100644 deployment/merge_aerie_db/merge_db/migrate_scheduler.sql create mode 100644 deployment/merge_aerie_db/merge_db/migrate_scheduler_functions.sql create mode 100644 deployment/merge_aerie_db/merge_db/migrate_sequencing.sql create mode 100644 deployment/merge_aerie_db/merge_db/migrate_tags_functions.sql create mode 100644 deployment/merge_aerie_db/merge_db/migrate_ui.sql diff --git a/deployment/aerie_db_migration.py b/deployment/aerie_db_migration.py index 9341e7aada..28d05cd206 100755 --- a/deployment/aerie_db_migration.py +++ b/deployment/aerie_db_migration.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -"""Migrate an AERIE Database""" +"""Migrate the Aerie Database""" import os import argparse @@ -90,7 +90,7 @@ def step_by_step_migration(db_migration, apply): input("Press Enter to continue...") def bulk_migration(db_migration, apply): - # Migrate each database + # Migrate the database exit_with = 0 if apply: os.system(f'hasura migrate apply --database-name {db_migration.db_name} --dry-run --log-level WARN') @@ -117,10 +117,8 @@ def bulk_migration(db_migration, apply): exit(exit_with) def mark_current_version(username, password, netloc): - # Convert db.name to the actual format of the db name: aerie_dbSuffix - connectionString = 
"postgres://"+username+":"+password+"@"+netloc+":5432/aerie" - # Connect to DB + connectionString = "postgres://"+username+":"+password+"@"+netloc+":5432/aerie" with psycopg.connect(connectionString) as connection: # Open a cursor to perform database operations with connection.cursor() as cursor: @@ -175,7 +173,7 @@ def main(): HASURA_PATH = args.hasura_path MIGRATION_PATH = HASURA_PATH+"/migrations/Aerie" - # find all migration folders for the database + # Find all migration folders for the database migration = DB_Migration("Aerie") try: for root,dirs,files in os.walk(MIGRATION_PATH): diff --git a/deployment/merge_aerie_db/aerie_db_migration_preMerge.py b/deployment/merge_aerie_db/aerie_db_migration_preMerge.py new file mode 100755 index 0000000000..dcac89fb49 --- /dev/null +++ b/deployment/merge_aerie_db/aerie_db_migration_preMerge.py @@ -0,0 +1,309 @@ +#!/usr/bin/env python3 +"""Migrate an AERIE Database""" + +import os +import argparse +import sys +import shutil +import subprocess +import psycopg + +def clear_screen(): + os.system('cls' if os.name == 'nt' else 'clear') + +# internal class +class DB_Migration: + steps = [] + db_name = '' + def __init__(self, db_name): + self.db_name = db_name + + def add_migration_step(self, _migration_step): + self.steps = sorted(_migration_step, key=lambda x:int(x.split('_')[0])) + +def step_by_step_migration(database, apply): + clear_screen() + print('#' * len(database.db_name)) + print(database.db_name) + print('#' * len(database.db_name)) + + display_string = "\n\033[4mMIGRATION STEPS AVAILABLE:\033[0m\n" + _output = subprocess.getoutput(f'hasura migrate status --database-name {database.db_name}').split("\n") + del _output[0:3] + display_string += _output[0] + "\n" + + # Filter out the steps that can't be applied given the current mode and currently applied steps + available_steps = database.steps.copy() + for i in range(1, len(_output)): + split = list(filter(None, _output[i].split(" "))) + + if len(split) >= 5 and "Not Present" == (split[2]+" "+split[3]): + print("\n\033[91mError\033[0m: Migration files exist on server that do not exist on this machine. " + "Synchronize files and try again.\n") + input("Press Enter to continue...") + return + + if apply: + if (len(split) == 4) or (not os.path.isfile(f'migrations/{database.db_name}/{split[0]}_{split[1]}/up.sql')): + available_steps.remove(f'{split[0]}_{split[1]}') + else: + display_string += _output[i] + "\n" + else: + if (len(split) == 5 and "Not Present" == (split[3] + " " + split[4])) \ + or (not os.path.isfile(f'migrations/{database.db_name}/{split[0]}_{split[1]}/down.sql')): + available_steps.remove(f'{split[0]}_{split[1]}') + else: + display_string += _output[i] + "\n" + + if available_steps: + print(display_string) + else: + print("\nNO MIGRATION STEPS AVAILABLE\n") + + for step in available_steps: + print("\033[4mCURRENT STEP:\033[0m\n") + timestamp = step.split("_")[0] + + if apply: + os.system(f'hasura migrate apply --version {timestamp} --database-name {database.db_name} --dry-run --log-level WARN') + else: + os.system(f'hasura migrate apply --version {timestamp} --type down --database-name {database.db_name} --dry-run --log-level WARN') + + print() + _value = '' + while _value != "y" and _value != "n" and _value != "q": + if apply: + _value = input(f'Apply {step}? (y/n): ').lower() + else: + _value = input(f'Revert {step}? 
(y/n): ').lower() + + if _value == "q": + sys.exit() + if _value == "y": + if apply: + print('Applying...') + exit_code = os.system(f'hasura migrate apply --version {timestamp} --type up --database-name {database.db_name}') + else: + print('Reverting...') + exit_code = os.system(f'hasura migrate apply --version {timestamp} --type down --database-name {database.db_name}') + os.system('hasura metadata reload') + print() + if exit_code != 0: + return + elif _value == "n": + return + input("Press Enter to continue...") + +def bulk_migration(migration_db, apply): + clear_screen() + # Migrate each database + exit_with = 0 + for database in migration_db: + exit_with = exit_with << 1 + print('#' * len(database.db_name)) + print(database.db_name) + print('#' * len(database.db_name)) + + if apply: + os.system(f'hasura migrate apply --database-name {database.db_name} --dry-run --log-level WARN') + exit_code = os.system(f'hasura migrate apply --database-name {database.db_name}') + if exit_code != 0: + exit_with += 1 + else: + os.system(f'hasura migrate apply --goto 0 --database-name {database.db_name} --dry-run --log-level WARN') + exit_code = os.system(f'hasura migrate apply --goto 0 --database-name {database.db_name}') + if exit_code != 0: + exit_with += 1 + + os.system('hasura metadata reload') + + # Show the result after the migration + print(f'\n' + f'\n###############' + f'\nDatabase Status' + f'\n###############') + for database in migration_db: + os.system(f'hasura migrate status --database-name {database.db_name}') + exit(exit_with) + +def mark_current_version(dbs_to_apply, username, password, netloc): + for db in dbs_to_apply: + # Convert db.name to the actual format of the db name: aerie_dbSuffix + name = "aerie_"+db.db_name.removeprefix("Aerie").lower() + connectionString = "postgres://"+username+":"+password+"@"+netloc+":5432/"+name + current_schema = 0 + + # Connect to DB + with psycopg.connect(connectionString) as connection: + # Open a cursor to perform database operations + with connection.cursor() as cursor: + # Get the current schema version + try: + cursor.execute("SELECT migration_id FROM migrations.schema_migrations ORDER BY migration_id::int DESC LIMIT 1") + except psycopg.errors.UndefinedTable: + return + current_schema = int(cursor.fetchone()[0]) + + # Mark everything up to that as applied + for i in range(0, current_schema+1): + os.system('hasura migrate apply --skip-execution --version '+str(i)+' --database-name '+db.db_name+' >/dev/null 2>&1') + +def main(): + # Create a cli parser + parser = argparse.ArgumentParser(description=__doc__) + # Applying and Reverting are exclusive arguments + exclusive_args = parser.add_mutually_exclusive_group(required='true') + + # Add arguments + exclusive_args.add_argument( + '-a', '--apply', + help="apply migration steps to specified databases", + action='store_true') + + exclusive_args.add_argument( + '-r', '--revert', + help="revert migration steps to specified databases", + action='store_true') + + parser.add_argument( + '--all', + help="apply[revert] ALL unapplied[applied] migration steps to all databases if none are provided", + action='store_true') + + parser.add_argument( + '-db', '--db-names', + help="list of databases to migrate. migrates all if unspecified", + nargs='+', + default=[]) + + parser.add_argument( + '-p', '--hasura-path', + help="the path to the directory containing the config.yaml for Aerie. defaults to ./hasura") + + parser.add_argument( + '-e', '--env-path', + help="the path to the .env file used to deploy aerie. 
must define AERIE_USERNAME and AERIE_PASSWORD. defaults to .env", + default='.env') + + parser.add_argument( + '-n', '--network-location', + help="the network location of the database. defaults to localhost", + default='localhost') + + # Generate arguments + args = parser.parse_args() + + HASURA_PATH = "../hasura" + if args.hasura_path: + HASURA_PATH = args.hasura_path + MIGRATION_PATH = HASURA_PATH+"/migrations/" + + # find all migration folders for each Aerie database + migration_db = [] + to_migrate_set = set(args.db_names) + dbs_specified = True + if not to_migrate_set: + dbs_specified = False + + try: + os.listdir(MIGRATION_PATH) + except FileNotFoundError as fne: + print("\033[91mError\033[0m:"+ str(fne).split("]")[1]) + sys.exit(1) + for db in os.listdir(MIGRATION_PATH): + # ignore hidden folders + if db.startswith('.'): + continue + # Only process if the folder is on the list of databases or if we don't have a list of databases + if not dbs_specified or db in to_migrate_set: + migration = DB_Migration(db) + for root,dirs,files in os.walk(MIGRATION_PATH+db): + if dirs: + migration.add_migration_step(dirs) + if len(migration.steps) > 0: + # If reverting, reverse the list + if args.revert: + migration.steps.reverse() + migration_db.append(migration) + to_migrate_set.discard(db) + + if to_migrate_set: + print("\033[91mError\033[0m: The following Database(s) do not contain migrations:\n\t" + +"\n\t".join(to_migrate_set)) + sys.exit(1) + + if not migration_db: + print("\033[91mError\033[0m: No database migrations found.") + sys.exit(1) + + # Check that hasura cli is installed + if not shutil.which('hasura'): + sys.exit(f'Hasura CLI is not installed. Exiting...') + else: + os.system('hasura version') + + # Get the Username/Password + username = "" + password = "" + usernameFound = False + passwordFound = False + with open(args.env_path) as envFile: + for line in envFile: + if usernameFound and passwordFound: + break + line = line.strip() + if line.startswith("AERIE_USERNAME"): + username = line.removeprefix("AERIE_USERNAME=") + usernameFound = True + continue + if line.startswith("AERIE_PASSWORD"): + password = line.removeprefix("AERIE_PASSWORD=") + passwordFound = True + continue + if not usernameFound: + print("\033[91mError\033[0m: AERIE_USERNAME environment variable is not defined in "+args.env_path+".") + sys.exit(1) + if not passwordFound: + print("\033[91mError\033[0m: AERIE_PASSWORD environment variable is not defined in "+args.env_path+".") + sys.exit(1) + + # Navigate to the hasura directory + os.chdir(HASURA_PATH) + + # Mark all migrations previously applied to the databases to be updated as such + mark_current_version(migration_db, username, password, args.network_location) + + # Enter step-by-step mode if not otherwise specified + if not args.all: + while True: + clear_screen() + print(f'\n###############################' + f'\nAERIE DATABASE MIGRATION HELPER' + f'\n###############################') + + print(f'\n0) \033[4mQ\033[0muit the migration helper') + for migration_number in range(0,len(migration_db)): + print(f'\n{migration_number+1}) Database: {migration_db[migration_number].db_name}') + output = subprocess.getoutput(f'hasura migrate status --database-name {migration_db[migration_number].db_name}').split("\n") + del output[0:3] + print("\n".join(output)) + + value = -1 + while value < 0 or value > len(migration_db): + _input = input(f"\nSelect a database to migrate (0-{len(migration_db)}): ").lower() + if _input == 'q' or _input == '0': + sys.exit() + + try: + value 
= int(_input) + except ValueError: + value = -1 + + # Go step-by-step through the migrations available for the selected database + step_by_step_migration(migration_db[value-1], args.apply) + else: + bulk_migration(migration_db, args.apply) + print() + +if __name__ == "__main__": + main() diff --git a/deployment/merge_aerie_db/drop_old_dbs.sh b/deployment/merge_aerie_db/drop_old_dbs.sh new file mode 100644 index 0000000000..8e9891db11 --- /dev/null +++ b/deployment/merge_aerie_db/drop_old_dbs.sh @@ -0,0 +1,54 @@ +#!/bin/bash + +######## +# Help # +######## +Help() +{ + # Display Help + echo "Drop the pre-v2.8.0 unmerged Aerie Databases" + echo + echo "usage: drop_old_dbs.sh [-h] [-e ENV_PATH] [-n NETWORK_LOCATION]" + echo "options:" + echo "-h print this message and exit" + echo "-e path to the .env file used to deploy v2.8.0 Aerie. defaults to ../.env" + echo "-n network location of the database. defaults to localhost" + echo +} + +################# +# Set variables # +################# + +EnvFile="../.env" +NetLoc="localhost" + +######################### +# Process input options # +######################### + +# Get the options +while getopts "he:n" option; do + case $option in + e) EnvFile=$OPTARG;; + n) NetLoc=$OPTARG;; + h | *) + Help + exit;; + esac +done + +################# +# Main program # +################# +source $EnvFile + +echo 'Dropping unmerged databases...' +PGPASSWORD="$POSTGRES_PASSWORD" \ +psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "postgres" --host "$NetLoc" <<-EOSQL + DROP DATABASE aerie_merlin; + DROP DATABASE aerie_scheduler; + DROP DATABASE aerie_sequencing; + DROP DATABASE aerie_ui; + \echo Done! +EOSQL diff --git a/deployment/merge_aerie_db/merge_db.sh b/deployment/merge_aerie_db/merge_db.sh new file mode 100644 index 0000000000..366e57d697 --- /dev/null +++ b/deployment/merge_aerie_db/merge_db.sh @@ -0,0 +1,155 @@ +#!/bin/bash + +######## +# Help # +######## +Help() +{ + # Display Help + echo "Migrate from a pre-v2.8.0 Aerie Database to v2.8.0" + echo + echo "usage: merge_db.sh [-h] [-d] [-e ENV_PATH] [-p HASURA_PATH] [-n NETWORK_LOCATION]" + echo "options:" + echo "-h print this message and exit" + echo "-d drop the old databases after the merge" + echo "-e path to the .env file used to deploy v2.8.0 Aerie. defaults to ../.env" + echo "-p path to the directory containing the config.yaml for the Aerie deployment. defaults to ../hasura" + echo "-n network location of the database. defaults to localhost" + echo +} + +################# +# Set variables # +################# + +EnvFile="../.env" +HasuraPath="../hasura" +NetLoc="localhost" +DropOld=-1 + +######################### +# Process input options # +######################### + +# Get the options +while getopts "he:p:n:d" option; do + case $option in + e) EnvFile=$OPTARG;; + p) HasuraPath=$OPTARG;; + n) NetLoc=$OPTARG;; + d) DropOld=1;; + h | *) + Help + exit;; + esac +done + +################# +# Main program # +################# +source $EnvFile + +# Migrate the existing DB to the latest +echo 'Migrate existing DBs to latest...' +python3 aerie_db_migration_preMerge.py -a --all -p $HasuraPath -e $EnvFile -n $NetLoc +return_code=$? +if [ $return_code -ne 0 ]; then + echo 'Migrating to latest failed, aborting merge...' + exit $return_code +fi + +echo 'Done!' + +cd merge_db + +# Start the new DB on the server +echo 'Creating merged database...' 
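# The block that follows bootstraps the merged "aerie" database: it creates the
# per-service roles named in the sourced .env, creates a single "aerie" database
# owned by $AERIE_USERNAME, then imports each legacy database (aerie_merlin,
# aerie_scheduler, aerie_sequencing, aerie_ui) via "pg_dump | psql" before running
# the corresponding migrate_*.sql script and database_permissions.sql.
# A minimal sketch of the same role-creation step for one hypothetical service
# user (placeholder name and password, assuming PGPASSWORD is already exported):
#   psql --username "$POSTGRES_USER" --host "$NetLoc" --dbname postgres \
#     -c "CREATE USER example_service WITH PASSWORD 'example_password';"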
+set -e +export PGPASSWORD="$POSTGRES_PASSWORD" +psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname postgres --host "$NetLoc" <<-EOSQL + \echo 'Initializing aerie user...' + DO \$\$ BEGIN + CREATE USER "$AERIE_USERNAME" WITH PASSWORD '$AERIE_PASSWORD'; + EXCEPTION + WHEN duplicate_object THEN NULL; + END \$\$; + \echo 'Done!' + + \echo 'Initializing gateway user...' + CREATE USER "$GATEWAY_USERNAME" WITH PASSWORD '$GATEWAY_PASSWORD'; + \echo 'Done!' + + \echo 'Initializing merlin user...' + CREATE USER "$MERLIN_USERNAME" WITH PASSWORD '$MERLIN_PASSWORD'; + \echo 'Done!' + + \echo 'Initializing scheduler user...' + CREATE USER "$SCHEDULER_USERNAME" WITH PASSWORD '$SCHEDULER_PASSWORD'; + \echo 'Done!' + + \echo 'Initializing sequencing user...' + CREATE USER "$SEQUENCING_USERNAME" WITH PASSWORD '$SEQUENCING_PASSWORD'; + \echo 'Done!' + + \echo 'Initializing aerie database...' + CREATE DATABASE aerie OWNER "$AERIE_USERNAME"; + \connect aerie + ALTER SCHEMA public OWNER TO "$AERIE_USERNAME"; + \echo 'Done!' +EOSQL + +echo 'Migrating aerie_merlin Database...' +# Move Merlin +export PGPASSWORD="$AERIE_PASSWORD" +pg_dump -U $AERIE_USERNAME -h $NetLoc aerie_merlin | psql -U $AERIE_USERNAME -h $NetLoc -d aerie +# Migrate Merlin +psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie" -h $NetLoc < migrate_merlin.sql +echo 'Done!' + +echo 'Migrating aerie_scheduler Database...' +# Move Scheduler +pg_dump -U $AERIE_USERNAME -h $NetLoc --exclude-schema=migrations aerie_scheduler | psql -U $AERIE_USERNAME -h $NetLoc -d aerie +# Migrate Scheduler +psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie" -h $NetLoc < migrate_scheduler.sql +echo 'Done!' + +echo 'Migrating aerie_sequencing Database...' +# Move Sequencing +pg_dump -U $AERIE_USERNAME -h $NetLoc --exclude-schema=migrations aerie_sequencing | psql -U $AERIE_USERNAME -h $NetLoc -d aerie +# Migrate Sequencing +psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie" -h $NetLoc < migrate_sequencing.sql +echo 'Done!' + +echo 'Migrating aerie_ui Database...' +# Move UI +pg_dump -U $AERIE_USERNAME -h $NetLoc --exclude-schema=migrations aerie_ui | psql -U $AERIE_USERNAME -h $NetLoc -d aerie +# Migrate UI +psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie" -h $NetLoc < migrate_ui.sql +echo 'Done!' + +echo 'Setting up Database Permissions...' +psql -v ON_ERROR_STOP=1 --username "$AERIE_USERNAME" --dbname "aerie" --host "$NetLoc" <<-EOSQL + \set aerie_user $AERIE_USERNAME + \set postgres_user $POSTGRES_USER + \set gateway_user $GATEWAY_USERNAME + \set merlin_user $MERLIN_USERNAME + \set scheduler_user $SCHEDULER_USERNAME + \set sequencing_user $SEQUENCING_USERNAME + \ir database_permissions.sql +EOSQL +echo 'Done!' + + +# Drop the old DBs +if [ $DropOld -eq 1 ]; then + echo 'Dropping unmerged databases...' + PGPASSWORD="$POSTGRES_PASSWORD" \ + psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "postgres" --host "$NetLoc" <<-EOSQL + DROP DATABASE aerie_merlin; + DROP DATABASE aerie_scheduler; + DROP DATABASE aerie_sequencing; + DROP DATABASE aerie_ui; + \echo Done! +EOSQL +fi +exit 0 diff --git a/deployment/merge_aerie_db/merge_db/database_permissions.sql b/deployment/merge_aerie_db/merge_db/database_permissions.sql new file mode 100644 index 0000000000..b0ebcf4c31 --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/database_permissions.sql @@ -0,0 +1,108 @@ +/* + This file grants permissions to each of the DB users to the schemas. 
+ It is executed by the AERIE user after the DB schema has been created. +*/ +begin; + -- Revoke Migrated Postgres permissions + revoke all privileges on schema merlin from :"postgres_user"; + revoke all privileges on schema scheduler from :"postgres_user"; + revoke all privileges on schema sequencing from :"postgres_user"; + revoke all privileges on schema ui from :"postgres_user"; + + -- Revoke Migrated Aerie permissions + revoke all privileges on schema ui from :"aerie_user"; + + -- All services may execute functions in the `util_functions` schema + -- 'routines' includes both functions and procedures + grant usage on schema util_functions to public; + grant execute on all routines in schema util_functions to public; + alter default privileges in schema util_functions grant execute on routines to public; + + -- All services must be able to view the user role permissions table + grant usage on schema permissions to public; + grant select on table permissions.user_role_permission to public; + + -- All services can create temp tables + grant temp on database aerie to public; + + -- All services can read merlin data + grant usage on schema merlin to public; + grant select on all tables in schema merlin to public; + alter default privileges in schema merlin grant select on tables to public; + + ------------------------------ + -- Gateway User Permissions -- + ------------------------------ + -- The Gateway is in charge of managing user permissions + grant select on all tables in schema permissions to :"gateway_user"; + grant insert, update, delete on permissions.users, permissions.users_allowed_roles to :"gateway_user"; + grant execute on all routines in schema permissions to :"gateway_user"; + + alter default privileges in schema permissions grant select on tables to :"gateway_user"; + alter default privileges in schema permissions grant execute on routines to :"gateway_user"; + -- The Gateway is in charge of managing uploaded files + grant select, insert, update, delete on merlin.uploaded_file to :"gateway_user"; + + ----------------------------- + -- Merlin User Permissions -- + ----------------------------- + -- Merlin has control of all tables in the merlin schema + grant select, insert, update, delete on all tables in schema merlin to :"merlin_user"; + grant execute on all routines in schema merlin to :"merlin_user"; + + alter default privileges in schema merlin grant select, insert, update, delete on tables to :"merlin_user"; + alter default privileges in schema merlin grant execute on routines to :"merlin_user"; + + -------------------------------- + -- Scheduler User Permissions -- + -------------------------------- + -- The Scheduler has control of all tables in the scheduler schema + grant usage on schema scheduler to :"scheduler_user"; + grant select, insert, update, delete on all tables in schema scheduler to :"scheduler_user"; + grant execute on all routines in schema scheduler to :"scheduler_user"; + + alter default privileges in schema scheduler grant select, insert, update, delete on tables to :"scheduler_user"; + alter default privileges in schema scheduler grant execute on routines to :"scheduler_user"; + + -- The Scheduler needs to be able to Add/Update Activity Directives in a Plan + grant insert, update on table merlin.activity_directive to :"scheduler_user"; + grant insert on table merlin.plan to :"scheduler_user"; + + -- The Scheduler can write simulation data + grant insert, update on table merlin.span, merlin.simulation_dataset to :"scheduler_user"; + grant insert 
on table merlin.profile, merlin.profile_segment, merlin.topic, merlin.event to :"scheduler_user"; + + --------------------------------- + -- Sequencing User Permissions -- + --------------------------------- + -- The Sequencing Server has control of all tables in the sequencing schema + grant usage on schema sequencing to :"sequencing_user"; + grant select, insert, update, delete on all tables in schema sequencing to :"sequencing_user"; + grant execute on all routines in schema sequencing to :"sequencing_user"; + + alter default privileges in schema sequencing grant select, insert, update, delete on tables to :"sequencing_user"; + alter default privileges in schema sequencing grant execute on routines to :"sequencing_user"; + + ----------------------- + -- UI DB Permissions -- + ----------------------- + -- The Aerie User currently has control of all tables in the UI schema + grant create, usage on schema ui to :"aerie_user"; + grant select, insert, update, delete on all tables in schema ui to :"aerie_user"; + grant execute on all routines in schema ui to :"aerie_user"; + + alter default privileges in schema ui grant select, insert, update, delete on tables to :"aerie_user"; + alter default privileges in schema ui grant execute on routines to :"aerie_user"; + + ------------- + -- CLEANUP -- + ------------- + -- Revoke Migrated Public permissions + revoke create on schema merlin from public; + revoke all privileges on schema scheduler from public; + revoke all privileges on schema sequencing from public; + revoke all privileges on schema ui from public; + + -- Restore public schema can be used + grant usage on schema public to public; +end; diff --git a/deployment/merge_aerie_db/merge_db/migrate_hasura_functions.sql b/deployment/merge_aerie_db/merge_db/migrate_hasura_functions.sql new file mode 100644 index 0000000000..e815e24af4 --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/migrate_hasura_functions.sql @@ -0,0 +1,736 @@ +-- Activity Presets +create or replace function hasura.apply_preset_to_activity(_preset_id int, _activity_id int, _plan_id int, hasura_session json) +returns merlin.activity_directive +strict +volatile +language plpgsql as $$ + declare + returning_directive merlin.activity_directive; + ad_activity_type text; + preset_activity_type text; + _function_permission permissions.permission; + _user text; +begin + _function_permission := permissions.get_function_permissions('apply_preset', hasura_session); + perform permissions.raise_if_plan_merge_permission('apply_preset', _function_permission); + -- Check valid permissions + _user := hasura_session ->> 'x-hasura-user-id'; + if not _function_permission = 'NO_CHECK' then + if _function_permission = 'OWNER' then + if not exists(select * from merlin.activity_presets ap where ap.id = _preset_id and ap.owner = _user) then + raise insufficient_privilege + using message = 'Cannot run ''apply_preset'': '''|| _user ||''' is not OWNER on Activity Preset ' + || _preset_id ||'.'; + end if; + end if; + -- Additionally, the user needs to be OWNER of the plan + call permissions.check_general_permissions('apply_preset', _function_permission, _plan_id, _user); + end if; + + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + raise exception 'Activity directive % does not exist in plan %', _activity_id, _plan_id; + end if; + if not exists(select id from merlin.activity_presets where id = _preset_id) then + raise exception 'Activity preset % does not exist', _preset_id; + end if; + + 
select type from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id) into ad_activity_type; + select associated_activity_type from merlin.activity_presets where id = _preset_id into preset_activity_type; + + if (ad_activity_type != preset_activity_type) then + raise exception 'Cannot apply preset for activity type "%" onto an activity of type "%".', preset_activity_type, ad_activity_type; + end if; + + update merlin.activity_directive + set arguments = (select arguments from merlin.activity_presets where id = _preset_id) + where (id, plan_id) = (_activity_id, _plan_id); + + insert into merlin.preset_to_directive(preset_id, activity_id, plan_id) + select _preset_id, _activity_id, _plan_id + on conflict (activity_id, plan_id) do update + set preset_id = _preset_id; + + select * from merlin.activity_directive + where (id, plan_id) = (_activity_id, _plan_id) + into returning_directive; + + return returning_directive; +end +$$; + +-- Hasura functions for handling anchors during delete +create or replace function hasura.delete_activity_by_pk_reanchor_plan_start(_activity_id int, _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value + strict + volatile +language plpgsql as $$ + declare + _function_permission permissions.permission; + begin + _function_permission := permissions.get_function_permissions('delete_activity_reanchor_plan', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_reanchor_plan', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('delete_activity_reanchor_plan', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + end if; + + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; + end if; + + return query + with updated as ( + select merlin.anchor_direct_descendents_to_plan(_activity_id := _activity_id, _plan_id := _plan_id) + ) + select updated.*, 'updated' + from updated; + + return query + with deleted as ( + delete from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id) returning * + ) + select (deleted.id, deleted.plan_id, deleted.name, deleted.source_scheduling_goal_id, + deleted.created_at, deleted.created_by, deleted.last_modified_at, deleted.last_modified_by, deleted.start_offset, deleted.type, deleted.arguments, + deleted.last_modified_arguments_at, deleted.metadata, deleted.anchor_id, deleted.anchored_to_start)::merlin.activity_directive, 'deleted' from deleted; + end +$$; + +create or replace function hasura.delete_activity_by_pk_reanchor_to_anchor(_activity_id int, _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value + strict + volatile + language plpgsql as $$ +declare + _function_permission permissions.permission; +begin + _function_permission := permissions.get_function_permissions('delete_activity_reanchor', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_reanchor', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('delete_activity_reanchor', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + end if; + + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + raise exception 'Activity Directive % does not exist 
in Plan %', _activity_id, _plan_id; + end if; + + return query + with updated as ( + select merlin.anchor_direct_descendents_to_ancestor(_activity_id := _activity_id, _plan_id := _plan_id) + ) + select updated.*, 'updated' + from updated; + return query + with deleted as ( + delete from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id) returning * + ) + select (deleted.id, deleted.plan_id, deleted.name, deleted.source_scheduling_goal_id, + deleted.created_at, deleted.created_by, deleted.last_modified_at, deleted.last_modified_by, deleted.start_offset, deleted.type, deleted.arguments, + deleted.last_modified_arguments_at, deleted.metadata, deleted.anchor_id, deleted.anchored_to_start)::merlin.activity_directive, 'deleted' from deleted; +end +$$; + +create or replace function hasura.delete_activity_by_pk_delete_subtree(_activity_id int, _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value + strict + volatile + language plpgsql as $$ +declare + _function_permission permissions.permission; +begin + _function_permission := permissions.get_function_permissions('delete_activity_subtree', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_subtree', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('delete_activity_subtree', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + end if; + + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; + end if; + + return query + with recursive + descendents(activity_id, p_id) as ( + select _activity_id, _plan_id + from merlin.activity_directive ad + where (ad.id, ad.plan_id) = (_activity_id, _plan_id) + union + select ad.id, ad.plan_id + from merlin.activity_directive ad, descendents d + where (ad.anchor_id, ad.plan_id) = (d.activity_id, d.p_id) + ), + deleted as ( + delete from merlin.activity_directive ad + using descendents + where (ad.plan_id, ad.id) = (_plan_id, descendents.activity_id) + returning * + ) + select (deleted.id, deleted.plan_id, deleted.name, deleted.source_scheduling_goal_id, + deleted.created_at, deleted.created_by, deleted.last_modified_at, deleted.last_modified_by, deleted.start_offset, deleted.type, deleted.arguments, + deleted.last_modified_arguments_at, deleted.metadata, deleted.anchor_id, deleted.anchored_to_start)::merlin.activity_directive, 'deleted' from deleted; +end +$$; + +-- Bulk versions of Anchor Deletion +create or replace function hasura.delete_activity_by_pk_reanchor_plan_start_bulk(_activity_ids int[], _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value + strict + volatile +language plpgsql as $$ + declare + activity_id int; + _function_permission permissions.permission; + begin + _function_permission := permissions.get_function_permissions('delete_activity_reanchor_plan_bulk', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_reanchor_plan_bulk', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('delete_activity_reanchor_plan_bulk', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + end if; + + set constraints merlin.validate_anchors_update_trigger immediate; + foreach activity_id in array _activity_ids loop + -- An activity ID might've 
been deleted in a prior step, so validate that it exists first + if exists(select id from merlin.activity_directive where (id, plan_id) = (activity_id, _plan_id)) then + return query + select * from hasura.delete_activity_by_pk_reanchor_plan_start(activity_id, _plan_id, hasura_session); + end if; + end loop; + set constraints merlin.validate_anchors_update_trigger deferred; + end +$$; + +create or replace function hasura.delete_activity_by_pk_reanchor_to_anchor_bulk(_activity_ids int[], _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value + strict + volatile +language plpgsql as $$ + declare + activity_id int; + _function_permission permissions.permission; + begin + _function_permission := permissions.get_function_permissions('delete_activity_reanchor_bulk', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_reanchor_bulk', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('delete_activity_reanchor_bulk', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + end if; + + set constraints merlin.validate_anchors_update_trigger immediate; + foreach activity_id in array _activity_ids loop + -- An activity ID might've been deleted in a prior step, so validate that it exists first + if exists(select id from merlin.activity_directive where (id, plan_id) = (activity_id, _plan_id)) then + return query + select * from hasura.delete_activity_by_pk_reanchor_to_anchor(activity_id, _plan_id, hasura_session); + end if; + end loop; + set constraints merlin.validate_anchors_update_trigger deferred; + end +$$; + +create or replace function hasura.delete_activity_by_pk_delete_subtree_bulk(_activity_ids int[], _plan_id int, hasura_session json) + returns setof hasura.delete_anchor_return_value + strict + volatile +language plpgsql as $$ + declare + activity_id int; + _function_permission permissions.permission; + begin + _function_permission := permissions.get_function_permissions('delete_activity_subtree_bulk', hasura_session); + perform permissions.raise_if_plan_merge_permission('delete_activity_subtree_bulk', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('delete_activity_subtree_bulk', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + end if; + + set constraints merlin.validate_anchors_update_trigger immediate; + foreach activity_id in array _activity_ids loop + if exists(select id from merlin.activity_directive where (id, plan_id) = (activity_id, _plan_id)) then + return query + select * from hasura.delete_activity_by_pk_delete_subtree(activity_id, _plan_id, hasura_session); + end if; + end loop; + set constraints merlin.validate_anchors_update_trigger deferred; + end +$$; + +create or replace function hasura.get_resources_at_start_offset(_dataset_id int, _start_offset interval) +returns setof hasura.resource_at_start_offset_return_value +strict +stable +security invoker +language plpgsql as $$ +begin + return query + select distinct on (p.name) + p.dataset_id, p.id, p.name, p.type, ps.start_offset, ps.dynamics, ps.is_gap + from merlin.profile p, merlin.profile_segment ps + where ps.profile_id = p.id + and p.dataset_id = _dataset_id + and ps.dataset_id = _dataset_id + and ps.start_offset <= _start_offset + order by p.name, ps.start_offset desc; +end +$$; + +create or replace function hasura.restore_activity_changelog( + _plan_id integer, + 
_activity_directive_id integer, + _revision integer, + hasura_session json +) + returns setof merlin.activity_directive + volatile + language plpgsql as $$ +declare + _function_permission permissions.permission; +begin + _function_permission := + permissions.get_function_permissions('restore_activity_changelog', hasura_session); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions( + 'restore_activity_changelog', + _function_permission, _plan_id, + (hasura_session ->> 'x-hasura-user-id') + ); + end if; + + if not exists(select id from merlin.plan where id = _plan_id) then + raise exception 'Plan % does not exist', _plan_id; + end if; + + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_directive_id, _plan_id)) then + raise exception 'Activity Directive % does not exist in Plan %', _activity_directive_id, _plan_id; + end if; + + if not exists(select revision + from merlin.activity_directive_changelog + where (plan_id, activity_directive_id, revision) = + (_plan_id, _activity_directive_id, _revision)) + then + raise exception 'Changelog Revision % does not exist for Plan % and Activity Directive %', _revision, _plan_id, _activity_directive_id; + end if; + + return query + update merlin.activity_directive as ad + set name = c.name, + source_scheduling_goal_id = c.source_scheduling_goal_id, + start_offset = c.start_offset, + type = c.type, + arguments = c.arguments, + last_modified_arguments_at = c.changed_arguments_at, + metadata = c.metadata, + anchor_id = c.anchor_id, + anchored_to_start = c.anchored_to_start, + last_modified_at = c.changed_at, + last_modified_by = c.changed_by + from merlin.activity_directive_changelog as c + where ad.id = _activity_directive_id + and c.activity_directive_id = _activity_directive_id + and ad.plan_id = _plan_id + and c.plan_id = _plan_id + and c.revision = _revision + returning ad.*; +end +$$; + +create or replace function hasura.duplicate_plan(plan_id integer, new_plan_name text, hasura_session json) + returns hasura.duplicate_plan_return_value -- plan_id of the new plan + volatile + language plpgsql as $$ +declare + res integer; + new_owner text; + _function_permission permissions.permission; +begin + new_owner := (hasura_session ->> 'x-hasura-user-id'); + _function_permission := permissions.get_function_permissions('branch_plan', hasura_session); + perform permissions.raise_if_plan_merge_permission('branch_plan', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('branch_plan', _function_permission, plan_id, new_owner); + end if; + + select merlin.duplicate_plan(plan_id, new_plan_name, new_owner) into res; + return row(res)::hasura.duplicate_plan_return_value; +end; +$$; + +create or replace function hasura.get_plan_history(_plan_id integer, hasura_session json) + returns setof hasura.get_plan_history_return_value + stable + language plpgsql as $$ +declare + _function_permission permissions.permission; +begin + _function_permission := permissions.get_function_permissions('get_plan_history', hasura_session); + perform permissions.raise_if_plan_merge_permission('get_plan_history', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('get_plan_history', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + end if; + + return query select get_plan_history($1); +end; +$$; + +create or replace function hasura.duplicate_plan(plan_id 
integer, new_plan_name text, hasura_session json) + returns hasura.duplicate_plan_return_value -- plan_id of the new plan + volatile + language plpgsql as $$ +declare + res integer; + new_owner text; + _function_permission permissions.permission; +begin + new_owner := (hasura_session ->> 'x-hasura-user-id'); + _function_permission := permissions.get_function_permissions('branch_plan', hasura_session); + perform permissions.raise_if_plan_merge_permission('branch_plan', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('branch_plan', _function_permission, plan_id, new_owner); + end if; + + select merlin.duplicate_plan(plan_id, new_plan_name, new_owner) into res; + return row(res)::hasura.duplicate_plan_return_value; +end; +$$; + +create or replace function hasura.get_plan_history(_plan_id integer, hasura_session json) + returns setof hasura.get_plan_history_return_value + stable + language plpgsql as $$ +declare + _function_permission permissions.permission; +begin + _function_permission := permissions.get_function_permissions('get_plan_history', hasura_session); + perform permissions.raise_if_plan_merge_permission('get_plan_history', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('get_plan_history', _function_permission, _plan_id, (hasura_session ->> 'x-hasura-user-id')); + end if; + + return query select get_plan_history($1); +end; +$$; + +create or replace function hasura.create_merge_request(source_plan_id integer, target_plan_id integer, hasura_session json) + returns hasura.create_merge_request_return_value -- plan_id of the new plan + volatile + language plpgsql as $$ +declare + res integer; + requester_username text; + _function_permission permissions.permission; +begin + requester_username := (hasura_session ->> 'x-hasura-user-id'); + _function_permission := permissions.get_function_permissions('create_merge_rq', hasura_session); + call permissions.check_merge_permissions('create_merge_rq', _function_permission, target_plan_id, source_plan_id, requester_username); + + select merlin.create_merge_request(source_plan_id, target_plan_id, requester_username) into res; + return row(res)::hasura.create_merge_request_return_value; +end; +$$; + +create or replace function hasura.get_non_conflicting_activities(_merge_request_id integer, hasura_session json) + returns setof hasura.get_non_conflicting_activities_return_value + strict + volatile + language plpgsql as $$ +declare + _snapshot_id_supplying_changes integer; + _plan_id_receiving_changes integer; +begin + call permissions.check_merge_permissions('get_non_conflicting_activities', _merge_request_id, hasura_session); + + select snapshot_id_supplying_changes, plan_id_receiving_changes + from merlin.merge_request + where merge_request.id = $1 + into _snapshot_id_supplying_changes, _plan_id_receiving_changes; + + return query + with plan_tags as ( + select jsonb_agg(json_build_object( + 'id', id, + 'name', name, + 'color', color, + 'owner', owner, + 'created_at', created_at + )) as tags, adt.directive_id + from tags.tags tags, tags.activity_directive_tags adt + where tags.id = adt.tag_id + and adt.plan_id = _plan_id_receiving_changes + group by adt.directive_id + ), + snapshot_tags as ( + select jsonb_agg(json_build_object( + 'id', id, + 'name', name, + 'color', color, + 'owner', owner, + 'created_at', created_at + )) as tags, sat.directive_id + from tags.tags tags, tags.snapshot_activity_tags sat + where 
tags.id = sat.tag_id + and sat.snapshot_id = _snapshot_id_supplying_changes + group by sat.directive_id + ) + select + activity_id, + change_type, + snap_act, + act, + coalesce(st.tags, '[]'), + coalesce(pt.tags, '[]') + from + (select msa.activity_id, msa.change_type + from merlin.merge_staging_area msa + where msa.merge_request_id = $1) c + left join merlin.plan_snapshot_activities snap_act + on _snapshot_id_supplying_changes = snap_act.snapshot_id + and c.activity_id = snap_act.id + left join merlin.activity_directive act + on _plan_id_receiving_changes = act.plan_id + and c.activity_id = act.id + left join plan_tags pt + on c.activity_id = pt.directive_id + left join snapshot_tags st + on c.activity_id = st.directive_id; +end +$$; + +create or replace function hasura.get_conflicting_activities(_merge_request_id integer, hasura_session json) + returns setof hasura.get_conflicting_activities_return_value + strict + volatile + language plpgsql as $$ +declare + _snapshot_id_supplying_changes integer; + _plan_id_receiving_changes integer; + _merge_base_snapshot_id integer; +begin + call permissions.check_merge_permissions('get_conflicting_activities', _merge_request_id, hasura_session); + + select snapshot_id_supplying_changes, plan_id_receiving_changes, merge_base_snapshot_id + from merlin.merge_request + where merge_request.id = _merge_request_id + into _snapshot_id_supplying_changes, _plan_id_receiving_changes, _merge_base_snapshot_id; + + return query + with plan_tags as ( + select jsonb_agg(json_build_object( + 'id', id, + 'name', name, + 'color', color, + 'owner', owner, + 'created_at', created_at + )) as tags, adt.directive_id + from tags.tags tags, tags.activity_directive_tags adt + where tags.id = adt.tag_id + and _plan_id_receiving_changes = adt.plan_id + group by adt.directive_id + ), snapshot_tags as ( + select jsonb_agg(json_build_object( + 'id', id, + 'name', name, + 'color', color, + 'owner', owner, + 'created_at', created_at + )) as tags, sdt.directive_id, sdt.snapshot_id + from tags.tags tags, tags.snapshot_activity_tags sdt + where tags.id = sdt.tag_id + and (sdt.snapshot_id = _snapshot_id_supplying_changes + or sdt.snapshot_id = _merge_base_snapshot_id) + group by sdt.directive_id, sdt.snapshot_id + ) + select + activity_id, + change_type_supplying, + change_type_receiving, + case + when c.resolution = 'supplying' then 'source'::hasura.resolution_type + when c.resolution = 'receiving' then 'target'::hasura.resolution_type + when c.resolution = 'none' then 'none'::hasura.resolution_type + end, + snap_act, + act, + merge_base_act, + coalesce(st.tags, '[]'), + coalesce(pt.tags, '[]'), + coalesce(mbt.tags, '[]') + from + (select * from merlin.conflicting_activities c where c.merge_request_id = _merge_request_id) c + left join merlin.plan_snapshot_activities merge_base_act + on c.activity_id = merge_base_act.id and _merge_base_snapshot_id = merge_base_act.snapshot_id + left join merlin.plan_snapshot_activities snap_act + on c.activity_id = snap_act.id and _snapshot_id_supplying_changes = snap_act.snapshot_id + left join merlin.activity_directive act + on _plan_id_receiving_changes = act.plan_id and c.activity_id = act.id + left join plan_tags pt + on c.activity_id = pt.directive_id + left join snapshot_tags st + on c.activity_id = st.directive_id and _snapshot_id_supplying_changes = st.snapshot_id + left join snapshot_tags mbt + on c.activity_id = st.directive_id and _merge_base_snapshot_id = st.snapshot_id; +end; +$$; + +create or replace function 
hasura.begin_merge(_merge_request_id integer, hasura_session json) + returns hasura.begin_merge_return_value -- plan_id of the new plan + strict + volatile + language plpgsql as $$ + declare + non_conflicting_activities hasura.get_non_conflicting_activities_return_value[]; + conflicting_activities hasura.get_conflicting_activities_return_value[]; + reviewer_username text; +begin + call permissions.check_merge_permissions('begin_merge', _merge_request_id, hasura_session); + + reviewer_username := (hasura_session ->> 'x-hasura-user-id'); + call merlin.begin_merge($1, reviewer_username); + + non_conflicting_activities := array(select hasura.get_non_conflicting_activities($1, hasura_session)); + conflicting_activities := array(select hasura.get_conflicting_activities($1, hasura_session)); + + return row($1, non_conflicting_activities, conflicting_activities)::hasura.begin_merge_return_value; +end; +$$; + +create or replace function hasura.commit_merge(_merge_request_id integer, hasura_session json) + returns hasura.commit_merge_return_value + strict + volatile + language plpgsql as $$ +begin + call permissions.check_merge_permissions('commit_merge', _merge_request_id, hasura_session); + call merlin.commit_merge(_merge_request_id); + return row(_merge_request_id)::hasura.commit_merge_return_value; +end; +$$; + +create or replace function hasura.deny_merge(merge_request_id integer, hasura_session json) + returns hasura.deny_merge_return_value + strict + volatile + language plpgsql as $$ +begin + call permissions.check_merge_permissions('deny_merge', $1, hasura_session); + call merlin.deny_merge($1); + return row($1)::hasura.deny_merge_return_value; +end; +$$; + +create or replace function hasura.withdraw_merge_request(_merge_request_id integer, hasura_session json) + returns hasura.withdraw_merge_request_return_value + strict + volatile + language plpgsql as $$ +begin + call permissions.check_merge_permissions('withdraw_merge_rq', _merge_request_id, hasura_session); + call merlin.withdraw_merge_request(_merge_request_id); + return row(_merge_request_id)::hasura.withdraw_merge_request_return_value; +end; +$$; + +create or replace function hasura.cancel_merge(_merge_request_id integer, hasura_session json) + returns hasura.cancel_merge_return_value + strict + volatile + language plpgsql as $$ +begin + call permissions.check_merge_permissions('cancel_merge', _merge_request_id, hasura_session); + call merlin.cancel_merge(_merge_request_id); + return row(_merge_request_id)::hasura.cancel_merge_return_value; +end; +$$; + +create or replace function hasura.set_resolution(_merge_request_id integer, _activity_id integer, _resolution hasura.resolution_type, hasura_session json) + returns setof hasura.get_conflicting_activities_return_value + strict + volatile + language plpgsql as $$ + declare + _conflict_resolution merlin.conflict_resolution; + begin + call permissions.check_merge_permissions('set_resolution', _merge_request_id, hasura_session); + + select into _conflict_resolution + case + when _resolution = 'source' then 'supplying'::merlin.conflict_resolution + when _resolution = 'target' then 'receiving'::merlin.conflict_resolution + when _resolution = 'none' then 'none'::merlin.conflict_resolution + end; + + update merlin.conflicting_activities ca + set resolution = _conflict_resolution + where ca.merge_request_id = _merge_request_id and ca.activity_id = _activity_id; + return query + select * from hasura.get_conflicting_activities(_merge_request_id, hasura_session) + where activity_id = 
_activity_id + limit 1; + end + $$; + +create or replace function hasura.set_resolution_bulk(_merge_request_id integer, _resolution hasura.resolution_type, hasura_session json) + returns setof hasura.get_conflicting_activities_return_value + strict + volatile + language plpgsql as $$ +declare + _conflict_resolution merlin.conflict_resolution; +begin + call permissions.check_merge_permissions('set_resolution_bulk', _merge_request_id, hasura_session); + + select into _conflict_resolution + case + when _resolution = 'source' then 'supplying'::merlin.conflict_resolution + when _resolution = 'target' then 'receiving'::merlin.conflict_resolution + when _resolution = 'none' then 'none'::merlin.conflict_resolution + end; + + update merlin.conflicting_activities ca + set resolution = _conflict_resolution + where ca.merge_request_id = _merge_request_id; + return query + select * from hasura.get_conflicting_activities(_merge_request_id, hasura_session); +end +$$; + + +-- Description must be the last parameter since it has a default value +create or replace function hasura.create_snapshot(_plan_id integer, _snapshot_name text, hasura_session json, _description text default null) + returns hasura.create_snapshot_return_value + volatile + language plpgsql as $$ +declare + _snapshot_id integer; + _snapshotter text; + _function_permission permissions.permission; +begin + _snapshotter := (hasura_session ->> 'x-hasura-user-id'); + _function_permission := permissions.get_function_permissions('create_snapshot', hasura_session); + perform permissions.raise_if_plan_merge_permission('create_snapshot', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('create_snapshot', _function_permission, _plan_id, _snapshotter); + end if; + if _snapshot_name is null then + raise exception 'Snapshot name cannot be null.'; + end if; + + select merlin.create_snapshot(_plan_id, _snapshot_name, _description, _snapshotter) into _snapshot_id; + return row(_snapshot_id)::hasura.create_snapshot_return_value; +end; +$$; + +create or replace function hasura.restore_from_snapshot(_plan_id integer, _snapshot_id integer, hasura_session json) + returns hasura.create_snapshot_return_value + volatile + language plpgsql as $$ +declare + _user text; + _function_permission permissions.permission; +begin + _user := (hasura_session ->> 'x-hasura-user-id'); + _function_permission := permissions.get_function_permissions('restore_snapshot', hasura_session); + perform permissions.raise_if_plan_merge_permission('restore_snapshot', _function_permission); + if not _function_permission = 'NO_CHECK' then + call permissions.check_general_permissions('restore_snapshot', _function_permission, _plan_id, _user); + end if; + + call merlin.restore_from_snapshot(_plan_id, _snapshot_id); + return row(_snapshot_id)::hasura.create_snapshot_return_value; +end +$$; diff --git a/deployment/merge_aerie_db/merge_db/migrate_merlin.sql b/deployment/merge_aerie_db/merge_db/migrate_merlin.sql new file mode 100644 index 0000000000..67f6387836 --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/migrate_merlin.sql @@ -0,0 +1,161 @@ +begin; +-- Create Cross Service Schemas (migrations already exists and hasura will be created via rename) +comment on schema migrations is 'DB Migrations Schema'; +create schema permissions; +comment on schema permissions is 'Aerie User and User Roles Schema'; +create schema tags; +comment on schema tags is 'Tags Metadata Schema'; +create schema util_functions; +comment on schema 
util_functions is 'Cross-service Helper Function Schema'; + +-- Drop the PGCrypto extension that Hasura auto-installed out of "public" +drop extension if exists pgcrypto; + +-- Move the contents of "public" to "merlin" +alter schema public rename to merlin; +comment on schema merlin is 'Merlin Service Schema'; +create schema public; + +-- Move the contents of "hasura_functions" to "hasura" +alter schema hasura_functions rename to hasura; +comment on schema hasura is 'Hasura Helper Function Schema'; +alter type merlin.resolution_type set schema hasura; + +-- Empty schema_migrations +truncate migrations.schema_migrations; +call migrations.mark_migration_applied('0'); + +-- Move Permissions Tables +alter table metadata.users set schema permissions; +alter table metadata.users_allowed_roles set schema permissions; +alter table metadata.users_and_roles set schema permissions; +alter table metadata.user_role_permission set schema permissions; +alter table metadata.user_roles set schema permissions; + +-- Move Permissions Types +alter type metadata.permission set schema permissions; +alter type metadata.action_permission_key set schema permissions; +alter type metadata.function_permission_key set schema permissions; + +-- Move Tags Tables +alter table metadata.tags set schema tags; +alter table metadata.plan_tags set schema tags; +alter table metadata.plan_snapshot_tags set schema tags; +alter table metadata.activity_directive_tags set schema tags; +alter table metadata.snapshot_activity_tags set schema tags; +alter table metadata.constraint_definition_tags set schema tags; +alter table metadata.constraint_tags set schema tags; + +-- Move Permissions Functions +alter procedure metadata.check_general_permissions(_function permissions.function_permission_key, _permission permissions.permission, _plan_id integer, _user text) set schema permissions; +alter procedure metadata.check_merge_permissions(_function permissions.function_permission_key, _merge_request_id integer, hasura_session json) set schema permissions; +alter procedure metadata.check_merge_permissions(_function permissions.function_permission_key, _permission permissions.permission, _plan_id_receiving integer, _plan_id_supplying integer, _user text) set schema permissions; +alter function metadata.get_function_permissions(_function permissions.function_permission_key, hasura_session json) set schema permissions; +alter function metadata.get_role(hasura_session json) set schema permissions; +alter function metadata.insert_permission_for_user_role() set schema permissions; +alter function metadata.raise_if_plan_merge_permission(_function permissions.function_permission_key, _permission permissions.permission) set schema permissions; +alter function metadata.validate_permissions_json() set schema permissions; + +-- Move Tags Functions +alter function merlin.get_tags(_activity_id int, _plan_id int) set schema tags; +alter function merlin.adt_check_locked_new() set schema tags; +alter function merlin.adt_check_locked_old() set schema tags; +alter function merlin.snapshot_tags_in_review_delete() set schema tags; +alter function metadata.tag_ids_activity_directive(_directive_id integer, _plan_id integer) set schema tags; +alter function metadata.tag_ids_activity_snapshot(_directive_id integer, _snapshot_id integer) set schema tags; + +-- Metadata Schema is empty now +drop schema metadata; + +-- Replace status_t with util_functions.request_status +create type util_functions.request_status as enum('pending', 'incomplete', 'failed', 'success'); + 
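+-- Retyping simulation_dataset.status happens in three steps below:
+--   1. drop the trigger whose WHEN clause references the status column
+--      (Postgres cannot alter the type of a column used in a trigger definition),
+--   2. drop the column default, retype the column via `using status::text::util_functions.request_status`,
+--      and restore the default (the existing default is typed against the old enum and cannot be cast automatically),
+--   3. recreate the trigger and drop the now-unused merlin.status_t enum.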
+drop trigger notify_simulation_workers_cancel on merlin.simulation_dataset; +alter table merlin.simulation_dataset +alter column status drop default, +alter column status type util_functions.request_status using status::text::util_functions.request_status, +alter column status set default 'pending'; + +create trigger notify_simulation_workers_cancel +after update of canceled on merlin.simulation_dataset +for each row +when ((old.status != 'success' or old.status != 'failed') and new.canceled) +execute function merlin.notify_simulation_workers_cancel(); + +drop type merlin.status_t; + +-- Update Tables +alter table merlin.activity_type + rename constraint activity_type_owned_by_mission_model to activity_type_mission_model_exists; +alter table merlin.activity_type rename constraint activity_type_natural_key to activity_type_pkey; + +-- Update Types +alter domain merlin.merlin_parameter_set rename to parameter_set; +alter domain merlin.merlin_argument_set rename to argument_set; +alter domain merlin.merlin_required_parameter_set rename to required_parameter_set; +alter type merlin.merlin_activity_directive_metadata_set rename to activity_directive_metadata_set; + +-- Update function definitions +\! echo 'Migrating Merlin Functions...' +\ir ./migrate_merlin_functions.sql +\! echo 'Done!' +\! echo 'Migrating Hasura Functions...' +\ir ./migrate_hasura_functions.sql +\! echo 'Done!' +\! echo 'Migrating Tags Functions...' +\ir ./migrate_tags_functions.sql +\! echo 'Done!' +\! echo 'Migrating Permissions Functions...' +\ir ./migrate_permissions_functions.sql +\! echo 'Done!' + +-- Update Views +-- Update Views +create or replace view merlin.activity_directive_extended as +( + select + -- Activity Directive Properties + ad.id as id, + ad.plan_id as plan_id, + -- Additional Properties + ad.name as name, + tags.get_tags(ad.id, ad.plan_id) as tags, + ad.source_scheduling_goal_id as source_scheduling_goal_id, + ad.created_at as created_at, + ad.created_by as created_by, + ad.last_modified_at as last_modified_at, + ad.last_modified_by as last_modified_by, + ad.start_offset as start_offset, + ad.type as type, + ad.arguments as arguments, + ad.last_modified_arguments_at as last_modified_arguments_at, + ad.metadata as metadata, + ad.anchor_id as anchor_id, + ad.anchored_to_start as anchored_to_start, + -- Derived Properties + merlin.get_approximate_start_time(ad.id, ad.plan_id) as approximate_start_time, + ptd.preset_id as preset_id, + ap.arguments as preset_arguments + from merlin.activity_directive ad + left join merlin.preset_to_directive ptd on ad.id = ptd.activity_id and ad.plan_id = ptd.plan_id + left join merlin.activity_presets ap on ptd.preset_id = ap.id +); + +create or replace view merlin.simulated_activity as +( + select span.id as id, + sd.id as simulation_dataset_id, + span.parent_id as parent_id, + span.start_offset as start_offset, + span.duration as duration, + span.attributes as attributes, + span.type as activity_type_name, + (span.attributes#>>'{directiveId}')::integer as directive_id, + sd.simulation_start_time + span.start_offset as start_time, + sd.simulation_start_time + span.start_offset + span.duration as end_time + from merlin.span span + join merlin.dataset d on span.dataset_id = d.id + join merlin.simulation_dataset sd on d.id = sd.dataset_id + join merlin.simulation s on s.id = sd.simulation_id +); +end; diff --git a/deployment/merge_aerie_db/merge_db/migrate_merlin_functions.sql b/deployment/merge_aerie_db/merge_db/migrate_merlin_functions.sql new file mode 100644 index 
0000000000..f9b13aed44 --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/migrate_merlin_functions.sql @@ -0,0 +1,1937 @@ +begin; +------------------------- +-- INIT UTIL_FUNCTIONS -- +------------------------- +create function util_functions.set_updated_at() +returns trigger +security invoker +language plpgsql as $$begin + new.updated_at = now(); + return new; +end$$; + +create function util_functions.increment_revision_update() +returns trigger +security invoker +language plpgsql as $$ +begin + new.revision = old.revision +1; + return new; +end$$; + +create function util_functions.raise_duration_is_negative() +returns trigger +security invoker +language plpgsql as $$begin + raise exception 'invalid duration, expected nonnegative duration but found: %', new.duration; +end$$; + +--------------------- +-- UPDATE TRIGGERS -- +--------------------- +create or replace trigger set_timestamp +before update or insert on merlin.plan +for each row +execute function util_functions.set_updated_at(); +drop function merlin.plan_set_updated_at(); + +create or replace trigger check_plan_duration_is_nonnegative_trigger +before insert or update on merlin.plan +for each row +when (new.duration < '0') +execute function util_functions.raise_duration_is_negative(); +drop function merlin.raise_duration_is_negative(); + +drop trigger increment_revision_on_update_plan_trigger on merlin.plan; +create trigger increment_revision_plan_update +before update on merlin.plan +for each row +when (pg_trigger_depth() < 1) +execute function util_functions.increment_revision_update(); +drop function merlin.increment_revision_on_update_plan(); + +drop trigger increment_revision_on_update_mission_model_trigger on merlin.mission_model; +create trigger increment_revision_mission_model_update +before update on merlin.mission_model +for each row +when (pg_trigger_depth() < 1) +execute function util_functions.increment_revision_update(); +drop function merlin.increment_revision_on_update_mission_model(); + +alter trigger increment_revision_on_update_mission_model_jar_trigger on merlin.uploaded_file rename to increment_revision_mission_model_jar_update_trigger; + +alter function merlin.create_dataset() rename to plan_dataset_create_dataset; +alter function merlin.process_delete() rename to plan_dataset_process_delete; + +alter trigger increment_revision_on_insert_activity_directive_trigger on merlin.activity_directive rename to increment_plan_revision_on_directive_insert_trigger; +alter trigger increment_revision_on_update_activity_directive_trigger on merlin.activity_directive rename to increment_plan_revision_on_directive_update_trigger; +alter trigger increment_revision_on_delete_activity_directive_trigger on merlin.activity_directive rename to increment_plan_revision_on_directive_delete_trigger; + +create or replace trigger activity_directive_metadata_schema_updated_at_trigger +before update on merlin.activity_directive_metadata_schema +for each row +execute procedure util_functions.set_updated_at(); +drop function merlin.activity_directive_metadata_schema_updated_at(); + +create or replace trigger set_timestamp +before update on merlin.constraint_metadata +for each row +execute function util_functions.set_updated_at(); +drop function merlin.constraint_metadata_set_updated_at(); + +create or replace trigger set_timestamp + before update or insert on merlin.merge_request + for each row +execute function util_functions.set_updated_at(); +drop function merlin.merge_request_set_updated_at(); + +drop trigger 
increment_revision_for_update_simulation_trigger on merlin.simulation; +create trigger increment_revision_for_update_simulation_trigger +before update on merlin.simulation +for each row +when (pg_trigger_depth() < 1) +execute function util_functions.increment_revision_update(); +drop function merlin.increment_revision_for_update_simulation(); + +drop trigger increment_revision_for_update_simulation_template_trigger on merlin.simulation_template; +create trigger increment_revision_for_update_simulation_template_trigger +before update on merlin.simulation_template +for each row +when (pg_trigger_depth() < 1) +execute function util_functions.increment_revision_update(); +drop function merlin.increment_revision_for_update_simulation_template(); + +--------------------------------- +-- UPDATE FUNCTION DEFINITIONS -- +--------------------------------- +create or replace function merlin.cleanup_on_delete() + returns trigger + language plpgsql as $$ +begin + -- prevent deletion if the plan is locked + if old.is_locked then + raise exception 'Cannot delete locked plan.'; + end if; + + -- withdraw pending rqs + update merlin.merge_request + set status='withdrawn' + where plan_id_receiving_changes = old.id + and status = 'pending'; + + -- have the children be 'adopted' by this plan's parent + update merlin.plan + set parent_id = old.parent_id + where + parent_id = old.id; + return old; +end +$$; + +alter function merlin.increment_revision_on_update_mission_model_jar() rename to increment_revision_mission_model_jar_update; +create or replace function merlin.increment_revision_mission_model_jar_update() +returns trigger +security definer +language plpgsql as $$begin + update merlin.mission_model + set revision = revision + 1 + where jar_id = new.id + or jar_id = old.id; + + return new; +end$$; + +create or replace function merlin.plan_dataset_create_dataset() +returns trigger +security definer +language plpgsql as $$ +begin + insert into merlin.dataset + default values + returning id into new.dataset_id; + return new; +end$$; + +create or replace function merlin.calculate_offset() +returns trigger +security definer +language plpgsql as $$ +declare + reference merlin.plan_dataset; + reference_plan_start timestamptz; + dataset_start timestamptz; + new_plan_start timestamptz; +begin + -- Get an existing association with this dataset for reference + select into reference * from merlin.plan_dataset + where dataset_id = new.dataset_id; + + -- If no reference exists, raise an exception + if reference is null + then + raise exception 'Nonexistent dataset_id --> %', new.dataset_id + using hint = 'dataset_id must already be associated with a plan.'; + end if; + + -- Get the plan start times + select start_time into reference_plan_start from merlin.plan where id = reference.plan_id; + select start_time into new_plan_start from merlin.plan where id = new.plan_id; + + -- calculate and assign the new offset from plan start + dataset_start := reference_plan_start + reference.offset_from_plan_start; + new.offset_from_plan_start = dataset_start - new_plan_start; + return new; +end$$; + +create or replace function merlin.plan_dataset_process_delete() +returns trigger +security definer +language plpgsql as $$begin + if (select count(*) from merlin.plan_dataset where dataset_id = old.dataset_id) = 0 + then + delete from merlin.dataset + where id = old.dataset_id; + end if; +return old; +end$$; + +create or replace function merlin.get_approximate_start_time(_activity_id int, _plan_id int) + returns timestamptz + security 
definer + language plpgsql as $$ + declare + _plan_duration interval; + _plan_start_time timestamptz; + _net_offset interval; + _root_activity_id int; + _root_anchored_to_start boolean; +begin + -- Sum up all the activities from here until the plan + with recursive get_net_offset(activity_id, plan_id, anchor_id, net_offset) as ( + select id, plan_id, anchor_id, start_offset + from merlin.activity_directive ad + where (ad.id, ad.plan_id) = (_activity_id, _plan_id) + union + select ad.id, ad.plan_id, ad.anchor_id, ad.start_offset+gno.net_offset + from merlin.activity_directive ad, get_net_offset gno + where (ad.id, ad.plan_id) = (gno.anchor_id, gno.plan_id) + ) + select gno.net_offset, activity_id from get_net_offset gno + where gno.anchor_id is null + into _net_offset, _root_activity_id; + + -- Get the plan start time and duration + select start_time, duration + from merlin.plan + where id = _plan_id + into _plan_start_time, _plan_duration; + + select anchored_to_start + from merlin.activity_directive + where (id, plan_id) = (_root_activity_id, _plan_id) + into _root_anchored_to_start; + + -- If the root activity is anchored to the end of the plan, add the net to duration + if not _root_anchored_to_start then + _net_offset = _plan_duration + _net_offset; + end if; + + return _plan_start_time+_net_offset; +end +$$; + +alter function merlin.increment_revision_on_insert_activity_directive() rename to increment_plan_revision_on_directive_insert; +create or replace function merlin.increment_plan_revision_on_directive_insert() +returns trigger +security definer +language plpgsql as $$begin + update merlin.plan + set revision = revision + 1 + where id = new.plan_id; + + return new; +end$$; + +alter function merlin.increment_revision_on_update_activity_directive() rename to increment_plan_revision_on_directive_update; +create or replace function merlin.increment_plan_revision_on_directive_update() +returns trigger +security definer +language plpgsql as $$begin + update merlin.plan + set revision = revision + 1 + where id = new.plan_id + or id = old.plan_id; + + return new; +end$$; + +alter function merlin.increment_revision_on_delete_activity_directive() rename to increment_plan_revision_on_directive_delete; +create or replace function merlin.increment_plan_revision_on_directive_delete() +returns trigger +security invoker +language plpgsql as $$begin + update merlin.plan + set revision = revision + 1 + where id = old.plan_id; + + return old; +end$$; + +create or replace function merlin.generate_activity_directive_name() +returns trigger +security invoker +language plpgsql as $$begin + call merlin.plan_locked_exception(new.plan_id); + if new.name is null then + new.name = new.type || ' ' || new.id; + end if; + return new; +end$$; + +alter function merlin.activity_directive_set_updated_at() rename to set_last_modified_at; +create or replace function merlin.set_last_modified_at() +returns trigger +security invoker +language plpgsql as $$begin + new.last_modified_at = now(); + return new; +end$$; + +create or replace function merlin.activity_directive_set_arguments_updated_at() + returns trigger + security definer + language plpgsql as +$$ begin + call merlin.plan_locked_exception(new.plan_id); + new.last_modified_arguments_at = now(); + + -- request new validation + update merlin.activity_directive_validations + set last_modified_arguments_at = new.last_modified_arguments_at, + status = 'pending' + where (directive_id, plan_id) = (new.id, new.plan_id); + + return new; +end $$; + +create or replace 
function merlin.activity_directive_validation_entry() + returns trigger + security definer + language plpgsql as +$$ begin + insert into merlin.activity_directive_validations + (directive_id, plan_id, last_modified_arguments_at) + values (new.id, new.plan_id, new.last_modified_arguments_at); + return new; +end $$; + +create or replace function merlin.check_activity_directive_metadata() +returns trigger +security definer +language plpgsql as $$ + declare + _key text; + _value jsonb; + _schema jsonb; + _type text; + _subValue jsonb; + begin + call merlin.plan_locked_exception(new.plan_id); + for _key, _value in + select * from jsonb_each(new.metadata::jsonb) + loop + select schema into _schema from merlin.activity_directive_metadata_schema where key = _key; + _type := _schema->>'type'; + if _type = 'string' then + if jsonb_typeof(_value) != 'string' then + raise exception 'invalid metadata value for key %. Expected: string, Received: %', _key, _value; + end if; + elsif _type = 'long_string' then + if jsonb_typeof(_value) != 'string' then + raise exception 'invalid metadata value for key %. Expected: string, Received: %', _key, _value; + end if; + elsif _type = 'boolean' then + if jsonb_typeof(_value) != 'boolean' then + raise exception 'invalid metadata value for key %. Expected: boolean, Received: %', _key, _value; + end if; + elsif _type = 'number' then + if jsonb_typeof(_value) != 'number' then + raise exception 'invalid metadata value for key %. Expected: number, Received: %', _key, _value; + end if; + elsif _type = 'enum' then + if (_value not in (select * from jsonb_array_elements(_schema->'enumerates'))) then + raise exception 'invalid metadata value for key %. Expected: %, Received: %', _key, _schema->>'enumerates', _value; + end if; + elsif _type = 'enum_multiselect' then + if jsonb_typeof(_value) != 'array' then + raise exception 'invalid metadata value for key %. Expected an array of enumerates: %, Received: %', _key, _schema->>'enumerates', _value; + end if; + for _subValue in select * from jsonb_array_elements(_value) + loop + if (_subValue not in (select * from jsonb_array_elements(_schema->'enumerates'))) then + raise exception 'invalid metadata value for key %. 
Expected one of the valid enumerates: %, Received: %', _key, _schema->>'enumerates', _value; + end if; + end loop; + end if; + end loop; + return new; +end$$; + +create or replace function merlin.check_locked_on_delete() + returns trigger + security definer + language plpgsql as $$ + begin + call merlin.plan_locked_exception(old.plan_id); + return old; + end $$; + +create or replace function merlin.store_activity_directive_change() + returns trigger + language plpgsql as $$ +begin + insert into merlin.activity_directive_changelog ( + revision, + plan_id, + activity_directive_id, + name, + start_offset, + type, + arguments, + changed_arguments_at, + metadata, + changed_by, + anchor_id, + anchored_to_start) + values ( + (select coalesce(max(revision), -1) + 1 + from merlin.activity_directive_changelog + where plan_id = new.plan_id + and activity_directive_id = new.id), + new.plan_id, + new.id, + new.name, + new.start_offset, + new.type, + new.arguments, + new.last_modified_arguments_at, + new.metadata, + new.last_modified_by, + new.anchor_id, + new.anchored_to_start); + + return new; +end +$$; + +create or replace function merlin.delete_min_activity_directive_revision() + returns trigger + language plpgsql as $$ +begin + delete from merlin.activity_directive_changelog + where activity_directive_id = new.activity_directive_id + and plan_id = new.plan_id + and revision = (select min(revision) + from merlin.activity_directive_changelog + where activity_directive_id = new.activity_directive_id + and plan_id = new.plan_id); + return new; +end$$; + +create or replace function merlin.get_dependent_activities(_activity_id int, _plan_id int) + returns table(activity_id int, total_offset interval) + stable + language plpgsql as $$ +begin + return query + with recursive d_activities(activity_id, anchor_id, anchored_to_start, start_offset, total_offset) as ( + select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, ad.start_offset + from merlin.activity_directive ad + where (ad.anchor_id, ad.plan_id) = (_activity_id, _plan_id) -- select all activities anchored to this one + union + select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, da.total_offset + ad.start_offset + from merlin.activity_directive ad, d_activities da + where (ad.anchor_id, ad.plan_id) = (da.activity_id, _plan_id) -- select all activities anchored to those in the selection + and ad.anchored_to_start -- stop at next end-time anchor + ) select da.activity_id, da.total_offset + from d_activities da; +end; +$$; + +create or replace procedure merlin.validate_nonnegative_net_end_offset(_activity_id integer, _plan_id integer) + security definer + language plpgsql as $$ +declare + end_anchor_id integer; + offset_from_end_anchor interval; + _anchor_id integer; + _start_offset interval; + _anchored_to_start boolean; +begin + select anchor_id, start_offset, anchored_to_start + from merlin.activity_directive + where (id, plan_id) = (_activity_id, _plan_id) + into _anchor_id, _start_offset, _anchored_to_start; + + if (_anchor_id is not null) -- if the activity is anchored to the plan, then it can't be anchored to the end of another activity directive + then + /* + Postgres ANDs don't "short-circuit" -- all clauses are evaluated. 
Therefore, this query is placed here so that + it only runs iff the outer 'if' is true + */ + with recursive end_time_anchor(activity_id, anchor_id, anchored_to_start, start_offset, total_offset) as ( + select _activity_id, _anchor_id, _anchored_to_start, _start_offset, _start_offset + union + select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, eta.total_offset + ad.start_offset + from merlin.activity_directive ad, end_time_anchor eta + where (ad.id, ad.plan_id) = (eta.anchor_id, _plan_id) + and eta.anchor_id is not null -- stop at plan + and eta.anchored_to_start -- or stop at end time anchor + ) select into end_anchor_id, offset_from_end_anchor + anchor_id, total_offset from end_time_anchor eta -- get the id of the activity that the selected activity is anchored to + where not eta.anchored_to_start and eta.anchor_id is not null + limit 1; + + if end_anchor_id is not null and offset_from_end_anchor < '0' then + raise notice 'Activity Directive % has a net negative offset relative to an end-time anchor on Activity Directive %.', _activity_id, end_anchor_id; + + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) + values (_activity_id, _plan_id, 'Activity Directive ' || _activity_id || ' has a net negative offset relative to an end-time' || + ' anchor on Activity Directive ' || end_anchor_id ||'.') + on conflict (activity_id, plan_id) do update + set reason_invalid = 'Activity Directive ' || excluded.activity_id || ' has a net negative offset relative to an end-time' || + ' anchor on Activity Directive ' || end_anchor_id ||'.'; + end if; + end if; +end +$$; + +create or replace procedure merlin.validate_nonegative_net_plan_start(_activity_id integer, _plan_id integer) + security definer + language plpgsql as $$ + declare + net_offset interval; + _anchor_id integer; + _start_offset interval; + _anchored_to_start boolean; + begin + select anchor_id, start_offset, anchored_to_start + from merlin.activity_directive + where (id, plan_id) = (_activity_id, _plan_id) + into _anchor_id, _start_offset, _anchored_to_start; + + if (_start_offset < '0' and _anchored_to_start) then -- only need to check if anchored to start or something with a negative offset + with recursive anchors(activity_id, anchor_id, anchored_to_start, start_offset, total_offset) as ( + select _activity_id, _anchor_id, _anchored_to_start, _start_offset, _start_offset + union + select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, anchors.total_offset + ad.start_offset + from merlin.activity_directive ad, anchors + where anchors.anchor_id is not null -- stop at plan + and (ad.id, ad.plan_id) = (anchors.anchor_id, _plan_id) + and anchors.anchored_to_start -- or, stop at end-time offset + ) + select total_offset -- get the id of the activity that the selected activity is anchored to + from anchors a + where a.anchor_id is null + and a.anchored_to_start + limit 1 + into net_offset; + + if(net_offset < '0') then + raise notice 'Activity Directive % has a net negative offset relative to Plan Start.', _activity_id; + + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) + values (_activity_id, _plan_id, 'Activity Directive ' || _activity_id || ' has a net negative offset relative to Plan Start.') + on conflict (activity_id, plan_id) do update + set reason_invalid = 'Activity Directive ' || excluded.activity_id || ' has a net negative offset relative to Plan Start.'; + end if; + end if; + end + $$; + +create or replace function 
merlin.validate_anchors() + returns trigger + security definer + language plpgsql as $$ +declare + end_anchor_id integer; + invalid_descendant_act_ids integer[]; + offset_from_end_anchor interval; + offset_from_plan_start interval; +begin + -- Clear the reason invalid field (if an exception is thrown, this will be rolled back) + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) + values (new.id, new.plan_id, '') + on conflict (activity_id, plan_id) do update + set reason_invalid = ''; + + -- An activity cannot anchor to itself + if(new.anchor_id = new.id) then + raise exception 'Cannot anchor activity % to itself.', new.anchor_id; + end if; + + -- Validate that no cycles were added + if exists( + with recursive history(activity_id, anchor_id, is_cycle, path) as ( + select new.id, new.anchor_id, false, array[new.id] + union all + select ad.id, ad.anchor_id, + ad.id = any(path), + path || ad.id + from merlin.activity_directive ad, history h + where (ad.id, ad.plan_id) = (h.anchor_id, new.plan_id) + and not is_cycle + ) select * from history + where is_cycle + limit 1 + ) then + raise exception 'Cycle detected. Cannot apply changes.'; + end if; + + /* + An activity directive may have a negative offset from its anchor's start time. + If its anchor is anchored to the end time of another activity (or so on up the chain), the activity with a + negative offset must come out to have a positive offset relative to that end time anchor. + */ + call merlin.validate_nonnegative_net_end_offset(new.id, new.plan_id); + call merlin.validate_nonegative_net_plan_start(new.id, new.plan_id); + + /* + Everything below validates that the activities anchored to this one did not become invalid as a result of these changes. + + This only checks descendent start-time anchors, as we know that the state after an end-time anchor is valid + (As if it no longer is, it will be caught when that activity's row is processed by this trigger) + */ + -- Get the total offset from the most recent end-time anchor earlier in this activity's chain (or null if there is none) + with recursive end_time_anchor(activity_id, anchor_id, anchored_to_start, start_offset, total_offset) as ( + select new.id, new.anchor_id, new.anchored_to_start, new.start_offset, new.start_offset + union + select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, eta.total_offset + ad.start_offset + from merlin.activity_directive ad, end_time_anchor eta + where (ad.id, ad.plan_id) = (eta.anchor_id, new.plan_id) + and eta.anchor_id is not null -- stop at plan + and eta.anchored_to_start -- or stop at end time anchor + ) select into end_anchor_id, offset_from_end_anchor + anchor_id, total_offset from end_time_anchor eta -- get the id of the activity that the selected activity is anchored to + where not eta.anchored_to_start and eta.anchor_id is not null + limit 1; + + -- Not null iff the activity being looked at has some end anchor to another activity in its chain + if offset_from_end_anchor is not null then + select array_agg(activity_id) + from merlin.get_dependent_activities(new.id, new.plan_id) + where total_offset + offset_from_end_anchor < '0' + into invalid_descendant_act_ids; + + if invalid_descendant_act_ids is not null then + raise info 'The following Activity Directives now have a net negative offset relative to an end-time anchor on Activity Directive %: % \n' + 'There may be additional activities that are invalid relative to this activity.', + end_anchor_id, array_to_string(invalid_descendant_act_ids, ','); + 
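+    -- Record each offending descendant in anchor_validation_status (upsert keyed on (activity_id, plan_id))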
+ insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) + select id, new.plan_id, 'Activity Directive ' || id || ' has a net negative offset relative to an end-time' || + ' anchor on Activity Directive ' || end_anchor_id ||'.' + from unnest(invalid_descendant_act_ids) as id + on conflict (activity_id, plan_id) do update + set reason_invalid = 'Activity Directive ' || excluded.activity_id || ' has a net negative offset relative to an end-time' || + ' anchor on Activity Directive ' || end_anchor_id ||'.'; + end if; + end if; + + -- Gets the total offset from plan start (or null if there's an end-time anchor in the way) + with recursive anchors(activity_id, anchor_id, anchored_to_start, start_offset, total_offset) as ( + select new.id, new.anchor_id, new.anchored_to_start, new.start_offset, new.start_offset + union + select ad.id, ad.anchor_id, ad.anchored_to_start, ad.start_offset, anchors.total_offset + ad.start_offset + from merlin.activity_directive ad, anchors + where anchors.anchor_id is not null -- stop at plan + and (ad.id, ad.plan_id) = (anchors.anchor_id, new.plan_id) + and anchors.anchored_to_start -- or, stop at end-time offset + ) + select total_offset -- get the id of the activity that the selected activity is anchored to + from anchors a + where a.anchor_id is null + and a.anchored_to_start + limit 1 + into offset_from_plan_start; + + -- Not null iff the activity being looked at is connected to plan start via a chain of start anchors + if offset_from_plan_start is not null then + -- Validate descendents + invalid_descendant_act_ids := null; + select array_agg(activity_id) + from merlin.get_dependent_activities(new.id, new.plan_id) + where total_offset + offset_from_plan_start < '0' + into invalid_descendant_act_ids; -- grab all and split + + if invalid_descendant_act_ids is not null then + raise info 'The following Activity Directives now have a net negative offset relative to Plan Start: % \n' + 'There may be additional activities that are invalid relative to this activity.', + array_to_string(invalid_descendant_act_ids, ','); + + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) + select id, new.plan_id, 'Activity Directive ' || id || ' has a net negative offset relative to Plan Start.' + from unnest(invalid_descendant_act_ids) as id + on conflict (activity_id, plan_id) do update + set reason_invalid = 'Activity Directive ' || excluded.activity_id || ' has a net negative offset relative to Plan Start.'; + end if; + end if; + + -- These are both null iff the activity is anchored to plan end + if(offset_from_plan_start is null and offset_from_end_anchor is null) then + -- All dependent activities should have no errors, as Plan End can have an offset of any value. + insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid) + select da.activity_id, new.plan_id, '' + from merlin.get_dependent_activities(new.id, new.plan_id) as da + on conflict (activity_id, plan_id) do update + set reason_invalid = ''; + end if; + + -- Remove the error from the dependent activities that wouldn't have been flagged by the earlier checks. 
+  insert into merlin.anchor_validation_status (activity_id, plan_id, reason_invalid)
+  select da.activity_id, new.plan_id, ''
+  from merlin.get_dependent_activities(new.id, new.plan_id) as da
+  where total_offset + offset_from_plan_start >= '0'
+     or total_offset + offset_from_end_anchor >= '0' -- only one of these checks will run depending on which one has `null` behind the offset
+  on conflict (activity_id, plan_id) do update
+    set reason_invalid = '';
+
+  return new;
+end $$;
+
+create or replace function merlin.constraint_definition_set_revision()
+returns trigger
+volatile
+language plpgsql as $$
+declare
+  max_revision integer;
+begin
+  -- Grab the current max value of revision, or -1, if this is the first revision
+  select coalesce((select revision
+                   from merlin.constraint_definition
+                   where constraint_id = new.constraint_id
+                   order by revision desc
+                   limit 1), -1)
+  into max_revision;
+
+  new.revision = max_revision + 1;
+  return new;
+end
+$$;
+
+create or replace function merlin.delete_dataset_cascade()
+  returns trigger
+  security definer
+  language plpgsql as
+$$begin
+  delete from merlin.span s where s.dataset_id = old.id;
+  return old;
+end$$;
+
+create or replace function merlin.allocate_dataset_partitions(dataset_id integer)
+  returns merlin.dataset
+  security definer
+  language plpgsql as $$
+declare
+  dataset_ref merlin.dataset;
+begin
+  select * from merlin.dataset d where d.id = dataset_id into dataset_ref;
+  if dataset_ref.id is null
+  then
+    raise exception 'Cannot allocate partitions for non-existent dataset id %', dataset_id;
+  end if;
+
+  execute 'create table merlin.profile_segment_' || dataset_id || ' (
+      like merlin.profile_segment including defaults including constraints
+    );';
+  execute 'alter table merlin.profile_segment
+    attach partition merlin.profile_segment_' || dataset_id || ' for values in ('|| dataset_id ||');';
+
+  execute 'create table merlin.event_' || dataset_id || ' (
+      like merlin.event including defaults including constraints
+    );';
+  execute 'alter table merlin.event
+    attach partition merlin.event_' || dataset_id || ' for values in (' || dataset_id || ');';
+
+  execute 'create table merlin.span_' || dataset_id || ' (
+      like merlin.span including defaults including constraints
+    );';
+  execute 'alter table merlin.span
+    attach partition merlin.span_' || dataset_id || ' for values in (' || dataset_id || ');';
+
+  -- Create a self-referencing foreign key on the span partition table. We avoid referring to the top level span table
+  -- in order to avoid lock contention with concurrent inserts
+  call merlin.span_add_foreign_key_to_partition('merlin.span_' || dataset_id);
+  return dataset_ref;
+end$$;
+
+create or replace function merlin.call_create_partition()
+  returns trigger
+  security invoker
+  language plpgsql as $$
+begin
+  perform merlin.allocate_dataset_partitions(new.id);
+  return new;
+end
+$$;
+
+create or replace function merlin.event_integrity_function()
+  returns trigger
+  security invoker
+  language plpgsql as $$begin
+  if not exists(
+    select from merlin.topic t
+    where t.dataset_id = new.dataset_id
+      and t.topic_index = new.topic_index
+    for key share of t)
+  -- for key share is important: it makes sure that concurrent transactions cannot update
+  -- the columns that compose the topic's key until after this transaction commits.
+ then + raise exception 'foreign key violation: there is no topic with topic_index % in dataset %', new.topic_index, new.dataset_id; + end if; + return new; +end$$; + +create or replace function merlin.delete_profile_cascade() + returns trigger + security invoker + language plpgsql as +$$begin + delete from merlin.profile_segment ps + where ps.dataset_id = old.dataset_id and ps.profile_id = old.id; + return old; +end$$; + +create or replace function merlin.update_profile_cascade() + returns trigger + security invoker + language plpgsql as $$begin + if old.id != new.id or old.dataset_id != new.dataset_id + then + update merlin.profile_segment ps + set profile_id = new.id, + dataset_id = new.dataset_id + where ps.dataset_id = old.dataset_id and ps.profile_id = old.id; + end if; + return new; +end$$; + +create or replace function merlin.profile_segment_integrity_function() + returns trigger + security invoker + language plpgsql as $$begin + if not exists( + select from merlin.profile p + where p.dataset_id = new.dataset_id + and p.id = new.profile_id + for key share of p) + -- for key share is important: it makes sure that concurrent transactions cannot update + -- the columns that compose the profile's key until after this transaction commits. + then + raise exception 'foreign key violation: there is no profile with id % in dataset %', new.profile_id, new.dataset_id; + end if; + return new; +end$$; + +create or replace function merlin.span_integrity_function() + returns trigger + security invoker + language plpgsql as $$begin + if not exists(select from merlin.dataset d where d.id = new.dataset_id for key share of d) + then + raise exception 'foreign key violation: there is no dataset with id %', new.dataset_id; + end if; + return new; +end$$; + +create or replace function merlin.delete_topic_cascade() + returns trigger + security invoker + language plpgsql as $$ +begin + delete from merlin.event e + where e.topic_index = old.topic_index and e.dataset_id = old.dataset_id; + return old; +end +$$; + +create or replace function merlin.update_topic_cascade() + returns trigger + security invoker + language plpgsql as $$begin + if old.topic_index != new.topic_index or old.dataset_id != new.dataset_id + then + update merlin.event e + set topic_index = new.topic_index, + dataset_id = new.dataset_id + where e.dataset_id = old.dataset_id and e.topic_index = old.topic_index; + end if; + return new; +end$$; + +create or replace function merlin.set_revisions_and_initialize_dataset_on_insert() +returns trigger +security definer +language plpgsql as $$ +declare + simulation_ref merlin.simulation; + plan_ref merlin.plan; + model_ref merlin.mission_model; + template_ref merlin.simulation_template; + dataset_ref merlin.dataset; +begin + -- Set the revisions + select into simulation_ref * from merlin.simulation where id = new.simulation_id; + select into plan_ref * from merlin.plan where id = simulation_ref.plan_id; + select into template_ref * from merlin.simulation_template where id = simulation_ref.simulation_template_id; + select into model_ref * from merlin.mission_model where id = plan_ref.model_id; + new.model_revision = model_ref.revision; + new.plan_revision = plan_ref.revision; + new.simulation_template_revision = template_ref.revision; + new.simulation_revision = simulation_ref.revision; + + -- Create the dataset + insert into merlin.dataset + default values + returning * into dataset_ref; + new.dataset_id = dataset_ref.id; + new.dataset_revision = dataset_ref.revision; +return new; +end$$; + 
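+-- The functions below handle the rest of the simulation_dataset lifecycle: delete_dataset_on_delete
+-- removes the backing dataset row (old.dataset_id), and notify_simulation_workers announces new
+-- simulation requests via pg_notify on the 'simulation_notification' channel. An illustrative
+-- payload (field names taken from the payload CTE below; values are hypothetical) looks like:
+--   {"model_revision": 2, "plan_revision": 7, "simulation_revision": 3,
+--    "simulation_template_revision": 1, "dataset_id": 42, "simulation_id": 5, "plan_id": 9}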
+create or replace function merlin.delete_dataset_on_delete() +returns trigger +security definer +language plpgsql as $$begin + delete from merlin.dataset + where id = old.dataset_id; +return old; +end$$; + +create or replace function merlin.notify_simulation_workers() +returns trigger +security definer +language plpgsql as $$ +declare + simulation_ref merlin.simulation; +begin + select into simulation_ref * from merlin.simulation where id = new.simulation_id; + + perform ( + with payload(model_revision, + plan_revision, + simulation_revision, + simulation_template_revision, + dataset_id, + simulation_id, + plan_id) as + ( + select NEW.model_revision, + NEW.plan_revision, + NEW.simulation_revision, + NEW.simulation_template_revision, + NEW.dataset_id, + NEW.simulation_id, + simulation_ref.plan_id + ) + select pg_notify('simulation_notification', json_strip_nulls(row_to_json(payload))::text) + from payload + ); + return null; +end$$; + +create or replace function merlin.update_offset_from_plan_start() +returns trigger +security invoker +language plpgsql as $$ +declare + plan_start timestamptz; +begin + select p.start_time + from merlin.simulation s, merlin.plan p + where s.plan_id = p.id + and new.simulation_id = s.id + into plan_start; + + new.offset_from_plan_start = new.simulation_start_time - plan_start; + return new; +end +$$; + +create or replace function merlin.anchor_direct_descendents_to_plan(_activity_id int, _plan_id int) + returns setof merlin.activity_directive + language plpgsql as $$ +declare + _total_offset interval; +begin + if _plan_id is null then + raise exception 'Plan ID cannot be null.'; + end if; + if _activity_id is null then + raise exception 'Activity ID cannot be null.'; + end if; + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; + end if; + + with recursive history(activity_id, anchor_id, total_offset) as ( + select ad.id, ad.anchor_id, ad.start_offset + from merlin.activity_directive ad + where (ad.id, ad.plan_id) = (_activity_id, _plan_id) + union + select ad.id, ad.anchor_id, h.total_offset + ad.start_offset + from merlin.activity_directive ad, history h + where (ad.id, ad.plan_id) = (h.anchor_id, _plan_id) + and h.anchor_id is not null + ) select total_offset + from history + where history.anchor_id is null + into _total_offset; + + return query update merlin.activity_directive + set start_offset = start_offset + _total_offset, + anchor_id = null, + anchored_to_start = true + where (anchor_id, plan_id) = (_activity_id, _plan_id) + returning *; +end +$$; + +create or replace function merlin.anchor_direct_descendents_to_ancestor(_activity_id int, _plan_id int) + returns setof merlin.activity_directive + language plpgsql as $$ +declare + _current_offset interval; + _current_anchor_id int; +begin + if _plan_id is null then + raise exception 'Plan ID cannot be null.'; + end if; + if _activity_id is null then + raise exception 'Activity ID cannot be null.'; + end if; + if not exists(select id from merlin.activity_directive where (id, plan_id) = (_activity_id, _plan_id)) then + raise exception 'Activity Directive % does not exist in Plan %', _activity_id, _plan_id; + end if; + + select start_offset, anchor_id + from merlin.activity_directive + where (id, plan_id) = (_activity_id, _plan_id) + into _current_offset, _current_anchor_id; + + return query + update merlin.activity_directive + set start_offset = start_offset 
+ _current_offset, + anchor_id = _current_anchor_id + where (anchor_id, plan_id) = (_activity_id, _plan_id) + returning *; +end +$$; + +create or replace function merlin.create_snapshot(_plan_id integer) + returns integer + language plpgsql as $$ +begin + return merlin.create_snapshot(_plan_id, null, null, null); +end +$$; + +create or replace function merlin.create_snapshot(_plan_id integer, _snapshot_name text, _description text, _user text) + returns integer -- snapshot id inserted into the table + language plpgsql as $$ + declare + validate_plan_id integer; + inserted_snapshot_id integer; +begin + select id from merlin.plan where plan.id = _plan_id into validate_plan_id; + if validate_plan_id is null then + raise exception 'Plan % does not exist.', _plan_id; + end if; + + insert into merlin.plan_snapshot(plan_id, revision, snapshot_name, description, taken_by) + select id, revision, _snapshot_name, _description, _user + from merlin.plan where id = _plan_id + returning snapshot_id into inserted_snapshot_id; + insert into merlin.plan_snapshot_activities( + snapshot_id, id, name, source_scheduling_goal_id, created_at, created_by, + last_modified_at, last_modified_by, start_offset, type, + arguments, last_modified_arguments_at, metadata, anchor_id, anchored_to_start) + select + inserted_snapshot_id, -- this is the snapshot id + id, name, source_scheduling_goal_id, created_at, created_by, -- these are the rest of the data for an activity row + last_modified_at, last_modified_by, start_offset, type, + arguments, last_modified_arguments_at, metadata, anchor_id, anchored_to_start + from merlin.activity_directive where activity_directive.plan_id = _plan_id; + insert into merlin.preset_to_snapshot_directive(preset_id, activity_id, snapshot_id) + select ptd.preset_id, ptd.activity_id, inserted_snapshot_id + from merlin.preset_to_directive ptd + where ptd.plan_id = _plan_id; + insert into tags.snapshot_activity_tags(snapshot_id, directive_id, tag_id) + select inserted_snapshot_id, directive_id, tag_id + from tags.activity_directive_tags adt + where adt.plan_id = _plan_id; + + --all snapshots in plan_latest_snapshot for plan plan_id become the parent of the current snapshot + insert into merlin.plan_snapshot_parent(snapshot_id, parent_snapshot_id) + select inserted_snapshot_id, snapshot_id + from merlin.plan_latest_snapshot where plan_latest_snapshot.plan_id = _plan_id; + + --remove all of those entries from plan_latest_snapshot and add this new snapshot. 
+ delete from merlin.plan_latest_snapshot where plan_latest_snapshot.plan_id = _plan_id; + insert into merlin.plan_latest_snapshot(plan_id, snapshot_id) values (_plan_id, inserted_snapshot_id); + + return inserted_snapshot_id; + end; +$$; + +create or replace function merlin.get_plan_history(starting_plan_id integer) + returns setof integer + language plpgsql as $$ + declare + validate_id integer; + begin + select plan.id from merlin.plan where plan.id = starting_plan_id into validate_id; + if validate_id is null then + raise exception 'Plan ID % is not present in plan table.', starting_plan_id; + end if; + + return query with recursive history(id) as ( + values(starting_plan_id) -- base case + union + select parent_id from merlin.plan p + join history on history.id = p.id and p.parent_id is not null-- recursive case + ) select * from history; + end +$$; + +create or replace function merlin.get_snapshot_history_from_plan(starting_plan_id integer) + returns setof integer + language plpgsql as $$ + begin + return query + select merlin.get_snapshot_history(snapshot_id) --runs the recursion + from merlin.plan_latest_snapshot where plan_id = starting_plan_id; --supplies input for get_snapshot_history + end +$$; + +create or replace function merlin.get_snapshot_history(starting_snapshot_id integer) + returns setof integer + language plpgsql as $$ + declare + validate_id integer; +begin + select plan_snapshot.snapshot_id from merlin.plan_snapshot where plan_snapshot.snapshot_id = starting_snapshot_id into validate_id; + if validate_id is null then + raise exception 'Snapshot ID % is not present in plan_snapshot table.', starting_snapshot_id; + end if; + + return query with recursive history(id) as ( + values(starting_snapshot_id) --base case + union + select parent_snapshot_id from merlin.plan_snapshot_parent psp + join history on id = psp.snapshot_id --recursive case + ) select * from history; +end +$$; + +create or replace procedure merlin.restore_from_snapshot(_plan_id integer, _snapshot_id integer) + language plpgsql as $$ + declare + _snapshot_name text; + _plan_name text; + begin + -- Input Validation + select name from merlin.plan where id = _plan_id into _plan_name; + if _plan_name is null then + raise exception 'Cannot Restore: Plan with ID % does not exist.', _plan_id; + end if; + if not exists(select snapshot_id from merlin.plan_snapshot where snapshot_id = _snapshot_id) then + raise exception 'Cannot Restore: Snapshot with ID % does not exist.', _snapshot_id; + end if; + if not exists(select snapshot_id from merlin.plan_snapshot where _snapshot_id = snapshot_id and _plan_id = plan_id ) then + select snapshot_name from merlin.plan_snapshot where snapshot_id = _snapshot_id into _snapshot_name; + if _snapshot_name is not null then + raise exception 'Cannot Restore: Snapshot ''%'' (ID %) is not a snapshot of Plan ''%'' (ID %)', + _snapshot_name, _snapshot_id, _plan_name, _plan_id; + else + raise exception 'Cannot Restore: Snapshot % is not a snapshot of Plan ''%'' (ID %)', + _snapshot_id, _plan_name, _plan_id; + end if; + end if; + + -- Catch Plan_Locked + call merlin.plan_locked_exception(_plan_id); + + -- Record the Union of Activities in Plan and Snapshot + -- and note which ones have been added since the Snapshot was taken (in_snapshot = false) + create temp table diff( + activity_id integer, + in_snapshot boolean not null + ); + insert into diff(activity_id, in_snapshot) + select id as activity_id, true + from merlin.plan_snapshot_activities where snapshot_id = _snapshot_id; + + 
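+    -- Activities present in the plan but absent from the snapshot were added after the snapshot was
+    -- taken; record them with in_snapshot = false so the next step can delete them.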
insert into diff (activity_id, in_snapshot) + select activity_id, false + from( + select id as activity_id + from merlin.activity_directive + where plan_id = _plan_id + except + select activity_id + from diff) a; + + -- Remove any added activities + delete from merlin.activity_directive ad + using diff d + where (ad.id, ad.plan_id) = (d.activity_id, _plan_id) + and d.in_snapshot is false; + + -- Upsert the rest + insert into merlin.activity_directive ( + id, plan_id, name, source_scheduling_goal_id, created_at, created_by, last_modified_at, last_modified_by, + start_offset, type, arguments, last_modified_arguments_at, metadata, + anchor_id, anchored_to_start) + select psa.id, _plan_id, psa.name, psa.source_scheduling_goal_id, psa.created_at, psa.created_by, psa.last_modified_at, psa.last_modified_by, + psa.start_offset, psa.type, psa.arguments, psa.last_modified_arguments_at, psa.metadata, + psa.anchor_id, psa.anchored_to_start + from merlin.plan_snapshot_activities psa + where psa.snapshot_id = _snapshot_id + on conflict (id, plan_id) do update + -- 'last_modified_at' and 'last_modified_arguments_at' are skipped during update, as triggers will overwrite them to now() + set name = excluded.name, + source_scheduling_goal_id = excluded.source_scheduling_goal_id, + created_at = excluded.created_at, + created_by = excluded.created_by, + last_modified_by = excluded.last_modified_by, + start_offset = excluded.start_offset, + type = excluded.type, + arguments = excluded.arguments, + metadata = excluded.metadata, + anchor_id = excluded.anchor_id, + anchored_to_start = excluded.anchored_to_start; + + -- Tags + delete from tags.activity_directive_tags adt + using diff d + where (adt.directive_id, adt.plan_id) = (d.activity_id, _plan_id); + + insert into tags.activity_directive_tags(directive_id, plan_id, tag_id) + select sat.directive_id, _plan_id, sat.tag_id + from tags.snapshot_activity_tags sat + where sat.snapshot_id = _snapshot_id + on conflict (directive_id, plan_id, tag_id) do nothing; + + -- Presets + delete from merlin.preset_to_directive + where plan_id = _plan_id; + insert into merlin.preset_to_directive(preset_id, activity_id, plan_id) + select pts.preset_id, pts.activity_id, _plan_id + from merlin.preset_to_snapshot_directive pts + where pts.snapshot_id = _snapshot_id + on conflict (activity_id, plan_id) + do update set preset_id = excluded.preset_id; + + -- Clean up + drop table diff; + end +$$; + +create or replace procedure merlin.begin_merge(_merge_request_id integer, review_username text) + language plpgsql as $$ + declare + validate_id integer; + validate_status merlin.merge_request_status; + validate_non_no_op_status merlin.activity_change_type; + snapshot_id_supplying integer; + plan_id_receiving integer; + merge_base_id integer; +begin + -- validate id and status + select id, status + from merlin.merge_request + where _merge_request_id = id + into validate_id, validate_status; + + if validate_id is null then + raise exception 'Request ID % is not present in merge_request table.', _merge_request_id; + end if; + + if validate_status != 'pending' then + raise exception 'Cannot begin request. 
Merge request % is not in pending state.', _merge_request_id; + end if; + + -- select from merge-request the snapshot_sc (s_sc) and plan_rc (p_rc) ids + select plan_id_receiving_changes, snapshot_id_supplying_changes + from merlin.merge_request + where id = _merge_request_id + into plan_id_receiving, snapshot_id_supplying; + + -- ensure the plan receiving changes isn't locked + if (select is_locked from merlin.plan where plan.id=plan_id_receiving) then + raise exception 'Cannot begin merge request. Plan to receive changes is locked.'; + end if; + + -- lock plan_rc + update merlin.plan + set is_locked = true + where plan.id = plan_id_receiving; + + -- get merge base (mb) + select merlin.get_merge_base(plan_id_receiving, snapshot_id_supplying) + into merge_base_id; + + -- update the status to "in progress" + update merlin.merge_request + set status = 'in-progress', + merge_base_snapshot_id = merge_base_id, + reviewer_username = review_username + where id = _merge_request_id; + + + -- perform diff between mb and s_sc (s_diff) + -- delete is B minus A on key + -- add is A minus B on key + -- A intersect B is no op + -- A minus B on everything except everything currently in the table is modify + create temp table supplying_diff( + activity_id integer, + change_type merlin.activity_change_type not null + ); + + insert into supplying_diff (activity_id, change_type) + select activity_id, 'delete' + from( + select id as activity_id + from merlin.plan_snapshot_activities + where snapshot_id = merge_base_id + except + select id as activity_id + from merlin.plan_snapshot_activities + where snapshot_id = snapshot_id_supplying) a; + + insert into supplying_diff (activity_id, change_type) + select activity_id, 'add' + from( + select id as activity_id + from merlin.plan_snapshot_activities + where snapshot_id = snapshot_id_supplying + except + select id as activity_id + from merlin.plan_snapshot_activities + where snapshot_id = merge_base_id) a; + + insert into supplying_diff (activity_id, change_type) + select activity_id, 'none' + from( + select psa.id as activity_id, name, tags.tag_ids_activity_snapshot(psa.id, merge_base_id), + source_scheduling_goal_id, created_at, start_offset, type, arguments, + metadata, anchor_id, anchored_to_start + from merlin.plan_snapshot_activities psa + where psa.snapshot_id = merge_base_id + intersect + select id as activity_id, name, tags.tag_ids_activity_snapshot(psa.id, snapshot_id_supplying), + source_scheduling_goal_id, created_at, start_offset, type, arguments, + metadata, anchor_id, anchored_to_start + from merlin.plan_snapshot_activities psa + where psa.snapshot_id = snapshot_id_supplying) a; + + insert into supplying_diff (activity_id, change_type) + select activity_id, 'modify' + from( + select id as activity_id from merlin.plan_snapshot_activities + where snapshot_id = merge_base_id or snapshot_id = snapshot_id_supplying + except + select activity_id from supplying_diff) a; + + -- perform diff between mb and p_rc (r_diff) + create temp table receiving_diff( + activity_id integer, + change_type merlin.activity_change_type not null + ); + + insert into receiving_diff (activity_id, change_type) + select activity_id, 'delete' + from( + select id as activity_id + from merlin.plan_snapshot_activities + where snapshot_id = merge_base_id + except + select id as activity_id + from merlin.activity_directive + where plan_id = plan_id_receiving) a; + + insert into receiving_diff (activity_id, change_type) + select activity_id, 'add' + from( + select id as activity_id + from 
merlin.activity_directive + where plan_id = plan_id_receiving + except + select id as activity_id + from merlin.plan_snapshot_activities + where snapshot_id = merge_base_id) a; + + insert into receiving_diff (activity_id, change_type) + select activity_id, 'none' + from( + select id as activity_id, name, tags.tag_ids_activity_snapshot(id, merge_base_id), + source_scheduling_goal_id, created_at, start_offset, type, arguments, + metadata, anchor_id, anchored_to_start + from merlin.plan_snapshot_activities psa + where psa.snapshot_id = merge_base_id + intersect + select id as activity_id, name, tags.tag_ids_activity_directive(id, plan_id_receiving), + source_scheduling_goal_id, created_at, start_offset, type, arguments, + metadata, anchor_id, anchored_to_start + from merlin.activity_directive ad + where ad.plan_id = plan_id_receiving) a; + + insert into receiving_diff (activity_id, change_type) + select activity_id, 'modify' + from ( + (select id as activity_id + from merlin.plan_snapshot_activities + where snapshot_id = merge_base_id + union + select id as activity_id + from merlin.activity_directive + where plan_id = plan_id_receiving) + except + select activity_id + from receiving_diff) a; + + + -- perform diff between s_diff and r_diff + -- upload the non-conflicts into merge_staging_area + -- upload conflict into conflicting_activities + create temp table diff_diff( + activity_id integer, + change_type_supplying merlin.activity_change_type not null, + change_type_receiving merlin.activity_change_type not null + ); + + -- this is going to require us to do the "none" operation again on the remaining modifies + -- but otherwise we can just dump the 'adds' and 'none' into the merge staging area table + + -- 'delete' against a 'delete' does not enter the merge staging area table + -- receiving 'delete' against supplying 'none' does not enter the merge staging area table + + insert into merlin.merge_staging_area ( + merge_request_id, activity_id, name, tags, source_scheduling_goal_id, created_at, created_by, last_modified_by, + start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type + ) + -- 'adds' can go directly into the merge staging area table + select _merge_request_id, activity_id, name, tags.tag_ids_activity_snapshot(s_diff.activity_id, psa.snapshot_id), source_scheduling_goal_id, created_at, + created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type + from supplying_diff as s_diff + join merlin.plan_snapshot_activities psa + on s_diff.activity_id = psa.id + where snapshot_id = snapshot_id_supplying and change_type = 'add' + union + -- an 'add' between the receiving plan and merge base is actually a 'none' + select _merge_request_id, activity_id, name, tags.tag_ids_activity_directive(r_diff.activity_id, ad.plan_id), source_scheduling_goal_id, created_at, + created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, 'none'::merlin.activity_change_type + from receiving_diff as r_diff + join merlin.activity_directive ad + on r_diff.activity_id = ad.id + where plan_id = plan_id_receiving and change_type = 'add'; + + -- put the rest in diff_diff + insert into diff_diff (activity_id, change_type_supplying, change_type_receiving) + select activity_id, supplying_diff.change_type as change_type_supplying, receiving_diff.change_type as change_type_receiving + from receiving_diff + join supplying_diff using (activity_id) + where receiving_diff.change_type != 'add' or 
supplying_diff.change_type != 'add'; + + -- ...except for that which is not recorded + delete from diff_diff + where (change_type_receiving = 'delete' and change_type_supplying = 'delete') + or (change_type_receiving = 'delete' and change_type_supplying = 'none'); + + insert into merlin.merge_staging_area ( + merge_request_id, activity_id, name, tags, source_scheduling_goal_id, created_at, created_by, last_modified_by, + start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type + ) + -- receiving 'none' and 'modify' against 'none' in the supplying side go into the merge staging area as 'none' + select _merge_request_id, activity_id, name, tags.tag_ids_activity_directive(diff_diff.activity_id, plan_id), source_scheduling_goal_id, created_at, + created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, 'none' + from diff_diff + join merlin.activity_directive + on activity_id=id + where plan_id = plan_id_receiving + and change_type_supplying = 'none' + and (change_type_receiving = 'modify' or change_type_receiving = 'none') + union + -- supplying 'modify' against receiving 'none' go into the merge staging area as 'modify' + select _merge_request_id, activity_id, name, tags.tag_ids_activity_snapshot(diff_diff.activity_id, snapshot_id), source_scheduling_goal_id, created_at, + created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type_supplying + from diff_diff + join merlin.plan_snapshot_activities p + on diff_diff.activity_id = p.id + where snapshot_id = snapshot_id_supplying + and (change_type_receiving = 'none' and diff_diff.change_type_supplying = 'modify') + union + -- supplying 'delete' against receiving 'none' go into the merge staging area as 'delete' + select _merge_request_id, activity_id, name, tags.tag_ids_activity_directive(diff_diff.activity_id, plan_id), source_scheduling_goal_id, created_at, + created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type_supplying + from diff_diff + join merlin.activity_directive p + on diff_diff.activity_id = p.id + where plan_id = plan_id_receiving + and (change_type_receiving = 'none' and diff_diff.change_type_supplying = 'delete'); + + -- 'modify' against a 'modify' must be checked for equality first. 
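+ -- The intersection below compares the supplying snapshot's copy of each doubly-modified activity with the
+ -- receiving plan's copy across every compared column (tags included); activities that match on both sides
+ -- were changed identically, so they are staged as 'none' rather than raised as conflicts.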
+ with false_modify as ( + select activity_id, name, tags.tag_ids_activity_directive(dd.activity_id, psa.snapshot_id) as tags, + source_scheduling_goal_id, created_at, start_offset, type, arguments, metadata, anchor_id, anchored_to_start + from merlin.plan_snapshot_activities psa + join diff_diff dd + on dd.activity_id = psa.id + where psa.snapshot_id = snapshot_id_supplying + and (dd.change_type_receiving = 'modify' and dd.change_type_supplying = 'modify') + intersect + select activity_id, name, tags.tag_ids_activity_directive(dd.activity_id, ad.plan_id) as tags, + source_scheduling_goal_id, created_at, start_offset, type, arguments, metadata, anchor_id, anchored_to_start + from diff_diff dd + join merlin.activity_directive ad + on dd.activity_id = ad.id + where ad.plan_id = plan_id_receiving + and (dd.change_type_supplying = 'modify' and dd.change_type_receiving = 'modify')) + insert into merlin.merge_staging_area ( + merge_request_id, activity_id, name, tags, source_scheduling_goal_id, created_at, created_by, last_modified_by, + start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type) + select _merge_request_id, ad.id, ad.name, tags, ad.source_scheduling_goal_id, ad.created_at, ad.created_by, + ad.last_modified_by, ad.start_offset, ad.type, ad.arguments, ad.metadata, ad.anchor_id, ad.anchored_to_start, 'none' + from false_modify fm + left join merlin.activity_directive ad + on (ad.plan_id, ad.id) = (plan_id_receiving, fm.activity_id); + + -- 'modify' against 'delete' and inequal 'modify' against 'modify' goes into conflict table (aka everything left in diff_diff) + insert into merlin.conflicting_activities (merge_request_id, activity_id, change_type_supplying, change_type_receiving) + select begin_merge._merge_request_id, activity_id, change_type_supplying, change_type_receiving + from (select begin_merge._merge_request_id, activity_id + from diff_diff + except + select msa.merge_request_id, activity_id + from merlin.merge_staging_area msa) a + join diff_diff using (activity_id); + + -- Fail if there are no differences between the snapshot and the plan getting merged + validate_non_no_op_status := null; + select change_type_receiving + from merlin.conflicting_activities + where merge_request_id = _merge_request_id + limit 1 + into validate_non_no_op_status; + + if validate_non_no_op_status is null then + select change_type + from merlin.merge_staging_area msa + where merge_request_id = _merge_request_id + and msa.change_type != 'none' + limit 1 + into validate_non_no_op_status; + + if validate_non_no_op_status is null then + raise exception 'Cannot begin merge. 
The contents of the two plans are identical.'; + end if; + end if; + + + -- clean up + drop table supplying_diff; + drop table receiving_diff; + drop table diff_diff; +end +$$; + +create or replace procedure merlin.commit_merge(_request_id integer) + language plpgsql as $$ + declare + validate_noConflicts integer; + plan_id_R integer; + snapshot_id_S integer; +begin + if(select id from merlin.merge_request where id = _request_id) is null then + raise exception 'Invalid merge request id %.', _request_id; + end if; + + -- Stop if this merge is not 'in-progress' + if (select status from merlin.merge_request where id = _request_id) != 'in-progress' then + raise exception 'Cannot commit a merge request that is not in-progress.'; + end if; + + -- Stop if any conflicts have not been resolved + select * from merlin.conflicting_activities + where merge_request_id = _request_id and resolution = 'none' + limit 1 + into validate_noConflicts; + + if(validate_noConflicts is not null) then + raise exception 'There are unresolved conflicts in merge request %. Cannot commit merge.', _request_id; + end if; + + select plan_id_receiving_changes from merlin.merge_request mr where mr.id = _request_id into plan_id_R; + select snapshot_id_supplying_changes from merlin.merge_request mr where mr.id = _request_id into snapshot_id_S; + + insert into merlin.merge_staging_area( + merge_request_id, activity_id, name, tags, source_scheduling_goal_id, created_at, created_by, last_modified_by, + start_offset, type, arguments, metadata, anchor_id, anchored_to_start, change_type) + -- gather delete data from the opposite tables + select _request_id, activity_id, name, tags.tag_ids_activity_directive(ca.activity_id, ad.plan_id), + source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, + 'delete'::merlin.activity_change_type + from merlin.conflicting_activities ca + join merlin.activity_directive ad + on ca.activity_id = ad.id + where ca.resolution = 'supplying' + and ca.merge_request_id = _request_id + and plan_id = plan_id_R + and ca.change_type_supplying = 'delete' + union + select _request_id, activity_id, name, tags.tag_ids_activity_snapshot(ca.activity_id, psa.snapshot_id), + source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, + 'delete'::merlin.activity_change_type + from merlin.conflicting_activities ca + join merlin.plan_snapshot_activities psa + on ca.activity_id = psa.id + where ca.resolution = 'receiving' + and ca.merge_request_id = _request_id + and snapshot_id = snapshot_id_S + and ca.change_type_receiving = 'delete' + union + select _request_id, activity_id, name, tags.tag_ids_activity_directive(ca.activity_id, ad.plan_id), + source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, + 'none'::merlin.activity_change_type + from merlin.conflicting_activities ca + join merlin.activity_directive ad + on ca.activity_id = ad.id + where ca.resolution = 'receiving' + and ca.merge_request_id = _request_id + and plan_id = plan_id_R + and ca.change_type_receiving = 'modify' + union + select _request_id, activity_id, name, tags.tag_ids_activity_snapshot(ca.activity_id, psa.snapshot_id), + source_scheduling_goal_id, created_at, created_by, last_modified_by, start_offset, type, arguments, metadata, anchor_id, anchored_to_start, + 'modify'::merlin.activity_change_type + from 
merlin.conflicting_activities ca + join merlin.plan_snapshot_activities psa + on ca.activity_id = psa.id + where ca.resolution = 'supplying' + and ca.merge_request_id = _request_id + and snapshot_id = snapshot_id_S + and ca.change_type_supplying = 'modify'; + + -- Unlock so that updates can be written + update merlin.plan + set is_locked = false + where id = plan_id_R; + + -- Update the plan's activities to match merge-staging-area's activities + -- Add + insert into merlin.activity_directive( + id, plan_id, name, source_scheduling_goal_id, created_at, created_by, last_modified_by, + start_offset, type, arguments, metadata, anchor_id, anchored_to_start ) + select activity_id, plan_id_R, name, source_scheduling_goal_id, created_at, created_by, last_modified_by, + start_offset, type, arguments, metadata, anchor_id, anchored_to_start + from merlin.merge_staging_area + where merge_staging_area.merge_request_id = _request_id + and change_type = 'add'; + + -- Modify + insert into merlin.activity_directive( + id, plan_id, "name", source_scheduling_goal_id, created_at, created_by, last_modified_by, + start_offset, "type", arguments, metadata, anchor_id, anchored_to_start ) + select activity_id, plan_id_R, "name", source_scheduling_goal_id, created_at, created_by, last_modified_by, + start_offset, "type", arguments, metadata, anchor_id, anchored_to_start + from merlin.merge_staging_area + where merge_staging_area.merge_request_id = _request_id + and change_type = 'modify' + on conflict (id, plan_id) + do update + set name = excluded.name, + source_scheduling_goal_id = excluded.source_scheduling_goal_id, + created_at = excluded.created_at, + created_by = excluded.created_by, + last_modified_by = excluded.last_modified_by, + start_offset = excluded.start_offset, + type = excluded.type, + arguments = excluded.arguments, + metadata = excluded.metadata, + anchor_id = excluded.anchor_id, + anchored_to_start = excluded.anchored_to_start; + + -- Tags + delete from tags.activity_directive_tags adt + using merlin.merge_staging_area msa + where adt.directive_id = msa.activity_id + and adt.plan_id = plan_id_R + and msa.merge_request_id = _request_id + and msa.change_type = 'modify'; + + insert into tags.activity_directive_tags(plan_id, directive_id, tag_id) + select plan_id_R, activity_id, t.id + from merlin.merge_staging_area msa + inner join tags.tags t -- Inner join because it's specifically inserting into a tags-association table, so if there are no valid tags we do not want a null value for t.id + on t.id = any(msa.tags) + where msa.merge_request_id = _request_id + and (change_type = 'modify' + or change_type = 'add') + on conflict (directive_id, plan_id, tag_id) do nothing; + -- Presets + insert into merlin.preset_to_directive(preset_id, activity_id, plan_id) + select pts.preset_id, pts.activity_id, plan_id_R + from merlin.merge_staging_area msa + inner join merlin.preset_to_snapshot_directive pts using (activity_id) + where pts.snapshot_id = snapshot_id_S + and msa.merge_request_id = _request_id + and (msa.change_type = 'add' + or msa.change_type = 'modify') + on conflict (activity_id, plan_id) + do update + set preset_id = excluded.preset_id; + + -- Delete + delete from merlin.activity_directive ad + using merlin.merge_staging_area msa + where ad.id = msa.activity_id + and ad.plan_id = plan_id_R + and msa.merge_request_id = _request_id + and msa.change_type = 'delete'; + + -- Clean up + delete from merlin.conflicting_activities where merge_request_id = _request_id; + delete from 
merlin.merge_staging_area where merge_staging_area.merge_request_id = _request_id; + + update merlin.merge_request + set status = 'accepted' + where id = _request_id; + + -- Attach snapshot history + insert into merlin.plan_latest_snapshot(plan_id, snapshot_id) + select plan_id_receiving_changes, snapshot_id_supplying_changes + from merlin.merge_request + where id = _request_id; +end +$$; + +create or replace function merlin.duplicate_plan(_plan_id integer, new_plan_name text, new_owner text) + returns integer -- plan_id of the new plan + security definer + language plpgsql as $$ + declare + validate_plan_id integer; + new_plan_id integer; + created_snapshot_id integer; +begin + select id from merlin.plan where plan.id = _plan_id into validate_plan_id; + if(validate_plan_id is null) then + raise exception 'Plan % does not exist.', _plan_id; + end if; + + select merlin.create_snapshot(_plan_id) into created_snapshot_id; + + insert into merlin.plan(revision, name, model_id, duration, start_time, parent_id, owner, updated_by) + select + 0, new_plan_name, model_id, duration, start_time, _plan_id, new_owner, new_owner + from merlin.plan where id = _plan_id + returning id into new_plan_id; + insert into merlin.activity_directive( + id, plan_id, name, source_scheduling_goal_id, created_at, created_by, last_modified_at, last_modified_by, start_offset, type, arguments, + last_modified_arguments_at, metadata, anchor_id, anchored_to_start) + select + id, new_plan_id, name, source_scheduling_goal_id, created_at, created_by, last_modified_at, last_modified_by, start_offset, type, arguments, + last_modified_arguments_at, metadata, anchor_id, anchored_to_start + from merlin.activity_directive where activity_directive.plan_id = _plan_id; + + with source_plan as ( + select simulation_template_id, arguments, simulation_start_time, simulation_end_time + from merlin.simulation + where simulation.plan_id = _plan_id + ) + update merlin.simulation s + set simulation_template_id = source_plan.simulation_template_id, + arguments = source_plan.arguments, + simulation_start_time = source_plan.simulation_start_time, + simulation_end_time = source_plan.simulation_end_time + from source_plan + where s.plan_id = new_plan_id; + + insert into merlin.preset_to_directive(preset_id, activity_id, plan_id) + select preset_id, activity_id, new_plan_id + from merlin.preset_to_directive ptd where ptd.plan_id = _plan_id; + + insert into tags.plan_tags(plan_id, tag_id) + select new_plan_id, tag_id + from tags.plan_tags pt where pt.plan_id = _plan_id; + insert into tags.activity_directive_tags(plan_id, directive_id, tag_id) + select new_plan_id, directive_id, tag_id + from tags.activity_directive_tags adt where adt.plan_id = _plan_id; + + insert into merlin.plan_latest_snapshot(plan_id, snapshot_id) values(new_plan_id, created_snapshot_id); + return new_plan_id; +end +$$; + +create or replace function merlin.get_merge_base(plan_id_receiving_changes integer, snapshot_id_supplying_changes integer) + returns integer + language plpgsql as $$ + declare + result integer; +begin + select * from + ( + select merlin.get_snapshot_history_from_plan(plan_id_receiving_changes) as ids + intersect + select merlin.get_snapshot_history(snapshot_id_supplying_changes) as ids + ) + as ids + order by ids desc + limit 1 + into result; + return result; +end +$$; + +create or replace function merlin.create_merge_request(plan_id_supplying integer, plan_id_receiving integer, request_username text) + returns integer + language plpgsql as $$ +declare + 
merge_base_snapshot_id integer; + validate_planIds integer; + supplying_snapshot_id integer; + merge_request_id integer; +begin + if plan_id_receiving = plan_id_supplying then + raise exception 'Cannot create a merge request between a plan and itself.'; + end if; + select id from merlin.plan where plan.id = plan_id_receiving into validate_planIds; + if validate_planIds is null then + raise exception 'Plan receiving changes (Plan %) does not exist.', plan_id_receiving; + end if; + select id from merlin.plan where plan.id = plan_id_supplying into validate_planIds; + if validate_planIds is null then + raise exception 'Plan supplying changes (Plan %) does not exist.', plan_id_supplying; + end if; + + select merlin.create_snapshot(plan_id_supplying) into supplying_snapshot_id; + + select merlin.get_merge_base(plan_id_receiving, supplying_snapshot_id) into merge_base_snapshot_id; + if merge_base_snapshot_id is null then + raise exception 'Cannot create merge request between unrelated plans.'; + end if; + + insert into merlin.merge_request(plan_id_receiving_changes, snapshot_id_supplying_changes, merge_base_snapshot_id, requester_username) + values(plan_id_receiving, supplying_snapshot_id, merge_base_snapshot_id, request_username) + returning id into merge_request_id; + return merge_request_id; +end +$$; + +create or replace procedure merlin.withdraw_merge_request(request_id integer) + language plpgsql as +$$ +declare + validate_status merlin.merge_request_status; +begin + select status from merlin.merge_request where id = request_id into validate_status; + if validate_status is null then + raise exception 'Merge request % does not exist. Cannot withdraw request.', request_id; + elsif validate_status != 'pending' and validate_status != 'withdrawn' then + raise exception 'Cannot withdraw request.'; + end if; + + update merlin.merge_request + set status = 'withdrawn' + where id = request_id; +end +$$; + +create or replace procedure merlin.deny_merge(request_id integer) + language plpgsql as $$ +begin + if(select id from merlin.merge_request where id = request_id) is null then + raise exception 'Invalid merge request id %.', request_id; + end if; + + if (select status from merlin.merge_request where id = request_id) != 'in-progress' then + raise exception 'Cannot reject merge not in progress.'; + end if; + + delete from merlin.conflicting_activities where merge_request_id = request_id; + delete from merlin.merge_staging_area where merge_staging_area.merge_request_id = deny_merge.request_id; + + update merlin.merge_request + set status = 'rejected' + where merge_request.id = request_id; + + update merlin.plan + set is_locked = false + where plan.id = (select plan_id_receiving_changes from merlin.merge_request where id = request_id); +end +$$; + +create or replace procedure merlin.cancel_merge(request_id integer) + language plpgsql as $$ +declare + verify_status merlin.merge_request_status; +begin + if(select id from merlin.merge_request where id = request_id) is null then + raise exception 'Invalid merge request id %.', request_id; + end if; + + select status from merlin.merge_request where id = request_id into verify_status; + if not (verify_status = 'in-progress' or verify_status = 'pending') then + raise exception 'Cannot cancel merge.'; + end if; + + delete from merlin.conflicting_activities where merge_request_id = request_id; + delete from merlin.merge_staging_area where merge_staging_area.merge_request_id = cancel_merge.request_id; + + update merlin.merge_request + set status = 'pending' + 
where merge_request.id = request_id;
+
+ update merlin.plan
+ set is_locked = false
+ where plan.id = (select plan_id_receiving_changes from merlin.merge_request where id = request_id);
+end
+$$;
+
+create or replace procedure merlin.plan_locked_exception(plan_id integer)
+language plpgsql as $$
+ begin
+ if(select is_locked from merlin.plan p where p.id = plan_id limit 1) then
+ raise exception 'Plan % is locked.', plan_id;
+ end if;
+ end
+$$;
+
+comment on procedure merlin.plan_locked_exception(plan_id integer) is e''
+ 'Verify that the specified plan is unlocked, throwing an exception if not.';
+
+create or replace function merlin.populate_constraint_spec_new_plan()
+returns trigger
+language plpgsql as $$
+begin
+ insert into merlin.constraint_specification (plan_id, constraint_id, constraint_revision)
+ select new.id, cms.constraint_id, cms.constraint_revision
+ from merlin.constraint_model_specification cms
+ where cms.model_id = new.model_id;
+ return new;
+end;
+$$;
+
+create or replace function merlin.create_simulation_row_for_new_plan()
+returns trigger
+security definer
+language plpgsql as $$begin
+ insert into merlin.simulation (revision, simulation_template_id, plan_id, arguments, simulation_start_time, simulation_end_time)
+ values (0, null, new.id, '{}', new.start_time, new.start_time+new.duration);
+ return new;
+end
+$$;
+
+end;
diff --git a/deployment/merge_aerie_db/merge_db/migrate_permissions_functions.sql b/deployment/merge_aerie_db/merge_db/migrate_permissions_functions.sql
new file mode 100644
index 0000000000..39b2cde4bb
--- /dev/null
+++ b/deployment/merge_aerie_db/merge_db/migrate_permissions_functions.sql
@@ -0,0 +1,438 @@
+create or replace function permissions.insert_permission_for_user_role()
+ returns trigger
+ security definer
+language plpgsql as $$
+ begin
+ insert into permissions.user_role_permission(role)
+ values (new.role);
+ return new;
+ end
+$$;
+
+create or replace function permissions.validate_permissions_json()
+returns trigger
+language plpgsql as $$
+ declare
+ error_msg text;
+ plan_merge_fns text[];
+begin
+ error_msg = '';
+
+ plan_merge_fns := '{
+ "begin_merge",
+ "cancel_merge",
+ "commit_merge",
+ "create_merge_rq",
+ "deny_merge",
+ "get_conflicting_activities",
+ "get_non_conflicting_activities",
+ "set_resolution",
+ "set_resolution_bulk",
+ "withdraw_merge_rq"
+ }';
+
+ -- Do all the validation checks up front
+ -- Duplicate keys are not checked for, as all but the last instance is removed
+ -- during conversion of JSON Text to JSONB (https://www.postgresql.org/docs/14/datatype-json.html)
+ create temp table _validate_functions_table as
+ select
+ jsonb_object_keys(new.function_permissions) as function_key,
+ new.function_permissions ->> jsonb_object_keys(new.function_permissions) as function_permission,
+ jsonb_object_keys(new.function_permissions) = any(enum_range(null::permissions.function_permission_key)::text[]) as valid_function_key,
+ new.function_permissions ->> jsonb_object_keys(new.function_permissions) = any(enum_range(null::permissions.permission)::text[]) as valid_function_permission,
+ jsonb_object_keys(new.function_permissions) = any(plan_merge_fns) as is_plan_merge_key,
+ new.function_permissions ->> jsonb_object_keys(new.function_permissions) = any(enum_range('PLAN_OWNER_SOURCE'::permissions.permission, 'PLAN_OWNER_COLLABORATOR_TARGET'::permissions.permission)::text[]) as is_plan_merge_permission;
+
+ create temp table _validate_actions_table as
+ select
+ jsonb_object_keys(new.action_permissions) as
action_key, + new.action_permissions ->> jsonb_object_keys(new.action_permissions) as action_permission, + jsonb_object_keys(new.action_permissions) = any(enum_range(null::permissions.action_permission_key)::text[]) as valid_action_key, + new.action_permissions ->> jsonb_object_keys(new.action_permissions) = any(enum_range(null::permissions.permission)::text[]) as valid_action_permission, + new.action_permissions ->> jsonb_object_keys(new.action_permissions) = any(enum_range('PLAN_OWNER_SOURCE'::permissions.permission, 'PLAN_OWNER_COLLABORATOR_TARGET'::permissions.permission)::text[]) as is_plan_merge_permission; + + + -- Get any invalid Action Keys + if exists(select from _validate_actions_table where not valid_action_key) + then + error_msg = 'The following action keys are not valid: ' + || (select string_agg(action_key, ', ') + from _validate_actions_table + where not valid_action_key) + ||e'\n'; + end if; + -- Get any invalid Function Keys + if exists(select from _validate_functions_table where not valid_function_key) + then + error_msg = error_msg + || 'The following function keys are not valid: ' + || (select string_agg(function_key, ', ') + from _validate_functions_table + where not valid_function_key); + end if; + + -- Raise if there were invalid Action/Function Keys + if error_msg != '' then + raise exception using + message = 'invalid keys in supplied row', + detail = trim(both e'\n' from error_msg), + errcode = 'invalid_json_text', + hint = 'Visit https://nasa-ammos.github.io/aerie-docs/deployment/advanced-permissions/#action-and-function-permissions for a list of valid keys.'; + end if; + + -- Get any values that aren't Action Permissions + if exists(select from _validate_actions_table where not valid_action_permission) + then + error_msg = 'The following action keys have invalid permissions: {' + || (select string_agg(action_key || ': ' || action_permission, ', ') + from _validate_actions_table + where not valid_action_permission) + ||e'}\n'; + end if; + + -- Get any values that aren't Function Permissions + if exists(select from _validate_functions_table where not valid_function_permission) + then + error_msg = error_msg + || 'The following function keys have invalid permissions: {' + || (select string_agg(function_key || ': ' || function_permission, ', ') + from _validate_functions_table + where not valid_function_permission) + || '}'; + end if; + + -- Raise if there were invalid Action/Function Permissions + if error_msg != '' then + raise exception using + message = 'invalid permissions in supplied row', + detail = trim(both e'\n' from error_msg), + errcode = 'invalid_json_text', + hint = 'Visit https://nasa-ammos.github.io/aerie-docs/deployment/advanced-permissions/#action-and-function-permissions for a list of valid Permissions.'; + end if; + + -- Check that no Actions have Plan Merge Permissions + if exists(select from _validate_actions_table where is_plan_merge_permission) + then + error_msg = 'The following action keys may not take plan merge permissions: {' + || (select string_agg(action_key || ': ' || action_permission, ', ') + from _validate_actions_table + where is_plan_merge_permission) + ||e'}\n'; + end if; + + -- Check that no non-Plan Merge Functions have Plan Merge Permissions + if exists(select from _validate_functions_table where is_plan_merge_permission and not is_plan_merge_key) + then + error_msg = error_msg + || 'The following function keys may not take plan merge permissions: {' + || (select string_agg(function_key || ': ' || function_permission, ', ') 
+ from _validate_functions_table + where is_plan_merge_permission and not is_plan_merge_key) + || '}'; + end if; + + -- Raise if Plan Merge Permissions were improperly applied + if error_msg != '' then + raise exception using + message = 'invalid permissions in supplied row', + detail = trim(both e'\n' from error_msg), + errcode = 'invalid_json_text', + hint = 'Visit https://nasa-ammos.github.io/aerie-docs/deployment/advanced-permissions/#action-and-function-permissions for more information.'; + end if; + + -- Drop Temp Tables + drop table _validate_functions_table; + drop table _validate_actions_table; + + return new; +end +$$; + +create or replace procedure permissions.check_general_permissions( + _function permissions.function_permission_key, + _permission permissions.permission, + _plan_id integer, + _user text) +language plpgsql as $$ +declare + _mission_model_id integer; + _plan_name text; +begin + select name from merlin.plan where id = _plan_id into _plan_name; + + -- MISSION_MODEL_OWNER: The user must own the relevant Mission Model + if _permission = 'MISSION_MODEL_OWNER' then + select id from merlin.mission_model mm + where mm.id = (select model_id from merlin.plan p where p.id = _plan_id) + into _mission_model_id; + + if not exists(select * from merlin.mission_model mm where mm.id = _mission_model_id and mm.owner =_user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not MISSION_MODEL_OWNER on Model ' || _mission_model_id ||'.'; + end if; + + -- OWNER: The user must be the owner of all relevant objects directly used by the KEY + -- In most cases, OWNER is equivalent to PLAN_OWNER. Use a custom solution when that is not true. + elseif _permission = 'OWNER' then + if not exists(select * from merlin.plan p where p.id = _plan_id and p.owner = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not OWNER on Plan ' || _plan_id ||' ('|| _plan_name ||').'; + end if; + + -- PLAN_OWNER: The user must be the Owner of the relevant Plan + elseif _permission = 'PLAN_OWNER' then + if not exists(select * from merlin.plan p where p.id = _plan_id and p.owner = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER on Plan '|| _plan_id ||' ('|| _plan_name ||').'; + end if; + + -- PLAN_COLLABORATOR: The user must be a Collaborator of the relevant Plan. 
The Plan Owner is NOT considered a Collaborator of the Plan + elseif _permission = 'PLAN_COLLABORATOR' then + if not exists(select * from merlin.plan_collaborators pc where pc.plan_id = _plan_id and pc.collaborator = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_COLLABORATOR on Plan '|| _plan_id ||' ('|| _plan_name ||').'; + end if; + + -- PLAN_OWNER_COLLABORATOR: The user must be either the Owner or a Collaborator of the relevant Plan + elseif _permission = 'PLAN_OWNER_COLLABORATOR' then + if not exists(select * from merlin.plan p where p.id = _plan_id and p.owner = _user) then + if not exists(select * from merlin.plan_collaborators pc where pc.plan_id = _plan_id and pc.collaborator = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER_COLLABORATOR on Plan '|| _plan_id ||' ('|| _plan_name ||').'; + end if; + end if; + end if; +end +$$; + +create or replace function permissions.get_function_permissions(_function permissions.function_permission_key, hasura_session json) +returns permissions.permission +stable +language plpgsql as $$ +declare + _role text; + _function_permission permissions.permission; +begin + _role := permissions.get_role(hasura_session); + -- The aerie_admin role is always treated as having NO_CHECK permissions on all functions. + if _role = 'aerie_admin' then return 'NO_CHECK'; end if; + + select (function_permissions ->> _function::text)::permissions.permission + from permissions.user_role_permission urp + where urp.role = _role + into _function_permission; + + -- The absence of the function key means that the role does not have permission to perform the function. + if _function_permission is null then + raise insufficient_privilege + using message = 'User with role '''|| _role ||''' is not permitted to run '''|| _function ||''''; + end if; + + return _function_permission::permissions.permission; +end +$$; + +create or replace function permissions.get_role(hasura_session json) +returns text +stable +language plpgsql as $$ +declare + _role text; + _username text; +begin + _role := hasura_session ->> 'x-hasura-role'; + if _role is not null then + return _role; + end if; + _username := hasura_session ->> 'x-hasura-user-id'; + select default_role from permissions.users u + where u.username = _username into _role; + if _role is null then + raise exception 'Invalid username: %', _username; + end if; + return _role; +end +$$; + +create or replace function permissions.raise_if_plan_merge_permission(_function permissions.function_permission_key, _permission permissions.permission) +returns void +immutable +language plpgsql as $$ +begin + if _permission::text = any(array['PLAN_OWNER_SOURCE', 'PLAN_COLLABORATOR_SOURCE', 'PLAN_OWNER_COLLABORATOR_SOURCE', + 'PLAN_OWNER_TARGET', 'PLAN_COLLABORATOR_TARGET', 'PLAN_OWNER_COLLABORATOR_TARGET']) + then + raise 'Invalid Permission: The Permission ''%'' may not be applied to function ''%''', _permission, _function; + end if; +end +$$; + +create or replace procedure permissions.check_merge_permissions(_function permissions.function_permission_key, _merge_request_id integer, hasura_session json) +language plpgsql as $$ +declare + _plan_id_receiving_changes integer; + _plan_id_supplying_changes integer; + _function_permission permissions.permission; + _user text; +begin + select plan_id_receiving_changes + from merlin.merge_request mr + where mr.id = _merge_request_id + into 
_plan_id_receiving_changes; + + select plan_id + from merlin.plan_snapshot ps, merlin.merge_request mr + where mr.id = _merge_request_id and ps.snapshot_id = mr.snapshot_id_supplying_changes + into _plan_id_supplying_changes; + + _user := (hasura_session ->> 'x-hasura-user-id'); + _function_permission := permissions.get_function_permissions('get_non_conflicting_activities', hasura_session); + call permissions.check_merge_permissions(_function, _function_permission, _plan_id_receiving_changes, + _plan_id_supplying_changes, _user); +end +$$; + +create or replace procedure permissions.check_merge_permissions( + _function permissions.function_permission_key, + _permission permissions.permission, + _plan_id_receiving integer, + _plan_id_supplying integer, + _user text) +language plpgsql as $$ +declare + _supplying_plan_name text; + _receiving_plan_name text; +begin + select name from merlin.plan where id = _plan_id_supplying into _supplying_plan_name; + select name from merlin.plan where id = _plan_id_receiving into _receiving_plan_name; + + -- MISSION_MODEL_OWNER: The user must own the relevant Mission Model + if _permission = 'MISSION_MODEL_OWNER' then + call permissions.check_general_permissions(_function, _permission, _plan_id_receiving, _user); + + -- OWNER: The user must be the Owner of both Plans + elseif _permission = 'OWNER' then + if not (exists(select * from merlin.plan p where p.id = _plan_id_receiving and p.owner = _user)) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': ''' + || _user ||''' is not OWNER on Plan '|| _plan_id_receiving + ||' ('|| _receiving_plan_name ||').'; + elseif not (exists(select * from merlin.plan p2 where p2.id = _plan_id_supplying and p2.owner = _user)) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': ''' + || _user ||''' is not OWNER on Plan '|| _plan_id_supplying + ||' ('|| _supplying_plan_name ||').'; + end if; + + -- PLAN_OWNER: The user must be the Owner of either Plan + elseif _permission = 'PLAN_OWNER' then + if not exists(select * + from merlin.plan p + where (p.id = _plan_id_receiving or p.id = _plan_id_supplying) + and p.owner = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': ''' + || _user ||''' is not PLAN_OWNER on either Plan '|| _plan_id_receiving + ||' ('|| _receiving_plan_name ||') or Plan '|| _plan_id_supplying ||' ('|| _supplying_plan_name ||').'; + end if; + + -- PLAN_COLLABORATOR: The user must be a Collaborator of either Plan. 
The Plan Owner is NOT considered a Collaborator of the Plan + elseif _permission = 'PLAN_COLLABORATOR' then + if not exists(select * + from merlin.plan_collaborators pc + where (pc.plan_id = _plan_id_receiving or pc.plan_id = _plan_id_supplying) + and pc.collaborator = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': ''' + || _user ||''' is not PLAN_COLLABORATOR on either Plan '|| _plan_id_receiving + ||' ('|| _receiving_plan_name ||') or Plan '|| _plan_id_supplying ||' ('|| _supplying_plan_name ||').'; + end if; + + -- PLAN_OWNER_COLLABORATOR: The user must be either the Owner or a Collaborator of either Plan + elseif _permission = 'PLAN_OWNER_COLLABORATOR' then + if not exists(select * + from merlin.plan p + where (p.id = _plan_id_receiving or p.id = _plan_id_supplying) + and p.owner = _user) then + if not exists(select * + from merlin.plan_collaborators pc + where (pc.plan_id = _plan_id_receiving or pc.plan_id = _plan_id_supplying) + and pc.collaborator = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': ''' + || _user ||''' is not PLAN_OWNER_COLLABORATOR on either Plan '|| _plan_id_receiving + ||' ('|| _receiving_plan_name ||') or Plan '|| _plan_id_supplying ||' ('|| _supplying_plan_name ||').'; + + end if; + end if; + + -- PLAN_OWNER_SOURCE: The user must be the Owner of the Supplying Plan + elseif _permission = 'PLAN_OWNER_SOURCE' then + if not exists(select * + from merlin.plan p + where p.id = _plan_id_supplying and p.owner = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER on Source Plan ' + || _plan_id_supplying ||' ('|| _supplying_plan_name ||').'; + end if; + + -- PLAN_COLLABORATOR_SOURCE: The user must be a Collaborator of the Supplying Plan. + elseif _permission = 'PLAN_COLLABORATOR_SOURCE' then + if not exists(select * + from merlin.plan_collaborators pc + where pc.plan_id = _plan_id_supplying and pc.collaborator = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_COLLABORATOR on Source Plan ' + || _plan_id_supplying ||' ('|| _supplying_plan_name ||').'; + end if; + + -- PLAN_OWNER_COLLABORATOR_SOURCE: The user must be either the Owner or a Collaborator of the Supplying Plan. + elseif _permission = 'PLAN_OWNER_COLLABORATOR_SOURCE' then + if not exists(select * + from merlin.plan p + where p.id = _plan_id_supplying and p.owner = _user) then + if not exists(select * + from merlin.plan_collaborators pc + where pc.plan_id = _plan_id_supplying and pc.collaborator = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER_COLLABORATOR on Source Plan ' + || _plan_id_supplying ||' ('|| _supplying_plan_name ||').'; + end if; + end if; + + -- PLAN_OWNER_TARGET: The user must be the Owner of the Receiving Plan. + elseif _permission = 'PLAN_OWNER_TARGET' then + if not exists(select * + from merlin.plan p + where p.id = _plan_id_receiving and p.owner = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER on Target Plan ' + || _plan_id_receiving ||' ('|| _receiving_plan_name ||').'; + end if; + + -- PLAN_COLLABORATOR_TARGET: The user must be a Collaborator of the Receiving Plan. 
+ elseif _permission = 'PLAN_COLLABORATOR_TARGET' then + if not exists(select * + from merlin.plan_collaborators pc + where pc.plan_id = _plan_id_receiving and pc.collaborator = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_COLLABORATOR on Target Plan ' + || _plan_id_receiving ||' ('|| _receiving_plan_name ||').'; + end if; + + -- PLAN_OWNER_COLLABORATOR_TARGET: The user must be either the Owner or a Collaborator of the Receiving Plan. + elseif _permission = 'PLAN_OWNER_COLLABORATOR_TARGET' then + if not exists(select * + from merlin.plan p + where p.id = _plan_id_receiving and p.owner = _user) then + if not exists(select * + from merlin.plan_collaborators pc + where pc.plan_id = _plan_id_receiving and pc.collaborator = _user) then + raise insufficient_privilege + using message = 'Cannot run '''|| _function ||''': '''|| _user ||''' is not PLAN_OWNER_COLLABORATOR on Target Plan ' + || _plan_id_receiving ||' ('|| _receiving_plan_name ||').'; + end if; + end if; + end if; +end +$$; diff --git a/deployment/merge_aerie_db/merge_db/migrate_scheduler.sql b/deployment/merge_aerie_db/merge_db/migrate_scheduler.sql new file mode 100644 index 0000000000..5861c4dc12 --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/migrate_scheduler.sql @@ -0,0 +1,118 @@ +begin; +-- Move the contents of "public" to "scheduler" +alter schema public rename to scheduler; +comment on schema scheduler is 'Scheduler Service Schema'; +create schema public; + +-- Move Tags Tables +alter table metadata.scheduling_condition_tags set schema tags; +alter table metadata.scheduling_condition_definition_tags set schema tags; +alter table metadata.scheduling_goal_tags set schema tags; +alter table metadata.scheduling_goal_definition_tags set schema tags; + +-- Metadata Schema is empty now +drop schema metadata; + +-- Update Foreign Keys +alter table tags.scheduling_condition_tags + add foreign key (tag_id) references tags.tags + on update cascade + on delete cascade; +alter table tags.scheduling_condition_definition_tags + add foreign key (tag_id) references tags.tags + on update cascade + on delete cascade; +alter table tags.scheduling_goal_tags + add foreign key (tag_id) references tags.tags + on update cascade + on delete cascade; +alter table tags.scheduling_goal_definition_tags + add foreign key (tag_id) references tags.tags + on update cascade + on delete cascade; + +-- Replace status_t with util_functions.request_status +drop trigger notify_scheduling_workers_cancel on scheduler.scheduling_request; +alter table scheduler.scheduling_request +alter column status drop default, +alter column status type util_functions.request_status using status::text::util_functions.request_status, +alter column status set default 'pending'; + +create trigger notify_scheduling_workers_cancel +after update of canceled on scheduler.scheduling_request +for each row +when ((old.status != 'success' or old.status != 'failed') and new.canceled) +execute function scheduler.notify_scheduling_workers_cancel(); + +drop type scheduler.status_t; + +-- Add new constraints +alter table scheduler.scheduling_request +add constraint scheduling_request_requester_exists + foreign key (requested_by) + references permissions.users + on update cascade + on delete set null, +add constraint scheduling_request_references_dataset + foreign key (dataset_id) + references merlin.dataset + on update cascade + on delete set null; + +alter table 
scheduler.scheduling_model_specification_conditions +add foreign key (model_id) + references merlin.mission_model + on update cascade + on delete cascade; +alter table scheduler.scheduling_model_specification_goals +add foreign key (model_id) + references merlin.mission_model + on update cascade + on delete cascade; +alter table scheduler.scheduling_specification +add constraint scheduling_spec_plan_id_fkey + foreign key (plan_id) + references merlin.plan + on update cascade + on delete cascade; +alter table scheduler.scheduling_condition_definition +add constraint condition_definition_author_exists + foreign key (author) + references permissions.users + on update cascade + on delete set null; +alter table scheduler.scheduling_condition_metadata +add constraint condition_owner_exists + foreign key (owner) + references permissions.users + on update cascade + on delete set null, +add constraint condition_updated_by_exists + foreign key (updated_by) + references permissions.users + on update cascade + on delete set null; +alter table scheduler.scheduling_goal_definition +add constraint goal_definition_author_exists + foreign key (author) + references permissions.users + on update cascade + on delete set null; +alter table scheduler.scheduling_goal_metadata +add constraint goal_owner_exists + foreign key (owner) + references permissions.users + on update cascade + on delete set null, +add constraint goal_updated_by_exists + foreign key (updated_by) + references permissions.users + on update cascade + on delete set null; + + +-- Update function definitions +\! echo 'Migrating Scheduler Functions...' +\ir migrate_scheduler_functions.sql +\! echo 'Done!' +end; diff --git a/deployment/merge_aerie_db/merge_db/migrate_scheduler_functions.sql b/deployment/merge_aerie_db/merge_db/migrate_scheduler_functions.sql new file mode 100644 index 0000000000..05288f7263 --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/migrate_scheduler_functions.sql @@ -0,0 +1,323 @@ +begin; +--------------------- +-- UPDATE TRIGGERS -- +--------------------- +create or replace trigger increment_revision_on_update_trigger + before update on scheduler.scheduling_specification + for each row + when (pg_trigger_depth() < 1) + execute function util_functions.increment_revision_update(); +drop function scheduler.increment_revision_on_update(); + +create or replace trigger set_timestamp +before update on scheduler.scheduling_condition_metadata +for each row +execute function util_functions.set_updated_at(); +drop function scheduler.scheduling_condition_metadata_set_updated_at(); + +create or replace trigger set_timestamp +before update on scheduler.scheduling_goal_metadata +for each row +execute function util_functions.set_updated_at(); +drop function scheduler.scheduling_goal_metadata_set_updated_at(); + +create function scheduler.create_scheduling_spec_for_new_plan() +returns trigger +security definer +language plpgsql as $$ +declare + spec_id integer; +begin + -- Create a new scheduling specification + insert into scheduler.scheduling_specification (revision, plan_id, plan_revision, horizon_start, horizon_end, + simulation_arguments, analysis_only) + values (0, new.id, new.revision, new.start_time, new.start_time+new.duration, '{}', false) + returning id into spec_id; + + -- Populate the scheduling specification + insert into scheduler.scheduling_specification_goals (specification_id, goal_id, goal_revision, priority) + select spec_id, msg.goal_id, msg.goal_revision, msg.priority + from 
scheduler.scheduling_model_specification_goals msg + where msg.model_id = new.model_id; + + insert into scheduler.scheduling_specification_conditions (specification_id, condition_id, condition_revision) + select spec_id, msc.condition_id, msc.condition_revision + from scheduler.scheduling_model_specification_conditions msc + where msc.model_id = new.model_id; + + return new; +end +$$; + +comment on function scheduler.create_scheduling_spec_for_new_plan() is e'' +'Creates a scheduling specification for a new plan + and populates it with the contents of the plan''s model''s specification.'; + +create trigger scheduling_spec_for_new_plan_trigger +after insert on merlin.plan +for each row +execute function scheduler.create_scheduling_spec_for_new_plan(); + +--------------------------------- +-- UPDATE FUNCTION DEFINITIONS -- +--------------------------------- +create or replace function scheduler.cancel_pending_scheduling_rqs() +returns trigger +security definer +language plpgsql as $$ +begin + update scheduler.scheduling_request + set canceled = true + where status = 'pending' + and specification_id = new.specification_id; + return new; +end +$$; + +create or replace function scheduler.insert_scheduling_model_specification_goal_func() + returns trigger + language plpgsql as $$ + declare + next_priority integer; +begin + select coalesce( + (select priority + from scheduler.scheduling_model_specification_goals smg + where smg.model_id = new.model_id + order by priority desc + limit 1), -1) + 1 + into next_priority; + + if new.priority > next_priority then + raise numeric_value_out_of_range using + message = ('Updated priority % for model_id % is not consecutive', new.priority, new.model_id), + hint = ('The next available priority is %.', next_priority); + end if; + + if new.priority is null then + new.priority = next_priority; + end if; + + update scheduler.scheduling_model_specification_goals + set priority = priority + 1 + where model_id = new.model_id + and priority >= new.priority; + return new; +end; +$$; + +create or replace function scheduler.update_scheduling_model_specification_goal_func() + returns trigger + language plpgsql as $$ + declare + next_priority integer; +begin + select coalesce( + (select priority + from scheduler.scheduling_model_specification_goals smg + where smg.model_id = new.model_id + order by priority desc + limit 1), -1) + 1 + into next_priority; + + if new.priority > next_priority then + raise numeric_value_out_of_range using + message = ('Updated priority % for model_id % is not consecutive', new.priority, new.model_id), + hint = ('The next available priority is %.', next_priority); + end if; + + if new.priority > old.priority then + update scheduler.scheduling_model_specification_goals + set priority = priority - 1 + where model_id = new.model_id + and priority between old.priority + 1 and new.priority + and goal_id != new.goal_id; + else + update scheduler.scheduling_model_specification_goals + set priority = priority + 1 + where model_id = new.model_id + and priority between new.priority and old.priority - 1 + and goal_id != new.goal_id; + end if; + return new; +end; +$$; + +create or replace function scheduler.delete_scheduling_model_specification_goal_func() + returns trigger + language plpgsql as $$ +begin + update scheduler.scheduling_model_specification_goals + set priority = priority - 1 + where model_id = old.model_id + and priority > old.priority; + return null; +end; +$$; + +create or replace function 
scheduler.increment_spec_revision_on_conditions_spec_update() + returns trigger + security definer +language plpgsql as $$ +begin + update scheduler.scheduling_specification + set revision = revision + 1 + where id = new.specification_id; + return new; +end; +$$; + +create or replace function scheduler.increment_spec_revision_on_conditions_spec_delete() + returns trigger + security definer +language plpgsql as $$ +begin + update scheduler.scheduling_specification + set revision = revision + 1 + where id = new.specification_id; + return new; +end; +$$; + +create or replace function scheduler.insert_scheduling_specification_goal_func() + returns trigger + language plpgsql as $$ + declare + next_priority integer; +begin + select coalesce( + (select priority + from scheduler.scheduling_specification_goals ssg + where ssg.specification_id = new.specification_id + order by priority desc + limit 1), -1) + 1 + into next_priority; + + if new.priority > next_priority then + raise numeric_value_out_of_range using + message = ('Updated priority % for specification_id % is not consecutive', new.priority, new.specification_id), + hint = ('The next available priority is %.', next_priority); + end if; + + if new.priority is null then + new.priority = next_priority; + end if; + + update scheduler.scheduling_specification_goals + set priority = priority + 1 + where specification_id = new.specification_id + and priority >= new.priority; + return new; +end; +$$; + +create or replace function scheduler.update_scheduling_specification_goal_func() + returns trigger + language plpgsql as $$ + declare + next_priority integer; +begin + select coalesce( + (select priority + from scheduler.scheduling_specification_goals ssg + where ssg.specification_id = new.specification_id + order by priority desc + limit 1), -1) + 1 + into next_priority; + + if new.priority > next_priority then + raise numeric_value_out_of_range using + message = ('Updated priority % for specification_id % is not consecutive', new.priority, new.specification_id), + hint = ('The next available priority is %.', next_priority); + end if; + + if new.priority > old.priority then + update scheduler.scheduling_specification_goals + set priority = priority - 1 + where specification_id = new.specification_id + and priority between old.priority + 1 and new.priority + and goal_id != new.goal_id; + else + update scheduler.scheduling_specification_goals + set priority = priority + 1 + where specification_id = new.specification_id + and priority between new.priority and old.priority - 1 + and goal_id != new.goal_id; + end if; + return new; +end; +$$; + +create or replace function scheduler.delete_scheduling_specification_goal_func() + returns trigger + language plpgsql as $$ +begin + update scheduler.scheduling_specification_goals + set priority = priority - 1 + where specification_id = old.specification_id + and priority > old.priority; + return null; +end; +$$; + +create or replace function scheduler.increment_spec_revision_on_goal_spec_update() + returns trigger + security definer +language plpgsql as $$begin + update scheduler.scheduling_specification + set revision = revision + 1 + where id = new.specification_id; + return new; +end$$; + +create or replace function scheduler.increment_spec_revision_on_goal_spec_delete() + returns trigger + security definer +language plpgsql as $$begin + update scheduler.scheduling_specification + set revision = revision + 1 + where id = old.specification_id; + return old; +end$$; + +create or replace function 
scheduler.scheduling_condition_definition_set_revision() +returns trigger +volatile +language plpgsql as $$ +declare + max_revision integer; +begin + -- Grab the current max value of revision, or -1, if this is the first revision + select coalesce((select revision + from scheduler.scheduling_condition_definition + where condition_id = new.condition_id + order by revision desc + limit 1), -1) + into max_revision; + + new.revision = max_revision + 1; + return new; +end +$$; + +create or replace function scheduler.scheduling_goal_definition_set_revision() +returns trigger +volatile +language plpgsql as $$ +declare + max_revision integer; +begin + -- Grab the current max value of revision, or -1, if this is the first revision + select coalesce((select revision + from scheduler.scheduling_goal_definition + where goal_id = new.goal_id + order by revision desc + limit 1), -1) + into max_revision; + + new.revision = max_revision + 1; + return new; +end +$$; + +end; diff --git a/deployment/merge_aerie_db/merge_db/migrate_sequencing.sql b/deployment/merge_aerie_db/merge_db/migrate_sequencing.sql new file mode 100644 index 0000000000..6813b15618 --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/migrate_sequencing.sql @@ -0,0 +1,119 @@ +begin; +-- Move the contents of "public" to "sequencing" +alter schema public rename to sequencing; +comment on schema sequencing is 'Sequencing Service Schema'; +create schema public; + +-- Move Tags Table +alter table metadata.expansion_rule_tags set schema tags; +-- Metadata Schema is empty now +drop schema metadata; + +-- Update Foreign Keys +alter table tags.expansion_rule_tags + add foreign key (tag_id) references tags.tags + on update cascade + on delete cascade; +alter table sequencing.expanded_sequences + add constraint expanded_sequences_to_sim_run + foreign key (simulation_dataset_id) + references merlin.simulation_dataset + on delete cascade; +alter table sequencing.expansion_rule + add foreign key (authoring_mission_model_id) + references merlin.mission_model + on update cascade + on delete set null, + add foreign key (owner) + references permissions.users + on update cascade + on delete set null, + add foreign key (updated_by) + references permissions.users + on update cascade + on delete set null; +comment on column sequencing.expansion_rule.activity_type is e'' + 'The activity type this expansion rule applies to. 
This type is not model-specific.'; + +alter table sequencing.expansion_run + add foreign key (simulation_dataset_id) + references merlin.simulation_dataset + on delete cascade; +alter table sequencing.expansion_set + add foreign key (mission_model_id) + references merlin.mission_model + on delete cascade, + add foreign key (owner) + references permissions.users + on update cascade + on delete set null, + add foreign key (updated_by) + references permissions.users + on update cascade + on delete set null; +alter table sequencing.sequence + add foreign key (simulation_dataset_id) + references merlin.simulation_dataset + on delete cascade; +alter table sequencing.sequence_to_simulated_activity + add constraint sequence_to_sim_run + foreign key (simulation_dataset_id) + references merlin.simulation_dataset + on delete cascade; +alter table sequencing.user_sequence + add foreign key (authoring_command_dict_id) + references sequencing.command_dictionary + on delete cascade, + add foreign key (owner) + references permissions.users + on update cascade + on delete cascade; + +-- Update Triggers +drop trigger set_timestamp on sequencing.expansion_rule; +drop function sequencing.expansion_rule_set_updated_at(); + +create trigger set_timestamp +before update on sequencing.expansion_rule +for each row +execute function util_functions.set_updated_at(); + +drop trigger set_timestamp on sequencing.user_sequence; +drop function sequencing.user_sequence_set_updated_at(); + +create trigger set_timestamp +before update on sequencing.user_sequence +for each row +execute function util_functions.set_updated_at(); + +-- Update Views +create or replace view sequencing.expansion_set_rule_view as +select str.set_id, + rule.id, + rule.activity_type, + rule.expansion_logic, + rule.authoring_command_dict_id, + rule.authoring_mission_model_id, + rule.created_at, + rule.updated_at, + rule.name, + rule.owner, + rule.updated_by, + rule.description +from sequencing.expansion_set_to_rule str left join sequencing.expansion_rule rule + on str.rule_id = rule.id; +create or replace view sequencing.rule_expansion_set_view as +select str.rule_id, + set.id, + set.name, + set.owner, + set.description, + set.command_dict_id, + set.mission_model_id, + set.created_at, + set.updated_at, + set.updated_by +from sequencing.expansion_set_to_rule str left join sequencing.expansion_set set + on str.set_id = set.id; + +end; diff --git a/deployment/merge_aerie_db/merge_db/migrate_tags_functions.sql b/deployment/merge_aerie_db/merge_db/migrate_tags_functions.sql new file mode 100644 index 0000000000..2ccc3812ee --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/migrate_tags_functions.sql @@ -0,0 +1,85 @@ +create or replace function tags.tag_ids_activity_snapshot(_directive_id integer, _snapshot_id integer) + returns int[] + language plpgsql as $$ + declare + tags int[]; +begin + select array_agg(tag_id) + from tags.snapshot_activity_tags sat + where sat.snapshot_id = _snapshot_id + and sat.directive_id = _directive_id + into tags; + return tags; +end +$$; + +create or replace function tags.tag_ids_activity_directive(_directive_id integer, _plan_id integer) + returns int[] + language plpgsql as $$ + declare + tags int[]; +begin + select array_agg(tag_id) + from tags.activity_directive_tags adt + where adt.plan_id = _plan_id + and adt.directive_id = _directive_id + into tags; + return tags; +end +$$; + +create or replace function tags.get_tags(_activity_id int, _plan_id int) + returns jsonb + security invoker + language plpgsql as $$ + declare 
+ tags jsonb; +begin + select jsonb_agg(json_build_object( + 'id', id, + 'name', name, + 'color', color, + 'owner', owner, + 'created_at', created_at + )) + from tags.tags tags, tags.activity_directive_tags adt + where tags.id = adt.tag_id + and (adt.directive_id, adt.plan_id) = (_activity_id, _plan_id) + into tags; + return tags; +end +$$; + +create or replace function tags.adt_check_locked_new() + returns trigger + security definer + language plpgsql as $$ +begin + call merlin.plan_locked_exception(new.plan_id); + return new; +end $$; + +create or replace function tags.adt_check_locked_old() + returns trigger + security definer + language plpgsql as $$ +begin + call merlin.plan_locked_exception(old.plan_id); + return old; +end $$; + +create or replace function tags.snapshot_tags_in_review_delete() + returns trigger + security definer +language plpgsql as $$ +begin + if exists(select status from merlin.merge_request mr + where + (mr.snapshot_id_supplying_changes = old.snapshot_id + or mr.merge_base_snapshot_id = old.snapshot_id) + and mr.status = 'in-progress') then + raise exception 'Cannot delete. Snapshot is in use in an active merge review.'; + end if; + return old; +end +$$; diff --git a/deployment/merge_aerie_db/merge_db/migrate_ui.sql b/deployment/merge_aerie_db/merge_db/migrate_ui.sql new file mode 100644 index 0000000000..27e16b84f4 --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/migrate_ui.sql @@ -0,0 +1,42 @@ +begin; +-- Move the contents of "public" to "ui" +alter schema public rename to ui; +comment on schema ui is 'UI Service Schema'; +create schema public; +comment on schema public is 'standard public schema'; + +-- Add PGCrypto back to "public" +create extension pgcrypto with schema public; + +-- Add Missing FKeys +alter table ui.extension_roles + add foreign key (role) + references permissions.user_roles (role) + on update cascade + on delete cascade; +alter table ui.extensions + add foreign key (owner) + references permissions.users (username) + on update cascade + on delete set null; +alter table ui.view + add foreign key (owner) + references permissions.users (username) + on update cascade + on delete set null; + +-- Update Triggers +drop trigger extensions_set_timestamp on ui.extensions; +drop function ui.extensions_set_updated_at(); +create trigger extensions_set_timestamp + before update on ui.extensions + for each row +execute function util_functions.set_updated_at(); + +drop trigger set_timestamp on ui.view; +drop function ui.view_set_updated_at(); +create trigger set_timestamp +before update on ui.view +for each row +execute function util_functions.set_updated_at(); +end; From 3a285c90231d2e64cb94a8674a04908adddb9e34 Mon Sep 17 00:00:00 2001 From: Theresa Kamerman Date: Fri, 29 Mar 2024 15:48:37 -0700 Subject: [PATCH 36/36] Handle Orphaned Rows Before Adding FKeys Done in case the database has any rows that would violate the foreign key about to be added. 
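
The cleanup follows the same shape in every file touched by this patch; a generic sketch of the pattern is below. All names in it (parent, users, child_link, child, parent_id, owner) are hypothetical and exist only so the sketch runs on an empty database; they are not objects from this migration.

-- Sketch only: generic "remove or detach orphans, then add the foreign key" pattern.
-- Hypothetical tables so the example is self-contained.
create table parent (id integer primary key);
create table users (username text primary key);
create table child_link (parent_id integer not null);
create table child (owner text);

begin;
-- Mandatory reference (join-style table): drop orphaned rows outright,
-- then add the constraint with cascading delete.
delete from child_link
  where not exists (select from parent p where child_link.parent_id = p.id);
alter table child_link
  add foreign key (parent_id)
    references parent
    on update cascade
    on delete cascade;

-- Nullable reference (e.g. an owner column): detach the orphan but keep the row,
-- matching the "on delete set null" behavior of the constraint being added.
update child
  set owner = null
  where owner is not null
    and not exists (select from users u where child.owner = u.username);
alter table child
  add foreign key (owner)
    references users
    on update cascade
    on delete set null;
commit;

In the hunks below, the join-style and specification tables take the delete form, while nullable columns such as owner, updated_by, author, requested_by, and dataset_id take the set-null form.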
--- .../merge_db/migrate_scheduler.sql | 86 ++++++++++++++- .../merge_db/migrate_scheduler_functions.sql | 59 ----------- .../merge_db/migrate_scheduler_triggers.sql | 60 +++++++++++ .../merge_db/migrate_sequencing.sql | 100 ++++++++++++++---- .../merge_aerie_db/merge_db/migrate_ui.sql | 48 ++++++--- 5 files changed, 258 insertions(+), 95 deletions(-) create mode 100644 deployment/merge_aerie_db/merge_db/migrate_scheduler_triggers.sql diff --git a/deployment/merge_aerie_db/merge_db/migrate_scheduler.sql b/deployment/merge_aerie_db/merge_db/migrate_scheduler.sql index 5861c4dc12..81c86530ee 100644 --- a/deployment/merge_aerie_db/merge_db/migrate_scheduler.sql +++ b/deployment/merge_aerie_db/merge_db/migrate_scheduler.sql @@ -13,19 +13,43 @@ alter table metadata.scheduling_goal_definition_tags set schema tags; -- Metadata Schema is empty now drop schema metadata; --- Update Foreign Keys +-- Update function definitions +\! echo 'Migrating Scheduler Triggers...' +\ir migrate_scheduler_triggers.sql +\! echo 'Done!' + +-- Add Tags Foreign Keys, removing orphan entries first +delete from tags.scheduling_condition_tags + where not exists( + select from tags.tags t + where tag_id = t.id); alter table tags.scheduling_condition_tags add foreign key (tag_id) references tags.tags on update cascade on delete cascade; + +delete from tags.scheduling_condition_definition_tags + where not exists( + select from tags.tags t + where tag_id = t.id); alter table tags.scheduling_condition_definition_tags add foreign key (tag_id) references tags.tags on update cascade on delete cascade; + +delete from tags.scheduling_goal_tags + where not exists( + select from tags.tags t + where tag_id = t.id); alter table tags.scheduling_goal_tags add foreign key (tag_id) references tags.tags on update cascade on delete cascade; + +delete from tags.scheduling_goal_definition_tags + where not exists( + select from tags.tags t + where tag_id = t.id); alter table tags.scheduling_goal_definition_tags add foreign key (tag_id) references tags.tags on update cascade @@ -46,7 +70,17 @@ execute function scheduler.notify_scheduling_workers_cancel(); drop type scheduler.status_t; --- Add new constraints +-- Add new constraints, handling orphans first +update scheduler.scheduling_request + set dataset_id = null + where not exists( + select from merlin.dataset d + where dataset_id = d.id); +update scheduler.scheduling_request + set requested_by = null + where not exists( + select from permissions.users u + where requested_by = u.username); alter table scheduler.scheduling_request add constraint scheduling_request_requester_exists foreign key (requested_by) @@ -59,28 +93,59 @@ add constraint scheduling_request_references_dataset on update cascade on delete set null; +delete from scheduler.scheduling_model_specification_conditions + where not exists( + select from merlin.mission_model m + where model_id = m.id); alter table scheduler.scheduling_model_specification_conditions add foreign key (model_id) references merlin.mission_model on update cascade on delete cascade; + +delete from scheduler.scheduling_model_specification_goals + where not exists( + select from merlin.mission_model m + where model_id = m.id); alter table scheduler.scheduling_model_specification_goals add foreign key (model_id) references merlin.mission_model on update cascade on delete cascade; + +delete from scheduler.scheduling_specification + where not exists( + select from merlin.plan p + where plan_id = p.id); alter table scheduler.scheduling_specification add 
constraint scheduling_spec_plan_id_fkey foreign key (plan_id) references merlin.plan on update cascade on delete cascade; + +update scheduler.scheduling_condition_definition + set author = null + where not exists( + select from permissions.users u + where author = u.username); alter table scheduler.scheduling_condition_definition add constraint condition_definition_author_exists foreign key (author) references permissions.users on update cascade on delete set null; + +update scheduler.scheduling_condition_metadata + set owner = null + where not exists( + select from permissions.users u + where owner = u.username); +update scheduler.scheduling_condition_metadata + set updated_by = null + where not exists( + select from permissions.users u + where updated_by = u.username); alter table scheduler.scheduling_condition_metadata add constraint condition_owner_exists foreign key (owner) @@ -92,12 +157,29 @@ add constraint condition_updated_by_exists references permissions.users on update cascade on delete set null; + +update scheduler.scheduling_goal_definition + set author = null + where not exists( + select from permissions.users u + where author = u.username); alter table scheduler.scheduling_goal_definition add constraint goal_definition_author_exists foreign key (author) references permissions.users on update cascade on delete set null; + +update scheduler.scheduling_goal_metadata + set owner = null + where not exists( + select from permissions.users u + where owner = u.username); +update scheduler.scheduling_goal_metadata + set updated_by = null + where not exists( + select from permissions.users u + where updated_by = u.username); alter table scheduler.scheduling_goal_metadata add constraint goal_owner_exists foreign key (owner) diff --git a/deployment/merge_aerie_db/merge_db/migrate_scheduler_functions.sql b/deployment/merge_aerie_db/merge_db/migrate_scheduler_functions.sql index 05288f7263..6b59f77a11 100644 --- a/deployment/merge_aerie_db/merge_db/migrate_scheduler_functions.sql +++ b/deployment/merge_aerie_db/merge_db/migrate_scheduler_functions.sql @@ -1,63 +1,4 @@ begin; ---------------------- --- UPDATE TRIGGERS -- ---------------------- -create or replace trigger increment_revision_on_update_trigger - before update on scheduler.scheduling_specification - for each row - when (pg_trigger_depth() < 1) - execute function util_functions.increment_revision_update(); -drop function scheduler.increment_revision_on_update(); - -create or replace trigger set_timestamp -before update on scheduler.scheduling_condition_metadata -for each row -execute function util_functions.set_updated_at(); -drop function scheduler.scheduling_condition_metadata_set_updated_at(); - -create or replace trigger set_timestamp -before update on scheduler.scheduling_goal_metadata -for each row -execute function util_functions.set_updated_at(); -drop function scheduler.scheduling_goal_metadata_set_updated_at(); - -create function scheduler.create_scheduling_spec_for_new_plan() -returns trigger -security definer -language plpgsql as $$ -declare - spec_id integer; -begin - -- Create a new scheduling specification - insert into scheduler.scheduling_specification (revision, plan_id, plan_revision, horizon_start, horizon_end, - simulation_arguments, analysis_only) - values (0, new.id, new.revision, new.start_time, new.start_time+new.duration, '{}', false) - returning id into spec_id; - - -- Populate the scheduling specification - insert into scheduler.scheduling_specification_goals (specification_id, goal_id, goal_revision, 
priority) - select spec_id, msg.goal_id, msg.goal_revision, msg.priority - from scheduler.scheduling_model_specification_goals msg - where msg.model_id = new.model_id; - - insert into scheduler.scheduling_specification_conditions (specification_id, condition_id, condition_revision) - select spec_id, msc.condition_id, msc.condition_revision - from scheduler.scheduling_model_specification_conditions msc - where msc.model_id = new.model_id; - - return new; -end -$$; - -comment on function scheduler.create_scheduling_spec_for_new_plan() is e'' -'Creates a scheduling specification for a new plan - and populates it with the contents of the plan''s model''s specification.'; - -create trigger scheduling_spec_for_new_plan_trigger -after insert on merlin.plan -for each row -execute function scheduler.create_scheduling_spec_for_new_plan(); - --------------------------------- -- UPDATE FUNCTION DEFINITIONS -- --------------------------------- diff --git a/deployment/merge_aerie_db/merge_db/migrate_scheduler_triggers.sql b/deployment/merge_aerie_db/merge_db/migrate_scheduler_triggers.sql new file mode 100644 index 0000000000..551e5e655e --- /dev/null +++ b/deployment/merge_aerie_db/merge_db/migrate_scheduler_triggers.sql @@ -0,0 +1,60 @@ +begin; +--------------------- +-- UPDATE TRIGGERS -- +--------------------- +create or replace trigger increment_revision_on_update_trigger + before update on scheduler.scheduling_specification + for each row + when (pg_trigger_depth() < 1) + execute function util_functions.increment_revision_update(); +drop function scheduler.increment_revision_on_update(); + +create or replace trigger set_timestamp +before update on scheduler.scheduling_condition_metadata +for each row +execute function util_functions.set_updated_at(); +drop function scheduler.scheduling_condition_metadata_set_updated_at(); + +create or replace trigger set_timestamp +before update on scheduler.scheduling_goal_metadata +for each row +execute function util_functions.set_updated_at(); +drop function scheduler.scheduling_goal_metadata_set_updated_at(); + +create function scheduler.create_scheduling_spec_for_new_plan() +returns trigger +security definer +language plpgsql as $$ +declare + spec_id integer; +begin + -- Create a new scheduling specification + insert into scheduler.scheduling_specification (revision, plan_id, plan_revision, horizon_start, horizon_end, + simulation_arguments, analysis_only) + values (0, new.id, new.revision, new.start_time, new.start_time+new.duration, '{}', false) + returning id into spec_id; + + -- Populate the scheduling specification + insert into scheduler.scheduling_specification_goals (specification_id, goal_id, goal_revision, priority) + select spec_id, msg.goal_id, msg.goal_revision, msg.priority + from scheduler.scheduling_model_specification_goals msg + where msg.model_id = new.model_id; + + insert into scheduler.scheduling_specification_conditions (specification_id, condition_id, condition_revision) + select spec_id, msc.condition_id, msc.condition_revision + from scheduler.scheduling_model_specification_conditions msc + where msc.model_id = new.model_id; + + return new; +end +$$; + +comment on function scheduler.create_scheduling_spec_for_new_plan() is e'' +'Creates a scheduling specification for a new plan + and populates it with the contents of the plan''s model''s specification.'; + +create trigger scheduling_spec_for_new_plan_trigger +after insert on merlin.plan +for each row +execute function scheduler.create_scheduling_spec_for_new_plan(); +end; diff --git 
a/deployment/merge_aerie_db/merge_db/migrate_sequencing.sql b/deployment/merge_aerie_db/merge_db/migrate_sequencing.sql index 6813b15618..8ed61cd5cc 100644 --- a/deployment/merge_aerie_db/merge_db/migrate_sequencing.sql +++ b/deployment/merge_aerie_db/merge_db/migrate_sequencing.sql @@ -9,16 +9,58 @@ alter table metadata.expansion_rule_tags set schema tags; -- Metadata Schema is empty now drop schema metadata; --- Update Foreign Keys +-- Update Triggers +drop trigger set_timestamp on sequencing.expansion_rule; +drop function sequencing.expansion_rule_set_updated_at(); + +create trigger set_timestamp +before update on sequencing.expansion_rule +for each row +execute function util_functions.set_updated_at(); + +drop trigger set_timestamp on sequencing.user_sequence; +drop function sequencing.user_sequence_set_updated_at(); + +create trigger set_timestamp +before update on sequencing.user_sequence +for each row +execute function util_functions.set_updated_at(); + +-- Update Foreign Keys, handling orphans first +delete from tags.expansion_rule_tags + where not exists( + select from tags.tags t + where tag_id = t.id); alter table tags.expansion_rule_tags add foreign key (tag_id) references tags.tags on update cascade on delete cascade; + +delete from sequencing.expanded_sequences + where not exists( + select from merlin.simulation_dataset sd + where simulation_dataset_id = sd.id); alter table sequencing.expanded_sequences add constraint expanded_sequences_to_sim_run foreign key (simulation_dataset_id) references merlin.simulation_dataset on delete cascade; + +update sequencing.expansion_rule + set authoring_mission_model_id = null + where not exists( + select from merlin.mission_model m + where authoring_mission_model_id = m.id); +update sequencing.expansion_rule + set owner = null + where not exists( + select from permissions.users u + where owner = u.username); +update sequencing.expansion_rule + set updated_by = null + where not exists( + select from permissions.users u + where updated_by = u.username); alter table sequencing.expansion_rule add foreign key (authoring_mission_model_id) references merlin.mission_model @@ -35,10 +77,29 @@ alter table sequencing.expansion_rule comment on column sequencing.expansion_rule.activity_type is e'' 'The activity type this expansion rule applies to. 
This type is not model-specific.'; +delete from sequencing.expansion_run + where not exists( + select from merlin.simulation_dataset sd + where simulation_dataset_id = sd.id); alter table sequencing.expansion_run add foreign key (simulation_dataset_id) references merlin.simulation_dataset on delete cascade; + +delete from sequencing.expansion_set + where not exists( + select from merlin.mission_model m + where mission_model_id = m.id); +update sequencing.expansion_set + set owner = null + where not exists( + select from permissions.users u + where owner = u.username); +update sequencing.expansion_set + set updated_by = null + where not exists( + select from permissions.users u + where updated_by = u.username); alter table sequencing.expansion_set add foreign key (mission_model_id) references merlin.mission_model @@ -51,15 +112,35 @@ alter table sequencing.expansion_set references permissions.users on update cascade on delete set null; + +delete from sequencing.sequence + where not exists( + select from merlin.simulation_dataset sd + where simulation_dataset_id = sd.id); alter table sequencing.sequence add foreign key (simulation_dataset_id) references merlin.simulation_dataset on delete cascade; + +delete from sequencing.sequence_to_simulated_activity + where not exists( + select from merlin.simulation_dataset sd + where simulation_dataset_id = sd.id); alter table sequencing.sequence_to_simulated_activity add constraint sequence_to_sim_run foreign key (simulation_dataset_id) references merlin.simulation_dataset on delete cascade; + +delete from sequencing.user_sequence + where not exists( + select from sequencing.command_dictionary cd + where authoring_command_dict_id = cd.id); +update sequencing.user_sequence + set owner = null + where not exists( + select from permissions.users u + where owner = u.username); alter table sequencing.user_sequence add foreign key (authoring_command_dict_id) references sequencing.command_dictionary @@ -69,23 +150,6 @@ alter table sequencing.user_sequence on update cascade on delete cascade; --- Update Triggers -drop trigger set_timestamp on sequencing.expansion_rule; -drop function sequencing.expansion_rule_set_updated_at(); - -create trigger set_timestamp -before update on sequencing.expansion_rule -for each row -execute function util_functions.set_updated_at(); - -drop trigger set_timestamp on sequencing.user_sequence; -drop function sequencing.user_sequence_set_updated_at(); - -create trigger set_timestamp -before update on sequencing.user_sequence -for each row -execute function util_functions.set_updated_at(); - -- Update Views create or replace view sequencing.expansion_set_rule_view as select str.set_id, diff --git a/deployment/merge_aerie_db/merge_db/migrate_ui.sql b/deployment/merge_aerie_db/merge_db/migrate_ui.sql index 27e16b84f4..3de5ea63ab 100644 --- a/deployment/merge_aerie_db/merge_db/migrate_ui.sql +++ b/deployment/merge_aerie_db/merge_db/migrate_ui.sql @@ -8,35 +8,51 @@ comment on schema public is 'standard public schema'; -- Add PGCrypto back to "public" create extension pgcrypto with schema public; --- Add Missing FKeys +-- Update Triggers +drop trigger extensions_set_timestamp on ui.extensions; +drop function ui.extensions_set_updated_at(); +create trigger extensions_set_timestamp + before update on ui.extensions + for each row +execute function util_functions.set_updated_at(); + +drop trigger set_timestamp on ui.view; +drop function ui.view_set_updated_at(); +create trigger set_timestamp +before update on ui.view +for each row +execute 
function util_functions.set_updated_at(); + +-- Add Missing FKeys, Handling Orphans First +delete from ui.extension_roles er + where not exists( + select from permissions.user_roles ur + where er.role = ur.role); alter table ui.extension_roles add foreign key (role) references permissions.user_roles (role) on update cascade on delete cascade; + +update ui.extensions + set owner = null + where not exists( + select from permissions.users u + where owner = u.username); alter table ui.extensions add foreign key (owner) references permissions.users (username) on update cascade on delete set null; + +update ui.view + set owner = null + where not exists( + select from permissions.users u + where owner = u.username); alter table ui.view add foreign key (owner) references permissions.users (username) on update cascade on delete set null; - --- Update Triggers -drop trigger extensions_set_timestamp on ui.extensions; -drop function ui.extensions_set_updated_at(); -create trigger extensions_set_timestamp - before update on ui.extensions - for each row -execute function util_functions.set_updated_at(); - -drop trigger set_timestamp on ui.view; -drop function ui.view_set_updated_at(); -create trigger set_timestamp -before update on ui.view -for each row -execute function util_functions.set_updated_at(); end;
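
A recurring change across these patches is dropping the per-schema *_set_updated_at() trigger functions and re-pointing the set_timestamp triggers at the shared util_functions.set_updated_at(). That shared function's definition is not shown in these patches; the following is only an assumed minimal sketch of what such a trigger function typically looks like, not the actual Aerie definition.

-- Assumed shape of the shared updated_at trigger function; the real
-- util_functions.set_updated_at lives elsewhere in the repository and may differ.
create schema if not exists util_functions; -- already present in the merged database; included so the sketch runs standalone
create or replace function util_functions.set_updated_at()
returns trigger
language plpgsql as $$
begin
  -- Stamp the row being written with the current transaction timestamp.
  new.updated_at = now();
  return new;
end
$$;

With a single shared function, each table only needs an updated_at column and a before-update row-level trigger, which is exactly what the create trigger statements in the hunks above set up.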