feat: 18 alert list page (#21)
franTarkenton authored Feb 6, 2024
1 parent 475b3ae commit 172528d
Showing 61 changed files with 2,632 additions and 800 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -132,3 +132,7 @@ frontend-old/**

backend/src/junk.py
Hydrological Alerting-installation-dev.json
backend/junk.sql
demo/database.db
backend/alembic/u-junk/**
test_db.db
24 changes: 20 additions & 4 deletions backend/README.md
@@ -2,9 +2,10 @@
### Features
- [x] FastAPI
- [x] SQLAlchemy
- [x] SQLModel
- [x] Poetry
- [x] Prospector
- [x] Flyway
- [x] Alembic
- [x] Docker
- [x] Docker Compose
- [x] GitHub Actions
@@ -17,14 +18,29 @@

### Local Development

[Alembic Docs](../docs/db_migration_alembic.md)

#### Local Dev with Docker

- Run the `docker compose -f .\docker-compose.py.yml up` command to start the entire stack.
* The frontend could not be made to work in the docker compose stack, so startup is now a two-step process

##### Backend Start

- Run the `docker compose up backend-py` command to start the backend stack.
- Database changes are applied automatically by Alembic
- The models are generated into `backend-py/src/v1/models/model.py`
- The migrations are generated from the SQLModel definitions in `backend/src/V1/models/model.py` (a rough sketch of such a model follows this list)
- Alembic versioned migration files are at `backend/alembic/versions`
- The API documentation is available at http://localhost:3003/docs
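
As a rough illustration of what the migrations are generated from, a minimal SQLModel table class might look like the sketch below. This is not copied from the repository; it simply mirrors the `alert_levels` table created in the V5 migration further down, and the real definitions in `model.py` are the source of truth.

```python
# Illustrative sketch only -- not the project's actual model definitions.
# It mirrors the alert_levels table from the V5 migration in this commit.
from typing import Optional

from sqlmodel import Field, SQLModel


class AlertLevels(SQLModel, table=True):
    __tablename__ = "alert_levels"
    __table_args__ = {"schema": "py_api"}

    alert_level_id: Optional[int] = Field(default=None, primary_key=True)
    alert_level: str
```

Alembic's autogenerate compares `SQLModel.metadata` (see `env.py` below) against the database and writes the versioned migration files from the difference.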

#### Local Dev - poetry
##### Frontend Start

- Navigate to the frontend directory `cd frontend/hydro_alerting`
- Start the server with `npm run start-local`
- This establishes the proxy to the backend on the CLI; when deployed, the proxy is handled by Caddy instead
- App URL is `http://localhost:4200`

#### Local Backend Dev - poetry

* create the env `cd backend; poetry install`
* activate the env `source $(poetry env info --path)/bin/activate`
15 changes: 15 additions & 0 deletions backend/alembic/data/alert_levels.json
@@ -0,0 +1,15 @@
[
{
"id": 1,
"alert_level": "High Streamflow Advisory"
},
{
"id": 2,
"alert_level": "Flood Watch"
},
{
"id": 3,
"alert_level": "Flood Warning"
}

]
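
This JSON is seed data for the `alert_levels` table created in the V5 migration below (note the JSON key is `id` while the table column is `alert_level_id`). The loading code is not part of the excerpt shown here; the sketch below is only one way a data migration could insert it, using Alembic's `op.bulk_insert`, with the path handling and key remapping assumed.

```python
# Sketch only: how a data migration *could* load alert_levels.json.
# The commit's actual seeding code is not shown in this excerpt; the path
# handling and key remapping below are assumptions.
import json
from pathlib import Path

import sqlalchemy as sa
from alembic import op

ALERT_LEVELS_JSON = Path(__file__).parents[1] / "data" / "alert_levels.json"

alert_levels_table = sa.table(
    "alert_levels",
    sa.column("alert_level_id", sa.Integer),
    sa.column("alert_level", sa.String),
    schema="py_api",
)


def upgrade() -> None:
    rows = json.loads(ALERT_LEVELS_JSON.read_text())
    # The JSON uses "id"; the table column is alert_level_id, so remap here.
    op.bulk_insert(
        alert_levels_table,
        [{"alert_level_id": r["id"], "alert_level": r["alert_level"]} for r in rows],
    )
```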
58 changes: 41 additions & 17 deletions backend/alembic/env.py
@@ -2,14 +2,15 @@
from logging.config import fileConfig

import src.core.config as app_config
from alembic import context
from alembic.script import ScriptDirectory
from sqlalchemy import create_engine, pool
from sqlalchemy import create_engine

# from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import CreateSchema
from sqlmodel import SQLModel # NEW
from src.v1.models.model import * # NEW make *
from sqlmodel import SQLModel
from src.v1.models.model import *

# from ...src.v1.models.model import metadata
from alembic import context
from alembic.script import ScriptDirectory

config = context.config
if config.config_file_name is not None:
@@ -35,7 +36,16 @@
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# target_metadata = SQLModel.metadata
target_metadata = SQLModel.metadata
# target_metadata = MetaData()

# for datum in target_metadata_sql_model:
# for table in datum.tables.values():
# print(f'table: {table}')
# table.to_metadata(target_metadata)

print("target_metadata: ", target_metadata)

# other values from the config, defined by the needs of env.py,
# can be acquired:
@@ -44,7 +54,7 @@


def process_revision_directives(context, revision, directives):
"""overriding the default generation of revision ids to use a
"""overriding the default generation of revision ids to use a
sequential integer instead of a hex string.
:param context: _description_
@@ -80,7 +90,9 @@ def get_url():
LOGGER.debug(f"url from -x: {url}")

if not url:
LOGGER.debug(f"app_config.Configuration.SQLALCHEMY_DATABASE_URI: {app_config.Configuration.SQLALCHEMY_DATABASE_URI}")
LOGGER.debug(
f"app_config.Configuration.SQLALCHEMY_DATABASE_URI: {app_config.Configuration.SQLALCHEMY_DATABASE_URI}"
)
url = app_config.Configuration.SQLALCHEMY_DATABASE_URI.unicode_string()
LOGGER.debug(f"url from app config: {url}")
LOGGER.debug(f"captured the url string: {url}")
@@ -100,16 +112,16 @@ def run_migrations_offline() -> None:
script output.
"""
include_schemas=True
include_schemas = True
LOGGER.debug("running migrations offline")
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
literal_binds=True,
target_metadata=target_metadata,
version_table='alembic_version',
version_table="alembic_version",
version_table_schema=app_config.Configuration.DEFAULT_SCHEMA,
process_revision_directives=process_revision_directives
process_revision_directives=process_revision_directives,
)

with context.begin_transaction():
@@ -123,30 +135,42 @@ def run_migrations_online() -> None:
and associate a connection with the context.
"""
include_schemas=True
include_schemas = True
url = get_url()
LOGGER.debug(f"using url: {url}")
connectable = create_engine(url)
connectable = create_engine(
url,
execution_options={
"schema_translate_map": {
"tenant_schema": app_config.Configuration.DEFAULT_SCHEMA
}
},
)

with connectable.connect() as connection:
# connection(execution_options={"schema_translate_map": {"tenant_schema": app_config.Configuration.DEFAULT_SCHEMA}})

context.configure(
include_schemas=True,
connection=connection,
version_table='alembic_version',
compare_type=True,
version_table="alembic_version",
target_metadata=target_metadata,
version_table_schema=app_config.Configuration.DEFAULT_SCHEMA,
process_revision_directives=process_revision_directives
process_revision_directives=process_revision_directives,
)
schema_create = CreateSchema(
app_config.Configuration.DEFAULT_SCHEMA, if_not_exists=True
)
schema_create = CreateSchema(app_config.Configuration.DEFAULT_SCHEMA, if_not_exists=True)
LOGGER.debug(f"schema_create: {schema_create}")
connection.execute(schema_create)
# create_schema_sql = 'CREATE SCHEMA IF NOT EXISTS {}'
# connection.execute(create_schema_sql)


with context.begin_transaction():
context.run_migrations()


if context.is_offline_mode():
run_migrations_offline()
else:
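
Two details of the collapsed sections above are worth noting. The new `schema_translate_map` on the engine rewrites any `tenant_schema` references to the configured default schema while migrations run, and the truncated `process_revision_directives` docstring says revision ids are sequential integers rather than hex strings (hence the V4/V5 file names below). The hook body is not visible in this excerpt; the following is a sketch of the common Alembic pattern it likely follows, with the "V" prefix assumed from the file names.

```python
# Sketch of a sequential-revision-id hook; the real body is collapsed in the
# diff above, and the "V" prefix/parsing here is assumed from the file names.
from alembic.script import ScriptDirectory


def process_revision_directives(context, revision, directives):
    migration_script = directives[0]
    # Find the current head revision in backend/alembic/versions.
    head_revision = ScriptDirectory.from_config(context.config).get_current_head()
    if head_revision is None:
        new_rev_id = 1
    else:
        new_rev_id = int(head_revision.lstrip("V")) + 1
    migration_script.rev_id = f"V{new_rev_id}"
```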
30 changes: 30 additions & 0 deletions backend/alembic/versions/V4_rename_basin_and_sub_basin_pks.py
@@ -0,0 +1,30 @@
"""rename basin and sub basin pks
Revision ID: V4
Revises: V3
Create Date: 2024-01-31 12:01:14.390799
"""
from typing import Sequence, Union

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "V4"
down_revision: Union[str, None] = "V3"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.execute("ALTER TABLE py_api.basins RENAME COLUMN id TO basin_id;")
op.execute("ALTER TABLE py_api.subbasins RENAME COLUMN id TO subbasin_id;")
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.execute("ALTER TABLE py_api.basins RENAME COLUMN basin_id TO id;")
op.execute("ALTER TABLE py_api.subbasins RENAME COLUMN subbasin_id TO id;")
# ### end Alembic commands ###
114 changes: 114 additions & 0 deletions backend/alembic/versions/V5_create_alert_tables.py
@@ -0,0 +1,114 @@
"""create alert tables
Revision ID: V5
Revises: V4
Create Date: 2024-02-01 13:28:40.806607
"""

from typing import Sequence, Union

import sqlalchemy as sa
import sqlmodel
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "V5"
down_revision: Union[str, None] = "V4"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"alert_levels",
sa.Column("alert_level_id", sa.Integer(), nullable=False),
sa.Column("alert_level", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.PrimaryKeyConstraint("alert_level_id"),
schema="py_api",
)
op.create_table(
"alerts",
sa.Column(
"alert_description", sqlmodel.sql.sqltypes.AutoString(), nullable=False
),
sa.Column(
"alert_hydro_conditions", sqlmodel.sql.sqltypes.AutoString(), nullable=False
),
sa.Column(
"alert_meteorological_conditions",
sqlmodel.sql.sqltypes.AutoString(),
nullable=False,
),
sa.Column("alert_created", sa.DateTime(), nullable=False),
sa.Column("alert_updated", sa.DateTime(), nullable=False),
sa.Column("author_name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("alert_status", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("alert_id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("alert_id"),
schema="py_api",
)
op.create_table(
"cap_event",
sa.Column("alert_id", sa.Integer(), nullable=False),
sa.Column("alert_level_id", sa.Integer(), nullable=False),
sa.Column(
"cap_event_status", sqlmodel.sql.sqltypes.AutoString(), nullable=False
),
sa.Column("cap_event_created_date", sa.DateTime(), nullable=False),
sa.Column("cap_event_updated_date", sa.DateTime(), nullable=False),
sa.Column("cap_event_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(
["alert_id"],
["py_api.alerts.alert_id"],
),
sa.ForeignKeyConstraint(
["alert_level_id"],
["py_api.alert_levels.alert_level_id"],
),
sa.PrimaryKeyConstraint("cap_event_id"),
schema="py_api",
)
op.create_table(
"cap_event_areas",
sa.Column("cap_event_area_id", sa.Integer(), nullable=False),
sa.Column("basin_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(
["basin_id"],
["py_api.basins.basin_id"],
),
sa.PrimaryKeyConstraint("cap_event_area_id"),
)
op.create_table(
"alert_areas",
sa.Column("alert_id", sa.Integer(), nullable=False),
sa.Column("basin_id", sa.Integer(), nullable=False),
sa.Column("alert_level_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(
["alert_id"],
["py_api.alerts.alert_id"],
),
sa.ForeignKeyConstraint(
["alert_level_id"],
["py_api.alert_levels.alert_level_id"],
),
sa.ForeignKeyConstraint(
["basin_id"],
["py_api.basins.basin_id"],
),
sa.PrimaryKeyConstraint("alert_id", "basin_id", "alert_level_id"),
sa.UniqueConstraint("alert_id", "basin_id", "alert_level_id"),
schema="py_api",
)
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("alert_areas", schema="py_api")
op.drop_table("cap_event_areas", schema="py_api")
op.drop_table("cap_event", schema="py_api")
op.drop_table("alerts", schema="py_api")
op.drop_table("alert_levels", schema="py_api")
# ### end Alembic commands ###
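
These tables back the alert list page this commit adds. The endpoint itself lives in one of the other changed files and is not shown here; the route below is purely illustrative (the `Alerts` model name, connection URL, and route path are assumptions), showing how a FastAPI + SQLModel handler would list the rows.

```python
# Illustrative only: a FastAPI route of the kind that would back an alert list
# page. The Alerts model name, connection URL, and route path are assumptions.
from collections.abc import Iterator

from fastapi import APIRouter, Depends
from sqlmodel import Session, create_engine, select

from src.v1.models.model import Alerts  # hypothetical import

engine = create_engine("postgresql://user:password@localhost/hydro")  # placeholder


def get_session() -> Iterator[Session]:
    with Session(engine) as session:
        yield session


router = APIRouter()


@router.get("/alerts")
def list_alerts(session: Session = Depends(get_session)):
    # Return every alert; the real endpoint may filter by status or join areas.
    return session.exec(select(Alerts)).all()
```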