Added a test container and sample tests (#123)
* added test container

* updated init.sql, added tests for addresses and seismic

* updated env file and CI workflow

* updated test db url
agennadi authored Dec 18, 2024
1 parent bd446b3 commit b40ec4f
Showing 15 changed files with 81 additions and 67 deletions.
3 changes: 2 additions & 1 deletion .env.example
@@ -8,7 +8,8 @@ POSTGIS_VERSION=3.5.0
FRONTEND_HOST=http://localhost:3000
DATABASE_URL=postgresql://postgres:password@db:5432/qsdatabase # Connection string for the PostgreSQL database
LOCALHOST_DATABASE_URL=postgresql://postgres:password@localhost:5432/qsdatabase #Connection string for the PostgreSQL database when running locally
DATABASE_URL_SQLALCHEMY=postgresql+psycopg2://postgres:password@db:5432/qsdatabase # database url for SQLAlchemy; use postgresql+asyncpg for async calls
DATABASE_URL_SQLALCHEMY=postgresql+psycopg2://postgres:password@db:5432/qsdatabase # database url for SQLAlchemy; use postgresql+asyncpg for async calls, otherwise postgresql+psycopg2
DATABASE_URL_SQLALCHEMY_TEST=postgresql+psycopg2://postgres:password@db:5433/qsdatabase
LOCALHOST_DATABASE_URL_SQLALCHEMY=postgresql+psycopg2://postgres:password@localhost:5432/qsdatabase
ENVIRONMENT=local # For custom application logic
SECRET_KEY=default-secret-key_local # Replace with your actual secret key
1 change: 1 addition & 0 deletions .github/workflows/env_vars.yml
@@ -21,6 +21,7 @@ jobs:
envkey_DATABASE_URL: ${{ secrets.DATABASE_URL }}
envkey_LOCALHOST_DATABASE_URL: ${{ secrets.LOCALHOST_DATABASE_URL }}
envkey_DATABASE_URL_SQLALCHEMY: ${{ secrets.DATABASE_URL_SQLALCHEMY }}
envkey_DATABASE_URL_SQLALCHEMY_TEST: ${{ secrets.DATABASE_URL_SQLALCHEMY_TEST }}
envkey_LOCALHOST_DATABASE_URL_SQLALCHEMY: ${{ secrets.LOCALHOST_DATABASE_URL_SQLALCHEMY }}
envkey_ENVIRONMENT: ${{ secrets.ENVIRONMENT }}
envkey_SECRET_KEY: ${{ secrets.SECRET_KEY }}
1 change: 1 addition & 0 deletions backend/api/config.py
@@ -17,6 +17,7 @@ class Settings(BaseSettings):
database_url: str
localhost_database_url: str
database_url_sqlalchemy: str
database_url_sqlalchemy_test: str
localhost_database_url_sqlalchemy: str
environment: str
secret_key: str
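
For reference, a minimal sketch of how the new database_url_sqlalchemy_test setting can be consumed to build a SQLAlchemy engine against the test container (this mirrors the fixture change in backend/api/tests/test_session_config.py further down; the trivial connectivity check is purely illustrative):

from sqlalchemy import create_engine, text

from backend.api.config import Settings

# Settings subclasses pydantic BaseSettings, so field values come from the
# environment / .env file (DATABASE_URL_SQLALCHEMY_TEST and friends).
settings = Settings()

# Engine pointed at the db_test container published on port 5433.
test_engine = create_engine(settings.database_url_sqlalchemy_test)

# Simple connectivity check against the test database.
with test_engine.connect() as conn:
    assert conn.execute(text("select 1")).scalar() == 1
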
3 changes: 3 additions & 0 deletions backend/api/schemas/addresses_schemas.py
@@ -3,6 +3,7 @@
from geojson_pydantic import Feature, FeatureCollection, Point
from geoalchemy2.shape import to_shape
from typing import List
from datetime import datetime


class AddressProperties(BaseModel):
@@ -16,6 +17,7 @@ class AddressProperties(BaseModel):

eas_fullid: str
address: str
update_timestamp: datetime


class AddressFeature(Feature):
@@ -48,6 +50,7 @@ def from_sqlalchemy_model(address: Address):
properties={
"eas_fullid": address.eas_fullid,
"address": address.address,
"update_timestamp": address.update_timestamp,
},
)

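
A minimal, self-contained sketch of what the new update_timestamp field does at the serialization boundary, using the seed values from init.sql further down (assuming Pydantic v2; on v1 the equivalent call is .json()):

from datetime import datetime

from pydantic import BaseModel


# Re-declared locally to keep the sketch standalone; mirrors AddressProperties above.
class AddressProperties(BaseModel):
    eas_fullid: str
    address: str
    update_timestamp: datetime


props = AddressProperties(
    eas_fullid="495990-764765-0",
    address="46 AUBURN ST",
    update_timestamp=datetime(2024, 11, 28, 17, 11, 26),
)

# datetime fields come out as ISO-8601 strings in the GeoJSON properties, e.g.
# {"eas_fullid":"495990-764765-0","address":"46 AUBURN ST",
#  "update_timestamp":"2024-11-28T17:11:26"}
print(props.model_dump_json())
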
3 changes: 3 additions & 0 deletions backend/api/schemas/landslide_schemas.py
@@ -4,6 +4,7 @@
from geoalchemy2.shape import to_shape
from typing import List
import json
from datetime import datetime


class LandslideProperties(BaseModel):
@@ -17,6 +18,7 @@ class LandslideProperties(BaseModel):

identifier: int
gridcode: int
update_timestamp: datetime


class LandslideFeature(Feature):
@@ -53,6 +55,7 @@ def from_sqlalchemy_model(landslide_zone: LandslideZone):
properties={
"identifier": landslide_zone.identifier,
"gridcode": landslide_zone.gridcode,
"update_timestamp": landslide_zone.update_timestamp,
},
)

3 changes: 3 additions & 0 deletions backend/api/schemas/liquefaction_schemas.py
@@ -4,6 +4,7 @@
from geoalchemy2.shape import to_shape
from typing import List
import json
from datetime import datetime


class LiquefactionProperties(BaseModel):
@@ -17,6 +18,7 @@ class LiquefactionProperties(BaseModel):

identifier: int
liq: str
update_timestamp: datetime


class LiquefactionFeature(Feature):
@@ -53,6 +55,7 @@ def from_sqlalchemy_model(liquefaction_zone: LiquefactionZone):
properties={
"identifier": liquefaction_zone.identifier,
"liq": liquefaction_zone.liq,
"update_timestamp": liquefaction_zone.update_timestamp,
},
)

3 changes: 3 additions & 0 deletions backend/api/schemas/seismic_schemas.py
@@ -4,6 +4,7 @@
from geoalchemy2.shape import to_shape
from typing import List
import json
from datetime import datetime


class SeismicProperties(BaseModel):
@@ -15,6 +16,7 @@ class SeismicProperties(BaseModel):
"""

identifier: int
update_timestamp: datetime


class SeismicFeature(Feature):
@@ -50,6 +52,7 @@ def from_sqlalchemy_model(seismic_hazard_zone: SeismicHazardZone):
geometry=json.loads(seismic_hazard_zone.multipolygon_as_geosjon),
properties={
"identifier": seismic_hazard_zone.identifier,
"update_timestamp": seismic_hazard_zone.update_timestamp,
},
)

3 changes: 3 additions & 0 deletions backend/api/schemas/soft_story_schemas.py
@@ -4,6 +4,7 @@
from geoalchemy2.shape import to_shape
from typing import List
import json
from datetime import datetime


class SoftStoryProperties(BaseModel):
@@ -15,6 +16,7 @@ class SoftStoryProperties(BaseModel):
"""

identifier: int
update_timestamp: datetime


class SoftStoryFeature(Feature):
@@ -55,6 +57,7 @@ def from_sqlalchemy_model(soft_story: SoftStoryProperty):
geometry={"type": "Point", "coordinates": coordinates},
properties={
"identifier": soft_story.identifier,
"update_timestamp": soft_story.update_timestamp,
},
)

3 changes: 3 additions & 0 deletions backend/api/schemas/tsunami_schemas.py
@@ -3,6 +3,7 @@
from geojson_pydantic import Feature, FeatureCollection, MultiPolygon
from typing import List
import json
from datetime import datetime


class TsunamiProperties(BaseModel):
@@ -16,6 +17,7 @@ class TsunamiProperties(BaseModel):

identifier: int
evacuate: str
update_timestamp: datetime


class TsunamiFeature(Feature):
@@ -52,6 +54,7 @@ def from_sqlalchemy_model(tsunami_zone: TsunamiZone):
properties={
"identifier": tsunami_zone.identifier,
"evacuate": tsunami_zone.evacuate,
"update_timestamp": tsunami_zone.update_timestamp,
},
)

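
These schema modules all convert stored geometries before building Feature objects: point columns go through geoalchemy2.shape.to_shape, while the zone tables feed a precomputed GeoJSON string (e.g. multipolygon_as_geosjon) into json.loads. A rough sketch of the point path, assuming shapely is available (it is a geoalchemy2 dependency); the sample coordinates come from the seeded address:

from geoalchemy2.elements import WKTElement
from geoalchemy2.shape import to_shape

# Stand-in for the value of a Geometry(point, 4326) column such as addresses.point.
point_element = WKTElement("POINT(-122.41228 37.77967)", srid=4326)

# to_shape yields a shapely geometry; for a Point, .x/.y provide the
# [longitude, latitude] pair used as GeoJSON "coordinates" in AddressFeature.
shp = to_shape(point_element)
print([shp.x, shp.y])  # [-122.41228, 37.77967]
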
14 changes: 12 additions & 2 deletions backend/api/tests/test_addresses.py
@@ -2,7 +2,17 @@
from backend.api.tests.test_session_config import test_engine, test_session, client


def test_get_address(client):
def test_get_address_by_id(client):
id = "495990-764765-0"
response = client.get(f"/api/addresses/{id}")
response = client.get(f"/addresses/{id}")
response_dict = response.json()
assert response.status_code == 200
assert response_dict["properties"]["address"] == "46 AUBURN ST"
assert response_dict["geometry"]["coordinates"] == [-122.41228, 37.77967]


def test_get_addresses(client):
response = client.get(f"/addresses/")
response_dict = response.json()
assert response.status_code == 200
assert len(response_dict) == 2
52 changes: 6 additions & 46 deletions backend/api/tests/test_seismic.py
@@ -1,50 +1,10 @@
import pytest
from fastapi.testclient import TestClient
from backend.api.tests.test_session_config import test_engine, test_session, client

# Will the .. be stable?
from ..main import app
from ..schemas.geo import Polygon


@pytest.fixture
def client():
return TestClient(app)


def test_delete_polygon(client):
response = client.delete("/api/polygons/1?table_name=seismic")
assert response.status_code == 200
# Temporary guaranteed failure until test is written
assert False


def test_put_polygon(client):
response = client.put(
"/api/polygons/1?table_name=seismic", json=Polygon().model_dump()
)
assert response.status_code == 200
# Temporary guaranteed failure until test is written
assert False


def test_post_polygon(client):
response = client.put(
"/api/polygons/1?table_name=seismic", json=Polygon().model_dump()
)
assert response.status_code == 200
# Temporary guaranteed failure until test is written
assert False


def test_get_polygon(client):
response = client.get("/api/polygons/1?table_name=seismic")
assert response.status_code == 200
# Temporary guaranteed failure until test is written
assert False


def test_get_seismic_risk(client):
response = client.get("/api/seismic-risk/address")
def test_get_seismic_hazard_zones(client):
response = client.get(f"/seismic-zones/")
response_dict = response.json()
print(response_dict)
assert response.status_code == 200
# Temporary guaranteed failure until test is written
assert False
assert len(response_dict) == 2
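
The new assertion expects two features because init.sql (below) seeds two rows into seismic_hazard_zones. The /seismic-zones/ route handler itself is not part of this diff; the following is only a hedged sketch of what it plausibly looks like, reusing the SeismicFeature schema shown above — the model and dependency import paths are assumptions:

from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session

from backend.api.schemas.seismic_schemas import SeismicFeature
from backend.api.models.seismic import SeismicHazardZone  # assumed model path
from backend.api.dependencies import get_db               # assumed session dependency

router = APIRouter(prefix="/seismic-zones")


@router.get("/")
def get_seismic_hazard_zones(db: Session = Depends(get_db)) -> list[SeismicFeature]:
    # Each ORM row becomes a GeoJSON Feature via the schema helper above,
    # so the test's response.json() is a list with one entry per seeded zone.
    zones = db.query(SeismicHazardZone).all()
    return [SeismicFeature.from_sqlalchemy_model(zone) for zone in zones]
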
2 changes: 1 addition & 1 deletion backend/api/tests/test_session_config.py
@@ -11,7 +11,7 @@
# Set up a test database engine
@pytest.fixture(scope="session")
def test_engine():
engine = create_engine(settings.localhost_database_url_sqlalchemy)
engine = create_engine(settings.database_url_sqlalchemy_test)
yield engine


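
Only the engine fixture is visible in this hunk, but the test files above also import test_session and client from test_session_config.py. A hedged sketch of how those two fixtures are typically wired for FastAPI + SQLAlchemy; the get_db dependency name and its import path are assumptions, not taken from this diff:

import pytest
from fastapi.testclient import TestClient
from sqlalchemy.orm import sessionmaker

from backend.api.main import app             # app path inferred from the old `from ..main import app`
from backend.api.dependencies import get_db  # assumed DB-session dependency


@pytest.fixture
def test_session(test_engine):
    # One connection + transaction per test, rolled back afterwards for isolation.
    connection = test_engine.connect()
    transaction = connection.begin()
    session = sessionmaker(bind=connection)()
    yield session
    session.close()
    transaction.rollback()
    connection.close()


@pytest.fixture
def client(test_session):
    # Route handlers get the transactional test session instead of the real one.
    app.dependency_overrides[get_db] = lambda: test_session
    with TestClient(app) as test_client:
        yield test_client
    app.dependency_overrides.clear()
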
38 changes: 26 additions & 12 deletions backend/database/init.sql
@@ -5,14 +5,11 @@ create extension if not exists postgis;

set search_path to public;

create table if not exists address (
eas_baseid integer not null,
eas_subid integer not null,
create table if not exists addresses (
eas_fullid varchar(255) primary key,
address varchar(255) not null,
unit_number varchar(255),
address_number integer,
address_number_suffix varchar(255) not null,
street_name varchar(255) not null,
street_type varchar(255),
parcel_number varchar(255),
@@ -22,22 +19,39 @@ create table if not exists address (
longitude float not null,
latitude float not null,
zip_code integer not null,
point geography(point, 4326) not null,
point Geometry(point, 4326) not null,
supdist varchar(255),
supervisor integer,
supdistpad varchar(255),
numbertext varchar(255),
supname varchar(255),
nhood varchar(255),
complete_landmark_name varchar(255),
sfdata_as_of date not null,
sfdata_loaded_at timestamp not null
sfdata_as_of timestamp not null,
created_timestamp timestamp,
update_timestamp timestamp
);

create table if not exists seismic_hazard_zones (
identifier integer primary key,
geometry Geometry(multipolygon, 4326) not null,
update_timestamp timestamp
);
-- Potential functions to creat a Point: ST_MakePoint(-122.41228, 37.77967); ST_GeomFromText('POINT(-122.41228, 37.77967)', 4326); ST_SetSRID(ST_MakePoint(-122.41228, 37.77967), 4326)

insert into address (eas_baseid, eas_subid, eas_fullid, address, unit_number, address_number, address_number_suffix, street_name, street_type, parcel_number, block, lot, cnn, longitude, latitude, zip_code, point, supdist, supervisor, supdistpad, numbertext, supname, nhood, complete_landmark_name, sfdata_as_of, sfdata_loaded_at) values
(495990, 764765, '495990-764765-0', '46 AUBURN ST', '', 46, '', 'AUBURN', 'ST', '', '', '', 830000, -122.41228, 37.77967, 94133, ST_SetSRID(ST_MakePoint(-122.41228, 37.77967), 4326), 'SUPERVISORIAL DISTRICT 3', 3, 3, 'THREE', 'Aaron Peskin', 'Nob Hill', '', '2024/10/28 03:40:00 AM', '2024/10/28 10:11:26 PM');
insert into addresses (eas_fullid, address, unit_number, address_number, street_name, street_type, parcel_number, block, lot, cnn, longitude, latitude, zip_code, point, supdist, supervisor, supname, nhood, sfdata_as_of, created_timestamp, update_timestamp) values
('495990-764765-0', '46 AUBURN ST', '', 46, 'AUBURN', 'ST', '', '', '', 830000, -122.41228, 37.77967, 94133, ST_SetSRID(ST_MakePoint(-122.41228, 37.77967), 4326), 'SUPERVISORIAL DISTRICT 3', 3, 'Aaron Peskin', 'Nob Hill', '2024/10/28 03:40:00 AM', '2024/10/28 10:11:26 PM', '2024/11/28 5:11:26 PM'),
('12345-678-9', '10 TEST ST', '', 10, 'TEST', 'ST', '', '', '', 800050, -122.41509, 37.64097, 94000, ST_SetSRID(ST_MakePoint(-122.41509, 37.64097), 4326), 'SUPERVISORIAL DISTRICT 2', 2, 'User Name', 'Nob Hill', '2024/10/29 03:40:00 AM', '2024/10/29 10:11:26 PM', '2024/11/29 5:11:26 PM');

insert into seismic_hazard_zones (identifier, geometry, update_timestamp) values
(1, ST_GeomFromText('MULTIPOLYGON(
((-122.5 37.7, -122.4 37.7, -122.4 37.8, -122.5 37.8, -122.5 37.7)),
((-122.6 37.6, -122.5 37.6, -122.5 37.7, -122.6 37.7, -122.6 37.6))
)', 4326),
'2024/12/16 5:10:00 PM'),

(2, ST_GeomFromText('MULTIPOLYGON(
((-122.4 37.8, -122.3 37.8, -122.35 37.85, -122.4 37.8)),
((-122.5 37.7, -122.4 37.7, -122.4 37.8, -122.5 37.8, -122.5 37.7))
)', 4326),
'2024/12/17 3:10:00 PM');


create table if not exists combined_risk (
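
For orientation, the two MULTIPOLYGON literals seeded here are what eventually surfaces as GeoJSON geometry in the seismic responses. A quick sketch of that WKT-to-GeoJSON relationship using shapely; the literal is copied from the first seed row:

import json

from shapely import wkt
from shapely.geometry import mapping

# First seismic_hazard_zones seed row.
multipolygon = wkt.loads(
    "MULTIPOLYGON("
    "((-122.5 37.7, -122.4 37.7, -122.4 37.8, -122.5 37.8, -122.5 37.7)),"
    "((-122.6 37.6, -122.5 37.6, -122.5 37.7, -122.6 37.7, -122.6 37.6)))"
)

# mapping() produces the GeoJSON-style dict that the schema code loads via json.loads.
geometry = mapping(multipolygon)
print(geometry["type"])           # MultiPolygon
print(json.dumps(geometry)[:60])  # {"type": "MultiPolygon", "coordinates": [[[[-122.5, 37.7], ...
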
2 changes: 1 addition & 1 deletion backend/database/tests/test_database.py
@@ -8,7 +8,7 @@
@pytest.fixture(scope="module")
def test_db():
# Create a session using the existing database
engine = create_engine(settings.database_url)
engine = create_engine(settings.database_url_sqlalchemy_test)
connection = engine.connect()

# Begin a transaction
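
The fixture around this change (only partially visible in the hunk) opens a connection and begins a transaction so the module's tests can be rolled back. A hedged completion of that pattern against the test database; the settings import path is assumed:

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from backend.api.config import settings  # assumed module-level Settings instance


@pytest.fixture(scope="module")
def test_db():
    # Create a session against the db_test container (port 5433).
    engine = create_engine(settings.database_url_sqlalchemy_test)
    connection = engine.connect()

    # Begin a transaction that is rolled back after the module's tests finish.
    transaction = connection.begin()
    session = sessionmaker(bind=connection)()

    yield session

    session.close()
    transaction.rollback()
    connection.close()
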
17 changes: 13 additions & 4 deletions compose.yaml
@@ -20,30 +20,39 @@ services:
env_file:
- .env
environment:
- PYTHONPATH=/backend:/
- PYTHONPATH=/backend:/
ports:
- 8000:8000
depends_on:
db:
condition: service_healthy
volumes:
- ./backend:/backend
db:
db_test:
image: postgis-custom
build:
context: ./backend/database
dockerfile: Dockerfile
container_name: my_postgis_db_test
restart: always
env_file:
- .env
volumes:
- ./backend/database:/docker-entrypoint-initdb.d # Mount the SQL scripts directory
ports:
- 5433:5432
db:
image: postgis-custom
container_name: my_postgis_db
restart: always
env_file:
- .env
volumes:
- db-data:/var/lib/postgresql/data
- ./backend/database:/docker-entrypoint-initdb.d # Mount the SQL scripts directory
ports:
- 5432:5432
healthcheck:
test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"]
test: [ "CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}" ]
interval: 10s
timeout: 5s
retries: 5
