Tlc (#8)
* TLC

- Remove TYPE_CHECKING code
- Improve type checking

* Add translation code and product validation task

This adds a translations endpoint that serves the workflow-specific translations.
It also adds a task and a schedule that run sanity checks on the database.

* Bump version: 0.0.9 → 0.0.10

* Fix import sorting
hmvp authored Apr 6, 2021
1 parent f43f25a commit c4b1e87
Showing 24 changed files with 455 additions and 46 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.0.9
current_version = 0.0.10
commit = False
tag = False
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-(?P<release>[a-z]+)(?P<build>\d+))?
2 changes: 2 additions & 0 deletions .gitignore
@@ -127,3 +127,5 @@ dmypy.json

# Pyre type checker
.pyre/

.vscode
2 changes: 1 addition & 1 deletion orchestrator/__init__.py
@@ -13,7 +13,7 @@

"""This is the orchestrator workflow engine."""

__version__ = "0.0.9"
__version__ = "0.0.10"

from orchestrator.app import OrchestratorCore
from orchestrator.settings import app_settings, oauth2_settings
6 changes: 6 additions & 0 deletions orchestrator/api/api_v1/api.py
@@ -26,6 +26,7 @@
settings,
subscription_customer_descriptions,
subscriptions,
translations,
user,
workflows,
)
@@ -79,3 +80,8 @@
settings.router, prefix="/settings", tags=["Core", "Settings"], dependencies=[Depends(opa_security_default)]
)
api_router.include_router(health.router, prefix="/health", tags=["Core"])
api_router.include_router(
translations.router,
prefix="/translations",
tags=["Core", "Translations"],
)
32 changes: 32 additions & 0 deletions orchestrator/api/api_v1/endpoints/translations.py
@@ -0,0 +1,32 @@
# Copyright 2019-2020 SURF.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Dict

import structlog
from fastapi import Query
from fastapi.routing import APIRouter

from orchestrator.services.translations import generate_translations

logger = structlog.get_logger(__name__)


router = APIRouter()


@router.get("/{language}", response_model=dict)
def get_translations(language: str = Query(..., regex="^[a-z]+-[A-Z]+$")) -> Dict[str, str]:
translations = generate_translations(language)

return translations
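A quick usage sketch for the new endpoint (the host, port, and `/api` prefix below are assumptions for illustration, not taken from this commit); the language parameter must match the `^[a-z]+-[A-Z]+$` regex, e.g. `en-GB` or `nl-NL`:

```python
# Minimal sketch: fetch translations from a locally running orchestrator instance.
# The base URL and "/api" prefix are assumptions, not part of this commit.
import requests

response = requests.get("http://localhost:8080/api/translations/en-GB")
response.raise_for_status()
translations = response.json()  # dict produced by generate_translations("en-GB")
print(translations)
```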
6 changes: 3 additions & 3 deletions orchestrator/domain/base.py
@@ -485,11 +485,11 @@ def from_db(
sub_instances = cls._load_instances(subscription_instance.children, status)

try:
return cls( # type: ignore
return cls(
subscription_instance_id=subscription_instance_id,
label=label,
**instance_values,
**sub_instances,
**sub_instances, # type: ignore
)
except ValidationError:
logger.exception(
@@ -869,7 +869,7 @@ def from_product_id(
end_date=end_date,
note=note,
**fixed_inputs,
**instances,
**instances, # type: ignore
)

@classmethod
2 changes: 1 addition & 1 deletion orchestrator/exception_handlers.py
@@ -13,12 +13,12 @@

from http import HTTPStatus

from nwastdlib.ex import show_ex
from starlette.requests import Request
from starlette.responses import JSONResponse

from orchestrator.api.error_handling import ProblemDetailException
from orchestrator.forms import FormException, FormNotCompleteError, FormValidationError
from orchestrator.utils.errors import show_ex
from orchestrator.utils.json import json_dumps, json_loads

PROBLEM_DETAIL_FIELDS = ("title", "type")
6 changes: 3 additions & 3 deletions orchestrator/forms/validators.py
@@ -137,7 +137,7 @@ def __get_validators__(cls) -> Generator:
yield cls.must_be_complete

@classmethod
def enum_validator(cls, v: Any, field: "ModelField") -> "Accept":
def enum_validator(cls, v: Any, field: "ModelField") -> str:
try:
enum_v = cls.Values(v)
except ValueError:
@@ -175,9 +175,9 @@ class Choice(strEnum):
label: ClassVar[str]

def __new__(cls, value: str, label: Optional[str] = None) -> "Choice":
obj = str.__new__(cls, value) # type:ignore
obj = str.__new__(cls, value)
obj._value_ = value
obj.label = label or value
obj.label = label or value # type:ignore
return obj

@classmethod
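For context, a minimal sketch of how a `Choice` subclass can use the optional per-member label (assumed usage; the member names and values are hypothetical):

```python
# Assumed usage of Choice with labels; member names and values are hypothetical.
from orchestrator.forms.validators import Choice


class PortSpeed(Choice):
    GIGABIT = ("1000", "1 Gbit/s")  # value "1000", label "1 Gbit/s"
    TEN_GIGABIT = "10000"           # label falls back to the value


assert PortSpeed.GIGABIT.label == "1 Gbit/s"
assert PortSpeed.TEN_GIGABIT.value == "10000"
```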
39 changes: 20 additions & 19 deletions orchestrator/migrations/helpers.py
@@ -1,43 +1,44 @@
from uuid import uuid4
from typing import Any, Dict, List
from uuid import UUID, uuid4

import sqlalchemy as sa


def get_resource_type_id_by_name(conn, name):
def get_resource_type_id_by_name(conn: sa.engine.Connection, name: str) -> Any:
result = conn.execute(
sa.text("SELECT resource_type_id FROM resource_types WHERE resource_types.resource_type=:name"), name=name
)
return [x for (x,) in result.fetchall()][0]


def get_product_block_id_by_name(conn, name):
def get_product_block_id_by_name(conn: sa.engine.Connection, name: str) -> Any:
result = conn.execute(
sa.text("SELECT product_block_id FROM product_blocks WHERE product_blocks.name=:name"), name=name
)
return [x for (x,) in result.fetchall()][0]


def get_product_id_by_name(conn, name):
def get_product_id_by_name(conn: sa.engine.Connection, name: str) -> Any:
result = conn.execute(sa.text("SELECT product_id FROM products WHERE products.name=:name"), name=name)
return [x for (x,) in result.fetchall()][0]


def get_product_name_by_id(conn, id):
def get_product_name_by_id(conn: sa.engine.Connection, id: str) -> Any:
result = conn.execute(sa.text("SELECT name FROM products WHERE product_id=:id"), id=id)
return [x for (x,) in result.fetchall()][0]


def get_product_by_id(conn, id):
def get_product_by_id(conn: sa.engine.Connection, id: str) -> Any:
result = conn.execute(sa.text("SELECT * FROM products WHERE product_id=:id"), id=id)
return result.fetchall()[0]


def get_fixed_inputs_by_product_id(conn, id):
def get_fixed_inputs_by_product_id(conn: sa.engine.Connection, id: str) -> Any:
result = conn.execute(sa.text("SELECT * FROM fixed_inputs WHERE product_id=:id"), id=id)
return result.fetchall()


def insert_resource_type(conn, resource_type, description):
def insert_resource_type(conn: sa.engine.Connection, resource_type: str, description: str) -> Any:
"""Create a new resource types."""
conn.execute(
sa.text(
@@ -49,13 +50,13 @@ def insert_resource_type(conn, resource_type, description):
)


def get_all_active_products_and_ids(conn):
def get_all_active_products_and_ids(conn: sa.engine.Connection) -> Any:
"""Return a list, with dicts containing keys `product_id` and `name` of active products."""
result = conn.execute(sa.text("SELECT product_id, name FROM products WHERE status='active'"))
return [{"product_id": row[0], "name": row[1]} for row in result.fetchall()]


def create_missing_modify_note_workflows(conn):
def create_missing_modify_note_workflows(conn: sa.engine.Connection) -> None:
products = get_all_active_products_and_ids(conn)

workflow_id = conn.execute(sa.text("SELECT workflow_id FROM workflows WHERE name = 'modify_note'"))
@@ -78,7 +79,7 @@ def create_missing_modify_note_workflows(conn):
)


def create_workflows(conn, new):
def create_workflows(conn: sa.engine.Connection, new: dict) -> None:
"""
Create a new workflow.
@@ -121,7 +122,7 @@ def create_workflows(conn, new):
)


def create_fixed_inputs(conn, product_id, new):
def create_fixed_inputs(conn: sa.engine.Connection, product_id: str, new: dict) -> Dict[str, UUID]:
"""
Create fixed inputs for a product.
@@ -152,7 +153,7 @@ def create_fixed_inputs(conn, product_id, new):
return uuids


def create_products(conn, new):
def create_products(conn: sa.engine.Connection, new: dict) -> Dict[str, UUID]:
"""
Create new products.
@@ -215,7 +216,7 @@ def create_products(conn, new):
return uuids


def create_product_blocks(conn, new):
def create_product_blocks(conn: sa.engine.Connection, new: dict) -> Dict[str, UUID]:
"""
Create new product blocks.
@@ -259,7 +260,7 @@ def create_product_blocks(conn, new):
return uuids


def create_resource_types_for_product_blocks(conn, new):
def create_resource_types_for_product_blocks(conn: sa.engine.Connection, new: dict) -> None:
"""Create new resource types and link them to existing product_blocks by product_block name.
Note: If the resource type already exists it will still add the resource type to the provided Product Blocks.
@@ -280,7 +281,7 @@ def create_resource_types_for_product_blocks(conn, new):
"resource_type1": "Resource description"
}
}
>>> create_resource_types(conn, new_stuff)
"""
insert_resource_type = sa.text(
"""INSERT INTO resource_types (resource_type, description) VALUES (:resource_type, :description)
@@ -320,7 +321,7 @@ def create_resource_types_for_product_blocks(conn, new):
)


def delete_resource_types(conn, delete):
def delete_resource_types(conn: sa.engine.Connection, delete: List[str]) -> None:
"""Delete a resource type and it's occurrences in product blocks.
Args:
@@ -330,7 +331,7 @@ def delete_resource_types(conn, delete):
Usage:
```python
obsolete_stuff = ["name_1", "name_2"]
delete_resource_types(conn, obsolete_stuff)
```
"""
conn.execute(
@@ -345,7 +346,7 @@ def delete_resource_types(conn, delete):
conn.execute(sa.text("DELETE FROM resource_types WHERE resource_type in :obsolete"), obsolete=tuple(delete))


def create(conn, new):
def create(conn: sa.engine.Connection, new: dict) -> None:
"""
Call other functions in this file based on the schema.
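A minimal sketch of calling the now-typed helpers from an Alembic migration (the product and resource type names are hypothetical):

```python
# Assumed usage of the typed migration helpers; the names below are hypothetical.
from alembic import op

from orchestrator.migrations.helpers import get_product_id_by_name, insert_resource_type


def upgrade() -> None:
    conn = op.get_bind()
    insert_resource_type(conn, "example_resource_type", "Example resource type description")
    product_id = get_product_id_by_name(conn, "Example Product")  # feed into further helper calls
```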
@@ -14,7 +14,7 @@
depends_on = None


def upgrade():
def upgrade() -> None:
conn = op.get_bind()
conn.execute(
"""
@@ -0,0 +1,38 @@
"""Add task_validate_products.
Revision ID: 3c8b9185c221
Revises: 3323bcb934e7
Create Date: 2020-04-06 09:17:49.395612
"""
from uuid import uuid4

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "3c8b9185c221"
down_revision = "3323bcb934e7"
branch_labels = None
depends_on = None

workflows = [
{"name": "task_validate_products", "description": "Validate products", "workflow_id": uuid4(), "target": "SYSTEM"},
]


def upgrade() -> None:
conn = op.get_bind()
for workflow in workflows:
conn.execute(
sa.text(
"INSERT INTO workflows VALUES (:workflow_id, :name, :target, :description, now()) ON CONFLICT DO NOTHING"
),
**workflow,
)


def downgrade() -> None:
conn = op.get_bind()
for workflow in workflows:
conn.execute(sa.text("DELETE FROM workflows WHERE name = :name"), {"name": workflow["name"]})
2 changes: 2 additions & 0 deletions orchestrator/schedules/__init__.py
@@ -16,10 +16,12 @@
from orchestrator.schedules.resume_workflows import run_resume_workflows
from orchestrator.schedules.scheduling import SchedulingFunction
from orchestrator.schedules.task_vacuum import vacuum_tasks
from orchestrator.schedules.validate_products import validate_products
from orchestrator.schedules.validate_subscriptions import validate_subscriptions

ALL_SCHEDULERS: List[SchedulingFunction] = [
run_resume_workflows,
vacuum_tasks,
validate_subscriptions,
validate_products,
]
25 changes: 25 additions & 0 deletions orchestrator/schedules/validate_products.py
@@ -0,0 +1,25 @@
# Copyright 2019-2020 SURF.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from orchestrator.db.models import ProcessTable
from orchestrator.schedules.scheduling import scheduler
from orchestrator.services.processes import start_process


@scheduler(name="Validate Products and inactive subscriptions", time_unit="day", at="02:30")
def validate_products() -> None:
if not ProcessTable.query.filter(
ProcessTable.workflow == "validate_products", ProcessTable.last_status != "completed"
).count():
start_process("validate_products")
2 changes: 1 addition & 1 deletion orchestrator/services/processes.py
@@ -211,7 +211,7 @@ def _db_log_process_ex(pid: UUID, ex: Exception) -> None:
if p.last_status != ProcessStatus.WAITING:
p.last_status = ProcessStatus.FAILED
p.failed_reason = str(ex)
p.traceback = show_ex(ex) # type:ignore
p.traceback = show_ex(ex)
db.session.add(p)
try:
db.session.commit()