
Commit

fixing access
sanderegg committed Apr 21, 2021
1 parent 3a2c8fb commit 043ee3b
Showing 2 changed files with 32 additions and 13 deletions.
42 changes: 30 additions & 12 deletions packages/service-library/src/servicelib/rest_pagination_utils.py
@@ -1,7 +1,7 @@
 from math import ceil
 from typing import Any, Dict, List, Optional

-from pydantic import AnyHttpUrl, BaseModel, Field, PositiveInt, conint, validator
+from pydantic import AnyHttpUrl, BaseModel, Extra, Field, PositiveInt, conint, validator
 from yarl import URL

@@ -11,6 +11,9 @@ class PageMetaInfoLimitOffset(BaseModel):
     offset: conint(ge=0) = 0
     limit: PositiveInt

+    class Config:
+        extra = Extra.forbid
+

 class PageLinks(BaseModel):
     self: AnyHttpUrl
@@ -19,21 +22,36 @@ class PageLinks(BaseModel):
     next: Optional[AnyHttpUrl]
     last: AnyHttpUrl

+    class Config:
+        extra = Extra.forbid
+

 class PageResponseLimitOffset(BaseModel):
     meta: PageMetaInfoLimitOffset = Field(alias="_meta")
     links: PageLinks = Field(alias="_links")
     data: List[Any]

-    @validator("data")
+    class Config:
+        extra = Extra.forbid
+
+    @validator("data", always=True, pre=True)
     @classmethod
     def convert_none_to_empty_list(cls, v):
         if v is None:
             v = list()
         return v

+    @validator("data", always=True, pre=True)
     @classmethod
     def check_data_size_smaller_than_limit(cls, v, values):
-        limit = values["meta"]["limit"]
+        limit = values["meta"].limit
         if len(v) > limit:
             raise ValueError(f"container size must be smaller than limit [{limit}]")
         return v

     @classmethod
     def paginate_data(
         cls,
         data: List[Any],
         request_url: URL,
         total: int,
@@ -43,19 +61,19 @@ def paginate_data(
         last_page = ceil(total / limit) - 1

         return PageResponseLimitOffset(
-            data=data,
-            meta=PageMetaInfoLimitOffset(
+            _meta=PageMetaInfoLimitOffset(
                 total=total, count=len(data), limit=limit, offset=offset
             ),
-            links=PageLinks(
-                self=request_url,
-                first=request_url.update_query({"offset": 0}),
-                prev=request_url.update_query({"offset": offset - 1})
+            _links=PageLinks(
+                self=f"{request_url}",
+                first=f"{request_url.update_query({'offset': 0})}",
+                prev=f"{request_url.update_query({'offset': offset - 1})}"
                 if offset
                 else None,
-                next=request_url.update_query({"offset": offset + 1})
+                next=f"{request_url.update_query({'offset': offset + 1})}"
                 if offset < last_page
                 else None,
-                last=request_url.update_query({"offset": last_page}),
+                last=f"{request_url.update_query({'offset': last_page})}",
             ),
-        ).dict(by_alias=True)
+            data=data,
+        )
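
Two details in the diff above are easy to miss. The validator now reads values["meta"].limit instead of values["meta"]["limit"]: by the time the "data" validator runs, "meta" has already been parsed into a PageMetaInfoLimitOffset instance, so attribute access is the correct form (presumably the access the commit title refers to). And once Extra.forbid is set on models whose fields carry aliases, the constructor must use the alias keywords _meta= / _links=: with pydantic v1 defaults (allow_population_by_field_name not enabled), the old meta= / links= keywords would be treated as unknown fields and rejected. A minimal stand-alone sketch of the alias point; the Example model below is hypothetical, not code from this repository:

# Hypothetical model, for illustration only (pydantic v1).
from pydantic import BaseModel, Extra, Field, ValidationError

class Example(BaseModel):
    meta: int = Field(alias="_meta")

    class Config:
        extra = Extra.forbid

Example(_meta=1)  # accepted: the aliased field is populated by its alias
try:
    Example(meta=1)  # rejected: "meta" counts as an extra field under Extra.forbid
except ValidationError as err:
    print(err)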
@@ -191,10 +191,11 @@ async def set_all_project_states(
             limit=limit,
         )
         await set_all_project_states(projects, project_types)
-        return PageResponseLimitOffset(
+        return PageResponseLimitOffset.paginate_data(
             data=projects,
             request_url=request.url,
             total=total_number_projects,
             limit=limit,
             offset=offset,
         ).dict(by_alias=True)
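
For context, a minimal end-to-end sketch of how the updated helper is meant to be called, mirroring the handler change in the second file above. The URL and item values are invented for illustration, and the import path is assumed from the package layout packages/service-library/src/servicelib/:

# Illustrative values only; the call shape follows the diff above.
from yarl import URL

from servicelib.rest_pagination_utils import PageResponseLimitOffset

page = PageResponseLimitOffset.paginate_data(
    data=["item-0", "item-1"],
    request_url=URL("http://example.com/v0/projects?offset=0&limit=2"),
    total=5,
    limit=2,
    offset=0,
).dict(by_alias=True)

# by_alias=True serializes the aliased fields as "_meta" and "_links";
# the link values are plain strings because paginate_data now passes
# f"{...}" strings to AnyHttpUrl instead of yarl.URL objects.
assert page["_meta"] == {"total": 5, "count": 2, "limit": 2, "offset": 0}
assert page["_links"]["prev"] is None      # first page: no previous link
assert page["_links"]["next"] is not None  # total=5, limit=2, so a next page exists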

