Skip to content
This repository has been archived by the owner on May 9, 2023. It is now read-only.

Refactor, cleanup and doc #33

Merged
merged 8 commits into from
Mar 28, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,13 @@ notes/
*.json
*.md
__pycache__/
docs/
dist/
*.ipynb
.coverage
.python-version
subgrounds.egg-info/
*.log
scratch/

!README.md
404 changes: 207 additions & 197 deletions Pipfile.lock

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion examples/uniswapv2_firehose.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import dash
from dash import html

from subgrounds.plotly_wrappers import Bar, Figure
from subgrounds.dash_wrappers import DataTable, AutoUpdate
from subgrounds.subgrounds import Subgrounds

Expand Down
88 changes: 48 additions & 40 deletions subgrounds/client.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,14 @@
""" Small module containing low level functions related to sending
GraphQL http requests.
"""

from typing import Any
import requests
from functools import reduce

import logging
logger = logging.getLogger('subgrounds')


INTROSPECTION_QUERY: str = """
query IntrospectionQuery {
__schema {
Expand Down Expand Up @@ -97,6 +101,19 @@


def get_schema(url: str) -> dict[str, Any]:
""" Runs the introspection query on the GraphQL API served localed at
:attr:`url` and returns the result. In case of errors, an exception containing
the error message is thrown.

Args:
url (str): The url of the GraphQL API

Raises:
Exception: In case of GraphQL server error

Returns:
dict[str, Any]: The GraphQL API's schema in JSON
"""
resp = requests.post(
url,
json={"query": INTROSPECTION_QUERY},
Expand All @@ -109,51 +126,42 @@ def get_schema(url: str) -> dict[str, Any]:
raise Exception(resp["errors"]) from exn


def query(url: str, query_str: str, variables: dict[str, Any] = {}) -> dict[str, Any]:
logger.info(f'client.query: url = {url}, variables = {variables}\n{query_str}')
def query(
  url: str,
  query_str: str,
  variables: dict[str, Any] | None = None
) -> dict[str, Any]:
  """ Executes the GraphQL query :attr:`query_str` with variables
  :attr:`variables` against the API served at :attr:`url` and returns the
  response data. In case of errors, an exception containing the error message is
  thrown.

  Args:
    url (str): The URL of the GraphQL API
    query_str (str): The GraphQL query string
    variables (dict[str, Any], optional): Variables for the GraphQL query.
      Defaults to None (no variables sent with the request).

  Raises:
    Exception: GraphQL error

  Returns:
    dict[str, Any]: Response data
  """
  logger.info(
    f'client.query: url = {url}, variables = {variables}\n{query_str}'
  )
  # The mutable default `variables={}` was replaced with the None sentinel to
  # avoid Python's shared-mutable-default pitfall. Callers passing {} (or
  # nothing) get the same request payload as before: no 'variables' key.
  payload: dict[str, Any] = {'query': query_str}
  if variables:
    payload['variables'] = variables
  resp = requests.post(
    url,
    json=payload,
    headers={'Content-Type': 'application/json'}
  ).json()

  try:
    # A GraphQL server reports failure by omitting 'data' and supplying
    # 'errors' in the JSON body, hence the KeyError-driven branch (EAFP).
    return resp['data']
  except KeyError as exn:
    raise Exception(resp['errors']) from exn


def merge_data(d1: dict, d2: dict) -> dict[str, Any]:
  """ Combines two partial response values for the same query field.

  Two lists are concatenated, two dicts are collected into a two-element
  list, and any other combination resolves to the first value.

  Args:
    d1 (dict): First partial value
    d2 (dict): Second partial value

  Returns:
    dict[str, Any]: The combined value
  """
  if isinstance(d1, list) and isinstance(d2, list):
    return d1 + d2

  if isinstance(d1, dict) and isinstance(d2, dict):
    return [d1, d2]

  # Fallback: keep the first value, discard the second.
  return d1


def repeat(url: str, query_str: str, variables: list[dict[str, Any]]) -> dict[str, Any]:
  """ Executes the query :attr:`query_str` once per variable assignment in
  :attr:`variables` against the API at :attr:`url` and folds all responses
  into a single response dict.

  Top-level lists are concatenated; top-level dicts are merged key-by-key
  via :func:`merge_data` (keys are taken from the first operand); any other
  combination keeps the first value.

  Args:
    url (str): The URL of the GraphQL API
    query_str (str): The GraphQL query string
    variables (list[dict[str, Any]]): One variables dict per query execution

  Returns:
    dict[str, Any]: The merged response data
  """
  def combine(acc, nxt):
    if isinstance(acc, list) and isinstance(nxt, list):
      return acc + nxt
    if isinstance(acc, dict) and isinstance(nxt, dict):
      return {key: merge_data(acc[key], nxt[key]) for key in acc}
    return acc

  responses = [query(url, query_str, assignment) for assignment in variables]
  return reduce(combine, responses)


def paginate(url: str, query_str: str, n: int, page_size: int = 200) -> dict[str, Any]:
  """ Fetches :attr:`n` entities from the API at :attr:`url` by splitting the
  query :attr:`query_str` into pages of at most :attr:`page_size` entities,
  driven by the GraphQL ``first``/``skip`` variables, and merging the pages
  via :func:`repeat`.

  Args:
    url (str): The URL of the GraphQL API
    query_str (str): The GraphQL query string (must use the ``first`` and
      ``skip`` variables)
    n (int): Total number of entities to fetch
    page_size (int, optional): Maximum entities per request. Defaults to 200.

  Returns:
    dict[str, Any]: The merged response data
  """
  # BUG FIX: the page count was previously `n % page_size + 1`, which is
  # wrong whenever n >= page_size (e.g. n=1000, page_size=200 produced a
  # single page instead of 5). Use ceiling division instead, with a minimum
  # of one page so the query is always executed at least once.
  num_pages = max(-(-n // page_size), 1)
  page_vars = [
    # Clamp the final page so no more than n entities are requested overall.
    {'first': min(page_size, max(n - i * page_size, 0)), 'skip': i * page_size}
    for i in range(num_pages)
  ]
  return repeat(url, query_str, variables=page_vars)
Loading