Commit
Merge pull request #192 from hackforla/101-BCK-Tests
Added linting and correctly implemented the backend GitHub Action
ryanmswan authored Jan 20, 2020
2 parents 9caa697 + cc8ebb7 commit a0df5d6
Showing 12 changed files with 335 additions and 244 deletions.
2 changes: 2 additions & 0 deletions .flake8
@@ -0,0 +1,2 @@
+[flake8]
+exclude = node_modules/*, dataAnalysis/*
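Because flake8 discovers a `.flake8` file in the working directory on its own, these exclusions apply to local runs as well as CI. A minimal sketch of invoking the same check programmatically through flake8's legacy Python API (`flake8.api.legacy`, which ships with the pinned flake8 3.7.9; the target path is illustrative):

# Sketch: run flake8 under the repository's own configuration.
# node_modules/ and dataAnalysis/ stay excluded, exactly as in CI.
from flake8.api import legacy as flake8

style_guide = flake8.get_style_guide()        # picks up .flake8
report = style_guide.check_files(["server"])  # illustrative path
print("violations:", report.total_errors)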
29 changes: 17 additions & 12 deletions .github/workflows/Continuous_Integration_Backend.yml
@@ -6,18 +6,23 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        node-version: [10.x, 11.x]
+        python-version: [3.6, 3.7, 3.8]
     steps:
     - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v1
       with:
-        node-version: ${{ matrix.node-version }}
-    - name: Install Packages
-      run: npm install
-    - name: Build project
-      run: npm run build
-    - name: Run Tests
-      run: export CI=true && npm run test -- --coverage
-      env:
-        MAPBOX_TOKEN: ${{ secrets.MAPBOX_TOKEN }}
+        python-version: ${{ matrix.python-version }}
+    - name: Install dependencies
+      run: |
+        sudo apt install libpq-dev python3-dev
+        python -m pip install --upgrade pip
+        pip install -r server/requirements.txt
+    - name: Lint with flake8
+      run: |
+        # stop the build if there are Python syntax errors or undefined names
+        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+    - name: Test with pytest
+      run: pytest server
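The final step, `pytest server`, collects any `test_*.py` modules under `server/`. A hypothetical example of the kind of test that step would pick up — not part of this commit, and assuming the Sanic pin (collapsed in requirements.txt below) is a 19.x release exposing the synchronous `app.test_client`:

# server/test_routes.py — hypothetical module, shown for illustration.
from sanic import Sanic
from sanic.response import json

test_app = Sanic("route_test_app")


@test_app.route('/ping')
async def ping(request):
    return json({'response': 'pong'})


def test_ping_returns_200():
    # test_client starts the app, issues the request, then shuts down.
    request, response = test_app.test_client.get('/ping')
    assert response.status == 200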
15 changes: 15 additions & 0 deletions server/requirements.txt
@@ -1,21 +1,34 @@
 aiofiles==0.4.0
+attrs==19.3.0
 certifi==2019.9.11
 chardet==3.0.4
 Click==7.0
+entrypoints==0.3
+flake8==3.7.9
 h11==0.8.1
 h2==3.1.1
 hpack==3.0.0
 httpcore==0.3.0
 httptools==0.0.13
 hyperframe==5.2.0
 idna==2.8
+importlib-metadata==1.4.0
 itsdangerous==1.1.0
 Jinja2==2.10.3
 MarkupSafe==1.1.1
+mccabe==0.6.1
+more-itertools==8.1.0
 multidict==4.5.2
 numpy==1.18.0
+packaging==20.0
 pandas==0.25.3
+pluggy==0.13.1
 psycopg2==2.8.4
+py==1.8.1
+pycodestyle==2.5.0
+pyflakes==2.1.1
+pyparsing==2.4.6
+pytest==5.3.3
 python-dateutil==2.8.1
 pytz==2019.3
 requests==2.22.0
@@ -28,5 +41,7 @@ SQLAlchemy==1.3.11
 ujson==1.35
 urllib3==1.25.7
 uvloop==0.14.0
+wcwidth==0.1.8
 websockets==8.1
 Werkzeug==0.16.0
+zipp==1.0.0
49 changes: 19 additions & 30 deletions server/src/app.py
@@ -1,27 +1,26 @@
 import os
 from sanic import Sanic
-from sanic import response
 from sanic.response import json
 from services.time_to_close import time_to_close
 from services.frequency import frequency
 from services.ingress_service import ingress_service
-from services.reporting import reports
 from configparser import ConfigParser
 from json import loads
 from threading import Timer
 from multiprocessing import cpu_count


 app = Sanic(__name__)


 def configure_app():
     # Settings initialization
     config = ConfigParser()
-    settings_file = os.path.join(os.getcwd(),'settings.cfg')
+    settings_file = os.path.join(os.getcwd(), 'settings.cfg')
     config.read(settings_file)
     app.config['Settings'] = config
     if os.environ.get('DB_CONNECTION_STRING', None):
-        app.config['Settings']['Database']['DB_CONNECTION_STRING'] = os.environ.get('DB_CONNECTION_STRING')
+        app.config['Settings']['Database']['DB_CONNECTION_STRING'] =\
+            os.environ.get('DB_CONNECTION_STRING')
     app.config["STATIC_DIR"] = os.path.join(os.getcwd(), "static")
     os.makedirs(os.path.join(app.config["STATIC_DIR"], "temp"), exist_ok=True)

@@ -37,7 +36,10 @@ async def timetoclose(request):

     # data = loads(ttc_worker.ttc_view_data())
     # dates = loads(ttc_worker.ttc_view_dates())
-    summary = ttc_worker.ttc_summary(allData=True, serviced=False, allRequests=False, requestType="'Bulky Items'")
+    summary = ttc_worker.ttc_summary(allData=True,
+                                     serviced=False,
+                                     allRequests=False,
+                                     requestType="'Bulky Items'")

     # return json(data_arr)
     # return json(dates)
@@ -47,15 +49,18 @@ async def timetoclose(request):
 @app.route('/requestfrequency')
 async def requestfrequency(request):
     freq_worker = frequency(app.config['Settings'])

-    data = freq_worker.freq_view_data(service=True, councils=[], aggregate=True)
-
+    data = freq_worker.freq_view_data(service=True,
+                                      councils=[],
+                                      aggregate=True)
+
     return json(data)


 @app.route('/sample-data')
 async def sample_route(request):
-    sample_dataset = {'cool_key':['value1', 'value2'], app.config['REDACTED']:app.config['REDACTED']}
+    sample_dataset = {'cool_key': ['value1', 'value2'],
+                      app.config['REDACTED']: app.config['REDACTED']}
     return json(sample_dataset)
@@ -66,7 +71,7 @@ async def ingest(request):
{"sets": ["YearMappingKey","YearMappingKey","YearMappingKey"]}'''

ingress_worker = ingress_service(config=app.config['Settings'])
return_data = {'response':'ingest ok'}
return_data = {'response': 'ingest ok'}

for dataSet in request.json.get("sets", None):
target_data = app.config["Settings"]["YearMapping"][dataSet]
@@ -88,24 +93,6 @@ async def delete(request):
     return_data = ingress_worker.delete()
     return json(return_data)
-
-@app.route('/biggestoffender')
-async def biggestOffender(request):
-    startDate = request.json.get("startDate", None)
-    requestType = request.json.get("requestType", None)
-    councilName = request.json.get("councilName", None)
-
-    if not (startDate and requestType and councilName):
-        return json({"Error": "Missing arguments"})
-
-    offenderWorker = reports(app.config["Settings"])
-    csvFile = offenderWorker.biggestOffenderCSV(startDate, requestType, councilName)
-    # TODO: Put response csv into temp area
-    fileOutput = os.path.join(app.config["STATIC_DIR"], "temp/csvfile.csv")
-    f = open(fileOutput,'w')
-    f.write(csvFile)
-    f.close()
-    return await response.file(fileOutput)


 @app.route('/test_multiple_workers')
 async def test_multiple_workers(request):
@@ -115,5 +102,7 @@ async def test_multiple_workers(request):

 if __name__ == '__main__':
     configure_app()
-    app.run(host=app.config['Settings']['Server']['HOST'], port=int(app.config['Settings']['Server']['PORT']),
-            workers=cpu_count()//2, debug=app.config['Settings']['Server']['DEBUG'])
+    app.run(host=app.config['Settings']['Server']['HOST'],
+            port=int(app.config['Settings']['Server']['PORT']),
+            workers=cpu_count()//2,
+            debug=app.config['Settings']['Server']['DEBUG'])
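One behavioral detail worth noting in the reshaped configure_app: settings.cfg is read first, and a DB_CONNECTION_STRING environment variable, when set, overwrites the [Database] entry. A usage sketch of that precedence — the import path and DSN are assumptions, and a settings.cfg with a [Database] section must exist in the working directory:

# Sketch: the environment variable wins over settings.cfg.
import os

os.environ['DB_CONNECTION_STRING'] = 'postgresql://localhost:5432/la311'  # illustrative DSN

from src.app import app, configure_app  # assumed import path

configure_app()
assert app.config['Settings']['Database']['DB_CONNECTION_STRING'] \
    == 'postgresql://localhost:5432/la311'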
72 changes: 54 additions & 18 deletions server/src/services/frequency.py
@@ -1,31 +1,41 @@
 from configparser import ConfigParser
 import sqlalchemy as db
 import pandas as pd
 from datetime import datetime as dt
 import numpy as np
 import json


 class frequency(object):
     def __init__(self, config=None, tableName="ingest_staging_table"):
         self.config = config
-        self.dbString = None if not self.config else self.config['Database']['DB_CONNECTION_STRING']
+        self.dbString = None if not self.config \
+            else self.config['Database']['DB_CONNECTION_STRING']

         self.table = tableName
         self.data = None
         pass

     def freq_view_all(self, serviced=False, aggregate=True):
         """
         Returns the request type and associated dates for all data
-        Sorted by request type, followed by created date, service date (if applicable), and then closed date
+        Sorted by request type, followed by created date,
+        service date (if applicable), and then closed date
         """
         # Todo: implement condition for serviced date
         engine = db.create_engine(self.dbString)

         if serviced:
-            query = "SELECT requesttype, createddate, closeddate, servicedate FROM %s" % self.table
+            query = "SELECT \
+                requesttype,\
+                createddate,\
+                closeddate,\
+                servicedate\
+                FROM %s" % self.table
         else:
-            query = "SELECT requesttype, createddate, closeddate FROM %s" % self.table
+            query = "SELECT \
+                requesttype,\
+                createddate,\
+                closeddate\
+                FROM %s" % self.table

         df = pd.read_sql_query(query, con=engine)

@@ -39,33 +49,55 @@ def freq_view_all(self, serviced=False, aggregate=True):

     def freq_aggregate(self, df):
         request_counts = df['requesttype'].value_counts()

         return request_counts.to_json()

-    def freq_view_data(self, service=False, aggregate=True, councils=[], startdate="", enddate=""):
+    def freq_view_data(self,
+                       service=False,
+                       aggregate=True,
+                       councils=[],
+                       startdate="",
+                       enddate=""):
         """
-        Returns the request type, neighborhood council, created and closed dates for all data
-        Sorted by request type, followed by neighborhood council #, then created date, and then closed date
+        Returns the request type, neighborhood council, created and
+        closed dates for all data sorted by request type, followed by
+        neighborhood council #, then created date, and then closed date
         Returns serviced date as well if service is set to True
-        Returns data for all councils if councils=[], otherwise returns data for only the array of neighborhood council #s
+        Returns data for all councils if councils=[], otherwise returns data
+        for only the array of neighborhood council #s
         Returns summary data as well if aggregate is set to True
-        Returns only entries created between startdate and enddate if values are set for those parameters
-        Format of startdate and enddate should be a string in the form 2019-12-01 23:02:05
+        Returns only entries created between startdate and enddate if values
+        are set for those parameters
+        Format of startdate and enddate should be a string in
+        the form 2019-12-01 23:02:05
         """
         engine = db.create_engine(self.dbString)

         if service:
-            df = pd.read_sql_query("SELECT requesttype, createddate, closeddate, servicedate, nc, ncname FROM %s" % self.table, con=engine)
+            df = pd.read_sql_query("SELECT\
+                requesttype,\
+                createddate,\
+                closeddate,\
+                servicedate,\
+                nc,\
+                ncname\
+                FROM %s" % self.table, con=engine)
             df['servicedate'] = pd.to_datetime(df['servicedate'])

         else:
-            df = pd.read_sql_query("SELECT requesttype, createddate, closeddate, nc, ncname FROM %s" % self.table, con=engine)
+            df = pd.read_sql_query("SELECT\
+                requesttype,\
+                createddate,\
+                closeddate,\
+                nc,\
+                ncname\
+                FROM %s" % self.table, con=engine)

         df['closeddate'] = pd.to_datetime(df['closeddate'])

         if councils != []:
             df = df[df.nc.isin(councils)]

         if startdate != "":
             start = pd.to_datetime(startdate)
             df = df[(df['createddate'] >= start)]
@@ -74,7 +106,10 @@ def freq_view_data(self, service=False, aggregate=True, councils=[], startdate="
             end = pd.to_datetime(enddate)
             df = df[df['createddate'] <= end]

-        df = df.sort_values(by=['requesttype', 'nc', 'createddate', 'closeddate'])
+        df = df.sort_values(by=['requesttype',
+                                'nc',
+                                'createddate',
+                                'closeddate'])
         df_json = json.loads(df.to_json(orient="records"))

         if aggregate:
@@ -86,7 +121,8 @@ def freq_view_data(self, service=False, aggregate=True, councils=[], startdate="

         return df_json

-#Todo: filter by NC at the sql request stage instead of afterwards
+# Todo: filter by NC at the sql request stage instead of afterwards
+

 if __name__ == "__main__":
     freq = frequency()
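The reflowed signature and docstring spell out the full filter surface of freq_view_data. A hypothetical call matching that contract — it needs a reachable database, so this is a sketch rather than something the CI tests exercise, and the council numbers are made up:

# Sketch: December 2019 requests for two example councils, with
# serviced dates included and aggregate counts appended.
from configparser import ConfigParser
from services.frequency import frequency

config = ConfigParser()
config.read('settings.cfg')  # must supply [Database] DB_CONNECTION_STRING

worker = frequency(config)
data = worker.freq_view_data(service=True,
                             aggregate=True,
                             councils=[6, 52],  # illustrative NC numbers
                             startdate="2019-12-01 00:00:00",
                             enddate="2019-12-31 23:59:59")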
13 changes: 7 additions & 6 deletions server/src/services/ingress_service.py
@@ -1,29 +1,30 @@
 from .sqlIngest import DataHandler

+
 class ingress_service(object):
     def __init__(self, config=None):
         self.config = config
-
+
     async def ingest(self, from_dataset=None):
         loader = DataHandler(config=self.config)
         loader.loadData(fileName=from_dataset)
         loader.cleanData()
         loader.ingestData()
-        return {'response':'ingest ok'}
+        return {'response': 'ingest ok'}

     def update(self):
-        return {'response':'update ok'}
+        return {'response': 'update ok'}

     def delete(self):
-        return {'response':'delete ok'}
+        return {'response': 'delete ok'}

     def hello_world(self):
-        return {'response':'hello from frequency service'}
+        return {'response': 'hello from frequency service'}


 if __name__ == "__main__":
     from configparser import ConfigParser
     config = ConfigParser()
     config.read('../settings.cfg')
-    worker = ingress_service(config = config)
+    worker = ingress_service(config=config)
     worker.ingest()
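One caveat in the __main__ block above: ingest is an async def, so the bare worker.ingest() call only creates a coroutine and never runs it. A sketch of driving it to completion from synchronous code — using get_event_loop rather than asyncio.run, since the CI matrix still includes Python 3.6; the dataset key is illustrative:

# Sketch: actually execute the async ingest() to completion.
import asyncio
from configparser import ConfigParser
from services.ingress_service import ingress_service  # assumed import path

config = ConfigParser()
config.read('settings.cfg')

worker = ingress_service(config=config)
loop = asyncio.get_event_loop()
result = loop.run_until_complete(worker.ingest(from_dataset='2019_mini'))  # illustrative key
print(result)  # {'response': 'ingest ok'}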
32 changes: 0 additions & 32 deletions server/src/services/reporting.py

This file was deleted.

