diff --git a/uk_bin_collection/tests/input.json b/uk_bin_collection/tests/input.json
index de02f5544b..24617db29b 100644
--- a/uk_bin_collection/tests/input.json
+++ b/uk_bin_collection/tests/input.json
@@ -431,6 +431,13 @@
         "url": "https://map.erewash.gov.uk/isharelive.web/myerewash.aspx",
         "wiki_name": "Erewash Borough Council"
     },
+    "FalkirkCouncil": {
+        "url": "https://www.falkirk.gov.uk",
+        "wiki_command_url_override": "https://www.falkirk.gov.uk",
+        "uprn": "136065818",
+        "wiki_name": "Falkirk Council",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
     "FarehamBoroughCouncil": {
         "postcode": "PO14 4NR",
         "skip_get_url": true,
@@ -618,6 +625,13 @@
         "url": "https://www.ealing.gov.uk/site/custom_scripts/WasteCollectionWS/home/FindCollection",
         "wiki_name": "London Borough Ealing"
     },
+    "LondonBoroughHarrow": {
+        "url": "https://www.harrow.gov.uk",
+        "wiki_command_url_override": "https://www.harrow.gov.uk",
+        "uprn": "100021298754",
+        "wiki_name": "London Borough Harrow",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
     "LondonBoroughHounslow": {
         "skip_get_url": true,
         "uprn": "100021577765",
@@ -741,6 +755,13 @@
         "url": "https://www.newport.gov.uk/",
         "wiki_name": "Newport City Council"
     },
+    "NorthAyrshireCouncil": {
+        "url": "https://www.north-ayrshire.gov.uk/",
+        "wiki_command_url_override": "https://www.north-ayrshire.gov.uk/",
+        "uprn": "126045552",
+        "wiki_name": "North Ayrshire Council",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
     "NorthEastDerbyshireDistrictCouncil": {
         "postcode": "S42 5RB",
         "skip_get_url": true,
diff --git a/uk_bin_collection/uk_bin_collection/councils/FalkirkCouncil.py b/uk_bin_collection/uk_bin_collection/councils/FalkirkCouncil.py
new file mode 100644
index 0000000000..977d7ae2b9
--- /dev/null
+++ b/uk_bin_collection/uk_bin_collection/councils/FalkirkCouncil.py
@@ -0,0 +1,49 @@
+import requests
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        """Fetch and parse bin collection dates for a Falkirk UPRN."""
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        URI = f"https://recycling.falkirk.gov.uk/api/collections/{user_uprn}"
+
+        # Make the GET request (timeout so a dead endpoint cannot hang the run)
+        response = requests.get(URI, timeout=30)
+
+        # Parse the JSON response
+        bin_collection = response.json()
+
+        # Loop through each collection in bin_collection
+        for collection in bin_collection["collections"]:
+            bin_type = collection["type"]
+            collection_dates = collection["dates"]
+
+            # Loop through the dates for each collection type
+            for date in collection_dates:
+                dict_data = {
+                    "type": bin_type,
+                    "collectionDate": datetime.strptime(
+                        date,
+                        "%Y-%m-%d",
+                    ).strftime("%d/%m/%Y"),
+                }
+                bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+
+        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/LondonBoroughHarrow.py b/uk_bin_collection/uk_bin_collection/councils/LondonBoroughHarrow.py
new file mode 100644
index 0000000000..8009f96578
--- /dev/null
+++ b/uk_bin_collection/uk_bin_collection/councils/LondonBoroughHarrow.py
@@ -0,0 +1,43 @@
+import requests
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        """Fetch and parse bin collection dates for a Harrow UPRN."""
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        # Construct the URI
+        URI = f"https://www.harrow.gov.uk/ajax/bins?u={user_uprn}&r=12345"
+
+        # Make the GET request (timeout so a dead endpoint cannot hang the run)
+        response = requests.get(URI, timeout=30)
+
+        # Parse the JSON response
+        bin_collection = response.json()
+
+        # Loop through all collections and extract bin type and collection date
+        for collection in bin_collection["results"]["collections"]["all"]:
+
+            CollectTime = (collection["eventTime"]).split("T")[0]
+
+            dict_data = {
+                "type": collection["binType"],
+                "collectionDate": datetime.strptime(CollectTime, "%Y-%m-%d").strftime(
+                    "%d/%m/%Y"
+                ),
+            }
+            bindata["bins"].append(dict_data)
+
+        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/NorthAyrshireCouncil.py b/uk_bin_collection/uk_bin_collection/councils/NorthAyrshireCouncil.py
new file mode 100644
index 0000000000..f30c31b6f3
--- /dev/null
+++ b/uk_bin_collection/uk_bin_collection/councils/NorthAyrshireCouncil.py
@@ -0,0 +1,37 @@
+import requests
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        """Fetch and parse bin collection dates for a North Ayrshire UPRN."""
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        URI = f"https://www.maps.north-ayrshire.gov.uk/arcgis/rest/services/AGOL/YourLocationLive/MapServer/8/query?f=json&outFields=*&returnDistinctValues=true&returnGeometry=false&spatialRel=esriSpatialRelIntersects&where=UPRN%20%3D%20%27{user_uprn}%27"
+
+        # Make the GET request (timeout so a dead endpoint cannot hang the run)
+        response = requests.get(URI, timeout=30)
+
+        # Parse the JSON response
+        result_json = response.json()
+
+        # Extract bin collection dates; each colour has its own *_DATE_TEXT field
+        attributes = result_json["features"][0]["attributes"]
+        for colour in ("Blue", "Grey", "Purple", "Brown"):
+            date_text = attributes.get(f"{colour.upper()}_DATE_TEXT")
+            if date_text:
+                dict_data = {"type": f"{colour} Bin", "collectionDate": date_text}
+                bindata["bins"].append(dict_data)
+
+        return bindata