Skip to content

Commit

Permalink
Merge pull request #638 from dp247/032024-fixes
Browse files Browse the repository at this point in the history
March 2024 fixes
  • Loading branch information
dp247 authored Mar 24, 2024
2 parents 54bdb99 + 1e2ff3a commit ba3e246
Show file tree
Hide file tree
Showing 6 changed files with 61 additions and 22 deletions.
4 changes: 2 additions & 2 deletions uk_bin_collection/tests/input.json
Original file line number Diff line number Diff line change
Expand Up @@ -477,7 +477,7 @@
"wiki_name": "Mansfield District Council"
},
"MertonCouncil": {
"url": "https://myneighbourhood.merton.gov.uk/Wasteservices/WasteServices.aspx?ID=28186366",
"url": "https://myneighbourhood.merton.gov.uk/Wasteservices/WasteServices.aspx?ID=28298025",
"wiki_command_url_override": "https://myneighbourhood.merton.gov.uk/Wasteservices/WasteServices.aspx?ID=XXXXXXXX",
"wiki_name": "Merton Council",
"wiki_note": "Follow the instructions [here](https://myneighbourhood.merton.gov.uk/Wasteservices/WasteServicesSearch.aspx) until you get the \"Your recycling and rubbish collection days\" page then copy the URL and replace the URL in the command (the Address parameter is optional)."
Expand Down Expand Up @@ -1012,4 +1012,4 @@
"url": "https://waste-api.york.gov.uk/api/Collections/GetBinCollectionDataForUprn/",
"wiki_name": "York Council"
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ def parse_data(self, page: str, **kwargs) -> dict:

expand_postcode_box = wait.until(
EC.element_to_be_clickable(
(By.XPATH, "//a[@href='#when-and-where-should-i-put-my-bin-out']")
(By.ID, "when-and-where-should-i-put-my-bin-out")
)
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,13 @@ def parse_data(self, page: str, **kwargs) -> dict:
"https://www.gateshead.gov.uk/article/3150/Bin-collection-day-checker"
)

accept_button = WebDriverWait(driver, timeout=30).until(
EC.element_to_be_clickable(
(By.CLASS_NAME, "btn btn--contrast btn--complete")
)
)
accept_button.click()

# Wait for the postcode field to appear then populate it
inputElement_postcode = WebDriverWait(driver, 30).until(
EC.presence_of_element_located(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
# Huntingdon District Council Bins Data
from bs4 import BeautifulSoup
from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
from uk_bin_collection.uk_bin_collection.common import date_format
from datetime import datetime


# import the wonderful Beautiful Soup and the URL grabber
Expand All @@ -27,9 +29,8 @@ def parse_data(self, page, **kwargs) -> None:
data["bins"].append(
{
"type": bin_types[i],
"collectionDate": date.find("strong").get_text(strip=True),
"collectionDate": datetime.strptime(date.find("strong").get_text(strip=True), "%A %d %B %Y").strftime(date_format)
}
)
++i

return data
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,28 @@ def parse_data(self, page: str, **kwargs) -> dict:
for bin in h4.find_next_sibling("ul").find_all("li")
]

# Add the bins to the data dict
data["bins"] = [
{"type": bin_type, "collectionDate": collection_date}
for bin_type, collection_date in bins_with_dates
]
for bin_type, collection_date in bins_with_dates:
if '-' in collection_date:
date_part = collection_date.split(" - ")[1]
data["bins"].append(
{
"type": bin_type,
"collectionDate": datetime.strptime(date_part,"%d %b %Y").strftime(date_format)
}
)
elif len(collection_date.split(" ")) == 4:
data["bins"].append(
{
"type": bin_type,
"collectionDate": datetime.strptime(collection_date,"%A %d %b %Y").strftime(date_format)
}
)
else:
data["bins"].append(
{
"type": bin_type,
"collectionDate": datetime.strptime(collection_date,"%d %b %Y").strftime(date_format)
}
)

return data
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@ def parse_data(self, page: str, **kwargs) -> dict:
# Get form data
s = requests.session()
cookies = {
"has_js": "1",
"SSESS6ec6d5d2d471c0357053d5993a839bce": "nDJusnUyqrl2rk8LaiyDv3VaLUwSadRLGLPUpG2e2PY",
"ntc-cookie-policy": "1",
'ntc-cookie-policy': '1',
'SSESS6ec6d5d2d471c0357053d5993a839bce': 'qBdR7XhmSMd5_PDBIqG0It2R0Fq67igrejRY-WOcskE',
'has_js': '1',
}
headers = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
Expand All @@ -50,7 +50,7 @@ def parse_data(self, page: str, **kwargs) -> dict:
}
ajax_data = {
"postcode": user_postcode,
"form_build_id": "form-O65oY-ly-CYJlnQrhohK2uS1OP-4vXsS3CqYXD7-BeM",
"form_build_id": "form-BQ47tM0NKADE0s8toYkdSef3QBn6lDM-yBseqIOho80",
"form_id": "ntc_address_wizard",
"_triggering_element_name": "op",
"_triggering_element_value": "Find",
Expand All @@ -77,7 +77,7 @@ def parse_data(self, page: str, **kwargs) -> dict:
"backtotop",
],
"ajax_page_state[theme]": "ntc_bootstrap",
"ajax_page_state[theme_token]": "sAhZZMhTYHnHhQ1H7ruHKsid2GUGRRHlLQKP0RFrotA",
"ajax_page_state[theme_token]": "LN05JIzI6rocWDiBpDyVeywYveuS4jlxD_N0_hhp2Ko",
"ajax_page_state[css][0]": "1",
"ajax_page_state[css][modules/system/system.base.css]": "1",
"ajax_page_state[css][misc/ui/jquery.ui.core.css]": "1",
Expand Down Expand Up @@ -134,23 +134,30 @@ def parse_data(self, page: str, **kwargs) -> dict:
"ajax_page_state[jquery_version]": "1.10",
}
uprn_data = {
"house_number": user_uprn,
"house_number": '0000' + f'{user_uprn}',
"op": "Use",
"form_build_id": "form-O65oY-ly-CYJlnQrhohK2uS1OP-4vXsS3CqYXD7-BeM",
"form_build_id": "form-BQ47tM0NKADE0s8toYkdSef3QBn6lDM-yBseqIOho80",
"form_id": "ntc_address_wizard",
}
collections = []

response = s.post(
"https://my.northtyneside.gov.uk/system/ajax",
cookies=cookies,
# cookies=cookies,
headers=headers,
data=ajax_data,
verify=False,
)
response = s.post(
"https://my.northtyneside.gov.uk/category/81/bin-collection-dates",
cookies=cookies,
# cookies=cookies,
headers=headers,
data=uprn_data,
verify=False,
)
response = s.get(
"https://my.northtyneside.gov.uk/category/81/bin-collection-dates",
# cookies=cookies,
headers=headers,
data=uprn_data,
verify=False,
Expand All @@ -161,7 +168,11 @@ def parse_data(self, page: str, **kwargs) -> dict:
soup.prettify()
bin_text = soup.find("section", {"class": "block block-ntc-bins clearfix"})
regular_text = bin_text.select("p:nth-child(2) > strong")[0].text.strip()
special_text = bin_text.select("p:nth-child(4) > strong")[0].text.strip()
x = bin_text.select("p:nth-child(4) > strong")
if len(bin_text.select("p:nth-child(4) > strong")) == 1:
special_text = bin_text.select("p:nth-child(4) > strong")[0].text.strip()
else:
special_text = bin_text.select("p:nth-child(5) > strong")[0].text.strip()

# Since calendar only shows until end of March 2024, work out how many weeks that is
weeks_total = math.floor((datetime(2024, 4, 1) - datetime.now()).days / 7)
Expand All @@ -185,10 +196,12 @@ def parse_data(self, page: str, **kwargs) -> dict:
# Use the isoweek number to separate collections - at the time of writing 11th Jan is week 2, which
# is for the grey bin
if (item_as_date.date().isocalendar()[1] % 2) == 0:
collections.append(("Recycling bin (grey)", item_as_date))
else:
collections.append(("Regular bin (green)", item_as_date))

else:
collections.append(("Recycling bin (grey)", item_as_date))


# Add the special collection dates to the collection tuple
collections += [
("Special collection (bookable)", datetime.strptime(item, date_format))
Expand Down

0 comments on commit ba3e246

Please sign in to comment.