Skip to content

Commit

Permalink
change tabs to spaces
Browse files Browse the repository at this point in the history
  • Loading branch information
John Demetros committed Sep 15, 2019
1 parent c8b0975 commit 703badb
Show file tree
Hide file tree
Showing 2 changed files with 147 additions and 147 deletions.
160 changes: 80 additions & 80 deletions get_coordinates.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,97 +5,97 @@

def readCSV(path='LotBuildingNames.csv'):
    """Read lot and building names from the CSV written by write2CSV.

    Args:
        path: CSV file with columns lot name, lot type, building name.

    Returns:
        tuple: (list of (lot_name, lot_type) pairs, list of building names).
    """
    lot_names = []
    lot_types = []
    building_names = []

    # newline='' is the documented way to open files for the csv module.
    with open(path, newline='') as csvfile:
        # Named 'reader' so it no longer shadows this function's own name.
        reader = csv.reader(csvfile, delimiter=',')
        for row in reader:
            # Columns can be padded with empty strings (zip_longest in the
            # writer); guard length and skip empty cells.
            if len(row) > 0 and row[0]:
                lot_names.append(row[0].strip())
            if len(row) > 1 and row[1]:
                lot_types.append(row[1].strip())
            if len(row) > 2 and row[2]:
                building_names.append(row[2].strip())

    return list(zip(lot_names, lot_types)), building_names

def getLotIDs(lot_names, osm_path='NorthCampus.osm'):
    """Match parking-lot names against named ways in an OSM extract.

    Args:
        lot_names: iterable of (name, type) pairs.
        osm_path: path to the OSM XML file to search.

    Returns:
        list of (name, type, way_id) triples for each way whose 'name'
        tag contains both the lot name and the word 'Lot'.
    """
    lot_ids = []

    tree = ET.parse(osm_path)
    root = tree.getroot()
    for way in root.findall('way'):
        for tag in way.findall('tag'):
            if tag.attrib['k'] == 'name':
                for name, t in lot_names:
                    if name in tag.attrib['v'] and 'Lot' in tag.attrib['v']:
                        lot_ids.append((name, t, way.get('id')))
                        # Stop at the first matching lot name for this way.
                        break
    return lot_ids

def getBuildingIDs(building_names, osm_path='NorthCampus.osm'):
    """Match building names against named ways in an OSM extract.

    Args:
        building_names: iterable of building name strings.
        osm_path: path to the OSM XML file to search.

    Returns:
        list of (name, way_id) pairs for each way whose 'name' tag
        contains one of the building names.
    """
    building_ids = []

    tree = ET.parse(osm_path)
    root = tree.getroot()
    for way in root.findall('way'):
        for tag in way.findall('tag'):
            if tag.attrib['k'] == 'name':
                for name in building_names:
                    if name in tag.attrib['v']:
                        building_ids.append((name, way.get('id')))
                        # Stop at the first matching building for this way.
                        break
    return building_ids

def getLotCoords(lot_ids):
    """Fetch boundary coordinates for each lot from the Overpass API.

    Args:
        lot_ids: iterable of (name, type, way_id) triples, as returned
            by getLotIDs.

    Returns:
        list of dicts with lot metadata and parallel boundary_lat /
        boundary_long coordinate lists. capacity / available_spots /
        available_times are placeholders filled in elsewhere.
    """
    lot_coords = []
    api = overpy.Overpass()

    # 'way_id' instead of 'id' avoids shadowing the builtin.
    for lot, lot_type, way_id in lot_ids:
        result = api.query('way(' + way_id + ');out geom;')
        nodes = result.ways[0].get_nodes(resolve_missing=True)
        lat = [float(n.lat) for n in nodes]
        lon = [float(n.lon) for n in nodes]
        print(lot + " Lot done")
        lot_coords.append({
            "name": lot,
            "type": lot_type,
            "boundary_lat": lat,
            "boundary_long": lon,
            "capacity": 0,
            "available_spots": 0,
            "available_times": ["0:00"],
        })
    return lot_coords

def getBuildingCoords(building_ids):
    """Fetch boundary coordinates for each building from the Overpass API.

    Args:
        building_ids: iterable of (name, way_id) pairs, as returned by
            getBuildingIDs.

    Returns:
        list of dicts with the building name, placeholder entrance
        coordinates, and parallel boundary_lat / boundary_long lists.
    """
    building_coords = []
    api = overpy.Overpass()

    # 'way_id' instead of 'id' avoids shadowing the builtin.
    for building, way_id in building_ids:
        result = api.query('way(' + way_id + ');out geom;')
        nodes = result.ways[0].get_nodes(resolve_missing=True)
        lat = [float(n.lat) for n in nodes]
        lon = [float(n.lon) for n in nodes]
        print(building + " done")
        building_coords.append({
            "name": building,
            "entrance_lat": [0.0],
            "entrance_lon": [0.0],
            "boundary_lat": lat,
            "boundary_long": lon,
        })

    return building_coords

def write2Json(lot_data, building_data, path='LotData.json'):
    """Serialize lot and building data to a JSON file.

    Args:
        lot_data: list of lot dicts (see getLotCoords).
        building_data: list of building dicts (see getBuildingCoords).
        path: output file path.
    """
    data = {"lots": lot_data, "buildings": building_data}
    with open(path, 'w') as f:
        json.dump(data, f, indent=2)

# Load the (name, type) lot pairs and building names from the CSV
# produced by get_lot_building_names.py.
lot_names, building_names = readCSV()

Expand All @@ -116,6 +116,6 @@ def write2Json(lot_data, building_data):
#w = result.ways[0].get_nodes(True)

#for node in w:
# print(float(node.lat))
# print(float(node.lon))
# print(float(node.lat))
# print(float(node.lon))

134 changes: 67 additions & 67 deletions get_lot_building_names.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,88 +6,88 @@

def getLotNames():
    """Scrape UB's parking page for lot names and permit types.

    Returns:
        tuple: (list of lot name strings, parallel list of type codes:
        0 = student, 1 = faculty/staff, 2 = both).
    """
    lot_names = []
    types = []

    url = 'http://www.buffalo.edu/parking/parking-places.html'
    response = requests.get(url)
    soup = BeautifulSoup(response.text, "html.parser")

    for row in soup.findAll('tr')[1:]:

        # Hoisted: the original re-queried row.findAll('p') up to five
        # times per row.
        cells = row.findAll('p')
        if len(cells) == 0:
            break

        tempType = 0
        # 0 for student, 1 for faculty/staff, 2 for both
        if 'X' in cells[1].get_text() and 'X' in cells[2].get_text():
            tempType = 2
        elif 'X' in cells[1].get_text():
            tempType = 1

        # Drop any parenthesized suffix from the lot name cell.
        lot_str = cells[0].get_text().split('(')[0]
        lot_name = ""

        # weird fix for now: rows that name several lots at once
        # (comma and/or ' & ' separated) are expanded into one entry
        # per lot, all sharing the common name prefix and type.
        if ' & ' in lot_str and ',' in lot_str:
            lot_name = ''.join(lot_str.split(',')[0][0:-1])
            lot_names.append(lot_name + lot_str.split(',')[0][-1])
            types.append(tempType)
            lot_names.append(lot_name + lot_str.split(',')[1].split()[0])
            types.append(tempType)
            lot_names.append(lot_name + lot_str.split(' & ')[1])
            types.append(tempType)
        elif ' & ' in lot_str:
            lot_name = ''.join(lot_str.split(' & ')[0][0:-1])
            lot_names.append(lot_name + lot_str.split(' & ')[0][-1])
            types.append(tempType)
            lot_names.append(lot_name + lot_str.split(' & ')[1])
            types.append(tempType)
        else:
            lot_names.append(lot_str)
            types.append(tempType)
    return lot_names, types

def getBuildingNames():
    """Return building names scraped from UB's building-profiles page."""
    url = 'http://www.buffalo.edu/administrative-services/managing-facilities/planning-designing-and-construction/building-profiles.html'
    page = requests.get(url)
    soup = BeautifulSoup(page.text, "html.parser")

    # Walk the page layout one container at a time instead of a single
    # nine-deep chained expression.
    node = soup.find("div", {"id": "page"})
    node = node.find("div", {"id": "columns"})
    node = node.find("div", {"id": "center"})
    node = node.find("div", {"class": "par parsys"})
    node = node.find("div", {"class": "tabs section"})
    node = node.find(class_="tabs-component")
    node = node.find(class_="tabs-component-box-wrapper")
    node = node.find(class_="par parsys")
    titles = node.findAll(class_="teaser-title")

    return [title.text for title in titles]


def write2CSV(lot_names, types, building_names, path='LotBuildingNames.csv'):
    """Write the three parallel lists as columns of a CSV file.

    Columns may have different lengths; zip_longest pads the shorter
    ones with None, which csv renders as an empty cell.

    Args:
        lot_names: list of lot name strings.
        types: list of type codes parallel to lot_names.
        building_names: list of building name strings.
        path: output CSV path.
    """
    fullData = [lot_names, types, building_names]
    # newline='' prevents csv.writer from emitting blank rows on Windows.
    with open(path, 'w', newline='') as f:
        writer = csv.writer(f)
        for vals in zip_longest(*fullData):
            writer.writerow(vals)

# Scrape the lot and building names from the UB website and persist
# them to CSV for get_coordinates.py to consume.
lot_names, types = getLotNames()
building_names = getBuildingNames()
write2CSV(lot_names, types, building_names)

'''
for i in range(len(lot_names)):
print("Name: " + lot_names[i])
print("Type: " + str(types[i]) + "\n")
print("Name: " + lot_names[i])
print("Type: " + str(types[i]) + "\n")
for name in building_names:
print(name)
print(name)
'''

1 comment on commit 703badb

@JohnnyD1
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This commit is associated with issues #34, #31, #32, #33, and #36.

Please sign in to comment.