Skip to content

Commit

Permalink
Authentication in GMT (#872)
Browse files Browse the repository at this point in the history
* First version

* Added daily to allowed schedule modes for default user

* Removing user_id from object repr

* SQL typo

* Fixed timeout tests

* Database is now reloaded from structure file instead of truncate

* Authenticate now returns user obj instead of just ID

* Updated diff test signature

* Using TRUNCATE CASCADE to clear DB

* DELETE script for retention expired

* Implemented measurement quota with many tests and refactorings

* Removed noise

* Email adding was not possible without user_id

* migration needs to be wrapped around [skip ci]

* user_id added to hog, ci and carbond

* CarbonDB user_id column [skip ci]

* Adding user_id to structure

* Added more JOINs for delete [skip ci]

* Added machine to error in client.py [skip ci]

* Run-ID link and class name added to errors

* Run-ID Link only if not empty [skip ci]

* Authentication token is now not DEFAULT in frontend. Only in API. Added tests

* Added no-transform class

* Guard claused import_csv

* Added comment

* Raising errors and applying caching (#896)
  • Loading branch information
ArneTR authored Sep 27, 2024
1 parent 65f956e commit 5a1eb6f
Show file tree
Hide file tree
Showing 33 changed files with 916 additions and 174 deletions.
51 changes: 31 additions & 20 deletions api/api_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -608,13 +608,14 @@ def __init__(
self.content = content
super().__init__(content, status_code, headers, media_type, background)

# The decorator will not work between requests, so we are not prone to stale data over time
@cache
def get_geo(ip):
try:
ip_obj = ipaddress.ip_address(ip)
if ip_obj.is_private:
return('52.53721666833642', '13.424863870661927')
except ValueError:
return (None, None)

ip_obj = ipaddress.ip_address(ip) # may raise a ValueError
if ip_obj.is_private:
error_helpers.log_error(f"Private IP was submitted to get_geo {ip}. This is normal in development, but should not happen in production.")
return('52.53721666833642', '13.424863870661927')

query = "SELECT ip_address, data FROM ip_data WHERE created_at > NOW() - INTERVAL '24 hours' AND ip_address=%s;"
db_data = DB().fetch_all(query, (ip,))
Expand All @@ -624,19 +625,20 @@ def get_geo(ip):

latitude, longitude = get_geo_ipapi_co(ip)

if latitude is False:
if not latitude:
latitude, longitude = get_geo_ip_api_com(ip)
if latitude is False:
if not latitude:
latitude, longitude = get_geo_ip_ipinfo(ip)
if not latitude:
raise RuntimeError(f"Could not get Geo-IP for {ip} after 3 tries")

#If all 3 fail there is something bigger wrong
return (latitude, longitude)


def get_geo_ipapi_co(ip):

response = requests.get(f"https://ipapi.co/{ip}/json/", timeout=10)
print(f"Accessing https://ipapi.co/{ip}/json/")

if response.status_code == 200:
resp_data = response.json()

Expand All @@ -650,6 +652,8 @@ def get_geo_ipapi_co(ip):

return (resp_data.get('latitude'), resp_data.get('longitude'))

error_helpers.log_error(f"Could not get Geo-IP from ipapi.co for {ip}. Trying next ...", response=response)

return (False, False)

def get_geo_ip_api_com(ip):
Expand All @@ -671,6 +675,8 @@ def get_geo_ip_api_com(ip):

return (resp_data.get('latitude'), resp_data.get('longitude'))

error_helpers.log_error(f"Could not get Geo-IP from ip-api.com for {ip}. Trying next ...", response=response)

return (False, False)

def get_geo_ip_ipinfo(ip):
Expand All @@ -694,8 +700,12 @@ def get_geo_ip_ipinfo(ip):

return (resp_data.get('latitude'), resp_data.get('longitude'))

error_helpers.log_error(f"Could not get Geo-IP from ipinfo.io for {ip}. Trying next ...", response=response)

return (False, False)

# The decorator will not work between requests, so we are not prone to stale data over time
@cache
def get_carbon_intensity(latitude, longitude):

if latitude is None or longitude is None:
Expand Down Expand Up @@ -726,12 +736,11 @@ def get_carbon_intensity(latitude, longitude):

return resp_data.get('carbonIntensity')

return None
error_helpers.log_error(f"Could not get carbon intensity from Electricitymaps.org for {params}", response=response)

def carbondb_add(client_ip, energydatas):
return None

latitude, longitude = get_geo(client_ip)
carbon_intensity = get_carbon_intensity(latitude, longitude)
def carbondb_add(client_ip, energydatas, user_id):

data_rows = []

Expand All @@ -752,10 +761,12 @@ def carbondb_add(client_ip, energydatas):
if field_value is None or str(field_value).strip() == '':
raise RequestValidationError(f"{field_name.capitalize()} is empty. Ignoring everything!")

if 'ip' in e:
# An ip has been given with the data. Let's use this:
latitude, longitude = get_geo(e['ip'])
carbon_intensity = get_carbon_intensity(latitude, longitude)
if 'ip' in e: # An ip has been given with the data. We prioritize that
latitude, longitude = get_geo(e['ip']) # cached
carbon_intensity = get_carbon_intensity(latitude, longitude) # cached
else:
latitude, longitude = get_geo(client_ip) # cached
carbon_intensity = get_carbon_intensity(latitude, longitude) # cached

energy_kwh = float(e['energy_value']) * 2.77778e-7 # kWh
co2_value = energy_kwh * carbon_intensity # results in g
Expand All @@ -764,12 +775,12 @@ def carbondb_add(client_ip, energydatas):
project_uuid = e['project'] if e['project'] is not None else ''
tags_clean = "{" + ",".join([f'"{tag.strip()}"' for tag in e['tags'].split(',') if e['tags']]) + "}" if e['tags'] is not None else ''

row = f"{e['type']}|{company_uuid}|{e['machine']}|{project_uuid}|{tags_clean}|{int(e['time_stamp'])}|{e['energy_value']}|{co2_value}|{carbon_intensity}|{latitude}|{longitude}|{client_ip}"
row = f"{e['type']}|{company_uuid}|{e['machine']}|{project_uuid}|{tags_clean}|{int(e['time_stamp'])}|{e['energy_value']}|{co2_value}|{carbon_intensity}|{latitude}|{longitude}|{client_ip}|{user_id}"
data_rows.append(row)

data_str = "\n".join(data_rows)
data_file = io.StringIO(data_str)

columns = ['type', 'company', 'machine', 'project', 'tags', 'time_stamp', 'energy_value', 'co2_value', 'carbon_intensity', 'latitude', 'longitude', 'ip_address']
columns = ['type', 'company', 'machine', 'project', 'tags', 'time_stamp', 'energy_value', 'co2_value', 'carbon_intensity', 'latitude', 'longitude', 'ip_address', 'user_id']

DB().copy_from(file=data_file, table='carbondb_energy_data', columns=columns, sep='|')
Loading

0 comments on commit 5a1eb6f

Please sign in to comment.