From 551566d8dac0c889ef1168f3cafdd65415199a02 Mon Sep 17 00:00:00 2001
From: bkatiemills
Date: Tue, 16 Apr 2024 17:52:57 -0400
Subject: [PATCH 01/12] thinking about regional slicing

---
 argovisHelpers/helpers.py | 184 ++++++++++++++++++++++++++------------
 1 file changed, 128 insertions(+), 56 deletions(-)

diff --git a/argovisHelpers/helpers.py b/argovisHelpers/helpers.py
index 0f4d0e9..2be8b53 100644
--- a/argovisHelpers/helpers.py
+++ b/argovisHelpers/helpers.py
@@ -1,4 +1,7 @@
-import requests, datetime, copy, time, re, area, math
+import requests, datetime, copy, time, re, area, math, urllib
+from shapely.geometry import shape, box
+import geopandas as gpd
+import json

 # networking helpers

@@ -11,10 +14,10 @@ def argofetch(route, options={}, apikey='', apiroot='https://argovis-api.colorad
         if option in options:
             options[option] = str(options[option])

-    dl = requests.get(apiroot + route, params = options, headers={'x-argokey': apikey})
+    dl = requests.get(apiroot.rstrip('/') + '/' + route.lstrip('/'), params = options, headers={'x-argokey': apikey})
     statuscode = dl.status_code
     if verbose:
-        print(dl.url)
+        print(urllib.parse.unquote(dl.url))
     dl = dl.json()

     if statuscode==429:
@@ -29,10 +32,9 @@ def argofetch(route, options={}, apikey='', apiroot='https://argovis-api.colorad
             print('The temporospatial extent of your request is enormous! Consider using the `query` helper in this package to split it up into more manageable chunks.')
         elif statuscode >= 500 or (statuscode==200 and type(dl) is dict and 'code' in dl):
             print("Argovis' servers experienced an error. Please try your request again, and email argovis@colorado.edu if this keeps happening; please include the full details of the request you made so we can help address it.")
-        raise Exception(statuscode)
+        raise Exception(statuscode, dl)

-    if (statuscode==404) or (type(dl[0]) is dict and 'code' in dl[0] and dl[0]['code']==404):
-        return [], suggestedLatency
+    # no special action for 404 - a 404 due to a mangled route will return an error, while a valid search with no result will return [].
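    # [illustrative aside, not part of this patch: argofetch hands back a
    # (data, suggestedLatency) pair, so callers unpack both, e.g.
    #     profiles, delay = argofetch('/argo', options={'platform': '4902911'}, apikey='<key>')
    # with a made-up platform id and key; query() below threads the latency through
    # successive calls to pace itself against the rate limiter.]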
return dl, suggestedLatency @@ -41,7 +43,7 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed r = re.sub('^/', '', route) r = re.sub('/$', '', r) - data_routes = ['argo', 'cchdo', 'drifters', 'tc', 'argotrajectories', 'easyocean', 'grids/rg09', 'grids/kg21', 'grids/glodap', 'timeseries/noaasst', 'timeseries/copernicussla', 'timeseries/ccmpwind'] + data_routes = ['argo', 'cchdo', 'drifters', 'tc', 'argotrajectories', 'easyocean', 'grids/rg09', 'grids/kg21', 'grids/glodap', 'timeseries/noaasst', 'timeseries/copernicussla', 'timeseries/ccmpwind', 'extended/ar'] scoped_parameters = { 'argo': ['id','platform', 'metadata'], @@ -55,7 +57,8 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed 'grids/glodap': ['id'], 'timeseries/noaasst': ['id'], 'timeseries/copernicussla': ['id'], - 'timeseries/ccmpwind': ['id'] + 'timeseries/ccmpwind': ['id'], + 'extended/ar': ['id'] } earliest_records = { @@ -67,10 +70,11 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed 'easyocean': parsetime("1983-10-08T00:00:00.000Z"), 'grids/rg09': parsetime("2004-01-14T00:00:00.000Z"), 'grids/kg21': parsetime("2005-01-14T00:00:00.000Z"), - 'grids/glodap': parsetime("0001-01-01T00:00:00.000Z"), + 'grids/glodap': parsetime("1000-01-01T00:00:00.000Z"), 'timeseries/noaasst': parsetime("1989-12-30T00:00:00.000Z"), 'timeseries/copernicussla': parsetime("1993-01-02T00:00:00Z"), - 'timeseries/ccmpwind': parsetime("1993-01-02T00:00:00Z") + 'timeseries/ccmpwind': parsetime("1993-01-02T00:00:00Z"), + 'extended/ar': parsetime("2000-01-01T00:00:00Z") } # plus a day vs the API, just to make sure we don't artificially cut off @@ -83,63 +87,83 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed 'easyocean': parsetime("2022-10-17T00:00:00.000Z"), 'grids/rg09': parsetime("2022-05-16T00:00:00.000Z"), 'grids/kg21': parsetime("2020-12-16T00:00:00.000Z"), - 'grids/glodap': parsetime("0001-01-02T00:00:00.000Z"), + 'grids/glodap': parsetime("1000-01-02T00:00:00.000Z"), 'timeseries/noaasst': parsetime("2023-01-30T00:00:00.000Z"), 'timeseries/copernicussla': parsetime("2022-08-01T00:00:00.000Z"), - 'timeseries/ccmpwind': parsetime("1993-12-31T00:00:00Z") + 'timeseries/ccmpwind': parsetime("2019-12-30T00:00:00Z"), + 'extended/ar': parsetime("2022-01-01T21:00:00Z") } if r in data_routes: # these are potentially large requests that might need to be sliced up + ## identify timeseries, need to be recombined differently after slicing + isTimeseries = r.split('/')[0] == 'timeseries' + ## if a data query carries a scoped parameter, no need to slice up: if r in scoped_parameters and not set(scoped_parameters[r]).isdisjoint(options.keys()): return argofetch(route, options=options, apikey=apikey, apiroot=apiroot, verbose=verbose)[0] - ## slice up in time bins: - start = None - end = None - if 'startDate' in options: - start = parsetime(options['startDate']) - else: - start = earliest_records[r] - if 'endDate' in options: - end = parsetime(options['endDate']) + if isTimeseries: + ## slice up in space bins - could do this for all in future, we'll see how it goes. 
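        # [illustrative aside, not part of this patch: split_polygon chops the requested
        # region into cells of at most 5x5 degrees, so a polygon like
        #     [[-10,-10],[10,-10],[10,10],[-10,10],[-10,-10]]
        # (the 20x20 degree region used in the tests) is fetched as a batch of
        # cell-sized queries whose results are concatenated below.]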
+ if 'polygon' not in options: # need to deal with boxes too + print('Please specify a polygon region for this search.') + pgons = split_polygon(options['polygon'], 5, 5) + ops = copy.deepcopy(options) + results = [] + delay = 0 + for i in range(len(pgons)): + ops['polygon'] = pgons[i] + increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) + results += increment[0] + delay = increment[1] + time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay + return results else: - end = last_records[r] - - ### determine appropriate bin size - maxbulk = 1000000 # should be <= maxbulk used in generating an API 413 - timestep = 30 # days - - if 'polygon' in options: - extent = area.area({'type':'Polygon','coordinates':[ options['polygon'] ]}) / 13000 / 1000000 # poly area in units of 13000 sq. km. blocks - timestep = min(400, math.floor(maxbulk / extent)) - elif 'multipolygon' in options: - extents = [area.area({'type':'Polygon','coordinates':[x]}) / 13000 / 1000000 for x in options['multipolygon']] - extent = min(extents) - timestep = min(400,math.floor(maxbulk / extent)) - elif 'box' in options: - extent = area.area({'type':'Polygon','coordinates':[[ options['box'][0], [options['box'][1][0], options['box'][0][0]], options['box'][1], [options['box'][0][0], options['box'][1][0]], options['box'][0]]]}) / 13000 / 1000000 - timestep = min(400, math.floor(maxbulk / extent)) - - delta = datetime.timedelta(days=timestep) - times = [start] - while times[-1] + delta < end: - times.append(times[-1]+delta) - times.append(end) - times = [parsetime(x) for x in times] - results = [] - ops = copy.deepcopy(options) - delay = 0 - for i in range(len(times)-1): - ops['startDate'] = times[i] - ops['endDate'] = times[i+1] - increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) - results += increment[0] - delay = increment[1] - time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay - return results + ## slice up in time bins: + start = None + end = None + if 'startDate' in options: + start = parsetime(options['startDate']) + else: + start = earliest_records[r] + if 'endDate' in options: + end = parsetime(options['endDate']) + else: + end = last_records[r] + + ### determine appropriate bin size + maxbulk = 1000000 # should be <= maxbulk used in generating an API 413 + timestep = 30 # days + + if 'polygon' in options: + extent = area.area({'type':'Polygon','coordinates':[ options['polygon'] ]}) / 13000 / 1000000 # poly area in units of 13000 sq. km. 
blocks + timestep = min(400, math.floor(maxbulk / extent)) + elif 'multipolygon' in options: + extents = [area.area({'type':'Polygon','coordinates':[x]}) / 13000 / 1000000 for x in options['multipolygon']] + extent = min(extents) + timestep = min(400,math.floor(maxbulk / extent)) + elif 'box' in options: + extent = area.area({'type':'Polygon','coordinates':[[ options['box'][0], [options['box'][1][0], options['box'][0][0]], options['box'][1], [options['box'][0][0], options['box'][1][0]], options['box'][0]]]}) / 13000 / 1000000 + timestep = min(400, math.floor(maxbulk / extent)) + + delta = datetime.timedelta(days=timestep) + times = [start] + while times[-1] + delta < end: + times.append(times[-1]+delta) + times.append(end) + times = [parsetime(x) for x in times] + results = [] + ops = copy.deepcopy(options) + delay = 0 + for i in range(len(times)-1): + ops['startDate'] = times[i] + ops['endDate'] = times[i+1] + increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) + results += increment[0] + delay = increment[1] + time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay + return results else: return argofetch(route, options=options, apikey=apikey, apiroot=apiroot, verbose=verbose)[0] @@ -180,7 +204,12 @@ def parsetime(time): time = time.replace('Z', '.000Z') return datetime.datetime.strptime(time, "%Y-%m-%dT%H:%M:%S.%fZ") elif type(time) is datetime.datetime: - return time.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + t = time.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + tokens = t.split('-') + if len(tokens[0]) < 4: + tokens[0] = ('000' + tokens[0])[-4:] + t = '-'.join(tokens) + return t else: raise ValueError(time) @@ -193,4 +222,47 @@ def units_inflate(data_doc, metadata_doc=None): return {data_info[0][i]: data_info[2][i][uindex] for i in range(len(data_info[0]))} +def combine_data_lists(lists): + # given a list of data lists, concat them appropriately; + # ie [[1,2],[3,4]] + [[5,6],[7,8]] = [[1,2,5,6], [3,4,7,8]] + + combined_list = [] + for sublists in zip(*lists): + combined_sublist = [] + for sublist in sublists: + combined_sublist.extend(sublist) + combined_list.append(combined_sublist) + return combined_list + +def split_polygon(geojson_polygon, max_lon_size, max_lat_size): + # slice a geojson polygon up into a list of smaller polygons of maximum extent in lon and lat + + polygon = shape({"type": "Polygon", "coordinates": [geojson_polygon]}) + + # Get the bounds of the polygon + min_lon, min_lat, max_lon, max_lat = polygon.bounds + + # Create a list to hold the smaller polygons + smaller_polygons = [] + + # Split the polygon into smaller polygons + lon = min_lon + lat = min_lat + while lon <= max_lon: + while lat <= max_lat: + # Create a bounding box for the current chunk + bounding_box = box(lon, lat, lon + max_lon_size, lat + max_lat_size) + + # Intersect the bounding box with the original polygon + chunk = polygon.intersection(bounding_box) + + # If the intersection is not empty, add it to the list of smaller polygons + if not chunk.is_empty: + # Convert the Shapely geometry to a GeoJSON polygon and add it to the list + smaller_polygons.append(json.loads(gpd.GeoSeries([chunk]).to_json())) + + lat += max_lat_size + lat = min_lat + lon += max_lon_size + return [x['features'][0]['geometry']['coordinates'][0] for x in smaller_polygons] \ No newline at end of file From 2f4d00e9518003b54fa8bd0b557957e75f950fb0 Mon Sep 17 00:00:00 2001 From: bkatiemills Date: Thu, 18 Apr 2024 21:00:00 -0400 Subject: 
[PATCH 02/12] use time or space splitting as optimal --- argovisHelpers/helpers.py | 208 ++++++++++++++++++++++++-------------- 1 file changed, 131 insertions(+), 77 deletions(-) diff --git a/argovisHelpers/helpers.py b/argovisHelpers/helpers.py index 2be8b53..0e91d1e 100644 --- a/argovisHelpers/helpers.py +++ b/argovisHelpers/helpers.py @@ -60,7 +60,86 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed 'timeseries/ccmpwind': ['id'], 'extended/ar': ['id'] } - + + if r in data_routes and (not 'compression' in options or options['compression']!='minimal'): + # these are potentially large requests that might need to be sliced up + + ## identify timeseries, need to be recombined differently after slicing + isTimeseries = r.split('/')[0] == 'timeseries' + + ## if a data query carries a scoped parameter, no need to slice up: + if r in scoped_parameters and not set(scoped_parameters[r]).isdisjoint(options.keys()): + return argofetch(route, options=options, apikey=apikey, apiroot=apiroot, verbose=verbose)[0] + + # should we slice by time or space? + times = slice_timesteps(options, r) + nspace = 999999 + if 'polygon' in options: + pgons = split_polygon(options['polygon'], 5, 5) + n_space = len(pgons) + elif 'box' in options: + boxes = split_box(options['box'], 5, 5) + n_space = len(boxes) + + if isTimeseries or n_space < len(times): + ## slice up in space bins - could do this for all in future, we'll see how it goes. + ops = copy.deepcopy(options) + results = [] + delay = 0 + if 'polygon' in options: + #pgons = split_polygon(options['polygon'], 5, 5) + for i in range(len(pgons)): + ops['polygon'] = pgons[i] + increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) + results += increment[0] + delay = increment[1] + time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay + elif 'box' in options: + boxes = split_box(options['box'], 5, 5) + for i in range(len(boxes)): + ops['box'] = boxes[i] + increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) + results += increment[0] + delay = increment[1] + time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay + # smaller polygons will trace geodesics differently than full polygons, need to doublecheck; + # do it for boxes too just to make sure nothing funny happened on the boundaries + ops = copy.deepcopy(options) + ops['compression'] = 'minimal' + true_ids = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose)[0] + true_ids = [x[0] for x in true_ids] + fetched_ids = [x['_id'] for x in results] + if len(fetched_ids) != len(list(set(fetched_ids))): + # deduplicate anything scooped up by multiple cells, like on cell borders + r = {x['_id']: x for x in results} + results = [r[i] for i in list(r.keys())] + fetched_ids = [x['_id'] for x in results] + to_drop = [item for item in fetched_ids if item not in true_ids] + to_add = [item for item in true_ids if item not in fetched_ids] + for id in to_add: + p, delay = argofetch(route, options={'id': id}, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) + results += p + results = [x for x in results if x['_id'] not in to_drop] + return results + else: + results = [] + ops = copy.deepcopy(options) + delay = 0 + for i in range(len(times)-1): + ops['startDate'] = times[i] + ops['endDate'] = times[i+1] + increment = 
argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) + results += increment[0] + delay = increment[1] + time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay + return results + + else: + return argofetch(route, options=options, apikey=apikey, apiroot=apiroot, verbose=verbose)[0] + +def slice_timesteps(options, r): + # given a qsr option dict and data route, return a list of reasonable time divisions + earliest_records = { 'argo': parsetime("1997-07-27T20:26:20.002Z"), 'cchdo': parsetime("1972-07-23T09:11:00.000Z"), @@ -94,80 +173,40 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed 'extended/ar': parsetime("2022-01-01T21:00:00Z") } - if r in data_routes: - # these are potentially large requests that might need to be sliced up - - ## identify timeseries, need to be recombined differently after slicing - isTimeseries = r.split('/')[0] == 'timeseries' - - ## if a data query carries a scoped parameter, no need to slice up: - if r in scoped_parameters and not set(scoped_parameters[r]).isdisjoint(options.keys()): - return argofetch(route, options=options, apikey=apikey, apiroot=apiroot, verbose=verbose)[0] - - if isTimeseries: - ## slice up in space bins - could do this for all in future, we'll see how it goes. - if 'polygon' not in options: # need to deal with boxes too - print('Please specify a polygon region for this search.') - pgons = split_polygon(options['polygon'], 5, 5) - ops = copy.deepcopy(options) - results = [] - delay = 0 - for i in range(len(pgons)): - ops['polygon'] = pgons[i] - increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) - results += increment[0] - delay = increment[1] - time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay - return results - else: - ## slice up in time bins: - start = None - end = None - if 'startDate' in options: - start = parsetime(options['startDate']) - else: - start = earliest_records[r] - if 'endDate' in options: - end = parsetime(options['endDate']) - else: - end = last_records[r] - - ### determine appropriate bin size - maxbulk = 1000000 # should be <= maxbulk used in generating an API 413 - timestep = 30 # days - - if 'polygon' in options: - extent = area.area({'type':'Polygon','coordinates':[ options['polygon'] ]}) / 13000 / 1000000 # poly area in units of 13000 sq. km. 
blocks - timestep = min(400, math.floor(maxbulk / extent)) - elif 'multipolygon' in options: - extents = [area.area({'type':'Polygon','coordinates':[x]}) / 13000 / 1000000 for x in options['multipolygon']] - extent = min(extents) - timestep = min(400,math.floor(maxbulk / extent)) - elif 'box' in options: - extent = area.area({'type':'Polygon','coordinates':[[ options['box'][0], [options['box'][1][0], options['box'][0][0]], options['box'][1], [options['box'][0][0], options['box'][1][0]], options['box'][0]]]}) / 13000 / 1000000 - timestep = min(400, math.floor(maxbulk / extent)) - - delta = datetime.timedelta(days=timestep) - times = [start] - while times[-1] + delta < end: - times.append(times[-1]+delta) - times.append(end) - times = [parsetime(x) for x in times] - results = [] - ops = copy.deepcopy(options) - delay = 0 - for i in range(len(times)-1): - ops['startDate'] = times[i] - ops['endDate'] = times[i+1] - increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) - results += increment[0] - delay = increment[1] - time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay - return results - + maxbulk = 2000000 # should be <= maxbulk used in generating an API 413 + timestep = 30 # days + + if 'polygon' in options: + extent = area.area({'type':'Polygon','coordinates':[ options['polygon'] ]}) / 13000 / 1000000 # poly area in units of 13000 sq. km. blocks + elif 'multipolygon' in options: + extents = [area.area({'type':'Polygon','coordinates':[x]}) / 13000 / 1000000 for x in options['multipolygon']] + extent = min(extents) + elif 'box' in options: + extent = area.area({'type':'Polygon','coordinates':[[ options['box'][0], [options['box'][1][0], options['box'][0][0]], options['box'][1], [options['box'][0][0], options['box'][1][0]], options['box'][0]]]}) / 13000 / 1000000 + + timestep = math.floor(maxbulk / extent) + + ## slice up in time bins: + start = None + end = None + if 'startDate' in options: + start = parsetime(options['startDate']) else: - return argofetch(route, options=options, apikey=apikey, apiroot=apiroot, verbose=verbose)[0] - + start = earliest_records[r] + if 'endDate' in options: + end = parsetime(options['endDate']) + else: + end = last_records[r] + + delta = datetime.timedelta(days=timestep) + times = [start] + while times[-1] + delta < end: + times.append(times[-1]+delta) + times.append(end) + times = [parsetime(x) for x in times] + + return times + # data munging helpers def data_inflate(data_doc, metadata_doc=None): @@ -248,8 +287,8 @@ def split_polygon(geojson_polygon, max_lon_size, max_lat_size): # Split the polygon into smaller polygons lon = min_lon lat = min_lat - while lon <= max_lon: - while lat <= max_lat: + while lon < max_lon: + while lat < max_lat: # Create a bounding box for the current chunk bounding_box = box(lon, lat, lon + max_lon_size, lat + max_lat_size) @@ -265,4 +304,19 @@ def split_polygon(geojson_polygon, max_lon_size, max_lat_size): lat = min_lat lon += max_lon_size - return [x['features'][0]['geometry']['coordinates'][0] for x in smaller_polygons] \ No newline at end of file + return [x['features'][0]['geometry']['coordinates'][0] for x in smaller_polygons] + +def split_box(box, max_lon_size, max_lat_size): + # slice a box up into a list of smaller boxes of maximum extent in lon and lat + + smaller_boxes = [] + lon = box[0][0] + lat = box[0][1] + while lon < box[1][0]: + while lat < box[1][1]: + smaller_boxes.append([[lon, lat],[min(box[1][0], lon + 
max_lon_size), min(box[1][1], lat + max_lat_size)]]) + lat += max_lat_size + lat = box[0][1] + lon += max_lon_size + + return smaller_boxes \ No newline at end of file From 4455a002484d47340304d0e0c024fce3833e9892 Mon Sep 17 00:00:00 2001 From: bkatiemills Date: Thu, 18 Apr 2024 21:10:12 -0400 Subject: [PATCH 03/12] deps --- .github/workflows/test.yaml | 2 +- Dockerfile | 2 +- pyproject.toml | 4 +++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 36c31d8..c91be23 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -8,7 +8,7 @@ on: jobs: test: runs-on: ubuntu-latest - container: argovis/argovis_helpers:test-base-231026 + container: argovis/argovis_helpers:test-base-240418 services: database: diff --git a/Dockerfile b/Dockerfile index fd8efed..7ff0b02 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ FROM python:3.9 -RUN pip install requests pytest area numpy scipy +RUN pip install requests pytest area numpy scipy shapely geopandas WORKDIR /app COPY . . diff --git a/pyproject.toml b/pyproject.toml index b802101..925497c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,9 @@ classifiers = [ ] dependencies = [ "requests", - "area" + "area", + "shapely", + "geopandas" ] requires-python = ">=3.9" From d3034bee4215a5b603c712e019828ff304f2f2f2 Mon Sep 17 00:00:00 2001 From: bkatiemills Date: Thu, 18 Apr 2024 21:17:27 -0400 Subject: [PATCH 04/12] typo --- argovisHelpers/helpers.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/argovisHelpers/helpers.py b/argovisHelpers/helpers.py index 0e91d1e..243d90f 100644 --- a/argovisHelpers/helpers.py +++ b/argovisHelpers/helpers.py @@ -1,7 +1,6 @@ -import requests, datetime, copy, time, re, area, math, urllib +import requests, datetime, copy, time, re, area, math, urllib, json from shapely.geometry import shape, box import geopandas as gpd -import json # networking helpers @@ -73,7 +72,7 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed # should we slice by time or space? times = slice_timesteps(options, r) - nspace = 999999 + n_space = 999999 if 'polygon' in options: pgons = split_polygon(options['polygon'], 5, 5) n_space = len(pgons) From 01e59747ad1425de7e3e84582bbecdff9c5830a2 Mon Sep 17 00:00:00 2001 From: bkatiemills Date: Thu, 18 Apr 2024 21:24:33 -0400 Subject: [PATCH 05/12] typo --- argovisHelpers/helpers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/argovisHelpers/helpers.py b/argovisHelpers/helpers.py index 243d90f..3d95c09 100644 --- a/argovisHelpers/helpers.py +++ b/argovisHelpers/helpers.py @@ -174,6 +174,7 @@ def slice_timesteps(options, r): maxbulk = 2000000 # should be <= maxbulk used in generating an API 413 timestep = 30 # days + extent = 360000000 / 13000 #// 360M sq km, all the oceans if 'polygon' in options: extent = area.area({'type':'Polygon','coordinates':[ options['polygon'] ]}) / 13000 / 1000000 # poly area in units of 13000 sq. km. 
blocks From 82319cac98ef6af909775ad689a093e5cf6ad1de Mon Sep 17 00:00:00 2001 From: bkatiemills Date: Thu, 18 Apr 2024 21:28:50 -0400 Subject: [PATCH 06/12] typo --- tests/tests.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/tests/tests.py b/tests/tests.py index 8ef8c9a..981588f 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -161,25 +161,6 @@ def test_combine_data_lists(apiroot, apikey): assert helpers.combine_data_lists([a,b]) == [[1,2,5,6],[3,4,7,8]], 'failed to combine two data lists' assert helpers.combine_data_lists([a,b,c]) == [[1,2,5,6,10,11],[3,4,7,8,12,13]], 'failed to combine three data lists' -def test_combine_dicts(apiroot, apikey): - ''' - check basic behavior of combine_dics - ''' - - x = {'geolocation':{'type': 'Point', 'coordinates':[0,0]}, 'level':0, 'timeseries':[0,1,2], 'data': [[1,2,3],[4,5,6]]} - y = {'geolocation':{'type': 'Point', 'coordinates':[10,10]}, 'level':1, 'timeseries':[0,1,2], 'data': [[10,20,30],[40,50,60]]} - z = {'geolocation':{'type': 'Point', 'coordinates':[20,20]}, 'level':2, 'timeseries':[0,1,2], 'data': [[100,200,300],[400,500,600]]} - - X = {'geolocation':{'type': 'Point', 'coordinates':[0,0]}, 'level':0, 'timeseries':[3,4,5], 'data': [[11,21,31],[41,51,61]]} - Y = {'geolocation':{'type': 'Point', 'coordinates':[10,10]}, 'level':1, 'timeseries':[3,4,5], 'data': [[101,201,301],[401,501,601]]} - Z = {'geolocation':{'type': 'Point', 'coordinates':[20,20]}, 'level':2.1, 'timeseries':[3,4,5], 'data': [[1001,2001,3001],[4001,5001,6001]]} - - assert helpers.combine_dicts([x,y,z], [X,Z,Y]) == [ - {'geolocation':{'type': 'Point', 'coordinates':[0,0]}, 'level':0, 'timeseries':[0,1,2,3,4,5], 'data': [[1,2,3,11,21,31],[4,5,6,41,51,61]]}, - {'geolocation':{'type': 'Point', 'coordinates':[10,10]}, 'level':1, 'timeseries':[0,1,2,3,4,5], 'data': [[10,20,30,101,201,301],[40,50,60,401,501,601]]}, - {'geolocation':{'type': 'Point', 'coordinates':[20,20]}, 'level':2, 'timeseries':[0,1,2], 'data': [[100,200,300],[400,500,600]]}, - {'geolocation':{'type': 'Point', 'coordinates':[20,20]}, 'level':2.1, 'timeseries':[3,4,5], 'data': [[1001,2001,3001],[4001,5001,6001]]} - ], 'failed to combine timeseries fragments correctly' def test_timeseries_recombo(apiroot, apikey): ''' From 3bd1d93d5457bbce241618650acf638847cced7e Mon Sep 17 00:00:00 2001 From: bkatiemills Date: Fri, 19 Apr 2024 01:45:59 -0400 Subject: [PATCH 07/12] corrections --- .github/workflows/test.yaml | 4 ++-- argovisHelpers/helpers.py | 6 +++--- tests/tests.py | 1 + 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index c91be23..fbe7569 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -12,11 +12,11 @@ jobs: services: database: - image: argovis/testdb:0.40 + image: argovis/testdb:0.41 redis: image: redis:7.0.2 api: - image: argovis/api:2.22.0 + image: argovis/api:2.24.1 env: ARGONODE: core diff --git a/argovisHelpers/helpers.py b/argovisHelpers/helpers.py index 3d95c09..07abb4a 100644 --- a/argovisHelpers/helpers.py +++ b/argovisHelpers/helpers.py @@ -86,7 +86,7 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed results = [] delay = 0 if 'polygon' in options: - #pgons = split_polygon(options['polygon'], 5, 5) + pgons = split_polygon(options['polygon'], 5, 5) for i in range(len(pgons)): ops['polygon'] = pgons[i] increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) @@ -105,8 +105,8 @@ def 
query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed # do it for boxes too just to make sure nothing funny happened on the boundaries ops = copy.deepcopy(options) ops['compression'] = 'minimal' - true_ids = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose)[0] - true_ids = [x[0] for x in true_ids] + true_ids = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) + true_ids = [x[0] for x in true_ids[0]] fetched_ids = [x['_id'] for x in results] if len(fetched_ids) != len(list(set(fetched_ids))): # deduplicate anything scooped up by multiple cells, like on cell borders diff --git a/tests/tests.py b/tests/tests.py index 981588f..a07c305 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -169,6 +169,7 @@ def test_timeseries_recombo(apiroot, apikey): slice_response = helpers.query('/timeseries/ccmpwind', options={'startDate':'1995-01-01T00:00:00Z', 'endDate':'2019-01-01T00:00:00Z', 'polygon': [[-10,-10],[10,-10],[10,10],[-10,10],[-10,-10]], 'data':'all'}, apikey=apikey, apiroot=apiroot) noslice_response = helpers.query('/timeseries/ccmpwind', options={'startDate':'1995-01-01T00:00:00Z', 'endDate':'2019-01-01T00:00:00Z', 'id': '0.125_0.125', 'data':'all'}, apikey=apikey, apiroot=apiroot) + assert slice_response[0]['data'] == noslice_response[0]['data'], 'mismatch on data recombination' assert slice_response[0]['timeseries'] == noslice_response[0]['timeseries'], 'mismatch on timestamp recombination' From 60bf3b95badb5378a09010c78f33fa4738969d0b Mon Sep 17 00:00:00 2001 From: bkatiemills Date: Tue, 23 Apr 2024 18:54:11 -0400 Subject: [PATCH 08/12] attempt at not blowing the dateline --- .github/workflows/test.yaml | 2 +- argovisHelpers/helpers.py | 38 +++++++++++++++++++++++++++++-------- pyproject.toml | 6 +++--- 3 files changed, 34 insertions(+), 12 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fbe7569..c44bdc4 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -16,7 +16,7 @@ jobs: redis: image: redis:7.0.2 api: - image: argovis/api:2.24.1 + image: argovis/api:2.24.3 env: ARGONODE: core diff --git a/argovisHelpers/helpers.py b/argovisHelpers/helpers.py index 07abb4a..e8f5b6c 100644 --- a/argovisHelpers/helpers.py +++ b/argovisHelpers/helpers.py @@ -59,6 +59,10 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed 'timeseries/ccmpwind': ['id'], 'extended/ar': ['id'] } + + winding = False + if 'winding' in options: + winding = options['winding'] == 'true' if r in data_routes and (not 'compression' in options or options['compression']!='minimal'): # these are potentially large requests that might need to be sliced up @@ -74,10 +78,10 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed times = slice_timesteps(options, r) n_space = 999999 if 'polygon' in options: - pgons = split_polygon(options['polygon'], 5, 5) + pgons = split_polygon(options['polygon']) n_space = len(pgons) elif 'box' in options: - boxes = split_box(options['box'], 5, 5) + boxes = split_box(options['box']) n_space = len(boxes) if isTimeseries or n_space < len(times): @@ -86,7 +90,7 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed results = [] delay = 0 if 'polygon' in options: - pgons = split_polygon(options['polygon'], 5, 5) + pgons = split_polygon(options['polygon']) for i in range(len(pgons)): ops['polygon'] = pgons[i] increment = 
argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) @@ -94,7 +98,7 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed delay = increment[1] time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay elif 'box' in options: - boxes = split_box(options['box'], 5, 5) + boxes = split_box(options['box']) for i in range(len(boxes)): ops['box'] = boxes[i] increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) @@ -273,10 +277,15 @@ def combine_data_lists(lists): combined_list.append(combined_sublist) return combined_list -def split_polygon(geojson_polygon, max_lon_size, max_lat_size): +def split_polygon(coords, max_lon_size=5, max_lat_size=5, winding=False): # slice a geojson polygon up into a list of smaller polygons of maximum extent in lon and lat - polygon = shape({"type": "Polygon", "coordinates": [geojson_polygon]}) + # if a polygon bridges the dateline and wraps its longitudes around, + # we need to detect this and un-wrap. + # assume bounded region is the smaller region unless winding is being enforced, per mongo + coords = dont_wrap_dateline(coords) + + polygon = shape({"type": "Polygon", "coordinates": [coords]}) # Get the bounds of the polygon min_lon, min_lat, max_lon, max_lat = polygon.bounds @@ -306,9 +315,13 @@ def split_polygon(geojson_polygon, max_lon_size, max_lat_size): return [x['features'][0]['geometry']['coordinates'][0] for x in smaller_polygons] -def split_box(box, max_lon_size, max_lat_size): +def split_box(box, max_lon_size=5, max_lat_size=5): # slice a box up into a list of smaller boxes of maximum extent in lon and lat + if box[0][0] > box[1][0]: + # unwrap the dateline + box[1][0] += 360 + smaller_boxes = [] lon = box[0][0] lat = box[0][1] @@ -319,4 +332,13 @@ def split_box(box, max_lon_size, max_lat_size): lat = box[0][1] lon += max_lon_size - return smaller_boxes \ No newline at end of file + return smaller_boxes + +def dont_wrap_dateline(coords): + # given a list of polygon coords, return them ensuring they dont modulo 360 over the dateline. + + for i in range(len(coords)-1): + if coords[i][0]*coords[i+1][0] < 0 and abs(coords[i][0] - coords[i+1][0]) > 180: + return [[lon + 360 if lon < 0 else lon, lat] for lon, lat in coords] + + return coords \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 925497c..64a15c2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "argovisHelpers" -version = "0.0.22" +version = "0.0.23-rc2" description = "Helper functions to consume and parse information from University of Colorado's Argovis API." 
readme = "README.md" authors = [{name = "Katie Mills" }] @@ -17,8 +17,8 @@ classifiers = [ dependencies = [ "requests", "area", - "shapely", - "geopandas" + "shapely==1.8.0", + "geopandas==0.14.3" ] requires-python = ">=3.9" From 6584a579e6717c90381a580de72d931c9f027602 Mon Sep 17 00:00:00 2001 From: bkatiemills Date: Tue, 23 Apr 2024 20:59:56 -0400 Subject: [PATCH 09/12] global searches, starting to think about winding --- argovisHelpers/helpers.py | 44 ++++++++++++++++++++++++++++----------- 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/argovisHelpers/helpers.py b/argovisHelpers/helpers.py index e8f5b6c..63609b0 100644 --- a/argovisHelpers/helpers.py +++ b/argovisHelpers/helpers.py @@ -76,31 +76,36 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed # should we slice by time or space? times = slice_timesteps(options, r) - n_space = 999999 + n_space = 2592 # number of 5x5 bins covering a globe if 'polygon' in options: - pgons = split_polygon(options['polygon']) + pgons = split_polygon(options['polygon'], winding=winding) n_space = len(pgons) elif 'box' in options: boxes = split_box(options['box']) n_space = len(boxes) if isTimeseries or n_space < len(times): - ## slice up in space bins - could do this for all in future, we'll see how it goes. + ## slice up in space bins ops = copy.deepcopy(options) results = [] delay = 0 - if 'polygon' in options: - pgons = split_polygon(options['polygon']) - for i in range(len(pgons)): - ops['polygon'] = pgons[i] + + if 'box' in options: + boxes = split_box(options['box']) + for i in range(len(boxes)): + ops['box'] = boxes[i] increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) results += increment[0] delay = increment[1] time.sleep(increment[1]*0.8) # assume the synchronous request is supplying at least some of delay - elif 'box' in options: - boxes = split_box(options['box']) - for i in range(len(boxes)): - ops['box'] = boxes[i] + else: + pgons = [] + if 'polygon' in options: + pgons = split_polygon(options['polygon'], winding=winding) + else: + pgons = generate_global_cells() + for i in range(len(pgons)): + ops['polygon'] = pgons[i] increment = argofetch(route, options=ops, apikey=apikey, apiroot=apiroot, suggestedLatency=delay, verbose=verbose) results += increment[0] delay = increment[1] @@ -125,6 +130,7 @@ def query(route, options={}, apikey='', apiroot='https://argovis-api.colorado.ed results = [x for x in results if x['_id'] not in to_drop] return results else: + ## slice up in time bins results = [] ops = copy.deepcopy(options) delay = 0 @@ -339,6 +345,20 @@ def dont_wrap_dateline(coords): for i in range(len(coords)-1): if coords[i][0]*coords[i+1][0] < 0 and abs(coords[i][0] - coords[i+1][0]) > 180: + # ie if any geodesic edge crosses the dateline with a modulo, we must need to remap. 
return [[lon + 360 if lon < 0 else lon, lat] for lon, lat in coords] - return coords \ No newline at end of file + return coords + +def generate_global_cells(lonstep=5, latstep=5): + cells = [] + lon = -180 + lat = -90 + while lon < 180: + while lat < 90: + cells.append([[lon,lat],[lon+lonstep,lat],[lon+lonstep,lat+latstep],[lon,lat+latstep],[lon,lat]]) + + lat += latstep + lat = -90 + lon += lonstep + return cells \ No newline at end of file From f41ec382b3ffb7342848eaea043d7af3fd760879 Mon Sep 17 00:00:00 2001 From: bkatiemills Date: Wed, 24 Apr 2024 18:35:15 -0400 Subject: [PATCH 10/12] rc and tests --- argovisHelpers/helpers.py | 84 +++++++++++++++++++++++++++------------ tests/tests.py | 55 +++++++++++++++++++------ 2 files changed, 100 insertions(+), 39 deletions(-) diff --git a/argovisHelpers/helpers.py b/argovisHelpers/helpers.py index 63609b0..e694a53 100644 --- a/argovisHelpers/helpers.py +++ b/argovisHelpers/helpers.py @@ -1,5 +1,6 @@ import requests, datetime, copy, time, re, area, math, urllib, json -from shapely.geometry import shape, box +from shapely.geometry import shape, box, Polygon +from shapely.ops import orient import geopandas as gpd # networking helpers @@ -293,33 +294,60 @@ def split_polygon(coords, max_lon_size=5, max_lat_size=5, winding=False): polygon = shape({"type": "Polygon", "coordinates": [coords]}) - # Get the bounds of the polygon - min_lon, min_lat, max_lon, max_lat = polygon.bounds - - # Create a list to hold the smaller polygons smaller_polygons = [] - # Split the polygon into smaller polygons - lon = min_lon - lat = min_lat - while lon < max_lon: - while lat < max_lat: - # Create a bounding box for the current chunk - bounding_box = box(lon, lat, lon + max_lon_size, lat + max_lat_size) - - # Intersect the bounding box with the original polygon - chunk = polygon.intersection(bounding_box) - - # If the intersection is not empty, add it to the list of smaller polygons - if not chunk.is_empty: - # Convert the Shapely geometry to a GeoJSON polygon and add it to the list - smaller_polygons.append(json.loads(gpd.GeoSeries([chunk]).to_json())) - - lat += max_lat_size + min_lon, min_lat, max_lon, max_lat = polygon.bounds + + if winding and is_cw(coords): + # if winding is being enforced and the polygon is cw wound, + # we're looking for everything outside the polygon. 
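        # [illustrative note, not part of this patch: per the tests, is_cw() treats
        #     [[0,0],[0,10],[10,10],[10,0],[0,0]]
        # as clockwise, and mongo reads a CW-wound polygon as selecting its complement;
        # accordingly the loop below sweeps a full 360 degrees of longitude and keeps
        # each cell's difference with the polygon rather than the intersection.]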
+ + lon = min_lon-10 + lat = -90 + while lon < min_lon + 360: + while lat < max_lat+10: # < 90: + bounding_box = box(lon, lat, lon + max_lon_size, lat + max_lat_size) + chunk = bounding_box.difference(polygon) + if not chunk.is_empty: + # Convert the Shapely geometry to a GeoJSON polygon and add it to the list + shapes = json.loads(gpd.GeoSeries([chunk]).to_json()) + if shapes['features'][0]['geometry']['type'] == 'Polygon': + smaller_polygons.append(shapes['features'][0]['geometry']['coordinates'][0]) + elif shapes['features'][0]['geometry']['type'] == 'MultiPolygon': + for poly in shapes['features'][0]['geometry']['coordinates']: + smaller_polygons.append(poly[0]) + + lat += max_lat_size + lat = -90 + lon += max_lon_size + else: + # Split the polygon interior into smaller polygons + + lon = min_lon lat = min_lat - lon += max_lon_size - - return [x['features'][0]['geometry']['coordinates'][0] for x in smaller_polygons] + while lon < max_lon: + while lat < max_lat: + # Create a bounding box for the current chunk + bounding_box = box(lon, lat, lon + max_lon_size, lat + max_lat_size) + + # Intersect the bounding box with the original polygon + chunk = polygon.intersection(bounding_box) + + # If the intersection is not empty, add it to the list of smaller polygons + if not chunk.is_empty: + # Convert the Shapely geometry to a GeoJSON polygon and add it to the list + shapes = json.loads(gpd.GeoSeries([chunk]).to_json()) + if shapes['features'][0]['geometry']['type'] == 'Polygon': + smaller_polygons.append(shapes['features'][0]['geometry']['coordinates'][0]) + elif shapes['features'][0]['geometry']['type'] == 'MultiPolygon': + for poly in shapes['features'][0]['geometry']['coordinates']: + smaller_polygons.append(poly[0]) + + lat += max_lat_size + lat = min_lat + lon += max_lon_size + + return smaller_polygons def split_box(box, max_lon_size=5, max_lat_size=5): # slice a box up into a list of smaller boxes of maximum extent in lon and lat @@ -361,4 +389,8 @@ def generate_global_cells(lonstep=5, latstep=5): lat += latstep lat = -90 lon += lonstep - return cells \ No newline at end of file + return cells + +def is_cw(coords): + unwrap = dont_wrap_dateline(coords) + return Polygon(unwrap) == orient(Polygon(unwrap), sign=-1.0) diff --git a/tests/tests.py b/tests/tests.py index a07c305..d206e85 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -36,19 +36,19 @@ def test_argofetch_404(apiroot, apikey): profile = helpers.argofetch('/argo', options={'startDate':'2072-02-01T00:00:00Z', 'endDate':'2072-02-02T00:00:00Z'}, apikey=apikey, apiroot=apiroot)[0] assert profile == [] -def test_bulky_fetch(apiroot, apikey): - ''' - make sure argofetch handles rapid requests for the whole globe reasonably - ''' - - result = [] - delay = 0 - for i in range(3): - request = helpers.argofetch('/grids/rg09', options={'startDate': '2004-01-01T00:00:00Z', 'endDate': '2004-02-01T00:00:00Z', 'data':'rg09_temperature'}, apikey='regular', apiroot=apiroot) - result += request[0] - delay += request[1] - assert len(result) == 60, 'should have found 20x3 grid docs' - assert delay > 0, 'should have experienced at least some rate limiter delay' +# def test_bulky_fetch(apiroot, apikey): +# ''' +# make sure argofetch handles rapid requests for the whole globe reasonably +# ''' + +# result = [] +# delay = 0 +# for i in range(3): +# request = helpers.argofetch('/grids/rg09', options={'startDate': '2004-01-01T00:00:00Z', 'endDate': '2004-02-01T00:00:00Z', 'data':'rg09_temperature'}, apikey='regular', apiroot=apiroot) +# result += 
request[0]
+#         delay += request[1]
+#     assert len(result) == 60, 'should have found 20x3 grid docs'
+#     assert delay > 0, 'should have experienced at least some rate limiter delay'

 def test_polygon(apiroot, apikey):
     '''
@@ -184,3 +184,32 @@ def test_timeseries_recombo_edges(apiroot, apikey):
     assert 'timeseries' not in response[0], 'make sure timeseries recombination doesnt coerce a timeseries key onto a document that shouldnt have one'
+
+def test_is_cw(apiroot, apikey):
+    '''
+    check basic behavior of cw checker
+    '''
+
+    assert helpers.is_cw([[0,0],[0,10],[10,10],[10,0],[0,0]]), 'basic CW example failed'
+    assert not helpers.is_cw([[0,0],[10,0],[10,10],[0,10],[0,0]]), 'basic CCW example failed'
+    assert helpers.is_cw([[175,0],[175,10],[-175,10],[-175,0],[175,0]]), 'CW wrapping dateline example failed'
+    assert helpers.is_cw([[175,0],[175,10],[185,10],[185,0],[175,0]]), 'CW example crossing dateline failed'
+
+def test_generate_global_cells(apiroot, apikey):
+    '''
+    check basic behavior of generate_global_cells
+    '''
+
+    assert len(helpers.generate_global_cells()) == 2592, 'global 5x5 grid generated wrong number of cells'
+    assert helpers.generate_global_cells()[0] == [[-180,-90],[-175,-90],[-175,-85],[-180,-85],[-180,-90]], 'first cell of global 5x5 grid generated incorrectly'
+
+def test_dont_wrap_dateline(apiroot, apikey):
+    '''
+    check basic behavior of dont_wrap_dateline
+    '''
+
+    assert helpers.dont_wrap_dateline([[-175,0],[-175,10],[175,10],[175,0],[-175,0]]) == [[185,0],[185,10],[175,10],[175,0],[185,0]], 'basic dateline unwrap failed'
+    assert helpers.dont_wrap_dateline([[-175,0],[175,0],[175,10],[-175,10],[-175,0]]) == [[185,0],[175,0],[175,10],[185,10],[185,0]], 'unwrap cw'
+    assert helpers.dont_wrap_dateline([[5,0],[-5,0],[-5,5],[5,5],[5,0]]) == [[5,0],[-5,0],[-5,5],[5,5],[5,0]], 'unwrap should not affect meridian crossing'
+
+

From d55e2f82600d8e3b31fb0f5463d41e2f1dfae8e3 Mon Sep 17 00:00:00 2001
From: bkatiemills
Date: Wed, 24 Apr 2024 18:35:34 -0400
Subject: [PATCH 11/12] rc and tests

---
 tests/tests.py | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/tests/tests.py b/tests/tests.py
index d206e85..0f3c836 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -36,19 +36,19 @@ def test_argofetch_404(apiroot, apikey):
     profile = helpers.argofetch('/argo', options={'startDate':'2072-02-01T00:00:00Z', 'endDate':'2072-02-02T00:00:00Z'}, apikey=apikey, apiroot=apiroot)[0]
     assert profile == []

-# def test_bulky_fetch(apiroot, apikey):
-#     '''
-#     make sure argofetch handles rapid requests for the whole globe reasonably
-#     '''
-
-#     result = []
-#     delay = 0
-#     for i in range(3):
-#         request = helpers.argofetch('/grids/rg09', options={'startDate': '2004-01-01T00:00:00Z', 'endDate': '2004-02-01T00:00:00Z', 'data':'rg09_temperature'}, apikey='regular', apiroot=apiroot)
-#         result += request[0]
-#         delay += request[1]
-#     assert len(result) == 60, 'should have found 20x3 grid docs'
-#     assert delay > 0, 'should have experienced at least some rate limiter delay'
+def test_bulky_fetch(apiroot, apikey):
+    '''
+    make sure argofetch handles rapid requests for the whole globe reasonably
+    '''
+
+    result = []
+    delay = 0
+    for i in range(3):
+        request = helpers.argofetch('/grids/rg09', options={'startDate': '2004-01-01T00:00:00Z', 'endDate': '2004-02-01T00:00:00Z', 'data':'rg09_temperature'}, apikey='regular', apiroot=apiroot)
+        result += request[0]
+        delay += request[1]
+    assert len(result) == 60, 'should have found 20x3 grid docs'
+    assert delay > 0, 'should have experienced at least some rate limiter delay'
 def test_polygon(apiroot, apikey):
     '''

From c13ce33b7942cf73fc695c4c40f0022ab90f2ac2 Mon Sep 17 00:00:00 2001
From: bkatiemills
Date: Wed, 24 Apr 2024 19:50:32 -0400
Subject: [PATCH 12/12] build artefacts for 0.0.23

---
 dist/argovisHelpers-0.0.23-py3-none-any.whl | Bin 0 -> 12124 bytes
 dist/argovisHelpers-0.0.23.tar.gz | Bin 0 -> 14796 bytes
 pyproject.toml | 2 +-
 3 files changed, 1 insertion(+), 1 deletion(-)
 create mode 100644 dist/argovisHelpers-0.0.23-py3-none-any.whl
 create mode 100644 dist/argovisHelpers-0.0.23.tar.gz

diff --git a/dist/argovisHelpers-0.0.23-py3-none-any.whl b/dist/argovisHelpers-0.0.23-py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..3211e6574947998b9cb2eb7e9f68fd4844e7a391
GIT binary patch
literal 12124
[base85-encoded binary payload omitted; the source is truncated partway through this blob, before the tar.gz payload and pyproject.toml hunk listed in the stats above]
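Editor's usage sketch (illustrative only, not part of the patch series; the region, API key, and import path below are assumptions based on the package layout): at the end of this series, query() picks whichever of time-slicing or 5x5-degree space-slicing implies fewer requests, unwraps polygons that cross the dateline, and respects mongo-style winding.

    from argovisHelpers import helpers

    # a 20x20 degree, CCW-wound region crossing the dateline; query() unwraps the
    # longitudes via dont_wrap_dateline, then slices the request into 5x5 degree
    # cells or into slice_timesteps' time bins, whichever means fewer requests
    region = [[175,-10],[-165,-10],[-165,10],[175,10],[175,-10]]
    profiles = helpers.query('argo',
                             options={'startDate': '2020-01-01T00:00:00Z',
                                      'endDate': '2021-01-01T00:00:00Z',
                                      'polygon': region},
                             apikey='<your-api-key>',
                             apiroot='https://argovis-api.colorado.edu/')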
z3^Boytz`8IL#jcykt0^TZItWn*ms+niMogJF5ip(K_DF3o=##xMo+!HS1oUu2ZgtpbJQef;lBe8ne>C^Mn*EF&X2=>p+ zymcE^?CotmDsPaBo_MALE$J-o+rTSaFv?${htd2ikkKqU|AwkIPv2?#r2l2g_~5|8 zIo))0ylQ+Vf2sxgn(1<{QJgRVH1Ezd979OZ>sHH%rU}!$nrfDz!I;>}=BF`kc*))?;ZN~rX5i#YijCRnx9<}9#5v*7V}=qnm4_d0W9@vlWWj^--#W$^%k z6c^Y<(lAtUzL`3_Ri{pag~?T_dH?6e?`DDHNghIs=_F8?YtfJ9$)a@l3*MN}7N!Kt zNr+-tjsq?D$2O7ex6sT59b$bs%vw{Oc24wa(#uSiS0d5hQ^+eH{Gied)TSK<$*s%n zd$LmIq=3H&3-o_WV)xU?A;wHUg-z1*)&k%@75`(pu7nesfqHp<9Xwb1D@C7YQQrX} z-{m_S_LkSeUDrOwz;`Uo8vob7Qn$XXnd-90?xj=8EQ6#K-SbW`gMb(GaOOzBE~0BM zCG}VeQ4?bt(LaFgq83-xCQiQ986OT+fa-yW90nIv^w)c0n~VzG($KGk(cTY^9Fs5R z5#WDP2P7T^3fzB;jsktQ$6|qu#mYIrXZgCK>VREM)*tM{fd3KT;Yfh{$P?f(!0qLC z0x)hxffWd(((Im*gtyI1ZCNgDE*ix6|6K!|{n8HuCU8CghkcD~KezpgtUm!=FX=*g z5)m~2lMGWq_bN0$RD%@Z;l%d#T3Z(<&HWa+>sOT2utxIg*(Qpgb-Qe7uTh04 ztfJ#ciuJT&gGOOx zp5#FIPhg+o5B&@9efE1YTp1pYTPAuzZ4Xkh1;g@d#a&Lrv1GDwaNENdW zGT8?Uxb8FK50vvRN==ub@e>d~i2YTwd0ufOa}Rc-!LKLDUTPz)s|$m$ue)5!n>BZ< zZEe{PVKp+lvG{WKw1@DJ|C^=5BOiw1N4U+EiGSEA!*;-q_NGx{Cx(OE$Uda`GFzZ8{j?VJ%KA>juk3e{V#o6}7V^sl91Y^^~ zdyj;|Lk{UhR^GAp;``<<{EPK{H@QLqS529DB^7~8eY1o)3lE$uRc}2AqDR($u963M zyIFOnjdNTf_lU@%hwsU@8Z?2Y!4wgm0!g_GW@~ps5;i1QVFGtFAP&(`MS!|x(coxn$CpciPa ziy&Xz_OpW8HaKK4D{h#P&CJb;DMOWdap@02PpPe&RA&)zhqh{z`yAOyAV(fxRmC79 zfLZ438?UYcMkB?8s|^I`k#d$_2k#&Pe@TdTV%F7VC}t4=4A{fv)5akEm4p&*fgG$U zVOMZkX5Pn9-*R~f$i+IulAJxfbNim$Klbyt3SXN92zlll$)|cCh~Mj5Lx;pJqt~Gx z7osl-R0Ub*^c6UCW)M*DY2FlSlO%jEaUNoh`^s68U6orL=VgUk9Ct=OvWCB1%p4>R zrtD4cBE#ocg%-z`99-5q*(Rm%Q3&6_bm zMF110iE*Tz7%`edg_6A_=}Kr(fX|A_!~g1J%+LPvK83#<5RVZ{gCy~07=1vCJdkt1 z#>-N0#=7mRkVnb4|8ze}U)PjLFjG|c10}lvK9^)nm{CEoVvnGHfH91b;yBhSHbKhP z;uI=Em!ftcGeIr&lyzWb;LqU5AZ!L!=^olfpP<_;D4-W>VLN_*@HOP(FT{7xLX;sD2`G&lYTWSRYHsZ7GvvfuE^}VC z^4R#it8Y_W?c;I#O%UAUg659fd@&W6W(QV?-=BN!_ah9gG;ZHeq5(@LP8_2c%Be$& z2~#C@KGt@hLdZv@0MRmCRASHML=Hy)eLUvOs6aK}&(l_r`0h!wHuufQc_mQWd4^DN zftGP~ZXf=WYq)745i*Q8GJ0Nrshi>{@tXI!Wyx9%D}MOje^TQrRB0z7-J(tpZZe04 z+GRv)H34Kg3dO+nA`03c*fKP(HWcI7;Ca|1J#b4zey=JQH}s1GHQAEJ<1IeY&e*?H z!mVuclhT}nYD&xNIa2jde$j=h^qZFRJWI4x<@P#&(BzKN@QNgMN{P21t^*!)ll%Q; zBqE94chQS0txZgi`Mp96omh*f3n{xl}kI}cB?M=-Itc* z{`zPhw=+H?IY0rmuhIr?f7>ikri8cutP%K?+7)ywrl1_&#~nMWD&(9pEvhoJvsmKb z;o`r}5HGS?X8p8K$-qe=Kh@2P_?o{K^+xoiV?VE~7^&iqn|NOnE&5^;jqM0m9F-dh z$SmtEA1hx|l%jX&bdBl;$0zvZwCR$w_d~1U39sMO9`2)GR6aTt?lZ{d2tFlD7d4gJU^7v z{;waSQO3rYV19*CTvqz6p_6Y$^4}#>yf-uMYjl%TJDHaYT9Vr$G&!Xv(bN)RA#Fr4 zCc+$>M|(V!eVRPQqr;yswDu8}YhQC+pB# z`qo~#s7W@2`FA5;`YBMNYPx*kIcweu`Mmz+=%Yd}w+@pr8kI!{Y)nJvJ@$l*y2NOY zM-BZ{c4hz48aX9d$yQ9;rbsTT(Ik=4L$we0AgDRvPp;X_@R4a#(0(SRjUt&-g68+n zhTwrUAM_~*A0&|`Ve-oWRy5@-T&p-Z>twqaGuXZfw3`)IBa)6Hz~8P-046e~(n~*q zD!%~LCLm>HH(D0%RtUc~B~-C6V}bguvdgeiVC!DdBT*S|0i1#vY?+xNzo01T>!Dp_ z4({T&+Qh!p(&zZY_~=N=lgPy}IQIdu7%e?Kfc=TV%(Q(vXrSIGa4GkTY64`>YvhjL zl6nbE9 z=w2igj9jStINAvo gV?NnsF9)hyI-;xpAM6s@0EkR#j~xqQ1q$;20Tylj9smFU literal 0 HcmV?d00001 diff --git a/pyproject.toml b/pyproject.toml index 64a15c2..887845c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "argovisHelpers" -version = "0.0.23-rc2" +version = "0.0.23" description = "Helper functions to consume and parse information from University of Colorado's Argovis API." readme = "README.md" authors = [{name = "Katie Mills" }]