-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathprocess3dep.py
156 lines (134 loc) · 6.23 KB
/
process3dep.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
"""
process3dep.py
Purpose: Query the USGS Sciencebase catalog via their Python API to retrieve a specified set of records, and output those records to GeoBlacklight json files.
Author: Jim Lacy, University of Wisconsin-Madison
Thanks to Dell Long and Drew Ignizio from the US Geological Survey Fort Collins Science Center for providing code samples and guidance!
"""
from sciencebasepy import SbSession
import json
from datetime import datetime
import uuid
####### Define constants used for all items
# pick one of the following base IDs
base_item_id = "543e6b86e4b0fd76af69cf4c" # 3DEP 1-Meter collection parent item in ScienceBase
outputfolder = "r:/scripts/collections/USGS_3dep/gbl/" # be sure to include trailing slash
extent_id = 20 # Sciencebase ID for Wisconsin. No idea how they come up with these state IDs. Not FIPS.
fields = "identifiers,title,webLinks,id,body,dates,spatial" # fields to retrieve in query
maxRecords = 100 # Max number of records to query at a time. There is a hard limit of 1000 enforced by Sciencebase.
dct_isPartOf_sm = ["USGS Digital Elevation Data"] # are these data part of a collection?
dc_rights_s = "Public"
dc_format_s = "DEM"
dc_type_s = "Dataset"
layer_geom_type_s = "Raster"
dct_provenance_s = "U.S. Geological Survey" # could be retrieved from json response, but this works
dc_creator_sm = ["U.S. Geological Survey"]
dc_publisher_sm = ["U.S. Geological Survey"]
dc_language_s = "English"
dc_subject_sm = ["Elevation"]
dct_spatial_sm = [""] # We don't use spatial keywords at UW
# fields unique to University of Wisconsin
uw_deprioritize_item_b = True # we want topo records to appear lower in search results so they don't overwhelm other items
uw_supplemental_s = ""
uw_notice_s = ""
# end University of Wisconsin
###### End of constants
# Begin processing
sb = SbSession() # create a new session (anonymous; public items need no login)
print("Processing...")

# Send query to Sciencebase and store the paged json response in "items"
# See https://github.com/usgs/sciencebasepy/blob/master/Searching%20ScienceBase%20with%20ScienceBasePy.ipynb for syntax guidance
items = sb.find_items({'ancestors': base_item_id,
                       'filter': 'extentQuery={"extent":' + str(extent_id) + '}',
                       'fields': fields,
                       'max': maxRecords})
#print("Found %s items" % items['total'])

while items and 'items' in items:
    for item in items['items']:
        # Reset all per-item values up front.  Previously these were only
        # assigned inside try/if branches, so when an item was missing a
        # link, identifier, or date, the value from the PREVIOUS item
        # silently leaked into the current record (or the very first item
        # raised NameError).
        downloadUrl = ''
        thumbnail_path_ss = ''
        onlinelink = ''
        metadataUrl = ''
        year = 9999      # sentinel meaning "publication year unknown"
        solr_geom = ''

        # Harvest the URLs we care about from the item's web links.
        # Geoblacklight currently only supports one download per item.
        # future: include GeoTIFFs when they are available
        for i_link in item.get('webLinks', []):
            link_type = i_link.get('type')
            if link_type == 'download':
                downloadUrl = i_link.get('uri', '')
            elif link_type == 'browseImage':
                thumbnail_path_ss = i_link.get('uri', '')
            elif link_type == 'Online Link':
                onlinelink = i_link.get('uri', '')

        # The FGDC metadata URL is stored as an identifier with scheme 'processingUrl'.
        for metadata_link in item.get('identifiers', []):
            if metadata_link.get('scheme') == 'processingUrl':
                metadataUrl = metadata_link.get('key', '')

        # Build the references object with json.dumps so any quotes or
        # backslashes in the URLs are properly escaped (the old %-format
        # string produced invalid JSON in that case).  Compact separators
        # match the original output format.
        dct_references_s = json.dumps(
            {"http://schema.org/url": onlinelink,
             "http://schema.org/downloadUrl": downloadUrl,
             "http://www.opengis.net/cat/csw/csdgm": metadataUrl},
            separators=(',', ':'))

        # Derive the publication year from the 'Start' date.
        for date in item.get('dates', []):
            if date.get('type') != 'Start':
                continue
            pubdate = date.get('dateString', '')
            try:
                if len(pubdate) == 4:
                    # assume field only contains a year if the date is four characters
                    year = int(pubdate)
                else:
                    year = datetime.strptime(pubdate, '%Y-%m-%d').year
            except ValueError:
                year = 9999  # unparseable date string

        # Build the Solr ENVELOPE from the item's bounding box.
        for _bbox_name, coordinates in item.get('spatial', {}).items():
            west = coordinates['minX']
            east = coordinates['maxX']
            north = coordinates['maxY']
            south = coordinates['minY']
            # ENVELOPE order is west, east, north, south
            solr_geom = "ENVELOPE(%s,%s,%s,%s)" % (west, east, north, south)

        ###### Construct json object for our GBL record
        uniqueID = str(uuid.uuid4())
        data = {}
        data["geoblacklight_version"] = "1.0"
        data["dc_identifier_s"] = uniqueID
        data["dc_title_s"] = item['title']
        data["dc_description_s"] = item['body']
        data["dc_rights_s"] = dc_rights_s
        data["dct_provenance_s"] = dct_provenance_s
        data["dc_format_s"] = dc_format_s
        data["dc_language_s"] = dc_language_s
        data["layer_slug_s"] = uniqueID
        data["layer_geom_type_s"] = layer_geom_type_s
        data["dct_isPartOf_sm"] = dct_isPartOf_sm
        data["dc_creator_sm"] = dc_creator_sm
        data["dc_publisher_sm"] = dc_publisher_sm
        data["dc_type_s"] = dc_type_s
        data["dc_subject_sm"] = dc_subject_sm
        data["dct_spatial_sm"] = dct_spatial_sm
        data["dct_temporal_sm"] = [str(year)]
        data["solr_geom"] = solr_geom
        data["solr_year_i"] = year
        data["dct_issued_s"] = str(year)
        data["dct_references_s"] = dct_references_s
        # fields unique to University of Wisconsin
        data["thumbnail_path_ss"] = thumbnail_path_ss
        data["uw_supplemental_s"] = uw_supplemental_s
        data["uw_notice_s"] = uw_notice_s
        data["uw_deprioritize_item_b"] = uw_deprioritize_item_b
        # end of UW fields

        outfile = outputfolder + "%s.json" % (uniqueID) # files named by UUID
        # Context manager guarantees the file is closed even on error.
        with open(outfile, 'w') as jsonfile:
            json.dump(data, jsonfile, indent=4)

    items = sb.next(items) #grab the next set of results, and continue