From 725f5d010c99b7ad752212482d8a699c9aeaff22 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Thu, 26 Sep 2024 18:55:43 +1000 Subject: [PATCH 01/60] Setup route for dev --- src/criteria_assessment/criteria.jl | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index f7416c8..1353716 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -148,6 +148,15 @@ function setup_region_routes(config, auth) return file(mask_path; headers=COG_HEADERS) end + @get auth("/suitability/assess/{reg}/{rtype}") function (req::Request, reg::String, rtype::String) + # somewhere:8000/suitability/assess/region-name/reeftype?criteria_names=Depth,Slope&lb=-9.0,0.0&ub=-2.0,40 + # 127.0.0.1:8000/suitability/assess/Cairns-Cooktown/slopes?Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 + + qp = queryparams(req) + @debug "In region assessment route" + return assess_region(reg_assess_data, reg, qp, rtype, config) + end + @get auth("/bounds/{reg}") function (req::Request, reg::String) rst_stack = reg_assess_data[reg].stack From 81db0cc74d4ff64c371c4845bdf621e4af276d9f Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Thu, 26 Sep 2024 18:56:14 +1000 Subject: [PATCH 02/60] Fix usage of `contains` Some strange conflict I wasn't seeing before with the various `contains` methods --- src/criteria_assessment/query_parser.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/criteria_assessment/query_parser.jl b/src/criteria_assessment/query_parser.jl index a996302..6f52d4c 100644 --- a/src/criteria_assessment/query_parser.jl +++ b/src/criteria_assessment/query_parser.jl @@ -39,6 +39,8 @@ Remove rugosity layer from consideration if region is not Townsville. Rugosity data currently only exists for the Townsville region. """ function remove_rugosity(reg, criteria_names, lbs, ubs) + + # Need to specify `Base` as geospatial packages also define `contains()`. if !Base.contains(reg, "Townsville") # Remove rugosity layer from consideration as it doesn't exist for regions # outside of Townsville. From 1fbda693ac1a8660fabe1f1df2653306f9b5397f Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Thu, 26 Sep 2024 18:59:18 +1000 Subject: [PATCH 03/60] Initial implementation --- src/criteria_assessment/criteria.jl | 1 + .../site_identification.jl | 151 ++++++++++++++++++ 2 files changed, 152 insertions(+) create mode 100644 src/criteria_assessment/site_identification.jl diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 1353716..5353f02 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -8,6 +8,7 @@ using Oxygen: json include("query_parser.jl") include("tiles.jl") +include("site_identification.jl") const REEF_TYPE = [:slopes, :flats] diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl new file mode 100644 index 0000000..0bee420 --- /dev/null +++ b/src/criteria_assessment/site_identification.jl @@ -0,0 +1,151 @@ +"""Methods for identifying potential deployment locations.""" + +using FLoops, ThreadsX + +""" + proportion_suitable(subsection::BitMatrix)::Matrix{Int16} + +Calculate the the proportion of the subsection that is suitable for deployments. +Subsection is the surrounding a rough hectare area centred on each cell of a raster marked +as being suitable according to user-selected criteria. 
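+
+# Example
+
+A minimal usage sketch (illustrative only; assumes `mask` is a BitMatrix of pixels that
+met the user-selected criteria):
+
+    scores = proportion_suitable(mask)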
+""" +function proportion_suitable(x::BitMatrix)::Matrix{Int16} + x_len, y_len = size(x) + x′ = zeros(Int16, size(x)) + + @floop for row_col in ThreadsX.findall(x) + (row, col) = Tuple(row_col) + x_left = max(col - 4, 1) + x_right = min(col + 4, x_len) + + y_top = max(row - 4, 1) + y_bottom = min(row + 4, y_len) + + x′[row, col] = Int16(sum(@views x[y_top:y_bottom, x_left:x_right])) + end + + return x′ +end + +""" + filter_distances( + target_rast::Raster, + gdf::DataFrame, + dist_nm + )::Raster + +Exclude pixels in target_rast that are beyond `dist_nm` (nautical miles) from a geometry +in `gdf`. Target_rast and gdf should be in the same CRS (EPSG:7844 / GDA2020 for GBR-reef-guidance-assessment). + +# Arguments +- `target_rast` : Raster of suitable pixels (Bool) to filter pixels from. +- `gdf` : DataFrame with `geometry` column that contains vector objects of interest. +- `dist_nm` : Filtering distance from geometry object in nautical miles. + +# Returns +- `tmp_areas` : Raster of filtered pixels containing only pixels within target distance +from a geometry centroid. +""" +function filter_distances(target_rast::Raster, gdf::DataFrame, dist; units::String="NM")::Raster + tmp_areas = copy(target_rast) + + # First dimension is the rows (latitude) + # Second dimension is the cols (longitude) + raster_lat = Vector{Float64}(tmp_areas.dims[1].val) + raster_lon = Vector{Float64}(tmp_areas.dims[2].val) + + @floop for row_col in findall(tmp_areas) + (lat_ind, lon_ind) = Tuple(row_col) + point = AG.createpoint() + + lon = raster_lon[lon_ind] + lat = raster_lat[lat_ind] + AG.addpoint!(point, lon, lat) + + pixel_dists = AG.distance.([point], port_locs.geometry) + geom_point = gdf[argmin(pixel_dists), :geometry] + geom_point = (AG.getx(geom_point, 0), AG.gety(geom_point, 0)) + + dist_nearest = Distances.haversine(geom_point, (lon, lat)) + + # Convert from meters to nautical miles + if units == "NM" + dist_nearest = dist_nearest / 1852 + end + + # Convert from meters to kilometers + if units == "km" + dist_nearest = dist_nearest / 1000 + end + + tmp_areas.data[lon_ind, lat_ind] = dist_nearest < dist ? 1 : 0 + end + + return tmp_areas +end + +""" +# TODO: Better name, and address duplication in "/assess/{reg}/{rtype}" +""" +function _temp_assess_region(reg_assess_data, reg, qp, rtype, config) + criteria_names, lbs, ubs = remove_rugosity(reg, parse_criteria_query(qp)...) 
+ + # Otherwise, create the file + @debug "$(now()) : Assessing criteria" + assess = reg_assess_data[reg] + mask_data = make_threshold_mask( + assess, + Symbol(rtype), + CriteriaBounds.(criteria_names, lbs, ubs) + ) + + return mask_data +end + +function assess_region(reg_assess_data, reg, qp, rtype, config) + @debug "Assessing region's suitability score" + + file_id = string(hash(qp)) + assessed_tmp_path = _cache_location(config) + assessed_path = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") + + if isfile(assessed_path) + return file(assessed_path) + end + + # Make mask of suitable locations + mask_data = _temp_assess_region(reg_assess_data, reg, qp, rtype, config) + + # Assess remaining pixels for their suitability + @debug "Calculating proportional suitability score" + suitability_scores = proportion_suitable(mask_data.data) + + @debug "$(now()) : Running on thread $(threadid())" + @debug "Writing to $(assessed_path)" + Rasters.write( + assessed_path, + rebuild(mask_data, suitability_scores); + ext=".tiff", + source="gdal", + driver="COG", + options=Dict{String,String}( + "COMPRESS"=>"DEFLATE", + "SPARSE_OK"=>"TRUE", + "OVERVIEW_COUNT"=>"5", + "BLOCKSIZE"=>"256", + "NUM_THREADS"=>n_gdal_threads(config) + ), + force=true + ) + + return file(assessed_path) + + + # Apply rotation-based polygon search + # assess_reef_site() + + # Filter overlapping polygons. + + # Return geojson of suitable deployment "plots" + # output_geojson() +end From a0937a261ec758f1d79c30f9220606bcc843e7f8 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Fri, 27 Sep 2024 12:33:22 +1000 Subject: [PATCH 04/60] Add function to filter lookup table based on user criteria --- src/criteria_assessment/query_thresholds.jl | 36 +++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/src/criteria_assessment/query_thresholds.jl b/src/criteria_assessment/query_thresholds.jl index 2fffaf2..ca36ca6 100644 --- a/src/criteria_assessment/query_thresholds.jl +++ b/src/criteria_assessment/query_thresholds.jl @@ -192,6 +192,42 @@ function apply_criteria_thresholds( return res end +""" + apply_criteria_lookup( + reg_criteria, + rtype::Symbol, + ruleset::Vector{CriteriaBounds{Function}} + ) + +Filter valid_lookup table by applying user defined `ruleset` criteria. + +# Arguments +- `reg_criteria` : RegionalCriteria containing valid_rtype lookup table for filtering. +- `rtype` : Flats or slope category for assessment. +- `ruleset` : User defined ruleset for upper and lower bounds. + +# Returns +Filtered lookup table containing points that meet all criteria in `ruleset`. 
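+
+# Example
+
+A sketch of intended use (illustrative; `criteria_names`, `lbs` and `ubs` as returned
+by `parse_criteria_query()`):
+
+    ruleset = CriteriaBounds.(criteria_names, lbs, ubs)
+    lookup = apply_criteria_lookup(reg_criteria, :slopes, ruleset)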
+""" +function apply_criteria_lookup( + reg_criteria::RegionalCriteria, + rtype::Symbol, + ruleset::Vector{CriteriaBounds{Function}} +) + lookup = getfield(reg_criteria, Symbol(:valid_, rtype)) + lookup.all_crit .= 1 + + for threshold in ruleset + lookup.all_crit = lookup.all_crit .& threshold.rule(lookup.name) + end + + lookup = lookup[lookup.all_crit, :] + lookup.lon = first.(GI.coordinates.(lookup.geometry)) + lookup.lat = last.(GI.coordinates.(lookup.geometry)) + + return lookup +end + """ make_threshold_mask(reg::String, rtype::Symbol, crit_map) From a47b50ae70246fc115dc5ae17c0d70ff8abfa1fc Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Fri, 27 Sep 2024 12:35:59 +1000 Subject: [PATCH 05/60] Add proportion_suitable to use custom window and site assessment code --- .../site_identification.jl | 107 +++++++++++++----- 1 file changed, 79 insertions(+), 28 deletions(-) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 0bee420..ec8042b 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -3,23 +3,26 @@ using FLoops, ThreadsX """ - proportion_suitable(subsection::BitMatrix)::Matrix{Int16} + proportion_suitable(subsection::BitMatrix, window::Tuple=(-4,5))::Matrix{Int16} Calculate the the proportion of the subsection that is suitable for deployments. Subsection is the surrounding a rough hectare area centred on each cell of a raster marked as being suitable according to user-selected criteria. + +# Arguments +- `x` : Matrix of boolean pixels after filtering with user criteria. +- `window` : Window size to assess. Default window (-4,5) assesses a square hectare around each target pixel where the resolution of pixels is 10m. """ -function proportion_suitable(x::BitMatrix)::Matrix{Int16} - x_len, y_len = size(x) +function proportion_suitable(x::BitMatrix, window::Tuple=(-4,5))::Matrix{Int16} x′ = zeros(Int16, size(x)) @floop for row_col in ThreadsX.findall(x) (row, col) = Tuple(row_col) - x_left = max(col - 4, 1) - x_right = min(col + 4, x_len) + x_left = max(col + window[1], 1) + x_right = min(col + window[2], size(x, 1)) - y_top = max(row - 4, 1) - y_bottom = min(row + 4, y_len) + y_top = max(row + window[1], 1) + y_bottom = min(row + window[2], size(x, 2)) x′[row, col] = Int16(sum(@views x[y_top:y_bottom, x_left:x_right])) end @@ -102,15 +105,34 @@ function _temp_assess_region(reg_assess_data, reg, qp, rtype, config) return mask_data end +function _temp_filter_lookup(reg_assess_data, reg, qp, rtype, config) + criteria_names, lbs, ubs = remove_rugosity(reg, parse_criteria_query(qp)...) 
+ + # Otherwise, create the file + @debug "$(now()) : Assessing criteria table" + assess = reg_assess_data[reg] + crit_lookup = apply_criteria_lookup( + assess, + Symbol(rtype), + CriteriaBounds.(criteria_names, lbs, ubs) + ) + + return crit_lookup +end + function assess_region(reg_assess_data, reg, qp, rtype, config) @debug "Assessing region's suitability score" file_id = string(hash(qp)) assessed_tmp_path = _cache_location(config) - assessed_path = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") + assessed_path_tif = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") - if isfile(assessed_path) - return file(assessed_path) + file_id = string(hash(qp)) + assessed_tmp_path = _cache_location(config) + assessed_path_geojson = joinpath(assessed_tmp_path, file_id*"_potential_sites.geojson") + + if isfile(assessed_path_geojson) + return file(assessed_path_geojson) end # Make mask of suitable locations @@ -121,25 +143,53 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) suitability_scores = proportion_suitable(mask_data.data) @debug "$(now()) : Running on thread $(threadid())" - @debug "Writing to $(assessed_path)" - Rasters.write( - assessed_path, - rebuild(mask_data, suitability_scores); - ext=".tiff", - source="gdal", - driver="COG", - options=Dict{String,String}( - "COMPRESS"=>"DEFLATE", - "SPARSE_OK"=>"TRUE", - "OVERVIEW_COUNT"=>"5", - "BLOCKSIZE"=>"256", - "NUM_THREADS"=>n_gdal_threads(config) - ), - force=true + # @debug "Writing to $(assessed_path)" + # Rasters.write( + # assessed_path, + # rebuild(mask_data, suitability_scores); + # ext=".tiff", + # source="gdal", + # driver="COG", + # options=Dict{String,String}( + # "COMPRESS"=>"DEFLATE", + # "SPARSE_OK"=>"TRUE", + # "OVERVIEW_COUNT"=>"5", + # "BLOCKSIZE"=>"256", + # "NUM_THREADS"=>n_gdal_threads(config) + # ), + # force=true + # ) + + #return file(assessed_path) + + # Need dataframe of valid_lookup pixels + crit_pixels = _temp_filter_lookup(reg_assess_data, reg, qp, rtype, config) + + # Need rebuild(mask_data, suitability_scores) as input to identify_potential_sites_edges + scan_locs = rebuild(mask_data, suitability_scores) + res = abs(step(dims(scan_locs, X))) + target_crs = convert(EPSG, crs(scan_locs)) + scan_locs = identify_search_pixels(scan_locs, x -> x .> suitability_threshold) + + # Need reef outlines + gdf = REGIONAL_DATA["reef_outlines"] + reef_outlines = buffer_simplify(gdf) + reef_outlines = polygon_to_lines.(reef_outlines) + + x_dist, y_dist + + initial_polygons = identify_potential_sites_edges( + crit_pixels, + scan_locs, + res, + gdf, + x_dist, + y_dist, + target_crs, + reef_outlines, + REGIONAL_DATA["region_long_names"][reg] ) - - return file(assessed_path) - + output_geojson(filter_sites(test_results), assessed_path_geojson) # Apply rotation-based polygon search # assess_reef_site() @@ -148,4 +198,5 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) # Return geojson of suitable deployment "plots" # output_geojson() + return file(assessed_path_geojson) end From a4d722e4cc8ef128db486182a0311f4d7b06682e Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Fri, 27 Sep 2024 12:36:36 +1000 Subject: [PATCH 06/60] Add common_functions for outputting and preprocessing inputs --- src/site_assessment/common_functions.jl | 51 +++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index 12c2089..e9ac395 100644 --- a/src/site_assessment/common_functions.jl +++ 
b/src/site_assessment/common_functions.jl @@ -287,6 +287,22 @@ function output_geojson( return nothing end +""" + output_geojson(df::DataFrame, destination_path::String)::Nothing + +Writes out GeoJSON file to a target directory. Output file will be located at location: +`destination_path`. + +# Arguments +- `df` : DataFrame intended for writing to geojson file. +- `destination_path` : File path to write geojson file to. +""" +function output_geojson(df::DataFrame, destination_path::String)::Nothing + GDF.write(destination_path, df; crs=GI.crs(first(df.geometry))) + + return nothing +end + """ identify_search_pixels(input_raster::Raster, criteria_function)::DataFrame @@ -312,3 +328,38 @@ function identify_search_pixels(input_raster::Raster, criteria_function)::DataFr return DataFrame(; indices=indices, lon=indices_lon, lat=indices_lat) end + +""" + buffer_simplify( + gdf::DataFrame; + number_verts::Int64=30, + buffer_dist_m::Int64=40 + )::Vector{GeoInterface.Wrappers.WrapperGeometry} + +Simplify and buffer the polygons in a GeoDataFrame to account for uncertainty and inaccuracies +in the reef outlines. + +# Arguments +- `gdf` : GeoDataFrame containing the reef polygons in `gdf.geometry`. +- `number_verts` : Number of vertices to simplify the reefs to. Default is 30 vertices. +- `buffer_dist_m` : Buffering distance in meters to account for innacuracies in reef outlines. Default distance is 40m. + +# Returns +Vector containing buffered and simplified reef polygons +""" +function buffer_simplify( + gdf::DataFrame; + number_verts::Int64=30, + buffer_dist_m::Int64=40 +)::Vector{GeoInterface.Wrappers.WrapperGeometry} + reef_buffer = GO.simplify(gdf.geometry; number=number_verts) + for row in eachrow(reef_buffer) + lat = GO.centroid(row)[2] + row = GO.simplify( + GO.buffer(row, meters_to_degrees(buffer_dist_m, lat)); + number=number_verts + ) + end + + return reef_buffer +end From 27fbc77b59dfd629d5b9b4952eb22989c9a2fa0e Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Fri, 27 Sep 2024 16:08:23 +1000 Subject: [PATCH 07/60] Fixes to function definitions where wrong types defined --- src/criteria_assessment/query_thresholds.jl | 6 +++--- src/site_assessment/common_functions.jl | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/criteria_assessment/query_thresholds.jl b/src/criteria_assessment/query_thresholds.jl index ca36ca6..ee6e588 100644 --- a/src/criteria_assessment/query_thresholds.jl +++ b/src/criteria_assessment/query_thresholds.jl @@ -212,16 +212,16 @@ Filtered lookup table containing points that meet all criteria in `ruleset`. 
function apply_criteria_lookup( reg_criteria::RegionalCriteria, rtype::Symbol, - ruleset::Vector{CriteriaBounds{Function}} + ruleset ) lookup = getfield(reg_criteria, Symbol(:valid_, rtype)) lookup.all_crit .= 1 for threshold in ruleset - lookup.all_crit = lookup.all_crit .& threshold.rule(lookup.name) + lookup.all_crit = lookup.all_crit .& threshold.rule(lookup[!, threshold.name]) end - lookup = lookup[lookup.all_crit, :] + lookup = lookup[BitVector(lookup.all_crit), :] lookup.lon = first.(GI.coordinates.(lookup.geometry)) lookup.lat = last.(GI.coordinates.(lookup.geometry)) diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index e9ac395..1f493c6 100644 --- a/src/site_assessment/common_functions.jl +++ b/src/site_assessment/common_functions.jl @@ -351,7 +351,7 @@ function buffer_simplify( gdf::DataFrame; number_verts::Int64=30, buffer_dist_m::Int64=40 -)::Vector{GeoInterface.Wrappers.WrapperGeometry} +)::Vector{GIWrap.WrapperGeometry} reef_buffer = GO.simplify(gdf.geometry; number=number_verts) for row in eachrow(reef_buffer) lat = GO.centroid(row)[2] From 2082bd98aa821ed0bf6ff180981bb84a12f8906f Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Fri, 27 Sep 2024 16:08:51 +1000 Subject: [PATCH 08/60] Add a new request type that performs site suitability assessment --- src/criteria_assessment/criteria.jl | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 5353f02..7be7703 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -151,13 +151,37 @@ function setup_region_routes(config, auth) @get auth("/suitability/assess/{reg}/{rtype}") function (req::Request, reg::String, rtype::String) # somewhere:8000/suitability/assess/region-name/reeftype?criteria_names=Depth,Slope&lb=-9.0,0.0&ub=-2.0,40 - # 127.0.0.1:8000/suitability/assess/Cairns-Cooktown/slopes?Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 + # 127.0.0.1:8000/suitability/assess/Cairns-Cooktown/slopes?Depth=-4.0:-2.0&Slope=0.0:40.0&Rugosity=0.0:6.0 qp = queryparams(req) @debug "In region assessment route" return assess_region(reg_assess_data, reg, qp, rtype, config) end + @get auth("/suitability/site-suitability/{reg}/{rtype}") function (req::Request, reg::String, rtype::String) + # 127.0.0.1:8000/suitability/site-suitability/Cairns-Cooktown/slopes?Depth=-4.0:-2.0&Slope=0.0:40.0&Rugosity=0.0:6.0&SuitabilityThreshold=95&xdist=450&ydist=50 + + qp = queryparams(req) + criteria_names = [ + "Depth", + "Benthic", + "Geomorphic", + "Slope", + "Turbidity", + "WavesHs", + "WavesTp", + "Rugosity", + "PortDistSlopes", + "PortDistFlats" + ] + criteria_qp = filter(k -> string(k.first) ∈ criteria_names, qp) + + assessment_qp = filter(k -> string(k.first) ∈ ["SuitabilityThreshold", "xdist", "ydist"], qp) + + @debug "In site assessment" + return site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config) + end + @get auth("/bounds/{reg}") function (req::Request, reg::String) rst_stack = reg_assess_data[reg].stack From 62a5aa5f4d26a8203ed40fde93e09f48fe3bfc9f Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Fri, 27 Sep 2024 16:09:59 +1000 Subject: [PATCH 09/60] Separate site assessment function from region assessment raster --- .../site_identification.jl | 95 ++++++++++++------- 1 file changed, 59 insertions(+), 36 deletions(-) diff --git a/src/criteria_assessment/site_identification.jl 
b/src/criteria_assessment/site_identification.jl index ec8042b..ed03882 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -127,12 +127,8 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) assessed_tmp_path = _cache_location(config) assessed_path_tif = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") - file_id = string(hash(qp)) - assessed_tmp_path = _cache_location(config) - assessed_path_geojson = joinpath(assessed_tmp_path, file_id*"_potential_sites.geojson") - - if isfile(assessed_path_geojson) - return file(assessed_path_geojson) + if isfile(assessed_path_tif) + return file(assessed_path_tif) end # Make mask of suitable locations @@ -143,32 +139,64 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) suitability_scores = proportion_suitable(mask_data.data) @debug "$(now()) : Running on thread $(threadid())" - # @debug "Writing to $(assessed_path)" - # Rasters.write( - # assessed_path, - # rebuild(mask_data, suitability_scores); - # ext=".tiff", - # source="gdal", - # driver="COG", - # options=Dict{String,String}( - # "COMPRESS"=>"DEFLATE", - # "SPARSE_OK"=>"TRUE", - # "OVERVIEW_COUNT"=>"5", - # "BLOCKSIZE"=>"256", - # "NUM_THREADS"=>n_gdal_threads(config) - # ), - # force=true - # ) - - #return file(assessed_path) + @debug "Writing to $(assessed_path)" + Rasters.write( + assessed_path, + rebuild(mask_data, suitability_scores); + ext=".tiff", + source="gdal", + driver="COG", + options=Dict{String,String}( + "COMPRESS"=>"DEFLATE", + "SPARSE_OK"=>"TRUE", + "OVERVIEW_COUNT"=>"5", + "BLOCKSIZE"=>"256", + "NUM_THREADS"=>n_gdal_threads(config) + ), + force=true + ) + + return file(assessed_path) +end + +function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config) + @debug "Assessing region's suitability score" + + file_id = string(hash(assessment_qp)) + assessed_tmp_path = _cache_location(config) + assessed_path_geojson = joinpath(assessed_tmp_path, file_id*"_potential_sites.geojson") + + if isfile(assessed_path_geojson) + return file(assessed_path_geojson) + end + + file_id = string(hash(criteria_qp)) + assessed_tmp_path = _cache_location(config) + assessed_path_tif = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") + + if isfile(assessed_path_tif) + scan_locs = Raster(assessed_path_tif) + else + # Make mask of suitable locations + mask_data = _temp_assess_region(reg_assess_data, reg, criteria_qp, rtype, config) + + # Assess remaining pixels for their suitability + @debug "Calculating proportional suitability score" + suitability_scores = proportion_suitable(mask_data.data) + + # Need rebuild(mask_data, suitability_scores) as input to identify_potential_sites_edges + scan_locs = rebuild(mask_data, suitability_scores) + end # Need dataframe of valid_lookup pixels - crit_pixels = _temp_filter_lookup(reg_assess_data, reg, qp, rtype, config) + @debug "Pre-processing assessment inputs." 
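+    # Gather inputs for the polygon search: the criteria-filtered lookup table, the
+    # raster resolution and CRS, and pixels above the user-supplied SuitabilityThreshold.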
+ #Main.@infiltrate + crit_pixels = _temp_filter_lookup(reg_assess_data, reg, criteria_qp, rtype, config) - # Need rebuild(mask_data, suitability_scores) as input to identify_potential_sites_edges - scan_locs = rebuild(mask_data, suitability_scores) res = abs(step(dims(scan_locs, X))) target_crs = convert(EPSG, crs(scan_locs)) + + suitability_threshold = parse(Int64, (assessment_qp["SuitabilityThreshold"])) scan_locs = identify_search_pixels(scan_locs, x -> x .> suitability_threshold) # Need reef outlines @@ -176,8 +204,10 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) reef_outlines = buffer_simplify(gdf) reef_outlines = polygon_to_lines.(reef_outlines) - x_dist, y_dist + x_dist = parse(Int64, assessment_qp["xdist"]) + y_dist = parse(Int64, assessment_qp["ydist"]) + @debug "Performing polygon site assessment." initial_polygons = identify_potential_sites_edges( crit_pixels, scan_locs, @@ -189,14 +219,7 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) reef_outlines, REGIONAL_DATA["region_long_names"][reg] ) - output_geojson(filter_sites(test_results), assessed_path_geojson) - - # Apply rotation-based polygon search - # assess_reef_site() - - # Filter overlapping polygons. + output_geojson(filter_sites(initial_polygons), assessed_path_geojson) - # Return geojson of suitable deployment "plots" - # output_geojson() return file(assessed_path_geojson) end From 0d1efba175be11f25dd0c29314932654fdcb8b2c Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Fri, 27 Sep 2024 16:10:54 +1000 Subject: [PATCH 10/60] Fix issue with data cache not storing GeoDataFrame correctly. Remove GeoDataFrame storage from cache and add the gdf when it is required after loading the cached data --- src/ReefGuideAPI.jl | 41 +++++++++++++++++++++++++++++++++++++---- 1 file changed, 37 insertions(+), 4 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index 6de6767..a8975a1 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -55,26 +55,46 @@ Load regional data to act as an in-memory cache. OrderedDict of `RegionalCriteria` for each region. """ function setup_regional_data(config::Dict) + reef_data_path = config["prepped_data"]["PREPPED_DATA_DIR"] + if @isdefined(REGIONAL_DATA) @debug "Using previously generated regional data store." 
sleep(1) # Pause so message is noticeably visible + + reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") + REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) + + REGIONAL_DATA["region_long_names"] = Dict( + "FarNorthern" => "Far Northern Management Area", + "Cairns-Cooktown" => "Cairns/Cooktown Management Area", + "Townsville-Whitsunday" => "Townsville/Whitsunday Management Area", + "Mackay-Capricorn" => "Mackay/Capricorn Management Area" + ) return REGIONAL_DATA end - # Check disk-based store + # # Check disk-based store reg_cache_dir = config["server_config"]["REGIONAL_CACHE_DIR"] reg_cache_fn = joinpath(reg_cache_dir, "regional_cache.dat") if isfile(reg_cache_fn) @debug "Loading regional data cache from disk" @eval const REGIONAL_DATA = deserialize($(reg_cache_fn)) + + reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") + REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) + + REGIONAL_DATA["region_long_names"] = Dict( + "FarNorthern" => "Far Northern Management Area", + "Cairns-Cooktown" => "Cairns/Cooktown Management Area", + "Townsville-Whitsunday" => "Townsville/Whitsunday Management Area", + "Mackay-Capricorn" => "Mackay/Capricorn Management Area" + ) return REGIONAL_DATA end @debug "Setting up regional data store..." - reef_data_path = config["prepped_data"]["PREPPED_DATA_DIR"] - - regional_assessment_data = OrderedDict{String,RegionalCriteria}() + regional_assessment_data = OrderedDict{String, Any}() for reg in get_regions() data_paths = String[] data_names = String[] @@ -122,6 +142,9 @@ function setup_regional_data(config::Dict) ) end + regional_assessment_data["reef_outlines"] = "" + regional_assessment_data["region_long_names"] = "" + # Store cache on disk to avoid excessive cold startup times @debug "Saving regional data cache to disk" serialize(reg_cache_fn, regional_assessment_data) @@ -129,6 +152,16 @@ function setup_regional_data(config::Dict) # Remember, `@eval` runs in global scope. 
@eval const REGIONAL_DATA = $(regional_assessment_data) + reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") + REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) + + REGIONAL_DATA["region_long_names"] = Dict( + "FarNorthern" => "Far Northern Management Area", + "Cairns-Cooktown" => "Cairns/Cooktown Management Area", + "Townsville-Whitsunday" => "Townsville/Whitsunday Management Area", + "Mackay-Capricorn" => "Mackay/Capricorn Management Area" + ) + return REGIONAL_DATA end From 2a3ed3e92413a97be252638f81ae12d0b5ac219e Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Fri, 27 Sep 2024 16:51:26 +1000 Subject: [PATCH 11/60] Add Port Distance Criteria into the bounds Modifies criteria_data_map() to include port distances as well --- src/criteria_assessment/criteria.jl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 7be7703..d40db65 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -29,7 +29,9 @@ function criteria_data_map() :WavesTp => "_waves_Tp", :Rugosity => "_rugosity", :ValidSlopes => "_valid_slopes", - :ValidFlats => "_valid_flats" + :ValidFlats => "_valid_flats", + :PortDistSlopes => "_PortDistSlopes", + :PortDistFlats => "_PortDistFlats" ) end From 15fe5042bda3f64ec3fd7c440963e8fa30873a37 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Mon, 30 Sep 2024 08:39:46 +1000 Subject: [PATCH 12/60] Add docstrings to assess_region and site_assess_region functions --- .../site_identification.jl | 37 ++++++++++++++++++- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index ed03882..3d6450e 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -120,6 +120,22 @@ function _temp_filter_lookup(reg_assess_data, reg, qp, rtype, config) return crit_lookup end +""" + assess_region(reg_assess_data, reg, qp, rtype, config) + +Perform raster suitability assessment based on user defined criteria. + +# Arguments +- `reg_assess_data` : Dictionary containing the regional data paths, reef outlines and full region names. +- `reg` : Name of the region being assessed (format `Cairns-Cooktown` rather than `Cairns/Cooktown Management Area`). +- `qp` : Dict containing bounds for each variable being filtered. +- `rtype` : Type of zone to assess (flats or slopes). +- `config` : Information from `.config.toml` file. + +# Returns +GeoTiff file of surrounding hectare suitability (1-100%) based on the criteria bounds input +by a user. +""" function assess_region(reg_assess_data, reg, qp, rtype, config) @debug "Assessing region's suitability score" @@ -159,6 +175,23 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) return file(assessed_path) end +""" + site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config) + +Perform site suitability assessment using polygon searches with user defined criteria. + +# Arguments +- `reg_assess_data` : Dictionary containing the regional data paths, reef outlines and full region names. +- `reg` : Name of the region being assessed (format `Cairns-Cooktown` rather than `Cairns/Cooktown Management Area`). +- `criteria_qp` : Dict containing bounds for each variable being filtered. 
+- `assessment_qp` : Dict containing the dimensions of the search polygon (`xdist`, `ydist`) and the `SuitabilityThreshold` to identify search pixels. +- `rtype` : Type of zone to assess (flats or slopes). +- `config` : Information from `.config.toml` file. + +# Returns +GeoJSON file containing result site polygons after filtering out polygons < 0.33 score and +keeping the highest scoring polygon where intersecting polygons occur. +""" function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config) @debug "Assessing region's suitability score" @@ -200,7 +233,7 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt scan_locs = identify_search_pixels(scan_locs, x -> x .> suitability_threshold) # Need reef outlines - gdf = REGIONAL_DATA["reef_outlines"] + gdf = reg_assess_data["reef_outlines"] reef_outlines = buffer_simplify(gdf) reef_outlines = polygon_to_lines.(reef_outlines) @@ -217,7 +250,7 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt y_dist, target_crs, reef_outlines, - REGIONAL_DATA["region_long_names"][reg] + reg_assess_data["region_long_names"][reg] ) output_geojson(filter_sites(initial_polygons), assessed_path_geojson) From bfc4375f8ac380493c8db95b38ebb76ff465affc Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Mon, 30 Sep 2024 10:05:44 +1000 Subject: [PATCH 13/60] correct tif output path name --- src/criteria_assessment/site_identification.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 3d6450e..295d4f6 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -157,7 +157,7 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) @debug "$(now()) : Running on thread $(threadid())" @debug "Writing to $(assessed_path)" Rasters.write( - assessed_path, + assessed_path_tif, rebuild(mask_data, suitability_scores); ext=".tiff", source="gdal", @@ -172,7 +172,7 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) force=true ) - return file(assessed_path) + return file(assessed_path_tif) end """ From e6d213da512ee6e92c09e0b8888c9aff8b2a0b18 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Wed, 2 Oct 2024 09:47:20 +1000 Subject: [PATCH 14/60] Fix tif output path object in assess_site --- src/criteria_assessment/site_identification.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 295d4f6..b726a0c 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -155,7 +155,7 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) suitability_scores = proportion_suitable(mask_data.data) @debug "$(now()) : Running on thread $(threadid())" - @debug "Writing to $(assessed_path)" + @debug "Writing to $(assessed_path_tif)" Rasters.write( assessed_path_tif, rebuild(mask_data, suitability_scores); From bc97d1ff086d74f4e9ee06c3903160e055f503ce Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Wed, 2 Oct 2024 14:20:06 +1000 Subject: [PATCH 15/60] Fix incorrect raster latitude accessing in assessment --- src/site_assessment/best_fit_polygons.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/site_assessment/best_fit_polygons.jl b/src/site_assessment/best_fit_polygons.jl index 
4c15a9e..9fe46e7 100644 --- a/src/site_assessment/best_fit_polygons.jl +++ b/src/site_assessment/best_fit_polygons.jl @@ -126,10 +126,10 @@ function identify_potential_sites_edges( reef_lines = reef_lines[gdf.management_area .== region] gdf = gdf[gdf.management_area .== region, :] max_count = ( - (x_dist / degrees_to_meters(res, mean(indices_pixels.dims[2]))) * + (x_dist / degrees_to_meters(res, mean(search_pixels.lat))) * ( - (y_dist + 2 * degrees_to_meters(res, mean(indices_pixels.dims[2]))) / - degrees_to_meters(res, mean(indices_pixels.dims[2])) + (y_dist + 2 * degrees_to_meters(res, mean(search_pixels.lat))) / + degrees_to_meters(res, mean(search_pixels.lat)) ) ) From c06307f0e35808007d144279ff38a7e6190178a8 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Wed, 2 Oct 2024 15:25:38 +1000 Subject: [PATCH 16/60] Formatting editted scripts --- src/ReefGuideAPI.jl | 2 +- src/criteria_assessment/criteria.jl | 16 +++++++++---- .../site_identification.jl | 24 +++++++++++-------- 3 files changed, 27 insertions(+), 15 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index a8975a1..c5ed010 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -94,7 +94,7 @@ function setup_regional_data(config::Dict) @debug "Setting up regional data store..." - regional_assessment_data = OrderedDict{String, Any}() + regional_assessment_data = OrderedDict{String,Any}() for reg in get_regions() data_paths = String[] data_names = String[] diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index d40db65..7d18a57 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -151,7 +151,9 @@ function setup_region_routes(config, auth) return file(mask_path; headers=COG_HEADERS) end - @get auth("/suitability/assess/{reg}/{rtype}") function (req::Request, reg::String, rtype::String) + @get auth("/suitability/assess/{reg}/{rtype}") function ( + req::Request, reg::String, rtype::String + ) # somewhere:8000/suitability/assess/region-name/reeftype?criteria_names=Depth,Slope&lb=-9.0,0.0&ub=-2.0,40 # 127.0.0.1:8000/suitability/assess/Cairns-Cooktown/slopes?Depth=-4.0:-2.0&Slope=0.0:40.0&Rugosity=0.0:6.0 @@ -160,7 +162,9 @@ function setup_region_routes(config, auth) return assess_region(reg_assess_data, reg, qp, rtype, config) end - @get auth("/suitability/site-suitability/{reg}/{rtype}") function (req::Request, reg::String, rtype::String) + @get auth("/suitability/site-suitability/{reg}/{rtype}") function ( + req::Request, reg::String, rtype::String + ) # 127.0.0.1:8000/suitability/site-suitability/Cairns-Cooktown/slopes?Depth=-4.0:-2.0&Slope=0.0:40.0&Rugosity=0.0:6.0&SuitabilityThreshold=95&xdist=450&ydist=50 qp = queryparams(req) @@ -178,10 +182,14 @@ function setup_region_routes(config, auth) ] criteria_qp = filter(k -> string(k.first) ∈ criteria_names, qp) - assessment_qp = filter(k -> string(k.first) ∈ ["SuitabilityThreshold", "xdist", "ydist"], qp) + assessment_qp = filter( + k -> string(k.first) ∈ ["SuitabilityThreshold", "xdist", "ydist"], qp + ) @debug "In site assessment" - return site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config) + return site_assess_region( + reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config + ) end @get auth("/bounds/{reg}") function (req::Request, reg::String) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index b726a0c..1513e9c 100644 --- a/src/criteria_assessment/site_identification.jl +++ 
b/src/criteria_assessment/site_identification.jl @@ -13,7 +13,7 @@ as being suitable according to user-selected criteria. - `x` : Matrix of boolean pixels after filtering with user criteria. - `window` : Window size to assess. Default window (-4,5) assesses a square hectare around each target pixel where the resolution of pixels is 10m. """ -function proportion_suitable(x::BitMatrix, window::Tuple=(-4,5))::Matrix{Int16} +function proportion_suitable(x::BitMatrix, window::Tuple=(-4, 5))::Matrix{Int16} x′ = zeros(Int16, size(x)) @floop for row_col in ThreadsX.findall(x) @@ -49,7 +49,9 @@ in `gdf`. Target_rast and gdf should be in the same CRS (EPSG:7844 / GDA2020 for - `tmp_areas` : Raster of filtered pixels containing only pixels within target distance from a geometry centroid. """ -function filter_distances(target_rast::Raster, gdf::DataFrame, dist; units::String="NM")::Raster +function filter_distances( + target_rast::Raster, gdf::DataFrame, dist; units::String="NM" +)::Raster tmp_areas = copy(target_rast) # First dimension is the rows (latitude) @@ -141,7 +143,7 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) file_id = string(hash(qp)) assessed_tmp_path = _cache_location(config) - assessed_path_tif = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") + assessed_path_tif = joinpath(assessed_tmp_path, file_id * "_suitable.tiff") if isfile(assessed_path_tif) return file(assessed_path_tif) @@ -163,11 +165,11 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) source="gdal", driver="COG", options=Dict{String,String}( - "COMPRESS"=>"DEFLATE", - "SPARSE_OK"=>"TRUE", - "OVERVIEW_COUNT"=>"5", - "BLOCKSIZE"=>"256", - "NUM_THREADS"=>n_gdal_threads(config) + "COMPRESS" => "DEFLATE", + "SPARSE_OK" => "TRUE", + "OVERVIEW_COUNT" => "5", + "BLOCKSIZE" => "256", + "NUM_THREADS" => n_gdal_threads(config) ), force=true ) @@ -197,7 +199,9 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt file_id = string(hash(assessment_qp)) assessed_tmp_path = _cache_location(config) - assessed_path_geojson = joinpath(assessed_tmp_path, file_id*"_potential_sites.geojson") + assessed_path_geojson = joinpath( + assessed_tmp_path, file_id * "_potential_sites.geojson" + ) if isfile(assessed_path_geojson) return file(assessed_path_geojson) @@ -205,7 +209,7 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt file_id = string(hash(criteria_qp)) assessed_tmp_path = _cache_location(config) - assessed_path_tif = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") + assessed_path_tif = joinpath(assessed_tmp_path, file_id * "_suitable.tiff") if isfile(assessed_path_tif) scan_locs = Raster(assessed_path_tif) From 2a9eb694c0c1765fe7e1df755d2b8331064c86cc Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Thu, 3 Oct 2024 09:12:34 +1000 Subject: [PATCH 17/60] Address PR comments on formatting and duplicated code --- src/ReefGuideAPI.jl | 133 ++++++++---------- src/criteria_assessment/criteria.jl | 13 +- src/criteria_assessment/query_thresholds.jl | 2 +- .../site_identification.jl | 40 +++--- src/site_assessment/common_functions.jl | 38 +---- 5 files changed, 78 insertions(+), 148 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index c5ed010..cb6645f 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -56,101 +56,75 @@ OrderedDict of `RegionalCriteria` for each region. 
""" function setup_regional_data(config::Dict) reef_data_path = config["prepped_data"]["PREPPED_DATA_DIR"] + reg_cache_dir = config["server_config"]["REGIONAL_CACHE_DIR"] + reg_cache_fn = joinpath(reg_cache_dir, "regional_cache.dat") if @isdefined(REGIONAL_DATA) @debug "Using previously generated regional data store." sleep(1) # Pause so message is noticeably visible - reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") - REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) - - REGIONAL_DATA["region_long_names"] = Dict( - "FarNorthern" => "Far Northern Management Area", - "Cairns-Cooktown" => "Cairns/Cooktown Management Area", - "Townsville-Whitsunday" => "Townsville/Whitsunday Management Area", - "Mackay-Capricorn" => "Mackay/Capricorn Management Area" - ) - return REGIONAL_DATA - end - - # # Check disk-based store - reg_cache_dir = config["server_config"]["REGIONAL_CACHE_DIR"] - reg_cache_fn = joinpath(reg_cache_dir, "regional_cache.dat") - if isfile(reg_cache_fn) + elseif isfile(reg_cache_fn) @debug "Loading regional data cache from disk" @eval const REGIONAL_DATA = deserialize($(reg_cache_fn)) - reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") - REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) - - REGIONAL_DATA["region_long_names"] = Dict( - "FarNorthern" => "Far Northern Management Area", - "Cairns-Cooktown" => "Cairns/Cooktown Management Area", - "Townsville-Whitsunday" => "Townsville/Whitsunday Management Area", - "Mackay-Capricorn" => "Mackay/Capricorn Management Area" - ) - return REGIONAL_DATA - end - - @debug "Setting up regional data store..." + else + @debug "Setting up regional data store..." - regional_assessment_data = OrderedDict{String,Any}() - for reg in get_regions() - data_paths = String[] - data_names = String[] + regional_assessment_data = OrderedDict{String,Union{RegionalCriteria,DataFrame,Dict}}() + for reg in get_regions() + data_paths = String[] + data_names = String[] - slope_table = nothing - flat_table = nothing + slope_table = nothing + flat_table = nothing - for (k, dp) in criteria_data_map() - g = glob("$reg*$dp.tif", reef_data_path) - if length(g) == 0 - continue - end + for (k, dp) in criteria_data_map() + g = glob("$reg*$dp.tif", reef_data_path) + if length(g) == 0 + continue + end - push!(data_paths, first(g)) - push!(data_names, string(k)) - if occursin("valid", string(dp)) - # Load up Parquet files - parq_file = replace(first(g), ".tif" => "_lookup.parq") - - if occursin("slope", string(dp)) - slope_table = GeoParquet.read(parq_file) - elseif occursin("flat", string(dp)) - flat_table = GeoParquet.read(parq_file) - else - msg = "Unknown lookup found: $(parq_file). Must be 'slope' or 'flat'" - throw(ArgumentError(msg)) + push!(data_paths, first(g)) + push!(data_names, string(k)) + if occursin("valid", string(dp)) + # Load up Parquet files + parq_file = replace(first(g), ".tif" => "_lookup.parq") + + if occursin("slope", string(dp)) + slope_table = GeoParquet.read(parq_file) + elseif occursin("flat", string(dp)) + flat_table = GeoParquet.read(parq_file) + else + msg = "Unknown lookup found: $(parq_file). 
Must be 'slope' or 'flat'" + throw(ArgumentError(msg)) + end end end - end - # Pre-extract long/lat coordinates - coords = GI.coordinates.(slope_table.geometry) - slope_table[!, :lons] .= first.(coords) - slope_table[!, :lats] .= last.(coords) - - coords = GI.coordinates.(flat_table.geometry) - flat_table[!, :lons] .= first.(coords) - flat_table[!, :lats] .= last.(coords) - - rst_stack = RasterStack(data_paths; name=data_names, lazy=true) - regional_assessment_data[reg] = RegionalCriteria( - rst_stack, - slope_table, - flat_table - ) - end - - regional_assessment_data["reef_outlines"] = "" - regional_assessment_data["region_long_names"] = "" + # Pre-extract long/lat coordinates + coords = GI.coordinates.(slope_table.geometry) + slope_table[!, :lons] .= first.(coords) + slope_table[!, :lats] .= last.(coords) + + coords = GI.coordinates.(flat_table.geometry) + flat_table[!, :lons] .= first.(coords) + flat_table[!, :lats] .= last.(coords) + + rst_stack = RasterStack(data_paths; name=data_names, lazy=true) + regional_assessment_data[reg] = RegionalCriteria( + rst_stack, + slope_table, + flat_table + ) + end - # Store cache on disk to avoid excessive cold startup times - @debug "Saving regional data cache to disk" - serialize(reg_cache_fn, regional_assessment_data) + # Store cache on disk to avoid excessive cold startup times + @debug "Saving regional data cache to disk" + serialize(reg_cache_fn, regional_assessment_data) - # Remember, `@eval` runs in global scope. - @eval const REGIONAL_DATA = $(regional_assessment_data) + # Remember, `@eval` runs in global scope. + @eval const REGIONAL_DATA = $(regional_assessment_data) + end reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) @@ -162,6 +136,9 @@ function setup_regional_data(config::Dict) "Mackay-Capricorn" => "Mackay/Capricorn Management Area" ) + preservation_zone_path = joinpath(reef_data_path, "GBRMPA_preservation_zone_exclusion.gpkg") + REGIONAL_DATA["GBRMPA_preservation_zones"] = GDF.read(preservation_zone_path) + return REGIONAL_DATA end diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 7d18a57..47d4356 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -168,18 +168,7 @@ function setup_region_routes(config, auth) # 127.0.0.1:8000/suitability/site-suitability/Cairns-Cooktown/slopes?Depth=-4.0:-2.0&Slope=0.0:40.0&Rugosity=0.0:6.0&SuitabilityThreshold=95&xdist=450&ydist=50 qp = queryparams(req) - criteria_names = [ - "Depth", - "Benthic", - "Geomorphic", - "Slope", - "Turbidity", - "WavesHs", - "WavesTp", - "Rugosity", - "PortDistSlopes", - "PortDistFlats" - ] + criteria_names = keys(criteria_data_map()) criteria_qp = filter(k -> string(k.first) ∈ criteria_names, qp) assessment_qp = filter( diff --git a/src/criteria_assessment/query_thresholds.jl b/src/criteria_assessment/query_thresholds.jl index ee6e588..7b6af61 100644 --- a/src/criteria_assessment/query_thresholds.jl +++ b/src/criteria_assessment/query_thresholds.jl @@ -199,7 +199,7 @@ end ruleset::Vector{CriteriaBounds{Function}} ) -Filter valid_lookup table by applying user defined `ruleset` criteria. +Filter lookup table by applying user defined `ruleset` criteria. # Arguments - `reg_criteria` : RegionalCriteria containing valid_rtype lookup table for filtering. 
diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 1513e9c..5776426 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -13,7 +13,7 @@ as being suitable according to user-selected criteria. - `x` : Matrix of boolean pixels after filtering with user criteria. - `window` : Window size to assess. Default window (-4,5) assesses a square hectare around each target pixel where the resolution of pixels is 10m. """ -function proportion_suitable(x::BitMatrix, window::Tuple=(-4, 5))::Matrix{Int16} +function proportion_suitable(x::BitMatrix; window::Tuple=(-4,5))::Matrix{Int16} x′ = zeros(Int16, size(x)) @floop for row_col in ThreadsX.findall(x) @@ -37,8 +37,8 @@ end dist_nm )::Raster -Exclude pixels in target_rast that are beyond `dist_nm` (nautical miles) from a geometry -in `gdf`. Target_rast and gdf should be in the same CRS (EPSG:7844 / GDA2020 for GBR-reef-guidance-assessment). +Exclude pixels in `target_rast` that are beyond `dist_nm` (nautical miles) from a geometry +in `gdf`. `target_rast` and `gdf` should be in the same CRS (EPSG:7844 / GDA2020 for GBR-reef-guidance-assessment). # Arguments - `target_rast` : Raster of suitable pixels (Bool) to filter pixels from. @@ -49,9 +49,7 @@ in `gdf`. Target_rast and gdf should be in the same CRS (EPSG:7844 / GDA2020 for - `tmp_areas` : Raster of filtered pixels containing only pixels within target distance from a geometry centroid. """ -function filter_distances( - target_rast::Raster, gdf::DataFrame, dist; units::String="NM" -)::Raster +function filter_distances(target_rast::Raster, gdf::DataFrame, dist; units::String="NM")::Raster tmp_areas = copy(target_rast) # First dimension is the rows (latitude) @@ -143,7 +141,7 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) file_id = string(hash(qp)) assessed_tmp_path = _cache_location(config) - assessed_path_tif = joinpath(assessed_tmp_path, file_id * "_suitable.tiff") + assessed_path_tif = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") if isfile(assessed_path_tif) return file(assessed_path_tif) @@ -157,24 +155,24 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) suitability_scores = proportion_suitable(mask_data.data) @debug "$(now()) : Running on thread $(threadid())" - @debug "Writing to $(assessed_path_tif)" + @debug "Writing to $(assessed_path)" Rasters.write( - assessed_path_tif, + assessed_path, rebuild(mask_data, suitability_scores); ext=".tiff", source="gdal", driver="COG", options=Dict{String,String}( - "COMPRESS" => "DEFLATE", - "SPARSE_OK" => "TRUE", - "OVERVIEW_COUNT" => "5", - "BLOCKSIZE" => "256", - "NUM_THREADS" => n_gdal_threads(config) + "COMPRESS"=>"DEFLATE", + "SPARSE_OK"=>"TRUE", + "OVERVIEW_COUNT"=>"5", + "BLOCKSIZE"=>"256", + "NUM_THREADS"=>n_gdal_threads(config) ), force=true ) - return file(assessed_path_tif) + return file(assessed_path) end """ @@ -199,9 +197,7 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt file_id = string(hash(assessment_qp)) assessed_tmp_path = _cache_location(config) - assessed_path_geojson = joinpath( - assessed_tmp_path, file_id * "_potential_sites.geojson" - ) + assessed_path_geojson = joinpath(assessed_tmp_path, file_id*"_potential_sites.geojson") if isfile(assessed_path_geojson) return file(assessed_path_geojson) @@ -209,7 +205,7 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt file_id = string(hash(criteria_qp)) 
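+    # The criteria-only hash keys the cached suitability raster; the assessment
+    # parameters hashed above key the output GeoJSON of potential sites.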
assessed_tmp_path = _cache_location(config) - assessed_path_tif = joinpath(assessed_tmp_path, file_id * "_suitable.tiff") + assessed_path_tif = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") if isfile(assessed_path_tif) scan_locs = Raster(assessed_path_tif) @@ -237,7 +233,7 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt scan_locs = identify_search_pixels(scan_locs, x -> x .> suitability_threshold) # Need reef outlines - gdf = reg_assess_data["reef_outlines"] + gdf = REGIONAL_DATA["reef_outlines"] reef_outlines = buffer_simplify(gdf) reef_outlines = polygon_to_lines.(reef_outlines) @@ -254,9 +250,9 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt y_dist, target_crs, reef_outlines, - reg_assess_data["region_long_names"][reg] + REGIONAL_DATA["region_long_names"][reg] ) - output_geojson(filter_sites(initial_polygons), assessed_path_geojson) + output_geojson(assessed_path_geojson, filter_sites(initial_polygons)) return file(assessed_path_geojson) end diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index 1f493c6..fc73af2 100644 --- a/src/site_assessment/common_functions.jl +++ b/src/site_assessment/common_functions.jl @@ -256,48 +256,16 @@ function filter_sites(res_df::DataFrame)::DataFrame end """ - output_geojson( - df::DataFrame, - region::String, - output_dir::String - )::Nothing - -Writes out GeoJSON file to a target directory. Output file will be located at location: -"`output_dir`/output_sites_`region`.geojson" - -# Arguments -- `df` : DataFrame intended for writing to geojson file. -- `region` : Region name for labelling output file. -- `output_dir` : Directory to write geojson file to. -""" -function output_geojson( - df::DataFrame, - region::String, - output_dir::String -)::Nothing - GDF.write( - joinpath( - output_dir, - "output_sites_$(region).geojson" - ), - df; - crs=GI.crs(first(df.geometry)) - ) - - return nothing -end - -""" - output_geojson(df::DataFrame, destination_path::String)::Nothing + output_geojson(destination_path::String, df::DataFrame)::Nothing Writes out GeoJSON file to a target directory. Output file will be located at location: `destination_path`. # Arguments -- `df` : DataFrame intended for writing to geojson file. - `destination_path` : File path to write geojson file to. +- `df` : DataFrame intended for writing to geojson file. """ -function output_geojson(df::DataFrame, destination_path::String)::Nothing +function output_geojson(destination_path::String, df::DataFrame)::Nothing GDF.write(destination_path, df; crs=GI.crs(first(df.geometry))) return nothing From d9d621fb7cb89efb7f98df41bf3d22f1be7add19 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Thu, 3 Oct 2024 09:22:00 +1000 Subject: [PATCH 18/60] Formatting document --- src/ReefGuideAPI.jl | 8 +++++-- .../site_identification.jl | 24 +++++++++++-------- 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index cb6645f..3318f7d 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -70,7 +70,9 @@ function setup_regional_data(config::Dict) else @debug "Setting up regional data store..." 
- regional_assessment_data = OrderedDict{String,Union{RegionalCriteria,DataFrame,Dict}}() + regional_assessment_data = OrderedDict{ + String,Union{RegionalCriteria,DataFrame,Dict} + }() for reg in get_regions() data_paths = String[] data_names = String[] @@ -136,7 +138,9 @@ function setup_regional_data(config::Dict) "Mackay-Capricorn" => "Mackay/Capricorn Management Area" ) - preservation_zone_path = joinpath(reef_data_path, "GBRMPA_preservation_zone_exclusion.gpkg") + preservation_zone_path = joinpath( + reef_data_path, "GBRMPA_preservation_zone_exclusion.gpkg" + ) REGIONAL_DATA["GBRMPA_preservation_zones"] = GDF.read(preservation_zone_path) return REGIONAL_DATA diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 5776426..241cb44 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -13,7 +13,7 @@ as being suitable according to user-selected criteria. - `x` : Matrix of boolean pixels after filtering with user criteria. - `window` : Window size to assess. Default window (-4,5) assesses a square hectare around each target pixel where the resolution of pixels is 10m. """ -function proportion_suitable(x::BitMatrix; window::Tuple=(-4,5))::Matrix{Int16} +function proportion_suitable(x::BitMatrix; window::Tuple=(-4, 5))::Matrix{Int16} x′ = zeros(Int16, size(x)) @floop for row_col in ThreadsX.findall(x) @@ -49,7 +49,9 @@ in `gdf`. `target_rast` and `gdf` should be in the same CRS (EPSG:7844 / GDA202 - `tmp_areas` : Raster of filtered pixels containing only pixels within target distance from a geometry centroid. """ -function filter_distances(target_rast::Raster, gdf::DataFrame, dist; units::String="NM")::Raster +function filter_distances( + target_rast::Raster, gdf::DataFrame, dist; units::String="NM" +)::Raster tmp_areas = copy(target_rast) # First dimension is the rows (latitude) @@ -141,7 +143,7 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) file_id = string(hash(qp)) assessed_tmp_path = _cache_location(config) - assessed_path_tif = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") + assessed_path_tif = joinpath(assessed_tmp_path, file_id * "_suitable.tiff") if isfile(assessed_path_tif) return file(assessed_path_tif) @@ -163,11 +165,11 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) source="gdal", driver="COG", options=Dict{String,String}( - "COMPRESS"=>"DEFLATE", - "SPARSE_OK"=>"TRUE", - "OVERVIEW_COUNT"=>"5", - "BLOCKSIZE"=>"256", - "NUM_THREADS"=>n_gdal_threads(config) + "COMPRESS" => "DEFLATE", + "SPARSE_OK" => "TRUE", + "OVERVIEW_COUNT" => "5", + "BLOCKSIZE" => "256", + "NUM_THREADS" => n_gdal_threads(config) ), force=true ) @@ -197,7 +199,9 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt file_id = string(hash(assessment_qp)) assessed_tmp_path = _cache_location(config) - assessed_path_geojson = joinpath(assessed_tmp_path, file_id*"_potential_sites.geojson") + assessed_path_geojson = joinpath( + assessed_tmp_path, file_id * "_potential_sites.geojson" + ) if isfile(assessed_path_geojson) return file(assessed_path_geojson) @@ -205,7 +209,7 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt file_id = string(hash(criteria_qp)) assessed_tmp_path = _cache_location(config) - assessed_path_tif = joinpath(assessed_tmp_path, file_id*"_suitable.tiff") + assessed_path_tif = joinpath(assessed_tmp_path, file_id * "_suitable.tiff") if isfile(assessed_path_tif) scan_locs = 
Raster(assessed_path_tif) From ea95c59ef7c30c96c1eb8303c9ec1d569a0c0724 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Thu, 3 Oct 2024 10:31:43 +1000 Subject: [PATCH 19/60] Fix criteria qp key search --- src/criteria_assessment/criteria.jl | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 47d4356..8547bef 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -166,10 +166,9 @@ function setup_region_routes(config, auth) req::Request, reg::String, rtype::String ) # 127.0.0.1:8000/suitability/site-suitability/Cairns-Cooktown/slopes?Depth=-4.0:-2.0&Slope=0.0:40.0&Rugosity=0.0:6.0&SuitabilityThreshold=95&xdist=450&ydist=50 - qp = queryparams(req) - criteria_names = keys(criteria_data_map()) - criteria_qp = filter(k -> string(k.first) ∈ criteria_names, qp) + criteria_names = string.(keys(criteria_data_map())) + criteria_qp = filter(k -> k.first ∈ criteria_names, qp) assessment_qp = filter( k -> string(k.first) ∈ ["SuitabilityThreshold", "xdist", "ydist"], qp From 7437c4b4011b3a74ffa55ad41e130f7eeccbac72 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Thu, 3 Oct 2024 11:08:21 +1000 Subject: [PATCH 20/60] Move region long names into the polygon search function --- src/criteria_assessment/site_identification.jl | 2 +- src/site_assessment/best_fit_polygons.jl | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 241cb44..b0ac114 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -254,7 +254,7 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt y_dist, target_crs, reef_outlines, - REGIONAL_DATA["region_long_names"][reg] + reg ) output_geojson(assessed_path_geojson, filter_sites(initial_polygons)) diff --git a/src/site_assessment/best_fit_polygons.jl b/src/site_assessment/best_fit_polygons.jl index 9fe46e7..0e0e978 100644 --- a/src/site_assessment/best_fit_polygons.jl +++ b/src/site_assessment/best_fit_polygons.jl @@ -101,7 +101,7 @@ Method is currently opperating for CRS in degrees units. - `y_dist` : Length of vertical side of search box (in meters). - `target_crs` : CRS of the input Rasters. Using GeoFormatTypes.EPSG(). - `reef_lines` : Vector containing reef outline segments created from polygons in `gdf.geometry` (Must be separate object to `gdf` rather than column). -- `region` : Management region name in GBRMPA format - e.g. "Mackay/Capricorn Management Area" +- `region` : Region name, e.g. "Cairns-Cooktown" or "FarNorthern". - `degree_step` : Degree to perform rotations around identified edge angle. - `n_rot_p_side` : Number of rotations to perform clockwise and anticlockwise around the identified edge angle. Default 2 rotations. - `surr_threshold` : Theshold used to skip searching where the proportion of suitable pixels is too low. 
@@ -123,8 +123,9 @@ function identify_potential_sites_edges( n_rot_p_side::Int64=2, surr_threshold::Float64=0.33 )::DataFrame - reef_lines = reef_lines[gdf.management_area .== region] - gdf = gdf[gdf.management_area .== region, :] + region_long = REGIONAL_DATA["region_long_names"][region] + reef_lines = reef_lines[gdf.management_area .== region_long] + gdf = gdf[gdf.management_area .== region_long, :] max_count = ( (x_dist / degrees_to_meters(res, mean(search_pixels.lat))) * ( From 85b18d7329f1065044ac4640b8b4d0d1c513c5be Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Thu, 3 Oct 2024 11:08:50 +1000 Subject: [PATCH 21/60] Move initial regional cache creation to a separate function --- src/ReefGuideAPI.jl | 137 ++++++++++++++++++++++++-------------------- 1 file changed, 75 insertions(+), 62 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index 3318f7d..f8a54fc 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -42,6 +42,77 @@ function get_regions() return regions end +""" + initialize_regional_data_cache(reef_data_path::String, reg_cache_fn::String) + +Create initial regional data store with data from `reef_data_path`, excluding geospatial +data and save to `reg_cache_fn` path. +""" +function initialize_regional_data_cache(reef_data_path::String, reg_cache_fn::String) + regional_assessment_data = OrderedDict{ + String,Union{RegionalCriteria,DataFrame,Dict} + }() + for reg in get_regions() + data_paths = String[] + data_names = String[] + + slope_table = nothing + flat_table = nothing + + for (k, dp) in criteria_data_map() + g = glob("$reg*$dp.tif", reef_data_path) + if length(g) == 0 + continue + end + + push!(data_paths, first(g)) + push!(data_names, string(k)) + if occursin("valid", string(dp)) + # Load up Parquet files + parq_file = replace(first(g), ".tif" => "_lookup.parq") + + if occursin("slope", string(dp)) + slope_table = GeoParquet.read(parq_file) + elseif occursin("flat", string(dp)) + flat_table = GeoParquet.read(parq_file) + else + msg = "Unknown lookup found: $(parq_file). Must be 'slope' or 'flat'" + throw(ArgumentError(msg)) + end + end + end + + # Pre-extract long/lat coordinates + coords = GI.coordinates.(slope_table.geometry) + slope_table[!, :lons] .= first.(coords) + slope_table[!, :lats] .= last.(coords) + + coords = GI.coordinates.(flat_table.geometry) + flat_table[!, :lons] .= first.(coords) + flat_table[!, :lats] .= last.(coords) + + rst_stack = RasterStack(data_paths; name=data_names, lazy=true) + regional_assessment_data[reg] = RegionalCriteria( + rst_stack, + slope_table, + flat_table + ) + end + + regional_assessment_data["region_long_names"] = Dict( + "FarNorthern" => "Far Northern Management Area", + "Cairns-Cooktown" => "Cairns/Cooktown Management Area", + "Townsville-Whitsunday" => "Townsville/Whitsunday Management Area", + "Mackay-Capricorn" => "Mackay/Capricorn Management Area" + ) + + # Store cache on disk to avoid excessive cold startup times + @debug "Saving regional data cache to disk" + serialize(reg_cache_fn, regional_assessment_data) + + return regional_assessment_data +end + """ setup_regional_data(config::Dict) @@ -69,61 +140,10 @@ function setup_regional_data(config::Dict) else @debug "Setting up regional data store..." 
- - regional_assessment_data = OrderedDict{ - String,Union{RegionalCriteria,DataFrame,Dict} - }() - for reg in get_regions() - data_paths = String[] - data_names = String[] - - slope_table = nothing - flat_table = nothing - - for (k, dp) in criteria_data_map() - g = glob("$reg*$dp.tif", reef_data_path) - if length(g) == 0 - continue - end - - push!(data_paths, first(g)) - push!(data_names, string(k)) - if occursin("valid", string(dp)) - # Load up Parquet files - parq_file = replace(first(g), ".tif" => "_lookup.parq") - - if occursin("slope", string(dp)) - slope_table = GeoParquet.read(parq_file) - elseif occursin("flat", string(dp)) - flat_table = GeoParquet.read(parq_file) - else - msg = "Unknown lookup found: $(parq_file). Must be 'slope' or 'flat'" - throw(ArgumentError(msg)) - end - end - end - - # Pre-extract long/lat coordinates - coords = GI.coordinates.(slope_table.geometry) - slope_table[!, :lons] .= first.(coords) - slope_table[!, :lats] .= last.(coords) - - coords = GI.coordinates.(flat_table.geometry) - flat_table[!, :lons] .= first.(coords) - flat_table[!, :lats] .= last.(coords) - - rst_stack = RasterStack(data_paths; name=data_names, lazy=true) - regional_assessment_data[reg] = RegionalCriteria( - rst_stack, - slope_table, - flat_table - ) - end - - # Store cache on disk to avoid excessive cold startup times - @debug "Saving regional data cache to disk" - serialize(reg_cache_fn, regional_assessment_data) - + regional_assessment_data = initialize_regional_data_cache( + reef_data_path, + reg_cache_fn + ) # Remember, `@eval` runs in global scope. @eval const REGIONAL_DATA = $(regional_assessment_data) end @@ -131,13 +151,6 @@ function setup_regional_data(config::Dict) reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) - REGIONAL_DATA["region_long_names"] = Dict( - "FarNorthern" => "Far Northern Management Area", - "Cairns-Cooktown" => "Cairns/Cooktown Management Area", - "Townsville-Whitsunday" => "Townsville/Whitsunday Management Area", - "Mackay-Capricorn" => "Mackay/Capricorn Management Area" - ) - preservation_zone_path = joinpath( reef_data_path, "GBRMPA_preservation_zone_exclusion.gpkg" ) From 93325d44a88f7d403f019788f8e75f370ec2783c Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Thu, 3 Oct 2024 13:02:27 +1000 Subject: [PATCH 22/60] Remove unneeded comments/debug --- src/criteria_assessment/criteria.jl | 2 -- src/criteria_assessment/site_identification.jl | 1 - 2 files changed, 3 deletions(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 8547bef..e3bcdbc 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -158,7 +158,6 @@ function setup_region_routes(config, auth) # 127.0.0.1:8000/suitability/assess/Cairns-Cooktown/slopes?Depth=-4.0:-2.0&Slope=0.0:40.0&Rugosity=0.0:6.0 qp = queryparams(req) - @debug "In region assessment route" return assess_region(reg_assess_data, reg, qp, rtype, config) end @@ -174,7 +173,6 @@ function setup_region_routes(config, auth) k -> string(k.first) ∈ ["SuitabilityThreshold", "xdist", "ydist"], qp ) - @debug "In site assessment" return site_assess_region( reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config ) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index b0ac114..2cd1a82 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ 
-227,7 +227,6 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt # Need dataframe of valid_lookup pixels @debug "Pre-processing assessment inputs." - #Main.@infiltrate crit_pixels = _temp_filter_lookup(reg_assess_data, reg, criteria_qp, rtype, config) res = abs(step(dims(scan_locs, X))) From 74d869e7202977d860959705cf0bd4b98931a0f6 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Thu, 3 Oct 2024 14:53:52 +1000 Subject: [PATCH 23/60] Fixed incorrect window movement in proportion_suitable --- src/criteria_assessment/site_identification.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 2cd1a82..76b01a9 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -19,10 +19,10 @@ function proportion_suitable(x::BitMatrix; window::Tuple=(-4, 5))::Matrix{Int16} @floop for row_col in ThreadsX.findall(x) (row, col) = Tuple(row_col) x_left = max(col + window[1], 1) - x_right = min(col + window[2], size(x, 1)) + x_right = min(col + window[2], size(x, 2)) y_top = max(row + window[1], 1) - y_bottom = min(row + window[2], size(x, 2)) + y_bottom = min(row + window[2], size(x, 1)) x′[row, col] = Int16(sum(@views x[y_top:y_bottom, x_left:x_right])) end @@ -157,9 +157,9 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) suitability_scores = proportion_suitable(mask_data.data) @debug "$(now()) : Running on thread $(threadid())" - @debug "Writing to $(assessed_path)" + @debug "Writing to $(assessed_path_tif)" Rasters.write( - assessed_path, + assessed_path_tif, rebuild(mask_data, suitability_scores); ext=".tiff", source="gdal", @@ -174,7 +174,7 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) force=true ) - return file(assessed_path) + return file(assessed_path_tif) end """ From 776893055f7cf8b9003071c1e6718cf2cab7332a Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Thu, 3 Oct 2024 14:59:41 +1000 Subject: [PATCH 24/60] Remove preservation zone data --- src/ReefGuideAPI.jl | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index f8a54fc..ccaa439 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -151,11 +151,6 @@ function setup_regional_data(config::Dict) reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) - preservation_zone_path = joinpath( - reef_data_path, "GBRMPA_preservation_zone_exclusion.gpkg" - ) - REGIONAL_DATA["GBRMPA_preservation_zones"] = GDF.read(preservation_zone_path) - return REGIONAL_DATA end From 65274642ec5b03266c934c965d83ac29b518cc49 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Thu, 3 Oct 2024 17:32:39 +1000 Subject: [PATCH 25/60] Update function name for brevity --- src/criteria_assessment/query_thresholds.jl | 6 +++--- src/criteria_assessment/tiles.jl | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/criteria_assessment/query_thresholds.jl b/src/criteria_assessment/query_thresholds.jl index 7b6af61..89f51b0 100644 --- a/src/criteria_assessment/query_thresholds.jl +++ b/src/criteria_assessment/query_thresholds.jl @@ -229,7 +229,7 @@ function apply_criteria_lookup( end """ - make_threshold_mask(reg::String, rtype::Symbol, crit_map) + threshold_mask(reg::String, rtype::Symbol, crit_map) Generate mask for a given region and reef type (slopes or flats) according to 
thresholds applied to a set of criteria. @@ -248,7 +248,7 @@ applied to a set of criteria. # Returns True/false mask indicating locations within desired thresholds. """ -function make_threshold_mask(reg_criteria, rtype::Symbol, crit_map)::Raster +function threshold_mask(reg_criteria, rtype::Symbol, crit_map)::Raster valid_lookup = getfield(reg_criteria, Symbol(:valid_, rtype)) mask_layer = apply_criteria_thresholds( reg_criteria.stack, @@ -258,7 +258,7 @@ function make_threshold_mask(reg_criteria, rtype::Symbol, crit_map)::Raster return mask_layer end -function make_threshold_mask( +function threshold_mask( reg_criteria, rtype::Symbol, crit_map, diff --git a/src/criteria_assessment/tiles.jl b/src/criteria_assessment/tiles.jl index 6c275b4..a2324d0 100644 --- a/src/criteria_assessment/tiles.jl +++ b/src/criteria_assessment/tiles.jl @@ -189,7 +189,7 @@ function setup_tile_routes(config, auth) # Extract relevant data based on tile coordinates @debug "Thread $(thread_id) - $(now()) : Extracting tile data" - mask_data = make_threshold_mask( + mask_data = threshold_mask( reg_assess_data[reg], Symbol(rtype), CriteriaBounds.(criteria_names, lbs, ubs), From f85f2f172f7b60a5ce3870fd4cdba92affd461d6 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Thu, 3 Oct 2024 17:34:06 +1000 Subject: [PATCH 26/60] Use more informative name and avoid duplication/complexity Remove `_temp_filter_lookup` as the name is not so informative and it seemed to be used only in one place --- src/criteria_assessment/criteria.jl | 9 +--- .../site_identification.jl | 45 ++++++++++--------- 2 files changed, 25 insertions(+), 29 deletions(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index e3bcdbc..78245c3 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -118,16 +118,9 @@ function setup_region_routes(config, auth) return file(mask_path; headers=COG_HEADERS) end - criteria_names, lbs, ubs = remove_rugosity(reg, parse_criteria_query(qp)...) - # Otherwise, create the file @debug "$(now()) : Assessing criteria" - assess = reg_assess_data[reg] - mask_data = make_threshold_mask( - assess, - Symbol(rtype), - CriteriaBounds.(criteria_names, lbs, ubs) - ) + mask_data = mask_region(reg_assess_data, reg, qp, rtype) @debug "$(now()) : Running on thread $(threadid())" @debug "Writing to $(mask_path)" diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 76b01a9..21e6ada 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -90,15 +90,24 @@ function filter_distances( end """ -# TODO: Better name, and address duplication in "/assess/{reg}/{rtype}" + mask_region(reg_assess_data, reg, qp, rtype) + +# Arguments +- `reg_assess_data` : Regional assessment data +- `reg` : The region name to assess +- `qp` : query parameters +- `rtype` : region type (one of `:slopes` or `:flats`) + +# Returns +Raster of region with locations that meet criteria masked. """ -function _temp_assess_region(reg_assess_data, reg, qp, rtype, config) +function mask_region(reg_assess_data, reg, qp, rtype) criteria_names, lbs, ubs = remove_rugosity(reg, parse_criteria_query(qp)...) 
# Otherwise, create the file @debug "$(now()) : Assessing criteria" assess = reg_assess_data[reg] - mask_data = make_threshold_mask( + mask_data = threshold_mask( assess, Symbol(rtype), CriteriaBounds.(criteria_names, lbs, ubs) @@ -107,21 +116,6 @@ function _temp_assess_region(reg_assess_data, reg, qp, rtype, config) return mask_data end -function _temp_filter_lookup(reg_assess_data, reg, qp, rtype, config) - criteria_names, lbs, ubs = remove_rugosity(reg, parse_criteria_query(qp)...) - - # Otherwise, create the file - @debug "$(now()) : Assessing criteria table" - assess = reg_assess_data[reg] - crit_lookup = apply_criteria_lookup( - assess, - Symbol(rtype), - CriteriaBounds.(criteria_names, lbs, ubs) - ) - - return crit_lookup -end - """ assess_region(reg_assess_data, reg, qp, rtype, config) @@ -150,7 +144,7 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) end # Make mask of suitable locations - mask_data = _temp_assess_region(reg_assess_data, reg, qp, rtype, config) + mask_data = mask_region(reg_assess_data, reg, qp, rtype) # Assess remaining pixels for their suitability @debug "Calculating proportional suitability score" @@ -215,7 +209,7 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt scan_locs = Raster(assessed_path_tif) else # Make mask of suitable locations - mask_data = _temp_assess_region(reg_assess_data, reg, criteria_qp, rtype, config) + mask_data = mask_region(reg_assess_data, reg, qp, rtype) # Assess remaining pixels for their suitability @debug "Calculating proportional suitability score" @@ -227,7 +221,16 @@ function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rt # Need dataframe of valid_lookup pixels @debug "Pre-processing assessment inputs." - crit_pixels = _temp_filter_lookup(reg_assess_data, reg, criteria_qp, rtype, config) + criteria_names, lbs, ubs = remove_rugosity(reg, parse_criteria_query(criteria_qp)...) + + # Otherwise, create the file + @debug "$(now()) : Assessing criteria table" + assess = reg_assess_data[reg] + crit_pixels = apply_criteria_lookup( + assess, + Symbol(rtype), + CriteriaBounds.(criteria_names, lbs, ubs) + ) res = abs(step(dims(scan_locs, X))) target_crs = convert(EPSG, crs(scan_locs)) From 638fa51b0e5f285e7d632eda0b7e20fbc67b1a46 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Sun, 6 Oct 2024 10:52:20 +1100 Subject: [PATCH 27/60] Wrap reef outline loading in if statement to avoid re-loading every time --- src/ReefGuideAPI.jl | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index ccaa439..1655f7a 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -132,12 +132,9 @@ function setup_regional_data(config::Dict) if @isdefined(REGIONAL_DATA) @debug "Using previously generated regional data store." - sleep(1) # Pause so message is noticeably visible - elseif isfile(reg_cache_fn) @debug "Loading regional data cache from disk" @eval const REGIONAL_DATA = deserialize($(reg_cache_fn)) - else @debug "Setting up regional data store..." 
regional_assessment_data = initialize_regional_data_cache( @@ -148,8 +145,15 @@ function setup_regional_data(config::Dict) @eval const REGIONAL_DATA = $(regional_assessment_data) end - reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") - REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) + # If REGIONAL_DATA is defined, but failed to load supported data that cannot + # be cached to disk (e.g., the reef outlines), then it will cause errors later on. + # Then there's no way to address this, even between web server sessions, as `const` + # values cannot be modified. + # We check for existance and try to load again if needed. + if !haskey(REGIONAL_DATA, "reef_outlines") + reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") + REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) + end return REGIONAL_DATA end From 24477e78d4139973ad392f61291cffc2adc546f2 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Sun, 6 Oct 2024 12:05:04 +1100 Subject: [PATCH 28/60] Helper to create cache filenames --- src/ReefGuideAPI.jl | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index 1655f7a..cd3d679 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -178,6 +178,25 @@ function _cache_location(config::Dict)::String return cache_loc end +""" + cache_filename(qp::Dict, config::Dict, suffix::String, ext::String) + +Generate a filename for a cache. + +# Arguments +- `qp` : Query parameters to hash +- `config` : app configuration (to extract cache parent directory from) +- `suffix` : a suffix to use in the filename (pass `""` if none required) +- `ext` : file extension to use +""" +function cache_filename(qp::Dict, config::Dict, suffix::String, ext::String) + file_id = string(hash(qp)) + temp_path = _cache_location(config) + cache_file_path = joinpath(temp_path, "$(file_id)$(suffix).$(ext)") + + return cache_file_path +end + """ n_gdal_threads(config::Dict)::String From 3277a68e4ebdad940fc5621555fd661017b941e0 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Sun, 6 Oct 2024 12:05:12 +1100 Subject: [PATCH 29/60] Formatting --- src/site_assessment/common_functions.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index fc73af2..eb0b441 100644 --- a/src/site_assessment/common_functions.jl +++ b/src/site_assessment/common_functions.jl @@ -278,7 +278,8 @@ Identifies all pixels in an input raster that return true for the function `crit # Arguments - `input_raster` : Raster containing pixels for the target region. -- `criteria_function` : Function that returns a boolean value for each pixel in `input_raster`. Pixels that return true will be targetted in analysis. +- `criteria_function` : Function that returns a boolean value for each pixel in `input_raster`. + Pixels that return true will be targetted in analysis. # Returns DataFrame containing indices, lon and lat for each pixel that is intended for further analysis. 
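[Usage note] To make the intent of the `cache_filename` helper introduced above concrete, here is a minimal, self-contained sketch of the caching pattern it supports: hash the query parameters once, build a deterministic file name, and short-circuit the expensive work when that file already exists (the later route patches wire this in via `cache_filename` itself). The example query parameters and the use of `mktempdir()` in place of the configured cache directory are illustrative assumptions only, not part of the patch:

    # Hypothetical, standalone illustration of the caching pattern (not the package code).
    # `mktempdir()` stands in for the configured cache directory the helper resolves internally.
    qp = Dict("Depth" => "-9.0:0.0", "Slope" => "0.0:40.0")   # example query parameters
    cache_dir = mktempdir()
    cache_path = joinpath(cache_dir, string(hash(qp)) * "_suitable.tiff")

    if isfile(cache_path)
        @info "Cache hit; a route would serve this file directly" cache_path
    else
        @info "Cache miss; the assessment would be computed and written here" cache_path
    end

Because the file name is derived from a hash of the query parameters, repeated requests with identical criteria map to the same cached artifact across routes.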
From 8e5ef058259278b928123c8ce7223485e676fe6a Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Sun, 6 Oct 2024 12:07:27 +1100 Subject: [PATCH 30/60] Helper to write COG files out with common options --- src/criteria_assessment/criteria.jl | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 78245c3..57c312e 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -82,6 +82,35 @@ end # Define struct type definition to auto-serialize/deserialize to JSON StructTypes.StructType(::Type{CriteriaBounds}) = StructTypes.Struct() +""" + _write_cog(file_path::String, data::Raster)::Nothing + +Write out a COG using common options. + +# Arguments +- `file_path` : Path to write data out to +- `data` : Raster data to write out +""" +function _write_cog(file_path::String, data::Raster)::Nothing + Rasters.write( + file_path, + data; + ext=".tiff", + source="gdal", + driver="COG", + options=Dict{String,String}( + "COMPRESS" => "DEFLATE", + "SPARSE_OK" => "TRUE", + "OVERVIEW_COUNT" => "5", + "BLOCKSIZE" => tile_size(config), + "NUM_THREADS" => n_gdal_threads(config) + ), + force=true + ) + + return nothing +end + """ criteria_middleware(handle) From 605fd9e3049f5c793475dfda0f956ef818462a7d Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Sun, 6 Oct 2024 12:09:53 +1100 Subject: [PATCH 31/60] Use cache helpers --- src/criteria_assessment/criteria.jl | 32 +++++++++++------------------ src/criteria_assessment/tiles.jl | 6 ++---- 2 files changed, 14 insertions(+), 24 deletions(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 57c312e..4ccd7cb 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -139,10 +139,7 @@ function setup_region_routes(config, auth) @get auth("/assess/{reg}/{rtype}") function (req::Request, reg::String, rtype::String) qp = queryparams(req) - file_id = string(hash(qp)) - mask_temp_path = _cache_location(config) - mask_path = joinpath(mask_temp_path, file_id * ".tiff") - + mask_path = cache_filename(qp, config, "", "tiff") if isfile(mask_path) return file(mask_path; headers=COG_HEADERS) end @@ -154,21 +151,7 @@ function setup_region_routes(config, auth) @debug "$(now()) : Running on thread $(threadid())" @debug "Writing to $(mask_path)" # Writing time: ~10-25 seconds - Rasters.write( - mask_path, - UInt8.(mask_data); - ext=".tiff", - source="gdal", - driver="COG", - options=Dict{String,String}( - "COMPRESS" => "DEFLATE", - "SPARSE_OK" => "TRUE", - "OVERVIEW_COUNT" => "5", - "BLOCKSIZE" => "256", - "NUM_THREADS" => n_gdal_threads(config) - ), - force=true - ) + _write_cog(mask_path, UInt8.(mask_data)) return file(mask_path; headers=COG_HEADERS) end @@ -180,7 +163,10 @@ function setup_region_routes(config, auth) # 127.0.0.1:8000/suitability/assess/Cairns-Cooktown/slopes?Depth=-4.0:-2.0&Slope=0.0:40.0&Rugosity=0.0:6.0 qp = queryparams(req) - return assess_region(reg_assess_data, reg, qp, rtype, config) + assessed_fn = cache_filename(qp, config, "$(reg)_suitable", "tiff") + if isfile(assessed_fn) + return file(assessed_fn; headers=COG_HEADERS) + end end @get auth("/suitability/site-suitability/{reg}/{rtype}") function ( @@ -188,6 +174,12 @@ function setup_region_routes(config, auth) ) # 127.0.0.1:8000/suitability/site-suitability/Cairns-Cooktown/slopes?Depth=-4.0:-2.0&Slope=0.0:40.0&Rugosity=0.0:6.0&SuitabilityThreshold=95&xdist=450&ydist=50 qp = queryparams(req) 
+ suitable_sites_fn = cache_filename( + qp, config, "$(reg)_potential_sites", "geojson" + ) + if isfile(suitable_sites_fn) + return file(suitable_sites_fn) + end criteria_names = string.(keys(criteria_data_map())) criteria_qp = filter(k -> k.first ∈ criteria_names, qp) diff --git a/src/criteria_assessment/tiles.jl b/src/criteria_assessment/tiles.jl index a2324d0..99cffb2 100644 --- a/src/criteria_assessment/tiles.jl +++ b/src/criteria_assessment/tiles.jl @@ -165,11 +165,9 @@ function setup_tile_routes(config, auth) reg_assess_data = setup_regional_data(config) @get auth("/tile/{z}/{x}/{y}") function (req::Request, z::Int64, x::Int64, y::Int64) # http://127.0.0.1:8000/tile/8/231/139?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 - qp = queryparams(req) - file_id = string(hash(qp)) - mask_temp_path = _cache_location(config) - mask_path = joinpath(mask_temp_path, file_id * ".png") + qp = queryparams(req) + mask_path = cache_filename(qp, config, "", "png") if isfile(mask_path) return file(mask_path; headers=TILE_HEADERS) end From cd80b3d0f220e29f34dd08f65a5907ac09e84556 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Sun, 6 Oct 2024 12:14:47 +1100 Subject: [PATCH 32/60] Separate concerns with arguably more understandable names `site_assess_region()` may be removed, to be determined in discussion with @BG-AIMS --- src/criteria_assessment/criteria.jl | 33 +++++-- .../site_identification.jl | 88 +++++++++++++------ 2 files changed, 87 insertions(+), 34 deletions(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 4ccd7cb..a96c995 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -167,6 +167,14 @@ function setup_region_routes(config, auth) if isfile(assessed_fn) return file(assessed_fn; headers=COG_HEADERS) end + + assessed = assess_region(reg_assess_data, reg, qp, rtype) + + @debug "$(now()) : Running on thread $(threadid())" + @debug "Writing to $(assessed_fn)" + _write_cog(assessed_fn, assessed) + + return file(assessed_fn; headers=COG_HEADERS) end @get auth("/suitability/site-suitability/{reg}/{rtype}") function ( @@ -180,16 +188,29 @@ function setup_region_routes(config, auth) if isfile(suitable_sites_fn) return file(suitable_sites_fn) end - criteria_names = string.(keys(criteria_data_map())) - criteria_qp = filter(k -> k.first ∈ criteria_names, qp) - assessment_qp = filter( + # Assess location suitability if needed + assessed_fn = cache_filename(qp, config, "$(reg)_suitable", "tiff") + if isfile(assessed_fn) + assessed = file(assessed_fn; headers=COG_HEADERS) + else + assessed = assess_region(reg_assess_data, reg, qp, rtype) + _write_cog(assessed_fn, assessed) + end + + # Extract criteria and assessment + criteria_names = string.(keys(criteria_data_map())) + pixel_criteria = filter(k -> k.first ∈ criteria_names, qp) + site_criteria = filter( k -> string(k.first) ∈ ["SuitabilityThreshold", "xdist", "ydist"], qp ) - return site_assess_region( - reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config - ) + best_sites = filter_sites(assess_sites( + reg_assess_data, reg, pixel_criteria, site_criteria, assessed + )) + + output_geojson(suitable_sites_fn, best_sites) + return file(suitable_sites_fn) end @get auth("/bounds/{reg}") function (req::Request, reg::String) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 21e6ada..d575d72 100644 --- a/src/criteria_assessment/site_identification.jl +++ 
b/src/criteria_assessment/site_identification.jl @@ -117,7 +117,7 @@ function mask_region(reg_assess_data, reg, qp, rtype) end """ - assess_region(reg_assess_data, reg, qp, rtype, config) + assess_region(reg_assess_data, reg, qp, rtype) Perform raster suitability assessment based on user defined criteria. @@ -126,23 +126,14 @@ Perform raster suitability assessment based on user defined criteria. - `reg` : Name of the region being assessed (format `Cairns-Cooktown` rather than `Cairns/Cooktown Management Area`). - `qp` : Dict containing bounds for each variable being filtered. - `rtype` : Type of zone to assess (flats or slopes). -- `config` : Information from `.config.toml` file. # Returns GeoTiff file of surrounding hectare suitability (1-100%) based on the criteria bounds input by a user. """ -function assess_region(reg_assess_data, reg, qp, rtype, config) +function assess_region(reg_assess_data, reg, qp, rtype) @debug "Assessing region's suitability score" - file_id = string(hash(qp)) - assessed_tmp_path = _cache_location(config) - assessed_path_tif = joinpath(assessed_tmp_path, file_id * "_suitable.tiff") - - if isfile(assessed_path_tif) - return file(assessed_path_tif) - end - # Make mask of suitable locations mask_data = mask_region(reg_assess_data, reg, qp, rtype) @@ -150,25 +141,66 @@ function assess_region(reg_assess_data, reg, qp, rtype, config) @debug "Calculating proportional suitability score" suitability_scores = proportion_suitable(mask_data.data) - @debug "$(now()) : Running on thread $(threadid())" - @debug "Writing to $(assessed_path_tif)" - Rasters.write( - assessed_path_tif, - rebuild(mask_data, suitability_scores); - ext=".tiff", - source="gdal", - driver="COG", - options=Dict{String,String}( - "COMPRESS" => "DEFLATE", - "SPARSE_OK" => "TRUE", - "OVERVIEW_COUNT" => "5", - "BLOCKSIZE" => "256", - "NUM_THREADS" => n_gdal_threads(config) - ), - force=true + return rebuild(mask_data, suitability_scores) +end + +""" + assess_sites(reg_assess_data::Dict, reg::String, pixel_criteria::Dict, site_criteria::Dict, assess_locs::Raster) + +# Arguments +- `reg_assess_data` : +- `reg` : Short region name +- `pixel_criteria` : parameters to assess specific locations with +- `site_criteria` : parameters to assess sites based on their polygonal representation +- `assess_locs` : Raster of suitability scores for each valid pixel + +# Returns +GeoDataFrame of all potential sites +""" +function assess_sites( + reg_assess_data::Dict, + reg::String, + pixel_criteria::Dict, + site_criteria::Dict, + assess_locs::Raster +) + criteria_names, lbs, ubs = remove_rugosity(reg, parse_criteria_query(pixel_criteria)...) 
+ + # Otherwise, create the file + @debug "$(now()) : Assessing criteria table" + assess = reg_assess_data[reg] + crit_pixels = apply_criteria_lookup( + assess, + Symbol(rtype), + CriteriaBounds.(criteria_names, lbs, ubs) + ) + + res = abs(step(dims(assess_locs, X))) + target_crs = convert(EPSG, crs(assess_locs)) + + suitability_threshold = parse(Int64, (site_criteria["SuitabilityThreshold"])) + assess_locs = identify_search_pixels(assess_locs, x -> x .> suitability_threshold) + + # Need reef outlines to indicate direction of the reef edge + gdf = REGIONAL_DATA["reef_outlines"] + reef_outlines = buffer_simplify(gdf) + reef_outlines = polygon_to_lines.(reef_outlines) + + x_dist = parse(Int64, site_criteria["xdist"]) + y_dist = parse(Int64, site_criteria["ydist"]) + initial_polygons = identify_potential_sites_edges( + crit_pixels, + assess_locs, + res, + gdf, + x_dist, + y_dist, + target_crs, + reef_outlines, + reg ) - return file(assessed_path_tif) + return initial_polygons end """ From 2a008fea782b27613c93265bdceed6e9e2cd527c Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Tue, 8 Oct 2024 09:24:44 +1000 Subject: [PATCH 33/60] Formatting criteria script --- src/criteria_assessment/criteria.jl | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index a96c995..995dd7f 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -205,9 +205,11 @@ function setup_region_routes(config, auth) k -> string(k.first) ∈ ["SuitabilityThreshold", "xdist", "ydist"], qp ) - best_sites = filter_sites(assess_sites( - reg_assess_data, reg, pixel_criteria, site_criteria, assessed - )) + best_sites = filter_sites( + assess_sites( + reg_assess_data, reg, pixel_criteria, site_criteria, assessed + ) + ) output_geojson(suitable_sites_fn, best_sites) return file(suitable_sites_fn) From 121f5f581bd733cf5a1ee43af1461ac40164dc98 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Tue, 8 Oct 2024 09:59:30 +1000 Subject: [PATCH 34/60] Add config for COG writing using tile_size --- src/ReefGuideAPI.jl | 4 ++-- src/criteria_assessment/criteria.jl | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index cd3d679..d37238e 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -217,7 +217,7 @@ end Retrieve the configured size of map tiles in pixels (width and height / lon and lat). """ -function tile_size(config::Dict)::Tuple +function tile_size(config::Dict)::String tile_dims = try res = parse(Int, config["server_config"]["TILE_SIZE"]) (res, res) @@ -225,7 +225,7 @@ function tile_size(config::Dict)::Tuple (256, 256) # 256x256 end - return tile_dims + return string(tile_dims[1]) end function get_auth_router(config::Dict) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 995dd7f..fb96b6e 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -91,7 +91,7 @@ Write out a COG using common options. 
- `file_path` : Path to write data out to - `data` : Raster data to write out """ -function _write_cog(file_path::String, data::Raster)::Nothing +function _write_cog(file_path::String, data::Raster, config)::Nothing Rasters.write( file_path, data; @@ -151,7 +151,7 @@ function setup_region_routes(config, auth) @debug "$(now()) : Running on thread $(threadid())" @debug "Writing to $(mask_path)" # Writing time: ~10-25 seconds - _write_cog(mask_path, UInt8.(mask_data)) + _write_cog(mask_path, UInt8.(mask_data), config) return file(mask_path; headers=COG_HEADERS) end @@ -172,7 +172,7 @@ function setup_region_routes(config, auth) @debug "$(now()) : Running on thread $(threadid())" @debug "Writing to $(assessed_fn)" - _write_cog(assessed_fn, assessed) + _write_cog(assessed_fn, assessed, config) return file(assessed_fn; headers=COG_HEADERS) end @@ -195,7 +195,7 @@ function setup_region_routes(config, auth) assessed = file(assessed_fn; headers=COG_HEADERS) else assessed = assess_region(reg_assess_data, reg, qp, rtype) - _write_cog(assessed_fn, assessed) + _write_cog(assessed_fn, assessed, config) end # Extract criteria and assessment From 56cd7647dfd2256a99e0901b2d5044d487d60d97 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Tue, 8 Oct 2024 11:00:16 +1100 Subject: [PATCH 35/60] Add type info to config argument --- src/criteria_assessment/criteria.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index fb96b6e..4b52e29 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -83,7 +83,7 @@ end StructTypes.StructType(::Type{CriteriaBounds}) = StructTypes.Struct() """ - _write_cog(file_path::String, data::Raster)::Nothing + _write_cog(file_path::String, data::Raster, config::Dict)::Nothing Write out a COG using common options. @@ -91,7 +91,7 @@ Write out a COG using common options. 
- `file_path` : Path to write data out to - `data` : Raster data to write out """ -function _write_cog(file_path::String, data::Raster, config)::Nothing +function _write_cog(file_path::String, data::Raster, config::Dict)::Nothing Rasters.write( file_path, data; @@ -102,7 +102,7 @@ function _write_cog(file_path::String, data::Raster, config)::Nothing "COMPRESS" => "DEFLATE", "SPARSE_OK" => "TRUE", "OVERVIEW_COUNT" => "5", - "BLOCKSIZE" => tile_size(config), + "BLOCKSIZE" => string(first(tile_size(config))), "NUM_THREADS" => n_gdal_threads(config) ), force=true From 204a15fc50688eb8ea70a393cf106419f20b974a Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Tue, 8 Oct 2024 12:20:19 +1100 Subject: [PATCH 36/60] Remove unused function --- .../site_identification.jl | 94 +------------------ 1 file changed, 1 insertion(+), 93 deletions(-) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index d575d72..ea51e6f 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -148,7 +148,7 @@ end assess_sites(reg_assess_data::Dict, reg::String, pixel_criteria::Dict, site_criteria::Dict, assess_locs::Raster) # Arguments -- `reg_assess_data` : +- `reg_assess_data` : Regional assessment data - `reg` : Short region name - `pixel_criteria` : parameters to assess specific locations with - `site_criteria` : parameters to assess sites based on their polygonal representation @@ -202,95 +202,3 @@ function assess_sites( return initial_polygons end - -""" - site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config) - -Perform site suitability assessment using polygon searches with user defined criteria. - -# Arguments -- `reg_assess_data` : Dictionary containing the regional data paths, reef outlines and full region names. -- `reg` : Name of the region being assessed (format `Cairns-Cooktown` rather than `Cairns/Cooktown Management Area`). -- `criteria_qp` : Dict containing bounds for each variable being filtered. -- `assessment_qp` : Dict containing the dimensions of the search polygon (`xdist`, `ydist`) and the `SuitabilityThreshold` to identify search pixels. -- `rtype` : Type of zone to assess (flats or slopes). -- `config` : Information from `.config.toml` file. - -# Returns -GeoJSON file containing result site polygons after filtering out polygons < 0.33 score and -keeping the highest scoring polygon where intersecting polygons occur. 
-""" -function site_assess_region(reg_assess_data, reg, criteria_qp, assessment_qp, rtype, config) - @debug "Assessing region's suitability score" - - file_id = string(hash(assessment_qp)) - assessed_tmp_path = _cache_location(config) - assessed_path_geojson = joinpath( - assessed_tmp_path, file_id * "_potential_sites.geojson" - ) - - if isfile(assessed_path_geojson) - return file(assessed_path_geojson) - end - - file_id = string(hash(criteria_qp)) - assessed_tmp_path = _cache_location(config) - assessed_path_tif = joinpath(assessed_tmp_path, file_id * "_suitable.tiff") - - if isfile(assessed_path_tif) - scan_locs = Raster(assessed_path_tif) - else - # Make mask of suitable locations - mask_data = mask_region(reg_assess_data, reg, qp, rtype) - - # Assess remaining pixels for their suitability - @debug "Calculating proportional suitability score" - suitability_scores = proportion_suitable(mask_data.data) - - # Need rebuild(mask_data, suitability_scores) as input to identify_potential_sites_edges - scan_locs = rebuild(mask_data, suitability_scores) - end - - # Need dataframe of valid_lookup pixels - @debug "Pre-processing assessment inputs." - criteria_names, lbs, ubs = remove_rugosity(reg, parse_criteria_query(criteria_qp)...) - - # Otherwise, create the file - @debug "$(now()) : Assessing criteria table" - assess = reg_assess_data[reg] - crit_pixels = apply_criteria_lookup( - assess, - Symbol(rtype), - CriteriaBounds.(criteria_names, lbs, ubs) - ) - - res = abs(step(dims(scan_locs, X))) - target_crs = convert(EPSG, crs(scan_locs)) - - suitability_threshold = parse(Int64, (assessment_qp["SuitabilityThreshold"])) - scan_locs = identify_search_pixels(scan_locs, x -> x .> suitability_threshold) - - # Need reef outlines - gdf = REGIONAL_DATA["reef_outlines"] - reef_outlines = buffer_simplify(gdf) - reef_outlines = polygon_to_lines.(reef_outlines) - - x_dist = parse(Int64, assessment_qp["xdist"]) - y_dist = parse(Int64, assessment_qp["ydist"]) - - @debug "Performing polygon site assessment." - initial_polygons = identify_potential_sites_edges( - crit_pixels, - scan_locs, - res, - gdf, - x_dist, - y_dist, - target_crs, - reef_outlines, - reg - ) - output_geojson(assessed_path_geojson, filter_sites(initial_polygons)) - - return file(assessed_path_geojson) -end From ab7d28fd39b7d6f7b8ea194802ff24193b152d17 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Tue, 8 Oct 2024 11:21:52 +1000 Subject: [PATCH 37/60] Revert to tuple output for tile_size --- src/ReefGuideAPI.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index d37238e..cd3d679 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -217,7 +217,7 @@ end Retrieve the configured size of map tiles in pixels (width and height / lon and lat). 
""" -function tile_size(config::Dict)::String +function tile_size(config::Dict)::Tuple tile_dims = try res = parse(Int, config["server_config"]["TILE_SIZE"]) (res, res) @@ -225,7 +225,7 @@ function tile_size(config::Dict)::String (256, 256) # 256x256 end - return string(tile_dims[1]) + return tile_dims end function get_auth_router(config::Dict) From b6cfff6ba9aefbd72d67d2171d1b84f23013035e Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Tue, 8 Oct 2024 13:56:22 +1000 Subject: [PATCH 38/60] Rename site identification function to identify_edge_aligned_sites And add rtype and debug statement to assess_sites() --- README.md | 4 ++-- docs/src/getting_started.md | 4 ++-- src/ReefGuideAPI.jl | 2 +- src/criteria_assessment/criteria.jl | 2 +- src/criteria_assessment/site_identification.jl | 16 +++++++++++++--- src/site_assessment/best_fit_polygons.jl | 8 ++++---- src/site_assessment/common_functions.jl | 4 ++-- 7 files changed, 25 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index f7bba17..708a32c 100644 --- a/README.md +++ b/README.md @@ -120,7 +120,7 @@ Locally, write times with four threads configured range from 10 to 15 seconds. ## Reef edge alignment for site searching -`identify_potential_sites_edges()` can be used to identify potential sites that only align with +`identify_edge_aligned_sites()` can be used to identify potential sites that only align with the nearest reef edge (or specified rotations away from this angle). This method works by identifying the closest edge of reef polygon geometries that have been converted into lines. @@ -140,7 +140,7 @@ The following processing is required before use: - `lon` and `lat` columns (FLoat64) must be added to the GeoDataFrame. - E.g. `valid_pixels.lon = first.(GI.coordinates.(valid_pixels.geometry))` The column used for masking should be the same as the column specified as geometry_col in - `identify_potential_sites_edges` (default = `:geometry`). + `identify_edge_aligned_sites` (default = `:geometry`). ## Docker build and run diff --git a/docs/src/getting_started.md b/docs/src/getting_started.md index cc1ecfd..d49d387 100644 --- a/docs/src/getting_started.md +++ b/docs/src/getting_started.md @@ -139,7 +139,7 @@ Locally, write times with four threads configured range from 10 to 15 seconds. ## Reef edge alignment for site searching -`identify_potential_sites_edges()` can be used to identify potential sites that only align with +`identify_edge_aligned_sites()` can be used to identify potential sites that only align with the nearest reef edge (or specified rotations away from this angle). This method works by identifying the closest edge of reef polygon geometries that have been converted into lines. @@ -159,4 +159,4 @@ The following processing is required before use: - `lon` and `lat` columns (FLoat64) must be added to the GeoDataFrame. - E.g. `valid_pixels.lon = first.(GI.coordinates.(valid_pixels.geometry))` The column used for masking should be the same as the column specified as geometry_col in - `identify_potential_sites_edges` (default = `:geometry`). + `identify_edge_aligned_sites` (default = `:geometry`). diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index cd3d679..a530d9d 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -277,7 +277,7 @@ export # Methods to assess/identify deployment "plots" of reef. 
export assess_reef_site, - identify_potential_sites_edges, + identify_edge_aligned_sites, filter_sites, output_geojson diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 4b52e29..1345f46 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -207,7 +207,7 @@ function setup_region_routes(config, auth) best_sites = filter_sites( assess_sites( - reg_assess_data, reg, pixel_criteria, site_criteria, assessed + reg_assess_data, reg, rtype, pixel_criteria, site_criteria, assessed ) ) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index ea51e6f..20a098c 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -145,11 +145,19 @@ function assess_region(reg_assess_data, reg, qp, rtype) end """ - assess_sites(reg_assess_data::Dict, reg::String, pixel_criteria::Dict, site_criteria::Dict, assess_locs::Raster) + assess_sites( + reg_assess_data::OrderedDict, + reg::String, + rtype::String, + pixel_criteria::Dict, + site_criteria::Dict, + assess_locs::Raster + ) # Arguments - `reg_assess_data` : Regional assessment data - `reg` : Short region name +- `rtype` : Slopes or Flats assessment type - `pixel_criteria` : parameters to assess specific locations with - `site_criteria` : parameters to assess sites based on their polygonal representation - `assess_locs` : Raster of suitability scores for each valid pixel @@ -158,8 +166,9 @@ end GeoDataFrame of all potential sites """ function assess_sites( - reg_assess_data::Dict, + reg_assess_data::OrderedDict, reg::String, + rtype::String, pixel_criteria::Dict, site_criteria::Dict, assess_locs::Raster @@ -188,7 +197,8 @@ function assess_sites( x_dist = parse(Int64, site_criteria["xdist"]) y_dist = parse(Int64, site_criteria["ydist"]) - initial_polygons = identify_potential_sites_edges( + @debug "Assessing site polygons for $(size(assess_locs, 1)) locations." + initial_polygons = identify_edge_aligned_sites( crit_pixels, assess_locs, res, diff --git a/src/site_assessment/best_fit_polygons.jl b/src/site_assessment/best_fit_polygons.jl index 0e0e978..eb6e6f5 100644 --- a/src/site_assessment/best_fit_polygons.jl +++ b/src/site_assessment/best_fit_polygons.jl @@ -71,7 +71,7 @@ function assess_reef_site( end """ - identify_potential_sites_edges( + identify_edge_aligned_sites( df::DataFrame, search_pixels::DataFrame, res::Float64, @@ -109,7 +109,7 @@ Method is currently opperating for CRS in degrees units. # Returns DataFrame containing highest score, rotation and polygon for each assessment at pixels in indices. """ -function identify_potential_sites_edges( +function identify_edge_aligned_sites( df::DataFrame, search_pixels::DataFrame, res::Float64, @@ -247,7 +247,7 @@ function assess_reef_site( end """ - identify_potential_sites_edges( + identify_edge_aligned_sites( rst_stack::RasterStack, search_pixels::DataFrame, gdf::DataFrame, @@ -280,7 +280,7 @@ and used for searching with custom rotation parameters. # Returns DataFrame containing highest score, rotation and polygon for each assessment at pixels in indices. 
""" -function identify_potential_sites_edges( +function identify_edge_aligned_sites( rst_stack::RasterStack, search_pixels::DataFrame, gdf::DataFrame, diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index eb0b441..c1ff91e 100644 --- a/src/site_assessment/common_functions.jl +++ b/src/site_assessment/common_functions.jl @@ -216,12 +216,12 @@ Where site polygons are overlapping, keep only the highest scoring site polygon. # Arguments - `res_df` : Results DataFrame containing potential site polygons - (output from `identify_potential_sites()` or `identify_potential_sites_edges()`). + (output from `identify_potential_sites()` or `identify_edge_aligned_sites()`). # Returns DataFrame containing only the highest scoring sites where site polygons intersect, and containing only sites with scores greater than the `surr_threshold` specified in -`identify_potential_sites_edges()` (default=0.33). +`identify_edge_aligned_sites()` (default=0.33). """ function filter_sites(res_df::DataFrame)::DataFrame res_df.row_ID = 1:size(res_df, 1) From fc2fe7ecb4880c2246db08651e9799959b9d7985 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Tue, 8 Oct 2024 16:18:01 +1100 Subject: [PATCH 39/60] Create an empty tile and reuse for no data cases --- src/ReefGuideAPI.jl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index a530d9d..533267b 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -241,6 +241,12 @@ Invokes warm up of regional data cache to reduce later spin up times. """ function warmup_cache(config_path::String) config = TOML.parsefile(config_path) + + no_data_path = cache_filename(Dict("no_data"=>"none"), config, "no_data", "png") + if !isfile(no_data_path) + save(no_data_path, zeros(RGBA, tile_size(config))) + end + return setup_regional_data(config) end From 530825a7a79a859afe297ce3fd033c81bc6d11ec Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Tue, 8 Oct 2024 16:19:11 +1100 Subject: [PATCH 40/60] Use cached empty tile instead of recreating each time --- src/criteria_assessment/tiles.jl | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/criteria_assessment/tiles.jl b/src/criteria_assessment/tiles.jl index 99cffb2..f58a858 100644 --- a/src/criteria_assessment/tiles.jl +++ b/src/criteria_assessment/tiles.jl @@ -164,8 +164,13 @@ function setup_tile_routes(config, auth) reg_assess_data = setup_regional_data(config) @get auth("/tile/{z}/{x}/{y}") function (req::Request, z::Int64, x::Int64, y::Int64) + # http://127.0.0.1:8000/tile/{z}/{x}/{y}?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 + # http://127.0.0.1:8000/tile/8/231/139?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 + # http://127.0.0.1:8000/tile/7/115/69?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 # http://127.0.0.1:8000/tile/8/231/139?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 + no_data_path = cache_filename(Dict("no_data"=>"none"), config, "no_data", "png") + qp = queryparams(req) mask_path = cache_filename(qp, config, "", "png") if isfile(mask_path) @@ -195,18 +200,13 @@ function setup_tile_routes(config, auth) (lat_min, lat_max) ) - if any(size(mask_data) .== 0) || all(size(mask_data) .< tile_size(config)) + if any(size(mask_data) .== 0) @debug "Thread $(thread_id) - No data for $reg ($rtype) at $z/$x/$y" - save(mask_path, zeros(RGBA, tile_size(config))) - 
return file(mask_path; headers=TILE_HEADERS) + return file(no_data_path; headers=TILE_HEADERS) end @debug "Thread $(thread_id) - Extracted data size: $(size(mask_data))" - # Working: - # http://127.0.0.1:8000/tile/7/115/69?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 - # http://127.0.0.1:8000/tile/8/231/139?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 - # Using if block to avoid type instability @debug "Thread $(thread_id) - $(now()) : Creating PNG (with transparency)" if any(size(mask_data) .> tile_size(config)) From 361a78b7298a59e8b0989066496d0696fcd8fa4c Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Tue, 8 Oct 2024 16:19:46 +1100 Subject: [PATCH 41/60] Simplify if/else and correct orientation at higher zoom levels --- src/criteria_assessment/tiles.jl | 32 ++++++++++++++------------------ 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/src/criteria_assessment/tiles.jl b/src/criteria_assessment/tiles.jl index f58a858..9a89157 100644 --- a/src/criteria_assessment/tiles.jl +++ b/src/criteria_assessment/tiles.jl @@ -209,27 +209,23 @@ function setup_tile_routes(config, auth) # Using if block to avoid type instability @debug "Thread $(thread_id) - $(now()) : Creating PNG (with transparency)" - if any(size(mask_data) .> tile_size(config)) - if any(size(mask_data) .== size(reg_assess_data[reg].stack)) || (z < 12) - # Account for geographic positioning when zoomed out further than - # raster area - resampled = adjusted_nearest(mask_data, z, x, y, tile_size(config)) - else - # Zoomed in close so less need to account for curvature - # BSpline(Constant()) is equivalent to nearest neighbor. - # See details in: https://juliaimages.org/ImageTransformations.jl/stable/reference/#Low-level-warping-API - resampled = imresize( - mask_data, tile_size(config); method=BSpline(Constant()) - ) - end - - img = zeros(RGBA, size(resampled)) - img[resampled .== 1] .= RGBA(0, 0, 0, 1) + + img = zeros(RGBA, tile_size(config)) + if (z < 12) + # Account for geographic positioning when zoomed out further than + # raster area + resampled = adjusted_nearest(mask_data, z, x, y, tile_size(config)) else - img = zeros(RGBA, size(mask_data)) - img[mask_data .== 1] .= RGBA(0, 0, 0, 1) + # Zoomed in close so less need to account for curvature + # BSpline(Constant()) is equivalent to nearest neighbor. 
+ # See details in: https://juliaimages.org/ImageTransformations.jl/stable/reference/#Low-level-warping-API + resampled = imresize( + mask_data.data', tile_size(config); method=BSpline(Constant()) + ) end + img[resampled .== 1] .= RGBA(0, 0, 0, 1) + @debug "Thread $(thread_id) - $(now()) : Saving and serving file" save(mask_path, img) return file(mask_path; headers=TILE_HEADERS) From 043065ee20fde2ad6e4359e1200756b48c42f8b3 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Tue, 8 Oct 2024 17:07:03 +1100 Subject: [PATCH 42/60] Add warmup to server spinup --- Manifest.toml | 134 +++++++++++++++++-------------- src/ReefGuideAPI.jl | 3 + src/criteria_assessment/tiles.jl | 4 +- 3 files changed, 78 insertions(+), 63 deletions(-) diff --git a/Manifest.toml b/Manifest.toml index 0b173e6..3d72c84 100644 --- a/Manifest.toml +++ b/Manifest.toml @@ -289,9 +289,9 @@ version = "0.7.6" [[deps.CodecZstd]] deps = ["TranscodingStreams", "Zstd_jll"] -git-tree-sha1 = "5e41a52bec3b0881a7eb54f5391b779994504186" +git-tree-sha1 = "d0073f473757f0d39ac9707f1eb03b431573cbd8" uuid = "6b39b394-51ab-5f42-8807-6242bab2b4c2" -version = "0.8.5" +version = "0.8.6" [[deps.ColorSchemes]] deps = ["ColorTypes", "ColorVectorSpace", "Colors", "FixedPointNumbers", "PrecompileTools", "Random"] @@ -414,10 +414,10 @@ uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" version = "1.16.0" [[deps.DataFrames]] -deps = ["Compat", "DataAPI", "DataStructures", "Future", "InlineStrings", "InvertedIndices", "IteratorInterfaceExtensions", "LinearAlgebra", "Markdown", "Missings", "PooledArrays", "PrecompileTools", "PrettyTables", "Printf", "REPL", "Random", "Reexport", "SentinelArrays", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"] -git-tree-sha1 = "04c738083f29f86e62c8afc341f0967d8717bdb8" +deps = ["Compat", "DataAPI", "DataStructures", "Future", "InlineStrings", "InvertedIndices", "IteratorInterfaceExtensions", "LinearAlgebra", "Markdown", "Missings", "PooledArrays", "PrecompileTools", "PrettyTables", "Printf", "Random", "Reexport", "SentinelArrays", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"] +git-tree-sha1 = "fb61b4812c49343d7ef0b533ba982c46021938a6" uuid = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" -version = "1.6.1" +version = "1.7.0" [[deps.DataStructures]] deps = ["Compat", "InteractiveUtils", "OrderedCollections"] @@ -452,10 +452,10 @@ uuid = "244e2a9f-e319-4986-a169-4d1fe445cd52" version = "0.1.2" [[deps.DelaunayTriangulation]] -deps = ["AdaptivePredicates", "EnumX", "ExactPredicates", "Random"] -git-tree-sha1 = "94eb20e6621600f4315813b1d1fc9b8a5a6a34db" +deps = ["AdaptivePredicates", "EnumX", "ExactPredicates", "PrecompileTools", "Random"] +git-tree-sha1 = "668bb97ea6df5e654e6288d87d2243591fe68665" uuid = "927a84f5-c5f4-47a5-9785-b46e178433df" -version = "1.4.0" +version = "1.6.0" [[deps.DiffResults]] deps = ["StaticArraysCore"] @@ -512,9 +512,9 @@ uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" [[deps.Distributions]] deps = ["AliasTables", "FillArrays", "LinearAlgebra", "PDMats", "Printf", "QuadGK", "Random", "SpecialFunctions", "Statistics", "StatsAPI", "StatsBase", "StatsFuns"] -git-tree-sha1 = "e6c693a0e4394f8fda0e51a5bdf5aef26f8235e9" +git-tree-sha1 = "d7477ecdafb813ddee2ae727afa94e9dcb5f3fb0" uuid = "31c24e10-a181-5473-b8eb-7969acd0382f" -version = "0.25.111" +version = "0.25.112" [deps.Distributions.extensions] DistributionsChainRulesCoreExt = "ChainRulesCore" @@ -585,9 +585,9 @@ version = "1.8.0" [[deps.FFTW_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = 
"c6033cc3892d0ef5bb9cd29b7f2f0331ea5184ea" +git-tree-sha1 = "4d81ed14783ec49ce9f2e168208a12ce1815aa25" uuid = "f5851436-0d7a-5f13-b9de-f02708fd171a" -version = "3.3.10+0" +version = "3.3.10+1" [[deps.FLoops]] deps = ["BangBang", "Compat", "FLoopsBase", "InitialValues", "JuliaVariables", "MLStyle", "Serialization", "Setfield", "Transducers"] @@ -644,19 +644,21 @@ weakdeps = ["PDMats", "SparseArrays", "Statistics"] FillArraysStatisticsExt = "Statistics" [[deps.FiniteDiff]] -deps = ["ArrayInterface", "LinearAlgebra", "Setfield", "SparseArrays"] -git-tree-sha1 = "f9219347ebf700e77ca1d48ef84e4a82a6701882" +deps = ["ArrayInterface", "LinearAlgebra", "Setfield"] +git-tree-sha1 = "b10bdafd1647f57ace3885143936749d61638c3b" uuid = "6a86dc24-6348-571c-b903-95158fe2bd41" -version = "2.24.0" +version = "2.26.0" [deps.FiniteDiff.extensions] FiniteDiffBandedMatricesExt = "BandedMatrices" FiniteDiffBlockBandedMatricesExt = "BlockBandedMatrices" + FiniteDiffSparseArraysExt = "SparseArrays" FiniteDiffStaticArraysExt = "StaticArrays" [deps.FiniteDiff.weakdeps] BandedMatrices = "aae01518-5342-5314-be14-df237901396f" BlockBandedMatrices = "ffab5731-97b5-5995-9138-79e8c1846df0" + SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" [[deps.FixedPointNumbers]] @@ -721,9 +723,9 @@ version = "0.4.2" [[deps.GeoInterface]] deps = ["Extents", "GeoFormatTypes"] -git-tree-sha1 = "5921fc0704e40c024571eca551800c699f86ceb4" +git-tree-sha1 = "2f6fce56cdb8373637a6614e14a5768a88450de2" uuid = "cf35fbd7-0cd7-5166-be24-54bfbe79505f" -version = "1.3.6" +version = "1.3.7" [[deps.GeoInterfaceMakie]] deps = ["GeoInterface", "GeometryBasics", "MakieCore"] @@ -763,9 +765,9 @@ version = "0.4.11" [[deps.GeometryOps]] deps = ["CoordinateTransformations", "DataAPI", "DelaunayTriangulation", "ExactPredicates", "GeoInterface", "GeometryBasics", "InteractiveUtils", "LinearAlgebra", "SortTileRecursiveTree", "Statistics", "Tables"] -git-tree-sha1 = "54a1777d49f40dccd749b6df98b8cbe178dee0b9" +git-tree-sha1 = "51857a37476d46ff9ee99d188de1b4ce0382594d" uuid = "3251bfac-6a57-4b6d-aa61-ac1fef2975ab" -version = "0.1.12" +version = "0.1.13" [deps.GeometryOps.extensions] GeometryOpsFlexiJoinsExt = "FlexiJoins" @@ -802,9 +804,9 @@ version = "1.1.2" [[deps.Graphs]] deps = ["ArnoldiMethod", "Compat", "DataStructures", "Distributed", "Inflate", "LinearAlgebra", "Random", "SharedArrays", "SimpleTraits", "SparseArrays", "Statistics"] -git-tree-sha1 = "ebd18c326fa6cee1efb7da9a3b45cf69da2ed4d9" +git-tree-sha1 = "1dc470db8b1131cfc7fb4c115de89fe391b9e780" uuid = "86223c79-3864-5bf0-83f7-82e725a168b6" -version = "1.11.2" +version = "1.12.0" [[deps.HDF5_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "LLVMOpenMP_jll", "LazyArtifacts", "LibCURL_jll", "Libdl", "MPICH_jll", "MPIPreferences", "MPItrampoline_jll", "MicrosoftMPI_jll", "OpenMPI_jll", "OpenSSL_jll", "TOML", "Zlib_jll", "libaec_jll"] @@ -832,9 +834,9 @@ version = "0.1.17" [[deps.Hwloc_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "5e19e1e4fa3e71b774ce746274364aef0234634e" +git-tree-sha1 = "dd3b49277ec2bb2c6b94eb1604d4d0616016f7a6" uuid = "e33a78d0-f292-5ffc-b300-72abe9b543c8" -version = "2.11.1+0" +version = "2.11.2+0" [[deps.HypergeometricFunctions]] deps = ["LinearAlgebra", "OpenLibm_jll", "SpecialFunctions"] @@ -867,9 +869,9 @@ version = "0.1.5" [[deps.ImageBinarization]] deps = ["HistogramThresholding", "ImageCore", "LinearAlgebra", "Polynomials", "Reexport", "Statistics"] -git-tree-sha1 = 
"f5356e7203c4a9954962e3757c08033f2efe578a" +git-tree-sha1 = "33485b4e40d1df46c806498c73ea32dc17475c59" uuid = "cbc4b850-ae4b-5111-9e64-df94c024a13d" -version = "0.3.0" +version = "0.3.1" [[deps.ImageContrastAdjustment]] deps = ["ImageBase", "ImageCore", "ImageTransformations", "Parameters"] @@ -1030,9 +1032,9 @@ version = "0.15.1" [[deps.IntervalArithmetic]] deps = ["CRlibm_jll", "MacroTools", "RoundingEmulator"] -git-tree-sha1 = "fe30dec78e68f27fc416901629c6e24e9d5f057b" +git-tree-sha1 = "8e125d40cae3a9f4276cdfeb4fcdb1828888a4b3" uuid = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253" -version = "0.22.16" +version = "0.22.17" weakdeps = ["DiffRules", "ForwardDiff", "IntervalSets", "LinearAlgebra", "RecipesBase"] [deps.IntervalArithmetic.extensions] @@ -1127,9 +1129,9 @@ version = "0.1.5" [[deps.JpegTurbo_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "c84a835e1a09b289ffcd2271bf2a337bbdda6637" +git-tree-sha1 = "25ee0be4d43d0269027024d75a24c24d6c6e590c" uuid = "aacddb02-875f-59d6-b918-886e6ef4fbf8" -version = "3.0.3+0" +version = "3.0.4+0" [[deps.JuliaVariables]] deps = ["MLStyle", "NameResolution"] @@ -1166,9 +1168,9 @@ weakdeps = ["Serialization"] [[deps.LZO_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "70c5da094887fd2cae843b8db33920bac4b6f07d" +git-tree-sha1 = "854a9c268c43b77b0a27f22d7fab8d33cdb3a731" uuid = "dd4b983a-f0e5-5f8d-a1b7-129d4a5fb1ac" -version = "2.10.2+0" +version = "2.10.2+1" [[deps.LaTeXStrings]] git-tree-sha1 = "50901ebc375ed41dbf8058da26f9de442febbbec" @@ -1259,9 +1261,9 @@ version = "4.5.1+1" [[deps.LightBSON]] deps = ["DataStructures", "Dates", "DecFP", "FNVHash", "JSON3", "Sockets", "StructTypes", "Transducers", "UUIDs", "UnsafeArrays", "WeakRefStrings"] -git-tree-sha1 = "1c98cccebf21f97c5a0cc81ff8cebffd1e14fb0f" +git-tree-sha1 = "ce253ad53efeed8201656971874d8cd9dad0227e" uuid = "a4a7f996-b3a6-4de6-b9db-2fa5f350df41" -version = "0.2.20" +version = "0.2.21" [[deps.LineSearches]] deps = ["LinearAlgebra", "NLSolversBase", "NaNMath", "Parameters", "Printf"] @@ -1317,9 +1319,9 @@ weakdeps = ["ChainRulesCore", "ForwardDiff", "SpecialFunctions"] [[deps.Lz4_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "7f26c8fc5229e68484e0b3447312c98e16207d11" +git-tree-sha1 = "abf88ff67f4fd89839efcae2f4c39cbc4ecd0846" uuid = "5ced341a-0733-55b8-9ab6-a4889d929147" -version = "1.10.0+0" +version = "1.10.0+1" [[deps.MIMEs]] git-tree-sha1 = "65f28ad4b594aebe22157d6fac869786a255b7eb" @@ -1339,9 +1341,9 @@ version = "0.4.17" [[deps.MPICH_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "Hwloc_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "MPIPreferences", "TOML"] -git-tree-sha1 = "19d4bd098928a3263693991500d05d74dbdc2004" +git-tree-sha1 = "7715e65c47ba3941c502bffb7f266a41a7f54423" uuid = "7cb0a576-ebde-5e09-9194-50597f1243b4" -version = "4.2.2+0" +version = "4.2.3+0" [[deps.MPIPreferences]] deps = ["Libdl", "Preferences"] @@ -1351,9 +1353,9 @@ version = "0.1.11" [[deps.MPItrampoline_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "MPIPreferences", "TOML"] -git-tree-sha1 = "8c35d5420193841b2f367e658540e8d9e0601ed0" +git-tree-sha1 = "70e830dab5d0775183c99fc75e4c24c614ed7142" uuid = "f1f71cc9-e9ae-5b93-9b94-4fe0e1ad3748" -version = "5.4.0+0" +version = "5.5.1+0" [[deps.MacroTools]] deps = ["Markdown", "Random"] @@ -1363,9 +1365,9 @@ version = "0.5.13" [[deps.MakieCore]] deps = ["ColorTypes", "GeometryBasics", "IntervalSets", "Observables"] -git-tree-sha1 = 
"b0e2e3473af351011e598f9219afb521121edd2b" +git-tree-sha1 = "4604f03e5b057e8e62a95a44929cafc9585b0fe9" uuid = "20f20a25-4f0e-4fdf-b5d1-57303727442b" -version = "0.8.6" +version = "0.8.9" [[deps.ManualMemory]] git-tree-sha1 = "bcaef4fc7a0cfe2cba636d84cda54b5e4e4ca3cd" @@ -1515,9 +1517,9 @@ version = "1.7.1" [[deps.OnlineStatsBase]] deps = ["AbstractTrees", "Dates", "LinearAlgebra", "OrderedCollections", "Statistics", "StatsBase"] -git-tree-sha1 = "9a067a4ea67d1ebab4554b73792dd429f098387c" +git-tree-sha1 = "a5a5a68d079ce531b0220e99789e0c1c8c5ed215" uuid = "925886fa-5bf2-5e8e-b522-a9147a512338" -version = "1.7.0" +version = "1.7.1" [[deps.OpenBLAS_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] @@ -1666,10 +1668,22 @@ uuid = "1d0040c9-8b98-4ee7-8388-3f51789ca0ad" version = "0.2.2" [[deps.Polynomials]] -deps = ["LinearAlgebra", "RecipesBase"] -git-tree-sha1 = "a14a99e430e42a105c898fcc7f212334bc7be887" +deps = ["LinearAlgebra", "RecipesBase", "Requires", "Setfield", "SparseArrays"] +git-tree-sha1 = "1a9cfb2dc2c2f1bd63f1906d72af39a79b49b736" uuid = "f27b6e38-b328-58d1-80ce-0feddd5e7a45" -version = "3.2.4" +version = "4.0.11" + + [deps.Polynomials.extensions] + PolynomialsChainRulesCoreExt = "ChainRulesCore" + PolynomialsFFTWExt = "FFTW" + PolynomialsMakieCoreExt = "MakieCore" + PolynomialsMutableArithmeticsExt = "MutableArithmetics" + + [deps.Polynomials.weakdeps] + ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" + FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" + MakieCore = "20f20a25-4f0e-4fdf-b5d1-57303727442b" + MutableArithmetics = "d8a4904e-b15c-11e9-3269-09a3773c0cb0" [[deps.PooledArrays]] deps = ["DataAPI", "Future"] @@ -1702,9 +1716,9 @@ version = "0.2.0" [[deps.PrettyTables]] deps = ["Crayons", "LaTeXStrings", "Markdown", "PrecompileTools", "Printf", "Reexport", "StringManipulation", "Tables"] -git-tree-sha1 = "66b20dd35966a748321d3b2537c4584cf40387c7" +git-tree-sha1 = "1101cd475833706e4d0e7b122218257178f48f34" uuid = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d" -version = "2.3.2" +version = "2.4.0" [[deps.Printf]] deps = ["Unicode"] @@ -2058,9 +2072,9 @@ weakdeps = ["ChainRulesCore", "InverseFunctions"] [[deps.StringManipulation]] deps = ["PrecompileTools"] -git-tree-sha1 = "a04cabe79c5f01f4d723cc6704070ada0b9d46d5" +git-tree-sha1 = "a6b1675a536c5ad1a60e5a5153e1fee12eb146e3" uuid = "892a3eda-7b42-436c-8928-eab12a02cf0e" -version = "0.3.4" +version = "0.4.0" [[deps.StructArrays]] deps = ["ConstructionBase", "DataAPI", "Tables"] @@ -2170,9 +2184,9 @@ uuid = "06e1c1a7-607b-532d-9fad-de7d9aa2abac" version = "0.5.0" [[deps.TranscodingStreams]] -git-tree-sha1 = "e84b3a11b9bece70d14cce63406bbc79ed3464d2" +git-tree-sha1 = "0c45878dcfdcfa8480052b6ab162cdd138781742" uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" -version = "0.11.2" +version = "0.11.3" [[deps.Transducers]] deps = ["Accessors", "Adapt", "ArgCheck", "BangBang", "Baselet", "CompositionsBase", "ConstructionBase", "DefineSingletons", "Distributed", "InitialValues", "Logging", "Markdown", "MicroCollections", "Requires", "SplittablesBase", "Tables"] @@ -2242,9 +2256,9 @@ version = "0.6.3" [[deps.WellKnownGeometry]] deps = ["GeoFormatTypes", "GeoInterface"] -git-tree-sha1 = "af8adada82cfd2791bb1e39a87cbf538bb76fdaf" +git-tree-sha1 = "5b29dedd69787822220775b8baaea377173a2b02" uuid = "0f680547-7be7-4555-8820-bb198eeb646b" -version = "0.2.3" +version = "0.2.4" [[deps.WoodburyMatrices]] deps = ["LinearAlgebra", "SparseArrays"] @@ -2306,9 +2320,9 @@ version = "1.2.13+1" [[deps.Zstd_jll]] deps = ["Artifacts", "JLLWrappers", 
"Libdl"] -git-tree-sha1 = "e678132f07ddb5bfa46857f0d7620fb9be675d3b" +git-tree-sha1 = "555d1076590a6cc2fdee2ef1469451f872d8b41b" uuid = "3161d3a3-bdf6-5164-811a-617609db77b4" -version = "1.5.6+0" +version = "1.5.6+1" [[deps.boost_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Zlib_jll"] @@ -2335,15 +2349,15 @@ version = "100.701.300+0" [[deps.libpng_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Zlib_jll"] -git-tree-sha1 = "d7015d2e18a5fd9a4f47de711837e980519781a4" +git-tree-sha1 = "b70c870239dc3d7bc094eb2d6be9b73d27bef280" uuid = "b53b4c65-9356-5827-b1ea-8c7a1a84506f" -version = "1.6.43+1" +version = "1.6.44+0" [[deps.libsixel_jll]] deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Pkg", "libpng_jll"] -git-tree-sha1 = "d4f63314c8aa1e48cd22aa0c17ed76cd1ae48c3c" +git-tree-sha1 = "7dfa0fd9c783d3d0cc43ea1af53d69ba45c447df" uuid = "075b6546-f08a-558a-be8f-8157d0f608a5" -version = "1.10.3+0" +version = "1.10.3+1" [[deps.libzip_jll]] deps = ["Artifacts", "Bzip2_jll", "GnuTLS_jll", "JLLWrappers", "Libdl", "XZ_jll", "Zlib_jll", "Zstd_jll"] diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index 533267b..3440045 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -242,6 +242,7 @@ Invokes warm up of regional data cache to reduce later spin up times. function warmup_cache(config_path::String) config = TOML.parsefile(config_path) + # Create re-usable empty tile no_data_path = cache_filename(Dict("no_data"=>"none"), config, "no_data", "png") if !isfile(no_data_path) save(no_data_path, zeros(RGBA, tile_size(config))) @@ -253,6 +254,8 @@ end function start_server(config_path) @info "Launching server... please wait" + ReefGuideAPI.warmup_cache(config_path) + @info "Parsing configuration from $(config_path)..." config = TOML.parsefile(config_path) diff --git a/src/criteria_assessment/tiles.jl b/src/criteria_assessment/tiles.jl index 9a89157..fa1c839 100644 --- a/src/criteria_assessment/tiles.jl +++ b/src/criteria_assessment/tiles.jl @@ -2,7 +2,7 @@ Helper methods to support tiling """ -using ImageIO, Images, Interpolations +using Images, ImageIO, Interpolations # HTTP response headers for tile images const TILE_HEADERS = [ @@ -207,9 +207,7 @@ function setup_tile_routes(config, auth) @debug "Thread $(thread_id) - Extracted data size: $(size(mask_data))" - # Using if block to avoid type instability @debug "Thread $(thread_id) - $(now()) : Creating PNG (with transparency)" - img = zeros(RGBA, tile_size(config)) if (z < 12) # Account for geographic positioning when zoomed out further than From bb86803f73023f830088d18a2e73bce46aec13ba Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Tue, 8 Oct 2024 17:09:24 +1100 Subject: [PATCH 43/60] Appease the great formatter --- src/ReefGuideAPI.jl | 2 +- src/criteria_assessment/tiles.jl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index 3440045..ece071f 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -243,7 +243,7 @@ function warmup_cache(config_path::String) config = TOML.parsefile(config_path) # Create re-usable empty tile - no_data_path = cache_filename(Dict("no_data"=>"none"), config, "no_data", "png") + no_data_path = cache_filename(Dict("no_data" => "none"), config, "no_data", "png") if !isfile(no_data_path) save(no_data_path, zeros(RGBA, tile_size(config))) end diff --git a/src/criteria_assessment/tiles.jl b/src/criteria_assessment/tiles.jl index fa1c839..1673500 100644 --- a/src/criteria_assessment/tiles.jl +++ 
b/src/criteria_assessment/tiles.jl @@ -169,7 +169,7 @@ function setup_tile_routes(config, auth) # http://127.0.0.1:8000/tile/7/115/69?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 # http://127.0.0.1:8000/tile/8/231/139?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 - no_data_path = cache_filename(Dict("no_data"=>"none"), config, "no_data", "png") + no_data_path = cache_filename(Dict("no_data" => "none"), config, "no_data", "png") qp = queryparams(req) mask_path = cache_filename(qp, config, "", "png") From 3616eec9e6f7cccb9acb13b7a9f5221f633781f5 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Wed, 9 Oct 2024 09:02:29 +1000 Subject: [PATCH 44/60] Remove unnecessary trim() causing confusion in indices --- src/site_assessment/common_functions.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index c1ff91e..2f9a05b 100644 --- a/src/site_assessment/common_functions.jl +++ b/src/site_assessment/common_functions.jl @@ -285,7 +285,7 @@ Identifies all pixels in an input raster that return true for the function `crit DataFrame containing indices, lon and lat for each pixel that is intended for further analysis. """ function identify_search_pixels(input_raster::Raster, criteria_function)::DataFrame - pixels = trim(criteria_function(input_raster)) + pixels = criteria_function(input_raster) indices = findall(pixels) indices_lon = Vector{Union{Missing,Float64}}(missing, size(indices, 1)) indices_lat = Vector{Union{Missing,Float64}}(missing, size(indices, 1)) From 7b5f3bfc368178df00d2a778f14bf95981596055 Mon Sep 17 00:00:00 2001 From: Benjamin Grier Date: Thu, 10 Oct 2024 17:09:02 +1000 Subject: [PATCH 45/60] Add details to proportion_sutiable docs and clarify size calls --- src/criteria_assessment/site_identification.jl | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 20a098c..1198d07 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -7,22 +7,27 @@ using FLoops, ThreadsX Calculate the the proportion of the subsection that is suitable for deployments. Subsection is the surrounding a rough hectare area centred on each cell of a raster marked -as being suitable according to user-selected criteria. +as being suitable according to user-selected criteria. Cells on the edges of a raster object +are assessed using a smaller surrounding area, rather than shifting the window inward. # Arguments - `x` : Matrix of boolean pixels after filtering with user criteria. -- `window` : Window size to assess. Default window (-4,5) assesses a square hectare around each target pixel where the resolution of pixels is 10m. +- `window` : The number of pixels +/- to assess as the moving window. For example, the default +window (-4,5) assesses a square that contains 4 pixels to the left/above the target pixel, +the target pixel, and 5 pixels to the right/below the target pixel. This default window +assesses a hectare around each target pixel where the resolution of pixels is 10m. 
""" function proportion_suitable(x::BitMatrix; window::Tuple=(-4, 5))::Matrix{Int16} - x′ = zeros(Int16, size(x)) + subsection_dims = size(x) + x′ = zeros(Int16, subsection_dims) @floop for row_col in ThreadsX.findall(x) (row, col) = Tuple(row_col) x_left = max(col + window[1], 1) - x_right = min(col + window[2], size(x, 2)) + x_right = min(col + window[2], subsection_dims[2]) y_top = max(row + window[1], 1) - y_bottom = min(row + window[2], size(x, 1)) + y_bottom = min(row + window[2], subsection_dims[1]) x′[row, col] = Int16(sum(@views x[y_top:y_bottom, x_left:x_right])) end From 9e2af94965a9433d45e38d08db2dd6c5dc03f7e6 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 11:06:48 +1100 Subject: [PATCH 46/60] Update docstring for clarity --- .../site_identification.jl | 43 +++++++++++-------- 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 1198d07..0643858 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -3,36 +3,43 @@ using FLoops, ThreadsX """ - proportion_suitable(subsection::BitMatrix, window::Tuple=(-4,5))::Matrix{Int16} + proportion_suitable(subsection::BitMatrix, square_offset::Tuple=(-4,5))::Matrix{Int16} Calculate the the proportion of the subsection that is suitable for deployments. -Subsection is the surrounding a rough hectare area centred on each cell of a raster marked -as being suitable according to user-selected criteria. Cells on the edges of a raster object -are assessed using a smaller surrounding area, rather than shifting the window inward. +The `subsection` is the surrounding a rough hectare area centred on each cell of a raster +marked as being suitable according to user-selected criteria. + +Cells on the edges of a raster object are assessed using a smaller surrounding area, rather +than shifting the window inward. In usual applications, there will be no target pixel +close to the edge due to the use of buffer areas. # Arguments - `x` : Matrix of boolean pixels after filtering with user criteria. -- `window` : The number of pixels +/- to assess as the moving window. For example, the default -window (-4,5) assesses a square that contains 4 pixels to the left/above the target pixel, -the target pixel, and 5 pixels to the right/below the target pixel. This default window -assesses a hectare around each target pixel where the resolution of pixels is 10m. +- `square_offset` : The number of pixels +/- around a center "target" pixel to assess as the + moving window. Defaults to (-4, 5). + Assuming a 10m² pixel, the default `square_offset` resolves to a one + hectare area. + +# Returns +Matrix of values 0 - 100 indicating the percentage of the area around the target pixel that +meet suitability criteria. 
""" -function proportion_suitable(x::BitMatrix; window::Tuple=(-4, 5))::Matrix{Int16} +function proportion_suitable(x::BitMatrix; square_offset::Tuple=(-4, 5))::Matrix{Int16} subsection_dims = size(x) - x′ = zeros(Int16, subsection_dims) + target_area = zeros(Int16, subsection_dims) @floop for row_col in ThreadsX.findall(x) (row, col) = Tuple(row_col) - x_left = max(col + window[1], 1) - x_right = min(col + window[2], subsection_dims[2]) + x_left = max(col + square_offset[1], 1) + x_right = min(col + square_offset[2], subsection_dims[2]) - y_top = max(row + window[1], 1) - y_bottom = min(row + window[2], subsection_dims[1]) + y_top = max(row + square_offset[1], 1) + y_bottom = min(row + square_offset[2], subsection_dims[1]) - x′[row, col] = Int16(sum(@views x[y_top:y_bottom, x_left:x_right])) + target_area[row, col] = Int16(sum(@views x[y_top:y_bottom, x_left:x_right])) end - return x′ + return target_area end """ @@ -46,7 +53,7 @@ Exclude pixels in `target_rast` that are beyond `dist_nm` (nautical miles) from in `gdf`. `target_rast` and `gdf` should be in the same CRS (EPSG:7844 / GDA2020 for GBR-reef-guidance-assessment). # Arguments -- `target_rast` : Raster of suitable pixels (Bool) to filter pixels from. +- `target_rast` : Boolean raster of suitable pixels to filter. - `gdf` : DataFrame with `geometry` column that contains vector objects of interest. - `dist_nm` : Filtering distance from geometry object in nautical miles. @@ -64,7 +71,7 @@ function filter_distances( raster_lat = Vector{Float64}(tmp_areas.dims[1].val) raster_lon = Vector{Float64}(tmp_areas.dims[2].val) - @floop for row_col in findall(tmp_areas) + @floop for row_col in ThreadsX.findall(tmp_areas) (lat_ind, lon_ind) = Tuple(row_col) point = AG.createpoint() From 9c12b181f1f36b387a2a061e9b93cb2ae7a016f4 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 11:06:54 +1100 Subject: [PATCH 47/60] Update comment for clarity --- src/ReefGuideAPI.jl | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index ece071f..4969dad 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -145,11 +145,12 @@ function setup_regional_data(config::Dict) @eval const REGIONAL_DATA = $(regional_assessment_data) end - # If REGIONAL_DATA is defined, but failed to load supported data that cannot - # be cached to disk (e.g., the reef outlines), then it will cause errors later on. + # If REGIONAL_DATA is defined, but failed to load supporting data that cannot be + # cached to disk, such as the reef outlines, (e.g., due to incorrect config), then it + # will cause errors later on. # Then there's no way to address this, even between web server sessions, as `const` # values cannot be modified. - # We check for existance and try to load again if needed. + # Here, we check for existence and try to load again if needed. 
if !haskey(REGIONAL_DATA, "reef_outlines") reef_outline_path = joinpath(reef_data_path, "rrap_canonical_outlines.gpkg") REGIONAL_DATA["reef_outlines"] = GDF.read(reef_outline_path) From f0234d420ee7995ecc4c0dc6991dd71d0c1eb678 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 11:09:13 +1100 Subject: [PATCH 48/60] Move variable closer to usage --- src/criteria_assessment/tiles.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/criteria_assessment/tiles.jl b/src/criteria_assessment/tiles.jl index 1673500..d6ab3fd 100644 --- a/src/criteria_assessment/tiles.jl +++ b/src/criteria_assessment/tiles.jl @@ -169,8 +169,6 @@ function setup_tile_routes(config, auth) # http://127.0.0.1:8000/tile/7/115/69?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 # http://127.0.0.1:8000/tile/8/231/139?region=Cairns-Cooktown&rtype=slopes&Depth=-9.0:0.0&Slope=0.0:40.0&Rugosity=0.0:3.0 - no_data_path = cache_filename(Dict("no_data" => "none"), config, "no_data", "png") - qp = queryparams(req) mask_path = cache_filename(qp, config, "", "png") if isfile(mask_path) @@ -201,6 +199,8 @@ function setup_tile_routes(config, auth) ) if any(size(mask_data) .== 0) + no_data_path = cache_filename(Dict("no_data" => "none"), config, "no_data", "png") + @debug "Thread $(thread_id) - No data for $reg ($rtype) at $z/$x/$y" return file(no_data_path; headers=TILE_HEADERS) end From 668544281da9514f2acc95ab440c2afe9b8a7d19 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 11:17:49 +1100 Subject: [PATCH 49/60] Fix bug Previous implementation used the same values based on x_dist and latitude. I think this only works if the location sizes are equal in both x and y distance. In 99% of cases this will be true, but protecting against the rare case it is not. 
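For illustration, a minimal sketch of the conversion this depends on. The helper below and the ~111,320 m-per-degree figure are assumptions for the example only (the repository's `meters_to_degrees` is defined elsewhere); the point is simply that the longitude offset is derived from `x_dist` and the latitude offset from `y_dist`, so the two only coincide for square sites.

    # Illustrative helpers only -- not the repository implementation.
    # One degree of latitude spans ~111,320 m everywhere; one degree of longitude
    # shrinks with the cosine of the latitude.
    lat_m_to_deg(d_m) = d_m / 111_320
    lon_m_to_deg(d_m, lat) = d_m / (111_320 * cosd(lat))

    # Hypothetical 450 m x 20 m search window centred on (146.8, -18.5):
    lon, lat = 146.8, -18.5
    x_dist, y_dist = 450.0, 20.0
    bounds = [
        lon - lon_m_to_deg(x_dist / 2, lat),
        lon + lon_m_to_deg(x_dist / 2, lat),
        lat - lat_m_to_deg(y_dist / 2),
        lat + lat_m_to_deg(y_dist / 2)
    ]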
--- src/site_assessment/best_fit_polygons.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/site_assessment/best_fit_polygons.jl b/src/site_assessment/best_fit_polygons.jl index eb6e6f5..5fc525d 100644 --- a/src/site_assessment/best_fit_polygons.jl +++ b/src/site_assessment/best_fit_polygons.jl @@ -127,7 +127,7 @@ function identify_edge_aligned_sites( reef_lines = reef_lines[gdf.management_area .== region_long] gdf = gdf[gdf.management_area .== region_long, :] max_count = ( - (x_dist / degrees_to_meters(res, mean(search_pixels.lat))) * + (x_dist / degrees_to_meters(res, mean(search_pixels.lon))) * ( (y_dist + 2 * degrees_to_meters(res, mean(search_pixels.lat))) / degrees_to_meters(res, mean(search_pixels.lat)) @@ -148,10 +148,10 @@ function identify_edge_aligned_sites( rot_angle = initial_search_rotation(pixel, geom_buff, gdf, reef_lines) bounds = [ - lon - meters_to_degrees(x_dist / 2, lat), - lon + meters_to_degrees(x_dist / 2, lat), - lat - meters_to_degrees(x_dist / 2, lat), - lat + meters_to_degrees(x_dist / 2, lat) + lon - meters_to_degrees(x_dist / 2, lon), + lon + meters_to_degrees(x_dist / 2, lon), + lat - meters_to_degrees(y_dist / 2, lat), + lat + meters_to_degrees(y_dist / 2, lat) ] rel_pix = df[ From d0427ab3ca27449bdea21995436b7eb5903a60e4 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 11:19:28 +1100 Subject: [PATCH 50/60] Appease the great formatter --- src/criteria_assessment/tiles.jl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/criteria_assessment/tiles.jl b/src/criteria_assessment/tiles.jl index d6ab3fd..cb9c16d 100644 --- a/src/criteria_assessment/tiles.jl +++ b/src/criteria_assessment/tiles.jl @@ -199,7 +199,9 @@ function setup_tile_routes(config, auth) ) if any(size(mask_data) .== 0) - no_data_path = cache_filename(Dict("no_data" => "none"), config, "no_data", "png") + no_data_path = cache_filename( + Dict("no_data" => "none"), config, "no_data", "png" + ) @debug "Thread $(thread_id) - No data for $reg ($rtype) at $z/$x/$y" return file(no_data_path; headers=TILE_HEADERS) From e257ec95f49588eecc9038a3429e00dae718e794 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 12:16:57 +1100 Subject: [PATCH 51/60] Fix offset bug resulting in empty search set --- src/site_assessment/best_fit_polygons.jl | 37 +++++++++++++++++------- 1 file changed, 26 insertions(+), 11 deletions(-) diff --git a/src/site_assessment/best_fit_polygons.jl b/src/site_assessment/best_fit_polygons.jl index 5fc525d..61f0f8d 100644 --- a/src/site_assessment/best_fit_polygons.jl +++ b/src/site_assessment/best_fit_polygons.jl @@ -46,8 +46,9 @@ function assess_reef_site( n_per_side::Int64=2, surr_threshold::Float64=0.33 )::Tuple{Float64,Int64,GI.Wrappers.Polygon,Int64} - rotations = - (start_rot - (degree_step * n_per_side)):degree_step:(start_rot + (degree_step * n_per_side)) + rot_start = (start_rot - (degree_step * n_per_side)) + rot_end = (start_rot + (degree_step * n_per_side)) + rotations = rot_start:degree_step:rot_end n_rotations = length(rotations) score = zeros(n_rotations) best_poly = Vector(undef, n_rotations) @@ -60,13 +61,16 @@ function assess_reef_site( best_poly[j] = rot_geom if score[j] < surr_threshold + # Early exit as there's no point in searching further. + # Changing the rotation is unlikely to improve the score. 
qc_flag[j] = 1 break end end return score[argmax(score)], - argmax(score) - (n_per_side + 1), best_poly[argmax(score)], + argmax(score) - (n_per_side + 1), + best_poly[argmax(score)], maximum(qc_flag) end @@ -127,7 +131,7 @@ function identify_edge_aligned_sites( reef_lines = reef_lines[gdf.management_area .== region_long] gdf = gdf[gdf.management_area .== region_long, :] max_count = ( - (x_dist / degrees_to_meters(res, mean(search_pixels.lon))) * + (x_dist / degrees_to_meters(res, mean(search_pixels.lat))) * ( (y_dist + 2 * degrees_to_meters(res, mean(search_pixels.lat))) / degrees_to_meters(res, mean(search_pixels.lat)) @@ -147,16 +151,27 @@ function identify_edge_aligned_sites( pixel = GO.Point(lon, lat) rot_angle = initial_search_rotation(pixel, geom_buff, gdf, reef_lines) + lon_offset = abs(meters_to_degrees(x_dist / 2, lon)) + lat_offset = abs(meters_to_degrees(y_dist / 2, lat)) bounds = [ - lon - meters_to_degrees(x_dist / 2, lon), - lon + meters_to_degrees(x_dist / 2, lon), - lat - meters_to_degrees(y_dist / 2, lat), - lat + meters_to_degrees(y_dist / 2, lat) + lon - lon_offset, + lon + lon_offset, + lat - lat_offset, + lat + lat_offset ] - rel_pix = df[ - (df.lon .> bounds[1]) .& (df.lon .< bounds[2]) .& (df.lat .> bounds[3]) .& (df.lat .< bounds[4]), - :] + lower_lon = (df.lon .>= bounds[1]) + upper_lon = (df.lon .<= bounds[2]) + lower_lat = (df.lat .>= bounds[3]) + upper_lat = (df.lat .<= bounds[4]) + rel_pix = df[lower_lon .& upper_lon .& lower_lat .& upper_lat, :] + + if nrow(rel_pix) == 0 + best_score[i] = 0.0 + best_rotation[i] = 0 + quality_flag[i] = 0 + continue + end b_score, b_rot, b_poly, qc_flag = assess_reef_site( rel_pix, From 020f61156a23a1a218e0b36ebd6ae13772e29d48 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 12:38:40 +1100 Subject: [PATCH 52/60] Fix cached file not being correctly used --- src/criteria_assessment/criteria.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/criteria_assessment/criteria.jl b/src/criteria_assessment/criteria.jl index 1345f46..1eb9d0b 100644 --- a/src/criteria_assessment/criteria.jl +++ b/src/criteria_assessment/criteria.jl @@ -192,7 +192,7 @@ function setup_region_routes(config, auth) # Assess location suitability if needed assessed_fn = cache_filename(qp, config, "$(reg)_suitable", "tiff") if isfile(assessed_fn) - assessed = file(assessed_fn; headers=COG_HEADERS) + assessed = Raster(assessed_fn) else assessed = assess_region(reg_assess_data, reg, qp, rtype) _write_cog(assessed_fn, assessed, config) From d1b238282c59dcc8cee209080fa09e97c6f323a2 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 12:38:57 +1100 Subject: [PATCH 53/60] Do not ignore QC --- src/site_assessment/common_functions.jl | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index 2f9a05b..3ddb3b8 100644 --- a/src/site_assessment/common_functions.jl +++ b/src/site_assessment/common_functions.jl @@ -232,11 +232,6 @@ function filter_sites(res_df::DataFrame)::DataFrame continue end - if row.qc_flag == 1 - push!(ignore_list, row.row_ID) - continue - end - if any(GO.intersects.([row.poly], res_df[:, :poly])) intersecting_polys = res_df[(GO.intersects.([row.poly], res_df[:, :poly])), :] if maximum(intersecting_polys.score) <= row.score @@ -252,7 +247,7 @@ function filter_sites(res_df::DataFrame)::DataFrame rename!(res_df, :poly => :geometry) - return res_df[res_df.row_ID .∉ [unique(ignore_list)], Not(:qc_flag, 
:row_ID)] + return res_df[res_df.row_ID .∉ [unique(ignore_list)], :] end """ From 73b61235ed870a835c64f40d4ed8eba37f9d0902 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 13:13:36 +1100 Subject: [PATCH 54/60] Fix bug in cache location! --- src/ReefGuideAPI.jl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index 4969dad..1bf41e3 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -166,13 +166,14 @@ Retrieve cache location for geotiffs. """ function _cache_location(config::Dict)::String cache_loc = try - in_debug = "DEBUG_MODE" in config["server_config"] + in_debug = haskey(config["server_config"], "DEBUG_MODE") if in_debug && lowercase(config["server_config"]["DEBUG_MODE"]) == "true" mktempdir() else config["server_config"]["TIFF_CACHE_DIR"] end - catch + catch err + @info string(err) mktempdir() end From 6ac26f3ae3be2617d920b7976e82841e0ebab7df Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 13:14:06 +1100 Subject: [PATCH 55/60] Standardize to `geometry` column to avoid later renaming --- src/site_assessment/best_fit_polygons.jl | 5 ++++- src/site_assessment/common_functions.jl | 7 +++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/site_assessment/best_fit_polygons.jl b/src/site_assessment/best_fit_polygons.jl index 61f0f8d..56a21f5 100644 --- a/src/site_assessment/best_fit_polygons.jl +++ b/src/site_assessment/best_fit_polygons.jl @@ -191,7 +191,10 @@ function identify_edge_aligned_sites( end return DataFrame(; - score=best_score, orientation=best_rotation, qc_flag=quality_flag, poly=best_poly + score=best_score, + orientation=best_rotation, + qc_flag=quality_flag, + geometry=best_poly ) end diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index 3ddb3b8..df7bb1d 100644 --- a/src/site_assessment/common_functions.jl +++ b/src/site_assessment/common_functions.jl @@ -232,8 +232,9 @@ function filter_sites(res_df::DataFrame)::DataFrame continue end - if any(GO.intersects.([row.poly], res_df[:, :poly])) - intersecting_polys = res_df[(GO.intersects.([row.poly], res_df[:, :poly])), :] + poly = row.geometry + if any(GO.intersects.([poly], res_df[:, :geometry])) + intersecting_polys = res_df[(GO.intersects.([poly], res_df[:, :geometry])), :] if maximum(intersecting_polys.score) <= row.score for x_row in eachrow(intersecting_polys[intersecting_polys.row_ID .!= row.row_ID, :]) @@ -245,8 +246,6 @@ function filter_sites(res_df::DataFrame)::DataFrame end end - rename!(res_df, :poly => :geometry) - return res_df[res_df.row_ID .∉ [unique(ignore_list)], :] end From a5d8e6d1f70b0aa0736547a0124329e018944cf1 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 13:15:07 +1100 Subject: [PATCH 56/60] Assess pre-scored raster instead of trying to re-apply criteria --- src/site_assessment/best_fit_polygons.jl | 30 ++++++------------------ 1 file changed, 7 insertions(+), 23 deletions(-) diff --git a/src/site_assessment/best_fit_polygons.jl b/src/site_assessment/best_fit_polygons.jl index 56a21f5..7fff632 100644 --- a/src/site_assessment/best_fit_polygons.jl +++ b/src/site_assessment/best_fit_polygons.jl @@ -214,7 +214,7 @@ Assess given reef site for it's suitability score at different specified rotatio initial reef-edge rotation. # Arguments -- `rst` : Raster or RasterStack object used to assess site suitability. +- `rst` : Raster of suitability scores. - `geom` : Initial site polygon with no rotation applied. 
- `ruleset` : Criteria ruleset to apply to `rst` pixels when assessing which pixels are suitable. - `degree_step` : Degree value to vary each rotation by. Default = 15 degrees. @@ -228,8 +228,7 @@ initial reef-edge rotation. """ function assess_reef_site( rst::Union{Raster,RasterStack}, - geom::GI.Wrappers.Polygon, - ruleset::Dict{Symbol,Function}; + geom::GI.Wrappers.Polygon; degree_step::Float64=15.0, start_rot::Float64=0.0, n_per_side::Int64=2 @@ -240,24 +239,16 @@ function assess_reef_site( score = zeros(n_rotations) best_poly = Vector(undef, n_rotations) + target_crs = GI.crs(rst) for (j, r) in enumerate(rotations) - rot_geom = rotate_geom(geom, r) + rot_geom = rotate_geom(geom, r, target_crs) c_rst = crop(rst; to=rot_geom) if !all(size(c_rst) .> (0, 0)) @warn "No data found!" continue end - window = trues(size(c_rst)) - for (n, crit_rule) in ruleset - window .= window .& crit_rule(c_rst[n]) - if count(window) < ceil(length(window) / 3) - # Stop checking other rules if below hard threshold - break - end - end - - score[j] = mean(window) + score[j] = mean(c_rst) best_poly[j] = rot_geom end @@ -314,12 +305,6 @@ function identify_edge_aligned_sites( gdf = gdf[gdf.management_area .== region, :] res = abs(step(dims(rst_stack, X))) - # # TODO: Dynamically build this ruleset - ruleset = Dict( - :Depth => (data) -> within_thresholds(data, -9.0, -2.0), - :WavesTp => (data) -> within_thresholds(data, 0.0, 5.9) - ) - # Search each location to assess best_score = zeros(length(search_pixels.lon)) best_poly = Vector(undef, length(search_pixels.lon)) @@ -334,8 +319,7 @@ function identify_edge_aligned_sites( b_score, b_rot, b_poly = assess_reef_site( rst_stack, - geom_buff, - ruleset; + geom_buff; degree_step=degree_step, start_rot=rot_angle, n_per_side=n_rot_per_side @@ -346,5 +330,5 @@ function identify_edge_aligned_sites( best_poly[i] = b_poly end - return DataFrame(; score=best_score, orientation=best_rotation, poly=best_poly) + return DataFrame(; score=best_score, orientation=best_rotation, geometry=best_poly) end From ef85fd35334fa9d6af8a5e6cb16463c21f785061 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 13:47:12 +1100 Subject: [PATCH 57/60] Move multithreading packages to top level --- src/ReefGuideAPI.jl | 2 ++ src/criteria_assessment/site_identification.jl | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ReefGuideAPI.jl b/src/ReefGuideAPI.jl index 1bf41e3..cf81fa5 100644 --- a/src/ReefGuideAPI.jl +++ b/src/ReefGuideAPI.jl @@ -12,6 +12,8 @@ using OrderedCollections using Memoization using SparseArrays +using FLoops, ThreadsX + import GeoDataFrames as GDF using ArchGDAL, diff --git a/src/criteria_assessment/site_identification.jl b/src/criteria_assessment/site_identification.jl index 0643858..600da8d 100644 --- a/src/criteria_assessment/site_identification.jl +++ b/src/criteria_assessment/site_identification.jl @@ -1,7 +1,5 @@ """Methods for identifying potential deployment locations.""" -using FLoops, ThreadsX - """ proportion_suitable(subsection::BitMatrix, square_offset::Tuple=(-4,5))::Matrix{Int16} From 7a12510f714d77542de07607450a1317fb942567 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 13:48:19 +1100 Subject: [PATCH 58/60] Avoid repeated call to recreate identical subsets of data Also preallocate --- src/site_assessment/best_fit_polygons.jl | 3 ++- src/site_assessment/common_functions.jl | 10 +++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/site_assessment/best_fit_polygons.jl 
b/src/site_assessment/best_fit_polygons.jl index 7fff632..af1c466 100644 --- a/src/site_assessment/best_fit_polygons.jl +++ b/src/site_assessment/best_fit_polygons.jl @@ -143,6 +143,7 @@ function identify_edge_aligned_sites( best_poly = Vector(undef, length(search_pixels.lon)) best_rotation = zeros(Int64, length(search_pixels.lon)) quality_flag = zeros(Int64, length(search_pixels.lon)) + bounds = zeros(4) for (i, index) in enumerate(eachrow(search_pixels)) lon = index.lon lat = index.lat @@ -153,7 +154,7 @@ function identify_edge_aligned_sites( lon_offset = abs(meters_to_degrees(x_dist / 2, lon)) lat_offset = abs(meters_to_degrees(y_dist / 2, lat)) - bounds = [ + bounds .= Float64[ lon - lon_offset, lon + lon_offset, lat - lat_offset, diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index df7bb1d..3b344f5 100644 --- a/src/site_assessment/common_functions.jl +++ b/src/site_assessment/common_functions.jl @@ -78,12 +78,11 @@ end Filter out reefs that are > `dist` (meters) from the target pixel (currently `dist` is hardcoded in `initial_search_rotation()`). """ function filter_far_polygons( - gdf::DataFrame, + geoms, pixel::GeometryBasics.Point, lat::Float64, dist::Union{Int64,Float64} )::BitVector - geoms = gdf[:, first(GI.geometrycolumns(gdf))] return (GO.distance.(GO.centroid.(geoms), [pixel]) .< meters_to_degrees(dist, lat)) end @@ -177,11 +176,12 @@ function initial_search_rotation( reef_outlines::Vector{Vector{GeometryBasics.Line{2,Float64}}}; search_buffer::Union{Int64,Float64}=20000.0 )::Float64 - distance_indices = filter_far_polygons(gdf, pixel, pixel[2], search_buffer) + geoms = gdf[!, first(GI.geometrycolumns(gdf))] + distance_indices = filter_far_polygons(geoms, pixel, pixel[2], search_buffer) reef_lines = reef_outlines[distance_indices] reef_lines = reef_lines[ - GO.within.([pixel], gdf[distance_indices, first(GI.geometrycolumns(gdf))]) -] + GO.within.([pixel], geoms[distance_indices]) + ] reef_lines = vcat(reef_lines...) # If a pixel is outside of a polygon, use the closest polygon instead. 
From 8eb3f1d4c1c8678c020340629f76138c98bd5dc9 Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 13:49:03 +1100 Subject: [PATCH 59/60] Use multithreading to cut down processing time --- src/site_assessment/best_fit_polygons.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/site_assessment/best_fit_polygons.jl b/src/site_assessment/best_fit_polygons.jl index af1c466..06ea98c 100644 --- a/src/site_assessment/best_fit_polygons.jl +++ b/src/site_assessment/best_fit_polygons.jl @@ -44,7 +44,7 @@ function assess_reef_site( degree_step::Float64=15.0, start_rot::Float64=0.0, n_per_side::Int64=2, - surr_threshold::Float64=0.33 + surr_threshold::Float64=0.2 )::Tuple{Float64,Int64,GI.Wrappers.Polygon,Int64} rot_start = (start_rot - (degree_step * n_per_side)) rot_end = (start_rot + (degree_step * n_per_side)) @@ -54,7 +54,7 @@ function assess_reef_site( best_poly = Vector(undef, n_rotations) qc_flag = zeros(Int64, n_rotations) - for (j, r) in enumerate(rotations) + @floop for (j, r) in enumerate(rotations) rot_geom = rotate_geom(geom, r, target_crs) score[j] = size(rel_pix[GO.intersects.([rot_geom], rel_pix.geometry), :], 1) / max_count From c6fdcdc0292c91be994a20b51daa68dc2f41c14e Mon Sep 17 00:00:00 2001 From: Takuya Iwanaga Date: Fri, 11 Oct 2024 13:57:20 +1100 Subject: [PATCH 60/60] Apply formatting --- src/site_assessment/common_functions.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/site_assessment/common_functions.jl b/src/site_assessment/common_functions.jl index 3b344f5..ad579e7 100644 --- a/src/site_assessment/common_functions.jl +++ b/src/site_assessment/common_functions.jl @@ -180,8 +180,8 @@ function initial_search_rotation( distance_indices = filter_far_polygons(geoms, pixel, pixel[2], search_buffer) reef_lines = reef_outlines[distance_indices] reef_lines = reef_lines[ - GO.within.([pixel], geoms[distance_indices]) - ] + GO.within.([pixel], geoms[distance_indices]) +] reef_lines = vcat(reef_lines...) # If a pixel is outside of a polygon, use the closest polygon instead.