Merge pull request #1 from PatBall1/master

Update

0scarJ1ang authored Aug 20, 2023
2 parents 803d2f9 + 59fd376 commit 751363e
Showing 6 changed files with 68 additions and 8 deletions.
50 changes: 45 additions & 5 deletions detectree2/models/outputs.py
@@ -18,8 +18,9 @@
import pycocotools.mask as mask_util
import rasterio
from rasterio.crs import CRS
+from shapely.affinity import scale
from shapely.geometry import Polygon, box, shape
-from shapely.ops import unary_union
+from shapely.ops import orient, unary_union


def polygon_from_mask(masked_arr):
@@ -407,11 +408,34 @@ def load_geopandas_dataframes(folder):


# Function to normalize and average polygons, considering weights
#def normalize_polygon(polygon, num_points):
#    total_perimeter = polygon.length
#    distance_between_points = total_perimeter / num_points
#    points = [polygon.boundary.interpolate(i * distance_between_points) for i in range(num_points)]
#    return Polygon(points)

def normalize_polygon(polygon, num_points):
    # Orient polygon to ensure consistent vertex order (counterclockwise)
    polygon = orient(polygon, sign=1.0)

    # Get all points
    points = list(polygon.exterior.coords)

    # Get point with minimum average of x and y
    min_avg_point = min(points, key=lambda point: sum(point) / len(point))

    # Rotate points to start from min_avg_point
    min_avg_point_idx = points.index(min_avg_point)
    points = points[min_avg_point_idx:] + points[:min_avg_point_idx]

    # Create a new polygon with ordered points
    polygon = Polygon(points)

    total_perimeter = polygon.length
    distance_between_points = total_perimeter / num_points
-    points = [polygon.boundary.interpolate(i * distance_between_points) for i in range(num_points)]
-    return Polygon(points)
+    normalized_points = [polygon.boundary.interpolate(i * distance_between_points) for i in range(num_points)]
+    return Polygon(normalized_points)
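
To make the resampling behaviour concrete, here is a minimal sketch of calling the function above; the unit square and the point count are illustrative:

from shapely.geometry import Polygon

# Illustrative input: a unit square; any simple polygon works.
square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])

# Resample the boundary to a fixed number of evenly spaced points,
# starting from a deterministic vertex, so that two detections of the
# same crown can later be averaged point-by-point.
resampled = normalize_polygon(square, num_points=300)
print(len(resampled.exterior.coords))  # 301: 300 points plus the closing vertex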


def average_polygons(polygons, weights=None, num_points=300):
@@ -427,7 +451,23 @@ def average_polygons(polygons, weights=None, num_points=300):
        avg_point_at_i = sum(points_at_i) / len(normalized_polygons)
        avg_polygon_points.append(tuple(avg_point_at_i))
    avg_polygon = Polygon(avg_polygon_points)
-    return avg_polygon

    # Compute the average centroid of the input polygons
    average_centroid = (
        np.mean([poly.centroid.x for poly in polygons]),
        np.mean([poly.centroid.y for poly in polygons])
    )

    # Compute the average area of the input polygons
    average_area = np.mean([poly.area for poly in polygons])

    # Calculate the scale factor
    scale_factor = np.sqrt(average_area / avg_polygon.area)

    # Scale the average polygon
    avg_polygon_scaled = scale(avg_polygon, xfact=scale_factor, yfact=scale_factor, origin=average_centroid)

    return avg_polygon_scaled
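
Point-by-point averaging tends to pull vertices toward the centroid and shrink the outline, which is why the new code rescales the result so its area matches the mean input area, about the mean centroid. A sketch of the intended use, with hypothetical crowns and assuming the functions above are in scope:

from shapely.geometry import Polygon

# Two hypothetical detections of the same crown from overlapping tiles.
crown_a = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])
crown_b = Polygon([(0.2, 0.1), (2.1, 0.2), (2.2, 2.1), (0.1, 2.0)])

merged = average_polygons([crown_a, crown_b], num_points=300)
print(merged.area)  # ~= (crown_a.area + crown_b.area) / 2, by the scaling step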


def combine_and_average_polygons(gdfs, iou = 0.9):
@@ -465,7 +505,7 @@ def combine_and_average_polygons(gdfs, iou = 0.9):
        match_confidence = row_match.Confidence_score if "Confidence_score" in combined_gdf.columns else None

        intersection = polygon.intersection(match)
-        if intersection.area / match.area > iou:
+        if intersection.area / (polygon.area + match.area - intersection.area) > iou:
            significant_matches.append(match)
            if match_confidence is not None:
                significant_confidences.append(match_confidence)
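
The changed condition replaces an asymmetric overlap ratio (intersection over the matched polygon's area) with true intersection-over-union. Under the old ratio a small polygon nested inside a large one always scored 1.0; under IoU it scores only area_small / area_large. Written as a standalone helper for clarity (a sketch, not a function the module exports):

def iou(a, b):
    """Intersection over union of two shapely geometries."""
    inter = a.intersection(b).area
    return inter / (a.area + b.area - inter)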
3 changes: 2 additions & 1 deletion detectree2/models/predict.py
@@ -20,6 +20,7 @@

def predict_on_data(
    directory: str = "./",
+    out_folder: str = "predictions",
    predictor=DefaultPredictor,
    eval=False,
    save: bool = True,
@@ -30,7 +31,7 @@ def predict_on_data(
    Predicts crowns for all png images present in a directory and outputs masks as JSON files.
    """

-    pred_dir = os.path.join(directory, "predictions")
+    pred_dir = os.path.join(directory, out_folder)

    Path(pred_dir).mkdir(parents=True, exist_ok=True)
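
The new out_folder argument makes the output directory configurable, so predictions from several models can sit side by side (for example, before being combined with combine_and_average_polygons). A usage sketch; the folder name is illustrative, and tiles_path and cfg are assumed to be set up as in the tutorial:

# Write this model's predictions to <tiles_path>/predictions_model_a
# instead of the default "predictions" subfolder.
predict_on_data(tiles_path, out_folder="predictions_model_a", predictor=DefaultPredictor(cfg))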

2 changes: 1 addition & 1 deletion docs/source/tutorial.rst
@@ -331,7 +331,7 @@ Point to a trained model, set up the configuration state and make predictions on
trained_model = "./230103_randresize_full.pth"
cfg = setup_cfg(update_model=trained_model)
-predict_on_data(tiles_path, DefaultPredictor(cfg))
+predict_on_data(tiles_path, predictor=DefaultPredictor(cfg))

The predictor must now be passed by keyword: with out_folder added as the second positional parameter of predict_on_data, a positional DefaultPredictor(cfg) would be bound to out_folder instead. Once the predictions have been made on the tiles, it is necessary to project them back into geographic space.

15 changes: 14 additions & 1 deletion model_garden/README.md
@@ -30,10 +30,23 @@ transferability owing to random resize augmentation during training.

* Appropriate tile size ~ 100 m (with some flexibility)

-## urban_trees.pth
+## urban_trees_Cambridge20230630.pth

A new model for mapping trees in urban environments. Available upon request.

* Appropriate tile size ~ 200 m

### Hyperparameters

- Learning rate: 0.01709
- Data loader workers: 6
- Gamma: 0.08866
- Backbone freeze at: 2
- Warmup iterations: 184
- Batch size per image: 623
- Weight decay: 0.006519
- AP50: 62.0
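
For reference, a sketch of how these values map onto a detectron2 config. The keys are standard detectron2 options, but the mapping is our reading of the list above, not a training script shipped with the model:

from detectron2.config import get_cfg

cfg = get_cfg()
cfg.SOLVER.BASE_LR = 0.01709                    # learning rate
cfg.DATALOADER.NUM_WORKERS = 6                  # data loader workers
cfg.SOLVER.GAMMA = 0.08866                      # LR decay factor (gamma)
cfg.MODEL.BACKBONE.FREEZE_AT = 2                # backbone freeze at
cfg.SOLVER.WARMUP_ITERS = 184                   # warmup iterations
cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 623  # batch size per image (RoIs)
cfg.SOLVER.WEIGHT_DECAY = 0.006519              # weight decay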


3 changes: 3 additions & 0 deletions model_garden/urban_trees_Cambridge_20230630.pth
Git LFS file not shown
3 changes: 3 additions & 0 deletions setup.py
@@ -13,10 +13,13 @@
    install_requires=[
        "pyyaml==5.1",
        "GDAL>=1.11",
        "numpy",
        "rtree",
        "proj",
        "geos",
        "pypng",
        "pygeos",
        "shapely",
        "geopandas",
        "rasterio==1.3a3",
        "fiona",