prep for Version 1.0.3
jblindsay committed Dec 9, 2019
1 parent 5051628 commit fb1e82d
Showing 466 changed files with 8,983 additions and 10,697 deletions.
Binary file modified .DS_Store
8 changes: 4 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions Cargo.toml
@@ -1,7 +1,7 @@

[package]
name = "whitebox_tools"
version = "1.0.2"
version = "1.0.3"
authors = ["John Lindsay <[email protected]>"]
description = "A library for analyzing geospatial data."
keywords = ["geospatial", "GIS", "remote sensing", "geomatics", "image processing", "lidar", "spatial analysis"]
@@ -13,7 +13,7 @@ edition = "2018"
[dependencies]
byteorder = "^1.3.1"
chrono = "0.4.6"
kdtree = "0.5.1"
kdtree = "0.6.0"
libflate = "0.1.18"
lzw = "0.10.0"
nalgebra = "0.18.0"
70 changes: 0 additions & 70 deletions build.py

This file was deleted.

45 changes: 0 additions & 45 deletions lib_test.py

This file was deleted.

30 changes: 30 additions & 0 deletions readme.txt
@@ -56,6 +56,36 @@ for more details.
* Release Notes: *
******************

Version 1.0.3 (09-12-2019)
- Added the BreachDepressionsLeastCost tool, which performs a modified form of the Lindsay
and Dhun (2015) impact-minimizing breaching algorithm. This modified algorithm is very
efficient and provides an excellent method for creating depressionless DEMs from large
DEMs, including those derived from LiDAR. It is particularly well suited to breaching
through road embankments, approximating the pathway of culverts (see the usage sketch
after these notes).
- The FillDepressions tool algorithm has been completely redeveloped. The new algorithm is
significantly faster than the previous method, which was based on the Wang and Liu method.
For legacy reasons, the previous tool has been retained and renamed FillDepressionsWangAndLiu.
Note that the new method also incorporates significantly improved flat-area correction
that maintains the general flowpaths across filled areas.
- The Sink and DepthInSink tools have been updated to use the new depression filling algorithm.
- Added the ClassifyBuildingsInLidar tool, which reclassifies LiDAR points within a LAS file
to the building class value (6) when they fall within one or more building footprints
contained in an input polygon vector file.
- Added the NaturalNeighbourInterpolation tool for performing Sibson's (1981) interpolation
method on input point data.
- Added the UpslopeDepressionStorage tool to estimate the average upslope depression
storage capacity (DSC).
- Added the LidarRbfInterpolation tool for performing a radial basis function (RBF)
interpolation of LiDAR data sets.
- The WhiteboxTools Runner user interface has been significantly improved (many thanks to
Rachel Broders for these contributions).
- Fixed a bug in which the photometric interpretation was not being set by certain raster
decoders, including the SAGA decoder. This was causing an error when outputting GeoTIFF
files.
- Updated the ConstructVectorTIN and TINGridding tools to include a maximum triangle
edge-length parameter, which helps avoid the creation of spurious long, narrow triangles
in convex regions along the data boundaries.
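
As a usage sketch for the new tools: assuming the compiled whitebox_tools binary is on
the PATH, placeholder file paths, and the conventional -r/--wd/--dem/-o argument style
(verify the exact parameters with --toolhelp=BreachDepressionsLeastCost), a run can be
driven from Rust with std::process::Command:

use std::process::Command;

fn main() -> std::io::Result<()> {
    // Run the new breaching tool on a placeholder DEM; all paths are examples.
    let status = Command::new("whitebox_tools")
        .args(&[
            "-r=BreachDepressionsLeastCost",
            "--wd=/path/to/data/",
            "--dem=DEM.tif",
            "-o=breached_DEM.tif",
        ])
        .status()?;
    println!("whitebox_tools exited with: {}", status);
    Ok(())
}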

Version 1.0.2 (01-11-2019)
- Added the BurnStreamsAtRoads tool.
- Added a two-sample K-S test (TwoSampleKsTest) for comparing the distributions of two rasters.
55 changes: 55 additions & 0 deletions src/algorithms/delaunay_triangulation.rs
@@ -45,6 +45,7 @@ println!("{:?}", result.triangles); // [0, 2, 1, 0, 3, 2]

use crate::structures::Point2D;
use std::f64;
use std::collections::HashSet;

/// Represents the area outside of the triangulation.
/// Halfedges on the convex hull (which don't have an adjacent halfedge)
@@ -165,6 +166,60 @@ impl Triangulation {
        result
    }

    /// Returns the point indices of the natural neighbours of the point that the
    /// incoming halfedge `start` points to, by walking the halfedges around that
    /// point until the walk returns to `start` or reaches the hull (EMPTY).
    pub fn natural_neighbours_from_incoming_edge(&self, start: usize) -> Vec<usize> {
        let mut result = vec![];
        let mut incoming = start;
        let mut outgoing: usize;
        loop {
            // triangles[incoming] is the vertex this halfedge starts from,
            // i.e. a neighbour of the target point.
            result.push(self.triangles[incoming]);
            outgoing = self.next_halfedge(incoming);
            incoming = self.halfedges[outgoing];
            if incoming == EMPTY || incoming == start {
                break;
            }
        }
        result
    }

    /// Returns the point indices of the first- and second-order natural neighbours
    /// of the point that the incoming halfedge `start` points to. The walk used in
    /// `natural_neighbours_from_incoming_edge` is repeated from each outgoing edge
    /// so that neighbours-of-neighbours are collected as well; a HashSet removes
    /// the duplicates shared between adjacent walks.
    pub fn natural_neighbours_2nd_order(&self, start: usize) -> Vec<usize> {
        let mut set = HashSet::new();
        let mut edges = vec![];
        let mut incoming = start;
        let mut outgoing: usize;
        // First pass: walk around the target point, recording its immediate
        // neighbours and the outgoing edges used to reach them.
        loop {
            set.insert(self.triangles[incoming]);
            outgoing = self.next_halfedge(incoming);
            incoming = self.halfedges[outgoing];
            edges.push(outgoing);
            if incoming == EMPTY || incoming == start {
                break;
            }
        }

        // Second pass: repeat the walk from each recorded edge to pick up the
        // second-order neighbours.
        for start in edges {
            incoming = start;
            loop {
                set.insert(self.triangles[incoming]);
                outgoing = self.next_halfedge(incoming);
                incoming = self.halfedges[outgoing];
                if incoming == EMPTY || incoming == start {
                    break;
                }
            }
        }

        set.into_iter().collect()
    }

    /// Returns the indices of the triangles adjacent to a given triangle.
    pub fn triangles_adjacent_to_triangle(&self, triangle: usize) -> Vec<usize> {
        let mut adjacent_triangles: Vec<usize> = vec![];
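For context on the two walks added above: the next_halfedge helper they rely on is not
shown in this hunk. This module appears to follow the delaunator convention, in which
halfedges are stored three per triangle, so stepping to the next halfedge is plain index
arithmetic. The following is a minimal, self-contained sketch of that assumed convention,
for illustration only (it is not this module's code):

// Sketch of the halfedge stepping rule assumed by the neighbour walks above.
// In the delaunator convention, halfedges 3t, 3t+1, and 3t+2 belong to
// triangle t, so the "next" halfedge cycles within the same triangle.
fn next_halfedge(e: usize) -> usize {
    if e % 3 == 2 { e - 2 } else { e + 1 }
}

fn main() {
    // Halfedges 0, 1, and 2 all belong to triangle 0.
    assert_eq!(next_halfedge(0), 1);
    assert_eq!(next_halfedge(1), 2);
    assert_eq!(next_halfedge(2), 0); // wraps back to the triangle's first edge
    println!("halfedge stepping checks passed");
}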
2 changes: 2 additions & 0 deletions src/raster/arcascii_raster.rs
@@ -112,6 +112,8 @@ pub fn read_arcascii(
yllcenter - (0.5 * configs.resolution_y) + (configs.rows as f64) * configs.resolution_y;
}

configs.photometric_interp = PhotometricInterpretation::Continuous;

Ok(())
}

2 changes: 2 additions & 0 deletions src/raster/arcbinary_raster.rs
@@ -83,6 +83,8 @@ pub fn read_arcbinary(
}
}

configs.photometric_interp = PhotometricInterpretation::Continuous;

configs.data_type = DataType::F32;

// set the North, East, South, and West coordinates
18 changes: 16 additions & 2 deletions src/raster/geotiff/geokeys.rs
@@ -1,5 +1,6 @@
use super::Ifd;
use crate::utils::{ByteOrderReader, Endianness};
use crate::spatial_ref_system;
use std::collections::HashMap;
use std::fmt;
use std::mem::transmute;
@@ -205,8 +206,20 @@ impl GeoKeys {
  if keyword_map.contains_key(&key_code) {
      match keyword_map.get(&key_code) {
          Some(hm) => match hm.get(&value_offset) {
-             Some(v) => value = format!("{} ({})", v.to_string(), value_offset), //v.to_string(),
-             None => value = format!("Unrecognized value ({})", value_offset),
+             Some(v) => {
+                 // GeographicTypeGeoKey (2048) and ProjectedCSTypeGeoKey (3072)
+                 // hold EPSG codes; report the matching ESRI WKT string for these.
+                 value = if key_code == 3072 || key_code == 2048 {
+                     format!("{} ({})", v.to_string(), spatial_ref_system::esri_wkt_from_epsg(value_offset))
+                 } else {
+                     format!("{} ({})", v.to_string(), value_offset)
+                 };
+             },
+             None => {
+                 value = if key_code == 3072 || key_code == 2048 {
+                     spatial_ref_system::esri_wkt_from_epsg(value_offset)
+                 } else {
+                     format!("Unrecognized value ({})", value_offset)
+                 };
+             }
          },
          None => value = format!("Unrecognized value ({})", key_code),
      }
@@ -2651,6 +2664,7 @@ pub fn get_keyword_map() -> HashMap<u16, HashMap<u16, &'static str>> {
kw.insert(3076u16, proj_linear_units_map);

let vertical_cs_type_map = hashmap![
1127=>"Canadian Geodetic Vertical Datum of 2013 (CGVD2013)",
5001=>"VertCS_Airy_1830_ellipsoid",
5002=>"VertCS_Airy_Modified_1849_ellipsoid",
5003=>"VertCS_ANS_ellipsoid",
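To make the esri_wkt_from_epsg calls above concrete: the helper maps an EPSG integer code
to an ESRI-style WKT string. The real spatial_ref_system module covers the full EPSG
table; the toy sketch below handles a single code (4326, WGS 84) purely for illustration
and is not the library's implementation:

// Minimal sketch of an EPSG -> ESRI WKT lookup of the kind used above.
fn esri_wkt_from_epsg(epsg: u16) -> String {
    match epsg {
        4326 => "GEOGCS[\"GCS_WGS_1984\",DATUM[\"D_WGS_1984\",\
                 SPHEROID[\"WGS_1984\",6378137.0,298.257223563]],\
                 PRIMEM[\"Greenwich\",0.0],\
                 UNIT[\"Degree\",0.0174532925199433]]"
            .to_string(),
        _ => format!("Unrecognized EPSG code ({})", epsg),
    }
}

fn main() {
    println!("{}", esri_wkt_from_epsg(4326));
}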
