diff --git a/.DS_Store b/.DS_Store index 2c3a358aa..72619a9f8 100755 Binary files a/.DS_Store and b/.DS_Store differ diff --git a/Cargo.lock b/Cargo.lock index af9efc0d0..2936d8e5b 100755 --- a/Cargo.lock +++ b/Cargo.lock @@ -114,7 +114,7 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc120354d1b5ec6d7aaf4876b602def75595937b5e15d356eb554ab5177e08bb" dependencies = [ - "clipboard-win", + "clipboard-win 4.4.2", "log", "objc", "objc-foundation", @@ -122,7 +122,7 @@ dependencies = [ "parking_lot", "thiserror", "winapi", - "x11rb", + "x11rb 0.9.0", ] [[package]] @@ -370,6 +370,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "clipboard-win" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fdf5e01086b6be750428ba4a40619f847eb2e95756eee84b18e06e5f0b50342" +dependencies = [ + "lazy-bytes-cast", + "winapi", +] + [[package]] name = "clipboard-win" version = "4.4.2" @@ -462,6 +472,20 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28b9d6de7f49e22cf97ad17fc4036ece69300032f45f78f30b4a4482cdc3f4a6" +[[package]] +name = "copypasta" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "133fc8675ee3a4ec9aa513584deda9aa0faeda3586b87f7f0f2ba082c66fb172" +dependencies = [ + "clipboard-win 3.1.1", + "objc", + "objc-foundation", + "objc_id", + "smithay-clipboard", + "x11-clipboard", +] + [[package]] name = "core-foundation" version = "0.9.3" @@ -877,6 +901,12 @@ dependencies = [ "str-buf", ] +[[package]] +name = "evalexpr" +version = "10.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6cffb2e9992a43637801ac84ca58bdb5acb2a426ab88a1e3d343b96cfaa845c" + [[package]] name = "expat-sys" version = "2.1.6" @@ -1652,6 +1682,12 @@ dependencies = [ "num-traits", ] +[[package]] +name = "lazy-bytes-cast" +version = "5.0.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10257499f089cd156ad82d0a9cd57d9501fa2c989068992a97eb3c27836f206b" + [[package]] name = "lazy_static" version = "1.4.0" @@ -3778,12 +3814,14 @@ dependencies = [ name = "whitebox_plugins" version = "2.3.0" dependencies = [ + "evalexpr", "fasteval", "kd-tree 0.4.1", "kdtree", "nalgebra 0.18.1", "num_cpus", "rand 0.7.3", + "rayon", "rstar 0.9.3", "tsp-rs", "typenum", @@ -3812,6 +3850,7 @@ version = "2.0.0" dependencies = [ "anyhow", "case", + "copypasta", "eframe", "egui", "egui_extras", @@ -4111,6 +4150,15 @@ dependencies = [ "winapi", ] +[[package]] +name = "x11-clipboard" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "980b9aa9226c3b7de8e2adb11bf20124327c054e0e5812d2aac0b5b5a87e7464" +dependencies = [ + "x11rb 0.10.1", +] + [[package]] name = "x11-dl" version = "2.20.0" @@ -4134,6 +4182,28 @@ dependencies = [ "winapi-wsapoll", ] +[[package]] +name = "x11rb" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "592b4883219f345e712b3209c62654ebda0bb50887f330cbd018d0f654bfd507" +dependencies = [ + "gethostname", + "nix 0.24.2", + "winapi", + "winapi-wsapoll", + "x11rb-protocol", +] + +[[package]] +name = "x11rb-protocol" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56b245751c0ac9db0e006dc812031482784e434630205a93c73cfefcaabeac67" +dependencies = [ + "nix 0.24.2", +] + [[package]] name = "xcursor" version = "0.3.4" diff --git a/WhiteboxTools_darwin_m_series.zip b/WhiteboxTools_darwin_m_series.zip deleted file mode 100644 index 22b50e02a..000000000 Binary files a/WhiteboxTools_darwin_m_series.zip and /dev/null differ diff --git a/doc_img/.DS_Store b/doc_img/.DS_Store new file mode 100644 index 000000000..5008ddfcf Binary files /dev/null and b/doc_img/.DS_Store differ diff --git a/readme.txt b/readme.txt index 
95769b075..153be6547 100755 --- a/readme.txt +++ b/readme.txt @@ -56,6 +56,13 @@ for more details. * Release Notes: * ****************** +Version 2.4.0 (xx-xx-20xx) +- Added the ExtractByAttribute tool to filter out vector features by attribute characteristics. +- Added the DeviationFromRegionalDirection tool. +- Added the OtsuThresholding tool, which uses Ostu's method for optimal binary thresholding, + transforming the input image into background and foreground pixels. +- Fixed a bug with polygon holes in the RasterToVectorPolygons tool. + Version 2.3.0 (28-03-2023) - Added the new Whitebox Runner v2.0. This version of WbRunner is an entirely new application with many advancements over the previous version of the WbRunner. It is now written in pure Rust (compared with diff --git a/whitebox-plugins/.DS_Store b/whitebox-plugins/.DS_Store index 362703459..6de47b3a5 100755 Binary files a/whitebox-plugins/.DS_Store and b/whitebox-plugins/.DS_Store differ diff --git a/whitebox-plugins/Cargo.toml b/whitebox-plugins/Cargo.toml index 7f0372263..c94abbd00 100755 --- a/whitebox-plugins/Cargo.toml +++ b/whitebox-plugins/Cargo.toml @@ -20,6 +20,10 @@ path = "src/edge_contamination/main.rs" name = "exposure_towards_wind_flux" path = "src/exposure_towards_wind_flux/main.rs" +[[bin]] +name = "extract_by_attribute" +path = "src/extract_by_attribute/main.rs" + [[bin]] name = "gaussian_scale_space" path = "src/gaussian_scale_space/main.rs" @@ -56,6 +60,10 @@ path = "src/max_upslope_value/main.rs" name = "normalize_lidar" path = "src/normalize_lidar/main.rs" +[[bin]] +name = "otsu_thresholding" +path = "src/otsu_thresholding/main.rs" + [[bin]] name = "qin_flow_accumulation" path = "src/qin_flow_accumulation/main.rs" @@ -89,12 +97,14 @@ name = "vector_stream_network_analysis" path = "src/vector_stream_network_analysis/main.rs" [dependencies] +evalexpr = "10.0.0" fasteval = "0.2.4" kd-tree = { version = "0.4.1", features = ["rayon"] } kdtree = "0.6.0" nalgebra = "0.18.0" num_cpus = 
"1.13.0" rand = { version = "0.7", features = ["small_rng"] } +rayon = "1.7.0" rstar = "0.9.3" tsp-rs = "0.1.0" typenum = "1.15.0" diff --git a/whitebox-plugins/src/.DS_Store b/whitebox-plugins/src/.DS_Store index 296498a1f..5fb4ee350 100755 Binary files a/whitebox-plugins/src/.DS_Store and b/whitebox-plugins/src/.DS_Store differ diff --git a/whitebox-plugins/src/extract_by_attribute/extract_by_attribute.json b/whitebox-plugins/src/extract_by_attribute/extract_by_attribute.json new file mode 100755 index 000000000..112145480 --- /dev/null +++ b/whitebox-plugins/src/extract_by_attribute/extract_by_attribute.json @@ -0,0 +1,34 @@ +{ + "tool_name": "ExtractByAttribute", + "exe": "extract_by_attribute", + "short_description": "Extracts features from an input vector into an output file based on attribute properties.", + "toolbox": "GIS Analysis", + "license": "MIT", + "example": ">> .*EXE_NAME -r=ExtractByAttribute -i=input.shp -o=output.shp --statement=\"ELEV>500.0\"", + "parameters": [ + { + "name": "Input Vector File", + "flags": ["-i", "--input"], + "description": "Name of the input vector file.", + "parameter_type": {"ExistingFile":{"Vector":"Any"}}, + "default_value": null, + "optional": true + }, + { + "name": "Output Vector File", + "flags": ["-o", "--output"], + "description": "Name of the output LiDAR points.", + "parameter_type": {"NewFile":{"Vector":"Any"}}, + "default_value": null, + "optional": true + }, + { + "name": "Statement:", + "flags": ["-s", "--statement"], + "description": "Modify statement e.g. ELEV>500.0.", + "parameter_type": "String", + "default_value": "", + "optional": false + } + ] +} \ No newline at end of file diff --git a/whitebox-plugins/src/extract_by_attribute/main.rs b/whitebox-plugins/src/extract_by_attribute/main.rs new file mode 100755 index 000000000..015790982 --- /dev/null +++ b/whitebox-plugins/src/extract_by_attribute/main.rs @@ -0,0 +1,283 @@ +/* +Authors: Dr. 
John Lindsay +Created: 21/07/2021 +Last Modified: 21/07/2021 +License: MIT +*/ + +use std::env; +use std::f64; +use std::io::{Error, ErrorKind}; +use std::path; +use std::str; +use std::time::Instant; +// use std::sync::mpsc; +// use std::sync::Arc; +// use std::thread; +// use num_cpus; +use whitebox_common::utils::get_formatted_elapsed_time; +use whitebox_vector::{FieldData, Shapefile, ShapeType}; +use evalexpr::*; + +/// The +fn main() { + let args: Vec = env::args().collect(); + + if args[1].trim() == "run" { + match run(&args) { + Ok(_) => {} + Err(e) => panic!("{:?}", e), + } + } + + if args.len() <= 1 || args[1].trim() == "help" { + // print help + help(); + } + + if args[1].trim() == "version" { + // print version information + version(); + } +} + +fn help() { + let mut ext = ""; + if cfg!(target_os = "windows") { + ext = ".exe"; + } + + let exe_name = &format!("extract_by_attribute{}", ext); + let sep: String = path::MAIN_SEPARATOR.to_string(); + let s = r#" + extract_by_attribute Help + + The ExtractByAttribute tool can be used to perform an complex mathematical operations on one or more input + raster images on a cell-to-cell basis. + + The following commands are recognized: + help Prints help information. + run Runs the tool. + version Prints the tool version information. + + The following flags can be used with the 'run' command: + -o, --output Name of the output raster file. + --statement Statement of a mathematical expression e.g. "raster1" > 35.0. + + Input/output file names can be fully qualified, or can rely on the working directory contained in + the WhiteboxTools settings.json file. + + Example Usage: + >> .*EXE_NAME run -i=DEM.tif --statement='value > 2500.0' --true=2500.0 --false=DEM.tif --output=onlyLowPlaces.tif + "# + .replace("*", &sep) + .replace("EXE_NAME", exe_name); + println!("{}", s); +} + +fn version() { + const VERSION: Option<&'static str> = option_env!("CARGO_PKG_VERSION"); + println!( + "extract_by_attribute v{} by Dr. John B. 
Lindsay (c) 2023.", + VERSION.unwrap_or("Unknown version") + ); +} + +fn get_tool_name() -> String { + String::from("ExtractByAttribute") // This should be camel case and is a reference to the tool name. +} + +fn run(args: &Vec) -> Result<(), std::io::Error> { + let tool_name = get_tool_name(); + + let sep: String = path::MAIN_SEPARATOR.to_string(); + + // Read in the environment variables and get the necessary values + let configurations = whitebox_common::configs::get_configs()?; + let mut working_directory = configurations.working_directory.clone(); + if !working_directory.is_empty() && !working_directory.ends_with(&sep) { + working_directory += &sep; + } + // let max_procs = configurations.max_procs; + + // read the arguments + let mut statement = String::new(); + let mut input_file: String = String::new(); + let mut output_file: String = String::new(); + + if args.len() <= 1 { + return Err(Error::new( + ErrorKind::InvalidInput, + "Tool run with too few parameters.", + )); + } + for i in 0..args.len() { + let arg = if !args[i].contains("--statement") { + args[i].replace("\"", "").replace("\'", "") + } else { + args[i].clone() + }; + let cmd = arg.split("="); // in case an equals sign was used + let vec = cmd.collect::>(); + let mut keyval = false; + if vec.len() > 1 { + keyval = true; + } + let flag_val = vec[0].to_lowercase().replace("--", "-"); + if flag_val == "-i" || flag_val == "-input" { + input_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-o" || flag_val == "-output" { + output_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if arg.contains("-statement") { + statement = arg.replace("--statement=", "") + .replace("-statement=", "") + .replace("--statement", "") + .replace("-statement", ""); + } + } + + if configurations.verbose_mode { + let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28); + // 28 = length of the 'Powered by' by 
statement. + println!("{}", "*".repeat(welcome_len)); + println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len())); + println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28)); + println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23)); + println!("{}", "*".repeat(welcome_len)); + } + + let mut progress: usize; + let mut old_progress: usize = 1; + + let start = Instant::now(); + + if !input_file.contains(&sep) && !input_file.contains("/") { + input_file = format!("{}{}", working_directory, input_file); + } + + if !output_file.contains(&sep) && !output_file.contains("/") { + output_file = format!("{}{}", working_directory, output_file); + } + + let input = Shapefile::read(&input_file)?; + + let precompiled = build_operator_tree(&statement).unwrap(); // Do proper error handling here + + // create output file + let mut output = Shapefile::initialize_using_file(&output_file, &input, input.header.shape_type, true)?; + + let att_fields = input.attributes.get_fields(); + let mut contains_fid = false; + for i in 0..att_fields.len() { + if att_fields[i].name.to_lowercase() == "fid" { + contains_fid = true; + break; + } + } + + let mut num_extracted = 0; + for record_num in 0..input.num_records { + let record = input.get_record(record_num); + if record.shape_type != ShapeType::Null { + let att_data = input.attributes.get_record(record_num); + let mut context = HashMapContext::new(); + for i in 0..att_fields.len() { + match &att_data[i] { + FieldData::Int(val) => { + _ = context.set_value(att_fields[i].name.clone().into(), (*val as i64).into()); + } + FieldData::Real(val) => { + _ = context.set_value(att_fields[i].name.clone().into(), (*val).into()); + }, + FieldData::Text(val) => { + _ = context.set_value(att_fields[i].name.clone().into(), (&**val).into()); + }, + FieldData::Date(val) => { + _ = context.set_value(att_fields[i].name.clone().into(), format!("{}", *val).into()); + }, + FieldData::Bool(val) => { + _ = 
context.set_value(att_fields[i].name.clone().into(), (*val).into()); + }, + FieldData::Null => { + _ = context.set_value(att_fields[i].name.clone().into(), "null".into()); + } + } + } + + _ = context.set_value("null".into(), "null".into()); + _ = context.set_value("NULL".into(), "null".into()); + _ = context.set_value("none".into(), "null".into()); + _ = context.set_value("NONE".into(), "null".into()); + + if !contains_fid { // add the FID + _ = context.set_value("FID".into(), (record_num as i64).into()); + } + + // let ret = eval_boolean_with_context(&statement, &context); + let ret = precompiled.eval_boolean_with_context(&context); + if ret.is_ok() { + let value = ret.unwrap_or(false); + if value { + output.add_record(record.clone()); + output.attributes.add_record(att_data.clone(), false); + + num_extracted += 1; + + // tx.send((i, true)).unwrap(); + // } else { + // tx.send((i, false)).unwrap(); + } + // } else { + // tx.send((i, false)).unwrap(); + } + } + + if configurations.verbose_mode { + progress = + (100.0_f64 * (record_num + 1) as f64 / input.num_records as f64) as usize; + if progress != old_progress { + println!("Progress: {}%", progress); + old_progress = progress; + } + } + } + + if configurations.verbose_mode { + println!("Number of extracted features: {num_extracted}"); + } + + if num_extracted > 0 { + if configurations.verbose_mode { + println!("Saving data...") + } + let _ = match output.write() { + Ok(_) => { + if configurations.verbose_mode { + println!("Output file written") + } + } + Err(e) => return Err(e), + }; + } else { + println!("WARNING: No features were selected and therefore no output file will be written."); + } + + let elapsed_time = get_formatted_elapsed_time(start); + + if configurations.verbose_mode { + println!( + "\n{}", + &format!("Elapsed Time (Including I/O): {}", elapsed_time) + ); + } + + Ok(()) +} diff --git a/whitebox-plugins/src/heat_map/heat_map.json b/whitebox-plugins/src/heat_map/heat_map.json index 
8be8bd709..a1adbd15d 100755 --- a/whitebox-plugins/src/heat_map/heat_map.json +++ b/whitebox-plugins/src/heat_map/heat_map.json @@ -3,7 +3,7 @@ "exe": "heat_map", "short_description": "Calculates a heat map, or kernel density estimation (KDE), for an input point set.", "toolbox": "GIS Analysis", - "license": "Proprietary", + "license": "MIT", "example": ">> .*EXE_NAME -r=HeatMap -i=points.shp -o=density.tif --bandwidth=1000.0 --kernel='quartic' --cell_size=10.0", "parameters": [ { diff --git a/whitebox-plugins/src/individual_tree_detection/main.rs b/whitebox-plugins/src/individual_tree_detection/main.rs index 4336504cb..54913b7cd 100644 --- a/whitebox-plugins/src/individual_tree_detection/main.rs +++ b/whitebox-plugins/src/individual_tree_detection/main.rs @@ -368,33 +368,35 @@ fn run(args: &Vec) -> Result<(), std::io::Error> { pd = input[point_num]; if !pd.withheld() && !pd.is_classified_noise() && (!only_use_veg || pd.is_classified_vegetation()) { p = input.get_transformed_coords(point_num); - radius = if p.z < min_height { - min_search_radius - } else if p.z > max_height { - max_search_radius - } else { - min_search_radius + (p.z - min_height) / height_range * radius_range - }; - let found = kdtree.within_radius(&[p.x, p.y], radius); - is_highest_pt = true; - for i in 0..found.len() { - index_n = found[i].id; - p2 = input.get_transformed_coords(index_n); - if p2.z > p.z { - is_highest_pt = false; - break; + if p.z >= min_height { + radius = if p.z < min_height { + min_search_radius + } else if p.z > max_height { + max_search_radius + } else { + min_search_radius + (p.z - min_height) / height_range * radius_range + }; + let found = kdtree.within_radius(&[p.x, p.y], radius); + is_highest_pt = true; + for i in 0..found.len() { + index_n = found[i].id; + p2 = input.get_transformed_coords(index_n); + if p2.z > p.z { + is_highest_pt = false; + break; + } } - } - if is_highest_pt { - output.add_point_record(p.x, p.y); - output.attributes.add_record( - vec![ - 
FieldData::Int(point_num as i32 + 1i32), - FieldData::Real(p.z) - ], - false, - ); + if is_highest_pt { + output.add_point_record(p.x, p.y); + output.attributes.add_record( + vec![ + FieldData::Int(point_num as i32 + 1i32), + FieldData::Real(p.z) + ], + false, + ); + } } } } diff --git a/whitebox-plugins/src/install_wb_extension/install_wb_extension.json b/whitebox-plugins/src/install_wb_extension/install_wb_extension.json index 4d40ad401..7bbe50539 100755 --- a/whitebox-plugins/src/install_wb_extension/install_wb_extension.json +++ b/whitebox-plugins/src/install_wb_extension/install_wb_extension.json @@ -4,7 +4,7 @@ "short_description": "Use to install a Whitebox extension product.", "toolbox": "Whitebox Utilities", "license": "Proprietary", - "example": ">> .*EXE_NAME -r=LaunchWbRunner", + "example": ">> .*EXE_NAME -r=InstallWbExtension", "parameters": [ { "name": "Whitebox Extension Product Name", diff --git a/whitebox-plugins/src/install_wb_extension/main.rs b/whitebox-plugins/src/install_wb_extension/main.rs index 125c01af1..793ef18fc 100644 --- a/whitebox-plugins/src/install_wb_extension/main.rs +++ b/whitebox-plugins/src/install_wb_extension/main.rs @@ -12,11 +12,12 @@ use std::{ process::Command, }; -/// This tool can be used to launch the Whitebox Runner application from within other Whitebox front-ends. -/// The purpose of this tool is to make the Whitebox Runner more accessible from other Whitebox front-ends. -/// However, note that you can also launch the Whitebox Runner simply by double-clicking on the executable -/// file (`whitebox_runner.exe` on Windows, `whitebox_tools` on other systems) located within your WBT -/// directory, containing your Whitebox installation. +/// This tool can be used to install the [Whitebox Toolset Extension](https://www.whiteboxgeo.com/whitebox-toolset-extension/) +/// (WTE). The WTE is a commercial add-on for the WhiteboxTools Open Core and contains more than 60 advanced tools for +/// geospatial data processing. 
The WTE easily integrates into your current WhiteboxTools environment. This tool will launch +/// the Whitebox Runner, allowing users a convenient way to install the extension. While this tool will install the extension +/// running the extension tools does require a valid license, which can be purchased from +/// [Whitebox Geospatial Inc.](https://www.whiteboxgeo.com/extension-pricing/). fn main() { let args: Vec = env::args().collect(); @@ -76,9 +77,9 @@ fn version() { ); } -pub fn get_tool_name() -> String { - String::from("InstallWbExtension") // This should be camel case and is a reference to the tool name. -} +// fn get_tool_name() -> String { +// String::from("InstallWbExtension") // This should be camel case and is a reference to the tool name. +// } fn run(args: &Vec) -> Result<(), std::io::Error> { // read the arguments diff --git a/whitebox-plugins/src/launch_wb_runner/main.rs b/whitebox-plugins/src/launch_wb_runner/main.rs index ea949a018..b6513a828 100644 --- a/whitebox-plugins/src/launch_wb_runner/main.rs +++ b/whitebox-plugins/src/launch_wb_runner/main.rs @@ -77,9 +77,9 @@ fn version() { ); } -pub fn get_tool_name() -> String { - String::from("LaunchWbRunner") // This should be camel case and is a reference to the tool name. -} +// fn get_tool_name() -> String { +// String::from("LaunchWbRunner") // This should be camel case and is a reference to the tool name. +// } fn run(args: &Vec) -> Result<(), std::io::Error> { // read the arguments diff --git a/whitebox-plugins/src/otsu_thresholding/main.rs b/whitebox-plugins/src/otsu_thresholding/main.rs new file mode 100755 index 000000000..1113ce242 --- /dev/null +++ b/whitebox-plugins/src/otsu_thresholding/main.rs @@ -0,0 +1,431 @@ +/* +Authors: Whitebox Geospatial Inc. (c) +Developer: Dr. John Lindsay +Created: 03/06/2023 +Last Modified: 03/06/2023 +License: Whitebox Geospatial Inc. 
License Agreement +*/ + +use std::env; +use std::f64; +use std::io::{Error, ErrorKind}; +use std::path; +use std::str; +use std::time::Instant; +use std::sync::mpsc; +use std::sync::Arc; +use std::thread; +use whitebox_common::utils::get_formatted_elapsed_time; +use whitebox_raster::*; +use num_cpus; + +/// This tool uses [Ostu's method](https://en.wikipedia.org/wiki/Otsu%27s_method) for optimal automatic binary thresholding, +/// transforming an input image (`--input`) into background and foreground pixels (`--output`). Otsu’s method uses the grayscale +/// image histogram to detect an optimal threshold value that separates two regions with maximum inter-class variance. +/// The process begins by calculating the image histogram of the input. +/// +/// # References +/// Otsu, N., 1979. A threshold selection method from gray-level histograms. IEEE transactions on +/// systems, man, and cybernetics, 9(1), pp.62-66. +/// +/// # See Also +/// `ImageSegmentation` +fn main() { + + let args: Vec = env::args().collect(); + + if args[1].trim() == "run" { + match run(&args) { + Ok(_) => {}, + Err(e) => panic!("{:?}", e), + } + } + + if args.len() <= 1 || args[1].trim() == "help" { + // print help + help(); + } + + if args[1].trim() == "version" { + // print version information + version(); + } + +} + +fn help() { + let mut ext = ""; + if cfg!(target_os = "windows") { + ext = ".exe"; + } + + let exe_name = &format!("otsu_thresholding{}", ext); + let sep: String = path::MAIN_SEPARATOR.to_string(); + let s = r#" + otsu_thresholding Help + + This tool performs a Canny edge-detection filter on an input image. + + The following commands are recognized: + help Prints help information. + run Runs the tool. + version Prints the tool version information. + + The following flags can be used with the 'run' command: + -d, --dem Name of the input DEM raster file. + -o, --output Name of the output raster file. + --azimuth Wind azimuth, in degrees. 
+ --max_dist Optional maximum search distance. Minimum value is 5 x cell size. + --z_factor Optional multiplier for when the vertical and horizontal units are not the same. + + Input/output file names can be fully qualified, or can rely on the + working directory contained in the WhiteboxTools settings.json file. + + Example Usage: + >> .*EXE_NAME run -i=input.tif -o=new.tif --sigma=0.25 --low=0.1 --high=0.2 + + Note: Use of this tool requires a valid license. To obtain a license, + contact Whitebox Geospatial Inc. (support@whiteboxgeo.com). + "# + .replace("*", &sep) + .replace("EXE_NAME", exe_name); + println!("{}", s); +} + +fn version() { + const VERSION: Option<&'static str> = option_env!("CARGO_PKG_VERSION"); + println!( + "otsu_thresholding v{} by Dr. John B. Lindsay (c) 2023.", + VERSION.unwrap_or("Unknown version") + ); +} + +fn get_tool_name() -> String { + String::from("OtsuThresholding") +} + +fn run(args: &Vec) -> Result<(), std::io::Error> { + let tool_name = get_tool_name(); + + let sep: String = path::MAIN_SEPARATOR.to_string(); + + // Read in the environment variables and get the necessary values + let configurations = whitebox_common::configs::get_configs()?; + let mut working_directory = configurations.working_directory.clone(); + if !working_directory.is_empty() && !working_directory.ends_with(&sep) { + working_directory += &sep; + } + let max_procs = configurations.max_procs as isize; + + // read the arguments + let mut input_file: String = String::new(); + let mut output_file: String = String::new(); + // let mut num_bins = 256usize; + + if args.len() <= 1 { + return Err(Error::new( + ErrorKind::InvalidInput, + "Tool run with too few parameters.", + )); + } + for i in 0..args.len() { + let mut arg = args[i].replace("\"", ""); + arg = arg.replace("\'", ""); + let cmd = arg.split("="); // in case an equals sign was used + let vec = cmd.collect::>(); + let mut keyval = false; + if vec.len() > 1 { + keyval = true; + } + let flag_val = 
vec[0].to_lowercase().replace("--", "-"); + if flag_val == "-i" || flag_val == "-input" { + input_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-o" || flag_val == "-output" { + output_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + // } else if flag_val == "-num_bins" { + // num_bins = if keyval { + // vec[1] + // .to_string() + // .parse::() + // .expect(&format!("Error parsing {}", flag_val)) + // } else { + // args[i + 1] + // .to_string() + // .parse::() + // .expect(&format!("Error parsing {}", flag_val)) + // }; + } + } + + if configurations.verbose_mode { + let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28); + // 28 = length of the 'Powered by' by statement. + println!("{}", "*".repeat(welcome_len)); + println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len())); + println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28)); + println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23)); + println!("{}", "*".repeat(welcome_len)); + } + + let mut progress: usize; + let mut old_progress: usize = 1; + + let start = Instant::now(); + + if !input_file.contains(&sep) && !input_file.contains("/") { + input_file = format!("{}{}", working_directory, input_file); + } + if !output_file.contains(&sep) && !output_file.contains("/") { + output_file = format!("{}{}", working_directory, output_file); + } + + + // Read in the input raster + let input = Raster::new(&input_file, "r")?; + let configs = input.configs.clone(); + let rows = configs.rows as isize; + let columns = configs.columns as isize; + let nodata = configs.nodata; + + let input = Arc::new(input); + + ///////////////////////////// + // Calculate the histogram // + ///////////////////////////// + + let is_rgb_image = if input.configs.data_type == DataType::RGB24 + || input.configs.data_type == DataType::RGBA32 + || input.configs.photometric_interp 
== PhotometricInterpretation::RGB + { + true + } else { + false + }; + + + if configurations.verbose_mode { + println!("Calculating histogram..."); + } + + + + // create the histogram + let mut num_bins = 1024usize; + let min_value: f64; + let range: f64; + let bin_size: f64; + + if !is_rgb_image { + min_value = input.configs.minimum; + range = input.configs.maximum - min_value; + if range.round() as usize > num_bins { + num_bins = range.round() as usize; + } + bin_size = range / (num_bins - 1) as f64; + } else { + min_value = 0f64; + range = 1f64; + bin_size = range / (num_bins - 1) as f64; + } + + let mut num_procs = num_cpus::get() as isize; + if max_procs > 0 && max_procs < num_procs { + num_procs = max_procs; + } + + let (tx, rx) = mpsc::channel(); + for tid in 0..num_procs { + let input = input.clone(); + let tx = tx.clone(); + thread::spawn(move || { + let mut histo = vec![0; num_bins]; + let mut z: f64; + let mut bin: usize; + + let input_fn: Box usize> = if !is_rgb_image { + Box::new(|row: isize, col: isize| -> usize { + let x = input.get_value(row, col); + ((x - min_value) / bin_size).floor() as usize + }) + } else { + Box::new(|row: isize, col: isize| -> usize { + let value = input.get_value(row, col); + let x = value2i(value); + ((x - min_value) / bin_size).floor() as usize + }) + }; + + for row in (0..rows).filter(|r| r % num_procs == tid) { + for col in 0..columns { + z = input.get_value(row, col); + if z != nodata { + bin = input_fn(row, col); + histo[bin] += 1; + } + } + } + + tx.send(histo).unwrap(); + }); + } + + let mut histo = vec![0; num_bins]; + let mut total_cells = 0; + let mut procs_completed = 0; + for _ in 0..num_procs { + let proc_histo = rx.recv().expect("Error receiving data from thread."); + for bin in 0..num_bins { + histo[bin] += proc_histo[bin]; + total_cells += proc_histo[bin]; + } + + if configurations.verbose_mode { + procs_completed += 1; + progress = (100.0_f64 * procs_completed as f64 / num_procs as f64) as usize; + if 
progress != old_progress { + println!("Calculating histogram: {}%", progress); + old_progress = progress; + } + } + } + + + + + let mut cumulative_histo = Vec::with_capacity(num_bins); + for bin in 0..num_bins { + if bin > 0 { + cumulative_histo.push(histo[bin] + cumulative_histo[bin - 1]); + } else { + cumulative_histo.push(histo[bin]); + } + } + let cdf: Vec = cumulative_histo.into_iter().map(|x| x as f64 / total_cells as f64).collect(); + + let (mut w0, mut w1): (f64, f64); + let (mut m0, mut m1): (f64, f64); + let mut var: f64; + let mut max_var = 0f64; + let mut max_i = 0; + for bin in 0..num_bins-1 { + w0 = cdf[bin]; + w1 = 1.0 - w0; + m0 = (0..=bin).into_iter().map(|i| i * histo[i]).sum::() as f64 / (w0 * total_cells as f64); + m1 = (bin+1..num_bins).into_iter().map(|i| i * histo[i]).sum::() as f64 / (w1 * total_cells as f64); + var = w0 * w1 * (m0 - m1).powi(2); + if var > max_var { + max_var = var; + max_i = bin; + } + } + + if configurations.verbose_mode { + println!("Max variance: {:.2?}\nGreytone bin of max variance: {max_i} of {num_bins} bins", max_var); + } + + let out_nodata = -32768.0; + + let (tx, rx) = mpsc::channel(); + for tid in 0..num_procs { + let input = input.clone(); + let tx = tx.clone(); + thread::spawn(move || { + let mut z: f64; + let mut bin: usize; + + let input_fn: Box usize> = if !is_rgb_image { + Box::new(|row: isize, col: isize| -> usize { + let x = input.get_value(row, col); + ((x - min_value) / bin_size).floor() as usize + }) + } else { + Box::new(|row: isize, col: isize| -> usize { + let value = input.get_value(row, col); + let x = value2i(value); + ((x - min_value) / bin_size).floor() as usize + }) + }; + + for row in (0..rows).filter(|r| r % num_procs == tid) { + let mut data = vec![out_nodata; columns as usize]; + for col in 0..columns { + z = input.get_value(row, col); + if z != nodata { + bin = input_fn(row, col); + if bin <= max_i { + data[col as usize] = 0.0; + } else { + data[col as usize] = 1.0; + } + } + } + 
tx.send((row, data)).unwrap(); + } + }); + } + + let mut out_configs = input.configs.clone(); + out_configs.data_type = DataType::I16; + out_configs.nodata = out_nodata; + out_configs.photometric_interp = PhotometricInterpretation::Categorical; + let mut output = Raster::initialize_using_config(&output_file, &out_configs); + + for r in 0..rows { + let (row, data) = rx.recv().expect("Error receiving data from thread."); + output.set_row_data(row, data); + + if configurations.verbose_mode { + progress = (100.0_f64 * r as f64 / (rows - 1) as f64) as usize; + if progress != old_progress { + println!("Calculating index: {}%", progress); + old_progress = progress; + } + } + } + + drop(input); + + + ////////////////////// + // Output the image // + ////////////////////// + + let elapsed_time = get_formatted_elapsed_time(start); + + if configurations.verbose_mode { + println!( + "{}", + &format!("Elapsed Time (excluding I/O): {}", elapsed_time) + ); + } + + if configurations.verbose_mode { + println!("Saving data...") + }; + let _ = match output.write() { + Ok(_) => { + if configurations.verbose_mode { + println!("Output file written") + } + } + Err(e) => return Err(e), + }; + + Ok(()) +} + +fn value2i(value: f64) -> f64 { + let r = (value as u32 & 0xFF) as f64 / 255f64; + let g = ((value as u32 >> 8) & 0xFF) as f64 / 255f64; + let b = ((value as u32 >> 16) & 0xFF) as f64 / 255f64; + + (r + g + b) / 3f64 +} diff --git a/whitebox-plugins/src/otsu_thresholding/otsu_thresholding.json b/whitebox-plugins/src/otsu_thresholding/otsu_thresholding.json new file mode 100755 index 000000000..b2d76415c --- /dev/null +++ b/whitebox-plugins/src/otsu_thresholding/otsu_thresholding.json @@ -0,0 +1,26 @@ +{ + "tool_name": "OtsuThresholding", + "exe": "otsu_thresholding", + "short_description": "Applies Ostu's method for optimal binary thresholding of a continuous image.", + "toolbox": "Image Processing Tools", + "license": "MIT", + "example": ".*EXE_NAME -r=OtsuThresholding -i=input.tif 
-o=segmented.tif --num_bins=1024", + "parameters": [ + { + "name": "Input Raster", + "flags": ["-i", "--input"], + "description": "Name of the input raster image file.", + "parameter_type": {"ExistingFile":"Raster"}, + "default_value": null, + "optional": false + }, + { + "name": "Output Raster File", + "flags": ["-o", "--output"], + "description": "Name of the output raster file.", + "parameter_type": {"NewFile":"Raster"}, + "default_value": null, + "optional": false + } + ] +} \ No newline at end of file diff --git a/whitebox-plugins/src/repair_stream_vector_topology/main.rs b/whitebox-plugins/src/repair_stream_vector_topology/main.rs index 72c8ff4e2..61ccb9960 100644 --- a/whitebox-plugins/src/repair_stream_vector_topology/main.rs +++ b/whitebox-plugins/src/repair_stream_vector_topology/main.rs @@ -1,13 +1,16 @@ /* Authors: Prof. John Lindsay Created: 03/08/2021 (oringinally in Whitebox Toolset Extension) -Last Modified: 23/03/2023 +Last Modified: 19/05/2023 License: MIT */ +use rayon::prelude::*; use rstar::primitives::{GeomWithData, Line}; use rstar::RTree; +use std::sync::{Arc, Mutex}; use std::env; +use std::fs; use std::f64; use std::io::{Error, ErrorKind}; use std::ops::Index; @@ -43,11 +46,18 @@ const EPSILON: f64 = std::f64::EPSILON; /// /// ![](../../doc_img/RepairStreamVectorTopology.png) /// -/// The user must specify the name of the input vector stream network (`--input`) and the output file -/// (`--output`). Additionally, a distance threshold for snapping dangling arcs (`--snap`) must be -/// specified. This distance is in the input layer's x-y units. The tool works best on projected input +/// The user may optionally specify the name of the input vector stream network (`--input`) and the output file +/// (`--output`). Note that if an input file is not specified by the user, the tool will search for all vector +/// files (*.shp) contained within the current working directory. 
This feature can be very useful when +/// you need to process a large number of stream files contained within a single directory. The tool will +/// process the files in parallel in this batch mode. +/// +/// A distance threshold for snapping dangling arcs (`--snap`) must be specified by the user. This distance +/// is in the input layer's x-y units. The tool works best on projected input /// data, however, if the input are in geographic coordinates (latitude and longitude), then specifying a -/// small valued snap distance is advisable. Notice that the attributes of the input layer will not be +/// small valued snap distance is advisable. +/// +/// Notice that the attributes of the input layer will not be /// carried over to the output file because there is not a one-for-one feature correspondence between the /// two files due to the joins and splits of stream segments. Instead the output attribute table will /// only contain a feature ID (FID) entry. @@ -195,18 +205,71 @@ fn run(args: &Vec) -> Result<(), std::io::Error> { println!("{}", "*".repeat(welcome_len)); } - let mut progress: usize; - let mut old_progress: usize = 1; + // let mut progress: usize; + // let mut old_progress: usize = 1; + let old_progress = Arc::new(Mutex::new(1usize)); + let tiles_completed = Arc::new(Mutex::new(0usize)); let start = Instant::now(); - if !input_file.contains(&sep) && !input_file.contains("/") { - input_file = format!("{}{}", working_directory, input_file); + let mut inputs = vec![]; + let mut outputs = vec![]; + if input_file.is_empty() { + if working_directory.is_empty() { + return Err(Error::new(ErrorKind::InvalidInput, + "This tool must be run by specifying either an individual input file or a working directory.")); + } + if std::path::Path::new(&working_directory).is_dir() { + for entry in fs::read_dir(working_directory.clone())? { + let s = entry? 
+ .path() + .into_os_string() + .to_str() + .expect("Error reading path string") + .to_string(); + if s.to_lowercase().ends_with(".shp") { + inputs.push(s); + outputs.push( + inputs[inputs.len() - 1] + .replace(".shp", "_repaired.shp") + .replace(".SHP", "_repaired.shp"), + ) + } + } + } else { + return Err(Error::new( + ErrorKind::InvalidInput, + format!("The input directory ({}) is incorrect.", working_directory), + )); + } + } else { + if !input_file.contains(path::MAIN_SEPARATOR) && !input_file.contains("/") { + input_file = format!("{}{}", working_directory, input_file); + } + inputs.push(input_file.clone()); + if output_file.is_empty() { + output_file = input_file + .clone() + .replace(".las", ".tif") + .replace(".LAS", ".tif") + .replace(".laz", ".tif") + .replace(".LAZ", ".tif") + .replace(".zlidar", ".tif"); + } + if !output_file.contains(path::MAIN_SEPARATOR) && !output_file.contains("/") { + output_file = format!("{}{}", working_directory, output_file); + } + outputs.push(output_file); } + - if !output_file.contains(&sep) && !output_file.contains("/") { - output_file = format!("{}{}", working_directory, output_file); - } + // if !input_file.contains(&sep) && !input_file.contains("/") { + // input_file = format!("{}{}", working_directory, input_file); + // } + + // if !output_file.contains(&sep) && !output_file.contains("/") { + // output_file = format!("{}{}", working_directory, output_file); + // } if snap_dist <= 0f64 { if configurations.verbose_mode { @@ -214,226 +277,241 @@ fn run(args: &Vec) -> Result<(), std::io::Error> { } } - let input = Shapefile::read(&input_file)?; + let input_num_features = Arc::new(Mutex::new(0usize)); + let output_num_features = Arc::new(Mutex::new(0usize)); - // Make sure the input vector file is of polyline type - if input.header.shape_type.base_shape_type() != ShapeType::PolyLine { - return Err(Error::new( - ErrorKind::InvalidInput, - "The vector data must be of PolyLine base shape type.", - )); - } + 
(0..inputs.len()).into_par_iter().for_each(|k| { + let input_file = inputs[k].clone(); + let output_file = outputs[k].clone(); - // Read each line segment into an rtree. - type Location = GeomWithData<[f64; 2], usize>; - let mut line_segments = vec![]; - let mut end_nodes = vec![]; - let (mut part_start, mut part_end): (usize, usize); - let mut fid = 0usize; // fid is unique to each part in the vector - let mut segment_num = 0usize; - let mut polylines = vec![]; - for record_num in 0..input.num_records { - let record = input.get_record(record_num); - for part in 0..record.num_parts as usize { - part_start = record.parts[part] as usize; - part_end = if part < record.num_parts as usize - 1 { - record.parts[part + 1] as usize - 1 - } else { - record.num_points as usize - 1 - }; + let input = Shapefile::read(&input_file).expect("Error reading file"); //?; - polylines.push( - Polyline::new( - &record.points[part_start..=part_end], - fid - ) - ); - - // segment_num = 0; - // for i in part_start+1..=part_end { - // line_segments.push( - // LineWithData::new( - // (fid, segment_num), - // // fid, - // [record.points[i-1].x, record.points[i-1].y], - // [record.points[i].x, record.points[i].y] - // ) - // ); - // segment_num += 1; - // } + { + let mut input_num_features = input_num_features.lock().expect("Error unlocking input_num_features."); + *input_num_features += input.num_records; + } + + // Make sure the input vector file is of polyline type + if input.header.shape_type.base_shape_type() != ShapeType::PolyLine { + // return Err(Error::new( + // ErrorKind::InvalidInput, + // "The vector data must be of PolyLine base shape type.", + // )); + panic!("The vector data must be of PolyLine base shape type."); + } + + let mut progress: usize; + + // Read each line segment into an rtree. 
+ type Location = GeomWithData<[f64; 2], usize>; + let mut line_segments = vec![]; + let mut end_nodes = vec![]; + let (mut part_start, mut part_end): (usize, usize); + let mut fid = 0usize; // fid is unique to each part in the vector + let mut segment_num = 0usize; + let mut polylines = vec![]; + for record_num in 0..input.num_records { + let record = input.get_record(record_num); + for part in 0..record.num_parts as usize { + part_start = record.parts[part] as usize; + part_end = if part < record.num_parts as usize - 1 { + record.parts[part + 1] as usize - 1 + } else { + record.num_points as usize - 1 + }; - end_nodes.push(Location::new( - [record.points[part_start].x, record.points[part_start].y], - fid - )); + polylines.push( + Polyline::new( + &record.points[part_start..=part_end], + fid + ) + ); + + // segment_num = 0; + // for i in part_start+1..=part_end { + // line_segments.push( + // LineWithData::new( + // (fid, segment_num), + // // fid, + // [record.points[i-1].x, record.points[i-1].y], + // [record.points[i].x, record.points[i].y] + // ) + // ); + // segment_num += 1; + // } + + end_nodes.push(Location::new( + [record.points[part_start].x, record.points[part_start].y], + fid + )); - end_nodes.push(Location::new( - [record.points[part_end].x, record.points[part_end].y], - fid - )); + end_nodes.push(Location::new( + [record.points[part_end].x, record.points[part_end].y], + fid + )); - fid += 1; - } + fid += 1; + } - if configurations.verbose_mode { - progress = - (100.0_f64 * (record_num + 1) as f64 / input.num_records as f64) as usize; - if progress != old_progress { - println!("Reading vector: {}%", progress); - old_progress = progress; + if configurations.verbose_mode && inputs.len() == 1 { + progress = (100.0_f64 * (record_num + 1) as f64 / input.num_records as f64) as usize; + let mut old_progress = old_progress.lock().unwrap(); + if progress != *old_progress { + println!("Reading vector: {}%", progress); + *old_progress = progress; + } } } - } - 
let mut num_polylines = polylines.len(); // will be updated after the joins. + let mut num_polylines = polylines.len(); // will be updated after the joins. - // Find all of the segments that can be joined because they link at non-confluences. - let endnode_tree = RTree::bulk_load(end_nodes); - let precision = EPSILON * 10f64; - let mut p1: Point2D; - let mut connections = vec![[num_polylines, num_polylines]; num_polylines]; - let mut connected_polyline: usize; - let mut num_neighbours: usize; - for fid in 0..num_polylines { - // fid = polylines[poly_id].id1; - p1 = polylines[fid].get_first_node(); - let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision); + // Find all of the segments that can be joined because they link at non-confluences. + let endnode_tree = RTree::bulk_load(end_nodes); + let precision = EPSILON * 10f64; + let mut p1: Point2D; + let mut connections = vec![[num_polylines, num_polylines]; num_polylines]; + let mut connected_polyline: usize; + let mut num_neighbours: usize; + for fid in 0..num_polylines { + // fid = polylines[poly_id].id1; + p1 = polylines[fid].get_first_node(); + let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision); - connected_polyline = num_polylines; - num_neighbours = 0; - for p in ret { - if p.data != fid { - connected_polyline = p.data; - num_neighbours += 1; + connected_polyline = num_polylines; + num_neighbours = 0; + for p in ret { + if p.data != fid { + connected_polyline = p.data; + num_neighbours += 1; + } + } + if num_neighbours == 1 { + connections[fid][0] = connected_polyline; } - } - if num_neighbours == 1 { - connections[fid][0] = connected_polyline; - } - p1 = polylines[fid].get_last_node(); - let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision); + p1 = polylines[fid].get_last_node(); + let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision); - connected_polyline = num_polylines; - num_neighbours = 0; - for p in ret { - if p.data != fid { - 
connected_polyline = p.data; - num_neighbours += 1; + connected_polyline = num_polylines; + num_neighbours = 0; + for p in ret { + if p.data != fid { + connected_polyline = p.data; + num_neighbours += 1; + } + } + if num_neighbours == 1 { + connections[fid][1] = connected_polyline; } - } - if num_neighbours == 1 { - connections[fid][1] = connected_polyline; - } - if configurations.verbose_mode { - progress = - (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize; - if progress != old_progress { - println!("Looking for joins in arcs: {}%", progress); - old_progress = progress; + if configurations.verbose_mode && inputs.len() == 1 { + progress = (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize; + let mut old_progress = old_progress.lock().unwrap(); + if progress != *old_progress { + println!("Looking for joins in arcs: {}%", progress); + *old_progress = progress; + } } } - } - // now perform the actual joins - let mut marked_for_deletion = vec![false; num_polylines]; - for fid in 0..num_polylines { - // We're looking for segments where one end is joined and the other end is not. These are - // valid starting segements for chains of joined segments. - // if fid == 21414 || fid == 16471 || fid == 3703 || fid == 3683 { - // println!("{} {} {} {} {}", fid, connections[fid][0], connections[fid][1], marked_for_deletion[fid], num_polylines); - // } - if !marked_for_deletion[fid] { - let is_joined_at_start = connections[fid][0] < num_polylines && connections[fid][1] == num_polylines; - let mut is_joined_at_end = connections[fid][0] == num_polylines && connections[fid][1] < num_polylines; + // now perform the actual joins + let mut marked_for_deletion = vec![false; num_polylines]; + for fid in 0..num_polylines { + // We're looking for segments where one end is joined and the other end is not. These are + // valid starting segements for chains of joined segments. 
// if fid == 21414 || fid == 16471 || fid == 3703 || fid == 3683 { - // println!("{} {} {} {} {}", fid, is_joined_at_start, connections[fid][0], is_joined_at_end, connections[fid][1]); + // println!("{} {} {} {} {}", fid, connections[fid][0], connections[fid][1], marked_for_deletion[fid], num_polylines); // } - if is_joined_at_start || is_joined_at_end { - // let flag_high = fid == 3683; - marked_for_deletion[fid] = true; - // It's a start to a connected chain. - let mut pl = Polyline::new_empty(fid); - if is_joined_at_end { - pl.vertices.extend_from_slice(&polylines[fid].vertices.clone()); - } else { - let mut rev = polylines[fid].vertices.clone(); - rev.reverse(); - pl.vertices.extend_from_slice(&rev); - } - // let mut current_fid = if connections[fid][0] < num_polylines { - // connections[fid][0] - // } else { - // connections[fid][1] - // }; - let mut current_fid = fid; - loop { - // if flag_high { - // let t1 = if connections[current_fid][0] < num_polylines { - // marked_for_deletion[connections[current_fid][0]] - // } else { - // true - // }; - - // let t2 = if connections[current_fid][1] < num_polylines { - // marked_for_deletion[connections[current_fid][1]] - // } else { - // true - // }; - - // println!("{} {} {} {} {}", current_fid, connections[current_fid][0], t1, connections[current_fid][1], t2); - // } - marked_for_deletion[current_fid] = true; - // is_joined_at_end = false; - current_fid = if connections[current_fid][0] < num_polylines && !marked_for_deletion[connections[current_fid][0]] { - connections[current_fid][0] - } else if connections[current_fid][1] < num_polylines && !marked_for_deletion[connections[current_fid][1]] { - // is_joined_at_end = true; - connections[current_fid][1] - } else { - break; - }; - - // which way is it joined? 
- is_joined_at_end = false; - if pl.get_last_node().distance(&polylines[current_fid].get_first_node()) <= precision { - is_joined_at_end = true; - } - + if !marked_for_deletion[fid] { + let is_joined_at_start = connections[fid][0] < num_polylines && connections[fid][1] == num_polylines; + let mut is_joined_at_end = connections[fid][0] == num_polylines && connections[fid][1] < num_polylines; + // if fid == 21414 || fid == 16471 || fid == 3703 || fid == 3683 { + // println!("{} {} {} {} {}", fid, is_joined_at_start, connections[fid][0], is_joined_at_end, connections[fid][1]); + // } + if is_joined_at_start || is_joined_at_end { + // let flag_high = fid == 3683; + marked_for_deletion[fid] = true; + // It's a start to a connected chain. + let mut pl = Polyline::new_empty(fid); if is_joined_at_end { - pl.vertices.extend_from_slice(&polylines[current_fid].vertices.clone()); + pl.vertices.extend_from_slice(&polylines[fid].vertices.clone()); } else { - let mut rev = polylines[current_fid].vertices.clone(); + let mut rev = polylines[fid].vertices.clone(); rev.reverse(); pl.vertices.extend_from_slice(&rev); } - } + // let mut current_fid = if connections[fid][0] < num_polylines { + // connections[fid][0] + // } else { + // connections[fid][1] + // }; + let mut current_fid = fid; + loop { + // if flag_high { + // let t1 = if connections[current_fid][0] < num_polylines { + // marked_for_deletion[connections[current_fid][0]] + // } else { + // true + // }; + + // let t2 = if connections[current_fid][1] < num_polylines { + // marked_for_deletion[connections[current_fid][1]] + // } else { + // true + // }; + + // println!("{} {} {} {} {}", current_fid, connections[current_fid][0], t1, connections[current_fid][1], t2); + // } + marked_for_deletion[current_fid] = true; + // is_joined_at_end = false; + current_fid = if connections[current_fid][0] < num_polylines && !marked_for_deletion[connections[current_fid][0]] { + connections[current_fid][0] + } else if 
connections[current_fid][1] < num_polylines && !marked_for_deletion[connections[current_fid][1]] { + // is_joined_at_end = true; + connections[current_fid][1] + } else { + break; + }; + + // which way is it joined? + is_joined_at_end = false; + if pl.get_last_node().distance(&polylines[current_fid].get_first_node()) <= precision { + is_joined_at_end = true; + } + + if is_joined_at_end { + pl.vertices.extend_from_slice(&polylines[current_fid].vertices.clone()); + } else { + let mut rev = polylines[current_fid].vertices.clone(); + rev.reverse(); + pl.vertices.extend_from_slice(&rev); + } + } - polylines.push(pl); + polylines.push(pl); + } } } - } - for i in (0..num_polylines).rev() { - if marked_for_deletion[i] { - polylines.remove(i); + for i in (0..num_polylines).rev() { + if marked_for_deletion[i] { + polylines.remove(i); + } } - } - num_polylines = polylines.len(); + num_polylines = polylines.len(); - // remove any zero-length segments. - for fid in 0..num_polylines { - for i in (1..polylines[fid].len()).rev() { - if polylines[fid][i].distance(&polylines[fid][i-1]) <= precision { - polylines[fid].vertices.remove(i); + // remove any zero-length segments. 
+ for fid in 0..num_polylines { + for i in (1..polylines[fid].len()).rev() { + if polylines[fid][i].distance(&polylines[fid][i-1]) <= precision { + polylines[fid].vertices.remove(i); + } } } - } @@ -443,266 +521,287 @@ fn run(args: &Vec) -> Result<(), std::io::Error> { - end_nodes = vec![]; - for fid in 0..num_polylines { - polylines[fid].id = fid; + end_nodes = vec![]; + for fid in 0..num_polylines { + polylines[fid].id = fid; - segment_num = 0; - for i in 1..polylines[fid].vertices.len() { - line_segments.push( - GeomWithData::new( - Line::new( - [polylines[fid].vertices[i-1].x, polylines[fid].vertices[i-1].y], - [polylines[fid].vertices[i].x, polylines[fid].vertices[i].y] - ), - (fid, segment_num) - ) - ); - segment_num += 1; - } + segment_num = 0; + for i in 1..polylines[fid].vertices.len() { + line_segments.push( + GeomWithData::new( + Line::new( + [polylines[fid].vertices[i-1].x, polylines[fid].vertices[i-1].y], + [polylines[fid].vertices[i].x, polylines[fid].vertices[i].y] + ), + (fid, segment_num) + ) + ); + segment_num += 1; + } - p1 = polylines[fid].get_first_node(); - end_nodes.push(Location::new( - [p1.x, p1.y], - fid - )); + p1 = polylines[fid].get_first_node(); + end_nodes.push(Location::new( + [p1.x, p1.y], + fid + )); - p1 = polylines[fid].get_last_node(); - end_nodes.push(Location::new( - [p1.x, p1.y], - fid - )); + p1 = polylines[fid].get_last_node(); + end_nodes.push(Location::new( + [p1.x, p1.y], + fid + )); - if configurations.verbose_mode { - progress = - (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize; - if progress != old_progress { - println!("Looking for dangling arcs: {}%", progress); - old_progress = progress; + if configurations.verbose_mode && inputs.len() == 1 { + progress = (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize; + let mut old_progress = old_progress.lock().unwrap(); + if progress != *old_progress { + println!("Looking for dangling arcs: {}%", progress); + *old_progress = progress; + } } } - } - 
let endnode_tree = RTree::bulk_load(end_nodes); - let line_segments_tree = RTree::bulk_load(line_segments); - let snap_dist_sq = snap_dist * snap_dist; - // let mut points: Vec; - let mut num_vertices: usize; - let mut min_dist: f64; - let mut dist: f64; - let mut point = Point2D::new(0f64, 0f64); // just to satisfy the need to initialize. - let mut p2: Point2D; - let mut line_seg: LineSegment; - let mut line_seg2: LineSegment = LineSegment::new(Point2D::new(0f64, 0f64), Point2D::new(0f64, 0f64)); - let mut joined_feature: usize = 0; - for poly_id in 0..polylines.len() { - fid = polylines[poly_id].id; - p1 = polylines[fid].get_first_node(); - let ret = line_segments_tree.locate_within_distance([p1.x, p1.y], snap_dist_sq); - // See if any of the line segments within the snap distance are from a different polyline. - // If so, find the nearest point. - min_dist = f64::INFINITY; - for line in ret { - if line.data.0 != fid { - let geom = line.geom(); - let p = geom.nearest_point(&[p1.x, p1.y]); - p2 = Point2D::new(p[0], p[1]); - dist = p1.distance(&p2); - if dist < min_dist { - min_dist = dist; - point = p2; - segment_num = line.data.1; - joined_feature = line.data.0; - line_seg2 = LineSegment::new( - Point2D::new(geom.from[0], geom.from[1]), - Point2D::new(geom.to[0], geom.to[1]) - ); + let endnode_tree = RTree::bulk_load(end_nodes); + let line_segments_tree = RTree::bulk_load(line_segments); + let snap_dist_sq = snap_dist * snap_dist; + // let mut points: Vec; + let mut num_vertices: usize; + let mut min_dist: f64; + let mut dist: f64; + let mut point = Point2D::new(0f64, 0f64); // just to satisfy the need to initialize. 
+ let mut p2: Point2D; + let mut line_seg: LineSegment; + let mut line_seg2: LineSegment = LineSegment::new(Point2D::new(0f64, 0f64), Point2D::new(0f64, 0f64)); + let mut joined_feature: usize = 0; + for poly_id in 0..polylines.len() { + fid = polylines[poly_id].id; + p1 = polylines[fid].get_first_node(); + let ret = line_segments_tree.locate_within_distance([p1.x, p1.y], snap_dist_sq); + // See if any of the line segments within the snap distance are from a different polyline. + // If so, find the nearest point. + min_dist = f64::INFINITY; + for line in ret { + if line.data.0 != fid { + let geom = line.geom(); + let p = geom.nearest_point(&[p1.x, p1.y]); + p2 = Point2D::new(p[0], p[1]); + dist = p1.distance(&p2); + if dist < min_dist { + min_dist = dist; + point = p2; + segment_num = line.data.1; + joined_feature = line.data.0; + line_seg2 = LineSegment::new( + Point2D::new(geom.from[0], geom.from[1]), + Point2D::new(geom.to[0], geom.to[1]) + ); + } } } - } - // how many endnodes is this endnode in contact with? This is for y-junctions - let ret_endnodes = endnode_tree.locate_within_distance([p1.x, p1.y], precision); - num_neighbours = 0; - for p in ret_endnodes { - if p.data != fid { - num_neighbours += 1; + // how many endnodes is this endnode in contact with? This is for y-junctions + let ret_endnodes = endnode_tree.locate_within_distance([p1.x, p1.y], precision); + num_neighbours = 0; + for p in ret_endnodes { + if p.data != fid { + num_neighbours += 1; + } } - } - if (min_dist.is_finite() && min_dist > precision) || (min_dist <= precision && num_neighbours == 0) { - // Is it an undershoot or an overshoot? - // if it is an overshoot, then the nearest point will have a distance of zero with - // the current line segment too. That is, it will be coincident. - line_seg = LineSegment::new(p1, polylines[fid][1]); - - if (line_seg.dist_to_segment(point) - min_dist).abs() <= precision { - // It's an undershoot, add the point to the start of the polyline. 
- polylines[fid].insert(0, point); - // all the split indices will be one less than they should be now that we've - // inserted a vertex at the start. - polylines[fid].splits_offset_by_one = true; - polylines[joined_feature].insert_split_point(segment_num, point); - // points.push(point); - } else { // It's an overshoot. - point = match line_seg.get_intersection(&line_seg2) { - Some(ls) => ls.p1, - None => point // do nothing - }; - if polylines[fid][1].distance(&point) > precision { + if (min_dist.is_finite() && min_dist > precision) || (min_dist <= precision && num_neighbours == 0) { + // Is it an undershoot or an overshoot? + // if it is an overshoot, then the nearest point will have a distance of zero with + // the current line segment too. That is, it will be coincident. + line_seg = LineSegment::new(p1, polylines[fid][1]); + + if (line_seg.dist_to_segment(point) - min_dist).abs() <= precision { + // It's an undershoot, add the point to the start of the polyline. polylines[fid].insert(0, point); - polylines[fid].remove(1); + // all the split indices will be one less than they should be now that we've + // inserted a vertex at the start. + polylines[fid].splits_offset_by_one = true; + polylines[joined_feature].insert_split_point(segment_num, point); + // points.push(point); + } else { // It's an overshoot. 
+ point = match line_seg.get_intersection(&line_seg2) { + Some(ls) => ls.p1, + None => point // do nothing + }; + if polylines[fid][1].distance(&point) > precision { + polylines[fid].insert(0, point); + polylines[fid].remove(1); + } + polylines[joined_feature].insert_split_point(segment_num, point); } - polylines[joined_feature].insert_split_point(segment_num, point); } - } - p1 = polylines[fid].get_last_node(); - let ret = line_segments_tree.locate_within_distance([p1.x, p1.y], snap_dist_sq); - min_dist = f64::INFINITY; - for line in ret { - if line.data.0 != fid { - let geom = line.geom(); - let p = geom.nearest_point(&[p1.x, p1.y]); - p2 = Point2D::new(p[0], p[1]); - dist = p1.distance(&p2); - if dist < min_dist { - min_dist = dist; - point = p2; - segment_num = line.data.1; - joined_feature = line.data.0; - line_seg2 = LineSegment::new( - Point2D::new(geom.from[0], geom.from[1]), - Point2D::new(geom.to[0], geom.to[1]) - ); + p1 = polylines[fid].get_last_node(); + let ret = line_segments_tree.locate_within_distance([p1.x, p1.y], snap_dist_sq); + min_dist = f64::INFINITY; + for line in ret { + if line.data.0 != fid { + let geom = line.geom(); + let p = geom.nearest_point(&[p1.x, p1.y]); + p2 = Point2D::new(p[0], p[1]); + dist = p1.distance(&p2); + if dist < min_dist { + min_dist = dist; + point = p2; + segment_num = line.data.1; + joined_feature = line.data.0; + line_seg2 = LineSegment::new( + Point2D::new(geom.from[0], geom.from[1]), + Point2D::new(geom.to[0], geom.to[1]) + ); + } } } - } - // how many endnodes is this endnode in contact with? This is for y-junctions - let ret_endnodes = endnode_tree.locate_within_distance([p1.x, p1.y], precision); - num_neighbours = 0; - for p in ret_endnodes { - if p.data != fid { - num_neighbours += 1; + // how many endnodes is this endnode in contact with? 
This is for y-junctions + let ret_endnodes = endnode_tree.locate_within_distance([p1.x, p1.y], precision); + num_neighbours = 0; + for p in ret_endnodes { + if p.data != fid { + num_neighbours += 1; + } } - } - if (min_dist.is_finite() && min_dist > precision) || (min_dist <= precision && num_neighbours == 0) { - // if min_dist.is_finite() && min_dist >= precision { - // Is it an undershoot or an overshoot? - // if it is an overshoot, then the nearest point will have a distance of zero with - // the current line segment too. That is, it will be coincident. - line_seg = LineSegment::new(polylines[fid][polylines[fid].len()-2], p1); - if (line_seg.dist_to_segment(point) - min_dist).abs() <= precision { - // It's an undershoot, add the line end point. - // points.push(record.points[part_end].clone()); - polylines[fid].push(point); - polylines[joined_feature].insert_split_point(segment_num, point); - } else { // It's an overshoot - num_vertices = polylines[fid].len(); - polylines[fid].remove(num_vertices-1); - - point = match line_seg.get_intersection(&line_seg2) { - Some(ls) => ls.p1, - None => point // do nothing - }; - polylines[fid].push(point); - polylines[joined_feature].insert_split_point(segment_num, point); + if (min_dist.is_finite() && min_dist > precision) || (min_dist <= precision && num_neighbours == 0) { + // if min_dist.is_finite() && min_dist >= precision { + // Is it an undershoot or an overshoot? + // if it is an overshoot, then the nearest point will have a distance of zero with + // the current line segment too. That is, it will be coincident. + line_seg = LineSegment::new(polylines[fid][polylines[fid].len()-2], p1); + if (line_seg.dist_to_segment(point) - min_dist).abs() <= precision { + // It's an undershoot, add the line end point. 
+ // points.push(record.points[part_end].clone()); + polylines[fid].push(point); + polylines[joined_feature].insert_split_point(segment_num, point); + } else { // It's an overshoot + num_vertices = polylines[fid].len(); + polylines[fid].remove(num_vertices-1); + + point = match line_seg.get_intersection(&line_seg2) { + Some(ls) => ls.p1, + None => point // do nothing + }; + polylines[fid].push(point); + polylines[joined_feature].insert_split_point(segment_num, point); + } } - } - if configurations.verbose_mode { - progress = - (100.0_f64 * (poly_id + 1) as f64 / polylines.len() as f64) as usize; - if progress != old_progress { - println!("Looking for dangling arcs: {}%", progress); - old_progress = progress; + if configurations.verbose_mode && inputs.len() == 1 { + progress = (100.0_f64 * (poly_id + 1) as f64 / polylines.len() as f64) as usize; + let mut old_progress = old_progress.lock().unwrap(); + if progress != *old_progress { + println!("Looking for dangling arcs: {}%", progress); + *old_progress = progress; + } } } - } - // Deal with the splits. - let mut polylines2 = vec![]; - for poly_id in 0..polylines.len() { - if polylines[poly_id].split_points.len() == 0 { - polylines2.push(polylines[poly_id].clone()); - } else { - let splits = polylines[poly_id].split(); - for pl in splits { - polylines2.push(pl.clone()); + // Deal with the splits. + let mut polylines2 = vec![]; + for poly_id in 0..polylines.len() { + if polylines[poly_id].split_points.len() == 0 { + polylines2.push(polylines[poly_id].clone()); + } else { + let splits = polylines[poly_id].split(); + for pl in splits { + polylines2.push(pl.clone()); + } } } - } - // remove any zero-length segments. - for fid in 0..polylines2.len() { - for i in (1..polylines2[fid].len()).rev() { - if polylines2[fid][i].distance(&polylines2[fid][i-1]) <= precision { - polylines2[fid].vertices.remove(i); + // remove any zero-length segments. 
+ for fid in 0..polylines2.len() { + for i in (1..polylines2[fid].len()).rev() { + if polylines2[fid][i].distance(&polylines2[fid][i-1]) <= precision { + polylines2[fid].vertices.remove(i); + } } } - } - // Find segments that have a gap at their endnodes and can be joined. - - - // create output file - let mut output = Shapefile::initialize_using_file(&output_file, &input, ShapeType::PolyLine, false)?; - - // add the attributes - // let in_atts = input.attributes.get_fields(); - - // output.attributes.add_fields(&in_atts); - output.attributes.add_field( - &AttributeField::new( - "FID", - FieldDataType::Int, - 7u8, - 0u8 - ) - ); - - let mut sfg: ShapefileGeometry; - // let mut record_num: usize; - for poly_id in 0..polylines2.len() { - sfg = ShapefileGeometry::new(ShapeType::PolyLine); - sfg.add_part(&polylines2[poly_id].vertices); - output.add_record(sfg); - - // record_num = polylines2[poly_id].id2; - // let att_data = input.attributes.get_record(record_num); - // output.attributes.add_record(att_data.clone(), false); - output.attributes.add_record(vec![FieldData::Int((poly_id + 1) as i32)], false); + // Find segments that have a gap at their endnodes and can be joined. 
- if configurations.verbose_mode { - progress = - (100.0_f64 * (poly_id + 1) as f64 / polylines2.len() as f64) as usize; - if progress != old_progress { - println!("Looking for dangling arcs: {}%", progress); - old_progress = progress; + + // create output file + let mut output = Shapefile::initialize_using_file(&output_file, &input, ShapeType::PolyLine, false).expect("Error creating output file"); //?; + + // add the attributes + // let in_atts = input.attributes.get_fields(); + + // output.attributes.add_fields(&in_atts); + output.attributes.add_field( + &AttributeField::new( + "FID", + FieldDataType::Int, + 7u8, + 0u8 + ) + ); + + let mut sfg: ShapefileGeometry; + // let mut record_num: usize; + for poly_id in 0..polylines2.len() { + sfg = ShapefileGeometry::new(ShapeType::PolyLine); + sfg.add_part(&polylines2[poly_id].vertices); + output.add_record(sfg); + + // record_num = polylines2[poly_id].id2; + // let att_data = input.attributes.get_record(record_num); + // output.attributes.add_record(att_data.clone(), false); + output.attributes.add_record(vec![FieldData::Int((poly_id + 1) as i32)], false); + + if configurations.verbose_mode && inputs.len() == 1 { + progress = (100.0_f64 * (poly_id + 1) as f64 / polylines2.len() as f64) as usize; + let mut old_progress = old_progress.lock().unwrap(); + if progress != *old_progress { + println!("Looking for dangling arcs: {}%", progress); + *old_progress = progress; + } } } - } - - if configurations.verbose_mode { - println!("Saving data...") - }; - let _ = match output.write() { - Ok(_) => { - if configurations.verbose_mode { - println!("Output file written") + { + let mut output_num_features = output_num_features.lock().expect("Error unlocking output_num_features."); + *output_num_features += polylines2.len(); + } + + + if configurations.verbose_mode && inputs.len() == 1 { + println!("Saving data...") + }; + + output.write().expect("Error writing file."); + + + if inputs.len() > 1 { + { + let mut tiles_completed = 
tiles_completed.lock().unwrap(); + *tiles_completed += 1; + progress = (100.0_f64 * *tiles_completed as f64 / inputs.len() as f64) as usize; + let mut old_progress = old_progress.lock().unwrap(); + if progress != *old_progress { + println!("Progress: {}%", progress); + *old_progress = progress; + } } } - Err(e) => return Err(e), - }; + }); + + let input_num_features = input_num_features.lock().expect("Error locking output_num_features."); + println!("Num. input line features: {}", *input_num_features); + + let output_num_features = output_num_features.lock().expect("Error locking output_num_features."); + println!("Num. output line features: {}", *output_num_features); + let elapsed_time = get_formatted_elapsed_time(start); diff --git a/whitebox-plugins/src/repair_stream_vector_topology/repair_stream_vector_topology.json b/whitebox-plugins/src/repair_stream_vector_topology/repair_stream_vector_topology.json index 33f1c64c2..f38a60250 100755 --- a/whitebox-plugins/src/repair_stream_vector_topology/repair_stream_vector_topology.json +++ b/whitebox-plugins/src/repair_stream_vector_topology/repair_stream_vector_topology.json @@ -13,7 +13,7 @@ "description": "Name of the input lines vector file.", "parameter_type": {"ExistingFile":{"Vector":"Line"}}, "default_value": null, - "optional": false + "optional": true }, { "name": "Output Lines", @@ -21,7 +21,7 @@ "description": "Name of the output lines vector file.", "parameter_type": {"NewFile":{"Vector":"Line"}}, "default_value": null, - "optional": false + "optional": true }, { "name": "Snap Distance", diff --git a/whitebox-plugins/src/vector_stream_network_analysis/main.rs b/whitebox-plugins/src/vector_stream_network_analysis/main.rs index 57c890ed0..b17b7de34 100644 --- a/whitebox-plugins/src/vector_stream_network_analysis/main.rs +++ b/whitebox-plugins/src/vector_stream_network_analysis/main.rs @@ -647,10 +647,8 @@ fn run(args: &Vec) -> Result<(), std::io::Error> { if is_exterior || crosses_nodata[i] { 
is_exterior_link[i] = true; - if link_min_elev[i] <= z || crosses_nodata[i] { - - z = link_min_elev[i]; - queue.push(StreamLink{ index: i, min: z + max_ridge_cutting_height }); + if link_min_elev[i] <= z || crosses_nodata[i] { + queue.push(StreamLink{ index: i, min: link_min_elev[i] + max_ridge_cutting_height }); } } } @@ -920,6 +918,7 @@ fn run(args: &Vec) -> Result<(), std::io::Error> { while !stack.is_empty() { let i = stack.pop().expect("Error during pop operation."); link_mag[i] += link_lengths[i]; + max_upstream_length[i] += link_lengths[i]; dsn = downstream_link[i]; if dsn >= 0isize { // pass this downstream @@ -935,8 +934,8 @@ fn run(args: &Vec) -> Result<(), std::io::Error> { strahler_order[dsn as usize] = strahler_order[i]; } - if max_upstream_length[i] + link_lengths[i] > max_upstream_length[dsn as usize] { - max_upstream_length[dsn as usize] = max_upstream_length[i] + link_lengths[i]; + if max_upstream_length[i] > max_upstream_length[dsn as usize] { + max_upstream_length[dsn as usize] = max_upstream_length[i]; } shreve_order[dsn as usize] += shreve_order[i]; @@ -988,7 +987,7 @@ fn run(args: &Vec) -> Result<(), std::io::Error> { neighbour_list.clear(); let mut max_tucl = 0f64; let mut max_tucl_link = -1isize; - for pt in link_key_points[i].get_all_points() { // (XYPoint pt : linkKeyPoints[i].getAllPoints()) { + for pt in link_key_points[i].get_all_points() { x = pt.x; y = pt.y; let ret = points_tree.within(&[x, y], precision, &squared_euclidean).unwrap(); @@ -997,8 +996,9 @@ fn run(args: &Vec) -> Result<(), std::io::Error> { id = *ret[j].1; if downstream_link[id] == i as isize { neighbour_list.push(id); - if link_mag[id] > max_tucl { - max_tucl = link_mag[id]; + // if link_mag[id] > max_tucl { + if max_upstream_length[id] > max_tucl { + max_tucl = max_upstream_length[id]; // link_mag[id]; max_tucl_link = id as isize; } } diff --git a/whitebox-runner/Cargo.toml b/whitebox-runner/Cargo.toml index 05c10e7d8..a5f0f914f 100644 --- a/whitebox-runner/Cargo.toml 
+++ b/whitebox-runner/Cargo.toml @@ -15,6 +15,7 @@ persistence = [ [dependencies] anyhow = "1.0.66" case = "1.0.0" +copypasta = "0.8.2" egui = "0.19.0" egui_extras = { version = "0.19.0", features = ["image"] } eframe = "0.19.0" diff --git a/whitebox-runner/WBT_icon.png b/whitebox-runner/WBT_icon.png index 5b466ac26..e79aa306e 100644 Binary files a/whitebox-runner/WBT_icon.png and b/whitebox-runner/WBT_icon.png differ diff --git a/whitebox-runner/WBT_icon2.png b/whitebox-runner/WBT_icon2.png new file mode 100644 index 000000000..5b466ac26 Binary files /dev/null and b/whitebox-runner/WBT_icon2.png differ diff --git a/whitebox-runner/src/extension.rs b/whitebox-runner/src/extension.rs index 460bbaf3d..2165f31d2 100644 --- a/whitebox-runner/src/extension.rs +++ b/whitebox-runner/src/extension.rs @@ -1,7 +1,9 @@ +extern crate copypasta; use crate::MyApp; use reqwest; use std::{env, ffi, fs, io, path, process, time}; use anyhow::{bail, Result}; +use copypasta::{ClipboardContext, ClipboardProvider}; #[derive(Default)] pub struct ExtensionInstall { @@ -72,6 +74,18 @@ impl MyApp { egui::TextEdit::singleline(&mut self.ei.activation_key) .desired_width(self.state.textbox_width) ); + if ui.button("📋").clicked() { + // ui.output_mut(|o| o.copied_text = "some_text".to_string()); + // self.ei.activation_key = "hello world".to_string(); // ui.output().copied_text.clone(); + let mut ctx = ClipboardContext::new().unwrap(); + + // let msg = "Hello, world!"; + // ctx.set_contents(msg.to_owned()).unwrap(); + + let content = ctx.get_contents().unwrap_or("Clipboard empty".to_string()); + + self.ei.activation_key = content; + } ui.end_row(); }); diff --git a/whitebox-tools-app/src/tools/data_tools/raster_to_vector_polygons.rs b/whitebox-tools-app/src/tools/data_tools/raster_to_vector_polygons.rs index b61f0cd13..b21f349ad 100755 --- a/whitebox-tools-app/src/tools/data_tools/raster_to_vector_polygons.rs +++ b/whitebox-tools-app/src/tools/data_tools/raster_to_vector_polygons.rs @@ -496,6 
+496,10 @@ impl WhiteboxTool for RasterToVectorPolygons { if is_clockwise_order(&points) { points.reverse(); } + } else { + if !is_clockwise_order(&points) { + points.reverse(); + } } geometries[z as usize - 1].add_part(&points); } diff --git a/whitebox-tools-app/src/tools/data_tools/vector_points_to_raster.rs b/whitebox-tools-app/src/tools/data_tools/vector_points_to_raster.rs index f9163f2d8..258957f07 100755 --- a/whitebox-tools-app/src/tools/data_tools/vector_points_to_raster.rs +++ b/whitebox-tools-app/src/tools/data_tools/vector_points_to_raster.rs @@ -85,7 +85,7 @@ impl VectorPointsToRaster { parameters.push(ToolParameter{ name: "Assignment Operation".to_owned(), flags: vec!["--assign".to_owned()], - description: "Assignment operation, where multiple points are in the same grid cell; options include 'first', 'last' (default), 'min', 'max', 'sum', 'number'".to_owned(), + description: "Assignment operation, where multiple points are in the same grid cell; options include 'first', 'last' (default), 'min', 'max', 'mean', 'sum', 'number'".to_owned(), parameter_type: ParameterType::OptionList(vec!["first".to_owned(), "last".to_owned(), "min".to_owned(), "max".to_owned(), "sum".to_owned(), "number".to_owned()]), default_value: Some("last".to_owned()), optional: true diff --git a/whitebox-tools-app/src/tools/gis_analysis/deviation_from_regional_direction.rs b/whitebox-tools-app/src/tools/gis_analysis/deviation_from_regional_direction.rs new file mode 100644 index 000000000..429da7926 --- /dev/null +++ b/whitebox-tools-app/src/tools/gis_analysis/deviation_from_regional_direction.rs @@ -0,0 +1,447 @@ +/* +This tool is part of the WhiteboxTools geospatial analysis library. +Authors: Dr. 
John Lindsay +Created: 24/05/2023 +Last Modified: 24/05/2023 +License: MIT +*/ + +use crate::tools::*; +use whitebox_common::algorithms::{minimum_bounding_box, MinimizationCriterion}; +use whitebox_common::structures::Point2D; +use whitebox_vector::*; +use std::env; +use std::f64; +use std::io::{Error, ErrorKind}; +use std::path; + +/// This tool calculates the orientation of polygon features based on the slope of a reduced major +/// axis (RMA) regression line. The regression analysis use the vertices of the exterior hull nodes +/// of a vector polygon. The only required input is the name of the vector polygon file. The +/// orientation values, measured in degrees from north, will be placed in the accompanying attribute +/// table as a new field (ORIENT). The value of the orientation measure for any polygon will +/// depend on how elongated the feature is. +/// +/// Note that the output values are polygon orientations and not true directions. While directions +/// may take values ranging from 0-360, orientation is expressed as an angle between 0 and 180 degrees +/// clockwise from north. Lastly, the orientation measure may become unstable when polygons are +/// oriented nearly vertical or horizontal. 
+/// +/// # See Also +/// `LinearityIndex`, `ElongationRatio` +pub struct DeviationFromRegionalDirection { + name: String, + description: String, + toolbox: String, + parameters: Vec, + example_usage: String, +} + +impl DeviationFromRegionalDirection { + pub fn new() -> DeviationFromRegionalDirection { + // public constructor + let name = "DeviationFromRegionalDirection".to_string(); + let toolbox = "GIS Analysis/Patch Shape Tools".to_string(); + let description = "Calculates the orientation of vector polygons.".to_string(); + + let mut parameters = vec![]; + parameters.push(ToolParameter { + name: "Input Vector Polygon File".to_owned(), + flags: vec!["-i".to_owned(), "--input".to_owned()], + description: "Input vector polygon file.".to_owned(), + parameter_type: ParameterType::ExistingFile(ParameterFileType::Vector( + VectorGeometryType::Polygon, + )), + default_value: None, + optional: false, + }); + + parameters.push(ToolParameter { + name: "Elongation Threshold (0.05-0.95)".to_owned(), + flags: vec!["--elong_threshold".to_owned()], + description: "Elongation threshold used in determining which polygons are used to estimate the regional direction (0.05-0.95).".to_owned(), + parameter_type: ParameterType::Float, + default_value: Some("0.75".to_string()), + optional: true, + }); + + let sep: String = path::MAIN_SEPARATOR.to_string(); + let e = format!("{}", env::current_exe().unwrap().display()); + let mut parent = env::current_exe().unwrap(); + parent.pop(); + let p = format!("{}", parent.display()); + let mut short_exe = e + .replace(&p, "") + .replace(".exe", "") + .replace(".", "") + .replace(&sep, ""); + if e.contains(".exe") { + short_exe += ".exe"; + } + let usage = format!( + ">>.*{0} -r={1} -v --wd=\"*path*to*data*\" --input=polygons.shp", + short_exe, name + ) + .replace("*", &sep); + + DeviationFromRegionalDirection { + name: name, + description: description, + toolbox: toolbox, + parameters: parameters, + example_usage: usage, + } + } +} + +impl 
WhiteboxTool for DeviationFromRegionalDirection { + fn get_source_file(&self) -> String { + String::from(file!()) + } + + fn get_tool_name(&self) -> String { + self.name.clone() + } + + fn get_tool_description(&self) -> String { + self.description.clone() + } + + fn get_tool_parameters(&self) -> String { + let mut s = String::from("{\"parameters\": ["); + for i in 0..self.parameters.len() { + if i < self.parameters.len() - 1 { + s.push_str(&(self.parameters[i].to_string())); + s.push_str(","); + } else { + s.push_str(&(self.parameters[i].to_string())); + } + } + s.push_str("]}"); + s + } + + fn get_example_usage(&self) -> String { + self.example_usage.clone() + } + + fn get_toolbox(&self) -> String { + self.toolbox.clone() + } + + fn run<'a>( + &self, + args: Vec, + working_directory: &'a str, + verbose: bool, + ) -> Result<(), Error> { + let mut input_file = String::new(); + let mut elongation_threshold = 0.75; + + if args.len() == 0 { + return Err(Error::new( + ErrorKind::InvalidInput, + "Tool run with no parameters.", + )); + } + for i in 0..args.len() { + let mut arg = args[i].replace("\"", ""); + arg = arg.replace("\'", ""); + let cmd = arg.split("="); // in case an equals sign was used + let vec = cmd.collect::>(); + let mut keyval = false; + if vec.len() > 1 { + keyval = true; + } + let flag_val = vec[0].to_lowercase().replace("--", "-"); + if flag_val == "-i" || flag_val == "-input" { + input_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-elong_threshold" { + elongation_threshold = if keyval { + vec[1] + .to_string() + .parse::() + .expect(&format!("Error parsing {}", flag_val)) + } else { + args[i + 1] + .to_string() + .parse::() + .expect(&format!("Error parsing {}", flag_val)) + }; + } + } + + let mut progress: usize; + let mut old_progress: usize = 1; + + if verbose { + let tool_name = self.get_tool_name(); + let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28); + // 28 = 
length of the 'Powered by' by statement. + println!("{}", "*".repeat(welcome_len)); + println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len())); + println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28)); + println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23)); + println!("{}", "*".repeat(welcome_len)); + } + + let sep: String = path::MAIN_SEPARATOR.to_string(); + + if !input_file.contains(&sep) && !input_file.contains("/") { + input_file = format!("{}{}", working_directory, input_file); + } + + if elongation_threshold < 0.05 { elongation_threshold = 0.05; } + if elongation_threshold > 0.95 { elongation_threshold = 0.95; } + + if verbose { + println!("Reading data...") + }; + + let input = Shapefile::read(&input_file)?; + + let start = Instant::now(); + + // make sure the input vector file is of points type + if input.header.shape_type.base_shape_type() != ShapeType::Polygon { + return Err(Error::new( + ErrorKind::InvalidInput, + "The input vector data must be of POLYGON base shape type.", + )); + } + + // First, calculate the mean direction of the polygon set. This is weighted by + // the polygon length and it's elongation, such that w = L * (1 - S/L) where + // L is the long-axis length and S is the short-axis length. Thus, more weight + // is assigned to larger, more elongated polygons. 
+ + // create output file + let mut output = Shapefile::initialize_using_file(&input_file, &input, input.header.shape_type, true)?; + + // add the attributes + output.attributes.add_field(&AttributeField::new( + "DEV_DIR", + FieldDataType::Real, + 7u8, + 4u8, + )); + + let mut part_start: usize; + let mut part_end: usize; + let mut midpoint_x: f64; + let mut midpoint_y: f64; + let mut n: f64; + // let mut r_squared: f64; + let mut slope_deg_rma: f64; + let mut slope_rma: f64; + let (mut x, mut y): (f64, f64); + let mut sigma_x: f64; + let mut sigma_y: f64; + let mut sigma_xy: f64; + let mut sigma_xsqr: f64; + let mut sigma_ysqr: f64; + let mut mean: f64; + let mut sxx: f64; + let mut syy: f64; + // let mut sxy: f64; + let mut dist1: f64; + let mut dist2: f64; + let mut short_axis: f64; + let mut long_axis: f64; + let mut elongation: f64; + let mut sum_sin = 0.0; + let mut sum_cos = 0.0; + let mut weight: f64; + let mut num_polys_used = 0; + // let mut r_squared: f64; + for record_num in 0..input.num_records { + let record = input.get_record(record_num); + midpoint_x = (record.x_max - record.x_min) / 2f64; + midpoint_y = (record.y_max - record.y_min) / 2f64; + sigma_x = 0f64; + sigma_y = 0f64; + sigma_xy = 0f64; + sigma_xsqr = 0f64; + sigma_ysqr = 0f64; + // r_squared = 0f64; + part_start = record.parts[0] as usize; + part_end = if record.num_parts > 1 { + record.parts[1] as usize - 1 + } else { + record.num_points as usize - 1 + }; + n = (part_end - part_start + 1) as f64; + let mut points: Vec = Vec::with_capacity(record.num_points as usize); + for i in part_start..=part_end { + x = record.points[i].x - midpoint_x; + y = record.points[i].y - midpoint_y; + sigma_x += x; + sigma_y += y; + sigma_xy += x * y; + sigma_xsqr += x * x; + sigma_ysqr += y * y; + + points.push(Point2D::new(record.points[i].x, record.points[i].y)); + } + + mean = sigma_x / n; + + sxx = sigma_xsqr / n - mean * mean; + syy = sigma_ysqr / n - (sigma_y / n) * (sigma_y / n); + // sxy = sigma_xy / 
n - (sigma_x * sigma_y) / (n * n); + // if (sxx * syy).sqrt() != 0f64 { + // r_squared = (sxy / (sxx * syy).sqrt()) * (sxy / (sxx * syy).sqrt()); + // } + + // Calculate the slope of the Reduced Major Axis (RMA) + slope_rma = (syy / sxx).sqrt(); + if (sigma_xy - mean * sigma_y) / (sigma_xsqr - mean * sigma_x) < 0f64 { + slope_rma = -slope_rma; + } + + let mbb_points = minimum_bounding_box(&mut points, MinimizationCriterion::Area); + + // now calculate the distance between the first and second points and the second and third points + dist1 = mbb_points[0].distance(&mbb_points[1]); + dist2 = mbb_points[1].distance(&mbb_points[2]); + + // get the short and long axes + short_axis = dist1.min(dist2); + long_axis = dist1.max(dist2); + + // calculate the elongation and the weight + elongation = 1f64 - short_axis / long_axis; + weight = if elongation >= elongation_threshold { + num_polys_used += 1; + long_axis * elongation + } else { + 0.0 // If the poly isn't elongated enough, don't use it in caluclating the mean regional poly direction + }; + + // towards calculating the mean regional poly direction + sum_sin += (slope_rma.atan()*2.0).sin() * weight; // multiply by 2 because these are axial data, not true directions + sum_cos += (slope_rma.atan()*2.0).cos() * weight; + // sum_sin += slope_rma.atan().sin() * weight; + // sum_cos += slope_rma.atan().cos() * weight; + + if verbose { + progress = + (100.0_f64 * (record_num + 1) as f64 / input.num_records as f64) as usize; + if progress != old_progress { + println!("Progress: {}%", progress); + old_progress = progress; + } + } + } + + if num_polys_used == 0 { + return Err(Error::new( + ErrorKind::InvalidInput, + "No polygons in the dataset have an elongation ratio greater than the threshold. 
Lower the elongation threshold value and try again.", + )); + } + + // Calculate the weighted regional weighted mean direciton of the polygons + let mut regional_angle = -(sum_sin.atan2(sum_cos) / 2.0).to_degrees() + 90.0; // divided by 2 because these are axial data not true directions + // let mut regional_angle = -(sum_sin.atan2(sum_cos)).to_degrees() + 90.0; + if regional_angle < 0.0 { regional_angle = 180.0 + regional_angle; } + + if verbose { + println!("Regional weighted mean polygon direction: {:.3} degrees", regional_angle); + } + + + let mut deviation_angle: f64; + for record_num in 0..input.num_records { + let record = input.get_record(record_num); + midpoint_x = (record.x_max - record.x_min) / 2f64; + midpoint_y = (record.y_max - record.y_min) / 2f64; + sigma_x = 0f64; + sigma_y = 0f64; + sigma_xy = 0f64; + sigma_xsqr = 0f64; + sigma_ysqr = 0f64; + part_start = record.parts[0] as usize; + part_end = if record.num_parts > 1 { + record.parts[1] as usize - 1 + } else { + record.num_points as usize - 1 + }; + n = (part_end - part_start + 1) as f64; + for i in part_start..=part_end { + x = record.points[i].x - midpoint_x; + y = record.points[i].y - midpoint_y; + sigma_x += x; + sigma_y += y; + sigma_xy += x * y; + sigma_xsqr += x * x; + sigma_ysqr += y * y; + } + + mean = sigma_x / n; + + sxx = sigma_xsqr / n - mean * mean; + syy = sigma_ysqr / n - (sigma_y / n) * (sigma_y / n); + + // Calculate the slope of the Reduced Major Axis (RMA) + slope_rma = (syy / sxx).sqrt(); + if (sigma_xy - mean * sigma_y) / (sigma_xsqr - mean * sigma_x) < 0f64 { + slope_rma = -slope_rma; + } + slope_deg_rma = slope_rma.atan().to_degrees(); + slope_deg_rma = if slope_deg_rma < 0f64 { + 90f64 + -1f64 * slope_deg_rma + } else { + 90f64 - slope_deg_rma + }; + + deviation_angle = slope_deg_rma - regional_angle; + if deviation_angle < 0.0 { + deviation_angle += 180.0; + } + if deviation_angle > 90.0 { + deviation_angle = 180.0 - deviation_angle; + } + + let record_out = 
record.clone(); + output.add_record(record_out); + + let mut atts = input.attributes.get_record(record_num); + atts.push(FieldData::Real(deviation_angle)); + output.attributes.add_record(atts, false); + + if verbose { + progress = + (100.0_f64 * (record_num + 1) as f64 / input.num_records as f64) as usize; + if progress != old_progress { + println!("Progress: {}%", progress); + old_progress = progress; + } + } + } + + if verbose { + println!("Saving data...") + }; + let _ = match output.write() { + Ok(_) => { + if verbose { + println!("Output file written") + } + } + Err(e) => return Err(e), + }; + + let elapsed_time = get_formatted_elapsed_time(start); + + if verbose { + println!("{}", &format!("Elapsed Time: {}", elapsed_time)); + } + + Ok(()) + } +} diff --git a/whitebox-tools-app/src/tools/gis_analysis/mod.rs b/whitebox-tools-app/src/tools/gis_analysis/mod.rs index 549b694fa..47ac3b996 100755 --- a/whitebox-tools-app/src/tools/gis_analysis/mod.rs +++ b/whitebox-tools-app/src/tools/gis_analysis/mod.rs @@ -20,6 +20,7 @@ mod count_if; mod create_hexagonal_vector_grid; mod create_plane; mod create_rectangular_vector_grid; +mod deviation_from_regional_direction; mod difference; mod dissolve; mod edge_proportion; @@ -113,6 +114,7 @@ pub use self::count_if::CountIf; pub use self::create_hexagonal_vector_grid::CreateHexagonalVectorGrid; pub use self::create_plane::CreatePlane; pub use self::create_rectangular_vector_grid::CreateRectangularVectorGrid; +pub use self::deviation_from_regional_direction::DeviationFromRegionalDirection; pub use self::difference::Difference; pub use self::dissolve::Dissolve; pub use self::edge_proportion::EdgeProportion; diff --git a/whitebox-tools-app/src/tools/gis_analysis/patch_orientation.rs b/whitebox-tools-app/src/tools/gis_analysis/patch_orientation.rs index ee14ca6f4..2ee6f1b67 100755 --- a/whitebox-tools-app/src/tools/gis_analysis/patch_orientation.rs +++ b/whitebox-tools-app/src/tools/gis_analysis/patch_orientation.rs @@ -196,7 
+196,7 @@ impl WhiteboxTool for PatchOrientation { "ORIENT", FieldDataType::Real, 7u8, - 5u8, + 4u8, )); let mut part_start: usize; diff --git a/whitebox-tools-app/src/tools/gis_analysis/polygon_long_axis.rs b/whitebox-tools-app/src/tools/gis_analysis/polygon_long_axis.rs index a956f52c4..f0bb42095 100755 --- a/whitebox-tools-app/src/tools/gis_analysis/polygon_long_axis.rs +++ b/whitebox-tools-app/src/tools/gis_analysis/polygon_long_axis.rs @@ -200,8 +200,27 @@ impl WhiteboxTool for PolygonLongAxis { } // create output file - let mut output = - Shapefile::initialize_using_file(&output_file, &input, ShapeType::PolyLine, true)?; + let mut output = Shapefile::initialize_using_file(&output_file, &input, ShapeType::PolyLine, false)?; + + // add the attributes + let mut fields_vec: Vec = vec![]; + + let in_atts = input.attributes.clone(); + for i in 0..in_atts.fields.len() { + let field = in_atts.get_field(i); + fields_vec.push(field.clone()); + } + + fields_vec.push( + AttributeField::new( + "DIRECTION", + FieldDataType::Real, + 7u8, + 2u8 + ) + ); + + output.attributes.add_fields(&fields_vec); for record_num in 0..input.num_records { let record = input.get_record(record_num); @@ -240,7 +259,14 @@ impl WhiteboxTool for PolygonLongAxis { sfg.add_part(&points); output.add_record(sfg); - let atts = input.attributes.get_record(record_num); + let mut orientation = if p2.y > p1.y { + -((p2.y - p1.y).atan2(p2.x - p1.x)).to_degrees() + 90.0 + } else { + -((p1.y - p2.y).atan2(p1.x - p2.x)).to_degrees() + 90.0 + }; + if orientation < 0.0 { orientation = 180.0 + orientation; } + let mut atts = input.attributes.get_record(record_num); + atts.push(FieldData::Real(orientation)); output.attributes.add_record(atts.clone(), false); if verbose { diff --git a/whitebox-tools-app/src/tools/hydro_analysis/fill_burn.rs b/whitebox-tools-app/src/tools/hydro_analysis/fill_burn.rs index 612cbf0d5..60d81a3f2 100755 --- a/whitebox-tools-app/src/tools/hydro_analysis/fill_burn.rs +++ 
b/whitebox-tools-app/src/tools/hydro_analysis/fill_burn.rs @@ -22,7 +22,7 @@ use std::sync::mpsc; use std::sync::Arc; use std::thread; -/// Burns streams into a DEM using the FillBurn (Saunders, 1999) method which produces a hydro-enforced DEM. +/// Burns streams into a digital elevation model (DEM) using the FillBurn (Saunders, 1999) method which produces a hydro-enforced DEM. /// This tool uses the algorithm described in: /// /// Lindsay JB. 2016. The practice of DEM stream burning revisited. Earth Surface Processes @@ -32,6 +32,13 @@ use std::thread; /// /// Saunders, W. 1999. Preparation of DEMs for use in environmental modeling analysis, in: ESRI User /// Conference. pp. 24-30. +/// +/// The `TopologicalBreachBurn` tool, contained within the Whitebox Toolset Extension (WTE), should be preferred to +/// this `FillBurn`, because it accounts for the topological errors that frequently occur when burning vector streams +/// into a DEM. +/// +/// # See Also +/// `TopologicalBreachBurn`, `PruneVectorStreams` pub struct FillBurn { name: String, description: String, diff --git a/whitebox-tools-app/src/tools/image_analysis/emboss_filter.rs b/whitebox-tools-app/src/tools/image_analysis/emboss_filter.rs index 8bb0abc14..b80be055c 100755 --- a/whitebox-tools-app/src/tools/image_analysis/emboss_filter.rs +++ b/whitebox-tools-app/src/tools/image_analysis/emboss_filter.rs @@ -61,7 +61,7 @@ use std::thread; /// | . | . | . 
| /// |:--:|:---:|:--:| /// | 0 | 1 | 0 | -/// | 1 | 0 | 0 | +/// | 0 | 0 | 0 | /// | 0 | -1 | 0 | /// /// Southwest (`sw`) diff --git a/whitebox-tools-app/src/tools/math_stat_analysis/zonal_statistics.rs b/whitebox-tools-app/src/tools/math_stat_analysis/zonal_statistics.rs index f1a4f7e82..e26be7a0d 100755 --- a/whitebox-tools-app/src/tools/math_stat_analysis/zonal_statistics.rs +++ b/whitebox-tools-app/src/tools/math_stat_analysis/zonal_statistics.rs @@ -119,6 +119,15 @@ impl ZonalStatistics { optional: true, }); + parameters.push(ToolParameter { + name: "Treat zero-valued cells as background?".to_owned(), + flags: vec!["--zero_is_background".to_owned()], + description: "Treat zero-valued cells as background?".to_owned(), + parameter_type: ParameterType::Boolean, + default_value: None, + optional: true, + }); + let sep: String = path::MAIN_SEPARATOR.to_string(); let e = format!("{}", env::current_exe().unwrap().display()); let mut parent = env::current_exe().unwrap(); @@ -192,6 +201,7 @@ impl WhiteboxTool for ZonalStatistics { // let mut out_table = false; let mut output_html_file = String::new(); let mut stat_type = String::from("mean"); + let mut zero_is_background = false; if args.len() == 0 { return Err(Error::new( @@ -240,6 +250,10 @@ impl WhiteboxTool for ZonalStatistics { } else { args[i + 1].to_string().to_lowercase() }; + } else if vec[0].to_lowercase() == "-zero_is_background" { + if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") { + zero_is_background = true; + } } } @@ -378,7 +392,7 @@ impl WhiteboxTool for ZonalStatistics { for col in 0..columns { val = input.get_value(row, col); features_val = features.get_value(row, col); - if val != nodata && features_val != features_nodata { + if val != nodata && features_val != features_nodata && !(zero_is_background && val == 0.0) { id = (features_val.round() as isize - min_id) as usize; features_data[id].push(val); features_present[id] = true; @@ -412,7 +426,7 @@ impl WhiteboxTool for 
ZonalStatistics { for col in 0..columns { val = input.get_value(row, col); features_val = features.get_value(row, col); - if val != nodata && features_val != features_nodata { + if val != nodata && features_val != features_nodata && !(zero_is_background && val == 0.0) { id = (features_val.round() as isize - min_id) as usize; features_total_deviation[id] += (val - features_average[id]) * (val - features_average[id]); @@ -474,7 +488,7 @@ impl WhiteboxTool for ZonalStatistics { for col in 0..columns { val = input.get_value(row, col); features_val = features.get_value(row, col); - if val != nodata && features_val != features_nodata { + if val != nodata && features_val != features_nodata && !(zero_is_background && val == 0.0) { id = (features_val.round() as isize - min_id) as usize; output.set_value(row, col, out_stat[id]); } diff --git a/whitebox-tools-app/src/tools/mod.rs b/whitebox-tools-app/src/tools/mod.rs index 1738ffbfe..ff8a891dc 100755 --- a/whitebox-tools-app/src/tools/mod.rs +++ b/whitebox-tools-app/src/tools/mod.rs @@ -81,6 +81,7 @@ impl ToolManager { tool_names.push("CreateHexagonalVectorGrid".to_string()); tool_names.push("CreatePlane".to_string()); tool_names.push("CreateRectangularVectorGrid".to_string()); + tool_names.push("DeviationFromRegionalDirection".to_string()); tool_names.push("Difference".to_string()); tool_names.push("Dissolve".to_string()); tool_names.push("EdgeProportion".to_string()); @@ -579,6 +580,7 @@ impl ToolManager { "createrectangularvectorgrid" => { Some(Box::new(gis_analysis::CreateRectangularVectorGrid::new())) } + "deviationfromregionaldirection" => Some(Box::new(gis_analysis::DeviationFromRegionalDirection::new())), "difference" => Some(Box::new(gis_analysis::Difference::new())), "dissolve" => Some(Box::new(gis_analysis::Dissolve::new())), "edgeproportion" => Some(Box::new(gis_analysis::EdgeProportion::new())),