Updates
jblindsay committed Nov 7, 2023
1 parent a0746ec commit 5a82f51
Showing 6 changed files with 204 additions and 47 deletions.
Binary file modified: .DS_Store (contents not shown)
1 change: 1 addition & 0 deletions whitebox-common/src/plugins.rs
@@ -23,6 +23,7 @@ impl<S: ToString> From<S> for InvocationError {
}

#[derive(Copy, Clone)]
#[warn(improper_ctypes_definitions)]
pub struct PluginDeclaration {
pub rustc_version: &'static str,
pub core_version: &'static str,
95 changes: 54 additions & 41 deletions whitebox-plugins/src/repair_stream_vector_topology/main.rs
@@ -1,7 +1,7 @@
/*
Authors: Prof. John Lindsay
Created: 03/08/2021 (originally in Whitebox Toolset Extension)
Last Modified: 19/05/2023
Last Modified: 04/10/2023
License: MIT
*/

@@ -46,7 +46,7 @@ const EPSILON: f64 = std::f64::EPSILON;
///
/// ![](../../doc_img/RepairStreamVectorTopology.png)
///
/// The user may optinally specify the name of the input vector stream network (`--input`) and the output file
/// The user may optionally specify the name of the input vector stream network (`--input`) and the output file
/// (`--output`). Note that if an input file is not specified by the user, the tool will search for all vector
/// files (*.shp) files contained within the current working directory. This feature can be very useful when
/// you need to process a large number of stream files contained within a single directory. The tool will
@@ -57,6 +57,11 @@ const EPSILON: f64 = std::f64::EPSILON;
/// data, however, if the input data are in geographic coordinates (latitude and longitude), then specifying a
/// small-valued snap distance is advisable.
///
/// Additionally, the tool possesses two Boolean flags, `--reverse_backward_arcs` and `--correct_nonconfluence_joins`,
/// which determine whether the tool will correct backward arcs (i.e., line segments that are oriented
/// in the reverse direction to the streamflow) and non-confluence joins (i.e., upstream/downstream line
/// segments that are not joined at confluence locations).
///
/// Notice that the attributes of the input layer will not be
/// carried over to the output file because there is not a one-for-one feature correspondence between the
/// two files due to the joins and splits of stream segments. Instead the output attribute table will
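
As a rough worked example of the snap-distance note above (an editorial illustration, not part of the commit): one degree of latitude spans roughly 111,320 m, so a metre-scale snap tolerance corresponds to a value on the order of 1e-5 degrees when the coordinates are latitude/longitude.

fn main() {
    // Approximate metres per degree of latitude (WGS84 average); illustrative figure only.
    let metres_per_degree = 111_320.0_f64;
    let snap_metres = 1.0_f64;
    let snap_degrees = snap_metres / metres_per_degree;
    println!("a {} m snap distance is roughly {:.1e} degrees", snap_metres, snap_degrees); // ~9.0e-6
}
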
@@ -105,10 +105,12 @@ fn help() {
version Prints the tool version information.
The following flags can be used with the 'run' command:
--routes Name of the input routes vector file.
-o, --output Name of the output HTML file.
--length Maximum segment length (m).
--dist Search distance, in grid cells, used in visibility analysis.
--routes Name of the input routes vector file.
-o, --output Name of the output HTML file.
--length Maximum segment length (m).
--dist Search distance, in grid cells, used in visibility analysis.
--reverse_backward_arcs Boolean flag determines whether backward arcs are corrected.
--correct_nonconfluence_joins Boolean flag determines whether non-confluence joins are corrected.
Input/output file names can be fully qualified, or can rely on the
working directory contained in the WhiteboxTools settings.json file.
@@ -153,6 +160,7 @@ fn run(args: &Vec<String>) -> Result<(), std::io::Error> {
let mut output_file: String = String::new();
let mut snap_dist = 1.0;
let mut reverse_backward_arcs = false;
let mut correct_nonconfluence_joins = false;
if args.len() <= 1 {
return Err(Error::new(
ErrorKind::InvalidInput,
@@ -197,6 +205,10 @@ fn run(args: &Vec<String>) -> Result<(), std::io::Error> {
if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") {
reverse_backward_arcs = true;
}
} else if flag_val == "-correct_nonconfluence_joins" {
if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") {
correct_nonconfluence_joins = true;
}
}
}
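
The flag handling above follows the same convention used for `--reverse_backward_arcs`: a bare Boolean flag, or any value other than "false", enables the option. A minimal standalone sketch of that pattern (parse_bool_flag is a hypothetical helper name, not part of the tool):

fn parse_bool_flag(arg: &str) -> bool {
    // Split "--flag=value" on '='; a bare flag counts as true, and only an explicit "false" disables it.
    let vec: Vec<&str> = arg.split('=').collect();
    vec.len() == 1 || !vec[1].to_lowercase().contains("false")
}

fn main() {
    assert!(parse_bool_flag("--correct_nonconfluence_joins"));
    assert!(parse_bool_flag("--correct_nonconfluence_joins=true"));
    assert!(!parse_bool_flag("--correct_nonconfluence_joins=false"));
    println!("flag parsing behaves as expected");
}
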

@@ -368,53 +380,54 @@ fn run(args: &Vec<String>) -> Result<(), std::io::Error> {
let mut num_polylines = polylines.len(); // will be updated after the joins.




// Find all of the segments that can be joined because they link at non-confluences.
let endnode_tree = RTree::bulk_load(end_nodes);
let precision = EPSILON * 10f64;
let mut p1: Point2D;
let mut connections = vec![[num_polylines, num_polylines]; num_polylines];
let mut connected_polyline: usize;
let mut num_neighbours: usize;
for fid in 0..num_polylines {
// fid = polylines[poly_id].id1;
p1 = polylines[fid].get_first_node();
let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision);

if correct_nonconfluence_joins {
// Find all of the segments that can be joined because they link at non-confluences.
for fid in 0..num_polylines {
// fid = polylines[poly_id].id1;
p1 = polylines[fid].get_first_node();
let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision);

connected_polyline = num_polylines;
num_neighbours = 0;
for p in ret {
if p.data != fid {
connected_polyline = p.data;
num_neighbours += 1;
connected_polyline = num_polylines;
num_neighbours = 0;
for p in ret {
if p.data != fid {
connected_polyline = p.data;
num_neighbours += 1;
}
}
if num_neighbours == 1 {
connections[fid][0] = connected_polyline;
}
}
if num_neighbours == 1 {
connections[fid][0] = connected_polyline;
}

p1 = polylines[fid].get_last_node();
let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision);
p1 = polylines[fid].get_last_node();
let ret = endnode_tree.locate_within_distance([p1.x, p1.y], precision);

connected_polyline = num_polylines;
num_neighbours = 0;
for p in ret {
if p.data != fid {
connected_polyline = p.data;
num_neighbours += 1;
connected_polyline = num_polylines;
num_neighbours = 0;
for p in ret {
if p.data != fid {
connected_polyline = p.data;
num_neighbours += 1;
}
}
if num_neighbours == 1 {
connections[fid][1] = connected_polyline;
}
}
if num_neighbours == 1 {
connections[fid][1] = connected_polyline;
}

if configurations.verbose_mode && inputs.len() == 1 {
progress = (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize;
let mut old_progress = old_progress.lock().unwrap();
if progress != *old_progress {
println!("Looking for joins in arcs: {}%", progress);
*old_progress = progress;
if configurations.verbose_mode && inputs.len() == 1 {
progress = (100.0_f64 * (fid + 1) as f64 / num_polylines as f64) as usize;
let mut old_progress = old_progress.lock().unwrap();
if progress != *old_progress {
println!("Looking for joins in arcs: {}%", progress);
*old_progress = progress;
}
}
}
}
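
The join search above queries an R-tree of arc endpoints within a tiny tolerance; an endpoint whose only coincident neighbour belongs to exactly one other arc marks a non-confluence (degree-two) node where the two arcs can be merged. A simplified, self-contained sketch of the same idea, assuming the rstar crate (whose RTree::bulk_load and locate_within_distance calls match those above); the arc layout and endpoint tagging here are illustrative only:

use rstar::{primitives::GeomWithData, RTree};

fn main() {
    // Each arc contributes its two endpoints, tagged with the arc's index.
    let arcs: Vec<Vec<[f64; 2]>> = vec![
        vec![[0.0, 0.0], [1.0, 0.0]], // arc 0
        vec![[1.0, 0.0], [2.0, 1.0]], // arc 1: meets arc 0 end-to-end with no third arc, so joinable
        vec![[5.0, 5.0], [6.0, 5.0]], // arc 2: isolated
    ];

    let mut end_nodes = Vec::new();
    for (id, arc) in arcs.iter().enumerate() {
        end_nodes.push(GeomWithData::new(*arc.first().unwrap(), id));
        end_nodes.push(GeomWithData::new(*arc.last().unwrap(), id));
    }
    let endnode_tree = RTree::bulk_load(end_nodes);
    let precision = f64::EPSILON * 10.0; // squared search radius, mirroring the tolerance above

    for (id, arc) in arcs.iter().enumerate() {
        for endpoint in [*arc.first().unwrap(), *arc.last().unwrap()] {
            // Count coincident endpoints belonging to *other* arcs.
            let neighbours: Vec<usize> = endnode_tree
                .locate_within_distance(endpoint, precision)
                .filter(|p| p.data != id)
                .map(|p| p.data)
                .collect();
            if neighbours.len() == 1 {
                println!("arc {} joins arc {} at {:?}", id, neighbours[0], endpoint);
            }
        }
    }
}
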
@@ -38,6 +38,14 @@
"parameter_type": "Boolean",
"default_value": "true",
"optional": true
},
{
"name": "Correct non-confluence joins?",
"flags": ["--correct_nonconfluence_joins"],
"description": "Optional flag to request that non-confluence joins be corrected.",
"parameter_type": "Boolean",
"default_value": "true",
"optional": true
}
]
}
8 changes: 4 additions & 4 deletions whitebox-raster/src/geotiff/mod.rs
@@ -3009,10 +3009,10 @@ pub fn write_geotiff<'a>(r: &'a mut Raster) -> Result<(), Error> {

// tGTCitationGeoKey (1026)
let mut v = String::from(
geographic_type_map
*geographic_type_map
.get(&r.configs.epsg_code)
.unwrap()
.clone(),
// .clone(),
);
v.push_str("|");
v = v.replace("_", " ");
@@ -3087,10 +3087,10 @@ pub fn write_geotiff<'a>(r: &'a mut Raster) -> Result<(), Error> {

// PCSCitationGeoKey (3073)
let mut v = String::from(
projected_cs_type_map
*projected_cs_type_map
.get(&r.configs.epsg_code)
.unwrap()
.clone(),
// .clone(),
);
v.push_str("|");
v = v.replace("_", " ");
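
The change above removes the clone() by dereferencing the map lookup instead. Assuming the citation maps store &'static str values keyed by the EPSG code (an assumption made only for this sketch), get() returns an Option<&&str>, and a single dereference yields a &str that String::from accepts directly:

use std::collections::HashMap;

fn main() {
    // Hypothetical stand-in for geographic_type_map / projected_cs_type_map: EPSG code -> &'static str.
    let mut geographic_type_map: HashMap<u16, &'static str> = HashMap::new();
    geographic_type_map.insert(4326, "GCS_WGS_1984");

    let epsg_code: u16 = 4326;
    // get() yields Option<&&'static str>; one deref gives &'static str, so no clone() is needed.
    let mut v = String::from(*geographic_type_map.get(&epsg_code).unwrap());
    v.push_str("|");
    v = v.replace("_", " ");
    println!("{}", v); // prints "GCS WGS 1984|"
}
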
@@ -2,7 +2,7 @@
This tool is part of the WhiteboxTools geospatial analysis library.
Authors: Dr. John Lindsay
Created: 28/06/2017
Last Modified: 18/10/2019
Last Modified: 26/10/2023
License: MIT
NOTES: This tool provides a full workflow D8 flow operation. This includes removing depressions, calculating
@@ -92,6 +92,15 @@ impl FlowAccumulationFullWorkflow {
optional: true,
});

parameters.push(ToolParameter {
name: "Corrected flow pointer".to_owned(),
flags: vec!["--correct_pntr".to_owned()],
description: "Optional flag to apply corerections that limit potential artifacts in the flow pointer.".to_owned(),
parameter_type: ParameterType::Boolean,
default_value: None,
optional: true,
});

parameters.push(ToolParameter {
name: "Log-transform the output?".to_owned(),
flags: vec!["--log".to_owned()],
@@ -183,6 +192,7 @@ impl WhiteboxTool for FlowAccumulationFullWorkflow {
let mut pntr_file = String::new();
let mut accum_file = String::new();
let mut out_type = String::from("sca");
let mut correct_pntr = false;
let mut log_transform = false;
let mut clip_max = false;
let mut esri_style = false;
@@ -246,6 +256,10 @@ impl WhiteboxTool for FlowAccumulationFullWorkflow {
} else {
out_type = String::from("ca");
}
} else if vec[0].to_lowercase() == "-correct_pntr" || vec[0].to_lowercase() == "--correct_pntr" {
if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") {
correct_pntr = true;
}
} else if vec[0].to_lowercase() == "-log" || vec[0].to_lowercase() == "--log" {
if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") {
log_transform = true;
@@ -610,7 +624,128 @@ impl WhiteboxTool for FlowAccumulationFullWorkflow {
Err(e) => return Err(e),
};

// calculate the number of inflowing cells
if correct_pntr {
let (mut dir, mut dir_n, mut dir_no, mut new_val, mut old_val): (i8, i8, i8, i8, i8);
let (mut l1, mut l2, mut r1, mut r2): (i8, i8, i8, i8);
let (mut zl, mut zr, mut zn): (f64, f64, f64);
let mut count: isize;
let mut resolved: bool;
for row in 1..(rows-1) { // buffer the edges
for col in 1..(columns-1) {
// flow_dir is the n-index of the flow-receiving cell
dir = flow_dir.get_value(row, col);
old_val = dir;
if dir >= 0 {
// error 1: zig-zag where two flow directions intersect at 90deg angles
// because scan order is top down, crosses should only occur on the bottom half
l1 = dir + 7 - (8 * (dir + 7 > 7) as i8);
r1 = dir + 1 - (8 * (dir + 1 > 7) as i8);
l2 = r1 + 5 - (8 * (r1 + 5 > 7) as i8);
r2 = l1 + 3 - (8 * (l1 + 3 > 7) as i8);
dir_n = flow_dir.get_value(row + dy[l1 as usize], col + dx[l1 as usize]); // left
zl = output.get_value(row + dy[l1 as usize], col + dx[l1 as usize]);
dir_no = flow_dir.get_value(row + dy[r1 as usize], col + dx[r1 as usize]); // right
zr = output.get_value(row + dy[r1 as usize], col + dx[r1 as usize]);
zn = output.get_value(row + dy[dir as usize], col + dx[dir as usize]);
if dir_n == r2 && zr != nodata && zr <= zn { // left -> right cross && not nodata && is lower
new_val = r1;
} else if dir_no == l2 && zl != nodata && zl <= zn { // right -> left cross && not nodata && is lower
new_val = l1;
} else { // keep original value
new_val = dir;
}

if new_val != dir && new_val % 2 == 0 && [0,6,7].contains(&new_val) { // make sure new val doesn't create error 1 where it can't be corrected
l1 = new_val + 7 - (8 * (new_val + 7 > 7) as i8);
r1 = new_val + 1 - (8 * (new_val + 1 > 7) as i8);
l2 = r1 + 5 - (8 * (r1 + 5 > 7) as i8);
r2 = l1 + 3 - (8 * (l1 + 3 > 7) as i8);
dir_n = flow_dir.get_value(row + dy[l1 as usize], col + dx[l1 as usize]); // left
dir_no = flow_dir.get_value(row + dy[r1 as usize], col + dx[r1 as usize]); // right
if dir_n == r2 || dir_no == l2 { // avoid new error, roll back
new_val = old_val;
}
}

// setting value here preserves solutions from error 1 but prevents this scan from being parallelized
flow_dir.set_value(row, col, new_val);

// error 2: overshoot where flow points to a cell that points back into the original neighbourhood
resolved = false;
count = 0;
dir = new_val;
while !resolved && dir >= 0 { // search until outflow is found, else keep original value.
// find the flow reviever n-index.
dir_n = flow_dir.get_value(row + dy[dir as usize], col + dx[dir as usize]);
if dir_n >= 0 {
old_val = dir; // always set to the last valid direction

// flow directions within these bounds point back into the original neighbourhood.
l1 = dir + 5 - (8 * (dir + 5 > 7) as i8);
r1 = dir + 3 - (8 * (dir + 3 > 7) as i8);
l2 = dir + 6 - (8 * (dir + 6 > 7) as i8);
r2 = dir + 2 - (8 * (dir + 2 > 7) as i8);

if dir % 2 == 0 { // diagonal
if dir_n == l1 {
new_val = r1 + 4 - (8 * (r1 + 4 > 7) as i8);
} else if dir_n == r1 {
new_val = l1 + 4 - (8 * (l1 + 4 > 7) as i8);
} else {
new_val = dir;
resolved = true;
}
} else { // cardinal
if dir_n == l1 {
new_val = r2 + 4 - (8 * (r2 + 4 > 7) as i8);
} else if dir_n == r1 {
new_val = l2 + 4 - (8 * (l2 + 4 > 7) as i8);
} else if dir_n == l2 {
new_val = r1 + 4 - (8 * (r1 + 4 > 7) as i8);
} else if dir_n == r2 {
new_val = l1 + 4 - (8 * (l1 + 4 > 7) as i8);
} else {
new_val = dir;
resolved = true;
};
}

if new_val != dir && new_val % 2 == 0 && [0,6,7].contains(&new_val) { // make sure new val doesn't create error 1 where it can't be corrected
l1 = new_val + 7 - (8 * (new_val + 7 > 7) as i8);
r1 = new_val + 1 - (8 * (new_val + 1 > 7) as i8);
l2 = r1 + 5 - (8 * (r1 + 5 > 7) as i8);
r2 = l1 + 3 - (8 * (l1 + 3 > 7) as i8);
dir_n = flow_dir.get_value(row + dy[l1 as usize], col + dx[l1 as usize]); // left
dir_no = flow_dir.get_value(row + dy[r1 as usize], col + dx[r1 as usize]); // right
if dir_n == r2 || dir_no == l2 { // roll back
new_val = old_val;
resolved = true;
}
}
} else { // roll back
new_val = old_val;
resolved = true;
}
if count > 7 { // stuck in a loop, use original flow_dir
new_val = flow_dir.get_value(row, col);
resolved = true;
}
dir = new_val;
count += 1;
}
flow_dir.set_value(row, col, new_val);
}
}
if verbose {
progress = (100.0_f64 * row as f64 / (rows - 1) as f64) as usize;
if progress != old_progress {
println!("Correcting flow direction: {}%", progress);
old_progress = progress;
}
}
}
}

let flow_dir = Arc::new(flow_dir);
let mut num_inflowing: Array2D<i8> = Array2D::new(rows, columns, -1, -1)?;
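
The correction pass above relies on wrap-around arithmetic over the eight D8 neighbour indices: an expression such as dir + 7 - (8 * (dir + 7 > 7) as i8) is just (dir + 7) mod 8, rotating the index one step to one side of dir (l1 in the code), while dir + 1 rotates it one step to the other side (r1). A small sketch confirming that equivalence (the wrap and rotate helpers are illustrative only, not part of the tool):

fn main() {
    // The branchy form used above: add k, then subtract 8 whenever the sum exceeds 7.
    fn wrap(dir: i8, k: i8) -> i8 {
        dir + k - (8 * (dir + k > 7) as i8)
    }

    // The same operation written as modular arithmetic over the 8 neighbour indices.
    fn rotate(dir: i8, k: i8) -> i8 {
        (dir + k) % 8
    }

    for dir in 0..8i8 {
        for k in 0..8i8 {
            assert_eq!(wrap(dir, k), rotate(dir, k));
        }
        // l1/r1 as used in the correction pass: one index step to either side of dir.
        println!("dir {} -> l1 {}, r1 {}", dir, rotate(dir, 7), rotate(dir, 1));
    }
}
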
