Skip to content

Commit

Permalink
Allow user to run specific zones. (#140)
Browse files Browse the repository at this point in the history
Also, don't assume a `geometry` field is present in DBFs. Together, these two changes
make it easier to use other subcatchment network data that has been massaged
to look like NHD data.
  • Loading branch information
brownterryn authored Jan 23, 2025
1 parent aac2d92 commit 74eeaf7
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 5 deletions.
3 changes: 2 additions & 1 deletion StreamCat.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
OUT_DIR,
PCT_FULL_FILE,
PCT_FULL_FILE_RP100,
USER_ZONES,
)
from StreamCat_functions import (
Accumulation,
Expand Down Expand Up @@ -71,7 +72,7 @@

if not os.path.exists(ACCUM_DIR):
# TODO: work out children OR bastards only
makeNumpyVectors(inter_vpu, NHD_DIR)
makeNumpyVectors(inter_vpu, NHD_DIR, USER_ZONES)

INPUTS = np.load(ACCUM_DIR +"/vpu_inputs.npy", allow_pickle=True).item()

Expand Down
15 changes: 11 additions & 4 deletions StreamCat_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -1273,7 +1273,7 @@ def make_all_cat_comids(nhd, inputs):
return set(all_comids) # RETURN A SET!


def makeNumpyVectors(inter_tbl, nhd):
def makeNumpyVectors(inter_tbl, nhd, user_zones):
"""
Uses the NHD tables to create arrays of upstream catchments which are used
in the Accumulation function
Expand All @@ -1284,7 +1284,7 @@ def makeNumpyVectors(inter_tbl, nhd):
nhd : directory where NHD is stored
"""
os.mkdir("accum_npy")
inputs = nhd_dict(nhd)
inputs = nhd_dict(nhd, user_zones=user_zones)
all_comids = make_all_cat_comids(nhd, inputs)
print("Making numpy files in zone...", end="", flush=True)
for zone, hr in inputs.items():
Expand Down Expand Up @@ -1337,7 +1337,7 @@ def makeNumpyVectors(inter_tbl, nhd):
##############################################################################


def nhd_dict(nhd, unit="VPU"):
def nhd_dict(nhd, unit="VPU", user_zones=None):
"""
__author__ = "Rick Debbout <debbout.rick@epa.gov>"
Creates an OrderdDict for looping through regions of the NHD to carry
Expand All @@ -1354,7 +1354,12 @@ def nhd_dict(nhd, unit="VPU"):
"""

inputs = OrderedDict()
if user_zones: # Use user specified zones
inputs |= user_zones
np.save("./accum_npy/vpu_inputs.npy", inputs)
return inputs
bounds = dbf2DF(f"{nhd}/NHDPlusGlobalData/BoundaryUnit.dbf")
# Drop Hawaii and Cayman Islands.
remove = bounds.loc[bounds.DRAINAGEID.isin(["HI", "CI"])].index
bounds = bounds.drop(remove, axis=0)
if unit == "VPU":
Expand Down Expand Up @@ -1406,7 +1411,9 @@ def findUpstreamNpy(zone, com, numpy_dir):


def dbf2DF(f, upper=True):
    """Read a DBF (or any OGR-readable file) into a plain pandas DataFrame.

    Parameters
    ----------
    f : str
        Path of the file to read with geopandas.
    upper : bool, optional
        If True (default), upper-case all column names.

    Returns
    -------
    pandas.DataFrame
        The attribute table with any ``geometry`` column removed.
    """
    # errors="ignore" makes the drop a no-op when no geometry column exists
    # (e.g. plain DBFs without spatial data), and avoids mutating in place.
    data = gpd.read_file(f).drop(columns="geometry", errors="ignore")
    if upper is True:
        data.columns = data.columns.str.upper()
    return data
3 changes: 3 additions & 0 deletions stream_cat_config.py.template
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,9 @@ STATES_FILE = "/path/to/file/tl_2008_us_state.shp"

ACCUM_DIR = "path/to/local/repository/accum_npy/"

# To run only specific NHD zones instead of all of them, set this dict,
# e.g. {"04": "GL", "12": "TX"}. Keys are UnitIDs and values are DrainageIDs;
# see NHDPlusGlobalData/BoundaryUnit.dbf for valid pairs.
USER_ZONES = {}

# location to write out accumulated StreamCat data <- this is intermediate
OUT_DIR = "/path/to/write/out/files/to"
Expand Down

0 comments on commit 74eeaf7

Please sign in to comment.