From ae271d3d51be889398311cb418799335a115d431 Mon Sep 17 00:00:00 2001 From: williamfoschiera Date: Sun, 17 Mar 2024 15:37:51 -0400 Subject: [PATCH] linter. --- .gitignore | 148 +- deafrica_tools/__init__.py | 23 +- deafrica_tools/app/animations.py | 125 +- deafrica_tools/app/changefilmstrips.py | 93 +- deafrica_tools/app/crophealth.py | 174 +- deafrica_tools/app/deacoastlines.py | 203 +- deafrica_tools/app/forestmonitoring.py | 262 +-- deafrica_tools/app/geomedian.py | 300 ++- deafrica_tools/app/imageexport.py | 229 +- deafrica_tools/app/wetlandsinsighttool.py | 82 +- deafrica_tools/app/widgetconstructors.py | 195 +- deafrica_tools/areaofinterest.py | 28 +- deafrica_tools/bandindices.py | 161 +- deafrica_tools/classification.py | 159 +- deafrica_tools/coastal.py | 169 +- deafrica_tools/dask.py | 19 +- deafrica_tools/datahandling.py | 127 +- deafrica_tools/load_era5.py | 92 +- deafrica_tools/load_isda.py | 49 +- deafrica_tools/load_soil_moisture.py | 38 +- deafrica_tools/plotting.py | 271 +-- deafrica_tools/spatial.py | 830 ++++---- deafrica_tools/temporal.py | 28 +- deafrica_tools/wetlands.py | 143 +- get_IVs_unify.py | 395 ---- get_IVs_unify_rolo_embrapa.py | 362 ---- get_IVs_yearly.py | 173 -- get_LST.py | 276 --- get_LST_unify.py | 314 --- grits_lst89_p1.py | 218 -- grits_vis_p1.py | 151 -- poetry.lock | 1881 +++++++++++++++++ pyproject.toml | 46 + tests/conftest.py | 31 + tests/utils/test_grits.py | 13 + utils/__init__.py | 0 utils/get_IVs_unify.py | 380 ++++ utils/get_IVs_unify_rolo_embrapa.py | 368 ++++ utils/get_IVs_yearly.py | 170 ++ utils/get_LST.py | 273 +++ utils/get_LST_unify.py | 303 +++ get_nc_data.py => utils/get_nc_data.py | 58 +- get_stats_nc.py => utils/get_stats_nc.py | 50 +- grits.py => utils/grits.py | 444 ++-- utils/grits_lst89_p1.py | 227 ++ utils/grits_vis_p1.py | 159 ++ utils/x_Landsat_QA.py | 78 + .../x_gemini_landsat.py | 16 +- x_Landsat_QA.py | 97 - 49 files changed, 6019 insertions(+), 4412 deletions(-) delete mode 100644 get_IVs_unify.py delete mode 100644 get_IVs_unify_rolo_embrapa.py delete mode 100644 get_IVs_yearly.py delete mode 100644 get_LST.py delete mode 100644 get_LST_unify.py delete mode 100644 grits_lst89_p1.py delete mode 100644 grits_vis_p1.py create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 tests/conftest.py create mode 100644 tests/utils/test_grits.py create mode 100644 utils/__init__.py create mode 100644 utils/get_IVs_unify.py create mode 100644 utils/get_IVs_unify_rolo_embrapa.py create mode 100644 utils/get_IVs_yearly.py create mode 100644 utils/get_LST.py create mode 100644 utils/get_LST_unify.py rename get_nc_data.py => utils/get_nc_data.py (51%) rename get_stats_nc.py => utils/get_stats_nc.py (51%) rename grits.py => utils/grits.py (80%) create mode 100644 utils/grits_lst89_p1.py create mode 100644 utils/grits_vis_p1.py create mode 100644 utils/x_Landsat_QA.py rename x_gemini_landsat.py => utils/x_gemini_landsat.py (70%) delete mode 100644 x_Landsat_QA.py diff --git a/.gitignore b/.gitignore index c18dd8d..6071486 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,147 @@ -__pycache__/ +# Byte-compiled / optimized / DLL files +__pycache__ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds 
the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal +*.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# Previous stuff +.idea +dfcredentials.json +.vscode/ + +dkdata-pricing/ +staticfiles/ \ No newline at end of file diff --git a/deafrica_tools/__init__.py b/deafrica_tools/__init__.py index 8e9f2cf..60d49e3 100644 --- a/deafrica_tools/__init__.py +++ b/deafrica_tools/__init__.py @@ -1,28 +1,25 @@ __version__ = "2.0.1" -__locales__ = __path__[0] + '/locales' +__locales__ = __path__[0] + "/locales" def set_lang(lang=None): if lang is None: import os - os_lang = os.getenv('LANG') - + + os_lang = os.getenv("LANG") + # Just take the first 2 letters: 'fr' not 'fr_FR.UTF-8' - if os_lang is not None and len(os_lang) >=2: + if os_lang is not None and len(os_lang) >= 2: lang = [os_lang[:2]] else: lang = [lang] - + import gettext + try: - translation = gettext.translation( - 'deafrica_tools', - localedir=__locales__, - languages=lang, - fallback=True - ) + translation = gettext.translation("deafrica_tools", localedir=__locales__, languages=lang, fallback=True) translation.install() - + except FileNotFoundError: - print(f'Could not load lang={lang}') + print(f"Could not load lang={lang}") diff --git a/deafrica_tools/app/animations.py b/deafrica_tools/app/animations.py index a0ad4fb..7a551af 100644 --- a/deafrica_tools/app/animations.py +++ b/deafrica_tools/app/animations.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """ -Satellite imagery animation widget, which can be used to interactively +Satellite imagery animation widget, which can be used to interactively produce animations for multiple DE Africa products. """ @@ -9,7 +9,8 @@ # Force GeoPandas to use Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. 
import os -os.environ['USE_PYGEOS'] = '0' + +os.environ["USE_PYGEOS"] = "0" import fiona import sys @@ -119,10 +120,9 @@ def update_map_layers(self): # Clear all layers and add basemap self.map_layers.clear_layers() self.map_layers.add_layer(self.basemap) - -def extract_data(self): +def extract_data(self): # Connect to datacube database dc = datacube.Datacube(app="Exporting satellite images") @@ -135,22 +135,18 @@ def extract_data(self): # Create query. start_date = np.datetime64(self.start_date) end_date = np.datetime64(self.end_date) - + self.query_params = { "time": (str(start_date), str(end_date)), "geopolygon": geopolygon, } # Find matching datasets - dss = [ - dc.find_datasets(product=i, **self.query_params) - for i in sat_params[self.dealayer]["products"] - ] + dss = [dc.find_datasets(product=i, **self.query_params) for i in sat_params[self.dealayer]["products"]] dss = list(itertools.chain.from_iterable(dss)) # If data is found if len(dss) > 0: - # Get CRS crs = str(dss[0].crs) @@ -190,19 +186,14 @@ def extract_data(self): def plot_data(self, fname): - # Data to plot to_plot = self.timeseries_ds # If rolling median specified if self.rolling_median: with self.status_info: - print( - f"\nApplying rolling median ({self.rolling_median_window} timesteps window)" - ) - to_plot = to_plot.rolling( - time=int(self.rolling_median_window), center=True, min_periods=1 - ).median() + print(f"\nApplying rolling median ({self.rolling_median_window} timesteps window)") + to_plot = to_plot.rolling(time=int(self.rolling_median_window), center=True, min_periods=1).median() # If resampling freq specified if self.resample_freq: @@ -215,7 +206,7 @@ def plot_data(self, fname): if self.power < 1.0: with self.status_info: print(f"\nApplying power transformation ({self.power})") - to_plot = to_plot ** self.power + to_plot = to_plot**self.power # Apply unsharp masking to enhance overall dynamic range, # and improve fine scale detail @@ -229,9 +220,7 @@ def plot_data(self, fname): funcs_list = [ rescale_intensity, - lambda x: unsharp_mask( - x, radius=self.unsharp_mask_radius, amount=self.unsharp_mask_amount - ), + lambda x: unsharp_mask(x, radius=self.unsharp_mask_radius, amount=self.unsharp_mask_amount), ] else: funcs_list = None @@ -260,7 +249,6 @@ def plot_data(self, fname): def deacoastlines_overlay(ds): - import geopandas as gpd import pandas as pd import matplotlib @@ -292,7 +280,7 @@ def deacoastlines_overlay(ds): else: return None - + class animation_app(HBox): def __init__(self): super().__init__() @@ -310,9 +298,7 @@ def __init__(self): # Satellite data end_date = datetime.datetime.today() - start_date = datetime.datetime( - year=end_date.year - 3, month=end_date.month, day=end_date.day - ) + start_date = datetime.datetime(year=end_date.year - 3, month=end_date.month, day=end_date.day) self.start_date = start_date.strftime("%Y-%m-%d") self.end_date = end_date.strftime("%Y-%m-%d") self.dealayer_list = [ @@ -366,9 +352,7 @@ def __init__(self): ################## # Create the Header widget - header_title_text = ( - "

Digital Earth Africa satellite imagery animations

" - ) + header_title_text = "

Digital Earth Africa satellite imagery animations

" instruction_text = ( "

Select the desired satellite data, imagery date range " "and image style, then zoom in and draw a rectangle to " @@ -384,7 +368,6 @@ def __init__(self): # Define the action to take once something is drawn on the map def update_geojson(target, action, geo_json): - # Get data from action self.action = action @@ -414,12 +397,7 @@ def update_geojson(target, action, geo_json): ' ' "(Overriding maximum size limit; use with caution as may lead to memory issues)" ) - self.header.value = ( - header_title_text - + instruction_text - + polyarea_text - + confirmation_text - ) + self.header.value = header_title_text + instruction_text + polyarea_text + confirmation_text self.gdf_drawn = gdf elif area <= 50000: confirmation_text = ( @@ -427,12 +405,7 @@ def update_geojson(target, action, geo_json): "(Area to extract falls within " "recommended 50000 ha limit)" ) - self.header.value = ( - header_title_text - + instruction_text - + polyarea_text - + confirmation_text - ) + self.header.value = header_title_text + instruction_text + polyarea_text + confirmation_text self.gdf_drawn = gdf else: warning_text = ( @@ -441,9 +414,7 @@ def update_geojson(target, action, geo_json): "please select an area less than 50000 " "ha)" ) - self.header.value = ( - header_title_text + instruction_text + polyarea_text + warning_text - ) + self.header.value = header_title_text + instruction_text + polyarea_text + warning_text self.gdf_drawn = None ########################### @@ -466,7 +437,7 @@ def update_geojson(target, action, geo_json): self.map_layers.name = "Map Overlays" # Create map widget - self.m = deawidgets.create_map(map_center=(5.65, 26.17), zoom_level=13) + self.m = deawidgets.create_map(map_center=(5.65, 26.17), zoom_level=13) self.m.layout = make_box_layout() # Add tools to map widget @@ -481,24 +452,16 @@ def update_geojson(target, action, geo_json): ############################ # Create parameter widgets - dropdown_basemap = deawidgets.create_dropdown( - self.basemap_list, self.basemap_list[0][1] - ) - dropdown_dealayer = deawidgets.create_dropdown( - self.dealayer_list, self.dealayer_list[0][1] - ) - dropdown_output = deawidgets.create_dropdown( - self.output_list, self.output_list[0][1] - ) + dropdown_basemap = deawidgets.create_dropdown(self.basemap_list, self.basemap_list[0][1]) + dropdown_dealayer = deawidgets.create_dropdown(self.dealayer_list, self.dealayer_list[0][1]) + dropdown_output = deawidgets.create_dropdown(self.output_list, self.output_list[0][1]) date_picker_start = deawidgets.create_datepicker( value=start_date, ) date_picker_end = deawidgets.create_datepicker( value=end_date, ) - dropdown_styles = deawidgets.create_dropdown( - self.styles_list, self.styles_list[0] - ) + dropdown_styles = deawidgets.create_dropdown(self.styles_list, self.styles_list[0]) slider_percentile = widgets.FloatRangeSlider( value=[0.01, 0.99], min=0, @@ -521,8 +484,7 @@ def update_geojson(target, action, geo_json): checkbox_rolling_median = deawidgets.create_checkbox( self.rolling_median, "Apply rolling median
to produce smooth,
cloud-free animations", - layout={"width": "90%", - "height": "4em"}, + layout={"width": "90%", "height": "4em"}, ) text_rolling_median_window = widgets.IntText( value=20, @@ -537,17 +499,13 @@ def update_geojson(target, action, geo_json): ) # Expandable advanced section - text_interval = widgets.IntText( - value=100, description="", step=50, layout={"width": "95%"} - ) + text_interval = widgets.IntText(value=100, description="", step=50, layout={"width": "95%"}) text_resolution = widgets.FloatText( value=30, description="", layout={"width": "95%", "margin": "0px", "padding": "0px"}, ) - text_width = widgets.IntText( - value=900, description="", step=50, layout={"width": "95%"} - ) + text_width = widgets.IntText(value=900, description="", step=50, layout={"width": "95%"}) dropdown_resampling = deawidgets.create_dropdown( self.resample_list, self.resample_freq, @@ -565,9 +523,7 @@ def update_geojson(target, action, geo_json): description="", layout={"width": "95%"}, ) - checkbox_unsharp_mask = deawidgets.create_checkbox( - self.unsharp_mask, "Enable", layout={"width": "95%"} - ) + checkbox_unsharp_mask = deawidgets.create_checkbox(self.unsharp_mask, "Enable", layout={"width": "95%"}) text_unsharp_mask_radius = widgets.FloatText( value=20, step=1, @@ -593,9 +549,7 @@ def update_geojson(target, action, geo_json): checkbox_deacoastlines = deawidgets.create_checkbox( self.deacoastlines, "Add DE Africa Coastlines overlay", layout={"width": "95%"} ) - checkbox_max_size = deawidgets.create_checkbox( - self.max_size, "Enable", layout={"width": "95%"} - ) + checkbox_max_size = deawidgets.create_checkbox(self.max_size, "Enable", layout={"width": "95%"}) expand_box = widgets.VBox( [ HTML("Frame interval (milliseconds):"), @@ -615,9 +569,7 @@ def update_geojson(target, action, geo_json): checkbox_unsharp_mask, text_unsharp_mask_radius, text_unsharp_mask_amount, - HTML( - "
Override maximum size limit: (use with caution; may cause memory issues/crashes)" - ), + HTML("
Override maximum size limit: (use with caution; may cause memory issues/crashes)"), checkbox_max_size, ], ) @@ -646,13 +598,9 @@ def update_geojson(target, action, geo_json): dropdown_styles.observe(self.update_styles, "value") slider_percentile.observe(self.update_slider_percentile, "value") - floatslider_max_cloud_cover.observe( - self.update_floatslider_max_cloud_cover, "value" - ) + floatslider_max_cloud_cover.observe(self.update_floatslider_max_cloud_cover, "value") checkbox_rolling_median.observe(self.update_checkbox_rolling_median, "value") - text_rolling_median_window.observe( - self.update_text_rolling_median_window, "value" - ) + text_rolling_median_window.observe(self.update_text_rolling_median_window, "value") dropdown_output.observe(self.update_output, "value") run_button.on_click(self.run_app) draw_control.on_draw(update_geojson) @@ -824,7 +772,7 @@ def update_checkbox_max_size(self, change): # Add DE Africa Coastlines overlay def update_deacoastlines(self, change): self.deacoastlines = change.new - + # Apply cloud mask in load_ard def update_checkbox_cloud_mask(self, change): self.cloud_mask = change.new @@ -884,16 +832,13 @@ def update_dropdown_resampling(self, change): self.resample_freq = change.new def run_app(self, change): - # Clear progress bar and output areas before running self.status_info.clear_output() self.output_plot.clear_output() # Verify that polygon was drawn if self.gdf_drawn is not None: - with self.status_info: - # Load data and add to attribute if self.timeseries_ds is None: self.timeseries_ds = extract_data(self) @@ -902,9 +847,7 @@ def run_app(self, change): print("Using previously loaded data") if self.timeseries_ds is not None: - with self.status_info: - # Create unique file name centre_coords = self.gdf_drawn.geometry[0].centroid.coords[0][::-1] site = reverse_geocode(coords=centre_coords) @@ -916,9 +859,7 @@ def run_app(self, change): .lower() ) - print( - f"\nExporting animation for {site}.\nThis may take several minutes..." - ) + print(f"\nExporting animation for {site}.\nThis may take several minutes...") ############ # Plotting # @@ -937,6 +878,4 @@ def run_app(self, change): else: with self.status_info: - print( - 'Please draw a valid rectangle on the map, then press "Generate animation".' 
- ) \ No newline at end of file + print('Please draw a valid rectangle on the map, then press "Generate animation".') diff --git a/deafrica_tools/app/changefilmstrips.py b/deafrica_tools/app/changefilmstrips.py index 31fb2d8..71b5646 100644 --- a/deafrica_tools/app/changefilmstrips.py +++ b/deafrica_tools/app/changefilmstrips.py @@ -26,14 +26,14 @@ def run_filmstrip_app( - output_name, - time_range, - time_step, - tide_range=(0.0, 1.0), - resolution=(-30, 30), - max_cloud=0.5, - ls7_slc_off=False, - size_limit=10000, + output_name, + time_range, + time_step, + tide_range=(0.0, 1.0), + resolution=(-30, 30), + max_cloud=0.5, + ls7_slc_off=False, + size_limit=10000, ): """ An interactive app that allows the user to select a region from a @@ -117,10 +117,7 @@ def run_filmstrip_app( # Plot interactive map to select area basemap = basemap_to_tiles(basemaps.Esri.WorldImagery) - geopolygon = select_on_a_map(height="600px", - layers=(basemap,), - center=centre_coords, - zoom=14) + geopolygon = select_on_a_map(height="600px", layers=(basemap,), center=centre_coords, zoom=14) # Set centre coords based on most recent selection to re-focus # subsequent data selections @@ -131,13 +128,14 @@ def run_filmstrip_app( area = geopolygon.to_crs(crs=CRS("epsg:6933")).area / msq_per_hectare radius = np.round(np.sqrt(size_limit), 1) if area > size_limit: - print(f"Warning: Your selected area is {area:.00f} hectares. " - f"Please select an area of less than {size_limit} hectares." - f"\nTo select a smaller area, re-run the cell " - f"above and draw a new polygon.") + print( + f"Warning: Your selected area is {area:.00f} hectares. " + f"Please select an area of less than {size_limit} hectares." + f"\nTo select a smaller area, re-run the cell " + f"above and draw a new polygon." + ) else: - print("Starting analysis...") # Connect to datacube database @@ -147,12 +145,7 @@ def run_filmstrip_app( client = create_local_dask_cluster(return_client=True) # Obtain native CRS - crs = mostcommon_crs(dc=dc, - product="ls8_sr", - query={ - "time": "2014", - "geopolygon": geopolygon - }) + crs = mostcommon_crs(dc=dc, product="ls8_sr", query={"time": "2014", "geopolygon": geopolygon}) # Create query based on time range, area selected, custom params query = { @@ -160,10 +153,7 @@ def run_filmstrip_app( "geopolygon": geopolygon, "output_crs": crs, "resolution": resolution, - "dask_chunks": { - "x": 3000, - "y": 3000 - }, + "dask_chunks": {"x": 3000, "y": 3000}, "align": (resolution[1] / 2.0, resolution[1] / 2.0), } @@ -182,36 +172,32 @@ def run_filmstrip_app( # dataset and drop any observations out side this range if tide_range != (0.0, 1.0): from deafrica_tools.coastal import tidal_tag + ds = tidal_tag(ds=ds, tidepost_lat=None, tidepost_lon=None) min_tide, max_tide = ds.tide_height.quantile(tide_range).values - ds = ds.sel(time=(ds.tide_height >= min_tide) & - (ds.tide_height <= max_tide)) + ds = ds.sel(time=(ds.tide_height >= min_tide) & (ds.tide_height <= max_tide)) ds = ds.drop("tide_height") - print(f" Keeping {len(ds.time)} observations with tides " - f"between {min_tide:.2f} and {max_tide:.2f} m") + print(f" Keeping {len(ds.time)} observations with tides " f"between {min_tide:.2f} and {max_tide:.2f} m") # Create time step ranges to generate filmstrips from - bins_dt = pd.date_range(start=time_range[0], - end=time_range[1], - freq=pd.DateOffset(**time_step)) + bins_dt = pd.date_range(start=time_range[0], end=time_range[1], freq=pd.DateOffset(**time_step)) # Bin all satellite observations by timestep. 
If some observations # fall outside the upper bin, label these with the highest bin labels = bins_dt.astype("str") - time_steps = (pd.cut(ds.time.values, bins_dt, - labels=labels[:-1]).add_categories( - labels[-1]).fillna(labels[-1])) + time_steps = pd.cut(ds.time.values, bins_dt, labels=labels[:-1]).add_categories(labels[-1]).fillna(labels[-1]) - time_steps_var = xr.DataArray(time_steps, [("time", ds.time.values)], - name="timestep") + time_steps_var = xr.DataArray(time_steps, [("time", ds.time.values)], name="timestep") # Resample data temporally into time steps, and compute geomedians - ds_geomedian = (ds.groupby(time_steps_var).apply( - lambda ds_subset: geomedian_with_mads( - ds_subset, compute_mads=False, compute_count=False))) + ds_geomedian = ds.groupby(time_steps_var).apply( + lambda ds_subset: geomedian_with_mads(ds_subset, compute_mads=False, compute_count=False) + ) - print("\nGenerating geomedian composites and plotting " - "filmstrips... (click the Dashboard link above for status)") + print( + "\nGenerating geomedian composites and plotting " + "filmstrips... (click the Dashboard link above for status)" + ) ds_geomedian = ds_geomedian.compute() # Reset CRS that is lost during geomedian compositing @@ -230,16 +216,12 @@ def run_filmstrip_app( # and aspect ratio n_obs = output_array.sizes["timestep"] ratio = output_array.sizes["x"] / output_array.sizes["y"] - fig, axes = plt.subplots(1, - n_obs + 1, - figsize=(5 * ratio * (n_obs + 1), 5)) + fig, axes = plt.subplots(1, n_obs + 1, figsize=(5 * ratio * (n_obs + 1), 5)) fig.subplots_adjust(wspace=0.05, hspace=0.05) # Add timesteps to the plot, set aspect to equal to preserve shape for i, ax_i in enumerate(axes.flatten()[:n_obs]): - output_array.isel(timestep=i).plot.imshow(ax=ax_i, - vmin=percentiles[0], - vmax=percentiles[1]) + output_array.isel(timestep=i).plot.imshow(ax=ax_i, vmin=percentiles[0], vmax=percentiles[1]) ax_i.get_xaxis().set_visible(False) ax_i.get_yaxis().set_visible(False) ax_i.set_aspect("equal") @@ -248,11 +230,12 @@ def run_filmstrip_app( # by first taking the log of the array (so change in dark areas # can be identified), then computing standard deviation between # all timesteps - (np.log(output_array).std(dim=["timestep"]).mean( - dim="variable").plot.imshow(ax=axes.flatten()[-1], - robust=True, - cmap="magma", - add_colorbar=False)) + ( + np.log(output_array) + .std(dim=["timestep"]) + .mean(dim="variable") + .plot.imshow(ax=axes.flatten()[-1], robust=True, cmap="magma", add_colorbar=False) + ) axes.flatten()[-1].get_xaxis().set_visible(False) axes.flatten()[-1].get_yaxis().set_visible(False) axes.flatten()[-1].set_aspect("equal") diff --git a/deafrica_tools/app/crophealth.py b/deafrica_tools/app/crophealth.py index f6fd091..ea2280f 100644 --- a/deafrica_tools/app/crophealth.py +++ b/deafrica_tools/app/crophealth.py @@ -1,22 +1,18 @@ # crophealth.py -''' +""" Functions for loading and interacting with data in the crop health notebook, inside the Real_world_examples folder. -''' +""" # Load modules # Force GeoPandas to use Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. 
import os -os.environ['USE_PYGEOS'] = '0' - -from ipyleaflet import ( - Map, - GeoJSON, - DrawControl, - basemaps -) + +os.environ["USE_PYGEOS"] = "0" + +from ipyleaflet import Map, GeoJSON, DrawControl, basemaps import datetime as dt import datacube from osgeo import ogr @@ -43,7 +39,7 @@ def load_crophealth_data(lat, lon, buffer, date): Loads Sentinel-2 analysis-ready data (ARD) product for the crop health case-study area over the last two years. Last modified: April 2020 - + Parameters ---------- lat: float @@ -51,7 +47,7 @@ def load_crophealth_data(lat, lon, buffer, date): lon: float The central longitude to analyse buffer: - The number of square degrees to load around the central latitude and longitude. + The number of square degrees to load around the central latitude and longitude. For reasonable loading times, set this as `0.1` or lower. date: The most recent date to show data for. @@ -59,17 +55,17 @@ def load_crophealth_data(lat, lon, buffer, date): Returns ---------- - ds: xarray.Dataset + ds: xarray.Dataset data set containing combined, masked data Masked values are set to 'nan' """ - + # Suppress warnings - warnings.filterwarnings('ignore') + warnings.filterwarnings("ignore") # Initialise the data cube. 'app' argument is used to identify this app - dc = datacube.Datacube(app='Crophealth-app') - + dc = datacube.Datacube(app="Crophealth-app") + # Define area to load latitude = (lat - buffer, lat + buffer) longitude = (lon - buffer, lon + buffer) @@ -84,20 +80,14 @@ def load_crophealth_data(lat, lon, buffer, date): # Construct the data cube query products = ["s2_l2a"] - + query = { - 'x': longitude, - 'y': latitude, - 'time': time, - 'measurements': [ - 'red', - 'green', - 'blue', - 'nir', - 'swir_2' - ], - 'output_crs': 'EPSG:6933', - 'resolution': (-20, 20) + "x": longitude, + "y": latitude, + "time": time, + "measurements": ["red", "green", "blue", "nir", "swir_2"], + "output_crs": "EPSG:6933", + "resolution": (-20, 20), } # Load the data and mask out bad quality pixels @@ -106,10 +96,10 @@ def load_crophealth_data(lat, lon, buffer, date): # Calculate the normalised difference vegetation index (NDVI) across # all pixels for each image. # This is stored as an attribute of the data - ds = calculate_indices(ds, index='NDVI', satellite_mission='s2') + ds = calculate_indices(ds, index="NDVI", satellite_mission="s2") # Return the data - return(ds) + return ds def run_crophealth_app(ds, lat, lon, buffer): @@ -118,10 +108,10 @@ def run_crophealth_app(ds, lat, lon, buffer): the user to draw polygons. This returns a plot of the average NDVI value in the polygon area. Last modified: January 2020 - + Parameters ---------- - ds: xarray.Dataset + ds: xarray.Dataset data set containing combined, masked data Masked values are set to 'nan' lat: float @@ -129,17 +119,17 @@ def run_crophealth_app(ds, lat, lon, buffer): lon: float The central longitude corresponding to the area of loaded ds buffer: - The number of square degrees to load around the central latitude and longitude. + The number of square degrees to load around the central latitude and longitude. For reasonable loading times, set this as `0.1` or lower. 
""" - + # Suppress warnings - warnings.filterwarnings('ignore') + warnings.filterwarnings("ignore") # Update plotting functionality through rcParams - mpl.rcParams.update({'figure.autolayout': True}) - - # Define polygon bounds + mpl.rcParams.update({"figure.autolayout": True}) + + # Define polygon bounds latitude = (lat - buffer, lat + buffer) longitude = (lon - buffer, lon + buffer) @@ -150,61 +140,39 @@ def run_crophealth_app(ds, lat, lon, buffer): "properties": { "style": { "stroke": True, - "color": 'red', + "color": "red", "weight": 4, "opacity": 0.8, "fill": True, "fillColor": False, "fillOpacity": 0, "showArea": True, - "clickable": True + "clickable": True, } }, "geometry": { "type": "Polygon", "coordinates": [ [ - [ - longitude[0], - latitude[0] - ], - [ - longitude[1], - latitude[0] - ], - [ - longitude[1], - latitude[1] - ], - [ - longitude[0], - latitude[1] - ], - [ - longitude[0], - latitude[0] - ] + [longitude[0], latitude[0]], + [longitude[1], latitude[0]], + [longitude[1], latitude[1]], + [longitude[0], latitude[1]], + [longitude[0], latitude[0]], ] - ] - } + ], + }, } - + # Create a map geometry from the geom_obj dictionary # center specifies where the background map view should focus on # zoom specifies how zoomed in the background map should be - loadeddata_geometry = ogr.CreateGeometryFromJson(str(geom_obj['geometry'])) - loadeddata_center = [ - loadeddata_geometry.Centroid().GetY(), - loadeddata_geometry.Centroid().GetX() - ] + loadeddata_geometry = ogr.CreateGeometryFromJson(str(geom_obj["geometry"])) + loadeddata_center = [loadeddata_geometry.Centroid().GetY(), loadeddata_geometry.Centroid().GetX()] loadeddata_zoom = 16 # define the study area map - studyarea_map = Map( - center=loadeddata_center, - zoom=loadeddata_zoom, - basemap=basemaps.Esri.WorldImagery - ) + studyarea_map = Map(center=loadeddata_center, zoom=loadeddata_zoom, basemap=basemaps.Esri.WorldImagery) # define the drawing controls studyarea_drawctrl = DrawControl( @@ -223,75 +191,62 @@ def run_crophealth_app(ds, lat, lon, buffer): polygon_number = 0 # Define widgets to interact with - instruction = widgets.Output(layout={'border': '1px solid black'}) + instruction = widgets.Output(layout={"border": "1px solid black"}) with instruction: - print("Draw a polygon within the red box to view a plot of " - "average NDVI over time in that area.") + print("Draw a polygon within the red box to view a plot of " "average NDVI over time in that area.") - info = widgets.Output(layout={'border': '1px solid black'}) + info = widgets.Output(layout={"border": "1px solid black"}) with info: print("Plot status:") - fig_display = widgets.Output(layout=widgets.Layout( - width="50%", # proportion of horizontal space taken by plot - )) + fig_display = widgets.Output( + layout=widgets.Layout( + width="50%", # proportion of horizontal space taken by plot + ) + ) with fig_display: plt.ioff() fig, ax = plt.subplots(figsize=(8, 6)) ax.set_ylim([0, 1]) - colour_list = plt.rcParams['axes.prop_cycle'].by_key()['color'] + colour_list = plt.rcParams["axes.prop_cycle"].by_key()["color"] # Function to execute each time something is drawn on the map def handle_draw(self, action, geo_json): nonlocal polygon_number # Execute behaviour based on what the user draws - if geo_json['geometry']['type'] == 'Polygon': - + if geo_json["geometry"]["type"] == "Polygon": info.clear_output(wait=True) # wait=True reduces flicker effect - + # Save geojson polygon to io temporary file to be rasterized later jsonData = json.dumps(geo_json) binaryData = 
jsonData.encode() io = BytesIO(binaryData) io.seek(0) - + # Read the polygon as a geopandas dataframe gdf = gpd.read_file(io) gdf.crs = "EPSG:4326" # Convert the drawn geometry to pixel coordinates - xr_poly = xr_rasterize(gdf, ds.NDVI.isel(time=0), crs='EPSG:6933') + xr_poly = xr_rasterize(gdf, ds.NDVI.isel(time=0), crs="EPSG:6933") # Construct a mask to only select pixels within the drawn polygon masked_ds = ds.NDVI.where(xr_poly) - - masked_ds_mean = masked_ds.mean(dim=['x', 'y'], skipna=True) + + masked_ds_mean = masked_ds.mean(dim=["x", "y"], skipna=True) colour = colour_list[polygon_number % len(colour_list)] # Add a layer to the map to make the most recently drawn polygon # the same colour as the line on the plot studyarea_map.add_layer( - GeoJSON( - data=geo_json, - style={ - 'color': colour, - 'opacity': 1, - 'weight': 4.5, - 'fillOpacity': 0.0 - } - ) + GeoJSON(data=geo_json, style={"color": colour, "opacity": 1, "weight": 4.5, "fillOpacity": 0.0}) ) # add new data to the plot - xr.plot.plot( - masked_ds_mean, - marker='*', - color=colour, - ax=ax - ) + xr.plot.plot(masked_ds_mean, marker="*", color=colour, ax=ax) # reset titles back to custom ax.set_title("Average NDVI from Sentinel-2") @@ -302,7 +257,7 @@ def handle_draw(self, action, geo_json): fig_display.clear_output(wait=True) # wait=True reduces flicker effect with fig_display: display(fig) - + with info: print("Plot status: polygon sucessfully added to plot.") @@ -312,8 +267,7 @@ def handle_draw(self, action, geo_json): else: info.clear_output(wait=True) with info: - print("Plot status: this drawing tool is not currently " - "supported. Please use the polygon tool.") + print("Plot status: this drawing tool is not currently " "supported. Please use the polygon tool.") # call to say activate handle_draw function on draw studyarea_drawctrl.on_draw(handle_draw) @@ -331,7 +285,5 @@ def handle_draw(self, action, geo_json): # +-----------+-----------+ # | info | # +-----------------------+ - ui = widgets.VBox([instruction, - widgets.HBox([studyarea_map, fig_display]), - info]) + ui = widgets.VBox([instruction, widgets.HBox([studyarea_map, fig_display]), info]) display(ui) diff --git a/deafrica_tools/app/deacoastlines.py b/deafrica_tools/app/deacoastlines.py index 2b00a48..5b946f4 100644 --- a/deafrica_tools/app/deacoastlines.py +++ b/deafrica_tools/app/deacoastlines.py @@ -1,5 +1,5 @@ """ -Digital Earth Africa Coastline widget, which can be used to +Digital Earth Africa Coastline widget, which can be used to interactively extract shoreline data using transects. """ @@ -8,7 +8,8 @@ # Force GeoPandas to use Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. 
import os -os.environ['USE_PYGEOS'] = '0' + +os.environ["USE_PYGEOS"] = "0" import fiona import sys @@ -46,13 +47,14 @@ from deafrica_tools.coastal import get_coastlines, transect_distances from owslib.wms import WebMapService + def make_box_layout(): return Layout( # border='solid 1px black', - margin='0px 10px 10px 0px', - padding='5px 5px 5px 5px', - width='100%', - height='100%', + margin="0px 10px 10px 0px", + padding="5px 5px 5px 5px", + width="100%", + height="100%", ) @@ -65,7 +67,6 @@ def create_expanded_button(description, button_style): class transect_app(HBox): - def __init__(self): super().__init__() @@ -81,7 +82,7 @@ def __init__(self): ("Open Street Map", "open_street_map"), ] self.product = self.product_list[0][1] - self.mode_list = [('Distance', 'distance'), ('Width', 'width')] + self.mode_list = [("Distance", "distance"), ("Width", "width")] self.mode = self.mode_list[0][1] self.target = None self.action = None @@ -95,8 +96,7 @@ def __init__(self): # Create the Header widget header_title_text = "

Digital Earth Africa Coastlines shoreline transect extraction

" instruction_text = "Select parameters and draw a transect on the map to extract shoreline data. In distance mode, draw a transect line starting from land that crosses multiple shorelines.
In width mode, draw a transect line that intersects shorelines at least twice. Alternatively, upload a vector file to extract shoreline data for multiple existing transects." - self.header = deawidgets.create_html( - f"{header_title_text}

{instruction_text}

") + self.header = deawidgets.create_html(f"{header_title_text}

{instruction_text}

") self.header.layout = make_box_layout() ##################################### @@ -105,7 +105,6 @@ def __init__(self): # Define the action to take once something is drawn on the map def update_geojson(target, action, geo_json): - # Remove previously uploaded data if present self.gdf_uploaded = None fileupload_transects._counter = 0 @@ -125,7 +124,7 @@ def update_geojson(target, action, geo_json): gdf_drawn_nsidc = gdf.copy().to_crs("EPSG:6933") m2_per_km2 = 10**6 area = gdf_drawn_nsidc.envelope.area.values[0] / m2_per_km2 - polyarea_label = 'Total area of DE Africa Coastlines data to extract' + polyarea_label = "Total area of DE Africa Coastlines data to extract" polyarea_text = f"{polyarea_label}: {area:.2f} km2" # Test area size @@ -150,7 +149,7 @@ def update_geojson(target, action, geo_json): ######################################### # Create drawing tools - desired_drawtools = ['polyline'] + desired_drawtools = ["polyline"] draw_control = deawidgets.create_drawcontrol(desired_drawtools) # Load DEACoastLines WMS @@ -159,18 +158,17 @@ def update_geojson(target, action, geo_json): deacoastlines = WMSLayer( url=deacl_url, layers=deacl_layer, - format='image/png', + format="image/png", transparent=True, - attribution='DE Africa Coastlines © 2022 Digital Earth Africa') + attribution="DE Africa Coastlines © 2022 Digital Earth Africa", + ) # Begin by displaying an empty layer group, and update the group with desired WMS on interaction. self.map_layers = LayerGroup(layers=(deacoastlines,)) - self.map_layers.name = 'Map Overlays' + self.map_layers.name = "Map Overlays" # Create map widget - self.m = deawidgets.create_map(map_center=(0.5273, 25.1367), - zoom_level=3, - basemap=basemaps.Esri.WorldImagery) + self.m = deawidgets.create_map(map_center=(0.5273, 25.1367), zoom_level=3, basemap=basemaps.Esri.WorldImagery) self.m.layout = make_box_layout() # Add tools to map widget @@ -185,18 +183,13 @@ def update_geojson(target, action, geo_json): ############################ # Create parameter widgets - text_output_name = deawidgets.create_inputtext(self.output_name, - self.output_name) - checkbox_csv = deawidgets.create_checkbox(self.export_csv, - 'Distance table (.csv)') - checkbox_plot = deawidgets.create_checkbox(self.export_plot, - 'Figure (.png)') - deaoverlay_dropdown = deawidgets.create_dropdown( - self.product_list, self.product_list[0][1]) - mode_dropdown = deawidgets.create_dropdown(self.mode_list, - self.mode_list[0][1]) + text_output_name = deawidgets.create_inputtext(self.output_name, self.output_name) + checkbox_csv = deawidgets.create_checkbox(self.export_csv, "Distance table (.csv)") + checkbox_plot = deawidgets.create_checkbox(self.export_plot, "Figure (.png)") + deaoverlay_dropdown = deawidgets.create_dropdown(self.product_list, self.product_list[0][1]) + mode_dropdown = deawidgets.create_dropdown(self.mode_list, self.mode_list[0][1]) run_button = create_expanded_button("Extract shoreline data", "info") - fileupload_transects = widgets.FileUpload(accept='', multiple=True) + fileupload_transects = widgets.FileUpload(accept="", multiple=True) #################################### # UPDATE FUNCTIONS FOR EACH WIDGET # @@ -216,24 +209,28 @@ def update_geojson(target, action, geo_json): # COLLECTION OF ALL APP CONTROLS # ################################## - parameter_selection = VBox([ - HTML("Output name:"), text_output_name, - HTML( - 'Transect extraction mode:
' - ), - mode_dropdown, - HTML("
Output files:
"), - checkbox_plot, - checkbox_csv, - HTML( - "
Advanced
Upload a GeoJSON or ESRI " - "Shapefile (<5 mb) containing one or more transect lines.
"), - fileupload_transects - ]) - map_selection = VBox([ - HTML("
Map overlay:"), - deaoverlay_dropdown, - ]) + parameter_selection = VBox( + [ + HTML("Output name:"), + text_output_name, + HTML('Transect extraction mode:
'), + mode_dropdown, + HTML("
Output files:
"), + checkbox_plot, + checkbox_csv, + HTML( + "
Advanced
Upload a GeoJSON or ESRI " + "Shapefile (<5 mb) containing one or more transect lines.
" + ), + fileupload_transects, + ] + ) + map_selection = VBox( + [ + HTML("
Map overlay:"), + deaoverlay_dropdown, + ] + ) parameter_selection.layout = make_box_layout() map_selection.layout = make_box_layout() @@ -283,34 +280,26 @@ def update_geojson(target, action, geo_json): # Set the output csv def update_fileupload_transects(self, change): - # Clear any drawn data if present self.gdf_drawn = None # Save to file for uploaded_filename in change.new.keys(): with open(uploaded_filename, "wb") as output_file: - content = change.new[uploaded_filename]['content'] + content = change.new[uploaded_filename]["content"] output_file.write(content) with self.status_info: - - try: - - print('Loading vector data...', end='\r') - valid_files = [ - file for file in change.new.keys() - if file.lower().endswith(('.shp', '.geojson')) - ] + try: + print("Loading vector data...", end="\r") + valid_files = [file for file in change.new.keys() if file.lower().endswith((".shp", ".geojson"))] valid_file = valid_files[0] - transect_gdf = (gpd.read_file(valid_file).to_crs( - "EPSG:4326").explode().reset_index(drop=True)) + transect_gdf = gpd.read_file(valid_file).to_crs("EPSG:4326").explode().reset_index(drop=True) # Use ID column if it exists - if 'id' in transect_gdf: - transect_gdf = transect_gdf.set_index('id') - print(f"Uploaded '{valid_file}'; automatically labelling " - "transects using column 'id'.") + if "id" in transect_gdf: + transect_gdf = transect_gdf.set_index("id") + print(f"Uploaded '{valid_file}'; automatically labelling " "transects using column 'id'.") else: print( f"Uploaded '{valid_file}'; no 'id' column detected, " @@ -318,11 +307,7 @@ def update_fileupload_transects(self, change): ) # Create a geodata - geodata = GeoData(geo_dataframe=transect_gdf, - style={ - 'color': 'black', - 'weight': 3 - }) + geodata = GeoData(geo_dataframe=transect_gdf, style={"color": "black", "weight": 3}) # Add to map xmin, ymin, xmax, ymax = transect_gdf.total_bounds @@ -336,14 +321,16 @@ def update_fileupload_transects(self, change): print( "Cannot read uploaded files. Please ensure that data is " "in either GeoJSON or ESRI Shapefile format.", - end='\r') + end="\r", + ) self.gdf_uploaded = None except fiona.errors.DriverError: print( "Shapefile is invalid. Please ensure that all shapefile " "components (e.g. .shp, .shx, .dbf, .prj) are uploaded.", - end='\r') + end="\r", + ) self.gdf_uploaded = None # Set output name @@ -364,7 +351,6 @@ def update_mode(self, change): # Update product def update_deaoverlay(self, change): - self.product = change.new # Load DE Africa CoastLines WMS @@ -375,7 +361,8 @@ def update_deaoverlay(self, change): layers=deacl_layer, format="image/png", transparent=True, - attribution="DE Africa Coastlines © 2022 Digital Earth Africa") + attribution="DE Africa Coastlines © 2022 Digital Earth Africa", + ) if self.product == "none": self.map_layers.clear_layers() @@ -388,7 +375,6 @@ def update_deaoverlay(self, change): self.map_layers.add_layer(deacoastlines) def run_app(self, change): - # Clear progress bar and output areas before running self.status_info.clear_output() self.output_plot.clear_output() @@ -400,96 +386,90 @@ def run_app(self, change): # Load transects from either map or uploaded files if self.gdf_uploaded is not None: transect_gdf = self.gdf_uploaded - run_text = 'uploaded file' + run_text = "uploaded file" elif self.gdf_drawn is not None: transect_gdf = self.gdf_drawn transect_gdf.index = [self.output_name] - run_text = 'selected transect' + run_text = "selected transect" else: - print(f'No transect drawn or uploaded. 
Please select a transect on the map, or upload a GeoJSON or ESRI Shapefile.', - end='\r') + print( + f"No transect drawn or uploaded. Please select a transect on the map, or upload a GeoJSON or ESRI Shapefile.", + end="\r", + ) transect_gdf = None # If valid data was returned, load DEA Coastlines data if transect_gdf is not None: - # Load Coastlines data from WFS deacl_gdf = get_coastlines(bbox=transect_gdf) - + # Test that data was correctly returned if len(deacl_gdf.index) > 0: - # Dissolve by year to remove duplicates, then sort by date - deacl_gdf = deacl_gdf.dissolve(by='year', as_index=False) - deacl_gdf['year'] = deacl_gdf.year.astype(int) - deacl_gdf = deacl_gdf.sort_values('year') - deacl_gdf = deacl_gdf.set_index('year') + deacl_gdf = deacl_gdf.dissolve(by="year", as_index=False) + deacl_gdf["year"] = deacl_gdf.year.astype(int) + deacl_gdf = deacl_gdf.sort_values("year") + deacl_gdf = deacl_gdf.set_index("year") else: print( "No annual shoreline data was found near the " "supplied transect. Please draw or select a new " "transect.", - end='\r') - deacl_gdf = None + end="\r", + ) + deacl_gdf = None # If valid DEA Coastlines data returned, calculate distances if deacl_gdf is not None: - print(f'Analysing transect distances using "{self.mode}" mode...', - end='\r') + print(f'Analysing transect distances using "{self.mode}" mode...', end="\r") dist_df = transect_distances( - transect_gdf.to_crs("EPSG:6933"), - deacl_gdf.to_crs("EPSG:6933"), - mode=self.mode) + transect_gdf.to_crs("EPSG:6933"), deacl_gdf.to_crs("EPSG:6933"), mode=self.mode + ) # If valid data was produced: if dist_df.any(axis=None): - # Successful output - print(f'DE Africa Coastlines data successfully extracted for {run_text}.') + print(f"DE Africa Coastlines data successfully extracted for {run_text}.") # Export distance data if self.export_csv: - # Create folder if required and set path - out_dir = 'deacoastlines_outputs' - os.makedirs(out_dir, exist_ok=True) + out_dir = "deacoastlines_outputs" + os.makedirs(out_dir, exist_ok=True) csv_filename = f"{out_dir}/{self.output_name}.csv" - + # Export to file dist_df.to_csv(csv_filename, index_label="Transect") print(f'Distance data exported to "{csv_filename}".') # Generate plot with self.output_plot: - - fig, ax = plt.subplots(constrained_layout=True, - figsize=(15, 5.5)) + fig, ax = plt.subplots(constrained_layout=True, figsize=(15, 5.5)) dist_df.T.plot(ax=ax, linewidth=3) - ax.legend(frameon=False, ncol=3, title='Transect') + ax.legend(frameon=False, ncol=3, title="Transect") ax.set_title(f"Digital Earth Africa Coastlines transect extraction - {self.output_name}") ax.set_ylabel(f"Along-transect {self.mode} (m)") ax.set_xlim(dist_df.T.index[0], dist_df.T.index[-1]) # Hide the right and top spines - ax.spines['right'].set_visible(False) - ax.spines['top'].set_visible(False) + ax.spines["right"].set_visible(False) + ax.spines["top"].set_visible(False) # Only show ticks on the left and bottom spines - ax.yaxis.set_ticks_position('left') - ax.xaxis.set_ticks_position('bottom') + ax.yaxis.set_ticks_position("left") + ax.xaxis.set_ticks_position("bottom") plt.show() # Export plot with self.status_info: if self.export_plot: - # Create folder if required and set path - out_dir = 'deacoastlines_outputs' - os.makedirs(out_dir, exist_ok=True) + out_dir = "deacoastlines_outputs" + os.makedirs(out_dir, exist_ok=True) figure_filename = f"{out_dir}/{self.output_name}.png" - + # Export to file fig.savefig(figure_filename) print(f'Figure exported to "{figure_filename}".') @@ -502,4 
+482,5 @@ def run_app(self, change): " - the transect intersects with shorelines more than once in 'distance' mode\n" " - the transect intersects with shorelines only once in 'width' mode\n\n" "Please draw or upload a new transect.", - end='\r') \ No newline at end of file + end="\r", + ) diff --git a/deafrica_tools/app/forestmonitoring.py b/deafrica_tools/app/forestmonitoring.py index 4e99243..28d9219 100644 --- a/deafrica_tools/app/forestmonitoring.py +++ b/deafrica_tools/app/forestmonitoring.py @@ -1,13 +1,14 @@ -''' +""" Functions for loading and interacting with Global Forest Change data in the forest monitoring notebook, inside the Real_world_examples folder. -''' +""" # Import required packages # Force GeoPandas to use Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. import os -os.environ['USE_PYGEOS'] = '0' + +os.environ["USE_PYGEOS"] = "0" import json import warnings @@ -43,6 +44,7 @@ warnings.filterwarnings("ignore") warnings.simplefilter("ignore") + def make_box_layout(): """ Defines a number of CSS properties that impact how a widget is laid out. @@ -54,6 +56,7 @@ def make_box_layout(): height="100%", ) + def create_expanded_button(description, button_style): """ Defines a number of CSS properties to create a button to handle mouse clicks. @@ -64,6 +67,7 @@ def create_expanded_button(description, button_style): layout=Layout(width="auto", height="auto"), ) + def load_gfclayer(gdf_drawn, gfclayer): """ Loads the selected Global Forest Change layer for the @@ -83,12 +87,8 @@ def load_gfclayer(gdf_drawn, gfclayer): gdf_drawn.bounds.maxx.item(), ) - lats = np.arange( - np.floor(min_lat / 10) * 10, np.ceil(max_lat / 10) * 10, 10 - ).astype(int) - lons = np.arange( - np.floor(min_lon / 10) * 10, np.ceil(max_lon / 10) * 10, 10 - ).astype(int) + lats = np.arange(np.floor(min_lat / 10) * 10, np.ceil(max_lat / 10) * 10, 10).astype(int) + lons = np.arange(np.floor(min_lon / 10) * 10, np.ceil(max_lon / 10) * 10, 10).astype(int) coord_list = [] for lat in lats: @@ -106,7 +106,9 @@ def load_gfclayer(gdf_drawn, gfclayer): coord_list.append(coord_str) # Load each Global Forest Change tile covering the area of interest. - base_url = f"https://storage.googleapis.com/earthenginepartners-hansen/GFC-2021-v1.9/Hansen_GFC-2021-v1.9_{gfclayer}_" + base_url = ( + f"https://storage.googleapis.com/earthenginepartners-hansen/GFC-2021-v1.9/Hansen_GFC-2021-v1.9_{gfclayer}_" + ) dask_chunks = dict(x=2048, y=2048) tile_list = [] @@ -154,6 +156,7 @@ def load_gfclayer(gdf_drawn, gfclayer): client.close() return ds + def load_all_gfclayers(gdf_drawn): gfclayers = ["treecover2000", "gain", "lossyear"] @@ -165,6 +168,7 @@ def load_all_gfclayers(gdf_drawn): dataset = xr.merge(dataset_list) return dataset + def get_gfclayer_treecover2000(gfclayer_ds, gfclayer="treecover2000"): """ Preprocess the Global Forest change "treecover2020" layer. @@ -185,9 +189,7 @@ def get_gfclayer_treecover2000(gfclayer_ds, gfclayer="treecover2000"): counts = np.unique(ds_masked, return_counts=True) # Remove the counts for pixels with the value np.nan. 
index = np.argwhere(np.isnan(counts[0])) - counts_dict = dict( - zip(np.delete(counts[0], index), np.delete(counts[1], index)) - ) + counts_dict = dict(zip(np.delete(counts[0], index), np.delete(counts[1], index))) # Reproject the dataset to EPSG:6933 which uses metres ds_reprojected = ds_masked.rio.reproject("EPSG:6933") @@ -200,19 +202,21 @@ def get_gfclayer_treecover2000(gfclayer_ds, gfclayer="treecover2000"): df = pd.DataFrame( data={ "Year": ["2000"], - "Tree Cover in km$^2$": np.fromiter(counts_dict.values(), dtype=float) - * per_pixel_area, + "Tree Cover in km$^2$": np.fromiter(counts_dict.values(), dtype=float) * per_pixel_area, } ) # Get the total area. - print_statement = f'Total Forest Cover in {df["Year"].item()}: {round(df["Tree Cover in km$^2$"].item(), 4)} km2' + print_statement = ( + f'Total Forest Cover in {df["Year"].item()}: {round(df["Tree Cover in km$^2$"].item(), 4)} km2' + ) # File name to use when exporting results. file_name = f"forest_cover_in_2000" return ds, df, print_statement, file_name + def get_gfclayer_gain(gfclayer_ds, gfclayer="gain"): """ Preprocess the Global Forest Change "gain" layer. @@ -229,9 +233,7 @@ def get_gfclayer_gain(gfclayer_ds, gfclayer="gain"): counts = np.unique(ds, return_counts=True) # Remove the counts for pixels with the value np.nan. index = np.argwhere(np.isnan(counts[0])) - counts_dict = dict( - zip(np.delete(counts[0], index), np.delete(counts[1], index)) - ) + counts_dict = dict(zip(np.delete(counts[0], index), np.delete(counts[1], index))) # Reproject the dataset to EPSG:6933 which uses metres. ds_reprojected = ds.rio.reproject("EPSG:6933") @@ -244,21 +246,21 @@ def get_gfclayer_gain(gfclayer_ds, gfclayer="gain"): df = pd.DataFrame( data={ "Year": ["2000-2012"], - "Forest Cover Gain in km$^2$": np.fromiter( - counts_dict.values(), dtype=float - ) - * per_pixel_area, + "Forest Cover Gain in km$^2$": np.fromiter(counts_dict.values(), dtype=float) * per_pixel_area, } ) # Get the total area. - print_statement = f'Total Forest Cover Gain {df["Year"].item()}: {round(df["Forest Cover Gain in km$^2$"].item(), 4)} km2' + print_statement = ( + f'Total Forest Cover Gain {df["Year"].item()}: {round(df["Forest Cover Gain in km$^2$"].item(), 4)} km2' + ) # File name to use when exporting results. file_name = f"forest_cover_gain_from_2000_to_2012" return ds, df, print_statement, file_name + def get_gfclayer_lossyear(gfclayer_ds, start_year, end_year, gfclayer="lossyear"): """ Preprocess the Global Forest Change "lossyear" layer. @@ -281,9 +283,7 @@ def get_gfclayer_lossyear(gfclayer_ds, start_year, end_year, gfclayer="lossyear" counts = np.unique(ds, return_counts=True) # Remove the counts for pixels with the value np.nan. 
index = np.argwhere(np.isnan(counts[0])) - counts_dict = dict( - zip(np.delete(counts[0], index), np.delete(counts[1], index)) - ) + counts_dict = dict(zip(np.delete(counts[0], index), np.delete(counts[1], index))) # Reproject the dataset to EPSG:6933 which uses metres ds_reprojected = ds.rio.reproject("EPSG:6933") @@ -297,10 +297,7 @@ def get_gfclayer_lossyear(gfclayer_ds, start_year, end_year, gfclayer="lossyear" df = pd.DataFrame( { "Year": 2000 + np.fromiter(counts_dict.keys(), dtype=int), - "Forest Cover Loss in km$^2$": np.fromiter( - counts_dict.values(), dtype=float - ) - * per_pixel_area, + "Forest Cover Loss in km$^2$": np.fromiter(counts_dict.values(), dtype=float) * per_pixel_area, } ) @@ -312,6 +309,7 @@ def get_gfclayer_lossyear(gfclayer_ds, start_year, end_year, gfclayer="lossyear" return ds, df, print_statement, file_name + def plot_gfclayer_treecover2000(gfclayer_ds, gfclayer="treecover2000"): """ Plot the Global Forest Change "treecover2000" layer. @@ -338,9 +336,7 @@ def plot_gfclayer_treecover2000(gfclayer_ds, gfclayer="treecover2000"): im = ds.plot(cmap="Greens", add_colorbar=False, ax=ax) # Add a colorbar to the plot. cbar = plt.colorbar(mappable=im) - cbar.set_label( - "Percentage tree canopy cover for year 2000", labelpad=-65, y=0.25 - ) + cbar.set_label("Percentage tree canopy cover for year 2000", labelpad=-65, y=0.25) # Add a title to the plot. plt.title(title) # Save the plot. @@ -350,6 +346,7 @@ def plot_gfclayer_treecover2000(gfclayer_ds, gfclayer="treecover2000"): print(print_statement) + def plot_gfclayer_gain(gfclayer_ds, gfclayer="gain"): """ Plot the Global Forest Change "gain" layer. @@ -392,15 +389,13 @@ def plot_gfclayer_gain(gfclayer_ds, gfclayer="gain"): print(print_statement) + def plot_gfclayer_lossyear(gfclayer_ds, start_year, end_year, gfclayer="lossyear"): """ Plot the Global Forest change "lossyear" layer. """ - if ( - get_gfclayer_lossyear(gfclayer_ds, start_year, end_year, gfclayer="lossyear") - is None - ): + if get_gfclayer_lossyear(gfclayer_ds, start_year, end_year, gfclayer="lossyear") is None: print( f"No Global Forest Change {gfclayer} layer data found in the selected area. Please select a new polygon over an area with data." ) @@ -454,9 +449,7 @@ def plot_gfclayer_lossyear(gfclayer_ds, start_year, end_year, gfclayer="lossyear norm = mcolors.BoundaryNorm(boundaries=color_levels, ncolors=cmap.N) # Plot the dataset. - fig, (ax1, ax2) = plt.subplots( - nrows, ncols, figsize=(figure_width, figure_length) - ) + fig, (ax1, ax2) = plt.subplots(nrows, ncols, figsize=(figure_width, figure_length)) im = ds.plot(ax=ax1, cmap=cmap, norm=norm, add_colorbar=False) # Add a title to the subplot. ax1.set_title(title) @@ -479,6 +472,7 @@ def plot_gfclayer_lossyear(gfclayer_ds, start_year, end_year, gfclayer="lossyear print(print_statement) + def plot_gfclayer_all(gfclayer_ds, start_year, end_year): """ Plot all the Global Forest Change Layers loaded. @@ -498,12 +492,7 @@ def plot_gfclayer_all(gfclayer_ds, start_year, end_year): # Define the figure. fig, ax = plt.subplots(figsize=(figure_width, figure_length)) - if ( - get_gfclayer_treecover2000( - gfclayer_ds[["treecover2000"]], gfclayer="treecover2000" - ) - is None - ): + if get_gfclayer_treecover2000(gfclayer_ds[["treecover2000"]], gfclayer="treecover2000") is None: print( f"No Global Forest Change 'treecover2000' layer data found in the selected area. Please select a new polygon over an area with data." 
) @@ -513,18 +502,12 @@ def plot_gfclayer_all(gfclayer_ds, start_year, end_year): df_treecover2000, print_statement_treecover2000, file_name_treecover2000, - ) = get_gfclayer_treecover2000( - gfclayer_ds[["treecover2000"]], gfclayer="treecover2000" - ) + ) = get_gfclayer_treecover2000(gfclayer_ds[["treecover2000"]], gfclayer="treecover2000") # Plot the treecover2000 layer as the background layer. - background = ds_treecover2000.plot( - cmap=treecover_color, add_colorbar=False, ax=ax - ) + background = ds_treecover2000.plot(cmap=treecover_color, add_colorbar=False, ax=ax) # Add a colorbar to the treecover2000 plot. cbar = plt.colorbar(mappable=background) - cbar.set_label( - "Percentage tree canopy cover for year 2000", labelpad=-65, y=0.25 - ) + cbar.set_label("Percentage tree canopy cover for year 2000", labelpad=-65, y=0.25) # Export the dataframe as a csv. df_treecover2000.to_csv(f"{file_name_treecover2000}.csv", index=False) # Add the print statement to the list. @@ -541,9 +524,7 @@ def plot_gfclayer_all(gfclayer_ds, start_year, end_year): gfclayer_ds[["gain"]], gfclayer="gain" ) # Plot the gain layer. - ds_gain.plot( - ax=ax, cmap=mcolors.ListedColormap([gain_color]), add_colorbar=False - ) + ds_gain.plot(ax=ax, cmap=mcolors.ListedColormap([gain_color]), add_colorbar=False) # Export the dataframe as a csv. df_gain.to_csv(f"{file_name_gain}.csv", index=False) # Add the print statement to the list. @@ -551,12 +532,7 @@ def plot_gfclayer_all(gfclayer_ds, start_year, end_year): # Add the file name to the list. filename_list.append(f'"{file_name_gain}.csv"') - if ( - get_gfclayer_lossyear( - gfclayer_ds[["lossyear"]], start_year, end_year, gfclayer="lossyear" - ) - is None - ): + if get_gfclayer_lossyear(gfclayer_ds[["lossyear"]], start_year, end_year, gfclayer="lossyear") is None: print( f"No Global Forest Change 'lossyear' layer data found in the selected area. Please select a new polygon over an area with data." ) @@ -566,13 +542,9 @@ def plot_gfclayer_all(gfclayer_ds, start_year, end_year): df_lossyear, print_statement_lossyear, file_name_lossyear, - ) = get_gfclayer_lossyear( - gfclayer_ds[["lossyear"]], start_year, end_year, gfclayer="lossyear" - ) + ) = get_gfclayer_lossyear(gfclayer_ds[["lossyear"]], start_year, end_year, gfclayer="lossyear") # Plot the lossyear layer. - ds_lossyear.plot( - ax=ax, cmap=mcolors.ListedColormap([lossyear_color]), add_colorbar=False - ) + ds_lossyear.plot(ax=ax, cmap=mcolors.ListedColormap([lossyear_color]), add_colorbar=False) # Export the dataframe as a csv. df_lossyear.to_csv(f"{file_name_lossyear}.csv", index=False) # Add the print statement to the list. @@ -597,7 +569,8 @@ def plot_gfclayer_all(gfclayer_ds, start_year, end_year): plt.show() print(*print_statement_list, sep="\n") print(*filename_list, sep="\n\t") - print(f'\nFigure saved as "{figure_fn}"'); + print(f'\nFigure saved as "{figure_fn}"') + def plot_gfclayer(gfclayer_ds, start_year, end_year, gfclayer): if gfclayer == "treecover2000": @@ -609,6 +582,7 @@ def plot_gfclayer(gfclayer_ds, start_year, end_year, gfclayer): elif gfclayer == "alllayers": plot_gfclayer_all(gfclayer_ds, start_year, end_year) + def update_map_layers(self): """ Updates map widget to add new basemap when selected @@ -622,6 +596,7 @@ def update_map_layers(self): # Add the selected basemap to the layer Group. self.map_layers.add_layer(self.basemap) + class forest_monitoring_app(HBox): def __init__(self): super().__init__() @@ -634,9 +609,7 @@ def __init__(self): header_title_text = "

Digital Earth Africa Forest Change"
instruction_text = """Select the desired Global Forest Change layer, then zoom in and draw a polygon to select an area for which to plot the selected Global Forest Change layer. Alternatively, upload a vector file of the area of interest.
""" - self.header = deawidgets.create_html( - value=f"{header_title_text}{instruction_text}" - ) + self.header = deawidgets.create_html(value=f"{header_title_text}{instruction_text}") self.header.layout = make_box_layout() ############################ @@ -653,16 +626,12 @@ def __init__(self): # Set the default basemap to be used for the map widget / initial value for the widget. self.basemap = self.basemap_list[0][1] # Dropdown selection widget. - dropdown_basemap = deawidgets.create_dropdown( - options=self.basemap_list, value=self.basemap - ) + dropdown_basemap = deawidgets.create_dropdown(options=self.basemap_list, value=self.basemap) # Register the update function to run when a new value is selected # on the dropdown_basemap widget. dropdown_basemap.observe(self.update_basemap, "value") # Text to accompany the dropdown selection widget. - basemap_selection_html = deawidgets.create_html( - value=f"
Map overlay:" - ) + basemap_selection_html = deawidgets.create_html(value=f"
Map overlay:") # Combine the basemap_selection_html text and the dropdown_basemap widget in a single container. basemap_selection = VBox([basemap_selection_html, dropdown_basemap]) @@ -696,27 +665,19 @@ def __init__(self): # Register the update function to run when a new value is selected on the slider. timerange_selection_slide.observe(self.update_timerange, "value") # Text to accompany the timerange_selection widget. - timerange_selection_html = deawidgets.create_html( - value=f"
Forest Cover Loss Time Range:" - ) + timerange_selection_html = deawidgets.create_html(value=f"
Forest Cover Loss Time Range:") # Combine the timerange_selection_text and the timerange_selection_slide in a single container. - timerange_selection = VBox( - [timerange_selection_html, timerange_selection_slide] - ) + timerange_selection = VBox([timerange_selection_html, timerange_selection_slide]) # Set the initial parameter for the GFC layer dataset. self.gfclayer_ds = None # Dropdown selection widget. - dropdown_gfclayer = deawidgets.create_dropdown( - options=self.gfclayers_list, value=self.gfclayer - ) + dropdown_gfclayer = deawidgets.create_dropdown(options=self.gfclayers_list, value=self.gfclayer) # Register the update function to run when a new value is selected # on the dropdown_gfclayer widget. dropdown_gfclayer.observe(self.update_gfclayer, "value") # Text to accompany the dropdown selection widget. - gfclayer_selection_html = deawidgets.create_html( - value=f"
Global Forest Change Layer:" - ) + gfclayer_selection_html = deawidgets.create_html(value=f"
Global Forest Change Layer:") # Combine the gfclayer_selection_html text and the dropdown_gfclayer widget in a single container. gfclayer_selection = VBox([gfclayer_selection_html, dropdown_gfclayer]) @@ -738,37 +699,28 @@ def __init__(self): checkbox_max_size.observe(self.update_checkbox_max_size, "value") # # Combine the checkbox_max_size_html text and the checkbox_max_size widget in a single container. enable_max_size = VBox([checkbox_max_size_html, checkbox_max_size]) - - # Add widget to enable uploading a geojson or ESRI shapefile. + + # Add widget to enable uploading a geojson or ESRI shapefile. self.gdf_uploaded = None fileupload_aoi = widgets.FileUpload(accept="", multiple=True) - # Register the update function to be called for the file upload. + # Register the update function to be called for the file upload. fileupload_aoi.observe(self.update_fileupload_aoi, "value") - fileupload_html = deawidgets.create_html(value=f"""
Advanced Upload a GeoJSON or ESRI Shapefile (<5 mb) containing a single area of interest.""")
+ fileupload_html = deawidgets.create_html(
+ value=f"""Advanced Upload a GeoJSON or ESRI Shapefile (<5 mb) containing a single area of interest.
""" + ) fileupload = VBox([fileupload_html, fileupload_aoi]) - - + ## Put the app controls widgets into a single container. parameter_selection = VBox( - [ - basemap_selection, - gfclayer_selection, - timerange_selection, - enable_max_size, - fileupload - ] + [basemap_selection, gfclayer_selection, timerange_selection, enable_max_size, fileupload] ) parameter_selection.layout = make_box_layout() ## Button to click to run the app. - run_button = create_expanded_button( - description="Generate plot", button_style="info" - ) + run_button = create_expanded_button(description="Generate plot", button_style="info") # Register the update function to be called when the run_button button # is clicked. run_button.on_click(self.run_app) - - ########################### # WIDGETS FOR APP OUTPUTS # @@ -810,7 +762,6 @@ def __init__(self): ##################################### def handle_draw(target, action, geo_json): - """ Defines the action to take once something is drawn on the map widget. @@ -818,7 +769,7 @@ def handle_draw(target, action, geo_json): # Remove previously uploaded data if present self.gdf_uploaded = None fileupload_aoi._counter = 0 - + self.target = target self.action = action @@ -838,40 +789,26 @@ def handle_draw(target, action, geo_json): m2_per_ha = 10000 area = gdf_drawn_nsidc.area.values[0] / m2_per_ha - polyarea_label = ( - f"Total area of Global Forest Change {self.gfclayer} layer to load" - ) + polyarea_label = f"Total area of Global Forest Change {self.gfclayer} layer to load" polyarea_text = f"{polyarea_label}: {area:.2f} ha" # Test the size of the polygon drawn. if self.max_size: confirmation_text = """ (Overriding maximum size limit; use with caution as may lead to memory issues)""" - self.header.value = ( - header_title_text - + instruction_text - + polyarea_text - + confirmation_text - ) + self.header.value = header_title_text + instruction_text + polyarea_text + confirmation_text self.gdf_drawn = gdf elif area <= 50000: confirmation_text = """ (Area to extract falls within recommended 50000 ha limit)""" - self.header.value = ( - header_title_text - + instruction_text - + polyarea_text - + confirmation_text - ) + self.header.value = header_title_text + instruction_text + polyarea_text + confirmation_text self.gdf_drawn = gdf else: warning_text = """ (Area to extract is too large, please select an area less than 50000 )""" - self.header.value = ( - header_title_text + instruction_text + polyarea_text + warning_text - ) + self.header.value = header_title_text + instruction_text + polyarea_text + warning_text self.gdf_drawn = None # Register the handler for draw events. @@ -886,9 +823,7 @@ def handle_draw(target, action, geo_json): grid_columns = 11 grid_height = "1500px" grid_width = "auto" - grid = GridspecLayout( - grid_rows, grid_columns, height=grid_height, width=grid_width - ) + grid = GridspecLayout(grid_rows, grid_columns, height=grid_height, width=grid_width) # Place app widgets and components in app layout. # [rows, columns] @@ -932,37 +867,26 @@ def update_checkbox_max_size(self, change): checkbox_max_size CheckBox is checked. 
""" self.max_size = change.new - - def update_fileupload_aoi(self, change): + def update_fileupload_aoi(self, change): # Clear any drawn data if present self.gdf_drawn = None - + # Save to file for uploaded_filename in change.new.keys(): with open(uploaded_filename, "wb") as output_file: - content = change.new[uploaded_filename]['content'] + content = change.new[uploaded_filename]["content"] output_file.write(content) with self.status_info: - - try: - - print('Loading vector data...', end='\r') - valid_files = [ - file for file in change.new.keys() - if file.lower().endswith(('.shp', '.geojson')) - ] + try: + print("Loading vector data...", end="\r") + valid_files = [file for file in change.new.keys() if file.lower().endswith((".shp", ".geojson"))] valid_file = valid_files[0] - aoi_gdf = (gpd.read_file(valid_file).to_crs( - "EPSG:4326").explode().reset_index(drop=True)) + aoi_gdf = gpd.read_file(valid_file).to_crs("EPSG:4326").explode().reset_index(drop=True) # Create a geodata - geodata = GeoData(geo_dataframe=aoi_gdf, - style={ - 'color': 'black', - 'weight': 3 - }) + geodata = GeoData(geo_dataframe=aoi_gdf, style={"color": "black", "weight": 3}) # Add to map xmin, ymin, xmax, ymax = aoi_gdf.total_bounds @@ -976,22 +900,23 @@ def update_fileupload_aoi(self, change): print( "Cannot read uploaded files. Please ensure that data is " "in either GeoJSON or ESRI Shapefile format.", - end='\r') + end="\r", + ) self.gdf_uploaded = None except fiona.errors.DriverError: print( "Shapefile is invalid. Please ensure that all shapefile " "components (e.g. .shp, .shx, .dbf, .prj) are uploaded.", - end='\r') + end="\r", + ) self.gdf_uploaded = None def run_app(self, change): - # Clear progress bar and output areas before running. self.status_info.clear_output() self.output_plot.clear_output() - + with self.status_info: # Load the area of interest from the map or uploaded files. if self.gdf_uploaded is not None: @@ -999,13 +924,14 @@ def run_app(self, change): elif self.gdf_drawn is not None: aoi_gdf = self.gdf_drawn else: - print(f'No valid polygon drawn on the map or uploaded. Please draw a valid a transect on the map, or upload a GeoJSON or ESRI Shapefile.', - end='\r') + print( + f"No valid polygon drawn on the map or uploaded. Please draw a valid a transect on the map, or upload a GeoJSON or ESRI Shapefile.", + end="\r", + ) aoi_gdf = None # If valid area of interest data returned. Load the selected Global Forest Change data. if aoi_gdf is not None: - if self.gfclayer_ds is None: if self.gfclayer != "alllayers": self.gfclayer_ds = load_gfclayer(gdf_drawn=aoi_gdf, gfclayer=self.gfclayer) @@ -1013,14 +939,18 @@ def run_app(self, change): self.gfclayer_ds = load_all_gfclayers(gdf_drawn=aoi_gdf) else: print("Using previously loaded data") - + # Plot the selected Global Forest Change layer. if self.gfclayer_ds is not None: with self.output_plot: - plot_gfclayer(gfclayer_ds=self.gfclayer_ds, - start_year=self.start_year, - end_year=self.end_year, - gfclayer=self.gfclayer) + plot_gfclayer( + gfclayer_ds=self.gfclayer_ds, + start_year=self.start_year, + end_year=self.end_year, + gfclayer=self.gfclayer, + ) else: with self.status_info: - print(f"No Global Forest Change {self.gfclayer} layer data found in the selected area. Please select a new polygon over an area with data.") \ No newline at end of file + print( + f"No Global Forest Change {self.gfclayer} layer data found in the selected area. Please select a new polygon over an area with data." 
+ ) diff --git a/deafrica_tools/app/geomedian.py b/deafrica_tools/app/geomedian.py index 4c7f45e..84c5870 100644 --- a/deafrica_tools/app/geomedian.py +++ b/deafrica_tools/app/geomedian.py @@ -1,9 +1,9 @@ """ Geomedian widget: generates an interactive visualisation of -the geomedian summary statistic. +the geomedian summary statistic. """ -# Load modules +# Load modules import ipywidgets as widgets import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D @@ -11,116 +11,290 @@ import xarray as xr from odc.algo import xr_geomedian + def run_app(): - """ - An interactive app that allows users to visualise the difference between the median and geomedian time-series summary statistics. By modifying the red-green-blue values of three timesteps for a given pixel, the user changes the output summary statistics. - + An interactive app that allows users to visualise the difference between the median and geomedian time-series summary statistics. By modifying the red-green-blue values of three timesteps for a given pixel, the user changes the output summary statistics. + This allows a visual representation of the difference through the output values, RGB colour, as well as showing values plotted as a vector on a 3-dimensional space. - + Last modified: December 2021 """ - + # Define the red-green-blue sliders for timestep 1 - p1r = widgets.IntSlider(description='Red', max=255, value=58) - p1g = widgets.IntSlider(description='Green', max=255, value=153) - p1b = widgets.IntSlider(description='Blue', max=255, value=68) + p1r = widgets.IntSlider(description="Red", max=255, value=58) + p1g = widgets.IntSlider(description="Green", max=255, value=153) + p1b = widgets.IntSlider(description="Blue", max=255, value=68) # Define the red-green-blue sliders for timestep 2 - p2r = widgets.IntSlider(description='Red', max=255, value=208) - p2g = widgets.IntSlider(description='Green', max=255, value=221) - p2b = widgets.IntSlider(description='Blue', max=255, value=203) + p2r = widgets.IntSlider(description="Red", max=255, value=208) + p2g = widgets.IntSlider(description="Green", max=255, value=221) + p2b = widgets.IntSlider(description="Blue", max=255, value=203) # Define the red-green-blue sliders for timestep 3 - p3r = widgets.IntSlider(description='Red', max=255, value=202) - p3g = widgets.IntSlider(description='Green', max=255, value=82) - p3b = widgets.IntSlider(description='Blue', max=255, value=33) + p3r = widgets.IntSlider(description="Red", max=255, value=202) + p3g = widgets.IntSlider(description="Green", max=255, value=82) + p3b = widgets.IntSlider(description="Blue", max=255, value=33) # Define the median calculation for the timesteps def f(p1r, p1g, p1b, p2r, p2g, p2b, p3r, p3g, p3b): - print('Red Median = {}'.format(np.median([p1r, p2r, p3r]))) - print('Green Median = {}'.format(np.median([p1g, p2g, p3g]))) - print('Blue Median = {}'.format(np.median([p1b, p2b, p3b]))) + print("Red Median = {}".format(np.median([p1r, p2r, p3r]))) + print("Green Median = {}".format(np.median([p1g, p2g, p3g]))) + print("Blue Median = {}".format(np.median([p1b, p2b, p3b]))) # Define the geomedian calculation for the timesteps def g(p1r, p1g, p1b, p2r, p2g, p2b, p3r, p3g, p3b): - print('Red Geomedian = {:.2f}'.format(xr_geomedian(xr.Dataset({"red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), 
np.float32(p3b)]]])})).red.values.ravel()[0])) - print('Green Geomedian = {:.2f}'.format(xr_geomedian(xr.Dataset({"red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]])})).green.values.ravel()[0])) - print('Blue Geomedian = {:.2f}'.format(xr_geomedian(xr.Dataset({"red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]])})).blue.values.ravel()[0])) + print( + "Red Geomedian = {:.2f}".format( + xr_geomedian( + xr.Dataset( + { + "red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), + "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), + "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]]), + } + ) + ).red.values.ravel()[0] + ) + ) + print( + "Green Geomedian = {:.2f}".format( + xr_geomedian( + xr.Dataset( + { + "red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), + "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), + "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]]), + } + ) + ).green.values.ravel()[0] + ) + ) + print( + "Blue Geomedian = {:.2f}".format( + xr_geomedian( + xr.Dataset( + { + "red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), + "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), + "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]]), + } + ) + ).blue.values.ravel()[0] + ) + ) # Define the Timestep 1 box colour def h(p1r, p1g, p1b): - fig1, axes1 = plt.subplots(figsize=(2,2)) + fig1, axes1 = plt.subplots(figsize=(2, 2)) fig1 = plt.imshow([[(p1r, p1g, p1b)]]) - axes1.set_title('Timestep 1') - axes1.axis('off') + axes1.set_title("Timestep 1") + axes1.axis("off") plt.show(fig1) # Define the Timestep 2 box colour - def hh(p2r, p2g, p2b): - fig2, axes2 = plt.subplots(figsize=(2,2)) + def hh(p2r, p2g, p2b): + fig2, axes2 = plt.subplots(figsize=(2, 2)) fig2 = plt.imshow([[(p2r, p2g, p2b)]]) - axes2.set_title('Timestep 2') - axes2.axis('off') + axes2.set_title("Timestep 2") + axes2.axis("off") plt.show(fig2) # Define the Timestep 3 box colour - def hhh(p3r, p3g, p3b): - fig3, axes3 = plt.subplots(figsize=(2,2)) + def hhh(p3r, p3g, p3b): + fig3, axes3 = plt.subplots(figsize=(2, 2)) fig3 = plt.imshow([[(p3r, p3g, p3b)]]) - axes3.set_title('Timestep 3') - axes3.axis('off') + axes3.set_title("Timestep 3") + axes3.axis("off") plt.show(fig3) # Define the Median RGB colour box def i(p1r, p1g, p1b, p2r, p2g, p2b, p3r, p3g, p3b): - fig4, axes4 = plt.subplots(figsize=(3,3)) - fig4 = plt.imshow([[(int(np.median([p1r, p2r, p3r])), int(np.median([p1g, p2g, p3g])), int(np.median([p1b, p2b, p3b])))]]) - axes4.set_title('Median RGB - All timesteps') - axes4.axis('off') + fig4, axes4 = plt.subplots(figsize=(3, 3)) + fig4 = plt.imshow( + [[(int(np.median([p1r, p2r, p3r])), int(np.median([p1g, p2g, p3g])), int(np.median([p1b, p2b, p3b])))]] + ) + axes4.set_title("Median RGB - All timesteps") + axes4.axis("off") plt.show(fig4) # Define the Geomedian RGB colour box def ii(p1r, p1g, p1b, p2r, p2g, p2b, 
p3r, p3g, p3b): - fig5, axes5 = plt.subplots(figsize=(3,3)) - fig5 = plt.imshow([[(int(xr_geomedian(xr.Dataset({"red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]])})).red.values.ravel()[0]), int(xr_geomedian(xr.Dataset({"red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]])})).green.values.ravel()[0]), int(xr_geomedian(xr.Dataset({"red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]])})).blue.values.ravel()[0]))]]) - axes5.set_title('Geomedian RGB - All timesteps') - axes5.axis('off') + fig5, axes5 = plt.subplots(figsize=(3, 3)) + fig5 = plt.imshow( + [ + [ + ( + int( + xr_geomedian( + xr.Dataset( + { + "red": ( + ("x", "y", "time"), + [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]], + ), + "green": ( + ("x", "y", "time"), + [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]], + ), + "blue": ( + ("x", "y", "time"), + [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]], + ), + } + ) + ).red.values.ravel()[0] + ), + int( + xr_geomedian( + xr.Dataset( + { + "red": ( + ("x", "y", "time"), + [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]], + ), + "green": ( + ("x", "y", "time"), + [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]], + ), + "blue": ( + ("x", "y", "time"), + [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]], + ), + } + ) + ).green.values.ravel()[0] + ), + int( + xr_geomedian( + xr.Dataset( + { + "red": ( + ("x", "y", "time"), + [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]], + ), + "green": ( + ("x", "y", "time"), + [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]], + ), + "blue": ( + ("x", "y", "time"), + [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]], + ), + } + ) + ).blue.values.ravel()[0] + ), + ) + ] + ] + ) + axes5.set_title("Geomedian RGB - All timesteps") + axes5.axis("off") plt.show(fig5) - # Define 3-D axis to display vectors on + # Define 3-D axis to display vectors on def j(p1r, p1g, p1b, p2r, p2g, p2b, p3r, p3g, p3b): fig6 = plt.figure() - axes6 = fig6.add_subplot(111, projection='3d') - x = [p1r, p2r, p3r, int(np.median([p1r, p2r, p3r])), int(xr_geomedian(xr.Dataset({"red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]])})).red.values.ravel()[0])] - y = [p1g, p2g, p3g, int(np.median([p1g, p2g, p3g])), int(xr_geomedian(xr.Dataset({"red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]])})).green.values.ravel()[0])] - z = [p1b, p2b, p3b, int(np.median([p1b, p2b, p3b])), int(xr_geomedian(xr.Dataset({"red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), "green": (("x", "y", 
"time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]])})).blue.values.ravel()[0])] - labels = [' 1', ' 2', ' 3', ' median', ' geomedian'] - axes6.scatter(x, y, z, c=['black','black','black','r', 'blue'], marker='o') - axes6.set_xlabel('Red') - axes6.set_ylabel('Green') - axes6.set_zlabel('Blue') + axes6 = fig6.add_subplot(111, projection="3d") + x = [ + p1r, + p2r, + p3r, + int(np.median([p1r, p2r, p3r])), + int( + xr_geomedian( + xr.Dataset( + { + "red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), + "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), + "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]]), + } + ) + ).red.values.ravel()[0] + ), + ] + y = [ + p1g, + p2g, + p3g, + int(np.median([p1g, p2g, p3g])), + int( + xr_geomedian( + xr.Dataset( + { + "red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), + "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), + "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]]), + } + ) + ).green.values.ravel()[0] + ), + ] + z = [ + p1b, + p2b, + p3b, + int(np.median([p1b, p2b, p3b])), + int( + xr_geomedian( + xr.Dataset( + { + "red": (("x", "y", "time"), [[[np.float32(p1r), np.float32(p2r), np.float32(p3r)]]]), + "green": (("x", "y", "time"), [[[np.float32(p1g), np.float32(p2g), np.float32(p3g)]]]), + "blue": (("x", "y", "time"), [[[np.float32(p1b), np.float32(p2b), np.float32(p3b)]]]), + } + ) + ).blue.values.ravel()[0] + ), + ] + labels = [" 1", " 2", " 3", " median", " geomedian"] + axes6.scatter(x, y, z, c=["black", "black", "black", "r", "blue"], marker="o") + axes6.set_xlabel("Red") + axes6.set_ylabel("Green") + axes6.set_zlabel("Blue") axes6.set_xlim3d(0, 255) axes6.set_ylim3d(0, 255) axes6.set_zlim3d(0, 255) for ax, ay, az, label in zip(x, y, z, labels): axes6.text(ax, ay, az, label) - plt.title('Each band represents a dimension.') + plt.title("Each band represents a dimension.") plt.show() # Define outputs - outf = widgets.interactive_output(f, {'p1r': p1r, 'p2r': p2r,'p3r': p3r, 'p1g': p1g, 'p2g': p2g,'p3g': p3g, 'p1b': p1b, 'p2b': p2b,'p3b': p3b}) - outg = widgets.interactive_output(g, {'p1r': p1r, 'p2r': p2r,'p3r': p3r, 'p1g': p1g, 'p2g': p2g,'p3g': p3g, 'p1b': p1b, 'p2b': p2b,'p3b': p3b}) + outf = widgets.interactive_output( + f, {"p1r": p1r, "p2r": p2r, "p3r": p3r, "p1g": p1g, "p2g": p2g, "p3g": p3g, "p1b": p1b, "p2b": p2b, "p3b": p3b} + ) + outg = widgets.interactive_output( + g, {"p1r": p1r, "p2r": p2r, "p3r": p3r, "p1g": p1g, "p2g": p2g, "p3g": p3g, "p1b": p1b, "p2b": p2b, "p3b": p3b} + ) + + outh = widgets.interactive_output(h, {"p1r": p1r, "p1g": p1g, "p1b": p1b}) + outhh = widgets.interactive_output(hh, {"p2r": p2r, "p2g": p2g, "p2b": p2b}) + outhhh = widgets.interactive_output(hhh, {"p3r": p3r, "p3g": p3g, "p3b": p3b}) - outh = widgets.interactive_output(h, {'p1r': p1r, 'p1g': p1g, 'p1b': p1b}) - outhh = widgets.interactive_output(hh, {'p2r': p2r, 'p2g': p2g, 'p2b': p2b}) - outhhh = widgets.interactive_output(hhh, {'p3r': p3r, 'p3g': p3g, 'p3b': p3b}) + outi = widgets.interactive_output( + i, {"p1r": p1r, "p2r": p2r, "p3r": p3r, "p1g": p1g, "p2g": p2g, "p3g": p3g, "p1b": p1b, "p2b": p2b, "p3b": p3b} + ) + outii = widgets.interactive_output( + ii, {"p1r": p1r, "p2r": p2r, "p3r": p3r, "p1g": p1g, "p2g": p2g, "p3g": p3g, "p1b": p1b, 
"p2b": p2b, "p3b": p3b} + ) - outi = widgets.interactive_output(i, {'p1r': p1r, 'p2r': p2r,'p3r': p3r, 'p1g': p1g, 'p2g': p2g,'p3g': p3g, 'p1b': p1b, 'p2b': p2b,'p3b': p3b}) - outii = widgets.interactive_output(ii, {'p1r': p1r, 'p2r': p2r,'p3r': p3r, 'p1g': p1g, 'p2g': p2g,'p3g': p3g, 'p1b': p1b, 'p2b': p2b,'p3b': p3b}) + outj = widgets.interactive_output( + j, {"p1r": p1r, "p2r": p2r, "p3r": p3r, "p1g": p1g, "p2g": p2g, "p3g": p3g, "p1b": p1b, "p2b": p2b, "p3b": p3b} + ) - outj = widgets.interactive_output(j, {'p1r': p1r, 'p2r': p2r,'p3r': p3r, 'p1g': p1g, 'p2g': p2g,'p3g': p3g, 'p1b': p1b, 'p2b': p2b,'p3b': p3b}) + app_output = widgets.HBox( + [ + widgets.VBox( + [ + widgets.HBox([outh, widgets.VBox([p1r, p1g, p1b])]), + widgets.HBox([outhh, widgets.VBox([p2r, p2g, p2b])]), + widgets.HBox([outhhh, widgets.VBox([p3r, p3g, p3b])]), + ] + ), + widgets.VBox([widgets.HBox([widgets.VBox([outf, outi]), widgets.VBox([outg, outii])]), outj]), + ] + ) - app_output = widgets.HBox([widgets.VBox([widgets.HBox([outh, widgets.VBox([ p1r, p1g, p1b])]), widgets.HBox([outhh, widgets.VBox([p2r, p2g, p2b])]), widgets.HBox([outhhh, widgets.VBox([ p3r, p3g, p3b])])]), widgets.VBox([widgets.HBox([widgets.VBox([outf, outi]), widgets.VBox([outg, outii])]), outj])]) - - return app_output \ No newline at end of file + return app_output diff --git a/deafrica_tools/app/imageexport.py b/deafrica_tools/app/imageexport.py index 8b0cf1c..dffef44 100644 --- a/deafrica_tools/app/imageexport.py +++ b/deafrica_tools/app/imageexport.py @@ -11,16 +11,14 @@ from datacube.utils.geometry import CRS from datacube.utils import masking from skimage import exposure -from ipyleaflet import (WMSLayer, basemaps, basemap_to_tiles) +from ipyleaflet import WMSLayer, basemaps, basemap_to_tiles from traitlets import Unicode from deafrica_tools.spatial import reverse_geocode from deafrica_tools.dask import create_local_dask_cluster -def select_region_app(date, - satellites, - size_limit=10000): +def select_region_app(date, satellites, size_limit=10000): """ An interactive app that allows the user to select a region from a map using imagery from Sentinel-2 and Landsat. The output of this @@ -35,7 +33,7 @@ def select_region_app(date, The exact date used to plot imagery on the interactive map (e.g. ``date='1988-01-01'``). satellites : str - The satellite data to plot on the interactive map. The + The satellite data to plot on the interactive map. 
The following options are supported: ``'Landsat-9'``: data from the Landsat 9 satellite @@ -66,61 +64,67 @@ def select_region_app(date, # Load DEA WMS class TimeWMSLayer(WMSLayer): - time = Unicode('').tag(sync=True, o=True) + time = Unicode("").tag(sync=True, o=True) # WMS layers wms_params = { - 'Landsat-9': 'ls9_sr', - 'Landsat-8': 'ls8_sr', - 'Landsat-7': 'ls7_sr', - 'Landsat-5': 'ls5_sr', - 'Sentinel-2': 's2_l2a', - 'Sentinel-2 geomedian': 'gm_s2_annual' + "Landsat-9": "ls9_sr", + "Landsat-8": "ls8_sr", + "Landsat-7": "ls7_sr", + "Landsat-5": "ls5_sr", + "Sentinel-2": "s2_l2a", + "Sentinel-2 geomedian": "gm_s2_annual", } - time_wms = TimeWMSLayer(url='https://ows.digitalearth.africa/', - layers=wms_params[satellites], - time=date, - format='image/png', - transparent=True, - attribution='Digital Earth Africa') + time_wms = TimeWMSLayer( + url="https://ows.digitalearth.africa/", + layers=wms_params[satellites], + time=date, + format="image/png", + transparent=True, + attribution="Digital Earth Africa", + ) # Plot interactive map to select area basemap = basemap_to_tiles(basemaps.OpenStreetMap.Mapnik) - geopolygon = select_on_a_map(height='1000px', - layers=( - basemap, - time_wms, - ), - center=(4, 20), - zoom=4) + geopolygon = select_on_a_map( + height="1000px", + layers=( + basemap, + time_wms, + ), + center=(4, 20), + zoom=4, + ) # Test size of selected area - area = geopolygon.to_crs(crs=CRS('epsg:6933')).area / 1000000 + area = geopolygon.to_crs(crs=CRS("epsg:6933")).area / 1000000 if area > size_limit: - print(f'Warning: Your selected area is {area:.00f} sq km. ' - f'Please select an area of less than {size_limit} sq km.' - f'\nTo select a smaller area, re-run the cell ' - f'above and draw a new polygon.') + print( + f"Warning: Your selected area is {area:.00f} sq km. " + f"Please select an area of less than {size_limit} sq km." + f"\nTo select a smaller area, re-run the cell " + f"above and draw a new polygon." + ) else: - return {'geopolygon': geopolygon, - 'date': date, - 'satellites': satellites} - - -def export_image_app(geopolygon, - date, - satellites, - style='True colour', - resolution=None, - vmin=0, - vmax=2000, - percentile_stretch=None, - power=None, - image_proc_funcs=None, - output_format="jpg", - standardise_name=False): + return {"geopolygon": geopolygon, "date": date, "satellites": satellites} + + +def export_image_app( + geopolygon, + date, + satellites, + style="True colour", + resolution=None, + vmin=0, + vmax=2000, + percentile_stretch=None, + power=None, + image_proc_funcs=None, + output_format="jpg", + standardise_name=False, +): """ Exports Digital Earth Africa satellite data as an image file based on the extent and time period selected using @@ -146,7 +150,7 @@ def export_image_app(geopolygon, The exact date used to extract imagery (e.g. `date='1988-01-01'`). satellites : str - The satellite data to be used to extract imagery. The + The satellite data to be used to extract imagery. 
The following options are supported: ``'Landsat-9'``: data from the Landsat 9 satellite @@ -206,53 +210,35 @@ def export_image_app(geopolygon, ########################### sat_params = { - 'Landsat-9': { - 'products': ['ls9_sr'], - 'resolution': [-30, 30], - 'styles': { - 'True colour': ['red', 'green', 'blue'], - 'False colour': ['swir_1', 'nir', 'green'] - } + "Landsat-9": { + "products": ["ls9_sr"], + "resolution": [-30, 30], + "styles": {"True colour": ["red", "green", "blue"], "False colour": ["swir_1", "nir", "green"]}, }, - 'Landsat-8': { - 'products': ['ls8_sr'], - 'resolution': [-30, 30], - 'styles': { - 'True colour': ['red', 'green', 'blue'], - 'False colour': ['swir_1', 'nir', 'green'] - } + "Landsat-8": { + "products": ["ls8_sr"], + "resolution": [-30, 30], + "styles": {"True colour": ["red", "green", "blue"], "False colour": ["swir_1", "nir", "green"]}, }, - 'Landsat-7': { - 'products': ['ls7_sr'], - 'resolution': [-30, 30], - 'styles': { - 'True colour': ['red', 'green', 'blue'], - 'False colour': ['swir_1', 'nir', 'green'] - } + "Landsat-7": { + "products": ["ls7_sr"], + "resolution": [-30, 30], + "styles": {"True colour": ["red", "green", "blue"], "False colour": ["swir_1", "nir", "green"]}, }, - 'Landsat-5': { - 'products': ['ls5_sr'], - 'resolution': [-30, 30], - 'styles': { - 'True colour': ['red', 'green', 'blue'], - 'False colour': ['swir_1', 'nir', 'green'] - } + "Landsat-5": { + "products": ["ls5_sr"], + "resolution": [-30, 30], + "styles": {"True colour": ["red", "green", "blue"], "False colour": ["swir_1", "nir", "green"]}, }, - 'Sentinel-2': { - 'products': ['s2_l2a'], - 'resolution': [-10, 10], - 'styles': { - 'True colour': ['red', 'green', 'blue'], - 'False colour': ['swir_2', 'nir_1', 'green'] - } + "Sentinel-2": { + "products": ["s2_l2a"], + "resolution": [-10, 10], + "styles": {"True colour": ["red", "green", "blue"], "False colour": ["swir_2", "nir_1", "green"]}, }, - 'Sentinel-2 geomedian': { - 'products': ['gm_s2_annual'], - 'resolution': [-10, 10], - 'styles': { - 'True colour': ['red', 'green', 'blue'], - 'False colour': ['swir_2', 'nir_1', 'green'] - } + "Sentinel-2 geomedian": { + "products": ["gm_s2_annual"], + "resolution": [-10, 10], + "styles": {"True colour": ["red", "green", "blue"], "False colour": ["swir_2", "nir_1", "green"]}, }, } @@ -261,7 +247,7 @@ def export_image_app(geopolygon, ############# # Connect to datacube database - dc = datacube.Datacube(app='Exporting_satellite_images') + dc = datacube.Datacube(app="Exporting_satellite_images") # Configure local dask cluster client = create_local_dask_cluster(return_client=True) @@ -269,48 +255,36 @@ def export_image_app(geopolygon, # Create query after adjusting interval time to UTC by # adding a UTC offset of -10 hours. 
start_date = np.datetime64(date) - query_params = { - 'time': (str(start_date)), - 'geopolygon': geopolygon - } + query_params = {"time": (str(start_date)), "geopolygon": geopolygon} # Find matching datasets - dss = [ - dc.find_datasets(product=i, **query_params) - for i in sat_params[satellites]['products'] - ] + dss = [dc.find_datasets(product=i, **query_params) for i in sat_params[satellites]["products"]] dss = list(itertools.chain.from_iterable(dss)) # Get CRS and sensor crs = str(dss[0].crs) - if satellites == 'Sentinel-2 geomedian': + if satellites == "Sentinel-2 geomedian": sensor = satellites else: - sensor = dss[0].metadata_doc['properties']['eo:platform'].capitalize() - sensor = sensor[0:-1].replace('_', '-') + sensor[-1].capitalize() + sensor = dss[0].metadata_doc["properties"]["eo:platform"].capitalize() + sensor = sensor[0:-1].replace("_", "-") + sensor[-1].capitalize() # Use resolution if provided, otherwise use default if resolution: - sat_params[satellites]['resolution'] = resolution + sat_params[satellites]["resolution"] = resolution - load_params = { - 'output_crs': crs, - 'resolution': sat_params[satellites]['resolution'], - 'resampling': 'bilinear' - } + load_params = {"output_crs": crs, "resolution": sat_params[satellites]["resolution"], "resampling": "bilinear"} # Load data from datasets - ds = dc.load(datasets=dss, - measurements=sat_params[satellites]['styles'][style], - group_by='solar_day', - dask_chunks={ - 'time': 1, - 'x': 3000, - 'y': 3000 - }, - **load_params, - **query_params) + ds = dc.load( + datasets=dss, + measurements=sat_params[satellites]["styles"][style], + group_by="solar_day", + dask_chunks={"time": 1, "x": 3000, "y": 3000}, + **load_params, + **query_params, + ) ds = masking.mask_invalid_data(ds) rgb_array = ds.isel(time=0).to_array().values @@ -322,18 +296,13 @@ def export_image_app(geopolygon, # Create unique file name centre_coords = geopolygon.centroid.coords[0][::-1] site = reverse_geocode(coords=centre_coords) - fname = (f"{sensor} - {date} - {site} - {style}, " - f"{load_params['resolution'][1]} m resolution.{output_format}") + fname = f"{sensor} - {date} - {site} - {style}, " f"{load_params['resolution'][1]} m resolution.{output_format}" # Remove spaces and commas if requested if standardise_name: - fname = fname.replace(' - ', '_').replace(', ', - '-').replace(' ', - '-').lower() + fname = fname.replace(" - ", "_").replace(", ", "-").replace(" ", "-").lower() - print( - f'\nExporting image to {fname}.\nThis may take several minutes to complete...' 
- ) + print(f"\nExporting image to {fname}.\nThis may take several minutes to complete...") # Convert to numpy array rgb_array = np.transpose(rgb_array, axes=[1, 2, 0]) @@ -350,14 +319,12 @@ def export_image_app(geopolygon, vmin, vmax = vmin**power, vmax**power # Rescale/stretch imagery between vmin and vmax - rgb_rescaled = exposure.rescale_intensity(rgb_array.astype(float), - in_range=(vmin, vmax), - out_range=(0.0, 1.0)) + rgb_rescaled = exposure.rescale_intensity(rgb_array.astype(float), in_range=(vmin, vmax), out_range=(0.0, 1.0)) # Apply image processing funcs if image_proc_funcs: for i, func in enumerate(image_proc_funcs): - print(f'Applying custom function {i + 1}') + print(f"Applying custom function {i + 1}") rgb_rescaled = func(rgb_rescaled) # Plot RGB @@ -369,4 +336,4 @@ def export_image_app(geopolygon, # Close dask client client.shutdown() - print('Finished exporting image.') + print("Finished exporting image.") diff --git a/deafrica_tools/app/wetlandsinsighttool.py b/deafrica_tools/app/wetlandsinsighttool.py index e8b6ce5..56e127f 100644 --- a/deafrica_tools/app/wetlandsinsighttool.py +++ b/deafrica_tools/app/wetlandsinsighttool.py @@ -8,7 +8,8 @@ # Force GeoPandas to use Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. import os -os.environ['USE_PYGEOS'] = '0' + +os.environ["USE_PYGEOS"] = "0" import datacube import warnings @@ -47,13 +48,13 @@ def make_box_layout(): - return Layout( - #border='solid 1px black', - margin='0px 10px 10px 0px', - padding='5px 5px 5px 5px', - width='100%', - height='100%', - ) + return Layout( + # border='solid 1px black', + margin="0px 10px 10px 0px", + padding="5px 5px 5px 5px", + width="100%", + height="100%", + ) def create_expanded_button(description, button_style): @@ -67,7 +68,7 @@ def create_expanded_button(description, button_style): class wit_app(HBox): def __init__(self, lang=None): super().__init__() - + deafrica_tools.set_lang(lang) ########################################################## @@ -84,29 +85,27 @@ def __init__(self, lang=None): (_("ESRI World Imagery"), "esri_world_imagery"), (_("Sentinel-2 Geomedian"), "gm_s2_annual"), (_("Water Observations from Space"), "wofs_ls_summary_annual"), - ] self.product = self.product_list[0][1] self.product_year = "2020-01-01" self.target = None self.action = None self.gdf_drawn = None - + ########################################################## # HEADER FOR APP # - + # Create the Header widget header_title_text = _("Wetlands Insight Tool") instruction_text = _("Select parameters and AOI") self.header = deawidgets.create_html(f"

{header_title_text}{instruction_text}
") self.header.layout = make_box_layout() - + ########################################################## # HANDLER FUNCTION FOR DRAW CONTROL # - + # Define the action to take once something is drawn on the map def update_geojson(target, action, geo_json): - self.action = action json_data = json.dumps(geo_json) @@ -119,16 +118,18 @@ def update_geojson(target, action, geo_json): self.gdf_drawn = gdf gdf_drawn_epsg6933 = gdf.copy().to_crs("EPSG:6933") - m2_per_km2 = 10 ** 6 + m2_per_km2 = 10**6 area = gdf_drawn_epsg6933.area.values[0] / m2_per_km2 - polyarea_label = _('Total polygon area') + polyarea_label = _("Total polygon area") polyarea_text = f"

{polyarea_label}: {area:.2f} km2" if area <= 3000:
- confirmation_text = '' + _('Area falls within recommended limit') + ''
+ confirmation_text = '' + _("Area falls within recommended limit") + ""
self.header.value = header_title_text + polyarea_text + confirmation_text else:
- warning_text = '' + _('Area is too large, please update your polygon') + ''
+ warning_text = ( + '' + _("Area is too large, please update your polygon") + "
" + ) self.header.value = header_title_text + polyarea_text + warning_text ########################################################## @@ -143,22 +144,22 @@ def update_geojson(target, action, geo_json): # MAP WIDGET, DRAWING TOOLS, WMS LAYERS # # Create drawing tools - desired_drawtools = ['rectangle', 'polygon'] + desired_drawtools = ["rectangle", "polygon"] draw_control = deawidgets.create_drawcontrol(desired_drawtools) - + # Begin by displaying an empty layer group, and update the group with desired WMS on interaction. self.deafrica_layers = LayerGroup(layers=()) - self.deafrica_layers.name = _('Map Overlays') + self.deafrica_layers.name = _("Map Overlays") # Create map widget self.m = deawidgets.create_map() - + self.m.layout = make_box_layout() - + # Add tools to map widget self.m.add_control(draw_control) self.m.add_layer(self.deafrica_layers) - + # Store current basemap for future use self.basemap = self.m.basemap @@ -177,7 +178,7 @@ def update_geojson(target, action, geo_json): ########################################################## # COLLECTION OF ALL APP CONTROLS # - + parameter_selection = VBox( [ HTML("" + _("Map Overlay:") + ""), @@ -208,7 +209,7 @@ def update_geojson(target, action, geo_json): grid[0, :] = self.header grid[1:6, 0:2] = parameter_selection grid[6, 0:2] = run_button - + # Dask and Progress info grid[1, 7:] = self.dask_client grid[2:7, 7:] = self.progress_bar @@ -265,7 +266,6 @@ def update_outputplot(self, change): # Update product def update_deaoverlay(self, change): - self.product = change.new if self.product == "none": @@ -280,7 +280,6 @@ def update_deaoverlay(self, change): self.deafrica_layers.add_layer(layer) def run_app(self, change): - # Clear progress bar and output areas before running self.dask_client.clear_output() self.progress_bar.clear_output() @@ -291,26 +290,24 @@ def run_app(self, change): # Configure local dask cluster with self.dask_client: - client = create_local_dask_cluster( - return_client=True, display_client=True - ) + client = create_local_dask_cluster(return_client=True, display_client=True) # Set any defaults TCW_threshold = -0.035 dask_chunks = dict(x=1000, y=1000, time=1) - - #check resampling freq - if self.resamplingfreq == 'None': + + # check resampling freq + if self.resamplingfreq == "None": rsf = None else: rsf = self.resamplingfreq - - self.progress_header.value = f"

"+_("Progress")+"

" - + + self.progress_header.value = f"

" + _("Progress") + "

" + # run wetlands polygon drill with self.progress_bar: -# with ProgressBar(): - warnings.filterwarnings("ignore") + # with ProgressBar(): + warnings.filterwarnings("ignore") try: df = WIT_drill( gdf=self.gdf_drawn, @@ -326,7 +323,7 @@ def run_app(self, change): print(_("WIT complete")) except AttributeError: print(_("No polygon selected")) - + # close down the dask client client.shutdown() @@ -335,9 +332,8 @@ def run_app(self, change): df.to_csv(self.out_csv, index_label="Datetime") # ---Plotting------------------------------ - - with self.wit_plot: + with self.wit_plot: fontsize = 17 plt.rcParams.update({"font.size": fontsize}) # set up color palette diff --git a/deafrica_tools/app/widgetconstructors.py b/deafrica_tools/app/widgetconstructors.py index 6881712..7787ba3 100644 --- a/deafrica_tools/app/widgetconstructors.py +++ b/deafrica_tools/app/widgetconstructors.py @@ -10,41 +10,36 @@ from traitlets import Unicode -def create_datepicker(description='', value=None, layout={'width': '85%'}): - ''' +def create_datepicker(description="", value=None, layout={"width": "85%"}): + """ Create a DatePicker widget - + Last modified: July 2022 - + Parameters ---------- description : string descirption label to attach layout : dictionary any layout commands for the widget - + Returns ------- date_picker : ipywidgets.widgets.widget_date.DatePicker - - ''' - date_picker = widgets.DatePicker( - description=description, - layout=layout, - disabled=False, - value=value - ) + """ + + date_picker = widgets.DatePicker(description=description, layout=layout, disabled=False, value=value) return date_picker -def create_inputtext(value, placeholder, description="", layout={'width': '85%'}): - ''' +def create_inputtext(value, placeholder, description="", layout={"width": "85%"}): + """ Create a Text widget - + Last modified: October 2021 - + Parameters ---------- value : string @@ -55,30 +50,26 @@ def create_inputtext(value, placeholder, description="", layout={'width': '85%'} descirption label to attach layout : dictionary any layout commands for the widget - + Returns ------- input_text : ipywidgets.widgets.widget_string.Text - - ''' + + """ input_text = widgets.Text( - value=value, - placeholder=placeholder, - description=description, - layout=layout, - disabled=False + value=value, placeholder=placeholder, description=description, layout=layout, disabled=False ) return input_text -def create_boundedfloattext(value, min_val, max_val, step_val, description="", layout={'width': '85%'}): - ''' +def create_boundedfloattext(value, min_val, max_val, step_val, description="", layout={"width": "85%"}): + """ Create a BoundedFloatText widget - + Last modified: October 2021 - + Parameters ---------- value : float @@ -93,12 +84,12 @@ def create_boundedfloattext(value, min_val, max_val, step_val, description="", l descirption label to attach layout : dictionary any layout commands for the widget - + Returns ------- float_text : ipywidgets.widgets.widget_float.BoundedFloatText - - ''' + + """ float_text = widgets.BoundedFloatText( value=value, @@ -113,12 +104,12 @@ def create_boundedfloattext(value, min_val, max_val, step_val, description="", l return float_text -def create_dropdown(options, value, description="", layout={'width': '85%'}): - ''' +def create_dropdown(options, value, description="", layout={"width": "85%"}): + """ Create a Dropdown widget - + Last modified: October 2021 - + Parameters ---------- options : list @@ -129,12 +120,12 @@ def create_dropdown(options, value, description="", layout={'width': 
'85%'}): descirption label to attach layout : dictionary any layout commands for the widget - + Returns ------- dropdown : ipywidgets.widgets.widget_selection.Dropdown - - ''' + + """ dropdown = widgets.Dropdown( options=options, @@ -148,21 +139,21 @@ def create_dropdown(options, value, description="", layout={'width': '85%'}): def create_html(value): - ''' + """ Create a HTML widget - + Last modified: October 2021 - + Parameters ---------- value : string HTML text to display - + Returns ------- html : ipywidgets.widgets.widget_string.HTML - - ''' + + """ html = widgets.HTML( value=value, @@ -171,12 +162,14 @@ def create_html(value): return html -def create_map(map_center=(4, 20), zoom_level=3, basemap=leaflet.basemaps.OpenStreetMap.Mapnik, basemap_name='Open Street Map'): - ''' +def create_map( + map_center=(4, 20), zoom_level=3, basemap=leaflet.basemaps.OpenStreetMap.Mapnik, basemap_name="Open Street Map" +): + """ Create an interactive ipyleaflet map - + Last modified: October 2021 - + Parameters ---------- map_center : tuple @@ -190,28 +183,28 @@ def create_map(map_center=(4, 20), zoom_level=3, basemap=leaflet.basemaps.OpenSt Defaults to Open Street Map (basemaps.OpenStreetMap.Mapnik) basemap_name : string Layer name for the basemap - + Returns ------- m : ipyleaflet.leaflet.Map interactive ipyleaflet map - - ''' - + + """ + basemap_tiles = leaflet.basemap_to_tiles(basemap) basemap_tiles.name = basemap_name m = leaflet.Map(center=map_center, zoom=zoom_level, basemap=basemap_tiles, scroll_wheel_zoom=True) - + return m def create_dea_wms_layer(product, date): - ''' + """ Create a Digital Earth Africa WMS layer to add to a map - + Last modified: October 2021 - + Parameters ---------- product : string @@ -219,13 +212,12 @@ def create_dea_wms_layer(product, date): (e.g. 
'gm_s2_annual') date : string (yyyy-mm-dd format) The date to load the product for - + Returns ------- time_wms : ipyleaflet WMS layer - - ''' - + + """ # Load DEA WMS class TimeWMSLayer(leaflet.WMSLayer): @@ -244,7 +236,7 @@ class TimeWMSLayer(leaflet.WMSLayer): def create_drawcontrol( - draw_controls = ['rectangle', 'polygon', 'circle', 'polyline', 'marker', 'circlemarker'], + draw_controls=["rectangle", "polygon", "circle", "polyline", "marker", "circlemarker"], rectangle_options={}, polygon_options={}, circle_options={}, @@ -252,11 +244,11 @@ def create_drawcontrol( marker_options={}, circlemarker_options={}, ): - ''' + """ Create a draw control widget to add to ipyleaflet maps - + Last modified: October 2021 - + Parameters ---------- draw_controls : list @@ -280,14 +272,14 @@ def create_drawcontrol( circlemarker_options : dict Options to customise the appearence of the relevant shape User can supply, or leave blank to get default DE Africa appearence - - + + Returns ------- draw_control : ipyleaflet.leaflet.DrawControl - - ''' - + + """ + # Set defualt DE Africa styling options for polygons default_shapeoptions = { "color": "#FFFFFF", @@ -295,36 +287,33 @@ def create_drawcontrol( "fillColor": "#336699", "fillOpacity": 0.4, } - default_drawerror = { - "color": "#FF6633", - "message": "Drawing error, clear all and try again" - } - - # Set draw control appearence to DE Africa defaults + default_drawerror = {"color": "#FF6633", "message": "Drawing error, clear all and try again"} + + # Set draw control appearence to DE Africa defaults # Do this if user has requested a control, but has not provided a corresponding options dict - - if ('rectangle' in draw_controls) and (not rectangle_options): + + if ("rectangle" in draw_controls) and (not rectangle_options): rectangle_options = {"shapeOptions": default_shapeoptions} - - if ('polygon' in draw_controls) and (not polygon_options): + + if ("polygon" in draw_controls) and (not polygon_options): polygon_options = { "shapeOptions": default_shapeoptions, "drawError": default_drawerror, - "allowIntersection": False, - } - - if ('circle' in draw_controls) and (not circle_options): + "allowIntersection": False, + } + + if ("circle" in draw_controls) and (not circle_options): circle_options = {"shapeOptions": default_shapeoptions} - - if ('polyline' in draw_controls) and (not polyline_options): + + if ("polyline" in draw_controls) and (not polyline_options): polyline_options = {"shapeOptions": default_shapeoptions} - - if ('marker' in draw_controls) and (not marker_options): - marker_options = {'shapeOptions': {'opacity': 1.0}} - - if ('circlemarker' in draw_controls) and (not circlemarker_options): + + if ("marker" in draw_controls) and (not marker_options): + marker_options = {"shapeOptions": {"opacity": 1.0}} + + if ("circlemarker" in draw_controls) and (not circlemarker_options): circlemarker_options = {"shapeOptions": default_shapeoptions} - + # Instantiate draw control and add options draw_control = leaflet.DrawControl() draw_control.rectangle = rectangle_options @@ -337,12 +326,12 @@ def create_drawcontrol( return draw_control -def create_checkbox(value, description="", layout={'width': '85%'}): - ''' +def create_checkbox(value, description="", layout={"width": "85%"}): + """ Create a Checkbox widget - + Last modified: July 2022 - + Parameters ---------- value : string @@ -351,17 +340,13 @@ def create_checkbox(value, description="", layout={'width': '85%'}): description label to attach layout : dictionary any layout commands for the widget - + 
Returns ------- dropdown : ipywidgets.widgets.widget_selection.Dropdown - - ''' - checklist = widgets.Checkbox(value=value, - description=description, - layout=layout, - disabled=False, - indent=False) + """ + + checklist = widgets.Checkbox(value=value, description=description, layout=layout, disabled=False, indent=False) - return checklist \ No newline at end of file + return checklist diff --git a/deafrica_tools/areaofinterest.py b/deafrica_tools/areaofinterest.py index 865039d..8f3dff5 100644 --- a/deafrica_tools/areaofinterest.py +++ b/deafrica_tools/areaofinterest.py @@ -1,5 +1,5 @@ """ -Function for defining an area of interest using either a point and buffer or a shapefile file. +Function for defining an area of interest using either a point and buffer or a shapefile file. """ # Import required packages @@ -7,16 +7,18 @@ # Force GeoPandas to use Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. import os -os.environ['USE_PYGEOS'] = '0' + +os.environ["USE_PYGEOS"] = "0" import geopandas as gpd from shapely.geometry import box from geojson import Feature, Point, FeatureCollection + def define_area(lat=None, lon=None, buffer=None, shapefile_path=None): - ''' + """ Define an area of interest using either a point and buffer or a shapefile. - + Parameters: ----------- lat : float, optional @@ -27,28 +29,30 @@ def define_area(lat=None, lon=None, buffer=None, shapefile_path=None): The buffer around the center point, in degrees. shapefile_path : str, optional The path to a shapefile defining the area of interest. - + Returns: -------- feature_collection : dict A GeoJSON feature collection representing the area of interest. - ''' + """ # Define area using point and buffer if lat is not None and lon is not None and buffer is not None: lat_range = (lat - buffer, lat + buffer) lon_range = (lon - buffer, lon + buffer) box_geom = box(min(lon_range), min(lat_range), max(lon_range), max(lat_range)) - aoi = gpd.GeoDataFrame(geometry=[box_geom], crs='EPSG:4326') - + aoi = gpd.GeoDataFrame(geometry=[box_geom], crs="EPSG:4326") + # Define area using shapefile elif shapefile_path is not None: aoi = gpd.read_file(shapefile_path).to_crs("EPSG:4326") # If neither option is provided, raise an error else: raise ValueError("Either lat/lon/buffer or shapefile_path must be provided.") - + # Convert the GeoDataFrame to a GeoJSON FeatureCollection - features = [Feature(geometry=row["geometry"], properties=row.drop("geometry").to_dict()) for _, row in aoi.iterrows()] + features = [ + Feature(geometry=row["geometry"], properties=row.drop("geometry").to_dict()) for _, row in aoi.iterrows() + ] feature_collection = FeatureCollection(features) - - return feature_collection \ No newline at end of file + + return feature_collection diff --git a/deafrica_tools/bandindices.py b/deafrica_tools/bandindices.py index 45bc604..7970365 100644 --- a/deafrica_tools/bandindices.py +++ b/deafrica_tools/bandindices.py @@ -7,6 +7,7 @@ import warnings import numpy as np + # Define custom functions def calculate_indices( ds, @@ -31,11 +32,11 @@ def calculate_indices( A two-dimensional or multi-dimensional array with containing the spectral bands required to calculate the index. These bands are used as inputs to calculate the selected water index. 
- + index : str or list of strs A string giving the name of the index to calculate or a list of strings giving the names of the indices to calculate: - + * ``'ASI'`` (Artificial Surface Index, Yongquan Zhao & Zhe Zhu 2022) * ``'AWEI_ns'`` (Automated Water Extraction Index, no shadows, Feyisa 2014) * ``'AWEI_sh'`` (Automated Water Extraction Index, shadows, Feyisa 2014) @@ -66,16 +67,16 @@ def calculate_indices( * ``'TCG'`` (Tasseled Cap Greeness, Crist 1985) * ``'TCW'`` (Tasseled Cap Wetness, Crist 1985) * ``'WI'`` (Water Index, Fisher 2016) - + collection : str - Deprecated in version 0.1.7. Use `satellite_mission` instead. - - Valid options are: + Deprecated in version 0.1.7. Use `satellite_mission` instead. + + Valid options are: * ``'c2'`` (for USGS Landsat Collection 2) If 'c2', then `satellite_mission='ls'`. * ``'s2'`` (for Sentinel-2) If 's2', then `satellite_mission='s2'`. - + satellite_mission : str An string that tells the function which satellite mission's data is being used to calculate the index. This is necessary because @@ -86,14 +87,14 @@ def calculate_indices( * ``'ls'`` (for USGS Landsat) * ``'s2'`` (for Copernicus Sentinel-2) - + custom_varname : str, optional By default, the original dataset will be returned with a new index variable named after `index` (e.g. 'NDVI'). To specify a custom name instead, you can supply e.g. `custom_varname='custom_name'`. Defaults to None, which uses `index` to name the variable. - + normalise : bool, optional Some coefficient-based indices (e.g. ``'WI'``, ``'BAEI'``, ``'AWEI_ns'``, ``'AWEI_sh'``, ``'TCW'``, ``'TCG'``, ``'TCB'``, @@ -102,11 +103,11 @@ def calculate_indices( scaled between 0.0 and 1.0 prior to calculating the index. Setting `normalise=True` first scales values to a 0.0-1.0 range by dividing by 10000.0. Defaults to True. - + drop : bool, optional Provides the option to drop the original input data, thus saving space. If `drop=True`, returns only the index and its values. - + deep_copy: bool, optional If `deep_copy=False`, calculate_indices will modify the original array, adding bands to the input dataset and not removing them. @@ -140,22 +141,13 @@ def calculate_indices( # Normalised Difference Vegation Index, Rouse 1973 "NDVI": lambda ds: (ds.nir - ds.red) / (ds.nir + ds.red), # Enhanced Vegetation Index, Huete 2002 - "EVI": lambda ds: ( - 2.5 * ((ds.nir - ds.red) / (ds.nir + 6 * ds.red - 7.5 * ds.blue + 1)) - ), + "EVI": lambda ds: (2.5 * ((ds.nir - ds.red) / (ds.nir + 6 * ds.red - 7.5 * ds.blue + 1))), # Leaf Area Index, Boegh 2002 - "LAI": lambda ds: ( - 3.618 - * ((2.5 * (ds.nir - ds.red)) / (ds.nir + (6 * ds.red) - (7.5 * ds.blue) + 1)) - - 0.118 - ), + "LAI": lambda ds: (3.618 * ((2.5 * (ds.nir - ds.red)) / (ds.nir + (6 * ds.red) - (7.5 * ds.blue) + 1)) - 0.118), # Soil Adjusted Vegetation Index, Huete 1988 "SAVI": lambda ds: ((1.5 * (ds.nir - ds.red)) / (ds.nir + ds.red + 0.5)), # Mod. Soil Adjusted Vegetation Index, Qi et al. 
1994 - "MSAVI": lambda ds: ( - (2 * ds.nir + 1 - ((2 * ds.nir + 1) ** 2 - 8 * (ds.nir - ds.red)) ** 0.5) - / 2 - ), + "MSAVI": lambda ds: ((2 * ds.nir + 1 - ((2 * ds.nir + 1) ** 2 - 8 * (ds.nir - ds.red)) ** 0.5) / 2), # Normalised Difference Moisture Index, Gao 1996 "NDMI": lambda ds: (ds.nir - ds.swir16) / (ds.nir + ds.swir16), # Normalised Burn Ratio, Lopez Garcia 1991 @@ -164,7 +156,7 @@ def calculate_indices( "BAI": lambda ds: (1.0 / ((0.10 - ds.red) ** 2 + (0.06 - ds.nir) ** 2)), # Normalised Difference Chlorophyll Index, # (Mishra & Mishra, 2012) - "NDCI": lambda ds: (ds.rededge - ds.red) / (ds.rededge + ds.red), + "NDCI": lambda ds: (ds.rededge - ds.red) / (ds.rededge + ds.red), # Normalised Difference Snow Index, Hall 1995 "NDSI": lambda ds: (ds.green - ds.swir16) / (ds.green + ds.swir16), # Normalised Difference Water Index, McFeeters 1996 @@ -174,32 +166,19 @@ def calculate_indices( # Normalised Difference Built-Up Index, Zha 2003 "NDBI": lambda ds: (ds.swir16 - ds.nir) / (ds.swir_1 + ds.nir), # Built-Up Index, He et al. 2010 - "BUI": lambda ds: ((ds.swir16 - ds.nir) / (ds.swir_1 + ds.nir)) - - ((ds.nir - ds.red) / (ds.nir + ds.red)), + "BUI": lambda ds: ((ds.swir16 - ds.nir) / (ds.swir_1 + ds.nir)) - ((ds.nir - ds.red) / (ds.nir + ds.red)), # Built-up Area Extraction Index, Bouzekri et al. 2015 "BAEI": lambda ds: (ds.red + 0.3) / (ds.green + ds.swir16), # New Built-up Index, Jieli et al. 2010 "NBI": lambda ds: (ds.swir16 + ds.red) / ds.nir, # Bare Soil Index, Rikimaru et al. 2002 - "BSI": lambda ds: ((ds.swir16 + ds.red) - (ds.nir + ds.blue)) - / ((ds.swir16 + ds.red) + (ds.nir + ds.blue)), + "BSI": lambda ds: ((ds.swir16 + ds.red) - (ds.nir + ds.blue)) / ((ds.swir16 + ds.red) + (ds.nir + ds.blue)), # Automated Water Extraction Index (no shadows), Feyisa 2014 - "AWEI_ns": lambda ds: ( - 4 * (ds.green - ds.swir16) - (0.25 * ds.nir * +2.75 * ds.swir22) - ), + "AWEI_ns": lambda ds: (4 * (ds.green - ds.swir16) - (0.25 * ds.nir * +2.75 * ds.swir22)), # Automated Water Extraction Index (shadows), Feyisa 2014 - "AWEI_sh": lambda ds: ( - ds.blue + 2.5 * ds.green - 1.5 * (ds.nir + ds.swir16) - 0.25 * ds.swir22 - ), + "AWEI_sh": lambda ds: (ds.blue + 2.5 * ds.green - 1.5 * (ds.nir + ds.swir16) - 0.25 * ds.swir22), # Water Index, Fisher 2016 - "WI": lambda ds: ( - 1.7204 - + 171 * ds.green - + 3 * ds.red - - 70 * ds.nir - - 45 * ds.swir16 - - 71 * ds.swir22 - ), + "WI": lambda ds: (1.7204 + 171 * ds.green + 3 * ds.red - 70 * ds.nir - 45 * ds.swir16 - 71 * ds.swir22), # Tasseled Cap Wetness, Crist 1985 "TCW": lambda ds: ( 0.0315 * ds.blue @@ -238,69 +217,79 @@ def calculate_indices( # Modified Bare Soil Index, Nguyen et al. 2021 "MBI": lambda ds: ((ds.swir16 - ds.swir22 - ds.nir) / (ds.swir16 + ds.swir22 + ds.nir)) + 0.5, } - + # Enhanced Normalised Difference Impervious Surfaces Index, Chen et al. 
2019 def mndwi(ds): return (ds.green - ds.swir16) / (ds.green + ds.swir16) + def swir_diff(ds): - return ds.swir16/ds.swir22 + return ds.swir16 / ds.swir22 + def alpha(ds): - return (2*(np.mean(ds.blue)))/(np.mean(swir_diff(ds)) + np.mean(mndwi(ds)**2)) + return (2 * (np.mean(ds.blue))) / (np.mean(swir_diff(ds)) + np.mean(mndwi(ds) ** 2)) + def ENDISI(ds): m = mndwi(ds) s = swir_diff(ds) a = alpha(ds) - return (ds.blue - (a)*(s + m**2))/(ds.blue + (a)*(s + m**2)) - + return (ds.blue - (a) * (s + m**2)) / (ds.blue + (a) * (s + m**2)) + index_dict["ENDISI"] = ENDISI - + ## Artificial Surface Index, Yongquan Zhao & Zhe Zhu 2022 def af(ds): AF = (ds.nir - ds.blue) / (ds.nir + ds.blue) - AF_norm = (AF - AF.min(dim=["y","x"]))/(AF.max(dim=["y","x"]) - AF.min(dim=["y","x"])) + AF_norm = (AF - AF.min(dim=["y", "x"])) / (AF.max(dim=["y", "x"]) - AF.min(dim=["y", "x"])) return AF_norm + def ndvi(ds): return (ds.nir - ds.red) / (ds.nir + ds.red) + def msavi(ds): - return ((2 * ds.nir + 1 - ((2 * ds.nir + 1) ** 2 - 8 * (ds.nir - ds.red)) ** 0.5) / 2 ) + return (2 * ds.nir + 1 - ((2 * ds.nir + 1) ** 2 - 8 * (ds.nir - ds.red)) ** 0.5) / 2 + def vsf(ds): NDVI = ndvi(ds) MSAVI = msavi(ds) - VSF = 1 - NDVI * MSAVI - VSF_norm = (VSF - VSF.min(dim=["y","x"]))/(VSF.max(dim=["y","x"]) - VSF.min(dim=["y","x"])) + VSF = 1 - NDVI * MSAVI + VSF_norm = (VSF - VSF.min(dim=["y", "x"])) / (VSF.max(dim=["y", "x"]) - VSF.min(dim=["y", "x"])) return VSF_norm + def mbi(ds): return ((ds.swir16 - ds.swir22 - ds.nir) / (ds.swir16 + ds.swir22 + ds.nir)) + 0.5 + def embi(ds): MBI = mbi(ds) MNDWI = mndwi(ds) return (MBI - MNDWI - 0.5) / (MBI + MNDWI + 1.5) + def ssf(ds): EMBI = embi(ds) SSF = 1 - EMBI - SSF_norm = (SSF - SSF.min(dim=["y","x"]))/(SSF.max(dim=["y","x"]) - SSF.min(dim=["y","x"])) - return SSF_norm + SSF_norm = (SSF - SSF.min(dim=["y", "x"])) / (SSF.max(dim=["y", "x"]) - SSF.min(dim=["y", "x"])) + return SSF_norm + # Overall modulation using the Modulation Factor (MF). def mf(ds): MF = ((ds.blue + ds.green) - (ds.nir + ds.swir16)) / ((ds.blue + ds.green) + (ds.nir + ds.swir16)) - MF_norm = (MF - MF.min(dim=["y","x"]))/(MF.max(dim=["y","x"]) - MF.min(dim=["y","x"])) + MF_norm = (MF - MF.min(dim=["y", "x"])) / (MF.max(dim=["y", "x"]) - MF.min(dim=["y", "x"])) return MF_norm + def ASI(ds): AF = af(ds) VSF = vsf(ds) SSF = ssf(ds) MF = mf(ds) return AF * VSF * SSF * MF - + index_dict["ASI"] = ASI - + # If index supplied is not a list, convert to list. This allows us to # iterate through either multiple or single indices in the loop below indices = index if isinstance(index, list) else [index] # calculate for each index in the list of indices supplied (indexes) for index in indices: - # Select an index function from the dictionary index_func = index_dict.get(str(index)) @@ -308,7 +297,6 @@ def ASI(ds): # invalid option being provided, raise an exception informing user to # choose from the list of valid options if index is None: - raise ValueError( f"No remote sensing `index` was provided. Please " "refer to the function \ndocumentation for a full " @@ -329,7 +317,6 @@ def ASI(ds): ] and not normalise ): - warnings.warn( f"\nA coefficient-based index ('{index}') normally " "applied to surface reflectance values in the \n" @@ -339,19 +326,20 @@ def ASI(ds): ) elif index_func is None: - raise ValueError( f"The selected index '{index}' is not one of the " "valid remote sensing index options. 
\nPlease " "refer to the function documentation for a full " "list of valid options for `index`" ) - + # Deprecation warning if `collection` is specified instead of `satellite_mission`. if collection is not None: - warnings.warn('`collection` was deprecated in version 0.1.7. Use `satelite_mission` instead.', - DeprecationWarning, - stacklevel=2) + warnings.warn( + "`collection` was deprecated in version 0.1.7. Use `satelite_mission` instead.", + DeprecationWarning, + stacklevel=2, + ) # Map the collection values to the valid satellite_mission values. if collection == "c2": satellite_mission = "ls" @@ -362,27 +350,26 @@ def ASI(ds): raise ValueError( f"'{collection}' is not a valid option for " "`collection`. Please specify either \n" - "'c2' or 's2'.") + "'c2' or 's2'." + ) - # Rename bands to a consistent format if depending on what satellite mission # is specified in `satellite_mission`. This allows the same index calculations # to be applied to all satellite missions. If no satellite mission was provided, # raise an exception. if satellite_mission is None: - raise ValueError( "No `satellite_mission` was provided. Please specify " "either 'ls' or 's2' to ensure the \nfunction " "calculates indices using the correct spectral " "bands." ) - + elif satellite_mission == "ls": sr_max = 1.0 # Dictionary mapping full data names to simpler alias names # This only applies to properly-scaled "ls" data i.e. from - # the Landsat geomedians. calculate_indices will not show + # the Landsat geomedians. calculate_indices will not show # correct output for raw (unscaled) Landsat data (i.e. default # outputs from dc.load) bandnames_dict = { @@ -392,18 +379,15 @@ def ASI(ds): "SR_B4": "nir", "SR_B5": "swir_1", "SR_B7": "swir_2", - } - - # Rename bands in dataset to use simple names (e.g. 'red') - bands_to_rename = { - a: b for a, b in bandnames_dict.items() if a in ds.variables } + # Rename bands in dataset to use simple names (e.g. 'red') + bands_to_rename = {a: b for a, b in bandnames_dict.items() if a in ds.variables} + elif satellite_mission == "s2": sr_max = 10000 # Dictionary mapping full data names to simpler alias names bandnames_dict = { - "B02": "blue", "B03": "green", "B04": "red", @@ -414,12 +398,10 @@ def ASI(ds): "B08A": "rededge", "B11": "swir16", "B12": "swir22", - } + } # Rename bands in dataset to use simple names (e.g. 'red') - bands_to_rename = { - a: b for a, b in bandnames_dict.items() if a in ds.variables - } + bands_to_rename = {a: b for a, b in bandnames_dict.items() if a in ds.variables} # Raise error if no valid satellite_mission name is provided: else: @@ -436,10 +418,7 @@ def ASI(ds): index_array = index_func(ds.rename(bands_to_rename) / mult) except AttributeError: - raise ValueError( - f"Please verify that all bands required to " - f"compute {index} are present in `ds`." 
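# A short usage sketch for calculate_indices(). `ds` is assumed to be an
# xarray.Dataset already loaded from the datacube with the Sentinel-2 bands
# each index needs (e.g. via load_ard); no particular query is implied here.
from deafrica_tools.bandindices import calculate_indices

ds = calculate_indices(ds, index=["NDVI", "NDWI"], satellite_mission="s2")

# With the default normalise=True, Sentinel-2 surface reflectance is first
# rescaled by 1/10000 so coefficient-based indices behave as published;
# drop=True would instead return only the new index variables.
ndvi = ds.NDVI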
- ) + raise ValueError(f"Please verify that all bands required to " f"compute {index} are present in `ds`.") # Add as a new variable in dataset output_band_name = custom_varname if custom_varname else index @@ -452,10 +431,11 @@ def ASI(ds): # Return input dataset with added water index variable return ds + def dualpol_indices( ds, - co_pol='vv', - cross_pol='vh', + co_pol="vv", + cross_pol="vh", index=None, custom_varname=None, drop=False, @@ -544,7 +524,7 @@ def purity(ds): return (1 - ratio(ds)) / (1 + ratio(ds)) def theta(ds): - return np.arctan((1 - ratio(ds))**2 / (1 + ratio(ds)**2 - ratio(ds))) + return np.arctan((1 - ratio(ds)) ** 2 / (1 + ratio(ds) ** 2 - ratio(ds))) def P1(ds): return 1 / (1 + ratio(ds)) @@ -553,12 +533,12 @@ def P2(ds): return 1 - P1(ds) def entropy(ds): - return P1(ds)*np.log2(P1(ds)) + P2(ds)*np.log2(P2(ds)) + return P1(ds) * np.log2(P1(ds)) + P2(ds) * np.log2(P2(ds)) # Dictionary containing remote sensing index band recipes index_dict = { # Radar Vegetation Index for dual-pol, Trudel et al. 2012 - "RVI": lambda ds: 4*ds[cross_pol] / (ds[co_pol] + ds[cross_pol]), + "RVI": lambda ds: 4 * ds[cross_pol] / (ds[co_pol] + ds[cross_pol]), # Vertical dual depolarization index, Periasamy 2018 "VDDPI": lambda ds: (ds[co_pol] + ds[cross_pol]) / ds[co_pol], # cross-pol/co-pol ratio @@ -577,7 +557,6 @@ def entropy(ds): # calculate for each index in the list of indices supplied (indexes) for index in indices: - # Select an index function from the dictionary index_func = index_dict.get(str(index)) @@ -585,7 +564,6 @@ def entropy(ds): # invalid option being provided, raise an exception informing user to # choose from the list of valid options if index is None: - raise ValueError( f"No radar `index` was provided. Please " "refer to the function \ndocumentation for a full " @@ -593,7 +571,6 @@ def entropy(ds): ) elif index_func is None: - raise ValueError( f"The selected index '{index}' is not one of the " "valid remote sensing index options. 
\nPlease " diff --git a/deafrica_tools/classification.py b/deafrica_tools/classification.py index 701cd0d..a65fdd6 100644 --- a/deafrica_tools/classification.py +++ b/deafrica_tools/classification.py @@ -269,9 +269,7 @@ def _predict_func(model, input_xr, persist, proba, clean, return_input): input_data_flattened = da.array(input_data_flattened).transpose() if clean == True: - input_data_flattened = da.where( - da.isfinite(input_data_flattened), input_data_flattened, 0 - ) + input_data_flattened = da.where(da.isfinite(input_data_flattened), input_data_flattened, 0) if (proba == True) & (persist == True): # persisting data so we don't require loading all the data twice @@ -305,9 +303,7 @@ def _predict_func(model, input_xr, persist, proba, clean, return_input): out_proba = out_proba.reshape(len(y), len(x)) - out_proba = xr.DataArray( - out_proba, coords={"x": x, "y": y}, dims=["y", "x"] - ) + out_proba = xr.DataArray(out_proba, coords={"x": x, "y": y}, dims=["y", "x"]) output_xr["Probabilities"] = out_proba if return_input == True: @@ -322,9 +318,7 @@ def _predict_func(model, input_xr, persist, proba, clean, return_input): if len(input_data_flattened.shape[1:]): output_px_shape = input_data_flattened.shape[1:] - output_features = input_data_flattened.reshape( - (len(stacked.z), *output_px_shape) - ) + output_features = input_data_flattened.reshape((len(stacked.z), *output_px_shape)) # set the stacked coordinate to match the input output_features = xr.DataArray( @@ -339,9 +333,7 @@ def _predict_func(model, input_xr, persist, proba, clean, return_input): # convert to dataset and rename arrays output_features = output_features.to_dataset(dim="output_dim_0") data_vars = list(input_xr.data_vars) - output_features = output_features.rename( - {i: j for i, j in zip(output_features.data_vars, data_vars)} - ) + output_features = output_features.rename({i: j for i, j in zip(output_features.data_vars, data_vars)}) # merge with predictions output_xr = xr.merge([output_xr, output_features], compat="override") @@ -352,14 +344,10 @@ def _predict_func(model, input_xr, persist, proba, clean, return_input): # convert model to dask predict model = ParallelPostFit(model) with joblib.parallel_backend("dask", wait_for_workers_timeout=20): - output_xr = _predict_func( - model, input_xr, persist, proba, clean, return_input - ) + output_xr = _predict_func(model, input_xr, persist, proba, clean, return_input) else: - output_xr = _predict_func( - model, input_xr, persist, proba, clean, return_input - ).compute() + output_xr = _predict_func(model, input_xr, persist, proba, clean, return_input).compute() return output_xr @@ -420,7 +408,7 @@ def _get_training_data_for_shp( # mulitprocessing for parallization if "dask_chunks" in dc_query.keys(): dc_query.pop("dask_chunks", None) - + # set up query based on polygon geom = geometry.Geometry(geom=gdf.iloc[index].geometry, crs=gdf.crs) q = {"geopolygon": geom} @@ -471,18 +459,14 @@ def _get_training_data_for_shp( else: raise Exception( - zonal_stats - + " is not one of the supported" - + " reduce functions ('mean','median','max','min')" + zonal_stats + " is not one of the supported" + " reduce functions ('mean','median','max','min')" ) out_arrs.append(stacked) out_vars.append([field] + list(data.data_vars)) -def _get_training_data_parallel( - gdf, dc_query, ncpus, return_coords, feature_func=None, field=None, zonal_stats=None -): +def _get_training_data_parallel(gdf, dc_query, ncpus, return_coords, feature_func=None, field=None, zonal_stats=None): """ Function passing the 
'_get_training_data_for_shp' function to a mulitprocessing.Pool. @@ -498,8 +482,7 @@ def _get_training_data_parallel( if zx is not None: raise ValueError( - "You have a Dask Client running, which prevents \n" - "this function from multiprocessing. Close the client." + "You have a Dask Client running, which prevents \n" "this function from multiprocessing. Close the client." ) # instantiate lists that can be shared across processes @@ -553,7 +536,7 @@ def collect_training_data( max_retries=3, ): """ - This function provides methods for gathering training data from the ODC over + This function provides methods for gathering training data from the ODC over geometries stored within a geopandas geodataframe. The function will return a 'model_input' array containing stacked training data arrays with all NaNs & Infs removed. In the instance where ncpus > 1, a parallel version of the function will be run @@ -620,25 +603,20 @@ def feature_function(query): Two objects are returned: `columns_names`: a list of variable (feature) names `model_input`: a numpy.array containing the data values for each feature extracted - + """ # check the dtype of the class field if gdf[field].dtype != np.int: - raise ValueError( - 'The "field" column of the input vector must contain integer dtypes' - ) + raise ValueError('The "field" column of the input vector must contain integer dtypes') # set up some print statements if feature_func is None: - raise ValueError( - "Please supply a feature layer function through the " - +"parameter 'feature_func'" - ) + raise ValueError("Please supply a feature layer function through the " + "parameter 'feature_func'") if zonal_stats is not None: print("Taking zonal statistic: " + zonal_stats) - + # add unique id to gdf to help with indexing failed rows # during multiprocessing # if zonal_stats is not None: @@ -709,13 +687,7 @@ def feature_function(query): fail_ids = perc_fail[perc_fail > fail_ratio] fail_rate = len(fail_ids) / len(gdf) - print( - "Percentage of possible fails after run " - + str(i) - + " = " - + str(round(fail_rate * 100, 2)) - + " %" - ) + print("Percentage of possible fails after run " + str(i) + " = " + str(round(fail_rate * 100, 2)) + " %") if fail_rate > fail_threshold: print("Recollecting samples that failed") @@ -793,7 +765,6 @@ class KMeans_tree(ClusterMixin): """ def __init__(self, n_levels=2, n_clusters=3, **kwargs): - assert n_levels >= 1 self.base_model = KMeans(n_clusters=3, **kwargs) @@ -802,8 +773,7 @@ def __init__(self, n_levels=2, n_clusters=3, **kwargs): # make child models if n_levels > 1: self.branches = [ - KMeans_tree(n_levels=n_levels - 1, n_clusters=n_clusters, **kwargs) - for _ in range(n_clusters) + KMeans_tree(n_levels=n_levels - 1, n_clusters=n_clusters, **kwargs) for _ in range(n_clusters) ] def fit(self, X, y=None, sample_weight=None): @@ -835,11 +805,7 @@ def fit(self, X, y=None, sample_weight=None): # fit child models on their corresponding partition of the training set self.branches[clu].fit( X[labels_old == clu], - sample_weight=( - sample_weight[labels_old == clu] - if sample_weight is not None - else None - ), + sample_weight=(sample_weight[labels_old == clu] if sample_weight is not None else None), ) self.labels_[labels_old == clu] += self.branches[clu].labels_ @@ -875,24 +841,13 @@ def predict(self, X, sample_weight=None): for clu in range(self.n_clusters): result[rescpy == clu] += self.branches[clu].predict( X[rescpy == clu], - sample_weight=( - sample_weight[rescpy == clu] - if sample_weight is not None - else None - ), + 
sample_weight=(sample_weight[rescpy == clu] if sample_weight is not None else None), ) return result -def spatial_clusters( - coordinates, - method="Hierarchical", - max_distance=None, - n_groups=None, - verbose=False, - **kwargs -): +def spatial_clusters(coordinates, method="Hierarchical", max_distance=None, n_groups=None, verbose=False, **kwargs): """ Create spatial groups on coorindate data using either KMeans clustering or a Gaussian Mixture model @@ -931,28 +886,21 @@ def spatial_clusters( raise ValueError("method must be one of: 'Hierarchical','KMeans' or 'GMM'") if (method in ["GMM", "KMeans"]) & (n_groups is None): - raise ValueError( - "The 'GMM' and 'KMeans' methods requires explicitly setting 'n_groups'" - ) + raise ValueError("The 'GMM' and 'KMeans' methods requires explicitly setting 'n_groups'") if (method == "Hierarchical") & (max_distance is None): raise ValueError("The 'Hierarchical' method requires setting max_distance") if method == "Hierarchical": cluster_label = AgglomerativeClustering( - n_clusters=None, - linkage="complete", - distance_threshold=max_distance, - **kwargs + n_clusters=None, linkage="complete", distance_threshold=max_distance, **kwargs ).fit_predict(coordinates) if method == "KMeans": cluster_label = KMeans(n_clusters=n_groups, **kwargs).fit_predict(coordinates) if method == "GMM": - cluster_label = GaussianMixture(n_components=n_groups, **kwargs).fit_predict( - coordinates - ) + cluster_label = GaussianMixture(n_components=n_groups, **kwargs).fit_predict(coordinates) if verbose: print("n clusters = " + str(len(np.unique(cluster_label)))) @@ -970,7 +918,7 @@ def SKCV( max_distance=None, train_size=None, random_state=None, - **kwargs + **kwargs, ): """ Generate spatial k-fold cross validation indices using coordinate data. @@ -1059,7 +1007,7 @@ def SKCV( n_splits=n_splits, random_state=random_state, balance=balance, - **kwargs + **kwargs, ) if kfold_method == "SpatialKFold": @@ -1072,7 +1020,7 @@ def SKCV( n_splits=n_splits, random_state=random_state, balance=balance, - **kwargs + **kwargs, ) return splitter @@ -1091,7 +1039,7 @@ def spatial_train_test_split( max_distance=None, train_size=None, random_state=None, - **kwargs + **kwargs, ): """ Split arrays into random train and test subsets. Similar to @@ -1185,14 +1133,12 @@ def spatial_train_test_split( n_splits=1 if n_splits is None else n_splits, random_state=random_state, balance=balance, - **kwargs + **kwargs, ) if kfold_method == "SpatialKFold": if n_splits is None: - raise ValueError( - "n_splits parameter requires an integer value, eg. 'n_splits=5'" - ) + raise ValueError("n_splits parameter requires an integer value, eg. 'n_splits=5'") if (test_size is not None) or (train_size is not None): warnings.warn( "With the 'SpatialKFold' method, controlling the test/train ratio " @@ -1207,7 +1153,7 @@ def spatial_train_test_split( n_splits=n_splits, random_state=random_state, balance=balance, - **kwargs + **kwargs, ) lst = [] @@ -1243,11 +1189,7 @@ def _partition_by_sum(array, parts): """ array = np.atleast_1d(array).ravel() if parts > array.size: - raise ValueError( - "Cannot partition an array of size {} into {} parts of equal sum.".format( - array.size, parts - ) - ) + raise ValueError("Cannot partition an array of size {} into {} parts of equal sum.".format(array.size, parts)) cumulative_sum = array.cumsum() # Ideally, we want each part to have the same number of points (total / # parts). @@ -1259,8 +1201,7 @@ def _partition_by_sum(array, parts): # split the array. 
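# A brief sketch of spatial_clusters() defined in this module. The coordinate
# array below is synthetic; in practice it would be the x/y (or lon/lat)
# columns extracted from the training-data points.
import numpy as np

from deafrica_tools.classification import spatial_clusters

coords = np.random.uniform(0, 10000, size=(200, 2))  # placeholder coordinates

# The 'Hierarchical' method needs a distance threshold (in CRS units) ...
labels_hier = spatial_clusters(coords, method="Hierarchical", max_distance=1000)

# ... while 'KMeans' and 'GMM' need an explicit number of groups instead.
labels_kmeans = spatial_clusters(coords, method="KMeans", n_groups=5)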
if np.unique(indices).size != indices.size: raise ValueError( - "Could not find partition points to split the array into {} parts " - "of equal sum.".format(parts) + "Could not find partition points to split the array into {} parts " "of equal sum.".format(parts) ) return indices @@ -1291,7 +1232,6 @@ def __init__( max_distance=None, n_splits=None, ): - self.n_groups = n_groups self.coordinates = coordinates self.method = method @@ -1320,11 +1260,7 @@ def split(self, X, y=None, groups=None): The testing set indices for that split. """ if X.shape[1] != 2: - raise ValueError( - "X (the coordinate data) must have exactly 2 columns ({} given).".format( - X.shape[1] - ) - ) + raise ValueError("X (the coordinate data) must have exactly 2 columns ({} given).".format(X.shape[1])) for train, test in super().split(X, y, groups): yield train, test @@ -1447,7 +1383,7 @@ def __init__( train_size=None, random_state=None, balance=10, - **kwargs + **kwargs, ): super().__init__( n_groups=n_groups, @@ -1455,12 +1391,10 @@ def __init__( method=method, max_distance=max_distance, n_splits=n_splits, - **kwargs + **kwargs, ) if balance < 1: - raise ValueError( - "The *balance* argument must be >= 1. To disable balance, use 1." - ) + raise ValueError("The *balance* argument must be >= 1. To disable balance, use 1.") self.test_size = test_size self.train_size = train_size self.random_state = random_state @@ -1493,7 +1427,7 @@ def _iter_test_indices(self, X=None, y=None, groups=None): coordinates=self.coordinates, method=self.method, max_distance=self.max_distance, - **self.kwargs + **self.kwargs, ) cluster_ids = np.unique(labels) @@ -1519,12 +1453,7 @@ def _iter_test_indices(self, X=None, y=None, groups=None): test_points = np.where(np.isin(labels, cluster_ids[test_clusters]))[0] # The proportion of data points assigned to each group should # be close the proportion of clusters assigned to each group. - balance.append( - abs( - train_points.size / test_points.size - - train_clusters.size / test_clusters.size - ) - ) + balance.append(abs(train_points.size / test_points.size - train_clusters.size / test_clusters.size)) test_sets.append(test_points) best = np.argmin(balance) yield test_sets[best] @@ -1594,7 +1523,7 @@ def __init__( shuffle=True, random_state=None, balance=True, - **kwargs + **kwargs, ): super().__init__( n_groups=n_groups, @@ -1602,15 +1531,11 @@ def __init__( method=method, max_distance=max_distance, n_splits=n_splits, - **kwargs + **kwargs, ) if n_splits < 2: - raise ValueError( - "Number of splits must be >=2 for clusterKFold. Given {}.".format( - n_splits - ) - ) + raise ValueError("Number of splits must be >=2 for clusterKFold. Given {}.".format(n_splits)) self.test_size = test_size self.shuffle = shuffle self.random_state = random_state @@ -1641,7 +1566,7 @@ def _iter_test_indices(self, X=None, y=None, groups=None): coordinates=self.coordinates, method=self.method, max_distance=self.max_distance, - **self.kwargs + **self.kwargs, ) cluster_ids = np.unique(labels) diff --git a/deafrica_tools/coastal.py b/deafrica_tools/coastal.py index a379cfd..8b1a606 100644 --- a/deafrica_tools/coastal.py +++ b/deafrica_tools/coastal.py @@ -7,7 +7,8 @@ # Force GeoPandas to use Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. 
import os -os.environ['USE_PYGEOS'] = '0' + +os.environ["USE_PYGEOS"] = "0" import requests import numpy as np @@ -24,10 +25,11 @@ # Fix converters for tidal plot from pandas.plotting import register_matplotlib_converters + register_matplotlib_converters() -# URL for the DE Africa Coastlines data on Geoserver. +# URL for the DE Africa Coastlines data on Geoserver. WFS_ADDRESS = "https://geoserver.digitalearth.africa/geoserver/wfs" @@ -89,20 +91,11 @@ def tidal_tag( # If custom tide modelling locations are not provided, use the # dataset centroid if not tidepost_lat or not tidepost_lon: - - tidepost_lon, tidepost_lat = ds.extent.centroid.to_crs( - crs=CRS("EPSG:4326") - ).coords[0] - print( - f"Setting tide modelling location from dataset centroid: " - f"{tidepost_lon:.2f}, {tidepost_lat:.2f}" - ) + tidepost_lon, tidepost_lat = ds.extent.centroid.to_crs(crs=CRS("EPSG:4326")).coords[0] + print(f"Setting tide modelling location from dataset centroid: " f"{tidepost_lon:.2f}, {tidepost_lat:.2f}") else: - print( - f"Using user-supplied tide modelling location: " - f"{tidepost_lon:.2f}, {tidepost_lat:.2f}" - ) + print(f"Using user-supplied tide modelling location: " f"{tidepost_lon:.2f}, {tidepost_lat:.2f}") # Use the tidal model to compute tide heights for each observation: obs_datetimes = ds.time.data.astype("M8[s]").astype("O").tolist() @@ -112,7 +105,6 @@ def tidal_tag( # If tides cannot be successfully modeled (e.g. if the centre of the # xarray dataset is located is over land), raise an exception if len(obs_predictedtides) > 0: - # Extract tide heights obs_tideheights = [predictedtide.tide_m for predictedtide in obs_predictedtides] @@ -121,24 +113,20 @@ def tidal_tag( # Optionally calculate the tide phase for each observation if ebb_flow: - # Model tides for a time 15 minutes prior to each previously # modelled satellite acquisition time. This allows us to compare # tide heights to see if they are rising or falling. print("Modelling tidal phase (e.g. ebb or flow)") pre_times = ds.time - pd.Timedelta("15 min") pre_datetimes = pre_times.data.astype("M8[s]").astype("O").tolist() - pre_timepoints = [ - TimePoint(tidepost_lon, tidepost_lat, dt) for dt in pre_datetimes - ] + pre_timepoints = [TimePoint(tidepost_lon, tidepost_lat, dt) for dt in pre_datetimes] pre_predictedtides = predict_tide(pre_timepoints) # Compare tides computed for each timestep. If the previous tide # was higher than the current tide, the tide is 'ebbing'. If the # previous tide was lower, the tide is 'flowing' tidal_phase = [ - "Ebb" if pre.tide_m > obs.tide_m else "Flow" - for pre, obs in zip(pre_predictedtides, obs_predictedtides) + "Ebb" if pre.tide_m > obs.tide_m else "Flow" for pre, obs in zip(pre_predictedtides, obs_predictedtides) ] # Assign tide phase to the dataset as a new variable @@ -147,7 +135,6 @@ def tidal_tag( # If swap_dims = True, make tide height the primary dimension # instead of time if swap_dims: - # Swap dimensions and sort by tide height ds = ds.swap_dims({"time": "tide_height"}) ds = ds.sortby("tide_height") @@ -159,7 +146,6 @@ def tidal_tag( return ds else: - raise ValueError( f"Tides could not be modelled for dataset centroid located " f"at {tidepost_lon:.2f}, {tidepost_lat:.2f}. 
This can occur if " @@ -293,20 +279,12 @@ def tidal_stats( high_tide_offset = abs(all_max - obs_max) / all_range # Extract x (time in decimal years) and y (distance) values - all_x = ( - all_timerange.year - + ((all_timerange.dayofyear - 1) / 365) - + ((all_timerange.hour - 1) / 24) - ) + all_x = all_timerange.year + ((all_timerange.dayofyear - 1) / 365) + ((all_timerange.hour - 1) / 24) all_y = all_tideheights time_period = all_x.max() - all_x.min() # Extract x (time in decimal years) and y (distance) values - obs_x = ( - ds_tides.time.dt.year - + ((ds_tides.time.dt.dayofyear - 1) / 365) - + ((ds_tides.time.dt.hour - 1) / 24) - ) + obs_x = ds_tides.time.dt.year + ((ds_tides.time.dt.dayofyear - 1) / 365) + ((ds_tides.time.dt.hour - 1) / 24) obs_y = ds_tides.tide_height.values.astype(np.float) # Compute linear regression @@ -314,7 +292,6 @@ def tidal_stats( all_linreg = stats.linregress(x=all_x, y=all_y) if plain_english: - print( f"\n{spread:.0%} of the full {all_range:.2f} m modelled tidal " f"range is observed at this location.\nThe lowest " @@ -325,8 +302,7 @@ def tidal_stats( # Plain english if obs_linreg.pvalue > 0.05: print( - f"Observed tides do not increase or decrease significantly " - f"over the ~{time_period:.0f} year period." + f"Observed tides do not increase or decrease significantly " f"over the ~{time_period:.0f} year period." ) else: obs_slope_desc = "decrease" if obs_linreg.slope < 0 else "increase" @@ -339,10 +315,7 @@ def tidal_stats( ) if all_linreg.pvalue > 0.05: - print( - f"All tides do not increase or decrease significantly over " - f"the ~{time_period:.0f} year period." - ) + print(f"All tides do not increase or decrease significantly over " f"the ~{time_period:.0f} year period.") else: all_slope_desc = "decrease" if all_linreg.slope < 0 else "increase" print( @@ -354,13 +327,10 @@ def tidal_stats( ) if plot: - # Create plot and add all time and observed tide data fig, ax = plt.subplots(figsize=(10, 5)) ax.plot(all_timerange, all_tideheights, alpha=0.4) - ds_tides.tide_height.plot.line( - ax=ax, marker="o", linewidth=0.0, color="black", markersize=2 - ) + ds_tides.tide_height.plot.line(ax=ax, marker="o", linewidth=0.0, color="black", markersize=2) # Add horizontal lines for spread/offsets ax.axhline(obs_min, color="black", linestyle=":", linewidth=1) @@ -415,31 +385,31 @@ def tidal_stats( ).round(round_stats) -def transect_distances(transects_gdf, lines_gdf, mode='distance'): +def transect_distances(transects_gdf, lines_gdf, mode="distance"): """ - Take a set of transects (e.g. shore-normal beach survey lines), and + Take a set of transects (e.g. shore-normal beach survey lines), and determine the distance along the transect to each object in a set of - lines (e.g. shorelines). Distances are measured in the CRS of the + lines (e.g. shorelines). Distances are measured in the CRS of the input datasets. - - For coastal applications, transects should be drawn from land to + + For coastal applications, transects should be drawn from land to water (with the first point being on land so that it can be used as a consistent location from which to measure distances. - + The distance calculation can be performed using two modes: - - 'distance': Distances are measured from the start of the - transect to where it intersects with each line. Any transect - that intersects a line more than once is ignored. This mode is - useful for measuring e.g. 
the distance to the shoreline over + - 'distance': Distances are measured from the start of the + transect to where it intersects with each line. Any transect + that intersects a line more than once is ignored. This mode is + useful for measuring e.g. the distance to the shoreline over time from a consistent starting location. - 'width' Distances are measured between the first and last - intersection between a transect and each line. Any transect - that intersects a line only once is ignored. This is useful + intersection between a transect and each line. Any transect + that intersects a line only once is ignored. This is useful for e.g. measuring the width of a narrow area of coastline over time, e.g. the neck of a spit or tombolo. - + Parameters - ---------- + ---------- transects_gdf : geopandas.GeoDataFrame A GeoDataFrame containing one or multiple vector profile lines. The GeoDataFrame's index column will be used to name the rows in @@ -451,17 +421,17 @@ def transect_distances(transects_gdf, lines_gdf, mode='distance'): in the output distance table. mode : string, optional Whether to use 'distance' (for measuring distances from the - start of a profile) or 'width' mode (for measuring the width + start of a profile) or 'width' mode (for measuring the width between two profile intersections). See docstring above for more info; defaults to 'distance'. - + Returns ------- distance_df : pandas.DataFrame A DataFrame containing distance measurements for each profile - line (rows) and line feature (columns). + line (rows) and line feature (columns). """ - + import warnings from shapely.errors import ShapelyDeprecationWarning from shapely.geometry import Point @@ -469,82 +439,77 @@ def transect_distances(transects_gdf, lines_gdf, mode='distance'): def _intersect_dist(transect_gdf, lines_gdf, mode=mode): """ Take an individual transect, and determine the distance along - the transect to each object in a set of lines (e.g. shorelines). + the transect to each object in a set of lines (e.g. shorelines). 
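# A usage sketch for transect_distances(). The file names are placeholders;
# any GeoDataFrames of survey transects and shoreline vectors that share a
# common projected CRS should work the same way.
import geopandas as gpd

from deafrica_tools.coastal import transect_distances

transects = gpd.read_file("transects.geojson").to_crs("EPSG:6933")
shorelines = gpd.read_file("shorelines.geojson").to_crs("EPSG:6933")

# 'distance' mode measures from the landward start of each transect to its
# single shoreline intersection; 'width' mode measures between the first and
# last intersections of each transect instead.
distance_df = transect_distances(transects, shorelines, mode="distance")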
""" # Identify intersections between transects and lines - intersect_points = lines_gdf.apply( - lambda x: x.geometry.intersection(transect_gdf.geometry), axis=1) + intersect_points = lines_gdf.apply(lambda x: x.geometry.intersection(transect_gdf.geometry), axis=1) # In distance mode, identify transects with one intersection only, # and use this as the end point and the start of the transect as the # start point when measuring distances - if mode == 'distance': + if mode == "distance": start_point = Point(transect_gdf.geometry.coords[0]) point_df = intersect_points.apply( - lambda x: pd.Series({'start': start_point, 'end': x}) - if x.type == 'Point' - else pd.Series({'start': None, 'end': None})) + lambda x: pd.Series({"start": start_point, "end": x}) + if x.type == "Point" + else pd.Series({"start": None, "end": None}) + ) # In width mode, identify transects with multiple intersections, and # use the first intersection as the start point and the second # intersection for the end point when measuring distances - if mode == 'width': + if mode == "width": point_df = intersect_points.apply( - lambda x: pd.Series({'start': x.geoms[0], 'end': x.geoms[-1]}) - if x.type == 'MultiPoint' - else pd.Series({'start': None, 'end': None})) + lambda x: pd.Series({"start": x.geoms[0], "end": x.geoms[-1]}) + if x.type == "MultiPoint" + else pd.Series({"start": None, "end": None}) + ) # Calculate distances between valid start and end points - distance_df = point_df.apply( - lambda x: x.start.distance(x.end) if x.start else None, axis=1) - + distance_df = point_df.apply(lambda x: x.start.distance(x.end) if x.start else None, axis=1) + return distance_df # Run code after ignoring Shapely pre-v2.0 warnings - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=ShapelyDeprecationWarning) - + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=ShapelyDeprecationWarning) + # Assert that both datasets use the same CRS - assert transects_gdf.crs == lines_gdf.crs, ('Please ensure both ' - 'input datasets use the same CRS.') - + assert transects_gdf.crs == lines_gdf.crs, "Please ensure both " "input datasets use the same CRS." + # Run distance calculations - distance_df = transects_gdf.apply( - lambda x: _intersect_dist(x, lines_gdf), axis=1) - + distance_df = transects_gdf.apply(lambda x: _intersect_dist(x, lines_gdf), axis=1) + return pd.DataFrame(distance_df) - -def get_coastlines(bbox: tuple, - crs="EPSG:4326", - layer="shorelines", - drop_wms=True) -> gpd.GeoDataFrame: + +def get_coastlines(bbox: tuple, crs="EPSG:4326", layer="shorelines", drop_wms=True) -> gpd.GeoDataFrame: """ Get DE Africa Coastlines data for a provided bounding box using WFS. - - For a full description of the DE Africa Coastlines dataset, refer to the + + For a full description of the DE Africa Coastlines dataset, refer to the official Digital Earth Africa product description: - + Parameters ---------- bbox : (xmin, ymin, xmax, ymax), or geopandas object - Bounding box expressed as a tuple. Alternatively, a bounding - box can be automatically extracted by suppling a + Bounding box expressed as a tuple. Alternatively, a bounding + box can be automatically extracted by suppling a geopandas.GeoDataFrame or geopandas.GeoSeries. crs : str, optional Optional CRS for the bounding box. This is ignored if `bbox` is provided as a geopandas object. layer : str, optional Which DE Africa Coastlines layer to load. 
Options include the annual - shoreline vectors ("shorelines") and the rates of change + shoreline vectors ("shorelines") and the rates of change statistics points ("statistics"). Defaults to "shorelines". drop_wms : bool, optional Whether to drop WMS-specific attribute columns from the data. These columns are used for visualising the dataset on DE Africa Maps, and are unlikely to be useful for scientific analysis. Defaults to True. - + Returns ------- gpd.GeoDataFrame @@ -562,9 +527,9 @@ def get_coastlines(bbox: tuple, # Get the available layers in the coastlines:DEAfrica_Coastlines group. describe_layer_url = "https://geoserver.digitalearth.africa/geoserver/wms?service=WMS&version=1.1.1&request=DescribeLayer&layers=coastlines:DEAfrica_Coastlines&outputFormat=application/json" describe_layer_response = requests.get(describe_layer_url).json() - available_layers = [layer["layerName"] for layer in describe_layer_response['layerDescriptions']] + available_layers = [layer["layerName"] for layer in describe_layer_response["layerDescriptions"]] - # Get the layer name. + # Get the layer name. if layer == "shorelines": layer_name = [i for i in available_layers if "shorelines" in i] else: @@ -572,9 +537,7 @@ def get_coastlines(bbox: tuple, # Query WFS. wfs = WebFeatureService(url=WFS_ADDRESS, version="1.1.0") - response = wfs.getfeature(typename=layer_name, - bbox=tuple(bbox) + (crs,), - outputFormat="json") + response = wfs.getfeature(typename=layer_name, bbox=tuple(bbox) + (crs,), outputFormat="json") # Load data as a geopandas.GeoDataFrame. coastlines_gdf = gpd.read_file(response) @@ -587,4 +550,4 @@ def get_coastlines(bbox: tuple, if drop_wms: coastlines_gdf = coastlines_gdf.loc[:, ~coastlines_gdf.columns.str.contains("wms_")] - return coastlines_gdf \ No newline at end of file + return coastlines_gdf diff --git a/deafrica_tools/dask.py b/deafrica_tools/dask.py index fdd377d..926071f 100644 --- a/deafrica_tools/dask.py +++ b/deafrica_tools/dask.py @@ -10,13 +10,7 @@ from datacube.utils.rio import configure_s3_access -def create_local_dask_cluster( - spare_mem="3Gb", - aws_unsigned=True, - display_client=True, - return_client=False, - **kwargs -): +def create_local_dask_cluster(spare_mem="3Gb", aws_unsigned=True, display_client=True, return_client=False, **kwargs): """ Using the datacube utils function 'start_local_dask', generate a local dask cluster. 
@@ -52,12 +46,7 @@ def create_local_dask_cluster( # configure dashboard link to go over proxy dask.config.set( - { - "distributed.dashboard.link": os.environ.get( - "JUPYTERHUB_SERVICE_PREFIX", "/" - ) - + "proxy/{port}/status" - } + {"distributed.dashboard.link": os.environ.get("JUPYTERHUB_SERVICE_PREFIX", "/") + "proxy/{port}/status"} ) # start up a local cluster @@ -69,8 +58,8 @@ def create_local_dask_cluster( # Show the dask cluster settings if display_client: display(client) - - #return the client as an object + + # return the client as an object if return_client: return client diff --git a/deafrica_tools/datahandling.py b/deafrica_tools/datahandling.py index 5b986d4..ef0f55c 100644 --- a/deafrica_tools/datahandling.py +++ b/deafrica_tools/datahandling.py @@ -77,9 +77,7 @@ def load_ard( dc, products=None, min_gooddata=0.0, - categories_to_mask_ls=dict( - cloud="high_confidence", cloud_shadow="high_confidence" - ), + categories_to_mask_ls=dict(cloud="high_confidence", cloud_shadow="high_confidence"), categories_to_mask_s2=[ "cloud high probability", "cloud medium probability", @@ -253,11 +251,11 @@ def load_ard( "Sentinel-2: ['s2_l2a'], or" "Sentinel-1: ['s1_rtc'], or" ) - + # convert products to list if user passed as a string if type(products) == str: - products=[products] - + products = [products] + if all(["ls" in product for product in products]): product_type = "ls" elif all(["s2" in product for product in products]): @@ -273,16 +271,12 @@ def load_ard( # Check some parameters before proceeding if (product_type == "ls") & (dtype == "native"): raise ValueError( - "Cannot load Landsat bands in native dtype " - "as values require rescaling which converts dtype to float" + "Cannot load Landsat bands in native dtype " "as values require rescaling which converts dtype to float" ) if product_type == "ls": if any(k in categories_to_mask_ls for k in ("cirrus", "cirrus_confidence")): - raise ValueError( - "'cirrus' categories for the pixel quality mask" - " are not supported by load_ard" - ) + raise ValueError("'cirrus' categories for the pixel quality mask" " are not supported by load_ard") # If `measurements` are specified but do not include pixel quality bands, # add these to `measurements` according to collection @@ -323,7 +317,6 @@ def load_ard( if measurements is not None: if product_type == "ls": - # check we aren't loading aerosol bands from LS8 aerosol_bands = [ "aerosol_qa", @@ -345,8 +338,7 @@ def load_ard( else: raise ValueError( "load_ard does not support all band aliases for Landsat, " - "use only the following band names to load Landsat data: " - + str(ls_aliases) + "use only the following band names to load Landsat data: " + str(ls_aliases) ) # Deal with "load all" case: pick a set of bands common across @@ -364,11 +356,7 @@ def load_ard( # Get list of data and mask bands so that we can later exclude # mask bands from being masked themselves (also handle the case of rad_sat) - data_bands = [ - band - for band in measurements - if band not in (fmask_band, "radiometric_saturation") - ] + data_bands = [band for band in measurements if band not in (fmask_band, "radiometric_saturation")] mask_bands = [band for band in measurements if band not in data_bands] ################# @@ -385,7 +373,6 @@ def load_ard( if verbose: print("Finding datasets") for product in products: - # Obtain list of datasets for product if verbose: print(f" {product}") @@ -393,9 +380,7 @@ def load_ard( if product_type == "ls": # handle LS seperately to S2/S1 due to collection_category # force the user to 
load Tier 1 - datasets = dc.find_datasets( - product=product, collection_category='T1', **query - ) + datasets = dc.find_datasets(product=product, collection_category="T1", **query) else: datasets = dc.find_datasets(product=product, **query) @@ -403,11 +388,7 @@ def load_ard( if not ls7_slc_off and product in ["ls7_sr"]: if verbose: print(" Ignoring SLC-off observations for ls7") - datasets = [ - i - for i in datasets - if i.time.begin < datetime.datetime(2003, 5, 31, tzinfo=pytz.UTC) - ] + datasets = [i for i in datasets if i.time.begin < datetime.datetime(2003, 5, 31, tzinfo=pytz.UTC)] # Add any returned datasets to list dataset_list.extend(datasets) @@ -454,49 +435,41 @@ def load_ard( # collection 2 USGS if product_type == "ls": - mask, _ = masking.create_mask_value( - ds[fmask_band].attrs["flags_definition"], **categories_to_mask_ls - ) - + mask, _ = masking.create_mask_value(ds[fmask_band].attrs["flags_definition"], **categories_to_mask_ls) + pq_mask = (ds[fmask_band] & mask) != 0 - - # only run if data bands are present - if len(data_bands) > 0: - - # identify pixels that will become negative after rescaling (but not 0 values) + + # only run if data bands are present + if len(data_bands) > 0: + # identify pixels that will become negative after rescaling (but not 0 values) invalid = ( - ((ds[data_bands] < (-1.0 * -0.2 / 0.0000275)) & (ds[data_bands] > 0)) - .to_array(dim="band") - .any(dim="band") - ) + ((ds[data_bands] < (-1.0 * -0.2 / 0.0000275)) & (ds[data_bands] > 0)) + .to_array(dim="band") + .any(dim="band") + ) - #merge masks + # merge masks pq_mask = np.logical_or(pq_mask, pq_mask) # sentinel 2 if product_type == "s2": - pq_mask = odc.algo.enum_to_bool(mask=ds[fmask_band], - categories=categories_to_mask_s2) - + pq_mask = odc.algo.enum_to_bool(mask=ds[fmask_band], categories=categories_to_mask_s2) + # sentinel 1 if product_type == "s1": - pq_mask = odc.algo.enum_to_bool(mask=ds[fmask_band], - categories=categories_to_mask_s1) + pq_mask = odc.algo.enum_to_bool(mask=ds[fmask_band], categories=categories_to_mask_s1) # The good data percentage calculation has to load in all `fmask` # data, which can be slow. If the user has chosen no filtering # by using the default `min_gooddata = 0`, we can skip this step # completely to save processing time if min_gooddata > 0.0: - # Compute good data for each observation as % of total pixels. 
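# A hedged example of calling load_ard() for Sentinel-2. The spatial and
# temporal keywords below are ordinary dc.load()-style query arguments passed
# through **query, and the extents, band aliases and thresholds are
# placeholders rather than values taken from this repository.
import datacube

from deafrica_tools.datahandling import load_ard

dc = datacube.Datacube(app="load_ard_example")

ds = load_ard(
    dc=dc,
    products=["s2_l2a"],
    measurements=["red", "green", "blue", "nir"],  # aliases assumed to exist in the product definition
    x=(35.70, 35.80),            # longitude range (placeholder)
    y=(-6.20, -6.10),            # latitude range (placeholder)
    time=("2021-01", "2021-03"),
    output_crs="EPSG:6933",
    resolution=(-20, 20),
    min_gooddata=0.7,            # keep only scenes with at least 70% clear pixels
)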
# Inveerting the pq_mask for this because cloud=True in pq_mask # and we want to sum good pixels if verbose: print("Counting good quality pixels for each time step") - data_perc = (~pq_mask).sum(axis=[1, 2], dtype="int32") / ( - pq_mask.shape[1] * pq_mask.shape[2] - ) + data_perc = (~pq_mask).sum(axis=[1, 2], dtype="int32") / (pq_mask.shape[1] * pq_mask.shape[2]) keep = (data_perc >= min_gooddata).persist() @@ -533,16 +506,16 @@ def load_ard( # should only be applied to data bands ds_data = ds[data_bands] ds_masks = ds[mask_bands] - + # Remove sentinel-2 pixels valued 1 (scene edges, terrain shadow) if product_type == "s2": valid_data_mask = (ds_data > 1).to_array(dim="band").all(dim="band") - ds_data = odc.algo.keep_good_only(ds_data, where=valid_data_mask) - + ds_data = odc.algo.keep_good_only(ds_data, where=valid_data_mask) + # Mask data if either of the above masks were generated if mask is not None: ds_data = odc.algo.erase_bad(ds_data, where=mask) - + # Automatically set dtype to either native or float32 depending # on whether masking was requested if dtype == "auto": @@ -621,9 +594,7 @@ def load_ard( return ds.compute() -def array_to_geotiff( - fname, data, geo_transform, projection, nodata_val=0, dtype=gdal.GDT_Float32 -): +def array_to_geotiff(fname, data, geo_transform, projection, nodata_val=0, dtype=gdal.GDT_Float32): """ Create a single band GeoTIFF file with data from an array. @@ -729,7 +700,6 @@ def mostcommon_crs(dc, product, query): # Warn user if multiple CRSs are encountered if len(crs_counts.keys()) > 1: - warnings.warn( f"Multiple UTM zones {list(crs_counts.keys())} " f"were returned for this query. Defaulting to " @@ -781,10 +751,7 @@ def download_unzip(url, output_dir=None, remove_zip=True): # Extract into output_dir with zipfile.ZipFile(zip_name, "r") as zip_ref: zip_ref.extractall(output_dir) - print( - f"Unzipping output files to: " - f"{output_dir if output_dir else os.getcwd()}" - ) + print(f"Unzipping output files to: " f"{output_dir if output_dir else os.getcwd()}") # Optionally cleanup if remove_zip: @@ -845,9 +812,7 @@ def dilate(array, dilation=10, invert=True): if invert: array = ~array - return ~binary_dilation( - array.astype(np.bool), structure=kernel.reshape((1,) + kernel.shape) - ) + return ~binary_dilation(array.astype(np.bool), structure=kernel.reshape((1,) + kernel.shape)) def _select_along_axis(values, idx, axis): @@ -916,9 +881,7 @@ def last(array: xr.DataArray, dim: str, index_name: str = None) -> xr.DataArray: return reduced -def nearest( - array: xr.DataArray, dim: str, target, index_name: str = None -) -> xr.DataArray: +def nearest(array: xr.DataArray, dim: str, target, index_name: str = None) -> xr.DataArray: """ Finds the nearest values to a target label along the given dimension, for all other dimensions. @@ -973,9 +936,7 @@ def nearest( nearest_array = xr.where(is_before_closer, da_before, da_after) nearest_array[dim] = xr.where(is_before_closer, da_before[dim], da_after[dim]) if index_name is not None: - nearest_array[index_name] = xr.where( - is_before_closer, da_before[index_name], da_after[index_name] - ) + nearest_array[index_name] = xr.where(is_before_closer, da_before[index_name], da_after[index_name]) return nearest_array @@ -999,19 +960,13 @@ def pan_sharpen_brovey(band_1, band_2, band_3, pan_band): pan-sharpened to the spatial resolution of `pan_band`. 
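# A small sketch of the Brovey pan-sharpening helper documented above. The
# band names assume a Landsat-style dataset where `ds` holds the 30 m visible
# bands and `ds_pan` holds the matching panchromatic band, both already
# resampled onto the same grid (that resampling step is assumed, not shown).
from deafrica_tools.datahandling import pan_sharpen_brovey

red_s, green_s, blue_s = pan_sharpen_brovey(
    ds.red.values, ds.green.values, ds.blue.values, ds_pan.panchromatic.values
)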
""" # Calculate total - exp = 'band_1 + band_2 + band_3' + exp = "band_1 + band_2 + band_3" total = numexpr.evaluate(exp) # Perform Brovey Transform in form of: band/total*panchromatic - exp = 'a/b*c' - band_1_sharpen = numexpr.evaluate(exp, local_dict={'a': band_1, - 'b': total, - 'c': pan_band}) - band_2_sharpen = numexpr.evaluate(exp, local_dict={'a': band_2, - 'b': total, - 'c': pan_band}) - band_3_sharpen = numexpr.evaluate(exp, local_dict={'a': band_3, - 'b': total, - 'c': pan_band}) - - return band_1_sharpen, band_2_sharpen, band_3_sharpen \ No newline at end of file + exp = "a/b*c" + band_1_sharpen = numexpr.evaluate(exp, local_dict={"a": band_1, "b": total, "c": pan_band}) + band_2_sharpen = numexpr.evaluate(exp, local_dict={"a": band_2, "b": total, "c": pan_band}) + band_3_sharpen = numexpr.evaluate(exp, local_dict={"a": band_3, "b": total, "c": pan_band}) + + return band_1_sharpen, band_2_sharpen, band_3_sharpen diff --git a/deafrica_tools/load_era5.py b/deafrica_tools/load_era5.py index 053013a..95c8b3f 100644 --- a/deafrica_tools/load_era5.py +++ b/deafrica_tools/load_era5.py @@ -20,25 +20,30 @@ # import warnings -ERA5_VARS = ['air_pressure_at_mean_sea_level', - 'air_temperature_at_2_metres', - 'air_temperature_at_2_metres_1hour_Maximum', - 'air_temperature_at_2_metres_1hour_Minimum', - 'dew_point_temperature_at_2_metres', - 'eastward_wind_at_100_metres', - 'eastward_wind_at_10_metres', - 'integral_wrt_time_of_surface_direct_downwelling_shortwave_flux_in_air_1hour_Accumulation', - 'lwe_thickness_of_surface_snow_amount', - 'northward_wind_at_100_metres', - 'northward_wind_at_10_metres', - 'precipitation_amount_1hour_Accumulation', - 'sea_surface_temperature', - 'snow_density', - 'surface_air_pressure'] +ERA5_VARS = [ + "air_pressure_at_mean_sea_level", + "air_temperature_at_2_metres", + "air_temperature_at_2_metres_1hour_Maximum", + "air_temperature_at_2_metres_1hour_Minimum", + "dew_point_temperature_at_2_metres", + "eastward_wind_at_100_metres", + "eastward_wind_at_10_metres", + "integral_wrt_time_of_surface_direct_downwelling_shortwave_flux_in_air_1hour_Accumulation", + "lwe_thickness_of_surface_snow_amount", + "northward_wind_at_100_metres", + "northward_wind_at_10_metres", + "precipitation_amount_1hour_Accumulation", + "sea_surface_temperature", + "snow_density", + "surface_air_pressure", +] def load_era5( - var, lat, lon, time, + var, + lat, + lon, + time, reduce_func=None, resample="1D", ): @@ -75,37 +80,35 @@ def load_era5( """ # constrain query to available variables - assert var in ERA5_VARS, "var must be one of [{}] (got {})".format( - ",".join(ERA5_VARS), var - ) - + assert var in ERA5_VARS, "var must be one of [{}] (got {})".format(",".join(ERA5_VARS), var) + # set default reduction function if reduce_func is None: reduce_func = np.mean - + # process date range if type(time) in [list, tuple]: - date_from = np.datetime64(min(time)).astype('datetime64[D]') - date_to = (np.datetime64(max(time))+1).astype('datetime64[D]')-np.timedelta64(1,'D') + date_from = np.datetime64(min(time)).astype("datetime64[D]") + date_to = (np.datetime64(max(time)) + 1).astype("datetime64[D]") - np.timedelta64(1, "D") elif type(time) in [str, np.datetime64]: - date_from = np.datetime64(time).astype('datetime64[D]') - date_to = (np.datetime64(time)+1).astype('datetime64[D]')-np.timedelta64(1,'D') + date_from = np.datetime64(time).astype("datetime64[D]") + date_to = (np.datetime64(time) + 1).astype("datetime64[D]") - np.timedelta64(1, "D") else: - raise(ValueError) + raise (ValueError) # 
actual lat lon ranges will be infered from nearest match to data lat_range = None lon_range = None - + datasets = [] # Loop through month and year to access ERA5 zarr - month = date_from.astype('datetime64[M]') - while month <= date_to.astype('datetime64[M]'): + month = date_from.astype("datetime64[M]") + while month <= date_to.astype("datetime64[M]"): url = f"s3://era5-pds/zarr/{month.astype(object).year:04}/{month.astype(object).month:02}/data/{var}.zarr" - ds = xr.open_zarr(fsspec.get_mapper(url, anon=True, - client_kwargs={'region_name':'us-east-1'}), - consolidated=True) - + ds = xr.open_zarr( + fsspec.get_mapper(url, anon=True, client_kwargs={"region_name": "us-east-1"}), consolidated=True + ) + # re-order along longitude to go from -180 to 180 if needed if min(lon) < 0: ds = ds.assign_coords({"lon": (((ds.lon + 180) % 360) - 180)}) @@ -117,23 +120,26 @@ def load_era5( # define the lat/lon grid lat_range = slice(test.lat.max().values, test.lat.min().values) lon_range = slice(test.lon.min().values, test.lon.max().values) - + if "time0" in ds.dims: ds = ds.rename({"time0": "time"}) if "time1" in ds.dims: - ds = ds.rename( - {"time1": "time"} - ) # This should INTENTIONALLY error if both times are defined - - output = ds[[var]].sel(lat=lat_range, lon=lon_range, time=slice(date_from, date_to)).resample(time=resample).reduce(reduce_func) + ds = ds.rename({"time1": "time"}) # This should INTENTIONALLY error if both times are defined + + output = ( + ds[[var]] + .sel(lat=lat_range, lon=lon_range, time=slice(date_from, date_to)) + .resample(time=resample) + .reduce(reduce_func) + ) output.attrs = ds.attrs for v in output.data_vars: output[v].attrs = ds[v].attrs - + datasets.append(output) - month += np.timedelta64(1,'M') - - return assign_crs(xr.combine_by_coords(datasets), 'EPSG:4326') + month += np.timedelta64(1, "M") + + return assign_crs(xr.combine_by_coords(datasets), "EPSG:4326") # # older version of scripts to download and use netcdf diff --git a/deafrica_tools/load_isda.py b/deafrica_tools/load_isda.py index 2e9cca1..dbe1f06 100644 --- a/deafrica_tools/load_isda.py +++ b/deafrica_tools/load_isda.py @@ -17,18 +17,20 @@ import boto3 from pystac import stac_io, Catalog -#this function allows us to directly query the data on s3, adapted from iSDA tutorial https://github.com/iSDA-Africa/isdasoil-tutorial/blob/main/iSDAsoil-tutorial.ipynb + +# this function allows us to directly query the data on s3, adapted from iSDA tutorial https://github.com/iSDA-Africa/isdasoil-tutorial/blob/main/iSDAsoil-tutorial.ipynb def my_read_method(uri): parsed = urlparse(uri) - if parsed.scheme == 's3': + if parsed.scheme == "s3": bucket = parsed.netloc key = parsed.path[1:] - s3 = boto3.resource('s3') + s3 = boto3.resource("s3") obj = s3.Object(bucket, key) - return obj.get()['Body'].read().decode('utf-8') + return obj.get()["Body"].read().decode("utf-8") else: return stac_io.default_read_text_method(uri) + stac_io.read_text_method = my_read_method catalog = Catalog.from_file("https://isdasoil.s3.amazonaws.com/catalog.json") @@ -41,7 +43,7 @@ def my_read_method(uri): # save all items to a dictionary as we go along assets[item.id] = item for asset in item.assets.values(): - if asset.roles == ['data']: + if asset.roles == ["data"]: str(f"Title: {asset.title}") str(f"Description: {asset.description}") str(f"URL: {asset.href}") @@ -49,6 +51,7 @@ def my_read_method(uri): # define load_isda() function + def load_isda(var, lat, lon): """ Download and return iSDA variable with number of bands corresponding to number 
of iSDA layers. @@ -59,34 +62,36 @@ def load_isda(var, lat, lon): lat: tuple or list Latitude range for query. lon: tuple or list - Longitude range for query. + Longitude range for query. """ - bands = assets[var].assets["image"].extra_fields.get('eo:bands') - bands = [val['description'] for val in bands] - + bands = assets[var].assets["image"].extra_fields.get("eo:bands") + bands = [val["description"] for val in bands] + if len(np.unique(bands)) > 1: - ds = xr.open_dataset(assets[var].assets["image"].href, engine="rasterio").rio.clip_box( - minx=lon[0], - miny=lat[0], - maxx=lon[1], - maxy=lat[1], - crs="EPSG:4326", - ) + minx=lon[0], + miny=lat[0], + maxx=lon[1], + maxy=lat[1], + crs="EPSG:4326", + ) - ds_layered = ds.drop_dims('band') + ds_layered = ds.drop_dims("band") for x in np.unique(ds.band): - ds_layered[bands[x-1]] = ds.sel(band=x).to_array(dim='band').squeeze() - + ds_layered[bands[x - 1]] = ds.sel(band=x).to_array(dim="band").squeeze() + else: - - ds_layered = xr.open_dataset(assets[var].assets["image"].href, engine="rasterio").rio.clip_box( + ds_layered = ( + xr.open_dataset(assets[var].assets["image"].href, engine="rasterio") + .rio.clip_box( minx=lon[0], miny=lat[0], maxx=lon[1], maxy=lat[1], crs="EPSG:4326", - ).squeeze() + ) + .squeeze() + ) return ds_layered diff --git a/deafrica_tools/load_soil_moisture.py b/deafrica_tools/load_soil_moisture.py index 4fb1a58..104007d 100644 --- a/deafrica_tools/load_soil_moisture.py +++ b/deafrica_tools/load_soil_moisture.py @@ -3,37 +3,47 @@ # function to load soil moisture data -def load_soil_moisture(lat, lon, time, product = 'surface', grid = 'nearest'): - product_baseurl = 'https://dapds00.nci.org.au/thredds/dodsC/ub8/global/GRAFS/' - assert product in ['surface', 'rootzone'], 'product parameter must be surface or root-zone' + +def load_soil_moisture(lat, lon, time, product="surface", grid="nearest"): + product_baseurl = "https://dapds00.nci.org.au/thredds/dodsC/ub8/global/GRAFS/" + assert product in ["surface", "rootzone"], "product parameter must be surface or root-zone" # lat, lon grid - if grid == 'nearest': + if grid == "nearest": # select lat/lon range from data; snap to nearest grid lat_range, lon_range = None, None else: # define a grid that covers the entire area of interest - lat_range = np.arange(np.max(np.ceil(np.array(lat)*10.+0.5)/10.-0.05), np.min(np.floor(np.array(lat)*10.-0.5)/10.+0.05)-0.05, -0.1) - lon_range = np.arange(np.min(np.floor(np.array(lon)*10.-0.5)/10.+0.05), np.max(np.ceil(np.array(lon)*10.+0.5)/10.-0.05)+0.05, 0.1) + lat_range = np.arange( + np.max(np.ceil(np.array(lat) * 10.0 + 0.5) / 10.0 - 0.05), + np.min(np.floor(np.array(lat) * 10.0 - 0.5) / 10.0 + 0.05) - 0.05, + -0.1, + ) + lon_range = np.arange( + np.min(np.floor(np.array(lon) * 10.0 - 0.5) / 10.0 + 0.05), + np.max(np.ceil(np.array(lon) * 10.0 + 0.5) / 10.0 - 0.05) + 0.05, + 0.1, + ) # split time window into years day_range = np.array(time).astype("M8[D]") year_range = np.array(time).astype("M8[Y]") - if product == 'surface': - product_name = 'GRAFS_TopSoilRelativeWetness_' - else: product_name = 'GRAFS_RootzoneSoilWaterIndex_' + if product == "surface": + product_name = "GRAFS_TopSoilRelativeWetness_" + else: + product_name = "GRAFS_RootzoneSoilWaterIndex_" datasets = [] - for year in np.arange(year_range[0], year_range[1]+1, np.timedelta64(1, 'Y')): + for year in np.arange(year_range[0], year_range[1] + 1, np.timedelta64(1, "Y")): start = np.max([day_range[0], year.astype("M8[D]")]) - end = np.min([day_range[1], (year+1).astype("M8[D]")-1]) 
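# A usage sketch for load_isda() from the preceding module. The layer name is
# a placeholder for one of the item ids in the iSDA STAC catalogue iterated
# over above, and the lat/lon tuples are an arbitrary small (min, max) window
# in decimal degrees.
from deafrica_tools.load_isda import load_isda

soil_layer = load_isda(
    var="ph",             # placeholder iSDA item id
    lat=(-1.30, -1.20),   # (min, max) latitude
    lon=(36.70, 36.80),   # (min, max) longitude
)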
- product_url = product_baseurl + product_name +'%s.nc'%str(year) + end = np.min([day_range[1], (year + 1).astype("M8[D]") - 1]) + product_url = product_baseurl + product_name + "%s.nc" % str(year) print(product_url) # data is loaded lazily through OPeNDAP ds = xr.open_dataset(product_url) if lat_range is None: # select lat/lon range from data if not specified; snap to nearest grid - test = ds.sel(lat=list(lat), lon=list(lon), method='nearest') + test = ds.sel(lat=list(lat), lon=list(lon), method="nearest") lat_range = slice(test.lat.values[0], test.lat.values[1]) lon_range = slice(test.lon.values[0], test.lon.values[1]) # slice before return ds = ds.sel(lat=lat_range, lon=lon_range, time=slice(start, end)).compute() datasets.append(ds) - return xr.merge(datasets) \ No newline at end of file + return xr.merge(datasets) diff --git a/deafrica_tools/plotting.py b/deafrica_tools/plotting.py index d554a4c..458d51d 100644 --- a/deafrica_tools/plotting.py +++ b/deafrica_tools/plotting.py @@ -7,7 +7,8 @@ # Force GeoPandas to use Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. import os -os.environ['USE_PYGEOS'] = '0' + +os.environ["USE_PYGEOS"] = "0" import math import folium @@ -55,7 +56,6 @@ def rgb( savefig_kwargs={}, **kwargs, ): - """ Takes an xarray dataset and plots RGB images using three imagery bands (e.g ['red', 'green', 'blue']). The `index` @@ -140,7 +140,6 @@ def rgb( # If ax is supplied via kwargs, ignore aspect and size if "ax" in kwargs: - # Create empty aspect size kwarg that will be passed to imshow aspect_size_kwarg = {} else: @@ -154,7 +153,6 @@ def rgb( # If no value is supplied for `index` (the default), plot using default # values and arguments passed via `**kwargs` if index is None: - # Select bands and convert to DataArray da = ds[bands].to_array() @@ -166,13 +164,11 @@ def rgb( # If there are more than three dimensions and the index dimension == 1, # squeeze this dimension out to remove it if (len(ds.dims) > 2) and ("col" not in kwargs) and (len(da[index_dim]) == 1): - da = da.squeeze(dim=index_dim) # If there are more than three dimensions and the index dimension # is longer than 1, raise exception to tell user to use 'col'/`index` elif (len(ds.dims) > 2) and ("col" not in kwargs) and (len(da[index_dim]) > 1): - raise Exception( f"The input dataset `ds` has more than two dimensions: " "{list(ds.dims.keys())}. 
Please select a single observation " @@ -181,26 +177,18 @@ def rgb( "call" ) da = da.compute() - img = da.plot.imshow( - robust=robust, col_wrap=col_wrap, **aspect_size_kwarg, **kwargs - ) + img = da.plot.imshow(robust=robust, col_wrap=col_wrap, **aspect_size_kwarg, **kwargs) # If values provided for `index`, extract corresponding observations and # plot as either single image or facet plot else: - # If a float is supplied instead of an integer index, raise exception if isinstance(index, float): - raise Exception( - f"Please supply `index` as either an integer or a list of " "integers" - ) + raise Exception(f"Please supply `index` as either an integer or a list of " "integers") # If col argument is supplied as well as `index`, raise exception if "col" in kwargs: - raise Exception( - f"Cannot supply both `index` and `col`; please remove one and " - "try again" - ) + raise Exception(f"Cannot supply both `index` and `col`; please remove one and " "try again") # Convert index to generic type list so that number of indices supplied # can be computed @@ -216,7 +204,6 @@ def rgb( # If multiple index values are supplied, plot as a faceted plot if len(index) > 1: - img = da.plot.imshow( robust=robust, col=index_dim, @@ -228,16 +215,12 @@ def rgb( # If only one index is supplied, squeeze out index_dim and plot as a # single panel else: - - img = da.squeeze(dim=index_dim).plot.imshow( - robust=robust, **aspect_size_kwarg, **kwargs - ) + img = da.squeeze(dim=index_dim).plot.imshow(robust=robust, **aspect_size_kwarg, **kwargs) # If an export path is provided, save image to file. Individual and # faceted plots have a different API (figure vs fig) so we get around this # using a try statement: if savefig_path: - print(f"Exporting image to {savefig_path}") try: @@ -291,14 +274,8 @@ def display_map(x, y, crs="EPSG:4326", margin=-0.5, zoom_bias=0): all_longitude, all_latitude = transformer.transform(all_x, all_y) # Calculate zoom level based on coordinates - lat_zoom_level = ( - _degree_to_zoom_level(min(all_latitude), max(all_latitude), margin=margin) - + zoom_bias - ) - lon_zoom_level = ( - _degree_to_zoom_level(min(all_longitude), max(all_longitude), margin=margin) - + zoom_bias - ) + lat_zoom_level = _degree_to_zoom_level(min(all_latitude), max(all_latitude), margin=margin) + zoom_bias + lon_zoom_level = _degree_to_zoom_level(min(all_longitude), max(all_longitude), margin=margin) + zoom_bias zoom_level = min(lat_zoom_level, lon_zoom_level) # Identify centre point for plotting @@ -322,9 +299,7 @@ def display_map(x, y, crs="EPSG:4326", margin=-0.5, zoom_bias=0): ] # Add bounding box as an overlay - interactive_map.add_child( - folium.features.PolyLine(locations=line_segments, color="red", opacity=0.8) - ) + interactive_map.add_child(folium.features.PolyLine(locations=line_segments, color="red", opacity=0.8)) # Add clickable lat-lon popup box interactive_map.add_child(folium.features.LatLngPopup()) @@ -425,7 +400,6 @@ def on_hover(event, id, properties): # If continuous is False, remap categorical classes for visualisation if not continuous: - # Zip classes data together to make a dictionary classes_uni = list(gdf[attribute].unique()) classes_clean = list(range(0, len(classes_uni))) @@ -436,7 +410,6 @@ def on_hover(event, id, properties): # If continuous is True then do not remap else: - # Get values to colour by as a list classes = gdf[attribute].tolist() @@ -450,7 +423,6 @@ def on_hover(event, id, properties): lat = (lat1 + lat2) / 2 if default_zoom is None: - # Calculate default zoom from latitude of 
features default_zoom = _degree_to_zoom_level(lat1, lat2, margin=-0.5) @@ -469,9 +441,7 @@ def on_hover(event, id, properties): # Get `branca.colormap` object from matplotlib string cm_cmap = cm.get_cmap(cmap, 30) - colormap = branca.colormap.LinearColormap( - [cm_cmap(i) for i in np.linspace(0, 1, 30)] - ) + colormap = branca.colormap.LinearColormap([cm_cmap(i) for i in np.linspace(0, 1, 30)]) # Create the choropleth choropleth = Choropleth( @@ -486,30 +456,25 @@ def on_hover(event, id, properties): # across the 'fillColor' attribute to the 'color' attribute for each # feature, then plot the data as a GeoJSON layer rather than the # choropleth layer that we use for polygon data. - linefeatures = any( - x in ["LineString", "MultiLineString"] for x in gdf.geometry.type.values - ) + linefeatures = any(x in ["LineString", "MultiLineString"] for x in gdf.geometry.type.values) if linefeatures: - # Copy colour from fill to line edge colour for i in keys: - choropleth.data["features"][i]["properties"]["style"][ - "color" - ] = choropleth.data["features"][i]["properties"]["style"]["fillColor"] + choropleth.data["features"][i]["properties"]["style"]["color"] = choropleth.data["features"][i][ + "properties" + ]["style"]["fillColor"] # Add GeoJSON layer to map feature_layer = GeoJSON(data=choropleth.data, style=style_kwargs) m.add_layer(feature_layer) else: - # Add Choropleth layer to map m.add_layer(choropleth) # If a column is specified by `hover_col`, print data from the # hovered feature above the map if hover_col and not linefeatures: - # Use cholopleth object if data is polygon lbl = ipywidgets.Label() dbg = ipywidgets.Output() @@ -517,7 +482,6 @@ def on_hover(event, id, properties): display(lbl) else: - lbl = ipywidgets.Label() dbg = ipywidgets.Output() feature_layer.on_hover(on_hover) @@ -682,7 +646,6 @@ def _start_end_times(gdf, ds): # Update both `start_time` and `end_time` columns for time_col, time_val in zip(["start_time", "end_time"], minmax_times): - # Add time_col if it does not exist if time_col not in gdf: gdf[time_col] = np.nan @@ -704,9 +667,7 @@ def _add_colorbar(fig, ax, vmin, vmax, imshow_defaults, colorbar_defaults): # Initialise color bar using plot min and max values img = ax.imshow(np.array([[vmin, vmax]]), **imshow_defaults) - fig.colorbar( - img, cax=cax, orientation="horizontal", ticks=np.linspace(vmin, vmax, 2) - ) + fig.colorbar(img, cax=cax, orientation="horizontal", ticks=np.linspace(vmin, vmax, 2)) # Fine-tune appearance of colorbar cax.xaxis.set_ticks_position("top") @@ -736,14 +697,9 @@ def _frame_annotation(times, show_date, show_text): f"of timesteps in `ds` (n={len(times)})" ) - times_list = ( - times.dt.strftime(show_date).values if show_date else [None] * len(times) - ) + times_list = times.dt.strftime(show_date).values if show_date else [None] * len(times) text_list = show_text if is_sequence else [show_text] * len(times) - annotation_list = [ - "\n".join([str(i) for i in (a, b) if i]) - for a, b in zip(times_list, text_list) - ] + annotation_list = ["\n".join([str(i) for i in (a, b) if i]) for a, b in zip(times_list, text_list)] return annotation_list @@ -779,17 +735,13 @@ def _update_frames( # Add geodataframe annotation if show_gdf is not None: - # Obtain start and end times to filter geodataframe features time_i = ds.time.isel(time=i).values # Subset geodataframe using start and end dates - gdf_subset = show_gdf.loc[ - (show_gdf.start_time <= time_i) & (show_gdf.end_time >= time_i) - ] + gdf_subset = show_gdf.loc[(show_gdf.start_time <= time_i) & 
(show_gdf.end_time >= time_i)] if len(gdf_subset.index) > 0: - # Set color to geodataframe field if supplied if ("color" in gdf_subset) and ("color" not in gdf_kwargs): gdf_defaults.update({"color": gdf_subset["color"].tolist()}) @@ -816,17 +768,11 @@ def _update_frames( # Test if bands exist in dataset missing_bands = [b for b in bands if b not in ds.data_vars] if missing_bands: - raise ValueError( - f"Band(s) {missing_bands} do not exist as " - f"variables in `ds` {list(ds.data_vars)}" - ) + raise ValueError(f"Band(s) {missing_bands} do not exist as " f"variables in `ds` {list(ds.data_vars)}") # Test if time dimension exists in dataset if "time" not in ds.dims: - raise ValueError( - f"`ds` does not contain a 'time' dimension " - f"required for generating an animation" - ) + raise ValueError(f"`ds` does not contain a 'time' dimension " f"required for generating an animation") # Set default parameters outline = [PathEffects.withStroke(linewidth=2.5, foreground="black")] @@ -870,10 +816,7 @@ def _update_frames( array = ds.astype(np.float32).values # Optionally apply image processing along axis 0 (e.g. to each timestep) - bar_format = ( - "{l_bar}{bar}| {n_fmt}/{total_fmt} ({remaining_s:.1f} " - "seconds remaining at {rate_fmt}{postfix})" - ) + bar_format = "{l_bar}{bar}| {n_fmt}/{total_fmt} ({remaining_s:.1f} " "seconds remaining at {rate_fmt}{postfix})" if image_proc_funcs: print("Applying custom image processing functions") for i, array_i in tqdm( @@ -1113,7 +1056,7 @@ def plot_lulc(lulc, product=None, legend=True, **plot_kwargs): "clouds", "rangeland", ] - ticks = list(np.mean((bounds[i+1], val)) for i, val in enumerate(bounds[:-1])) + ticks = list(np.mean((bounds[i + 1], val)) for i, val in enumerate(bounds[:-1])) except: AttributeError @@ -1153,91 +1096,99 @@ def plot_lulc(lulc, product=None, legend=True, **plot_kwargs): ] except: AttributeError - + if "CGLS" in product: try: - labels = {0: {'color': '#282828', 'flag': 'unknown'}, - 20: {'color': '#FFBB22', 'flag': 'shrubs'}, - 30: {'color': '#FFFF4C', 'flag': 'herbaceous_vegetation'}, - 40: {'color': '#F096FF', 'flag': 'cultivated_and_managed_vegetation_or_agriculture'}, - 50: {'color': '#FA0000', 'flag': 'urban_or_built_up'}, - 60: {'color': '#B4B4B4', 'flag': 'bare_or_sparse_vegetation'}, - 70: {'color': '#F0F0F0', 'flag': 'snow_and_ice'}, - 80: {'color': '#0032C8', 'flag': 'permanent_water_bodies'}, - 90: {'color': '#0096A0', 'flag': 'herbaceous_wetland'}, - 100: {'color': '#FAE6A0', 'flag': 'moss_and_lichen'}, - 111: {'color': '#58481F', 'flag': 'closed_forest_evergreen_needle_leaf'}, - 112: {'color': '#009900', 'flag': 'closed_forest_evergreen_broad_leaf'}, - 113: {'color': '#70663E', 'flag': 'closed_forest_deciduous_needle_leaf'}, - 114: {'color': '#00CC00', 'flag': 'closed_forest_deciduous_broad_leaf'}, - 115: {'color': '#4E751F', 'flag': 'closed_forest_mixed'}, - 116: {'color': '#007800', 'flag': 'closed_forest_not_matching_any_of_the_other_definitions'}, - 121: {'color': '#666000', 'flag': 'open_forest_evergreen_needle_leaf'}, - 122: {'color': '#8DB400', 'flag': 'open_forest_evergreen_broad_leaf'}, - 123: {'color': '#8D7400', 'flag': 'open_forest_deciduous_needle_leaf'}, - 124: {'color': '#A0DC00', 'flag': 'open_forest_deciduous_broad_leaf'}, - 125: {'color': '#929900', 'flag': 'open_forest_mixed'}, - 126: {'color': '#648C00', 'flag': 'open_forest_not_matching_any_of_the_other_definitions'}, - 200: {'color': '#000080', 'flag': 'oceans_seas'}} - - colors = [label['color'] for label in labels.values()] - cmap = 
ListedColormap([label['color'] for label in labels.values()]) - norm = mcolours.BoundaryNorm(list(labels.keys())+[201], cmap.N+1, extend='max') - ticks = list(np.mean((list(list(labels.keys())+[201])[i+1], val)) for i, val in enumerate(list(labels.keys()))) - cblabels=[label['flag'] for label in labels.values()] - + labels = { + 0: {"color": "#282828", "flag": "unknown"}, + 20: {"color": "#FFBB22", "flag": "shrubs"}, + 30: {"color": "#FFFF4C", "flag": "herbaceous_vegetation"}, + 40: {"color": "#F096FF", "flag": "cultivated_and_managed_vegetation_or_agriculture"}, + 50: {"color": "#FA0000", "flag": "urban_or_built_up"}, + 60: {"color": "#B4B4B4", "flag": "bare_or_sparse_vegetation"}, + 70: {"color": "#F0F0F0", "flag": "snow_and_ice"}, + 80: {"color": "#0032C8", "flag": "permanent_water_bodies"}, + 90: {"color": "#0096A0", "flag": "herbaceous_wetland"}, + 100: {"color": "#FAE6A0", "flag": "moss_and_lichen"}, + 111: {"color": "#58481F", "flag": "closed_forest_evergreen_needle_leaf"}, + 112: {"color": "#009900", "flag": "closed_forest_evergreen_broad_leaf"}, + 113: {"color": "#70663E", "flag": "closed_forest_deciduous_needle_leaf"}, + 114: {"color": "#00CC00", "flag": "closed_forest_deciduous_broad_leaf"}, + 115: {"color": "#4E751F", "flag": "closed_forest_mixed"}, + 116: {"color": "#007800", "flag": "closed_forest_not_matching_any_of_the_other_definitions"}, + 121: {"color": "#666000", "flag": "open_forest_evergreen_needle_leaf"}, + 122: {"color": "#8DB400", "flag": "open_forest_evergreen_broad_leaf"}, + 123: {"color": "#8D7400", "flag": "open_forest_deciduous_needle_leaf"}, + 124: {"color": "#A0DC00", "flag": "open_forest_deciduous_broad_leaf"}, + 125: {"color": "#929900", "flag": "open_forest_mixed"}, + 126: {"color": "#648C00", "flag": "open_forest_not_matching_any_of_the_other_definitions"}, + 200: {"color": "#000080", "flag": "oceans_seas"}, + } + + colors = [label["color"] for label in labels.values()] + cmap = ListedColormap([label["color"] for label in labels.values()]) + norm = mcolours.BoundaryNorm(list(labels.keys()) + [201], cmap.N + 1, extend="max") + ticks = list( + np.mean((list(list(labels.keys()) + [201])[i + 1], val)) for i, val in enumerate(list(labels.keys())) + ) + cblabels = [label["flag"] for label in labels.values()] + except: AttributeError - if 'CCI' in product: + if "CCI" in product: try: - labels = {0: {'color': '#282828', 'flag': 'no data'}, - 10: {'color': '#EBEB34', 'flag': 'cropland, rainfed'}, - 11: {'color': '#D9EB34', 'flag': 'cropland, rainfed, herbaceous cover'}, - 12: {'color': '#EBDF34', 'flag': 'cropland, rainfed, tree or shrub cover'}, - 20: {'color': '#34EBE2', 'flag': 'cropland, irrigated or post-flooding'}, - 30: {'color': '#EBBD34', 'flag': 'mosaic cropland/natural vegetation'}, - 40: {'color': '#eba534', 'flag': 'mosaic natural vegetation/cropland'}, - 50: {'color': '#34eb46', 'flag': 'tree cover, broadleaved, evergreen, closed to open'}, - 60: {'color': '#21750e', 'flag': 'tree cover, broadleaved, deciduous, closed to open'}, - 61: {'color': '#449432', 'flag': 'tree cover, broadleaved, deciduous, closed'}, - 62: {'color': '#5da64c', 'flag': 'tree cover, broadleaved, deciduous, open'}, - 70: {'color': '#16470b', 'flag': 'tree cover, needleleaved, evergreen, closed to open'}, - 71: {'color': '#237012', 'flag': 'tree cover, needleleaved, evergreen, closed'}, - 72: {'color': '#237012', 'flag': 'tree cover, needleleaved, evergreen, open'}, - 80: {'color': '#31a317', 'flag': 'tree cover, needleleaved, deciduous, closed to open'}, - 81: {'color': 
'#57ed34', 'flag': 'tree cover, needleleaved, deciduous, closed'}, - 82: {'color': '#81f765', 'flag': 'tree cover, needleleaved, deciduous, open'}, - 90: {'color': '#b6ed64', 'flag': 'tree cover, mixed leaf type'}, - 100: {'color': '#6f8f3f', 'flag': 'mosaic tree and shrub/herbaceous cover'}, - 110: {'color': '#ad950c', 'flag': 'mosaic herbaceous cover/tree and shrub'}, - 120: {'color': '#5e5209', 'flag': 'shrubland'}, - 121: {'color': '#292302', 'flag': 'shrubland, evergreen'}, - 122: {'color': '#a89008', 'flag': 'shrubland, deciduous'}, - 130: {'color': '#f7bf07', 'flag': 'grassland'}, - 140: {'color': '#f57feb', 'flag': 'lichens and mosses'}, - 150: {'color': '#f57feb', 'flag': 'sparse vegetation'}, - 151: {'color': '#fcf7a4', 'flag': 'sparse tree'}, - 152: {'color': '#d4cf87', 'flag': 'sparse shrub'}, - 153: {'color': '#b0aa54', 'flag': 'sparse herbaceous cover'}, - 160: {'color': '#159638', 'flag': 'tree cover, flooded, fresh or brakish water'}, - 170: {'color': '#22bf81', 'flag': 'tree cover, flooded, saline water'}, - 180: {'color': '#44eba9', 'flag': 'shrub or herbaceous cover, flooded, fresh/saline/brakish water'}, - 190: {'color': '#a3273c', 'flag': 'urban areas'}, - 200: {'color': '#fffbcc', 'flag': 'bare areas'}, - 201: {'color': '#b0afa4', 'flag': 'consolidated bare areas'}, - 202: {'color': '#d6d4b6', 'flag': 'unconsolidated bare areas'}, - 210: {'color': '#1A3EF0', 'flag': 'water bodies'}, - 220: {'color': '#ffffff', 'flag': 'permanent snow and ice'}} - - colors = [label['color'] for label in labels.values()] - cmap = ListedColormap([label['color'] for label in labels.values()]) - norm = mcolours.BoundaryNorm(list(labels.keys())+[221], cmap.N+1, extend='max') - ticks = list(np.mean((list(list(labels.keys())+[221])[i+1], val)) for i, val in enumerate(list(labels.keys()))) - cblabels=[label['flag'] for label in labels.values()] - + labels = { + 0: {"color": "#282828", "flag": "no data"}, + 10: {"color": "#EBEB34", "flag": "cropland, rainfed"}, + 11: {"color": "#D9EB34", "flag": "cropland, rainfed, herbaceous cover"}, + 12: {"color": "#EBDF34", "flag": "cropland, rainfed, tree or shrub cover"}, + 20: {"color": "#34EBE2", "flag": "cropland, irrigated or post-flooding"}, + 30: {"color": "#EBBD34", "flag": "mosaic cropland/natural vegetation"}, + 40: {"color": "#eba534", "flag": "mosaic natural vegetation/cropland"}, + 50: {"color": "#34eb46", "flag": "tree cover, broadleaved, evergreen, closed to open"}, + 60: {"color": "#21750e", "flag": "tree cover, broadleaved, deciduous, closed to open"}, + 61: {"color": "#449432", "flag": "tree cover, broadleaved, deciduous, closed"}, + 62: {"color": "#5da64c", "flag": "tree cover, broadleaved, deciduous, open"}, + 70: {"color": "#16470b", "flag": "tree cover, needleleaved, evergreen, closed to open"}, + 71: {"color": "#237012", "flag": "tree cover, needleleaved, evergreen, closed"}, + 72: {"color": "#237012", "flag": "tree cover, needleleaved, evergreen, open"}, + 80: {"color": "#31a317", "flag": "tree cover, needleleaved, deciduous, closed to open"}, + 81: {"color": "#57ed34", "flag": "tree cover, needleleaved, deciduous, closed"}, + 82: {"color": "#81f765", "flag": "tree cover, needleleaved, deciduous, open"}, + 90: {"color": "#b6ed64", "flag": "tree cover, mixed leaf type"}, + 100: {"color": "#6f8f3f", "flag": "mosaic tree and shrub/herbaceous cover"}, + 110: {"color": "#ad950c", "flag": "mosaic herbaceous cover/tree and shrub"}, + 120: {"color": "#5e5209", "flag": "shrubland"}, + 121: {"color": "#292302", "flag": "shrubland, 
evergreen"}, + 122: {"color": "#a89008", "flag": "shrubland, deciduous"}, + 130: {"color": "#f7bf07", "flag": "grassland"}, + 140: {"color": "#f57feb", "flag": "lichens and mosses"}, + 150: {"color": "#f57feb", "flag": "sparse vegetation"}, + 151: {"color": "#fcf7a4", "flag": "sparse tree"}, + 152: {"color": "#d4cf87", "flag": "sparse shrub"}, + 153: {"color": "#b0aa54", "flag": "sparse herbaceous cover"}, + 160: {"color": "#159638", "flag": "tree cover, flooded, fresh or brakish water"}, + 170: {"color": "#22bf81", "flag": "tree cover, flooded, saline water"}, + 180: {"color": "#44eba9", "flag": "shrub or herbaceous cover, flooded, fresh/saline/brakish water"}, + 190: {"color": "#a3273c", "flag": "urban areas"}, + 200: {"color": "#fffbcc", "flag": "bare areas"}, + 201: {"color": "#b0afa4", "flag": "consolidated bare areas"}, + 202: {"color": "#d6d4b6", "flag": "unconsolidated bare areas"}, + 210: {"color": "#1A3EF0", "flag": "water bodies"}, + 220: {"color": "#ffffff", "flag": "permanent snow and ice"}, + } + + colors = [label["color"] for label in labels.values()] + cmap = ListedColormap([label["color"] for label in labels.values()]) + norm = mcolours.BoundaryNorm(list(labels.keys()) + [221], cmap.N + 1, extend="max") + ticks = list( + np.mean((list(list(labels.keys()) + [221])[i + 1], val)) for i, val in enumerate(list(labels.keys())) + ) + cblabels = [label["flag"] for label in labels.values()] + except: AttributeError - + try: im = lulc.plot.imshow(cmap=cmap, norm=norm, add_colorbar=legend, **plot_kwargs) except AttributeError: @@ -1250,7 +1201,7 @@ def plot_lulc(lulc, product=None, legend=True, **plot_kwargs): cb = im.cbar if "ESRI" in product: - cb.set_ticks(np.arange(0, 11, 1)+0.5) + cb.set_ticks(np.arange(0, 11, 1) + 0.5) cb.set_ticklabels(cblabels) if "IO" in product: @@ -1260,11 +1211,11 @@ def plot_lulc(lulc, product=None, legend=True, **plot_kwargs): if "ESA" in product: cb.set_ticks([0, 10, 20, 30, 40, 50, 60, 70, 80, 88.5, 95, 101.5]) cb.set_ticklabels(cblabels) - + if "CGLS" in product: cb.set_ticks(ticks) cb.set_ticklabels(cblabels) - + if "CCI" in product: cb.set_ticks(ticks) cb.set_ticklabels(cblabels) diff --git a/deafrica_tools/spatial.py b/deafrica_tools/spatial.py index 10672a8..d04b226 100644 --- a/deafrica_tools/spatial.py +++ b/deafrica_tools/spatial.py @@ -1,13 +1,14 @@ -''' +""" Spatial analyses functions for Digital Earth Africa data. -''' +""" # Import required packages # Force GeoPandas to use Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. import os -os.environ['USE_PYGEOS'] = '0' + +os.environ["USE_PYGEOS"] = "0" import fiona import collections @@ -30,66 +31,70 @@ from datacube.utils.geometry import CRS, Geometry from shapely.geometry import LineString, MultiLineString, shape -def xr_vectorize(da, - attribute_col='attribute', - transform=None, - crs=None, - dtype='float32', - export_shp=False, - verbose=False, - **rasterio_kwargs): + +def xr_vectorize( + da, + attribute_col="attribute", + transform=None, + crs=None, + dtype="float32", + export_shp=False, + verbose=False, + **rasterio_kwargs, +): """ Vectorises a xarray.DataArray into a geopandas.GeoDataFrame. - + Parameters ---------- - da : xarray dataarray or a numpy ndarray + da : xarray dataarray or a numpy ndarray attribute_col : str, optional - Name of the attribute column in the resulting geodataframe. - Values of the raster object converted to polygons will be + Name of the attribute column in the resulting geodataframe. 
+ Values of the raster object converted to polygons will be assigned to this column. Defaults to 'attribute'. transform : affine.Affine object, optional - An affine.Affine object (e.g. `from affine import Affine; - Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "6886890.0) giving the - affine transformation used to convert raster coordinates - (e.g. [0, 0]) to geographic coordinates. If none is provided, - the function will attempt to obtain an affine transformation + An affine.Affine object (e.g. `from affine import Affine; + Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "6886890.0) giving the + affine transformation used to convert raster coordinates + (e.g. [0, 0]) to geographic coordinates. If none is provided, + the function will attempt to obtain an affine transformation from the xarray object (e.g. either at `da.transform` or `da.geobox.transform`). crs : str or CRS object, optional - An EPSG string giving the coordinate system of the array - (e.g. 'EPSG:3577'). If none is provided, the function will - attempt to extract a CRS from the xarray object's `crs` + An EPSG string giving the coordinate system of the array + (e.g. 'EPSG:3577'). If none is provided, the function will + attempt to extract a CRS from the xarray object's `crs` attribute. dtype : str, optional - Data type must be one of int16, int32, uint8, uint16, + Data type must be one of int16, int32, uint8, uint16, or float32 export_shp : Boolean or string path, optional To export the output vectorised features to a shapefile, supply - an output path (e.g. 'output_dir/output.shp'. The default is - False, which will not write out a shapefile. + an output path (e.g. 'output_dir/output.shp'. The default is + False, which will not write out a shapefile. verbose : bool, optional Print debugging messages. Default False. - **rasterio_kwargs : + **rasterio_kwargs : A set of keyword arguments to rasterio.features.shapes Can include `mask` and `connectivity`. - + Returns ------- gdf : Geopandas GeoDataFrame - + """ - # Check for a crs object try: crs = da.crs except: if crs is None: - raise Exception("Please add a `crs` attribute to the " - "xarray.DataArray, or provide a CRS using the " - "function's `crs` parameter (e.g. 'EPSG:3577')") - + raise Exception( + "Please add a `crs` attribute to the " + "xarray.DataArray, or provide a CRS using the " + "function's `crs` parameter (e.g. 'EPSG:3577')" + ) + # Check if transform is provided as a xarray.DataArray method. # If not, require supplied Affine if transform is None: @@ -102,94 +107,92 @@ def xr_vectorize(da, # Try getting transform from 'transform' attribute transform = da.transform except: - # If neither of those options work, raise an exception telling the + # If neither of those options work, raise an exception telling the # user to provide a transform - raise TypeError("Please provide an Affine transform object using the " - "`transform` parameter (e.g. `from affine import " - "Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, " - "6886890.0)`") - + raise TypeError( + "Please provide an Affine transform object using the " + "`transform` parameter (e.g. 
`from affine import " + "Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, " + "6886890.0)`" + ) + # Check to see if the input is a numpy array if type(da) is np.ndarray: - vectors = rasterio.features.shapes(source=da.astype(dtype), - transform=transform, - **rasterio_kwargs) - + vectors = rasterio.features.shapes(source=da.astype(dtype), transform=transform, **rasterio_kwargs) + else: # Run the vectorizing function - vectors = rasterio.features.shapes(source=da.data.astype(dtype), - transform=transform, - **rasterio_kwargs) - + vectors = rasterio.features.shapes(source=da.data.astype(dtype), transform=transform, **rasterio_kwargs) + # Convert the generator into a list vectors = list(vectors) - + # Extract the polygon coordinates and values from the list polygons = [polygon for polygon, value in vectors] values = [value for polygon, value in vectors] - + # Convert polygon coordinates into polygon shapes polygons = [shape(polygon) for polygon in polygons] - + # Create a geopandas dataframe populated with the polygon shapes - gdf = gpd.GeoDataFrame(data={attribute_col: values}, - geometry=polygons, - crs=str(crs)) - + gdf = gpd.GeoDataFrame(data={attribute_col: values}, geometry=polygons, crs=str(crs)) + # If a file path is supplied, export a shapefile if export_shp: - gdf.to_file(export_shp) - + gdf.to_file(export_shp) + return gdf -def xr_rasterize(gdf, - da, - attribute_col=False, - crs=None, - transform=None, - name=None, - x_dim='x', - y_dim='y', - export_tiff=None, - verbose=False, - **rasterio_kwargs): +def xr_rasterize( + gdf, + da, + attribute_col=False, + crs=None, + transform=None, + name=None, + x_dim="x", + y_dim="y", + export_tiff=None, + verbose=False, + **rasterio_kwargs, +): """ Rasterizes a geopandas.GeoDataFrame into an xarray.DataArray. - + Parameters ---------- gdf : geopandas.GeoDataFrame A geopandas.GeoDataFrame object containing the vector/shapefile data you want to rasterise. da : xarray.DataArray or xarray.Dataset - The shape, coordinates, dimensions, and transform of this object - are used to build the rasterized shapefile. It effectively - provides a template. The attributes of this object are also + The shape, coordinates, dimensions, and transform of this object + are used to build the rasterized shapefile. It effectively + provides a template. The attributes of this object are also appended to the output xarray.DataArray. attribute_col : string, optional - Name of the attribute column in the geodataframe that the pixels - in the raster will contain. If set to False, output will be a + Name of the attribute column in the geodataframe that the pixels + in the raster will contain. If set to False, output will be a boolean array of 1's and 0's. crs : str, optional CRS metadata to add to the output xarray. e.g. 'epsg:3577'. - The function will attempt get this info from the input + The function will attempt get this info from the input GeoDataFrame first. transform : affine.Affine object, optional - An affine.Affine object (e.g. `from affine import Affine; - Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "6886890.0) giving the - affine transformation used to convert raster coordinates - (e.g. [0, 0]) to geographic coordinates. If none is provided, - the function will attempt to obtain an affine transformation + An affine.Affine object (e.g. `from affine import Affine; + Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "6886890.0) giving the + affine transformation used to convert raster coordinates + (e.g. [0, 0]) to geographic coordinates. 
If none is provided, + the function will attempt to obtain an affine transformation from the xarray object (e.g. either at `da.transform` or `da.geobox.transform`). x_dim : str, optional - An optional string allowing you to override the xarray dimension - used for x coordinates. Defaults to 'x'. Useful, for example, - if x and y dims instead called 'lat' and 'lon'. + An optional string allowing you to override the xarray dimension + used for x coordinates. Defaults to 'x'. Useful, for example, + if x and y dims instead called 'lat' and 'lon'. y_dim : str, optional - An optional string allowing you to override the xarray dimension - used for y coordinates. Defaults to 'y'. Useful, for example, + An optional string allowing you to override the xarray dimension + used for y coordinates. Defaults to 'y'. Useful, for example, if x and y dims instead called 'lat' and 'lon'. export_tiff: str, optional If a filepath is provided (e.g 'output/output.tif'), will export a @@ -197,16 +200,16 @@ def xr_rasterize(gdf, is not supplied by the user a default name, 'data', is used verbose : bool, optional Print debugging messages. Default False. - **rasterio_kwargs : + **rasterio_kwargs : A set of keyword arguments to rasterio.features.rasterize Can include: 'all_touched', 'merge_alg', 'dtype'. - + Returns ------- xarr : xarray.DataArray - + """ - + # Check for a crs object try: crs = da.geobox.crs @@ -215,10 +218,12 @@ def xr_rasterize(gdf, crs = da.crs except: if crs is None: - raise ValueError("Please add a `crs` attribute to the " - "xarray.DataArray, or provide a CRS using the " - "function's `crs` parameter (e.g. crs='EPSG:3577')") - + raise ValueError( + "Please add a `crs` attribute to the " + "xarray.DataArray, or provide a CRS using the " + "function's `crs` parameter (e.g. crs='EPSG:3577')" + ) + # Check if transform is provided as a xarray.DataArray method. # If not, require supplied Affine if transform is None: @@ -231,43 +236,45 @@ def xr_rasterize(gdf, # Try getting transform from 'transform' attribute transform = da.transform except: - # If neither of those options work, raise an exception telling the + # If neither of those options work, raise an exception telling the # user to provide a transform - raise TypeError("Please provide an Affine transform object using the " - "`transform` parameter (e.g. `from affine import " - "Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, " - "6886890.0)`") - - # Grab the 2D dims (not time) + raise TypeError( + "Please provide an Affine transform object using the " + "`transform` parameter (e.g. 
`from affine import " + "Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, " + "6886890.0)`" + ) + + # Grab the 2D dims (not time) try: dims = da.geobox.dims except: - dims = y_dim, x_dim - + dims = y_dim, x_dim + # Coords xy_coords = [da[dims[0]], da[dims[1]]] - #xy_coords = [da['y'], da['x']] - + # xy_coords = [da['y'], da['x']] + # Shape try: y, x = da.geobox.shape except: y, x = len(xy_coords[0]), len(xy_coords[1]) - + # Reproject shapefile to match CRS of raster if verbose: - print(f'Rasterizing to match xarray.DataArray dimensions ({y}, {x})') - + print(f"Rasterizing to match xarray.DataArray dimensions ({y}, {x})") + try: gdf_reproj = gdf.to_crs(crs=crs) except: # Sometimes the crs can be a datacube utils CRS object # so convert to string before reprojecting - gdf_reproj = gdf.to_crs(crs={'init': str(crs)}) - - # If an attribute column is specified, rasterise using vector + gdf_reproj = gdf.to_crs(crs={"init": str(crs)}) + + # If an attribute column is specified, rasterise using vector # attribute values. Otherwise, rasterise into a boolean array - if attribute_col: + if attribute_col: # Use the geometry and attributes from `gdf` to create an iterable shapes = zip(gdf_reproj.geometry, gdf_reproj[attribute_col]) else: @@ -275,103 +282,95 @@ def xr_rasterize(gdf, shapes = gdf_reproj.geometry # Rasterise shapes into an array - arr = rasterio.features.rasterize(shapes=shapes, - out_shape=(y, x), - transform=transform, - **rasterio_kwargs) - + arr = rasterio.features.rasterize(shapes=shapes, out_shape=(y, x), transform=transform, **rasterio_kwargs) + # Convert result to a xarray.DataArray - xarr = xr.DataArray(arr, - coords=xy_coords, - dims=dims, - attrs=da.attrs, - name=name if name else None) - + xarr = xr.DataArray(arr, coords=xy_coords, dims=dims, attrs=da.attrs, name=name if name else None) + # Add back crs if xarr.attrs doesn't have it if xarr.geobox is None: xarr = assign_crs(xarr, str(crs)) - - if export_tiff: + + if export_tiff: if verbose: print(f"Exporting GeoTIFF to {export_tiff}") - write_cog(xarr, - export_tiff, - overwrite=True) - + write_cog(xarr, export_tiff, overwrite=True) + return xarr -def subpixel_contours(da, - z_values=[0.0], - crs=None, - affine=None, - attribute_df=None, - output_path=None, - min_vertices=2, - dim='time', - errors='ignore', - verbose=False): - +def subpixel_contours( + da, + z_values=[0.0], + crs=None, + affine=None, + attribute_df=None, + output_path=None, + min_vertices=2, + dim="time", + errors="ignore", + verbose=False, +): """ - Uses `skimage.measure.find_contours` to extract multiple z-value + Uses `skimage.measure.find_contours` to extract multiple z-value contour lines from a two-dimensional array (e.g. multiple elevations - from a single DEM), or one z-value for each array along a specified - dimension of a multi-dimensional array (e.g. to map waterlines - across time by extracting a 0 NDWI contour from each individual - timestep in an xarray timeseries). - - Contours are returned as a geopandas.GeoDataFrame with one row per - z-value or one row per array along a specified dimension. The - `attribute_df` parameter can be used to pass custom attributes + from a single DEM), or one z-value for each array along a specified + dimension of a multi-dimensional array (e.g. to map waterlines + across time by extracting a 0 NDWI contour from each individual + timestep in an xarray timeseries). + + Contours are returned as a geopandas.GeoDataFrame with one row per + z-value or one row per array along a specified dimension. 
The + `attribute_df` parameter can be used to pass custom attributes to the output contour features. - + Last modified: November 2020 - + Parameters - ---------- + ---------- da : xarray DataArray - A two-dimensional or multi-dimensional array from which - contours are extracted. If a two-dimensional array is provided, - the analysis will run in 'single array, multiple z-values' mode + A two-dimensional or multi-dimensional array from which + contours are extracted. If a two-dimensional array is provided, + the analysis will run in 'single array, multiple z-values' mode which allows you to specify multiple `z_values` to be extracted. - If a multi-dimensional array is provided, the analysis will run - in 'single z-value, multiple arrays' mode allowing you to - extract contours for each array along the dimension specified - by the `dim` parameter. + If a multi-dimensional array is provided, the analysis will run + in 'single z-value, multiple arrays' mode allowing you to + extract contours for each array along the dimension specified + by the `dim` parameter. z_values : int, float or list of ints, floats - An individual z-value or list of multiple z-values to extract - from the array. If operating in 'single z-value, multiple + An individual z-value or list of multiple z-values to extract + from the array. If operating in 'single z-value, multiple arrays' mode specify only a single z-value. crs : string or CRS object, optional - An EPSG string giving the coordinate system of the array - (e.g. 'EPSG:3577'). If none is provided, the function will - attempt to extract a CRS from the xarray object's `crs` + An EPSG string giving the coordinate system of the array + (e.g. 'EPSG:3577'). If none is provided, the function will + attempt to extract a CRS from the xarray object's `crs` attribute. affine : affine.Affine object, optional - An affine.Affine object (e.g. `from affine import Affine; - Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "6886890.0) giving the - affine transformation used to convert raster coordinates - (e.g. [0, 0]) to geographic coordinates. If none is provided, - the function will attempt to obtain an affine transformation + An affine.Affine object (e.g. `from affine import Affine; + Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "6886890.0) giving the + affine transformation used to convert raster coordinates + (e.g. [0, 0]) to geographic coordinates. If none is provided, + the function will attempt to obtain an affine transformation from the xarray object (e.g. either at `da.transform` or `da.geobox.transform`). output_path : string, optional The path and filename for the output shapefile. attribute_df : pandas.Dataframe, optional A pandas.Dataframe containing attributes to pass to the output - contour features. The dataframe must contain either the same - number of rows as supplied `z_values` (in 'multiple z-value, - single array' mode), or the same number of rows as the number - of arrays along the `dim` dimension ('single z-value, multiple + contour features. The dataframe must contain either the same + number of rows as supplied `z_values` (in 'multiple z-value, + single array' mode), or the same number of rows as the number + of arrays along the `dim` dimension ('single z-value, multiple arrays mode'). min_vertices : int, optional - The minimum number of vertices required for a contour to be - extracted. The default (and minimum) value is 2, which is the + The minimum number of vertices required for a contour to be + extracted. 
The default (and minimum) value is 2, which is the smallest number required to produce a contour line (i.e. a start - and end point). Higher values remove smaller contours, + and end point). Higher values remove smaller contours, potentially removing noise from the output dataset. dim : string, optional - The name of the dimension along which to extract contours when + The name of the dimension along which to extract contours when operating in 'single z-value, multiple arrays' mode. The default is 'time', which extracts contours for each array along the time dimension. @@ -382,33 +381,33 @@ def subpixel_contours(da, be raised. verbose : bool, optional Print debugging messages. Default False. - + Returns ------- output_gdf : geopandas geodataframe - A geopandas geodataframe object with one feature per z-value - ('single array, multiple z-values' mode), or one row per array - along the dimension specified by the `dim` parameter ('single - z-value, multiple arrays' mode). If `attribute_df` was - provided, these values will be included in the shapefile's + A geopandas geodataframe object with one feature per z-value + ('single array, multiple z-values' mode), or one row per array + along the dimension specified by the `dim` parameter ('single + z-value, multiple arrays' mode). If `attribute_df` was + provided, these values will be included in the shapefile's attribute table. """ def contours_to_multiline(da_i, z_value, min_vertices=2): - ''' + """ Helper function to apply marching squares contour extraction to an array and return a data as a shapely MultiLineString. - The `min_vertices` parameter allows you to drop small contours + The `min_vertices` parameter allows you to drop small contours with less than X vertices. - ''' - + """ + # Extracts contours from array, and converts each discrete - # contour into a Shapely LineString feature. If the function + # contour into a Shapely LineString feature. If the function # returns a KeyError, this may be due to an unresolved issue in # scikit-image: https://github.com/scikit-image/scikit-image/issues/4830 - line_features = [LineString(i[:,[1, 0]]) - for i in find_contours(da_i.data, z_value) - if i.shape[0] > min_vertices] + line_features = [ + LineString(i[:, [1, 0]]) for i in find_contours(da_i.data, z_value) if i.shape[0] > min_vertices + ] # Output resulting lines into a single combined MultiLineString return MultiLineString(line_features) @@ -419,9 +418,11 @@ def contours_to_multiline(da_i, z_value, min_vertices=2): crs = da.crs except: if crs is None: - raise ValueError("Please add a `crs` attribute to the " - "xarray.DataArray, or provide a CRS using the " - "function's `crs` parameter (e.g. 'EPSG:3577')") + raise ValueError( + "Please add a `crs` attribute to the " + "xarray.DataArray, or provide a CRS using the " + "function's `crs` parameter (e.g. 'EPSG:3577')" + ) # Check if Affine transform is provided as a xarray.DataArray method. # If not, require supplied Affine @@ -431,188 +432,185 @@ def contours_to_multiline(da_i, z_value, min_vertices=2): affine = da.transform except: if affine is None: - raise TypeError("Please provide an Affine object using the " - "`affine` parameter (e.g. `from affine import " - "Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, " - "6886890.0)`") + raise TypeError( + "Please provide an Affine object using the " + "`affine` parameter (e.g. 
`from affine import " + "Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, " + "6886890.0)`" + ) # If z_values is supplied is not a list, convert to list: - z_values = z_values if (isinstance(z_values, list) or - isinstance(z_values, np.ndarray)) else [z_values] + z_values = z_values if (isinstance(z_values, list) or isinstance(z_values, np.ndarray)) else [z_values] # Test number of dimensions in supplied data array if len(da.shape) == 2: if verbose: - print(f'Operating in multiple z-value, single array mode') - dim = 'z_value' - contour_arrays = {str(i)[0:10]: - contours_to_multiline(da, i, min_vertices) - for i in z_values} + print(f"Operating in multiple z-value, single array mode") + dim = "z_value" + contour_arrays = {str(i)[0:10]: contours_to_multiline(da, i, min_vertices) for i in z_values} else: - - # Test if only a single z-value is given when operating in + # Test if only a single z-value is given when operating in # single z-value, multiple arrays mode if verbose: - print(f'Operating in single z-value, multiple arrays mode') + print(f"Operating in single z-value, multiple arrays mode") if len(z_values) > 1: - raise ValueError('Please provide a single z-value when operating ' - 'in single z-value, multiple arrays mode') + raise ValueError( + "Please provide a single z-value when operating " "in single z-value, multiple arrays mode" + ) - contour_arrays = {str(i)[0:10]: - contours_to_multiline(da_i, z_values[0], min_vertices) - for i, da_i in da.groupby(dim)} + contour_arrays = { + str(i)[0:10]: contours_to_multiline(da_i, z_values[0], min_vertices) for i, da_i in da.groupby(dim) + } # If attributes are provided, add the contour keys to that dataframe if attribute_df is not None: - try: attribute_df.insert(0, dim, contour_arrays.keys()) except ValueError: - - raise ValueError("One of the following issues occured:\n\n" - "1) `attribute_df` contains a different number of " - "rows than the number of supplied `z_values` (" - "'multiple z-value, single array mode')\n" - "2) `attribute_df` contains a different number of " - "rows than the number of arrays along the `dim` " - "dimension ('single z-value, multiple arrays mode')") + raise ValueError( + "One of the following issues occured:\n\n" + "1) `attribute_df` contains a different number of " + "rows than the number of supplied `z_values` (" + "'multiple z-value, single array mode')\n" + "2) `attribute_df` contains a different number of " + "rows than the number of arrays along the `dim` " + "dimension ('single z-value, multiple arrays mode')" + ) # Otherwise, use the contour keys as the only main attributes else: attribute_df = list(contour_arrays.keys()) # Convert output contours to a geopandas.GeoDataFrame - contours_gdf = gpd.GeoDataFrame(data=attribute_df, - geometry=list(contour_arrays.values()), - crs=crs) + contours_gdf = gpd.GeoDataFrame(data=attribute_df, geometry=list(contour_arrays.values()), crs=crs) # Define affine and use to convert array coords to geographic coords. 
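#  --- Editorial aside: illustrative only, not part of this patch ---
#  Worked example of the half-pixel shift implemented just below, using the
#  hypothetical 30 m transform quoted in this module's docstrings:
#
#      from affine import Affine
#      affine = Affine(30.0, 0.0, 548040.0, 0.0, -30.0, 6886890.0)
#      affine.xoff + affine.a / 2.0   # 548040.0 + 15.0 = 548055.0
#      affine.yoff + affine.e / 2.0   # 6886890.0 - 15.0 = 6886875.0
#
#  i.e. contour vertices are shifted from each pixel's top-left corner to
#  its centre before being written into the output GeoDataFrame.
#  --- end aside ---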
- # We need to add 0.5 x pixel size to the x and y to obtain the centre + # We need to add 0.5 x pixel size to the x and y to obtain the centre # point of our pixels, rather than the top-left corner - shapely_affine = [affine.a, affine.b, affine.d, affine.e, - affine.xoff + affine.a / 2.0, - affine.yoff + affine.e / 2.0] - contours_gdf['geometry'] = contours_gdf.affine_transform(shapely_affine) + shapely_affine = [ + affine.a, + affine.b, + affine.d, + affine.e, + affine.xoff + affine.a / 2.0, + affine.yoff + affine.e / 2.0, + ] + contours_gdf["geometry"] = contours_gdf.affine_transform(shapely_affine) # Rename the data column to match the dimension contours_gdf = contours_gdf.rename({0: dim}, axis=1) # Drop empty timesteps empty_contours = contours_gdf.geometry.is_empty - failed = ', '.join(map(str, contours_gdf[empty_contours][dim].to_list())) + failed = ", ".join(map(str, contours_gdf[empty_contours][dim].to_list())) contours_gdf = contours_gdf[~empty_contours] # Raise exception if no data is returned, or if any contours fail # when `errors='raise'. Otherwise, print failed contours - if empty_contours.all() and errors == 'raise': - raise RuntimeError("Failed to generate any valid contours; verify that " - "values passed to `z_values` are valid and present " - "in `da`") - elif empty_contours.all() and errors == 'ignore': + if empty_contours.all() and errors == "raise": + raise RuntimeError( + "Failed to generate any valid contours; verify that " + "values passed to `z_values` are valid and present " + "in `da`" + ) + elif empty_contours.all() and errors == "ignore": if verbose: - print ("Failed to generate any valid contours; verify that " - "values passed to `z_values` are valid and present " - "in `da`") - elif empty_contours.any() and errors == 'raise': - raise Exception(f'Failed to generate contours: {failed}') - elif empty_contours.any() and errors == 'ignore': + print( + "Failed to generate any valid contours; verify that " + "values passed to `z_values` are valid and present " + "in `da`" + ) + elif empty_contours.any() and errors == "raise": + raise Exception(f"Failed to generate contours: {failed}") + elif empty_contours.any() and errors == "ignore": if verbose: - print(f'Failed to generate contours: {failed}') + print(f"Failed to generate contours: {failed}") # If asked to write out file, test if geojson or shapefile - if output_path and output_path.endswith('.geojson'): + if output_path and output_path.endswith(".geojson"): if verbose: - print(f'Writing contours to {output_path}') - contours_gdf.to_crs('EPSG:4326').to_file(filename=output_path, - driver='GeoJSON') + print(f"Writing contours to {output_path}") + contours_gdf.to_crs("EPSG:4326").to_file(filename=output_path, driver="GeoJSON") - if output_path and output_path.endswith('.shp'): + if output_path and output_path.endswith(".shp"): if verbose: - print(f'Writing contours to {output_path}') + print(f"Writing contours to {output_path}") contours_gdf.to_file(filename=output_path) - + return contours_gdf -def interpolate_2d(ds, - x_coords, - y_coords, - z_coords, - method='linear', - factor=1, - verbose=False, - **kwargs): - +def interpolate_2d(ds, x_coords, y_coords, z_coords, method="linear", factor=1, verbose=False, **kwargs): """ - This function takes points with X, Y and Z coordinates, and - interpolates Z-values across the extent of an existing xarray + This function takes points with X, Y and Z coordinates, and + interpolates Z-values across the extent of an existing xarray dataset. 
This can be useful for producing smooth surfaces from point - data that can be compared directly against satellite data derived + data that can be compared directly against satellite data derived from an OpenDataCube query. - + Supported interpolation methods include 'linear', 'nearest' and - 'cubic (using `scipy.interpolate.griddata`), and 'rbf' (using + 'cubic (using `scipy.interpolate.griddata`), and 'rbf' (using `scipy.interpolate.Rbf`). - + Last modified: February 2020 - + Parameters - ---------- + ---------- ds : xarray DataArray or Dataset - A two-dimensional or multi-dimensional array from which x and y - dimensions will be copied and used for the area in which to - interpolate point data. + A two-dimensional or multi-dimensional array from which x and y + dimensions will be copied and used for the area in which to + interpolate point data. x_coords, y_coords : numpy array - Arrays containing X and Y coordinates for all points (e.g. + Arrays containing X and Y coordinates for all points (e.g. longitudes and latitudes). z_coords : numpy array - An array containing Z coordinates for all points (e.g. - elevations). These are the values you wish to interpolate + An array containing Z coordinates for all points (e.g. + elevations). These are the values you wish to interpolate between. method : string, optional The method used to interpolate between point values. This string - is either passed to `scipy.interpolate.griddata` (for 'linear', - 'nearest' and 'cubic' methods), or used to specify Radial Basis + is either passed to `scipy.interpolate.griddata` (for 'linear', + 'nearest' and 'cubic' methods), or used to specify Radial Basis Function interpolation using `scipy.interpolate.Rbf` ('rbf'). Defaults to 'linear'. factor : int, optional - An optional integer that can be used to subsample the spatial + An optional integer that can be used to subsample the spatial interpolation extent to obtain faster interpolation times, then - up-sample this array back to the original dimensions of the - data as a final step. For example, setting `factor=10` will - interpolate data into a grid that has one tenth of the - resolution of `ds`. This approach will be significantly faster - than interpolating at full resolution, but will potentially + up-sample this array back to the original dimensions of the + data as a final step. For example, setting `factor=10` will + interpolate data into a grid that has one tenth of the + resolution of `ds`. This approach will be significantly faster + than interpolating at full resolution, but will potentially produce less accurate or reliable results. verbose : bool, optional Print debugging messages. Default False. - **kwargs : - Optional keyword arguments to pass to either - `scipy.interpolate.griddata` (if `method` is 'linear', 'nearest' + **kwargs : + Optional keyword arguments to pass to either + `scipy.interpolate.griddata` (if `method` is 'linear', 'nearest' or 'cubic'), or `scipy.interpolate.Rbf` (is `method` is 'rbf'). - + Returns ------- interp_2d_array : xarray DataArray - An xarray DataArray containing with x and y coordinates copied - from `ds_array`, and Z-values interpolated from the points data. + An xarray DataArray containing with x and y coordinates copied + from `ds_array`, and Z-values interpolated from the points data. """ - + # Extract xy and elev points points_xy = np.vstack([x_coords, y_coords]).T - - # Extract x and y coordinates to interpolate into. 
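#  --- Editorial aside: illustrative only, not part of this patch ---
#  Sketch of the `factor` subsampling handled below, with made-up numbers:
#
#      x = np.arange(0, 95, 5)   # 19 coordinates, last value 90
#      x[::4]                    # array([ 0, 20, 40, 60, 80]); last value 80 != 90
#
#  Because the subsampled grid can stop short of the final coordinate, the
#  code below re-appends the last full-resolution value so the interpolation
#  still reaches the edge of the array.
#  --- end aside ---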
- # If `factor` is greater than 1, the coordinates will be subsampled - # for faster run-times. If the last x or y value in the subsampled - # grid aren't the same as the last x or y values in the original - # full resolution grid, add the final full resolution grid value to + + # Extract x and y coordinates to interpolate into. + # If `factor` is greater than 1, the coordinates will be subsampled + # for faster run-times. If the last x or y value in the subsampled + # grid aren't the same as the last x or y values in the original + # full resolution grid, add the final full resolution grid value to # ensure data is interpolated up to the very edge of the array if ds.x[::factor][-1].item() == ds.x[-1].item(): x_grid_coords = ds.x[::factor].values else: x_grid_coords = ds.x[::factor].values.tolist() + [ds.x[-1].item()] - + if ds.y[::factor][-1].item() == ds.y[-1].item(): y_grid_coords = ds.y[::factor].values else: @@ -620,121 +618,108 @@ def interpolate_2d(ds, # Create grid to interpolate into grid_y, grid_x = np.meshgrid(x_grid_coords, y_grid_coords) - + # Apply scipy.interpolate.griddata interpolation methods - if method in ('linear', 'nearest', 'cubic'): - - # Interpolate x, y and z values - interp_2d = scipy.interpolate.griddata(points=points_xy, - values=z_coords, - xi=(grid_y, grid_x), - method=method, - **kwargs) - - # Apply Radial Basis Function interpolation - elif method == 'rbf': + if method in ("linear", "nearest", "cubic"): + # Interpolate x, y and z values + interp_2d = scipy.interpolate.griddata( + points=points_xy, values=z_coords, xi=(grid_y, grid_x), method=method, **kwargs + ) - # Interpolate x, y and z values - rbf = scipy.interpolate.Rbf(x_coords, y_coords, z_coords, **kwargs) + # Apply Radial Basis Function interpolation + elif method == "rbf": + # Interpolate x, y and z values + rbf = scipy.interpolate.Rbf(x_coords, y_coords, z_coords, **kwargs) interp_2d = rbf(grid_y, grid_x) # Create xarray dataarray from the data and resample to ds coords - interp_2d_da = xr.DataArray(interp_2d, - coords=[y_grid_coords, x_grid_coords], - dims=['y', 'x']) - + interp_2d_da = xr.DataArray(interp_2d, coords=[y_grid_coords, x_grid_coords], dims=["y", "x"]) + # If factor is greater than 1, resample the interpolated array to # match the input `ds` array - if factor > 1: - interp_2d_da = interp_2d_da.interp_like(ds) + if factor > 1: + interp_2d_da = interp_2d_da.interp_like(ds) return interp_2d_da def contours_to_arrays(gdf, col): - """ This function converts a polyline shapefile into an array with three columns giving the X, Y and Z coordinates of each vertex. This data - can then be used as an input to interpolation procedures (e.g. using + can then be used as an input to interpolation procedures (e.g. using a function like `interpolate_2d`. - + Last modified: October 2019 - + Parameters - ---------- + ---------- gdf : Geopandas GeoDataFrame - A GeoPandas GeoDataFrame of lines to convert into point + A GeoPandas GeoDataFrame of lines to convert into point coordinates. col : str - A string giving the name of the GeoDataFrame field to use as + A string giving the name of the GeoDataFrame field to use as Z-values. - + Returns ------- - A numpy array with three columns giving the X, Y and Z coordinates + A numpy array with three columns giving the X, Y and Z coordinates of each vertex in the input GeoDataFrame. - - """ + + """ # Explode multi-part geometries into multiple single geometries. 
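#  --- Editorial aside: illustrative only, not part of this patch ---
#  Sketch of how contours_to_arrays() feeds interpolate_2d(), assuming a
#  contour GeoDataFrame `contours_gdf` with a 'z_value' column (as produced
#  by subpixel_contours() in 2D mode) and a template dataset `ds`:
#
#      xyz = contours_to_arrays(contours_gdf, col="z_value")
#      surface = interpolate_2d(
#          ds,
#          x_coords=xyz[:, 0],
#          y_coords=xyz[:, 1],
#          z_coords=xyz[:, 2],
#          method="linear",
#      )
#  --- end aside ---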
gdf = gdf.explode(ignore_index=True) coords_zvals = [] for i in range(0, len(gdf)): - val = gdf.iloc[i][col] try: - coords = np.concatenate([np.vstack(x.coords.xy).T - for x in gdf.iloc[i].geometry]) + coords = np.concatenate([np.vstack(x.coords.xy).T for x in gdf.iloc[i].geometry]) except: coords = np.vstack(gdf.iloc[i].geometry.coords.xy).T - coords_zvals.append(np.column_stack((coords, - np.full(np.shape(coords)[0], - fill_value=val)))) + coords_zvals.append(np.column_stack((coords, np.full(np.shape(coords)[0], fill_value=val)))) return np.concatenate(coords_zvals) def largest_region(bool_array, **kwargs): - - ''' - Takes a boolean array and identifies the largest contiguous region of - connected True values. This is returned as a new array with cells in + """ + Takes a boolean array and identifies the largest contiguous region of + connected True values. This is returned as a new array with cells in the largest region marked as True, and all other cells marked as False. - + Parameters - ---------- + ---------- bool_array : boolean array A boolean array (numpy or xarray.DataArray) with True values for - the areas that will be inspected to find the largest group of + the areas that will be inspected to find the largest group of connected cells - **kwargs : + **kwargs : Optional keyword arguments to pass to `measure.label` - + Returns ------- largest_region : boolean array - A boolean array with cells in the largest region marked as True, - and all other cells marked as False. - - ''' - + A boolean array with cells in the largest region marked as True, + and all other cells marked as False. + + """ + # First, break boolean array into unique, discrete regions/blobs blobs_labels = label(bool_array, background=0, **kwargs) - + # Count the size of each blob, excluding the background class (0) - ids, counts = np.unique(blobs_labels[blobs_labels > 0], - return_counts=True) - + ids, counts = np.unique(blobs_labels[blobs_labels > 0], return_counts=True) + # Identify the region ID of the largest blob largest_region_id = ids[np.argmax(counts)] - + # Produce a boolean array where 1 == the largest region largest_region = blobs_labels == largest_region_id - + return largest_region @@ -755,23 +740,17 @@ def transform_geojson_wgs_to_epsg(geojson, EPSG): a geojson dictionary containing a 'coordinates' key, in the desired CRS """ - gg = Geometry(geojson['geometry'], CRS('epsg:4326')) - gg = gg.to_crs(CRS(f'epsg:{EPSG}')) + gg = Geometry(geojson["geometry"], CRS("epsg:4326")) + gg = gg.to_crs(CRS(f"epsg:{EPSG}")) return gg.__geo_interface__ -def zonal_stats_parallel(shp, - raster, - statistics, - out_shp, - ncpus, - **kwargs): - +def zonal_stats_parallel(shp, raster, statistics, out_shp, ncpus, **kwargs): """ Summarizing raster datasets based on vector geometries in parallel. - Each cpu recieves an equal chunk of the dataset. + Each cpu recieves an equal chunk of the dataset. Utilizes the perrygeo/rasterstats package. - + Parameters ---------- shp : str @@ -786,85 +765,85 @@ def zonal_stats_parallel(shp, out_shp: str Path to export shapefile containing zonal statistics. ncpus: int - number of cores to parallelize the operations over. - kwargs: + number of cores to parallelize the operations over. 
+ kwargs: Any other keyword arguments to rasterstats.zonal_stats() See https://github.com/perrygeo/python-rasterstats for all options - + Returns ------- Exports a shapefile to disk containing the zonal statistics requested - + """ - - #yields n sized chunks from list l (used for splitting task to multiple processes) + + # yields n sized chunks from list l (used for splitting task to multiple processes) def chunks(l, n): for i in range(0, len(l), n): - yield l[i:i + n] + yield l[i : i + n] - #calculates zonal stats and adds results to a dictionary - def worker(z,raster,d): - z_stats = zonal_stats(z,raster,stats=statistics,**kwargs) - for i in range(0,len(z_stats)): - d[z[i]['id']]=z_stats[i] + # calculates zonal stats and adds results to a dictionary + def worker(z, raster, d): + z_stats = zonal_stats(z, raster, stats=statistics, **kwargs) + for i in range(0, len(z_stats)): + d[z[i]["id"]] = z_stats[i] - #write output polygon - def write_output(zones, out_shp,d): - #copy schema and crs from input and add new fields for each statistic + # write output polygon + def write_output(zones, out_shp, d): + # copy schema and crs from input and add new fields for each statistic schema = zones.schema.copy() crs = zones.crs - for stat in statistics: - schema['properties'][stat] = 'float' + for stat in statistics: + schema["properties"][stat] = "float" - with fiona.open(out_shp, 'w', 'ESRI Shapefile', schema, crs) as output: + with fiona.open(out_shp, "w", "ESRI Shapefile", schema, crs) as output: for elem in zones: - for stat in statistics: - elem['properties'][stat]=d[elem['id']][stat] - output.write({'properties':elem['properties'],'geometry': mapping(shape(elem['geometry']))}) - + for stat in statistics: + elem["properties"][stat] = d[elem["id"]][stat] + output.write({"properties": elem["properties"], "geometry": mapping(shape(elem["geometry"]))}) + with fiona.open(shp) as zones: jobs = [] # create manager dictionary (polygon ids=keys, stats=entries) # where multiple processes can write without conflicts - man = mp.Manager() - d = man.dict() + man = mp.Manager() + d = man.dict() - #split zone polygons into 'ncpus' chunks for parallel processing + # split zone polygons into 'ncpus' chunks for parallel processing # and call worker() for each - split = chunks(zones, len(zones)//ncpus) + split = chunks(zones, len(zones) // ncpus) for z in split: - p = mp.Process(target=worker,args=(z, raster,d)) + p = mp.Process(target=worker, args=(z, raster, d)) p.start() jobs.append(p) - #wait that all chunks are finished + # wait that all chunks are finished [j.join() for j in jobs] - write_output(zones,out_shp,d) + write_output(zones, out_shp, d) + - def reverse_geocode(coords, site_classes=None, state_classes=None): """ - Takes a latitude and longitude coordinate, and performs a reverse - geocode to return a plain-text description of the location in the + Takes a latitude and longitude coordinate, and performs a reverse + geocode to return a plain-text description of the location in the form: - + Site, State - + E.g.: `reverse_geocode(coords=(-35.282163, 149.128835))` - + 'Canberra, Australian Capital Territory' Parameters ---------- coords : tuple of floats - A tuple of (latitude, longitude) coordinates used to perform + A tuple of (latitude, longitude) coordinates used to perform the reverse geocode. site_classes : list of strings, optional - A list of strings used to define the site part of the plain - text location description. 
Because the contents of the geocoded + A list of strings used to define the site part of the plain + text location description. Because the contents of the geocoded address can vary greatly depending on location, these strings are tested against the address one by one until a match is made. @@ -873,62 +852,57 @@ def reverse_geocode(coords, site_classes=None, state_classes=None): ``['city', 'town', 'village', 'suburb', 'hamlet', 'county', 'municipality']`` state_classes : list of strings, optional - A list of strings used to define the state part of the plain - text location description. These strings are tested against the - address one by one until a match is made. Defaults to: + A list of strings used to define the state part of the plain + text location description. These strings are tested against the + address one by one until a match is made. Defaults to: `['state', 'territory']`. Returns ------- - If a valid geocoded address is found, a plain text location + If a valid geocoded address is found, a plain text location description will be returned: - + 'Site, State' - + If no valid address is found, formatted coordinates will be returned instead: - - 'XX.XX S, XX.XX E' + + 'XX.XX S, XX.XX E' """ # Run reverse geocode using coordinates - geocoder = Nominatim(user_agent='Digital Earth Africa') + geocoder = Nominatim(user_agent="Digital Earth Africa") out = geocoder.reverse(coords) - + # Create plain text-coords as fall-back - lat = f'{-coords[0]:.2f} S' if coords[0] < 0 else f'{coords[0]:.2f} N' - lon = f'{-coords[1]:.2f} W' if coords[1] < 0 else f'{coords[1]:.2f} E' + lat = f"{-coords[0]:.2f} S" if coords[0] < 0 else f"{coords[0]:.2f} N" + lon = f"{-coords[1]:.2f} W" if coords[1] < 0 else f"{coords[1]:.2f} E" try: - # Get address from geocoded data - address = out.raw['address'] + address = out.raw["address"] # Use site and state classes if supplied; else use defaults - default_site_classes = ['city', 'town', 'village', 'suburb', 'hamlet', - 'county', 'municipality'] - default_state_classes = ['state', 'territory'] + default_site_classes = ["city", "town", "village", "suburb", "hamlet", "county", "municipality"] + default_state_classes = ["state", "territory"] site_classes = site_classes if site_classes else default_site_classes state_classes = state_classes if state_classes else default_state_classes # Return the first site or state class that exists in address dict site = next((address[k] for k in site_classes if k in address), None) state = next((address[k] for k in state_classes if k in address), None) - + # If site and state exist in the data, return this. # Otherwise, return N/E/S/W coordinates. 
if site and state: - # Return as site, state formatted string - return f'{site}, {state}' - + return f"{site}, {state}" + else: - # If no geocoding result, return N/E/S/W coordinates - print('No valid geocoded location; returning coordinates instead') - return f'{lat}, {lon}' - - except (KeyError, AttributeError): + print("No valid geocoded location; returning coordinates instead") + return f"{lat}, {lon}" + except (KeyError, AttributeError): # If no geocoding result, return N/E/S/W coordinates - print('No valid geocoded location; returning coordinates instead') - return f'{lat}, {lon}' \ No newline at end of file + print("No valid geocoded location; returning coordinates instead") + return f"{lat}, {lon}" diff --git a/deafrica_tools/temporal.py b/deafrica_tools/temporal.py index 6539451..ad221cb 100644 --- a/deafrica_tools/temporal.py +++ b/deafrica_tools/temporal.py @@ -182,7 +182,7 @@ def _los(da, eos, sos): LOS = Length of season (in DOY) """ los = eos - sos - #handle negative values + # handle negative values los = xr.where( los >= 0, los, @@ -223,7 +223,7 @@ def xr_phenology( ], method_sos="first", method_eos="last", - verbose=True + verbose=True, ): """ Obtain land surface phenology metrics from an @@ -471,9 +471,7 @@ def temporal_statistics(da, stats): da_all_time = da.chunk({"time": -1}) # apply function across chunks - lazy_ds = da_all_time.map_blocks( - temporal_statistics, kwargs={"stats": stats}, template=template - ) + lazy_ds = da_all_time.map_blocks(temporal_statistics, kwargs={"stats": stats}, template=template) try: crs = da.geobox.crs @@ -521,15 +519,11 @@ def temporal_statistics(da, stats): n3 = zz[:, :, 2] # intialise dataset with first statistic - ds = xr.DataArray( - n1, attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"] - ).to_dataset(name=stats[0] + "_n1") + ds = xr.DataArray(n1, attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"]).to_dataset(name=stats[0] + "_n1") # add other datasets for i, j in zip([n2, n3], ["n2", "n3"]): - ds[stats[0] + "_" + j] = xr.DataArray( - i, attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"] - ) + ds[stats[0] + "_" + j] = xr.DataArray(i, attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"]) else: # simpler if first function isn't fourier transform first_func = stats_dict.get(str(stats[0])) @@ -537,9 +531,7 @@ def temporal_statistics(da, stats): ds = first_func(da) # convert back to xarray dataset - ds = xr.DataArray( - ds, attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"] - ).to_dataset(name=stats[0]) + ds = xr.DataArray(ds, attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"]).to_dataset(name=stats[0]) # loop through the other functions for stat in stats[1:]: @@ -554,17 +546,13 @@ def temporal_statistics(da, stats): n3 = zz[:, :, 2] for i, j in zip([n1, n2, n3], ["n1", "n2", "n3"]): - ds[stat + "_" + j] = xr.DataArray( - i, attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"] - ) + ds[stat + "_" + j] = xr.DataArray(i, attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"]) else: # Select a stats function from the dictionary # and add to the dataset stat_func = stats_dict.get(str(stat)) - ds[stat] = xr.DataArray( - stat_func(da), attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"] - ) + ds[stat] = xr.DataArray(stat_func(da), attrs=attrs, coords={"x": x, "y": y}, dims=["y", "x"]) # try to add back the geobox try: diff --git a/deafrica_tools/wetlands.py b/deafrica_tools/wetlands.py index 4d7377a..87a4c45 100644 --- a/deafrica_tools/wetlands.py +++ b/deafrica_tools/wetlands.py @@ -7,7 +7,8 @@ # Force GeoPandas to use 
Shapely instead of PyGEOS # In a future release, GeoPandas will switch to using Shapely by default. import os -os.environ['USE_PYGEOS'] = '0' + +os.environ["USE_PYGEOS"] = "0" import warnings import numpy as np @@ -73,7 +74,7 @@ def WIT_drill( TCW_threshold : Int, optional The tasseled cap wetness threshold, beyond which a pixel will be considered 'wet'. Defaults to -0.035. - resample_frequency : str + resample_frequency : str Option for resampling time-series of input datasets. This option is useful for either smoothing the WIT plot, or because the area of analysis is larger than a scene width and therefore requires composites. Options include any @@ -98,7 +99,7 @@ def WIT_drill( """ # add geom to dc query dict if isinstance(gdf, datacube.utils.geometry._base.Geometry): - gdf = gpd.GeoDataFrame({'col1':['name'],'geometry':gdf.geom}, crs=gdf.crs) + gdf = gpd.GeoDataFrame({"col1": ["name"], "geometry": gdf.geom}, crs=gdf.crs) geom = geometry.Geometry(geom=gdf.iloc[0].geometry, crs=gdf.crs) query = {"geopolygon": geom, "time": time} @@ -115,7 +116,7 @@ def WIT_drill( products=["ls8_sr", "ls7_sr", "ls5_sr"], output_crs="epsg:6933", min_gooddata=min_gooddata, - mask_filters=(['opening', 3], ['dilation', 3]), + mask_filters=(["opening", 3], ["dilation", 3]), measurements=["red", "green", "blue", "nir", "swir_1", "swir_2"], dask_chunks=dask_chunks, group_by="solar_day", @@ -123,28 +124,26 @@ def WIT_drill( verbose=verbose, **query, ) - + # create polygon mask mask = xr_rasterize(gdf.iloc[[0]], ds_ls) ds_ls = ds_ls.where(mask) - + # calculate tasselled cap wetness within masked AOI if verbose: print("calculating tasseled cap wetness index ") - - with HiddenPrints(): #suppres the prints from this func - tcw = calculate_indices( - ds_ls, index=["TCW"], normalise=False, satellite_mission="ls", drop=True - ) - + + with HiddenPrints(): # suppres the prints from this func + tcw = calculate_indices(ds_ls, index=["TCW"], normalise=False, satellite_mission="ls", drop=True) + if resample_frequency is not None: if verbose: - print('Resampling TCW to '+ resample_frequency) + print("Resampling TCW to " + resample_frequency) tcw = tcw.resample(time=resample_frequency).max() - + tcw = tcw.TCW >= TCW_threshold tcw = tcw.where(mask, 0) - tcw = tcw.persist() + tcw = tcw.persist() if verbose: print("Loading WOfS layers ") @@ -159,15 +158,15 @@ def WIT_drill( # boolean of wet/dry wofls_wet = masking.make_mask(wofls.water, wet=True) - + if resample_frequency is not None: if verbose: - print('Resampling WOfS to '+ resample_frequency) + print("Resampling WOfS to " + resample_frequency) wofls_wet = wofls_wet.resample(time=resample_frequency).max() - + # mask sure wofs matches other datasets wofls_wet = wofls_wet.where(wofls_wet.time == tcw.time) - + # apply the polygon mask wofls_wet = wofls_wet.where(mask) @@ -184,16 +183,16 @@ def WIT_drill( measurements=["pv", "npv", "bs"], collection_category="T1", ) - + # use wofls mask to cloud mask FC clear_and_dry = masking.make_mask(wofls, dry=True).water fc_ds = fc_ds.where(clear_and_dry) - + if resample_frequency is not None: if verbose: - print('Resampling FC to '+ resample_frequency) + print("Resampling FC to " + resample_frequency) fc_ds = fc_ds.resample(time=resample_frequency).max() - + # mask sure fc matches other datasets fc_ds = fc_ds.where(fc_ds.time == tcw.time) @@ -215,15 +214,15 @@ def WIT_drill( # use nanargmax to get the index of the maximum value BSPVNPV = fc_int.argmax(dim="variable") - - #int dytype remocves NaNs so we need to create mask again + + # int 
dytype remocves NaNs so we need to create mask again FC_mask = np.isfinite(fc_ds_noTCW).all(dim="variable") BSPVNPV = BSPVNPV.where(FC_mask) # Restack the Fractional cover dataset all together # CAUTION:ARGMAX DEPENDS ON ORDER OF VARIABALES IN # DATASET. NEED TO ADJUST BELOW DEPENDING ON ORDER OF FC VARIABLES - + FC_dominant = xr.Dataset( { "bs": (BSPVNPV == 2).where(FC_mask), @@ -234,16 +233,15 @@ def WIT_drill( # pixel counts pixels = mask.sum(dim=["x", "y"]) - if verbose_progress: print("Computing wetness") tcw_pixel_count = tcw.sum(dim=["x", "y"]).compute() - + if verbose_progress: print("Computing green veg, dry veg, and bare soil") FC_count = FC_dominant.sum(dim=["x", "y"]).compute() - + if verbose_progress: print("Computing open water") wofs_pixels = wofls_wet.sum(dim=["x", "y"]).compute() @@ -259,9 +257,7 @@ def WIT_drill( BS_percent = (FC_count.bs / pixels) * 100 PV_percent = (FC_count.pv / pixels) * 100 NPV_percent = (FC_count.npv / pixels) * 100 - NoData_count = (( - 100 - wofs_area_percent - tcw_less_wofs - PV_percent - NPV_percent - BS_percent - ) / 100) * pixels + NoData_count = ((100 - wofs_area_percent - tcw_less_wofs - PV_percent - NPV_percent - BS_percent) / 100) * pixels # re-do percentages but now handling any no-data pixels within polygon BS_percent = (FC_count.bs / (pixels - NoData_count)) * 100 @@ -270,10 +266,10 @@ def WIT_drill( wofs_area_percent = (wofs_pixels / (pixels - NoData_count)) * 100 tcw_area_percent = (tcw_pixel_count / (pixels - NoData_count)) * 100 tcw_less_wofs = tcw_area_percent - wofs_area_percent - + # Sometimes when we resample datastes, WOfS extent can be # greater than the wetness extent, thus make negative values == zero - tcw_less_wofs = tcw_less_wofs.where(tcw_less_wofs>=0, 0) + tcw_less_wofs = tcw_less_wofs.where(tcw_less_wofs >= 0, 0) # start setup of dataframe by adding only one dataset df = pd.DataFrame( @@ -290,7 +286,7 @@ def WIT_drill( # round numbers df = df.round(2) - + # save the csv of the output data used to create the stacked plot for the polygon drill if export_csv: if verbose: @@ -321,19 +317,14 @@ def animated_timeseries_WIT( x_dim="x", y_dim="y", ): - ############### # Setup steps # ############### # Test if all dimensions exist in dataset if time_dim in ds and x_dim in ds and y_dim in ds: - # Test if there is one or three bands, and that all exist in both datasets: - if ((len(bands) == 3) | (len(bands) == 1)) & all( - [(b in ds.data_vars) for b in bands] - ): - + if ((len(bands) == 3) | (len(bands) == 1)) & all([(b in ds.data_vars) for b in bands]): # Import xarrays as lists of three band numpy arrays imagelist, vmin, vmax = _ds_to_arrraylist( ds, @@ -390,9 +381,7 @@ def animated_timeseries_WIT( "verticalalignment": "top", "fontsize": 15, "color": "white", - "path_effects": [ - PathEffects.withStroke(linewidth=3, foreground="black") - ], + "path_effects": [PathEffects.withStroke(linewidth=3, foreground="black")], }, **annotation_kwargs, ) @@ -413,9 +402,7 @@ def animated_timeseries_WIT( ################### # Set up figure - fig, (ax1, ax2) = plt.subplots( - ncols=2, gridspec_kw={"width_ratios": [1, 2]} - ) + fig, (ax1, ax2) = plt.subplots(ncols=2, gridspec_kw={"width_ratios": [1, 2]}) fig.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0.2, hspace=0) fig.set_size_inches(10.0, height * 0.5, forward=True) ax1.axis("off") @@ -461,9 +448,7 @@ def animated_timeseries_WIT( { "wofs_area_percent": df.wofs_area_percent, "wet_percent": df.wofs_area_percent + df.wet_percent, - "green_veg_percent": df.wofs_area_percent - + 
df.wet_percent - + df.green_veg_percent, + "green_veg_percent": df.wofs_area_percent + df.wet_percent + df.green_veg_percent, "dry_veg_percent": df.wofs_area_percent + df.wet_percent + df.green_veg_percent @@ -477,9 +462,7 @@ def animated_timeseries_WIT( ) df1 = df1.set_index(df.index) - line_test = df1.plot( - ax=ax2, legend=False, color="black", **pandasplot_kwargs - ) + line_test = df1.plot(ax=ax2, legend=False, color="black", **pandasplot_kwargs) # set axis limits to the min and max ax2.set(xlim=(df.index[0], df.index[-1]), ylim=(0, 100)) @@ -497,15 +480,12 @@ def animated_timeseries_WIT( # Optionally add shapefile overlay(s) from either string path or list of string paths if isinstance(shapefile_path, str): - shapefile = gpd.read_file(shapefile_path) shapefile.plot(**shapefile_kwargs, ax=ax1) elif isinstance(shapefile_path, list): - # Iterate through list of string paths for shapefile in shapefile_path: - shapefile = gpd.read_file(shapefile) shapefile.plot(**shapefile_kwargs, ax=ax1) @@ -531,25 +511,20 @@ def animated_timeseries_WIT( # Function to update figure def update_figure(frame_i): - #################### # Plot image panel # #################### # If possible, extract dates from time dimension try: - # Get human-readable date info (e.g. "16 May 1990") ts = ds[time_dim][{time_dim: frame_i}].dt year = ts.year.item() month = ts.month.item() day = ts.day.item() - date_string = "{} {} {}".format( - day, calendar.month_abbr[month], year - ) + date_string = "{} {} {}".format(day, calendar.month_abbr[month], year) except: - date_string = ds[time_dim][{time_dim: frame_i}].values.item() # Create annotation string based on title and date specifications: @@ -576,16 +551,9 @@ def update_figure(frame_i): # Update right panel with temporal line subset, adding each new line into artist_list for i, line in enumerate(line_test.lines): - # Clip line data to current time, and get x and y values - y = df1[ - df1.index - <= datetime(year=year, month=month, day=day, hour=23, minute=59) - ].iloc[:, i] - x = df1[ - df1.index - <= datetime(year=year, month=month, day=day, hour=23, minute=59) - ].index + y = df1[df1.index <= datetime(year=year, month=month, day=day, hour=23, minute=59)].iloc[:, i] + x = df1[df1.index <= datetime(year=year, month=month, day=day, hour=23, minute=59)].index # Plot lines after stripping NaNs (this produces continuous, unbroken lines) line.set_data(x[y.notnull()], y[y.notnull()]) @@ -620,9 +588,7 @@ def update_figure(frame_i): ani.save( output_path, dpi=width_pixels / 10.0, - writer=animation.FFMpegFileWriter( - fps=1000 / interval, bitrate=4000, codec="wmv2" - ), + writer=animation.FFMpegFileWriter(fps=1000 / interval, bitrate=4000, codec="wmv2"), ) elif output_path[-3:] == "gif": @@ -633,9 +599,7 @@ def update_figure(frame_i): print(" Output file type must be either .mp4, .wmv or .gif") else: - print( - "Please select either one or three bands that all exist in the input dataset" - ) + print("Please select either one or three bands that all exist in the input dataset") else: print( @@ -647,9 +611,7 @@ def update_figure(frame_i): # Define function to convert xarray dataset to list of one or three band numpy arrays -def _ds_to_arrraylist( - ds, bands, time_dim, x_dim, y_dim, percentile_stretch, image_proc_func=None -): +def _ds_to_arrraylist(ds, bands, time_dim, x_dim, y_dim, percentile_stretch, image_proc_func=None): """ Converts an xarray dataset to a list of numpy arrays for plt.imshow plotting """ @@ -659,7 +621,6 @@ def _ds_to_arrraylist( array_list = [] for i, timestep 
in enumerate(ds[time_dim]): - # Select single timestep from the data array ds_i = ds[{time_dim: i}] @@ -668,30 +629,22 @@ def _ds_to_arrraylist( y = len(ds[y_dim]) if len(bands) == 1: - # Create new one band array - img_toshow = exposure.rescale_intensity( - ds_i[bands[0]].values, in_range=(p_low, p_high), out_range="image" - ) + img_toshow = exposure.rescale_intensity(ds_i[bands[0]].values, in_range=(p_low, p_high), out_range="image") else: - # Create new three band array rawimg = np.zeros((y, x, 3), dtype=np.float32) # Add xarray bands into three dimensional numpy array for band, colour in enumerate(bands): - rawimg[:, :, band] = ds_i[colour].values # Stretch contrast using percentile values - img_toshow = exposure.rescale_intensity( - rawimg, in_range=(p_low, p_high), out_range=(0, 1.0) - ) + img_toshow = exposure.rescale_intensity(rawimg, in_range=(p_low, p_high), out_range=(0, 1.0)) # Optionally image processing if image_proc_func: - img_toshow = image_proc_func(img_toshow).clip(0, 1) array_list.append(img_toshow) @@ -699,18 +652,14 @@ def _ds_to_arrraylist( return array_list, p_low, p_high -def _add_colourbar( - ax, im, vmin, vmax, cmap="Greys", tick_fontsize=15, tick_colour="black" -): +def _add_colourbar(ax, im, vmin, vmax, cmap="Greys", tick_fontsize=15, tick_colour="black"): """ Add a nicely formatted colourbar to an animation panel """ # Add colourbar axins2 = inset_axes(ax, width="97%", height="4%", loc=8, borderpad=1) - plt.gcf().colorbar( - im, cax=axins2, orientation="horizontal", ticks=np.linspace(vmin, vmax, 3) - ) + plt.gcf().colorbar(im, cax=axins2, orientation="horizontal", ticks=np.linspace(vmin, vmax, 3)) axins2.xaxis.set_ticks_position("top") axins2.tick_params(axis="x", colors=tick_colour, labelsize=tick_fontsize) diff --git a/get_IVs_unify.py b/get_IVs_unify.py deleted file mode 100644 index b0dfa1b..0000000 --- a/get_IVs_unify.py +++ /dev/null @@ -1,395 +0,0 @@ -# %% -print(''' - Vegetation Indices series extraction - from Landsat series - - --- - created by Denis Mariano - denis@seca.space - www.seca.space - 2023-12 - ToDo's - - verificar porque EVI e LAI não estão displaying no valuetool - - TEM QUE DAR UM TRATO NOS VALUES - - agregar no tempo, zscores - - plots - - extraction - - ''') - -# %% -import time -start = time.time() - -import pylab as plt -from datetime import date -import sys -import subprocess -import pkg_resources - -required = {'rasterstats','odc-ui'} -installed = {pkg.key for pkg in pkg_resources.working_set} -missing = required - installed - -if missing: - python = sys.executable - subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL) -else: - print(f'Required packages {required} already installed.') - -import geopandas as gpd -import stackstac -import xarray as xr -import numpy as np -import rioxarray -from scipy.signal import savgol_filter -import zipfile -from xrspatial import zonal_stats -import pandas as pd -import numpy as np - -sys.path.append('/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/') -from grits import * - -print('all good!') -# %% DEFINE AREA OF INTEREST -# ========================= -# %% Uniguiri Farm -# name = 'Uniguiri_full_' - -# # AOI file and layer (for GPKG) -# path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -# file = path_vector + 'fazenda_uniguiri.gpkg' -# layer = 'piquetes_tid' - -# # Get FIELD -# field = gpd.read_file(file, layer=layer) -# #field = field[field['Re'] == 80000] - -# bbox, lat_range, lon_range = get_lims(field) -# print(field.head()) 
-# field.plot(column='TID') - -# %% THE CAR WAY -''' - XXX ler o gpkg do MT leva 30 segundos, não está bom - -''' -car = 'MT-5103601-948E6FB555E3445CB7E0538F61483371' -car = 'MT-5104807-84F5196D22B847C1BD91AA27DB598BC1' -car = 'SP-3548906-AEEFC5ECB2EF42AF9721E496EC7678D9' #Embrapa Pecuaria Sudeste -car = 'MT-5107941-3E795652613843F98A703C84BCF9CDA6' #Tabapora -#%% -if car: - name = car - gdf = gpd.read_file('/home/jovyan/PlanetaryComputerExamples/vetorial/CAR/MT_CAR_AREA_IMOVEL_.gpkg') - field = gdf[gdf['cod_imovel'] == name] - - bbox, lat_range, lon_range = get_lims(field) - print(field.head()) - del gdf - print(f'área da fazenda = {field.geometry.to_crs(6933).area.values[0]/10000:.1f} ha') - field.plot() - -#%% Embrapa Sao Carlos -name = 'embrapa_saocarlos' -path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -file = path_vector + 'fazenda_embrapa.gpkg' -layer = 'talhoes' - -# Get FIELD -field = gpd.read_file(file, layer=layer) -#field = field[field['Re'] == 80000] - -bbox, lat_range, lon_range = get_lims(field) -print(field.head()) -field.plot(column='tid') -plt.title(name) - - -# %% Define period and output path -datetime='1985-01-01/'+str(date.today()) -datetime='1985-01-01/2022-04-01'#+str(date.today()) -# para embrapa sanca deu erro em 2022-04-02 -#datetime='2015-01-01/2017-01-01' -print(datetime) - -# Parameters to save raster data? -savenc = True -zscores = True -path_nc = '/home/jovyan/PlanetaryComputerExamples/OUT/nc/' - -# parameters for extracting data -savecsv = True -column = 'TID' -path_csv = '/home/jovyan/PlanetaryComputerExamples/OUT/csv/' - -# some parameters to filter scenes -max_cloud = 30 -# %% QUERY LANDSAT -items57 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [ - "landsat-5", "landsat-7", - ]) - -items89 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [ - "landsat-8", "landsat-9" - ]) - -# %% LOAD BANDS -indices = ["NDVI","LAI","BSI","MSAVI","NDMI"] # EVI, LAI -assets = ['blue','green','red','nir08','swir16','swir22'] -# get the data the lazy way -data89 = ( - stackstac.stack( - items89, - assets=assets, - bounds_latlon=bbox, - epsg=4326, - )) -del data89.attrs['spec'] - -data57 = ( - stackstac.stack( - items57, - assets=assets, - bounds_latlon=bbox, - epsg=4326, - )) -del data57.attrs['spec'] - -# %% MATCH REPROJECTION using rioxarray -print(f'matching datasets ... 
') -ds57 = data57.to_dataset(dim='band') -ds57 = ds57.rio.write_crs('4326') -ds89 = data89.to_dataset(dim='band') -ds89 = ds89.rio.write_crs('4326') - -ds57 = ds57.rio.reproject_match(ds89) - -#%% CONCAT DATASETS -%%time -ds = xr.concat([ds57, ds89 ], dim="time", join='outer') -ds = ds.sortby('time') - -# REPROJECT -#%% -print('reprojecting') -ds = ds.rio.write_crs('4326') -ds = ds.rio.reproject('EPSG:4326') -ds = ds.rename({'x': 'longitude','y': 'latitude'}) - -#%% clip nodata and run simple diagnostic -ds_ = xr.where(ds > 60000, np.nan, ds) - -# for var in list(ds.data_vars): -# print(var, ds_[var].quantile([.01,.1,.5,.9,.99], skipna=True), '\n') - - #%% - %%time -# INTERPOLATE NANs -print('interpolating NaNs') -ds_ = ds_.interpolate_na(dim='time', - method='pchip', - #limit = 7, - use_coordinate=True) - -# %% XXX SMOOTHENING WOULD BE COOL -%%time -smooth = True -w = 4 -sm = 'pchip_smW'+str(w) -if smooth: - print('smoothening...') - ds_ = ds_.chunk(dict(time=-1)) - ds_ = ds_.rolling(time=w, - center=True).mean(savgol_filter, - window = w, - polyorder=2) - - - - - - - - - -#%% CALCULATE INDICES -%%time -ds_ = ds_.rename({'nir08':'nir'}) -dsi = calculate_indices(ds_, - index= indices, - satellite_mission='ls', - #normalise=True, - drop=True); -dsi -#%% REPROJECT -print('reprojecting') -dsi = dsi.rio.write_crs('4326') -dsi = dsi.rio.reproject('EPSG:4326') -dsi = dsi.rename({'x': 'longitude','y': 'latitude'}) -#%% -# DROPPING STUFF -drops = ['landsat:correction','landsat:wrs_path','landsat:wrs_row', - 'landsat:collection_number','landsat:wrs_type','instruments', - 'raster:bands','sci:doi'] -dsi = dsi.drop_vars(drops) -dsi = dsi.astype('float32') - -#%% -dsi.to_netcdf(f'{path_nc}/{name}_IVs.nc') -#XXX BSI e NDVI ok, LAI e EVI weird - -# #%% -# for iv in indices: -# dsi[iv].to_netcdf(f'{path_nc}/{name}_{iv}.nc') - -# %% XXX OS INDICES SAO GERADOS APARENTEMENTE OK - -lat = field.geometry.centroid.y.values[0] -lon = field.geometry.centroid.x.values[0] - -for iv in indices: - dsi[iv].sel(latitude=lat, longitude=lon, - method='nearest').plot();plt.grid();plt.show();plt.close() - - - - - - - - - - - - - - - - -#%% IVS Climatology -Cdsi = dsi.groupby('time.month').mean(skipna=True) - -Cdsi.load() - -#%% -Cdsi.to_netcdf(f'{path_nc}/{name}_IVs_cli.nc') - -#%% -for iv in indices: - Cdsi[iv].sel(latitude=lat, longitude=lon, method='nearest').plot() - plt.grid();plt.plot(); plt.show() - -#%% -for iv in indices: - dsi[iv].sel(latitude=lat, longitude=lon, method='nearest').plot() - plt.grid();plt.plot(); plt.show() - - -# %% -if zscores: - print('calculating zscores') - dsi_mean = dsi.groupby('time.month').mean(dim='time') - dsi_std = dsi.groupby('time.month').std(dim='time') - - dsi_anom = dsi.groupby('time.month') - dsi_mean - dsi_z = dsi_anom.groupby('time.month') / dsi_std - - dsi_z.to_netcdf(f'{path_nc}/{name}_Z-{sm}.nc') - print('zscores saved') - -print(f'{time.time()-start} seconds') - - -# XXX XXX XXX XXX ... 
,,, XXX XXX -# %% THE EXTRACTION MISSION -def mask_farm(field,dst): - - mask = xr_rasterize(field,dst) - # #mask data - dst = dst.where(mask) - # #convert to float 32 to conserve memory - #ds = ds.astype(np.int16) * 1000 - dst = dst.astype(np.float32) - print('Farm masked outside of boundaries!') - return dst - -if savecsv: - print('Masking farm') - dam = mask_farm(field,dsi) - -# %% Create zones for paddocks -def farm_zones(field,data,column,ochunk=64): - - fz = xr_rasterize(field,data,attribute_col=column,verbose=True) - fz = fz.chunk(ochunk) - fz.astype('int16') - return fz - -start = time.time() - -def extract_fz_timeseries(dst, data, field, column, path_csv, name, suffix, band, ochunk=64, verbose=False): - ''' - Extract time-series for farm zones for one variable - band is, for example in LST 89, 'lwir11' - ''' - fz = farm_zones(field,data,column,ochunk) - tozip = [] - dstrc = dst.chunk(ochunk) - - # - tempo = pd.to_datetime(dam.time.values) - anos = np.unique([str(x) for x in tempo.year]) - - for ano in anos[:-1]: - - # get stats for the first dataframe - print(f'working on {ano}') - data_ = dstrc[band].sel(time=dstrc[band].time.values[0]).squeeze() - data_ = data_.sel(time=slice(ano+'-01-01',str(int(ano)+1)+'12-31')) - print(f'computing stats for the first date of year {ano}') - outst = zonal_stats(zones=fz, values=data_).compute() - outst['date'] = str(dstrc[band].time.values[0]) - data_.close() - - # and through the loop - for t in dstrc.time.values[1:]: - data_ = dstrc[band].sel(time=t).squeeze() - if verbose: print(f'computing stats for {t}') - - outst1 = zonal_stats(zones=fz, values=data_).compute() - - outst1['date'] = str(t) - outst = pd.concat([outst,outst1]) - data_.close() - del outst1 - namestr = f'{path_csv}/{name}_{band}_{ano}_{suffix}.csv' - #tozip.append(namestr) - outst.to_csv(namestr) - print(f'{namestr} SAVED \n \n') - del outst, dstrc, data_ - - # if zip: - # with zipfile.ZipFile(f'{path_csv}/{name}_{band}.zip', 'w') as zipMe: - # for file in tozip: - # zipMe.write(file, compress_type=zipfile.ZIP_DEFLATED) - -# %% -#ds = da.to_dataset() -extract_fz_timeseries(dsi, - ds, - field, - 'mod_fiscal', - path_csv, - name, - '_-_', - 'NDVI', - ochunk=64, verbose=False) -# %% diff --git a/get_IVs_unify_rolo_embrapa.py b/get_IVs_unify_rolo_embrapa.py deleted file mode 100644 index a5d7e83..0000000 --- a/get_IVs_unify_rolo_embrapa.py +++ /dev/null @@ -1,362 +0,0 @@ -# %% -print(''' - Vegetation Indices series extraction - from Landsat series - - --- - created by Denis Mariano - denis@seca.space - www.seca.space - 2023-12 - ToDo's - - verificar porque EVI e LAI não estão displaying no valuetool - - TEM QUE DAR UM TRATO NOS VALUES - - agregar no tempo, zscores - - plots - - extraction - - o pior de todos - datas faltantes - - ''') - -# %% -import time -start = time.time() - -import pylab as plt -from datetime import date -import sys -import subprocess -import pkg_resources - -required = {'rasterstats','odc-ui'} -installed = {pkg.key for pkg in pkg_resources.working_set} -missing = required - installed - -if missing: - python = sys.executable - subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL) -else: - print(f'Required packages {required} already installed.') - -import geopandas as gpd -import stackstac -import xarray as xr -import numpy as np -import rioxarray -from scipy.signal import savgol_filter -import zipfile -from xrspatial import zonal_stats -import pandas as pd -import numpy as np - 
-sys.path.append('/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/') -from grits import * - -print('all good!') -# %% DEFINE AREA OF INTEREST -# ========================= - -# %% THE CAR WAY - -#%% Embrapa Sao Carlos -path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -file = path_vector + 'fazenda_embrapa.gpkg' -layer = 'talhoes' - -# Get FIELD -field = gpd.read_file(file, layer=layer) -#field = field[field['Re'] == 80000] - -bbox, lat_range, lon_range = get_lims(field) -print(field.head()) -field.plot(column='tid') - -# %% Define period and output path -# 2022-04-02, '2022-11-10' - embrapa sanca -# '2022-07-01' - tabapora mt -name = 'embrapa_saocarlos' -#dt1 = '1985-01-01' -dt1 = '2020-11-10' -dt2 = '2022-04-01' - -#dt1 = '2022-11-15' -#dt2 = '2022-10-28' - -#dt2 = str(date.today()) - -datetime = dt1 + '/' + dt2 - -print(datetime) - -# Parameters to save raster data? -savenc = True -zscores = True -path_nc = '/home/jovyan/PlanetaryComputerExamples/OUT/nc/' - -# parameters for extracting data -savecsv = True -column = 'TID' -path_csv = '/home/jovyan/PlanetaryComputerExamples/OUT/csv/' - -# some parameters to filter scenes -max_cloud = 70 -# %% QUERY LANDSAT -items57 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [ - "landsat-5", "landsat-7", - ]) - -items89 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [ - "landsat-8", "landsat-9" - ]) - -# %% LOAD BANDS -indices = ["NDVI","MSAVI","NDMI","BSI","LAI"] # EVI, LAI -assets = ['blue','green','red','nir08','swir16','swir22'] -# get the data the lazy way -data89 = ( - stackstac.stack( - items89, - assets=assets, - bounds_latlon=bbox, - epsg=4326, - )) -del data89.attrs['spec'] - -data57 = ( - stackstac.stack( - items57, - assets=assets, - bounds_latlon=bbox, - epsg=4326, - )) -del data57.attrs['spec'] - -# %% MATCH REPROJECTION using rioxarray -print(f'matching datasets ... 
') -ds57 = data57.to_dataset(dim='band') -ds57 = ds57.rio.write_crs('4326') -ds89 = data89.to_dataset(dim='band') -ds89 = ds89.rio.write_crs('4326') - -ds57 = ds57.rio.reproject_match(ds89) - -#%% CONCAT DATASETS -ds = xr.concat([ds57, ds89 ], dim="time", join='outer') -ds = ds.sortby('time') -ds = ds.chunk(dict(time=-1)) - - -#%% -# if pau: -# ds2 = ds.copy() -# #%% -# dss = xr.concat([ds2,ds], dim="time", join='outer') -# dss = dss.sortby('time') -# if dss: -# print('reprojecting') -# dss = dss.rio.write_crs('4326') -# dss = dss.rio.reproject('EPSG:4326') -# dss = dss.rename({'x': 'longitude','y': 'latitude'}) -# ds_ = xr.where(dss > 50000, np.nan, dss) - -# #%% REPROJECT -# print('reprojecting') -# ds = ds.rio.write_crs('4326') -# ds = ds.rio.reproject('EPSG:4326') -# ds = ds.rename({'x': 'longitude','y': 'latitude'}) - -#%% clip nodata and run simple diagnostic -ds = xr.where(ds > 65000, np.nan, ds) - -for var in list(ds.data_vars): - print(var, ds[var].quantile([.01,.1,.5,.9,.99], skipna=True).values, '\n') - - #%% -%%time -# INTERPOLATE NANs -print('interpolating NaNs') -ds = ds.interpolate_na(dim='time', - method='pchip', #pchip - #limit = 7, - use_coordinate=True) - -# %% XXX SMOOTHENING WOULD BE COOL -%%time -smooth = True -w = 4 -sm = 'pchip_w'+str(w) -if smooth: - print('smoothening...') - ds = ds.rolling(time=w, - center=True).mean(savgol_filter, - window = w, - polyorder=2) - - - -#%% CALCULATE INDICES -ds = ds.rename({'nir08':'nir'}) -dsi = calculate_indices(ds, - index= indices, - satellite_mission='ls', - #normalise=True, - drop=True); -#%% REPROJECT -print('reprojecting') -dsi = dsi.rio.write_crs('4326') -dsi = dsi.rio.reproject('EPSG:4326') -dsi = dsi.rename({'x': 'longitude','y': 'latitude'}) -#%% -# DROPPING STUFF -dsi = dsi.astype('float32') - -drops = ['landsat:correction','landsat:wrs_path','landsat:wrs_row', - 'landsat:collection_number','landsat:wrs_type','instruments', - 'raster:bands','sci:doi'] -dsi = dsi.drop_vars(drops) - -#%% -dsi.to_netcdf(f'{path_nc}/{dt1}_{dt2}_{iv}_{name}_.nc') -#XXX BSI e NDVI ok, LAI e EVI weird - -# #%% -# for iv in indices: -# dsi[iv].to_netcdf(f'{path_nc}/{name}_{iv}.nc') - -# %% XXX OS INDICES SAO GERADOS APARENTEMENTE OK -lat = field.geometry.centroid.y.values[0] -lon = field.geometry.centroid.x.values[0] - -for iv in indices: - dsi[iv].sel(latitude=lat, longitude=lon, - method='nearest').plot();plt.grid();plt.show();plt.close() - - - -#%% -# #%% IVS Climatology -# Cdsi = dsi.groupby('time.month').mean(skipna=True) - -# Cdsi.load() - -# #%% -# Cdsi.to_netcdf(f'{path_nc}/{name}_IVs_cli.nc') - -# #%% -# for iv in indices: -# Cdsi[iv].sel(latitude=lat, longitude=lon, method='nearest').plot() -# plt.grid();plt.plot(); plt.show() - -# #%% -# for iv in indices: -# dsi[iv].sel(latitude=lat, longitude=lon, method='nearest').plot() -# plt.grid();plt.plot(); plt.show() - - -# # %% -# if zscores: -# print('calculating zscores') -# dsi_mean = dsi.groupby('time.month').mean(dim='time') -# dsi_std = dsi.groupby('time.month').std(dim='time') - -# dsi_anom = dsi.groupby('time.month') - dsi_mean -# dsi_z = dsi_anom.groupby('time.month') / dsi_std - -# dsi_z.to_netcdf(f'{path_nc}/{name}_Z-{sm}.nc') -# print('zscores saved') - -# print(f'{time.time()-start} seconds') - - -# # XXX XXX XXX XXX ... 
,,, XXX XXX -# # %% THE EXTRACTION MISSION -# def mask_farm(field,dst): - -# mask = xr_rasterize(field,dst) -# # #mask data -# dst = dst.where(mask) -# # #convert to float 32 to conserve memory -# #ds = ds.astype(np.int16) * 1000 -# dst = dst.astype(np.float32) -# print('Farm masked outside of boundaries!') -# return dst - -# if savecsv: -# print('Masking farm') -# dam = mask_farm(field,dsi) - -# # %% Create zones for paddocks -# def farm_zones(field,data,column,ochunk=64): - -# fz = xr_rasterize(field,data,attribute_col=column,verbose=True) -# fz = fz.chunk(ochunk) -# fz.astype('int16') -# return fz - -# start = time.time() - -# def extract_fz_timeseries(dst, data, field, column, path_csv, name, suffix, band, ochunk=64, verbose=False): -# ''' -# Extract time-series for farm zones for one variable -# band is, for example in LST 89, 'lwir11' -# ''' -# fz = farm_zones(field,data,column,ochunk) -# tozip = [] -# dstrc = dst.chunk(ochunk) - -# # -# tempo = pd.to_datetime(dam.time.values) -# anos = np.unique([str(x) for x in tempo.year]) - -# for ano in anos[:-1]: - -# # get stats for the first dataframe -# print(f'working on {ano}') -# data_ = dstrc[band].sel(time=dstrc[band].time.values[0]).squeeze() -# data_ = data_.sel(time=slice(ano+'-01-01',str(int(ano)+1)+'12-31')) -# print(f'computing stats for the first date of year {ano}') -# outst = zonal_stats(zones=fz, values=data_).compute() -# outst['date'] = str(dstrc[band].time.values[0]) -# data_.close() - -# # and through the loop -# for t in dstrc.time.values[1:]: -# data_ = dstrc[band].sel(time=t).squeeze() -# if verbose: print(f'computing stats for {t}') - -# outst1 = zonal_stats(zones=fz, values=data_).compute() - -# outst1['date'] = str(t) -# outst = pd.concat([outst,outst1]) -# data_.close() -# del outst1 -# namestr = f'{path_csv}/{name}_{band}_{ano}_{suffix}.csv' -# #tozip.append(namestr) -# outst.to_csv(namestr) -# print(f'{namestr} SAVED \n \n') -# del outst, dstrc, data_ - -# # if zip: -# # with zipfile.ZipFile(f'{path_csv}/{name}_{band}.zip', 'w') as zipMe: -# # for file in tozip: -# # zipMe.write(file, compress_type=zipfile.ZIP_DEFLATED) - -# # %% -# #ds = da.to_dataset() -# extract_fz_timeseries(dsi, -# ds, -# field, -# 'mod_fiscal', -# path_csv, -# name, -# '_-_', -# 'NDVI', -# ochunk=64, verbose=False) - -# %% diff --git a/get_IVs_yearly.py b/get_IVs_yearly.py deleted file mode 100644 index 212c96f..0000000 --- a/get_IVs_yearly.py +++ /dev/null @@ -1,173 +0,0 @@ - -#%% -import pylab as plt -from datetime import date -import sys -import subprocess -import pkg_resources - -required = {'rasterstats','odc-ui'} -installed = {pkg.key for pkg in pkg_resources.working_set} -missing = required - installed - -if missing: - python = sys.executable - subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL) -else: - print(f'Required packages {required} already installed.') - -import geopandas as gpd -import stackstac -import xarray as xr -import numpy as np -import rioxarray -from scipy.signal import savgol_filter -import zipfile -from xrspatial import zonal_stats -import pandas as pd -import numpy as np - -sys.path.append('/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/') -from grits import * - -print('all good!') - - - - -#%% Embrapa Sao Carlos -path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -file = path_vector + 'fazenda_embrapa.gpkg' -layer = 'talhoes' - -# Get FIELD -field = gpd.read_file(file, layer=layer) -#field = field[field['Re'] == 80000] - -bbox, lat_range, 
lon_range = get_lims(field) -print(field.head()) -field.plot(column='tid') - -# to save -savenc = True -zscores = True -path_nc = '/home/jovyan/PlanetaryComputerExamples/OUT/nc/' -# parameters for extracting data -savecsv = True -column = 'TID' -path_csv = '/home/jovyan/PlanetaryComputerExamples/OUT/csv/' - -# some parameters to filter scenes -max_cloud = 70 - -name = 'embrapa_sc_testeyearly' - - - -indices = ["NDVI","MSAVI","NDMI","BSI"] # EVI, LAI -assets = ['blue','green','red','nir08','swir16','swir22'] - -# %% -### THE FUCKING for -### - -for ano in range(2022,2023): - dt1 = str(ano)+'-11-15' - dt2 = str(ano+1)+'-06-20' - - datetime = dt1 + '/' + dt2 - print(datetime) - # get items - items57 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [ - "landsat-5", "landsat-7", - ]) - - items89 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [ - "landsat-8", "landsat-9" - ]) - - # get Data - data89 = ( - stackstac.stack( - items89, - assets=assets, - bounds_latlon=bbox, - epsg=4326, - )) - del data89.attrs['spec'] - - data57 = ( - stackstac.stack( - items57, - assets=assets, - bounds_latlon=bbox, - epsg=4326, - )) - del data57.attrs['spec'] - - # Match, Repro, Concat - print(f'matching datasets ... ') - ds57 = data57.to_dataset(dim='band') - ds57 = ds57.rio.write_crs('4326') - ds89 = data89.to_dataset(dim='band') - ds89 = ds89.rio.write_crs('4326') - - ds57 = ds57.rio.reproject_match(ds89) - - ds = xr.concat([ds57, ds89 ], dim="time", join='outer') - ds = ds.sortby('time') - ds = ds.chunk(dict(time=-1)) - - # data wrangling - ds = xr.where(ds > 65000, np.nan, ds) - - print('interpolating NaNs') - ds = ds.interpolate_na(dim='time', - method='pchip', #pchip - #limit = 7, - use_coordinate=True) - - smooth = True - w = 4 - sm = 'pchip_w'+str(w) - if smooth: - print('smoothening...') - ds = ds.rolling(time=w, - center=True).mean(savgol_filter, - window = w, - polyorder=2) - - # CALCULATE INDICES - ds = ds.rename({'nir08':'nir'}) - dsi = calculate_indices(ds, - index= indices, - satellite_mission='ls', - drop=True) - - print('reprojecting') - dsi = dsi.rio.write_crs('4326') - dsi = dsi.rio.reproject('EPSG:4326') - dsi = dsi.rename({'x': 'longitude','y': 'latitude'}) - - # DROPPING STUFF - dsi = dsi.astype('float32') - - drops = ['landsat:correction','landsat:wrs_path','landsat:wrs_row', - 'landsat:collection_number','landsat:wrs_type','instruments', - 'raster:bands','sci:doi'] - dsi = dsi.drop_vars(drops) - - #SAVE - print('saving...') - dsi.to_netcdf(f'{path_nc}/{dt1}_{dt2}_{name}.nc') - print(f'{path_nc}/{dt1}_{dt2}_{name}.nc saved') - - del dsi, ds, ds57, data57, items57, ds89, data89, items89 - -# %% diff --git a/get_LST.py b/get_LST.py deleted file mode 100644 index 7fb8a24..0000000 --- a/get_LST.py +++ /dev/null @@ -1,276 +0,0 @@ -#%% -import time -start = time.time() - -from datetime import date -import sys -import subprocess -import pkg_resources - -required = {'rasterstats','odc-ui'} -installed = {pkg.key for pkg in pkg_resources.working_set} -missing = required - installed - -if missing: - python = sys.executable - subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL) -else: - print(f'Required packages {required} already installed.') - -import geopandas as gpd -import stackstac -import xarray as xr -import numpy as np -import rioxarray -from scipy.signal import savgol_filter -import zipfile -from xrspatial import zonal_stats -import pandas as pd -import numpy as np - 
-sys.path.append('/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/') -from grits import * - -print('all good!') - -#%% DEFINE AREA OF INTEREST - -# Name for reference -name = 'Uniguiri_farm_unify' - -# AOI file and layer (for GPKG) -path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -file = path_vector + 'fazenda_uniguiri.gpkg' -layer = 'piquetes_tid' - -# Landsat 4,5,7 have 'lwir' and 8 and 9 have 'lwir11' -# datetime89 = '2013-05-01/'+str(date.today()) -# datetime457 = '1985-01-01/2013-05-01' -datetime89 = '2021-05-01/2022-11-22' -datetime457 = '2006-01-01/2008-05-01' - - - -# Parameters to save raster data? -savenc = True -zscores = True -path_nc = '/home/jovyan/PlanetaryComputerExamples/OUT/nc/' - -# parameters for extracting data -savecsv = True -column = 'TID' -path_csv = '/home/jovyan/PlanetaryComputerExamples/OUT/csv/' - -# some parameters to filter scenes -max_cloud = 50 - -# %%# Get FIELD -field = gpd.read_file(file, layer=layer) -#field = field[field['Re'] == 80000] - -bbox, lat_range, lon_range = get_lims(field) -print(field.head()) -field.plot(column='TID') - -# %% QUERY LANDSAT -# You can exclude some Landsats from the list -items89 = query_L89_items(datetime=datetime89, - bbox=bbox, - max_cloud=max_cloud, - landsats = ["landsat-8", "landsat-9"] - ) - -items457 = query_L457_items(datetime=datetime457, - bbox=bbox, - max_cloud=max_cloud, - landsats = ["landsat-4", "landsat-5", "landsat-7"] - ) - -#%% -assets89 = ['lwir11'] -data89 = ( - stackstac.stack( - items89, - assets=assets89, - bounds_latlon=bbox, - epsg=4326, # o xarray de imagens será retornado no EPSG:4326 - #resolution = 0.000281612818071153, # cuidado se for mexer na resolucao, tente algo como 0.001 para começar, pois é graus (não metros) - )) -data89 = data89.rename({'x': 'longitude','y': 'latitude'}) -dst89 = data89.to_dataset(dim='band') -del dst89.attrs['spec'] -# %% -assets457 = ['lwir'] -data457 = ( - stackstac.stack( - items457, - assets=assets457, - bounds_latlon=bbox, - epsg=4326, # o xarray de imagens será retornado no EPSG:4326 - #resolution = 0.000281612818071153, # cuidado se for mexer na resolucao, tente algo como 0.001 para começar, pois é graus (não metros) - )) - -data457 = data457.rename({'x': 'longitude','y': 'latitude'}) -dst457 = data457.to_dataset(dim='band') -del dst457.attrs['spec'] -# %% -def get_lst(lwirband, items, dst, w=5): - ''' - Convert lwir to Celcius and prepare dataset for further processing - lwirband (str): 'lwir' for 457 and lwirband for 89 - da (DataArray loaded from items__) - w (int): rolling mean window size, default is 5 - ''' - # get lwir11 band info - band_info = items[0].assets[lwirband].extra_fields["raster:bands"][0] - print(band_info) - - dst[lwirband] = dst[lwirband].astype(float) - dst[lwirband] *= band_info['scale'] - dst[lwirband] += band_info['offset'] - dst[lwirband] -= 273.15 - - # variables to drop so I can save the .nc later on - drops = ['landsat:correction','landsat:wrs_path','landsat:wrs_row', - 'landsat:collection_number','landsat:wrs_type','instruments', - 'raster:bands','instruments'] - dst = dst.drop_vars(drops) - # interpolate NaNs (rechunk it first) - dst = dst.chunk(dict(time=-1)) - dst[lwirband] = xr.where(dst[lwirband] < 1, np.nan, dst[lwirband]) # - dst[lwirband] = xr.where(dst[lwirband] > 65, np.nan, dst[lwirband]) - dst[lwirband] = dst[lwirband].interpolate_na(dim='time',method='linear') - - # I`m overwriting the raw data - dst[lwirband] = dst[lwirband].rolling(time=w, center=True).mean(savgol_filter, window = w, 
polyorder=2) - del band_info - return dst - -# %% finally, get the Land Surface Temperature in Celcius -lst89 = get_lst('lwir11',items89, dst89,5) -lst457 = get_lst('lwir',items457, dst457,5) - -#%% -def lst2nc(dst,path_nc,name,Landsats): - ''' - save LST data to netcdf - ''' - Landsats = str(Landsats) - - print('Reprojecting and saving ... \n') - dst = dst.rio.write_crs('4326') - dst = dst.rio.reproject('EPSG:4326') - dst = dst.rename({'x': 'longitude','y': 'latitude'}) - print('... saving ...') - - try: - dst.to_netcdf(f'{path_nc}lst_{name}_{Landsats}.nc', mode='w') - except: - print('trying to remove some weird shit') - dst = dst.drop_vars(['raster:bands','instruments']) - dst.to_netcdf(f'{path_nc}lst_{name}_{Landsats}.nc', mode='w') - - print(f'lst_{name}_{Landsats}.nc saved!') - -if savenc: - lst2nc(dst89,path_nc,name,89) - lst2nc(dst457,path_nc,name,457) - - -#%% EXTRACTING data - -def mask_farm(field,dst): - - mask = xr_rasterize(field,dst) - # #mask data - dst = dst.where(mask) - # #convert to float 32 to conserve memory - #ds = ds.astype(np.int16) * 1000 - dst = dst.astype(np.float32) - print('Farm masked outside of boundaries!') - return dst - -if savecsv: - print('Masking farm') - lst89m = mask_farm(field,lst89) - lst457m = mask_farm(field,lst457) - - - -print(f'Tempo total de processamento salvando os netcdfs no final: {time.time() - start} segundos') - - - - -if savecsv: - start = time.time() - -#%% Create zones for paddocks -def farm_zones(field,data,column,ochunk=64): - - fz = xr_rasterize(field,data,attribute_col=column,verbose=True) - fz = fz.chunk(ochunk) - fz.astype('int16') - return fz - -#%% and finally, the extraction -def extract_fz_timeseries(dst, data, field, column, path_csv, name, suffix, band, ochunk=64, zip=False, verbose=False): - ''' - Extract time-series for farm zones for one variable - band is, for example in LST 89, 'lwir11' - ''' - fz = farm_zones(field,data,column,ochunk) - tozip = [] - dstrc = dst.chunk(ochunk) - # get stats for the first dataframe - data_ = dstrc[band].sel(time=dstrc[band].time.values[0]).squeeze() - print('computing stats for the first date') - outst = zonal_stats(zones=fz, values=data_).compute() - outst['date'] = str(dstrc[band].time.values[0]) - data_.close() - - # and through the loop - for t in dstrc.time.values[1:]: - data_ = dstrc[band].sel(time=t).squeeze() - if verbose: print(f'computing stats for {t}') - - outst1 = zonal_stats(zones=fz, values=data_).compute() - - outst1['date'] = str(t) - outst = pd.concat([outst,outst1]) - data_.close() - del outst1 - namestr = f'{path_csv}/{name}_{band}_{suffix}.csv' - tozip.append(namestr) - outst.to_csv(namestr) - print(f'{namestr} SAVED \n \n') - del outst, dstrc, data_ - - if zip: - with zipfile.ZipFile(f'{path_csv}/{name}_{band}.zip', 'w') as zipMe: - for file in tozip: - zipMe.write(file, compress_type=zipfile.ZIP_DEFLATED) - -#%% -extract_fz_timeseries(da, - data, - field, - 'TID', - path_csv, - name, - 'allLandsat', - 'lst', - ochunk=64, verbose=False) -#%% -# extract_fz_timeseries(lst457, -# data457, -# field, -# 'TID', -# path_csv, -# name, -# '457', -# 'lwir', -# ochunk=64, zip=False, verbose=True) -# #%% - -print(f'Tempo total de processamento das extractions: {time.time() - start} segundos') diff --git a/get_LST_unify.py b/get_LST_unify.py deleted file mode 100644 index 1a69a7d..0000000 --- a/get_LST_unify.py +++ /dev/null @@ -1,314 +0,0 @@ -# %% -print(''' - Landsat Land Surface Temperature series extractor - created by Denis Mariano - www.seca.space - denis@seca.space - 
''') - -# %% -import time -start = time.time() - -from datetime import date -import sys -import subprocess -import pkg_resources - -required = {'rasterstats','odc-ui'} -installed = {pkg.key for pkg in pkg_resources.working_set} -missing = required - installed - -if missing: - python = sys.executable - subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL) -else: - print(f'Required packages {required} already installed.') - -import geopandas as gpd -import pylab as plt -import stackstac -import xarray as xr -import numpy as np -import rioxarray -from scipy.signal import savgol_filter -import zipfile -from xrspatial import zonal_stats -import pandas as pd -import numpy as np - -sys.path.append('/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/') -from grits import * - -print('all good!') -# %% DEFINE AREA OF INTEREST -# ========================= -# Name for reference -# name = 'Uniguiri_full_' - -# # AOI file and layer (for GPKG) -# path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -# file = path_vector + 'fazenda_uniguiri.gpkg' -# layer = 'piquetes_tid' - -# # Get FIELD -# field = gpd.read_file(file, layer=layer) -# #field = field[field['Re'] == 80000] - -# bbox, lat_range, lon_range = get_lims(field) -# print(field.head()) -# field.plot(column='TID') - -# %% THE CAR WAY -''' -a CAR MT-5103601-948E6FB555E3445CB7E0538F61483371 - XXX ler o gpkg do MT leva 30 segundos, não está bom - -''' -#car = 'MT-5103601-948E6FB555E3445CB7E0538F61483371' -# car = 'MT-5104807-84F5196D22B847C1BD91AA27DB598BC1' -# if car: -# name = car -# gdf = gpd.read_file('/home/jovyan/PlanetaryComputerExamples/vetorial/CAR/MT_CAR_AREA_IMOVEL_.gpkg') -# field = gdf[gdf['cod_imovel'] == name] - -# bbox, lat_range, lon_range = get_lims(field) -# print(field.head()) -# del gdf -# field.plot() - -#%% EMBRAPA SANCA -name = 'embrapa_sc' -path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -file = path_vector + 'fazenda_embrapa.gpkg' -layer = 'talhoes' -field = gpd.read_file(file, layer=layer) - -#%% Belem PA -# name = 'Belem2' -# path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/' -# file = path_vector + 'belem.gpkg' -# field = gpd.read_file(file) - - - - -# Get FIELD - -#field = field[field['Re'] == 80000] - -bbox, lat_range, lon_range = get_lims(field) -print(field.head()) -field.plot()#column='tid' -plt.title(name) - -# %% Define period and output path -# Landsat 4,5,7 have 'lwir' and 8 and 9 have 'lwir11' -#datetime='1985-01-01/'+str(date.today()) -#datetime='1985-01-01/2022-04-01'#+str(date.today()) -datetime='2022-11-25/'+str(date.today()) - -print(datetime) - -# Parameters to save raster data? 
-savenc = True -zscores = True -path_nc = '/home/jovyan/PlanetaryComputerExamples/OUT/nc/' - -# parameters for extracting data -savecsv = True -column = 'TID' -path_csv = '/home/jovyan/PlanetaryComputerExamples/OUT/csv/' - -# some parameters to filter scenes -max_cloud = 50 -# %% QUERY LANDSAT -items57 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [ - "landsat-5", "landsat-7", - ]) - -items89 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [ - "landsat-8", "landsat-9" - ]) - -# get the data the lazy way -data89 = ( - stackstac.stack( - items89, - assets=['lwir11'], - bounds_latlon=bbox, - epsg=4326, - )) - -data57 = ( - stackstac.stack( - items57, - assets=['lwir'], - bounds_latlon=bbox, - epsg=4326, - )) -# %% The CONCAT Way -# SQUEEZE monoBAND -data89 = data89.rename('lwir').squeeze() -data57 = data57.rename('lwir').squeeze() - -# MATCH REPROJECTION using rioxarray -print('matching DataArrays spatially') -data57 = data57.rio.reproject_match(data89) - -# CONCATENATE DATAARRAYS -da = xr.concat([data89, data57], dim="time", join='outer') - -# RESCALE AND FILTER FOR LAND SURFACE TEMPERATURE -print('reescaling LST') -scale = items89[0].assets['lwir11'].extra_fields["raster:bands"][0]['scale'] -offset = items89[0].assets['lwir11'].extra_fields["raster:bands"][0]['offset'] -da = da*scale + offset - 273.15 -da = da.astype('float32') -da = xr.where((da < -5) | (da > 65), np.nan, da) - -# REPROJECT -print('reprojecting') -da = da.rio.write_crs('4326') -da = da.rio.reproject('EPSG:4326') -da = da.rename({'x': 'longitude','y': 'latitude'}) - -# REORDER -da = da.rename('lst') -da = da.sortby('time') - -# INTERPOLATE NANs -print('interpolating NaNs') -da = da.interpolate_na(dim='time', - method='pchip', - limit = 7, - use_coordinate=True) - -# %% XXX SMOOTHENING WOULD BE COOL -smooth = True -w = 7 -sm = 'pchip_smW'+str(w) -if smooth: - print('smoothening...') - da = da.chunk(dict(time=-1)) - da = da.rolling(time=w, - center=True).mean(savgol_filter, - window = w, - polyorder=2) - -# DROPPING STUFF -drops = ['landsat:correction','landsat:wrs_path','landsat:wrs_row', - 'landsat:collection_number','landsat:wrs_type','instruments', - 'raster:bands'] -da = da.drop_vars(drops) - -#%% Save NC -da.to_netcdf(f'{path_nc}/{name}_LST{sm}.nc') -print(f'SAVED {path_nc}/{name}_LST{sm}.nc') -# %% -if zscores: - print('calculating zscores') - da_mean = da.groupby('time.month').mean(dim='time') - da_std = da.groupby('time.month').std(dim='time') - - da_anom = da.groupby('time.month') - da_mean - da_z = da_anom.groupby('time.month') / da_std - - da_z.to_netcdf(f'{path_nc}/{name}_Z-LST{sm}.nc') - print('zscores saved') - -print(f'{time.time()-start} seconds') - - - -# XXX XXX XXX XXX ... 
,,, XXX XXX -# %% THE EXTRACTION MISSION -def mask_farm(field,dst): - - mask = xr_rasterize(field,dst) - # #mask data - dst = dst.where(mask) - # #convert to float 32 to conserve memory - #ds = ds.astype(np.int16) * 1000 - dst = dst.astype(np.float32) - print('Farm masked outside of boundaries!') - return dst - -if savecsv: - print('Masking farm') - dam = mask_farm(field,da) - -# %% Create zones for paddocks -def farm_zones(field,data,column,ochunk=64): - - fz = xr_rasterize(field,data,attribute_col=column,verbose=True) - fz = fz.chunk(ochunk) - fz.astype('int16') - return fz - -start = time.time() - -def extract_fz_timeseries(dst, data, field, column, path_csv, name, suffix, band, ochunk=64, verbose=False): - ''' - Extract time-series for farm zones for one variable - band is, for example in LST 89, 'lwir11' - ''' - fz = farm_zones(field,data,column,ochunk) - tozip = [] - dstrc = dst.chunk(ochunk) - - # - tempo = pd.to_datetime(dam.time.values) - anos = np.unique([str(x) for x in tempo.year]) - - for ano in anos[:-1]: - - # get stats for the first dataframe - print(f'working on {ano}') - data_ = dstrc[band].sel(time=dstrc[band].time.values[0]).squeeze() - data_ = data_.sel(time=slice(ano+'-01-01',str(int(ano)+1)+'12-31')) - print(f'computing stats for the first date of year {ano}') - outst = zonal_stats(zones=fz, values=data_).compute() - outst['date'] = str(dstrc[band].time.values[0]) - data_.close() - - # and through the loop - for t in dstrc.time.values[1:]: - data_ = dstrc[band].sel(time=t).squeeze() - if verbose: print(f'computing stats for {t}') - - outst1 = zonal_stats(zones=fz, values=data_).compute() - - outst1['date'] = str(t) - outst = pd.concat([outst,outst1]) - data_.close() - del outst1 - namestr = f'{path_csv}/{name}_{band}_{ano}_{suffix}.csv' - #tozip.append(namestr) - outst.to_csv(namestr) - print(f'{namestr} SAVED \n \n') - del outst, dstrc, data_ - - # if zip: - # with zipfile.ZipFile(f'{path_csv}/{name}_{band}.zip', 'w') as zipMe: - # for file in tozip: - # zipMe.write(file, compress_type=zipfile.ZIP_DEFLATED) - -# %% -ds = da.to_dataset() -extract_fz_timeseries(ds, - da, - field, - 'TID', - path_csv, - name, - 'allLandsat', - 'lst', - ochunk=64, verbose=False) -# %% diff --git a/grits_lst89_p1.py b/grits_lst89_p1.py deleted file mode 100644 index a413686..0000000 --- a/grits_lst89_p1.py +++ /dev/null @@ -1,218 +0,0 @@ - -''' - - Extraindo LST por region (grid) em uma grande propriedade - - Por enquanto, somente Landsat 8 e 9 - - Feb, 9, 2024 - -''' - -#%% -import time -from datetime import date -import sys -import subprocess -import pkg_resources - -required = {'rasterstats','odc-ui'} -installed = {pkg.key for pkg in pkg_resources.working_set} -missing = required - installed - -if missing: - python = sys.executable - subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL) -else: - print(f'Required packages {required} already installed.') - -import geopandas as gpd -import pylab as plt -import stackstac -import xarray as xr -import numpy as np -import rioxarray -from scipy.signal import savgol_filter -import zipfile -from xrspatial import zonal_stats -import pandas as pd -import numpy as np - -sys.path.append('/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/') -from grits import * - -# %% Area Of Interest -# name embrapa_sc , layer talhoes, fazenda_embrapa.gpkg, column 'tid' -path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -file = path_vector + 'iaca_r400.shp' -layer = None -column = 'grid' -field = 
gpd.read_file(file, layer=layer) -print(field.dtypes) - - -bbox, lat_range, lon_range = get_lims(field) -print(field.head()) -field.plot(column=column, legend=True) - - -savenc = True -zscores = True - - -# parameters for extracting data -savecsv = True -path_csv = '/home/jovyan/PlanetaryComputerExamples/OUT/csv/' - - - -# %% -# ### THE FUCKING FOR -# - -name = 'iacanga' -path_nc = '/home/jovyan/PlanetaryComputerExamples/OUT/nc/iacanga/' -max_cloud = 50 -# DATETIME CONTROL -a0 = 2020 -a1 = 2024 -p = 1 # ano a ano -d0 = '-06-20' -d1 = '-06-20' - -# for record, Iacanga -# grid 100 -> 2013 - 2019[ ok -# pulando 2019-2020 - -#### -for i in sorted(field.grid.unique()): - gridstart = time.time() - print(i) - name_ = f'{name}_{i}' - field_ = field[field[column] == i] - bbox, lat_range, lon_range = get_lims(field_) - print(bbox, lat_range, lon_range) - ha = field_.area_ha.sum() - print(f'{name_} de {ha} ha') - field_.plot(); plt.show(); plt.close() - # -# -# - - for ano in range(a0,a1,p): - pstart = time.time() - dt0 = str(ano) + d0 - dt1 = str(ano+p) + d1 - datetime = dt0 + '/' + dt1 - print(f'periodo {datetime}, {column} = {i}') - - # items57 = query_Landsat_items(datetime=datetime, - # bbox=bbox, - # max_cloud=max_cloud, - # landsats = [ - # "landsat-5", "landsat-7", - # ]) - # print('items57 created') - items89 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [#"landsat-5", "landsat-7", - "landsat-8", "landsat-9" - ]) - scale = items89[0].assets['lwir11'].extra_fields["raster:bands"][0]['scale'] - offset = items89[0].assets['lwir11'].extra_fields["raster:bands"][0]['offset'] - print(f'items89 created, scale {scale} and offset {offset}') - - # get the data the lazy way - data89 = ( - stackstac.stack( - items89, - assets=['lwir11'], - bounds_latlon=bbox, - epsg=4326, - #resolution=100 - )) - data89 = data89.rename('lwir').squeeze() - print('data89 ok!') - print(humanbytes(data89.nbytes)) - # data57 = ( - # stackstac.stack( - # items57, - # assets=['lwir'], - # bounds_latlon=bbox, - # epsg=4326, - # resolution=100 - # )) - #data89 - # print('data57 ok!') - ## %% The CONCAT Way - - # MATCH REPROJECTION using rioxarray - # print(f'matching DataArrays spatially for _{datetime}') - # data57 = data57.rio.reproject_match(data89) - - # CONCATENATE DATAARRAYS - # da = xr.concat([data89, data57], dim="time", join='outer') - - # RESCALE AND FILTER FOR LAND SURFACE TEMPERATURE - - da = data89.copy() - - print('reescaling LST') - da = da*scale + offset - 273.15 - da = da.astype('float32') - da = xr.where((da < -5) | (da > 65), np.nan, da) - - # REPROJECT - # print(f'reprojecting_{datetime}') - print('reprojecting...') - da = da.rio.write_crs('4326') - da = da.rio.reproject('EPSG:4326') - da = da.rename({'x': 'longitude','y': 'latitude'}) - print('reprojecting... done') - - # REORDER - da = da.rename('lst') - da = da.sortby('time') - - # INTERPOLATE NANs - print('interpolating NaNs') - da = da.chunk(dict(time=-1)) - da = da.interpolate_na(dim='time', - method='pchip', - limit = 7, - use_coordinate=True) - print('interpolating NaNs... done') - - # XXX SMOOTHENING WOULD BE COOL - smooth = True - w = 3 - sm = 'pchip_'+str(w) - if smooth: - print('smoothening...') - da = da.chunk(dict(time=-1)) - da = da.rolling(time=w, - center=True).mean(savgol_filter, - window = w, - polyorder=2) - print('smoothing... 
done.') - - # DROPPING STUFF - drops = ['landsat:correction','landsat:wrs_path','landsat:wrs_row', - 'landsat:collection_number','landsat:wrs_type','instruments', - 'raster:bands'] - da = da.drop_vars(drops) - - #SAVE - print('saving...') - da.to_netcdf(f'{path_nc}/{dt0}_{dt1}_{name}_{i}_LST_{sm}.nc') - print(f'saving... {path_nc}/{dt0}_{dt1}_{name}_{i}_LST_{sm}.nc DONE!') - del da, data89, items89, - pend = time.time() - print(f'{dt0}_{dt1}_{name}_{i} took {pend - pstart} seconds to complete.') - - gridend = time.time() - print(f' Grid {i} took {(gridend - gridstart)} seconds') - - -#%% \ No newline at end of file diff --git a/grits_vis_p1.py b/grits_vis_p1.py deleted file mode 100644 index f8b88ab..0000000 --- a/grits_vis_p1.py +++ /dev/null @@ -1,151 +0,0 @@ -# %% -print(''' - Vegetation Indices series extraction - from Landsat series - - --- - created by Denis Mariano - denis@seca.space - www.seca.space - 2024-02-09 - ToDo's - - verificar porque EVI e LAI não estão displaying no valuetool - - TEM QUE DAR UM TRATO NOS VALUES - - agregar no tempo, zscores - - plots - - extraction - - ''') - -# %% -import time -start = time.time() - -import pylab as plt -from datetime import date -import sys -import subprocess -import pkg_resources - -required = {'rasterstats','odc-ui'} -installed = {pkg.key for pkg in pkg_resources.working_set} -missing = required - installed - -if missing: - python = sys.executable - subprocess.check_call([python, '-m', 'pip', 'install', *missing], stdout=subprocess.DEVNULL) -else: - print(f'Required packages {required} already installed.') - -import geopandas as gpd -import stackstac -import xarray as xr -import numpy as np -import rioxarray -from scipy.signal import savgol_filter -import zipfile -from xrspatial import zonal_stats -import pandas as pd -import numpy as np - -sys.path.append('/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/') -from grits import * - -print('all good!') -# %% DEFINE AREA OF INTEREST -path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -file = path_vector + 'iaca_r400.shp' -field_ = gpd.read_file(file) -print(field_.grid.unique()) -#%% # some parameters to filter scenes -indices = ["NDVI","MSAVI"] # EVI, LAI,"NDMI","BSI", -#assets = ['blue','green','red','nir08','swir16','swir22'] -assets = ['red','nir08'] -path_nc = '/home/jovyan/PlanetaryComputerExamples/OUT/nc/iacanga/' -max_cloud = 30 - -datetime= '2022-05-02/2024-02-08' - -# -> Iacanga -# '2019-06-20/2022-04-01' done -# '2022-04-02/2024-02-08' done -# '2022-05-02/2024-02-08' done -# '2013-04-02/2019-06-19' - - -for grid in sorted(field_.grid.unique()): - name = f'iacanga_{grid}' - field = field_[field_['grid'] == grid] - bbox, lat_range, lon_range = get_lims(field) - ha = field.area_ha.sum() - print(f'{name} de {ha} ha - periodo {datetime}') - - items89 = query_Landsat_items(datetime=datetime, - bbox=bbox, - max_cloud=max_cloud, - landsats = [ - "landsat-8", "landsat-9" - ]) - # get the data the lazy way - data89 = ( - stackstac.stack( - items89, - assets=assets, - bounds_latlon=bbox, - epsg=4326, - )) - del data89.attrs['spec'] - - ds89 = data89.to_dataset(dim='band') - ds = ds89.rio.write_crs('4326') - - ds_ = xr.where(ds > 60000, np.nan, ds) - - # INTERPOLATE NANs - print('interpolating NaNs') - ds_ = ds_.chunk(dict(time=-1)) - ds_ = ds_.interpolate_na(dim='time', - method='pchip', - #limit = 7, - use_coordinate=True) - - smooth = True - w = 4 - sm = 'pchip_smW'+str(w) - if smooth: - print('smoothening...') - ds_ = ds_.chunk(dict(time=-1)) - ds_ = 
ds_.rolling(time=w, - center=True).mean(savgol_filter, - window = w, - polyorder=2) - - # CALCULATE INDICES - ds_ = ds_.rename({'nir08':'nir'}) - dsi = calculate_indices(ds_, - index= indices, - satellite_mission='ls', - #normalise=True, - drop=True); - - # REPROJECT - print('reprojecting') - dsi = dsi.rio.write_crs('4326') - dsi = dsi.rio.reproject('EPSG:4326') - dsi = dsi.rename({'x': 'longitude','y': 'latitude'}) - - # DROPPING STUFF - drops = ['landsat:correction','landsat:wrs_path','landsat:wrs_row', - 'landsat:collection_number','landsat:wrs_type','instruments', - 'raster:bands','sci:doi'] - dsi = dsi.drop_vars(drops) - dsi = dsi.astype('float32') - - # Saving - dt1 = datetime.split('/')[0] - dt2 = datetime.split('/')[1] - dsi.to_netcdf(f'{path_nc}/{dt1}_{dt2}_{name}.nc') - print(f'SAVED ___ {path_nc}/{dt1}_{dt2}_{name}.nc ___SAVED ') - - del dsi, ds, ds_, data89, items89, ds89 - #%% diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..ccc6bf3 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1881 @@ +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. + +[[package]] +name = "affine" +version = "2.4.0" +description = "Matrices describing affine transformation of the plane" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "affine-2.4.0-py3-none-any.whl", hash = "sha256:8a3df80e2b2378aef598a83c1392efd47967afec4242021a0b06b4c7cbc61a92"}, + {file = "affine-2.4.0.tar.gz", hash = "sha256:a24d818d6a836c131976d22f8c27b8d3ca32d0af64c1d8d29deb7bafa4da1eea"}, +] + +[package.extras] +dev = ["coveralls", "flake8", "pydocstyle"] +test = ["pytest (>=4.6)", "pytest-cov"] + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
+category = "main" +optional = false +python-versions = "*" +files = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "cligj" +version = "0.7.2" +description = "Click params for commmand line interfaces to GeoJSON" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, <4" +files = [ + {file = "cligj-0.7.2-py3-none-any.whl", hash = "sha256:c1ca117dbce1fe20a5809dc96f01e1c2840f6dcc939b3ddbb1111bf330ba82df"}, + {file = "cligj-0.7.2.tar.gz", hash = "sha256:a4bc13d623356b373c2c27c53dbd9c68cae5d526270bfa71f6c6fa69669c6b27"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +test = ["pytest-cov"] + +[[package]] +name = "cloudpickle" +version = "3.0.0" +description = "Pickler class to extend the standard pickle.Pickler functionality" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, + {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contourpy" +version = "1.2.0" +description = "Python library for calculating contours of 2D quadrilateral grids" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0274c1cb63625972c0c007ab14dd9ba9e199c36ae1a231ce45d725cbcbfd10a8"}, + {file = "contourpy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab459a1cbbf18e8698399c595a01f6dcc5c138220ca3ea9e7e6126232d102bb4"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fdd887f17c2f4572ce548461e4f96396681212d858cae7bd52ba3310bc6f00f"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d16edfc3fc09968e09ddffada434b3bf989bf4911535e04eada58469873e28e"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c203f617abc0dde5792beb586f827021069fb6d403d7f4d5c2b543d87edceb9"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b69303ceb2e4d4f146bf82fda78891ef7bcd80c41bf16bfca3d0d7eb545448aa"}, + {file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:884c3f9d42d7218304bc74a8a7693d172685c84bd7ab2bab1ee567b769696df9"}, + {file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a1b1208102be6e851f20066bf0e7a96b7d48a07c9b0cfe6d0d4545c2f6cadab"}, + {file = "contourpy-1.2.0-cp310-cp310-win32.whl", hash = 
"sha256:34b9071c040d6fe45d9826cbbe3727d20d83f1b6110d219b83eb0e2a01d79488"}, + {file = "contourpy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:bd2f1ae63998da104f16a8b788f685e55d65760cd1929518fd94cd682bf03e41"}, + {file = "contourpy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd10c26b4eadae44783c45ad6655220426f971c61d9b239e6f7b16d5cdaaa727"}, + {file = "contourpy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c6b28956b7b232ae801406e529ad7b350d3f09a4fde958dfdf3c0520cdde0dd"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebeac59e9e1eb4b84940d076d9f9a6cec0064e241818bcb6e32124cc5c3e377a"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:139d8d2e1c1dd52d78682f505e980f592ba53c9f73bd6be102233e358b401063"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e9dc350fb4c58adc64df3e0703ab076f60aac06e67d48b3848c23647ae4310e"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18fc2b4ed8e4a8fe849d18dce4bd3c7ea637758c6343a1f2bae1e9bd4c9f4686"}, + {file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:16a7380e943a6d52472096cb7ad5264ecee36ed60888e2a3d3814991a0107286"}, + {file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d8faf05be5ec8e02a4d86f616fc2a0322ff4a4ce26c0f09d9f7fb5330a35c95"}, + {file = "contourpy-1.2.0-cp311-cp311-win32.whl", hash = "sha256:67b7f17679fa62ec82b7e3e611c43a016b887bd64fb933b3ae8638583006c6d6"}, + {file = "contourpy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:99ad97258985328b4f207a5e777c1b44a83bfe7cf1f87b99f9c11d4ee477c4de"}, + {file = "contourpy-1.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:575bcaf957a25d1194903a10bc9f316c136c19f24e0985a2b9b5608bdf5dbfe0"}, + {file = "contourpy-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9e6c93b5b2dbcedad20a2f18ec22cae47da0d705d454308063421a3b290d9ea4"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:464b423bc2a009088f19bdf1f232299e8b6917963e2b7e1d277da5041f33a779"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68ce4788b7d93e47f84edd3f1f95acdcd142ae60bc0e5493bfd120683d2d4316"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7d1f8871998cdff5d2ff6a087e5e1780139abe2838e85b0b46b7ae6cc25399"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e739530c662a8d6d42c37c2ed52a6f0932c2d4a3e8c1f90692ad0ce1274abe0"}, + {file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:247b9d16535acaa766d03037d8e8fb20866d054d3c7fbf6fd1f993f11fc60ca0"}, + {file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:461e3ae84cd90b30f8d533f07d87c00379644205b1d33a5ea03381edc4b69431"}, + {file = "contourpy-1.2.0-cp312-cp312-win32.whl", hash = "sha256:1c2559d6cffc94890b0529ea7eeecc20d6fadc1539273aa27faf503eb4656d8f"}, + {file = "contourpy-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:491b1917afdd8638a05b611a56d46587d5a632cabead889a5440f7c638bc6ed9"}, + {file = "contourpy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5fd1810973a375ca0e097dee059c407913ba35723b111df75671a1976efa04bc"}, + {file = "contourpy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:999c71939aad2780f003979b25ac5b8f2df651dac7b38fb8ce6c46ba5abe6ae9"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7caf9b241464c404613512d5594a6e2ff0cc9cb5615c9475cc1d9b514218ae8"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:266270c6f6608340f6c9836a0fb9b367be61dde0c9a9a18d5ece97774105ff3e"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbd50d0a0539ae2e96e537553aff6d02c10ed165ef40c65b0e27e744a0f10af8"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11f8d2554e52f459918f7b8e6aa20ec2a3bce35ce95c1f0ef4ba36fbda306df5"}, + {file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ce96dd400486e80ac7d195b2d800b03e3e6a787e2a522bfb83755938465a819e"}, + {file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6d3364b999c62f539cd403f8123ae426da946e142312a514162adb2addd8d808"}, + {file = "contourpy-1.2.0-cp39-cp39-win32.whl", hash = "sha256:1c88dfb9e0c77612febebb6ac69d44a8d81e3dc60f993215425b62c1161353f4"}, + {file = "contourpy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:78e6ad33cf2e2e80c5dfaaa0beec3d61face0fb650557100ee36db808bfa6843"}, + {file = "contourpy-1.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be16975d94c320432657ad2402f6760990cb640c161ae6da1363051805fa8108"}, + {file = "contourpy-1.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b95a225d4948b26a28c08307a60ac00fb8671b14f2047fc5476613252a129776"}, + {file = "contourpy-1.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d7e03c0f9a4f90dc18d4e77e9ef4ec7b7bbb437f7f675be8e530d65ae6ef956"}, + {file = "contourpy-1.2.0.tar.gz", hash = "sha256:171f311cb758de7da13fc53af221ae47a5877be5a0843a9fe150818c51ed276a"}, +] + +[package.dependencies] +numpy = ">=1.20,<2.0" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.6.1)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dask" +version = "2024.3.1" +description = "Parallel PyData with Task Scheduling" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "dask-2024.3.1-py3-none-any.whl", hash = "sha256:1ac260b8716b1a9fc144c0d7f958336812cfc3ef542a3742c9ae02387189b32b"}, + {file = "dask-2024.3.1.tar.gz", hash = "sha256:78bee2ffd735514e572adaa669fc2a437ec256aecb6bec036a1f5b8dd36b2e60"}, +] + +[package.dependencies] +click = ">=8.1" +cloudpickle = ">=1.5.0" +fsspec = ">=2021.09.0" +importlib-metadata = {version = ">=4.13.0", markers = "python_version < \"3.12\""} +numpy = {version = ">=1.21", optional = true, markers = "extra == \"array\""} +packaging = ">=20.0" +partd = ">=1.2.0" 
+pyyaml = ">=5.3.1" +toolz = ">=0.10.0" + +[package.extras] +array = ["numpy (>=1.21)"] +complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)", "pyarrow-hotfix"] +dataframe = ["dask-expr (>=1.0,<1.1)", "dask[array]", "pandas (>=1.3)"] +diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] +distributed = ["distributed (==2024.3.1)"] +test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist"] + +[[package]] +name = "fiona" +version = "1.9.6" +description = "Fiona reads and writes spatial data files" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "fiona-1.9.6-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:63e528b5ea3d8b1038d788e7c65117835c787ba7fdc94b1b42f09c2cbc0aaff2"}, + {file = "fiona-1.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:918bd27d8625416672e834593970f96dff63215108f81efb876fe5c0bc58a3b4"}, + {file = "fiona-1.9.6-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:e313210b30d09ed8f829bf625599e248dadd78622728030221f6526580ff26c5"}, + {file = "fiona-1.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:89095c2d542325ee45894b8837e8048cdbb2f22274934e1be3b673ca628010d7"}, + {file = "fiona-1.9.6-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:98cea6f435843b2119731c6b0470e5b7386aa16b6aa7edabbf1ed93aefe029c3"}, + {file = "fiona-1.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f4230eccbd896a79d1ebfa551d84bf90f512f7bcbe1ca61e3f82231321f1a532"}, + {file = "fiona-1.9.6-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:48b6218224e96de5e36b5eb259f37160092260e5de0dcd82ca200b1887aa9884"}, + {file = "fiona-1.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:c1dd5fbc29b7303bb87eb683455e8451e1a53bb8faf20ef97fdcd843c9e4a7f6"}, + {file = "fiona-1.9.6-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:42d8a0e5570948d3821c493b6141866d9a4d7a64edad2be4ecbb89f81904baac"}, + {file = "fiona-1.9.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39819fb8f5ec6d9971cb01b912b4431615a3d3f50c83798565d8ce41917930db"}, + {file = "fiona-1.9.6-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:9b53034efdf93ada9295b081e6a8280af7c75496a20df82d4c2ca46d65b85905"}, + {file = "fiona-1.9.6-cp312-cp312-win_amd64.whl", hash = "sha256:1dcd6eca7524535baf2a39d7981b4a46d33ae28c313934a7c3eae62eecf9dfa5"}, + {file = "fiona-1.9.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e5404ed08c711489abcb3a50a184816825b8af06eb73ad2a99e18b8e7b47c96a"}, + {file = "fiona-1.9.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:53bedd2989e255df1bf3378ae9c06d6d241ec273c280c544bb44ffffebb97fb0"}, + {file = "fiona-1.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:77653a08564a44e634c44cd74a068d2f55d1d4029edd16d1c8aadcc4d8cc1d2c"}, + {file = "fiona-1.9.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:e7617563b36d2be99f048f0d0054b4d765f4aae454398f88f19de9c2c324b7f8"}, + {file = "fiona-1.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:50037c3b7a5f6f434b562b5b1a5b664f1caa7a4383b00af23cdb59bfc6ba852c"}, + {file = "fiona-1.9.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:bf51846ad602757bf27876f458c5c9f14b09421fac612f64273cc4e3fcabc441"}, + {file = "fiona-1.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:11af1afc1255642a7787fe112c29d01f968f1053e4d4700fc6f3bb879c1622e0"}, + {file = "fiona-1.9.6-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:52e8fec650b72fc5253d8f86b63859acc687182281c29bfacd3930496cf982d1"}, + {file = "fiona-1.9.6-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:c9b92aa1badb2773e7cac19bef3064d73e9d80c67c42f0928db2520a04be6f2f"}, + {file = "fiona-1.9.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:0eaffbf3bfae9960484c0c08ea461b0c40e111497f04e9475ebf15ac7a22d9dc"}, + {file = "fiona-1.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f1b49d51a744874608b689f029766aa1e078dd72e94b44cf8eeef6d7bd2e9051"}, + {file = "fiona-1.9.6.tar.gz", hash = "sha256:791b3494f8b218c06ea56f892bd6ba893dfa23525347761d066fb7738acda3b1"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +certifi = "*" +click = ">=8.0,<9.0" +click-plugins = ">=1.0" +cligj = ">=0.5" +six = "*" + +[package.extras] +all = ["fiona[calc,s3,test]"] +calc = ["shapely"] +s3 = ["boto3 (>=1.3.1)"] +test = ["fiona[s3]", "pytest (>=7)", "pytest-cov", "pytz"] + +[[package]] +name = "fonttools" +version = "4.50.0" +description = "Tools to manipulate font files" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.50.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effd303fb422f8ce06543a36ca69148471144c534cc25f30e5be752bc4f46736"}, + {file = "fonttools-4.50.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7913992ab836f621d06aabac118fc258b9947a775a607e1a737eb3a91c360335"}, + {file = "fonttools-4.50.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0a1c5bd2f63da4043b63888534b52c5a1fd7ae187c8ffc64cbb7ae475b9dab"}, + {file = "fonttools-4.50.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d40fc98540fa5360e7ecf2c56ddf3c6e7dd04929543618fd7b5cc76e66390562"}, + {file = "fonttools-4.50.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fff65fbb7afe137bac3113827855e0204482727bddd00a806034ab0d3951d0d"}, + {file = "fonttools-4.50.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1aeae3dd2ee719074a9372c89ad94f7c581903306d76befdaca2a559f802472"}, + {file = "fonttools-4.50.0-cp310-cp310-win32.whl", hash = "sha256:e9623afa319405da33b43c85cceb0585a6f5d3a1d7c604daf4f7e1dd55c03d1f"}, + {file = "fonttools-4.50.0-cp310-cp310-win_amd64.whl", hash = "sha256:778c5f43e7e654ef7fe0605e80894930bc3a7772e2f496238e57218610140f54"}, + {file = "fonttools-4.50.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3dfb102e7f63b78c832e4539969167ffcc0375b013080e6472350965a5fe8048"}, + {file = "fonttools-4.50.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e58fe34cb379ba3d01d5d319d67dd3ce7ca9a47ad044ea2b22635cd2d1247fc"}, + {file = "fonttools-4.50.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c673ab40d15a442a4e6eb09bf007c1dda47c84ac1e2eecbdf359adacb799c24"}, + {file = "fonttools-4.50.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b3ac35cdcd1a4c90c23a5200212c1bb74fa05833cc7c14291d7043a52ca2aaa"}, + {file = "fonttools-4.50.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8844e7a2c5f7ecf977e82eb6b3014f025c8b454e046d941ece05b768be5847ae"}, + {file = "fonttools-4.50.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f849bd3c5c2249b49c98eca5aaebb920d2bfd92b3c69e84ca9bddf133e9f83f0"}, + {file = "fonttools-4.50.0-cp311-cp311-win32.whl", hash = "sha256:39293ff231b36b035575e81c14626dfc14407a20de5262f9596c2cbb199c3625"}, + {file = "fonttools-4.50.0-cp311-cp311-win_amd64.whl", hash = "sha256:c33d5023523b44d3481624f840c8646656a1def7630ca562f222eb3ead16c438"}, + {file = "fonttools-4.50.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b4a886a6dbe60100ba1cd24de962f8cd18139bd32808da80de1fa9f9f27bf1dc"}, + {file = 
"fonttools-4.50.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b2ca1837bfbe5eafa11313dbc7edada79052709a1fffa10cea691210af4aa1fa"}, + {file = "fonttools-4.50.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0493dd97ac8977e48ffc1476b932b37c847cbb87fd68673dee5182004906828"}, + {file = "fonttools-4.50.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77844e2f1b0889120b6c222fc49b2b75c3d88b930615e98893b899b9352a27ea"}, + {file = "fonttools-4.50.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3566bfb8c55ed9100afe1ba6f0f12265cd63a1387b9661eb6031a1578a28bad1"}, + {file = "fonttools-4.50.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:35e10ddbc129cf61775d58a14f2d44121178d89874d32cae1eac722e687d9019"}, + {file = "fonttools-4.50.0-cp312-cp312-win32.whl", hash = "sha256:cc8140baf9fa8f9b903f2b393a6c413a220fa990264b215bf48484f3d0bf8710"}, + {file = "fonttools-4.50.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ccc85fd96373ab73c59833b824d7a73846670a0cb1f3afbaee2b2c426a8f931"}, + {file = "fonttools-4.50.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e270a406219af37581d96c810172001ec536e29e5593aa40d4c01cca3e145aa6"}, + {file = "fonttools-4.50.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac2463de667233372e9e1c7e9de3d914b708437ef52a3199fdbf5a60184f190c"}, + {file = "fonttools-4.50.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47abd6669195abe87c22750dbcd366dc3a0648f1b7c93c2baa97429c4dc1506e"}, + {file = "fonttools-4.50.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:074841375e2e3d559aecc86e1224caf78e8b8417bb391e7d2506412538f21adc"}, + {file = "fonttools-4.50.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0743fd2191ad7ab43d78cd747215b12033ddee24fa1e088605a3efe80d6984de"}, + {file = "fonttools-4.50.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3d7080cce7be5ed65bee3496f09f79a82865a514863197ff4d4d177389e981b0"}, + {file = "fonttools-4.50.0-cp38-cp38-win32.whl", hash = "sha256:a467ba4e2eadc1d5cc1a11d355abb945f680473fbe30d15617e104c81f483045"}, + {file = "fonttools-4.50.0-cp38-cp38-win_amd64.whl", hash = "sha256:f77e048f805e00870659d6318fd89ef28ca4ee16a22b4c5e1905b735495fc422"}, + {file = "fonttools-4.50.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b6245eafd553c4e9a0708e93be51392bd2288c773523892fbd616d33fd2fda59"}, + {file = "fonttools-4.50.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a4062cc7e8de26f1603323ef3ae2171c9d29c8a9f5e067d555a2813cd5c7a7e0"}, + {file = "fonttools-4.50.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34692850dfd64ba06af61e5791a441f664cb7d21e7b544e8f385718430e8f8e4"}, + {file = "fonttools-4.50.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678dd95f26a67e02c50dcb5bf250f95231d455642afbc65a3b0bcdacd4e4dd38"}, + {file = "fonttools-4.50.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f2ce7b0b295fe64ac0a85aef46a0f2614995774bd7bc643b85679c0283287f9"}, + {file = "fonttools-4.50.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d346f4dc2221bfb7ab652d1e37d327578434ce559baf7113b0f55768437fe6a0"}, + {file = "fonttools-4.50.0-cp39-cp39-win32.whl", hash = "sha256:a51eeaf52ba3afd70bf489be20e52fdfafe6c03d652b02477c6ce23c995222f4"}, + {file = "fonttools-4.50.0-cp39-cp39-win_amd64.whl", hash = "sha256:8639be40d583e5d9da67795aa3eeeda0488fb577a1d42ae11a5036f18fb16d93"}, + {file = "fonttools-4.50.0-py3-none-any.whl", 
hash = "sha256:48fa36da06247aa8282766cfd63efff1bb24e55f020f29a335939ed3844d20d3"}, + {file = "fonttools-4.50.0.tar.gz", hash = "sha256:fa5cf61058c7dbb104c2ac4e782bf1b2016a8cf2f69de6e4dd6a865d2c969bb5"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "fsspec" +version = "2024.3.0" +description = "File-system specification" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.3.0-py3-none-any.whl", hash = "sha256:779001bd0122c9c4975cf03827d5e86c3afb914a3ae27040f15d341ab506a693"}, + {file = "fsspec-2024.3.0.tar.gz", hash = "sha256:f13a130c0ed07e15c4e1aeb0472a823e9c426b0b5792a1f40d902b0a71972d43"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "geopandas" +version = "0.14.3" +description = "Geographic pandas extensions" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "geopandas-0.14.3-py3-none-any.whl", hash = "sha256:41b31ad39e21bc9e8c4254f78f8dc4ce3d33d144e22e630a00bb336c83160204"}, + {file = "geopandas-0.14.3.tar.gz", hash = "sha256:748af035d4a068a4ae00cab384acb61d387685c833b0022e0729aa45216b23ac"}, +] + +[package.dependencies] +fiona = ">=1.8.21" +packaging = "*" +pandas = ">=1.4.0" +pyproj = ">=3.3.0" +shapely = ">=1.8.0" + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.2" +description = "Read metadata from Python packages" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, + {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, +] + +[package.dependencies] +zipp = ">=0.5" + 
+[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jsonschema" +version = "4.21.1" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = 
"kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = 
"sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = 
"kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = 
"kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + +[[package]] +name = "locket" +version = "1.0.0" +description = "File-based locks for Python on Linux and Windows" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"}, + {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, +] + +[[package]] +name = "matplotlib" +version = "3.8.3" +description = "Python plotting package" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.8.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cf60138ccc8004f117ab2a2bad513cc4d122e55864b4fe7adf4db20ca68a078f"}, + {file = "matplotlib-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f557156f7116be3340cdeef7f128fa99b0d5d287d5f41a16e169819dcf22357"}, + {file = "matplotlib-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f386cf162b059809ecfac3bcc491a9ea17da69fa35c8ded8ad154cd4b933d5ec"}, + {file = "matplotlib-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c5f96f57b0369c288bf6f9b5274ba45787f7e0589a34d24bdbaf6d3344632f"}, + {file = "matplotlib-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:83e0f72e2c116ca7e571c57aa29b0fe697d4c6425c4e87c6e994159e0c008635"}, + {file = "matplotlib-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:1c5c8290074ba31a41db1dc332dc2b62def469ff33766cbe325d32a3ee291aea"}, + {file = "matplotlib-3.8.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5184e07c7e1d6d1481862ee361905b7059f7fe065fc837f7c3dc11eeb3f2f900"}, + {file = "matplotlib-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7e7e0993d0758933b1a241a432b42c2db22dfa37d4108342ab4afb9557cbe3e"}, + {file = "matplotlib-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04b36ad07eac9740fc76c2aa16edf94e50b297d6eb4c081e3add863de4bb19a7"}, + {file = "matplotlib-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c42dae72a62f14982f1474f7e5c9959fc4bc70c9de11cc5244c6e766200ba65"}, + {file = 
"matplotlib-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf5932eee0d428192c40b7eac1399d608f5d995f975cdb9d1e6b48539a5ad8d0"}, + {file = "matplotlib-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:40321634e3a05ed02abf7c7b47a50be50b53ef3eaa3a573847431a545585b407"}, + {file = "matplotlib-3.8.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:09074f8057917d17ab52c242fdf4916f30e99959c1908958b1fc6032e2d0f6d4"}, + {file = "matplotlib-3.8.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5745f6d0fb5acfabbb2790318db03809a253096e98c91b9a31969df28ee604aa"}, + {file = "matplotlib-3.8.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97653d869a71721b639714b42d87cda4cfee0ee74b47c569e4874c7590c55c5"}, + {file = "matplotlib-3.8.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:242489efdb75b690c9c2e70bb5c6550727058c8a614e4c7716f363c27e10bba1"}, + {file = "matplotlib-3.8.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:83c0653c64b73926730bd9ea14aa0f50f202ba187c307a881673bad4985967b7"}, + {file = "matplotlib-3.8.3-cp312-cp312-win_amd64.whl", hash = "sha256:ef6c1025a570354297d6c15f7d0f296d95f88bd3850066b7f1e7b4f2f4c13a39"}, + {file = "matplotlib-3.8.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c4af3f7317f8a1009bbb2d0bf23dfaba859eb7dd4ccbd604eba146dccaaaf0a4"}, + {file = "matplotlib-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c6e00a65d017d26009bac6808f637b75ceade3e1ff91a138576f6b3065eeeba"}, + {file = "matplotlib-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7b49ab49a3bea17802df6872f8d44f664ba8f9be0632a60c99b20b6db2165b7"}, + {file = "matplotlib-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6728dde0a3997396b053602dbd907a9bd64ec7d5cf99e728b404083698d3ca01"}, + {file = "matplotlib-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:813925d08fb86aba139f2d31864928d67511f64e5945ca909ad5bc09a96189bb"}, + {file = "matplotlib-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:cd3a0c2be76f4e7be03d34a14d49ded6acf22ef61f88da600a18a5cd8b3c5f3c"}, + {file = "matplotlib-3.8.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fa93695d5c08544f4a0dfd0965f378e7afc410d8672816aff1e81be1f45dbf2e"}, + {file = "matplotlib-3.8.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9764df0e8778f06414b9d281a75235c1e85071f64bb5d71564b97c1306a2afc"}, + {file = "matplotlib-3.8.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5e431a09e6fab4012b01fc155db0ce6dccacdbabe8198197f523a4ef4805eb26"}, + {file = "matplotlib-3.8.3.tar.gz", hash = "sha256:7b416239e9ae38be54b028abbf9048aff5054a9aba5416bef0bd17f9162ce161"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +kiwisolver = ">=1.3.1" +numpy = ">=1.21,<2" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pandas" +version = "2.2.1" +description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, + {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, + {file = 
"pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, + {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, + {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, + {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, + {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq 
(>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "partd" +version = "1.4.1" +description = "Appendable key-value storage" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "partd-1.4.1-py3-none-any.whl", hash = "sha256:27e766663d36c161e2827aa3e28541c992f0b9527d3cca047e13fb3acdb989e6"}, + {file = "partd-1.4.1.tar.gz", hash = "sha256:56c25dd49e6fea5727e731203c466c6e092f308d8f0024e199d02f6aa2167f67"}, +] + +[package.dependencies] +locket = "*" +toolz = "*" + +[package.extras] +complete = ["blosc", "numpy (>=1.9.0)", "pandas (>=0.19.0)", "pyzmq"] + +[[package]] +name = "pillow" +version = "10.2.0" +description = "Python Imaging Library (Fork)" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "planetary-computer" +version = "1.0.0" +description = "Planetary Computer SDK for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "planetary-computer-1.0.0.tar.gz", hash = "sha256:5958a8e1d8ba1aafc7ac45878df2d7d03405806ae31ed2e675333faebca960cc"}, + {file = "planetary_computer-1.0.0-py3-none-any.whl", hash = "sha256:7af5839f9346c1d23d53fff4e80e955db18a2d81992877816e22dcbc2f90c40d"}, +] + +[package.dependencies] +click = ">=7.1" +packaging = "*" +pydantic = ">=1.7.3" +pystac = ">=1.0.0" +pystac-client = ">=0.2.0" +python-dotenv = "*" +pytz = ">=2020.5" +requests = ">=2.25.1" + +[package.extras] +adlfs = ["adlfs"] +azure = ["azure-storage-blob"] +dev = ["black", "flake8", "mypy", "pytest", "responses", "setuptools", "types-requests"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + 
+[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "2.6.4" +description = "Data validation using Python type hints" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.3" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.3" +description = "" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = 
"sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + 
+[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyproj" +version = "3.6.1" +description = "Python interface to PROJ (cartographic projections and coordinate transformations library)" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pyproj-3.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab7aa4d9ff3c3acf60d4b285ccec134167a948df02347585fdd934ebad8811b4"}, + {file = "pyproj-3.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4bc0472302919e59114aa140fd7213c2370d848a7249d09704f10f5b062031fe"}, + {file = "pyproj-3.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5279586013b8d6582e22b6f9e30c49796966770389a9d5b85e25a4223286cd3f"}, + {file = "pyproj-3.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fafd1f3eb421694857f254a9bdbacd1eb22fc6c24ca74b136679f376f97d35"}, + {file = "pyproj-3.6.1-cp310-cp310-win32.whl", hash = "sha256:c41e80ddee130450dcb8829af7118f1ab69eaf8169c4bf0ee8d52b72f098dc2f"}, + {file = "pyproj-3.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:db3aedd458e7f7f21d8176f0a1d924f1ae06d725228302b872885a1c34f3119e"}, + {file = "pyproj-3.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ebfbdbd0936e178091309f6cd4fcb4decd9eab12aa513cdd9add89efa3ec2882"}, + {file = "pyproj-3.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:447db19c7efad70ff161e5e46a54ab9cc2399acebb656b6ccf63e4bc4a04b97a"}, + {file = "pyproj-3.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7e13c40183884ec7f94eb8e0f622f08f1d5716150b8d7a134de48c6110fee85"}, + {file = "pyproj-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ad699e0c830e2b8565afe42bd58cc972b47d829b2e0e48ad9638386d994915"}, + {file = "pyproj-3.6.1-cp311-cp311-win32.whl", hash = "sha256:8b8acc31fb8702c54625f4d5a2a6543557bec3c28a0ef638778b7ab1d1772132"}, + {file = "pyproj-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:38a3361941eb72b82bd9a18f60c78b0df8408416f9340521df442cebfc4306e2"}, + {file = "pyproj-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1e9fbaf920f0f9b4ee62aab832be3ae3968f33f24e2e3f7fbb8c6728ef1d9746"}, + {file = "pyproj-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d227a865356f225591b6732430b1d1781e946893789a609bb34f59d09b8b0f8"}, + {file = "pyproj-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83039e5ae04e5afc974f7d25ee0870a80a6bd6b7957c3aca5613ccbe0d3e72bf"}, + {file = "pyproj-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb059ba3bced6f6725961ba758649261d85ed6ce670d3e3b0a26e81cf1aa8d"}, + {file = "pyproj-3.6.1-cp312-cp312-win32.whl", hash = "sha256:2d6ff73cc6dbbce3766b6c0bce70ce070193105d8de17aa2470009463682a8eb"}, + {file = "pyproj-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:7a27151ddad8e1439ba70c9b4b2b617b290c39395fa9ddb7411ebb0eb86d6fb0"}, + {file = "pyproj-3.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ba1f9b03d04d8cab24d6375609070580a26ce76eaed54631f03bab00a9c737b"}, + {file = "pyproj-3.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18faa54a3ca475bfe6255156f2f2874e9a1c8917b0004eee9f664b86ccc513d3"}, + {file = "pyproj-3.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd43bd9a9b9239805f406fd82ba6b106bf4838d9ef37c167d3ed70383943ade1"}, + {file = "pyproj-3.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:50100b2726a3ca946906cbaa789dd0749f213abf0cbb877e6de72ca7aa50e1ae"}, + {file = "pyproj-3.6.1-cp39-cp39-win32.whl", hash = "sha256:9274880263256f6292ff644ca92c46d96aa7e57a75c6df3f11d636ce845a1877"}, + {file = "pyproj-3.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:36b64c2cb6ea1cc091f329c5bd34f9c01bb5da8c8e4492c709bda6a09f96808f"}, + {file = "pyproj-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd93c1a0c6c4aedc77c0fe275a9f2aba4d59b8acf88cebfc19fe3c430cfabf4f"}, + {file = "pyproj-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6420ea8e7d2a88cb148b124429fba8cd2e0fae700a2d96eab7083c0928a85110"}, + {file = "pyproj-3.6.1.tar.gz", hash = "sha256:44aa7c704c2b7d8fb3d483bbf75af6cb2350d30a63b144279a09b75fead501bf"}, +] + +[package.dependencies] +certifi = "*" + +[[package]] +name = "pystac" +version = "1.9.0" +description = "Python library for working with the SpatioTemporal Asset Catalog (STAC) specification" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pystac-1.9.0-py3-none-any.whl", hash = "sha256:64d5654166290169ad6ad2bc0d5337a1664ede1165635f0b73b327065b801a2f"}, + {file = "pystac-1.9.0.tar.gz", hash = "sha256:c6b5a86e241fca5e9267a7902c26679f208749a107e9015fe6aaf73a9dd40948"}, +] + +[package.dependencies] +jsonschema = {version = ">=4.18,<5.0", optional = true, markers = "extra == \"validation\""} +python-dateutil = ">=2.7.0" + +[package.extras] +bench = ["asv (>=0.6.0,<0.7.0)", "packaging (>=23.1,<24.0)", "virtualenv (>=20.22,<21.0)"] +docs = ["Sphinx (>=6.2,<7.0)", "boto3 (>=1.28,<2.0)", "ipython (>=8.12,<9.0)", "jinja2 (<4.0)", "jupyter (>=1.0,<2.0)", "nbsphinx (>=0.9.0,<0.10.0)", "pydata-sphinx-theme (>=0.13,<1.0)", "rasterio (>=1.3,<2.0)", "shapely (>=2.0,<3.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-design (>=0.5.0,<0.6.0)", "sphinxcontrib-fulltoc (>=1.2,<2.0)"] +jinja2 = ["jinja2 (<4.0)"] +orjson = ["orjson (>=3.5)"] +test = ["black (>=23.3,<24.0)", "codespell (>=2.2,<3.0)", "coverage (>=7.2,<8.0)", "doc8 (>=1.1,<2.0)", "html5lib (>=1.1,<2.0)", "jinja2 (<4.0)", "jsonschema (>=4.18,<5.0)", "mypy (>=1.2,<2.0)", "orjson (>=3.8,<4.0)", "pre-commit (>=3.2,<4.0)", "pytest (>=7.3,<8.0)", "pytest-cov (>=4.0,<5.0)", "pytest-mock (>=3.10,<4.0)", "pytest-recording (>=0.13.0,<0.14.0)", "requests-mock (>=1.11,<2.0)", "ruff (==0.1.1)", "types-html5lib (>=1.1,<2.0)", "types-jsonschema (>=4.18,<5.0)", "types-orjson (>=3.6,<4.0)", "types-python-dateutil (>=2.8,<3.0)", "types-urllib3 (>=1.26,<2.0)"] +urllib3 = ["urllib3 (>=1.26)"] +validation = ["jsonschema (>=4.18,<5.0)"] + +[[package]] +name = "pystac-client" +version = "0.7.6" +description = "Python library for working with SpatioTemporal Asset Catalog (STAC) APIs." 
+category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pystac-client-0.7.6.tar.gz", hash = "sha256:9a45cd0296e48a55c5e2cb77a752e2236a7226e96b4804584851d1c6c52b0345"}, + {file = "pystac_client-0.7.6-py3-none-any.whl", hash = "sha256:221800ea54f408557fc947be08928638fb83b3e7506d3629f9a7ad594c577975"}, +] + +[package.dependencies] +pystac = {version = ">=1.8.2", extras = ["validation"]} +python-dateutil = ">=2.8.2" +requests = ">=2.28.2" + +[package.extras] +dev = ["black (>=24.0,<25.0)", "codespell (>=2.2.4,<2.3.0)", "coverage (>=7.2,<8.0)", "doc8 (>=1.1.1,<1.2.0)", "importlib-metadata (>=7.0,<8.0)", "mypy (>=1.2,<2.0)", "orjson (>=3.8,<4.0)", "pre-commit (>=3.2,<4.0)", "pytest (>=8.0,<9.0)", "pytest-benchmark (>=4.0.0,<4.1.0)", "pytest-console-scripts (>=1.4.0,<1.5.0)", "pytest-cov (>=4.1.0,<4.2.0)", "pytest-recording (>=0.13,<1.0)", "recommonmark (>=0.7.1,<0.8.0)", "requests-mock (>=1.11.0,<1.12.0)", "ruff (==0.2.2)", "tomli (>=2.0,<3.0)", "types-python-dateutil (>=2.8.19,<2.9.0)", "types-requests (>=2.31.0,<2.32.0)", "urllib3 (<2)"] +docs = ["Sphinx (>=6.2,<7.0)", "boto3 (>=1.26,<2.0)", "cartopy (>=0.21,<1.0)", "geojson (>=3.1.0,<3.2.0)", "geopandas (>=0.14.0,<0.15.0)", "geoviews (>=1.9,<2.0)", "hvplot (>=0.9.0,<0.10.0)", "ipykernel (>=6.22,<7.0)", "ipython (>=8.12,<9.0)", "jinja2 (<4.0)", "matplotlib (>=3.8,<4.0)", "myst-parser (>=2.0,<3.0)", "nbsphinx (>=0.9,<1.0)", "pydata-sphinx-theme (>=0.13,<1.0)", "pygeoif (>=1.0,<2.0)", "scipy (>=1.10,<2.0)", "sphinxcontrib-fulltoc (>=1.2,<2.0)"] + +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" +files = [ + 
{file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rasterio" +version = "1.3.9" +description = "Fast and direct raster I/O for use with Numpy and SciPy" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rasterio-1.3.9-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:04247da9f4002587ac2bec967c3a72f63fc0e6654101c06850bae3d8131b700d"}, + {file = "rasterio-1.3.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c9edce37b70f4cd4be5d3f5d314877e3130aeebb612120405cd28f83fe200865"}, + {file = "rasterio-1.3.9-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:fd6a850a37840ba590ddcf7ff90ba007b1e231b04434d8b4ac5ce0f746ada91a"}, + {file = "rasterio-1.3.9-cp310-cp310-win_amd64.whl", hash = "sha256:0c83156a44f8fda11876ff9f2ff1b602d7e7434447f7d621353f2929cefb1bf1"}, + {file = "rasterio-1.3.9-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:0172dbd80bd9adc105ec2c9bd207dbd5519ea06b438a4d965c6290ae8ed6ff9f"}, + {file = "rasterio-1.3.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ea5b42597d85868ee88c750cc33f2ae729e1b5e3fe28f99071f39e1417bf1c0"}, + {file = "rasterio-1.3.9-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:be9b343bd08245df22115775dc9513c912afb4134d832662fa165d70cb805c34"}, + {file = "rasterio-1.3.9-cp311-cp311-win_amd64.whl", hash = "sha256:06d53e2e0885f039f960beb7c861400b92ea3e0e5abc2c67483fb56b1e5cbc13"}, + {file = "rasterio-1.3.9-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:a34bb9eef67b7896e2dfb39e10ba6372f9894226fb790bd7a46f5748f205b7d8"}, + {file = "rasterio-1.3.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:67b144b9678f9ad4cf5f2c3f455cbc6a7166c0523179249cee8f2e2c57d76c5b"}, + {file = "rasterio-1.3.9-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:99b72fccb702a921f43e56a4507b4cafe2a9196b478b993b98e82ec6851916d7"}, + {file = "rasterio-1.3.9-cp312-cp312-win_amd64.whl", hash = "sha256:6777fad3c31eb3e5da0ccaa28a032ad07c20d003bcd14f8bc13e16ca2f62348c"}, + {file = "rasterio-1.3.9-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:55bb1a2701dd67c1952b261a2ffbabd947a435d4457f13c25092a32ab7a4b36e"}, + {file = "rasterio-1.3.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:726d8e8884359c34f672312171310052d5483af550ef00fb4f2562cc022a6f5a"}, + {file = "rasterio-1.3.9-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:f65879415df188fdc9388ccf2ee01e0659abae370d12518a17b60151e7d04efe"}, + {file = "rasterio-1.3.9-cp38-cp38-win_amd64.whl", hash = "sha256:89771b70ee722c4cc808e2a6139b367bef1a736ecd497b311b3515d78a5d16bc"}, + {file = "rasterio-1.3.9-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:14df8413c030b04e54d478d6ecec4e5958b46585c3cb970bf0dc19b4831146c8"}, + {file = "rasterio-1.3.9-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:911e54e0bb97c456a045f6d8e24b00aeb055a235d2aa7c2c1f9128f4c6c7a52d"}, + {file = "rasterio-1.3.9-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:01e428ee5ba8444f5cb4fff56225acb1ab9bc8b77209b6e4198e04565d8a8509"}, + {file = "rasterio-1.3.9-cp39-cp39-win_amd64.whl", hash = "sha256:26d9aea05b035927647bb32cc04fad0a68346a2f5186224dc1c2555c33515183"}, + {file = "rasterio-1.3.9.tar.gz", hash = "sha256:fc6d0d290492fa1a5068711cfebb21cc936968891b7ed9da0690c8a7388885c5"}, +] + +[package.dependencies] +affine = "*" +attrs = "*" +certifi = "*" +click = ">=4.0" +click-plugins = "*" +cligj = ">=0.5" +numpy = "*" +setuptools = "*" +snuggs = ">=1.4.1" + +[package.extras] +all = ["boto3 (>=1.2.4)", "ghp-import", "hypothesis", "ipython (>=2.0)", "matplotlib", "numpydoc", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely", "sphinx", "sphinx-rtd-theme"] +docs = ["ghp-import", "numpydoc", "sphinx", "sphinx-rtd-theme"] +ipython = ["ipython (>=2.0)"] +plot = ["matplotlib"] +s3 = ["boto3 (>=1.2.4)"] +test = ["boto3 (>=1.2.4)", "hypothesis", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely"] + +[[package]] +name = "referencing" +version = "0.34.0" +description = "JSON Referencing + Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"}, + {file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rpds-py" +version = "0.18.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = 
"rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, +] + +[[package]] +name = "ruff" +version = "0.3.3" +description = "An extremely fast Python linter and code formatter, written in Rust." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:973a0e388b7bc2e9148c7f9be8b8c6ae7471b9be37e1cc732f8f44a6f6d7720d"}, + {file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfa60d23269d6e2031129b053fdb4e5a7b0637fc6c9c0586737b962b2f834493"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eca7ff7a47043cf6ce5c7f45f603b09121a7cc047447744b029d1b719278eb5"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7d3f6762217c1da954de24b4a1a70515630d29f71e268ec5000afe81377642d"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b24c19e8598916d9c6f5a5437671f55ee93c212a2c4c569605dc3842b6820386"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5a6cbf216b69c7090f0fe4669501a27326c34e119068c1494f35aaf4cc683778"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352e95ead6964974b234e16ba8a66dad102ec7bf8ac064a23f95371d8b198aab"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d6ab88c81c4040a817aa432484e838aaddf8bfd7ca70e4e615482757acb64f8"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79bca3a03a759cc773fca69e0bdeac8abd1c13c31b798d5bb3c9da4a03144a9f"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2700a804d5336bcffe063fd789ca2c7b02b552d2e323a336700abb8ae9e6a3f8"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd66469f1a18fdb9d32e22b79f486223052ddf057dc56dea0caaf1a47bdfaf4e"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45817af234605525cdf6317005923bf532514e1ea3d9270acf61ca2440691376"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0da458989ce0159555ef224d5b7c24d3d2e4bf4c300b85467b08c3261c6bc6a8"}, + {file = "ruff-0.3.3-py3-none-win32.whl", hash = "sha256:f2831ec6a580a97f1ea82ea1eda0401c3cdf512cf2045fa3c85e8ef109e87de0"}, + {file = "ruff-0.3.3-py3-none-win_amd64.whl", hash = "sha256:be90bcae57c24d9f9d023b12d627e958eb55f595428bafcb7fec0791ad25ddfc"}, + {file = "ruff-0.3.3-py3-none-win_arm64.whl", hash = "sha256:0171aab5fecdc54383993389710a3d1227f2da124d76a2784a7098e818f92d61"}, + {file = "ruff-0.3.3.tar.gz", hash = "sha256:38671be06f57a2f8aba957d9f701ea889aa5736be806f18c0cd03d6ff0cbca8d"}, +] + +[[package]] +name = "scipy" +version = "1.12.0" +description = "Fundamental algorithms for scientific computing in Python" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scipy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78e4402e140879387187f7f25d91cc592b3501a2e51dfb320f48dfb73565f10b"}, + {file = "scipy-1.12.0-cp310-cp310-macosx_12_0_arm64.whl", 
hash = "sha256:f5f00ebaf8de24d14b8449981a2842d404152774c1a1d880c901bf454cb8e2a1"}, + {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e53958531a7c695ff66c2e7bb7b79560ffdc562e2051644c5576c39ff8efb563"}, + {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e32847e08da8d895ce09d108a494d9eb78974cf6de23063f93306a3e419960c"}, + {file = "scipy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c1020cad92772bf44b8e4cdabc1df5d87376cb219742549ef69fc9fd86282dd"}, + {file = "scipy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:75ea2a144096b5e39402e2ff53a36fecfd3b960d786b7efd3c180e29c39e53f2"}, + {file = "scipy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:408c68423f9de16cb9e602528be4ce0d6312b05001f3de61fe9ec8b1263cad08"}, + {file = "scipy-1.12.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5adfad5dbf0163397beb4aca679187d24aec085343755fcdbdeb32b3679f254c"}, + {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3003652496f6e7c387b1cf63f4bb720951cfa18907e998ea551e6de51a04467"}, + {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8066bce124ee5531d12a74b617d9ac0ea59245246410e19bca549656d9a40a"}, + {file = "scipy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8bee4993817e204d761dba10dbab0774ba5a8612e57e81319ea04d84945375ba"}, + {file = "scipy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a24024d45ce9a675c1fb8494e8e5244efea1c7a09c60beb1eeb80373d0fecc70"}, + {file = "scipy-1.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e7e76cc48638228212c747ada851ef355c2bb5e7f939e10952bc504c11f4e372"}, + {file = "scipy-1.12.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f7ce148dffcd64ade37b2df9315541f9adad6efcaa86866ee7dd5db0c8f041c3"}, + {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c39f92041f490422924dfdb782527a4abddf4707616e07b021de33467f917bc"}, + {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ebda398f86e56178c2fa94cad15bf457a218a54a35c2a7b4490b9f9cb2676c"}, + {file = "scipy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:95e5c750d55cf518c398a8240571b0e0782c2d5a703250872f36eaf737751338"}, + {file = "scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c"}, + {file = "scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35"}, + {file = "scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067"}, + {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371"}, + {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490"}, + {file = "scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc"}, + {file = "scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e"}, + {file = "scipy-1.12.0.tar.gz", hash = "sha256:4bf5abab8a36d20193c698b0f1fc282c1d083c94723902c447e5d2f1780936a3"}, +] + +[package.dependencies] 
+numpy = ">=1.22.4,<1.29.0" + +[package.extras] +dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] +test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shapely" +version = "2.0.3" +description = "Manipulation and analysis of geometric objects" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shapely-2.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:af7e9abe180b189431b0f490638281b43b84a33a960620e6b2e8d3e3458b61a1"}, + {file = "shapely-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98040462b36ced9671e266b95c326b97f41290d9d17504a1ee4dc313a7667b9c"}, + {file = "shapely-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71eb736ef2843f23473c6e37f6180f90f0a35d740ab284321548edf4e55d9a52"}, + {file = "shapely-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:881eb9dbbb4a6419667e91fcb20313bfc1e67f53dbb392c6840ff04793571ed1"}, + {file = "shapely-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10d2ccf0554fc0e39fad5886c839e47e207f99fdf09547bc687a2330efda35b"}, + {file = "shapely-2.0.3-cp310-cp310-win32.whl", hash = "sha256:6dfdc077a6fcaf74d3eab23a1ace5abc50c8bce56ac7747d25eab582c5a2990e"}, + {file = "shapely-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:64c5013dacd2d81b3bb12672098a0b2795c1bf8190cfc2980e380f5ef9d9e4d9"}, + {file = "shapely-2.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:56cee3e4e8159d6f2ce32e421445b8e23154fd02a0ac271d6a6c0b266a8e3cce"}, + {file = "shapely-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:619232c8276fded09527d2a9fd91a7885ff95c0ff9ecd5e3cb1e34fbb676e2ae"}, + {file = "shapely-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2a7d256db6f5b4b407dc0c98dd1b2fcf1c9c5814af9416e5498d0a2e4307a4b"}, + {file = "shapely-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45f0c8cd4583647db3216d965d49363e6548c300c23fd7e57ce17a03f824034"}, + {file = "shapely-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13cb37d3826972a82748a450328fe02a931dcaed10e69a4d83cc20ba021bc85f"}, + {file = "shapely-2.0.3-cp311-cp311-win32.whl", hash = "sha256:9302d7011e3e376d25acd30d2d9e70d315d93f03cc748784af19b00988fc30b1"}, + {file = "shapely-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6b464f2666b13902835f201f50e835f2f153f37741db88f68c7f3b932d3505fa"}, + {file = "shapely-2.0.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e86e7cb8e331a4850e0c2a8b2d66dc08d7a7b301b8d1d34a13060e3a5b4b3b55"}, + {file = "shapely-2.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c91981c99ade980fc49e41a544629751a0ccd769f39794ae913e53b07b2f78b9"}, + {file = "shapely-2.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd45d456983dc60a42c4db437496d3f08a4201fbf662b69779f535eb969660af"}, + {file = "shapely-2.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:882fb1ffc7577e88c1194f4f1757e277dc484ba096a3b94844319873d14b0f2d"}, + {file = "shapely-2.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9f2d93bff2ea52fa93245798cddb479766a18510ea9b93a4fb9755c79474889"}, + {file = "shapely-2.0.3-cp312-cp312-win32.whl", hash = "sha256:99abad1fd1303b35d991703432c9481e3242b7b3a393c186cfb02373bf604004"}, + {file = "shapely-2.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:6f555fe3304a1f40398977789bc4fe3c28a11173196df9ece1e15c5bc75a48db"}, + {file = "shapely-2.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983cc418c1fa160b7d797cfef0e0c9f8c6d5871e83eae2c5793fce6a837fad9"}, + {file = "shapely-2.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18bddb8c327f392189a8d5d6b9a858945722d0bb95ccbd6a077b8e8fc4c7890d"}, + {file = "shapely-2.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:442f4dcf1eb58c5a4e3428d88e988ae153f97ab69a9f24e07bf4af8038536325"}, + {file = "shapely-2.0.3-cp37-cp37m-win32.whl", hash = "sha256:31a40b6e3ab00a4fd3a1d44efb2482278642572b8e0451abdc8e0634b787173e"}, + {file = "shapely-2.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:59b16976c2473fec85ce65cc9239bef97d4205ab3acead4e6cdcc72aee535679"}, + {file = "shapely-2.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:705efbce1950a31a55b1daa9c6ae1c34f1296de71ca8427974ec2f27d57554e3"}, + {file = "shapely-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:601c5c0058a6192df704cb889439f64994708563f57f99574798721e9777a44b"}, + {file = "shapely-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f24ecbb90a45c962b3b60d8d9a387272ed50dc010bfe605f1d16dfc94772d8a1"}, + {file = "shapely-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8c2a2989222c6062f7a0656e16276c01bb308bc7e5d999e54bf4e294ce62e76"}, + {file = "shapely-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42bceb9bceb3710a774ce04908fda0f28b291323da2688f928b3f213373b5aee"}, + {file = "shapely-2.0.3-cp38-cp38-win32.whl", hash = "sha256:54d925c9a311e4d109ec25f6a54a8bd92cc03481a34ae1a6a92c1fe6729b7e01"}, + {file = 
"shapely-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:300d203b480a4589adefff4c4af0b13919cd6d760ba3cbb1e56275210f96f654"}, + {file = "shapely-2.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:083d026e97b6c1f4a9bd2a9171c7692461092ed5375218170d91705550eecfd5"}, + {file = "shapely-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:27b6e1910094d93e9627f2664121e0e35613262fc037051680a08270f6058daf"}, + {file = "shapely-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:71b2de56a9e8c0e5920ae5ddb23b923490557ac50cb0b7fa752761bf4851acde"}, + {file = "shapely-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d279e56bbb68d218d63f3efc80c819cedcceef0e64efbf058a1df89dc57201b"}, + {file = "shapely-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88566d01a30f0453f7d038db46bc83ce125e38e47c5f6bfd4c9c287010e9bf74"}, + {file = "shapely-2.0.3-cp39-cp39-win32.whl", hash = "sha256:58afbba12c42c6ed44c4270bc0e22f3dadff5656d711b0ad335c315e02d04707"}, + {file = "shapely-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5026b30433a70911979d390009261b8c4021ff87c7c3cbd825e62bb2ffa181bc"}, + {file = "shapely-2.0.3.tar.gz", hash = "sha256:4d65d0aa7910af71efa72fd6447e02a8e5dd44da81a983de9d736d6e6ccbe674"}, +] + +[package.dependencies] +numpy = ">=1.14,<2" + +[package.extras] +docs = ["matplotlib", "numpydoc (>=1.1.0,<1.2.0)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snuggs" +version = "1.4.7" +description = "Snuggs are s-expressions for Numpy" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "snuggs-1.4.7-py3-none-any.whl", hash = "sha256:988dde5d4db88e9d71c99457404773dabcc7a1c45971bfbe81900999942d9f07"}, + {file = "snuggs-1.4.7.tar.gz", hash = "sha256:501cf113fe3892e14e2fee76da5cd0606b7e149c411c271898e6259ebde2617b"}, +] + +[package.dependencies] +numpy = "*" +pyparsing = ">=2.1.6" + +[package.extras] +test = ["hypothesis", "pytest"] + +[[package]] +name = "stackstac" +version = "0.5.0" +description = "Load a STAC collection into xarray with dask" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "stackstac-0.5.0-py3-none-any.whl", hash = "sha256:95cdb0e9d32f36996d8a3578a8c1804aeab349a862e535ae76b3358cc40bf261"}, + {file = "stackstac-0.5.0.tar.gz", hash = "sha256:d3174e7eda6ea97807961c9c8bf7e2d1c73d5b7135b14244e4080ed2a605e67a"}, +] + +[package.dependencies] +dask = {version = ">=2022.1.1", extras = ["array"]} +pyproj = ">=3.0.0,<4.0.0" +rasterio = ">=1.3.0,<2.0.0" +xarray = ">=0.18" + +[package.extras] +viz = ["Pillow (>=9.0.1,<10.0.0)", "aiohttp (>=3.7.4,<4.0.0)", "cachetools (>=4.2.2,<5.0.0)", "distributed (>=2022.1.1)", "ipyleaflet (>=0.13.6,<1.0.0)", "ipywidgets (>=7.6.3,<8.0.0)", "jupyter-server-proxy (>=3.2)", "matplotlib (>=3.4.1)", "mercantile (>=1.1.6,<2.0.0)", "scipy (>=1.6.1,<2.0.0)"] + +[[package]] +name = "toolz" +version = "0.12.1" +description = "List processing tools and functional utilities" +category = "main" +optional = false 
+python-versions = ">=3.7" +files = [ + {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, + {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "xarray" +version = "2024.2.0" +description = "N-D labeled arrays and datasets in Python" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "xarray-2024.2.0-py3-none-any.whl", hash = "sha256:a31a9b37e39bd5aeb098070a75d6dd4d59019eb339d735b86108b9e0cb391f94"}, + {file = "xarray-2024.2.0.tar.gz", hash = "sha256:a105f02791082c888ebe2622090beaff2e7b68571488d62fe6afdab35b4b717f"}, +] + +[package.dependencies] +numpy = ">=1.23" +packaging = ">=22" +pandas = ">=1.5" + +[package.extras] +accel = ["bottleneck", "flox", "numbagg", "opt-einsum", "scipy"] +complete = ["xarray[accel,dev,io,parallel,viz]"] +dev = ["hypothesis", "pre-commit", "pytest", "pytest-cov", "pytest-env", "pytest-timeout", "pytest-xdist", "ruff", "xarray[complete]"] +io = ["cftime", "fsspec", "h5netcdf", "netCDF4", "pooch", "pydap", "scipy", "zarr"] +parallel = ["dask[complete]"] +viz = ["matplotlib", "nc-time-axis", "seaborn"] + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", 
"pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "8f44439a80cb47fca0f806882773d1aad46332f8bba7f135fbf0d2a8c74b8e8c" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..be52128 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,46 @@ +[tool.poetry] +name = "pcgrits" +version = "0.1.0" +description = "" +authors = ["Denis "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.11" +geopandas = "^0.14.3" +stackstac = "^0.5.0" +xarray = "^2024.2.0" +scipy = "^1.12.0" +pystac-client = "^0.7.6" +planetary-computer = "^1.0.0" +ruff = "^0.3.3" +matplotlib = "^3.8.3" +pytest = "^8.1.1" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + + +[tool.black] +line-length = 120 + +[tool.ruff] +line-length = 120 +lint.extend-select = [ + "I001", # unsorted-imports + "RUF100", # unused-noqa +] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra -q" +pythonpath = "." +testpaths = [ + "tests", +] + diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..35248cc --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,31 @@ +import pytest + +import pandas as pd +import numpy as np +import xarray as xr + + +@pytest.fixture +def xarray_dataset(): + np.random.seed(0) + temperature = 15 + 8 * np.random.randn(2, 2, 3) + precipitation = 10 * np.random.rand(2, 2, 3) + lon = [[-99.83, -99.32], [-99.79, -99.23]] + lat = [[42.25, 42.21], [42.63, 42.59]] + time = pd.date_range("2014-09-06", periods=3) + reference_time = pd.Timestamp("2014-09-05") + + ds = xr.Dataset( + data_vars=dict( + temperature=(["x", "y", "time"], temperature), + precipitation=(["x", "y", "time"], precipitation), + ), + coords=dict( + lon=(["x", "y"], lon), + lat=(["x", "y"], lat), + time=time, + reference_time=reference_time, + ), + attrs=dict(description="Weather related data."), + ) + return ds \ No newline at end of file diff --git a/tests/utils/test_grits.py b/tests/utils/test_grits.py new file mode 100644 index 0000000..c25aabe --- /dev/null +++ b/tests/utils/test_grits.py @@ -0,0 +1,13 @@ +from pandas._libs.testing import assert_almost_equal + +from utils.grits import get_min_and_max_values + + +def test_get_min_and_max_values(xarray_dataset): + # um xarray_dataset qualquer + # quando eu buscar pelo min/max dos quantis de referência + min_max_lim_map = get_min_and_max_values(xarray_dataset, ["temperature", "precipitation"]) + + # espero ter o valor dos min e max corretamente + assert_almost_equal(min_max_lim_map["precipitation"], [0.2580834, 9.72785955]) + assert_almost_equal(min_max_lim_map["temperature"], [7.90858715, 32.59861061]) \ No newline at end of file diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/utils/get_IVs_unify.py b/utils/get_IVs_unify.py new file mode 100644 index 0000000..c48506e --- /dev/null +++ b/utils/get_IVs_unify.py @@ -0,0 +1,380 @@ +# %% +print(""" + Vegetation Indices series extraction + from Landsat series + + --- + created by Denis Mariano + denis@seca.space + www.seca.space + 2023-12 + ToDo's + - verificar porque EVI e LAI não estão displaying no valuetool + - TEM QUE DAR UM TRATO NOS VALUES + - agregar no tempo, zscores + - plots + - extraction + + """) + +# %% +import time + +start = time.time() + +import matplotlib.pyplot as plt +from datetime import date +import sys +import subprocess +import 
pkg_resources + +required = {"rasterstats", "odc-ui"} +installed = {pkg.key for pkg in pkg_resources.working_set} +missing = required - installed + +if missing: + python = sys.executable + subprocess.check_call([python, "-m", "pip", "install", *missing], stdout=subprocess.DEVNULL) +else: + print(f"Required packages {required} already installed.") + +import geopandas as gpd +import stackstac +import xarray as xr +import numpy as np +import rioxarray +from scipy.signal import savgol_filter +import zipfile +from xrspatial import zonal_stats +import pandas as pd +import numpy as np + +sys.path.append("/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/") +from grits import * + +print("all good!") +# %% DEFINE AREA OF INTEREST +# ========================= +# %% Uniguiri Farm +# name = 'Uniguiri_full_' + +# # AOI file and layer (for GPKG) +# path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' +# file = path_vector + 'fazenda_uniguiri.gpkg' +# layer = 'piquetes_tid' + +# # Get FIELD +# field = gpd.read_file(file, layer=layer) +# #field = field[field['Re'] == 80000] + +# bbox, lat_range, lon_range = get_lims(field) +# print(field.head()) +# field.plot(column='TID') + +# %% THE CAR WAY +""" + XXX ler o gpkg do MT leva 30 segundos, não está bom + +""" +car = "MT-5103601-948E6FB555E3445CB7E0538F61483371" +car = "MT-5104807-84F5196D22B847C1BD91AA27DB598BC1" +car = "SP-3548906-AEEFC5ECB2EF42AF9721E496EC7678D9" # Embrapa Pecuaria Sudeste +car = "MT-5107941-3E795652613843F98A703C84BCF9CDA6" # Tabapora +# %% +if car: + name = car + gdf = gpd.read_file("/home/jovyan/PlanetaryComputerExamples/vetorial/CAR/MT_CAR_AREA_IMOVEL_.gpkg") + field = gdf[gdf["cod_imovel"] == name] + + bbox, lat_range, lon_range = get_lims(field) + print(field.head()) + del gdf + print(f"área da fazenda = {field.geometry.to_crs(6933).area.values[0]/10000:.1f} ha") + field.plot() + +# %% Embrapa Sao Carlos +name = "embrapa_saocarlos" +path_vector = "/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/" +file = path_vector + "fazenda_embrapa.gpkg" +layer = "talhoes" + +# Get FIELD +field = gpd.read_file(file, layer=layer) +# field = field[field['Re'] == 80000] + +bbox, lat_range, lon_range = get_lims(field) +print(field.head()) +field.plot(column="tid") +plt.title(name) + + +# %% Define period and output path +datetime = "1985-01-01/" + str(date.today()) +datetime = "1985-01-01/2022-04-01" # +str(date.today()) +# para embrapa sanca deu erro em 2022-04-02 +# datetime='2015-01-01/2017-01-01' +print(datetime) + +# Parameters to save raster data? 
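+# `zscores` gates the monthly z-score block further down and `savecsv` gates
+# the farm masking / zonal-stats extraction; `savenc` and `column` are set
+# here but do not appear to be read again in this script (the final
+# extract_fz_timeseries call passes "mod_fiscal" directly).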
+savenc = True +zscores = True +path_nc = "/home/jovyan/PlanetaryComputerExamples/OUT/nc/" + +# parameters for extracting data +savecsv = True +column = "TID" +path_csv = "/home/jovyan/PlanetaryComputerExamples/OUT/csv/" + +# some parameters to filter scenes +max_cloud = 30 +# %% QUERY LANDSAT +items57 = query_Landsat_items( + datetime=datetime, + bbox=bbox, + max_cloud=max_cloud, + landsats=[ + "landsat-5", + "landsat-7", + ], +) + +items89 = query_Landsat_items(datetime=datetime, bbox=bbox, max_cloud=max_cloud, landsats=["landsat-8", "landsat-9"]) + +# %% LOAD BANDS +indices = ["NDVI", "LAI", "BSI", "MSAVI", "NDMI"] # EVI, LAI +assets = ["blue", "green", "red", "nir08", "swir16", "swir22"] +# get the data the lazy way +data89 = stackstac.stack( + items89, + assets=assets, + bounds_latlon=bbox, + epsg=4326, +) +del data89.attrs["spec"] + +data57 = stackstac.stack( + items57, + assets=assets, + bounds_latlon=bbox, + epsg=4326, +) +del data57.attrs["spec"] + +# %% MATCH REPROJECTION using rioxarray +print(f"matching datasets ... ") +ds57 = data57.to_dataset(dim="band") +ds57 = ds57.rio.write_crs("4326") +ds89 = data89.to_dataset(dim="band") +ds89 = ds89.rio.write_crs("4326") + +ds57 = ds57.rio.reproject_match(ds89) + +# %% CONCAT DATASETS + +ds = xr.concat([ds57, ds89], dim="time", join="outer") +ds = ds.sortby("time") + +# REPROJECT +# %% +print("reprojecting") +ds = ds.rio.write_crs("4326") +ds = ds.rio.reproject("EPSG:4326") +ds = ds.rename({"x": "longitude", "y": "latitude"}) + +# %% clip nodata and run simple diagnostic +ds_ = xr.where(ds > 60000, np.nan, ds) + +# for var in list(ds.data_vars): +# print(var, ds_[var].quantile([.01,.1,.5,.9,.99], skipna=True), '\n') + +# %% + +# INTERPOLATE NANs +print("interpolating NaNs") +ds_ = ds_.interpolate_na( + dim="time", + method="pchip", + # limit = 7, + use_coordinate=True, +) + +# %% XXX SMOOTHENING WOULD BE COOL + +smooth = True +w = 4 +sm = "pchip_smW" + str(w) +if smooth: + print("smoothening...") + ds_ = ds_.chunk(dict(time=-1)) + ds_ = ds_.rolling(time=w, center=True).mean(savgol_filter, window=w, polyorder=2) + + +# %% CALCULATE INDICES + +ds_ = ds_.rename({"nir08": "nir"}) +dsi = calculate_indices( + ds_, + index=indices, + satellite_mission="ls", + # normalise=True, + drop=True, +) +dsi +# %% REPROJECT +print("reprojecting") +dsi = dsi.rio.write_crs("4326") +dsi = dsi.rio.reproject("EPSG:4326") +dsi = dsi.rename({"x": "longitude", "y": "latitude"}) +# %% +# DROPPING STUFF +drops = [ + "landsat:correction", + "landsat:wrs_path", + "landsat:wrs_row", + "landsat:collection_number", + "landsat:wrs_type", + "instruments", + "raster:bands", + "sci:doi", +] +dsi = dsi.drop_vars(drops) +dsi = dsi.astype("float32") + +# %% +dsi.to_netcdf(f"{path_nc}/{name}_IVs.nc") +# XXX BSI e NDVI ok, LAI e EVI weird + +# #%% +# for iv in indices: +# dsi[iv].to_netcdf(f'{path_nc}/{name}_{iv}.nc') + +# %% XXX OS INDICES SAO GERADOS APARENTEMENTE OK + +lat = field.geometry.centroid.y.values[0] +lon = field.geometry.centroid.x.values[0] + +for iv in indices: + dsi[iv].sel(latitude=lat, longitude=lon, method="nearest").plot() + plt.grid() + plt.show() + plt.close() + + +# %% IVS Climatology +Cdsi = dsi.groupby("time.month").mean(skipna=True) + +Cdsi.load() + +# %% +Cdsi.to_netcdf(f"{path_nc}/{name}_IVs_cli.nc") + +# %% +for iv in indices: + Cdsi[iv].sel(latitude=lat, longitude=lon, method="nearest").plot() + plt.grid() + plt.plot() + plt.show() + +# %% +for iv in indices: + dsi[iv].sel(latitude=lat, longitude=lon, method="nearest").plot() + plt.grid() + 
plt.plot()
+    plt.show()
+
+
+# %%
+if zscores:
+    print("calculating zscores")
+    dsi_mean = dsi.groupby("time.month").mean(dim="time")
+    dsi_std = dsi.groupby("time.month").std(dim="time")
+
+    dsi_anom = dsi.groupby("time.month") - dsi_mean
+    dsi_z = dsi_anom.groupby("time.month") / dsi_std
+
+    dsi_z.to_netcdf(f"{path_nc}/{name}_Z-{sm}.nc")
+    print("zscores saved")
+
+print(f"{time.time()-start} seconds")
+
+
+# XXX XXX XXX XXX ... ,,, XXX XXX
+# %% THE EXTRACTION MISSION
+def mask_farm(field, dst):
+    mask = xr_rasterize(field, dst)
+    # #mask data
+    dst = dst.where(mask)
+    # #convert to float 32 to conserve memory
+    # ds = ds.astype(np.int16) * 1000
+    dst = dst.astype(np.float32)
+    print("Farm masked outside of boundaries!")
+    return dst
+
+
+if savecsv:
+    print("Masking farm")
+    dam = mask_farm(field, dsi)
+
+
+# %% Create zones for paddocks
+def farm_zones(field, data, column, ochunk=64):
+    fz = xr_rasterize(field, data, attribute_col=column, verbose=True)
+    fz = fz.chunk(ochunk)
+    fz.astype("int16")
+    return fz
+
+
+start = time.time()
+
+
+def extract_fz_timeseries(dst, data, field, column, path_csv, name, suffix, band, ochunk=64, verbose=False):
+    """
+    Extract time-series for farm zones for one variable
+    band is, for example in LST 89, 'lwir11'
+    """
+    fz = farm_zones(field, data, column, ochunk)
+    tozip = []
+    dstrc = dst.chunk(ochunk)
+
+    #
+    tempo = pd.to_datetime(dam.time.values)
+    anos = np.unique([str(x) for x in tempo.year])
+
+    for ano in anos[:-1]:
+        # get stats for the first dataframe
+        print(f"working on {ano}")
+        data_ = dstrc[band].sel(time=dstrc[band].time.values[0]).squeeze()
+        data_ = data_.sel(time=slice(ano + "-01-01", str(int(ano) + 1) + "12-31"))
+        print(f"computing stats for the first date of year {ano}")
+        outst = zonal_stats(zones=fz, values=data_).compute()
+        outst["date"] = str(dstrc[band].time.values[0])
+        data_.close()
+
+        # and through the loop
+        for t in dstrc.time.values[1:]:
+            data_ = dstrc[band].sel(time=t).squeeze()
+            if verbose:
+                print(f"computing stats for {t}")
+
+            outst1 = zonal_stats(zones=fz, values=data_).compute()
+
+            outst1["date"] = str(t)
+            outst = pd.concat([outst, outst1])
+            data_.close()
+            del outst1
+        namestr = f"{path_csv}/{name}_{band}_{ano}_{suffix}.csv"
+        # tozip.append(namestr)
+        outst.to_csv(namestr)
+        print(f"{namestr} SAVED \n \n")
+        del outst, dstrc, data_
+
+    # if zip:
+    #     with zipfile.ZipFile(f'{path_csv}/{name}_{band}.zip', 'w') as zipMe:
+    #         for file in tozip:
+    #             zipMe.write(file, compress_type=zipfile.ZIP_DEFLATED)
+
+
+# %%
+# ds = da.to_dataset()
+extract_fz_timeseries(dsi, ds, field, "mod_fiscal", path_csv, name, "_-_", "NDVI", ochunk=64, verbose=False)
+# %%
diff --git a/utils/get_IVs_unify_rolo_embrapa.py b/utils/get_IVs_unify_rolo_embrapa.py
new file mode 100644
index 0000000..5737a81
--- /dev/null
+++ b/utils/get_IVs_unify_rolo_embrapa.py
@@ -0,0 +1,368 @@
+import time
+
+start = time.time()
+
+import matplotlib.pyplot as plt
+import sys
+
+import geopandas as gpd
+import stackstac
+import xarray as xr
+from scipy.signal import savgol_filter
+
+sys.path.append("/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/")
+from grits import *
+
+path_vector = "/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/"
+file = path_vector + "fazenda_embrapa.gpkg"
+layer = "talhoes"
+
+
+def the_func(path: str, filename: str, layers: 
str) -> None: + print(""" + Vegetation Indices series extraction + from Landsat series + + --- + created by Denis Mariano + denis@seca.space + www.seca.space + 2023-12 + ToDo's + - verificar porque EVI e LAI não estão displaying no valuetool + - TEM QUE DAR UM TRATO NOS VALUES + - agregar no tempo, zscores + - plots + - extraction + - o pior de todos - datas faltantes + + """) + + # %% + + print("all good!") + # %% DEFINE AREA OF INTEREST + # ========================= + + # %% THE CAR WAY + + # %% Embrapa Sao Carlos + + # Get FIELD + field = gpd.read_file(file, layer=layer) + # field = field[field['Re'] == 80000] + + bbox, lat_range, lon_range = get_lims(field) + print(field.head()) + field.plot(column="tid") + + # %% Define period and output path + # 2022-04-02, '2022-11-10' - embrapa sanca + # '2022-07-01' - tabapora mt + name = "embrapa_saocarlos" + # dt1 = '1985-01-01' + dt1 = "2020-11-10" + dt2 = "2022-04-01" + + # dt1 = '2022-11-15' + # dt2 = '2022-10-28' + + # dt2 = str(date.today()) + + datetime = dt1 + "/" + dt2 + + print(datetime) + + # Parameters to save raster data? + savenc = True + zscores = True + path_nc = "/home/jovyan/PlanetaryComputerExamples/OUT/nc/" + + # parameters for extracting data + savecsv = True + column = "TID" + path_csv = "/home/jovyan/PlanetaryComputerExamples/OUT/csv/" + + # some parameters to filter scenes + max_cloud = 70 + # %% QUERY LANDSAT + items57 = query_Landsat_items( + datetime=datetime, + bbox=bbox, + max_cloud=max_cloud, + landsats=[ + "landsat-5", + "landsat-7", + ], + ) + + items89 = query_Landsat_items( + datetime=datetime, bbox=bbox, max_cloud=max_cloud, landsats=["landsat-8", "landsat-9"] + ) + + # %% LOAD BANDS + indices = ["NDVI", "MSAVI", "NDMI", "BSI", "LAI"] # EVI, LAI + assets = ["blue", "green", "red", "nir08", "swir16", "swir22"] + # get the data the lazy way + data89 = stackstac.stack( + items89, + assets=assets, + bounds_latlon=bbox, + epsg=4326, + ) + del data89.attrs["spec"] + + data57 = stackstac.stack( + items57, + assets=assets, + bounds_latlon=bbox, + epsg=4326, + ) + del data57.attrs["spec"] + + # %% MATCH REPROJECTION using rioxarray + print(f"matching datasets ... 
") + ds57 = data57.to_dataset(dim="band") + ds57 = ds57.rio.write_crs("4326") + ds89 = data89.to_dataset(dim="band") + ds89 = ds89.rio.write_crs("4326") + + ds57 = ds57.rio.reproject_match(ds89) + + # %% CONCAT DATASETS + ds = xr.concat([ds57, ds89], dim="time", join="outer") + ds = ds.sortby("time") + ds = ds.chunk(dict(time=-1)) + + # %% + # if pau: + # ds2 = ds.copy() + # #%% + # dss = xr.concat([ds2,ds], dim="time", join='outer') + # dss = dss.sortby('time') + # if dss: + # print('reprojecting') + # dss = dss.rio.write_crs('4326') + # dss = dss.rio.reproject('EPSG:4326') + # dss = dss.rename({'x': 'longitude','y': 'latitude'}) + # ds_ = xr.where(dss > 50000, np.nan, dss) + + # #%% REPROJECT + # print('reprojecting') + # ds = ds.rio.write_crs('4326') + # ds = ds.rio.reproject('EPSG:4326') + # ds = ds.rename({'x': 'longitude','y': 'latitude'}) + + # %% clip nodata and run simple diagnostic + ds = xr.where(ds > 65000, np.nan, ds) + + for var in list(ds.data_vars): + print(var, ds[var].quantile([0.01, 0.1, 0.5, 0.9, 0.99], skipna=True).values, "\n") + + # %% + + # INTERPOLATE NANs + print("interpolating NaNs") + ds = ds.interpolate_na( + dim="time", + method="pchip", # pchip + # limit = 7, + use_coordinate=True, + ) + + # %% XXX SMOOTHENING WOULD BE COOL + + smooth = True + w = 4 + sm = "pchip_w" + str(w) + if smooth: + print("smoothening...") + ds = ds.rolling(time=w, center=True).mean(savgol_filter, window=w, polyorder=2) + + # %% CALCULATE INDICES + ds = ds.rename({"nir08": "nir"}) + dsi = calculate_indices( + ds, + index=indices, + satellite_mission="ls", + # normalise=True, + drop=True, + ) + # %% REPROJECT + print("reprojecting") + dsi = dsi.rio.write_crs("4326") + dsi = dsi.rio.reproject("EPSG:4326") + dsi = dsi.rename({"x": "longitude", "y": "latitude"}) + # %% + # DROPPING STUFF + dsi = dsi.astype("float32") + + drops = [ + "landsat:correction", + "landsat:wrs_path", + "landsat:wrs_row", + "landsat:collection_number", + "landsat:wrs_type", + "instruments", + "raster:bands", + "sci:doi", + ] + dsi = dsi.drop_vars(drops) + + # %% + iv = 0 + dsi.to_netcdf(f"{path_nc}/{dt1}_{dt2}_{iv}_{name}_.nc") + # XXX BSI e NDVI ok, LAI e EVI weird + + # #%% + # for iv in indices: + # dsi[iv].to_netcdf(f'{path_nc}/{name}_{iv}.nc') + + # %% XXX OS INDICES SAO GERADOS APARENTEMENTE OK + lat = field.geometry.centroid.y.values[0] + lon = field.geometry.centroid.x.values[0] + + for iv in indices: + dsi[iv].sel(latitude=lat, longitude=lon, method="nearest").plot() + plt.grid() + plt.show() + plt.close() + + # %% + # #%% IVS Climatology + # Cdsi = dsi.groupby('time.month').mean(skipna=True) + + # Cdsi.load() + + # #%% + # Cdsi.to_netcdf(f'{path_nc}/{name}_IVs_cli.nc') + + # #%% + # for iv in indices: + # Cdsi[iv].sel(latitude=lat, longitude=lon, method='nearest').plot() + # plt.grid();plt.plot(); plt.show() + + # #%% + # for iv in indices: + # dsi[iv].sel(latitude=lat, longitude=lon, method='nearest').plot() + # plt.grid();plt.plot(); plt.show() + + # # %% + # if zscores: + # print('calculating zscores') + # dsi_mean = dsi.groupby('time.month').mean(dim='time') + # dsi_std = dsi.groupby('time.month').std(dim='time') + + # dsi_anom = dsi.groupby('time.month') - dsi_mean + # dsi_z = dsi_anom.groupby('time.month') / dsi_std + + # dsi_z.to_netcdf(f'{path_nc}/{name}_Z-{sm}.nc') + # print('zscores saved') + + # print(f'{time.time()-start} seconds') + + # # XXX XXX XXX XXX ... 
,,, XXX XXX
+    # # %% THE EXTRACTION MISSION
+    # def mask_farm(field,dst):
+
+    #     mask = xr_rasterize(field,dst)
+    #     # #mask data
+    #     dst = dst.where(mask)
+    #     # #convert to float 32 to conserve memory
+    #     #ds = ds.astype(np.int16) * 1000
+    #     dst = dst.astype(np.float32)
+    #     print('Farm masked outside of boundaries!')
+    #     return dst
+
+    # if savecsv:
+    #     print('Masking farm')
+    #     dam = mask_farm(field,dsi)
+
+    # # %% Create zones for paddocks
+    # def farm_zones(field,data,column,ochunk=64):
+
+    #     fz = xr_rasterize(field,data,attribute_col=column,verbose=True)
+    #     fz = fz.chunk(ochunk)
+    #     fz.astype('int16')
+    #     return fz
+
+    # start = time.time()
+
+    # def extract_fz_timeseries(dst, data, field, column, path_csv, name, suffix, band, ochunk=64, verbose=False):
+    #     '''
+    #     Extract time-series for farm zones for one variable
+    #     band is, for example in LST 89, 'lwir11'
+    #     '''
+    #     fz = farm_zones(field,data,column,ochunk)
+    #     tozip = []
+    #     dstrc = dst.chunk(ochunk)
+
+    #     #
+    #     tempo = pd.to_datetime(dam.time.values)
+    #     anos = np.unique([str(x) for x in tempo.year])
+
+    #     for ano in anos[:-1]:
+
+    #         # get stats for the first dataframe
+    #         print(f'working on {ano}')
+    #         data_ = dstrc[band].sel(time=dstrc[band].time.values[0]).squeeze()
+    #         data_ = data_.sel(time=slice(ano+'-01-01',str(int(ano)+1)+'12-31'))
+    #         print(f'computing stats for the first date of year {ano}')
+    #         outst = zonal_stats(zones=fz, values=data_).compute()
+    #         outst['date'] = str(dstrc[band].time.values[0])
+    #         data_.close()
+
+    #         # and through the loop
+    #         for t in dstrc.time.values[1:]:
+    #             data_ = dstrc[band].sel(time=t).squeeze()
+    #             if verbose: print(f'computing stats for {t}')
+
+    #             outst1 = zonal_stats(zones=fz, values=data_).compute()
+
+    #             outst1['date'] = str(t)
+    #             outst = pd.concat([outst,outst1])
+    #             data_.close()
+    #             del outst1
+    #         namestr = f'{path_csv}/{name}_{band}_{ano}_{suffix}.csv'
+    #         #tozip.append(namestr)
+    #         outst.to_csv(namestr)
+    #         print(f'{namestr} SAVED \n \n')
+    #         del outst, dstrc, data_
+
+    #     # if zip:
+    #     #     with zipfile.ZipFile(f'{path_csv}/{name}_{band}.zip', 'w') as zipMe:
+    #     #         for file in tozip:
+    #     #             zipMe.write(file, compress_type=zipfile.ZIP_DEFLATED)
+
+    # # %%
+    # #ds = da.to_dataset()
+    # extract_fz_timeseries(dsi,
+    #                       ds,
+    #                       field,
+    #                       'mod_fiscal',
+    #                       path_csv,
+    #                       name,
+    #                       '_-_',
+    #                       'NDVI',
+    #                       ochunk=64, verbose=False)
+
+    # %%
+    return
+
+
+if __name__ == "__main__":
+    path = sys.argv[1]
+    # run the extraction routine defined above; it reads the module-level
+    # `file` and `layer` set at the top of this script
+    the_func(path, file, layer)
diff --git a/utils/get_IVs_yearly.py b/utils/get_IVs_yearly.py
new file mode 100644
index 0000000..9275a9d
--- /dev/null
+++ b/utils/get_IVs_yearly.py
@@ -0,0 +1,170 @@
+# %%
+import pylab as plt
+from datetime import date
+import sys
+import subprocess
+import pkg_resources
+
+required = {"rasterstats", "odc-ui"}
+installed = {pkg.key for pkg in pkg_resources.working_set}
+missing = required - installed
+
+if missing:
+    python = sys.executable
+    subprocess.check_call([python, "-m", "pip", "install", *missing], stdout=subprocess.DEVNULL)
+else:
+    print(f"Required packages {required} already installed.")
+
+import geopandas as gpd
+import stackstac
+import xarray as xr
+import numpy as np
+import rioxarray
+from scipy.signal import savgol_filter
+import zipfile
+from xrspatial import zonal_stats
+import pandas as pd
+import numpy as np
+
+sys.path.append("/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/")
+from grits import *
+
+print("all good!")
+
+
+# %% Embrapa Sao Carlos
+path_vector = 
"/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/" +file = path_vector + "fazenda_embrapa.gpkg" +layer = "talhoes" + +# Get FIELD +field = gpd.read_file(file, layer=layer) +# field = field[field['Re'] == 80000] + +bbox, lat_range, lon_range = get_lims(field) +print(field.head()) +field.plot(column="tid") + +# to save +savenc = True +zscores = True +path_nc = "/home/jovyan/PlanetaryComputerExamples/OUT/nc/" +# parameters for extracting data +savecsv = True +column = "TID" +path_csv = "/home/jovyan/PlanetaryComputerExamples/OUT/csv/" + +# some parameters to filter scenes +max_cloud = 70 + +name = "embrapa_sc_testeyearly" + + +indices = ["NDVI", "MSAVI", "NDMI", "BSI"] # EVI, LAI +assets = ["blue", "green", "red", "nir08", "swir16", "swir22"] + +# %% +### THE FUCKING for +### + +for ano in range(2022, 2023): + dt1 = str(ano) + "-11-15" + dt2 = str(ano + 1) + "-06-20" + + datetime = dt1 + "/" + dt2 + print(datetime) + # get items + items57 = query_Landsat_items( + datetime=datetime, + bbox=bbox, + max_cloud=max_cloud, + landsats=[ + "landsat-5", + "landsat-7", + ], + ) + + items89 = query_Landsat_items( + datetime=datetime, bbox=bbox, max_cloud=max_cloud, landsats=["landsat-8", "landsat-9"] + ) + + # get Data + data89 = stackstac.stack( + items89, + assets=assets, + bounds_latlon=bbox, + epsg=4326, + ) + del data89.attrs["spec"] + + data57 = stackstac.stack( + items57, + assets=assets, + bounds_latlon=bbox, + epsg=4326, + ) + del data57.attrs["spec"] + + # Match, Repro, Concat + print(f"matching datasets ... ") + ds57 = data57.to_dataset(dim="band") + ds57 = ds57.rio.write_crs("4326") + ds89 = data89.to_dataset(dim="band") + ds89 = ds89.rio.write_crs("4326") + + ds57 = ds57.rio.reproject_match(ds89) + + ds = xr.concat([ds57, ds89], dim="time", join="outer") + ds = ds.sortby("time") + ds = ds.chunk(dict(time=-1)) + + # data wrangling + ds = xr.where(ds > 65000, np.nan, ds) + + print("interpolating NaNs") + ds = ds.interpolate_na( + dim="time", + method="pchip", # pchip + # limit = 7, + use_coordinate=True, + ) + + smooth = True + w = 4 + sm = "pchip_w" + str(w) + if smooth: + print("smoothening...") + ds = ds.rolling(time=w, center=True).mean(savgol_filter, window=w, polyorder=2) + + # CALCULATE INDICES + ds = ds.rename({"nir08": "nir"}) + dsi = calculate_indices(ds, index=indices, satellite_mission="ls", drop=True) + + print("reprojecting") + dsi = dsi.rio.write_crs("4326") + dsi = dsi.rio.reproject("EPSG:4326") + dsi = dsi.rename({"x": "longitude", "y": "latitude"}) + + # DROPPING STUFF + dsi = dsi.astype("float32") + + drops = [ + "landsat:correction", + "landsat:wrs_path", + "landsat:wrs_row", + "landsat:collection_number", + "landsat:wrs_type", + "instruments", + "raster:bands", + "sci:doi", + ] + dsi = dsi.drop_vars(drops) + + # SAVE + print("saving...") + dsi.to_netcdf(f"{path_nc}/{dt1}_{dt2}_{name}.nc") + print(f"{path_nc}/{dt1}_{dt2}_{name}.nc saved") + + del dsi, ds, ds57, data57, items57, ds89, data89, items89 + +# %% diff --git a/utils/get_LST.py b/utils/get_LST.py new file mode 100644 index 0000000..5707659 --- /dev/null +++ b/utils/get_LST.py @@ -0,0 +1,273 @@ +# %% +import time + +start = time.time() + +from datetime import date +import sys +import subprocess +import pkg_resources + +required = {"rasterstats", "odc-ui"} +installed = {pkg.key for pkg in pkg_resources.working_set} +missing = required - installed + +if missing: + python = sys.executable + subprocess.check_call([python, "-m", "pip", "install", *missing], stdout=subprocess.DEVNULL) +else: + print(f"Required 
packages {required} already installed.") + +import geopandas as gpd +import stackstac +import xarray as xr +import numpy as np +import rioxarray +from scipy.signal import savgol_filter +import zipfile +from xrspatial import zonal_stats +import pandas as pd +import numpy as np + +sys.path.append("/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/") +from grits import * + +print("all good!") + +# %% DEFINE AREA OF INTEREST + +# Name for reference +name = "Uniguiri_farm_unify" + +# AOI file and layer (for GPKG) +path_vector = "/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/" +file = path_vector + "fazenda_uniguiri.gpkg" +layer = "piquetes_tid" + +# Landsat 4,5,7 have 'lwir' and 8 and 9 have 'lwir11' +# datetime89 = '2013-05-01/'+str(date.today()) +# datetime457 = '1985-01-01/2013-05-01' +datetime89 = "2021-05-01/2022-11-22" +datetime457 = "2006-01-01/2008-05-01" + + +# Parameters to save raster data? +savenc = True +zscores = True +path_nc = "/home/jovyan/PlanetaryComputerExamples/OUT/nc/" + +# parameters for extracting data +savecsv = True +column = "TID" +path_csv = "/home/jovyan/PlanetaryComputerExamples/OUT/csv/" + +# some parameters to filter scenes +max_cloud = 50 + +# %%# Get FIELD +field = gpd.read_file(file, layer=layer) +# field = field[field['Re'] == 80000] + +bbox, lat_range, lon_range = get_lims(field) +print(field.head()) +field.plot(column="TID") + +# %% QUERY LANDSAT +# You can exclude some Landsats from the list +items89 = query_L89_items(datetime=datetime89, bbox=bbox, max_cloud=max_cloud, landsats=["landsat-8", "landsat-9"]) + +items457 = query_L457_items( + datetime=datetime457, bbox=bbox, max_cloud=max_cloud, landsats=["landsat-4", "landsat-5", "landsat-7"] +) + +# %% +assets89 = ["lwir11"] +data89 = stackstac.stack( + items89, + assets=assets89, + bounds_latlon=bbox, + epsg=4326, # o xarray de imagens será retornado no EPSG:4326 + # resolution = 0.000281612818071153, # cuidado se for mexer na resolucao, tente algo como 0.001 para começar, pois é graus (não metros) +) +data89 = data89.rename({"x": "longitude", "y": "latitude"}) +dst89 = data89.to_dataset(dim="band") +del dst89.attrs["spec"] +# %% +assets457 = ["lwir"] +data457 = stackstac.stack( + items457, + assets=assets457, + bounds_latlon=bbox, + epsg=4326, # o xarray de imagens será retornado no EPSG:4326 + # resolution = 0.000281612818071153, # cuidado se for mexer na resolucao, tente algo como 0.001 para começar, pois é graus (não metros) +) + +data457 = data457.rename({"x": "longitude", "y": "latitude"}) +dst457 = data457.to_dataset(dim="band") +del dst457.attrs["spec"] + + +# %% +def get_lst(lwirband, items, dst, w=5): + """ + Convert lwir to Celcius and prepare dataset for further processing + lwirband (str): 'lwir' for 457 and lwirband for 89 + da (DataArray loaded from items__) + w (int): rolling mean window size, default is 5 + """ + # get lwir11 band info + band_info = items[0].assets[lwirband].extra_fields["raster:bands"][0] + print(band_info) + + dst[lwirband] = dst[lwirband].astype(float) + dst[lwirband] *= band_info["scale"] + dst[lwirband] += band_info["offset"] + dst[lwirband] -= 273.15 + + # variables to drop so I can save the .nc later on + drops = [ + "landsat:correction", + "landsat:wrs_path", + "landsat:wrs_row", + "landsat:collection_number", + "landsat:wrs_type", + "instruments", + "raster:bands", + "instruments", + ] + dst = dst.drop_vars(drops) + # interpolate NaNs (rechunk it first) + dst = dst.chunk(dict(time=-1)) + dst[lwirband] = xr.where(dst[lwirband] < 1, np.nan, 
dst[lwirband]) # + dst[lwirband] = xr.where(dst[lwirband] > 65, np.nan, dst[lwirband]) + dst[lwirband] = dst[lwirband].interpolate_na(dim="time", method="linear") + + # I`m overwriting the raw data + dst[lwirband] = dst[lwirband].rolling(time=w, center=True).mean(savgol_filter, window=w, polyorder=2) + del band_info + return dst + + +# %% finally, get the Land Surface Temperature in Celcius +lst89 = get_lst("lwir11", items89, dst89, 5) +lst457 = get_lst("lwir", items457, dst457, 5) + + +# %% +def lst2nc(dst, path_nc, name, Landsats): + """ + save LST data to netcdf + """ + Landsats = str(Landsats) + + print("Reprojecting and saving ... \n") + dst = dst.rio.write_crs("4326") + dst = dst.rio.reproject("EPSG:4326") + dst = dst.rename({"x": "longitude", "y": "latitude"}) + print("... saving ...") + + try: + dst.to_netcdf(f"{path_nc}lst_{name}_{Landsats}.nc", mode="w") + except: + print("trying to remove some weird shit") + dst = dst.drop_vars(["raster:bands", "instruments"]) + dst.to_netcdf(f"{path_nc}lst_{name}_{Landsats}.nc", mode="w") + + print(f"lst_{name}_{Landsats}.nc saved!") + + +if savenc: + lst2nc(dst89, path_nc, name, 89) + lst2nc(dst457, path_nc, name, 457) + + +# %% EXTRACTING data + + +def mask_farm(field, dst): + mask = xr_rasterize(field, dst) + # #mask data + dst = dst.where(mask) + # #convert to float 32 to conserve memory + # ds = ds.astype(np.int16) * 1000 + dst = dst.astype(np.float32) + print("Farm masked outside of boundaries!") + return dst + + +if savecsv: + print("Masking farm") + lst89m = mask_farm(field, lst89) + lst457m = mask_farm(field, lst457) + + +print(f"Tempo total de processamento salvando os netcdfs no final: {time.time() - start} segundos") + + +if savecsv: + start = time.time() + + +# %% Create zones for paddocks +def farm_zones(field, data, column, ochunk=64): + fz = xr_rasterize(field, data, attribute_col=column, verbose=True) + fz = fz.chunk(ochunk) + fz.astype("int16") + return fz + + +# %% and finally, the extraction +def extract_fz_timeseries(dst, data, field, column, path_csv, name, suffix, band, ochunk=64, zip=False, verbose=False): + """ + Extract time-series for farm zones for one variable + band is, for example in LST 89, 'lwir11' + """ + fz = farm_zones(field, data, column, ochunk) + tozip = [] + dstrc = dst.chunk(ochunk) + # get stats for the first dataframe + data_ = dstrc[band].sel(time=dstrc[band].time.values[0]).squeeze() + print("computing stats for the first date") + outst = zonal_stats(zones=fz, values=data_).compute() + outst["date"] = str(dstrc[band].time.values[0]) + data_.close() + + # and through the loop + for t in dstrc.time.values[1:]: + data_ = dstrc[band].sel(time=t).squeeze() + if verbose: + print(f"computing stats for {t}") + + outst1 = zonal_stats(zones=fz, values=data_).compute() + + outst1["date"] = str(t) + outst = pd.concat([outst, outst1]) + data_.close() + del outst1 + namestr = f"{path_csv}/{name}_{band}_{suffix}.csv" + tozip.append(namestr) + outst.to_csv(namestr) + print(f"{namestr} SAVED \n \n") + del outst, dstrc, data_ + + if zip: + with zipfile.ZipFile(f"{path_csv}/{name}_{band}.zip", "w") as zipMe: + for file in tozip: + zipMe.write(file, compress_type=zipfile.ZIP_DEFLATED) + + +# %% +extract_fz_timeseries(da, data, field, "TID", path_csv, name, "allLandsat", "lst", ochunk=64, verbose=False) +# %% +# extract_fz_timeseries(lst457, +# data457, +# field, +# 'TID', +# path_csv, +# name, +# '457', +# 'lwir', +# ochunk=64, zip=False, verbose=True) +# #%% + +print(f"Tempo total de processamento das extractions: 
{time.time() - start} segundos") diff --git a/utils/get_LST_unify.py b/utils/get_LST_unify.py new file mode 100644 index 0000000..017b415 --- /dev/null +++ b/utils/get_LST_unify.py @@ -0,0 +1,303 @@ +# %% +print(""" + Landsat Land Surface Temperature series extractor + created by Denis Mariano + www.seca.space + denis@seca.space + """) + +# %% +import time + +start = time.time() + +from datetime import date +import sys +import subprocess +import pkg_resources + +required = {"rasterstats", "odc-ui"} +installed = {pkg.key for pkg in pkg_resources.working_set} +missing = required - installed + +if missing: + python = sys.executable + subprocess.check_call([python, "-m", "pip", "install", *missing], stdout=subprocess.DEVNULL) +else: + print(f"Required packages {required} already installed.") + +import geopandas as gpd +import matplotlib.pyplot as plt +import stackstac +import xarray as xr +import numpy as np +import rioxarray +from scipy.signal import savgol_filter +import zipfile +from xrspatial import zonal_stats +import pandas as pd +import numpy as np + +sys.path.append("/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/") +from grits import * + +print("all good!") +# %% DEFINE AREA OF INTEREST +# ========================= +# Name for reference +# name = 'Uniguiri_full_' + +# # AOI file and layer (for GPKG) +# path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' +# file = path_vector + 'fazenda_uniguiri.gpkg' +# layer = 'piquetes_tid' + +# # Get FIELD +# field = gpd.read_file(file, layer=layer) +# #field = field[field['Re'] == 80000] + +# bbox, lat_range, lon_range = get_lims(field) +# print(field.head()) +# field.plot(column='TID') + +# %% THE CAR WAY +""" +a CAR MT-5103601-948E6FB555E3445CB7E0538F61483371 + XXX ler o gpkg do MT leva 30 segundos, não está bom + +""" +# car = 'MT-5103601-948E6FB555E3445CB7E0538F61483371' +# car = 'MT-5104807-84F5196D22B847C1BD91AA27DB598BC1' +# if car: +# name = car +# gdf = gpd.read_file('/home/jovyan/PlanetaryComputerExamples/vetorial/CAR/MT_CAR_AREA_IMOVEL_.gpkg') +# field = gdf[gdf['cod_imovel'] == name] + +# bbox, lat_range, lon_range = get_lims(field) +# print(field.head()) +# del gdf +# field.plot() + +# %% EMBRAPA SANCA +name = "embrapa_sc" +path_vector = "/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/" +file = path_vector + "fazenda_embrapa.gpkg" +layer = "talhoes" +field = gpd.read_file(file, layer=layer) + +# %% Belem PA +# name = 'Belem2' +# path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/' +# file = path_vector + 'belem.gpkg' +# field = gpd.read_file(file) + + +# Get FIELD + +# field = field[field['Re'] == 80000] + +bbox, lat_range, lon_range = get_lims(field) +print(field.head()) +field.plot() # column='tid' +plt.title(name) + +# %% Define period and output path +# Landsat 4,5,7 have 'lwir' and 8 and 9 have 'lwir11' +# datetime='1985-01-01/'+str(date.today()) +# datetime='1985-01-01/2022-04-01'#+str(date.today()) +datetime = "2022-11-25/" + str(date.today()) + +print(datetime) + +# Parameters to save raster data? 
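+# `zscores` toggles the monthly z-score anomalies written near the end of the
+# script, while `savecsv` enables the farm masking and the per-zone
+# zonal-stats extraction, which rasterises the field layer by `column`
+# ("TID"); the smoothed LST stack itself is saved to NetCDF unconditionally.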
+savenc = True +zscores = True +path_nc = "/home/jovyan/PlanetaryComputerExamples/OUT/nc/" + +# parameters for extracting data +savecsv = True +column = "TID" +path_csv = "/home/jovyan/PlanetaryComputerExamples/OUT/csv/" + +# some parameters to filter scenes +max_cloud = 50 +# %% QUERY LANDSAT +items57 = query_Landsat_items( + datetime=datetime, + bbox=bbox, + max_cloud=max_cloud, + landsats=[ + "landsat-5", + "landsat-7", + ], +) + +items89 = query_Landsat_items(datetime=datetime, bbox=bbox, max_cloud=max_cloud, landsats=["landsat-8", "landsat-9"]) + +# get the data the lazy way +data89 = stackstac.stack( + items89, + assets=["lwir11"], + bounds_latlon=bbox, + epsg=4326, +) + +data57 = stackstac.stack( + items57, + assets=["lwir"], + bounds_latlon=bbox, + epsg=4326, +) +# %% The CONCAT Way +# SQUEEZE monoBAND +data89 = data89.rename("lwir").squeeze() +data57 = data57.rename("lwir").squeeze() + +# MATCH REPROJECTION using rioxarray +print("matching DataArrays spatially") +data57 = data57.rio.reproject_match(data89) + +# CONCATENATE DATAARRAYS +da = xr.concat([data89, data57], dim="time", join="outer") + +# RESCALE AND FILTER FOR LAND SURFACE TEMPERATURE +print("reescaling LST") +scale = items89[0].assets["lwir11"].extra_fields["raster:bands"][0]["scale"] +offset = items89[0].assets["lwir11"].extra_fields["raster:bands"][0]["offset"] +da = da * scale + offset - 273.15 +da = da.astype("float32") +da = xr.where((da < -5) | (da > 65), np.nan, da) + +# REPROJECT +print("reprojecting") +da = da.rio.write_crs("4326") +da = da.rio.reproject("EPSG:4326") +da = da.rename({"x": "longitude", "y": "latitude"}) + +# REORDER +da = da.rename("lst") +da = da.sortby("time") + +# INTERPOLATE NANs +print("interpolating NaNs") +da = da.interpolate_na(dim="time", method="pchip", limit=7, use_coordinate=True) + +# %% XXX SMOOTHENING WOULD BE COOL +smooth = True +w = 7 +sm = "pchip_smW" + str(w) +if smooth: + print("smoothening...") + da = da.chunk(dict(time=-1)) + da = da.rolling(time=w, center=True).mean(savgol_filter, window=w, polyorder=2) + +# DROPPING STUFF +drops = [ + "landsat:correction", + "landsat:wrs_path", + "landsat:wrs_row", + "landsat:collection_number", + "landsat:wrs_type", + "instruments", + "raster:bands", +] +da = da.drop_vars(drops) + +# %% Save NC +da.to_netcdf(f"{path_nc}/{name}_LST{sm}.nc") +print(f"SAVED {path_nc}/{name}_LST{sm}.nc") +# %% +if zscores: + print("calculating zscores") + da_mean = da.groupby("time.month").mean(dim="time") + da_std = da.groupby("time.month").std(dim="time") + + da_anom = da.groupby("time.month") - da_mean + da_z = da_anom.groupby("time.month") / da_std + + da_z.to_netcdf(f"{path_nc}/{name}_Z-LST{sm}.nc") + print("zscores saved") + +print(f"{time.time()-start} seconds") + + +# XXX XXX XXX XXX ... 
,,, XXX XXX +# %% THE EXTRACTION MISSION +def mask_farm(field, dst): + mask = xr_rasterize(field, dst) + # #mask data + dst = dst.where(mask) + # #convert to float 32 to conserve memory + # ds = ds.astype(np.int16) * 1000 + dst = dst.astype(np.float32) + print("Farm masked outside of boundaries!") + return dst + + +if savecsv: + print("Masking farm") + dam = mask_farm(field, da) + + +# %% Create zones for paddocks +def farm_zones(field, data, column, ochunk=64): + fz = xr_rasterize(field, data, attribute_col=column, verbose=True) + fz = fz.chunk(ochunk) + fz.astype("int16") + return fz + + +start = time.time() + + +def extract_fz_timeseries(dst, data, field, column, path_csv, name, suffix, band, ochunk=64, verbose=False): + """ + Extract time-series for farm zones for one variable + band is, for example in LST 89, 'lwir11' + """ + fz = farm_zones(field, data, column, ochunk) + tozip = [] + dstrc = dst.chunk(ochunk) + + # + tempo = pd.to_datetime(dam.time.values) + anos = np.unique([str(x) for x in tempo.year]) + + for ano in anos[:-1]: + # get stats for the first dataframe + print(f"working on {ano}") + data_ = dstrc[band].sel(time=dstrc[band].time.values[0]).squeeze() + data_ = data_.sel(time=slice(ano + "-01-01", str(int(ano) + 1) + "12-31")) + print(f"computing stats for the first date of year {ano}") + outst = zonal_stats(zones=fz, values=data_).compute() + outst["date"] = str(dstrc[band].time.values[0]) + data_.close() + + # and through the loop + for t in dstrc.time.values[1:]: + data_ = dstrc[band].sel(time=t).squeeze() + if verbose: + print(f"computing stats for {t}") + + outst1 = zonal_stats(zones=fz, values=data_).compute() + + outst1["date"] = str(t) + outst = pd.concat([outst, outst1]) + data_.close() + del outst1 + namestr = f"{path_csv}/{name}_{band}_{ano}_{suffix}.csv" + # tozip.append(namestr) + outst.to_csv(namestr) + print(f"{namestr} SAVED \n \n") + del outst, dstrc, data_ + + # if zip: + # with zipfile.ZipFile(f'{path_csv}/{name}_{band}.zip', 'w') as zipMe: + # for file in tozip: + # zipMe.write(file, compress_type=zipfile.ZIP_DEFLATED) + + +# %% +ds = da.to_dataset() +extract_fz_timeseries(ds, da, field, "TID", path_csv, name, "allLandsat", "lst", ochunk=64, verbose=False) +# %% diff --git a/get_nc_data.py b/utils/get_nc_data.py similarity index 51% rename from get_nc_data.py rename to utils/get_nc_data.py index f92283b..97db530 100644 --- a/get_nc_data.py +++ b/utils/get_nc_data.py @@ -1,76 +1,78 @@ -#%% +# %% import xarray as xr import pandas as pd import pylab as plt import geopandas as gpd import os, sys from glob import glob -sys.path.append('/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/') + +sys.path.append("/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/") from grits import * from xrspatial import zonal_stats # from dask_gateway import GatewayCluster -#%% -# cluster = GatewayCluster() +# %% +# cluster = GatewayCluster() # client = cluster.get_client() # cluster.adapt(minimum=4, maximum=24) # print(cluster.dashboard_link) # %% Area Of Interest # name embrapa_sc , layer talhoes, fazenda_embrapa.gpkg, column 'tid' -path_nc = '/home/jovyan/PlanetaryComputerExamples/OUT/nc/iacanga/' -path_csv = '/home/jovyan/PlanetaryComputerExamples/OUT/csv/iacanga/' -if not os.path.exists(path_csv) : os.makedirs(path_csv) +path_nc = "/home/jovyan/PlanetaryComputerExamples/OUT/nc/iacanga/" +path_csv = "/home/jovyan/PlanetaryComputerExamples/OUT/csv/iacanga/" +if not os.path.exists(path_csv): + os.makedirs(path_csv) -path_vector = 
'/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -file = path_vector + 'iacanga_r400.shp' +path_vector = "/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/" +file = path_vector + "iacanga_r400.shp" layer = None -column = 'grid' -attribute_col = 'TID' +column = "grid" +attribute_col = "TID" field = gpd.read_file(file, layer=layer) print(field.dtypes) -#%% -band = 'NDVI' - -for r in [100,200,300,400]: +# %% +band = "NDVI" - field_ = field[field[column] == r ] +for r in [100, 200, 300, 400]: + field_ = field[field[column] == r] bbox, lat_range, lon_range = get_lims(field_) print(field_.head()) field_.plot(column=attribute_col, legend=True) - plt.show();plt.close() + plt.show() + plt.close() - files = sorted(glob(path_nc+'*_'+str(r)+'.nc')) + files = sorted(glob(path_nc + "*_" + str(r) + ".nc")) print(files) for f in files: dst = xr.open_dataset(f, chunks=64) - fz = xr_rasterize(field_,dst,attribute_col='TID',verbose=True) + fz = xr_rasterize(field_, dst, attribute_col="TID", verbose=True) tempo = pd.to_datetime(dst.time.values) fz = fz.chunk(64) # extraindo primeira data data_ = dst.sel(time=tempo[0]).squeeze() - print(f'computing stats for {tempo[0]}') + print(f"computing stats for {tempo[0]}") outst = zonal_stats(zones=fz, values=data_[band]).compute() data_.close() - + # the whole thing for t in tempo[1:]: data_ = dst[band].sel(time=t).squeeze() - print(f'computing stats for {t}') + print(f"computing stats for {t}") outst1 = zonal_stats(zones=fz, values=data_).compute() - outst1['date'] = str(t) - outst = pd.concat([outst,outst1]) + outst1["date"] = str(t) + outst = pd.concat([outst, outst1]) data_.close() del outst1 - d0 = f.split('/')[-1].split('_')[0] - d1 = f.split('/')[-1].split('_')[1] - outst.to_csv(f'{path_csv}/{r}_{d0}_{d1}_{band}.csv') - print(f'{path_csv}/{r}_{d0}_{d1}_{band}.csv \n') + d0 = f.split("/")[-1].split("_")[0] + d1 = f.split("/")[-1].split("_")[1] + outst.to_csv(f"{path_csv}/{r}_{d0}_{d1}_{band}.csv") + print(f"{path_csv}/{r}_{d0}_{d1}_{band}.csv \n") # %% diff --git a/get_stats_nc.py b/utils/get_stats_nc.py similarity index 51% rename from get_stats_nc.py rename to utils/get_stats_nc.py index 08d547b..7f6223b 100644 --- a/get_stats_nc.py +++ b/utils/get_stats_nc.py @@ -1,76 +1,76 @@ -#%% +# %% import xarray as xr import pandas as pd import pylab as plt import geopandas as gpd import os, sys from glob import glob -sys.path.append('/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/') + +sys.path.append("/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/") from grits import * from xrspatial import zonal_stats # %% Area Of Interest # name embrapa_sc , layer talhoes, fazenda_embrapa.gpkg, column 'tid' -path_nc = '/home/jovyan/PlanetaryComputerExamples/OUT/nc/iacanga/' -path_csv = '/home/jovyan/PlanetaryComputerExamples/OUT/csv/iacanga/' -if not os.path.exists(path_csv) : os.makedirs(path_csv) +path_nc = "/home/jovyan/PlanetaryComputerExamples/OUT/nc/iacanga/" +path_csv = "/home/jovyan/PlanetaryComputerExamples/OUT/csv/iacanga/" +if not os.path.exists(path_csv): + os.makedirs(path_csv) -path_vector = '/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/' -file = path_vector + 'iacanga_r400.shp' +path_vector = "/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/" +file = path_vector + "iacanga_r400.shp" layer = None -column = 'grid' -attribute_col = 'TID' +column = "grid" +attribute_col = "TID" field = gpd.read_file(file, layer=layer) print(field.dtypes) -#%% +# %% # XXX XXX XXX # ano, precisa ir por ANOOOO - - r = 200 -band = 'lst' +band 
= "lst" -field_ = field[field[column] == r ] +field_ = field[field[column] == r] bbox, lat_range, lon_range = get_lims(field_) print(field_.head()) field_.plot(column=attribute_col, legend=True) -#%% -files = sorted(glob(path_nc+'*_'+str(r)+'_LST_*')) +# %% +files = sorted(glob(path_nc + "*_" + str(r) + "_LST_*")) print(files) -#%% Starting stats extraction +# %% Starting stats extraction dst = xr.open_dataset(files[0], chunks=64) -fz = xr_rasterize(field_,dst,attribute_col='TID',verbose=True) +fz = xr_rasterize(field_, dst, attribute_col="TID", verbose=True) tempo = pd.to_datetime(dst.time.values) fz = fz.chunk(64) # %% first date data_ = dst.sel(time=tempo[0]).squeeze() -print(f'computing stats for {tempo[0]}') +print(f"computing stats for {tempo[0]}") outst = zonal_stats(zones=fz, values=data_[band]).compute() # %% the whole thing data_.close() for t in tempo[1:]: data_ = dst[band].sel(time=t).squeeze() - print(f'computing stats for {t}') - + print(f"computing stats for {t}") + outst1 = zonal_stats(zones=fz, values=data_).compute() - outst1['date'] = str(t) - outst = pd.concat([outst,outst1]) + outst1["date"] = str(t) + outst = pd.concat([outst, outst1]) data_.close() del outst1 -outst.to_csv(f'{path_csv}/{r}_2020-2024_{band}.csv') -print(f'{path_csv}/{r}_2020-2024_{band}.csv \n') +outst.to_csv(f"{path_csv}/{r}_2020-2024_{band}.csv") +print(f"{path_csv}/{r}_2020-2024_{band}.csv \n") # %% diff --git a/grits.py b/utils/grits.py similarity index 80% rename from grits.py rename to utils/grits.py index 6990411..e0aac1f 100644 --- a/grits.py +++ b/utils/grits.py @@ -3,6 +3,7 @@ from geopandas import read_file import pystac_client import planetary_computer +from xarray import Dataset, DataArray """ Estou juntando aqui em 'grits' as funções. @@ -40,36 +41,37 @@ def humanbytes(B): """Return the given bytes as a human friendly KB, MB, GB, or TB string.""" B = float(B) KB = float(1024) - MB = float(KB ** 2) # 1,048,576 - GB = float(KB ** 3) # 1,073,741,824 - TB = float(KB ** 4) # 1,099,511,627,776 + MB = float(KB**2) # 1,048,576 + GB = float(KB**3) # 1,073,741,824 + TB = float(KB**4) # 1,099,511,627,776 if B < KB: - return '{0} {1}'.format(B, 'Bytes' if 0 == B > 1 else 'Byte') + return "{0} {1}".format(B, "Bytes" if 0 == B > 1 else "Byte") elif KB <= B < MB: - return '{0:.2f} KB'.format(B / KB) + return "{0:.2f} KB".format(B / KB) elif MB <= B < GB: - return '{0:.2f} MB'.format(B / MB) + return "{0:.2f} MB".format(B / MB) elif GB <= B < TB: - return '{0:.2f} GB'.format(B / GB) + return "{0:.2f} GB".format(B / GB) elif TB <= B: - return '{0:.2f} TB'.format(B / TB) + return "{0:.2f} TB".format(B / TB) -def get_field(file, column, ID, layer=None, multi_IDs=False, IDs=None): - ''' - file.:str: vector file containing group of farms (CARs, SIGEFs) or in the case of a farm, the farm itself - column.:str: 'column' to find farms or in the case of a farm file, the column to identify fields (padocks, plots, talhoes, piquetes) - ID.:str,int: farm ID in the 'column' - multi_IDs.:bool: default 'False'. If true, one must provide the list of IDs to aggregate as a single field. +def get_field(file, column, index, layer=None, multi_ids=False, ids=None): + """ + + file.:str: vector file containing group of farms (CARs, SIGEFs) or in the case of a farm, the farm itself + column.:str: 'column' to find farms or in the case of a farm file, the column to identify fields (padocks, plots, talhoes, piquetes) + index.:str,int: farm ID in the 'column' + multi_IDs.:bool: default 'False'. 
If true, one must provide the list of IDs to aggregate as a single field. - return 'field': a farm, a field or a group of fields combined as one. - ''' + return 'field': a farm, a field or a group of fields combined as one. + """ -# if column == None: -# gdf_ = read_file( file, layer=layer ) + # if column == None: + # gdf_ = read_file( file, layer=layer ) - if file[-4:] == 'gpkg': + if file[-4:] == "gpkg": layer = layer gdf_ = read_file(file, layer=layer) else: @@ -78,70 +80,63 @@ def get_field(file, column, ID, layer=None, multi_IDs=False, IDs=None): if column == None: field = gdf_ - if multi_IDs == False: - - field = gdf_[gdf_[column] == ID] + if multi_ids == False: + field = gdf_[gdf_[column] == index] else: - field = gdf_[gdf_[column].isin(IDs)] + field = gdf_[gdf_[column].isin(ids)] return field -def get_lims(gdf): - ''' - get bbox, lat_range and lon_range from a geodataframe, let's say, a farm or field. - It returns the info as three tuples. +def get_limits_from_gdf(gdf): + """ + get bbox, lat_range and lon_range from a geodataframe, let's say, a farm or field. + It returns the info as three tuples. - gdf.:GeoDataframe: + gdf.:GeoDataframe: - return bbox, lat_range and lon_range - ''' + return bbox, lat_range and lon_range + """ - limites = gdf - bbox = (limites.bounds.minx.min(), - limites.bounds.miny.min(), - limites.bounds.maxx.max(), - limites.bounds.maxy.max() - ) + bbox = (gdf.bounds.minx.min(), gdf.bounds.miny.min(), gdf.bounds.maxx.max(), gdf.bounds.maxy.max()) lat_range = (bbox[1], bbox[3]) lon_range = (bbox[0], bbox[2]) - print('got bbox, lat_range, lon_range') + print("got bbox, lat_range, lon_range") return bbox, lat_range, lon_range -def get_mms(ds, indices, qmin_qmax=[.01, .99]): - ''' - Return a dictionary of minimuns and maximuns for each variable in a xarray dataset - ds.:xarray dataset: the xarray dataset - qmin_qmax.:2 float list: the minimum and maximum quantile, default is [.01,.99] 1% and 99% +def get_min_and_max_values(ds: Dataset | DataArray, indices: list[str], qmin: float = 0.01, qmax: float = 0.99): + """ + Return a dictionary of minimuns and maximuns for each variable in a xarray dataset + ds.:xarray dataset: the xarray dataset + qmin_qmax.:2 float list: the minimum and maximum quantile, default is [.01,.99] 1% and 99% - ''' + """ import numpy as np mms = {} keys = indices for i in keys: - line = np.nanquantile(ds[i].values, qmin_qmax) + line = np.nanquantile(ds[i].values, [qmin, qmax]) mms[i] = line print(mms) return mms -#====================== + +# ====================== # QUERY SATELLITE DATA -#====================== -def query_l2a_items(bbox, - datetime, - max_cloud_cover): - ''' - Query Sentinel 2 L2A items for a given bounding box withing a - datetime range - bbox.:tuple with coordinates of the 2 corners of a bounding box: it is retrieved by the - get_lims function - max_cloud_cover.:int: percentage of max cloud allowed. - - ''' +# ====================== +def query_l2a_items(bbox, datetime, max_cloud_cover): + """ + Query Sentinel 2 L2A items for a given bounding box withing a + datetime range + bbox.:tuple with coordinates of the 2 corners of a bounding box: it is retrieved by the + get_lims function + max_cloud_cover.:int: percentage of max cloud allowed. 
+ + """ catalog = pystac_client.Client.open( "https://planetarycomputer.microsoft.com/api/stac/v1", @@ -150,27 +145,23 @@ def query_l2a_items(bbox, query_params = {"eo:cloud_cover": {"lt": max_cloud_cover}} - search = catalog.search(bbox=bbox, - collections=["sentinel-2-l2a"], - datetime=datetime, - query=query_params) + search = catalog.search(bbox=bbox, collections=["sentinel-2-l2a"], datetime=datetime, query=query_params) items = search.item_collection() - print(f' found {len(items)} items') + print(f" found {len(items)} items") return items -def query_modis_items(bbox, - datetime, - collection): - ''' - Query MODIS items for a given bounding box withing a - datetime range - bbox.:tuple with coordinates of the 2 corners of a bounding box: it is retrieved by the - get_lims function - collection.:str: collection. - ... product? band? - ''' + +def query_modis_items(bbox, datetime, collection): + """ + Query MODIS items for a given bounding box withing a + datetime range + bbox.:tuple with coordinates of the 2 corners of a bounding box: it is retrieved by the + get_lims function + collection.:str: collection. + ... product? band? + """ # import pystac_client # import planetary_computer @@ -182,59 +173,59 @@ def query_modis_items(bbox, # query_params = {"eo:cloud_cover": {"lt": max_cloud_cover}} - search = catalog.search(bbox=bbox, - collections=collections, - datetime=datetime - ) + search = catalog.search(bbox=bbox, collections=collections, datetime=datetime) items = search.item_collection() - print(f' found {len(items)} items') + print(f" found {len(items)} items") return items -def query_Landsat_items(datetime, - bbox, - max_cloud = 30, - landsats = ["landsat-4", "landsat-5","landsat-7", - "landsat-8", "landsat-9"], - tiers = ['T1'] + +def query_Landsat_items( + datetime, + bbox, + max_cloud=30, + landsats=["landsat-4", "landsat-5", "landsat-7", "landsat-8", "landsat-9"], + tiers=["T1"], ): - ''' - query Landsat 8 and 9 - ''' + """ + query Landsat 8 and 9 + """ # stac object from Planetary Computer stac = pystac_client.Client.open( - "https://planetarycomputer.microsoft.com/api/stac/v1", - modifier=planetary_computer.sign_inplace, + "https://planetarycomputer.microsoft.com/api/stac/v1", + modifier=planetary_computer.sign_inplace, ) # some parameters query_params = { "eo:cloud_cover": {"lt": max_cloud}, "platform": {"in": landsats}, - "landsat:collection_category": { "in": tiers} - } + "landsat:collection_category": {"in": tiers}, + } # search search = stac.search( bbox=bbox, - datetime=datetime, - collections='landsat-c2-l2', - query=query_params, + datetime=datetime, + collections="landsat-c2-l2", + query=query_params, ) # sign items items = planetary_computer.sign(search) items = search.item_collection() - print(f'\n found {len(items)} items \n first: {items[-1]} \n last: {items[0]} \n') + print(f"\n found {len(items)} items \n first: {items[-1]} \n last: {items[0]} \n") print(items[0].assets.keys()) return items -#========================== -# from DEA plotting.py -#========================== + +# ========================== +# from DEA plotting.py +# ========================== + def display_map(x, y, crs="EPSG:4326", margin=-0.5, zoom_bias=0): """ @@ -285,16 +276,8 @@ def display_map(x, y, crs="EPSG:4326", margin=-0.5, zoom_bias=0): all_longitude, all_latitude = transformer.transform(all_x, all_y) # Calculate zoom level based on coordinates - lat_zoom_level = ( - _degree_to_zoom_level(min(all_latitude), max( - all_latitude), margin=margin) - + zoom_bias - ) - lon_zoom_level = ( - 
_degree_to_zoom_level(min(all_longitude), max( - all_longitude), margin=margin) - + zoom_bias - ) + lat_zoom_level = _degree_to_zoom_level(min(all_latitude), max(all_latitude), margin=margin) + zoom_bias + lon_zoom_level = _degree_to_zoom_level(min(all_longitude), max(all_longitude), margin=margin) + zoom_bias zoom_level = min(lat_zoom_level, lon_zoom_level) # Identify centre point for plotting @@ -318,10 +301,7 @@ def display_map(x, y, crs="EPSG:4326", margin=-0.5, zoom_bias=0): ] # Add bounding box as an overlay - interactive_map.add_child( - folium.features.PolyLine( - locations=line_segments, color="red", opacity=0.8) - ) + interactive_map.add_child(folium.features.PolyLine(locations=line_segments, color="red", opacity=0.8)) # Add clickable lat-lon popup box interactive_map.add_child(folium.features.LatLngPopup()) @@ -427,7 +407,6 @@ def rgb( # If ax is supplied via kwargs, ignore aspect and size if "ax" in kwargs: - # Create empty aspect size kwarg that will be passed to imshow aspect_size_kwarg = {} else: @@ -441,7 +420,6 @@ def rgb( # If no value is supplied for `index` (the default), plot using default # values and arguments passed via `**kwargs` if index is None: - # Select bands and convert to DataArray da = ds[bands].to_array() @@ -453,13 +431,11 @@ def rgb( # If there are more than three dimensions and the index dimension == 1, # squeeze this dimension out to remove it if (len(ds.dims) > 2) and ("col" not in kwargs) and (len(da[index_dim]) == 1): - da = da.squeeze(dim=index_dim) # If there are more than three dimensions and the index dimension # is longer than 1, raise exception to tell user to use 'col'/`index` elif (len(ds.dims) > 2) and ("col" not in kwargs) and (len(da[index_dim]) > 1): - raise Exception( f"The input dataset `ds` has more than two dimensions: " "{list(ds.dims.keys())}. Please select a single observation " @@ -468,26 +444,18 @@ def rgb( "call" ) da = da.compute() - img = da.plot.imshow( - robust=robust, col_wrap=col_wrap, **aspect_size_kwarg, **kwargs - ) + img = da.plot.imshow(robust=robust, col_wrap=col_wrap, **aspect_size_kwarg, **kwargs) # If values provided for `index`, extract corresponding observations and # plot as either single image or facet plot else: - # If a float is supplied instead of an integer index, raise exception if isinstance(index, float): - raise Exception( - f"Please supply `index` as either an integer or a list of " "integers" - ) + raise Exception(f"Please supply `index` as either an integer or a list of " "integers") # If col argument is supplied as well as `index`, raise exception if "col" in kwargs: - raise Exception( - f"Cannot supply both `index` and `col`; please remove one and " - "try again" - ) + raise Exception(f"Cannot supply both `index` and `col`; please remove one and " "try again") # Convert index to generic type list so that number of indices supplied # can be computed @@ -503,7 +471,6 @@ def rgb( # If multiple index values are supplied, plot as a faceted plot if len(index) > 1: - img = da.plot.imshow( robust=robust, col=index_dim, @@ -515,16 +482,12 @@ def rgb( # If only one index is supplied, squeeze out index_dim and plot as a # single panel else: - - img = da.squeeze(dim=index_dim).plot.imshow( - robust=robust, **aspect_size_kwarg, **kwargs - ) + img = da.squeeze(dim=index_dim).plot.imshow(robust=robust, **aspect_size_kwarg, **kwargs) # If an export path is provided, save image to file. 
Individual and # faceted plots have a different API (figure vs fig) so we get around this # using a try statement: if savefig_path: - print(f"Exporting image to {savefig_path}") try: @@ -533,9 +496,10 @@ def rgb( img.figure.savefig(savefig_path, **savefig_kwargs) -#========================= +# ========================= # from DEA bandindices.py -#========================= +# ========================= + def calculate_indices( ds, @@ -597,9 +561,9 @@ def calculate_indices( * ``'WI'`` (Water Index, Fisher 2016) collection : str - Deprecated in version 0.1.7. Use `satellite_mission` instead. + Deprecated in version 0.1.7. Use `satellite_mission` instead. - Valid options are: + Valid options are: * ``'c2'`` (for USGS Landsat Collection 2) If 'c2', then `satellite_mission='ls'`. * ``'s2'`` (for Sentinel-2) @@ -670,22 +634,13 @@ def calculate_indices( # Normalised Difference Vegation Index, Rouse 1973 "NDVI": lambda ds: (ds.nir - ds.red) / (ds.nir + ds.red), # Enhanced Vegetation Index, Huete 2002 - "EVI": lambda ds: ( - 2.5 * ((ds.nir - ds.red) / (ds.nir + 6 * ds.red - 7.5 * ds.blue + 1)) - ), + "EVI": lambda ds: (2.5 * ((ds.nir - ds.red) / (ds.nir + 6 * ds.red - 7.5 * ds.blue + 1))), # Leaf Area Index, Boegh 2002 - "LAI": lambda ds: ( - 3.618 - * ((2.5 * (ds.nir - ds.red)) / (ds.nir + (6 * ds.red) - (7.5 * ds.blue) + 1)) - - 0.118 - ), + "LAI": lambda ds: (3.618 * ((2.5 * (ds.nir - ds.red)) / (ds.nir + (6 * ds.red) - (7.5 * ds.blue) + 1)) - 0.118), # Soil Adjusted Vegetation Index, Huete 1988 "SAVI": lambda ds: ((1.5 * (ds.nir - ds.red)) / (ds.nir + ds.red + 0.5)), # Mod. Soil Adjusted Vegetation Index, Qi et al. 1994 - "MSAVI": lambda ds: ( - (2 * ds.nir + 1 - ((2 * ds.nir + 1) ** 2 - 8 * (ds.nir - ds.red)) ** 0.5) - / 2 - ), + "MSAVI": lambda ds: ((2 * ds.nir + 1 - ((2 * ds.nir + 1) ** 2 - 8 * (ds.nir - ds.red)) ** 0.5) / 2), # Normalised Difference Moisture Index, Gao 1996 "NDMI": lambda ds: (ds.nir - ds.swir16) / (ds.nir + ds.swir16), # Normalised Burn Ratio, Lopez Garcia 1991 @@ -704,33 +659,19 @@ def calculate_indices( # Normalised Difference Built-Up Index, Zha 2003 "NDBI": lambda ds: (ds.swir16 - ds.nir) / (ds.swir_1 + ds.nir), # Built-Up Index, He et al. 2010 - "BUI": lambda ds: ((ds.swir16 - ds.nir) / (ds.swir_1 + ds.nir)) - - ((ds.nir - ds.red) / (ds.nir + ds.red)), + "BUI": lambda ds: ((ds.swir16 - ds.nir) / (ds.swir_1 + ds.nir)) - ((ds.nir - ds.red) / (ds.nir + ds.red)), # Built-up Area Extraction Index, Bouzekri et al. 2015 "BAEI": lambda ds: (ds.red + 0.3) / (ds.green + ds.swir16), # New Built-up Index, Jieli et al. 2010 "NBI": lambda ds: (ds.swir16 + ds.red) / ds.nir, # Bare Soil Index, Rikimaru et al. 
2002 - "BSI": lambda ds: ((ds.swir16 + ds.red) - (ds.nir + ds.blue)) - / ((ds.swir16 + ds.red) + (ds.nir + ds.blue)), + "BSI": lambda ds: ((ds.swir16 + ds.red) - (ds.nir + ds.blue)) / ((ds.swir16 + ds.red) + (ds.nir + ds.blue)), # Automated Water Extraction Index (no shadows), Feyisa 2014 - "AWEI_ns": lambda ds: ( - 4 * (ds.green - ds.swir16) - (0.25 * ds.nir * +2.75 * ds.swir22) - ), + "AWEI_ns": lambda ds: (4 * (ds.green - ds.swir16) - (0.25 * ds.nir * +2.75 * ds.swir22)), # Automated Water Extraction Index (shadows), Feyisa 2014 - "AWEI_sh": lambda ds: ( - ds.blue + 2.5 * ds.green - 1.5 * \ - (ds.nir + ds.swir16) - 0.25 * ds.swir22 - ), + "AWEI_sh": lambda ds: (ds.blue + 2.5 * ds.green - 1.5 * (ds.nir + ds.swir16) - 0.25 * ds.swir22), # Water Index, Fisher 2016 - "WI": lambda ds: ( - 1.7204 - + 171 * ds.green - + 3 * ds.red - - 70 * ds.nir - - 45 * ds.swir16 - - 71 * ds.swir22 - ), + "WI": lambda ds: (1.7204 + 171 * ds.green + 3 * ds.red - 70 * ds.nir - 45 * ds.swir16 - 71 * ds.swir22), # Tasseled Cap Wetness, Crist 1985 "TCW": lambda ds: ( 0.0315 * ds.blue @@ -775,38 +716,36 @@ def mndwi(ds): return (ds.green - ds.swir16) / (ds.green + ds.swir16) def swir_diff(ds): - return ds.swir16/ds.swir22 + return ds.swir16 / ds.swir22 def alpha(ds): - return (2*(np.mean(ds.blue)))/(np.mean(swir_diff(ds)) + np.mean(mndwi(ds)**2)) + return (2 * (np.mean(ds.blue))) / (np.mean(swir_diff(ds)) + np.mean(mndwi(ds) ** 2)) def ENDISI(ds): m = mndwi(ds) s = swir_diff(ds) a = alpha(ds) - return (ds.blue - (a)*(s + m**2))/(ds.blue + (a)*(s + m**2)) + return (ds.blue - (a) * (s + m**2)) / (ds.blue + (a) * (s + m**2)) index_dict["ENDISI"] = ENDISI # Artificial Surface Index, Yongquan Zhao & Zhe Zhu 2022 def af(ds): AF = (ds.nir - ds.blue) / (ds.nir + ds.blue) - AF_norm = (AF - AF.min(dim=["y", "x"])) / \ - (AF.max(dim=["y", "x"]) - AF.min(dim=["y", "x"])) + AF_norm = (AF - AF.min(dim=["y", "x"])) / (AF.max(dim=["y", "x"]) - AF.min(dim=["y", "x"])) return AF_norm def ndvi(ds): return (ds.nir - ds.red) / (ds.nir + ds.red) def msavi(ds): - return ((2 * ds.nir + 1 - ((2 * ds.nir + 1) ** 2 - 8 * (ds.nir - ds.red)) ** 0.5) / 2) + return (2 * ds.nir + 1 - ((2 * ds.nir + 1) ** 2 - 8 * (ds.nir - ds.red)) ** 0.5) / 2 def vsf(ds): NDVI = ndvi(ds) MSAVI = msavi(ds) VSF = 1 - NDVI * MSAVI - VSF_norm = (VSF - VSF.min(dim=["y", "x"])) / \ - (VSF.max(dim=["y", "x"]) - VSF.min(dim=["y", "x"])) + VSF_norm = (VSF - VSF.min(dim=["y", "x"])) / (VSF.max(dim=["y", "x"]) - VSF.min(dim=["y", "x"])) return VSF_norm def mbi(ds): @@ -820,16 +759,14 @@ def embi(ds): def ssf(ds): EMBI = embi(ds) SSF = 1 - EMBI - SSF_norm = (SSF - SSF.min(dim=["y", "x"])) / \ - (SSF.max(dim=["y", "x"]) - SSF.min(dim=["y", "x"])) + SSF_norm = (SSF - SSF.min(dim=["y", "x"])) / (SSF.max(dim=["y", "x"]) - SSF.min(dim=["y", "x"])) return SSF_norm + # Overall modulation using the Modulation Factor (MF). 
def mf(ds): - MF = ((ds.blue + ds.green) - (ds.nir + ds.swir16)) / \ - ((ds.blue + ds.green) + (ds.nir + ds.swir16)) - MF_norm = (MF - MF.min(dim=["y", "x"])) / \ - (MF.max(dim=["y", "x"]) - MF.min(dim=["y", "x"])) + MF = ((ds.blue + ds.green) - (ds.nir + ds.swir16)) / ((ds.blue + ds.green) + (ds.nir + ds.swir16)) + MF_norm = (MF - MF.min(dim=["y", "x"])) / (MF.max(dim=["y", "x"]) - MF.min(dim=["y", "x"])) return MF_norm def ASI(ds): @@ -847,7 +784,6 @@ def ASI(ds): # calculate for each index in the list of indices supplied (indexes) for index in indices: - # Select an index function from the dictionary index_func = index_dict.get(str(index)) @@ -855,7 +791,6 @@ def ASI(ds): # invalid option being provided, raise an exception informing user to # choose from the list of valid options if index is None: - raise ValueError( f"No remote sensing `index` was provided. Please " "refer to the function \ndocumentation for a full " @@ -876,7 +811,6 @@ def ASI(ds): ] and not normalise ): - warnings.warn( f"\nA coefficient-based index ('{index}') normally " "applied to surface reflectance values in the \n" @@ -886,7 +820,6 @@ def ASI(ds): ) elif index_func is None: - raise ValueError( f"The selected index '{index}' is not one of the " "valid remote sensing index options. \nPlease " @@ -896,9 +829,11 @@ def ASI(ds): # Deprecation warning if `collection` is specified instead of `satellite_mission`. if collection is not None: - warnings.warn('`collection` was deprecated in version 0.1.7. Use `satelite_mission` instead.', - DeprecationWarning, - stacklevel=2) + warnings.warn( + "`collection` was deprecated in version 0.1.7. Use `satelite_mission` instead.", + DeprecationWarning, + stacklevel=2, + ) # Map the collection values to the valid satellite_mission values. if collection == "c2": satellite_mission = "ls" @@ -909,14 +844,14 @@ def ASI(ds): raise ValueError( f"'{collection}' is not a valid option for " "`collection`. Please specify either \n" - "'c2' or 's2'.") + "'c2' or 's2'." + ) # Rename bands to a consistent format if depending on what satellite mission # is specified in `satellite_mission`. This allows the same index calculations # to be applied to all satellite missions. If no satellite mission was provided, # raise an exception. if satellite_mission is None: - raise ValueError( "No `satellite_mission` was provided. Please specify " "either 'ls' or 's2' to ensure the \nfunction " @@ -941,15 +876,12 @@ def ASI(ds): } # Rename bands in dataset to use simple names (e.g. 'red') - bands_to_rename = { - a: b for a, b in bandnames_dict.items() if a in ds.variables - } + bands_to_rename = {a: b for a, b in bandnames_dict.items() if a in ds.variables} elif satellite_mission == "s2": sr_max = 10000 # Dictionary mapping full data names to simpler alias names bandnames_dict = { - "B02": "blue", "B03": "green", "B04": "red", @@ -963,9 +895,7 @@ def ASI(ds): } # Rename bands in dataset to use simple names (e.g. 'red') - bands_to_rename = { - a: b for a, b in bandnames_dict.items() if a in ds.variables - } + bands_to_rename = {a: b for a, b in bandnames_dict.items() if a in ds.variables} # Raise error if no valid satellite_mission name is provided: else: @@ -982,10 +912,7 @@ def ASI(ds): index_array = index_func(ds.rename(bands_to_rename) / mult) except AttributeError: - raise ValueError( - f"Please verify that all bands required to " - f"compute {index} are present in `ds`." 
- ) + raise ValueError(f"Please verify that all bands required to " f"compute {index} are present in `ds`.") # Add as a new variable in dataset output_band_name = custom_varname if custom_varname else index @@ -1001,8 +928,8 @@ def ASI(ds): def dualpol_indices( ds, - co_pol='vv', - cross_pol='vh', + co_pol="vv", + cross_pol="vh", index=None, custom_varname=None, drop=False, @@ -1091,7 +1018,7 @@ def purity(ds): return (1 - ratio(ds)) / (1 + ratio(ds)) def theta(ds): - return np.arctan((1 - ratio(ds))**2 / (1 + ratio(ds)**2 - ratio(ds))) + return np.arctan((1 - ratio(ds)) ** 2 / (1 + ratio(ds) ** 2 - ratio(ds))) def P1(ds): return 1 / (1 + ratio(ds)) @@ -1100,12 +1027,12 @@ def P2(ds): return 1 - P1(ds) def entropy(ds): - return P1(ds)*np.log2(P1(ds)) + P2(ds)*np.log2(P2(ds)) + return P1(ds) * np.log2(P1(ds)) + P2(ds) * np.log2(P2(ds)) # Dictionary containing remote sensing index band recipes index_dict = { # Radar Vegetation Index for dual-pol, Trudel et al. 2012 - "RVI": lambda ds: 4*ds[cross_pol] / (ds[co_pol] + ds[cross_pol]), + "RVI": lambda ds: 4 * ds[cross_pol] / (ds[co_pol] + ds[cross_pol]), # Vertical dual depolarization index, Periasamy 2018 "VDDPI": lambda ds: (ds[co_pol] + ds[cross_pol]) / ds[co_pol], # cross-pol/co-pol ratio @@ -1124,7 +1051,6 @@ def entropy(ds): # calculate for each index in the list of indices supplied (indexes) for index in indices: - # Select an index function from the dictionary index_func = index_dict.get(str(index)) @@ -1132,7 +1058,6 @@ def entropy(ds): # invalid option being provided, raise an exception informing user to # choose from the list of valid options if index is None: - raise ValueError( f"No radar `index` was provided. Please " "refer to the function \ndocumentation for a full " @@ -1140,7 +1065,6 @@ def entropy(ds): ) elif index_func is None: - raise ValueError( f"The selected index '{index}' is not one of the " "valid remote sensing index options. \nPlease " @@ -1162,20 +1086,23 @@ def entropy(ds): # Return input dataset with added water index variable return ds -#======================== + +# ======================== # from DEA spatial.py -#======================== -def xr_rasterize(gdf, - da, - attribute_col=False, - crs=None, - transform=None, - name=None, - x_dim='x', - y_dim='y', - export_tiff=None, - verbose=False, - **rasterio_kwargs): +# ======================== +def xr_rasterize( + gdf, + da, + attribute_col=False, + crs=None, + transform=None, + name=None, + x_dim="x", + y_dim="y", + export_tiff=None, + verbose=False, + **rasterio_kwargs, +): """ Rasterizes a geopandas.GeoDataFrame into an xarray.DataArray. @@ -1185,33 +1112,33 @@ def xr_rasterize(gdf, A geopandas.GeoDataFrame object containing the vector/shapefile data you want to rasterise. da : xarray.DataArray or xarray.Dataset - The shape, coordinates, dimensions, and transform of this object - are used to build the rasterized shapefile. It effectively - provides a template. The attributes of this object are also + The shape, coordinates, dimensions, and transform of this object + are used to build the rasterized shapefile. It effectively + provides a template. The attributes of this object are also appended to the output xarray.DataArray. attribute_col : string, optional - Name of the attribute column in the geodataframe that the pixels - in the raster will contain. If set to False, output will be a + Name of the attribute column in the geodataframe that the pixels + in the raster will contain. If set to False, output will be a boolean array of 1's and 0's. 
crs : str, optional CRS metadata to add to the output xarray. e.g. 'epsg:3577'. - The function will attempt get this info from the input + The function will attempt get this info from the input GeoDataFrame first. transform : affine.Affine object, optional - An affine.Affine object (e.g. `from affine import Affine; - Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "6886890.0) giving the - affine transformation used to convert raster coordinates - (e.g. [0, 0]) to geographic coordinates. If none is provided, - the function will attempt to obtain an affine transformation + An affine.Affine object (e.g. `from affine import Affine; + Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "6886890.0) giving the + affine transformation used to convert raster coordinates + (e.g. [0, 0]) to geographic coordinates. If none is provided, + the function will attempt to obtain an affine transformation from the xarray object (e.g. either at `da.transform` or `da.geobox.transform`). x_dim : str, optional - An optional string allowing you to override the xarray dimension - used for x coordinates. Defaults to 'x'. Useful, for example, - if x and y dims instead called 'lat' and 'lon'. + An optional string allowing you to override the xarray dimension + used for x coordinates. Defaults to 'x'. Useful, for example, + if x and y dims instead called 'lat' and 'lon'. y_dim : str, optional - An optional string allowing you to override the xarray dimension - used for y coordinates. Defaults to 'y'. Useful, for example, + An optional string allowing you to override the xarray dimension + used for y coordinates. Defaults to 'y'. Useful, for example, if x and y dims instead called 'lat' and 'lon'. export_tiff: str, optional If a filepath is provided (e.g 'output/output.tif'), will export a @@ -1219,7 +1146,7 @@ def xr_rasterize(gdf, is not supplied by the user a default name, 'data', is used verbose : bool, optional Print debugging messages. Default False. - **rasterio_kwargs : + **rasterio_kwargs : A set of keyword arguments to rasterio.features.rasterize Can include: 'all_touched', 'merge_alg', 'dtype'. @@ -1241,9 +1168,11 @@ def xr_rasterize(gdf, crs = da.crs except: if crs is None: - raise ValueError("Please add a `crs` attribute to the " - "xarray.DataArray, or provide a CRS using the " - "function's `crs` parameter (e.g. crs='EPSG:3577')") + raise ValueError( + "Please add a `crs` attribute to the " + "xarray.DataArray, or provide a CRS using the " + "function's `crs` parameter (e.g. crs='EPSG:3577')" + ) # Check if transform is provided as a xarray.DataArray method. # If not, require supplied Affine @@ -1259,10 +1188,12 @@ def xr_rasterize(gdf, except: # If neither of those options work, raise an exception telling the # user to provide a transform - raise TypeError("Please provide an Affine transform object using the " - "`transform` parameter (e.g. `from affine import " - "Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, " - "6886890.0)`") + raise TypeError( + "Please provide an Affine transform object using the " + "`transform` parameter (e.g. 
`from affine import "
+                "Affine; Affine(30.0, 0.0, 548040.0, 0.0, -30.0, "
+                "6886890.0)`"
+            )
 
     # Grab the 2D dims (not time)
     try:
@@ -1282,14 +1213,14 @@ def xr_rasterize(gdf,
 
     # Reproject shapefile to match CRS of raster
     if verbose:
-        print(f'Rasterizing to match xarray.DataArray dimensions ({y}, {x})')
+        print(f"Rasterizing to match xarray.DataArray dimensions ({y}, {x})")
 
     try:
         gdf_reproj = gdf.to_crs(crs=crs)
     except:
-
         # Sometimes the crs can be a datacube utils CRS object
         # so convert to string before reprojecting
-        gdf_reproj = gdf.to_crs(crs={'init': str(crs)})
+        gdf_reproj = gdf.to_crs(crs={"init": str(crs)})
 
     # If an attribute column is specified, rasterise using vector
     # attribute values. Otherwise, rasterise into a boolean array
@@ -1301,17 +1232,10 @@ def xr_rasterize(gdf,
         shapes = gdf_reproj.geometry
 
     # Rasterise shapes into an array
-    arr = rasterize(shapes=shapes,
-                    out_shape=(y, x),
-                    transform=transform,
-                    **rasterio_kwargs)
+    arr = rasterize(shapes=shapes, out_shape=(y, x), transform=transform, **rasterio_kwargs)
 
     # Convert result to a xarray.DataArray
-    xarr = xr.DataArray(arr,
-                        coords=xy_coords,
-                        dims=dims,
-                        attrs=da.attrs,
-                        name=name if name else None)
+    xarr = xr.DataArray(arr, coords=xy_coords, dims=dims, attrs=da.attrs, name=name if name else None)
 
     # Add back crs if xarr.attrs doesn't have it
     if xarr.geobox is None:
@@ -1320,8 +1244,6 @@ def xr_rasterize(gdf,
     if export_tiff:
         if verbose:
             print(f"Exporting GeoTIFF to {export_tiff}")
-        write_cog(xarr,
-                 export_tiff,
-                 overwrite=True)
+        write_cog(xarr, export_tiff, overwrite=True)
 
     return xarr
diff --git a/utils/grits_lst89_p1.py b/utils/grits_lst89_p1.py
new file mode 100644
index 0000000..15e7b4c
--- /dev/null
+++ b/utils/grits_lst89_p1.py
@@ -0,0 +1,227 @@
+"""
+
+Extracting LST by region (grid) over a large property
+
+    For now, Landsat 8 and 9 only
+
+    Feb, 9, 2024
+
+"""
+
+# %%
+import time
+from datetime import date
+import sys
+import subprocess
+import pkg_resources
+
+required = {"rasterstats", "odc-ui"}
+installed = {pkg.key for pkg in pkg_resources.working_set}
+missing = required - installed
+
+if missing:
+    python = sys.executable
+    subprocess.check_call([python, "-m", "pip", "install", *missing], stdout=subprocess.DEVNULL)
+else:
+    print(f"Required packages {required} already installed.")
+
+import geopandas as gpd
+import pylab as plt
+import stackstac
+import xarray as xr
+import numpy as np
+import rioxarray
+from scipy.signal import savgol_filter
+import zipfile
+from xrspatial import zonal_stats
+import pandas as pd
+import numpy as np
+
+sys.path.append("/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/")
+from grits import *
+
+# %% Area Of Interest
+# name embrapa_sc , layer talhoes, fazenda_embrapa.gpkg, column 'tid'
+path_vector = "/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/"
+file = path_vector + "iaca_r400.shp"
+layer = None
+column = "grid"
+field = gpd.read_file(file, layer=layer)
+print(field.dtypes)
+
+
+bbox, lat_range, lon_range = get_lims(field)
+print(field.head())
+field.plot(column=column, legend=True)
+
+
+savenc = True
+zscores = True
+
+
+# parameters for extracting data
+savecsv = True
+path_csv = "/home/jovyan/PlanetaryComputerExamples/OUT/csv/"
+
+
+# %%
+# ### MAIN LOOP over grids and periods
+#
+
+name = "iacanga"
+path_nc = "/home/jovyan/PlanetaryComputerExamples/OUT/nc/iacanga/"
+max_cloud = 50
+# DATETIME CONTROL
+a0 = 2020
+a1 = 2024
+p = 1  # year by year
+d0 = "-06-20"
+d1 = "-06-20"
+
+# for record, Iacanga
+#    grid 100 -> 2013 - 2019[ ok
+#   skipping 2019-2020
+
+####
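+# The loop below works one grid cell at a time and, inside each cell, one
+# yearly window at a time: it queries Landsat 8/9 STAC items, lazily stacks
+# the thermal band (lwir11), rescales it to degrees Celsius, reprojects,
+# fills NaNs and smooths the series, then writes one NetCDF per grid/period.
+#
+# A rough sketch of the rescaling step (illustrative numbers only; the actual
+# scale/offset are read from the STAC item metadata inside the loop). For
+# Landsat Collection 2 Level-2 surface temperature the scaling is typically:
+#
+#     kelvin  = dn * 0.00341802 + 149.0
+#     celsius = kelvin - 273.15    # e.g. dn = 45000 -> ~29.7 degC
+#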
+for i in sorted(field.grid.unique()): + gridstart = time.time() + print(i) + name_ = f"{name}_{i}" + field_ = field[field[column] == i] + bbox, lat_range, lon_range = get_lims(field_) + print(bbox, lat_range, lon_range) + ha = field_.area_ha.sum() + print(f"{name_} de {ha} ha") + field_.plot() + plt.show() + plt.close() + # + # + # + + for ano in range(a0, a1, p): + pstart = time.time() + dt0 = str(ano) + d0 + dt1 = str(ano + p) + d1 + datetime = dt0 + "/" + dt1 + print(f"periodo {datetime}, {column} = {i}") + + # items57 = query_Landsat_items(datetime=datetime, + # bbox=bbox, + # max_cloud=max_cloud, + # landsats = [ + # "landsat-5", "landsat-7", + # ]) + # print('items57 created') + items89 = query_Landsat_items( + datetime=datetime, + bbox=bbox, + max_cloud=max_cloud, + landsats=[ # "landsat-5", "landsat-7", + "landsat-8", + "landsat-9", + ], + ) + scale = items89[0].assets["lwir11"].extra_fields["raster:bands"][0]["scale"] + offset = items89[0].assets["lwir11"].extra_fields["raster:bands"][0]["offset"] + print(f"items89 created, scale {scale} and offset {offset}") + + # get the data the lazy way + data89 = stackstac.stack( + items89, + assets=["lwir11"], + bounds_latlon=bbox, + epsg=4326, + # resolution=100 + ) + data89 = data89.rename("lwir").squeeze() + print("data89 ok!") + print(humanbytes(data89.nbytes)) + # data57 = ( + # stackstac.stack( + # items57, + # assets=['lwir'], + # bounds_latlon=bbox, + # epsg=4326, + # resolution=100 + # )) + # data89 + # print('data57 ok!') + ## %% The CONCAT Way + + # MATCH REPROJECTION using rioxarray + # print(f'matching DataArrays spatially for _{datetime}') + # data57 = data57.rio.reproject_match(data89) + + # CONCATENATE DATAARRAYS + # da = xr.concat([data89, data57], dim="time", join='outer') + + # RESCALE AND FILTER FOR LAND SURFACE TEMPERATURE + + da = data89.copy() + + print("reescaling LST") + da = da * scale + offset - 273.15 + da = da.astype("float32") + da = xr.where((da < -5) | (da > 65), np.nan, da) + + # REPROJECT + # print(f'reprojecting_{datetime}') + print("reprojecting...") + da = da.rio.write_crs("4326") + da = da.rio.reproject("EPSG:4326") + da = da.rename({"x": "longitude", "y": "latitude"}) + print("reprojecting... done") + + # REORDER + da = da.rename("lst") + da = da.sortby("time") + + # INTERPOLATE NANs + print("interpolating NaNs") + da = da.chunk(dict(time=-1)) + da = da.interpolate_na(dim="time", method="pchip", limit=7, use_coordinate=True) + print("interpolating NaNs... done") + + # XXX SMOOTHENING WOULD BE COOL + smooth = True + w = 3 + sm = "pchip_" + str(w) + if smooth: + print("smoothening...") + da = da.chunk(dict(time=-1)) + da = da.rolling(time=w, center=True).mean(savgol_filter, window=w, polyorder=2) + print("smoothing... done.") + + # DROPPING STUFF + drops = [ + "landsat:correction", + "landsat:wrs_path", + "landsat:wrs_row", + "landsat:collection_number", + "landsat:wrs_type", + "instruments", + "raster:bands", + ] + da = da.drop_vars(drops) + + # SAVE + print("saving...") + da.to_netcdf(f"{path_nc}/{dt0}_{dt1}_{name}_{i}_LST_{sm}.nc") + print(f"saving... 
{path_nc}/{dt0}_{dt1}_{name}_{i}_LST_{sm}.nc DONE!") + del ( + da, + data89, + items89, + ) + pend = time.time() + print(f"{dt0}_{dt1}_{name}_{i} took {pend - pstart} seconds to complete.") + + gridend = time.time() + print(f" Grid {i} took {(gridend - gridstart)} seconds") + +def fun_grits_lst(): + return + + +from grits_vis_p1 import fun_grits_vis \ No newline at end of file diff --git a/utils/grits_vis_p1.py b/utils/grits_vis_p1.py new file mode 100644 index 0000000..fba90a9 --- /dev/null +++ b/utils/grits_vis_p1.py @@ -0,0 +1,159 @@ +# %% +print(""" + Vegetation Indices series extraction + from Landsat series + + --- + created by Denis Mariano + denis@seca.space + www.seca.space + 2024-02-09 + ToDo's + - verificar porque EVI e LAI não estão displaying no valuetool + - TEM QUE DAR UM TRATO NOS VALUES + - agregar no tempo, zscores + - plots + - extraction + + """) + +# %% +import time + +start = time.time() + +import sys +import subprocess +import pkg_resources + +required = {"rasterstats", "odc-ui"} +installed = {pkg.key for pkg in pkg_resources.working_set} +missing = required - installed + +if missing: + python = sys.executable + subprocess.check_call([python, "-m", "pip", "install", *missing], stdout=subprocess.DEVNULL) +else: + print(f"Required packages {required} already installed.") + +import geopandas as gpd +import stackstac +import xarray as xr +import numpy as np +import rioxarray +from scipy.signal import savgol_filter +import zipfile +from xrspatial import zonal_stats +import pandas as pd +import numpy as np + +sys.path.append("/home/jovyan/PlanetaryComputerExamples/CODE/pcgrits/") +from grits import * + +print("all good!") +# %% DEFINE AREA OF INTEREST +path_vector = "/home/jovyan/PlanetaryComputerExamples/vetorial/FAZENDAS/" +file = path_vector + "iaca_r400.shp" +field_ = gpd.read_file(file) +print(field_.grid.unique()) +# %% # some parameters to filter scenes +indices = ["NDVI", "MSAVI"] # EVI, LAI,"NDMI","BSI", +# assets = ['blue','green','red','nir08','swir16','swir22'] +assets = ["red", "nir08"] +path_nc = "/home/jovyan/PlanetaryComputerExamples/OUT/nc/iacanga/" +max_cloud = 30 + +datetime = "2022-05-02/2024-02-08" + +# -> Iacanga +# '2019-06-20/2022-04-01' done +# '2022-04-02/2024-02-08' done +# '2022-05-02/2024-02-08' done +# '2013-04-02/2019-06-19' + + +for grid in sorted(field_.grid.unique()): + name = f"iacanga_{grid}" + field = field_[field_["grid"] == grid] + bbox, lat_range, lon_range = get_lims(field) + ha = field.area_ha.sum() + print(f"{name} de {ha} ha - periodo {datetime}") + + items89 = query_Landsat_items( + datetime=datetime, bbox=bbox, max_cloud=max_cloud, landsats=["landsat-8", "landsat-9"] + ) + # get the data the lazy way + data89 = stackstac.stack( + items89, + assets=assets, + bounds_latlon=bbox, + epsg=4326, + ) + del data89.attrs["spec"] + + ds89 = data89.to_dataset(dim="band") + ds = ds89.rio.write_crs("4326") + + ds_ = xr.where(ds > 60000, np.nan, ds) + + # INTERPOLATE NANs + print("interpolating NaNs") + ds_ = ds_.chunk(dict(time=-1)) + ds_ = ds_.interpolate_na( + dim="time", + method="pchip", + # limit = 7, + use_coordinate=True, + ) + + smooth = True + w = 4 + sm = "pchip_smW" + str(w) + if smooth: + print("smoothening...") + ds_ = ds_.chunk(dict(time=-1)) + ds_ = ds_.rolling(time=w, center=True).mean(savgol_filter, window=w, polyorder=2) + + # CALCULATE INDICES + ds_ = ds_.rename({"nir08": "nir"}) + dsi = calculate_indices( + ds_, + index=indices, + satellite_mission="ls", + # normalise=True, + drop=True, + ) + # REPROJECT + 
print("reprojecting") + dsi = dsi.rio.write_crs("4326") + dsi = dsi.rio.reproject("EPSG:4326") + dsi = dsi.rename({"x": "longitude", "y": "latitude"}) + + # DROPPING STUFF + drops = [ + "landsat:correction", + "landsat:wrs_path", + "landsat:wrs_row", + "landsat:collection_number", + "landsat:wrs_type", + "instruments", + "raster:bands", + "sci:doi", + ] + dsi = dsi.drop_vars(drops) + dsi = dsi.astype("float32") + + # Saving + dt1 = datetime.split("/")[0] + dt2 = datetime.split("/")[1] + dsi.to_netcdf(f"{path_nc}/{dt1}_{dt2}_{name}.nc") + print(f"SAVED ___ {path_nc}/{dt1}_{dt2}_{name}.nc ___SAVED ") + + del dsi, ds, ds_, data89, items89, ds89 + # %% + + +def fun_grits_vis(): + return + +from grits_lst89_p1 import fun_grits_lst \ No newline at end of file diff --git a/utils/x_Landsat_QA.py b/utils/x_Landsat_QA.py new file mode 100644 index 0000000..4daec3c --- /dev/null +++ b/utils/x_Landsat_QA.py @@ -0,0 +1,78 @@ +# Flags definition por Landsat 8-9 +# https://docs.digitalearthafrica.org/en/latest/sandbox/notebooks/Frequently_used_code/Cloud_and_pixel_quality_masking.html + +flags_def = { + "cirrus": {"bits": 2, "values": {"0": "not_high_confidence", "1": "high_confidence"}}, + "cirrus_confidence": {"bits": [14, 15], "values": {"0": "none", "1": "low", "2": "reserved", "3": "high"}}, + "clear": {"bits": 6, "values": {"0": False, "1": True}}, + "cloud": {"bits": 3, "values": {"0": "not_high_confidence", "1": "high_confidence"}}, + "cloud_confidence": {"bits": [8, 9], "values": {"0": "none", "1": "low", "2": "medium", "3": "high"}}, + "cloud_shadow": {"bits": 4, "values": {"0": "not_high_confidence", "1": "high_confidence"}}, + "cloud_shadow_confidence": {"bits": [10, 11], "values": {"0": "none", "1": "low", "2": "reserved", "3": "high"}}, + "dilated_cloud": {"bits": 1, "values": {"0": "not_dilated", "1": "dilated"}}, + "nodata": {"bits": 0, "values": {"0": False, "1": True}}, + "snow": {"bits": 5, "values": {"0": "not_high_confidence", "1": "high_confidence"}}, + "snow_ice_confidence": {"bits": [12, 13], "values": {"0": "none", "1": "low", "2": "reserved", "3": "high"}}, + "water": {"bits": 7, "values": {"0": "land_or_cloud", "1": "water"}}, +} + + +def apply_bitmask(arr) -> xr.DataArray or np.array: + """Apply QA pixel bit mask for each array depending on platform""" + + unique_platform = np.unique(arr.platform.to_numpy()) + + if ["landsat-8", "landsat-9"] in unique_platform: + mask_bitfields = [1, 2, 3, 4] # dilated cloud, cirrus, cloud, cloud shadow + elif ["landsat-4", "landsat-5", "landsat-7"] in unique_platform: + mask_bitfields = [1, 3, 4, 5] # dilated cloud, cirrus, cloud, cloud shadow + elif ["landsat-4", "landsat-5"] in unique_platform: + mask_bitfields = [1, 3, 4, 5] # dilated cloud, cirrus, cloud, cloud shadow + else: + raise ValueError(f"No bit mask defined for {arr.platform.to_numpy()}") + + print(unique_platform) + bitmask = 0 + for field in mask_bitfields: + bitmask |= 1 << field + + qa = arr.sel(band="qa").astype("uint16") + bad = qa & bitmask # just look at those 4 bits + + arr = arr.where(bad == 0) + + return arr + + +# https://archive.li/wykEi +L8_flags = { + "dilated_cloud": 1 << 1, + "cirrus": 1 << 2, + "cloud": 1 << 3, + "shadow": 1 << 4, + "snow": 1 << 5, + "clear": 1 << 6, + "water": 1 << 7, +} + + +def get_mask(mask, flags_list): + # first we will create the result mask filled with zeros and the same shape as the mask + final_mask = np.zeros_like(mask) + + # then we will loop through the flags and add the + for flag in flags_list: + # get the mask for this flag + 
flag_mask = np.bitwise_and(mask, L8_flags[flag]) + + # add it to the final flag + final_mask = final_mask | flag_mask + + return final_mask > 0 + + clouds = get_mask(imgqa, ["cirrus", "cloud", "dilated_cloud"]) + shadows = get_mask(imgqa, ["shadow"]) + + fig, ax = plt.subplots(1, 2, figsize=(15, 7)) + ax[0].imshow(clouds) + ax[1].imshow(shadows) diff --git a/x_gemini_landsat.py b/utils/x_gemini_landsat.py similarity index 70% rename from x_gemini_landsat.py rename to utils/x_gemini_landsat.py index 8e9076a..521809f 100644 --- a/x_gemini_landsat.py +++ b/utils/x_gemini_landsat.py @@ -1,19 +1,13 @@ - -#Create an AWS account and IAM user with appropriate permissions to access the Landsat STAC catalog. Alternatively, use existing credentials with sufficient access. -#Set up environment variables to contain your AWS credentials securely: -export AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY_ID" -export AWS_SECRET_ACCESS_KEY="YOUR_SECRET_ACCESS_KEY" -export AWS_DEFAULT_REGION="YOUR_REGION" # Replace with your region (e.g., "us-east-1") - import stackstac import xarray as xr -import boto3 +import os +import requests client = stackstac.Client( base_url="https://landsat-pds.s3.amazonaws.com/stac", aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"), aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"), - region_name=os.environ.get("AWS_DEFAULT_REGION") + region_name=os.environ.get("AWS_DEFAULT_REGION"), ) collection = "LC08" @@ -25,7 +19,7 @@ "collection": collection, "bbox": bbox, "datetime": datetime, - "ids": None # Optional: Filter by specific item IDs + "ids": None, # Optional: Filter by specific item IDs } datacube = client.get_datacube(query=query) @@ -56,4 +50,4 @@ multiband_dataset = xr.combine_nested(datasets, concat="bands") output_path = "landsat_datacube.nc" -multiband_dataset.to_netcdf( XXXX ) +# multiband_dataset.to_netcdf( XXXX ) diff --git a/x_Landsat_QA.py b/x_Landsat_QA.py deleted file mode 100644 index a103a9d..0000000 --- a/x_Landsat_QA.py +++ /dev/null @@ -1,97 +0,0 @@ -# Flags definition por Landsat 8-9 -# https://docs.digitalearthafrica.org/en/latest/sandbox/notebooks/Frequently_used_code/Cloud_and_pixel_quality_masking.html - -flags_def = {'cirrus': {'bits': 2, - 'values': {'0': 'not_high_confidence', '1': 'high_confidence'}}, - 'cirrus_confidence': {'bits': [14, 15], - 'values': {'0': 'none', - '1': 'low', - '2': 'reserved', - '3': 'high'}}, - 'clear': {'bits': 6, 'values': {'0': False, '1': True}}, - 'cloud': {'bits': 3, - 'values': {'0': 'not_high_confidence', '1': 'high_confidence'}}, - 'cloud_confidence': {'bits': [8, 9], - 'values': {'0': 'none', - '1': 'low', - '2': 'medium', - '3': 'high'}}, - 'cloud_shadow': {'bits': 4, - 'values': {'0': 'not_high_confidence', - '1': 'high_confidence'}}, - 'cloud_shadow_confidence': {'bits': [10, 11], - 'values': {'0': 'none', - '1': 'low', - '2': 'reserved', - '3': 'high'}}, - 'dilated_cloud': {'bits': 1, 'values': {'0': 'not_dilated', '1': 'dilated'}}, - 'nodata': {'bits': 0, 'values': {'0': False, '1': True}}, - 'snow': {'bits': 5, - 'values': {'0': 'not_high_confidence', '1': 'high_confidence'}}, - 'snow_ice_confidence': {'bits': [12, 13], - 'values': {'0': 'none', - '1': 'low', - '2': 'reserved', - '3': 'high'}}, - 'water': {'bits': 7, 'values': {'0': 'land_or_cloud', '1': 'water'}}} - - -def apply_bitmask(arr) -> xr.DataArray or np.array: - """Apply QA pixel bit mask for each array depending on platform""" - - unique_platform = np.unique(arr.platform.to_numpy()) - - if ["landsat-8", "landsat-9"] in unique_platform: - 
mask_bitfields = [1, 2, 3, 4] # dilated cloud, cirrus, cloud, cloud shadow - elif ["landsat-4", "landsat-5", "landsat-7"] in unique_platform: - mask_bitfields = [1, 3, 4, 5] # dilated cloud, cirrus, cloud, cloud shadow - elif ["landsat-4", "landsat-5"] in unique_platform: - mask_bitfields = [1, 3, 4, 5] # dilated cloud, cirrus, cloud, cloud shadow - else: - raise ValueError(f"No bit mask defined for {arr.platform.to_numpy()}") - - print(unique_platform) - bitmask = 0 - for field in mask_bitfields: - bitmask |= 1 << field - - qa = arr.sel(band="qa").astype("uint16") - bad = qa & bitmask # just look at those 4 bits - - arr = arr.where(bad == 0) - - return arr - -# https://archive.li/wykEi -L8_flags = {'dilated_cloud': 1<<1, - 'cirrus': 1<<2, - 'cloud': 1<<3, - 'shadow': 1<<4, - 'snow': 1<<5, - 'clear': 1<<6, - 'water': 1<<7} - -def get_mask(mask, flags_list): - -# first we will create the result mask filled with zeros and the same shape as the mask - final_mask = np.zeros_like(mask) - - # then we will loop through the flags and add the - for flag in flags_list: - # get the mask for this flag - flag_mask = np.bitwise_and(mask, L8_flags[flag]) - - # add it to the final flag - final_mask = final_mask | flag_mask - - return final_mask > 0 - - - clouds = get_mask(imgqa, ['cirrus', 'cloud', 'dilated_cloud']) - shadows = get_mask(imgqa, ['shadow']) - - fig, ax = plt.subplots(1, 2, figsize=(15, 7)) - ax[0].imshow(clouds) - ax[1].imshow(shadows) - -
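
The bit-flag masking assembled in utils/x_Landsat_QA.py can be sanity-checked on a few QA pixel values. Below is a minimal, self-contained sketch (numpy only); it mirrors the L8_flags bit positions and the get_mask logic from that file rather than defining anything new:

import numpy as np

# Bit positions for the Landsat 8/9 QA band, as listed in utils/x_Landsat_QA.py
L8_flags = {"dilated_cloud": 1 << 1, "cirrus": 1 << 2, "cloud": 1 << 3,
            "shadow": 1 << 4, "snow": 1 << 5, "clear": 1 << 6, "water": 1 << 7}

def get_mask(mask, flags_list):
    # OR together the requested flag bits, then report where any of them is set
    final_mask = np.zeros_like(mask)
    for flag in flags_list:
        final_mask = final_mask | np.bitwise_and(mask, L8_flags[flag])
    return final_mask > 0

qa = np.array([24, 0, 1 << 6], dtype="uint16")  # 24 = cloud (bit 3) + shadow (bit 4); 1 << 6 = clear
print(get_mask(qa, ["cloud", "dilated_cloud", "cirrus"]))  # [ True False False]
print(get_mask(qa, ["shadow"]))                            # [ True False False]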