Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
109c6fc
Copy files from lisflood-preprocessing
casadoj Aug 27, 2024
89ee2bf
Update setup.py
casadoj Aug 27, 2024
b6bf8a3
Update lfcoords.py
casadoj Aug 27, 2024
07a6496
Add bin/lfcoords0
casadoj Aug 27, 2024
ce689ff
Add test data for lfcoords
casadoj Aug 27, 2024
b31653c
Update requirements.txt
casadoj Aug 28, 2024
27336b6
Update .py in lfcoords
casadoj Aug 28, 2024
0c3f47e
Add data for lfcoords test
casadoj Aug 28, 2024
f683636
Add test for lfcoords
casadoj Aug 28, 2024
38a06c5
Update requirements.txt
casadoj Aug 28, 2024
40635cb
Update data for testing `lfcoords`
casadoj Aug 28, 2024
41eff6a
Update .py in `lfcoords`
casadoj Aug 28, 2024
88cc932
Update test `lfcoords`
casadoj Aug 28, 2024
05a9d5d
Update README from branch "casadoj-patch-1"
casadoj Aug 28, 2024
c7068f7
Improve help `lfcoords`
casadoj Aug 28, 2024
f414b30
Update README
casadoj Aug 28, 2024
5535f4f
Update README
casadoj Aug 28, 2024
7e96631
Update README.md
casadoj Aug 30, 2024
d065096
Update "lfcoords" to include the reservoirs flag
casadoj Sep 5, 2024
fee7bc0
Update lfcoords to check for conflicts
casadoj Sep 10, 2024
f3ed956
Update "lfcoords" to create log file and check points before processing
casadoj Sep 12, 2024
ec57831
Improve documentation "thresholds"
casadoj Oct 4, 2024
1d016ae
Update thresholds
casadoj Oct 4, 2024
cd84812
Update thresholds
casadoj Oct 6, 2024
61284bd
Update thresholds
casadoj Oct 6, 2024
12fb3e5
Update thresholds: "method" option
Oct 6, 2024
0f9dce8
Update `thresholds.py`
casadoj Mar 24, 2025
9c7b26b
Fix lfcoords test
Aug 19, 2025
e3b4bba
Transfer new version of `lfcoords`
Aug 20, 2025
7d4e445
Remove function `downstream_pixel()` in `lfcoords.utils`
Aug 20, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
712 changes: 14 additions & 698 deletions README.md

Large diffs are not rendered by default.

14 changes: 14 additions & 0 deletions bin/lfcoords
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
#!python
"""Command-line entry point for the `lfcoords` tool.

If a sibling ``src`` directory exists next to this script (i.e. the tool is
being run from a source checkout rather than an installed package), it is
appended to ``sys.path`` so the local package takes precedence.
"""

import os
import sys

_here = os.path.dirname(os.path.abspath(__file__))
_src_dir = os.path.normpath(os.path.join(_here, '../src/'))
if os.path.exists(_src_dir):
    sys.path.append(_src_dir)

from lisfloodutilities.lfcoords.lfcoords import main_script

if __name__ == '__main__':
    main_script()
10 changes: 7 additions & 3 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ coverage>=6.0
dask>=2022.2.0
eccodes-python>=0.9.7
exceptiongroup>=1.1.1
fsspec>=2023.1.0
fsspec>=2023.1.0
geopandas>=0.12.2
geopy>=2.4.1
importlib-metadata<5.0.0
iniconfig>=2.0.0
Expand All @@ -22,6 +23,7 @@ pandas>=1.3.5
partd>=1.4.0
pluggy>=1.0.0
pycparser>=2.21
pyflwdir>=0.5.8
pyg2p>=3.2.7
pytest>=7.3.1
pytest-cov>=4.0.0
Expand All @@ -31,11 +33,13 @@ python-dateutil>=2.8.2
pytz>=2023.3
PyYAML>=6.0
rasterio>=1.2.10
rioxarray>=0.13.3
scipy>=1.7.3
six>=1.16.0
tomli>=2.0.1
toolz>=0.12.0
tqdm>=4.65.0
typing_extensions>=4.5.0
ujson>=2.0.2
xarray>=0.20.2
zipp>=3.15.0
xarray>=0.20.1
zipp>=3.15.0
17 changes: 9 additions & 8 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,14 +124,15 @@ def run(self):
version=version,
packages=find_packages('src'),
description='A set of utilities for lisflood users. '
'pcr2nc: Convert PCRaster files to netCDF CF 1.6; '
'nc2pcr: Convert netCDF files to PCRaster format; '
'cutmaps: cut netCDF files; '
'catchstats: calculates catchment statistics; '
'compare: compare two set of netCDF files; '
'thresholds: compute discharge return period thresholds; '
'cutmaps: cut netCDF files; '
'gridding: interpolate meteo variables observations; '
'lfcoords: finds coordinates in the LISFLOOD grid'
'nc2pcr: Convert netCDF files to PCRaster format; '
'ncextract: extract values from netCDF files; '
'catchstats: calculates catchment statistics; ',
'pcr2nc: Convert PCRaster files to netCDF CF 1.6; '
'thresholds: compute discharge return period thresholds; ',
long_description=long_description,
long_description_content_type='text/markdown',
setup_requires=[
Expand All @@ -143,12 +144,12 @@ def run(self):
# 'GDAL=={}'.format(gdal_version),
'netCDF4>=1.5.3', 'toolz', 'xarray>=0.15.1',
'dask', 'pandas>=0.25.1', 'nine', 'pyg2p'],
author="Valerio Lorini, Stefania Grimaldi, Carlo Russo, Domenico Nappo, Lorenzo Alfieri",
author_email="[email protected],[email protected],[email protected],[email protected],[email protected]",
author="Valerio Lorini, Stefania Grimaldi, Carlo Russo, Domenico Nappo, Lorenzo Alfieri, Jesús Casado Rodríguez",
author_email="[email protected],[email protected],[email protected],[email protected],[email protected],[email protected]",
keywords=['netCDF4', 'PCRaster', 'mapstack', 'lisflood', 'efas', 'glofas', 'ecmwf', 'copernicus'],
license='EUPL 1.2',
url='https://github.com/ec-jrc/lisflood-utilities',
scripts=['bin/pcr2nc', 'bin/cutmaps', 'bin/compare', 'bin/nc2pcr', 'bin/thresholds', 'bin/gridding', 'bin/cddmap', 'bin/ncextract','bin/catchstats',],
scripts=['bin/pcr2nc', 'bin/cutmaps', 'bin/compare', 'bin/nc2pcr', 'bin/thresholds', 'bin/gridding', 'bin/cddmap', 'bin/ncextract','bin/catchstats','bin/lfcoords'],
zip_safe=True,
classifiers=[
# complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
Expand Down
204 changes: 204 additions & 0 deletions src/lisfloodutilities/lfcoords/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,204 @@
import os

# NOTE: geopandas checks the USE_PYGEOS environment variable at import time,
# so it must be set BEFORE `import geopandas` for it to take effect.
os.environ['USE_PYGEOS'] = '0'

import logging
import yaml
from pathlib import Path
from typing import Dict
import numpy as np
import pandas as pd
import geopandas as gpd
import xarray as xr
import rioxarray as rxr

# module-level logger
logger = logging.getLogger(__name__)

class Config:
    """
    Manages the application's configuration by reading a YAML file
    and setting default values.
    """

    def __init__(self, config_file: Path):
        """
        Reads the configuration from a YAML file and sets default values if not provided.

        Parameters:
        -----------
        config_file: string or pathlib.Path
            The path to the YAML configuration file.
        """

        # load the YAML configuration
        with open(config_file, 'r', encoding='utf8') as ymlfile:
            cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)

        # paths to the input files
        inputs = cfg['input']
        self.points = Path(inputs['points'])
        self.ldd_fine = Path(inputs['ldd_fine'])
        self.upstream_fine = Path(inputs['upstream_fine'])
        self.ldd_coarse = Path(inputs['ldd_coarse'])
        self.upstream_coarse = Path(inputs['upstream_coarse'])

        # grid resolutions are unknown until update_config() inspects the maps
        self.fine_resolution = None
        self.coarse_resolution = None

        # output folder (created if missing)
        self.output_folder = Path(cfg.get('output_folder', './shapefiles'))
        self.output_folder.mkdir(parents=True, exist_ok=True)

        # point-filtering conditions, with defaults
        conditions = cfg['conditions']
        self.min_area = conditions.get('min_area', 10)
        self.abs_error = conditions.get('abs_error', 50)
        self.pct_error = conditions.get('pct_error', 1)

    def update_config(
        self,
        fine_grid: xr.DataArray,
        coarse_grid: xr.DataArray
    ):
        """
        Extracts the resolution from the finer and coarser grids and updates the configuration object.

        Parameters:
        -----------
        fine_grid: xarray.DataArray
            Any map in the fine grid
        coarse_grid: xarray.DataArray
            Any map in the coarse grid
        """

        # fine grid: mean cell size in degrees, reported in arcseconds
        fine_cellsize = np.mean(np.diff(fine_grid.x))
        fine_arcsec = int(np.round(fine_cellsize * 3600, 0))
        logger.info(f'The resolution of the finer grid is {fine_arcsec} arcseconds')
        self.fine_resolution = f'{fine_arcsec}sec'

        # coarse grid: mean cell size in degrees, reported in arcminutes
        coarse_cellsize = np.round(np.mean(np.diff(coarse_grid.x)), 6)
        coarse_arcmin = int(np.round(coarse_cellsize * 60, 0))
        logger.info(f'The resolution of the coarser grid is {coarse_arcmin} arcminutes')
        self.coarse_resolution = f'{coarse_arcmin}min'


def read_input_files(cfg: Config) -> Dict:
    """
    Reads input files, updates the Config object, and returns a dictionary
    of the loaded data.

    Parameters:
    -----------
    cfg: Config
        Configuration object containing file paths and parameters.

    Returns:
    --------
    Dict
        A dictionary containing the loaded data:
        * 'points': geopandas.GeoDataFrame of input points
        * 'ldd_fine': xarray.DataArray of local drainage directions in the fine grid
        * 'upstream_fine': xarray.DataArray of upstream area (km2) in the fine grid
        * 'ldd_coarse': xarray.DataArray of local drainage directions in the coarse grid
        * 'upstream_coarse': xarray.DataArray of upstream area (m2) in the coarse grid
    """

    # a helper function to reduce code repetition
    def open_raster(path):
        """Helper to open a raster file and drop its singleton 'band' dimension."""
        return rxr.open_rasterio(path).squeeze(dim='band')

    # read upstream map with fine resolution
    upstream_fine = open_raster(cfg.upstream_fine)
    logger.info(f'Map of upstream area in the finer grid correctly read: {cfg.upstream_fine}')

    # read local drainage direction map with fine resolution
    ldd_fine = open_raster(cfg.ldd_fine)
    logger.info(f'Map of local drainage directions in the finer grid correctly read: {cfg.ldd_fine}')

    # read upstream area map of coarse grid
    upstream_coarse = open_raster(cfg.upstream_coarse)
    logger.info(f'Map of upstream area in the coarser grid correctly read: {cfg.upstream_coarse}')

    # read local drainage direction map of coarse grid
    ldd_coarse = open_raster(cfg.ldd_coarse)
    logger.info(f'Map of local drainage directions in the coarser grid correctly read: {cfg.ldd_coarse}')

    # read points text file and discard points with conflicts
    points = pd.read_csv(cfg.points, index_col='ID')
    points.columns = points.columns.str.lower()
    logger.info(f'Table of points correctly read: {cfg.points}')
    points = check_points(cfg, points, ldd_fine)

    # convert to geopandas and export as shapefile
    points = gpd.GeoDataFrame(
        points,
        geometry=gpd.points_from_xy(points['lon'], points['lat']),
        crs=ldd_coarse.rio.crs
    )
    point_shp = cfg.output_folder / f'{cfg.points.stem}.shp'
    points.to_file(point_shp)
    logger.info(f'The original points table has been exported to: {point_shp}')

    inputs = {
        'points': points,
        'ldd_fine': ldd_fine,
        'upstream_fine': upstream_fine,
        'ldd_coarse': ldd_coarse,
        'upstream_coarse': upstream_coarse,
    }

    # update Config with the resolutions derived from the two LDD maps
    cfg.update_config(ldd_fine, ldd_coarse)

    return inputs


def check_points(
    cfg: Config,
    points: pd.DataFrame,
    ldd: xr.DataArray
) -> pd.DataFrame:
    """
    Removes input points that have missing values, a small catchment area,
    or are outside the map extent.

    Parameters:
    -----------
    cfg: Config
        Configuration object.
    points: pandas.DataFrame
        Table of input points with fields 'lat', 'lon' and 'area' (km2)
    ldd: xarray.DataArray
        Map of local drainage directions

    Returns:
    --------
    pandas.DataFrame
        The input table with points with conflicts removed.
    """

    # drop rows containing any missing value
    incomplete = points.isnull().any(axis=1)
    n_incomplete = incomplete.sum()
    if n_incomplete > 0:
        points = points[~incomplete]
        logger.warning(f'{n_incomplete} points were removed because of missing values.')

    # drop rows whose catchment area is below the configured minimum
    too_small = points['area'] < cfg.min_area
    n_small = too_small.sum()
    if n_small > 0:
        points = points[~too_small]
        logger.info(f'{n_small} points were removed due to their small catchment area.')

    # drop rows falling outside the bounding box of the LDD map
    lon_min, lat_min, lon_max, lat_max = np.round(ldd.rio.bounds(), 6)
    outside = ((points.lon < lon_min) | (points.lon > lon_max) |
               (points.lat < lat_min) | (points.lat > lat_max))
    n_outside = outside.sum()
    if n_outside > 0:
        points = points[~outside]
        logger.info(f'{n_outside} points were removed because they are outside the input LDD map.')

    return points
Loading
Loading