From 0c92749d2599b6131336a7b42efbbdce1848975d Mon Sep 17 00:00:00 2001
From: Even Rouault
Date: Tue, 4 Nov 2025 18:15:32 +0100
Subject: [PATCH 01/20] S104 reader: report timePoint, waterLevelTrendThreshold,
 trendInterval metadata items from values group

---
 frmts/hdf5/s104dataset.cpp | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/frmts/hdf5/s104dataset.cpp b/frmts/hdf5/s104dataset.cpp
index 53d6ab63d7e1..d716985e4d32 100644
--- a/frmts/hdf5/s104dataset.cpp
+++ b/frmts/hdf5/s104dataset.cpp
@@ -427,6 +427,21 @@ GDALDataset *S104Dataset::Open(GDALOpenInfo *poOpenInfo)
             return nullptr;
         }
 
+        // Read additional metadata
+        for (const char *pszAttrName :
+             {"timePoint", "waterLevelTrendThreshold", "trendInterval"})
+        {
+            auto poAttr = poGroup->GetAttribute(pszAttrName);
+            if (poAttr)
+            {
+                const char *pszVal = poAttr->ReadAsString();
+                if (pszVal)
+                {
+                    poDS->GDALDataset::SetMetadataItem(pszAttrName, pszVal);
+                }
+            }
+        }
+
         auto poValuesArray = poGroup->OpenMDArray("values");
         if (!poValuesArray)
         {

From 074b08c911a1a2abe3baaef2b8cae2ef5d1d5b21 Mon Sep 17 00:00:00 2001
From: Even Rouault
Date: Tue, 4 Nov 2025 22:21:44 +0100
Subject: [PATCH 02/20] S104 reader: report uncertainty value from HDF5
 'uncertainty' array

---
 frmts/hdf5/s104dataset.cpp | 38 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/frmts/hdf5/s104dataset.cpp b/frmts/hdf5/s104dataset.cpp
index d716985e4d32..be52791c0e75 100644
--- a/frmts/hdf5/s104dataset.cpp
+++ b/frmts/hdf5/s104dataset.cpp
@@ -555,6 +555,44 @@ GDALDataset *S104Dataset::Open(GDALOpenInfo *poOpenInfo)
 
         poWaterLevelTrendBand->m_poRAT = std::move(poRAT);
         poDS->SetBand(2, poWaterLevelTrendBand.release());
+
+        auto poUncertaintyDataset =
+            poFeatureInstance->OpenMDArray("uncertainty");
+        if (poUncertaintyDataset)
+        {
+            const auto &apoUncertaintyDims =
+                poUncertaintyDataset->GetDimensions();
+            const auto oUncertaintyType = poUncertaintyDataset->GetDataType();
+            if (apoUncertaintyDims.size() == 1 &&
+                apoUncertaintyDims[0]->GetSize() == 1 &&
+                oUncertaintyType.GetClass() == GEDTC_COMPOUND)
+            {
+                const auto &oUncertaintyComponents =
+                    oUncertaintyType.GetComponents();
+                if (oUncertaintyComponents.size() == 2 &&
+                    oUncertaintyComponents[1]->GetType().GetClass() ==
+                        GEDTC_NUMERIC)
+                {
+                    auto poView = poUncertaintyDataset->GetView(
+                        std::string("[\"")
+                            .append(oUncertaintyComponents[1]->GetName())
+                            .append("\"]"));
+                    double dfVal = 0;
+                    const GUInt64 arrayStartIdx[] = {0};
+                    const size_t count[] = {1};
+                    const GInt64 arrayStep[] = {0};
+                    const GPtrDiff_t bufferStride[] = {0};
+                    if (poView &&
+                        poView->Read(
+                            arrayStartIdx, count, arrayStep, bufferStride,
+                            GDALExtendedDataType::Create(GDT_Float64), &dfVal))
+                    {
+                        poDS->GDALDataset::SetMetadataItem(
+                            "uncertainty", CPLSPrintf("%f", dfVal));
+                    }
+                }
+            }
+        }
     }
 
     poDS->GDALDataset::SetMetadataItem(GDALMD_AREA_OR_POINT, GDALMD_AOP_POINT);
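From 5bb61875be0666fe1eb6c13301fd1746b8a147cf Mon Sep 17 00:00:00 2001
From: Even Rouault
Date: Tue, 4 Nov 2025 23:21:36 +0100
Subject: [PATCH 03/20] S104 reader: report horizontalPositionUncertainty,
 verticalUncertainty, timeUncertainty, commonPointRule from waterLevel /
 feature group

---
As an illustration (not part of the patch; the file name and subdataset
index below are hypothetical), the metadata items reported by this commit
and the two previous ones surface as dataset metadata through the GDAL
Python API:

    from osgeo import gdal

    ds = gdal.Open("104XX00_sample.h5")
    # The S-104 driver exposes one subdataset per timestamp
    subds = gdal.Open(ds.GetMetadata("SUBDATASETS")["SUBDATASET_1_NAME"])
    for key in ("timePoint", "waterLevelTrendThreshold", "trendInterval",
                "uncertainty", "horizontalPositionUncertainty",
                "verticalUncertainty", "timeUncertainty", "commonPointRule"):
        print(key, "=", subds.GetMetadataItem(key))

 frmts/hdf5/s104dataset.cpp | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/frmts/hdf5/s104dataset.cpp b/frmts/hdf5/s104dataset.cpp
index be52791c0e75..b310e441a804 100644
--- a/frmts/hdf5/s104dataset.cpp
+++ b/frmts/hdf5/s104dataset.cpp
@@ -189,7 +189,9 @@ GDALDataset *S104Dataset::Open(GDALOpenInfo *poOpenInfo)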
         // Read additional metadata
         for (const char *pszAttrName :
-             {"methodWaterLevelProduct", "minDatasetHeight", "maxDatasetHeight"})
+             {"methodWaterLevelProduct", "minDatasetHeight", "maxDatasetHeight",
+              "horizontalPositionUncertainty", "verticalUncertainty",
+              "timeUncertainty", "commonPointRule"})
         {
             auto poAttr = poWaterLevel->GetAttribute(pszAttrName);
             if (poAttr)

From b014d448df631b1853bb94325d7e582cc7e7d152 Mon Sep 17 00:00:00 2001
From: Even Rouault
Date: Wed, 5 Nov 2025 11:07:29 +0100
Subject: [PATCH 04/20] S104 reader: add read support for uncertainty band

---
 doc/source/drivers/raster/s104.rst |  1 +
 frmts/hdf5/s104dataset.cpp         | 21 +++++++++++++++++++--
 2 files changed, 20 insertions(+), 2 deletions(-)

diff --git a/doc/source/drivers/raster/s104.rst b/doc/source/drivers/raster/s104.rst
index 4b2e13004656..010c367042cf 100644
--- a/doc/source/drivers/raster/s104.rst
+++ b/doc/source/drivers/raster/s104.rst
@@ -15,6 +15,7 @@ which is a specific product profile in an HDF5 file.
 
 S-104 files have two image bands representing water level height (band 1)
 and water level trend (band 2) values for each cell in a raster grid area.
+An optional third band can contain the uncertainty in water level height.
 
 When opening a S-104 file, no raster band is directly available. But a list
 of subdatasets will be reported, one for each timestamp available in the
 file.

diff --git a/frmts/hdf5/s104dataset.cpp b/frmts/hdf5/s104dataset.cpp
index b310e441a804..b46426fe47c7 100644
--- a/frmts/hdf5/s104dataset.cpp
+++ b/frmts/hdf5/s104dataset.cpp
@@ -470,13 +470,16 @@ GDALDataset *S104Dataset::Open(GDALOpenInfo *poOpenInfo)
         }
 
         const auto &oComponents = oType.GetComponents();
-        if (oComponents.size() != 2 ||
+        if ((oComponents.size() != 2 && oComponents.size() != 3) ||
             oComponents[0]->GetName() != "waterLevelHeight" ||
             oComponents[0]->GetType().GetNumericDataType() != GDT_Float32 ||
             oComponents[1]->GetName() != "waterLevelTrend" ||
             (oComponents[1]->GetType().GetNumericDataType() != GDT_Byte &&
              // In theory should be Byte, but 104US00_ches_dcf2_20190606T12Z.h5 uses Int32
-             oComponents[1]->GetType().GetNumericDataType() != GDT_Int32))
+             oComponents[1]->GetType().GetNumericDataType() != GDT_Int32) ||
+            (oComponents.size() == 3 &&
+             (oComponents[2]->GetName() != "uncertainty" ||
+              oComponents[2]->GetType().GetNumericDataType() != GDT_Float32)))
         {
             CPLError(CE_Failure, CPLE_AppDefined, "Wrong data type for %s",
                      poValuesArray->GetFullName().c_str());
@@ -558,6 +561,20 @@ GDALDataset *S104Dataset::Open(GDALOpenInfo *poOpenInfo)
 
         poDS->SetBand(2, poWaterLevelTrendBand.release());
 
+        if (oComponents.size() == 3)
+        {
+            // Create uncertainty band
+            auto poUncertaintyArray =
+                poValuesArray->GetView("[\"uncertainty\"]");
+            auto poUncertaintyDS = std::unique_ptr<GDALDataset>(
+                poUncertaintyArray->AsClassicDataset(1, 0));
+            auto poUncertaintyBand =
+                std::make_unique<S104RasterBand>(std::move(poUncertaintyDS));
+            poUncertaintyBand->SetDescription("uncertainty");
+            poUncertaintyBand->m_osUnitType = "metre";
+            poDS->SetBand(3, poUncertaintyBand.release());
+        }
+
         auto poUncertaintyDataset =
             poFeatureInstance->OpenMDArray("uncertainty");
         if (poUncertaintyDataset)
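From c2920414aaf2e6dcf0a5ad205ed02c64ffaa1bd9 Mon Sep 17 00:00:00 2001
From: Even Rouault
Date: Tue, 4 Nov 2025 20:25:41 +0100
Subject: [PATCH 05/20] S104: fix setting description vs physicalFilename

---
A rough sketch of the intended effect (illustration only; the exact S104
subdataset connection string shown here is hypothetical): the dataset
description becomes the name passed to Open(), while PAM (.aux.xml) is keyed
on the physical file plus the subdataset name, so per-subdataset auxiliary
metadata can be persisted.

    from osgeo import gdal

    subds = gdal.Open('S104:"104XX00_sample.h5":Group_001')
    print(subds.GetDescription())  # the opened name, not the bare .h5 path
    # Statistics saved to PAM are now attached to the right subdataset
    subds.GetRasterBand(1).ComputeStatistics(False)

 frmts/hdf5/s104dataset.cpp | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/frmts/hdf5/s104dataset.cpp b/frmts/hdf5/s104dataset.cpp
index b46426fe47c7..baac1eda3717 100644
--- 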
a/frmts/hdf5/s104dataset.cpp +++ b/frmts/hdf5/s104dataset.cpp @@ -617,7 +617,12 @@ GDALDataset *S104Dataset::Open(GDALOpenInfo *poOpenInfo) poDS->GDALDataset::SetMetadataItem(GDALMD_AREA_OR_POINT, GDALMD_AOP_POINT); // Setup/check for pam .aux.xml. - poDS->SetDescription(osFilename.c_str()); + if (osFilename != poOpenInfo->pszFilename) + { + poDS->SetSubdatasetName((osFeatureInstance + "/" + osGroup).c_str()); + poDS->SetPhysicalFilename(osFilename.c_str()); + } + poDS->SetDescription(poOpenInfo->pszFilename); poDS->TryLoadXML(); // Setup overviews. From 92d05f78fee79771263e2480467081baffac953a Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Tue, 4 Nov 2025 20:25:57 +0100 Subject: [PATCH 06/20] S111: fix setting description vs physicalFilename --- frmts/hdf5/s111dataset.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/frmts/hdf5/s111dataset.cpp b/frmts/hdf5/s111dataset.cpp index 5761b9eb93ae..e1e3318ce83a 100644 --- a/frmts/hdf5/s111dataset.cpp +++ b/frmts/hdf5/s111dataset.cpp @@ -636,7 +636,12 @@ GDALDataset *S111Dataset::Open(GDALOpenInfo *poOpenInfo) poDS->GDALDataset::SetMetadataItem(GDALMD_AREA_OR_POINT, GDALMD_AOP_POINT); // Setup/check for pam .aux.xml. - poDS->SetDescription(osFilename.c_str()); + if (osFilename != poOpenInfo->pszFilename) + { + poDS->SetSubdatasetName((osFeatureInstance + "/" + osGroup).c_str()); + poDS->SetPhysicalFilename(osFilename.c_str()); + } + poDS->SetDescription(poOpenInfo->pszFilename); poDS->TryLoadXML(); // Setup overviews. From 98a33c1f07dc2816d137c9fccced91f9a09d767a Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Wed, 5 Nov 2025 00:59:52 +0100 Subject: [PATCH 07/20] S100 reader: reset vertical datum metadata items when reading from a new group --- frmts/hdf5/s100.cpp | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/frmts/hdf5/s100.cpp b/frmts/hdf5/s100.cpp index 2b5e32b6b577..b6a63cdb1398 100644 --- a/frmts/hdf5/s100.cpp +++ b/frmts/hdf5/s100.cpp @@ -857,6 +857,15 @@ void S100ReadVerticalDatum(GDALMajorObject *poMO, const GDALGroup *poGroup) if (poVerticalDatum && poVerticalDatum->GetDataType().GetClass() == GEDTC_NUMERIC) { + poMO->GDALMajorObject::SetMetadataItem(S100_VERTICAL_DATUM_MEANING, + nullptr); + poMO->GDALMajorObject::SetMetadataItem(S100_VERTICAL_DATUM_ABBREV, + nullptr); + poMO->GDALMajorObject::SetMetadataItem("VERTICAL_DATUM_EPSG_CODE", + nullptr); + poMO->GDALMajorObject::SetMetadataItem(S100_VERTICAL_DATUM_NAME, + nullptr); + poMO->GDALMajorObject::SetMetadataItem("verticalDatum", nullptr); if (nVerticalDatumReference == 1) { bool bFound = false; From 62b82ad7f49ffde5f983cb55f43517a44bcec0f3 Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Wed, 22 Oct 2025 23:38:27 +0200 Subject: [PATCH 08/20] Add validate_s102.py sample validation script --- scripts/typos_allowlist.txt | 2 + .../osgeo_utils/samples/validate_s102.py | 2208 +++++++++++++++++ 2 files changed, 2210 insertions(+) create mode 100755 swig/python/gdal-utils/osgeo_utils/samples/validate_s102.py diff --git a/scripts/typos_allowlist.txt b/scripts/typos_allowlist.txt index 5841356ae445..21ab2941d814 100644 --- a/scripts/typos_allowlist.txt +++ b/scripts/typos_allowlist.txt @@ -369,3 +369,5 @@ for Bosnia and Herzegovina (country code "BA"), limited to the "county" subtype. 
for (const CPLXMLNode *psIter = psTreNode->psChild; for (const CPLXMLNode *psIter = psTreNode->psChild; gdal vector sql --oo MODEL=OeREBKRM09vs.imd --config OGR_STROKE_CURVE=TRUE --sql 'SELECT Rechtsstatus,publiziertAb,MetadatenGeobasisdaten,Eigentumsbeschraenkung,ZustaendigeStelle,Flaeche FROM "OeREBKRM09trsfr.Transferstruktur.Geometrie"' -f "ESRI Shapefile" ch.bazl.sicherheitszonenplan.oereb_20131118.xtf shpdir + (0, 2, "metres"), + (1, 2, "metres"), diff --git a/swig/python/gdal-utils/osgeo_utils/samples/validate_s102.py b/swig/python/gdal-utils/osgeo_utils/samples/validate_s102.py new file mode 100755 index 000000000000..5e3f18517079 --- /dev/null +++ b/swig/python/gdal-utils/osgeo_utils/samples/validate_s102.py @@ -0,0 +1,2208 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +############################################################################### +# +# Project: GDAL/OGR +# Purpose: Test compliance of IHO S102 v3.0 dataset +# Author: Even Rouault +# +############################################################################### +# Copyright (c) 2025, Even Rouault +# +# SPDX-License-Identifier: MIT +############################################################################### + +# Validates against +# https://iho-ohi.github.io/S-102-Product-Specification/documents/3.0.0/document.html and +# https://iho.int/uploads/user/pubs/standards/s-100/S-100_5.2.0_Final_Clean.pdf + +# "102_DevXXXX" are for traceability with respect to requirements of the spreadsheet: +# https://raw.githubusercontent.com/iho-ohi/S-100-Validation-Checks/refs/heads/main/Documents/S-158-102/0.2.0/S-158_102_0_2_0_20241118.xlsx +# Note that there are a few checks in that spreadsheet that are specific only of 2.3.0, and not 3.0.0... + +import os +import re +import struct +import sys + +# Standard Python modules +from collections import namedtuple + +# Extension modules +import h5py +import numpy as np + +try: + from osgeo import osr + + osr.UseExceptions() + gdal_available = True +except ImportError: + gdal_available = False + +ERROR = "Error" +CRITICAL_ERROR = "Critical error" + +AttributeDefinition = namedtuple( + "AttributeDefinition", ["name", "required", "type", "fixed_value"] +) + + +def _get_int_value_or_none(v): + try: + return int(v) + except ValueError: + return None + + +def _get_int_attr_or_none(group, attr_name): + if attr_name not in group.attrs: + return None + return _get_int_value_or_none(group.attrs[attr_name]) + + +def _get_float_value_or_none(v): + try: + return float(v) + except ValueError: + return None + + +def _get_float_attr_or_none(group, attr_name): + if attr_name not in group.attrs: + return None + return _get_float_value_or_none(group.attrs[attr_name]) + + +def _cast_to_float32(v): + return struct.unpack("f", struct.pack("f", v))[0] + + +class S102ValidationException(Exception): + pass + + +class S102Checker: + def __init__(self, filename, abort_at_first_error=False): + self.filename = filename + self.abort_at_first_error = abort_at_first_error + self.errors = [] + self.warnings = [] + self.checks_done = set([]) + + def _log_check(self, name): + self.checks_done.add(name) + + def _warning(self, msg): + self.warnings += [msg] + + def _error(self, msg): + self.errors += [(ERROR, msg)] + if self.abort_at_first_error: + raise S102ValidationException(f"{ERROR}: {msg}") + + def _critical_error(self, msg): + self.errors += [(CRITICAL_ERROR, msg)] + if self.abort_at_first_error: + raise S102ValidationException(f"{CRITICAL_ERROR}: {msg}") + + def _is_uint8(self, h5_type): + return ( + 
isinstance(h5_type, h5py.h5t.TypeIntegerID)
+            and h5_type.get_sign() == h5py.h5t.SGN_NONE
+            and h5_type.get_size() == 1
+        )
+
+    def _is_uint16(self, h5_type):
+        return (
+            isinstance(h5_type, h5py.h5t.TypeIntegerID)
+            and h5_type.get_sign() == h5py.h5t.SGN_NONE
+            and h5_type.get_size() == 2
+        )
+
+    def _is_uint32(self, h5_type):
+        return (
+            isinstance(h5_type, h5py.h5t.TypeIntegerID)
+            and h5_type.get_sign() == h5py.h5t.SGN_NONE
+            and h5_type.get_size() == 4
+        )
+
+    def _is_int16(self, h5_type):
+        return (
+            isinstance(h5_type, h5py.h5t.TypeIntegerID)
+            and h5_type.get_sign() == h5py.h5t.SGN_2
+            and h5_type.get_size() == 2
+        )
+
+    def _is_int32(self, h5_type):
+        return (
+            isinstance(h5_type, h5py.h5t.TypeIntegerID)
+            and h5_type.get_sign() == h5py.h5t.SGN_2
+            and h5_type.get_size() == 4
+        )
+
+    def _is_float32(self, h5_type):
+        return isinstance(h5_type, h5py.h5t.TypeFloatID) and h5_type.get_size() == 4
+
+    def _is_float64(self, h5_type):
+        return isinstance(h5_type, h5py.h5t.TypeFloatID) and h5_type.get_size() == 8
+
+    def _is_string(self, h5_type):
+        return isinstance(h5_type, h5py.h5t.TypeStringID)
+
+    def _is_enumeration(self, h5_type):
+        return isinstance(h5_type, h5py.h5t.TypeEnumID)
+
+    def _check_attributes(self, ctxt_name, group, attr_list):
+
+        for attr_def in attr_list:
+            if attr_def.required and attr_def.name not in group.attrs:
+                # 102_Dev1002: check presence of required attributes
+                self._critical_error(
+                    f"Required {ctxt_name} attribute '{attr_def.name}' is missing"
+                )
+
+            elif attr_def.name in group.attrs:
+                attr = group.attrs[attr_def.name]
+                if isinstance(attr, bytes):
+                    attr = attr.decode("utf-8")
+                h5_type = group.attrs.get_id(attr_def.name).get_type()
+
+                # 102_Dev1004: check type
+
+                if attr_def.type == "string":
+                    if not self._is_string(h5_type):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a string"
+                        )
+
+                elif attr_def.type == "time":
+                    if not self._is_string(h5_type):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a string"
+                        )
+
+                    # 102_Dev1005: validate date or time
+                    self._log_check("102_Dev1005")
+                    pattern = re.compile(
+                        r"^(?:[01]\d|2[0-3])[0-5]\d[0-5]\d(?:Z|[+-](?:[01]\d|2[0-3])[0-5]\d)$"
+                    )
+                    if not pattern.match(attr):
+                        self._error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a valid time: {attr}"
+                        )
+
+                elif attr_def.type == "date":
+                    if not isinstance(h5_type, h5py.h5t.TypeStringID):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a string"
+                        )
+                    elif h5_type.get_size() != 8:
+                        self._warning(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not an 8-character string"
+                        )
+
+                    # 102_Dev1005: validate date or time
+                    self._log_check("102_Dev1005")
+                    pattern = re.compile(
+                        r"^(?:[0-9]{4})(?:(?:0[1-9]|1[0-2])(?:0[1-9]|[12][0-9]|3[01]))$"
+                    )
+                    if not pattern.match(attr):
+                        self._error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a valid date: {attr}"
+                        )
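+                    # For illustration (informative examples, not from the
+                    # spec text): "143000Z" and "143000+0200" match the time
+                    # pattern above, while "20250101" matches the date
+                    # pattern; "14:30:00Z" or "2025-01-01" do not.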
+                elif attr_def.type == "uint8":
+                    if not self._is_uint8(h5_type):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a uint8"
+                        )
+
+                elif attr_def.type == "uint16":
+                    if not self._is_uint16(h5_type):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a uint16"
+                        )
+
+                elif attr_def.type == "uint32":
+                    if not self._is_uint32(h5_type):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a uint32"
+                        )
+
+                elif attr_def.type == "int32":
+                    if not self._is_int32(h5_type):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not an int32"
+                        )
+
+                elif attr_def.type == "float32":
+                    if not self._is_float32(h5_type):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a float32"
+                        )
+
+                elif attr_def.type == "float64":
+                    if not self._is_float64(h5_type):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not a float64"
+                        )
+
+                elif attr_def.type == "enumeration":
+                    if not self._is_enumeration(h5_type):
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' is not an enumeration"
+                        )
+
+                else:
+                    raise Exception(
+                        f"Programming error: unexpected type {attr_def.type}"
+                    )
+
+                if attr_def.fixed_value:
+                    self._log_check("102_Dev1006")
+                    if attr != attr_def.fixed_value:
+                        self._critical_error(
+                            f"{ctxt_name} attribute '{attr_def.name}' has value '{attr}', whereas '{attr_def.fixed_value}' is expected"
+                        )
+
+        self._log_check("102_Dev1028")
+        attr_dict = {a.name: a for a in attr_list}
+        for attr in group.attrs:
+            if attr not in attr_dict:
+                self._warning(f"Extra element in {ctxt_name} group: '{attr}'")
+
+    def check(self):
+
+        try:
+            f = h5py.File(self.filename, "r")
+        except Exception as e:
+            self._critical_error(str(e))
+            return
+
+        self._log_check("102_Dev9005")
+        file_size = os.stat(self.filename).st_size
+        if file_size > 10 * 1024 * 1024:
+            self._warning(
+                f"File size of {self.filename} = {file_size}, which exceeds 10 MB"
+            )
+
+        basename = os.path.basename(self.filename)
+        if not basename.startswith("102"):
+            self._warning("File name should start with '102'")
+        if not basename.upper().endswith(".H5"):
+            self._warning("File name should end with '.H5'")
+        pattern = r"^102[a-zA-Z0-9]{4}[a-zA-Z0-9_]{1,12}\.(?:h5|H5)$"
+        if not re.match(pattern, basename):
+            self._warning(
+                f"File name '{basename}' does not match expected pattern '{pattern}'"
+            )
+
+        self._log_check("102_Dev1028")
+        for key in f.keys():
+            if key not in (
+                "Group_F",
+                "BathymetryCoverage",
+                "QualityOfBathymetryCoverage",
+            ):
+                self._warning(f"Unexpected element {key} in top level group")
+
+        self._log_check("102_Dev1001")
+        if "Group_F" in f.keys():
+            self._validate_group_f(f, f["Group_F"])
+        else:
+            self._critical_error("No feature information group ('Group_F')")
+
+        # Cf Table 10-2 - Root group attributes
+        topLevelAttributesList = [
+            AttributeDefinition(
+                name="productSpecification",
+                required=True,
+                type="string",
+                fixed_value="INT.IHO.S-102.3.0.0",
+            ),
+            AttributeDefinition(
+                name="issueTime", required=False, type="time", fixed_value=None
+            ),
+            AttributeDefinition(
+                name="issueDate", required=True, type="date", fixed_value=None
+            ),
+            AttributeDefinition(
+                name="horizontalCRS", required=True, type="int32", fixed_value=None
+            ),
+            AttributeDefinition(
+                name="epoch", required=False, type="string", fixed_value=None
+            ),
+            AttributeDefinition(
+                name="westBoundLongitude",
+                required=True,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="eastBoundLongitude",
+                required=True,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="southBoundLatitude",
+                required=True,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="northBoundLatitude",
+                required=True,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="metadata", required=False, type="string", fixed_value=None
+            ),
+            # 102_Dev1020
+            AttributeDefinition(
+                name="verticalCS", required=True, type="int32", fixed_value=6498
+            ),
+            AttributeDefinition(
+                name="verticalCoordinateBase",
+                required=True,
+                type="enumeration",
fixed_value=2, + ), + AttributeDefinition( + name="verticalDatumReference", + required=True, + type="enumeration", + fixed_value=1, + ), + AttributeDefinition( + name="verticalDatum", required=True, type="uint16", fixed_value=None + ), + ] + + self._log_check("102_Dev1002") + self._log_check("102_Dev1003") + self._log_check("102_Dev1004") + self._check_attributes("top level", f, topLevelAttributesList) + if _get_int_attr_or_none(f, "verticalCS"): + self._log_check("102_Dev1020") + + self._validate_verticalCoordinateBase(f) + self._validate_verticalDatumReference(f) + self._validate_verticalDatum(f) + self._validate_epoch(f) + self._validate_metadata(f, self.filename) + self._validate_horizontalCRS(f) + self._validate_bounds("top level", f) + + if "BathymetryCoverage" in f.keys(): + self._validate_BathymetryCoverage(f) + else: + self._log_check("102_Dev1026") + self._critical_error("Missing /BathymetryCoverage group") + + if "QualityOfBathymetryCoverage" in f.keys(): + self._validate_QualityOfBathymetryCoverage(f) + + self.checks_done = sorted(self.checks_done) + + def _validate_enumeration(self, group, attr_name, expected_values): + h5_type = group.attrs.get_id(attr_name).get_type() + if isinstance(h5_type, h5py.h5t.TypeEnumID): + if h5_type.get_nmembers() != len(expected_values): + self._warning( + f"Expected {len(expected_values)} members for enumeration {attr_name}" + ) + else: + for code in expected_values: + try: + value = h5_type.enum_nameof(code).decode("utf-8") + except Exception: + value = None + self._warning( + f"Enumeration {attr_name}: did not find value for code {code}" + ) + if value: + expected = expected_values[code] + if value != expected: + self._error( + f"Enumeration {attr_name}: for code {code}, found value {value}, whereas {expected} was expected" + ) + + def _validate_verticalCoordinateBase(self, f): + if "verticalCoordinateBase" in f.attrs: + expected_values = { + 1: "seaSurface", + 2: "verticalDatum", + 3: "seaBottom", + } + self._validate_enumeration(f, "verticalCoordinateBase", expected_values) + + def _validate_verticalDatumReference(self, f): + if "verticalDatumReference" in f.attrs: + expected_values = { + 1: "s100VerticalDatum", + 2: "EPSG", + } + self._validate_enumeration(f, "verticalDatumReference", expected_values) + + def _validate_verticalDatum(self, f): + if "verticalDatum" in f.attrs: + value = f.attrs["verticalDatum"] + if isinstance(value, int) and not ( + (value >= 1 and value <= 30) or value == 44 + ): + # 102_Dev1006 + self._critical_error( + f"Top level attribute verticalDatum has value '{value}', whereas it should be in [1, 30] range or 44" + ) + + def _validate_epoch(self, f): + self._log_check("102_Dev1007") + epoch = _get_float_attr_or_none(f, "epoch") + if epoch and not (epoch >= 1980 and epoch <= 2100): + self._warning(f"Top level attribute epoch has invalid value: {epoch}") + + def _validate_metadata(self, f, filename): + if "metadata" in f.attrs: + metadata = f.attrs["metadata"] + if isinstance(metadata, str) and metadata: + basename = os.path.basename(filename) + if basename.endswith(".h5") or basename.endswith(".H5"): + basename = basename[0:-3] + if metadata not in (f"MD_{basename}.xml", f"MD_{basename}.XML"): + self._critical_error( + f"Top level attribute metadata has value '{metadata}', whereas it should be empty, 'MD_{basename}.xml' or 'MD_{basename}.XML'" + ) + + def _validate_horizontalCRS(self, f): + self._log_check("102_Dev1009") + horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS") + if horizontalCRS and not ( + 
horizontalCRS in (4326, 5041, 5042) + or (horizontalCRS >= 32601 and horizontalCRS <= 32660) + or (horizontalCRS >= 32701 and horizontalCRS <= 32760) + ): + self._critical_error( + f"Top level attribute 'horizontalCRS'={horizontalCRS} must be 4326, 5041, 5042 or in [32601,32660] or [32701,32760] ranges" + ) + + def _validate_bounds(self, ctxt_name, f): + west = _get_float_attr_or_none(f, "westBoundLongitude") + east = _get_float_attr_or_none(f, "eastBoundLongitude") + north = _get_float_attr_or_none(f, "northBoundLatitude") + south = _get_float_attr_or_none(f, "southBoundLatitude") + if ( + west is not None + and east is not None + and north is not None + and south is not None + ): + + if not (west >= -180 and west <= 180): + self._warning( + f"{ctxt_name}: westBoundLongitude is not in [-180, 180] range" + ) + if not (east >= -180 and east <= 180): + self._warning( + f"{ctxt_name}: eastBoundLongitude is not in [-180, 180] range" + ) + if west >= east: + self._warning( + f"{ctxt_name}: westBoundLongitude is greater or equal to eastBoundLongitude" + ) + if not (north >= -90 and north <= 90): + self._warning( + f"{ctxt_name}: northBoundLatitude is not in [-90, 90] range" + ) + if not (south >= -90 and south <= 90): + self._warning( + f"{ctxt_name}: southBoundLatitude is not in [-90, 90] range" + ) + if south >= north: + self._warning( + f"{ctxt_name}: southBoundLatitude is greater or equal to northBoundLatitude" + ) + + def _validate_group_f(self, rootGroup, group_f): + + for key in group_f.keys(): + if key not in ( + "featureCode", + "BathymetryCoverage", + "QualityOfBathymetryCoverage", + ): + self._warning(f"Unexpected element {key} in Group_F") + + self._log_check("102_Dev1021") + if "featureCode" in group_f.keys(): + self._validate_group_f_featureCode( + rootGroup, group_f, group_f["featureCode"] + ) + else: + self._critical_error( + "No featureCode array in feature information group ('/Group_F/featureCode')" + ) + + def _validate_group_f_featureCode(self, rootGroup, group_f, featureCode): + + self._log_check("102_Dev1021") + if not isinstance(featureCode, h5py.Dataset): + self._critical_error("'/Group_F/featureCode' is not a dataset") + return + + if len(featureCode.shape) != 1: + self._critical_error( + "'/Group_F/featureCode' is not a one-dimensional dataset" + ) + return + + self._log_check("102_Dev1022") + values = set([v.decode("utf-8") for v in featureCode[:]]) + if "BathymetryCoverage" not in values: + self._critical_error( + "Bathymetry data feature missing from featureCode array" + ) + + self._log_check("102_Dev1023") + if ( + "QualityOfBathymetryCoverage" not in values + or "QualityOfBathymetryCoverage" not in rootGroup + ): + self._warning("Quality feature not used") + + self._log_check("102_Dev1024") + for value in values: + if value not in ("BathymetryCoverage", "QualityOfBathymetryCoverage"): + # + self._critical_error( + f"Group_F feature information must correspond to feature catalog. 
Did not expect {value}" + ) + + self._log_check("102_Dev1025") + if value not in group_f.keys(): + self._critical_error( + f"Feature information dataset for feature type {value} missing" + ) + + self._log_check("102_Dev1026") + if value not in rootGroup.keys(): + self._critical_error(f"No feature instances for feature type {value}") + + if "BathymetryCoverage" in group_f.keys(): + self._validate_group_f_BathymetryCoverage(group_f) + + if "QualityOfBathymetryCoverage" in group_f.keys(): + self._validate_group_f_QualityOfBathymetryCoverage(group_f) + + def _validate_group_f_BathymetryCoverage(self, group_f): + self._log_check("102_Dev1027") + + BathymetryCoverage = group_f["BathymetryCoverage"] + if not isinstance(BathymetryCoverage, h5py.Dataset): + self._critical_error("'/Group_F/BathymetryCoverage' is not a dataset") + elif BathymetryCoverage.shape not in ((1,), (2,)): + self._critical_error( + "'/Group_F/BathymetryCoverage' is not a one-dimensional dataset of shape 1 or 2" + ) + elif BathymetryCoverage.dtype != [ + ("code", "O"), + ("name", "O"), + ("uom.name", "O"), + ("fillValue", "O"), + ("datatype", "O"), + ("lower", "O"), + ("upper", "O"), + ("closure", "O"), + ]: + self._critical_error( + "'/Group_F/BathymetryCoverage' has not expected data type" + ) + else: + type = BathymetryCoverage.id.get_type() + assert isinstance(type, h5py.h5t.TypeCompoundID) + for member_idx in range(type.get_nmembers()): + subtype = type.get_member_type(member_idx) + if not isinstance(subtype, h5py.h5t.TypeStringID): + self._critical_error( + f"Member of index {member_idx} in /Group_F/BathymetryCoverage is not a string" + ) + return + if not subtype.is_variable_str(): + self._critical_error( + f"Member of index {member_idx} in /Group_F/BathymetryCoverage is not a variable length string" + ) + + values = BathymetryCoverage[:] + expected_values = [ + (0, 0, "depth"), + (0, 1, "depth"), + (0, 2, "metres"), + (0, 3, "1000000"), + (0, 4, "H5T_FLOAT"), + (0, 5, "-14"), + (0, 6, "11050"), + (0, 7, "closedInterval"), + (1, 0, "uncertainty"), + (1, 1, "uncertainty"), + (1, 2, "metres"), + (1, 3, "1000000"), + (1, 4, "H5T_FLOAT"), + (1, 5, "0"), + (1, 6, ""), + (1, 7, "geSemiInterval"), + ] + + for row, col, expected_value in expected_values: + if row < BathymetryCoverage.shape[0]: + value = values[row][col].decode("utf-8") + if value != expected_value: + self._critical_error( + f"/Group_F/BathymetryCoverage: row {row}, {col}, got value '{value}', whereas '{expected_value}' is expected" + ) + + def _validate_group_f_QualityOfBathymetryCoverage(self, group_f): + self._log_check("102_Dev1027") + + QualityOfBathymetryCoverage = group_f["QualityOfBathymetryCoverage"] + if not isinstance(QualityOfBathymetryCoverage, h5py.Dataset): + self._critical_error( + "'/Group_F/QualityOfBathymetryCoverage' is not a dataset" + ) + elif QualityOfBathymetryCoverage.shape != (1,): + self._critical_error( + "'/Group_F/QualityOfBathymetryCoverage' is not a one-dimensional dataset of shape 1" + ) + elif QualityOfBathymetryCoverage.dtype != [ + ("code", "O"), + ("name", "O"), + ("uom.name", "O"), + ("fillValue", "O"), + ("datatype", "O"), + ("lower", "O"), + ("upper", "O"), + ("closure", "O"), + ]: + self._critical_error( + "'/Group_F/QualityOfBathymetryCoverage' has not expected data type" + ) + else: + type = QualityOfBathymetryCoverage.id.get_type() + assert isinstance(type, h5py.h5t.TypeCompoundID) + for member_idx in range(type.get_nmembers()): + subtype = type.get_member_type(member_idx) + if not isinstance(subtype, 
h5py.h5t.TypeStringID): + self._critical_error( + f"Member of index {member_idx} in /Group_F/QualityOfBathymetryCoverage is not a string" + ) + return + if not subtype.is_variable_str(): + self._critical_error( + f"Member of index {member_idx} in /Group_F/QualityOfBathymetryCoverage is not a variable length string" + ) + + values = QualityOfBathymetryCoverage[:] + expected_values = [ + (0, 0, "iD"), + (0, 1, "ID"), + (0, 2, ""), + (0, 3, "0"), + (0, 4, "H5T_INTEGER"), + (0, 5, "1"), + (0, 6, ""), + (0, 7, "geSemiInterval"), + ] + + for row, col, expected_value in expected_values: + value = values[row][col].decode("utf-8") + if value != expected_value: + self._critical_error( + f"/Group_F/QualityOfBathymetryCoverage: row {row}, {col}, got value '{value}', whereas '{expected_value}' is expected" + ) + + def _validate_BathymetryCoverage(self, f): + BathymetryCoverage = f["BathymetryCoverage"] + if not isinstance(BathymetryCoverage, h5py.Group): + self._critical_error("/BathymetryCoverage is not a group") + return + + # Cf Table 10-4 - Attributes of BathymetryCoverage feature container group + attr_list = [ + AttributeDefinition( + name="dataCodingFormat", + required=True, + type="enumeration", + fixed_value=2, + ), + AttributeDefinition( + name="dimension", + required=True, + type="uint8", + fixed_value=2, + ), + AttributeDefinition( + name="commonPointRule", + required=True, + type="enumeration", + fixed_value=2, + ), + AttributeDefinition( + name="horizontalPositionUncertainty", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="verticalUncertainty", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="numInstances", + required=True, + type="uint8", + fixed_value=None, + ), + AttributeDefinition( + name="sequencingRule.type", + required=True, + type="enumeration", + fixed_value=1, + ), + AttributeDefinition( + name="sequencingRule.scanDirection", + required=True, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="interpolationType", + required=True, + type="enumeration", + fixed_value=1, + ), + AttributeDefinition( + name="dataOffsetCode", + required=True, + type="enumeration", + fixed_value=5, + ), + ] + + self._log_check("102_Dev2001") + self._check_attributes( + "BathymetryCoverage group", BathymetryCoverage, attr_list + ) + + numInstances = _get_int_attr_or_none(BathymetryCoverage, "numInstances") + if numInstances is not None: + if numInstances <= 0: + self._critical_error( + '/BathymetryCoverage["numInstances"] attribute value must be >= 1' + ) + numInstances = None + + if "commonPointRule" in BathymetryCoverage.attrs: + expected_values = { + 1: "average", + 2: "low", + 3: "high", + 4: "all", + } + self._validate_enumeration( + BathymetryCoverage, "commonPointRule", expected_values + ) + + if "dataCodingFormat" in BathymetryCoverage.attrs: + expected_values = { + 1: "Fixed Stations", + 2: "Regular Grid", + 3: "Ungeorectified Grid", + 4: "Moving Platform", + 5: "Irregular Grid", + 6: "Variable cell size", + 7: "TIN", + 8: "Fixed Stations (Stationwise)", + 9: "Feature oriented Regular Grid", + } + self._validate_enumeration( + BathymetryCoverage, "dataCodingFormat", expected_values + ) + + horizontalPositionUncertainty = _get_float_attr_or_none( + BathymetryCoverage, "horizontalPositionUncertainty" + ) + if horizontalPositionUncertainty and not ( + horizontalPositionUncertainty == -1.0 or horizontalPositionUncertainty >= 0 + ): + self._warning( + 
'/BathymetryCoverage["horizontalPositionUncertainty"] attribute value must be -1 or positive' + ) + + verticalUncertainty = _get_float_attr_or_none( + BathymetryCoverage, "verticalUncertainty" + ) + if verticalUncertainty and not ( + verticalUncertainty == -1.0 or verticalUncertainty >= 0 + ): + self._warning( + '/BathymetryCoverage["verticalUncertainty"] attribute value must be -1 or positive' + ) + + scanDirection_values = None + if "sequencingRule.scanDirection" in BathymetryCoverage.attrs: + scanDirection = BathymetryCoverage.attrs["sequencingRule.scanDirection"] + if isinstance(scanDirection, str): + # strip leading space. IMHO there should not be any, but + # the examples in the specification sometimes show one... + scanDirection_values = [x.lstrip() for x in scanDirection.split(",")] + + self._log_check("102_Dev2011") + if len(scanDirection_values) != 2: + self._warning( + '/BathymetryCoverage["sequencingRule.scanDirection"] attribute should have 2 values' + ) + elif "axisNames" in BathymetryCoverage.keys(): + + scanDirection_values_without_orientation = [] + for v in scanDirection_values: + if v.startswith("-"): + scanDirection_values_without_orientation.append(v[1:]) + else: + scanDirection_values_without_orientation.append(v) + scanDirection_values_without_orientation = set( + scanDirection_values_without_orientation + ) + + axisNames = BathymetryCoverage["axisNames"] + if ( + isinstance(axisNames, h5py.Dataset) + and axisNames.shape == (2,) + and isinstance(axisNames.id.get_type(), h5py.h5t.TypeStringID) + ): + axisNames_values = set( + [v.decode("utf-8") for v in axisNames[:]] + ) + if scanDirection_values_without_orientation != axisNames_values: + self._warning( + f"Sequencing rule scanDirection contents ({scanDirection_values_without_orientation}) does not match axis names ({axisNames_values}" + ) + + # Check that QualityOfBathymetryCoverage has (almost) the same attributes as BathymetryCoverage + if "QualityOfBathymetryCoverage" in f.keys(): + QualityOfBathymetryCoverage = f["QualityOfBathymetryCoverage"] + if not isinstance(QualityOfBathymetryCoverage, h5py.Group): + self._critical_error("/QualityOfBathymetryCoverage is not a group") + else: + attr_list[0] = AttributeDefinition( + name="dataCodingFormat", + required=True, + type="enumeration", + fixed_value=9, + ) + self._log_check("102_Dev2002") + self._check_attributes( + "QualityOfBathymetryCoverage group", + QualityOfBathymetryCoverage, + attr_list, + ) + + self._validate_axisNames(f, BathymetryCoverage) + + subgroups = set( + [ + name + for name, item in BathymetryCoverage.items() + if isinstance(item, h5py.Group) + ] + ) + + self._log_check("102_Dev2007") + if len(subgroups) == 0: + self._critical_error("/BathymetryCoverage has no groups") + else: + for i in range(1, len(subgroups) + 1): + expected_name = "BathymetryCoverage.%02d" % i + if expected_name not in subgroups: + self._critical_error( + "/BathymetryCoverage/{expected_name} group does not exist" + ) + + for name in subgroups: + if not name.startswith("BathymetryCoverage."): + self._warning( + "/BathymetryCoverage/{expected_name} is an unexpected group" + ) + + self._log_check("102_Dev2008") + if numInstances and len(subgroups) != numInstances: + self._critical_error( + f"/BathymetryCoverage has {len(subgroups)} groups whereas numInstances={numInstances}" + ) + + # Attributes and groups already checked above + self._log_check("102_Dev2012") + for name, item in BathymetryCoverage.items(): + if isinstance(item, h5py.Dataset) and name != "axisNames": + 
self._warning(f"/BathymetryCoverage has unexpected dataset {name}") + + if isinstance(item, h5py.Group) and name.startswith("BathymetryCoverage."): + self._validate_BathymetryCoverage_instance(f, BathymetryCoverage, item) + + def _validate_BathymetryCoverage_instance(self, f, BathymetryCoverage, instance): + + # Cf Table 10-6 - Attributes of BathymetryCoverage feature instance group + attr_list = [ + AttributeDefinition( + name="westBoundLongitude", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="eastBoundLongitude", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="southBoundLatitude", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="northBoundLatitude", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="numGRP", + required=True, + type="uint8", + fixed_value=1, + ), + AttributeDefinition( + name="gridOriginLongitude", + required=True, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="gridOriginLatitude", + required=True, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="gridSpacingLongitudinal", + required=True, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="gridSpacingLatitudinal", + required=True, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="numPointsLongitudinal", + required=True, + type="uint32", + fixed_value=None, + ), + AttributeDefinition( + name="numPointsLatitudinal", + required=True, + type="uint32", + fixed_value=None, + ), + AttributeDefinition( + name="startSequence", + required=True, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="verticalDatum", + required=False, + type="uint16", + fixed_value=None, + ), + AttributeDefinition( + name="verticalDatumReference", + required=False, + type="uint8", + fixed_value=1, + ), + ] + + self._log_check("102_Dev3001") + self._check_attributes( + f"BathymetryCoverage feature instance group {instance.name}", + instance, + attr_list, + ) + + present = [] + missing = [] + for name in ( + "westBoundLongitude", + "eastBoundLongitude", + "northBoundLatitude", + "southBoundLatitude", + ): + if name in instance.attrs: + present.append(name) + else: + missing.append(name) + + if present and missing: + self._critical_error( + f"BathymetryCoverage feature instance group {instance.name}: attributes {present} are present, but {missing} are missing" + ) + + westBoundLongitude = _get_float_attr_or_none(instance, "westBoundLongitude") + eastBoundLongitude = _get_float_attr_or_none(instance, "eastBoundLongitude") + northBoundLatitude = _get_float_attr_or_none(instance, "northBoundLatitude") + southBoundLatitude = _get_float_attr_or_none(instance, "southBoundLatitude") + + top_westBoundLongitude = _get_float_attr_or_none(f, "westBoundLongitude") + top_eastBoundLongitude = _get_float_attr_or_none(f, "eastBoundLongitude") + top_northBoundLatitude = _get_float_attr_or_none(f, "northBoundLatitude") + top_southBoundLatitude = _get_float_attr_or_none(f, "southBoundLatitude") + + if ( + westBoundLongitude is not None + and eastBoundLongitude is not None + and northBoundLatitude is not None + and southBoundLatitude is not None + ): + + horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS") + if horizontalCRS: + if horizontalCRS == 4326: + # 102_Dev3002 + self._validate_bounds( + f"BathymetryCoverage feature instance group {instance.name}", + instance, + ) + + if ( + 
top_westBoundLongitude is not None + and top_eastBoundLongitude is not None + and top_northBoundLatitude is not None + and top_southBoundLatitude is not None + ): + self._log_check("102_Dev3004") + if westBoundLongitude < top_westBoundLongitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: westBoundLongitude={westBoundLongitude} < top_westBoundLongitude={top_westBoundLongitude}" + ) + if southBoundLatitude < top_southBoundLatitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: southBoundLatitude={southBoundLatitude} < top_southBoundLatitude={top_southBoundLatitude}" + ) + if eastBoundLongitude > top_eastBoundLongitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: eastBoundLongitude={eastBoundLongitude} > top_eastBoundLongitude={top_eastBoundLongitude}" + ) + if northBoundLatitude > top_northBoundLatitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: northBoundLatitude={northBoundLatitude} > top_northBoundLatitude={top_northBoundLatitude}" + ) + + else: + if ( + abs(westBoundLongitude) <= 180 + and abs(eastBoundLongitude) <= 180 + and abs(northBoundLatitude) <= 90 + and abs(southBoundLatitude) <= 90 + ): + self._error( + f"BathymetryCoverage feature instance group {instance.name}: westBoundLongitude, eastBoundLongitude, northBoundLatitude, southBoundLatitude are longitudes/latitudes whereas they should be projected coordinates, given the horizontalCRS is projected" + ) + + if gdal_available: + horizontalCRS_srs = osr.SpatialReference() + horizontalCRS_srs.SetAxisMappingStrategy( + osr.OAMS_TRADITIONAL_GIS_ORDER + ) + horizontalCRS_srs.ImportFromEPSG(int(horizontalCRS)) + + longlat_srs = osr.SpatialReference() + longlat_srs.SetAxisMappingStrategy( + osr.OAMS_TRADITIONAL_GIS_ORDER + ) + longlat_srs.ImportFromEPSG(4326) + ct = osr.CoordinateTransformation( + horizontalCRS_srs, longlat_srs + ) + westLon, southLat, eastLon, northLat = ct.TransformBounds( + westBoundLongitude, + southBoundLatitude, + eastBoundLongitude, + northBoundLatitude, + 21, + ) + + self._log_check("102_Dev3004") + crs_area_of_use = horizontalCRS_srs.GetAreaOfUse() + # Add a substantial epsilon as going a bit outside of the CRS area of use is usually fine + epsilon = 1 + if westLon + epsilon < crs_area_of_use.west_lon_degree: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: westLon={westLon} < crs_area_of_use.west_lon_degree={crs_area_of_use.west_lon_degree}" + ) + if southLat + epsilon < crs_area_of_use.south_lat_degree: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: southLat={southLat} < crs_area_of_use.south_lat_degree={crs_area_of_use.south_lat_degree}" + ) + if eastLon - epsilon > crs_area_of_use.east_lon_degree: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: eastLon={eastLon} > crs_area_of_use.east_lon_degree={crs_area_of_use.east_lon_degree}" + ) + if northLat - epsilon > crs_area_of_use.north_lat_degree: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: northLat={northLat} > crs_area_of_use.north_lat_degree={crs_area_of_use.north_lat_degree}" + ) + + if ( + top_westBoundLongitude is not None + and top_eastBoundLongitude is not None + and top_northBoundLatitude is not None + and top_southBoundLatitude is not None + ): + # Add an epsilon to take into account potential different ways of doing bounding box reprojection + epsilon = 0.01 + if westLon + epsilon < 
top_westBoundLongitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: westBoundLongitude={westLon} ({westBoundLongitude}) < top_westBoundLongitude={top_westBoundLongitude}" + ) + if southLat + epsilon < top_southBoundLatitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: southBoundLatitude={southLat} ({southBoundLatitude}) < top_southBoundLatitude={top_southBoundLatitude}" + ) + if eastLon - epsilon > top_eastBoundLongitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: eastBoundLongitude={eastLon} ({eastBoundLongitude}) > top_eastBoundLongitude={top_eastBoundLongitude}" + ) + if northLat - epsilon > top_northBoundLatitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: northBoundLatitude={northLat} ({northBoundLatitude}) > top_northBoundLatitude={top_northBoundLatitude}" + ) + + else: + self._warning( + "Test checking consistency of bounds in BathymetryCoverage feature instance group compared to top level attributes skipped due to GDAL not available" + ) + + self._log_check("102_Dev3003") + if eastBoundLongitude <= westBoundLongitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: eastBoundLongitude <= westBoundLongitude" + ) + if northBoundLatitude <= southBoundLatitude: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: northBoundLatitude <= southBoundLatitude" + ) + + if len(present) == 0 and "domainExtent.polygon" not in instance.keys(): + self._critical_error( + f"BathymetryCoverage feature instance group {instance.name}: dataset 'domainExtent.polygon' missing" + ) + elif "domainExtent.polygon" in instance.keys() and present: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: both dataset 'domainExtent.polygon' and westBoundLongitude, eastBoundLongitude, northBoundLatitude, southBoundLatitude attributes are present" + ) + + gridOriginLongitude = _get_float_attr_or_none(instance, "gridOriginLongitude") + gridOriginLatitude = _get_float_attr_or_none(instance, "gridOriginLatitude") + if gridOriginLongitude is not None and gridOriginLatitude is not None: + + if ( + westBoundLongitude is not None + and eastBoundLongitude is not None + and northBoundLatitude is not None + and southBoundLatitude is not None + ): + self._log_check("102_Dev3005") + + # gridOriginLongitude is encoded as a float64, whereas westBoundLongitude on a float32 + # hence add some tolerance so comparison is fair + if ( + gridOriginLongitude + 1e-6 * abs(gridOriginLongitude) + < westBoundLongitude + ): + self._error( + f"BathymetryCoverage feature instance group {instance.name}: gridOriginLongitude={gridOriginLongitude} < westBoundLongitude={westBoundLongitude}" + ) + if ( + gridOriginLongitude - 1e-6 * abs(gridOriginLongitude) + > eastBoundLongitude + ): + self._error( + f"BathymetryCoverage feature instance group {instance.name}: gridOriginLongitude={gridOriginLongitude} > eastBoundLongitude={eastBoundLongitude}" + ) + if ( + gridOriginLatitude + 1e-6 * abs(gridOriginLatitude) + < southBoundLatitude + ): + self._error( + f"BathymetryCoverage feature instance group {instance.name}: gridOriginLatitude={gridOriginLatitude} < southBoundLatitude={southBoundLatitude}" + ) + if ( + gridOriginLatitude - 1e-6 * abs(gridOriginLatitude) + > northBoundLatitude + ): + self._error( + f"BathymetryCoverage feature instance group {instance.name}: gridOriginLatitude={gridOriginLatitude} > 
northBoundLatitude={northBoundLatitude}" + ) + + if gdal_available and horizontalCRS: + horizontalCRS_srs = osr.SpatialReference() + horizontalCRS_srs.SetAxisMappingStrategy( + osr.OAMS_TRADITIONAL_GIS_ORDER + ) + horizontalCRS_srs.ImportFromEPSG(horizontalCRS) + + longlat_srs = osr.SpatialReference() + longlat_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) + longlat_srs.ImportFromEPSG(4326) + ct = osr.CoordinateTransformation(horizontalCRS_srs, longlat_srs) + origin_long, origin_lat, _ = ct.TransformPoint( + gridOriginLongitude, gridOriginLatitude, 0 + ) + + crs_area_of_use = horizontalCRS_srs.GetAreaOfUse() + # Add a substantial epsilon as going a bit outside of the CRS area of use is usually fine + epsilon = 1 + if origin_long + epsilon < crs_area_of_use.west_lon_degree: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: origin_long={origin_long} < crs_area_of_use.west_lon_degree={crs_area_of_use.west_lon_degree}" + ) + if origin_lat + epsilon < crs_area_of_use.south_lat_degree: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: origin_lat={origin_lat} < crs_area_of_use.south_lat_degree={crs_area_of_use.south_lat_degree}" + ) + if origin_long - epsilon > crs_area_of_use.east_lon_degree: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: origin_long={origin_long} > crs_area_of_use.east_lon_degree={crs_area_of_use.east_lon_degree}" + ) + if origin_lat - epsilon > crs_area_of_use.north_lat_degree: + self._error( + f"BathymetryCoverage feature instance group {instance.name}: origin_lat={origin_lat} > crs_area_of_use.north_lat_degree={crs_area_of_use.north_lat_degree}" + ) + + self._log_check("102_Dev3006") + gridSpacingLongitudinal = _get_float_attr_or_none( + instance, "gridSpacingLongitudinal" + ) + if gridSpacingLongitudinal is not None and gridSpacingLongitudinal <= 0: + self._critical_error( + f"BathymetryCoverage feature instance group {instance.name}: Grid spacing attribute in instance group has value out of range: gridSpacingLongitudinal <= 0" + ) + + self._log_check("102_Dev3006") + gridSpacingLatitudinal = _get_float_attr_or_none( + instance, "gridSpacingLatitudinal" + ) + if gridSpacingLatitudinal is not None and gridSpacingLatitudinal <= 0: + self._critical_error( + f"BathymetryCoverage feature instance group {instance.name}: Grid spacing attribute in instance group has value out of range: gridSpacingLatitudinal <= 0" + ) + + self._log_check("102_Dev3007") + if ( + gridSpacingLongitudinal is not None + and eastBoundLongitude is not None + and westBoundLongitude is not None + and gridSpacingLongitudinal > (eastBoundLongitude - westBoundLongitude) + ): + self._warning( + f"BathymetryCoverage feature instance group {instance.name}: Value of gridSpacingLongitudinal or gridSpacingLatitudinal in instance group too high: gridSpacingLongitudinal > (eastBoundLongitude - westBoundLongitude)" + ) + + self._log_check("102_Dev3007") + if ( + gridSpacingLatitudinal is not None + and southBoundLatitude is not None + and northBoundLatitude is not None + and gridSpacingLatitudinal > (northBoundLatitude - southBoundLatitude) + ): + self._warning( + f"BathymetryCoverage feature instance group {instance.name}: Value of gridSpacingLongitudinal or gridSpacingLatitudinal in instance group too high: gridSpacingLatitudinal > (northBoundLatitude - southBoundLatitude)" + ) + + self._log_check("102_Dev3010") + numPointsLongitudinal = _get_int_attr_or_none(instance, "numPointsLongitudinal") + if 
numPointsLongitudinal is not None and numPointsLongitudinal < 1:
+            self._critical_error(
+                f"BathymetryCoverage feature instance group {instance.name}: Grid must be at least 1x1: numPointsLongitudinal < 1"
+            )
+
+        self._log_check("102_Dev3010")
+        numPointsLatitudinal = _get_int_attr_or_none(instance, "numPointsLatitudinal")
+        if numPointsLatitudinal is not None and numPointsLatitudinal < 1:
+            self._critical_error(
+                f"BathymetryCoverage feature instance group {instance.name}: Grid must be at least 1x1: numPointsLatitudinal < 1"
+            )
+
+        self._log_check("102_Dev3009")
+        if (
+            gridSpacingLongitudinal is not None
+            and eastBoundLongitude is not None
+            and westBoundLongitude is not None
+            and numPointsLongitudinal is not None
+            and numPointsLongitudinal > 1
+            and gridSpacingLongitudinal * (1 - 1e-6)
+            > (eastBoundLongitude - westBoundLongitude) / (numPointsLongitudinal - 1)
+        ):
+            self._warning(
+                f"BathymetryCoverage feature instance group {instance.name}: Grid dimensions are incompatible with instance bounding box: gridSpacingLongitudinal={gridSpacingLongitudinal} > (eastBoundLongitude - westBoundLongitude) / (numPointsLongitudinal - 1)={(eastBoundLongitude - westBoundLongitude) / (numPointsLongitudinal - 1)}"
+            )
+
+        self._log_check("102_Dev3009")
+        if (
+            gridSpacingLatitudinal is not None
+            and southBoundLatitude is not None
+            and northBoundLatitude is not None
+            and numPointsLatitudinal is not None
+            and numPointsLatitudinal > 1
+            and gridSpacingLatitudinal * (1 - 1e-6)
+            > (northBoundLatitude - southBoundLatitude) / (numPointsLatitudinal - 1)
+        ):
+            self._warning(
+                f"BathymetryCoverage feature instance group {instance.name}: Grid dimensions are incompatible with instance bounding box: gridSpacingLatitudinal={gridSpacingLatitudinal} > (northBoundLatitude - southBoundLatitude) / (numPointsLatitudinal - 1)={(northBoundLatitude - southBoundLatitude) / (numPointsLatitudinal - 1)}"
+            )
+
+        self._log_check("102_Dev3012")
+        # gridOriginLongitude is encoded as a float64, whereas westBoundLongitude is a float32,
+        # hence add some tolerance so the comparison is fair
+        if (
+            westBoundLongitude is not None
+            and gridOriginLongitude is not None
+            and abs(westBoundLongitude - gridOriginLongitude)
+            > 1e-6 * abs(westBoundLongitude)
+        ):
+            self._warning(
+                f"BathymetryCoverage feature instance group {instance.name}: Grid origin does not coincide with instance bounding box: westBoundLongitude={westBoundLongitude} != gridOriginLongitude={_cast_to_float32(gridOriginLongitude)}"
+            )
+
+        self._log_check("102_Dev3012")
+        if (
+            southBoundLatitude is not None
+            and gridOriginLatitude is not None
+            and abs(southBoundLatitude - gridOriginLatitude)
+            > 1e-6 * abs(southBoundLatitude)
+        ):
+            self._warning(
+                f"BathymetryCoverage feature instance group {instance.name}: Grid origin does not coincide with instance bounding box: southBoundLatitude={southBoundLatitude} != gridOriginLatitude={_cast_to_float32(gridOriginLatitude)}"
+            )
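+        # Worked example of the grid/bounds consistency rules above
+        # (illustrative numbers, not from the spec): with
+        #   westBoundLongitude = 0, eastBoundLongitude = 1,
+        #   numPointsLongitudinal = 101
+        # a regular grid implies
+        #   gridSpacingLongitudinal = (1 - 0) / (101 - 1) = 0.01
+        # and gridOriginLongitude is expected to equal westBoundLongitude.
+
+        self._log_check("102_Dev3013")
+        if "startSequence" in instance.attrs:
+            startSequence = instance.attrs["startSequence"]
+            if isinstance(startSequence, str):
+                startSequence = startSequence.split(",")
+                if (
+                    len(startSequence) != 2
+                    or _get_int_value_or_none(startSequence[0]) is None
+                    or _get_int_value_or_none(startSequence[1]) is None
+                ):
+                    self._warning(
+                        f"BathymetryCoverage feature instance group {instance.name}: invalid content for startSequence in instance"
+                    )
+                else:
+                    self._log_check("102_Dev3014")
+                    if startSequence != ["0", "0"]:
+                        # other tests are probably not compatible with a non (0,0) startSequence
+                        self._warning(
+                            f"BathymetryCoverage feature instance group {instance.name}: Values in startSequence in instance group are incompatible with the scan direction in sequencingRule"
+                        )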
+        self._log_check("102_Dev3015")
+        # Attributes already checked above
+        countGroups = 0
+        for name, item in instance.items():
+            if isinstance(item, h5py.Dataset) and name != "domainExtent.polygon":
+                # 102_Dev2012
+                self._warning(
+                    f"BathymetryCoverage feature instance group {instance.name} has unexpected dataset '{name}'"
+                )
+
+            elif isinstance(item, h5py.Group):
+                countGroups += 1
+                if name != "Group_001":
+                    self._warning(
+                        f"BathymetryCoverage feature instance group {instance.name} has unexpected group '{name}'"
+                    )
+
+        self._log_check("102_Dev3016")
+        numGRP = _get_int_attr_or_none(instance, "numGRP")
+        if numGRP is not None:
+            if numGRP != countGroups:
+                self._critical_error(
+                    f"BathymetryCoverage feature instance group {instance.name}: Count of values groups does not match attribute numGRP in instance group"
+                )
+
+        # Check that QualityOfBathymetryCoverage.QualityOfBathymetryCoverage.01
+        # has same attributes as BathymetryCoverage.BathymetryCoverage.01
+        self._log_check("102_Dev3017")
+        if "QualityOfBathymetryCoverage" in f.keys():
+            QualityOfBathymetryCoverage = f["QualityOfBathymetryCoverage"]
+            if isinstance(QualityOfBathymetryCoverage, h5py.Group):
+                if (
+                    "QualityOfBathymetryCoverage.01"
+                    in QualityOfBathymetryCoverage.keys()
+                ):
+                    QualityOfBathymetryCoverage01 = QualityOfBathymetryCoverage[
+                        "QualityOfBathymetryCoverage.01"
+                    ]
+                    if isinstance(QualityOfBathymetryCoverage01, h5py.Group):
+                        set1 = set([name for name in instance.attrs.keys()])
+                        set2 = set(
+                            [
+                                name
+                                for name in QualityOfBathymetryCoverage01.attrs.keys()
+                            ]
+                        )
+                        if set1 != set2:
+                            self._error(
+                                f"/QualityOfBathymetryCoverage/QualityOfBathymetryCoverage.01 does not have the same set of attributes ({set2}) as /BathymetryCoverage/BathymetryCoverage.01 ({set1})"
+                            )
+
+                        for name in set1:
+                            attr1 = instance.attrs[name]
+                            if name in set2:
+                                attr2 = QualityOfBathymetryCoverage01.attrs[name]
+                                if attr1 != attr2:
+                                    self._error(
+                                        f'/QualityOfBathymetryCoverage/QualityOfBathymetryCoverage.01["{name}"] = {attr2} does not have the same value as /BathymetryCoverage/BathymetryCoverage.01["{name}"] = {attr1}'
+                                    )
+
+        if "Group_001" not in instance.keys() or not isinstance(
+            instance["Group_001"], h5py.Group
+        ):
+            self._critical_error(
+                f"BathymetryCoverage feature instance group {instance.name}: no Group_001 subgroup"
+            )
+        else:
+            self._validate_Group_001(
+                f, instance["Group_001"], numPointsLongitudinal, numPointsLatitudinal
+            )
+
+    def _validate_Group_001(
+        self, f, Group_001, numPointsLongitudinal, numPointsLatitudinal
+    ):
+
+        # Cf Table 10-7 - Attributes of values group
+        attr_list = [
+            AttributeDefinition(
+                name="minimumDepth",
+                required=True,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="maximumDepth",
+                required=True,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="minimumUncertainty",
+                required=True,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="maximumUncertainty",
+                required=True,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="timePoint",
+                required=True,
+                type="string",
+                fixed_value="00010101T000000Z",
+            ),
+        ]
+
+        self._log_check("102_Dev5001")
+        self._check_attributes(
+            "Group_001",
+            Group_001,
+            attr_list,
+        )
+
+        self._log_check("102_Dev5002")
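+        # The [-14, 11050] bounds checked below mirror the lower/upper values
+        # declared for "depth" in /Group_F/BathymetryCoverage (see the
+        # expected_values table in _validate_group_f_BathymetryCoverage).
+        minimumDepth = _get_float_attr_or_none(Group_001, "minimumDepth")
+        if minimumDepth is not None and not 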
( + minimumDepth >= -14 and minimumDepth <= 11050 + ): + self._warning( + f"Group_001: minimumDepth={minimumDepth} should be in [-14, 11050] range" + ) + + maximumDepth = _get_float_attr_or_none(Group_001, "maximumDepth") + if maximumDepth is not None and not ( + maximumDepth >= -14 and maximumDepth <= 11050 + ): + self._warning( + f"Group_001: maximumDepth={maximumDepth} should be in [-14, 11050] range" + ) + + if ( + minimumDepth is not None + and maximumDepth is not None + and minimumDepth > maximumDepth + ): + self._warning( + f"Group_001: minimumDepth={minimumDepth} > maximumDepth={maximumDepth}" + ) + + minimumUncertainty = _get_float_attr_or_none(Group_001, "minimumUncertainty") + if minimumUncertainty is not None and not ( + minimumUncertainty >= 0 or minimumUncertainty == 1000000 + ): + self._warning( + f"Group_001: minimumUncertainty={minimumUncertainty} should be in [0, inf) range or equal to 1000000" + ) + + maximumUncertainty = _get_float_attr_or_none(Group_001, "maximumUncertainty") + if maximumUncertainty is not None and not ( + maximumUncertainty >= 0 or maximumUncertainty == 1000000 + ): + self._warning( + f"Group_001: maximumUncertainty={maximumUncertainty} should be in [0, inf) range or equal to 1000000" + ) + + if ( + minimumUncertainty is not None + and maximumUncertainty is not None + and minimumUncertainty != 1000000 + and maximumUncertainty != 1000000 + and minimumUncertainty > maximumUncertainty + ): + self._warning( + f"Group_001: minimumUncertainty={minimumUncertainty} > maximumUncertainty={maximumUncertainty}" + ) + + self._log_check("102_Dev5003") + if "values" not in Group_001.keys() or not isinstance( + Group_001["values"], h5py.Dataset + ): + self._critical_error( + "/BathymetryCoverage/BathymetryCoverage.01/Group_001/values dataset missing" + ) + else: + self._validate_values( + f, + Group_001["values"], + numPointsLongitudinal, + numPointsLatitudinal, + minimumDepth, + maximumDepth, + minimumUncertainty, + maximumUncertainty, + ) + + def _validate_values( + self, + f, + values, + numPointsLongitudinal, + numPointsLatitudinal, + minimumDepth, + maximumDepth, + minimumUncertainty, + maximumUncertainty, + ): + + self._log_check("102_Dev5004") + if len(values.shape) != 2: + self._critical_error( + "/BathymetryCoverage/BathymetryCoverage.01/Group_001/values dataset is not 2-dimensional" + ) + return + + if ( + numPointsLatitudinal + and numPointsLongitudinal + and values.shape != (numPointsLatitudinal, numPointsLongitudinal) + ): + self._critical_error( + f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values dataset shape is {values.shape} instead of {(numPointsLatitudinal, numPointsLongitudinal)}" + ) + return + + self._log_check("102_Dev5005") + values_type = values.id.get_type() + if not isinstance(values_type, h5py.h5t.TypeCompoundID): + self._critical_error( + "/BathymetryCoverage/BathymetryCoverage.01/Group_001/values type is not compound" + ) + return + + Group_F_BathymetryCoverage = None + if "Group_F" in f: + Group_F = f["Group_F"] + if isinstance(Group_F, h5py.Group) and "BathymetryCoverage" in Group_F: + Group_F_BathymetryCoverage = Group_F["BathymetryCoverage"] + if ( + isinstance(Group_F_BathymetryCoverage, h5py.Dataset) + and len(Group_F_BathymetryCoverage.shape) == 1 + ): + num_components = Group_F_BathymetryCoverage.shape[0] + if values_type.get_nmembers() != num_components: + self._critical_error( + f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values type has {values_type.get_nmembers()} members whereas {num_components} are 
expected from /Group_F/BathymetryCoverage"
+                        )
+                        return
+            else:
+                Group_F_BathymetryCoverage = None
+
+        # Check consistency between "values" and "/Group_F/BathymetryCoverage"
+        found_depth = False
+        found_uncertainty = False
+        for member_idx in range(values_type.get_nmembers()):
+            subtype = values_type.get_member_type(member_idx)
+            component_name = values_type.get_member_name(member_idx)
+            if Group_F_BathymetryCoverage:
+                expected = Group_F_BathymetryCoverage[member_idx][0]
+                if component_name != expected:
+                    self._critical_error(
+                        f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values member {member_idx} name = {component_name} is not Group_F_BathymetryCoverage[{member_idx}]['name'] = {expected}"
+                    )
+            if not self._is_float32(subtype):
+                self._critical_error(
+                    f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values member {component_name} is not a float32"
+                )
+
+            if component_name == b"depth":
+                found_depth = True
+            elif component_name == b"uncertainty":
+                found_uncertainty = True
+
+        self._log_check("102_Dev5006")
+        if found_depth:
+            masked_depth = np.ma.masked_equal(values[:]["depth"], 1000000)
+
+            actualMinDepth = masked_depth.min()
+            if minimumDepth and actualMinDepth < minimumDepth:
+                self._critical_error(
+                    f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values: minimum depth is {actualMinDepth}, whereas minimumDepth attribute = {minimumDepth}"
+                )
+
+            actualMaxDepth = masked_depth.max()
+            if maximumDepth and actualMaxDepth > maximumDepth:
+                self._critical_error(
+                    f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values: maximum depth is {actualMaxDepth}, whereas maximumDepth attribute = {maximumDepth}"
+                )
+
+            self._log_check("102_Dev5009")
+            # check if the precision of any depth value is finer than 0.01 metre
+            depth_100 = values[:]["depth"] * 100
+            depth_100_round = np.round(depth_100)
+            max_prec_cm = np.max(np.abs(depth_100 - depth_100_round))
+            if max_prec_cm > 0.001:  # tolerate some epsilon
+                self._warning(
+                    f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values: maximum precision of depth is {max_prec_cm} cm, whereas it should not be better than centimetric"
+                )
+
+        if found_uncertainty:
+            masked_uncertainty = np.ma.masked_equal(values[:]["uncertainty"], 1000000)
+
+            actualMinUncertainty = masked_uncertainty.min()
+            if minimumUncertainty and actualMinUncertainty < minimumUncertainty:
+                self._critical_error(
+                    f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values: minimum uncertainty is {actualMinUncertainty}, whereas minimumUncertainty attribute = {minimumUncertainty}"
+                )
+
+            actualMaxUncertainty = masked_uncertainty.max()
+            if maximumUncertainty and actualMaxUncertainty > maximumUncertainty:
+                self._critical_error(
+                    f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values: maximum uncertainty is {actualMaxUncertainty}, whereas maximumUncertainty attribute = {maximumUncertainty}"
+                )
+
+            self._log_check("102_Dev5009")
+            # check if the precision of any uncertainty value is finer than 0.01 metre
+            uncertainty_100 = values[:]["uncertainty"] * 100
+            uncertainty_100_round = np.round(uncertainty_100)
+            max_prec_cm = np.max(np.abs(uncertainty_100 - uncertainty_100_round))
+            if max_prec_cm > 0.001:  # tolerate some epsilon
+                self._warning(
+                    f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values: maximum precision of uncertainty is {max_prec_cm} cm, whereas it should not be better than centimetric"
+                )
+
+    def _validate_QualityOfBathymetryCoverage(self, f):
+
+        QualityOfBathymetryCoverage = f["QualityOfBathymetryCoverage"]
+        if not 
isinstance(QualityOfBathymetryCoverage, h5py.Group):
+            self._critical_error("/QualityOfBathymetryCoverage is not a group")
+            return
+
+        self._validate_axisNames(f, QualityOfBathymetryCoverage)
+
+        self._validate_featureAttributeTable(QualityOfBathymetryCoverage)
+
+        subgroups = set(
+            [
+                name
+                for name, item in QualityOfBathymetryCoverage.items()
+                if isinstance(item, h5py.Group)
+            ]
+        )
+        self._log_check("102_Dev2009")
+        if len(subgroups) == 0:
+            self._critical_error("/QualityOfBathymetryCoverage has no groups")
+        else:
+            self._log_check("102_Dev2010")
+            for i in range(1, len(subgroups) + 1):
+                expected_name = "QualityOfBathymetryCoverage.%02d" % i
+                if expected_name not in subgroups:
+                    self._critical_error(
+                        f"/QualityOfBathymetryCoverage/{expected_name} group does not exist"
+                    )
+
+            for name in subgroups:
+                if not name.startswith("QualityOfBathymetryCoverage."):
+                    self._warning(
+                        f"/QualityOfBathymetryCoverage/{name} is an unexpected group"
+                    )
+
+        numInstances = _get_int_attr_or_none(
+            QualityOfBathymetryCoverage, "numInstances"
+        )
+
+        self._log_check("102_Dev2010")
+        if numInstances and len(subgroups) != numInstances:
+            self._warning(
+                f"/QualityOfBathymetryCoverage has {len(subgroups)} groups whereas numInstances={numInstances}"
+            )
+
+        # Attributes and groups already checked above
+        self._log_check("102_Dev2012")
+        for name, item in QualityOfBathymetryCoverage.items():
+            if isinstance(item, h5py.Dataset) and name not in (
+                "axisNames",
+                "featureAttributeTable",
+            ):
+                self._warning(
+                    f"/QualityOfBathymetryCoverage has unexpected dataset {name}"
+                )
+
+        if "QualityOfBathymetryCoverage.01" in subgroups and isinstance(
+            QualityOfBathymetryCoverage["QualityOfBathymetryCoverage.01"], h5py.Group
+        ):
+            QualityOfBathymetryCoverage_01 = QualityOfBathymetryCoverage[
+                "QualityOfBathymetryCoverage.01"
+            ]
+            self._validate_QualityOfBathymetryCoverage_01(
+                QualityOfBathymetryCoverage, QualityOfBathymetryCoverage_01
+            )
+
+    def _validate_QualityOfBathymetryCoverage_01(
+        self, QualityOfBathymetryCoverage, QualityOfBathymetryCoverage_01
+    ):
+        self._log_check("102_Dev5010")
+        subgroups = set(
+            [
+                name
+                for name, item in QualityOfBathymetryCoverage_01.items()
+                if isinstance(item, h5py.Group)
+            ]
+        )
+        if subgroups != set(["Group_001"]):
+            self._warning(
+                f"/QualityOfBathymetryCoverage/QualityOfBathymetryCoverage.01 has unexpected group list: {subgroups}"
+            )
+
+        datasets = set(
+            [
+                name
+                for name, item in QualityOfBathymetryCoverage_01.items()
+                if isinstance(item, h5py.Dataset)
+            ]
+        )
+        if datasets:
+            self._warning(
+                f"/QualityOfBathymetryCoverage/QualityOfBathymetryCoverage.01 has unexpected dataset list: {datasets}"
+            )
+
+        if "Group_001" in subgroups and isinstance(
+            QualityOfBathymetryCoverage_01["Group_001"], h5py.Group
+        ):
+
+            numPointsLongitudinal = _get_int_attr_or_none(
+                QualityOfBathymetryCoverage_01, "numPointsLongitudinal"
+            )
+            numPointsLatitudinal = _get_int_attr_or_none(
+                QualityOfBathymetryCoverage_01, "numPointsLatitudinal"
+            )
+
+            Group_001 = QualityOfBathymetryCoverage_01["Group_001"]
+            self._validate_QualityOfBathymetryCoverage_01_Group_001(
+                QualityOfBathymetryCoverage,
+                Group_001,
+                numPointsLongitudinal,
+                numPointsLatitudinal,
+            )
+
+    def _validate_QualityOfBathymetryCoverage_01_Group_001(
+        self,
+        QualityOfBathymetryCoverage,
+        Group_001,
+        numPointsLongitudinal,
+        numPointsLatitudinal,
+    ):
+        if "values" in 
Group_001 and isinstance(Group_001["values"], h5py.Dataset):
+            values = Group_001["values"]
+            self._validate_QualityOfBathymetryCoverage_01_Group_001_values(
+                QualityOfBathymetryCoverage,
+                values,
+                numPointsLongitudinal,
+                numPointsLatitudinal,
+            )
+        else:
+            self._critical_error(
+                "Missing /QualityOfBathymetryCoverage/QualityOfBathymetryCoverage.01/Group_001/values dataset"
+            )
+
+        self._log_check("102_Dev5010")
+        subgroups = set(
+            [name for name, item in Group_001.items() if isinstance(item, h5py.Group)]
+        )
+        if subgroups:
+            self._warning(
+                f"/QualityOfBathymetryCoverage/QualityOfBathymetryCoverage.01/Group_001 has unexpected group list: {subgroups}"
+            )
+
+        datasets = set(
+            [name for name, item in Group_001.items() if isinstance(item, h5py.Dataset)]
+        )
+        if datasets != set(["values"]):
+            self._warning(
+                f"/QualityOfBathymetryCoverage/QualityOfBathymetryCoverage.01/Group_001 has unexpected dataset list: {datasets}"
+            )
+
+    def _validate_QualityOfBathymetryCoverage_01_Group_001_values(
+        self,
+        QualityOfBathymetryCoverage,
+        values,
+        numPointsLongitudinal,
+        numPointsLatitudinal,
+    ):
+
+        self._log_check("102_Dev5007")
+        if len(values.shape) != 2:
+            self._critical_error(
+                "/QualityOfBathymetryCoverage/QualityOfBathymetryCoverage.01/Group_001/values dataset is not 2-dimensional"
+            )
+            return
+
+        if (
+            numPointsLatitudinal
+            and numPointsLongitudinal
+            and values.shape != (numPointsLatitudinal, numPointsLongitudinal)
+        ):
+            self._critical_error(
+                f"/QualityOfBathymetryCoverage/QualityOfBathymetryCoverage.01/Group_001/values dataset shape is {values.shape} instead of {(numPointsLatitudinal, numPointsLongitudinal)}"
+            )
+            return
+
+        self._log_check("102_Dev5007")
+        values_type = values.id.get_type()
+        if not self._is_uint32(values_type):
+            self._critical_error(
+                "/BathymetryCoverage/BathymetryCoverage.01/Group_001/values type is not uint32"
+            )
+            if (
+                isinstance(values_type, h5py.h5t.TypeCompoundID)
+                and values_type.get_nmembers() == 1
+                and self._is_uint32(values_type.get_member_type(0))
+            ):
+                # Tolerance for dataset 102DE00CA22_UNC_MD.H5 to proceed to further checks
+                values = values[:][values_type.get_member_name(0).decode("utf-8")]
+            else:
+                return
+
+        self._log_check("102_Dev5008")
+        if "featureAttributeTable" in QualityOfBathymetryCoverage and isinstance(
+            QualityOfBathymetryCoverage["featureAttributeTable"], h5py.Dataset
+        ):
+            fat = QualityOfBathymetryCoverage["featureAttributeTable"]
+            fat_type = fat.id.get_type()
+            if len(fat.shape) == 1 and isinstance(fat_type, h5py.h5t.TypeCompoundID):
+                try:
+                    idx = fat_type.get_member_index(b"id")
+                except Exception:
+                    idx = -1
+                if idx >= 0:
+                    set_values = set(np.unique(values))
+                    set_fat_values = set(np.unique(fat[:]["id"]))
+                    for v in set_values:
+                        if v != 0 and v not in set_fat_values:
+                            self._error(
+                                f"/BathymetryCoverage/BathymetryCoverage.01/Group_001/values contain value {v}, which is not a valid 'id' of the featureAttributeTable"
+                            )
+
+    def _validate_featureAttributeTable(self, QualityOfBathymetryCoverage):
+        self._log_check("102_Dev2005")
+        if "featureAttributeTable" not in QualityOfBathymetryCoverage:
+            self._error(
+                "/QualityOfBathymetryCoverage/featureAttributeTable dataset does not exist"
+            )
+        elif not isinstance(
+            QualityOfBathymetryCoverage["featureAttributeTable"],
+            h5py.Dataset,
+        ):
+            self._error(
+                "/QualityOfBathymetryCoverage/featureAttributeTable is not a dataset"
+            )
+        else:
+            self._log_check("102_Dev2006")
+            featureAttributeTable = QualityOfBathymetryCoverage["featureAttributeTable"]
+
+            # Cf 
Table 10-8 - Elements of featureAttributeTable compound datatype + if len(featureAttributeTable.shape) != 1: + self._error( + "/QualityOfBathymetryCoverage/featureAttributeTable is not a one-dimensional dataset" + ) + return + + type = featureAttributeTable.id.get_type() + if not isinstance(type, h5py.h5t.TypeCompoundID): + self._error( + "/QualityOfBathymetryCoverage/featureAttributeTable type is not compound" + ) + return + + try: + idx = type.get_member_index(b"id") + except Exception: + idx = -1 + if idx < 0: + self._error( + "/QualityOfBathymetryCoverage/featureAttributeTable compound type does not contain an 'id' member" + ) + return + h5_type = type.get_member_type(idx) + if not ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_NONE + and h5_type.get_size() == 4 + ): + self._error( + "/QualityOfBathymetryCoverage/featureAttributeTable['id'] type is not uint32" + ) + return + + MemberDefinition = namedtuple( + "MemberDefinition", ["name", "type", "allowed_values"] + ) + + allowed_members = [ + MemberDefinition("dataAssessment", "uint8", (1, 2, 3)), + MemberDefinition( + "featuresDetected.leastDepthOfDetectedFeaturesMeasured", + "uint8", + (0, 1), + ), + MemberDefinition( + "featuresDetected.significantFeaturesDetected", "uint8", (0, 1) + ), + MemberDefinition( + "featuresDetected.sizeOfFeaturesDetected", "float32", None + ), + MemberDefinition("featureSizeVar", "float32", None), + MemberDefinition("fullSeafloorCoverageAchieved", "uint8", (0, 1)), + MemberDefinition("bathyCoverage", "uint8", (0, 1)), + MemberDefinition( + "zoneOfConfidence.horizontalPositionUncertainty.uncertaintyFixed", + "float32", + None, + ), + MemberDefinition( + "zoneOfConfidence.horizontalPositionUncertainty.uncertaintyVariableFactor", + "float32", + None, + ), + MemberDefinition("surveyDateRange.dateStart", "date", None), + MemberDefinition("surveyDateRange.dateEnd", "date", None), + MemberDefinition("sourceSurveyID", "string", None), + MemberDefinition("surveyAuthority", "string", None), + MemberDefinition( + "typeOfBathymetricEstimationUncertainty", "enumeration", None + ), + ] + + allowed_members_dict = {t.name: t for t in allowed_members} + + for idx in range(type.get_nmembers()): + name = type.get_member_name(idx).decode("utf-8") + if name == "id": + continue + if name not in allowed_members_dict: + self._error( + f"/QualityOfBathymetryCoverage/featureAttributeTable['{name}'] is not an allowed member" + ) + continue + h5_type = type.get_member_type(idx) + expected_type = allowed_members_dict[name].type + if expected_type == "uint8": + if not self._is_uint8(h5_type): + self._error( + f"/QualityOfBathymetryCoverage/featureAttributeTable['{name}'] is not of type uint8, but {h5_type}" + ) + elif expected_type == "float32": + if not self._is_float32(h5_type): + self._error( + f"/QualityOfBathymetryCoverage/featureAttributeTable['{name}'] is not of type float32, but {h5_type}" + ) + elif expected_type == "date": + if not self._is_string(h5_type): + self._error( + f"/QualityOfBathymetryCoverage/featureAttributeTable['{name}'] is not of type date, but {h5_type}" + ) + elif expected_type == "string": + if not self._is_string(h5_type): + self._error( + f"/QualityOfBathymetryCoverage/featureAttributeTable['{name}'] is not of type string, but {h5_type}" + ) + elif expected_type == "enumeration": + if not self._is_enumeration(h5_type): + self._error( + f"/QualityOfBathymetryCoverage/featureAttributeTable['{name}'] is not of type enumeration, but {h5_type}" + ) + else: + raise 
Exception(
+                            f"Programming error: unexpected type {expected_type}"
+                        )
+
+    def _validate_axisNames(self, f, group):
+
+        groupName = group.name
+
+        self._log_check("102_Dev2003")
+        if "axisNames" not in group.keys():
+            self._error(f"{groupName}/axisNames dataset does not exist")
+        elif not isinstance(group["axisNames"], h5py.Dataset):
+            self._error(f"{groupName}/axisNames is not a dataset")
+        else:
+            axisNames = group["axisNames"]
+            if axisNames.shape != (2,):
+                self._error(
+                    f"{groupName}/axisNames dataset is not a one-dimensional array of length 2"
+                )
+            else:
+                type = axisNames.id.get_type()
+                if not isinstance(type, h5py.h5t.TypeStringID):
+                    self._error(f"{groupName}/axisNames type is not a string")
+                else:
+                    self._log_check("102_Dev2004")
+                    values = [v.decode("utf-8") for v in axisNames[:]]
+                    if values not in (
+                        ["Easting", "Northing"],
+                        ["Latitude", "Longitude"],
+                    ):
+                        self._error(
+                            f'{groupName}/axisNames must conform to CRS. Expected ["Easting", "Northing"] or ["Latitude", "Longitude"]'
+                        )
+                    elif "horizontalCRS" in f.attrs:
+                        horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS")
+                        if horizontalCRS is not None:
+                            if horizontalCRS == 4326:
+                                if values != ["Latitude", "Longitude"]:
+                                    self._error(
+                                        f'{groupName}/axisNames must conform to CRS. Expected ["Latitude", "Longitude"]'
+                                    )
+                            else:
+                                if values != ["Easting", "Northing"]:
+                                    self._error(
+                                        f'{groupName}/axisNames must conform to CRS. Expected ["Easting", "Northing"]'
+                                    )
+
+
+# Public function
+def check(
+    filename,
+    abort_at_first_error=False,
+):
+    """Check specified filename and return a tuple (errors, warnings, checks_done)"""
+    checker = S102Checker(
+        filename,
+        abort_at_first_error=abort_at_first_error,
+    )
+    checker.check()
+    return checker.errors, checker.warnings, checker.checks_done
+
+
+def usage():
+    print("Usage: validate_s102.py [-q] <filename>")
+    print("")
+    print("Validates a S102 file against the Edition 3.0.0 specification.")
+    print("")
+    print("-q: quiet mode. 
Only exit code indicates success (0) or error (1)") + + +def main(argv=sys.argv): + filename = None + quiet = False + + for arg in argv[1:]: + if arg == "-q": + quiet = True + elif arg == "-h": + usage() + return 0 + elif arg[0] == "-": + print(f"Invalid option: {arg}\n") + return 2 + else: + filename = arg + + if filename is None: + print("Filename missing\n") + return 2 + + errors, warnings, checks_done = check( + filename, + abort_at_first_error=False, + ) + + if not quiet: + print(f"Checks done: {checks_done}") + + if warnings: + print("") + print("Warnings:") + for msg in warnings: + print(f"Warning: {msg}") + + if errors: + print("") + print("Errors:") + for criticity, msg in errors: + print(f"{criticity}: {msg}") + print("") + print("Errors found: validation failed!") + else: + print("") + print("No errors found: validation succeeded.") + + return 1 if errors else 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) From 6dde5ea9959bf04a5bbf4290e075860cad7cfcec Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Fri, 24 Oct 2025 16:33:14 +0200 Subject: [PATCH 09/20] S102: add write (CreateCopy()) support for S102 v3.0 --- .../expected_gdalinfo_formats.txt | 2 +- ...indows_conda_expected_gdalinfo_formats.txt | 2 +- autotest/gdrivers/s102.py | 949 ++++++++++++- doc/source/drivers/raster/s102.rst | 189 ++- frmts/hdf5/bagdataset.cpp | 20 - frmts/hdf5/gh5_convenience.cpp | 163 ++- frmts/hdf5/gh5_convenience.h | 138 ++ frmts/hdf5/hdf5drivercore.cpp | 27 + frmts/hdf5/s100.cpp | 916 ++++++++++++- frmts/hdf5/s100.h | 101 ++ frmts/hdf5/s102dataset.cpp | 1221 ++++++++++++++++- scripts/typos_allowlist.txt | 2 + .../osgeo_utils/samples/validate_s102.py | 30 +- 13 files changed, 3652 insertions(+), 108 deletions(-) diff --git a/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt b/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt index a83e6973b4c2..465379ccf7c4 100644 --- a/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt +++ b/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt @@ -102,7 +102,7 @@ Supported Formats: (ro:read-only, rw:read-write, +:write from scratch, u:update, USGSDEM -raster- (rov): USGS Optional ASCII DEM (and CDED) (*.dem) GXF -raster- (rov): GeoSoft Grid Exchange Format (*.gxf) BAG -raster,multidimensional raster,vector- (rw+v): Bathymetry Attributed Grid (*.bag) - S102 -raster,multidimensional raster- (rovs): S-102 Bathymetric Surface Product (*.h5) + S102 -raster,multidimensional raster- (rwvs): S-102 Bathymetric Surface Product (*.h5) S104 -raster,multidimensional raster- (rovs): S-104 Water Level Information for Surface Navigation Product (*.h5) S111 -raster,multidimensional raster- (rovs): S-111 Surface Currents Product (*.h5) HDF5 -raster,multidimensional raster- (rovs): Hierarchical Data Format Release 5 (*.h5, *.hdf5) diff --git a/.github/workflows/windows_conda_expected_gdalinfo_formats.txt b/.github/workflows/windows_conda_expected_gdalinfo_formats.txt index 136d309b99fe..2be4a3b9cd0b 100644 --- a/.github/workflows/windows_conda_expected_gdalinfo_formats.txt +++ b/.github/workflows/windows_conda_expected_gdalinfo_formats.txt @@ -104,7 +104,7 @@ Supported Formats: (ro:read-only, rw:read-write, +:write from scratch, u:update, GXF -raster- (rov): GeoSoft Grid Exchange Format (*.gxf) KEA -raster- (rw+uv): KEA Image Format (.kea) (*.kea) BAG -raster,multidimensional raster,vector- (rw+v): Bathymetry Attributed Grid (*.bag) - S102 -raster,multidimensional raster- (rovs): S-102 Bathymetric Surface Product (*.h5) + S102 
-raster,multidimensional raster- (rwvs): S-102 Bathymetric Surface Product (*.h5) S104 -raster,multidimensional raster- (rovs): S-104 Water Level Information for Surface Navigation Product (*.h5) S111 -raster,multidimensional raster- (rovs): S-111 Surface Currents Product (*.h5) HDF5 -raster,multidimensional raster- (rovs): Hierarchical Data Format Release 5 (*.h5, *.hdf5) diff --git a/autotest/gdrivers/s102.py b/autotest/gdrivers/s102.py index 140a46c16379..1471f53b86e9 100755 --- a/autotest/gdrivers/s102.py +++ b/autotest/gdrivers/s102.py @@ -3,11 +3,11 @@ ############################################################################### # # Project: GDAL/OGR Test Suite -# Purpose: Test read functionality for S102 driver. +# Purpose: Test read/write functionality for S102 driver. # Author: Even Rouault # ############################################################################### -# Copyright (c) 2023, Even Rouault +# Copyright (c) 2023-2025, Even Rouault # # SPDX-License-Identifier: MIT ############################################################################### @@ -15,11 +15,13 @@ import os import shutil import struct +import sys import gdaltest import pytest +from test_py_scripts import samples_path -from osgeo import gdal +from osgeo import gdal, osr pytestmark = pytest.mark.require_driver("S102") @@ -430,3 +432,944 @@ def test_s102_multiple_feature_instance_groups(): gdal.Open( 'S102:"data/s102/multiple_feature_instance_groups.h5":BathymetryCoverage.03' ) + + +############################################################################### + + +def validate( + filename, expected_errors=None, expected_warnings=None, expected_check_count=None +): + + path = samples_path + if path not in sys.path: + sys.path.append(path) + try: + import validate_s102 + except ImportError: + print("Cannot import validate_s102") + return True + + errors, warnings, checks_done = validate_s102.check(filename) + + if expected_errors: + assert errors == expected_errors + else: + if errors: + print(errors) + assert not errors + + if expected_warnings: + assert warnings == expected_warnings + else: + if warnings: + print(warnings) + assert not warnings + + if expected_check_count: + assert len(checks_done) == expected_check_count + + +############################################################################### + + +def test_s102_validator(): + + # Fake product: many unconformities + expected_errors = [ + ( + "Critical error", + "No featureCode array in feature information group ('/Group_F/featureCode')", + ), + ("Error", "top level attribute 'issueDate' is not a valid date: 2023-12-31"), + ( + "Critical error", + "Required top level attribute 'westBoundLongitude' is missing", + ), + ( + "Critical error", + "Required top level attribute 'eastBoundLongitude' is missing", + ), + ( + "Critical error", + "Required top level attribute 'southBoundLatitude' is missing", + ), + ( + "Critical error", + "Required top level attribute 'northBoundLatitude' is missing", + ), + ( + "Critical error", + "top level attribute 'verticalCoordinateBase' is not an enumeration", + ), + ( + "Critical error", + "top level attribute 'verticalDatumReference' is not an enumeration", + ), + ("Critical error", "top level attribute 'verticalDatum' is not a uint16"), + ( + "Critical error", + "Required BathymetryCoverage group attribute 'dataCodingFormat' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage group attribute 'dimension' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage group 
attribute 'commonPointRule' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage group attribute 'horizontalPositionUncertainty' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage group attribute 'verticalUncertainty' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage group attribute 'numInstances' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage group attribute 'sequencingRule.type' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage group attribute 'sequencingRule.scanDirection' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage group attribute 'interpolationType' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage group attribute 'dataOffsetCode' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'dataCodingFormat' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'dimension' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'commonPointRule' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'horizontalPositionUncertainty' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'verticalUncertainty' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'numInstances' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'sequencingRule.type' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'sequencingRule.scanDirection' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'interpolationType' is missing", + ), + ( + "Critical error", + "Required QualityOfBathymetryCoverage group attribute 'dataOffsetCode' is missing", + ), + ("Error", "/BathymetryCoverage/axisNames dataset does not exist"), + ( + "Critical error", + "Required BathymetryCoverage feature instance group /BathymetryCoverage/BathymetryCoverage.01 attribute 'numGRP' is missing", + ), + ( + "Critical error", + "Required BathymetryCoverage feature instance group /BathymetryCoverage/BathymetryCoverage.01 attribute 'startSequence' is missing", + ), + ( + "Critical error", + "BathymetryCoverage feature instance group /BathymetryCoverage/BathymetryCoverage.01: dataset 'domainExtent.polygon' missing", + ), + ( + "Critical error", + "Required Group_001 attribute 'minimumUncertainty' is missing", + ), + ( + "Critical error", + "Required Group_001 attribute 'maximumUncertainty' is missing", + ), + ("Critical error", "Required Group_001 attribute 'timePoint' is missing"), + ("Error", "/QualityOfBathymetryCoverage/axisNames dataset does not exist"), + ( + "Error", + "/QualityOfBathymetryCoverage/featureAttributeTable['floatval'] is not an allowed member", + ), + ( + "Error", + "/QualityOfBathymetryCoverage/featureAttributeTable['strval'] is not an allowed member", + ), + ( + "Critical error", + "/BathymetryCoverage/BathymetryCoverage.01/Group_001/values type is not uint32", + ), + ] + expected_warnings = [ + "File name should start with '102'", + "File name 'test_s102_v3.0_without_uncertainty_nodata_0.h5' does not match expected pattern '^102[a-zA-Z0-9]{4}[a-zA-Z0-9_]{1,12}\\.(?:h5|H5)$'", + "Extra element in top level group: 'geographicIdentifier'", + "Extra element in top level 
group: 'producer'", + ] + validate( + "data/s102/test_s102_v3.0_without_uncertainty_nodata_0.h5", + expected_errors=expected_errors, + expected_warnings=expected_warnings, + ) + + +############################################################################### + + +def test_s102_write_errors(tmp_vsimem): + + with pytest.raises( + Exception, match="Source dataset dimension must be at least 1x1 pixel" + ): + gdal.Translate( + tmp_vsimem / "102xxxxyyyy.h5", + gdal.GetDriverByName("MEM").Create("", 0, 0), + format="S102", + ) + + with pytest.raises( + Exception, match="S102 driver requires a source dataset with a geotransform" + ): + gdal.Translate( + tmp_vsimem / "102xxxxyyyy.h5", + gdal.GetDriverByName("MEM").Create("", 1, 1), + format="S102", + ) + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1) + with pytest.raises( + Exception, match="S102 driver requires a source dataset with a geotransform" + ): + gdal.Translate(tmp_vsimem / "102xxxxyyyy.h5", src_ds, format="S102") + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1) + src_ds.SetGeoTransform([0, 1, 0.2, 0, 0, 1]) + with pytest.raises( + Exception, + match="S102 driver requires a source dataset with a non-rotated geotransform", + ): + gdal.Translate(tmp_vsimem / "102xxxxyyyy.h5", src_ds, format="S102") + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + with pytest.raises( + Exception, match="S102 driver requires a source dataset with a CRS" + ): + gdal.Translate(tmp_vsimem / "102xxxxyyyy.h5", src_ds, format="S102") + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + srs = osr.SpatialReference() + srs.ImportFromProj4("+proj=longlat") + src_ds.SetSpatialRef(srs) + with pytest.raises( + Exception, + match="S102 driver requires a source dataset whose CRS has an EPSG identifier", + ): + gdal.Translate(tmp_vsimem / "102xxxxyyyy.h5", src_ds, format="S102") + + with pytest.raises( + Exception, match="VERTICAL_DATUM creation option must be specified" + ): + gdal.Translate(tmp_vsimem / "102xxxxyyyy.h5", "data/byte.tif", format="S102") + + with pytest.raises( + Exception, match="Cannot create file /i/do_not/exist/102xxxxyyyy.h5" + ): + gdal.Translate( + "/i/do_not/exist/102xxxxyyyy.h5", + gdal.Translate("", "data/byte.tif", format="MEM", outputSRS="EPSG:32611"), + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + + +############################################################################### + + +def test_s102_write_warnings(tmp_vsimem): + + with gdaltest.error_raised(gdal.CE_Warning, match="Only EPSG codes"): + gdal.Translate( + tmp_vsimem / "102xxxxyyyy.h5", + "data/byte.tif", + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + + with gdaltest.error_raised( + gdal.CE_Warning, match="S102 dataset filenames should start with '102'" + ): + gdal.Translate( + tmp_vsimem / "non_conformant_prefix.h5", + gdal.Translate("", "data/byte.tif", format="MEM", outputSRS="EPSG:32611"), + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + + with gdaltest.error_raised( + gdal.CE_Warning, match="S102 dataset filenames should have a '.H5' extension" + ): + gdal.Translate( + tmp_vsimem / "102xxxxyyyy.oops", + gdal.Translate("", "data/byte.tif", format="MEM", outputSRS="EPSG:32611"), + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + + +############################################################################### + + +def test_s102_write_basic(tmp_path): + + with 
gdaltest.error_raised(gdal.CE_None): + out_ds = gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + gdal.Translate("", "data/byte.tif", format="MEM", outputSRS="EPSG:32611"), + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + assert out_ds.GetRasterBand(1).Checksum() == 4672 + assert out_ds.GetSpatialRef().GetAuthorityCode(None) == "32611" + assert out_ds.GetGeoTransform() == (440720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0) + + out_ds.Close() + + validate( + tmp_path / "102xxxxyyyy.h5", + expected_warnings=["Quality feature not used"], + expected_check_count=46, + ) + + +############################################################################### + + +def test_s102_write_elevation_to_depth(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 3, 1, gdal.GDT_Float32) + src_ds.GetRasterBand(1).SetDescription("elevation") + src_ds.GetRasterBand(1).SetNoDataValue(255) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 3, struct.pack("f" * 6, -1, -2, -3, -4, -5, 255) + ) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, -1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised( + gdal.CE_Warning, + match="Automatically convert from elevation to depth by negating elevation values", + ): + out_ds = gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + assert struct.unpack("f" * 6, out_ds.GetRasterBand(1).ReadRaster()) == ( + 1, + 2, + 3, + 4, + 5, + 1e6, + ) + + +############################################################################### + + +@pytest.mark.parametrize( + "value,warning_msg,validate_warning", + [ + ( + -14.1, + "Range of depth in the dataset is [-14.100000, 0.000000] whereas the allowed range is [-14, 11050]", + "Group_001: minimumDepth=-14.100000381469727 should be in [-14, 11050] range", + ), + ( + 11050.1, + "Range of depth in the dataset is [0.000000, 11050.099609] whereas the allowed range is [-14, 11050]", + "Group_001: maximumDepth=11050.099609375 should be in [-14, 11050] range", + ), + ], +) +def test_s102_write_depth_not_in_range(tmp_path, value, warning_msg, validate_warning): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2, 1, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("f" * 4, value, 0, 0, 0) + ) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, -1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised( + gdal.CE_Warning, + match=warning_msg, + ): + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + + validate( + tmp_path / "102xxxxyyyy.h5", + expected_warnings=["Quality feature not used", validate_warning], + ) + + +############################################################################### + + +def test_s102_write_uncertainty_not_in_range(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2, 2, gdal.GDT_Float32) + src_ds.GetRasterBand(2).WriteRaster(0, 0, 2, 2, struct.pack("f" * 4, -0.1, 0, 0, 0)) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, -1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised( + gdal.CE_Warning, + match="Negative uncertainty value found, which is not allowed", + ): + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + + validate( + tmp_path / "102xxxxyyyy.h5", + expected_warnings=[ + "Quality feature not used", + 
"Group_001: minimumUncertainty=-0.10000000149011612 should be in [0, inf) range or equal to 1000000", + ], + ) + + +############################################################################### + + +def test_s102_write_with_uncertainty(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 3, 2) + src_ds.GetRasterBand(1).SetNoDataValue(255) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 3, struct.pack("B" * 6, 1, 2, 3, 4, 5, 255) + ) + src_ds.GetRasterBand(2).SetNoDataValue(254) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 2, 3, struct.pack("B" * 6, 11, 12, 13, 14, 15, 254) + ) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised(gdal.CE_None): + out_ds = gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + assert struct.unpack("f" * 6, out_ds.GetRasterBand(1).ReadRaster()) == ( + 5.0, + 1000000.0, + 3.0, + 4.0, + 1.0, + 2.0, + ) + assert struct.unpack("f" * 6, out_ds.GetRasterBand(2).ReadRaster()) == ( + 15.0, + 1000000.0, + 13.0, + 14.0, + 11.0, + 12.0, + ) + assert out_ds.GetRasterBand(1).GetMinimum() == 1 + assert out_ds.GetRasterBand(1).GetMaximum() == 5 + assert out_ds.GetRasterBand(2).GetMinimum() == 11 + assert out_ds.GetRasterBand(2).GetMaximum() == 15 + + out_ds.Close() + + validate( + tmp_path / "102xxxxyyyy.h5", + expected_warnings=["Quality feature not used"], + expected_check_count=46, + ) + + +############################################################################### + + +def test_s102_write_with_quality(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 2, struct.pack("B" * 4, 1, 2, 3, 4)) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + rat = gdal.RasterAttributeTable() + + rat.CreateColumn("id", gdal.GFT_Integer, gdal.GFU_MinMax) + rat.CreateColumn("dataAssessment", gdal.GFT_Integer, gdal.GFU_Generic) + rat.CreateColumn("surveyDateRange.dateStart", gdal.GFT_String, gdal.GFU_Generic) + rat.CreateColumn("featureSizeVar", gdal.GFT_Real, gdal.GFU_Generic) + rat.CreateColumn("bathyCoverage", gdal.GFT_Boolean, gdal.GFU_Generic) + rat.CreateColumn("sourceSurveyID", gdal.GFT_String, gdal.GFU_Generic) + rat.CreateColumn( + "typeOfBathymetricEstimationUncertainty", gdal.GFT_Integer, gdal.GFU_Generic + ) + + rat.SetRowCount(2) + + rat.SetValueAsInt(0, 0, 1) + rat.SetValueAsInt(0, 1, 1) + rat.SetValueAsString(0, 2, "20251026") + rat.SetValueAsDouble(0, 3, 1.5) + rat.SetValueAsBoolean(0, 4, True) + rat.SetValueAsString(0, 5, "X") + rat.SetValueAsInt(0, 6, 2) + + rat.SetValueAsInt(1, 0, 2) + rat.SetValueAsInt(1, 1, 3) + rat.SetValueAsString(1, 2, "2025-10-27") + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "quality.tif", 2, 2 + ) as quality_ds: + quality_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("B" * 4, 1, 1, 2, 2) + ) + quality_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + quality_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + quality_ds.GetRasterBand(1).SetDefaultRAT(rat) + + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={ + "VERTICAL_DATUM": "MLLW", + "QUALITY_DATASET": tmp_path / "quality.tif", + }, + ) + + validate(tmp_path / "102xxxxyyyy.h5") + + ds = gdal.Open(f'S102:"{tmp_path}/102xxxxyyyy.h5":QualityOfBathymetryCoverage') + info = gdal.Info(ds, format="json") + rat = 
info["bands"][0]["rat"] + assert rat == { + "tableType": "thematic", + "fieldDefn": [ + {"index": 0, "name": "id", "type": 0, "usage": 5}, + {"index": 1, "name": "dataAssessment", "type": 0, "usage": 0}, + {"index": 2, "name": "surveyDateRange.dateStart", "type": 4, "usage": 0}, + {"index": 3, "name": "featureSizeVar", "type": 1, "usage": 0}, + {"index": 4, "name": "bathyCoverage", "type": 3, "usage": 0}, + {"index": 5, "name": "sourceSurveyID", "type": 2, "usage": 0}, + { + "index": 6, + "name": "typeOfBathymetricEstimationUncertainty", + "type": 0, + "usage": 0, + }, + ], + "row": [ + { + "index": 0, + "f": [1, 1, "2025-10-26T00:00:00.000+00:00", 1.5, True, "X", 2], + }, + { + "index": 1, + "f": [2, 3, "2025-10-27T00:00:00.000+00:00", 0.0, False, "", 0], + }, + ], + } + + +############################################################################### + + +def test_s102_write_with_quality_custom_fields_rat(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 2, struct.pack("B" * 4, 1, 2, 3, 4)) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + rat = gdal.RasterAttributeTable() + + rat.CreateColumn("id", gdal.GFT_Integer, gdal.GFU_MinMax) + rat.CreateColumn("my_int_field", gdal.GFT_Integer, gdal.GFU_Generic) + rat.CreateColumn("my_real_field", gdal.GFT_Real, gdal.GFU_Generic) + rat.CreateColumn("my_string_field", gdal.GFT_String, gdal.GFU_Generic) + rat.CreateColumn("my_bool_field", gdal.GFT_Boolean, gdal.GFU_Generic) + + rat.SetValueAsInt(0, 0, 1) + rat.SetValueAsInt(0, 1, 123) + rat.SetValueAsDouble(0, 2, 1.25) + rat.SetValueAsString(0, 3, "foo") + rat.SetValueAsBoolean(0, 4, True) + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "quality.tif", 2, 2 + ) as quality_ds: + quality_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("B" * 4, 1, 1, 1, 1) + ) + quality_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + quality_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + quality_ds.GetRasterBand(1).SetDefaultRAT(rat) + + with gdaltest.error_raised(gdal.CE_Warning): + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={ + "VERTICAL_DATUM": "MLLW", + "QUALITY_DATASET": tmp_path / "quality.tif", + }, + ) + + expected_errors = [ + ( + "Error", + "/QualityOfBathymetryCoverage/featureAttributeTable['my_int_field'] is not an allowed member", + ), + ( + "Error", + "/QualityOfBathymetryCoverage/featureAttributeTable['my_real_field'] is not an allowed member", + ), + ( + "Error", + "/QualityOfBathymetryCoverage/featureAttributeTable['my_string_field'] is not an allowed member", + ), + ( + "Error", + "/QualityOfBathymetryCoverage/featureAttributeTable['my_bool_field'] is not an allowed member", + ), + ] + validate(tmp_path / "102xxxxyyyy.h5", expected_errors=expected_errors) + + ds = gdal.Open(f'S102:"{tmp_path}/102xxxxyyyy.h5":QualityOfBathymetryCoverage') + info = gdal.Info(ds, format="json") + rat = info["bands"][0]["rat"] + assert rat == { + "tableType": "thematic", + "fieldDefn": [ + {"index": 0, "name": "id", "type": 0, "usage": 5}, + {"index": 1, "name": "my_int_field", "type": 0, "usage": 0}, + {"index": 2, "name": "my_real_field", "type": 1, "usage": 0}, + {"index": 3, "name": "my_string_field", "type": 2, "usage": 0}, + {"index": 4, "name": "my_bool_field", "type": 0, "usage": 0}, + ], + "row": [{"index": 0, "f": [1, 123, 1.25, "foo", 1]}], + } + + 
+############################################################################### + + +def test_s102_write_with_quality_missing_id_in_rat(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 2, struct.pack("B" * 4, 1, 2, 3, 4)) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + rat = gdal.RasterAttributeTable() + + rat.CreateColumn("id", gdal.GFT_Integer, gdal.GFU_MinMax) + + rat.SetValueAsInt(0, 0, 1) + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "quality.tif", 2, 2 + ) as quality_ds: + quality_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("B" * 4, 1, 1, 1, 2) + ) + quality_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + quality_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + quality_ds.GetRasterBand(1).SetDefaultRAT(rat) + + with gdaltest.error_raised( + gdal.CE_Warning, + match="Quality grid contains nodes with id 2, but there is no such entry in the feature attribute table", + ): + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={ + "VERTICAL_DATUM": "MLLW", + "QUALITY_DATASET": tmp_path / "quality.tif", + }, + ) + + expected_errors = [ + ( + "Error", + "/BathymetryCoverage/BathymetryCoverage.01/Group_001/values contain value 2, which is not a valid 'id' of the featureAttributeTable", + ) + ] + validate(tmp_path / "102xxxxyyyy.h5", expected_errors=expected_errors) + + +############################################################################### + + +@pytest.mark.parametrize( + "error_source,expected_error_msg", + [ + ("non_existing_dataset", "i_do_not_exist"), + ("no_rat", "does not have a raster attribute table"), + ("no_gt", "does not have the same geotransform"), + ("no_srs", "does not have the same CRS"), + ("other_width", "does not have the same dimensions as"), + ("other_height", "does not have the same dimensions as"), + ("wrong_band_count", "does not have a single band"), + ("wrong_data_type", "is not of an integer data type"), + ("no_id_field", "Input raster attribute table lacks an integer 'id' field"), + ("id_0", "id=0 is not allowed in input raster attribute table"), + ("id_negative", "Negative id is not allowed in input raster attribute table"), + ("id_same", "Several rows of input raster attribute table have id=1"), + ], +) +def test_s102_write_with_quality_errors(tmp_path, error_source, expected_error_msg): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2) + src_ds.GetRasterBand(1).WriteRaster(0, 0, 2, 2, struct.pack("B" * 4, 1, 2, 3, 4)) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + rat = gdal.RasterAttributeTable() + rat.CreateColumn( + "id" if error_source != "no_id_field" else "foo", + gdal.GFT_Integer, + gdal.GFU_MinMax, + ) + if error_source == "id_0": + rat.SetValueAsInt(0, 0, 0) + elif error_source == "id_negative": + rat.SetValueAsInt(0, 0, -1) + elif error_source == "id_same": + rat.SetValueAsInt(0, 0, 1) + rat.SetValueAsInt(1, 0, 1) + else: + rat.SetValueAsInt(0, 0, 1) + + quality_width = 2 + if error_source == "other_width": + quality_width = 1 + quality_height = 2 + if error_source == "other_height": + quality_height = 1 + quality_band_count = 1 + if error_source == "wrong_band_count": + quality_band_count = 2 + quality_data_type = gdal.GDT_Byte + if error_source == "wrong_data_type": + quality_data_type = gdal.GDT_Float32 + with gdal.GetDriverByName("GTiff").Create( + 
tmp_path / "quality.tif", + quality_width, + quality_height, + quality_band_count, + quality_data_type, + ) as quality_ds: + quality_ds.GetRasterBand(1).Fill(1) + if error_source != "no_gt": + quality_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + if error_source != "no_srs": + quality_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + if error_source != "no_rat": + quality_ds.GetRasterBand(1).SetDefaultRAT(rat) + + with pytest.raises(Exception, match=expected_error_msg), gdal.quiet_errors(): + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={ + "VERTICAL_DATUM": "MLLW", + "QUALITY_DATASET": ( + tmp_path / "i_do_not_exist" + if error_source == "non_existing_dataset" + else tmp_path / "quality.tif" + ), + }, + ) + + +############################################################################### + + +def test_s102_write_large_file(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 1200, 1200, 2, gdal.GDT_Float32) + src_ds.GetRasterBand(2).WriteRaster(0, 0, 1, 1, struct.pack("f", 1)) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + filename = str(tmp_path / "102xxxxyyyy.h5") + with gdaltest.error_raised( + gdal.CE_Warning, + match="file size exceeds 10 MB", + ): + gdal.Translate( + filename, + src_ds, + format="S102", + creationOptions={ + "VERTICAL_DATUM": "MLLW", + "COMPRESS": "NONE", + }, + ) + + expected_warnings = [ + f"File size of {filename} = 11548224, which exceeds 10 MB", + "Quality feature not used", + ] + validate(filename, expected_warnings=expected_warnings) + + +############################################################################### + + +def test_s102_write_several_feature_instances(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2, 1, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("f" * 4, 1, 2, float("nan"), float("nan")) + ) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={"VERTICAL_DATUM": "MLLW"}, + ) + + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("f" * 4, float("nan"), float("nan"), 3, 4) + ) + src_ds.SetGeoTransform([500000 + 200, 1, 0, 4500000 + 200, 0, 1]) + + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={ + "VERTICAL_DATUM": "MLW", + "APPEND_SUBDATASET": "YES", + }, + ) + validate( + tmp_path / "102xxxxyyyy.h5", expected_warnings=["Quality feature not used"] + ) + + ds = gdal.Open(f'S102:"{tmp_path}/102xxxxyyyy.h5":BathymetryCoverage.01') + assert struct.unpack("f" * 4, ds.GetRasterBand(1).ReadRaster()) == (1e6, 1e6, 1, 2) + assert ds.GetMetadataItem("VERTICAL_DATUM_ABBREV") == "MLLW" + assert ds.GetGeoTransform() == (500000, 1, 0, 4500002, 0, -1) + + ds = gdal.Open(f'S102:"{tmp_path}/102xxxxyyyy.h5":BathymetryCoverage.02') + assert struct.unpack("f" * 4, ds.GetRasterBand(1).ReadRaster()) == (3, 4, 1e6, 1e6) + assert ds.GetMetadataItem("VERTICAL_DATUM_ABBREV") == "MLW" + assert ds.GetGeoTransform() == (500000 + 200, 1, 0, 4500002 + 200, 0, -1) + + +############################################################################### + + +def test_s102_write_several_feature_instances_error_not_existing_s102(tmp_path): + + open(tmp_path / "102xxxxyyyy.h5", "wb").close() + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2, 1, gdal.GDT_Float32) + 
src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("f" * 4, 1, 2, float("nan"), float("nan")) + ) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with pytest.raises(Exception, match="is not a valid existing S102 dataset"): + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={ + "VERTICAL_DATUM": "MLLW", + "APPEND_SUBDATASET": "YES", + }, + ) + + +############################################################################### + + +def test_s102_write_several_feature_instances_error_not_same_srs(tmp_path): + + open(tmp_path / "102xxxxyyyy.h5", "wb").close() + + src_ds = gdal.GetDriverByName("MEM").Create("", 2, 2, 1, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 2, 2, struct.pack("f" * 4, 1, 2, float("nan"), float("nan")) + ) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={ + "VERTICAL_DATUM": "MLLW", + }, + ) + + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32632)) + with pytest.raises(Exception, match="CRS.*is not the same as the one"): + gdal.Translate( + tmp_path / "102xxxxyyyy.h5", + src_ds, + format="S102", + creationOptions={ + "VERTICAL_DATUM": "MLLW", + "APPEND_SUBDATASET": "YES", + }, + ) diff --git a/doc/source/drivers/raster/s102.rst b/doc/source/drivers/raster/s102.rst index 084ee3f1016b..7b53a389054e 100644 --- a/doc/source/drivers/raster/s102.rst +++ b/doc/source/drivers/raster/s102.rst @@ -10,7 +10,7 @@ S102 -- S-102 Bathymetric Surface Product .. versionadded:: 3.8 -This driver provides read-only support for bathymetry data in the S-102 format, +This driver provides support for bathymetry data in the S-102 format, which is a specific product profile in an HDF5 file. S-102 files have two image bands representing depth (band 1), @@ -25,16 +25,20 @@ Georeferencing is reported. Nodata, minimum and maximum values for each band are also reported. -Supported versions of the specification are S-102 v2.1, v2.2 and v3.0. +For reading, supported versions of the specification are S-102 v2.1, v2.2 and v3.0. Since GDAL 3.12, multiple feature instance groups per dataset (to encode grids using different vertical datums) are supported. In that case, each feature instance group is exposed as a GDAL subdataset, whose name is of the form ``S102:"{filename.h5}":BathymetricCoverage.{XX}``. +Write support for S-102 v3.0 has been added in GDAL 3.13 + Driver capabilities ------------------- +.. supports_createcopy:: + .. supports_georeferencing:: .. supports_virtualio:: @@ -83,6 +87,178 @@ dataset is exposed as a GDAL Raster Attribute Table associated to the GDAL raster band. The pixel values of the raster match the ``id`` column of the Raster Attribute Table. +Write support +------------- + +.. versionadded:: 3.13 + +Creation of a S-102 v3.0 dataset from another existing GDAL supported dataset is +possible using the :cpp:func:`GDALDriver::CreateCopy` function, or utilities +like :ref:`gdal_translate` or :ref:`gdal_raster_convert`. The input dataset +must have one or two bands. The first band must represent depths (positive values +are down below the vertical datum surface) in meters, and the optional second band +must contain the uncertainty value in meters. 
If the first band has a description
+"elevation" (typically for BAG datasets), elevations will be automatically converted
+to depths by negating the sign of source values.
+
+The input dataset must use one of the following CRS:
+
+- WGS 84 longitude/latitude (EPSG:4326)
+- Any of the 60 north/south UTM projected CRS over WGS 84 (EPSG:32601 to 32660, or 32701 to 32760)
+- "WGS 84 / UPS North (E,N)" (EPSG:5041)
+- "WGS 84 / UPS South (E,N)" (EPSG:5042)
+
+If several vertical datums are needed, the :co:`APPEND_SUBDATASET` creation option
+can be set to ``YES`` to add an extra feature instance group ("BathymetryCoverage.XX")
+to an existing S-102 dataset.
+
+The following creation options are available:
+
+- .. co:: VERTICAL_DATUM
+     :choices:
+
+     Vertical datum. This is a required creation option.
+
+     Possible values are either a S100 vertical datum numeric code in the
+     1 to 30 range or the value 44, or their string meaning or abbreviation
+     among the following list:
+
+     - 1: ``meanLowWaterSprings`` / ``MLWS``
+     - 2: ``meanLowerLowWaterSprings``
+     - 3: ``meanSeaLevel`` / ``MSL``
+     - 4: ``lowestLowWater``
+     - 5: ``meanLowWater`` / ``MLW``
+     - 6: ``lowestLowWaterSprings``
+     - 7: ``approximateMeanLowWaterSprings``
+     - 8: ``indianSpringLowWater``
+     - 9: ``lowWaterSprings``
+     - 10: ``approximateLowestAstronomicalTide``
+     - 11: ``nearlyLowestLowWater``
+     - 12: ``meanLowerLowWater`` / ``MLLW``
+     - 13: ``lowWater`` / ``LW``
+     - 14: ``approximateMeanLowWater``
+     - 15: ``approximateMeanLowerLowWater``
+     - 16: ``meanHighWater`` / ``MHW``
+     - 17: ``meanHighWaterSprings`` / ``MHWS``
+     - 18: ``highWater`` / ``HW``
+     - 19: ``approximateMeanSeaLevel``
+     - 20: ``highWaterSprings``
+     - 21: ``meanHigherHighWater`` / ``MHHW``
+     - 22: ``equinoctialSpringLowWater``
+     - 23: ``lowestAstronomicalTide`` / ``LAT``
+     - 24: ``localDatum``
+     - 25: ``internationalGreatLakesDatum1985``
+     - 26: ``meanWaterLevel``
+     - 27: ``lowerLowWaterLargeTide``
+     - 28: ``higherHighWaterLargeTide``
+     - 29: ``nearlyHighestHighWater``
+     - 30: ``highestAstronomicalTide`` / ``HAT``
+     - 44: ``balticSeaChartDatum2000``
+
+- .. co:: ISSUE_DATE
+     :choices:
+
+     If not specified, defaults to the current date.
+
+- .. co:: ISSUE_TIME
+     :choices:
+
+     Issue time as hhmmssZ or hhmmss±HHMM
+
+- .. co:: HORIZONTAL_POSITION_UNCERTAINTY
+     :choices:
+
+     Horizontal position uncertainty in metres
+
+- .. co:: VERTICAL_UNCERTAINTY
+     :choices:
+
+     Vertical uncertainty in metres
+
+- .. co:: QUALITY_DATASET
+     :choices:
+
+     Path to a dataset with the quality of bathymetric coverage (spatial metadata).
+
+     This must point to a GDAL recognized dataset, with a single band of an
+     integer data type, containing quality codes. 
+
+
+Validation script
+-----------------
+
+.. versionadded:: 3.13
+
+The Python script :source_file:`swig/python/gdal-utils/osgeo_utils/samples/validate_s102.py`
+can be used to validate the conformity of a S-102 v3.0 dataset against the specification.
+It requires the `h5py `__ Python module to be installed
+(typically through "pip install h5py").
+
+Its usage is:
+
+::
+
+    $ python validate_s102.py 102TESTXXXX.h5
+
+
+Note that the GDAL S-102 reader is more tolerant than the validation script and
+can read files with slight non-conformities.
+
+
+Examples
+--------
+
+- Converting a GeoTIFF with depth and uncertainty and another one with
+  quality information to a S-102 dataset
+
+  ::
+
+    $ gdal_translate depth_uncertainty.tif 102TESTXXXX.h5 -of S102 -co VERTICAL_DATUM=MLLW -co QUALITY_DATASET=quality.tif
+
 
 See Also
 --------
 
@@ -91,3 +267,12 @@ See Also
 - :ref:`BAG driver <raster.bag>`
 - :ref:`S-104 driver <raster.s104>`
 - :ref:`S-111 driver <raster.s111>`
+
+
+
+.. below is an allow-list for spelling checker.
+
+.. 
spelling:word-list:: + hhmmssZ + hhmmss + HHMM diff --git a/frmts/hdf5/bagdataset.cpp b/frmts/hdf5/bagdataset.cpp index 20d0842aaf12..81bc4ced1530 100644 --- a/frmts/hdf5/bagdataset.cpp +++ b/frmts/hdf5/bagdataset.cpp @@ -57,26 +57,6 @@ struct BAGRefinementGrid constexpr float fDEFAULT_NODATA = 1000000.0f; -/************************************************************************/ -/* h5check() */ -/************************************************************************/ - -#ifdef DEBUG -template static T h5check(T ret, const char *filename, int line) -{ - if (ret < 0) - { - CPLError(CE_Failure, CPLE_AppDefined, "HDF5 API failed at %s:%d", - filename, line); - } - return ret; -} - -#define H5_CHECK(x) h5check(x, __FILE__, __LINE__) -#else -#define H5_CHECK(x) (x) -#endif - /************************************************************************/ /* ==================================================================== */ /* BAGDataset */ diff --git a/frmts/hdf5/gh5_convenience.cpp b/frmts/hdf5/gh5_convenience.cpp index 5ab0e1309595..b7fb805e79b6 100644 --- a/frmts/hdf5/gh5_convenience.cpp +++ b/frmts/hdf5/gh5_convenience.cpp @@ -292,22 +292,6 @@ GDALDataType GH5_GetDataType(hid_t TypeID) bool GH5_CreateAttribute(hid_t loc_id, const char *pszAttrName, hid_t TypeID, unsigned nMaxLen) { -#ifdef notdef_write_variable_length_string - if (TypeID == H5T_C_S1) - { - hsize_t dims[1] = {1}; - hid_t dataspace = H5Screate_simple(1, dims, nullptr); - hid_t type = H5Tcopy(TypeID); - H5Tset_size(type, H5T_VARIABLE); - hid_t att = - H5Acreate(loc_id, pszAttrName, type, dataspace, H5P_DEFAULT); - H5Tclose(type); - H5Aclose(att); - H5Sclose(dataspace); - return true; - } -#endif - hid_t hDataSpace = H5Screate(H5S_SCALAR); if (hDataSpace < 0) return false; @@ -321,7 +305,12 @@ bool GH5_CreateAttribute(hid_t loc_id, const char *pszAttrName, hid_t TypeID, if (TypeID == H5T_C_S1) { - if (H5Tset_size(hDataType, nMaxLen) < 0) + if (nMaxLen == VARIABLE_LENGTH) + { + H5Tset_size(hDataType, H5T_VARIABLE); + H5Tset_strpad(hDataType, H5T_STR_NULLTERM); + } + else if (H5Tset_size(hDataType, nMaxLen) < 0) { H5Tclose(hDataType); H5Sclose(hDataSpace); @@ -368,11 +357,10 @@ bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName, bool bSuccess = false; if (H5Tget_class(hAttrNativeType) == H5T_STRING) { -#ifdef notdef_write_variable_length_string - bSuccess = H5Awrite(hAttr, hDataType, &pszValue) >= 0; -#else - bSuccess = H5Awrite(hAttr, hDataType, pszValue) >= 0; -#endif + if (H5Tis_variable_str(hAttrNativeType) > 0) + bSuccess = H5Awrite(hAttr, hDataType, &pszValue) >= 0; + else + bSuccess = H5Awrite(hAttr, hDataType, pszValue) >= 0; } else { @@ -433,6 +421,111 @@ bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName, double dfValue) /* GH5_WriteAttribute() */ /************************************************************************/ +bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName, int nValue) +{ + + hid_t hAttr = H5Aopen_name(loc_id, pszAttrName); + if (hAttr < 0) + return false; + + hid_t hDataType = H5Aget_type(hAttr); + if (hDataType < 0) + { + H5Aclose(hAttr); + return false; + } + + hid_t hEnumType = -1; + if (H5Tget_class(hDataType) == H5T_ENUM) + { + hEnumType = hDataType; + hDataType = H5Tget_super(hDataType); + } + + hid_t hAttrNativeType = H5Tget_native_type(hDataType, H5T_DIR_DEFAULT); + bool bSuccess = false; + if (hEnumType < 0 && H5Tequal(hAttrNativeType, H5T_NATIVE_INT)) + { + bSuccess = H5Awrite(hAttr, hAttrNativeType, &nValue) >= 0; + } + else if (hEnumType < 0 && 
H5Tequal(hAttrNativeType, H5T_NATIVE_UINT))
+    {
+        if (nValue < 0)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Attribute %s has value %d which is negative but the type "
+                     "is uint",
+                     pszAttrName, nValue);
+        }
+        else
+        {
+            bSuccess = H5Awrite(hAttr, hAttrNativeType, &nValue) >= 0;
+        }
+    }
+    else if (hEnumType < 0 && H5Tequal(hAttrNativeType, H5T_NATIVE_UINT8))
+    {
+        if (nValue < 0 || nValue > 255)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Attribute %s has value %d which is not in the range of a "
+                     "uint8",
+                     pszAttrName, nValue);
+        }
+        else
+        {
+            uint8_t nUint8 = static_cast<uint8_t>(nValue);
+            bSuccess = H5Awrite(hAttr, hAttrNativeType, &nUint8) >= 0;
+        }
+    }
+    else if (hEnumType < 0 && H5Tequal(hAttrNativeType, H5T_NATIVE_UINT16))
+    {
+        if (nValue < 0 || nValue >= 65536)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Attribute %s has value %d which is not in the range of a "
+                     "uint16",
+                     pszAttrName, nValue);
+        }
+        else
+        {
+            uint16_t nUint16 = static_cast<uint16_t>(nValue);
+            bSuccess = H5Awrite(hAttr, hAttrNativeType, &nUint16) >= 0;
+        }
+    }
+    else if (hEnumType >= 0 && H5Tequal(hAttrNativeType, H5T_NATIVE_UINT8))
+    {
+        if (nValue < 0 || nValue > 255)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Attribute %s has value %d which is not in the range of a "
+                     "uint8",
+                     pszAttrName, nValue);
+        }
+        else
+        {
+            uint8_t nUint8 = static_cast<uint8_t>(nValue);
+            bSuccess = H5Awrite(hAttr, hEnumType, &nUint8) >= 0;
+        }
+    }
+    else
+    {
+        CPLError(CE_Failure, CPLE_AppDefined,
+                 "Attribute %s is not of type int/uint", pszAttrName);
+    }
+
+    H5Tclose(hAttrNativeType);
+    H5Aclose(hAttr);
+    H5Tclose(hDataType);
+    if (hEnumType >= 0)
+        H5Tclose(hEnumType);
+
+    return bSuccess;
+}
+
+/************************************************************************/
+/*                         GH5_WriteAttribute()                         */
+/************************************************************************/
+
 bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName, unsigned nValue)
 
 {
@@ -447,13 +540,33 @@ bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName, unsigned nValue)
         return false;
     }
 
+    hid_t hEnumType = -1;
+    if (H5Tget_class(hDataType) == H5T_ENUM)
+    {
+        hEnumType = hDataType;
+        hDataType = H5Tget_super(hDataType);
+    }
+
     hid_t hAttrNativeType = H5Tget_native_type(hDataType, H5T_DIR_DEFAULT);
     bool bSuccess = false;
-    if (H5Tequal(hAttrNativeType, H5T_NATIVE_INT) ||
-        H5Tequal(hAttrNativeType, H5T_NATIVE_UINT))
+    if (H5Tequal(hAttrNativeType, H5T_NATIVE_UINT))
     {
         bSuccess = H5Awrite(hAttr, hAttrNativeType, &nValue) >= 0;
     }
+    else if (H5Tequal(hAttrNativeType, H5T_NATIVE_INT))
+    {
+        if (nValue > static_cast<unsigned>(INT_MAX))
+        {
+            CPLError(
+                CE_Failure, CPLE_AppDefined,
+                "Attribute %s has value %u which does not fit on a signed int",
+                pszAttrName, nValue);
+        }
+        else
+        {
+            bSuccess = H5Awrite(hAttr, hAttrNativeType, &nValue) >= 0;
+        }
+    }
     else
     {
         CPLError(CE_Failure, CPLE_AppDefined,
@@ -463,6 +576,8 @@ bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName, unsigned nValue)
     H5Tclose(hAttrNativeType);
     H5Aclose(hAttr);
     H5Tclose(hDataType);
+    if (hEnumType >= 0)
+        H5Tclose(hEnumType);
 
     return bSuccess;
 }
diff --git a/frmts/hdf5/gh5_convenience.h b/frmts/hdf5/gh5_convenience.h
index 83c8b4a38936..30ee84bc0fa2 100644
--- a/frmts/hdf5/gh5_convenience.h
+++ b/frmts/hdf5/gh5_convenience.h
@@ -18,6 +18,8 @@
 #include "cpl_string.h"
 #include "gdal.h"
 
+#include <cstdint>
+
 /* release 1.6.3 or 1.6.4 changed the type of count in some api functions */
 
 #if H5_VERS_MAJOR == 1 && H5_VERS_MINOR <= 6 && \
@@ -32,11 +34,147 @@ bool GH5_FetchAttribute(hid_t loc_id, const char *pszName, CPLString &osResult,
 bool GH5_FetchAttribute(hid_t loc_id, const char *pszName, double &dfResult,
                         bool bReportError = false);
 GDALDataType GH5_GetDataType(hid_t TypeID);
+constexpr unsigned VARIABLE_LENGTH = UINT32_MAX;
 bool GH5_CreateAttribute(hid_t loc_id, const char *pszAttrName, hid_t TypeID,
                          unsigned nMaxLen = 0);
 bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName,
                         const char *pszValue);
 bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName, double dfValue);
+bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName, int nValue);
 bool GH5_WriteAttribute(hid_t loc_id, const char *pszAttrName, unsigned nValue);
 
+/************************************************************************/
+/*                              h5check()                               */
+/************************************************************************/
+
+#ifdef DEBUG
+template <class T> static T h5check(T ret, const char *filename, int line)
+{
+    if (ret < 0)
+    {
+        CPLError(CE_Failure, CPLE_AppDefined, "HDF5 API failed at %s:%d",
+                 filename, line);
+    }
+    return ret;
+}
+
+#define H5_CHECK(x) h5check(x, __FILE__, __LINE__)
+#else
+#define H5_CHECK(x) (x)
+#endif
+
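+// RAII holders for HDF5 hid_t handles: each concrete holder below is assumed
+// to bind the matching H5*close() function as the template argument of
+// GH5_HIDBaseHolder, so the wrapped handle is released automatically when the
+// holder goes out of scope.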
+/************************************************************************/
+/*                          GH5_HIDBaseHolder                           */
+/************************************************************************/
+
+template <herr_t (*closeFunc)(hid_t)> struct GH5_HIDBaseHolder /* non final */
+{
+    inline hid_t get() const
+    {
+        return m_hid;
+    }
+
+    inline operator bool() const
+    {
+        return m_hid >= 0;
+    }
+
+    inline operator hid_t() const
+    {
+        return m_hid;
+    }
+
+    inline ~GH5_HIDBaseHolder()
+    {
+        clear();
+    }
+
+    inline void reset(hid_t hid)
+    {
+        clear();
+        m_hid = hid;
+    }
+
+    inline bool clear()
+    {
+        const bool ret = m_hid < 0 || H5_CHECK(closeFunc(m_hid)) >= 0;
+        m_hid = -1;
+        return ret;
+    }
+
+  protected:
+    inline explicit GH5_HIDBaseHolder(hid_t hid) : m_hid(hid)
+    {
+    }
+
+    hid_t m_hid = -1;
+
+  private:
+    CPL_DISALLOW_COPY_ASSIGN(GH5_HIDBaseHolder)
+};
+
+struct GH5_HIDFileHolder : public GH5_HIDBaseHolder<H5Fclose>
+{
+    inline explicit GH5_HIDFileHolder(hid_t hid = -1) : GH5_HIDBaseHolder(hid)
+    {
+    }
+};
+
+struct GH5_HIDGroupHolder : public GH5_HIDBaseHolder<H5Gclose>
+{
+    inline explicit GH5_HIDGroupHolder(hid_t hid = -1) : GH5_HIDBaseHolder(hid)
+    {
+    }
+};
+
+struct GH5_HIDTypeHolder : public GH5_HIDBaseHolder<H5Tclose>
+{
+    inline explicit GH5_HIDTypeHolder(hid_t hid = -1) : GH5_HIDBaseHolder(hid)
+    {
+    }
+};
+
+struct GH5_HIDSpaceHolder : public GH5_HIDBaseHolder<H5Sclose>
+{
+    inline explicit GH5_HIDSpaceHolder(hid_t hid = -1) : GH5_HIDBaseHolder(hid)
+    {
+    }
+};
+
+struct GH5_HIDDatasetHolder : public GH5_HIDBaseHolder<H5Dclose>
+{
+    inline explicit GH5_HIDDatasetHolder(hid_t hid = -1)
+        : GH5_HIDBaseHolder(hid)
+    {
+    }
+};
+
+struct GH5_HIDParametersHolder : public GH5_HIDBaseHolder<H5Pclose>
+{
+    inline explicit GH5_HIDParametersHolder(hid_t hid = -1)
+        : GH5_HIDBaseHolder(hid)
+    {
+    }
+};
+
+// Silence "HDF5-DIAG: Error detected in HDF5" messages coming from libhdf5
+struct GH5_libhdf5_error_silencer
+{
+    H5E_auto2_t old_func = nullptr;
+    void *old_data = nullptr;
+
+    GH5_libhdf5_error_silencer()
+    {
+        H5Eget_auto2(H5E_DEFAULT, &old_func, &old_data);
+        H5Eset_auto2(H5E_DEFAULT, nullptr, nullptr);
+    }
+
+    ~GH5_libhdf5_error_silencer()
+    {
+        H5Eset_auto2(H5E_DEFAULT, old_func, old_data);
+    }
+
+    CPL_DISALLOW_COPY_ASSIGN(GH5_libhdf5_error_silencer)
+};
+
 #endif /* ndef GH5_CONVENIENCE_H_INCLUDED_ */
diff --git a/frmts/hdf5/hdf5drivercore.cpp b/frmts/hdf5/hdf5drivercore.cpp
index c61198a1871d..ffca313619ec 100644
--- a/frmts/hdf5/hdf5drivercore.cpp
+++ 
b/frmts/hdf5/hdf5drivercore.cpp @@ -538,6 +538,7 @@ void S102DriverSetCommonMetadata(GDALDriver *poDriver) poDriver->SetMetadataItem(GDAL_DCAP_VIRTUALIO, "YES"); poDriver->SetMetadataItem(GDAL_DMD_EXTENSION, "h5"); poDriver->SetMetadataItem(GDAL_DMD_SUBDATASETS, "YES"); + poDriver->SetMetadataItem(GDAL_DCAP_CREATE_SUBDATASETS, "YES"); poDriver->SetMetadataItem( GDAL_DMD_OPENOPTIONLIST, @@ -551,8 +552,34 @@ void S102DriverSetCommonMetadata(GDALDriver *poDriver) "description='Whether the top line of the dataset should be the " "northern-most one'/>" ""); + + poDriver->SetMetadataItem( + GDAL_DMD_CREATIONOPTIONLIST, + "" + " " + " "); + poDriver->pfnIdentify = S102DatasetIdentify; poDriver->SetMetadataItem(GDAL_DCAP_OPEN, "YES"); + poDriver->SetMetadataItem(GDAL_DCAP_CREATECOPY, "YES"); } /************************************************************************/ diff --git a/frmts/hdf5/s100.cpp b/frmts/hdf5/s100.cpp index b6a63cdb1398..fcdbd28bd1ba 100644 --- a/frmts/hdf5/s100.cpp +++ b/frmts/hdf5/s100.cpp @@ -10,8 +10,11 @@ * SPDX-License-Identifier: MIT ****************************************************************************/ +#include "cpl_time.h" + #include "s100.h" #include "hdf5dataset.h" +#include "gh5_convenience.h" #include "proj.h" #include "proj_experimental.h" @@ -19,6 +22,7 @@ #include "ogr_proj_p.h" #include +#include #include /************************************************************************/ @@ -793,56 +797,79 @@ bool S100GetDimensions( return false; } +/************************************************************************/ +/* gasVerticalDatums */ +/************************************************************************/ + +// https://iho.int/uploads/user/pubs/standards/s-100/S-100_5.2.0_Final_Clean.pdf +// Table 10c-25 - Vertical and sounding datum, page 53 +static const struct +{ + int nCode; + const char *pszMeaning; + const char *pszAbbrev; +} gasVerticalDatums[] = { + {1, "meanLowWaterSprings", "MLWS"}, + {2, "meanLowerLowWaterSprings", nullptr}, + {3, "meanSeaLevel", "MSL"}, + {4, "lowestLowWater", nullptr}, + {5, "meanLowWater", "MLW"}, + {6, "lowestLowWaterSprings", nullptr}, + {7, "approximateMeanLowWaterSprings", nullptr}, + {8, "indianSpringLowWater", nullptr}, + {9, "lowWaterSprings", nullptr}, + {10, "approximateLowestAstronomicalTide", nullptr}, + {11, "nearlyLowestLowWater", nullptr}, + {12, "meanLowerLowWater", "MLLW"}, + {13, "lowWater", "LW"}, + {14, "approximateMeanLowWater", nullptr}, + {15, "approximateMeanLowerLowWater", nullptr}, + {16, "meanHighWater", "MHW"}, + {17, "meanHighWaterSprings", "MHWS"}, + {18, "highWater", "HW"}, + {19, "approximateMeanSeaLevel", nullptr}, + {20, "highWaterSprings", nullptr}, + {21, "meanHigherHighWater", "MHHW"}, + {22, "equinoctialSpringLowWater", nullptr}, + {23, "lowestAstronomicalTide", "LAT"}, + {24, "localDatum", nullptr}, + {25, "internationalGreatLakesDatum1985", nullptr}, + {26, "meanWaterLevel", nullptr}, + {27, "lowerLowWaterLargeTide", nullptr}, + {28, "higherHighWaterLargeTide", nullptr}, + {29, "nearlyHighestHighWater", nullptr}, + {30, "highestAstronomicalTide", "HAT"}, + {44, "balticSeaChartDatum2000", nullptr}, + {46, "internationalGreatLakesDatum2020", nullptr}, + {47, "seaFloor", nullptr}, + {48, "seaSurface", nullptr}, + {49, "hydrographicZero", nullptr}, +}; + +/************************************************************************/ +/* S100GetVerticalDatumCodeFromCodeMeaningOrAbbrev() */ +/************************************************************************/ + +int 
S100GetVerticalDatumCodeFromCodeMeaningOrAbbrev(const char *pszStr)
+{
+    const int nCode = atoi(pszStr);
+    for (const auto &sEntry : gasVerticalDatums)
+    {
+        if (sEntry.nCode == nCode || EQUAL(pszStr, sEntry.pszMeaning) ||
+            (sEntry.pszAbbrev && EQUAL(pszStr, sEntry.pszAbbrev)))
+        {
+            return sEntry.nCode;
+        }
+    }
+    return -1;
+}
+
 /************************************************************************/
 /*                       S100ReadVerticalDatum()                        */
 /************************************************************************/
 
 void S100ReadVerticalDatum(GDALMajorObject *poMO, const GDALGroup *poGroup)
 {
-    // https://iho.int/uploads/user/pubs/standards/s-100/S-100_5.2.0_Final_Clean.pdf
-    // Table 10c-25 - Vertical and sounding datum, page 53
-    static const struct
-    {
-        int nCode;
-        const char *pszMeaning;
-        const char *pszAbbrev;
-    } asVerticalDatums[] = {
-        {1, "meanLowWaterSprings", "MLWS"},
-        {2, "meanLowerLowWaterSprings", nullptr},
-        {3, "meanSeaLevel", "MSL"},
-        {4, "lowestLowWater", nullptr},
-        {5, "meanLowWater", "MLW"},
-        {6, "lowestLowWaterSprings", nullptr},
-        {7, "approximateMeanLowWaterSprings", nullptr},
-        {8, "indianSpringLowWater", nullptr},
-        {9, "lowWaterSprings", nullptr},
-        {10, "approximateLowestAstronomicalTide", nullptr},
-        {11, "nearlyLowestLowWater", nullptr},
-        {12, "meanLowerLowWater", "MLLW"},
-        {13, "lowWater", "LW"},
-        {14, "approximateMeanLowWater", nullptr},
-        {15, "approximateMeanLowerLowWater", nullptr},
-        {16, "meanHighWater", "MHW"},
-        {17, "meanHighWaterSprings", "MHWS"},
-        {18, "highWater", "HW"},
-        {19, "approximateMeanSeaLevel", nullptr},
-        {20, "highWaterSprings", nullptr},
-        {21, "meanHigherHighWater", "MHHW"},
-        {22, "equinoctialSpringLowWater", nullptr},
-        {23, "lowestAstronomicalTide", "LAT"},
-        {24, "localDatum", nullptr},
-        {25, "internationalGreatLakesDatum1985", nullptr},
-        {26, "meanWaterLevel", nullptr},
-        {27, "lowerLowWaterLargeTide", nullptr},
-        {28, "higherHighWaterLargeTide", nullptr},
-        {29, "nearlyHighestHighWater", nullptr},
-        {30, "highestAstronomicalTide", "HAT"},
-        {44, "balticSeaChartDatum2000", nullptr},
-        {46, "internationalGreatLakesDatum2020", nullptr},
-        {47, "seaFloor", nullptr},
-        {48, "seaSurface", nullptr},
-        {49, "hydrographicZero", nullptr},
-    };
     int nVerticalDatumReference = 1;
     auto poVerticalDatumReference =
@@ -870,7 +897,7 @@ void S100ReadVerticalDatum(GDALMajorObject *poMO, const GDALGroup *poGroup)
     {
         bool bFound = false;
         const auto nVal = poVerticalDatum->ReadAsInt();
-        for (const auto &sVerticalDatum : asVerticalDatums)
+        for (const auto &sVerticalDatum : gasVerticalDatums)
         {
             if (sVerticalDatum.nCode == nVal)
             {
@@ -991,3 +1018,802 @@ std::string S100ReadMetadata(GDALDataset *poDS, const std::string &osFilename,
     }
     return osMetadataFile;
 }
+
+/************************************************************************/
+/*                   S100BaseWriter::S100BaseWriter()                   */
+/************************************************************************/
+
+S100BaseWriter::S100BaseWriter(const char *pszDestFilename,
+                               GDALDataset *poSrcDS, CSLConstList papszOptions)
+    : m_osDestFilename(pszDestFilename), m_poSrcDS(poSrcDS),
+      m_aosOptions(papszOptions)
+{
+}
+
+/************************************************************************/
+/*                   S100BaseWriter::~S100BaseWriter()                  */
+/************************************************************************/
+
+S100BaseWriter::~S100BaseWriter()
+{
+    // Check that destructors of derived classes have themselves called
+    // their Close() implementation
+    CPLAssert(!m_hdf5);
+}
+
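+// Note: BaseClose() below releases the child group handles before the file
+// handle (children first, file last), so that every HDF5 object is closed by
+// the time H5Fclose() is issued on the file.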
+/************************************************************************/ +/* S100BaseWriter::BaseClose() */ +/************************************************************************/ + +bool S100BaseWriter::BaseClose() +{ + bool ret = m_GroupF.clear(); + ret = m_valuesGroup.clear() && ret; + ret = m_featureInstanceGroup.clear() && ret; + ret = m_featureGroup.clear() && ret; + ret = m_hdf5.clear() && ret; + return ret; +} + +/************************************************************************/ +/* S100BaseWriter::BaseChecks() */ +/************************************************************************/ + +bool S100BaseWriter::BaseChecks(const char *pszDriverName, bool crsMustBeEPSG) +{ + if (m_poSrcDS->GetRasterXSize() < 1 || m_poSrcDS->GetRasterYSize() < 1) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Source dataset dimension must be at least 1x1 pixel"); + return false; + } + + if (m_poSrcDS->GetGeoTransform(m_gt) != CE_None) + { + CPLError(CE_Failure, CPLE_NotSupported, + "%s driver requires a source dataset with a geotransform", + pszDriverName); + return false; + } + if (m_gt[2] != 0 || m_gt[4] != 0) + { + CPLError(CE_Failure, CPLE_NotSupported, + "%s driver requires a source dataset with a non-rotated " + "geotransform", + pszDriverName); + return false; + } + + m_poSRS = m_poSrcDS->GetSpatialRef(); + if (!m_poSRS) + { + CPLError(CE_Failure, CPLE_NotSupported, + "%s driver requires a source dataset with a CRS", + pszDriverName); + return false; + } + + const char *pszAuthName = m_poSRS->GetAuthorityName(nullptr); + const char *pszAuthCode = m_poSRS->GetAuthorityCode(nullptr); + if (pszAuthName && pszAuthCode && EQUAL(pszAuthName, "EPSG")) + { + m_nEPSGCode = atoi(pszAuthCode); + } + if (crsMustBeEPSG && m_nEPSGCode == 0) + { + CPLError(CE_Failure, CPLE_NotSupported, + "%s driver requires a source dataset whose CRS has an EPSG " + "identifier", + pszDriverName); + return false; + } + + const char *pszVerticalDatum = + m_aosOptions.FetchNameValue("VERTICAL_DATUM"); + if (!pszVerticalDatum) + pszVerticalDatum = m_poSrcDS->GetMetadataItem("VERTICAL_DATUM_MEANING"); + if (!pszVerticalDatum) + { + CPLError(CE_Failure, CPLE_AppDefined, + "VERTICAL_DATUM creation option must be specified"); + return false; + } + m_nVerticalDatum = + S100GetVerticalDatumCodeFromCodeMeaningOrAbbrev(pszVerticalDatum); + if (m_nVerticalDatum <= 0) + { + CPLError(CE_Failure, CPLE_AppDefined, + "VERTICAL_DATUM value is invalid"); + return false; + } + + const std::string osFilename = CPLGetFilename(m_osDestFilename.c_str()); + CPLAssert(pszDriverName[0] == 'S'); + const char *pszExpectedFilenamePrefix = pszDriverName + 1; + if (!cpl::starts_with(osFilename, pszExpectedFilenamePrefix)) + { + CPLError(CE_Warning, CPLE_AppDefined, + "%s dataset filenames should start with '%s'", pszDriverName, + pszExpectedFilenamePrefix); + } + if (!cpl::ends_with(osFilename, ".h5") && + !cpl::ends_with(osFilename, ".H5")) + { + CPLError(CE_Warning, CPLE_AppDefined, + "%s dataset filenames should have a '.H5' extension", + pszDriverName); + } + + return true; +} + +/************************************************************************/ +/* S100BaseWriter::OpenFileUpdateMode() */ +/************************************************************************/ + +bool S100BaseWriter::OpenFileUpdateMode() +{ + hid_t fapl = H5_CHECK(H5Pcreate(H5P_FILE_ACCESS)); + H5Pset_driver(fapl, HDF5GetFileDriver(), nullptr); + m_hdf5.reset(H5Fopen(m_osDestFilename.c_str(), H5F_ACC_RDWR, fapl)); + H5Pclose(fapl); + if (!m_hdf5) + { + 
CPLError(CE_Failure, CPLE_AppDefined, + "Cannot open file %s in update mode", + m_osDestFilename.c_str()); + return false; + } + return true; +} + +/************************************************************************/ +/* S100BaseWriter::CreateFile() */ +/************************************************************************/ + +bool S100BaseWriter::CreateFile() +{ + hid_t fapl = H5_CHECK(H5Pcreate(H5P_FILE_ACCESS)); + H5Pset_driver(fapl, HDF5GetFileDriver(), nullptr); + { + GH5_libhdf5_error_silencer oErrorSilencer; + m_hdf5.reset(H5Fcreate(m_osDestFilename.c_str(), H5F_ACC_TRUNC, + H5P_DEFAULT, fapl)); + } + H5Pclose(fapl); + if (!m_hdf5) + { + CPLError(CE_Failure, CPLE_AppDefined, "Cannot create file %s", + m_osDestFilename.c_str()); + return false; + } + return true; +} + +/************************************************************************/ +/* S100BaseWriter::WriteUInt8Value() */ +/************************************************************************/ + +bool S100BaseWriter::WriteUInt8Value(hid_t hGroup, const char *pszName, + int value) +{ + return GH5_CreateAttribute(hGroup, pszName, H5T_STD_U8LE) && + GH5_WriteAttribute(hGroup, pszName, value); +} + +/************************************************************************/ +/* S100BaseWriter::WriteUInt32Value() */ +/************************************************************************/ + +bool S100BaseWriter::WriteUInt32Value(hid_t hGroup, const char *pszName, + unsigned value) +{ + return GH5_CreateAttribute(hGroup, pszName, H5T_STD_U32LE) && + GH5_WriteAttribute(hGroup, pszName, value); +} + +/************************************************************************/ +/* S100BaseWriter::WriteFloat32Value() */ +/************************************************************************/ + +bool S100BaseWriter::WriteFloat32Value(hid_t hGroup, const char *pszName, + double value) +{ + return GH5_CreateAttribute(hGroup, pszName, H5T_IEEE_F32LE) && + GH5_WriteAttribute(hGroup, pszName, value); +} + +/************************************************************************/ +/* S100BaseWriter::WriteFloat64Value() */ +/************************************************************************/ + +bool S100BaseWriter::WriteFloat64Value(hid_t hGroup, const char *pszName, + double value) +{ + return GH5_CreateAttribute(hGroup, pszName, H5T_IEEE_F64LE) && + GH5_WriteAttribute(hGroup, pszName, value); +} + +/************************************************************************/ +/* S100BaseWriter::WriteVarLengthStringValue() */ +/************************************************************************/ + +bool S100BaseWriter::WriteVarLengthStringValue(hid_t hGroup, + const char *pszName, + const char *pszValue) +{ + return GH5_CreateAttribute(hGroup, pszName, H5T_C_S1, VARIABLE_LENGTH) && + GH5_WriteAttribute(hGroup, pszName, pszValue); +} + +/************************************************************************/ +/* S100BaseWriter::WriteFixedLengthStringValue() */ +/************************************************************************/ + +bool S100BaseWriter::WriteFixedLengthStringValue(hid_t hGroup, + const char *pszName, + const char *pszValue) +{ + return GH5_CreateAttribute(hGroup, pszName, H5T_C_S1, + static_cast(strlen(pszValue))) && + GH5_WriteAttribute(hGroup, pszName, pszValue); +} + +/************************************************************************/ +/* S100BaseWriter::WriteProductSpecification() */ +/************************************************************************/ + +bool 
S100BaseWriter::WriteProductSpecification( + const char *pszProductSpecification) +{ + return WriteVarLengthStringValue(m_hdf5, "productSpecification", + pszProductSpecification); +} + +/************************************************************************/ +/* S100BaseWriter::WriteIssueDate() */ +/************************************************************************/ + +bool S100BaseWriter::WriteIssueDate() +{ + const char *pszIssueDate = m_aosOptions.FetchNameValue("ISSUE_DATE"); + if (!pszIssueDate) + { + const char *pszTmp = m_poSrcDS->GetMetadataItem("issueDate"); + if (pszTmp && strlen(pszTmp) == 8) + pszIssueDate = pszTmp; + } + + std::string osIssueDate; // keep in that scope + if (pszIssueDate) + { + if (strlen(pszIssueDate) != 8) + CPLError(CE_Warning, CPLE_AppDefined, + "ISSUE_DATE should be 8 digits: YYYYMMDD"); + } + else + { + time_t now; + time(&now); + struct tm brokenDown; + CPLUnixTimeToYMDHMS(now, &brokenDown); + osIssueDate = CPLSPrintf("%04d%02d%02d", brokenDown.tm_year + 1900, + brokenDown.tm_mon + 1, brokenDown.tm_mday); + pszIssueDate = osIssueDate.c_str(); + } + + return WriteVarLengthStringValue(m_hdf5, "issueDate", pszIssueDate); +} + +/************************************************************************/ +/* S100BaseWriter::WriteIssueTime() */ +/************************************************************************/ + +bool S100BaseWriter::WriteIssueTime() +{ + const char *pszIssueTime = m_aosOptions.FetchNameValue("ISSUE_TIME"); + if (!pszIssueTime) + { + const char *pszTmp = m_poSrcDS->GetMetadataItem("issueTime"); + if (pszTmp && strlen(pszTmp) == 7 && pszTmp[6] == 'Z') + pszIssueTime = pszTmp; + } + return !pszIssueTime || pszIssueTime[0] == 0 || + WriteVarLengthStringValue(m_hdf5, "issueTime", pszIssueTime); +} + +/************************************************************************/ +/* S100BaseWriter::WriteTopLevelBoundingBox() */ +/************************************************************************/ + +bool S100BaseWriter::WriteTopLevelBoundingBox() +{ + + OGREnvelope sExtent; + if (m_poSrcDS->GetExtentWGS84LongLat(&sExtent) != OGRERR_NONE) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Cannot get dataset extent in WGS84 longitude/latitude"); + return false; + } + + return WriteFloat32Value(m_hdf5, "westBoundLongitude", sExtent.MinX) && + WriteFloat32Value(m_hdf5, "southBoundLatitude", sExtent.MinY) && + WriteFloat32Value(m_hdf5, "eastBoundLongitude", sExtent.MaxX) && + WriteFloat32Value(m_hdf5, "northBoundLatitude", sExtent.MaxY); +} + +/************************************************************************/ +/* S100BaseWriter::WriteHorizontalCRS() */ +/************************************************************************/ + +bool S100BaseWriter::WriteHorizontalCRS(int nCode) +{ + return GH5_CreateAttribute(m_hdf5, "horizontalCRS", H5T_STD_I32LE) && + GH5_WriteAttribute(m_hdf5, "horizontalCRS", nCode); +} + +/************************************************************************/ +/* S100BaseWriter::WriteVerticalCoordinateBase() */ +/************************************************************************/ + +bool S100BaseWriter::WriteVerticalCoordinateBase(int nCode) +{ + GH5_HIDTypeHolder hEnumType(H5_CHECK(H5Tenum_create(H5T_STD_U8LE))); + bool ret = hEnumType; + if (hEnumType) + { + uint8_t val; + val = 1; + ret = + ret && H5_CHECK(H5Tenum_insert(hEnumType, "seaSurface", &val)) >= 0; + val = 2; + ret = ret && + H5_CHECK(H5Tenum_insert(hEnumType, "verticalDatum", &val)) >= 0; + val = 3; + ret = + ret && 
H5_CHECK(H5Tenum_insert(hEnumType, "seaBottom", &val)) >= 0; + + ret = + ret && + GH5_CreateAttribute(m_hdf5, "verticalCoordinateBase", hEnumType) && + GH5_WriteAttribute(m_hdf5, "verticalCoordinateBase", nCode); + } + return ret; +} + +/************************************************************************/ +/* S100BaseWriter::WriteVerticalDatumReference() */ +/************************************************************************/ + +bool S100BaseWriter::WriteVerticalDatumReference(hid_t hGroup, int nCode) +{ + GH5_HIDTypeHolder hEnumType(H5_CHECK(H5Tenum_create(H5T_STD_U8LE))); + bool ret = hEnumType; + if (hEnumType) + { + uint8_t val; + val = 1; + ret = ret && H5_CHECK(H5Tenum_insert(hEnumType, "s100VerticalDatum", + &val)) >= 0; + val = 2; + ret = ret && H5_CHECK(H5Tenum_insert(hEnumType, "EPSG", &val)) >= 0; + + ret = + ret && + GH5_CreateAttribute(hGroup, "verticalDatumReference", hEnumType) && + GH5_WriteAttribute(hGroup, "verticalDatumReference", nCode); + } + return ret; +} + +/************************************************************************/ +/* S100BaseWriter::WriteVerticalCS() */ +/************************************************************************/ + +bool S100BaseWriter::WriteVerticalCS(int nCode) +{ + return GH5_CreateAttribute(m_hdf5, "verticalCS", H5T_STD_I32LE) && + GH5_WriteAttribute(m_hdf5, "verticalCS", nCode); +} + +/************************************************************************/ +/* S100BaseWriter::WriteVerticalDatum() */ +/************************************************************************/ + +bool S100BaseWriter::WriteVerticalDatum(hid_t hGroup, hid_t hType, int nCode) +{ + return GH5_CreateAttribute(hGroup, "verticalDatum", hType) && + GH5_WriteAttribute(hGroup, "verticalDatum", nCode); +} + +/************************************************************************/ +/* S100BaseWriter::CreateGroupF() */ +/************************************************************************/ + +bool S100BaseWriter::CreateGroupF() +{ + m_GroupF.reset(H5_CHECK(H5Gcreate(m_hdf5, "Group_F", 0))); + return m_GroupF; +} + +/************************************************************************/ +/* S100BaseWriter::CreateFeatureGroup() */ +/************************************************************************/ + +bool S100BaseWriter::CreateFeatureGroup(const char *name) +{ + m_featureGroup.reset(H5_CHECK(H5Gcreate(m_hdf5, name, 0))); + return m_featureGroup; +} + +/************************************************************************/ +/* S100BaseWriter::WriteDataCodingFormat() */ +/************************************************************************/ + +bool S100BaseWriter::WriteDataCodingFormat(hid_t hGroup, int nCode) +{ + GH5_HIDTypeHolder hEnumType(H5_CHECK(H5Tenum_create(H5T_STD_U8LE))); + bool ret = hEnumType; + if (hEnumType) + { + uint8_t val = 0; + for (const char *pszEnumName : + {"Fixed Stations", "Regular Grid", "Ungeorectified Grid", + "Moving Platform", "Irregular Grid", "Variable cell size", "TIN", + "Fixed Stations (Stationwise)", "Feature oriented Regular Grid"}) + { + ++val; + ret = ret && + H5_CHECK(H5Tenum_insert(hEnumType, pszEnumName, &val)) >= 0; + } + + ret = ret && + GH5_CreateAttribute(hGroup, "dataCodingFormat", hEnumType) && + GH5_WriteAttribute(hGroup, "dataCodingFormat", nCode); + } + return ret; +} + +/************************************************************************/ +/* S100BaseWriter::WriteCommonPointRule() */ +/************************************************************************/ + +bool 
S100BaseWriter::WriteCommonPointRule(hid_t hGroup, int nCode) +{ + GH5_HIDTypeHolder hEnumType(H5_CHECK(H5Tenum_create(H5T_STD_U8LE))); + bool ret = hEnumType; + if (hEnumType) + { + uint8_t val = 0; + for (const char *pszEnumName : {"average", "low", "high", "all"}) + { + ++val; + ret = ret && + H5_CHECK(H5Tenum_insert(hEnumType, pszEnumName, &val)) >= 0; + } + + ret = ret && + GH5_CreateAttribute(hGroup, "commonPointRule", hEnumType) && + GH5_WriteAttribute(hGroup, "commonPointRule", nCode); + } + return ret; +} + +/************************************************************************/ +/* S100BaseWriter::WriteDataOffsetCode() */ +/************************************************************************/ + +bool S100BaseWriter::WriteDataOffsetCode(hid_t hGroup, int nCode) +{ + GH5_HIDTypeHolder hEnumType(H5_CHECK(H5Tenum_create(H5T_STD_U8LE))); + bool ret = hEnumType; + if (hEnumType) + { + uint8_t val = 0; + for (const char *pszEnumName : + {"XMin, YMin (\"Lower left\") corner (\"Cell origin\")", + "XMax, YMax (\"Upper right\") corner", + "XMax, YMin (\"Lower right\") corner", + "XMin, YMax (\"Upper left\") corner", + "Barycenter (centroid) of cell"}) + { + ++val; + ret = ret && + H5_CHECK(H5Tenum_insert(hEnumType, pszEnumName, &val)) >= 0; + } + + ret = ret && GH5_CreateAttribute(hGroup, "dataOffsetCode", hEnumType) && + GH5_WriteAttribute(hGroup, "dataOffsetCode", nCode); + } + + return ret; +} + +/************************************************************************/ +/* S100BaseWriter::WriteDimension() */ +/************************************************************************/ + +bool S100BaseWriter::WriteDimension(hid_t hGroup, int nCode) +{ + return WriteUInt8Value(hGroup, "dimension", nCode); +} + +/************************************************************************/ +/* S100BaseWriter::WriteHorizontalPositionUncertainty() */ +/************************************************************************/ + +bool S100BaseWriter::WriteHorizontalPositionUncertainty(hid_t hGroup, + float fValue) +{ + return WriteFloat32Value(hGroup, "horizontalPositionUncertainty", fValue); +} + +/************************************************************************/ +/* S100BaseWriter::WriteInterpolationType() */ +/************************************************************************/ + +bool S100BaseWriter::WriteInterpolationType(hid_t hGroup, int nCode) +{ + GH5_HIDTypeHolder hEnumType(H5_CHECK(H5Tenum_create(H5T_STD_U8LE))); + bool ret = hEnumType; + if (hEnumType) + { + uint8_t val = 0; + constexpr const char *NULL_STRING = nullptr; + for (const char *pszEnumName : { + "nearestneighbor", // 1 + NULL_STRING, // 2 + NULL_STRING, // 3 + NULL_STRING, // 4 + "bilinear", // 5 + "biquadratic", // 6 + "bicubic", // 7 + NULL_STRING, // 8 + "barycentric", // 9 + "discrete" // 10 + }) + { + ++val; + if (pszEnumName) + { + ret = ret && H5_CHECK(H5Tenum_insert(hEnumType, pszEnumName, + &val)) >= 0; + } + } + + ret = ret && + GH5_CreateAttribute(hGroup, "interpolationType", hEnumType) && + GH5_WriteAttribute(hGroup, "interpolationType", nCode); + } + return ret; +} + +/************************************************************************/ +/* S100BaseWriter::WriteNumInstances() */ +/************************************************************************/ + +bool S100BaseWriter::WriteNumInstances(hid_t hGroup, int numInstances) +{ + return WriteUInt8Value(hGroup, "numInstances", numInstances); +} + +/************************************************************************/ +/* 
S100BaseWriter::WriteSequencingRuleScanDirection() */ +/************************************************************************/ + +bool S100BaseWriter::WriteSequencingRuleScanDirection(hid_t hGroup, + const char *pszValue) +{ + return WriteVarLengthStringValue(hGroup, "sequencingRule.scanDirection", + pszValue); +} + +/************************************************************************/ +/* S100BaseWriter::WriteSequencingRuleType() */ +/************************************************************************/ + +bool S100BaseWriter::WriteSequencingRuleType(hid_t hGroup, int nCode) +{ + GH5_HIDTypeHolder hEnumType(H5_CHECK(H5Tenum_create(H5T_STD_U8LE))); + bool ret = hEnumType; + if (hEnumType) + { + uint8_t val = 0; + for (const char *pszEnumName : + {"linear", "boustrophedonic", "CantorDiagonal", "spiral", "Morton", + "Hilbert"}) + { + ++val; + ret = ret && + H5_CHECK(H5Tenum_insert(hEnumType, pszEnumName, &val)) >= 0; + } + + ret = ret && + GH5_CreateAttribute(hGroup, "sequencingRule.type", hEnumType) && + GH5_WriteAttribute(hGroup, "sequencingRule.type", nCode); + } + return ret; +} + +/************************************************************************/ +/* S100BaseWriter::WriteVerticalUncertainty() */ +/************************************************************************/ + +bool S100BaseWriter::WriteVerticalUncertainty(hid_t hGroup, float fValue) +{ + return WriteFloat32Value(hGroup, "verticalUncertainty", fValue); +} + +/************************************************************************/ +/* S100BaseWriter::WriteOneDimensionalVarLengthStringArray() */ +/************************************************************************/ + +bool S100BaseWriter::WriteOneDimensionalVarLengthStringArray( + hid_t hGroup, const char *name, CSLConstList values) +{ + bool ret = false; + hsize_t dims[1] = {static_cast(CSLCount(values))}; + GH5_HIDSpaceHolder hSpaceId(H5_CHECK(H5Screate_simple(1, dims, NULL))); + GH5_HIDTypeHolder hTypeId(H5_CHECK(H5Tcopy(H5T_C_S1))); + if (hSpaceId && hTypeId) + { + ret = H5_CHECK(H5Tset_size(hTypeId, H5T_VARIABLE)) >= 0 && + H5_CHECK(H5Tset_strpad(hTypeId, H5T_STR_NULLTERM)) >= 0; + GH5_HIDDatasetHolder hDSId; + if (ret) + { + hDSId.reset(H5_CHECK( + H5Dcreate(hGroup, name, hTypeId, hSpaceId, H5P_DEFAULT))); + if (hDSId) + ret = H5Dwrite(hDSId, hTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT, + values) >= 0; + } + } + return ret; +} + +/************************************************************************/ +/* S100BaseWriter::WriteAxisNames() */ +/************************************************************************/ + +bool S100BaseWriter::WriteAxisNames(hid_t hGroup) +{ + const char *axisProjected[] = {"Easting", "Northing", nullptr}; + const char *axisGeographic[] = {"Latitude", "Longitude", nullptr}; + return WriteOneDimensionalVarLengthStringArray( + hGroup, "axisNames", + m_poSRS->IsProjected() ? 
axisProjected : axisGeographic); +} + +/************************************************************************/ +/* S100BaseWriter::CreateFeatureInstanceGroup() */ +/************************************************************************/ + +bool S100BaseWriter::CreateFeatureInstanceGroup(const char *name) +{ + CPLAssert(m_featureGroup); + m_featureInstanceGroup.reset(H5_CHECK(H5Gcreate(m_featureGroup, name, 0))); + return m_featureInstanceGroup; +} + +/************************************************************************/ +/* S100BaseWriter::WriteFIGGridRelatedParameters() */ +/************************************************************************/ + +bool S100BaseWriter::WriteFIGGridRelatedParameters(hid_t hGroup) +{ + // From pixel-corner convention to pixel-center convention + const double dfMinX = m_gt[0] + m_gt[1] / 2; + const double dfMinY = + m_gt[5] < 0 + ? m_gt[3] + m_gt[5] * m_poSrcDS->GetRasterYSize() - m_gt[5] / 2 + : m_gt[3] + m_gt[5] / 2; + const double dfMaxX = dfMinX + (m_poSrcDS->GetRasterXSize() - 1) * m_gt[1]; + const double dfMaxY = + dfMinY + (m_poSrcDS->GetRasterYSize() - 1) * std::fabs(m_gt[5]); + + return WriteFloat32Value(hGroup, "westBoundLongitude", dfMinX) && + WriteFloat32Value(hGroup, "southBoundLatitude", dfMinY) && + WriteFloat32Value(hGroup, "eastBoundLongitude", dfMaxX) && + WriteFloat32Value(hGroup, "northBoundLatitude", dfMaxY) && + WriteFloat64Value(hGroup, "gridOriginLongitude", dfMinX) && + WriteFloat64Value(hGroup, "gridOriginLatitude", dfMinY) && + WriteFloat64Value(hGroup, "gridSpacingLongitudinal", m_gt[1]) && + WriteFloat64Value(hGroup, "gridSpacingLatitudinal", + std::fabs(m_gt[5])) && + WriteUInt32Value(hGroup, "numPointsLongitudinal", + m_poSrcDS->GetRasterXSize()) && + WriteUInt32Value(hGroup, "numPointsLatitudinal", + m_poSrcDS->GetRasterYSize()) && + WriteVarLengthStringValue(hGroup, "startSequence", "0,0"); +} + +/************************************************************************/ +/* S100BaseWriter::WriteNumGRP() */ +/************************************************************************/ + +bool S100BaseWriter::WriteNumGRP(hid_t hGroup, int numGRP) +{ + return WriteUInt8Value(hGroup, "numGRP", numGRP); +} + +/************************************************************************/ +/* S100BaseWriter::CreateValuesGroup() */ +/************************************************************************/ + +bool S100BaseWriter::CreateValuesGroup(const char *name) +{ + CPLAssert(m_featureInstanceGroup); + m_valuesGroup.reset(H5_CHECK(H5Gcreate(m_featureInstanceGroup, name, 0))); + return m_valuesGroup; +} + +/************************************************************************/ +/* S100BaseWriter::WriteGroupFDataset() */ +/************************************************************************/ + +bool S100BaseWriter::WriteGroupFDataset( + const char *name, + const std::vector> + &rows) +{ + GH5_HIDTypeHolder hDataType(H5_CHECK( + H5Tcreate(H5T_COMPOUND, GROUP_F_DATASET_FIELD_COUNT * sizeof(char *)))); + GH5_HIDTypeHolder hVarLengthType(H5_CHECK(H5Tcopy(H5T_C_S1))); + bool bRet = + hDataType && hVarLengthType && + H5_CHECK(H5Tset_size(hVarLengthType, H5T_VARIABLE)) >= 0 && + H5_CHECK(H5Tset_strpad(hVarLengthType, H5T_STR_NULLTERM)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "code", 0 * sizeof(char *), + hVarLengthType)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "name", 1 * sizeof(char *), + hVarLengthType)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "uom.name", 2 * sizeof(char *), + hVarLengthType)) >= 0 && + 
H5_CHECK(H5Tinsert(hDataType, "fillValue", 3 * sizeof(char *), + hVarLengthType)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "datatype", 4 * sizeof(char *), + hVarLengthType)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "lower", 5 * sizeof(char *), + hVarLengthType)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "upper", 6 * sizeof(char *), + hVarLengthType)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "closure", 7 * sizeof(char *), + hVarLengthType)) >= 0; + + hsize_t dims[] = {static_cast(rows.size())}; + GH5_HIDSpaceHolder hDataSpace(H5_CHECK(H5Screate_simple(1, dims, nullptr))); + bRet = bRet && hDataSpace; + GH5_HIDDatasetHolder hDatasetID; + if (bRet) + { + hDatasetID.reset(H5_CHECK( + H5Dcreate(m_GroupF, name, hDataType, hDataSpace, H5P_DEFAULT))); + bRet = hDatasetID; + } + GH5_HIDSpaceHolder hFileSpace; + if (bRet) + { + hFileSpace.reset(H5_CHECK(H5Dget_space(hDatasetID))); + bRet = hFileSpace; + } + + hsize_t count[] = {1}; + GH5_HIDSpaceHolder hMemSpace(H5_CHECK(H5Screate_simple(1, count, nullptr))); + bRet = bRet && hMemSpace; + + H5OFFSET_TYPE nOffset = 0; + for (const auto &row : rows) + { + H5OFFSET_TYPE offset[] = {nOffset}; + bRet = bRet && + H5_CHECK(H5Sselect_hyperslab(hFileSpace, H5S_SELECT_SET, offset, + nullptr, count, nullptr)) >= 0 && + H5_CHECK(H5Dwrite(hDatasetID, hDataType, hMemSpace, hFileSpace, + H5P_DEFAULT, row.data())) >= 0; + ++nOffset; + } + + return bRet; +} diff --git a/frmts/hdf5/s100.h b/frmts/hdf5/s100.h index f744f65237af..367f0e66f47a 100644 --- a/frmts/hdf5/s100.h +++ b/frmts/hdf5/s100.h @@ -19,6 +19,11 @@ #include "gdal_priv.h" #include "ogr_spatialref.h" +#include "hdf5_api.h" +#include "gh5_convenience.h" + +#include + /************************************************************************/ /* S100BaseDataset */ /************************************************************************/ @@ -47,6 +52,101 @@ class S100BaseDataset CPL_NON_FINAL : public GDALPamDataset char **GetFileList() override; }; +/************************************************************************/ +/* S100BaseWriter */ +/************************************************************************/ + +class S100BaseWriter CPL_NON_FINAL +{ + public: + virtual ~S100BaseWriter(); + + protected: + S100BaseWriter(const char *pszDestFilename, GDALDataset *poSrcDS, + CSLConstList papszOptions); + + // to be called by destructor of derived classes which must also + // end up by calling BaseClose() + virtual bool Close() = 0; + bool BaseClose(); + + bool BaseChecks(const char *pszDriverName, bool crsMustBeEPSG); + + static bool WriteUInt8Value(hid_t hGroup, const char *pszName, int value); + static bool WriteUInt32Value(hid_t hGroup, const char *pszName, + unsigned value); + static bool WriteFloat32Value(hid_t hGroup, const char *pszName, + double value); + static bool WriteFloat64Value(hid_t hGroup, const char *pszName, + double value); + static bool WriteVarLengthStringValue(hid_t hGroup, const char *pszName, + const char *pszValue); + static bool WriteFixedLengthStringValue(hid_t hGroup, const char *pszName, + const char *pszValue); + static bool WriteOneDimensionalVarLengthStringArray(hid_t hGroup, + const char *name, + CSLConstList values); + + bool OpenFileUpdateMode(); + bool CreateFile(); + bool WriteProductSpecification(const char *pszProductSpecification); + bool WriteIssueDate(); + bool WriteIssueTime(); + bool WriteTopLevelBoundingBox(); + bool WriteHorizontalCRS(int nCode); + bool WriteVerticalCS(int nCode); + bool WriteVerticalCoordinateBase(int nCode); + static bool 
WriteVerticalDatumReference(hid_t hGroup, int nCode); + static bool WriteVerticalDatum(hid_t hGroup, hid_t hType, int nCode); + + bool CreateFeatureGroup(const char *name); + static bool WriteDataCodingFormat(hid_t hGroup, int nCode); + static bool WriteCommonPointRule(hid_t hGroup, int nCode); + static bool WriteDataOffsetCode(hid_t hGroup, int nCode); + static bool WriteDimension(hid_t hGroup, int nCode); + static bool WriteHorizontalPositionUncertainty(hid_t hGroup, float fValue); + static bool WriteVerticalUncertainty(hid_t hGroup, float fValue); + static bool WriteInterpolationType(hid_t hGroup, int nCode); + static bool WriteNumInstances(hid_t hGroup, int numInstances); + static bool WriteSequencingRuleScanDirection(hid_t hGroup, + const char *pszValue); + static bool WriteSequencingRuleType(hid_t hGroup, int nCode); + bool WriteAxisNames(hid_t hGroup); + + bool CreateFeatureInstanceGroup(const char *name); + bool WriteFIGGridRelatedParameters(hid_t hGroup); + static bool WriteNumGRP(hid_t hGroup, int numGRP); + + bool CreateValuesGroup(const char *name); + + bool CreateGroupF(); + + static constexpr int GROUP_F_DATASET_FIELD_COUNT = 8; + bool WriteGroupFDataset( + const char *name, + const std::vector> + &rows); + + const std::string m_osDestFilename; + GDALDataset *const m_poSrcDS; + const CPLStringList m_aosOptions; + GDALGeoTransform m_gt{}; + GH5_HIDFileHolder m_hdf5{}; + GH5_HIDGroupHolder m_GroupF{}; + GH5_HIDGroupHolder m_featureGroup{}; + GH5_HIDGroupHolder m_featureInstanceGroup{}; + GH5_HIDGroupHolder m_valuesGroup{}; + const OGRSpatialReference *m_poSRS = nullptr; + int m_nVerticalDatum = 0; + int m_nEPSGCode = 0; + + CPL_DISALLOW_COPY_ASSIGN(S100BaseWriter) +}; + +/************************************************************************/ +/* Function declarations */ +/************************************************************************/ + bool S100GetNumPointsLongitudinalLatitudinal(const GDALGroup *poGroup, int &nNumPointsLongitudinal, int &nNumPointsLatitudinal); @@ -65,6 +165,7 @@ constexpr const char *S100_VERTICAL_DATUM_MEANING = "VERTICAL_DATUM_MEANING"; constexpr const char *S100_VERTICAL_DATUM_ABBREV = "VERTICAL_DATUM_ABBREV"; constexpr const char *S100_VERTICAL_DATUM_NAME = "VERTICAL_DATUM_NAME"; +int S100GetVerticalDatumCodeFromCodeMeaningOrAbbrev(const char *pszStr); void S100ReadVerticalDatum(GDALMajorObject *poMO, const GDALGroup *poGroup); std::string S100ReadMetadata(GDALDataset *poDS, const std::string &osFilename, diff --git a/frmts/hdf5/s102dataset.cpp b/frmts/hdf5/s102dataset.cpp index c726c44f45ff..12686dcf5ba8 100644 --- a/frmts/hdf5/s102dataset.cpp +++ b/frmts/hdf5/s102dataset.cpp @@ -5,12 +5,14 @@ * Author: Even Rouault * ****************************************************************************** - * Copyright (c) 2023, Even Rouault + * Copyright (c) 2023-2025, Even Rouault * * SPDX-License-Identifier: MIT ****************************************************************************/ #include "cpl_port.h" +#include "cpl_vsi.h" + #include "hdf5dataset.h" #include "hdf5drivercore.h" #include "gh5_convenience.h" @@ -22,8 +24,13 @@ #include "gdal_proxy.h" #include "gdal_rat.h" +#include +#include #include #include +#include +#include +#include /************************************************************************/ /* S102Dataset */ @@ -43,6 +50,11 @@ class S102Dataset final : public S100BaseDataset ~S102Dataset() override; static GDALDataset *Open(GDALOpenInfo *); + static GDALDataset *CreateCopy(const char *pszFilename, + GDALDataset 
*poSrcDS, int bStrict,
+                                   char **papszOptions,
+                                   GDALProgressFunc pfnProgress,
+                                   void *pProgressData);
 };
 
 S102Dataset::~S102Dataset() = default;
 
@@ -749,6 +761,1212 @@ bool S102Dataset::OpenQuality(GDALOpenInfo *poOpenInfo,
     return true;
 }
 
+/************************************************************************/
+/*                             S102Creator                              */
+/************************************************************************/
+
+class S102Creator final : public S100BaseWriter
+{
+  public:
+    S102Creator(const char *pszDestFilename, GDALDataset *poSrcDS,
+                CSLConstList papszOptions)
+        : S100BaseWriter(pszDestFilename, poSrcDS, papszOptions)
+    {
+    }
+
+    ~S102Creator() override;
+
+    bool Create(GDALProgressFunc pfnProgress, void *pProgressData);
+
+    // From the S102 spec
+    static constexpr float NODATA = 1000000.0f;
+    static constexpr const char *FEATURE_TYPE = "BathymetryCoverage";
+    static constexpr const char *QUALITY_FEATURE_TYPE =
+        "QualityOfBathymetryCoverage";
+
+  protected:
+    bool Close() override
+    {
+        return BaseClose();
+    }
+
+  private:
+    bool WriteFeatureGroupAttributes(bool isQuality);
+    bool CopyValues(GDALProgressFunc pfnProgress, void *pProgressData);
+    bool CopyQualityValues(GDALDataset *poQualityDS,
+                           const std::set<int> &oSetRATId,
+                           GDALProgressFunc pfnProgress, void *pProgressData);
+    bool WriteFeatureAttributeTable(const GDALRasterAttributeTable *poRAT);
+    bool CreateGroupF(bool hasQualityOfBathymetryCoverage);
+};
+
+/************************************************************************/
+/*                      S102Creator::~S102Creator()                     */
+/************************************************************************/
+
+S102Creator::~S102Creator()
+{
+    S102Creator::Close();
+}
+
+/************************************************************************/
+/*                        S102Creator::Create()                         */
+/************************************************************************/
+
+// S102 v3.0 Table 10-8 - Elements of featureAttributeTable compound datatype
+static const struct
+{
+    const char *pszName;
+    const char *pszType;
+} gasFeatureAttributeTableMembers[] = {
+    {"id", "uint32"},
+    {"dataAssessment", "uint8"},
+    {"featuresDetected.leastDepthOfDetectedFeaturesMeasured", "boolean"},
+    {"featuresDetected.significantFeaturesDetected", "boolean"},
+    {"featuresDetected.sizeOfFeaturesDetected", "float32"},
+    {"featureSizeVar", "float32"},
+    {"fullSeafloorCoverageAchieved", "boolean"},
+    {"bathyCoverage", "boolean"},
+    {"zoneOfConfidence.horizontalPositionUncertainty.uncertaintyFixed",
+     "float32"},
+    {"zoneOfConfidence.horizontalPositionUncertainty.uncertaintyVariableFactor",
+     "float32"},
+    {"surveyDateRange.dateStart", "date"},
+    {"surveyDateRange.dateEnd", "date"},
+    {"sourceSurveyID", "string"},
+    {"surveyAuthority", "string"},
+    {"typeOfBathymetricEstimationUncertainty", "enumeration"},
+};
+
+bool S102Creator::Create(GDALProgressFunc pfnProgress, void *pProgressData)
+{
+    if (m_poSrcDS->GetRasterCount() != 1 && m_poSrcDS->GetRasterCount() != 2)
+    {
+        CPLError(CE_Failure, CPLE_NotSupported,
+                 "Source dataset must have one or two bands");
+        return false;
+    }
+
+    if (!BaseChecks("S102", true))
+        return false;
+
+    const bool bAppendSubdataset =
+        CPLTestBool(m_aosOptions.FetchNameValueDef("APPEND_SUBDATASET", "NO"));
+
+    std::unique_ptr<GDALDataset> poQualityDS;
+    const char *pszQualityDataset =
+        m_aosOptions.FetchNameValue("QUALITY_DATASET");
+    const GDALRasterAttributeTable *poRAT = nullptr;
+    if (!pszQualityDataset && !bAppendSubdataset)
+    {
+        const char *pszSubDSName =
+            m_poSrcDS->GetMetadataItem("SUBDATASET_2_NAME", "SUBDATASETS");
+        if (pszSubDSName &&
+            cpl::starts_with(std::string_view(pszSubDSName), "S102:") &&
+            cpl::ends_with(std::string_view(pszSubDSName),
+                           ":QualityOfBathymetryCoverage"))
+        {
+            pszQualityDataset = pszSubDSName;
+        }
+    }
+
+    std::set<int> oSetRATId;
+    if (pszQualityDataset)
+    {
+        if (bAppendSubdataset)
+        {
+            CPLError(CE_Failure, CPLE_NotSupported,
+                     "Quality dataset can only be set on initial creation");
+            return false;
+        }
+        poQualityDS.reset(GDALDataset::Open(
+            pszQualityDataset, GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR, nullptr,
+            nullptr, nullptr));
+        if (!poQualityDS)
+            return false;
+
+        if (poQualityDS->GetRasterCount() != 1)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "%s does not have a single band.", pszQualityDataset);
+            return false;
+        }
+        if (!GDALDataTypeIsInteger(
+                poQualityDS->GetRasterBand(1)->GetRasterDataType()))
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "%s band is not of an integer data type.",
+                     pszQualityDataset);
+            return false;
+        }
+        if (poQualityDS->GetRasterXSize() != m_poSrcDS->GetRasterXSize() ||
+            poQualityDS->GetRasterYSize() != m_poSrcDS->GetRasterYSize())
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "%s does not have the same dimensions as %s.",
+                     pszQualityDataset, m_poSrcDS->GetDescription());
+            return false;
+        }
+
+        const auto poQualityDS_SRS = poQualityDS->GetSpatialRef();
+        if (!poQualityDS_SRS || !poQualityDS_SRS->IsSame(m_poSRS))
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "%s does not have the same CRS as %s.", pszQualityDataset,
+                     m_poSrcDS->GetDescription());
+            return false;
+        }
+
+        GDALGeoTransform gt;
+        if (poQualityDS->GetGeoTransform(gt) != CE_None || gt != m_gt)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "%s does not have the same geotransform as %s.",
+                     pszQualityDataset, m_poSrcDS->GetDescription());
+            return false;
+        }
+
+        poRAT = poQualityDS->GetRasterBand(1)->GetDefaultRAT();
+        if (!poRAT)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "%s does not have a raster attribute table.",
+                     poQualityDS->GetDescription());
+            return false;
+        }
+
+        const int nRATColumnCount = poRAT->GetColumnCount();
+        std::set<std::string> setKnownColumnNames;
+        for (const auto &entry : gasFeatureAttributeTableMembers)
+            setKnownColumnNames.insert(entry.pszName);
+        int iRATIdField = -1;
+        for (int i = 0; i < nRATColumnCount; ++i)
+        {
+            const char *pszColName = poRAT->GetNameOfCol(i);
+            if (strcmp(pszColName, "id") == 0)
+            {
+                iRATIdField = i;
+            }
+            else if (!cpl::contains(setKnownColumnNames, pszColName))
+            {
+                CPLError(CE_Warning, CPLE_AppDefined,
+                         "'%s' is not a valid S102 feature attribute table "
+                         "column name.",
+                         pszColName);
+            }
+        }
+        if (iRATIdField < 0)
+        {
+            CPLError(
+                CE_Failure, CPLE_AppDefined,
+                "Input raster attribute table lacks an integer 'id' field");
+            return false;
+        }
+        const int nRATRowCount = poRAT->GetRowCount();
+        for (int i = 0; i < nRATRowCount; ++i)
+        {
+            const int nID = poRAT->GetValueAsInt(i, iRATIdField);
+            if (nID == 0)
+            {
+                CPLError(CE_Failure, CPLE_AppDefined,
+                         "id=0 is not allowed in input raster attribute table");
+                return false;
+            }
+            else if (nID < 0)
+            {
+                CPLError(CE_Failure, CPLE_AppDefined,
+                         "Negative id is not allowed in input raster attribute "
+                         "table");
+                return false;
+            }
+            else if (cpl::contains(oSetRATId, nID))
+            {
+                CPLError(
+                    CE_Failure, CPLE_AppDefined,
+                    "Several rows of input raster attribute table have id=%d",
+                    nID);
+                return false;
+            }
+            oSetRATId.insert(nID);
+        }
+    }
+
+    if (!((m_nVerticalDatum >= 1 && m_nVerticalDatum <= 30) ||
+          m_nVerticalDatum == 44))
+    {
+        CPLError(CE_Warning, CPLE_AppDefined,
+                 "VERTICAL_DATUM=%d is a valid S100 value, but is not "
+                 "allowed in S102. Valid values are [1, 30] or 44",
+                 m_nVerticalDatum);
+    }
+
+    if (!(m_nEPSGCode == 4326 || m_nEPSGCode == 5041 || m_nEPSGCode == 5042 ||
+          (m_nEPSGCode >= 32601 && m_nEPSGCode <= 32660) ||
+          (m_nEPSGCode >= 32701 && m_nEPSGCode <= 32760)))
+    {
+        CPLError(CE_Warning, CPLE_NotSupported,
+                 "The EPSG code of the CRS is %d. "
+                 "Only EPSG codes 4326, 5041, 5042, [32601, 32660], "
+                 "[32701, 32760] are officially supported. "
+                 "The dataset may not be recognized by other software",
+                 m_nEPSGCode);
+    }
+
+    if (bAppendSubdataset)
+    {
+        GDALOpenInfo oOpenInfo(m_osDestFilename.c_str(), GA_ReadOnly);
+        auto poOriDS =
+            std::unique_ptr<GDALDataset>(S102Dataset::Open(&oOpenInfo));
+        if (!poOriDS)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "%s is not a valid existing S102 dataset",
+                     m_osDestFilename.c_str());
+            return false;
+        }
+        const auto poOriSRS = poOriDS->GetSpatialRef();
+        if (!poOriSRS)
+        {
+            // shouldn't happen
+            return false;
+        }
+        if (!poOriSRS->IsSame(m_poSRS))
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "CRS of %s is not the same as the one of %s",
+                     m_osDestFilename.c_str(), m_poSrcDS->GetDescription());
+            return false;
+        }
+        poOriDS.reset();
+
+        OGREnvelope sExtent;
+        if (m_poSrcDS->GetExtentWGS84LongLat(&sExtent) != OGRERR_NONE)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Cannot get dataset extent in WGS84 longitude/latitude");
+            return false;
+        }
+
+        bool ret = OpenFileUpdateMode();
+        if (ret)
+        {
+            m_featureGroup.reset(
+                H5_CHECK(H5Gopen(m_hdf5, "BathymetryCoverage")));
+        }
+
+        ret = ret && m_featureGroup;
+        double dfNumInstances = 0;
+        ret = ret && GH5_FetchAttribute(m_featureGroup, "numInstances",
+                                        dfNumInstances, true);
+        if (ret && !(dfNumInstances >= 1 && dfNumInstances <= 99 &&
+                     std::round(dfNumInstances) == dfNumInstances))
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Invalid value for numInstances");
+            ret = false;
+        }
+        else if (ret && dfNumInstances == 99)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Too many existing feature instances");
+            ret = false;
+        }
+        else
+        {
+            double dfMainVerticalDatum = 0;
+            ret = ret && GH5_FetchAttribute(m_hdf5, "verticalDatum",
+                                            dfMainVerticalDatum, true);
+
+            const int newNumInstances = static_cast<int>(dfNumInstances) + 1;
+            ret = ret && GH5_WriteAttribute(m_featureGroup, "numInstances",
+                                            newNumInstances);
+            ret = ret && CreateFeatureInstanceGroup(CPLSPrintf(
+                             "BathymetryCoverage.%02d", newNumInstances));
+            ret = ret && WriteFIGGridRelatedParameters(m_featureInstanceGroup);
+            if (dfMainVerticalDatum != m_nVerticalDatum)
+            {
+                ret = ret &&
+                      GH5_CreateAttribute(m_featureInstanceGroup,
+                                          "verticalDatumReference",
+                                          H5T_STD_U8LE) &&
+                      // s100VerticalDatum
+                      GH5_WriteAttribute(m_featureInstanceGroup,
+                                         "verticalDatumReference", 1);
+                ret =
+                    ret && WriteVerticalDatum(m_featureInstanceGroup,
+                                              H5T_STD_U16LE, m_nVerticalDatum);
+            }
+
+            ret = ret && WriteNumGRP(m_featureInstanceGroup, 1);
+
+            ret = ret && CreateValuesGroup("Group_001");
+            ret = ret && WriteVarLengthStringValue(m_valuesGroup, "timePoint",
+                                                   "00010101T000000Z");
+            ret = ret && CopyValues(pfnProgress, pProgressData);
+        }
+
+        // Update global bounding box
+        OGREnvelope sExistingExtent;
+        ret = ret && GH5_FetchAttribute(m_hdf5, "westBoundLongitude",
+                                        sExistingExtent.MinX, true);
+        ret = ret && GH5_FetchAttribute(m_hdf5, "southBoundLatitude",
+                                        sExistingExtent.MinY, true);
+        ret = ret && GH5_FetchAttribute(m_hdf5, "eastBoundLongitude",
+                                        sExistingExtent.MaxX, true);
+        ret = ret && GH5_FetchAttribute(m_hdf5, "northBoundLatitude",
+                                        sExistingExtent.MaxY, true);
+
+        sExtent.Merge(sExistingExtent);
+        ret = ret &&
+              GH5_WriteAttribute(m_hdf5, "westBoundLongitude", sExtent.MinX);
+        ret = ret &&
+              GH5_WriteAttribute(m_hdf5, "southBoundLatitude", sExtent.MinY);
+        ret = ret &&
+              GH5_WriteAttribute(m_hdf5, "eastBoundLongitude", sExtent.MaxX);
+        ret = ret &&
+              GH5_WriteAttribute(m_hdf5, "northBoundLatitude", sExtent.MaxY);
+
+        return Close() && ret;
+    }
+    else
+    {
+        bool ret = CreateFile();
+        ret = ret && WriteProductSpecification("INT.IHO.S-102.3.0.0");
+        ret = ret && WriteIssueDate();
+        ret = ret && WriteIssueTime();
+        ret = ret && WriteHorizontalCRS(m_nEPSGCode);
+        ret = ret && WriteTopLevelBoundingBox();
+        ret = ret && WriteVerticalCS(6498);          // Depth, metre, down
+        ret = ret && WriteVerticalCoordinateBase(2); // verticalDatum
+        // s100VerticalDatum
+        ret = ret && WriteVerticalDatumReference(m_hdf5, 1);
+        ret =
+            ret && WriteVerticalDatum(m_hdf5, H5T_STD_U16LE, m_nVerticalDatum);
+
+        // BathymetryCoverage
+        ret = ret && CreateFeatureGroup(FEATURE_TYPE);
+        ret = ret && WriteFeatureGroupAttributes(/* isQuality = */ false);
+        ret = ret && WriteAxisNames(m_featureGroup);
+
+        ret = ret && CreateFeatureInstanceGroup("BathymetryCoverage.01");
+        ret = ret && WriteFIGGridRelatedParameters(m_featureInstanceGroup);
+        ret = ret && WriteNumGRP(m_featureInstanceGroup, 1);
+
+        ret = ret && CreateValuesGroup("Group_001");
+
+        ret = ret && WriteVarLengthStringValue(m_valuesGroup, "timePoint",
+                                               "00010101T000000Z");
+
+        const double dfIntermediatePct =
+            m_poSrcDS->GetRasterCount() /
+            (m_poSrcDS->GetRasterCount() + (poQualityDS ? 1.0 : 0.0));
+        std::unique_ptr<void, decltype(&GDALDestroyScaledProgress)>
+            pScaledProgressData(GDALCreateScaledProgress(0.0, dfIntermediatePct,
+                                                         pfnProgress,
+                                                         pProgressData),
+                                GDALDestroyScaledProgress);
+        ret = ret && CopyValues(GDALScaledProgress, pScaledProgressData.get());
+
+        if (poQualityDS)
+        {
+            // QualityOfBathymetryCoverage group
+            ret = ret && CreateFeatureGroup(QUALITY_FEATURE_TYPE);
+            ret = ret && WriteFeatureGroupAttributes(/* isQuality = */ true);
+            ret = ret && WriteAxisNames(m_featureGroup);
+            ret = ret && WriteFeatureAttributeTable(poRAT);
+
+            ret = ret &&
+                  CreateFeatureInstanceGroup("QualityOfBathymetryCoverage.01");
+            ret = ret && WriteFIGGridRelatedParameters(m_featureInstanceGroup);
+            ret = ret && WriteNumGRP(m_featureInstanceGroup, 1);
+
+            ret = ret && CreateValuesGroup("Group_001");
+            pScaledProgressData.reset(GDALCreateScaledProgress(
+                dfIntermediatePct, 1.0, pfnProgress, pProgressData));
+            ret = ret && CopyQualityValues(poQualityDS.get(), oSetRATId,
+                                           GDALScaledProgress,
+                                           pScaledProgressData.get());
+        }
+
+        ret = ret && CreateGroupF(poQualityDS != nullptr);
+
+        return Close() && ret;
+    }
+}
+
+/************************************************************************/
+/*               S102Creator::WriteFeatureGroupAttributes()             */
+/************************************************************************/
+
+bool S102Creator::WriteFeatureGroupAttributes(bool isQuality)
+{
+    CPLAssert(m_featureGroup);
+
+    bool ret = WriteCommonPointRule(m_featureGroup, 2);  // low
+    if (isQuality)
+    {
+        // Feature oriented Regular Grid
+        ret = ret && WriteDataCodingFormat(m_featureGroup, 9);
+    }
+    else
+    {
+        ret = ret && WriteDataCodingFormat(m_featureGroup, 2);  // Regular grid
+    }
+    ret = ret && WriteDataOffsetCode(m_featureGroup, 5);  // Center of cell
+    ret = ret && WriteDimension(m_featureGroup, 2);
+    const char *pszHorizontalPositionUncertainty =
+        m_aosOptions.FetchNameValue("HORIZONTAL_POSITION_UNCERTAINTY");
+    ret =
+        ret &&
+        WriteHorizontalPositionUncertainty(
+            m_featureGroup,
+            pszHorizontalPositionUncertainty &&
+                    pszHorizontalPositionUncertainty[0]
+                ? static_cast<float>(CPLAtof(pszHorizontalPositionUncertainty))
+                : -1.0f);
+    const char *pszVerticalUncertainty =
+        m_aosOptions.FetchNameValue("VERTICAL_UNCERTAINTY");
+    ret = ret && WriteVerticalUncertainty(
+                     m_featureGroup,
+                     pszVerticalUncertainty && pszVerticalUncertainty[0]
+                         ? static_cast<float>(CPLAtof(pszVerticalUncertainty))
+                         : -1.0f);
+    ret = ret && WriteInterpolationType(m_featureGroup, 1); // Nearest neighbor
+    ret = ret && WriteNumInstances(m_featureGroup, 1);
+    ret = ret && WriteSequencingRuleScanDirection(m_featureGroup,
+                                                  m_poSRS->IsProjected()
+                                                      ? "Easting, Northing"
+                                                      : "Longitude, Latitude");
+    ret = ret && WriteSequencingRuleType(m_featureGroup, 1);  // Linear
+    return ret;
+}
+
+/************************************************************************/
+/*               S102Creator::WriteFeatureAttributeTable()              */
+/************************************************************************/
+
+bool S102Creator::WriteFeatureAttributeTable(
+    const GDALRasterAttributeTable *poRAT)
+{
+    CPLAssert(m_featureGroup);
+
+    std::map<std::string, const char *> mapKnownColumns;
+    for (const auto &entry : gasFeatureAttributeTableMembers)
+        mapKnownColumns[entry.pszName] = entry.pszType;
+
+    const int nColCount = poRAT->GetColumnCount();
+
+    size_t nCompoundSize = 0;
+    size_t nMEMCompoundSize = 0;
+    for (int i = 0; i < nColCount; ++i)
+    {
+        const char *pszColName = poRAT->GetNameOfCol(i);
+        const auto iter = mapKnownColumns.find(pszColName);
+        size_t nMemberSize = sizeof(char *);
+        if (iter != mapKnownColumns.end())
+        {
+            const char *pszType = iter->second;
+            if (strcmp(pszType, "uint8") == 0 ||
+                strcmp(pszType, "boolean") == 0 ||
+                strcmp(pszType, "enumeration") == 0)
+            {
+                nMemberSize = sizeof(uint8_t);
+            }
+            else if (strcmp(pszType, "uint32") == 0)
+            {
+                nMemberSize = sizeof(uint32_t);
+            }
+            else if (strcmp(pszType, "float32") == 0)
+            {
+                nMemberSize = sizeof(float);
+            }
+            else if (strcmp(pszType, "string") == 0 ||
+                     strcmp(pszType, "date") == 0)
+            {
+                nMemberSize = sizeof(char *);
+            }
+            else
+            {
+                CPLAssert(false);
+            }
+        }
+        else
+        {
+            GDALRATFieldType eType = poRAT->GetTypeOfCol(i);
+            switch (eType)
+            {
+                case GFT_Integer:
+                    nMemberSize = sizeof(int32_t);
+                    break;
+                case GFT_Real:
+                    nMemberSize = sizeof(double);
+                    break;
+                case GFT_Boolean:
+                    nMemberSize = sizeof(uint8_t);
+                    break;
+                case GFT_String:
+                case GFT_DateTime:
+                case GFT_WKBGeometry:
+                    nMemberSize = sizeof(char *);
+                    break;
+            }
+        }
+        nCompoundSize += nMemberSize;
+        if ((nMEMCompoundSize % nMemberSize) != 0)
+            nMEMCompoundSize += nMemberSize - (nMEMCompoundSize % nMemberSize);
+        nMEMCompoundSize += nMemberSize;
+    }
+
+    GH5_HIDTypeHolder hDataType(
+        H5_CHECK(H5Tcreate(H5T_COMPOUND, nCompoundSize)));
+    GH5_HIDTypeHolder hDataTypeMEM(
+        H5_CHECK(H5Tcreate(H5T_COMPOUND, nMEMCompoundSize)));
+    GH5_HIDTypeHolder hVarLengthType(H5_CHECK(H5Tcopy(H5T_C_S1)));
+    bool bRet = hDataType && hDataTypeMEM && hVarLengthType &&
+                H5_CHECK(H5Tset_size(hVarLengthType, H5T_VARIABLE)) >= 0 &&
+                H5_CHECK(H5Tset_strpad(hVarLengthType, H5T_STR_NULLTERM)) >= 0;
+
+    GH5_HIDTypeHolder hEnumType;
+    std::vector<const char *> apszTypes;
+
+    size_t nOffset = 0;
+    size_t nMEMOffset = 0;
+    std::vector<size_t> anMEMOffsets;
+    for (int i = 0; i < nColCount && bRet; ++i)
+    {
+        const char *pszColName = poRAT->GetNameOfCol(i);
+        const auto iter = mapKnownColumns.find(pszColName);
+        hid_t hMemberType = hVarLengthType.get();
+        hid_t hMemberNativeType = hVarLengthType.get();
+        if (iter != mapKnownColumns.end())
+        {
+            const char *pszType = iter->second;
+            if (strcmp(pszType, "uint8") == 0 ||
+                strcmp(pszType, "boolean") == 0)
+            {
+                hMemberType = H5T_STD_U8LE;
+                hMemberNativeType = H5T_NATIVE_UCHAR;
+            }
+            else if (strcmp(pszType, "uint32") == 0)
+            {
+                hMemberType = H5T_STD_U32LE;
+                hMemberNativeType = H5T_NATIVE_UINT;
+            }
+            else if (strcmp(pszType, "float32") == 0)
+            {
+                hMemberType = H5T_IEEE_F32LE;
+                hMemberNativeType = H5T_NATIVE_FLOAT;
+            }
+            else if (strcmp(pszType, "string") == 0 ||
+                     strcmp(pszType, "date") == 0)
+            {
+                hMemberType = hVarLengthType.get();
+                hMemberNativeType = hVarLengthType.get();
+            }
+            else if (strcmp(pszType, "enumeration") == 0 &&
+                     strcmp(pszColName,
+                            "typeOfBathymetricEstimationUncertainty") == 0)
+            {
+                hEnumType.reset(H5_CHECK(H5Tenum_create(H5T_STD_U8LE)));
+                bRet = hEnumType;
+                if (bRet)
+                {
+                    uint8_t val;
+                    val = 1;
+                    bRet = bRet &&
+                           H5_CHECK(H5Tenum_insert(
+                               hEnumType, "rawStandardDeviation", &val)) >= 0;
+                    val = 2;
+                    bRet = bRet &&
+                           H5_CHECK(H5Tenum_insert(
+                               hEnumType, "cUBEStandardDeviation", &val)) >= 0;
+                    val = 3;
+                    bRet = bRet &&
+                           H5_CHECK(H5Tenum_insert(
+                               hEnumType, "productUncertainty", &val)) >= 0;
+                    val = 4;
+                    bRet = bRet && H5_CHECK(H5Tenum_insert(
+                                       hEnumType, "historicalStandardDeviation",
+                                       &val)) >= 0;
+
+                    hMemberType = hEnumType.get();
+                    hMemberNativeType = hEnumType.get();
+                }
+            }
+            else
+            {
+                CPLAssert(false);
+            }
+            apszTypes.push_back(pszType);
+        }
+        else
+        {
+            GDALRATFieldType eType = poRAT->GetTypeOfCol(i);
+            switch (eType)
+            {
+                case GFT_Integer:
+                    hMemberType = H5T_STD_I32LE;
+                    hMemberNativeType = H5T_NATIVE_INT;
+                    apszTypes.push_back("int32");
+                    break;
+                case GFT_Real:
+                    hMemberType = H5T_IEEE_F64LE;
+                    hMemberNativeType = H5T_NATIVE_DOUBLE;
+                    apszTypes.push_back("float64");
+                    break;
+                case GFT_Boolean:
+                    hMemberType = H5T_STD_U8LE;
+                    hMemberNativeType = H5T_NATIVE_UCHAR;
+                    apszTypes.push_back("boolean");
+                    break;
+                case GFT_String:
+                case GFT_DateTime:
+                case GFT_WKBGeometry:
+                    apszTypes.push_back("string");
+                    break;
+            }
+        }
+
+        CPLAssert(H5Tget_size(hMemberType) == H5Tget_size(hMemberNativeType));
+
+        bRet = bRet && H5_CHECK(H5Tinsert(hDataType, pszColName, nOffset,
+                                          hMemberType)) >= 0;
+
+        const size_t nMemberSize = H5Tget_size(hMemberType);
+        if ((nMEMOffset % nMemberSize) != 0)
+            nMEMOffset += nMemberSize - (nMEMOffset % nMemberSize);
+        anMEMOffsets.push_back(nMEMOffset);
+        bRet = bRet && H5_CHECK(H5Tinsert(hDataTypeMEM, pszColName, nMEMOffset,
+                                          hMemberNativeType)) >= 0;
+        nOffset += nMemberSize;
+        nMEMOffset += nMemberSize;
+    }
+    CPLAssert(nOffset == nCompoundSize);
+    CPLAssert(nMEMOffset == nMEMCompoundSize);
+
+    CPLAssert(apszTypes.size() == static_cast<size_t>(nColCount));
+
+    const int nRowCount = poRAT->GetRowCount();
+    hsize_t dims[] = {static_cast<hsize_t>(nRowCount)};
+    GH5_HIDSpaceHolder hDataSpace(H5_CHECK(H5Screate_simple(1, dims, nullptr)));
+    bRet = bRet && hDataSpace;
+    GH5_HIDDatasetHolder hDatasetID;
+    GH5_HIDSpaceHolder hFileSpace;
+    GH5_HIDParametersHolder hParams(H5_CHECK(H5Pcreate(H5P_DATASET_CREATE)));
+    bRet = bRet && hParams;
+    if (bRet)
+    {
+        H5_CHECK(H5Pset_layout(hParams, H5D_CHUNKED));
+        hsize_t chunk_size[] = {static_cast<hsize_t>(1)};
+        H5_CHECK(H5Pset_chunk(hParams, 1, chunk_size));
+        hDatasetID.reset(
+            H5_CHECK(H5Dcreate(m_featureGroup, "featureAttributeTable",
+                               hDataType, hDataSpace, hParams)));
+        bRet = hDatasetID;
+    }
+    if (bRet)
+    {
+        hFileSpace.reset(H5_CHECK(H5Dget_space(hDatasetID)));
+        bRet = hFileSpace;
+    }
+
+    hsize_t count[] = {1};
+    GH5_HIDSpaceHolder
+        hMemSpace(H5_CHECK(H5Screate_simple(1, count, nullptr)));
+    bRet = bRet && hMemSpace;
+
+    std::vector<GByte> abyBuffer(nMEMCompoundSize);
+    std::vector<CPLString> asBuffers(nColCount);
+    for (int iRow = 0; iRow < nRowCount && bRet; ++iRow)
+    {
+        for (int iCol = 0; iCol < nColCount && bRet; ++iCol)
+        {
+            const char *const pszType = apszTypes[iCol];
+            GByte *const pabyDst = abyBuffer.data() + anMEMOffsets[iCol];
+            if (strcmp(pszType, "uint8") == 0 ||
+                strcmp(pszType, "boolean") == 0 ||
+                strcmp(pszType, "enumeration") == 0)
+            {
+                const uint8_t nVal =
+                    static_cast<uint8_t>(poRAT->GetValueAsInt(iRow, iCol));
+                *pabyDst = nVal;
+            }
+            else if (strcmp(pszType, "int32") == 0 ||
+                     strcmp(pszType, "uint32") == 0)
+            {
+                const int nVal = poRAT->GetValueAsInt(iRow, iCol);
+                memcpy(pabyDst, &nVal, sizeof(nVal));
+            }
+            else if (strcmp(pszType, "float32") == 0)
+            {
+                const float fVal =
+                    static_cast<float>(poRAT->GetValueAsDouble(iRow, iCol));
+                memcpy(pabyDst, &fVal, sizeof(fVal));
+            }
+            else if (strcmp(pszType, "float64") == 0)
+            {
+                const double dfVal = poRAT->GetValueAsDouble(iRow, iCol);
+                memcpy(pabyDst, &dfVal, sizeof(dfVal));
+            }
+            else if (strcmp(pszType, "string") == 0)
+            {
+                asBuffers[iCol] = poRAT->GetValueAsString(iRow, iCol);
+                const char *pszStr = asBuffers[iCol].c_str();
+                memcpy(pabyDst, &pszStr, sizeof(pszStr));
+            }
+            else if (strcmp(pszType, "date") == 0)
+            {
+                asBuffers[iCol] = poRAT->GetValueAsString(iRow, iCol);
+                if (asBuffers[iCol].size() != 8)
+                {
+                    OGRField sField;
+                    if (OGRParseDate(asBuffers[iCol].c_str(), &sField, 0))
+                    {
+                        asBuffers[iCol] = CPLString().Printf(
+                            "%04d%02d%02d", sField.Date.Year, sField.Date.Month,
+                            sField.Date.Day);
+                    }
+                }
+                const char *pszStr = asBuffers[iCol].c_str();
+                memcpy(pabyDst, &pszStr, sizeof(pszStr));
+            }
+            else
+            {
+                CPLAssert(false);
+            }
+        }
+
+        H5OFFSET_TYPE offset[] = {static_cast<H5OFFSET_TYPE>(iRow)};
+        bRet =
+            bRet &&
+            H5_CHECK(H5Sselect_hyperslab(hFileSpace, H5S_SELECT_SET, offset,
+                                         nullptr, count, nullptr)) >= 0 &&
+            H5_CHECK(H5Dwrite(hDatasetID, hDataTypeMEM, hMemSpace, hFileSpace,
+                              H5P_DEFAULT, abyBuffer.data())) >= 0;
+    }
+
+    return bRet;
+}
+
+/************************************************************************/
+/*                     S102Creator::CreateGroupF()                      */
+/************************************************************************/
+
+// Per S-102 v3.0 spec
+#define MIN_DEPTH_VALUE -14
+#define MAX_DEPTH_VALUE 11050
+
+#define STRINGIFY(x) #x
+#define XSTRINGIFY(x) STRINGIFY(x)
+
+bool S102Creator::CreateGroupF(bool hasQualityOfBathymetryCoverage)
+{
+    bool ret = S100BaseWriter::CreateGroupF();
+
+    CPLStringList aosFeatureCodes;
+    aosFeatureCodes.push_back(FEATURE_TYPE);
+    if (hasQualityOfBathymetryCoverage)
+        aosFeatureCodes.push_back(QUALITY_FEATURE_TYPE);
+    ret = ret && WriteOneDimensionalVarLengthStringArray(
+                     m_GroupF, "featureCode", aosFeatureCodes.List());
+
+    {
+        std::vector<std::vector<std::string>> rows{
+            {"depth", "depth", "metres", "1000000", "H5T_FLOAT",
+             XSTRINGIFY(MIN_DEPTH_VALUE), XSTRINGIFY(MAX_DEPTH_VALUE),
+             "closedInterval"},
+            {"uncertainty", "uncertainty", "metres", "1000000", "H5T_FLOAT",
+             "0", "", "geSemiInterval"}};
+        rows.resize(m_poSrcDS->GetRasterCount());
+        ret = ret && WriteGroupFDataset(FEATURE_TYPE, rows);
+    }
+    if (hasQualityOfBathymetryCoverage)
+    {
+        std::vector<std::vector<std::string>> rows{
+            {"iD", "ID", "", "0", "H5T_INTEGER", "1", "", "geSemiInterval"}};
+        ret = ret && WriteGroupFDataset(QUALITY_FEATURE_TYPE, rows);
+    }
+
+    return ret;
+}
+
+/************************************************************************/
+/*                      S102Creator::CopyValues()                       */
+/************************************************************************/
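+
+// Copies the source pixel values into the compound "values" dataset of the
+// current values group: band 1 becomes the "depth" component (negated when
+// the band description is "elevation") and the optional band 2 becomes the
+// "uncertainty" component. Values are written block by block into a chunked,
+// optionally DEFLATE-compressed, HDF5 dataset, flipping rows when the source
+// geotransform is north-up; the minimum/maximum depth and uncertainty
+// encountered are written to the values group at the end.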
+bool S102Creator::CopyValues(GDALProgressFunc pfnProgress, void *pProgressData)
+{
+    CPLAssert(m_valuesGroup.get() >= 0);
+
+    const int nYSize = m_poSrcDS->GetRasterYSize();
+    const int nXSize = m_poSrcDS->GetRasterXSize();
+
+    hsize_t dims[] = {static_cast<hsize_t>(nYSize),
+                      static_cast<hsize_t>(nXSize)};
+
+    GH5_HIDSpaceHolder hDataSpace(H5_CHECK(H5Screate_simple(2, dims, nullptr)));
+    bool bRet = hDataSpace;
+
+    const bool bDeflate =
+        EQUAL(m_aosOptions.FetchNameValueDef("COMPRESS", "DEFLATE"), "DEFLATE");
+    const int nCompressionLevel =
+        atoi(m_aosOptions.FetchNameValueDef("ZLEVEL", "6"));
+    const int nBlockSize =
+        std::min(4096, std::max(100, atoi(m_aosOptions.FetchNameValueDef(
+                                        "BLOCK_SIZE", "100"))));
+    const int nBlockXSize = std::min(nXSize, nBlockSize);
+    const int nBlockYSize = std::min(nYSize, nBlockSize);
+    const float fNoDataValue = NODATA;
+    const int nComponents = m_poSrcDS->GetRasterCount();
+
+    GH5_HIDTypeHolder hDataType(
+        H5_CHECK(H5Tcreate(H5T_COMPOUND, nComponents * sizeof(float))));
+    bRet = bRet && hDataType &&
+           H5_CHECK(H5Tinsert(hDataType, "depth", 0, H5T_IEEE_F32LE)) >= 0 &&
+           (nComponents == 1 ||
+            H5_CHECK(H5Tinsert(hDataType, "uncertainty", sizeof(float),
+                               H5T_IEEE_F32LE)) >= 0);
+
+    hsize_t chunk_size[] = {static_cast<hsize_t>(nBlockYSize),
+                            static_cast<hsize_t>(nBlockXSize)};
+
+    const float afFillValue[] = {fNoDataValue, fNoDataValue};
+    GH5_HIDParametersHolder hParams(H5_CHECK(H5Pcreate(H5P_DATASET_CREATE)));
+    bRet = bRet && hParams &&
+           H5_CHECK(H5Pset_fill_time(hParams, H5D_FILL_TIME_ALLOC)) >= 0 &&
+           H5_CHECK(H5Pset_fill_value(hParams, hDataType, afFillValue)) >= 0 &&
+           H5_CHECK(H5Pset_layout(hParams, H5D_CHUNKED)) >= 0 &&
+           H5_CHECK(H5Pset_chunk(hParams, 2, chunk_size)) >= 0;
+
+    if (bRet && bDeflate)
+    {
+        bRet = H5_CHECK(H5Pset_deflate(hParams, nCompressionLevel)) >= 0;
+    }
+
+    GH5_HIDDatasetHolder hDatasetID;
+    if (bRet)
+    {
+        hDatasetID.reset(H5_CHECK(H5Dcreate(m_valuesGroup, "values", hDataType,
+                                            hDataSpace, hParams)));
+        bRet = hDatasetID;
+    }
+
+    GH5_HIDSpaceHolder hFileSpace;
+    if (bRet)
+    {
+        hFileSpace.reset(H5_CHECK(H5Dget_space(hDatasetID)));
+        bRet = hFileSpace;
+    }
+
+    const int nYBlocks = static_cast<int>(DIV_ROUND_UP(nYSize, nBlockYSize));
+    const int nXBlocks = static_cast<int>(DIV_ROUND_UP(nXSize, nBlockXSize));
+    std::vector<float> afValues(static_cast<size_t>(nBlockYSize) *
+                                nBlockXSize * nComponents);
+    const bool bReverseY = m_gt[5] < 0;
+
+    float fMinDepth = std::numeric_limits<float>::infinity();
+    float fMaxDepth = -std::numeric_limits<float>::infinity();
+    float fMinUncertainty = std::numeric_limits<float>::infinity();
+    float fMaxUncertainty = -std::numeric_limits<float>::infinity();
+
+    int bHasNoDataBand1 = FALSE;
+    const char *pszFirstBandDesc =
+        m_poSrcDS->GetRasterBand(1)->GetDescription();
+    const float fMulFactor =
+        EQUAL(pszFirstBandDesc, "elevation") ? -1.0f : 1.0f;
+    if (fMulFactor < 0.0f)
+    {
+        CPLError(CE_Warning, CPLE_AppDefined,
+                 "Automatically converting from elevation to depth by "
+                 "negating elevation values");
+    }
+    const double dfSrcNoDataBand1 =
+        m_poSrcDS->GetRasterBand(1)->GetNoDataValue(&bHasNoDataBand1);
+    const float fSrcNoDataBand1 = static_cast<float>(dfSrcNoDataBand1);
+    int bHasNoDataBand2 = FALSE;
+    const double dfSrcNoDataBand2 =
+        nComponents == 2
+            ? m_poSrcDS->GetRasterBand(2)->GetNoDataValue(&bHasNoDataBand2)
+            : 0.0;
+    const float fSrcNoDataBand2 = static_cast<float>(dfSrcNoDataBand2);
+
+    for (int iY = 0; iY < nYBlocks && bRet; iY++)
+    {
+        const int nSrcYOff = bReverseY
+                                 ? std::max(0, nYSize - (iY + 1) * nBlockYSize)
+                                 : iY * nBlockYSize;
+        const int nReqCountY = std::min(nBlockYSize, nYSize - iY * nBlockYSize);
+        for (int iX = 0; iX < nXBlocks && bRet; iX++)
+        {
+            const int nReqCountX =
+                std::min(nBlockXSize, nXSize - iX * nBlockXSize);
+
+            bRet =
+                m_poSrcDS->RasterIO(
+                    GF_Read, iX * nBlockXSize, nSrcYOff, nReqCountX, nReqCountY,
+                    bReverseY ? afValues.data() +
+                                    (nReqCountY - 1) * nReqCountX * nComponents
+                              : afValues.data(),
+                    nReqCountX, nReqCountY, GDT_Float32, nComponents, nullptr,
+                    static_cast<GSpacing>(sizeof(float)) * nComponents,
+                    bReverseY ? -static_cast<GSpacing>(sizeof(float)) *
+                                    nComponents * nReqCountX
+                              : 0,
+                    sizeof(float), nullptr) == CE_None;
+
+            if (bRet)
+            {
+                for (int i = 0; i < nReqCountY * nReqCountX; i++)
+                {
+                    {
+                        float fVal = afValues[i * nComponents];
+                        if ((bHasNoDataBand1 && fVal == fSrcNoDataBand1) ||
+                            std::isnan(fVal))
+                        {
+                            afValues[i * nComponents] = fNoDataValue;
+                        }
+                        else
+                        {
+                            fVal *= fMulFactor;
+                            afValues[i * nComponents] = fVal;
+                            fMinDepth = std::min(fMinDepth, fVal);
+                            fMaxDepth = std::max(fMaxDepth, fVal);
+                        }
+                    }
+                    if (nComponents == 2)
+                    {
+                        const float fVal = afValues[i * nComponents + 1];
+                        if ((bHasNoDataBand2 && fVal == fSrcNoDataBand2) ||
+                            std::isnan(fVal))
+                        {
+                            afValues[i * nComponents + 1] = fNoDataValue;
+                        }
+                        else
+                        {
+                            fMinUncertainty = std::min(fMinUncertainty, fVal);
+                            fMaxUncertainty = std::max(fMaxUncertainty, fVal);
+                        }
+                    }
+                }
+            }
+
+            H5OFFSET_TYPE offset[] = {
+                static_cast<H5OFFSET_TYPE>(iY) *
+                    static_cast<H5OFFSET_TYPE>(nBlockYSize),
+                static_cast<H5OFFSET_TYPE>(iX) *
+                    static_cast<H5OFFSET_TYPE>(nBlockXSize)};
+            hsize_t count[2] = {static_cast<hsize_t>(nReqCountY),
+                                static_cast<hsize_t>(nReqCountX)};
+            GH5_HIDSpaceHolder hMemSpace(
+                H5_CHECK(H5Screate_simple(2, count, nullptr)));
+            bRet =
+                bRet &&
+                H5_CHECK(H5Sselect_hyperslab(hFileSpace, H5S_SELECT_SET, offset,
+                                             nullptr, count, nullptr)) >= 0 &&
+                hMemSpace &&
+                H5_CHECK(H5Dwrite(hDatasetID, hDataType, hMemSpace, hFileSpace,
+                                  H5P_DEFAULT, afValues.data())) >= 0 &&
+                pfnProgress((static_cast<double>(iY) * nXBlocks + iX + 1) /
+                                (static_cast<double>(nXBlocks) * nYBlocks),
+                            "", pProgressData) != 0;
+        }
+    }
+
+    if (fMinDepth > fMaxDepth)
+    {
+        fMinDepth = fMaxDepth = fNoDataValue;
+    }
+    else if (!(fMinDepth >= MIN_DEPTH_VALUE && fMaxDepth <= MAX_DEPTH_VALUE))
+    {
+        CPLError(CE_Warning, CPLE_AppDefined,
+                 "Range of depth in the dataset is [%f, %f] whereas the "
+                 "allowed range is [%d, %d]",
+                 fMinDepth, fMaxDepth, MIN_DEPTH_VALUE, MAX_DEPTH_VALUE);
+    }
+
+    if (fMinUncertainty > fMaxUncertainty)
+    {
+        fMinUncertainty = fMaxUncertainty = fNoDataValue;
+    }
+    else if (fMinUncertainty < 0)
+    {
+        CPLError(CE_Warning, CPLE_AppDefined,
+                 "Negative uncertainty value found, which is not allowed");
+    }
+
+    return bRet &&
+           WriteFloat32Value(m_valuesGroup, "minimumDepth", fMinDepth) &&
+           WriteFloat32Value(m_valuesGroup, "maximumDepth", fMaxDepth) &&
+           WriteFloat32Value(m_valuesGroup, "minimumUncertainty",
+                             fMinUncertainty) &&
+           WriteFloat32Value(m_valuesGroup, "maximumUncertainty",
+                             fMaxUncertainty);
+}
+
+/************************************************************************/
+/*                   S102Creator::CopyQualityValues()                   */
+/************************************************************************/
+
+bool S102Creator::CopyQualityValues(GDALDataset *poQualityDS,
+                                    const std::set<int> &oSetRATId,
+                                    GDALProgressFunc pfnProgress,
+                                    void *pProgressData)
+{
+    CPLAssert(m_valuesGroup.get() >= 0);
+
+    const int nYSize = poQualityDS->GetRasterYSize();
+    const int nXSize = poQualityDS->GetRasterXSize();
+
+    hsize_t dims[] = {static_cast<hsize_t>(nYSize),
+                      static_cast<hsize_t>(nXSize)};
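+
+    // Unlike the bathymetry values, the quality grid is stored as a plain
+    // uint32 dataset: 0 serves as the fill/nodata value, and every non-zero
+    // cell is expected to match an "id" entry of the featureAttributeTable
+    // (a warning is emitted below otherwise).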
+    GH5_HIDSpaceHolder hDataSpace(H5_CHECK(H5Screate_simple(2, dims, nullptr)));
+    bool bRet = hDataSpace;
+
+    const bool bDeflate =
+        EQUAL(m_aosOptions.FetchNameValueDef("COMPRESS", "DEFLATE"), "DEFLATE");
+    const int nCompressionLevel =
+        atoi(m_aosOptions.FetchNameValueDef("ZLEVEL", "6"));
+    const int nBlockSize =
+        std::min(4096, std::max(100, atoi(m_aosOptions.FetchNameValueDef(
+                                        "BLOCK_SIZE", "100"))));
+    const int nBlockXSize = std::min(nXSize, nBlockSize);
+    const int nBlockYSize = std::min(nYSize, nBlockSize);
+    constexpr uint32_t nNoDataValue = 0;
+
+    hsize_t chunk_size[] = {static_cast<hsize_t>(nBlockYSize),
+                            static_cast<hsize_t>(nBlockXSize)};
+
+    GH5_HIDParametersHolder hParams(H5_CHECK(H5Pcreate(H5P_DATASET_CREATE)));
+    bRet = bRet && hParams &&
+           H5_CHECK(H5Pset_fill_time(hParams, H5D_FILL_TIME_ALLOC)) >= 0 &&
+           H5_CHECK(H5Pset_fill_value(hParams, H5T_STD_U32LE, &nNoDataValue)) >=
+               0 &&
+           H5_CHECK(H5Pset_layout(hParams, H5D_CHUNKED)) >= 0 &&
+           H5_CHECK(H5Pset_chunk(hParams, 2, chunk_size)) >= 0;
+
+    if (bRet && bDeflate)
+    {
+        bRet = H5_CHECK(H5Pset_deflate(hParams, nCompressionLevel)) >= 0;
+    }
+
+    GH5_HIDDatasetHolder hDatasetID;
+    if (bRet)
+    {
+        hDatasetID.reset(H5_CHECK(H5Dcreate(
+            m_valuesGroup, "values", H5T_STD_U32LE, hDataSpace, hParams)));
+        bRet = hDatasetID;
+    }
+
+    GH5_HIDSpaceHolder hFileSpace(H5_CHECK(H5Dget_space(hDatasetID)));
+    bRet = bRet && hFileSpace;
+
+    const int nYBlocks = static_cast<int>(DIV_ROUND_UP(nYSize, nBlockYSize));
+    const int nXBlocks = static_cast<int>(DIV_ROUND_UP(nXSize, nBlockXSize));
+    std::vector<uint32_t> anValues(static_cast<size_t>(nBlockYSize) *
+                                   nBlockXSize);
+    const bool bReverseY = m_gt[5] < 0;
+
+    int bHasSrcNoData = FALSE;
+    const double dfSrcNoData =
+        poQualityDS->GetRasterBand(1)->GetNoDataValue(&bHasSrcNoData);
+    const uint32_t nSrcNoData = static_cast<uint32_t>(dfSrcNoData);
+
+    std::set<int> oSetRATIdCopy(oSetRATId);
+    for (int iY = 0; iY < nYBlocks && bRet; iY++)
+    {
+        const int nSrcYOff = bReverseY
+                                 ? std::max(0, nYSize - (iY + 1) * nBlockYSize)
+                                 : iY * nBlockYSize;
+        const int nReqCountY = std::min(nBlockYSize, nYSize - iY * nBlockYSize);
+        for (int iX = 0; iX < nXBlocks && bRet; iX++)
+        {
+            const int nReqCountX =
+                std::min(nBlockXSize, nXSize - iX * nBlockXSize);
+
+            bRet =
+                poQualityDS->GetRasterBand(1)->RasterIO(
+                    GF_Read, iX * nBlockXSize, nSrcYOff, nReqCountX, nReqCountY,
+                    bReverseY ? anValues.data() + (nReqCountY - 1) * nReqCountX
+                              : anValues.data(),
+                    nReqCountX, nReqCountY, GDT_UInt32, 0,
+                    bReverseY ? -static_cast<GSpacing>(sizeof(uint32_t)) *
+                                    nReqCountX
+                              : 0,
+                    nullptr) == CE_None;
+
+            if (bRet)
+            {
+                for (int i = 0; i < nReqCountY * nReqCountX; i++)
+                {
+                    if (bHasSrcNoData && anValues[i] == nSrcNoData)
+                    {
+                        anValues[i] = nNoDataValue;
+                    }
+                    else if (anValues[i] != 0 &&
+                             !cpl::contains(oSetRATIdCopy, anValues[i]))
+                    {
+                        CPLError(
+                            CE_Warning, CPLE_AppDefined,
+                            "Quality grid contains nodes with id %u, but there "
+                            "is no such entry in the feature attribute table",
+                            anValues[i]);
+                        oSetRATIdCopy.insert(anValues[i]);
+                    }
+                }
+            }
+
+            H5OFFSET_TYPE offset[] = {
+                static_cast<H5OFFSET_TYPE>(iY) *
+                    static_cast<H5OFFSET_TYPE>(nBlockYSize),
+                static_cast<H5OFFSET_TYPE>(iX) *
+                    static_cast<H5OFFSET_TYPE>(nBlockXSize)};
+            hsize_t count[2] = {static_cast<hsize_t>(nReqCountY),
+                                static_cast<hsize_t>(nReqCountX)};
+            GH5_HIDSpaceHolder hMemSpace(
+                H5_CHECK(H5Screate_simple(2, count, nullptr)));
+            bRet =
+                bRet && hMemSpace &&
+                H5_CHECK(H5Sselect_hyperslab(hFileSpace, H5S_SELECT_SET, offset,
+                                             nullptr, count, nullptr)) >= 0 &&
+                H5_CHECK(H5Dwrite(hDatasetID, H5T_NATIVE_UINT, hMemSpace,
+                                  hFileSpace, H5P_DEFAULT, anValues.data())) >=
+                    0 &&
+                pfnProgress((static_cast<double>(iY) * nXBlocks + iX + 1) /
+                                (static_cast<double>(nXBlocks) * nYBlocks),
+                            "", pProgressData) != 0;
+        }
+    }
+
+    return bRet;
+}
+
+/************************************************************************/
+/*                      S102Dataset::CreateCopy()                       */
+/************************************************************************/
+
+/* static */
+GDALDataset *S102Dataset::CreateCopy(const char *pszFilename,
+                                     GDALDataset *poSrcDS, int /* bStrict*/,
+                                     char **papszOptions,
+                                     GDALProgressFunc pfnProgress,
+                                     void *pProgressData)
+{
+    S102Creator creator(pszFilename, poSrcDS, papszOptions);
+    if (!creator.Create(pfnProgress, pProgressData))
+        return nullptr;
+
+    VSIStatBufL sStatBuf;
+    if (VSIStatL(pszFilename, &sStatBuf) == 0 &&
+        sStatBuf.st_size > 10 * 1024 * 1024)
+    {
+        CPLError(CE_Warning, CPLE_AppDefined,
+                 "%s file size exceeds 10 MB, which is the upper limit "
+                 "suggested for wireless transmission to marine vessels",
+                 pszFilename);
+    }
+
+    GDALOpenInfo oOpenInfo(pszFilename, GA_ReadOnly);
+    return Open(&oOpenInfo);
+}
+
 /************************************************************************/
 /*                      S102DatasetDriverUnload()                       */
 /************************************************************************/
@@ -774,6 +1992,7 @@ void GDALRegister_S102()
     S102DriverSetCommonMetadata(poDriver);
 
     poDriver->pfnOpen = S102Dataset::Open;
+    poDriver->pfnCreateCopy = S102Dataset::CreateCopy;
    poDriver->pfnUnloadDriver = S102DatasetDriverUnload;
 
     GetGDALDriverManager()->RegisterDriver(poDriver);
diff --git a/scripts/typos_allowlist.txt b/scripts/typos_allowlist.txt
index 21ab2941d814..17f0d45fcc2f 100644
--- a/scripts/typos_allowlist.txt
+++ b/scripts/typos_allowlist.txt
@@ -371,3 +371,5 @@ for Bosnia and Herzegovina (country code "BA"), limited to the "county" subtype.
gdal vector sql --oo MODEL=OeREBKRM09vs.imd --config OGR_STROKE_CURVE=TRUE --sql 'SELECT Rechtsstatus,publiziertAb,MetadatenGeobasisdaten,Eigentumsbeschraenkung,ZustaendigeStelle,Flaeche FROM "OeREBKRM09trsfr.Transferstruktur.Geometrie"' -f "ESRI Shapefile" ch.bazl.sicherheitszonenplan.oereb_20131118.xtf shpdir (0, 2, "metres"), (1, 2, "metres"), + {"depth", "depth", "metres", "1000000", "H5T_FLOAT", + {"uncertainty", "uncertainty", "metres", "1000000", "H5T_FLOAT", diff --git a/swig/python/gdal-utils/osgeo_utils/samples/validate_s102.py b/swig/python/gdal-utils/osgeo_utils/samples/validate_s102.py index 5e3f18517079..85091a286fec 100755 --- a/swig/python/gdal-utils/osgeo_utils/samples/validate_s102.py +++ b/swig/python/gdal-utils/osgeo_utils/samples/validate_s102.py @@ -391,7 +391,7 @@ def check(self): self._validate_verticalCoordinateBase(f) self._validate_verticalDatumReference(f) - self._validate_verticalDatum(f) + self._validate_verticalDatum("top level", f) self._validate_epoch(f) self._validate_metadata(f, self.filename) self._validate_horizontalCRS(f) @@ -448,16 +448,15 @@ def _validate_verticalDatumReference(self, f): } self._validate_enumeration(f, "verticalDatumReference", expected_values) - def _validate_verticalDatum(self, f): - if "verticalDatum" in f.attrs: - value = f.attrs["verticalDatum"] - if isinstance(value, int) and not ( - (value >= 1 and value <= 30) or value == 44 - ): - # 102_Dev1006 - self._critical_error( - f"Top level attribute verticalDatum has value '{value}', whereas it should be in [1, 30] range or 44" - ) + def _validate_verticalDatum(self, ctxt_name, f): + verticalDatum = _get_int_attr_or_none(f, "verticalDatum") + if verticalDatum is not None and not ( + (verticalDatum >= 1 and verticalDatum <= 30) or verticalDatum == 44 + ): + # 102_Dev1006 + self._critical_error( + f"{ctxt_name} attribute verticalDatum has value '{verticalDatum}', whereas it should be in [1, 30] range or 44" + ) def _validate_epoch(self, f): self._log_check("102_Dev1007") @@ -1451,6 +1450,15 @@ def _validate_BathymetryCoverage_instance(self, f, BathymetryCoverage, instance) f"BathymetryCoverage feature instance group {instance.name}: Count of values groups does not match attribute numGRP in instance group" ) + self._validate_verticalDatum(instance.name, instance) + verticalDatum = _get_int_attr_or_none(instance, "verticalDatum") + topVerticalDatum = _get_int_attr_or_none(f, "verticalDatum") + if verticalDatum is not None and topVerticalDatum is not None: + if verticalDatum == topVerticalDatum: + self._error( + f"BathymetryCoverage feature instance group {instance.name} has same value for 'verticalDatum' attribute as top level attribute" + ) + # Check that QualityOfBathymetryCoverage.QualityOfBathymetryCoverage.01 # has same attributes as BathymetryCoverage.BathymetryCoverage.01 self._log_check("102_Dev3017") From 46aa4ebf55c31d9db3387f6f668edd308b400901 Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Tue, 4 Nov 2025 21:48:26 +0100 Subject: [PATCH 10/20] GDALValidateOptions(): only emit debug message for deprecated_alias not plain alias --- gcore/gdaldriver.cpp | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/gcore/gdaldriver.cpp b/gcore/gdaldriver.cpp index a457fdf14ea2..04daa6618ec6 100644 --- a/gcore/gdaldriver.cpp +++ b/gcore/gdaldriver.cpp @@ -2378,14 +2378,23 @@ int GDALValidateOptions(const char *pszOptionList, { break; } - const char *pszAlias = CPLGetXMLValue( - psChildNode, "alias", - CPLGetXMLValue(psChildNode, "deprecated_alias", 
"")); - if (EQUAL(pszAlias, pszKey)) + const char *pszAlias = + CPLGetXMLValue(psChildNode, "alias", nullptr); + const char *pszDeprecatedAlias = + pszAlias ? nullptr + : CPLGetXMLValue(psChildNode, "deprecated_alias", + nullptr); + if (!pszAlias && pszDeprecatedAlias) + pszAlias = pszDeprecatedAlias; + if (pszAlias && EQUAL(pszAlias, pszKey)) { - CPLDebug("GDAL", - "Using deprecated alias '%s'. New name is '%s'", - pszAlias, pszOptionName); + if (pszDeprecatedAlias) + { + CPLDebug( + "GDAL", + "Using deprecated alias '%s'. New name is '%s'", + pszAlias, pszOptionName); + } break; } } From 0c9c27a73c374f48136c2bb6fae6e425f060b05e Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Wed, 5 Nov 2025 13:55:45 +0100 Subject: [PATCH 11/20] HDF5 multidim: avoid harmless unsigned integer overflow --- frmts/hdf5/hdf5multidim.cpp | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/frmts/hdf5/hdf5multidim.cpp b/frmts/hdf5/hdf5multidim.cpp index 420a78255cfb..7b5ffd131a5c 100644 --- a/frmts/hdf5/hdf5multidim.cpp +++ b/frmts/hdf5/hdf5multidim.cpp @@ -2268,8 +2268,12 @@ static void CopyBuffer(size_t nDims, const size_t *count, --anStackCount[iDim]; if (anStackCount[iDim] == 0) break; - pabyDstBufferStack[iDim] += - bufferStride[iDim] * nBufferDataTypeSize; + if (bufferStride[iDim] >= 0) + pabyDstBufferStack[iDim] += + bufferStride[iDim] * nBufferDataTypeSize; + else + pabyDstBufferStack[iDim] -= + (-bufferStride[iDim]) * nBufferDataTypeSize; pabySrcBufferStack[iDim] += anSrcStride[iDim]; } } From 0be134c87d5f470af3d33541dac14ee66d731621 Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Sat, 1 Nov 2025 19:20:26 +0100 Subject: [PATCH 12/20] Add validate_s104.py sample validation script --- .../osgeo_utils/samples/validate_s104.py | 2274 +++++++++++++++++ 1 file changed, 2274 insertions(+) create mode 100755 swig/python/gdal-utils/osgeo_utils/samples/validate_s104.py diff --git a/swig/python/gdal-utils/osgeo_utils/samples/validate_s104.py b/swig/python/gdal-utils/osgeo_utils/samples/validate_s104.py new file mode 100755 index 000000000000..7cb4386fa9bc --- /dev/null +++ b/swig/python/gdal-utils/osgeo_utils/samples/validate_s104.py @@ -0,0 +1,2274 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +############################################################################### +# +# Project: GDAL/OGR +# Purpose: Test compliance of IHO S104 v2.0 dataset +# Author: Even Rouault +# +############################################################################### +# Copyright (c) 2025, Even Rouault +# +# SPDX-License-Identifier: MIT +############################################################################### + +# Validates against +# https://registry.iho.int/productspec/view.do?idx=209&product_ID=S-104&statusS=5&domainS=ALL&category=product_ID&searchValue= and +# https://iho.int/uploads/user/pubs/standards/s-100/S-100_5.2.0_Final_Clean.pdf + +# "104_DevXXXX" are for traceability with respect to requirements of the spreadsheet: +# https://github.com/iho-ohi/S-100-Validation-Checks/raw/refs/heads/main/Documents/S-158-104/0.2.0/S-158_104_0_2_0_20241209.xlsx +# Note that there are a few checks in that spreadsheet that are specific only of 1.1.0, and not 2.0.0... 
+ + +import os +import re +import struct +import sys + +# Standard Python modules +from collections import namedtuple + +# Extension modules +import h5py +import numpy as np + +try: + from osgeo import osr + + osr.UseExceptions() + gdal_available = True +except ImportError: + gdal_available = False + +ERROR = "Error" +CRITICAL_ERROR = "Critical error" + +AttributeDefinition = namedtuple( + "AttributeDefinition", ["name", "required", "type", "fixed_value"] +) + + +def _get_int_value_or_none(v): + try: + return int(v) + except ValueError: + return None + + +def _get_int_attr_or_none(group, attr_name): + if attr_name not in group.attrs: + return None + return _get_int_value_or_none(group.attrs[attr_name]) + + +def _get_float_value_or_none(v): + try: + return float(v) + except ValueError: + return None + + +def _get_float_attr_or_none(group, attr_name): + if attr_name not in group.attrs: + return None + return _get_float_value_or_none(group.attrs[attr_name]) + + +def _cast_to_float32(v): + return struct.unpack("f", struct.pack("f", v))[0] + + +class S104ValidationException(Exception): + pass + + +class S104Checker: + def __init__(self, filename, abort_at_first_error=False): + self.filename = filename + self.abort_at_first_error = abort_at_first_error + self.errors = [] + self.warnings = [] + self.checks_done = set([]) + + def _log_check(self, name): + self.checks_done.add(name) + + def _warning(self, msg): + self.warnings += [msg] + + def _error(self, msg): + self.errors += [(ERROR, msg)] + if self.abort_at_first_error: + raise S104ValidationException(f"{ERROR}: {msg}") + + def _critical_error(self, msg): + self.errors += [(CRITICAL_ERROR, msg)] + if self.abort_at_first_error: + raise S104ValidationException(f"{CRITICAL_ERROR}: {msg}") + + def _is_uint8(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_NONE + and h5_type.get_size() == 1 + ) + + def _is_uint16(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_NONE + and h5_type.get_size() == 2 + ) + + def _is_uint32(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_NONE + and h5_type.get_size() == 4 + ) + + def _is_int16(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_2 + and h5_type.get_size() == 2 + ) + + def _is_int32(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_2 + and h5_type.get_size() == 4 + ) + + def _is_float32(self, h5_type): + return isinstance(h5_type, h5py.h5t.TypeFloatID) and h5_type.get_size() == 4 + + def _is_float64(self, h5_type): + return isinstance(h5_type, h5py.h5t.TypeFloatID) and h5_type.get_size() == 8 + + def _is_string(self, h5_type): + return isinstance(h5_type, h5py.h5t.TypeStringID) + + def _is_enumeration(self, h5_type): + return isinstance(h5_type, h5py.h5t.TypeEnumID) + + def _check_attributes(self, ctxt_name, group, attr_list): + + for attr_def in attr_list: + if attr_def.required and attr_def.name not in group.attrs: + # 104_Dev1002: check presence of required attributes + self._error( + f"Required {ctxt_name} attribute '{attr_def.name}' is missing" + ) + + elif attr_def.name in group.attrs: + attr = group.attrs[attr_def.name] + if isinstance(attr, bytes): + attr = attr.decode("utf-8") + h5_type = group.attrs.get_id(attr_def.name).get_type() + + # 104_Dev1002: check type + + if 
attr_def.type == "string": + if not self._is_string(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a string " + ) + + elif attr_def.type == "time": + if not self._is_string(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a string" + ) + + pattern = re.compile( + r"^(?:[01]\d|2[0-3])[0-5]\d[0-5]\d(?:Z|[+-](?:[01]\d|2[0-3])[0-5]\d)$" + ) + if not pattern.match(attr): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a valid time: {attr}" + ) + + elif attr_def.type == "date": + if not isinstance(h5_type, h5py.h5t.TypeStringID): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a string" + ) + elif h5_type.get_size() != 8: + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a 8-character string" + ) + + pattern = re.compile( + r"^(?:[0-9]{4})(?:(?:0[1-9]|1[0-2])(?:0[1-9]|[12][0-9]|3[01]))$" + ) + if not pattern.match(attr): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a valid date: {attr}" + ) + + elif attr_def.type == "datetime": + if not isinstance(h5_type, h5py.h5t.TypeStringID): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a string" + ) + pattern = re.compile( + r"^(?:" + r"([0-9]{4})" # year + r"(?:(?:0[1-9]|1[0-2])" # month + r"(?:0[1-9]|[12][0-9]|3[01]))" # day + r"T" # literal 'T' separator + r"(?:[01]\d|2[0-3])" # hour + r"[0-5]\d" # minute + r"[0-5]\d" # second + r"(?:Z|[+-](?:[01]\d|2[0-3])[0-5]\d)" # timezone (Z or hhmm) + r")$" + ) + if not pattern.match(attr): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a valid datetime: {attr}" + ) + + elif attr_def.type == "uint8": + if not self._is_uint8(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a uint8" + ) + + elif attr_def.type == "uint16": + if not self._is_uint16(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a uint16" + ) + + elif attr_def.type == "uint32": + if not self._is_uint32(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a uint32" + ) + + elif attr_def.type == "int32": + if not self._is_int32(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a int32" + ) + + elif attr_def.type == "float32": + if not self._is_float32(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a float32" + ) + + elif attr_def.type == "float64": + if not self._is_float64(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a float64" + ) + + elif attr_def.type == "enumeration": + if not self._is_enumeration(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not an enumeration" + ) + + else: + + raise Exception( + f"Programming error: unexpected type {attr_def.type}" + ) + + if attr_def.fixed_value: + if attr != attr_def.fixed_value: + self._error( + f"{ctxt_name} attribute '{attr_def.name}' has value '{attr}', whereas '{attr_def.fixed_value}' is expected" + ) + + attr_dict = {a.name: a for a in attr_list} + for attr in group.attrs: + if attr not in attr_dict: + self._warning(f"Extra element in {ctxt_name} group: '{attr}'") + + def check(self): + + try: + f = h5py.File(self.filename, "r") + except Exception as e: + self._critical_error(str(e)) + return + + self._log_check("104_Dev9005") + file_size = os.stat(self.filename).st_size + if file_size > 10 * 1024 * 1024: + self._warning( + f"File size of {self.filename} = {file_size}, which exceeds 10 MB" + ) + + basename = os.path.basename(self.filename) + if not 
basename.startswith("104"): + self._warning("File name should start with '104'") + if not basename.upper().endswith(".H5"): + self._warning("File name should end with '.H5'") + pattern = r"^104[a-zA-Z0-9]{4}[a-zA-Z0-9\-_]{1,54}\.(?:h5|H5)$" + if not re.match(pattern, basename): + self._warning( + f"File name '{basename}' does not match expected pattern '{pattern}'" + ) + + self._log_check("104_Dev1018") + for key in f.keys(): + if key not in ( + "Group_F", + "WaterLevel", + ): + self._warning(f"Unexpected element {key} in top level group") + + if "Group_F" in f.keys(): + self._validate_group_f(f, f["Group_F"]) + else: + self._critical_error("No feature information group ('Group_F')") + + # Cf Table 12-1 - General metadata, related to the entire HDF5 file + topLevelAttributesList = [ + AttributeDefinition( + name="productSpecification", + required=True, + type="string", + fixed_value="INT.IHO.S-104.2.0", + ), + AttributeDefinition( + name="issueDate", required=True, type="date", fixed_value=None + ), + AttributeDefinition( + name="horizontalCRS", required=True, type="int32", fixed_value=None + ), + AttributeDefinition( + name="westBoundLongitude", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="eastBoundLongitude", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="southBoundLatitude", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="northBoundLatitude", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="geographicIdentifier", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="nameOfHorizontalCRS", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="typeOfHorizontalCRS", + required=False, + type="enumeration", + fixed_value=None, + ), + AttributeDefinition( + name="horizontalCS", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="horizontalDatum", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="nameOfHorizontalDatum", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="primeMeridian", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="spheroid", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="projectionMethod", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="projectionParameter1", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="projectionParameter2", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="projectionParameter3", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="projectionParameter4", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="projectionParameter5", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="falseNorthing", required=False, type="float64", fixed_value=None + ), + AttributeDefinition( + name="falseEasting", required=False, type="float64", fixed_value=None + ), + AttributeDefinition( + name="epoch", required=False, type="string", fixed_value=None + ), + AttributeDefinition( + name="issueTime", required=True, type="time", fixed_value=None + ), + AttributeDefinition( + name="waterLevelTrendThreshold", + required=True, + 
type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="datasetDeliveryInterval", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="trendInterval", required=False, type="uint32", fixed_value=None + ), + AttributeDefinition( + name="verticalDatumEpoch", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="verticalCS", required=True, type="int32", fixed_value=None + ), + AttributeDefinition( + name="verticalCoordinateBase", + required=True, + type="enumeration", + fixed_value=2, + ), + AttributeDefinition( + name="verticalDatumReference", + required=True, + type="enumeration", + fixed_value=None, + ), + # S104_Dev1003 + AttributeDefinition( + name="verticalDatum", required=True, type="int32", fixed_value=None + ), + ] + + self._log_check("104_Dev1002") + self._log_check("104_Dev1003") + self._check_attributes("top level", f, topLevelAttributesList) + + self._log_check("104_Dev1004") + if "epoch" in f.attrs and not f.attrs["epoch"]: + self._warning("Attribute epoch present but empty or blank.") + + self._log_check("104_Dev1005") + if "verticalDatum" in f.attrs and not f.attrs["verticalDatum"]: + self._warning("Attribute verticalDatum present but empty or blank.") + + self._log_check("104_Dev1007") + self._validate_metadata(f, self.filename) + self._validate_nameOfHorizontalCRS(f) + self._validate_typeOfHorizontalCRS(f) + self._validate_horizontalCS(f) + self._validate_horizontalDatum(f) + self._validate_nameOfHorizontalDatum(f) + self._validate_primeMeridian(f) + self._validate_spheroid(f) + self._validate_projectionMethod(f) + self._validate_projectionParameters(f) + self._validate_datasetDeliveryInterval(f) + self._validate_verticalCS(f) + self._validate_verticalCoordinateBase(f) + self._validate_verticalDatumReference(f) + self._validate_verticalDatum("top level", f) + self._validate_epoch(f) + self._validate_horizontalCRS(f) + self._validate_bounds("top level", f) + + if "WaterLevel" in f.keys(): + self._validate_WaterLevel(f) + else: + self._critical_error("Missing /WaterLevel group") + + self.checks_done = sorted(self.checks_done) + + def _validate_enumeration(self, group, attr_name, expected_values): + h5_type = group.attrs.get_id(attr_name).get_type() + if isinstance(h5_type, h5py.h5t.TypeEnumID): + if h5_type.get_nmembers() != len(expected_values): + self._warning( + f"Expected {len(expected_values)} members for enumeration {attr_name}" + ) + else: + for code in expected_values: + try: + value = h5_type.enum_nameof(code).decode("utf-8") + except Exception: + value = None + self._warning( + f"Enumeration {attr_name}: did not find value for code {code}" + ) + if value: + expected = expected_values[code] + if value != expected: + self._error( + f"Enumeration {attr_name}: for code {code}, found value {value}, whereas {expected} was expected" + ) + + def _validate_metadata(self, f, filename): + if "metadata" in f.attrs: + metadata = f.attrs["metadata"] + if isinstance(metadata, str) and metadata: + basename = os.path.basename(filename) + if basename.endswith(".h5") or basename.endswith(".H5"): + basename = basename[0:-3] + if metadata not in (f"MD_{basename}.xml", f"MD_{basename}.XML"): + self._critical_error( + f"Top level attribute metadata has value '{metadata}', whereas it should be empty, 'MD_{basename}.xml' or 'MD_{basename}.XML'" + ) + + def _is_horizontalCRS_minus_1(self, f): + return _get_int_attr_or_none(f, "horizontalCRS") == -1 + + def _validate_nameOfHorizontalCRS(self, f): + if 
"nameOfHorizontalCRS" in f.attrs: + nameOfHorizontalCRS = f.attrs["nameOfHorizontalCRS"] + if isinstance(nameOfHorizontalCRS, str) and not nameOfHorizontalCRS: + self._warning( + "Top level attribute nameOfHorizontalCRS must not be the empty string" + ) + elif self._is_horizontalCRS_minus_1(f): + self._warning( + "Top level attribute nameOfHorizontalCRS is missing, but it is mandatory when horizontalCRS = -1" + ) + + def _validate_typeOfHorizontalCRS(self, f): + if "typeOfHorizontalCRS" in f.attrs: + expected_values = { + 1: "geodeticCRS2D", + 2: "projectedCRS", + } + self._validate_enumeration(f, "typeOfHorizontalCRS", expected_values) + elif self._is_horizontalCRS_minus_1(f): + self._warning( + "Top level attribute typeOfHorizontalCRS is missing, but it is mandatory when horizontalCRS = -1" + ) + + def _validate_horizontalCS(self, f): + if "horizontalCS" in f.attrs: + horizontalCS = _get_int_attr_or_none(f, "horizontalCS") + typeOfHorizontalCRS = _get_int_attr_or_none(f, "typeOfHorizontalCRS") + if typeOfHorizontalCRS == 1: # geodeticCRS2D + if horizontalCS != 6422: + self._warning( + "Top level attribute horizontalCS value should be 6422 since typeOfHorizontalCRS=1" + ) + elif typeOfHorizontalCRS == 2: # projectedCRS + if horizontalCS not in (4400, 4500): + self._warning( + "Top level attribute horizontalCS value should be 4400 or 4500 since typeOfHorizontalCRS=2" + ) + elif self._is_horizontalCRS_minus_1(f): + self._warning( + "Top level attribute horizontalCS is missing, but it is mandatory when horizontalCRS = -1" + ) + + @staticmethod + def _get_proj_db(): + try: + from osgeo import osr + except ImportError: + return None + for path in osr.GetPROJSearchPaths(): + filename = os.path.join(path, "proj.db") + if os.path.exists(filename): + import sqlite3 + + return sqlite3.connect(filename) + return None + + def _validate_horizontalDatum(self, f): + if "horizontalDatum" in f.attrs: + horizontalDatum = _get_int_attr_or_none(f, "horizontalDatum") + if horizontalDatum is not None and horizontalDatum != -1: + conn = S104Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM geodetic_datum WHERE auth_name = 'EPSG' and code = ?", + (horizontalDatum,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level attribute horizontalDatum = {horizontalDatum} does not match with a known EPSG datum" + ) + + elif self._is_horizontalCRS_minus_1(f): + self._warning( + "Top level attribute horizontalDatum is missing, but it is mandatory when horizontalCRS = -1" + ) + + def _is_horizontalDatum_minus_1(self, f): + return _get_int_attr_or_none(f, "horizontalDatum") == -1 + + def _validate_nameOfHorizontalDatum(self, f): + if "nameOfHorizontalDatum" in f.attrs: + nameOfHorizontalDatum = f.attrs["nameOfHorizontalDatum"] + if isinstance(nameOfHorizontalDatum, str) and not nameOfHorizontalDatum: + self._warning( + "Top level attribute nameOfHorizontalDatum must not be the empty string" + ) + elif self._is_horizontalDatum_minus_1(f): + self._warning( + "Top level attribute nameOfHorizontalDatum is missing, but it is mandatory when horizontalDatum = -1" + ) + + def _validate_primeMeridian(self, f): + if "primeMeridian" in f.attrs: + primeMeridian = _get_int_attr_or_none(f, "primeMeridian") + if primeMeridian is not None: + conn = S104Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM prime_meridian WHERE auth_name = 'EPSG' and code = ?", + (primeMeridian,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level 
attribute primeMeridian = {primeMeridian} does not match with a known EPSG prime meridian" + ) + + elif self._is_horizontalDatum_minus_1(f): + self._warning( + "Top level attribute primeMeridian is missing, but it is mandatory when horizontalDatum = -1" + ) + + def _validate_spheroid(self, f): + if "spheroid" in f.attrs: + spheroid = _get_int_attr_or_none(f, "spheroid") + if spheroid is not None: + conn = S104Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM ellipsoid WHERE auth_name = 'EPSG' and code = ?", + (spheroid,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level attribute spheroid = {spheroid} does not match with a known EPSG spheroid" + ) + + elif self._is_horizontalDatum_minus_1(f): + self._warning( + "Top level attribute spheroid is missing, but it is mandatory when horizontalDatum = -1" + ) + + def _validate_projectionMethod(self, f): + if "projectionMethod" in f.attrs: + projectionMethod = _get_int_attr_or_none(f, "projectionMethod") + if projectionMethod is not None: + conn = S104Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM conversion_method WHERE auth_name = 'EPSG' and code = ?", + (projectionMethod,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level attribute projectionMethod = {projectionMethod} does not match with a known EPSG projectionMethod" + ) + + else: + typeOfHorizontalCRS = _get_int_attr_or_none(f, "typeOfHorizontalCRS") + if typeOfHorizontalCRS == 2: + self._warning( + "Top level attribute projectionMethod is missing, but it is mandatory when typeOfHorizontalCRS = 2" + ) + + def _validate_projectionParameters(self, f): + + for attr_name in ( + "projectionParameter1", + "projectionParameter2", + "projectionParameter3", + "projectionParameter4", + "projectionParameter5", + "falseNorthing", + "falseEasting", + ): + if attr_name in f.attrs and "projectionMethod" not in f.attrs: + self._warning( + f"Top level attribute {attr_name} is present, but it should not be because projectionMethod is not set" + ) + + def _validate_datasetDeliveryInterval(self, f): + + if "datasetDeliveryInterval" in f.attrs: + datasetDeliveryInterval = f.attrs["datasetDeliveryInterval"] + if isinstance(datasetDeliveryInterval, str): + iso8601_duration_regex = re.compile( + r"^P" # starts with 'P' + r"(?:(\d+(?:\.\d+)?)Y)?" # years + r"(?:(\d+(?:\.\d+)?)M)?" # months + r"(?:(\d+(?:\.\d+)?)W)?" # weeks + r"(?:(\d+(?:\.\d+)?)D)?" # days + r"(?:T" # optional time part + r"(?:(\d+(?:\.\d+)?)H)?" # hours + r"(?:(\d+(?:\.\d+)?)M)?" # minutes + r"(?:(\d+(?:\.\d+)?)S)?" 
# seconds + r")?$" + ) + if not iso8601_duration_regex.match(datasetDeliveryInterval): + self._error( + "Top level attribute datasetDeliveryInterval is not a valid ISO8601 duration" + ) + + def _validate_verticalCS(self, f): + verticalCS = _get_int_attr_or_none(f, "verticalCS") + if verticalCS is not None and verticalCS not in (6498, 6499): + self._error("Top level attribute verticalCS must be 6498 or 6499") + + def _validate_verticalCoordinateBase(self, f): + if "verticalCoordinateBase" in f.attrs: + expected_values = { + 1: "seaSurface", + 2: "verticalDatum", + 3: "seaBottom", + } + self._validate_enumeration(f, "verticalCoordinateBase", expected_values) + + def _validate_verticalDatumReference(self, f): + if "verticalDatumReference" in f.attrs: + expected_values = { + 1: "s100VerticalDatum", + 2: "EPSG", + } + self._validate_enumeration(f, "verticalDatumReference", expected_values) + + def _validate_verticalDatum(self, ctxt_name, f): + verticalDatum = _get_int_attr_or_none(f, "verticalDatum") + if verticalDatum is None: + return + verticalDatumReference = _get_int_attr_or_none(f, "verticalDatumReference") + if verticalDatumReference == 1: + if not ( + (verticalDatum >= 1 and verticalDatum <= 30) + or verticalDatum in (44, 46, 47, 48, 49) + ): + self._warning( + f"{ctxt_name} attribute verticalDatum has value '{verticalDatum}', whereas it should be in [1, 30] range or 44, 46, 47, 48 or 49" + ) + elif verticalDatumReference == 2: + conn = S104Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM vertical_datum WHERE auth_name = 'EPSG' and code = ?", + (verticalDatum,), + ) + if not cursor.fetchone(): + self._warning( + f"{ctxt_name} attribute verticalDatum = {verticalDatum} does not match with a known EPSG verticalDatum" + ) + + def _validate_epoch(self, f): + self._log_check("104_Dev1007") + epoch = _get_float_attr_or_none(f, "epoch") + if epoch and not (epoch >= 1980 and epoch <= 2100): + self._warning(f"Top level attribute epoch has invalid value: {epoch}") + + def _validate_horizontalCRS(self, f): + self._log_check("104_Dev1009") + horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS") + if horizontalCRS is not None and horizontalCRS != -1: + conn = S104Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM crs_view WHERE auth_name = 'EPSG' and code = ? and type in ('geographic 2D', 'projected')", + (horizontalCRS,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level attribute horizontalCRS = {horizontalCRS} does not match with a known EPSG horizontal CRS" + ) + + def _is_geographic_2D(self, f): + horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS") + if horizontalCRS is not None: + if horizontalCRS == 4326: + return True + conn = S104Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM geodetic_crs WHERE auth_name = 'EPSG' and code = ? 
and type = 'geographic 2D'", + (horizontalCRS,), + ) + if cursor.fetchone(): + return True + return False + + def _validate_bounds(self, ctxt_name, f): + west = _get_float_attr_or_none(f, "westBoundLongitude") + east = _get_float_attr_or_none(f, "eastBoundLongitude") + north = _get_float_attr_or_none(f, "northBoundLatitude") + south = _get_float_attr_or_none(f, "southBoundLatitude") + if ( + west is not None + and east is not None + and north is not None + and south is not None + ): + + if not (west >= -180 and west <= 180): + self._warning( + f"{ctxt_name}: westBoundLongitude is not in [-180, 180] range" + ) + if not (east >= -180 and east <= 180): + self._warning( + f"{ctxt_name}: eastBoundLongitude is not in [-180, 180] range" + ) + if west >= east: + self._warning( + f"{ctxt_name}: westBoundLongitude is greater or equal to eastBoundLongitude" + ) + if not (north >= -90 and north <= 90): + self._warning( + f"{ctxt_name}: northBoundLatitude is not in [-90, 90] range" + ) + if not (south >= -90 and south <= 90): + self._warning( + f"{ctxt_name}: southBoundLatitude is not in [-90, 90] range" + ) + if south >= north: + self._warning( + f"{ctxt_name}: southBoundLatitude is greater or equal to northBoundLatitude" + ) + + def _validate_group_f(self, rootGroup, group_f): + + for key in group_f.keys(): + if key not in ( + "featureCode", + "WaterLevel", + ): + self._warning(f"Unexpected element {key} in Group_F") + + self._log_check("104_Dev1008") + if "featureCode" in group_f.keys(): + self._validate_group_f_featureCode( + rootGroup, group_f, group_f["featureCode"] + ) + else: + self._critical_error( + "No featureCode array in feature information group ('/Group_F/featureCode')" + ) + + def _validate_group_f_featureCode(self, rootGroup, group_f, featureCode): + + if not isinstance(featureCode, h5py.Dataset): + self._critical_error("'/Group_F/featureCode' is not a dataset") + return + + if len(featureCode.shape) != 1: + self._critical_error( + "'/Group_F/featureCode' is not a one-dimensional dataset" + ) + return + + self._log_check("104_Dev1009") + values = set([v.decode("utf-8") for v in featureCode[:]]) + if "WaterLevel" not in values: + self._critical_error("WaterLevel feature missing from featureCode array") + + self._log_check("104_Dev1010") + for value in values: + if value not in ("WaterLevel",): + # + self._critical_error( + f"Group_F feature information must correspond to feature catalog. 
Did not expect {value}" + ) + + if value not in group_f.keys(): + self._critical_error( + f"Feature information dataset for feature type {value} missing" + ) + + if value not in rootGroup.keys(): + self._critical_error(f"No feature instances for feature type {value}") + + if "WaterLevel" in group_f.keys(): + self._validate_group_f_WaterLevel(group_f) + + def _validate_group_f_WaterLevel(self, group_f): + self._log_check("104_Dev1012") + + WaterLevel = group_f["WaterLevel"] + if not isinstance(WaterLevel, h5py.Dataset): + self._critical_error("'/Group_F/WaterLevel' is not a dataset") + elif len(WaterLevel.shape) != 1: + self._critical_error( + "'/Group_F/WaterLevel' is not a one-dimensional dataset" + ) + elif WaterLevel.dtype != [ + ("code", "O"), + ("name", "O"), + ("uom.name", "O"), + ("fillValue", "O"), + ("datatype", "O"), + ("lower", "O"), + ("upper", "O"), + ("closure", "O"), + ]: + self._critical_error("'/Group_F/WaterLevel' has not expected data type") + else: + self._log_check("104_Dev1013") + + if WaterLevel.shape not in ((2,), (3,)): + self._critical_error("'/Group_F/WaterLevel' is not of shape 2 or 3") + + type = WaterLevel.id.get_type() + assert isinstance(type, h5py.h5t.TypeCompoundID) + for member_idx in range(type.get_nmembers()): + subtype = type.get_member_type(member_idx) + if not isinstance(subtype, h5py.h5t.TypeStringID): + self._critical_error( + f"Member of index {member_idx} in /Group_F/WaterLevel is not a string" + ) + return + if not subtype.is_variable_str(): + self._critical_error( + f"Member of index {member_idx} in /Group_F/WaterLevel is not a variable length string" + ) + + values = WaterLevel[:] + expected_values = [ + (0, 0, "waterLevelHeight"), + (0, 1, "Water Level Height"), + (0, 2, "metre"), + (0, 3, "-9999.00"), + (0, 4, "H5T_FLOAT"), + (0, 5, "-99.99"), + (0, 6, "99.99"), + (0, 7, "closedInterval"), + (1, 0, "waterLevelTrend"), + (1, 1, "Water Level Trend"), + (1, 2, ""), + (1, 3, "0"), + (1, 4, "H5T_ENUM"), + (1, 5, ""), + (1, 6, ""), + (1, 7, ""), + (2, 0, "uncertainty"), + (2, 1, "Uncertainty"), + (2, 2, "metre"), + (2, 3, "-1.00"), + (2, 4, "H5T_FLOAT"), + (2, 5, "0.00"), + (2, 6, "99.99"), + (2, 7, "closedInterval"), + ] + + for row, col, expected_value in expected_values: + if row < WaterLevel.shape[0]: + value = values[row][col].decode("utf-8") + if value != expected_value: + self._critical_error( + f"/Group_F/WaterLevel: row {row}, {col}, got value '{value}', whereas '{expected_value}' is expected" + ) + + def _validate_WaterLevel(self, f): + WaterLevel = f["WaterLevel"] + if not isinstance(WaterLevel, h5py.Group): + self._critical_error("/WaterLevel is not a group") + return + + # Cf Table 12-2 - Feature Type metadata, pertaining to the WaterLevel feature type + + self._log_check("104_Dev2002") # for dimension + attr_list = [ + AttributeDefinition( + name="dataCodingFormat", + required=True, + type="enumeration", + fixed_value=2, + ), + AttributeDefinition( + name="dimension", + required=True, + type="uint8", + fixed_value=2, + ), + AttributeDefinition( + name="commonPointRule", + required=True, + type="enumeration", + fixed_value=None, + ), + AttributeDefinition( + name="horizontalPositionUncertainty", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="verticalUncertainty", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="timeUncertainty", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="numInstances", + required=True, 
+ type="uint32", + fixed_value=None, + ), + AttributeDefinition( + name="methodWaterLevelProduct", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="minDatasetHeight", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="maxDatasetHeight", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="sequencingRule.type", + required=True, + type="enumeration", + fixed_value=1, + ), + AttributeDefinition( + name="sequencingRule.scanDirection", + required=True, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="interpolationType", + required=True, + type="enumeration", + fixed_value=1, + ), + AttributeDefinition( + name="dataOffsetCode", + required=False, + type="enumeration", + fixed_value=5, + ), + ] + + self._log_check("104_Dev2001") + self._log_check("104_Dev2008") + self._log_check("104_Dev2009") + self._log_check("104_Dev2018") + self._log_check("104_Dev2019") + self._check_attributes("WaterLevel group", WaterLevel, attr_list) + + self._log_check("104_Dev2003") + if "commonPointRule" in WaterLevel.attrs: + expected_values = { + 1: "average", + 2: "low", + 3: "high", + 4: "all", + } + self._validate_enumeration(WaterLevel, "commonPointRule", expected_values) + + self._log_check("104_Dev2004") + commonPointRule = _get_int_attr_or_none(WaterLevel, "commonPointRule") + if commonPointRule != 4: + self._warning( + '/WaterLevel["commonPointRule"] attribute value is not the recommended value 4 (all)' + ) + + if "dataCodingFormat" in WaterLevel.attrs: + expected_values = { + 1: "Fixed Stations", + 2: "Regular Grid", + 3: "Ungeorectified Grid", + 4: "Moving Platform", + 5: "Irregular Grid", + 6: "Variable cell size", + 7: "TIN", + 8: "Fixed Stations (Stationwise)", + 9: "Feature oriented Regular Grid", + } + self._validate_enumeration(WaterLevel, "dataCodingFormat", expected_values) + + self._log_check("104_Dev2005") + horizontalPositionUncertainty = _get_float_attr_or_none( + WaterLevel, "horizontalPositionUncertainty" + ) + if horizontalPositionUncertainty and not ( + horizontalPositionUncertainty == -1.0 or horizontalPositionUncertainty >= 0 + ): + self._warning( + '/WaterLevel["horizontalPositionUncertainty"] attribute value must be -1 or positive' + ) + + verticalUncertainty = _get_float_attr_or_none(WaterLevel, "verticalUncertainty") + if verticalUncertainty and not ( + verticalUncertainty == -1.0 or verticalUncertainty >= 0 + ): + self._warning( + '/WaterLevel["verticalUncertainty"] attribute value must be -1 or positive' + ) + + self._log_check("104_Dev2006") + timeUncertainty = _get_float_attr_or_none(WaterLevel, "timeUncertainty") + if timeUncertainty and not (timeUncertainty == -1.0 or timeUncertainty >= 0): + self._warning( + '/WaterLevel["timeUncertainty"] attribute value must be -1 or positive' + ) + + self._log_check("104_Dev2007") + numInstances = _get_int_attr_or_none(WaterLevel, "numInstances") + if numInstances is not None: + if numInstances <= 0: + self._critical_error( + '/WaterLevel["numInstances"] attribute value must be >= 1' + ) + numInstances = None + + scanDirection_values = None + if "sequencingRule.scanDirection" in WaterLevel.attrs: + scanDirection = WaterLevel.attrs["sequencingRule.scanDirection"] + if isinstance(scanDirection, str): + # strip leading space. IMHO there should not be any, but + # the examples in the specification sometimes show one... 
+ scanDirection_values = [x.lstrip() for x in scanDirection.split(",")] + + self._log_check("104_Dev2016") + if len(scanDirection_values) != 2: + self._warning( + '/WaterLevel["sequencingRule.scanDirection"] attribute should have 2 values' + ) + elif "axisNames" in WaterLevel.keys(): + + scanDirection_values_without_orientation = [] + for v in scanDirection_values: + if v.startswith("-"): + scanDirection_values_without_orientation.append(v[1:]) + else: + scanDirection_values_without_orientation.append(v) + scanDirection_values_without_orientation = set( + scanDirection_values_without_orientation + ) + + self._log_check("104_Dev2017") + axisNames = WaterLevel["axisNames"] + if ( + isinstance(axisNames, h5py.Dataset) + and axisNames.shape == (2,) + and isinstance(axisNames.id.get_type(), h5py.h5t.TypeStringID) + ): + axisNames_values = set( + [v.decode("utf-8") for v in axisNames[:]] + ) + if scanDirection_values_without_orientation != axisNames_values: + self._warning( + f"Sequencing rule scanDirection contents ({scanDirection_values_without_orientation}) does not match axis names ({axisNames_values})" + ) + + self._validate_axisNames(f, WaterLevel) + + subgroups = set( + [name for name, item in WaterLevel.items() if isinstance(item, h5py.Group)] + ) + + minDatasetHeight = _get_float_attr_or_none(WaterLevel, "minDatasetHeight") + if ( + minDatasetHeight is not None + and minDatasetHeight != -9999.0 + and minDatasetHeight < -99.99 + ): + self._warning( + f"{WaterLevel.name}: minDatasetHeight={minDatasetHeight} should be in [-99.99, 99.99] range" + ) + + maxDatasetHeight = _get_float_attr_or_none(WaterLevel, "maxDatasetHeight") + if maxDatasetHeight is not None and maxDatasetHeight > 99.99: + self._warning( + f"{WaterLevel.name}: maxDatasetHeight={maxDatasetHeight} should be in [-99.99, 99.99] range" + ) + + if ( + minDatasetHeight is not None + and maxDatasetHeight is not None + and minDatasetHeight != -9999.0 + and maxDatasetHeight != -9999.0 + and minDatasetHeight > maxDatasetHeight + ): + self._warning( + f"{WaterLevel.name}: minDatasetHeight={minDatasetHeight} > maxDatasetHeight={maxDatasetHeight}" + ) + + self._log_check("104_Dev2013") + if len(subgroups) == 0: + self._critical_error("/WaterLevel has no groups") + else: + for i in range(1, len(subgroups) + 1): + expected_name = "WaterLevel.%02d" % i + if expected_name not in subgroups: + self._critical_error( + f"/WaterLevel/{expected_name} group does not exist" + ) + + for name in subgroups: + if not name.startswith("WaterLevel."): + self._warning(f"/WaterLevel/{name} is an unexpected group") + + self._log_check("104_Dev2014") + if numInstances and len(subgroups) != numInstances: + self._critical_error( + f"/WaterLevel has {len(subgroups)} groups whereas numInstances={numInstances}" + ) + + self._log_check("104_Dev2015") + self._validate_sequencingRuleType(WaterLevel) + + # Attributes and groups already checked above + self._log_check("104_Dev2021") + for name, item in WaterLevel.items(): + if isinstance(item, h5py.Dataset) and name != "axisNames": + self._warning(f"/WaterLevel has unexpected dataset {name}") + + if isinstance(item, h5py.Group) and name.startswith("WaterLevel."): + self._validate_WaterLevel_instance(f, WaterLevel, item) + + def _validate_sequencingRuleType(self, f): + if "sequencingRule.type" in f.attrs: + expected_values = { + 1: "linear", + 2: "boustrophedonic", + 3: "CantorDiagonal", + 4: "spiral", + 5: "Morton", + 6: "Hilbert", + } + self._validate_enumeration(f, "sequencingRule.type", expected_values) + + def 
_validate_WaterLevel_instance(self, f, WaterLevel, instance): + + # Cf Table 12-3 - Feature Instance metadata, pertaining to the feature instance + attr_list = [ + AttributeDefinition( + name="westBoundLongitude", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="eastBoundLongitude", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="southBoundLatitude", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="northBoundLatitude", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="numberOfTimes", + required=True, + type="uint32", + fixed_value=None, + ), + AttributeDefinition( + name="timeRecordInterval", + required=False, + type="uint16", + fixed_value=None, + ), + AttributeDefinition( + name="dateTimeOfFirstRecord", + required=True, + type="datetime", + fixed_value=None, + ), + AttributeDefinition( + name="dateTimeOfLastRecord", + required=True, + type="datetime", + fixed_value=None, + ), + AttributeDefinition( + name="numGRP", + required=True, + type="uint32", + fixed_value=None, + ), + AttributeDefinition( + name="dataDynamicity", + required=True, + type="enumeration", + fixed_value=None, + ), + AttributeDefinition( + name="verticalDatumEpoch", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="gridOriginLongitude", + required=True, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="gridOriginLatitude", + required=True, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="gridSpacingLongitudinal", + required=True, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="gridSpacingLatitudinal", + required=True, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="numPointsLongitudinal", + required=True, + type="uint32", + fixed_value=None, + ), + AttributeDefinition( + name="numPointsLatitudinal", + required=True, + type="uint32", + fixed_value=None, + ), + AttributeDefinition( + name="startSequence", + required=True, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="verticalDatumReference", + required=False, + type="enumeration", + fixed_value=None, + ), + AttributeDefinition( + name="verticalDatum", + required=False, + type="int32", + fixed_value=None, + ), + ] + + self._log_check("104_Dev3001") + self._log_check("104_Dev3005") + self._log_check("104_Dev3019") + self._log_check("104_Dev3020") + self._check_attributes( + f"WaterLevel feature instance group {instance.name}", + instance, + attr_list, + ) + + self._log_check("104_Dev3021") + countGroups = 0 + for name, item in instance.items(): + if isinstance(item, h5py.Dataset) and name not in ( + "uncertainty", + "domainExtent.polygon", + ): + self._warning( + f"WaterLevel feature instance group {instance.name} has unexpected dataset '{name}'" + ) + + elif isinstance(item, h5py.Group): + if name.startswith("Group_"): + countGroups += 1 + else: + self._warning( + f"WaterLevel feature instance group {instance.name} has unexpected group '{name}'" + ) + + if ( + "dateTimeOfFirstRecord" in instance.attrs + and "dateTimeOfLastRecord" in instance.attrs + ): + dateTimeOfFirstRecord = instance.attrs["dateTimeOfFirstRecord"] + dateTimeOfLastRecord = instance.attrs["dateTimeOfLastRecord"] + if isinstance(dateTimeOfFirstRecord, str) and isinstance( + dateTimeOfLastRecord, str + ): + self._log_check("104_Dev3006") + if dateTimeOfLastRecord < 
dateTimeOfFirstRecord: + self._error( + f"WaterLevel feature instance group {instance.name}: dateTimeOfLastRecord < dateTimeOfFirstRecord" + ) + else: + dateTimeOfFirstRecord = None + dateTimeOfLastRecord = None + else: + dateTimeOfFirstRecord = None + dateTimeOfLastRecord = None + + numGRP = _get_int_attr_or_none(instance, "numGRP") + if numGRP is not None: + self._log_check("104_Dev3007") + if numGRP <= 0: + self._error( + f"WaterLevel feature instance group {instance.name}: numGRP is <= 0" + ) + self._log_check("104_Dev3023") + if numGRP != countGroups: + self._error( + f"WaterLevel feature instance group {instance.name}: Count of values groups does not match attribute numGRP in instance group" + ) + + numberOfTimes = _get_int_attr_or_none(instance, "numberOfTimes") + if numberOfTimes is not None: + self._log_check("104_Dev3003") + if numberOfTimes <= 0: + self._error( + f"WaterLevel feature instance group {instance.name}: numberOfTimes is <= 0" + ) + if numGRP is not None and numberOfTimes != numGRP: + self._error( + f"WaterLevel feature instance group {instance.name}: numberOfTimes is different from numGRP" + ) + + timeRecordInterval = _get_int_attr_or_none(instance, "timeRecordInterval") + if timeRecordInterval is not None: + self._log_check("104_Dev3004") + if timeRecordInterval <= 0: + self._critical_error( + f"WaterLevel feature instance group {instance.name}: timeRecordInterval is <= 0" + ) + elif ( + dateTimeOfFirstRecord + and dateTimeOfLastRecord + and len(dateTimeOfFirstRecord) == len("YYYYMMDDTHHMMSSZ") + and len(dateTimeOfLastRecord) == len("YYYYMMDDTHHMMSSZ") + and numberOfTimes + ): + from datetime import datetime, timezone + + start = ( + datetime.strptime(dateTimeOfFirstRecord, "%Y%m%dT%H%M%SZ") + .replace(tzinfo=timezone.utc) + .timestamp() + ) + end = ( + datetime.strptime(dateTimeOfLastRecord, "%Y%m%dT%H%M%SZ") + .replace(tzinfo=timezone.utc) + .timestamp() + ) + computedNumberOfTimes = 1 + (end - start) / timeRecordInterval + if computedNumberOfTimes != numberOfTimes: + self._warning( + f"WaterLevel feature instance group {instance.name}: given dateTimeOfFirstRecord, dateTimeOfLastRecord and timeRecordInterval, the number of time records should be {computedNumberOfTimes} whereas it is {numberOfTimes}" + ) + + present = [] + missing = [] + for name in ( + "westBoundLongitude", + "eastBoundLongitude", + "northBoundLatitude", + "southBoundLatitude", + ): + if name in instance.attrs: + present.append(name) + else: + missing.append(name) + + if present and missing: + self._critical_error( + f"WaterLevel feature instance group {instance.name}: attributes {present} are present, but {missing} are missing" + ) + + westBoundLongitude = _get_float_attr_or_none(instance, "westBoundLongitude") + eastBoundLongitude = _get_float_attr_or_none(instance, "eastBoundLongitude") + northBoundLatitude = _get_float_attr_or_none(instance, "northBoundLatitude") + southBoundLatitude = _get_float_attr_or_none(instance, "southBoundLatitude") + + top_westBoundLongitude = _get_float_attr_or_none(f, "westBoundLongitude") + top_eastBoundLongitude = _get_float_attr_or_none(f, "eastBoundLongitude") + top_northBoundLatitude = _get_float_attr_or_none(f, "northBoundLatitude") + top_southBoundLatitude = _get_float_attr_or_none(f, "southBoundLatitude") + + if ( + westBoundLongitude is not None + and eastBoundLongitude is not None + and northBoundLatitude is not None + and southBoundLatitude is not None + ): + + horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS") + if horizontalCRS and horizontalCRS > 0: 
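+ # Outline of the branch below (descriptive comment): + # - for a geographic 2D CRS, the bound attributes are longitudes and + #   latitudes and can be range-checked directly; + # - for a projected CRS, they are easting/northing values that are + #   first reprojected to EPSG:4326 before being compared to the CRS + #   area of use and to the top level bounding box.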
+ if self._is_geographic_2D(f): + self._validate_bounds( + f"WaterLevel feature instance group {instance.name}", + instance, + ) + + if ( + top_westBoundLongitude is not None + and top_eastBoundLongitude is not None + and top_northBoundLatitude is not None + and top_southBoundLatitude is not None + ): + if westBoundLongitude < top_westBoundLongitude: + self._error( + f"WaterLevel feature instance group {instance.name}: westBoundLongitude={westBoundLongitude} < top_westBoundLongitude={top_westBoundLongitude}" + ) + if southBoundLatitude < top_southBoundLatitude: + self._error( + f"WaterLevel feature instance group {instance.name}: southBoundLatitude={southBoundLatitude} < top_southBoundLatitude={top_southBoundLatitude}" + ) + if eastBoundLongitude > top_eastBoundLongitude: + self._error( + f"WaterLevel feature instance group {instance.name}: eastBoundLongitude={eastBoundLongitude} > top_eastBoundLongitude={top_eastBoundLongitude}" + ) + if northBoundLatitude > top_northBoundLatitude: + self._error( + f"WaterLevel feature instance group {instance.name}: northBoundLatitude={northBoundLatitude} > top_northBoundLatitude={top_northBoundLatitude}" + ) + + else: + if ( + abs(westBoundLongitude) <= 180 + and abs(eastBoundLongitude) <= 180 + and abs(northBoundLatitude) <= 90 + and abs(southBoundLatitude) <= 90 + ): + self._error( + f"WaterLevel feature instance group {instance.name}: westBoundLongitude, eastBoundLongitude, northBoundLatitude, southBoundLatitude are longitudes/latitudes whereas they should be projected coordinates, given the horizontalCRS is projected" + ) + + if gdal_available: + horizontalCRS_srs = osr.SpatialReference() + horizontalCRS_srs.SetAxisMappingStrategy( + osr.OAMS_TRADITIONAL_GIS_ORDER + ) + horizontalCRS_srs.ImportFromEPSG(int(horizontalCRS)) + + longlat_srs = osr.SpatialReference() + longlat_srs.SetAxisMappingStrategy( + osr.OAMS_TRADITIONAL_GIS_ORDER + ) + longlat_srs.ImportFromEPSG(4326) + ct = osr.CoordinateTransformation( + horizontalCRS_srs, longlat_srs + ) + westLon, southLat, eastLon, northLat = ct.TransformBounds( + westBoundLongitude, + southBoundLatitude, + eastBoundLongitude, + northBoundLatitude, + 21, + ) + + self._log_check("104_Dev3004") + crs_area_of_use = horizontalCRS_srs.GetAreaOfUse() + # Add a substantial epsilon as going a bit outside of the CRS area of use is usually fine + epsilon = 1 + if westLon + epsilon < crs_area_of_use.west_lon_degree: + self._error( + f"WaterLevel feature instance group {instance.name}: westLon={westLon} < crs_area_of_use.west_lon_degree={crs_area_of_use.west_lon_degree}" + ) + if southLat + epsilon < crs_area_of_use.south_lat_degree: + self._error( + f"WaterLevel feature instance group {instance.name}: southLat={southLat} < crs_area_of_use.south_lat_degree={crs_area_of_use.south_lat_degree}" + ) + if eastLon - epsilon > crs_area_of_use.east_lon_degree: + self._error( + f"WaterLevel feature instance group {instance.name}: eastLon={eastLon} > crs_area_of_use.east_lon_degree={crs_area_of_use.east_lon_degree}" + ) + if northLat - epsilon > crs_area_of_use.north_lat_degree: + self._error( + f"WaterLevel feature instance group {instance.name}: northLat={northLat} > crs_area_of_use.north_lat_degree={crs_area_of_use.north_lat_degree}" + ) + + if ( + top_westBoundLongitude is not None + and top_eastBoundLongitude is not None + and top_northBoundLatitude is not None + and top_southBoundLatitude is not None + ): + # Add an epsilon to take into account potential different ways of doing bounding box reprojection + epsilon = 0.01 + if 
westLon + epsilon < top_westBoundLongitude: + self._error( + f"WaterLevel feature instance group {instance.name}: westBoundLongitude={westLon} ({westBoundLongitude}) < top_westBoundLongitude={top_westBoundLongitude}" + ) + if southLat + epsilon < top_southBoundLatitude: + self._error( + f"WaterLevel feature instance group {instance.name}: southBoundLatitude={southLat} ({southBoundLatitude}) < top_southBoundLatitude={top_southBoundLatitude}" + ) + if eastLon - epsilon > top_eastBoundLongitude: + self._error( + f"WaterLevel feature instance group {instance.name}: eastBoundLongitude={eastLon} ({eastBoundLongitude}) > top_eastBoundLongitude={top_eastBoundLongitude}" + ) + if northLat - epsilon > top_northBoundLatitude: + self._error( + f"WaterLevel feature instance group {instance.name}: northBoundLatitude={northLat} ({northBoundLatitude}) > top_northBoundLatitude={top_northBoundLatitude}" + ) + + else: + self._warning( + "Test checking consistency of bounds in WaterLevel feature instance group compared to top level attributes skipped due to GDAL not available" + ) + + if eastBoundLongitude <= westBoundLongitude: + self._error( + f"WaterLevel feature instance group {instance.name}: eastBoundLongitude <= westBoundLongitude" + ) + if northBoundLatitude <= southBoundLatitude: + self._error( + f"WaterLevel feature instance group {instance.name}: northBoundLatitude <= southBoundLatitude" + ) + + if "domainExtent.polygon" in instance.keys() and present: + self._error( + f"WaterLevel feature instance group {instance.name}: both dataset 'domainExtent.polygon' and westBoundLongitude, eastBoundLongitude, northBoundLatitude, southBoundLatitude attributes are present" + ) + + gridOriginLongitude = _get_float_attr_or_none(instance, "gridOriginLongitude") + gridOriginLatitude = _get_float_attr_or_none(instance, "gridOriginLatitude") + if gridOriginLongitude is not None and gridOriginLatitude is not None: + + if ( + westBoundLongitude is not None + and eastBoundLongitude is not None + and northBoundLatitude is not None + and southBoundLatitude is not None + ): + self._log_check("104_Dev3009") + + # gridOriginLongitude is encoded as a float64, whereas westBoundLongitude is a float32, + # hence add some tolerance so the comparison is fair + if ( + gridOriginLongitude + 1e-6 * abs(gridOriginLongitude) + < westBoundLongitude + ): + self._error( + f"WaterLevel feature instance group {instance.name}: gridOriginLongitude={gridOriginLongitude} < westBoundLongitude={westBoundLongitude}" + ) + if ( + gridOriginLongitude - 1e-6 * abs(gridOriginLongitude) + > eastBoundLongitude + ): + self._error( + f"WaterLevel feature instance group {instance.name}: gridOriginLongitude={gridOriginLongitude} > eastBoundLongitude={eastBoundLongitude}" + ) + if ( + gridOriginLatitude + 1e-6 * abs(gridOriginLatitude) + < southBoundLatitude + ): + self._error( + f"WaterLevel feature instance group {instance.name}: gridOriginLatitude={gridOriginLatitude} < southBoundLatitude={southBoundLatitude}" + ) + if ( + gridOriginLatitude - 1e-6 * abs(gridOriginLatitude) + > northBoundLatitude + ): + self._error( + f"WaterLevel feature instance group {instance.name}: gridOriginLatitude={gridOriginLatitude} > northBoundLatitude={northBoundLatitude}" + ) + + if gdal_available and horizontalCRS and horizontalCRS > 0: + horizontalCRS_srs = osr.SpatialReference() + horizontalCRS_srs.SetAxisMappingStrategy( + osr.OAMS_TRADITIONAL_GIS_ORDER + ) + horizontalCRS_srs.ImportFromEPSG(horizontalCRS) + + longlat_srs = osr.SpatialReference()
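+ # OAMS_TRADITIONAL_GIS_ORDER forces longitude/latitude (x, y) axis + # ordering, so that the coordinate transformations below exchange + # (lon, lat) tuples regardless of the official EPSG axis order of the + # CRS.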
+ longlat_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) + longlat_srs.ImportFromEPSG(4326) + ct = osr.CoordinateTransformation(horizontalCRS_srs, longlat_srs) + origin_long, origin_lat, _ = ct.TransformPoint( + gridOriginLongitude, gridOriginLatitude, 0 + ) + + crs_area_of_use = horizontalCRS_srs.GetAreaOfUse() + # Add a substantial epsilon as going a bit outside of the CRS area of use is usually fine + epsilon = 1 + if origin_long + epsilon < crs_area_of_use.west_lon_degree: + self._error( + f"WaterLevel feature instance group {instance.name}: origin_long={origin_long} < crs_area_of_use.west_lon_degree={crs_area_of_use.west_lon_degree}" + ) + if origin_lat + epsilon < crs_area_of_use.south_lat_degree: + self._error( + f"WaterLevel feature instance group {instance.name}: origin_lat={origin_lat} < crs_area_of_use.south_lat_degree={crs_area_of_use.south_lat_degree}" + ) + if origin_long - epsilon > crs_area_of_use.east_lon_degree: + self._error( + f"WaterLevel feature instance group {instance.name}: origin_long={origin_long} > crs_area_of_use.east_lon_degree={crs_area_of_use.east_lon_degree}" + ) + if origin_lat - epsilon > crs_area_of_use.north_lat_degree: + self._error( + f"WaterLevel feature instance group {instance.name}: origin_lat={origin_lat} > crs_area_of_use.north_lat_degree={crs_area_of_use.north_lat_degree}" + ) + + self._log_check("104_Dev3010") + gridSpacingLongitudinal = _get_float_attr_or_none( + instance, "gridSpacingLongitudinal" + ) + if gridSpacingLongitudinal is not None and gridSpacingLongitudinal <= 0: + self._critical_error( + f"WaterLevel feature instance group {instance.name}: Grid spacing attribute in instance group has value out of range: gridSpacingLongitudinal <= 0" + ) + + self._log_check("104_Dev3010") + gridSpacingLatitudinal = _get_float_attr_or_none( + instance, "gridSpacingLatitudinal" + ) + if gridSpacingLatitudinal is not None and gridSpacingLatitudinal <= 0: + self._critical_error( + f"WaterLevel feature instance group {instance.name}: Grid spacing attribute in instance group has value out of range: gridSpacingLatitudinal <= 0" + ) + + self._log_check("104_Dev3011") + if ( + gridSpacingLongitudinal is not None + and eastBoundLongitude is not None + and westBoundLongitude is not None + and gridSpacingLongitudinal * (1 - 1e-2) + > 0.5 * (eastBoundLongitude - westBoundLongitude) + ): + self._warning( + f"WaterLevel feature instance group {instance.name}: Value of gridSpacingLongitudinal or gridSpacingLatitudinal in instance group too high: gridSpacingLongitudinal={gridSpacingLongitudinal} > 0.5 * (eastBoundLongitude - westBoundLongitude)={0.5 * (eastBoundLongitude - westBoundLongitude)}" + ) + + self._log_check("104_Dev3011") + if ( + gridSpacingLatitudinal is not None + and southBoundLatitude is not None + and northBoundLatitude is not None + and gridSpacingLatitudinal * (1 - 1e-2) + > 0.5 * (northBoundLatitude - southBoundLatitude) + ): + self._warning( + f"WaterLevel feature instance group {instance.name}: Value of gridSpacingLongitudinal or gridSpacingLatitudinal in instance group too high: gridSpacingLatitudinal={gridSpacingLatitudinal} > 0.5 * (northBoundLatitude - southBoundLatitude)={0.5 * (northBoundLatitude - southBoundLatitude)}" + ) + + self._log_check("104_Dev3012") + numPointsLongitudinal = _get_int_attr_or_none(instance, "numPointsLongitudinal") + if numPointsLongitudinal is not None and numPointsLongitudinal < 1: + self._critical_error( + f"WaterLevel feature instance group {instance.name}: Grid must be at least 1X1: numPointsLongitudinal < 1" + ) + +
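+ # Worked example for the grid consistency checks below (illustrative + # values, not from the specification): with westBoundLongitude=0.0, + # eastBoundLongitude=1.0 and numPointsLongitudinal=101, the expected + # gridSpacingLongitudinal is (1.0 - 0.0) / (101 - 1) = 0.01; the 1e-2 + # relative tolerance absorbs float32/float64 rounding of the attributes.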
self._log_check("104_Dev3012") + numPointsLatitudinal = _get_int_attr_or_none(instance, "numPointsLatitudinal") + if numPointsLatitudinal < 1: + self._critical_error( + f"WaterLevel feature instance group {instance.name}: Grid must be at least 1X1: numPointsLatitudinal < 1" + ) + + self._log_check("104_Dev3013") + if ( + gridSpacingLongitudinal is not None + and eastBoundLongitude is not None + and westBoundLongitude is not None + and numPointsLongitudinal is not None + and numPointsLongitudinal > 1 + and abs( + gridSpacingLongitudinal + - (eastBoundLongitude - westBoundLongitude) + / (numPointsLongitudinal - 1) + ) + > 1e-2 * gridSpacingLongitudinal + ): + self._warning( + f"WaterLevel feature instance group {instance.name}: Grid dimensions are incompatible with instance bounding box: gridSpacingLongitudinal={gridSpacingLongitudinal} != (eastBoundLongitude - westBoundLongitude) / (numPointsLongitudinal - 1)={(eastBoundLongitude - westBoundLongitude) / (numPointsLongitudinal - 1)}" + ) + + self._log_check("104_Dev3009") + if ( + gridSpacingLatitudinal is not None + and southBoundLatitude is not None + and northBoundLatitude is not None + and numPointsLatitudinal is not None + and numPointsLatitudinal > 1 + and ( + gridSpacingLatitudinal + - (northBoundLatitude - southBoundLatitude) / (numPointsLatitudinal - 1) + ) + > 1e-2 * gridSpacingLatitudinal + ): + self._warning( + f"WaterLevel feature instance group {instance.name}: Grid dimensions are incompatible with instance bounding box: gridSpacingLatitudinal={gridSpacingLatitudinal} != (northBoundLatitude - southBoundLatitude) / (numPointsLatitudinal - 1)={(northBoundLatitude - southBoundLatitude) / (numPointsLatitudinal - 1)}" + ) + + self._log_check("104_Dev3014") + # gridOriginLongitude is encoded as a float64, whereas westBoundLongitude on a float32 + # hence add some tolerance so comparison is fair + if ( + westBoundLongitude is not None + and gridOriginLongitude is not None + and abs(westBoundLongitude - gridOriginLongitude) + > 1e-6 * abs(westBoundLongitude) + ): + self._warning( + f"WaterLevel feature instance group {instance.name}: Grid origin does not coincide with instance bounding box; westBoundLongitude={westBoundLongitude} != gridOriginLongitude={_cast_to_float32(gridOriginLongitude)}" + ) + + self._log_check("104_Dev3014") + if ( + southBoundLatitude is not None + and gridOriginLatitude is not None + and abs(southBoundLatitude - gridOriginLatitude) + > 1e-6 * abs(southBoundLatitude) + ): + self._warning( + f"WaterLevel feature instance group {instance.name}: Grid origin does not coincide with instance bounding box: southBoundLatitude={southBoundLatitude} != gridOriginLatitude={_cast_to_float32(gridOriginLatitude)}" + ) + + self._log_check("104_Dev3015") + if "startSequence" in instance.attrs: + startSequence = instance.attrs["startSequence"] + if isinstance(startSequence, str): + startSequence = startSequence.split(",") + if ( + len(startSequence) != 2 + or _get_int_value_or_none(startSequence[0]) is None + or _get_int_value_or_none(startSequence[1]) is None + ): + self._warning( + f"WaterLevel feature instance group {instance.name}: invalid content for startSequence in instance" + ) + else: + self._log_check("104_Dev3016") + if startSequence != ["0", "0"]: + # other tests are probably not compatible of a non (0,0) startSequence + self._warning( + f"WaterLevel feature instance group {instance.name}: Values in startSequence in instance group are incompatible with the scan direction in sequencingRule" + ) + + 
self._log_check("104_Dev3022") + for idx_grp in range(1, numGRP + 1): + grp_name = "Group_%03d" % idx_grp + if grp_name not in instance.keys() or not isinstance( + instance[grp_name], h5py.Group + ): + self._critical_error( + f"WaterLevel feature instance group {instance.name}: no {grp_name} subgroup" + ) + else: + self._validate_Group_XXX( + f, + instance[grp_name], + numPointsLongitudinal, + numPointsLatitudinal, + dateTimeOfFirstRecord, + dateTimeOfLastRecord, + ) + + if "uncertainty" in instance.keys() and isinstance( + instance["uncertainty"], h5py.Dataset + ): + uncertainty = instance["uncertainty"] + if uncertainty.shape != (1,): + self._critical_error( + f"{instance.name}/uncertainty' is not a one-dimensional dataset of shape 1" + ) + elif uncertainty.dtype not in ( + [ + ("name", "O"), + ("value", "d"), + ], + [ + ("name", "O"), + ("value", "f"), + ], + ): + self._critical_error( + f"{instance.name}/uncertainty' has not expected data type" + ) + + self._validate_verticalDatum(instance.name, instance) + verticalDatum = _get_int_attr_or_none(instance, "verticalDatum") + topVerticalDatum = _get_int_attr_or_none(f, "verticalDatum") + if verticalDatum is not None and topVerticalDatum is not None: + if verticalDatum == topVerticalDatum: + self._error( + f"WaterLevel feature instance group {instance.name} has same value for 'verticalDatum' attribute as top level attribute" + ) + + def _validate_Group_XXX( + self, + f, + Group_XXX, + numPointsLongitudinal, + numPointsLatitudinal, + dateTimeOfFirstRecord, + dateTimeOfLastRecord, + ): + + # Cf Table 12-4 - Values Group attributes + attr_list = [ + AttributeDefinition( + name="timePoint", + required=True, + type="datetime", + fixed_value=None, + ), + AttributeDefinition( + name="waterLevelTrendThreshold", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="timeRecordInterval", + required=False, + type="uint32", + fixed_value=None, + ), + ] + + self._log_check("104_Dev5001") + self._check_attributes( + "Group_XXX", + Group_XXX, + attr_list, + ) + + if ( + "timePoint" in Group_XXX.attrs + and dateTimeOfFirstRecord + and dateTimeOfLastRecord + ): + timePoint = Group_XXX.attrs["timePoint"] + if isinstance(timePoint, str): + self._log_check("104_Dev5002") + if not ( + timePoint >= dateTimeOfFirstRecord + and timePoint <= dateTimeOfLastRecord + ): + self._warning( + f"{Group_XXX.name}: timePoint value not in [dateTimeOfFirstRecord, dateTimeOfLastRecord] range" + ) + + self._log_check("104_Dev5003") + if "values" not in Group_XXX.keys() or not isinstance( + Group_XXX["values"], h5py.Dataset + ): + self._critical_error(f"{Group_XXX.name}/values dataset missing") + else: + self._validate_values( + f, + Group_XXX["values"], + numPointsLongitudinal, + numPointsLatitudinal, + ) + + def _validate_values( + self, + f, + values, + numPointsLongitudinal, + numPointsLatitudinal, + ): + + self._log_check("104_Dev5005") + if len(values.shape) != 2: + self._critical_error(f"{values.name} dataset is not 2-dimensional") + return + + self._log_check("104_Dev5006") + if ( + numPointsLatitudinal + and numPointsLongitudinal + and values.shape != (numPointsLatitudinal, numPointsLongitudinal) + ): + self._critical_error( + f"{values.name} dataset shape is {values.shape} instead of {(numPointsLatitudinal, numPointsLongitudinal)}" + ) + return + + self._log_check("104_Dev5011") + values_type = values.id.get_type() + if not isinstance(values_type, h5py.h5t.TypeCompoundID): + self._critical_error(f"{values.name} type is not 
compound") + return + + self._log_check("104_Dev5012") + Group_F_WaterLevel = None + if "Group_F" in f: + Group_F = f["Group_F"] + if isinstance(Group_F, h5py.Group) and "WaterLevel" in Group_F: + Group_F_WaterLevel = Group_F["WaterLevel"] + if ( + isinstance(Group_F_WaterLevel, h5py.Dataset) + and len(Group_F_WaterLevel.shape) == 1 + ): + num_components = None + if num_components and values_type.get_nmembers() != num_components: + self._critical_error( + f"{values.name} type has {values_type.get_nmembers()} members whereas {num_components} are expected from /Group_F/WaterLevel" + ) + return + else: + Group_F_WaterLevel = None + + # Check consistency between "values" and "/Group_F/WaterLevel" + found_waterLevelHeight = False + found_waterLevelTrend = False + found_uncertainty = False + for member_idx in range(values_type.get_nmembers()): + subtype = values_type.get_member_type(member_idx) + component_name = values_type.get_member_name(member_idx) + if Group_F_WaterLevel: + expected = Group_F_WaterLevel[member_idx][0] + if component_name != expected: + self._critical_error( + f"{values.name} member {member_idx} name = {component_name} is not Group_F_WaterLevel[{member_idx}]['name']] = {expected}" + ) + assert isinstance(component_name, bytes) + if component_name == b"waterLevelHeight": + found_waterLevelHeight = True + if not self._is_float32(subtype): + self._critical_error( + f"{values.name} member {component_name} is not a float32" + ) + elif component_name == b"waterLevelTrend": + found_waterLevelTrend = True + if not self._is_enumeration(subtype): + self._critical_error( + f"{values.name} member {component_name} is not an enumeration" + ) + elif component_name == b"uncertainty": + found_uncertainty = True + if not self._is_float32(subtype): + self._critical_error( + f"{values.name} member {component_name} is not a float32" + ) + minDatasetHeight = _get_float_attr_or_none(f["WaterLevel"], "minDatasetHeight") + maxDatasetHeight = _get_float_attr_or_none(f["WaterLevel"], "maxDatasetHeight") + if found_waterLevelHeight and minDatasetHeight and maxDatasetHeight: + if minDatasetHeight > maxDatasetHeight: + self._error("minDatasetHeight > maxDatasetHeight") + else: + self._log_check("104_Dev5013") + masked_height = np.ma.masked_equal(values[:]["waterLevelHeight"], -9999) + actualMinHeight = masked_height.min() + if actualMinHeight < minDatasetHeight: + self._error( + f"{values.name} : minimum waterLevelHeight is {actualMinHeight}, whereas minDatasetHeight attribute = {minDatasetHeight}" + ) + + actualMaxHeight = masked_height.max() + if actualMaxHeight > maxDatasetHeight: + self._error( + f"{values.name} : maximum waterLevelHeight is {actualMaxHeight}, whereas maxDatasetHeight attribute = {maxDatasetHeight}" + ) + + if found_waterLevelTrend: + masked_trend = np.ma.masked_equal(values[:]["waterLevelTrend"], 0) + actualMinTrend = masked_trend.min() + if actualMinTrend < 1: + self._error( + f"{values.name} : minimum waterLevelTrend is {actualMinTrend}, whereas it should be >= 1" + ) + actualMaxTrend = masked_trend.max() + if actualMaxTrend > 3: + self._error( + f"{values.name} : maximum waterLevelTrend is {actualMaxTrend}, whereas it should be < 3" + ) + + if found_uncertainty: + masked_uncertainty = np.ma.masked_equal(values[:]["uncertainty"], -1.0) + actualMinUncertainty = masked_uncertainty.min() + if actualMinUncertainty < 0: + self._error( + f"{values.name} : minimum uncertainty is {actualMinUncertainty}, whereas it should be >= 0" + ) + + def _validate_axisNames(self, f, group): + + 
+ groupName = group.name + + self._log_check("104_Dev2012") + if "axisNames" not in group.keys(): + self._error(f"{groupName}/axisNames dataset does not exist") + elif not isinstance(group["axisNames"], h5py.Dataset): + self._error(f"{groupName}/axisNames is not a dataset") + else: + axisNames = group["axisNames"] + if axisNames.shape != (2,): + self._error( + f"{groupName}/axisNames dataset is not a one-dimensional array of length 2" + ) + else: + type = axisNames.id.get_type() + if not isinstance(type, h5py.h5t.TypeStringID): + self._error(f"{groupName}/axisNames type is not a string") + else: + values = [v.decode("utf-8") for v in axisNames[:]] + if values not in ( + ["Easting", "Northing"], + ["Latitude", "Longitude"], + ): + self._error( + f'{groupName}/axisNames must conform to CRS. Expected ["Easting", "Northing"] or ["Latitude", "Longitude"]. Got {values}' + ) + elif "horizontalCRS" in f.attrs: + horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS") + if horizontalCRS is not None: + if self._is_geographic_2D(f): + if values != ["Latitude", "Longitude"]: + self._error( + f'{groupName}/axisNames must conform to CRS. Expected ["Latitude", "Longitude"]' + ) + else: + if values != ["Easting", "Northing"]: + self._error( + f'{groupName}/axisNames must conform to CRS. Expected ["Easting", "Northing"]' + ) + + +# Public function +def check( + filename, + abort_at_first_error=False, +): + """Check specified filename and return a tuple (errors, warnings, checks_done)""" + checker = S104Checker( + filename, + abort_at_first_error=abort_at_first_error, + ) + checker.check() + return checker.errors, checker.warnings, checker.checks_done + + +def usage(): + print("Usage: validate_s104.py [-q] <filename>") + print("") + print("Validates a S104 file against the Edition 2.0 specification.") + print("") + print("-q: quiet mode. 
Only exit code indicates success (0) or error (1)") + + +def main(argv=sys.argv): + filename = None + quiet = False + + for arg in argv[1:]: + if arg == "-q": + quiet = True + elif arg == "-h": + usage() + return 0 + elif arg[0] == "-": + print(f"Invalid option: {arg}\n") + return 2 + else: + filename = arg + + if filename is None: + print("Filename missing\n") + return 2 + + errors, warnings, checks_done = check( + filename, + abort_at_first_error=False, + ) + + if not quiet: + print(f"Checks done: {checks_done}") + + if warnings: + print("") + print("Warnings:") + for msg in warnings: + print(f"Warning: {msg}") + + if errors: + print("") + print("Errors:") + for criticity, msg in errors: + print(f"{criticity}: {msg}") + print("") + print("Errors found: validation failed!") + else: + print("") + print("No errors found: validation succeeded.") + + return 1 if errors else 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) From 20965c3784489c499eaff305c4937b2d908c2571 Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Sat, 1 Nov 2025 21:07:37 +0100 Subject: [PATCH 13/20] S104: add write (CreateCopy()) support for S104 v2.0 --- .../expected_gdalinfo_formats.txt | 2 +- ...indows_conda_expected_gdalinfo_formats.txt | 2 +- apps/gdal_translate_bin.cpp | 5 +- autotest/gdrivers/s104.py | 1431 ++++++++++++++++- doc/source/drivers/raster/s104.rst | 255 ++- frmts/hdf5/hdf5drivercore.cpp | 74 + frmts/hdf5/s100.cpp | 425 ++++- frmts/hdf5/s100.h | 10 +- frmts/hdf5/s102dataset.cpp | 12 +- frmts/hdf5/s104dataset.cpp | 1153 ++++++++++++- 10 files changed, 3305 insertions(+), 64 deletions(-) diff --git a/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt b/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt index 465379ccf7c4..76d45e44dd49 100644 --- a/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt +++ b/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt @@ -103,7 +103,7 @@ Supported Formats: (ro:read-only, rw:read-write, +:write from scratch, u:update, GXF -raster- (rov): GeoSoft Grid Exchange Format (*.gxf) BAG -raster,multidimensional raster,vector- (rw+v): Bathymetry Attributed Grid (*.bag) S102 -raster,multidimensional raster- (rwvs): S-102 Bathymetric Surface Product (*.h5) - S104 -raster,multidimensional raster- (rovs): S-104 Water Level Information for Surface Navigation Product (*.h5) + S104 -raster,multidimensional raster- (rwvs): S-104 Water Level Information for Surface Navigation Product (*.h5) S111 -raster,multidimensional raster- (rovs): S-111 Surface Currents Product (*.h5) HDF5 -raster,multidimensional raster- (rovs): Hierarchical Data Format Release 5 (*.h5, *.hdf5) HDF5Image -raster- (rov): HDF5 Dataset diff --git a/.github/workflows/windows_conda_expected_gdalinfo_formats.txt b/.github/workflows/windows_conda_expected_gdalinfo_formats.txt index 2be4a3b9cd0b..868656101a21 100644 --- a/.github/workflows/windows_conda_expected_gdalinfo_formats.txt +++ b/.github/workflows/windows_conda_expected_gdalinfo_formats.txt @@ -105,7 +105,7 @@ Supported Formats: (ro:read-only, rw:read-write, +:write from scratch, u:update, KEA -raster- (rw+uv): KEA Image Format (.kea) (*.kea) BAG -raster,multidimensional raster,vector- (rw+v): Bathymetry Attributed Grid (*.bag) S102 -raster,multidimensional raster- (rwvs): S-102 Bathymetric Surface Product (*.h5) - S104 -raster,multidimensional raster- (rovs): S-104 Water Level Information for Surface Navigation Product (*.h5) + S104 -raster,multidimensional raster- (rwvs): S-104 Water Level Information for Surface 
Navigation Product (*.h5) S111 -raster,multidimensional raster- (rovs): S-111 Surface Currents Product (*.h5) HDF5 -raster,multidimensional raster- (rovs): Hierarchical Data Format Release 5 (*.h5, *.hdf5) HDF5Image -raster- (rov): HDF5 Dataset diff --git a/apps/gdal_translate_bin.cpp b/apps/gdal_translate_bin.cpp index 5133f66b62eb..9a528d4c00d3 100644 --- a/apps/gdal_translate_bin.cpp +++ b/apps/gdal_translate_bin.cpp @@ -167,7 +167,10 @@ MAIN_START(argc, argv) /* -------------------------------------------------------------------- */ if (!sOptionsForBinary.bCopySubDatasets && GDALGetRasterCount(hDataset) == 0 && - CSLCount(GDALGetMetadata(hDataset, "SUBDATASETS")) > 0) + CSLCount(GDALGetMetadata(hDataset, "SUBDATASETS")) > 0 && + // S104 driver knows how to handle a source dataset with subdatasets + // and no input bands. + !EQUAL(sOptionsForBinary.osFormat.c_str(), "S104")) { fprintf(stderr, "Input file contains subdatasets. Please, select one " "of them for reading.\n"); diff --git a/autotest/gdrivers/s104.py b/autotest/gdrivers/s104.py index 13d0312c44f2..9e5c98813a30 100755 --- a/autotest/gdrivers/s104.py +++ b/autotest/gdrivers/s104.py @@ -7,15 +7,18 @@ # Author: Even Rouault # ############################################################################### -# Copyright (c) 2023, Even Rouault +# Copyright (c) 2023-2025, Even Rouault # # SPDX-License-Identifier: MIT ############################################################################### import os import struct +import sys +import gdaltest import pytest +from test_py_scripts import samples_path from osgeo import gdal, osr @@ -325,3 +328,1429 @@ def test_s104_multiple_feature_instance_groups(): 0, ) assert ds.GetMetadataItem("VERTICAL_DATUM_MEANING") == "lowWater" + + +############################################################################### + + +def validate( + filename, expected_errors=None, expected_warnings=None, expected_check_count=None +): + + path = samples_path + if path not in sys.path: + sys.path.append(path) + try: + import validate_s104 + except ImportError: + print("Cannot import validate_s104") + return True + + errors, warnings, checks_done = validate_s104.check(filename) + + if expected_errors: + assert errors == expected_errors + else: + if errors: + print(errors) + assert not errors + + if expected_warnings: + assert warnings == expected_warnings + else: + if warnings: + print(warnings) + assert not warnings + + if expected_check_count: + assert len(checks_done) == expected_check_count + + +############################################################################### + + +def test_s104_validator(): + + # Fake product: many unconformities + expected_errors = [ + ( + "Critical error", + "/Group_F/WaterLevel: row 0, 2, got value 'metres', whereas 'metre' is expected", + ), + ("Error", "top level attribute 'issueDate' is not a valid date: 2025-10-07"), + ("Error", "top level attribute 'horizontalCRS' is not a int32"), + ("Error", "top level attribute 'westBoundLongitude' is not a float32"), + ("Error", "top level attribute 'eastBoundLongitude' is not a float32"), + ("Error", "top level attribute 'southBoundLatitude' is not a float32"), + ("Error", "top level attribute 'northBoundLatitude' is not a float32"), + ("Error", "top level attribute 'issueTime' is not a valid time: 12:34:56"), + ("Error", "top level attribute 'verticalCS' is not a int32"), + ("Error", "top level attribute 'verticalCoordinateBase' is not an enumeration"), + ("Error", "top level attribute 'verticalDatumReference' is not an 
enumeration"), + ("Error", "top level attribute 'verticalDatum' is not a int32"), + ( + "Error", + "WaterLevel group attribute 'dataCodingFormat' is not an enumeration", + ), + ("Error", "WaterLevel group attribute 'dimension' is not a uint8"), + ("Error", "WaterLevel group attribute 'commonPointRule' is not an enumeration"), + ( + "Error", + "WaterLevel group attribute 'horizontalPositionUncertainty' is not a float32", + ), + ("Error", "WaterLevel group attribute 'verticalUncertainty' is not a float32"), + ("Error", "WaterLevel group attribute 'numInstances' is not a uint32"), + ( + "Error", + "WaterLevel group attribute 'sequencingRule.type' is not an enumeration", + ), + ( + "Error", + "WaterLevel group attribute 'interpolationType' is not an enumeration", + ), + ("Error", "WaterLevel group attribute 'dataOffsetCode' is not an enumeration"), + ( + "Error", + '/WaterLevel/axisNames must conform to CRS. Expected ["Easting", "Northing"] or ["Latitude", "Longitude"]. Got [\'longitude\', \'latitude\']', + ), + ( + "Error", + "Required WaterLevel feature instance group /WaterLevel/WaterLevel.01 attribute 'dataDynamicity' is missing", + ), + ( + "Critical error", + "/WaterLevel/WaterLevel.01/Group_001/values member b'waterLevelTrend' is not an enumeration", + ), + ( + "Error", + "Required WaterLevel feature instance group /WaterLevel/WaterLevel.02 attribute 'dataDynamicity' is missing", + ), + ( + "Error", + "WaterLevel feature instance group /WaterLevel/WaterLevel.02 attribute 'verticalDatumReference' is not an enumeration", + ), + ( + "Error", + "WaterLevel feature instance group /WaterLevel/WaterLevel.02 attribute 'verticalDatum' is not a int32", + ), + ( + "Critical error", + "/WaterLevel/WaterLevel.02/Group_001/values member b'waterLevelTrend' is not an enumeration", + ), + ] + expected_warnings = [ + "File name should start with '104'", + "File name 'multiple_feature_instance_groups.h5' does not match expected pattern '^104[a-zA-Z0-9]{4}[a-zA-Z0-9\\-_]{1,54}\\.(?:h5|H5)$'", + ] + validate( + "data/s104/multiple_feature_instance_groups.h5", + expected_errors=expected_errors, + expected_warnings=expected_warnings, + ) + + +############################################################################### + + +def test_s104_write_errors(tmp_vsimem): + + with pytest.raises( + Exception, match="Source dataset x must have two or three bands" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + gdal.GetDriverByName("MEM").Create("x", 2, 2), + format="S104", + ) + + with pytest.raises( + Exception, match="Source dataset dimension must be at least 1x1 pixel" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + gdal.GetDriverByName("MEM").Create("x", 0, 0, 2), + format="S104", + ) + + with pytest.raises( + Exception, match="S104 driver requires a source dataset with a geotransform" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + gdal.GetDriverByName("MEM").Create("", 1, 1, 2), + format="S104", + ) + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + with pytest.raises( + Exception, match="S104 driver requires a source dataset with a geotransform" + ): + gdal.Translate(tmp_vsimem / "104xxxxyyyy.h5", src_ds, format="S104") + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0.2, 0, 0, 1]) + with pytest.raises( + Exception, + match="S104 driver requires a source dataset with a non-rotated geotransform", + ): + gdal.Translate(tmp_vsimem / "104xxxxyyyy.h5", src_ds, format="S104") + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 
1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + with pytest.raises( + Exception, match="S104 driver requires a source dataset with a CRS" + ): + gdal.Translate(tmp_vsimem / "104xxxxyyyy.h5", src_ds, format="S104") + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + srs = osr.SpatialReference() + srs.ImportFromProj4("+proj=longlat") + src_ds.SetSpatialRef(srs) + with pytest.raises( + Exception, match="VERTICAL_DATUM creation option must be specified" + ): + gdal.Translate(tmp_vsimem / "104xxxxyyyy.h5", src_ds, format="S104") + + src_ds = gdal.GetDriverByName("MEM").Create("x", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with pytest.raises(Exception, match="VERTICAL_DATUM value is invalid"): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=["VERTICAL_DATUM=invalid"], + ) + + with pytest.raises( + Exception, + match="TIME_POINT creation option value must be set, or source dataset must have a timePoint metadata item", + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=["VERTICAL_DATUM=MLLW"], + ) + + with pytest.raises( + Exception, + match="TIME_POINT creation option value must be set to a YYYYMMDDTHHMMSSZ datetime value.", + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=["VERTICAL_DATUM=MLLW", "TIME_POINT=invalid"], + ) + + with pytest.raises( + Exception, match="VERTICAL_CS creation option must be specified" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=["VERTICAL_DATUM=MLLW", "TIME_POINT=20251104T225013Z"], + ) + + with pytest.raises( + Exception, + match=r"VERTICAL_CS creation option must be set either to 6498 \(depth/down, metre\), or 6499 \(height/up, metre\)", + ): + with gdal.quiet_errors(): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=invalid", + ], + ) + + with pytest.raises( + Exception, match="WATER_LEVEL_TREND_THRESHOLD creation option must be specified" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + ], + ) + + with pytest.raises( + Exception, + match="WATER_LEVEL_TREND_THRESHOLD creation option value must be a numeric value", + ): + with gdal.quiet_errors(): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=invalid", + ], + ) + + with pytest.raises( + Exception, match="DATA_DYNAMICITY creation option must be specified" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + ], + ) + + with pytest.raises( + Exception, + match="DATA_DYNAMICITY creation option must be set to observation/1, astronomicalPrediction/2, analysisOrHybrid/3 or hydrodynamicForecast/5", + ): + with gdal.quiet_errors(): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + 
"WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=invalid", + ], + ) + + with pytest.raises( + Exception, match="Cannot create file /i/do_not/exist/104xxxxyyyy.h5" + ): + gdal.Translate( + tmp_vsimem / "/i/do_not/exist/104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + ], + ) + + with pytest.raises(Exception, match="non_existing"): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + "DATASETS=non_existing", + ], + ) + + with pytest.raises( + Exception, match="Dataset data/byte.tif does not have the same dimensions as x" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + "DATASETS=data/byte.tif", + ], + ) + + with pytest.raises( + Exception, + match="DATASETS_TIME_POINT does not have the same number of values as DATASETS", + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + "DATASETS=data/byte.tif", + "DATASETS_TIME_POINT=foo,bar", + ], + ) + + gdal.GetDriverByName("GTiff").CreateCopy(tmp_vsimem / "aux.tif", src_ds) + + with pytest.raises( + Exception, + match="Dataset /vsimem/test_s104_write_errors/aux.tif does not have a timePoint metadata item, and the DATASETS_TIME_POINT creation option is not set", + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + f"DATASETS={tmp_vsimem}/aux.tif", + ], + ) + + with pytest.raises( + Exception, + match="timePoint value for dataset /vsimem/test_s104_write_errors/aux.tif is invalid, but does not conform to a YYYYMMDDTHHMMSSZ datetime value", + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + f"DATASETS={tmp_vsimem}/aux.tif", + "DATASETS_TIME_POINT=invalid", + ], + ) + + with pytest.raises( + Exception, + match=r"Several datasets are at timePoint 20251104T225013Z \(/vsimem/test_s104_write_errors/aux.tif vs x\)", + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + f"DATASETS={tmp_vsimem}/aux.tif", + "DATASETS_TIME_POINT=20251104T225013Z", + ], + ) + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + srs = osr.SpatialReference() + srs.ImportFromProj4("+proj=eqc") + src_ds.SetSpatialRef(srs) + with pytest.raises( + Exception, match="Projection method Equirectangular is not supported by S100" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + 
"VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + ], + ) + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + srs = osr.SpatialReference() + srs.ImportFromProj4("+proj=merc +a=1") + src_ds.SetSpatialRef(srs) + with pytest.raises(Exception, match="Unknown code for ellipsoid of CRS"): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + ], + ) + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + srs = osr.SpatialReference() + srs.ImportFromProj4("+proj=merc +ellps=GRS80 +pm=5") + src_ds.SetSpatialRef(srs) + with pytest.raises(Exception, match="Unknown code for prime meridian of CRS"): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + ], + ) + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + srs = osr.SpatialReference() + srs.SetFromUserInput("EPSG:4326+3855") + src_ds.SetSpatialRef(srs) + with pytest.raises( + Exception, match="The CRS must be a geographic 2D or projected 2D CRS" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + ], + ) + + +############################################################################### + + +def test_s104_write_warnings(tmp_vsimem): + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised( + gdal.CE_Warning, match="S104 dataset filenames should start with '104'" + ): + gdal.Translate( + tmp_vsimem / "non_conformant_prefix.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + ], + ) + + with gdaltest.error_raised( + gdal.CE_Warning, match="S104 dataset filenames should have a '.H5' extension" + ): + gdal.Translate( + tmp_vsimem / "104xxxxyyyy.oops", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + ], + ) + + +############################################################################### + + +def test_s104_write_basic(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 2, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + 
"TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.25", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251104", + "ISSUE_TIME=231403Z", + ], + ) + + with gdal.Open(f'S104:"{tmp_path}/104xxxxyyyy.h5":Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("B" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert ds.GetMetadata_Dict() == { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "VERTICAL_CS_MEANING": "depth, meters, orientation down", + "VERTICAL_DATUM_ABBREV": "MLLW", + "VERTICAL_DATUM_MEANING": "meanLowerLowWater", + "commonPointRule": "4", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251104T225013Z", + "dateTimeOfLastRecord": "20251104T225013Z", + "horizontalPositionUncertainty": "-1", + "issueDate": "20251104", + "issueTime": "231403Z", + "maxDatasetHeight": "9.5", + "minDatasetHeight": "1.5", + "numberOfTimes": "1", + "timePoint": "20251104T225013Z", + "uncertainty": "-1.000000", + "verticalCS": "6498", + "verticalUncertainty": "-1", + "waterLevelTrendThreshold": "0.25", + } + + validate( + tmp_path / "104xxxxyyyy.h5", + expected_check_count=57, + ) + + +############################################################################### + + +def test_s104_write_with_uncertainty_band(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 3, gdal.GDT_Float32) + src_ds.GetRasterBand(1).SetNoDataValue(-1.0) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, -1.0, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.GetRasterBand(3).SetNoDataValue(-1.0) + src_ds.GetRasterBand(3).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, -1.0, 2, 3, 4, 5, 6, 7, 8, 9) + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.25", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251104", + "ISSUE_TIME=231403Z", + ], + ) + + with gdal.Open(f'S104:"{tmp_path}/104xxxxyyyy.h5":Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + -9999.0, + 2.5, + 3.5, + ) + assert struct.unpack("B" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(3).ReadRaster()) == ( + 7, + 8, + 9, + 4, + 5, + 6, + -1.0, + 2, + 3, + ) + assert ds.GetMetadata_Dict() == { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "VERTICAL_CS_MEANING": "depth, meters, orientation down", + "VERTICAL_DATUM_ABBREV": "MLLW", + "VERTICAL_DATUM_MEANING": "meanLowerLowWater", + "commonPointRule": "4", + "dataDynamicity": "5", + 
"dateTimeOfFirstRecord": "20251104T225013Z", + "dateTimeOfLastRecord": "20251104T225013Z", + "horizontalPositionUncertainty": "-1", + "issueDate": "20251104", + "issueTime": "231403Z", + "maxDatasetHeight": "9.5", + "minDatasetHeight": "2.5", + "numberOfTimes": "1", + "timePoint": "20251104T225013Z", + "verticalCS": "6498", + "verticalUncertainty": "-1", + "waterLevelTrendThreshold": "0.25", + } + + validate( + tmp_path / "104xxxxyyyy.h5", + expected_check_count=57, + ) + + +############################################################################### + + +@pytest.mark.parametrize( + "proj4,out_proj4", + [ + ("+proj=longlat +ellps=GRS80 +pm=paris +no_defs", None), + ( + "+proj=merc +lat_ts=1.5 +lon_0=2.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=merc +lat_0=0 +lon_0=2.5 +k=0.99 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + "+proj=merc +lat_ts=8.13653121977138 +lon_0=2.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + ), + ( + "+proj=tmerc +lat_0=1.5 +lon_0=2.5 +k=0.99 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=omerc +lat_0=1.5 +lonc=2.5 +alpha=3.5 +gamma=4.5 +k=0.99 +x_0=5.5 +y_0=6.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=omerc +no_uoff +lat_0=1.5 +lonc=2.5 +alpha=3.5 +gamma=4.5 +k=0.99 +x_0=5.5 +y_0=6.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=lcc +lat_0=1.5 +lon_0=4.5 +lat_1=2.5 +lat_2=3.5 +x_0=5.5 +y_0=6.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=lcc +lat_1=49 +lat_0=49 +lon_0=4.5 +k_0=0.99 +x_0=5.5 +y_0=6.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=sterea +lat_0=1.5 +lon_0=2.5 +k=0.9 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=stere +lat_0=90 +lon_0=2.5 +k=0.9 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=krovak +axis=swu +lat_0=49.5 +lon_0=42.5 +alpha=30.2881397527778 +k=0.9999 +x_0=0 +y_0=0 +ellps=bessel +pm=ferro +units=m +no_defs", + None, + ), + ( + "+proj=poly +lat_0=1.5 +lon_0=2.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=aea +lat_0=1.5 +lon_0=4.5 +lat_1=2.5 +lat_2=3.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=laea +lat_0=1.5 +lon_0=2.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ], +) +def test_s104_write_custom_crs(tmp_path, proj4, out_proj4): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 2, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + if proj4 == "+proj=longlat +ellps=GRS80 +pm=paris +no_defs": + src_ds.SetGeoTransform([2, 1.1, 0, 49, 0, 1.2]) + else: + src_ds.SetGeoTransform([1000, 1.1, 0, 10000, 0, 1.2]) + srs = osr.SpatialReference() + srs.ImportFromProj4(proj4) + src_ds.SetSpatialRef(srs) + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.2", + "DATA_DYNAMICITY=5", + ], + ) + + ds = gdal.Open(f'S104:"{tmp_path}/104xxxxyyyy.h5":Group_001') + assert ds.GetSpatialRef().ExportToProj4() == (out_proj4 if out_proj4 else proj4) + + validate( + tmp_path / "104xxxxyyyy.h5", + expected_check_count=56, + ) + + 
+############################################################################### + + +@pytest.mark.parametrize( + "value,warning_msg,validate_warning", + [ + ( + -100.5, + "Range of water level height in the dataset is [-100.500000, 9.500000] whereas the allowed range is [-99.99, 99.99]", + "/WaterLevel: minDatasetHeight=-100.5 should be in [-99.99, 99.99] range", + ), + ( + 100.5, + "Range of water level height in the dataset is [2.500000, 100.500000] whereas the allowed range is [-99.99, 99.99]", + "/WaterLevel: maxDatasetHeight=100.5 should be in [-99.99, 99.99] range", + ), + ], +) +def test_s104_write_out_of_range_water_height( + tmp_path, value, warning_msg, validate_warning +): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 2, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, value, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised(gdal.CE_Warning, match=warning_msg): + gdal.Translate( + tmp_path / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.25", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251104", + "ISSUE_TIME=231403Z", + ], + ) + + validate(tmp_path / "104xxxxyyyy.h5", expected_warnings=[validate_warning]) + + +############################################################################### + + +def test_s104_write_out_of_range_uncertainty(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 3, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.GetRasterBand(3).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, -10, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised( + gdal.CE_Warning, + match="Negative uncertainty value found (-10.000000), which is not allowed (except nodata value -1.0)", + ): + gdal.Translate( + tmp_path / "104xxxxyyyy.h5", + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.25", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251104", + "ISSUE_TIME=231403Z", + ], + ) + + validate( + tmp_path / "104xxxxyyyy.h5", + expected_errors=[ + ( + "Error", + "/WaterLevel/WaterLevel.01/Group_001/values : minimum uncertainty is -10.0, whereas it should be >= 0", + ) + ], + ) + + +############################################################################### + + +def test_s104_write_large_file(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 1200, 1200, 3, gdal.GDT_Float32) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + filename = str(tmp_path / "104xxxxyyyy.h5") + with gdaltest.error_raised( + gdal.CE_Warning, + match="file size exceeds 10 MB", + ): + gdal.Translate( + filename, + src_ds, + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "TIME_POINT=20251104T225013Z", + "VERTICAL_CS=DEPTH", + 
"WATER_LEVEL_TREND_THRESHOLD=0.25", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251104", + "ISSUE_TIME=231403Z", + "COMPRESS=NONE", + ], + ) + + expected_warnings = [ + f"File size of {filename} = 12988072, which exceeds 10 MB", + ] + validate(filename, expected_warnings=expected_warnings) + + +############################################################################### + + +def test_s104_write_multiple_timestamps(tmp_path): + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in1.tif", 3, 3, 2, gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + ds.SetMetadataItem("timePoint", "20251104T120000Z") + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in2.tif", 3, 3, 2, gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + ds.SetMetadataItem("timePoint", "20251104T130000Z") + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in3.tif", 3, 3, 2, gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + ds.SetMetadataItem("timePoint", "20251104T140000Z") + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "104xxxxyyyy.h5", + gdal.Open(tmp_path / "in1.tif"), + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.25", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251104", + "ISSUE_TIME=231403Z", + "UNCERTAINTY=1", + f"DATASETS={tmp_path}/in1.tif,{tmp_path}/in2.tif,{tmp_path}/in3.tif", + ], + ) + + with gdal.Open(f'S104:"{tmp_path}/104xxxxyyyy.h5":Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("B" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert ds.GetMetadata_Dict() == { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "VERTICAL_CS_MEANING": "depth, meters, orientation down", + "VERTICAL_DATUM_ABBREV": "MLLW", + "VERTICAL_DATUM_MEANING": "meanLowerLowWater", + "commonPointRule": "4", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251104T120000Z", + "dateTimeOfLastRecord": "20251104T140000Z", + "horizontalPositionUncertainty": "-1", + "issueDate": "20251104", + "issueTime": "231403Z", + "maxDatasetHeight": "9.5", + "minDatasetHeight": "1.5", + "numberOfTimes": "3", + "timePoint": "20251104T120000Z", + "timeRecordInterval": "3600", + "uncertainty": "1.000000", + "verticalCS": "6498", + 
"verticalUncertainty": "-1", + "waterLevelTrendThreshold": "0.25", + } + + validate( + tmp_path / "104xxxxyyyy.h5", + expected_check_count=57, + ) + + # Test S104->S104 translation + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "104xxxxyyyybis.h5", + gdal.Open(tmp_path / "104xxxxyyyy.h5"), + format="S104", + ) + + with gdal.Open(f'S104:"{tmp_path}/104xxxxyyyybis.h5":Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("B" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert ds.GetMetadata_Dict() == { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "VERTICAL_CS_MEANING": "depth, meters, orientation down", + "VERTICAL_DATUM_ABBREV": "MLLW", + "VERTICAL_DATUM_MEANING": "meanLowerLowWater", + "commonPointRule": "4", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251104T120000Z", + "dateTimeOfLastRecord": "20251104T140000Z", + "horizontalPositionUncertainty": "-1", + "issueDate": "20251104", + "issueTime": "231403Z", + "maxDatasetHeight": "9.5", + "minDatasetHeight": "1.5", + "numberOfTimes": "3", + "timePoint": "20251104T120000Z", + "timeRecordInterval": "3600", + "uncertainty": "1.000000", + "verticalCS": "6498", + "verticalUncertainty": "-1", + "waterLevelTrendThreshold": "0.25", + } + + validate( + tmp_path / "104xxxxyyyybis.h5", + expected_check_count=57, + ) + + +############################################################################### + + +def test_s104_write_multiple_vertical_datums(tmp_path): + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in1.tif", 3, 3, 2, gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + ds.SetMetadataItem("timePoint", "20251104T120000Z") + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "104xxxxyyyy.h5", + gdal.Open(tmp_path / "in1.tif"), + format="S104", + creationOptions=[ + "VERTICAL_DATUM=MLLW", + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.25", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251104", + "ISSUE_TIME=231403Z", + ], + ) + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in2.tif", 3, 3, 2, gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + ds.SetMetadataItem("timePoint", "20251104T120000Z") + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "104xxxxyyyy.h5", + gdal.Open(tmp_path / "in2.tif"), + format="S104", + creationOptions=[ + "VERTICAL_DATUM=1027", # EGM2008 geoid + "VERTICAL_CS=DEPTH", + "WATER_LEVEL_TREND_THRESHOLD=0.25", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251104", + "ISSUE_TIME=231403Z", + "APPEND_SUBDATASET=YES", + ], + ) + 
+ with gdal.Open(f'S104:"{tmp_path}/104xxxxyyyy.h5":WaterLevel.01:Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("B" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + dict1 = { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "VERTICAL_CS_MEANING": "depth, meters, orientation down", + "VERTICAL_DATUM_ABBREV": "MLLW", + "VERTICAL_DATUM_MEANING": "meanLowerLowWater", + "commonPointRule": "4", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251104T120000Z", + "dateTimeOfLastRecord": "20251104T120000Z", + "horizontalPositionUncertainty": "-1", + "issueDate": "20251104", + "issueTime": "231403Z", + "maxDatasetHeight": "9.5", + "minDatasetHeight": "1.5", + "numberOfTimes": "1", + "timePoint": "20251104T120000Z", + "uncertainty": "-1.000000", + "verticalCS": "6498", + "verticalUncertainty": "-1", + "waterLevelTrendThreshold": "0.25", + } + assert ds.GetMetadata_Dict() == dict1 + + with gdal.Open(f'S104:"{tmp_path}/104xxxxyyyy.h5":WaterLevel.02:Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("B" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + dict2 = { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "VERTICAL_CS_MEANING": "depth, meters, orientation down", + "VERTICAL_DATUM_EPSG_CODE": "1027", + "VERTICAL_DATUM_NAME": "EGM2008 geoid", + "commonPointRule": "4", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251104T120000Z", + "dateTimeOfLastRecord": "20251104T120000Z", + "horizontalPositionUncertainty": "-1", + "issueDate": "20251104", + "issueTime": "231403Z", + "maxDatasetHeight": "9.5", + "minDatasetHeight": "1.5", + "numberOfTimes": "1", + "timePoint": "20251104T120000Z", + "uncertainty": "-1.000000", + "verticalCS": "6498", + "verticalUncertainty": "-1", + "waterLevelTrendThreshold": "0.25", + } + assert ds.GetMetadata_Dict() == dict2 + + validate( + tmp_path / "104xxxxyyyy.h5", + expected_check_count=57, + ) + + # Test S104->S104 translation + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "104xxxxyyyybis.h5", + gdal.Open(tmp_path / "104xxxxyyyy.h5"), + format="S104", + ) + + with gdal.Open( + f'S104:"{tmp_path}/104xxxxyyyybis.h5":WaterLevel.01:Group_001' + ) as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("B" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert ds.GetMetadata_Dict() == dict1 + + with gdal.Open( + f'S104:"{tmp_path}/104xxxxyyyybis.h5":WaterLevel.02:Group_001' + ) as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == 
pytest.approx(
+            (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2)
+        )
+        assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == (
+            7.5,
+            8.5,
+            9.5,
+            4.5,
+            5.5,
+            6.5,
+            1.5,
+            2.5,
+            3.5,
+        )
+        assert struct.unpack("B" * 9, ds.GetRasterBand(2).ReadRaster()) == (
+            2,
+            0,
+            1,
+            0,
+            1,
+            2,
+            1,
+            2,
+            3,
+        )
+        assert ds.GetMetadata_Dict() == dict2
+
+    validate(
+        tmp_path / "104xxxxyyyybis.h5",
+        expected_check_count=57,
+    )
diff --git a/doc/source/drivers/raster/s104.rst b/doc/source/drivers/raster/s104.rst
index 010c367042cf..a4769202c5ee 100644
--- a/doc/source/drivers/raster/s104.rst
+++ b/doc/source/drivers/raster/s104.rst
@@ -10,7 +10,7 @@ S104 -- S-104 Water Level Information for Surface Navigation Product
 
 .. versionadded:: 3.9
 
-This driver provides read-only support for water level data in the S-104 format,
+This driver provides support for water level data in the S-104 format,
 which is a specific product profile in an HDF5 file.
 
 S-104 files have two image bands representing water level height (band 1)
@@ -32,9 +32,13 @@ using different vertical datums) are supported. In that case, each feature
 instance group and timestamp group is exposed as a GDAL subdataset, whose name
 is of the form ``S104:"{filename.h5}":WaterLevel.{XX}:Group_{YYY}``.
 
+Write support for S-104 v2.0 has been added in GDAL 3.13.
+
 Driver capabilities
 -------------------
 
+.. supports_createcopy::
+
 .. supports_georeferencing::
 
 .. supports_virtualio::
@@ -57,6 +61,247 @@ The following open options are supported:
    exposed by the driver by setting this option to NO (in which case the
    6th term of the geotransform matrix will be positive)
 
+Write support
+-------------
+
+.. versionadded:: 3.13
+
+Creation of an S-104 v2.0 dataset from another existing GDAL-supported dataset
+is possible using the :cpp:func:`GDALDriver::CreateCopy` function, or utilities
+like :ref:`gdal_translate` or :ref:`gdal_raster_convert`. The input dataset
+must have two or three bands. The first band must contain the water level
+height in meters, and the second band must contain the water level trend
+value with the following codes:
+
+- 0 = nodata
+- 1 = decreasing
+- 2 = increasing
+- 3 = steady
+
+The third band, when present, must contain the uncertainty of the water level
+heights (in meters).
+
+If several grids are available at different timestamps, they can be provided
+with the :co:`DATASETS` creation option, possibly with the
+:co:`DATASETS_TIME_POINT` creation option if the datasets do not have a
+``timePoint`` metadata item.
+
+If several vertical datums are needed, the :co:`APPEND_SUBDATASET` creation
+option can be set to ``YES`` to add an extra feature instance group
+(``WaterLevel.XX``) to an existing S-104 dataset.
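+
+For instance (a minimal Python sketch mirroring the driver test suite; file
+names are placeholders), a two-band source raster can be converted with::
+
+    from osgeo import gdal
+
+    gdal.UseExceptions()
+
+    # Band 1: water level height (Float32, meters)
+    # Band 2: water level trend (codes 0 to 3)
+    gdal.Translate(
+        "104XXPLACEHOLDER.h5",
+        "water_level_height_and_trend.tif",
+        format="S104",
+        creationOptions=[
+            "VERTICAL_DATUM=MLLW",
+            "TIME_POINT=20251104T225013Z",
+            "VERTICAL_CS=DEPTH",
+            "WATER_LEVEL_TREND_THRESHOLD=0.2",
+            "DATA_DYNAMICITY=hydrodynamicForecast",
+        ],
+    )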
+
+The following creation options are available:
+
+- .. co:: TIME_POINT
+
+     Timestamp in ``YYYYMMDDTHHMMSSZ`` format (required).
+
+- .. co:: VERTICAL_DATUM
+
+     Vertical datum. This is a required creation option.
+
+     Possible values are either:
+
+     - an S100 vertical datum, expressed as a numeric code in the 1 to 30
+       range or one of the values 44, 46, 47, 48 or 49, or as its string
+       meaning or abbreviation from the following list:
+
+       * 1: ``meanLowWaterSprings`` / ``MLWS``
+       * 2: ``meanLowerLowWaterSprings``
+       * 3: ``meanSeaLevel`` / ``MSL``
+       * 4: ``lowestLowWater``
+       * 5: ``meanLowWater`` / ``MLW``
+       * 6: ``lowestLowWaterSprings``
+       * 7: ``approximateMeanLowWaterSprings``
+       * 8: ``indianSpringLowWater``
+       * 9: ``lowWaterSprings``
+       * 10: ``approximateLowestAstronomicalTide``
+       * 11: ``nearlyLowestLowWater``
+       * 12: ``meanLowerLowWater`` / ``MLLW``
+       * 13: ``lowWater`` / ``LW``
+       * 14: ``approximateMeanLowWater``
+       * 15: ``approximateMeanLowerLowWater``
+       * 16: ``meanHighWater`` / ``MHW``
+       * 17: ``meanHighWaterSprings`` / ``MHWS``
+       * 18: ``highWater`` / ``HW``
+       * 19: ``approximateMeanSeaLevel``
+       * 20: ``highWaterSprings``
+       * 21: ``meanHigherHighWater`` / ``MHHW``
+       * 22: ``equinoctialSpringLowWater``
+       * 23: ``lowestAstronomicalTide`` / ``LAT``
+       * 24: ``localDatum``
+       * 25: ``internationalGreatLakesDatum1985``
+       * 26: ``meanWaterLevel``
+       * 27: ``lowerLowWaterLargeTide``
+       * 28: ``higherHighWaterLargeTide``
+       * 29: ``nearlyHighestHighWater``
+       * 30: ``highestAstronomicalTide`` / ``HAT``
+       * 44: ``balticSeaChartDatum2000``
+       * 46: ``internationalGreatLakesDatum2020``
+       * 47: ``seaFloor``
+       * 48: ``seaSurface``
+       * 49: ``hydrographicZero``
+
+     - an EPSG vertical datum code
+
+- .. co:: VERTICAL_CS
+     :choices: DEPTH, HEIGHT
+
+     Vertical coordinate system. This is a required creation option.
+
+     Depth is the down direction relative to the vertical datum surface, and
+     height the up direction, both with meter as unit.
+
+- .. co:: WATER_LEVEL_TREND_THRESHOLD
+
+     Critical value used to determine a steady water level trend.
+     Units are meters/hour (m/hr).
+     This is a required creation option.
+
+- .. co:: DATA_DYNAMICITY
+     :choices: observation, astronomicalPrediction, analysisOrHybrid, hydrodynamicForecast
+
+     Classification of data according to the relationship between the time of
+     its collection, generation, or calculation of generation parameters,
+     in relation to the time of publication of the dataset.
+     This is a required creation option.
+
+- .. co:: DATASETS
+
+     Comma-separated list of datasets at different timestamps. If the datasets
+     do not have a ``timePoint`` metadata item, the :co:`DATASETS_TIME_POINT`
+     creation option must be specified.
+
+     The source dataset may or may not be included in the :co:`DATASETS` list.
+
+- .. co:: DATASETS_TIME_POINT
+
+     Comma-separated list of timestamps. It must have the same number of
+     values as the :co:`DATASETS` creation option.
+     Each time point value must be specified as a ``YYYYMMDDTHHMMSSZ`` timestamp.
+
+- .. co:: ISSUE_DATE
+
+     Issue date as ``YYYYMMDD``.
+
+     If not specified, defaults to the current date.
+
+- .. co:: ISSUE_TIME
+
+     Issue time as ``hhmmssZ`` or ``hhmmss±HHMM``.
+
+     If not specified, defaults to the current time (in Z timezone).
+
+- .. co:: TREND_INTERVAL
+
+     Interval, in minutes, over which the trend at a particular time is
+     calculated.
+
+- .. co:: DATASET_DELIVERY_INTERVAL
+
+     Expected time interval between availability of successive datasets for
+     time-varying data.
+     Must be formatted as ``PnYnMnDTnHnMnS`` (ISO-8601 duration).
+
+- .. co:: TIME_RECORD_INTERVAL
+
+     Interval in seconds between time records.
+
+- .. co:: COMMON_POINT_RULE
+     :choices: average, low, high, all
+     :default: all
+
+     Procedure used for evaluating the coverage at a position that falls on
+     the boundary or in an area of overlap between geographic objects.
+
+- .. co:: UNCERTAINTY
+
+     Uncertainty of the water level height values, in meters.
+
+- .. co:: HORIZONTAL_POSITION_UNCERTAINTY
+
+     Horizontal position uncertainty, in meters.
+
+- .. co:: VERTICAL_UNCERTAINTY
+
+     Vertical uncertainty, in meters.
+
+- .. co:: TIME_UNCERTAINTY
+
+     Time uncertainty, in seconds.
+
+- .. co:: COMPRESS
+     :choices: NONE, DEFLATE
+     :default: DEFLATE
+
+     Compression for the water level and uncertainty grids.
+
+- .. co:: ZLEVEL
+     :choices: 1-9
+     :default: 6
+
+     Deflate compression level.
+
+- .. co:: BLOCK_SIZE
+
+     Chunking size of the HDF5 arrays. Defaults to 100, or to the maximum
+     dimension of the raster if that is smaller than 100.
+
+- .. co:: APPEND_SUBDATASET
+     :choices: YES, NO
+     :default: NO
+
+     Whether to append the new dataset to an existing S-104 dataset as an
+     extra feature instance group (``WaterLevel.XX``); see the sketch after
+     this list.
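+
+As an illustration of :co:`APPEND_SUBDATASET` (a minimal sketch based on the
+driver test suite; file names are placeholders), a second vertical datum can
+be appended to an existing file as a new ``WaterLevel.02`` feature instance
+group::
+
+    from osgeo import gdal
+
+    gdal.UseExceptions()
+
+    # Append a second feature instance group using the EGM2008 geoid
+    # (EPSG vertical datum code 1027) as the vertical datum.
+    gdal.Translate(
+        "104XXPLACEHOLDER.h5",
+        "water_level_egm2008.tif",
+        format="S104",
+        creationOptions=[
+            "APPEND_SUBDATASET=YES",
+            "VERTICAL_DATUM=1027",  # EGM2008 geoid
+            "VERTICAL_CS=DEPTH",
+            "TIME_POINT=20251104T120000Z",
+            "WATER_LEVEL_TREND_THRESHOLD=0.25",
+            "DATA_DYNAMICITY=hydrodynamicForecast",
+        ],
+    )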
+
+
+Validation script
+-----------------
+
+.. versionadded:: 3.13
+
+The Python script :source_file:`swig/python/gdal-utils/osgeo_utils/samples/validate_s104.py`
+can be used to validate the conformity of an S-104 v2.0 dataset against the
+specification. It requires the ``h5py`` Python module to be installed
+(typically through ``pip install h5py``).
+
+Its usage is:
+
+::
+
+    $ python validate_s104.py 104TESTXXXX.h5
+
+
+Note that the GDAL S-104 reader is more tolerant than the validation script
+and can read files with slight non-conformities.
+
+
+Examples
+--------
+
+- Converting a GeoTIFF with water level height and trend, with the minimum
+  required creation options:
+
+  ::
+
+      $ gdal_translate water_level_height_and_trend.tif 104TESTXXXX.h5 -of S104 \
+            -co TIME_POINT=20251105T012600Z \
+            -co VERTICAL_DATUM=MLLW \
+            -co VERTICAL_CS=HEIGHT \
+            -co WATER_LEVEL_TREND_THRESHOLD=0.2 \
+            -co DATA_DYNAMICITY=hydrodynamicForecast
+
+
 See Also
 --------
@@ -65,3 +310,11 @@ See Also
 - :ref:`BAG driver <raster.bag>`
 - :ref:`S-102 driver <raster.s102>`
 - :ref:`S-111 driver <raster.s111>`
+
+
+.. below is an allow-list for spelling checker.
+
+.. 
spelling:word-list:: + hhmmssZ + hhmmss + HHMM diff --git a/frmts/hdf5/hdf5drivercore.cpp b/frmts/hdf5/hdf5drivercore.cpp index ffca313619ec..8089c47dfb2c 100644 --- a/frmts/hdf5/hdf5drivercore.cpp +++ b/frmts/hdf5/hdf5drivercore.cpp @@ -598,6 +598,7 @@ void S104DriverSetCommonMetadata(GDALDriver *poDriver) poDriver->SetMetadataItem(GDAL_DCAP_VIRTUALIO, "YES"); poDriver->SetMetadataItem(GDAL_DMD_EXTENSION, "h5"); poDriver->SetMetadataItem(GDAL_DMD_SUBDATASETS, "YES"); + poDriver->SetMetadataItem(GDAL_DCAP_CREATE_SUBDATASETS, "YES"); poDriver->SetMetadataItem( GDAL_DMD_OPENOPTIONLIST, @@ -606,8 +607,81 @@ void S104DriverSetCommonMetadata(GDALDriver *poDriver) "description='Whether the top line of the dataset should be the " "northern-most one'/>" ""); + + poDriver->SetMetadataItem( + GDAL_DMD_CREATIONOPTIONLIST, + "" + " " + " " + " " + " " + " "); + poDriver->pfnIdentify = S104DatasetIdentify; poDriver->SetMetadataItem(GDAL_DCAP_OPEN, "YES"); + poDriver->SetMetadataItem(GDAL_DCAP_CREATECOPY, "YES"); } /************************************************************************/ diff --git a/frmts/hdf5/s100.cpp b/frmts/hdf5/s100.cpp index fcdbd28bd1ba..4f4e43a4c473 100644 --- a/frmts/hdf5/s100.cpp +++ b/frmts/hdf5/s100.cpp @@ -24,6 +24,7 @@ #include #include #include +#include /************************************************************************/ /* S100BaseDataset() */ @@ -108,45 +109,45 @@ char **S100BaseDataset::GetFileList() /* S100ReadSRS() */ /************************************************************************/ +constexpr int PROJECTION_METHOD_MERCATOR = 9805; +static_assert(PROJECTION_METHOD_MERCATOR == + EPSG_CODE_METHOD_MERCATOR_VARIANT_B); +constexpr int PROJECTION_METHOD_TRANSVERSE_MERCATOR = 9807; +static_assert(PROJECTION_METHOD_TRANSVERSE_MERCATOR == + EPSG_CODE_METHOD_TRANSVERSE_MERCATOR); +constexpr int PROJECTION_METHOD_OBLIQUE_MERCATOR = 9815; +static_assert(PROJECTION_METHOD_OBLIQUE_MERCATOR == + EPSG_CODE_METHOD_HOTINE_OBLIQUE_MERCATOR_VARIANT_B); +constexpr int PROJECTION_METHOD_HOTINE_OBLIQUE_MERCATOR = 9812; +static_assert(PROJECTION_METHOD_HOTINE_OBLIQUE_MERCATOR == + EPSG_CODE_METHOD_HOTINE_OBLIQUE_MERCATOR_VARIANT_A); +constexpr int PROJECTION_METHOD_LCC_1SP = 9801; +static_assert(PROJECTION_METHOD_LCC_1SP == + EPSG_CODE_METHOD_LAMBERT_CONIC_CONFORMAL_1SP); +constexpr int PROJECTION_METHOD_LCC_2SP = 9802; +static_assert(PROJECTION_METHOD_LCC_2SP == + EPSG_CODE_METHOD_LAMBERT_CONIC_CONFORMAL_2SP); +constexpr int PROJECTION_METHOD_OBLIQUE_STEREOGRAPHIC = 9809; +static_assert(PROJECTION_METHOD_OBLIQUE_STEREOGRAPHIC == + EPSG_CODE_METHOD_OBLIQUE_STEREOGRAPHIC); +constexpr int PROJECTION_METHOD_POLAR_STEREOGRAPHIC = 9810; +static_assert(PROJECTION_METHOD_POLAR_STEREOGRAPHIC == + EPSG_CODE_METHOD_POLAR_STEREOGRAPHIC_VARIANT_A); +constexpr int PROJECTION_METHOD_KROVAK_OBLIQUE_CONIC_CONFORMAL = 9819; +static_assert(PROJECTION_METHOD_KROVAK_OBLIQUE_CONIC_CONFORMAL == + EPSG_CODE_METHOD_KROVAK); +constexpr int PROJECTION_METHOD_AMERICAN_POLYCONIC = 9818; +static_assert(PROJECTION_METHOD_AMERICAN_POLYCONIC == + EPSG_CODE_METHOD_AMERICAN_POLYCONIC); +constexpr int PROJECTION_METHOD_ALBERS_EQUAL_AREA = 9822; +static_assert(PROJECTION_METHOD_ALBERS_EQUAL_AREA == + EPSG_CODE_METHOD_ALBERS_EQUAL_AREA); +constexpr int PROJECTION_METHOD_LAMBERT_AZIMUTHAL_EQUAL_AREA = 9820; +static_assert(PROJECTION_METHOD_LAMBERT_AZIMUTHAL_EQUAL_AREA == + EPSG_CODE_METHOD_LAMBERT_AZIMUTHAL_EQUAL_AREA); + bool S100ReadSRS(const GDALGroup *poRootGroup, OGRSpatialReference &oSRS) { - constexpr 
int PROJECTION_METHOD_MERCATOR = 9805; - static_assert(PROJECTION_METHOD_MERCATOR == - EPSG_CODE_METHOD_MERCATOR_VARIANT_B); - constexpr int PROJECTION_METHOD_TRANSVERSE_MERCATOR = 9807; - static_assert(PROJECTION_METHOD_TRANSVERSE_MERCATOR == - EPSG_CODE_METHOD_TRANSVERSE_MERCATOR); - constexpr int PROJECTION_METHOD_OBLIQUE_MERCATOR = 9815; - static_assert(PROJECTION_METHOD_OBLIQUE_MERCATOR == - EPSG_CODE_METHOD_HOTINE_OBLIQUE_MERCATOR_VARIANT_B); - constexpr int PROJECTION_METHOD_HOTINE_OBLIQUE_MERCATOR = 9812; - static_assert(PROJECTION_METHOD_HOTINE_OBLIQUE_MERCATOR == - EPSG_CODE_METHOD_HOTINE_OBLIQUE_MERCATOR_VARIANT_A); - constexpr int PROJECTION_METHOD_LCC_1SP = 9801; - static_assert(PROJECTION_METHOD_LCC_1SP == - EPSG_CODE_METHOD_LAMBERT_CONIC_CONFORMAL_1SP); - constexpr int PROJECTION_METHOD_LCC_2SP = 9802; - static_assert(PROJECTION_METHOD_LCC_2SP == - EPSG_CODE_METHOD_LAMBERT_CONIC_CONFORMAL_2SP); - constexpr int PROJECTION_METHOD_OBLIQUE_STEREOGRAPHIC = 9809; - static_assert(PROJECTION_METHOD_OBLIQUE_STEREOGRAPHIC == - EPSG_CODE_METHOD_OBLIQUE_STEREOGRAPHIC); - constexpr int PROJECTION_METHOD_POLAR_STEREOGRAPHIC = 9810; - static_assert(PROJECTION_METHOD_POLAR_STEREOGRAPHIC == - EPSG_CODE_METHOD_POLAR_STEREOGRAPHIC_VARIANT_A); - constexpr int PROJECTION_METHOD_KROVAK_OBLIQUE_CONIC_CONFORMAL = 9819; - static_assert(PROJECTION_METHOD_KROVAK_OBLIQUE_CONIC_CONFORMAL == - EPSG_CODE_METHOD_KROVAK); - constexpr int PROJECTION_METHOD_AMERICAN_POLYCONIC = 9818; - static_assert(PROJECTION_METHOD_AMERICAN_POLYCONIC == - EPSG_CODE_METHOD_AMERICAN_POLYCONIC); - constexpr int PROJECTION_METHOD_ALBERS_EQUAL_AREA = 9822; - static_assert(PROJECTION_METHOD_ALBERS_EQUAL_AREA == - EPSG_CODE_METHOD_ALBERS_EQUAL_AREA); - constexpr int PROJECTION_METHOD_LAMBERT_AZIMUTHAL_EQUAL_AREA = 9820; - static_assert(PROJECTION_METHOD_LAMBERT_AZIMUTHAL_EQUAL_AREA == - EPSG_CODE_METHOD_LAMBERT_AZIMUTHAL_EQUAL_AREA); - // Get SRS oSRS.SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); auto poHorizontalCRS = poRootGroup->GetAttribute("horizontalCRS"); @@ -1112,6 +1113,9 @@ bool S100BaseWriter::BaseChecks(const char *pszDriverName, bool crsMustBeEPSG) m_aosOptions.FetchNameValue("VERTICAL_DATUM"); if (!pszVerticalDatum) pszVerticalDatum = m_poSrcDS->GetMetadataItem("VERTICAL_DATUM_MEANING"); + if (!pszVerticalDatum) + pszVerticalDatum = + m_poSrcDS->GetMetadataItem("VERTICAL_DATUM_EPSG_CODE"); if (!pszVerticalDatum) { CPLError(CE_Failure, CPLE_AppDefined, @@ -1122,9 +1126,24 @@ bool S100BaseWriter::BaseChecks(const char *pszDriverName, bool crsMustBeEPSG) S100GetVerticalDatumCodeFromCodeMeaningOrAbbrev(pszVerticalDatum); if (m_nVerticalDatum <= 0) { - CPLError(CE_Failure, CPLE_AppDefined, - "VERTICAL_DATUM value is invalid"); - return false; + auto pjCtxt = OSRGetProjTLSContext(); + PJ *vertical_datum = + proj_create_from_database(pjCtxt, "EPSG", pszVerticalDatum, + PJ_CATEGORY_DATUM, false, nullptr); + const bool bIsValid = + vertical_datum != nullptr && + proj_get_type(vertical_datum) == PJ_TYPE_VERTICAL_REFERENCE_FRAME; + proj_destroy(vertical_datum); + if (bIsValid) + { + m_nVerticalDatum = atoi(pszVerticalDatum); + } + else + { + CPLError(CE_Failure, CPLE_AppDefined, + "VERTICAL_DATUM value is invalid"); + return false; + } } const std::string osFilename = CPLGetFilename(m_osDestFilename.c_str()); @@ -1201,6 +1220,17 @@ bool S100BaseWriter::WriteUInt8Value(hid_t hGroup, const char *pszName, GH5_WriteAttribute(hGroup, pszName, value); } +/************************************************************************/ 
+/* S100BaseWriter::WriteUInt16Value() */ +/************************************************************************/ + +bool S100BaseWriter::WriteUInt16Value(hid_t hGroup, const char *pszName, + int value) +{ + return GH5_CreateAttribute(hGroup, pszName, H5T_STD_U16LE) && + GH5_WriteAttribute(hGroup, pszName, value); +} + /************************************************************************/ /* S100BaseWriter::WriteUInt32Value() */ /************************************************************************/ @@ -1212,6 +1242,17 @@ bool S100BaseWriter::WriteUInt32Value(hid_t hGroup, const char *pszName, GH5_WriteAttribute(hGroup, pszName, value); } +/************************************************************************/ +/* S100BaseWriter::WriteInt32Value() */ +/************************************************************************/ + +bool S100BaseWriter::WriteInt32Value(hid_t hGroup, const char *pszName, + int value) +{ + return GH5_CreateAttribute(hGroup, pszName, H5T_STD_I32LE) && + GH5_WriteAttribute(hGroup, pszName, value); +} + /************************************************************************/ /* S100BaseWriter::WriteFloat32Value() */ /************************************************************************/ @@ -1309,7 +1350,7 @@ bool S100BaseWriter::WriteIssueDate() /* S100BaseWriter::WriteIssueTime() */ /************************************************************************/ -bool S100BaseWriter::WriteIssueTime() +bool S100BaseWriter::WriteIssueTime(bool bAutogenerateFromCurrent) { const char *pszIssueTime = m_aosOptions.FetchNameValue("ISSUE_TIME"); if (!pszIssueTime) @@ -1318,6 +1359,17 @@ bool S100BaseWriter::WriteIssueTime() if (pszTmp && strlen(pszTmp) == 7 && pszTmp[6] == 'Z') pszIssueTime = pszTmp; } + std::string osIssueTime; // keep in that scope + if (!pszIssueTime && bAutogenerateFromCurrent) + { + time_t now; + time(&now); + struct tm brokenDown; + CPLUnixTimeToYMDHMS(now, &brokenDown); + osIssueTime = CPLSPrintf("%02d%02d%02dZ", brokenDown.tm_hour, + brokenDown.tm_min, brokenDown.tm_sec); + pszIssueTime = osIssueTime.c_str(); + } return !pszIssueTime || pszIssueTime[0] == 0 || WriteVarLengthStringValue(m_hdf5, "issueTime", pszIssueTime); } @@ -1347,10 +1399,284 @@ bool S100BaseWriter::WriteTopLevelBoundingBox() /* S100BaseWriter::WriteHorizontalCRS() */ /************************************************************************/ -bool S100BaseWriter::WriteHorizontalCRS(int nCode) +bool S100BaseWriter::WriteHorizontalCRS() { - return GH5_CreateAttribute(m_hdf5, "horizontalCRS", H5T_STD_I32LE) && - GH5_WriteAttribute(m_hdf5, "horizontalCRS", nCode); + bool ret = WriteInt32Value(m_hdf5, "horizontalCRS", + m_nEPSGCode > 0 ? m_nEPSGCode : -1); + if (ret && m_nEPSGCode <= 0) + { + ret = WriteVarLengthStringValue(m_hdf5, "nameOfHorizontalCRS", + m_poSRS->GetName()); + { + GH5_HIDTypeHolder hEnumType(H5_CHECK(H5Tenum_create(H5T_STD_U8LE))); + ret = ret && hEnumType; + if (ret) + { + uint8_t val; + val = 1; + ret = ret && H5_CHECK(H5Tenum_insert(hEnumType, "geodeticCRS2D", + &val)) >= 0; + val = 2; + ret = ret && H5_CHECK(H5Tenum_insert(hEnumType, "projectedCRS", + &val)) >= 0; + ret = ret && + GH5_CreateAttribute(m_hdf5, "typeOfHorizontalCRS", + hEnumType) && + GH5_WriteAttribute(m_hdf5, "typeOfHorizontalCRS", + m_poSRS->IsGeographic() ? 1 : 2); + } + } + + const int nHorizontalCS = m_poSRS->IsGeographic() ? 6422 + : m_poSRS->EPSGTreatsAsNorthingEasting() + ? 
4500 + : 4400; + ret = ret && WriteInt32Value(m_hdf5, "horizontalCS", nHorizontalCS); + + const char *pszDatumKey = + m_poSRS->IsGeographic() ? "GEOGCS|DATUM" : "PROJCS|GEOGCS|DATUM"; + const char *pszDatumAuthName = m_poSRS->GetAuthorityName(pszDatumKey); + const char *pszDatumCode = m_poSRS->GetAuthorityCode(pszDatumKey); + const int nDatum = (pszDatumAuthName && pszDatumCode && + EQUAL(pszDatumAuthName, "EPSG")) + ? atoi(pszDatumCode) + : -1; + ret = ret && WriteInt32Value(m_hdf5, "horizontalDatum", nDatum); + if (ret && nDatum < 0) + { + const char *pszDatum = m_poSRS->GetAttrValue(pszDatumKey); + if (!pszDatum) + pszDatum = "unknown"; + ret = WriteVarLengthStringValue(m_hdf5, "nameOfHorizontalDatum", + pszDatum); + + const char *pszSpheroidKey = m_poSRS->IsGeographic() + ? "GEOGCS|DATUM|SPHEROID" + : "PROJCS|GEOGCS|DATUM|SPHEROID"; + const char *pszSpheroidAuthName = + m_poSRS->GetAuthorityName(pszSpheroidKey); + const char *pszSpheroidCode = + m_poSRS->GetAuthorityCode(pszSpheroidKey); + const char *pszSpheroidName = m_poSRS->GetAttrValue(pszSpheroidKey); + const int nSpheroid = + (pszSpheroidAuthName && pszSpheroidCode && + EQUAL(pszSpheroidAuthName, "EPSG")) + ? atoi(pszSpheroidCode) + : (pszSpheroidName && EQUAL(pszSpheroidName, "Bessel 1841")) + ? 7004 + : -1; + if (nSpheroid <= 0) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Unknown code for ellipsoid of CRS"); + return false; + } + ret = ret && WriteInt32Value(m_hdf5, "spheroid", nSpheroid); + + const char *pszPrimeMeridianKey = m_poSRS->IsGeographic() + ? "GEOGCS|PRIMEM" + : "PROJCS|GEOGCS|PRIMEM"; + const char *pszPrimeMeridianAuthName = + m_poSRS->GetAuthorityName(pszPrimeMeridianKey); + const char *pszPrimeMeridianCode = + m_poSRS->GetAuthorityCode(pszPrimeMeridianKey); + const char *pszPrimeMeridianName = + m_poSRS->GetAttrValue(pszPrimeMeridianKey); + const int nPrimeMeridian = + (pszPrimeMeridianAuthName && pszPrimeMeridianCode && + EQUAL(pszPrimeMeridianAuthName, "EPSG")) + ? atoi(pszPrimeMeridianCode) + : (pszPrimeMeridianName && EQUAL(pszPrimeMeridianName, "Ferro")) + ? 8909 + : -1; + if (nPrimeMeridian <= 0) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Unknown code for prime meridian of CRS"); + return false; + } + ret = + ret && WriteInt32Value(m_hdf5, "primeMeridian", nPrimeMeridian); + } + + const char *pszProjection = m_poSRS->IsProjected() + ? 
m_poSRS->GetAttrValue("PROJECTION") + : nullptr; + if (pszProjection) + { + int nProjectionMethod = 0; + double adfParams[] = {std::numeric_limits::quiet_NaN(), + std::numeric_limits::quiet_NaN(), + std::numeric_limits::quiet_NaN(), + std::numeric_limits::quiet_NaN(), + std::numeric_limits::quiet_NaN()}; + if (EQUAL(pszProjection, SRS_PT_MERCATOR_2SP)) + { + nProjectionMethod = PROJECTION_METHOD_MERCATOR; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_STANDARD_PARALLEL_1, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0); + } + else if (EQUAL(pszProjection, SRS_PT_MERCATOR_1SP)) + { + auto poTmpSRS = std::unique_ptr( + m_poSRS->convertToOtherProjection(SRS_PT_MERCATOR_2SP)); + nProjectionMethod = PROJECTION_METHOD_MERCATOR; + adfParams[0] = + poTmpSRS->GetNormProjParm(SRS_PP_STANDARD_PARALLEL_1, 0.0); + adfParams[1] = + poTmpSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0); + } + else if (EQUAL(pszProjection, SRS_PT_TRANSVERSE_MERCATOR)) + { + nProjectionMethod = PROJECTION_METHOD_TRANSVERSE_MERCATOR; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0); + adfParams[2] = + m_poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0); + } + else if (EQUAL(pszProjection, + SRS_PT_HOTINE_OBLIQUE_MERCATOR_AZIMUTH_CENTER)) + { + nProjectionMethod = PROJECTION_METHOD_OBLIQUE_MERCATOR; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_CENTER, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_LONGITUDE_OF_CENTER, 0.0); + adfParams[2] = m_poSRS->GetNormProjParm(SRS_PP_AZIMUTH, 0.0); + adfParams[3] = + m_poSRS->GetNormProjParm(SRS_PP_RECTIFIED_GRID_ANGLE, 0.0); + adfParams[4] = + m_poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0); + } + else if (EQUAL(pszProjection, SRS_PT_HOTINE_OBLIQUE_MERCATOR)) + { + nProjectionMethod = PROJECTION_METHOD_HOTINE_OBLIQUE_MERCATOR; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_CENTER, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_LONGITUDE_OF_CENTER, 0.0); + adfParams[2] = m_poSRS->GetNormProjParm(SRS_PP_AZIMUTH, 0.0); + adfParams[3] = + m_poSRS->GetNormProjParm(SRS_PP_RECTIFIED_GRID_ANGLE, 0.0); + adfParams[4] = + m_poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0); + } + else if (EQUAL(pszProjection, SRS_PT_LAMBERT_CONFORMAL_CONIC_1SP)) + { + nProjectionMethod = PROJECTION_METHOD_LCC_1SP; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0); + adfParams[2] = + m_poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0); + } + else if (EQUAL(pszProjection, SRS_PT_LAMBERT_CONFORMAL_CONIC_2SP)) + { + nProjectionMethod = PROJECTION_METHOD_LCC_2SP; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0); + adfParams[2] = + m_poSRS->GetNormProjParm(SRS_PP_STANDARD_PARALLEL_1, 0.0); + adfParams[3] = + m_poSRS->GetNormProjParm(SRS_PP_STANDARD_PARALLEL_2, 0.0); + } + else if (EQUAL(pszProjection, SRS_PT_OBLIQUE_STEREOGRAPHIC)) + { + nProjectionMethod = PROJECTION_METHOD_OBLIQUE_STEREOGRAPHIC; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0); + adfParams[2] = + m_poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0); + } + else if (EQUAL(pszProjection, SRS_PT_POLAR_STEREOGRAPHIC)) + { + nProjectionMethod = 
PROJECTION_METHOD_POLAR_STEREOGRAPHIC; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0); + adfParams[2] = + m_poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0); + } + else if (EQUAL(pszProjection, SRS_PT_KROVAK)) + { + nProjectionMethod = + PROJECTION_METHOD_KROVAK_OBLIQUE_CONIC_CONFORMAL; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0); + adfParams[2] = m_poSRS->GetNormProjParm(SRS_PP_AZIMUTH, 0.0); + adfParams[3] = + m_poSRS->GetNormProjParm(SRS_PP_PSEUDO_STD_PARALLEL_1, 0.0); + adfParams[4] = + m_poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0); + } + else if (EQUAL(pszProjection, SRS_PT_POLYCONIC)) + { + nProjectionMethod = PROJECTION_METHOD_AMERICAN_POLYCONIC; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0); + } + else if (EQUAL(pszProjection, SRS_PT_ALBERS_CONIC_EQUAL_AREA)) + { + nProjectionMethod = PROJECTION_METHOD_ALBERS_EQUAL_AREA; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_CENTER, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_LONGITUDE_OF_CENTER, 0.0); + adfParams[2] = + m_poSRS->GetNormProjParm(SRS_PP_STANDARD_PARALLEL_1, 0.0); + adfParams[3] = + m_poSRS->GetNormProjParm(SRS_PP_STANDARD_PARALLEL_2, 0.0); + } + else if (EQUAL(pszProjection, SRS_PT_LAMBERT_AZIMUTHAL_EQUAL_AREA)) + { + nProjectionMethod = + PROJECTION_METHOD_LAMBERT_AZIMUTHAL_EQUAL_AREA; + adfParams[0] = + m_poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_CENTER, 0.0); + adfParams[1] = + m_poSRS->GetNormProjParm(SRS_PP_LONGITUDE_OF_CENTER, 0.0); + } + else + { + CPLError(CE_Failure, CPLE_NotSupported, + "Projection method %s is not supported by S100", + pszProjection); + return false; + } + + ret = ret && WriteInt32Value(m_hdf5, "projectionMethod", + nProjectionMethod); + for (int i = 0; i < 5 && !std::isnan(adfParams[i]); ++i) + { + const std::string osAttrName = + "projectionParameter" + std::to_string(i + 1); + ret = ret && WriteFloat64Value(m_hdf5, osAttrName.c_str(), + adfParams[i]); + } + + ret = ret && WriteFloat64Value(m_hdf5, "falseNorthing", + m_poSRS->GetNormProjParm( + SRS_PP_FALSE_NORTHING, 0.0)); + ret = ret && WriteFloat64Value(m_hdf5, "falseEasting", + m_poSRS->GetNormProjParm( + SRS_PP_FALSE_EASTING, 0.0)); + } + } + return ret; } /************************************************************************/ @@ -1593,9 +1919,11 @@ bool S100BaseWriter::WriteInterpolationType(hid_t hGroup, int nCode) /* S100BaseWriter::WriteNumInstances() */ /************************************************************************/ -bool S100BaseWriter::WriteNumInstances(hid_t hGroup, int numInstances) +bool S100BaseWriter::WriteNumInstances(hid_t hGroup, hid_t hType, + int numInstances) { - return WriteUInt8Value(hGroup, "numInstances", numInstances); + return GH5_CreateAttribute(hGroup, "numInstances", hType) && + GH5_WriteAttribute(hGroup, "numInstances", numInstances); } /************************************************************************/ @@ -1733,9 +2061,10 @@ bool S100BaseWriter::WriteFIGGridRelatedParameters(hid_t hGroup) /* S100BaseWriter::WriteNumGRP() */ /************************************************************************/ -bool S100BaseWriter::WriteNumGRP(hid_t hGroup, int numGRP) +bool S100BaseWriter::WriteNumGRP(hid_t hGroup, hid_t hType, int numGRP) { - return 
WriteUInt8Value(hGroup, "numGRP", numGRP); + return GH5_CreateAttribute(hGroup, "numGRP", hType) && + GH5_WriteAttribute(hGroup, "numGRP", numGRP); } /************************************************************************/ diff --git a/frmts/hdf5/s100.h b/frmts/hdf5/s100.h index 367f0e66f47a..6f35e4f7718d 100644 --- a/frmts/hdf5/s100.h +++ b/frmts/hdf5/s100.h @@ -73,6 +73,8 @@ class S100BaseWriter CPL_NON_FINAL bool BaseChecks(const char *pszDriverName, bool crsMustBeEPSG); static bool WriteUInt8Value(hid_t hGroup, const char *pszName, int value); + static bool WriteUInt16Value(hid_t hGroup, const char *pszName, int value); + static bool WriteInt32Value(hid_t hGroup, const char *pszName, int value); static bool WriteUInt32Value(hid_t hGroup, const char *pszName, unsigned value); static bool WriteFloat32Value(hid_t hGroup, const char *pszName, @@ -91,9 +93,9 @@ class S100BaseWriter CPL_NON_FINAL bool CreateFile(); bool WriteProductSpecification(const char *pszProductSpecification); bool WriteIssueDate(); - bool WriteIssueTime(); + bool WriteIssueTime(bool bAutogenerateFromCurrent); bool WriteTopLevelBoundingBox(); - bool WriteHorizontalCRS(int nCode); + bool WriteHorizontalCRS(); bool WriteVerticalCS(int nCode); bool WriteVerticalCoordinateBase(int nCode); static bool WriteVerticalDatumReference(hid_t hGroup, int nCode); @@ -107,7 +109,7 @@ class S100BaseWriter CPL_NON_FINAL static bool WriteHorizontalPositionUncertainty(hid_t hGroup, float fValue); static bool WriteVerticalUncertainty(hid_t hGroup, float fValue); static bool WriteInterpolationType(hid_t hGroup, int nCode); - static bool WriteNumInstances(hid_t hGroup, int numInstances); + static bool WriteNumInstances(hid_t hGroup, hid_t hType, int numInstances); static bool WriteSequencingRuleScanDirection(hid_t hGroup, const char *pszValue); static bool WriteSequencingRuleType(hid_t hGroup, int nCode); @@ -115,7 +117,7 @@ class S100BaseWriter CPL_NON_FINAL bool CreateFeatureInstanceGroup(const char *name); bool WriteFIGGridRelatedParameters(hid_t hGroup); - static bool WriteNumGRP(hid_t hGroup, int numGRP); + static bool WriteNumGRP(hid_t hGroup, hid_t hType, int numGRP); bool CreateValuesGroup(const char *name); diff --git a/frmts/hdf5/s102dataset.cpp b/frmts/hdf5/s102dataset.cpp index 12686dcf5ba8..4d2089766ff8 100644 --- a/frmts/hdf5/s102dataset.cpp +++ b/frmts/hdf5/s102dataset.cpp @@ -1097,7 +1097,7 @@ bool S102Creator::Create(GDALProgressFunc pfnProgress, void *pProgressData) H5T_STD_U16LE, m_nVerticalDatum); } - ret = ret && WriteNumGRP(m_featureInstanceGroup, 1); + ret = ret && WriteNumGRP(m_featureInstanceGroup, H5T_STD_U8LE, 1); ret = ret && CreateValuesGroup("Group_001"); ret = ret && WriteVarLengthStringValue(m_valuesGroup, "timePoint", @@ -1133,8 +1133,8 @@ bool S102Creator::Create(GDALProgressFunc pfnProgress, void *pProgressData) bool ret = CreateFile(); ret = ret && WriteProductSpecification("INT.IHO.S-102.3.0.0"); ret = ret && WriteIssueDate(); - ret = ret && WriteIssueTime(); - ret = ret && WriteHorizontalCRS(m_nEPSGCode); + ret = ret && WriteIssueTime(/* bAutogenerateFromCurrent = */ false); + ret = ret && WriteHorizontalCRS(); ret = ret && WriteTopLevelBoundingBox(); ret = ret && WriteVerticalCS(6498); // Depth, metre, down ret = ret && WriteVerticalCoordinateBase(2); // verticalDatum @@ -1150,7 +1150,7 @@ bool S102Creator::Create(GDALProgressFunc pfnProgress, void *pProgressData) ret = ret && CreateFeatureInstanceGroup("BathymetryCoverage.01"); ret = ret && WriteFIGGridRelatedParameters(m_featureInstanceGroup); - 
ret = ret && WriteNumGRP(m_featureInstanceGroup, 1); + ret = ret && WriteNumGRP(m_featureInstanceGroup, H5T_STD_U8LE, 1); ret = ret && CreateValuesGroup("Group_001"); @@ -1178,7 +1178,7 @@ bool S102Creator::Create(GDALProgressFunc pfnProgress, void *pProgressData) ret = ret && CreateFeatureInstanceGroup("QualityOfBathymetryCoverage.01"); ret = ret && WriteFIGGridRelatedParameters(m_featureInstanceGroup); - ret = ret && WriteNumGRP(m_featureInstanceGroup, 1); + ret = ret && WriteNumGRP(m_featureInstanceGroup, H5T_STD_U8LE, 1); ret = ret && CreateValuesGroup("Group_001"); pScaledProgressData.reset(GDALCreateScaledProgress( @@ -1232,7 +1232,7 @@ bool S102Creator::WriteFeatureGroupAttributes(bool isQuality) ? static_cast(CPLAtof(pszVerticalUncertainty)) : -1.0f); ret = ret && WriteInterpolationType(m_featureGroup, 1); // Nearest neighbor - ret = ret && WriteNumInstances(m_featureGroup, 1); + ret = ret && WriteNumInstances(m_featureGroup, H5T_STD_U8LE, 1); ret = ret && WriteSequencingRuleScanDirection(m_featureGroup, m_poSRS->IsProjected() ? "Easting, Northing" diff --git a/frmts/hdf5/s104dataset.cpp b/frmts/hdf5/s104dataset.cpp index baac1eda3717..d284f277d54d 100644 --- a/frmts/hdf5/s104dataset.cpp +++ b/frmts/hdf5/s104dataset.cpp @@ -5,7 +5,7 @@ * Author: Even Rouault * ****************************************************************************** - * Copyright (c) 2023, Even Rouault + * Copyright (c) 2023-2025, Even Rouault * * SPDX-License-Identifier: MIT ****************************************************************************/ @@ -22,8 +22,14 @@ #include "gdal_proxy.h" #include "gdal_rat.h" +#include "cpl_time.h" + +#include +#include +#include #include #include +#include /************************************************************************/ /* S104Dataset */ @@ -40,6 +46,11 @@ class S104Dataset final : public S100BaseDataset ~S104Dataset() override; static GDALDataset *Open(GDALOpenInfo *); + static GDALDataset *CreateCopy(const char *pszFilename, + GDALDataset *poSrcDS, int bStrict, + char **papszOptions, + GDALProgressFunc pfnProgress, + void *pProgressData); }; S104Dataset::~S104Dataset() = default; @@ -631,6 +642,1116 @@ GDALDataset *S104Dataset::Open(GDALOpenInfo *poOpenInfo) return poDS.release(); } +/************************************************************************/ +/* S104Creator */ +/************************************************************************/ + +class S104Creator final : public S100BaseWriter +{ + public: + S104Creator(const char *pszDestFilename, GDALDataset *poSrcDS, + CSLConstList papszOptions) + : S100BaseWriter(pszDestFilename, poSrcDS, papszOptions) + { + } + + ~S104Creator() override; + + bool Create(GDALProgressFunc pfnProgress, void *pProgressData); + + static constexpr const char *FEATURE_TYPE = "WaterLevel"; + + protected: + bool Close() override + { + return BaseClose(); + } + + private: + bool WriteFeatureGroupAttributes(); + bool WriteUncertaintyDataset(); + bool FillFeatureInstanceGroup( + const std::map> + &oMapTimestampToDS, + GDALProgressFunc pfnProgress, void *pProgressData); + bool CopyValues(GDALDataset *poSrcDS, GDALProgressFunc pfnProgress, + void *pProgressData); + bool CreateGroupF(); +}; + +/************************************************************************/ +/* S104Creator::~S104Creator() */ +/************************************************************************/ + +S104Creator::~S104Creator() +{ + S104Creator::Close(); +} + +/************************************************************************/ +/* 
S104Creator::Create() */ +/************************************************************************/ + +bool S104Creator::Create(GDALProgressFunc pfnProgress, void *pProgressData) +{ + CPLStringList aosDatasets( + CSLTokenizeString2(m_aosOptions.FetchNameValue("DATASETS"), ",", 0)); + if (m_poSrcDS->GetRasterCount() == 0 && aosDatasets.empty()) + { + // Deal with S104 -> S104 translation; + CSLConstList papszSubdatasets = m_poSrcDS->GetMetadata("SUBDATASETS"); + if (papszSubdatasets) + { + int iSubDS = 0; + std::string osFirstDataset; + std::string osDatasets; + for (const auto &[pszItem, pszValue] : + cpl::IterateNameValue(papszSubdatasets)) + { + if (STARTS_WITH(pszItem, "SUBDATASET_") && + cpl::ends_with(std::string_view(pszItem), "_NAME") && + STARTS_WITH(pszValue, "S104:")) + { + if (strstr(pszValue, ":WaterLevel.")) + { + auto poTmpDS = + std::unique_ptr(GDALDataset::Open( + pszValue, + GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR)); + if (!poTmpDS) + return false; + CPLStringList aosOptions(m_aosOptions); + if (iSubDS > 0) + aosOptions.SetNameValue("APPEND_SUBDATASET", "YES"); + S104Creator oAuxCreator(m_osDestFilename.c_str(), + poTmpDS.get(), + aosOptions.List()); + const int nSubDSCount = + ((CSLCount(papszSubdatasets) + 1) / 2); + std::unique_ptr + pScaledProgressData( + GDALCreateScaledProgress( + static_cast(iSubDS) / nSubDSCount, + static_cast(iSubDS + 1) / + nSubDSCount, + pfnProgress, pProgressData), + GDALDestroyScaledProgress); + ++iSubDS; + if (!oAuxCreator.Create(GDALScaledProgress, + pScaledProgressData.get())) + return false; + } + else + { + if (osFirstDataset.empty()) + osFirstDataset = pszValue; + if (!osDatasets.empty()) + osDatasets += ','; + osDatasets += pszValue; + } + } + } + if (iSubDS > 0) + { + return true; + } + else if (!osDatasets.empty()) + { + auto poTmpDS = std::unique_ptr( + GDALDataset::Open(osFirstDataset.c_str(), + GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR)); + if (!poTmpDS) + return false; + CPLStringList aosOptions(m_aosOptions); + aosOptions.SetNameValue("DATASETS", osDatasets.c_str()); + S104Creator oAuxCreator(m_osDestFilename.c_str(), poTmpDS.get(), + aosOptions.List()); + return oAuxCreator.Create(pfnProgress, pProgressData); + } + } + } + + if (m_poSrcDS->GetRasterCount() != 2 && m_poSrcDS->GetRasterCount() != 3) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Source dataset %s must have two or three bands", + m_poSrcDS->GetDescription()); + return false; + } + + if (!BaseChecks("S104", /* crsMustBeEPSG = */ false)) + return false; + + std::map> + oMapTimestampToDS; + CPLStringList aosDatasetsTimePoint(CSLTokenizeString2( + m_aosOptions.FetchNameValue("DATASETS_TIME_POINT"), ",", 0)); + if (!aosDatasets.empty()) + { + if (!aosDatasetsTimePoint.empty() && + aosDatasetsTimePoint.size() != aosDatasets.size()) + { + CPLError(CE_Failure, CPLE_AppDefined, + "DATASETS_TIME_POINT does not have the same number of " + "values as DATASETS"); + return false; + } + int i = 0; + for (const char *pszDataset : aosDatasets) + { + auto poDS = std::unique_ptr(GDALDataset::Open( + pszDataset, GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR)); + if (!poDS) + return false; + if (poDS->GetRasterXSize() != m_poSrcDS->GetRasterXSize() || + poDS->GetRasterYSize() != m_poSrcDS->GetRasterYSize()) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Dataset %s does not have the same dimensions as %s", + poDS->GetDescription(), m_poSrcDS->GetDescription()); + return false; + } + if (poDS->GetRasterCount() != m_poSrcDS->GetRasterCount()) + { + CPLError(CE_Failure, CPLE_NotSupported, + 
"Dataset %s must have %d bands", + poDS->GetDescription(), m_poSrcDS->GetRasterCount()); + return false; + } + auto poSRS = poDS->GetSpatialRef(); + if (!poSRS || !poSRS->IsSame(m_poSRS)) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Dataset %s does not have the same CRS as %s", + poDS->GetDescription(), m_poSrcDS->GetDescription()); + return false; + } + GDALGeoTransform gt; + if (poDS->GetGeoTransform(gt) != CE_None || gt != m_gt) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Dataset %s does not have the same geotransform as %s", + poDS->GetDescription(), m_poSrcDS->GetDescription()); + return false; + } + const char *pszVerticalDatum = + poDS->GetMetadataItem("VERTICAL_DATUM"); + if (pszVerticalDatum) + { + const int nVerticalDatum = + S100GetVerticalDatumCodeFromCodeMeaningOrAbbrev( + pszVerticalDatum); + if (nVerticalDatum != m_nVerticalDatum) + { + CPLError(CE_Failure, CPLE_NotSupported, + "Dataset %s does not have the same vertical datum " + "as %s", + poDS->GetDescription(), + m_poSrcDS->GetDescription()); + return false; + } + } + const char *pszTimePoint = poDS->GetMetadataItem("timePoint"); + if (!pszTimePoint && !aosDatasetsTimePoint.empty()) + pszTimePoint = aosDatasetsTimePoint[i]; + if (!pszTimePoint) + { + CPLError( + CE_Failure, CPLE_NotSupported, + "Dataset %s does not have a timePoint metadata item, and " + "the DATASETS_TIME_POINT creation option is not set", + poDS->GetDescription()); + return false; + } + if (strlen(pszTimePoint) != strlen("YYYYMMDDTHHMMSSZ") || + pszTimePoint[8] != 'T' || pszTimePoint[15] != 'Z') + { + CPLError(CE_Failure, CPLE_AppDefined, + "timePoint value for dataset %s is %s, but does not " + "conform to a YYYYMMDDTHHMMSSZ datetime value.", + poDS->GetDescription(), pszTimePoint); + return false; + } + if (cpl::contains(oMapTimestampToDS, pszTimePoint)) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Several datasets are at timePoint %s.", pszTimePoint); + return false; + } + oMapTimestampToDS[pszTimePoint] = pszDataset; + ++i; + } + } + + { + const char *pszTimePoint = m_aosOptions.FetchNameValueDef( + "TIME_POINT", m_poSrcDS->GetMetadataItem("timePoint")); + if (!pszTimePoint) + { + CPLError(CE_Failure, CPLE_AppDefined, + "TIME_POINT creation option value must " + "be set, or source dataset must have a timePoint metadata " + "item."); + return false; + } + if (strlen(pszTimePoint) != strlen("YYYYMMDDTHHMMSSZ") || + pszTimePoint[8] != 'T' || pszTimePoint[15] != 'Z') + { + CPLError(CE_Failure, CPLE_AppDefined, + "TIME_POINT creation option value must " + "be set to a YYYYMMDDTHHMMSSZ datetime value."); + return false; + } + + if (oMapTimestampToDS.empty()) + { + oMapTimestampToDS[pszTimePoint] = m_poSrcDS; + } + else + { + const auto oIter = oMapTimestampToDS.find(pszTimePoint); + if (oIter != oMapTimestampToDS.end() && + CPLString(std::get(oIter->second)) + .replaceAll('\\', '/') != + CPLString(m_poSrcDS->GetDescription()) + .replaceAll('\\', '/')) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Several datasets are at timePoint %s (%s vs %s).", + pszTimePoint, + std::get(oIter->second).c_str(), + m_poSrcDS->GetDescription()); + return false; + } + } + } + if (oMapTimestampToDS.size() > 999) + { + CPLError( + CE_Failure, CPLE_AppDefined, + "Only up to 999 datasets are supported for a same vertical datum"); + return false; + } + + if (m_poSRS->IsVertical() || m_poSRS->IsCompound() || m_poSRS->IsLocal() || + m_poSRS->GetAxesCount() != 2) + { + CPLError(CE_Failure, CPLE_NotSupported, + "The CRS must be a geographic 2D or projected 2D CRS"); + 
return false; + } + + const bool bAppendSubdataset = + CPLTestBool(m_aosOptions.FetchNameValueDef("APPEND_SUBDATASET", "NO")); + if (bAppendSubdataset) + { + GDALOpenInfo oOpenInfo(m_osDestFilename.c_str(), GA_ReadOnly); + auto poOriDS = + std::unique_ptr(S104Dataset::Open(&oOpenInfo)); + if (!poOriDS) + { + CPLError(CE_Failure, CPLE_AppDefined, + "%s is not a valid existing S104 dataset", + m_osDestFilename.c_str()); + return false; + } + const auto poOriSRS = poOriDS->GetSpatialRef(); + if (!poOriSRS) + { + // shouldn't happen + return false; + } + if (!poOriSRS->IsSame(m_poSRS)) + { + CPLError(CE_Failure, CPLE_AppDefined, + "CRS of %s is not the same as the one of %s", + m_osDestFilename.c_str(), m_poSrcDS->GetDescription()); + return false; + } + poOriDS.reset(); + + OGREnvelope sExtent; + if (m_poSrcDS->GetExtentWGS84LongLat(&sExtent) != OGRERR_NONE) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Cannot get dataset extent in WGS84 longitude/latitude"); + return false; + } + + bool ret = OpenFileUpdateMode(); + if (ret) + { + m_featureGroup.reset(H5_CHECK(H5Gopen(m_hdf5, "WaterLevel"))); + } + + ret = ret && m_featureGroup; + double dfNumInstances = 0; + ret = ret && GH5_FetchAttribute(m_featureGroup, "numInstances", + dfNumInstances, true); + if (ret && !(dfNumInstances >= 1 && dfNumInstances <= 99 && + std::round(dfNumInstances) == dfNumInstances)) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Invalid value for numInstances"); + ret = false; + } + else if (ret && dfNumInstances == 99) + { + CPLError(CE_Failure, CPLE_AppDefined, + "Too many existing feature instances"); + ret = false; + } + else + { + double dfMainVerticalDatum = 0; + ret = ret && GH5_FetchAttribute(m_hdf5, "verticalDatum", + dfMainVerticalDatum, true); + + const int newNumInstances = static_cast(dfNumInstances) + 1; + ret = ret && GH5_WriteAttribute(m_featureGroup, "numInstances", + newNumInstances); + ret = ret && CreateFeatureInstanceGroup( + CPLSPrintf("WaterLevel.%02d", newNumInstances)); + ret = ret && FillFeatureInstanceGroup(oMapTimestampToDS, + pfnProgress, pProgressData); + if (dfMainVerticalDatum != m_nVerticalDatum) + { + ret = ret && WriteVerticalDatumReference( + m_featureInstanceGroup, + m_nVerticalDatum <= 1024 ? 1 : 2); + ret = + ret && WriteVerticalDatum(m_featureInstanceGroup, + H5T_STD_I32LE, m_nVerticalDatum); + } + } + + return Close() && ret; + } + else + { + bool ret = CreateFile(); + ret = ret && WriteProductSpecification("INT.IHO.S-104.2.0"); + ret = ret && WriteIssueDate(); + ret = ret && WriteIssueTime(/* bAutogenerateFromCurrent = */ true); + ret = ret && WriteHorizontalCRS(); + ret = ret && WriteTopLevelBoundingBox(); + + const char *pszGeographicIdentifier = m_aosOptions.FetchNameValueDef( + "GEOGRAPHIC_IDENTIFIER", + m_poSrcDS->GetMetadataItem("geographicIdentifier")); + if (pszGeographicIdentifier) + { + ret = + ret && WriteVarLengthStringValue(m_hdf5, "geographicIdentifier", + pszGeographicIdentifier); + } + + const char *pszVerticalCS = m_aosOptions.FetchNameValueDef( + "VERTICAL_CS", m_poSrcDS->GetMetadataItem("verticalCS")); + if (!pszVerticalCS) + { + CPLError(CE_Failure, CPLE_AppDefined, + "VERTICAL_CS creation option must be specified"); + return false; + } + const int nVerticalCS = EQUAL(pszVerticalCS, "DEPTH") ? 6498 + : EQUAL(pszVerticalCS, "HEIGHT") + ? 
6499 + : atoi(pszVerticalCS); + if (nVerticalCS != 6498 && nVerticalCS != 6499) + { + CPLError(CE_Failure, CPLE_NotSupported, + "VERTICAL_CS creation option must be set either to 6498 " + "(depth/down, metre), or 6499 (height/up, metre)"); + return false; + } + + ret = ret && WriteVerticalCS(nVerticalCS); + ret = ret && WriteVerticalCoordinateBase(2); // verticalDatum + // 1=s100VerticalDatum, 2=EPSG + ret = ret && WriteVerticalDatumReference( + m_hdf5, m_nVerticalDatum <= 1024 ? 1 : 2); + ret = + ret && WriteVerticalDatum(m_hdf5, H5T_STD_I32LE, m_nVerticalDatum); + + const char *pszWaterLevelTrendThreshold = + m_aosOptions.FetchNameValueDef( + "WATER_LEVEL_TREND_THRESHOLD", + m_poSrcDS->GetMetadataItem("waterLevelTrendThreshold")); + if (!pszWaterLevelTrendThreshold) + { + CPLError(CE_Failure, CPLE_AppDefined, + "WATER_LEVEL_TREND_THRESHOLD creation option must be " + "specified."); + return false; + } + if (CPLGetValueType(pszWaterLevelTrendThreshold) == CPL_VALUE_STRING) + { + CPLError(CE_Failure, CPLE_AppDefined, + "WATER_LEVEL_TREND_THRESHOLD creation option value must " + "be a numeric value."); + return false; + } + ret = ret && WriteFloat32Value(m_hdf5, "waterLevelTrendThreshold", + CPLAtof(pszWaterLevelTrendThreshold)); + + const char *pszDatasetDeliveryInterval = m_aosOptions.FetchNameValueDef( + "DATASET_DELIVERY_INTERVAL", + m_poSrcDS->GetMetadataItem("datasetDeliveryInterval")); + if (pszDatasetDeliveryInterval) + { + ret = ret && + WriteVarLengthStringValue(m_hdf5, "datasetDeliveryInterval", + pszDatasetDeliveryInterval); + } + + const char *pszTrendInterval = m_aosOptions.FetchNameValueDef( + "TREND_INTERVAL", m_poSrcDS->GetMetadataItem("trendInterval")); + if (pszTrendInterval) + { + if (CPLGetValueType(pszTrendInterval) != CPL_VALUE_INTEGER) + { + CPLError(CE_Failure, CPLE_AppDefined, + "TREND_INTERVAL creation option value must " + "be an integer value."); + return false; + } + ret = ret && WriteUInt32Value(m_hdf5, "trendInterval", + atoi(pszTrendInterval)); + } + + // WaterLevel + ret = ret && CreateFeatureGroup(FEATURE_TYPE); + ret = ret && WriteFeatureGroupAttributes(); + ret = ret && WriteAxisNames(m_featureGroup); + + ret = ret && CreateFeatureInstanceGroup("WaterLevel.01"); + ret = ret && FillFeatureInstanceGroup(oMapTimestampToDS, pfnProgress, + pProgressData); + + ret = ret && CreateGroupF(); + + return Close() && ret; + } +} + +/************************************************************************/ +/* S104Creator::WriteFeatureGroupAttributes() */ +/************************************************************************/ + +bool S104Creator::WriteFeatureGroupAttributes() +{ + CPLAssert(m_featureGroup); + + // 4 = all (recommended) + const char *pszCommonPointRule = m_aosOptions.FetchNameValueDef( + "COMMON_POINT_RULE", m_poSrcDS->GetMetadataItem("commonPointRule")); + if (!pszCommonPointRule) + pszCommonPointRule = "4"; // all (recommended) + const int nCommonPointRule = atoi(pszCommonPointRule); + bool ret = WriteCommonPointRule(m_featureGroup, nCommonPointRule); + ret = ret && WriteDataCodingFormat(m_featureGroup, 2); // Regular grid + ret = ret && WriteDataOffsetCode(m_featureGroup, 5); // Center of cell + ret = ret && WriteDimension(m_featureGroup, 2); + const char *pszHorizontalPositionUncertainty = + m_aosOptions.FetchNameValueDef( + "HORIZONTAL_POSITION_UNCERTAINTY", + m_poSrcDS->GetMetadataItem("horizontalPositionUncertainty")); + ret = + ret && + WriteHorizontalPositionUncertainty( + m_featureGroup, + pszHorizontalPositionUncertainty && + 
pszHorizontalPositionUncertainty[0] + ? static_cast(CPLAtof(pszHorizontalPositionUncertainty)) + : -1.0f); + const char *pszVerticalUncertainty = m_aosOptions.FetchNameValueDef( + "VERTICAL_UNCERTAINTY", + m_poSrcDS->GetMetadataItem("verticalUncertainty")); + ret = ret && WriteVerticalUncertainty( + m_featureGroup, + pszVerticalUncertainty && pszVerticalUncertainty[0] + ? static_cast(CPLAtof(pszVerticalUncertainty)) + : -1.0f); + const char *pszTimeUncertainty = m_aosOptions.FetchNameValueDef( + "TIME_UNCERTAINTY", m_poSrcDS->GetMetadataItem("timeUncertainty")); + if (pszTimeUncertainty) + WriteFloat32Value(m_featureGroup, "timeUncertainty", + CPLAtof(pszTimeUncertainty)); + const char *pszMethodWaterLevelProduct = m_aosOptions.FetchNameValueDef( + "METHOD_WATER_LEVEL_PRODUCT", + m_poSrcDS->GetMetadataItem("methodWaterLevelProduct")); + if (pszMethodWaterLevelProduct) + WriteVarLengthStringValue(m_featureGroup, "methodWaterLevelProduct", + pszMethodWaterLevelProduct); + ret = ret && WriteInterpolationType(m_featureGroup, 1); // Nearest neighbor + ret = ret && WriteNumInstances(m_featureGroup, H5T_STD_U32LE, 1); + ret = ret && WriteSequencingRuleScanDirection(m_featureGroup, + m_poSRS->IsProjected() + ? "Easting, Northing" + : "Longitude, Latitude"); + ret = ret && WriteSequencingRuleType(m_featureGroup, 1); // Linear + return ret; +} + +/************************************************************************/ +/* S104Creator::WriteUncertaintyDataset() */ +/************************************************************************/ + +bool S104Creator::WriteUncertaintyDataset() +{ + CPLAssert(m_featureInstanceGroup); + + GH5_HIDTypeHolder hDataType( + H5_CHECK(H5Tcreate(H5T_COMPOUND, sizeof(char *) + sizeof(float)))); + GH5_HIDTypeHolder hVarLengthStringDataType(H5_CHECK(H5Tcopy(H5T_C_S1))); + bool bRet = + hVarLengthStringDataType && + H5_CHECK(H5Tset_size(hVarLengthStringDataType, H5T_VARIABLE)) >= 0; + bRet = bRet && hVarLengthStringDataType && + H5_CHECK( + H5Tset_strpad(hVarLengthStringDataType, H5T_STR_NULLTERM)) >= 0; + bRet = bRet && hDataType && + H5_CHECK(H5Tinsert(hDataType, "name", 0, + hVarLengthStringDataType)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "value", sizeof(char *), + H5T_IEEE_F32LE)) >= 0; + hsize_t dims[] = {1}; + GH5_HIDSpaceHolder hDataSpace(H5_CHECK(H5Screate_simple(1, dims, nullptr))); + GH5_HIDDatasetHolder hDatasetID; + GH5_HIDParametersHolder hParams(H5_CHECK(H5Pcreate(H5P_DATASET_CREATE))); + bRet = bRet && hParams; + if (bRet) + { + hDatasetID.reset( + H5_CHECK(H5Dcreate(m_featureInstanceGroup, "uncertainty", hDataType, + hDataSpace, hParams))); + bRet = hDatasetID; + } + + GH5_HIDSpaceHolder hFileSpace; + if (bRet) + { + hFileSpace.reset(H5_CHECK(H5Dget_space(hDatasetID))); + bRet = hFileSpace; + } + H5OFFSET_TYPE offset[] = {0}; + hsize_t count[1] = {1}; + const char *pszName = "uncertainty"; + GByte abyValues[sizeof(char *) + sizeof(float)]; + memcpy(abyValues, &pszName, sizeof(char **)); + const char *pszUncertainty = m_aosOptions.FetchNameValueDef( + "UNCERTAINTY", m_poSrcDS->GetMetadataItem("uncertainty")); + float fVal = + pszUncertainty ? 
static_cast<float>(CPLAtof(pszUncertainty)) : -1.0f;
+    CPL_LSBPTR32(&fVal);
+    memcpy(abyValues + sizeof(char *), &fVal, sizeof(fVal));
+    bRet = bRet &&
+           H5_CHECK(H5Sselect_hyperslab(hFileSpace, H5S_SELECT_SET, offset,
+                                        nullptr, count, nullptr)) >= 0 &&
+           H5_CHECK(H5Dwrite(hDatasetID, hDataType, hDataSpace, hFileSpace,
+                             H5P_DEFAULT, abyValues)) >= 0;
+    return bRet;
+}
+
+/************************************************************************/
+/*               S104Creator::FillFeatureInstanceGroup()                */
+/************************************************************************/
+
+bool S104Creator::FillFeatureInstanceGroup(
+    const std::map<std::string, std::variant<std::string, GDALDataset *>>
+        &oMapTimestampToDS,
+    GDALProgressFunc pfnProgress, void *pProgressData)
+{
+    bool ret = WriteFIGGridRelatedParameters(m_featureInstanceGroup);
+
+    const int numInstances = static_cast<int>(oMapTimestampToDS.size());
+
+    ret =
+        ret && WriteNumGRP(m_featureInstanceGroup, H5T_STD_U32LE, numInstances);
+    ret = ret && WriteUInt32Value(m_featureInstanceGroup, "numberOfTimes",
+                                  numInstances);
+
+    // Check if value groups are spaced at a regular time interval
+    GIntBig nLastInterval = 0;
+    GIntBig nLastTS = 0;
+    for (const auto &[key, value] : oMapTimestampToDS)
+    {
+        CPL_IGNORE_RET_VAL(value);
+        int nYear, nMonth, nDay, nHour, nMinute, nSecond;
+        if (sscanf(key.c_str(), "%04d%02d%02dT%02d%02d%02dZ", &nYear, &nMonth,
+                   &nDay, &nHour, &nMinute, &nSecond) == 6)
+        {
+            struct tm brokenDown;
+            memset(&brokenDown, 0, sizeof(brokenDown));
+            brokenDown.tm_year = nYear - 1900;
+            brokenDown.tm_mon = nMonth - 1;
+            brokenDown.tm_mday = nDay;
+            brokenDown.tm_hour = nHour;
+            brokenDown.tm_min = nMinute;
+            brokenDown.tm_sec = nSecond;
+            const GIntBig nTS = CPLYMDHMSToUnixTime(&brokenDown);
+            if (nLastTS != 0)
+            {
+                if (nLastInterval == 0)
+                {
+                    nLastInterval = nTS - nLastTS;
+                }
+                else if (nLastInterval != nTS - nLastTS)
+                {
+                    nLastInterval = 0;
+                    break;
+                }
+            }
+            nLastTS = nTS;
+        }
+    }
+
+    const char *pszTimeRecordInterval = m_aosOptions.FetchNameValueDef(
+        "TIME_RECORD_INTERVAL",
+        m_poSrcDS->GetMetadataItem("timeRecordInterval"));
+    if (pszTimeRecordInterval)
+    {
+        ret = ret &&
+              WriteUInt16Value(m_featureInstanceGroup, "timeRecordInterval",
+                               atoi(pszTimeRecordInterval));
+    }
+    else if (nLastInterval > 0 && nLastInterval < 65536)
+    {
+        ret = ret &&
+              WriteUInt16Value(m_featureInstanceGroup, "timeRecordInterval",
+                               static_cast<int>(nLastInterval));
+    }
+
+    ret = ret && WriteVarLengthStringValue(
+                     m_featureInstanceGroup, "dateTimeOfFirstRecord",
+                     oMapTimestampToDS.begin()->first.c_str());
+    ret = ret && WriteVarLengthStringValue(
+                     m_featureInstanceGroup, "dateTimeOfLastRecord",
+                     oMapTimestampToDS.rbegin()->first.c_str());
+
+    const char *pszDataDynamicity = m_aosOptions.FetchNameValueDef(
+        "DATA_DYNAMICITY", m_poSrcDS->GetMetadataItem("dataDynamicity"));
+    if (!pszDataDynamicity)
+    {
+        CPLError(CE_Failure, CPLE_AppDefined,
+                 "DATA_DYNAMICITY creation option must "
+                 "be specified.");
+        return false;
+    }
+    {
+        GH5_HIDTypeHolder hDataDynamicityEnumDataType(
+            H5_CHECK(H5Tenum_create(H5T_STD_U8LE)));
+        ret = ret && hDataDynamicityEnumDataType;
+
+        uint8_t val;
+        val = 1;
+        ret = ret && H5_CHECK(H5Tenum_insert(hDataDynamicityEnumDataType,
+                                             "observation", &val)) >= 0;
+        val = 2;
+        ret = ret &&
+              H5_CHECK(H5Tenum_insert(hDataDynamicityEnumDataType,
+                                      "astronomicalPrediction", &val)) >= 0;
+        val = 3;
+        ret = ret && H5_CHECK(H5Tenum_insert(hDataDynamicityEnumDataType,
+                                             "analysisOrHybrid", &val)) >= 0;
+        val = 5;
+        ret =
+            ret && H5_CHECK(H5Tenum_insert(hDataDynamicityEnumDataType,
"hydrodynamicForecast", &val)) >= 0; + + const int nDataDynamicity = + EQUAL(pszDataDynamicity, "observation") ? 1 + : EQUAL(pszDataDynamicity, "astronomicalPrediction") ? 2 + : EQUAL(pszDataDynamicity, "analysisOrHybrid") ? 3 + : EQUAL(pszDataDynamicity, "hydrodynamicForecast") + ? 5 + : atoi(pszDataDynamicity); + if (nDataDynamicity != 1 && nDataDynamicity != 2 && + nDataDynamicity != 3 && nDataDynamicity != 5) + { + CPLError(CE_Failure, CPLE_AppDefined, + "DATA_DYNAMICITY creation option must " + "be set to observation/1, astronomicalPrediction/2, " + "analysisOrHybrid/3 or hydrodynamicForecast/5."); + return false; + } + ret = ret && + GH5_CreateAttribute(m_featureInstanceGroup, "dataDynamicity", + hDataDynamicityEnumDataType) && + GH5_WriteAttribute(m_featureInstanceGroup, "dataDynamicity", + nDataDynamicity); + } + + if (m_poSrcDS->GetRasterCount() == 2 || + m_aosOptions.FetchNameValue("UNCERTAINTY")) + { + ret = ret && WriteUncertaintyDataset(); + } + + int iInstance = 0; + double dfLastRatio = 0; + for (const auto &iter : oMapTimestampToDS) + { + ++iInstance; + ret = ret && CreateValuesGroup(CPLSPrintf("Group_%03d", iInstance)); + + ret = ret && WriteVarLengthStringValue(m_valuesGroup, "timePoint", + iter.first.c_str()); + + std::unique_ptr poTmpDSHolder; + GDALDataset *poSrcDS; + if (std::holds_alternative(iter.second)) + { + poTmpDSHolder.reset( + GDALDataset::Open(std::get(iter.second).c_str(), + GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR)); + if (!poTmpDSHolder) + { + return false; + } + poSrcDS = poTmpDSHolder.get(); + } + else + { + CPLAssert(std::holds_alternative(iter.second)); + poSrcDS = std::get(iter.second); + } + + const double dfNewRatio = static_cast(iInstance) / numInstances; + std::unique_ptr + pScaledProgressData( + GDALCreateScaledProgress(dfLastRatio, dfNewRatio, pfnProgress, + pProgressData), + GDALDestroyScaledProgress); + ret = ret && CopyValues(poSrcDS, GDALScaledProgress, + pScaledProgressData.get()); + dfLastRatio = dfNewRatio; + } + + return ret; +} + +/************************************************************************/ +/* S104Creator::CreateGroupF() */ +/************************************************************************/ + +// Per S-104 v2.0 spec +#define MIN_WATER_LEVEL_HEIGHT_VALUE -99.99 +#define MAX_WATER_LEVEL_HEIGHT_VALUE 99.99 + +#define STRINGIFY(x) #x +#define XSTRINGIFY(x) STRINGIFY(x) + +bool S104Creator::CreateGroupF() +{ + bool ret = S100BaseWriter::CreateGroupF(); + + CPLStringList aosFeatureCodes; + aosFeatureCodes.push_back(FEATURE_TYPE); + ret = ret && WriteOneDimensionalVarLengthStringArray( + m_GroupF, "featureCode", aosFeatureCodes.List()); + + { + std::vector> rows{ + {"waterLevelHeight", "Water Level Height", "metre", "-9999.00", + "H5T_FLOAT", XSTRINGIFY(MIN_WATER_LEVEL_HEIGHT_VALUE), + XSTRINGIFY(MAX_WATER_LEVEL_HEIGHT_VALUE), "closedInterval"}, + {"waterLevelTrend", "Water Level Trend", "", "0", "H5T_ENUM", "", + "", ""}, + {"uncertainty", "Uncertainty", "metre", "-1.00", "H5T_FLOAT", + "0.00", "99.99", "closedInterval"}}; + rows.resize(m_poSrcDS->GetRasterCount()); + ret = ret && WriteGroupFDataset(FEATURE_TYPE, rows); + } + + return ret; +} + +/************************************************************************/ +/* S104Creator::CopyValues() */ +/************************************************************************/ + +bool S104Creator::CopyValues(GDALDataset *poSrcDS, GDALProgressFunc pfnProgress, + void *pProgressData) +{ + CPLAssert(m_valuesGroup.get() >= 0); + + const int nYSize = poSrcDS->GetRasterYSize(); 
+ const int nXSize = poSrcDS->GetRasterXSize(); + + hsize_t dims[] = {static_cast(nYSize), + static_cast(nXSize)}; + + GH5_HIDSpaceHolder hDataSpace(H5_CHECK(H5Screate_simple(2, dims, nullptr))); + bool bRet = hDataSpace; + + const bool bDeflate = + EQUAL(m_aosOptions.FetchNameValueDef("COMPRESS", "DEFLATE"), "DEFLATE"); + const int nCompressionLevel = + atoi(m_aosOptions.FetchNameValueDef("ZLEVEL", "6")); + const int nBlockSize = + std::min(4096, std::max(100, atoi(m_aosOptions.FetchNameValueDef( + "BLOCK_SIZE", "100")))); + const int nBlockXSize = std::min(nXSize, nBlockSize); + const int nBlockYSize = std::min(nYSize, nBlockSize); + constexpr float fNoDataValueHeight = -9999.0f; + constexpr GByte nNoDataValueTrend = 0; + constexpr float fNoDataValueUncertainty = -1.0f; + const int nComponents = poSrcDS->GetRasterCount(); + + GH5_HIDTypeHolder hTrendEnumDataType( + H5_CHECK(H5Tenum_create(H5T_STD_U8LE))); + bRet = bRet && hTrendEnumDataType; + { + uint8_t val; + val = 1; + bRet = bRet && H5_CHECK(H5Tenum_insert(hTrendEnumDataType, "Decreasing", + &val)) >= 0; + val = 2; + bRet = bRet && H5_CHECK(H5Tenum_insert(hTrendEnumDataType, "Increasing", + &val)) >= 0; + val = 3; + bRet = bRet && H5_CHECK(H5Tenum_insert(hTrendEnumDataType, "Steady", + &val)) >= 0; + } + + GH5_HIDTypeHolder hDataType(H5_CHECK( + H5Tcreate(H5T_COMPOUND, sizeof(float) + sizeof(GByte) + + (nComponents == 3 ? sizeof(float) : 0)))); + bRet = bRet && hDataType && + H5_CHECK(H5Tinsert(hDataType, "waterLevelHeight", 0, + H5T_IEEE_F32LE)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "waterLevelTrend", sizeof(float), + hTrendEnumDataType)) >= 0; + if (nComponents == 3 && bRet) + { + bRet = H5_CHECK(H5Tinsert(hDataType, "uncertainty", + sizeof(float) + sizeof(GByte), + H5T_IEEE_F32LE)) >= 0; + } + + hsize_t chunk_size[] = {static_cast(nBlockYSize), + static_cast(nBlockXSize)}; + + GH5_HIDParametersHolder hParams(H5_CHECK(H5Pcreate(H5P_DATASET_CREATE))); + bRet = bRet && hParams && + H5_CHECK(H5Pset_fill_time(hParams, H5D_FILL_TIME_ALLOC)) >= 0 && + H5_CHECK(H5Pset_layout(hParams, H5D_CHUNKED)) >= 0 && + H5_CHECK(H5Pset_chunk(hParams, 2, chunk_size)) >= 0; + + if (bRet && bDeflate) + { + bRet = H5_CHECK(H5Pset_deflate(hParams, nCompressionLevel)) >= 0; + } + + GH5_HIDDatasetHolder hDatasetID; + if (bRet) + { + hDatasetID.reset(H5_CHECK(H5Dcreate(m_valuesGroup, "values", hDataType, + hDataSpace, hParams))); + bRet = hDatasetID; + } + + GH5_HIDSpaceHolder hFileSpace; + if (bRet) + { + hFileSpace.reset(H5_CHECK(H5Dget_space(hDatasetID))); + bRet = hFileSpace; + } + + const int nYBlocks = static_cast(DIV_ROUND_UP(nYSize, nBlockYSize)); + const int nXBlocks = static_cast(DIV_ROUND_UP(nXSize, nBlockXSize)); + std::vector afValues(static_cast(nBlockYSize) * nBlockXSize * + nComponents); + std::vector abyValues( + static_cast(nBlockYSize) * nBlockXSize * + (sizeof(float) + sizeof(GByte) + sizeof(float))); + const bool bReverseY = m_gt[5] < 0; + + float fMinHeight = std::numeric_limits::infinity(); + float fMaxHeight = -std::numeric_limits::infinity(); + float fMinTrend = std::numeric_limits::infinity(); + float fMaxTrend = -std::numeric_limits::infinity(); + float fMinUncertainty = std::numeric_limits::infinity(); + float fMaxUncertainty = -std::numeric_limits::infinity(); + + int bHasNoDataBand1 = FALSE; + const double dfSrcNoDataBand1 = + poSrcDS->GetRasterBand(1)->GetNoDataValue(&bHasNoDataBand1); + const float fSrcNoDataBand1 = static_cast(dfSrcNoDataBand1); + + int bHasNoDataBand3 = FALSE; + const double dfSrcNoDataBand3 = + 
nComponents == 3 + ? poSrcDS->GetRasterBand(3)->GetNoDataValue(&bHasNoDataBand3) + : 0.0; + const float fSrcNoDataBand3 = static_cast(dfSrcNoDataBand3); + + for (int iY = 0; iY < nYBlocks && bRet; iY++) + { + const int nSrcYOff = bReverseY + ? std::max(0, nYSize - (iY + 1) * nBlockYSize) + : iY * nBlockYSize; + const int nReqCountY = std::min(nBlockYSize, nYSize - iY * nBlockYSize); + for (int iX = 0; iX < nXBlocks && bRet; iX++) + { + const int nReqCountX = + std::min(nBlockXSize, nXSize - iX * nBlockXSize); + + bRet = + poSrcDS->RasterIO( + GF_Read, iX * nBlockXSize, nSrcYOff, nReqCountX, nReqCountY, + bReverseY ? afValues.data() + + (nReqCountY - 1) * nReqCountX * nComponents + : afValues.data(), + nReqCountX, nReqCountY, GDT_Float32, nComponents, nullptr, + static_cast(sizeof(float)) * nComponents, + bReverseY ? -static_cast(sizeof(float)) * + nComponents * nReqCountX + : 0, + sizeof(float), nullptr) == CE_None; + + if (bRet) + { + size_t nOffset = 0; + for (int i = 0; i < nReqCountY * nReqCountX; i++) + { + { + float fVal = afValues[i * nComponents]; + if ((bHasNoDataBand1 && fVal == fSrcNoDataBand1) || + std::isnan(fVal)) + { + fVal = fNoDataValueHeight; + } + else + { + fMinHeight = std::min(fMinHeight, fVal); + fMaxHeight = std::max(fMaxHeight, fVal); + } + CPL_LSBPTR32(&fVal); + memcpy(abyValues.data() + nOffset, &fVal, sizeof(fVal)); + nOffset += sizeof(fVal); + } + { + const float fVal = afValues[i * nComponents + 1]; + if (fVal != nNoDataValueTrend) + { + fMinTrend = std::min(fMinTrend, fVal); + fMaxTrend = std::max(fMaxTrend, fVal); + } + abyValues[nOffset] = static_cast(fVal); + nOffset += sizeof(GByte); + } + if (nComponents == 3) + { + float fVal = afValues[i * nComponents + 2]; + if ((bHasNoDataBand3 && fVal == fSrcNoDataBand3) || + std::isnan(fVal)) + { + fVal = fNoDataValueUncertainty; + } + else + { + fMinUncertainty = std::min(fMinUncertainty, fVal); + fMaxUncertainty = std::max(fMaxUncertainty, fVal); + } + CPL_LSBPTR32(&fVal); + memcpy(abyValues.data() + nOffset, &fVal, sizeof(fVal)); + nOffset += sizeof(fVal); + } + } + } + + H5OFFSET_TYPE offset[] = { + static_cast(iY) * + static_cast(nBlockYSize), + static_cast(iX) * + static_cast(nBlockXSize)}; + hsize_t count[2] = {static_cast(nReqCountY), + static_cast(nReqCountX)}; + GH5_HIDSpaceHolder hMemSpace( + H5_CHECK(H5Screate_simple(2, count, nullptr))); + bRet = + bRet && + H5_CHECK(H5Sselect_hyperslab(hFileSpace, H5S_SELECT_SET, offset, + nullptr, count, nullptr)) >= 0 && + hMemSpace && + H5_CHECK(H5Dwrite(hDatasetID, hDataType, hMemSpace, hFileSpace, + H5P_DEFAULT, abyValues.data())) >= 0 && + pfnProgress((static_cast(iY) * nXBlocks + iX + 1) / + (static_cast(nXBlocks) * nYBlocks), + "", pProgressData) != 0; + } + } + + if (fMinHeight > fMaxHeight) + { + fMinHeight = fMaxHeight = fNoDataValueHeight; + } + else if (!(fMinHeight >= MIN_WATER_LEVEL_HEIGHT_VALUE && + fMaxHeight <= MAX_WATER_LEVEL_HEIGHT_VALUE)) + { + CPLError(CE_Warning, CPLE_AppDefined, + "Range of water level height in the dataset is [%f, %f] " + "whereas the " + "allowed range is [%.2f, %.2f]", + fMinHeight, fMaxHeight, MIN_WATER_LEVEL_HEIGHT_VALUE, + MAX_WATER_LEVEL_HEIGHT_VALUE); + } + + if (fMaxTrend >= fMinTrend && fMinTrend < 1) + { + CPLError( + CE_Warning, CPLE_AppDefined, + "Negative water level trend value found, which is not allowed"); + } + if (fMaxTrend >= fMinTrend && fMaxTrend > 3) + { + CPLError(CE_Warning, CPLE_AppDefined, + "Water level trend value > 3 found, which is not allowed"); + } + + if (fMaxUncertainty >= fMinUncertainty && 
fMinUncertainty < 0) + { + CPLError(CE_Warning, CPLE_AppDefined, + "Negative uncertainty value found (%f), which is not allowed " + "(except nodata value -1.0)", + fMinUncertainty); + } + + if (bRet) + { + double prevMinHeight = 0; + double prevMaxHeight = 0; + if (GH5_FetchAttribute(m_featureGroup, "minDatasetHeight", + prevMinHeight) && + GH5_FetchAttribute(m_featureGroup, "maxDatasetHeight", + prevMaxHeight)) + { + if (fMinHeight != fNoDataValueHeight) + { + prevMinHeight = std::min(prevMinHeight, fMinHeight); + prevMaxHeight = std::max(prevMaxHeight, fMaxHeight); + bRet = GH5_WriteAttribute(m_featureGroup, "minDatasetHeight", + prevMinHeight) && + GH5_WriteAttribute(m_featureGroup, "maxDatasetHeight", + prevMaxHeight); + } + } + else + { + bRet = WriteFloat32Value(m_featureGroup, "minDatasetHeight", + fMinHeight) && + WriteFloat32Value(m_featureGroup, "maxDatasetHeight", + fMaxHeight); + } + } + + return bRet; +} + /************************************************************************/ /* S104DatasetDriverUnload() */ /************************************************************************/ @@ -640,6 +1761,35 @@ static void S104DatasetDriverUnload(GDALDriver *) HDF5UnloadFileDriver(); } +/************************************************************************/ +/* S104Dataset::CreateCopy() */ +/************************************************************************/ + +/* static */ +GDALDataset *S104Dataset::CreateCopy(const char *pszFilename, + GDALDataset *poSrcDS, int /* bStrict*/, + char **papszOptions, + GDALProgressFunc pfnProgress, + void *pProgressData) +{ + S104Creator creator(pszFilename, poSrcDS, papszOptions); + if (!creator.Create(pfnProgress, pProgressData)) + return nullptr; + + VSIStatBufL sStatBuf; + if (VSIStatL(pszFilename, &sStatBuf) == 0 && + sStatBuf.st_size > 10 * 1024 * 1024) + { + CPLError(CE_Warning, CPLE_AppDefined, + "%s file size exceeds 10 MB, which is the upper limit " + "suggested for wireless transmission to marine vessels", + pszFilename); + } + + GDALOpenInfo oOpenInfo(pszFilename, GA_ReadOnly); + return Open(&oOpenInfo); +} + /************************************************************************/ /* GDALRegister_S104() */ /************************************************************************/ @@ -656,6 +1806,7 @@ void GDALRegister_S104() S104DriverSetCommonMetadata(poDriver); poDriver->pfnOpen = S104Dataset::Open; + poDriver->pfnCreateCopy = S104Dataset::CreateCopy; poDriver->pfnUnloadDriver = S104DatasetDriverUnload; GetGDALDriverManager()->RegisterDriver(poDriver); From d0e042001c7afaac8be0508d02f46103b4428c96 Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Wed, 5 Nov 2025 15:03:53 +0100 Subject: [PATCH 14/20] S111 reader: report speedUncertainty and directionUncertainty bands when present --- doc/source/drivers/raster/s111.rst | 5 ++++- frmts/hdf5/s111dataset.cpp | 33 +++++++++++++++++++++++++++++- 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/doc/source/drivers/raster/s111.rst b/doc/source/drivers/raster/s111.rst index 9cc7eae4eef5..187bcecf6cc5 100644 --- a/doc/source/drivers/raster/s111.rst +++ b/doc/source/drivers/raster/s111.rst @@ -13,13 +13,16 @@ S111 -- S-111 Surface Currents Product This driver provides read-only support for surface currents in the S-111 format, which is a specific product profile in an HDF5 file. 
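For illustration, a minimal standalone sketch (not part of this patch) of how
the bands exposed by this driver, including the optional uncertainty bands
added below, can be enumerated through GDAL's classic C++ API; the S-111 file
name inside the subdataset string is a hypothetical example::

    #include "gdal_priv.h"

    #include <cstdio>
    #include <memory>

    int main()
    {
        GDALAllRegister();
        // Subdataset names follow the pattern
        // S111:"{filename.h5}":SurfaceCurrent.{XX}:Group_{YYY}
        auto poDS = std::unique_ptr<GDALDataset>(GDALDataset::Open(
            "S111:\"111XX00_sample.h5\":SurfaceCurrent.01:Group_001",
            GDAL_OF_RASTER));
        if (!poDS)
            return 1;
        for (int i = 1; i <= poDS->GetRasterCount(); ++i)
        {
            GDALRasterBand *poBand = poDS->GetRasterBand(i);
            // Band 1 is surfaceCurrentSpeed (knot), band 2 is
            // surfaceCurrentDirection (degree); bands 3 and up only appear
            // when the compound 'values' records carry speedUncertainty /
            // directionUncertainty components.
            printf("Band %d: %s [%s]\n", i, poBand->GetDescription(),
                   poBand->GetUnitType());
        }
        return 0;
    }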
-S-111 files have two image bands representing the following values for each
+S-111 files have at a minimum two image bands representing the following values for each
 cell in a raster grid area:
 
 - surface current speed (band 1), in knots
 - surface current direction (band 2), in degree measured from true north
   clock-wise.
 
+Additional optional bands can be present to represent the speed uncertainty
+(in knots) and the direction uncertainty (in degrees).
+
 When opening a S-111 file, no raster band is directly available. But a list
 of subdatasets will be reported, one for each timestamp available in the
 file.

diff --git a/frmts/hdf5/s111dataset.cpp b/frmts/hdf5/s111dataset.cpp
index e1e3318ce83a..6b1371223efb 100644
--- a/frmts/hdf5/s111dataset.cpp
+++ b/frmts/hdf5/s111dataset.cpp
@@ -518,7 +518,7 @@ GDALDataset *S111Dataset::Open(GDALOpenInfo *poOpenInfo)
     }
 
     const auto &oComponents = oType.GetComponents();
-    if (!(oComponents.size() == 2 &&
+    if (!(oComponents.size() >= 2 &&
           ((oComponents[0]->GetName() == "surfaceCurrentSpeed" &&
             oComponents[0]->GetType().GetNumericDataType() == GDT_Float32 &&
             oComponents[1]->GetName() == "surfaceCurrentDirection" &&
@@ -631,6 +631,37 @@ GDALDataset *S111Dataset::Open(GDALOpenInfo *poOpenInfo)
             poSurfaceCurrentDirectionBand->GDALRasterBand::SetMetadataItem(
                 "ANGLE_CONVENTION", "From true north, clockwise");
             poDS->SetBand(2, poSurfaceCurrentDirectionBand.release());
+
+            for (size_t i = 2; i < oComponents.size(); ++i)
+            {
+                if (oComponents[i]->GetName() == "speedUncertainty" &&
+                    oComponents[i]->GetType().GetNumericDataType() == GDT_Float32)
+                {
+                    auto poSubArray =
+                        poValuesArray->GetView("[\"speedUncertainty\"]");
+                    auto poSubArrayDS = std::unique_ptr<GDALDataset>(
+                        poSubArray->AsClassicDataset(1, 0));
+                    auto poSubArrayBand =
+                        std::make_unique<S111RasterBand>(std::move(poSubArrayDS));
+                    poSubArrayBand->SetDescription("speedUncertainty");
+                    poSubArrayBand->m_osUnitType = "knot";
+                    poDS->SetBand(poDS->nBands + 1, poSubArrayBand.release());
+                }
+                else if (oComponents[i]->GetName() == "directionUncertainty" &&
+                         oComponents[i]->GetType().GetNumericDataType() ==
+                             GDT_Float32)
+                {
+                    auto poSubArray =
+                        poValuesArray->GetView("[\"directionUncertainty\"]");
+                    auto poSubArrayDS = std::unique_ptr<GDALDataset>(
+                        poSubArray->AsClassicDataset(1, 0));
+                    auto poSubArrayBand =
+                        std::make_unique<S111RasterBand>(std::move(poSubArrayDS));
+                    poSubArrayBand->SetDescription("directionUncertainty");
+                    poSubArrayBand->m_osUnitType = "degree";
+                    poDS->SetBand(poDS->nBands + 1, poSubArrayBand.release());
+                }
+            }
         }
 
     poDS->GDALDataset::SetMetadataItem(GDALMD_AREA_OR_POINT, GDALMD_AOP_POINT);

From 5eb7788e1d8569cc866f1d5e33718e37a6a4168b Mon Sep 17 00:00:00 2001
From: Even Rouault
Date: Wed, 5 Nov 2025 19:07:06 +0100
Subject: [PATCH 15/20] s111.rst: fix erroneous mention of several vertical
 datums

---
 doc/source/drivers/raster/s111.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/source/drivers/raster/s111.rst b/doc/source/drivers/raster/s111.rst
index 187bcecf6cc5..2ac4ed297e8a 100644
--- a/doc/source/drivers/raster/s111.rst
+++ b/doc/source/drivers/raster/s111.rst
@@ -33,8 +33,8 @@ Georeferencing is reported.
 
 Note that the driver currently only supports regularly gridded S111 datasets.
 
-Since GDAL 3.12, multiple feature instance groups per dataset (to encode grids
-using different vertical datums) are supported. In that case, each feature
+Since GDAL 3.12, multiple feature instance groups per dataset are supported.
+In that case, each feature instance group and timestamp group is exposed as a GDAL subdataset, whose name is of the form ``S111:"{filename.h5}":SurfaceCurrent.{XX}:Group_{YYY}``. From b7e870d27420cc42c2b830e7fdbe25efc010298b Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Wed, 5 Nov 2025 19:53:53 +0100 Subject: [PATCH 16/20] S111 reader: report timePoint metadata item --- autotest/gdrivers/s111.py | 1 + frmts/hdf5/s111dataset.cpp | 14 ++++++++++++++ 2 files changed, 15 insertions(+) diff --git a/autotest/gdrivers/s111.py b/autotest/gdrivers/s111.py index a9f2ee6ae5df..eefc8f0781ea 100755 --- a/autotest/gdrivers/s111.py +++ b/autotest/gdrivers/s111.py @@ -220,6 +220,7 @@ def test_s111_multiple_feature_instance_groups(): "minDatasetCurrentSpeed": "0", "numberOfTimes": "1", "surfaceCurrentDepth": "-4.5", + "timePoint": "20190606T120000Z", "timeRecordInterval": "3600", "uncertaintySurfaceCurrentDirection": "-1.000000", "uncertaintySurfaceCurrentSpeed": "-1.000000", diff --git a/frmts/hdf5/s111dataset.cpp b/frmts/hdf5/s111dataset.cpp index 6b1371223efb..7f3d40f1c07a 100644 --- a/frmts/hdf5/s111dataset.cpp +++ b/frmts/hdf5/s111dataset.cpp @@ -492,6 +492,20 @@ GDALDataset *S111Dataset::Open(GDALOpenInfo *poOpenInfo) return nullptr; } + // Read additional metadata + for (const char *pszAttrName : {"timePoint"}) + { + auto poAttr = poGroup->GetAttribute(pszAttrName); + if (poAttr) + { + const char *pszVal = poAttr->ReadAsString(); + if (pszVal) + { + poDS->GDALDataset::SetMetadataItem(pszAttrName, pszVal); + } + } + } + auto poValuesArray = poGroup->OpenMDArray("values"); if (!poValuesArray) { From 7e99235bce95554865f3fb3c61821273bb4b2ed7 Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Wed, 5 Nov 2025 19:55:05 +0100 Subject: [PATCH 17/20] S111 reader: report dataDynamicity metadata item --- autotest/gdrivers/s111.py | 1 + frmts/hdf5/s111dataset.cpp | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/autotest/gdrivers/s111.py b/autotest/gdrivers/s111.py index eefc8f0781ea..ca46dd119fc5 100755 --- a/autotest/gdrivers/s111.py +++ b/autotest/gdrivers/s111.py @@ -210,6 +210,7 @@ def test_s111_multiple_feature_instance_groups(): "VERTICAL_DATUM_MEANING": "meanLowerLowWater", "verticalCS": "6498", "VERTICAL_CS_MEANING": "depth, meters, orientation down", + "dataDynamicity": "5", "dateTimeOfFirstRecord": "20190606T120000Z", "dateTimeOfLastRecord": "20190606T120000Z", "depthTypeIndex": "1", diff --git a/frmts/hdf5/s111dataset.cpp b/frmts/hdf5/s111dataset.cpp index 7f3d40f1c07a..ef17ec824f77 100644 --- a/frmts/hdf5/s111dataset.cpp +++ b/frmts/hdf5/s111dataset.cpp @@ -334,7 +334,7 @@ GDALDataset *S111Dataset::Open(GDALOpenInfo *poOpenInfo) // Read additional metadata for (const char *pszAttrName : {"timeRecordInterval", "dateTimeOfFirstRecord", "dateTimeOfLastRecord", - "numberOfTimes"}) + "numberOfTimes", "dataDynamicity"}) { auto poAttr = poFeatureInstance->GetAttribute(pszAttrName); if (poAttr) From 90970819c91c51de26692098f14c1ab36038ead5 Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Wed, 5 Nov 2025 20:47:29 +0100 Subject: [PATCH 18/20] S111 reader: make it more tolerant about type of 'value' component in 'uncertainty' array --- frmts/hdf5/s111dataset.cpp | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/frmts/hdf5/s111dataset.cpp b/frmts/hdf5/s111dataset.cpp index ef17ec824f77..072f4c70c456 100644 --- a/frmts/hdf5/s111dataset.cpp +++ b/frmts/hdf5/s111dataset.cpp @@ -385,7 +385,9 @@ GDALDataset *S111Dataset::Open(GDALOpenInfo *poOpenInfo) 
oComponents[0]->GetName() == "name" &&
         oComponents[0]->GetType().GetClass() == GEDTC_STRING &&
         oComponents[1]->GetName() == "value" &&
-        oComponents[1]->GetType().GetNumericDataType() == GDT_Float64)
+        (oComponents[1]->GetType().GetNumericDataType() ==
+             GDT_Float32 ||
+         oComponents[1]->GetType().GetNumericDataType() == GDT_Float64))
     {
         auto poName = poUncertainty->GetView("[\"name\"]");
         auto poValue = poUncertainty->GetView("[\"value\"]");
@@ -400,9 +402,9 @@ GDALDataset *S111Dataset::Open(GDALOpenInfo *poOpenInfo)
                 if (poName->Read(arrayStartIdx, count, arrayStep,
                                  bufferStride, oComponents[0]->GetType(),
                                  apszStr) &&
-                    poValue->Read(arrayStartIdx, count, arrayStep,
-                                  bufferStride, oComponents[1]->GetType(),
-                                  adfVals))
+                    poValue->Read(
+                        arrayStartIdx, count, arrayStep, bufferStride,
+                        GDALExtendedDataType::Create(GDT_Float64), adfVals))
                 {
                     for (int i = 0; i < 2; ++i)
                     {

From bfb05ddc8e7da0dd8239bb59ffba0f604ed3f60f Mon Sep 17 00:00:00 2001
From: Even Rouault
Date: Wed, 5 Nov 2025 17:12:12 +0100
Subject: [PATCH 19/20] Add validate_s111.py sample validation script

---
 .../osgeo_utils/samples/validate_s111.py     | 2473 +++++++++++++++++
 1 file changed, 2473 insertions(+)
 create mode 100755 swig/python/gdal-utils/osgeo_utils/samples/validate_s111.py

diff --git a/swig/python/gdal-utils/osgeo_utils/samples/validate_s111.py b/swig/python/gdal-utils/osgeo_utils/samples/validate_s111.py
new file mode 100755
index 000000000000..965e4a78f05e
--- /dev/null
+++ b/swig/python/gdal-utils/osgeo_utils/samples/validate_s111.py
@@ -0,0 +1,2473 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+###############################################################################
+#
+# Project: GDAL/OGR
+# Purpose: Test compliance of IHO S111 v2.0 dataset
+# Author: Even Rouault
+#
+###############################################################################
+# Copyright (c) 2025, Even Rouault
+#
+# SPDX-License-Identifier: MIT
+###############################################################################
+
+# Validates against
+# https://registry.iho.int/productspec/view.do?idx=209&product_ID=S-111&statusS=5&domainS=ALL&category=product_ID&searchValue= and
+# https://iho.int/uploads/user/pubs/standards/s-100/S-100_5.2.0_Final_Clean.pdf
+
+# "111_DevXXXX" are for traceability with respect to requirements of the spreadsheet:
+# https://github.com/iho-ohi/S-100-Validation-Checks/blob/main/Documents/S-158-111/0.2.0/S-158_111_0_2_0_20241209.xlsx
+# Note that there are a few checks in that spreadsheet that are specific only to 1.1.0, and not 2.0.0...
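+#
+# Illustrative programmatic usage of the S111Checker class defined below
+# (the input file name is a hypothetical example):
+#
+#   checker = S111Checker("111XX00_sample.h5", abort_at_first_error=False)
+#   checker.check()
+#   for severity, msg in checker.errors:
+#       print(f"{severity}: {msg}")
+#   for msg in checker.warnings:
+#       print(f"Warning: {msg}")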
+ + +import os +import re +import struct +import sys + +# Standard Python modules +from collections import namedtuple + +# Extension modules +import h5py +import numpy as np + +try: + from osgeo import osr + + osr.UseExceptions() + gdal_available = True +except ImportError: + gdal_available = False + +ERROR = "Error" +CRITICAL_ERROR = "Critical error" + +AttributeDefinition = namedtuple( + "AttributeDefinition", ["name", "required", "type", "fixed_value"] +) + + +def _get_int_value_or_none(v): + try: + return int(v) + except ValueError: + return None + + +def _get_int_attr_or_none(group, attr_name): + if attr_name not in group.attrs: + return None + return _get_int_value_or_none(group.attrs[attr_name]) + + +def _get_float_value_or_none(v): + try: + return float(v) + except ValueError: + return None + + +def _get_float_attr_or_none(group, attr_name): + if attr_name not in group.attrs: + return None + return _get_float_value_or_none(group.attrs[attr_name]) + + +def _cast_to_float32(v): + return struct.unpack("f", struct.pack("f", v))[0] + + +class S111ValidationException(Exception): + pass + + +class S111Checker: + def __init__(self, filename, abort_at_first_error=False): + self.filename = filename + self.abort_at_first_error = abort_at_first_error + self.errors = [] + self.warnings = [] + self.checks_done = set([]) + + def _log_check(self, name): + self.checks_done.add(name) + + def _warning(self, msg): + self.warnings += [msg] + + def _error(self, msg): + self.errors += [(ERROR, msg)] + if self.abort_at_first_error: + raise S111ValidationException(f"{ERROR}: {msg}") + + def _critical_error(self, msg): + self.errors += [(CRITICAL_ERROR, msg)] + if self.abort_at_first_error: + raise S111ValidationException(f"{CRITICAL_ERROR}: {msg}") + + def _is_uint8(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_NONE + and h5_type.get_size() == 1 + ) + + def _is_uint16(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_NONE + and h5_type.get_size() == 2 + ) + + def _is_uint32(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_NONE + and h5_type.get_size() == 4 + ) + + def _is_int16(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_2 + and h5_type.get_size() == 2 + ) + + def _is_int32(self, h5_type): + return ( + isinstance(h5_type, h5py.h5t.TypeIntegerID) + and h5_type.get_sign() == h5py.h5t.SGN_2 + and h5_type.get_size() == 4 + ) + + def _is_float32(self, h5_type): + return isinstance(h5_type, h5py.h5t.TypeFloatID) and h5_type.get_size() == 4 + + def _is_float64(self, h5_type): + return isinstance(h5_type, h5py.h5t.TypeFloatID) and h5_type.get_size() == 8 + + def _is_string(self, h5_type): + return isinstance(h5_type, h5py.h5t.TypeStringID) + + def _is_enumeration(self, h5_type): + return isinstance(h5_type, h5py.h5t.TypeEnumID) + + def _check_attributes(self, ctxt_name, group, attr_list): + + for attr_def in attr_list: + if attr_def.required and attr_def.name not in group.attrs: + # 111_Dev1002: check presence of required attributes + self._error( + f"Required {ctxt_name} attribute '{attr_def.name}' is missing" + ) + + elif attr_def.name in group.attrs: + attr = group.attrs[attr_def.name] + if isinstance(attr, bytes): + attr = attr.decode("utf-8") + h5_type = group.attrs.get_id(attr_def.name).get_type() + + # 111_Dev1002: check type + + if 
attr_def.type == "string": + if not self._is_string(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a string " + ) + + elif attr_def.type == "time": + if not self._is_string(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a string" + ) + + pattern = re.compile( + r"^(?:[01]\d|2[0-3])[0-5]\d[0-5]\d(?:Z|[+-](?:[01]\d|2[0-3])[0-5]\d)$" + ) + if not pattern.match(attr): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a valid time: {attr}" + ) + + elif attr_def.type == "date": + if not isinstance(h5_type, h5py.h5t.TypeStringID): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a string" + ) + elif h5_type.get_size() != 8: + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a 8-character string" + ) + + pattern = re.compile( + r"^(?:[0-9]{4})(?:(?:0[1-9]|1[0-2])(?:0[1-9]|[12][0-9]|3[01]))$" + ) + if not pattern.match(attr): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a valid date: {attr}" + ) + + elif attr_def.type == "datetime": + if not isinstance(h5_type, h5py.h5t.TypeStringID): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a string" + ) + pattern = re.compile( + r"^(?:" + r"([0-9]{4})" # year + r"(?:(?:0[1-9]|1[0-2])" # month + r"(?:0[1-9]|[12][0-9]|3[01]))" # day + r"T" # literal 'T' separator + r"(?:[01]\d|2[0-3])" # hour + r"[0-5]\d" # minute + r"[0-5]\d" # second + r"(?:Z|[+-](?:[01]\d|2[0-3])[0-5]\d)" # timezone (Z or hhmm) + r")$" + ) + if not pattern.match(attr): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a valid datetime: {attr}" + ) + + elif attr_def.type == "uint8": + if not self._is_uint8(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a uint8" + ) + + elif attr_def.type == "uint16": + if not self._is_uint16(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a uint16" + ) + + elif attr_def.type == "uint32": + if not self._is_uint32(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a uint32" + ) + + elif attr_def.type == "int32": + if not self._is_int32(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a int32" + ) + + elif attr_def.type == "float32": + if not self._is_float32(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a float32" + ) + + elif attr_def.type == "float64": + if not self._is_float64(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not a float64" + ) + + elif attr_def.type == "enumeration": + if not self._is_enumeration(h5_type): + self._error( + f"{ctxt_name} attribute '{attr_def.name}' is not an enumeration" + ) + + else: + + raise Exception( + f"Programming error: unexpected type {attr_def.type}" + ) + + if attr_def.fixed_value: + if attr != attr_def.fixed_value: + self._error( + f"{ctxt_name} attribute '{attr_def.name}' has value '{attr}', whereas '{attr_def.fixed_value}' is expected" + ) + + attr_dict = {a.name: a for a in attr_list} + for attr in group.attrs: + if attr not in attr_dict: + self._warning(f"Extra element in {ctxt_name} group: '{attr}'") + + def check(self): + + try: + f = h5py.File(self.filename, "r") + except Exception as e: + self._critical_error(str(e)) + return + + self._log_check("111_Dev9005") + file_size = os.stat(self.filename).st_size + if file_size > 10 * 1024 * 1024: + self._warning( + f"File size of {self.filename} = {file_size}, which exceeds 10 MB" + ) + + basename = os.path.basename(self.filename) + if not 
basename.startswith("111"): + self._warning("File name should start with '111'") + if not basename.upper().endswith(".H5"): + self._warning("File name should end with '.H5'") + pattern = r"^111[a-zA-Z0-9]{4}[a-zA-Z0-9\-_]{1,54}\.(?:h5|H5)$" + if not re.match(pattern, basename): + self._warning( + f"File name '{basename}' does not match expected pattern '{pattern}'" + ) + + self._log_check("111_Dev1018") + for key in f.keys(): + if key not in ( + "Group_F", + "SurfaceCurrent", + ): + self._warning(f"Unexpected element {key} in top level group") + + self._log_check("111_Dev1001") + if "Group_F" in f.keys(): + self._validate_group_f(f, f["Group_F"]) + else: + self._critical_error("No feature information group ('Group_F')") + + # Cf Table 12-1 - General metadata, related to the entire HDF5 file + topLevelAttributesList = [ + AttributeDefinition( + name="productSpecification", + required=True, + type="string", + fixed_value="INT.IHO.S-111.2.0", + ), + AttributeDefinition( + name="issueDate", required=True, type="date", fixed_value=None + ), + AttributeDefinition( + name="horizontalCRS", required=True, type="int32", fixed_value=None + ), + AttributeDefinition( + name="westBoundLongitude", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="eastBoundLongitude", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="southBoundLatitude", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="northBoundLatitude", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="geographicIdentifier", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="nameOfHorizontalCRS", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="typeOfHorizontalCRS", + required=False, + type="enumeration", + fixed_value=None, + ), + AttributeDefinition( + name="horizontalCS", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="horizontalDatum", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="nameOfHorizontalDatum", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="primeMeridian", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="spheroid", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="projectionMethod", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="projectionParameter1", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="projectionParameter2", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="projectionParameter3", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="projectionParameter4", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="projectionParameter5", + required=False, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="falseNorthing", required=False, type="float64", fixed_value=None + ), + AttributeDefinition( + name="falseEasting", required=False, type="float64", fixed_value=None + ), + AttributeDefinition( + name="epoch", required=False, type="string", fixed_value=None + ), + AttributeDefinition( + name="issueTime", required=True, type="time", fixed_value=None + ), + AttributeDefinition( + 
name="datasetDeliveryInterval", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="depthTypeIndex", + required=True, + type="enumeration", + fixed_value=None, + ), + AttributeDefinition( + name="surfaceCurrentDepth", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="verticalCS", required=False, type="int32", fixed_value=None + ), + AttributeDefinition( + name="verticalCoordinateBase", + required=True, + type="enumeration", + fixed_value=2, + ), + AttributeDefinition( + name="verticalDatumReference", + required=False, + type="enumeration", + fixed_value=None, + ), + # S111_Dev1003 + AttributeDefinition( + name="verticalDatum", required=False, type="int32", fixed_value=None + ), + ] + + self._log_check("111_Dev1002") + self._log_check("111_Dev1003") + self._check_attributes("top level", f, topLevelAttributesList) + + self._log_check("111_Dev1004") + if "epoch" in f.attrs and not f.attrs["epoch"]: + self._warning("Attribute epoch present but empty or blank.") + + self._log_check("111_Dev1005") + if "verticalDatum" in f.attrs and not f.attrs["verticalDatum"]: + self._warning("Attribute verticalDatum present but empty or blank.") + + depthTypeIndex = _get_int_attr_or_none(f, "depthTypeIndex") + if depthTypeIndex == 1: + if "verticalCS" not in f.attrs: + self._error( + "verticalCS missing, but mandatory because depthTypeIndex = 1" + ) + if "verticalDatumReference" not in f.attrs: + self._error( + "verticalDatumReference missing, but mandatory because depthTypeIndex = 1" + ) + if "verticalDatum" not in f.attrs: + self._error( + "verticalDatum missing, but mandatory because depthTypeIndex = 1" + ) + + self._log_check("111_Dev1007") + self._validate_metadata(f, self.filename) + self._validate_nameOfHorizontalCRS(f) + self._validate_typeOfHorizontalCRS(f) + self._validate_horizontalCS(f) + self._validate_horizontalDatum(f) + self._validate_nameOfHorizontalDatum(f) + self._validate_primeMeridian(f) + self._validate_spheroid(f) + self._validate_projectionMethod(f) + self._validate_projectionParameters(f) + self._validate_datasetDeliveryInterval(f) + self._validate_verticalCS(f) + self._validate_verticalCoordinateBase(f) + self._validate_verticalDatumReference(f) + self._validate_verticalDatum("top level", f) + self._validate_epoch(f) + self._validate_horizontalCRS(f) + self._validate_bounds("top level", f) + + if "SurfaceCurrent" in f.keys(): + self._validate_SurfaceCurrent(f) + else: + self._critical_error("Missing /SurfaceCurrent group") + + self.checks_done = sorted(self.checks_done) + + def _validate_enumeration(self, group, attr_name, expected_values): + h5_type = group.attrs.get_id(attr_name).get_type() + if isinstance(h5_type, h5py.h5t.TypeEnumID): + if h5_type.get_nmembers() != len(expected_values): + self._warning( + f"Expected {len(expected_values)} members for enumeration {attr_name}" + ) + else: + for code in expected_values: + try: + value = h5_type.enum_nameof(code).decode("utf-8") + except Exception: + value = None + self._warning( + f"Enumeration {attr_name}: did not find value for code {code}" + ) + if value: + expected = expected_values[code] + if value != expected: + self._error( + f"Enumeration {attr_name}: for code {code}, found value {value}, whereas {expected} was expected" + ) + + def _validate_metadata(self, f, filename): + if "metadata" in f.attrs: + metadata = f.attrs["metadata"] + if isinstance(metadata, str) and metadata: + basename = os.path.basename(filename) + if basename.endswith(".h5") or 
basename.endswith(".H5"): + basename = basename[0:-3] + if metadata not in (f"MD_{basename}.xml", f"MD_{basename}.XML"): + self._critical_error( + f"Top level attribute metadata has value '{metadata}', whereas it should be empty, 'MD_{basename}.xml' or 'MD_{basename}.XML'" + ) + + def _is_horizontalCRS_minus_1(self, f): + return _get_int_attr_or_none(f, "horizontalCRS") == -1 + + def _validate_nameOfHorizontalCRS(self, f): + if "nameOfHorizontalCRS" in f.attrs: + nameOfHorizontalCRS = f.attrs["nameOfHorizontalCRS"] + if isinstance(nameOfHorizontalCRS, str) and not nameOfHorizontalCRS: + self._warning( + "Top level attribute nameOfHorizontalCRS must not be the empty string" + ) + elif self._is_horizontalCRS_minus_1(f): + self._warning( + "Top level attribute nameOfHorizontalCRS is missing, but it is mandatory when horizontalCRS = -1" + ) + + def _validate_typeOfHorizontalCRS(self, f): + if "typeOfHorizontalCRS" in f.attrs: + expected_values = { + 1: "geodeticCRS2D", + 2: "projectedCRS", + } + self._validate_enumeration(f, "typeOfHorizontalCRS", expected_values) + elif self._is_horizontalCRS_minus_1(f): + self._warning( + "Top level attribute typeOfHorizontalCRS is missing, but it is mandatory when horizontalCRS = -1" + ) + + def _validate_horizontalCS(self, f): + if "horizontalCS" in f.attrs: + horizontalCS = _get_int_attr_or_none(f, "horizontalCS") + typeOfHorizontalCRS = _get_int_attr_or_none(f, "typeOfHorizontalCRS") + if typeOfHorizontalCRS == 1: # geodeticCRS2D + if horizontalCS != 6422: + self._warning( + "Top level attribute horizontalCS value should be 6422 since typeOfHorizontalCRS=1" + ) + elif typeOfHorizontalCRS == 2: # projectedCRS + if horizontalCS not in (4400, 4500): + self._warning( + "Top level attribute horizontalCS value should be 4400 or 4500 since typeOfHorizontalCRS=2" + ) + elif self._is_horizontalCRS_minus_1(f): + self._warning( + "Top level attribute horizontalCS is missing, but it is mandatory when horizontalCRS = -1" + ) + + @staticmethod + def _get_proj_db(): + try: + from osgeo import osr + except ImportError: + return None + for path in osr.GetPROJSearchPaths(): + filename = os.path.join(path, "proj.db") + if os.path.exists(filename): + import sqlite3 + + return sqlite3.connect(filename) + return None + + def _validate_horizontalDatum(self, f): + if "horizontalDatum" in f.attrs: + horizontalDatum = _get_int_attr_or_none(f, "horizontalDatum") + if horizontalDatum is not None and horizontalDatum != -1: + conn = S111Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM geodetic_datum WHERE auth_name = 'EPSG' and code = ?", + (horizontalDatum,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level attribute horizontalDatum = {horizontalDatum} does not match with a known EPSG datum" + ) + + elif self._is_horizontalCRS_minus_1(f): + self._warning( + "Top level attribute horizontalDatum is missing, but it is mandatory when horizontalCRS = -1" + ) + + def _is_horizontalDatum_minus_1(self, f): + return _get_int_attr_or_none(f, "horizontalDatum") == -1 + + def _validate_nameOfHorizontalDatum(self, f): + if "nameOfHorizontalDatum" in f.attrs: + nameOfHorizontalDatum = f.attrs["nameOfHorizontalDatum"] + if isinstance(nameOfHorizontalDatum, str) and not nameOfHorizontalDatum: + self._warning( + "Top level attribute nameOfHorizontalDatum must not be the empty string" + ) + elif self._is_horizontalDatum_minus_1(f): + self._warning( + "Top level attribute nameOfHorizontalDatum is missing, but it is mandatory when 
horizontalDatum = -1" + ) + + def _validate_primeMeridian(self, f): + if "primeMeridian" in f.attrs: + primeMeridian = _get_int_attr_or_none(f, "primeMeridian") + if primeMeridian is not None: + conn = S111Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM prime_meridian WHERE auth_name = 'EPSG' and code = ?", + (primeMeridian,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level attribute primeMeridian = {primeMeridian} does not match with a known EPSG prime meridian" + ) + + elif self._is_horizontalDatum_minus_1(f): + self._warning( + "Top level attribute primeMeridian is missing, but it is mandatory when horizontalDatum = -1" + ) + + def _validate_spheroid(self, f): + if "spheroid" in f.attrs: + spheroid = _get_int_attr_or_none(f, "spheroid") + if spheroid is not None: + conn = S111Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM ellipsoid WHERE auth_name = 'EPSG' and code = ?", + (spheroid,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level attribute spheroid = {spheroid} does not match with a known EPSG spheroid" + ) + + elif self._is_horizontalDatum_minus_1(f): + self._warning( + "Top level attribute spheroid is missing, but it is mandatory when horizontalDatum = -1" + ) + + def _validate_projectionMethod(self, f): + if "projectionMethod" in f.attrs: + projectionMethod = _get_int_attr_or_none(f, "projectionMethod") + if projectionMethod is not None: + conn = S111Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM conversion_method WHERE auth_name = 'EPSG' and code = ?", + (projectionMethod,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level attribute projectionMethod = {projectionMethod} does not match with a known EPSG projectionMethod" + ) + + else: + typeOfHorizontalCRS = _get_int_attr_or_none(f, "typeOfHorizontalCRS") + if typeOfHorizontalCRS == 2: + self._warning( + "Top level attribute projectionMethod is missing, but it is mandatory when typeOfHorizontalCRS = 2" + ) + + def _validate_projectionParameters(self, f): + + for attr_name in ( + "projectionParameter1", + "projectionParameter2", + "projectionParameter3", + "projectionParameter4", + "projectionParameter5", + "falseNorthing", + "falseEasting", + ): + if attr_name in f.attrs and "projectionMethod" not in f.attrs: + self._warning( + f"Top level attribute {attr_name} is present, but it should not be because projectionMethod is not set" + ) + + def _validate_datasetDeliveryInterval(self, f): + + if "datasetDeliveryInterval" in f.attrs: + datasetDeliveryInterval = f.attrs["datasetDeliveryInterval"] + if isinstance(datasetDeliveryInterval, str): + iso8601_duration_regex = re.compile( + r"^P" # starts with 'P' + r"(?:(\d+(?:\.\d+)?)Y)?" # years + r"(?:(\d+(?:\.\d+)?)M)?" # months + r"(?:(\d+(?:\.\d+)?)W)?" # weeks + r"(?:(\d+(?:\.\d+)?)D)?" # days + r"(?:T" # optional time part + r"(?:(\d+(?:\.\d+)?)H)?" # hours + r"(?:(\d+(?:\.\d+)?)M)?" # minutes + r"(?:(\d+(?:\.\d+)?)S)?" 
# seconds + r")?$" + ) + if not iso8601_duration_regex.match(datasetDeliveryInterval): + self._error( + "Top level attribute datasetDeliveryInterval is not a valid ISO8601 duration" + ) + + def _validate_verticalCS(self, f): + verticalCS = _get_int_attr_or_none(f, "verticalCS") + if verticalCS is not None and verticalCS not in (6498, 6499): + self._error("Top level attribute verticalCS must be 6498 or 6499") + + def _validate_verticalCoordinateBase(self, f): + if "verticalCoordinateBase" in f.attrs: + expected_values = { + 1: "seaSurface", + 2: "verticalDatum", + 3: "seaBottom", + } + self._validate_enumeration(f, "verticalCoordinateBase", expected_values) + + def _validate_verticalDatumReference(self, f): + if "verticalDatumReference" in f.attrs: + expected_values = { + 1: "s100VerticalDatum", + 2: "EPSG", + } + self._validate_enumeration(f, "verticalDatumReference", expected_values) + + def _validate_verticalDatum(self, ctxt_name, f): + verticalDatum = _get_int_attr_or_none(f, "verticalDatum") + if verticalDatum is None: + return + verticalDatumReference = _get_int_attr_or_none(f, "verticalDatumReference") + if verticalDatumReference == 1: + if not ( + (verticalDatum >= 1 and verticalDatum <= 30) + or verticalDatum in (44, 46, 47, 48, 49) + ): + self._warning( + f"{ctxt_name} attribute verticalDatum has value '{verticalDatum}', whereas it should be in [1, 30] range or 44, 46, 47, 48 or 49" + ) + elif verticalDatumReference == 2: + conn = S111Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM vertical_datum WHERE auth_name = 'EPSG' and code = ?", + (verticalDatum,), + ) + if not cursor.fetchone(): + self._warning( + f"{ctxt_name} attribute verticalDatum = {verticalDatum} does not match with a known EPSG verticalDatum" + ) + + def _validate_epoch(self, f): + epoch = _get_float_attr_or_none(f, "epoch") + if epoch and not (epoch >= 1980 and epoch <= 2100): + self._warning(f"Top level attribute epoch has invalid value: {epoch}") + + def _validate_horizontalCRS(self, f): + horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS") + if horizontalCRS is not None and horizontalCRS != -1: + conn = S111Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM crs_view WHERE auth_name = 'EPSG' and code = ? and type in ('geographic 2D', 'projected')", + (horizontalCRS,), + ) + if not cursor.fetchone(): + self._warning( + f"Top level attribute horizontalCRS = {horizontalCRS} does not match with a known EPSG horizontal CRS" + ) + + def _is_geographic_2D(self, f): + horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS") + if horizontalCRS is not None: + if horizontalCRS == 4326: + return True + conn = S111Checker._get_proj_db() + if conn: + cursor = conn.cursor() + cursor.execute( + "SELECT 1 FROM geodetic_crs WHERE auth_name = 'EPSG' and code = ? 
and type = 'geographic 2D'", + (horizontalCRS,), + ) + if cursor.fetchone(): + return True + return False + + def _validate_bounds(self, ctxt_name, f): + west = _get_float_attr_or_none(f, "westBoundLongitude") + east = _get_float_attr_or_none(f, "eastBoundLongitude") + north = _get_float_attr_or_none(f, "northBoundLatitude") + south = _get_float_attr_or_none(f, "southBoundLatitude") + if ( + west is not None + and east is not None + and north is not None + and south is not None + ): + + if not (west >= -180 and west <= 180): + self._warning( + f"{ctxt_name}: westBoundLongitude is not in [-180, 180] range" + ) + if not (east >= -180 and east <= 180): + self._warning( + f"{ctxt_name}: eastBoundLongitude is not in [-180, 180] range" + ) + if west >= east: + self._warning( + f"{ctxt_name}: westBoundLongitude is greater or equal to eastBoundLongitude" + ) + if not (north >= -90 and north <= 90): + self._warning( + f"{ctxt_name}: northBoundLatitude is not in [-90, 90] range" + ) + if not (south >= -90 and south <= 90): + self._warning( + f"{ctxt_name}: southBoundLatitude is not in [-90, 90] range" + ) + if south >= north: + self._warning( + f"{ctxt_name}: southBoundLatitude is greater or equal to northBoundLatitude" + ) + + def _validate_group_f(self, rootGroup, group_f): + + for key in group_f.keys(): + if key not in ( + "featureCode", + "SurfaceCurrent", + ): + self._warning(f"Unexpected element {key} in Group_F") + + self._log_check("111_Dev1008") + if "featureCode" in group_f.keys(): + self._validate_group_f_featureCode( + rootGroup, group_f, group_f["featureCode"] + ) + else: + self._critical_error( + "No featureCode array in feature information group ('/Group_F/featureCode')" + ) + + def _validate_group_f_featureCode(self, rootGroup, group_f, featureCode): + + if not isinstance(featureCode, h5py.Dataset): + self._critical_error("'/Group_F/featureCode' is not a dataset") + return + + if len(featureCode.shape) != 1: + self._critical_error( + "'/Group_F/featureCode' is not a one-dimensional dataset" + ) + return + + self._log_check("111_Dev1009") + values = set([v.decode("utf-8") for v in featureCode[:]]) + if "SurfaceCurrent" not in values: + self._critical_error( + "SurfaceCurrent feature missing from featureCode array" + ) + + self._log_check("111_Dev1010") + for value in values: + if value not in ("SurfaceCurrent",): + # + self._critical_error( + f"Group_F feature information must correspond to feature catalog. 
Did not expect {value}" + ) + + if value not in group_f.keys(): + self._critical_error( + f"Feature information dataset for feature type {value} missing" + ) + + if value not in rootGroup.keys(): + self._critical_error(f"No feature instances for feature type {value}") + + if "SurfaceCurrent" in group_f.keys(): + self._validate_group_f_SurfaceCurrent(group_f) + + def _validate_group_f_SurfaceCurrent(self, group_f): + self._log_check("111_Dev1012") + + SurfaceCurrent = group_f["SurfaceCurrent"] + if not isinstance(SurfaceCurrent, h5py.Dataset): + self._critical_error("'/Group_F/SurfaceCurrent' is not a dataset") + elif len(SurfaceCurrent.shape) != 1: + self._critical_error( + "'/Group_F/SurfaceCurrent' is not a one-dimensional dataset" + ) + elif SurfaceCurrent.dtype != [ + ("code", "O"), + ("name", "O"), + ("uom.name", "O"), + ("fillValue", "O"), + ("datatype", "O"), + ("lower", "O"), + ("upper", "O"), + ("closure", "O"), + ]: + self._critical_error("'/Group_F/SurfaceCurrent' has not expected data type") + else: + self._log_check("111_Dev1013") + + if SurfaceCurrent.shape not in ((2,), (3,), (4,), (5,), (6,)): + self._critical_error( + "'/Group_F/SurfaceCurrent' is not of shape 2, 3, 4, 5 or 6" + ) + + type = SurfaceCurrent.id.get_type() + assert isinstance(type, h5py.h5t.TypeCompoundID) + for member_idx in range(type.get_nmembers()): + subtype = type.get_member_type(member_idx) + if not isinstance(subtype, h5py.h5t.TypeStringID): + self._critical_error( + f"Member of index {member_idx} in /Group_F/SurfaceCurrent is not a string" + ) + return + if not subtype.is_variable_str(): + self._critical_error( + f"Member of index {member_idx} in /Group_F/SurfaceCurrent is not a variable length string" + ) + + values = SurfaceCurrent[:] + # Table 10-3 - Sample contents of the one-dimensional compound array + expected_values = [ + [ + "surfaceCurrentSpeed", + "Surface Current Speed", + "knot", + "-9999.00", + "H5T_FLOAT", + "0.00", + "99.00", + "geSemiInterval", + ], + [ + "surfaceCurrentDirection", + "Surface Current Direction", + "degree", + "-9999.0", + "H5T_FLOAT", + "0.0", + "359.9", + "closedInterval", + ], + [ + "surfaceCurrentTime", + "Surface Current Time", + "", + "00010101T000000Z", + "H5T_STRING", + "19000101T000000Z", + "21500101T000000Z", + "closedInterval", + ], + [ + "speedUncertainty", + "Speed Uncertainty", + "knot", + "-1.0", + "H5T_FLOAT", + "0.00", + "99.00", + "geSemiInterval", + ], + [ + "directionUncertainty", + "Direction Uncertainty", + "degree", + "-1.0", + "H5T_FLOAT", + "0.0", + "359.9", + "closedInterval", + ], + ] + + for row in range(SurfaceCurrent.shape[0]): + row_content = [values[row][col].decode("utf-8") for col in range(8)] + if row_content not in expected_values: + self._critical_error( + f"/Group_F/SurfaceCurrent: row {row}, got value '{row_content}', which is not in '{expected_values}'" + ) + + def _validate_SurfaceCurrent(self, f): + SurfaceCurrent = f["SurfaceCurrent"] + if not isinstance(SurfaceCurrent, h5py.Group): + self._critical_error("/SurfaceCurrent is not a group") + return + + # Cf Table 12-2 - Feature Type metadata, pertaining to the SurfaceCurrent feature type + + self._log_check("111_Dev2002") # for dimension + attr_list = [ + AttributeDefinition( + name="dataCodingFormat", + required=True, + type="enumeration", + fixed_value=None, + ), + AttributeDefinition( + name="dimension", + required=True, + type="uint8", + fixed_value=2, + ), + AttributeDefinition( + name="commonPointRule", + required=True, + type="enumeration", + fixed_value=None, + ), + 
AttributeDefinition( + name="horizontalPositionUncertainty", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="verticalUncertainty", + required=True, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="timeUncertainty", + required=False, + type="float32", + fixed_value=None, + ), + AttributeDefinition( + name="numInstances", + required=True, + type="uint32", + fixed_value=None, + ), + AttributeDefinition( + name="methodCurrentsProduct", + required=False, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="minDatasetCurrentSpeed", + required=True, + type="float64", + fixed_value=None, + ), + AttributeDefinition( + name="maxDatasetCurrentSpeed", + required=True, + type="float64", + fixed_value=None, + ), + ] + + dataCodingFormat = _get_int_attr_or_none(SurfaceCurrent, "dataCodingFormat") + if dataCodingFormat not in (1, 2, 3, 4, 8): + self._error(f"dataCodingFormat={dataCodingFormat} is invalid") + + if dataCodingFormat == 2: # regular grid + attr_list += [ + AttributeDefinition( + name="sequencingRule.type", + required=True, + type="enumeration", + fixed_value=None, + ), + AttributeDefinition( + name="sequencingRule.scanDirection", + required=True, + type="string", + fixed_value=None, + ), + AttributeDefinition( + name="interpolationType", + required=True, + type="enumeration", + fixed_value=10, # discrete + ), + AttributeDefinition( + name="dataOffsetCode", + required=False, + type="enumeration", + fixed_value=None, + ), + ] + elif dataCodingFormat == 3: # ungeorectified grid + attr_list += [ + AttributeDefinition( + name="interpolationType", + required=True, + type="enumeration", + fixed_value=10, # discrete + ), + ] + + self._log_check("111_Dev2001") + self._log_check("111_Dev2008") + self._log_check("111_Dev2009") + self._log_check("111_Dev2018") + self._log_check("111_Dev2019") + self._check_attributes("SurfaceCurrent group", SurfaceCurrent, attr_list) + + self._log_check("111_Dev2003") + if "commonPointRule" in SurfaceCurrent.attrs: + expected_values = { + 1: "average", + 2: "low", + 3: "high", + 4: "all", + } + self._validate_enumeration( + SurfaceCurrent, "commonPointRule", expected_values + ) + + self._log_check("111_Dev2004") + commonPointRule = _get_int_attr_or_none(SurfaceCurrent, "commonPointRule") + if commonPointRule != 3: + self._warning( + '/SurfaceCurrent["commonPointRule"] attribute value is not the recommended value 3 (high)' + ) + + if "dataCodingFormat" in SurfaceCurrent.attrs: + expected_values = { + 1: "Fixed Stations", + 2: "Regular Grid", + 3: "Ungeorectified Grid", + 4: "Moving Platform", + 5: "Irregular Grid", + 6: "Variable cell size", + 7: "TIN", + 8: "Fixed Stations (Stationwise)", + 9: "Feature oriented Regular Grid", + } + self._validate_enumeration( + SurfaceCurrent, "dataCodingFormat", expected_values + ) + + self._log_check("111_Dev2005") + horizontalPositionUncertainty = _get_float_attr_or_none( + SurfaceCurrent, "horizontalPositionUncertainty" + ) + if horizontalPositionUncertainty and not ( + horizontalPositionUncertainty == -1.0 or horizontalPositionUncertainty >= 0 + ): + self._warning( + '/SurfaceCurrent["horizontalPositionUncertainty"] attribute value must be -1 or positive' + ) + + verticalUncertainty = _get_float_attr_or_none( + SurfaceCurrent, "verticalUncertainty" + ) + if verticalUncertainty and not ( + verticalUncertainty == -1.0 or verticalUncertainty >= 0 + ): + self._warning( + '/SurfaceCurrent["verticalUncertainty"] attribute value must be -1 or positive' 
+            )
+
+        self._log_check("111_Dev2006")
+        timeUncertainty = _get_float_attr_or_none(SurfaceCurrent, "timeUncertainty")
+        if timeUncertainty and not (timeUncertainty == -1.0 or timeUncertainty >= 0):
+            self._warning(
+                '/SurfaceCurrent["timeUncertainty"] attribute value must be -1 or positive'
+            )
+
+        self._log_check("111_Dev2007")
+        numInstances = _get_int_attr_or_none(SurfaceCurrent, "numInstances")
+        if numInstances is not None:
+            if numInstances <= 0:
+                self._critical_error(
+                    '/SurfaceCurrent["numInstances"] attribute value must be >= 1'
+                )
+                numInstances = None
+
+        scanDirection_values = None
+        if "sequencingRule.scanDirection" in SurfaceCurrent.attrs:
+            scanDirection = SurfaceCurrent.attrs["sequencingRule.scanDirection"]
+            if isinstance(scanDirection, str):
+                # strip leading space. IMHO there should not be any, but
+                # the examples in the specification sometimes show one...
+                scanDirection_values = [x.lstrip() for x in scanDirection.split(",")]
+
+                self._log_check("111_Dev2016")
+                if len(scanDirection_values) != 2:
+                    self._warning(
+                        '/SurfaceCurrent["sequencingRule.scanDirection"] attribute should have 2 values'
+                    )
+                elif "axisNames" in SurfaceCurrent.keys():
+
+                    scanDirection_values_without_orientation = []
+                    for v in scanDirection_values:
+                        if v.startswith("-"):
+                            scanDirection_values_without_orientation.append(v[1:])
+                        else:
+                            scanDirection_values_without_orientation.append(v)
+                    scanDirection_values_without_orientation = set(
+                        scanDirection_values_without_orientation
+                    )
+
+                    self._log_check("111_Dev2017")
+                    axisNames = SurfaceCurrent["axisNames"]
+                    if (
+                        isinstance(axisNames, h5py.Dataset)
+                        and axisNames.shape == (2,)
+                        and isinstance(axisNames.id.get_type(), h5py.h5t.TypeStringID)
+                    ):
+                        axisNames_values = set(
+                            [v.decode("utf-8") for v in axisNames[:]]
+                        )
+                        if scanDirection_values_without_orientation != axisNames_values:
+                            self._warning(
+                                f"Sequencing rule scanDirection contents ({scanDirection_values_without_orientation}) does not match axis names ({axisNames_values})"
+                            )
+
+        self._validate_axisNames(f, SurfaceCurrent)
+
+        subgroups = set(
+            [
+                name
+                for name, item in SurfaceCurrent.items()
+                if isinstance(item, h5py.Group)
+            ]
+        )
+
+        minDatasetCurrentSpeed = _get_float_attr_or_none(
+            SurfaceCurrent, "minDatasetCurrentSpeed"
+        )
+        if (
+            minDatasetCurrentSpeed is not None
+            and minDatasetCurrentSpeed != -9999.0
+            and minDatasetCurrentSpeed < 0
+        ):
+            self._warning(
+                f"{SurfaceCurrent.name}: minDatasetCurrentSpeed={minDatasetCurrentSpeed} should be in [0, 99.99] range"
+            )
+
+        maxDatasetCurrentSpeed = _get_float_attr_or_none(
+            SurfaceCurrent, "maxDatasetCurrentSpeed"
+        )
+        if maxDatasetCurrentSpeed is not None and maxDatasetCurrentSpeed > 99.99:
+            self._warning(
+                f"{SurfaceCurrent.name}: maxDatasetCurrentSpeed={maxDatasetCurrentSpeed} should be in [0, 99.99] range"
+            )
+
+        if (
+            minDatasetCurrentSpeed is not None
+            and maxDatasetCurrentSpeed is not None
+            and minDatasetCurrentSpeed != -9999.0
+            and maxDatasetCurrentSpeed != -9999.0
+            and minDatasetCurrentSpeed > maxDatasetCurrentSpeed
+        ):
+            self._warning(
+                f"{SurfaceCurrent.name}: minDatasetCurrentSpeed={minDatasetCurrentSpeed} > maxDatasetCurrentSpeed={maxDatasetCurrentSpeed}"
+            )
+
+        self._log_check("111_Dev2013")
+        if len(subgroups) == 0:
+            self._critical_error("/SurfaceCurrent has no groups")
+        else:
+            for i in range(1, len(subgroups) + 1):
+                expected_name = "SurfaceCurrent.%02d" % i
+                if expected_name not in subgroups:
+                    self._critical_error(
+                        f"/SurfaceCurrent/{expected_name} group does not exist"
+                    )
+
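+            # Conversely, flag any instance group whose name does not follow
+            # the SurfaceCurrent.NN naming pattern checked just above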
+            for name in subgroups:
+                if not name.startswith("SurfaceCurrent."):
+                    self._warning(
+                        f"/SurfaceCurrent/{name} is an unexpected group"
+                    )
+
+        self._log_check("111_Dev2014")
+        if numInstances and len(subgroups) != numInstances:
+            self._critical_error(
+                f"/SurfaceCurrent has {len(subgroups)} groups whereas numInstances={numInstances}"
+            )
+
+        self._log_check("111_Dev2015")
+        self._validate_sequencingRuleType(SurfaceCurrent)
+
+        # Attributes and groups already checked above
+        self._log_check("111_Dev2021")
+        for name, item in SurfaceCurrent.items():
+            if isinstance(item, h5py.Dataset) and name != "axisNames":
+                self._warning(f"/SurfaceCurrent has unexpected dataset {name}")
+
+            if isinstance(item, h5py.Group) and name.startswith("SurfaceCurrent."):
+                self._validate_SurfaceCurrent_instance(f, SurfaceCurrent, item)
+
+    def _validate_sequencingRuleType(self, f):
+        if "sequencingRule.type" in f.attrs:
+            expected_values = {
+                1: "linear",
+                2: "boustrophedonic",
+                3: "CantorDiagonal",
+                4: "spiral",
+                5: "Morton",
+                6: "Hilbert",
+            }
+            self._validate_enumeration(f, "sequencingRule.type", expected_values)
+
+    def _validate_SurfaceCurrent_instance(self, f, SurfaceCurrent, instance):
+
+        # Cf Table 12-3 - Feature Instance metadata, pertaining to the feature instance
+        attr_list = [
+            AttributeDefinition(
+                name="westBoundLongitude",
+                required=False,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="eastBoundLongitude",
+                required=False,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="southBoundLatitude",
+                required=False,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="northBoundLatitude",
+                required=False,
+                type="float32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="numberOfTimes",
+                required=True,
+                type="uint32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="timeRecordInterval",
+                required=False,
+                type="uint16",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="dateTimeOfFirstRecord",
+                required=False,
+                type="datetime",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="dateTimeOfLastRecord",
+                required=False,
+                type="datetime",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="numGRP",
+                required=True,
+                type="uint32",
+                fixed_value=None,
+            ),
+            AttributeDefinition(
+                name="dataDynamicity",
+                required=True,
+                type="enumeration",
+                fixed_value=None,
+            ),
+        ]
+
+        dataCodingFormat = _get_int_attr_or_none(SurfaceCurrent, "dataCodingFormat")
+        if dataCodingFormat == 1:
+            attr_list += [
+                AttributeDefinition(
+                    name="numberOfStations",
+                    required=True,
+                    type="uint32",
+                    fixed_value=None,
+                ),
+            ]
+
+        elif dataCodingFormat == 2:
+            attr_list += [
+                AttributeDefinition(
+                    name="gridOriginLongitude",
+                    required=True,
+                    type="float64",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="gridOriginLatitude",
+                    required=True,
+                    type="float64",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="gridSpacingLongitudinal",
+                    required=True,
+                    type="float64",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="gridSpacingLatitudinal",
+                    required=True,
+                    type="float64",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="numPointsLongitudinal",
+                    required=True,
+                    type="uint32",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="numPointsLatitudinal",
+                    required=True,
+                    type="uint32",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="startSequence",
+                    required=True,
+                    type="string",
+                    fixed_value=None,
+                ),
+            ]
+        elif dataCodingFormat == 3:
+            attr_list += [
+                AttributeDefinition(
+                    name="numberOfNodes",
+                    required=True,
+                    type="uint32",
+                    fixed_value=None,
+                ),
+            ]
+        elif dataCodingFormat == 7:
+            attr_list += [
+                AttributeDefinition(
+                    name="numberOfStations",
+                    required=True,
+                    type="uint32",
+                    fixed_value=1,
+                ),
+            ]
+        elif dataCodingFormat == 8:
+            attr_list += [
+                AttributeDefinition(
+                    name="numberOfStations",
+                    required=True,
+                    type="uint32",
+                    fixed_value=None,
+                ),
+            ]
+
+        self._log_check("111_Dev3001")
+        self._log_check("111_Dev3005")
+        self._log_check("111_Dev3019")
+        self._log_check("111_Dev3020")
+        self._check_attributes(
+            f"SurfaceCurrent feature instance group {instance.name}",
+            instance,
+            attr_list,
+        )
+
+        self._log_check("111_Dev3021")
+        countGroups = 0
+        for name, item in instance.items():
+            if isinstance(item, h5py.Dataset) and name not in (
+                "uncertainty",
+                "domainExtent.polygon",
+            ):
+                self._warning(
+                    f"SurfaceCurrent feature instance group {instance.name} has unexpected dataset '{name}'"
+                )
+
+            elif isinstance(item, h5py.Group):
+                if name.startswith("Group_"):
+                    countGroups += 1
+                else:
+                    self._warning(
+                        f"SurfaceCurrent feature instance group {instance.name} has unexpected group '{name}'"
+                    )
+
+        if (
+            "dateTimeOfFirstRecord" in instance.attrs
+            and "dateTimeOfLastRecord" in instance.attrs
+        ):
+            dateTimeOfFirstRecord = instance.attrs["dateTimeOfFirstRecord"]
+            dateTimeOfLastRecord = instance.attrs["dateTimeOfLastRecord"]
+            if isinstance(dateTimeOfFirstRecord, str) and isinstance(
+                dateTimeOfLastRecord, str
+            ):
+                self._log_check("111_Dev3006")
+                if dateTimeOfLastRecord < dateTimeOfFirstRecord:
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: dateTimeOfLastRecord < dateTimeOfFirstRecord"
+                    )
+            else:
+                dateTimeOfFirstRecord = None
+                dateTimeOfLastRecord = None
+        else:
+            dateTimeOfFirstRecord = None
+            dateTimeOfLastRecord = None
+
+        numGRP = _get_int_attr_or_none(instance, "numGRP")
+        if numGRP is not None:
+            self._log_check("111_Dev3007")
+            if numGRP <= 0:
+                self._error(
+                    f"SurfaceCurrent feature instance group {instance.name}: numGRP is <= 0"
+                )
+            if dataCodingFormat == 2:
+                self._log_check("111_Dev3023")
+                if numGRP != countGroups:
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: Count of values groups does not match attribute numGRP in instance group"
+                    )
+
+        numberOfTimes = _get_int_attr_or_none(instance, "numberOfTimes")
+        if numberOfTimes is not None:
+            self._log_check("111_Dev3003")
+            if numberOfTimes <= 0:
+                self._error(
+                    f"SurfaceCurrent feature instance group {instance.name}: numberOfTimes is <= 0"
+                )
+            if dataCodingFormat == 2:
+                if numGRP is not None and numberOfTimes != numGRP:
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: numberOfTimes is different from numGRP"
+                    )
+
+        timeRecordInterval = _get_int_attr_or_none(instance, "timeRecordInterval")
+        if timeRecordInterval is not None:
+            self._log_check("111_Dev3004")
+            if timeRecordInterval <= 0:
+                self._critical_error(
+                    f"SurfaceCurrent feature instance group {instance.name}: timeRecordInterval is <= 0"
+                )
+            elif (
+                dateTimeOfFirstRecord
+                and dateTimeOfLastRecord
+                and len(dateTimeOfFirstRecord) == len("YYYYMMDDTHHMMSSZ")
+                and len(dateTimeOfLastRecord) == len("YYYYMMDDTHHMMSSZ")
+                and numberOfTimes
+            ):
+                from datetime import datetime, timezone
+
+                start = (
+                    datetime.strptime(dateTimeOfFirstRecord, "%Y%m%dT%H%M%SZ")
+                    .replace(tzinfo=timezone.utc)
+                    .timestamp()
+                )
+                end = (
+                    datetime.strptime(dateTimeOfLastRecord, "%Y%m%dT%H%M%SZ")
+                    .replace(tzinfo=timezone.utc)
+                    .timestamp()
+                )
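+                # For an evenly spaced time series, the record count should be
+                # 1 + (last - first) / timeRecordInterval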
+                computedNumberOfTimes = 1 + (end - start) / timeRecordInterval
+                if computedNumberOfTimes != numberOfTimes:
+                    self._warning(
+                        f"SurfaceCurrent feature instance group {instance.name}: given dateTimeOfFirstRecord, dateTimeOfLastRecord and timeRecordInterval, the number of groups should be {computedNumberOfTimes} whereas it is {numberOfTimes}"
+                    )
+
+        present = []
+        missing = []
+        for name in (
+            "westBoundLongitude",
+            "eastBoundLongitude",
+            "northBoundLatitude",
+            "southBoundLatitude",
+        ):
+            if name in instance.attrs:
+                present.append(name)
+            else:
+                missing.append(name)
+
+        if present and missing:
+            self._critical_error(
+                f"SurfaceCurrent feature instance group {instance.name}: attributes {present} are present, but {missing} are missing"
+            )
+
+        westBoundLongitude = _get_float_attr_or_none(instance, "westBoundLongitude")
+        eastBoundLongitude = _get_float_attr_or_none(instance, "eastBoundLongitude")
+        northBoundLatitude = _get_float_attr_or_none(instance, "northBoundLatitude")
+        southBoundLatitude = _get_float_attr_or_none(instance, "southBoundLatitude")
+
+        top_westBoundLongitude = _get_float_attr_or_none(f, "westBoundLongitude")
+        top_eastBoundLongitude = _get_float_attr_or_none(f, "eastBoundLongitude")
+        top_northBoundLatitude = _get_float_attr_or_none(f, "northBoundLatitude")
+        top_southBoundLatitude = _get_float_attr_or_none(f, "southBoundLatitude")
+
+        if (
+            westBoundLongitude is not None
+            and eastBoundLongitude is not None
+            and northBoundLatitude is not None
+            and southBoundLatitude is not None
+        ):
+
+            horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS")
+            if horizontalCRS and horizontalCRS > 0:
+                if self._is_geographic_2D(f):
+
+                    self._validate_bounds(
+                        f"SurfaceCurrent feature instance group {instance.name}",
+                        instance,
+                    )
+
+                    if (
+                        top_westBoundLongitude is not None
+                        and top_eastBoundLongitude is not None
+                        and top_northBoundLatitude is not None
+                        and top_southBoundLatitude is not None
+                    ):
+                        self._log_check("111_Dev3002")
+                        if westBoundLongitude < top_westBoundLongitude:
+                            self._error(
+                                f"SurfaceCurrent feature instance group {instance.name}: westBoundLongitude={westBoundLongitude} < top_westBoundLongitude={top_westBoundLongitude}"
+                            )
+                        if southBoundLatitude < top_southBoundLatitude:
+                            self._error(
+                                f"SurfaceCurrent feature instance group {instance.name}: southBoundLatitude={southBoundLatitude} < top_southBoundLatitude={top_southBoundLatitude}"
+                            )
+                        if eastBoundLongitude > top_eastBoundLongitude:
+                            self._error(
+                                f"SurfaceCurrent feature instance group {instance.name}: eastBoundLongitude={eastBoundLongitude} > top_eastBoundLongitude={top_eastBoundLongitude}"
+                            )
+                        if northBoundLatitude > top_northBoundLatitude:
+                            self._error(
+                                f"SurfaceCurrent feature instance group {instance.name}: northBoundLatitude={northBoundLatitude} > top_northBoundLatitude={top_northBoundLatitude}"
+                            )
+
+                else:
+                    if (
+                        abs(westBoundLongitude) <= 180
+                        and abs(eastBoundLongitude) <= 180
+                        and abs(northBoundLatitude) <= 90
+                        and abs(southBoundLatitude) <= 90
+                    ):
+                        self._error(
+                            f"SurfaceCurrent feature instance group {instance.name}: westBoundLongitude, eastBoundLongitude, northBoundLatitude, southBoundLatitude are longitudes/latitudes whereas they should be projected coordinates, given the horizontalCRS is projected"
+                        )
+
+                    if gdal_available:
+                        horizontalCRS_srs = osr.SpatialReference()
+                        horizontalCRS_srs.SetAxisMappingStrategy(
+                            osr.OAMS_TRADITIONAL_GIS_ORDER
+                        )
+                        horizontalCRS_srs.ImportFromEPSG(int(horizontalCRS))
+
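+                        # Reproject the instance bounds to EPSG:4326 so that they can
+                        # be compared against the CRS area of use and the top level
+                        # bounds, which are expressed as longitudes/latitudes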
+                        longlat_srs = osr.SpatialReference()
+                        longlat_srs.SetAxisMappingStrategy(
+                            osr.OAMS_TRADITIONAL_GIS_ORDER
+                        )
+                        longlat_srs.ImportFromEPSG(4326)
+                        ct = osr.CoordinateTransformation(
+                            horizontalCRS_srs, longlat_srs
+                        )
+                        westLon, southLat, eastLon, northLat = ct.TransformBounds(
+                            westBoundLongitude,
+                            southBoundLatitude,
+                            eastBoundLongitude,
+                            northBoundLatitude,
+                            21,
+                        )
+
+                        crs_area_of_use = horizontalCRS_srs.GetAreaOfUse()
+                        # Add a substantial epsilon as going a bit outside of the CRS area of use is usually fine
+                        epsilon = 1
+                        if westLon + epsilon < crs_area_of_use.west_lon_degree:
+                            self._error(
+                                f"SurfaceCurrent feature instance group {instance.name}: westLon={westLon} < crs_area_of_use.west_lon_degree={crs_area_of_use.west_lon_degree}"
+                            )
+                        if southLat + epsilon < crs_area_of_use.south_lat_degree:
+                            self._error(
+                                f"SurfaceCurrent feature instance group {instance.name}: southLat={southLat} < crs_area_of_use.south_lat_degree={crs_area_of_use.south_lat_degree}"
+                            )
+                        if eastLon - epsilon > crs_area_of_use.east_lon_degree:
+                            self._error(
+                                f"SurfaceCurrent feature instance group {instance.name}: eastLon={eastLon} > crs_area_of_use.east_lon_degree={crs_area_of_use.east_lon_degree}"
+                            )
+                        if northLat - epsilon > crs_area_of_use.north_lat_degree:
+                            self._error(
+                                f"SurfaceCurrent feature instance group {instance.name}: northLat={northLat} > crs_area_of_use.north_lat_degree={crs_area_of_use.north_lat_degree}"
+                            )
+
+                        if (
+                            top_westBoundLongitude is not None
+                            and top_eastBoundLongitude is not None
+                            and top_northBoundLatitude is not None
+                            and top_southBoundLatitude is not None
+                        ):
+                            # Add an epsilon to take into account potential different ways of doing bounding box reprojection
+                            epsilon = 0.01
+                            if westLon + epsilon < top_westBoundLongitude:
+                                self._error(
+                                    f"SurfaceCurrent feature instance group {instance.name}: westBoundLongitude={westLon} ({westBoundLongitude}) < top_westBoundLongitude={top_westBoundLongitude}"
+                                )
+                            if southLat + epsilon < top_southBoundLatitude:
+                                self._error(
+                                    f"SurfaceCurrent feature instance group {instance.name}: southBoundLatitude={southLat} ({southBoundLatitude}) < top_southBoundLatitude={top_southBoundLatitude}"
+                                )
+                            if eastLon - epsilon > top_eastBoundLongitude:
+                                self._error(
+                                    f"SurfaceCurrent feature instance group {instance.name}: eastBoundLongitude={eastLon} ({eastBoundLongitude}) > top_eastBoundLongitude={top_eastBoundLongitude}"
+                                )
+                            if northLat - epsilon > top_northBoundLatitude:
+                                self._error(
+                                    f"SurfaceCurrent feature instance group {instance.name}: northBoundLatitude={northLat} ({northBoundLatitude}) > top_northBoundLatitude={top_northBoundLatitude}"
+                                )
+
+                    else:
+                        self._warning(
+                            "Test checking consistency of bounds in SurfaceCurrent feature instance group compared to top level attributes skipped due to GDAL not available"
+                        )
+
+            if eastBoundLongitude <= westBoundLongitude:
+                self._error(
+                    f"SurfaceCurrent feature instance group {instance.name}: eastBoundLongitude <= westBoundLongitude"
+                )
+            if northBoundLatitude <= southBoundLatitude:
+                self._error(
+                    f"SurfaceCurrent feature instance group {instance.name}: northBoundLatitude <= southBoundLatitude"
+                )
+
+        if "domainExtent.polygon" in instance.keys() and present:
+            self._error(
+                f"SurfaceCurrent feature instance group {instance.name}: both dataset 'domainExtent.polygon' and westBoundLongitude, eastBoundLongitude, northBoundLatitude, southBoundLatitude attributes are present"
+            )
+
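+        # The grid origin, spacing and point count attributes checked below are
+        # only added to the attribute list for DCF2 (regular grid) instances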
+        gridOriginLongitude = _get_float_attr_or_none(instance, "gridOriginLongitude")
+        gridOriginLatitude = _get_float_attr_or_none(instance, "gridOriginLatitude")
+        if gridOriginLongitude is not None and gridOriginLatitude is not None:
+
+            if (
+                westBoundLongitude is not None
+                and eastBoundLongitude is not None
+                and northBoundLatitude is not None
+                and southBoundLatitude is not None
+            ):
+                self._log_check("111_Dev3009")
+
+                # gridOriginLongitude is encoded as a float64, whereas westBoundLongitude
+                # as a float32, hence add some tolerance so the comparison is fair
+                if (
+                    gridOriginLongitude + 1e-6 * abs(gridOriginLongitude)
+                    < westBoundLongitude
+                ):
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: gridOriginLongitude={gridOriginLongitude} < westBoundLongitude={westBoundLongitude}"
+                    )
+                if (
+                    gridOriginLongitude - 1e-6 * abs(gridOriginLongitude)
+                    > eastBoundLongitude
+                ):
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: gridOriginLongitude={gridOriginLongitude} > eastBoundLongitude={eastBoundLongitude}"
+                    )
+                if (
+                    gridOriginLatitude + 1e-6 * abs(gridOriginLatitude)
+                    < southBoundLatitude
+                ):
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: gridOriginLatitude={gridOriginLatitude} < southBoundLatitude={southBoundLatitude}"
+                    )
+                if (
+                    gridOriginLatitude - 1e-6 * abs(gridOriginLatitude)
+                    > northBoundLatitude
+                ):
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: gridOriginLatitude={gridOriginLatitude} > northBoundLatitude={northBoundLatitude}"
+                    )
+
+            # Re-fetch horizontalCRS here, as it is otherwise only set when the
+            # bounding box attributes are present, and guard against None
+            horizontalCRS = _get_int_attr_or_none(f, "horizontalCRS")
+            if gdal_available and horizontalCRS and horizontalCRS > 0:
+                horizontalCRS_srs = osr.SpatialReference()
+                horizontalCRS_srs.SetAxisMappingStrategy(
+                    osr.OAMS_TRADITIONAL_GIS_ORDER
+                )
+                horizontalCRS_srs.ImportFromEPSG(horizontalCRS)
+
+                longlat_srs = osr.SpatialReference()
+                longlat_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
+                longlat_srs.ImportFromEPSG(4326)
+                ct = osr.CoordinateTransformation(horizontalCRS_srs, longlat_srs)
+                origin_long, origin_lat, _ = ct.TransformPoint(
+                    gridOriginLongitude, gridOriginLatitude, 0
+                )
+
+                crs_area_of_use = horizontalCRS_srs.GetAreaOfUse()
+                # Add a substantial epsilon as going a bit outside of the CRS area of use is usually fine
+                epsilon = 1
+                if origin_long + epsilon < crs_area_of_use.west_lon_degree:
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: origin_long={origin_long} < crs_area_of_use.west_lon_degree={crs_area_of_use.west_lon_degree}"
+                    )
+                if origin_lat + epsilon < crs_area_of_use.south_lat_degree:
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: origin_lat={origin_lat} < crs_area_of_use.south_lat_degree={crs_area_of_use.south_lat_degree}"
+                    )
+                if origin_long - epsilon > crs_area_of_use.east_lon_degree:
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: origin_long={origin_long} > crs_area_of_use.east_lon_degree={crs_area_of_use.east_lon_degree}"
+                    )
+                if origin_lat - epsilon > crs_area_of_use.north_lat_degree:
+                    self._error(
+                        f"SurfaceCurrent feature instance group {instance.name}: origin_lat={origin_lat} > crs_area_of_use.north_lat_degree={crs_area_of_use.north_lat_degree}"
+                    )
+
+        self._log_check("111_Dev3010")
+        gridSpacingLongitudinal = _get_float_attr_or_none(
+            instance, "gridSpacingLongitudinal"
+        )
+        if gridSpacingLongitudinal is not None and gridSpacingLongitudinal <= 0:
+            self._critical_error(
+                f"SurfaceCurrent feature instance group {instance.name}: Grid spacing attribute in instance group has value out of range: gridSpacingLongitudinal <= 0"
+            )
+
+        self._log_check("111_Dev3010")
+        gridSpacingLatitudinal = _get_float_attr_or_none(
+            instance, "gridSpacingLatitudinal"
+        )
+        if gridSpacingLatitudinal is not None and gridSpacingLatitudinal <= 0:
+            self._critical_error(
+                f"SurfaceCurrent feature instance group {instance.name}: Grid spacing attribute in instance group has value out of range: gridSpacingLatitudinal <= 0"
+            )
+
+        self._log_check("111_Dev3011")
+        if (
+            gridSpacingLongitudinal is not None
+            and eastBoundLongitude is not None
+            and westBoundLongitude is not None
+            and gridSpacingLongitudinal * (1 - 1e-2)
+            > 0.5 * (eastBoundLongitude - westBoundLongitude)
+        ):
+            self._warning(
+                f"SurfaceCurrent feature instance group {instance.name}: Value of gridSpacingLongitudinal or gridSpacingLatitudinal in instance group too high: gridSpacingLongitudinal={gridSpacingLongitudinal} > 0.5 * (eastBoundLongitude - westBoundLongitude)={0.5 * (eastBoundLongitude - westBoundLongitude)}"
+            )
+
+        self._log_check("111_Dev3011")
+        if (
+            gridSpacingLatitudinal is not None
+            and southBoundLatitude is not None
+            and northBoundLatitude is not None
+            and gridSpacingLatitudinal * (1 - 1e-2)
+            > 0.5 * (northBoundLatitude - southBoundLatitude)
+        ):
+            self._warning(
+                f"SurfaceCurrent feature instance group {instance.name}: Value of gridSpacingLongitudinal or gridSpacingLatitudinal in instance group too high: gridSpacingLatitudinal={gridSpacingLatitudinal} > 0.5 * (northBoundLatitude - southBoundLatitude)={0.5 * (northBoundLatitude - southBoundLatitude)}"
+            )
+
+        self._log_check("111_Dev3012")
+        numPointsLongitudinal = _get_int_attr_or_none(instance, "numPointsLongitudinal")
+        if numPointsLongitudinal is not None and numPointsLongitudinal < 1:
+            self._critical_error(
+                f"SurfaceCurrent feature instance group {instance.name}: Grid must be at least 1X1: numPointsLongitudinal < 1"
+            )
+
+        self._log_check("111_Dev3012")
+        numPointsLatitudinal = _get_int_attr_or_none(instance, "numPointsLatitudinal")
+        if numPointsLatitudinal is not None and numPointsLatitudinal < 1:
+            self._critical_error(
+                f"SurfaceCurrent feature instance group {instance.name}: Grid must be at least 1X1: numPointsLatitudinal < 1"
+            )
+
+        self._log_check("111_Dev3013")
+        if (
+            gridSpacingLongitudinal is not None
+            and eastBoundLongitude is not None
+            and westBoundLongitude is not None
+            and numPointsLongitudinal is not None
+            and numPointsLongitudinal > 1
+            and abs(
+                gridSpacingLongitudinal
+                - (eastBoundLongitude - westBoundLongitude)
+                / (numPointsLongitudinal - 1)
+            )
+            > 1e-2 * gridSpacingLongitudinal
+        ):
+            self._warning(
+                f"SurfaceCurrent feature instance group {instance.name}: Grid dimensions are incompatible with instance bounding box: gridSpacingLongitudinal={gridSpacingLongitudinal} != (eastBoundLongitude - westBoundLongitude) / (numPointsLongitudinal - 1)={(eastBoundLongitude - westBoundLongitude) / (numPointsLongitudinal - 1)}"
+            )
+
+        self._log_check("111_Dev3013")
+        if (
+            gridSpacingLatitudinal is not None
+            and southBoundLatitude is not None
+            and northBoundLatitude is not None
+            and numPointsLatitudinal is not None
+            and numPointsLatitudinal > 1
+            and abs(
+                gridSpacingLatitudinal
+                - (northBoundLatitude - southBoundLatitude) / (numPointsLatitudinal - 1)
+            )
+            > 1e-2 * gridSpacingLatitudinal
+        ):
+            self._warning(
+                f"SurfaceCurrent feature instance group {instance.name}: Grid dimensions are incompatible with instance bounding box: gridSpacingLatitudinal={gridSpacingLatitudinal} != (northBoundLatitude - southBoundLatitude) / (numPointsLatitudinal - 1)={(northBoundLatitude - southBoundLatitude) / (numPointsLatitudinal - 1)}"
+            )
+
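+        # 111_Dev3014: for DCF2 the grid origin is expected to coincide with the
+        # south-west corner of the instance bounding box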
+        self._log_check("111_Dev3014")
+        # gridOriginLongitude is encoded as a float64, whereas westBoundLongitude
+        # as a float32, hence add some tolerance so the comparison is fair
+        if (
+            westBoundLongitude is not None
+            and gridOriginLongitude is not None
+            and abs(westBoundLongitude - gridOriginLongitude)
+            > 1e-6 * abs(westBoundLongitude)
+        ):
+            self._warning(
+                f"SurfaceCurrent feature instance group {instance.name}: Grid origin does not coincide with instance bounding box; westBoundLongitude={westBoundLongitude} != gridOriginLongitude={_cast_to_float32(gridOriginLongitude)}"
+            )
+
+        self._log_check("111_Dev3014")
+        if (
+            southBoundLatitude is not None
+            and gridOriginLatitude is not None
+            and abs(southBoundLatitude - gridOriginLatitude)
+            > 1e-6 * abs(southBoundLatitude)
+        ):
+            self._warning(
+                f"SurfaceCurrent feature instance group {instance.name}: Grid origin does not coincide with instance bounding box: southBoundLatitude={southBoundLatitude} != gridOriginLatitude={_cast_to_float32(gridOriginLatitude)}"
+            )
+
+        self._log_check("111_Dev3015")
+        if "startSequence" in instance.attrs:
+            startSequence = instance.attrs["startSequence"]
+            if isinstance(startSequence, str):
+                startSequence = startSequence.split(",")
+                if (
+                    len(startSequence) != 2
+                    or _get_int_value_or_none(startSequence[0]) is None
+                    or _get_int_value_or_none(startSequence[1]) is None
+                ):
+                    self._warning(
+                        f"SurfaceCurrent feature instance group {instance.name}: invalid content for startSequence in instance"
+                    )
+                else:
+                    self._log_check("111_Dev3016")
+                    if startSequence != ["0", "0"]:
+                        # other tests are probably not compatible with a non (0,0) startSequence
+                        self._warning(
+                            f"SurfaceCurrent feature instance group {instance.name}: Values in startSequence in instance group are incompatible with the scan direction in sequencingRule"
+                        )
+
+        self._log_check("111_Dev3022")
+        for idx_grp in range(1, (numGRP or 0) + 1):
+            grp_name = "Group_%03d" % idx_grp
+            if grp_name not in instance.keys() or not isinstance(
+                instance[grp_name], h5py.Group
+            ):
+                self._critical_error(
+                    f"SurfaceCurrent feature instance group {instance.name}: no {grp_name} subgroup"
+                )
+            else:
+                self._validate_Group_XXX(
+                    f,
+                    instance[grp_name],
+                    numPointsLongitudinal,
+                    numPointsLatitudinal,
+                    dateTimeOfFirstRecord,
+                    dateTimeOfLastRecord,
+                )
+
+        if "uncertainty" in instance.keys() and isinstance(
+            instance["uncertainty"], h5py.Dataset
+        ):
+            uncertainty = instance["uncertainty"]
+            if uncertainty.shape != (2,):
+                self._critical_error(
+                    f"'{instance.name}/uncertainty' is not a one-dimensional dataset of shape 2"
+                )
+            elif uncertainty.dtype not in (
+                [
+                    ("name", "O"),
+                    ("value", "d"),
+                ],
+                [
+                    ("name", "O"),
+                    ("value", "f"),
+                ],
+            ):
+                self._critical_error(
+                    f"'{instance.name}/uncertainty' does not have the expected data type"
+                )
+
+    def _validate_Group_XXX(
+        self,
+        f,
+        Group_XXX,
+        numPointsLongitudinal,
+        numPointsLatitudinal,
+        dateTimeOfFirstRecord,
+        dateTimeOfLastRecord,
+    ):
+
+        dataCodingFormat = _get_int_attr_or_none(
+            f["SurfaceCurrent"], "dataCodingFormat"
+        )
+
+        # Cf Table 12-4 - Values Group attributes
+        attr_list = []
+        if dataCodingFormat in (1, 2, 3, 4):
+            attr_list += [
+                AttributeDefinition(
+                    name="timePoint",
+                    required=True,
+                    type="datetime",
+                    fixed_value=None,
+                ),
+            ]
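+        # For DCF 4 and 8, each values group describes a per-station time series,
+        # so it carries its own station and time-span metadata (attributes below)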
+        if dataCodingFormat in (4, 8):
+            attr_list += [
+                AttributeDefinition(
+                    name="timeIntervalIndex",
+                    required=True,
+                    type="uint8",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="timeRecordInterval",
+                    required=False,
+                    type="uint16",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="stationName",
+                    required=False,
+                    type="string",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="stationIdentification",
+                    required=False,
+                    type="string",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="numberOfTimes",
+                    required=True,
+                    type="uint32",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="startDateTime",
+                    required=True,
+                    type="datetime",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="endDateTime",
+                    required=True,
+                    type="datetime",
+                    fixed_value=None,
+                ),
+            ]
+        if dataCodingFormat == 8:
+            attr_list += [
+                AttributeDefinition(
+                    name="dataCodingFormatlocationMRN",
+                    required=False,
+                    type="string",
+                    fixed_value=None,
+                ),
+                AttributeDefinition(
+                    name="stationURL",
+                    required=False,
+                    type="string",
+                    fixed_value=None,
+                ),
+            ]
+
+        self._log_check("111_Dev5001")
+        self._check_attributes(
+            "Group_XXX",
+            Group_XXX,
+            attr_list,
+        )
+
+        if (
+            "timePoint" in Group_XXX.attrs
+            and dateTimeOfFirstRecord
+            and dateTimeOfLastRecord
+        ):
+            timePoint = Group_XXX.attrs["timePoint"]
+            if isinstance(timePoint, str):
+                self._log_check("111_Dev5002")
+                if not (
+                    timePoint >= dateTimeOfFirstRecord
+                    and timePoint <= dateTimeOfLastRecord
+                ):
+                    self._warning(
+                        f"{Group_XXX.name}: timePoint value not in [dateTimeOfFirstRecord, dateTimeOfLastRecord] range"
+                    )
+
+        self._log_check("111_Dev5003")
+        if "values" not in Group_XXX.keys() or not isinstance(
+            Group_XXX["values"], h5py.Dataset
+        ):
+            self._critical_error(f"{Group_XXX.name}/values dataset missing")
+        else:
+            self._validate_values(
+                f,
+                Group_XXX["values"],
+                numPointsLongitudinal,
+                numPointsLatitudinal,
+            )
+
+    def _validate_values(
+        self,
+        f,
+        values,
+        numPointsLongitudinal,
+        numPointsLatitudinal,
+    ):
+
+        self._log_check("111_Dev5005")
+        if len(values.shape) != 2:
+            self._critical_error(f"{values.name} dataset is not 2-dimensional")
+            return
+
+        self._log_check("111_Dev5006")
+        if (
+            numPointsLatitudinal
+            and numPointsLongitudinal
+            and values.shape != (numPointsLatitudinal, numPointsLongitudinal)
+        ):
+            self._critical_error(
+                f"{values.name} dataset shape is {values.shape} instead of {(numPointsLatitudinal, numPointsLongitudinal)}"
+            )
+            return
+
+        self._log_check("111_Dev5011")
+        values_type = values.id.get_type()
+        if not isinstance(values_type, h5py.h5t.TypeCompoundID):
+            self._critical_error(f"{values.name} type is not compound")
+            return
+
+        self._log_check("111_Dev5012")
+        Group_F_SurfaceCurrent = None
+        if "Group_F" in f:
+            Group_F = f["Group_F"]
+            if isinstance(Group_F, h5py.Group) and "SurfaceCurrent" in Group_F:
+                Group_F_SurfaceCurrent = Group_F["SurfaceCurrent"]
+                if (
+                    isinstance(Group_F_SurfaceCurrent, h5py.Dataset)
+                    and len(Group_F_SurfaceCurrent.shape) == 1
+                ):
+                    num_components = Group_F_SurfaceCurrent.shape[0]
+                    if num_components and values_type.get_nmembers() != num_components:
+                        self._critical_error(
+                            f"{values.name} type has {values_type.get_nmembers()} members whereas {num_components} are expected from /Group_F/SurfaceCurrent"
+                        )
+                        return
+                else:
+                    Group_F_SurfaceCurrent = None
+
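+        # The rows of /Group_F/SurfaceCurrent are assumed to list the compound
+        # members of 'values' in the same order (field 0 of each row is the code)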
{component_name} is not Group_F_SurfaceCurrent[{member_idx}]['name']] = {expected}" + ) + assert isinstance(component_name, bytes) + if component_name == b"surfaceCurrentSpeed": + found_surfaceCurrentSpeed = True + if not self._is_float32(subtype): + self._critical_error( + f"{values.name} member {component_name} is not a float32" + ) + elif component_name == b"surfaceCurrentDirection": + found_surfaceCurrentDirection = True + if not self._is_float32(subtype): + self._critical_error( + f"{values.name} member {component_name} is not a float32" + ) + elif component_name == b"speedUncertainty": + found_speedUncertainty = True + if not self._is_float32(subtype): + self._critical_error( + f"{values.name} member {component_name} is not a float32" + ) + elif component_name == b"directionUncertainty": + found_directionUncertainty = True + if not self._is_float32(subtype): + self._critical_error( + f"{values.name} member {component_name} is not a float32" + ) + + minDatasetCurrentSpeed = _get_float_attr_or_none( + f["SurfaceCurrent"], "minDatasetCurrentSpeed" + ) + maxDatasetCurrentSpeed = _get_float_attr_or_none( + f["SurfaceCurrent"], "maxDatasetCurrentSpeed" + ) + if found_surfaceCurrentSpeed: + masked_height = np.ma.masked_equal(values[:]["surfaceCurrentSpeed"], -9999) + actualMinSpeed = masked_height.min() + actualMaxSpeed = masked_height.max() + + if ( + minDatasetCurrentSpeed is not None + and maxDatasetCurrentSpeed is not None + and minDatasetCurrentSpeed != -9999 + and maxDatasetCurrentSpeed != -9999 + and minDatasetCurrentSpeed <= maxDatasetCurrentSpeed + ): + self._log_check("111_Dev5013") + if actualMinSpeed < minDatasetCurrentSpeed: + self._error( + f"{values.name} : minimum surfaceCurrentSpeed is {actualMinSpeed}, whereas minDatasetCurrentSpeed attribute = {minDatasetCurrentSpeed}" + ) + + if actualMaxSpeed > maxDatasetCurrentSpeed: + self._error( + f"{values.name} : maximum surfaceCurrentSpeed is {actualMaxSpeed}, whereas maxDatasetCurrentSpeed attribute = {maxDatasetCurrentSpeed}" + ) + + self._log_check("111_Dev5014") + if actualMinSpeed < 0: + self._error( + f"{values.name} : minimum surfaceCurrentSpeed is {actualMinSpeed}, whereas it should be >= 0" + ) + + if actualMaxSpeed > 99.0: + self._error( + f"{values.name} : maximum surfaceCurrentSpeed is {actualMaxSpeed}, whereas it should be <= 99" + ) + + if found_surfaceCurrentDirection: + masked_direction = np.ma.masked_equal( + values[:]["surfaceCurrentDirection"], -9999 + ) + self._log_check("111_Dev5015") + actualMin = masked_direction.min() + if actualMin < 0: + self._error( + f"{values.name} : minimum surfaceCurrentDirection is {actualMin}, whereas it should be >= 0" + ) + actualMax = masked_direction.max() + if actualMax > 359.9: + self._error( + f"{values.name} : maximum surfaceCurrentDirection is {actualMax}, whereas it should be <= 359.9" + ) + + if found_speedUncertainty: + self._log_check("111_Dev5016") + masked_uncertainty = np.ma.masked_equal(values[:]["speedUncertainty"], -1.0) + actualMin = masked_uncertainty.min() + if actualMin < 0: + self._error( + f"{values.name} : minimum speedUncertainty is {actualMin}, whereas it should be >= 0" + ) + actualMax = masked_uncertainty.max() + if actualMax > 99.0: + self._error( + f"{values.name} : maximum speedUncertainty is {actualMax}, whereas it should be <= 99" + ) + + if found_directionUncertainty: + self._log_check("111_Dev5016") + + masked_uncertainty = np.ma.masked_equal( + values[:]["directionUncertainty"], -1.0 + ) + actualMin = masked_uncertainty.min() + if actualMin < 
0:
+                self._error(
+                    f"{values.name} : minimum directionUncertainty is {actualMin}, whereas it should be >= 0"
+                )
+            actualMax = masked_uncertainty.max()
+            if actualMax > 359.9:
+                self._error(
+                    f"{values.name} : maximum directionUncertainty is {actualMax}, whereas it should be <= 359.9"
+                )
+
+    def _validate_axisNames(self, f, group):
+
+        groupName = group.name
+
+        self._log_check("111_Dev2012")
+        if "axisNames" not in group.keys():
+            self._error(f"{groupName}/axisNames dataset does not exist")
+        elif not isinstance(group["axisNames"], h5py.Dataset):
+            self._error(f"{groupName}/axisNames is not a dataset")
+        else:
+            axisNames = group["axisNames"]
+            if axisNames.shape != (2,):
+                self._error(
+                    f"{groupName}/axisNames dataset is not a one-dimensional array of length 2"
+                )
+            else:
+                axis_type = axisNames.id.get_type()
+                if not isinstance(axis_type, h5py.h5t.TypeStringID):
+                    self._error(f"{groupName}/axisNames type is not a string")
+                else:
+                    values = [v.decode("utf-8") for v in axisNames[:]]
+                    if values not in (
+                        ["Easting", "Northing"],
+                        ["Latitude", "Longitude"],
+                    ):
+                        self._error(
+                            f'{groupName}/axisNames must conform to CRS. Expected ["Easting", "Northing"] or ["Latitude", "Longitude"]. Got {values}'
+                        )
+                    elif "horizontalCRS" in f.attrs:
+                        horizontalCRS = f.attrs["horizontalCRS"]
+                        if isinstance(horizontalCRS, int):
+                            if self._is_geographic_2D(f):
+                                if values != ["Latitude", "Longitude"]:
+                                    self._error(
+                                        f'{groupName}/axisNames must conform to CRS. Expected ["Latitude", "Longitude"]'
+                                    )
+                            else:
+                                if values != ["Easting", "Northing"]:
+                                    self._error(
+                                        f'{groupName}/axisNames must conform to CRS. Expected ["Easting", "Northing"]'
+                                    )
+
+
+# Public function
+def check(
+    filename,
+    abort_at_first_error=False,
+):
+    """Check specified filename and return a tuple (errors, warnings, checks_done)"""
+    checker = S111Checker(
+        filename,
+        abort_at_first_error=abort_at_first_error,
+    )
+    checker.check()
+    return checker.errors, checker.warnings, checker.checks_done
+
+
+def usage():
+    print("Usage: validate_s111.py [-q] <filename>")
+    print("")
+    print("Validates an S-111 file against the Edition 2.0 specification.")
+    print("")
+    print("-q: quiet mode. 
Only exit code indicates success (0) or error (1)") + + +def main(argv=sys.argv): + filename = None + quiet = False + + for arg in argv[1:]: + if arg == "-q": + quiet = True + elif arg == "-h": + usage() + return 0 + elif arg[0] == "-": + print(f"Invalid option: {arg}\n") + return 2 + else: + filename = arg + + if filename is None: + print("Filename missing\n") + return 2 + + errors, warnings, checks_done = check( + filename, + abort_at_first_error=False, + ) + + if not quiet: + print(f"Checks done: {checks_done}") + + if warnings: + print("") + print("Warnings:") + for msg in warnings: + print(f"Warning: {msg}") + + if errors: + print("") + print("Errors:") + for criticity, msg in errors: + print(f"{criticity}: {msg}") + print("") + print("Errors found: validation failed!") + else: + print("") + print("No errors found: validation succeeded.") + + return 1 if errors else 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) From f1ee83f5e241229b45223326b001f9bd963e0b1a Mon Sep 17 00:00:00 2001 From: Even Rouault Date: Wed, 5 Nov 2025 19:52:18 +0100 Subject: [PATCH 20/20] S111: add write (CreateCopy()) support for S111 v2.0 --- .../expected_gdalinfo_formats.txt | 2 +- ...indows_conda_expected_gdalinfo_formats.txt | 2 +- apps/gdal_translate_bin.cpp | 5 +- autotest/gdrivers/s111.py | 1402 ++++++++++++++++- doc/source/drivers/raster/s111.rst | 256 ++- frmts/hdf5/hdf5drivercore.cpp | 79 + frmts/hdf5/s100.cpp | 53 +- frmts/hdf5/s100.h | 3 +- frmts/hdf5/s102dataset.cpp | 3 +- frmts/hdf5/s104dataset.cpp | 3 +- frmts/hdf5/s111dataset.cpp | 1222 +++++++++++++- 11 files changed, 2995 insertions(+), 35 deletions(-) diff --git a/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt b/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt index 76d45e44dd49..f742ba4814e2 100644 --- a/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt +++ b/.github/workflows/ubuntu_24.04/expected_gdalinfo_formats.txt @@ -104,7 +104,7 @@ Supported Formats: (ro:read-only, rw:read-write, +:write from scratch, u:update, BAG -raster,multidimensional raster,vector- (rw+v): Bathymetry Attributed Grid (*.bag) S102 -raster,multidimensional raster- (rwvs): S-102 Bathymetric Surface Product (*.h5) S104 -raster,multidimensional raster- (rwvs): S-104 Water Level Information for Surface Navigation Product (*.h5) - S111 -raster,multidimensional raster- (rovs): S-111 Surface Currents Product (*.h5) + S111 -raster,multidimensional raster- (rwvs): S-111 Surface Currents Product (*.h5) HDF5 -raster,multidimensional raster- (rovs): Hierarchical Data Format Release 5 (*.h5, *.hdf5) HDF5Image -raster- (rov): HDF5 Dataset NWT_GRD -raster- (rw+v): Northwood Numeric Grid Format .grd/.tab (*.grd) diff --git a/.github/workflows/windows_conda_expected_gdalinfo_formats.txt b/.github/workflows/windows_conda_expected_gdalinfo_formats.txt index 868656101a21..3f11adc10f0c 100644 --- a/.github/workflows/windows_conda_expected_gdalinfo_formats.txt +++ b/.github/workflows/windows_conda_expected_gdalinfo_formats.txt @@ -106,7 +106,7 @@ Supported Formats: (ro:read-only, rw:read-write, +:write from scratch, u:update, BAG -raster,multidimensional raster,vector- (rw+v): Bathymetry Attributed Grid (*.bag) S102 -raster,multidimensional raster- (rwvs): S-102 Bathymetric Surface Product (*.h5) S104 -raster,multidimensional raster- (rwvs): S-104 Water Level Information for Surface Navigation Product (*.h5) - S111 -raster,multidimensional raster- (rovs): S-111 Surface Currents Product (*.h5) + S111 -raster,multidimensional raster- 
(rwvs): S-111 Surface Currents Product (*.h5) HDF5 -raster,multidimensional raster- (rovs): Hierarchical Data Format Release 5 (*.h5, *.hdf5) HDF5Image -raster- (rov): HDF5 Dataset NWT_GRD -raster- (rw+v): Northwood Numeric Grid Format .grd/.tab (*.grd) diff --git a/apps/gdal_translate_bin.cpp b/apps/gdal_translate_bin.cpp index 9a528d4c00d3..151d84214c23 100644 --- a/apps/gdal_translate_bin.cpp +++ b/apps/gdal_translate_bin.cpp @@ -168,9 +168,10 @@ MAIN_START(argc, argv) if (!sOptionsForBinary.bCopySubDatasets && GDALGetRasterCount(hDataset) == 0 && CSLCount(GDALGetMetadata(hDataset, "SUBDATASETS")) > 0 && - // S104 driver knows how to handle a source dataset with subdatasets + // S104 and S111 drivers know how to handle a source dataset with subdatasets // and no input bands. - !EQUAL(sOptionsForBinary.osFormat.c_str(), "S104")) + !EQUAL(sOptionsForBinary.osFormat.c_str(), "S104") && + !EQUAL(sOptionsForBinary.osFormat.c_str(), "S111")) { fprintf(stderr, "Input file contains subdatasets. Please, select one " "of them for reading.\n"); diff --git a/autotest/gdrivers/s111.py b/autotest/gdrivers/s111.py index ca46dd119fc5..a20bb4bd5556 100755 --- a/autotest/gdrivers/s111.py +++ b/autotest/gdrivers/s111.py @@ -7,17 +7,20 @@ # Author: Even Rouault # ############################################################################### -# Copyright (c) 2023, Even Rouault +# Copyright (c) 2023-2025, Even Rouault # # SPDX-License-Identifier: MIT ############################################################################### import os import struct +import sys +import gdaltest import pytest +from test_py_scripts import samples_path -from osgeo import gdal +from osgeo import gdal, osr pytestmark = pytest.mark.require_driver("S111") @@ -255,3 +258,1398 @@ def test_s111_multiple_feature_instance_groups(): 0, ) assert ds.GetMetadataItem("VERTICAL_DATUM_MEANING") == "lowWater" + + +############################################################################### + + +def validate( + filename, expected_errors=None, expected_warnings=None, expected_check_count=None +): + + path = samples_path + if path not in sys.path: + sys.path.append(path) + try: + import validate_s111 + except ImportError: + print("Cannot import validate_s111") + return True + + errors, warnings, checks_done = validate_s111.check(filename) + + if expected_errors: + assert errors == expected_errors + else: + if errors: + print(errors) + assert not errors + + if expected_warnings: + assert warnings == expected_warnings + else: + if warnings: + print(warnings) + assert not warnings + + if expected_check_count: + assert len(checks_done) == expected_check_count + + +############################################################################### + + +def test_s111_validator(): + + # Fake product: many unconformities + expected_errors = [ + ( + "Critical error", + "/Group_F/SurfaceCurrent: row 0, got value '['surfaceCurrentSpeed', 'Surface Current Speed', 'knot', '-9999.00', 'H5T_FLOAT', '0.00', '', 'geSemiInterval']', which is not in '[['surfaceCurrentSpeed', 'Surface Current Speed', 'knot', '-9999.00', 'H5T_FLOAT', '0.00', '99.00', 'geSemiInterval'], ['surfaceCurrentDirection', 'Surface Current Direction', 'degree', '-9999.0', 'H5T_FLOAT', '0.0', '359.9', 'closedInterval'], ['surfaceCurrentTime', 'Surface Current Time', '', '00010101T000000Z', 'H5T_STRING', '19000101T000000Z', '21500101T000000Z', 'closedInterval'], ['speedUncertainty', 'Speed Uncertainty', 'knot', '-1.0', 'H5T_FLOAT', '0.00', '99.00', 'geSemiInterval'], ['directionUncertainty', 
'Direction Uncertainty', 'degree', '-1.0', 'H5T_FLOAT', '0.0', '359.9', 'closedInterval']]'", + ), + ( + "Critical error", + "/Group_F/SurfaceCurrent: row 1, got value '['surfaceCurrentDirection', 'Surface Current Direction', 'degree', '-9999.00', 'H5T_FLOAT', '0.00', '359.9', 'closedInterval']', which is not in '[['surfaceCurrentSpeed', 'Surface Current Speed', 'knot', '-9999.00', 'H5T_FLOAT', '0.00', '99.00', 'geSemiInterval'], ['surfaceCurrentDirection', 'Surface Current Direction', 'degree', '-9999.0', 'H5T_FLOAT', '0.0', '359.9', 'closedInterval'], ['surfaceCurrentTime', 'Surface Current Time', '', '00010101T000000Z', 'H5T_STRING', '19000101T000000Z', '21500101T000000Z', 'closedInterval'], ['speedUncertainty', 'Speed Uncertainty', 'knot', '-1.0', 'H5T_FLOAT', '0.00', '99.00', 'geSemiInterval'], ['directionUncertainty', 'Direction Uncertainty', 'degree', '-1.0', 'H5T_FLOAT', '0.0', '359.9', 'closedInterval']]'", + ), + ("Error", "top level attribute 'issueDate' is not a valid date: 2025-10-07"), + ("Error", "top level attribute 'horizontalCRS' is not a int32"), + ("Error", "top level attribute 'westBoundLongitude' is not a float32"), + ("Error", "top level attribute 'eastBoundLongitude' is not a float32"), + ("Error", "top level attribute 'southBoundLatitude' is not a float32"), + ("Error", "top level attribute 'northBoundLatitude' is not a float32"), + ("Error", "top level attribute 'issueTime' is not a valid time: 12:34:56"), + ("Error", "top level attribute 'depthTypeIndex' is not an enumeration"), + ("Error", "top level attribute 'surfaceCurrentDepth' is not a float32"), + ("Error", "top level attribute 'verticalCS' is not a int32"), + ("Error", "top level attribute 'verticalCoordinateBase' is not an enumeration"), + ("Error", "top level attribute 'verticalDatumReference' is not an enumeration"), + ("Error", "top level attribute 'verticalDatum' is not a int32"), + ( + "Error", + "SurfaceCurrent group attribute 'dataCodingFormat' is not an enumeration", + ), + ("Error", "SurfaceCurrent group attribute 'dimension' is not a uint8"), + ( + "Error", + "SurfaceCurrent group attribute 'commonPointRule' is not an enumeration", + ), + ( + "Error", + "SurfaceCurrent group attribute 'horizontalPositionUncertainty' is not a float32", + ), + ( + "Error", + "SurfaceCurrent group attribute 'verticalUncertainty' is not a float32", + ), + ("Error", "SurfaceCurrent group attribute 'numInstances' is not a uint32"), + ( + "Error", + "SurfaceCurrent group attribute 'minDatasetCurrentSpeed' is not a float64", + ), + ( + "Error", + "SurfaceCurrent group attribute 'maxDatasetCurrentSpeed' is not a float64", + ), + ( + "Error", + "SurfaceCurrent group attribute 'sequencingRule.type' is not an enumeration", + ), + ( + "Error", + "SurfaceCurrent group attribute 'interpolationType' is not an enumeration", + ), + ( + "Error", + "SurfaceCurrent group attribute 'dataOffsetCode' is not an enumeration", + ), + ( + "Error", + '/SurfaceCurrent/axisNames must conform to CRS. Expected ["Easting", "Northing"] or ["Latitude", "Longitude"]. 
Got [\'longitude\', \'latitude\']', + ), + ( + "Error", + "SurfaceCurrent feature instance group /SurfaceCurrent/SurfaceCurrent.01 attribute 'dataDynamicity' is not an enumeration", + ), + ( + "Error", + "SurfaceCurrent feature instance group /SurfaceCurrent/SurfaceCurrent.02 attribute 'dataDynamicity' is not an enumeration", + ), + ( + "Error", + "/SurfaceCurrent/SurfaceCurrent.02/Group_001/values : maximum surfaceCurrentSpeed is 70.0, whereas maxDatasetCurrentSpeed attribute = 7.0", + ), + ] + expected_warnings = [ + "File name should start with '111'", + "File name 'multiple_feature_instance_groups.h5' does not match expected pattern '^111[a-zA-Z0-9]{4}[a-zA-Z0-9\\-_]{1,54}\\.(?:h5|H5)$'", + "Extra element in SurfaceCurrent feature instance group /SurfaceCurrent/SurfaceCurrent.02 group: 'verticalDatum'", + "Extra element in SurfaceCurrent feature instance group /SurfaceCurrent/SurfaceCurrent.02 group: 'verticalDatumReference'", + ] + + validate( + "data/s111/multiple_feature_instance_groups.h5", + expected_errors=expected_errors, + expected_warnings=expected_warnings, + ) + + +############################################################################### + + +def test_s111_write_errors(tmp_vsimem): + + with pytest.raises(Exception, match="Source dataset x must have 2 or 4 bands"): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + gdal.GetDriverByName("MEM").Create("x", 2, 2), + format="S111", + ) + + with pytest.raises( + Exception, match="Source dataset dimension must be at least 1x1 pixel" + ): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + gdal.GetDriverByName("MEM").Create("x", 0, 0, 2), + format="S111", + ) + + with pytest.raises( + Exception, match="S111 driver requires a source dataset with a geotransform" + ): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + gdal.GetDriverByName("MEM").Create("", 1, 1, 2), + format="S111", + ) + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + with pytest.raises( + Exception, match="S111 driver requires a source dataset with a geotransform" + ): + gdal.Translate(tmp_vsimem / "111xxxxyyyy.h5", src_ds, format="S111") + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0.2, 0, 0, 1]) + with pytest.raises( + Exception, + match="S111 driver requires a source dataset with a non-rotated geotransform", + ): + gdal.Translate(tmp_vsimem / "111xxxxyyyy.h5", src_ds, format="S111") + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + with pytest.raises( + Exception, match="S111 driver requires a source dataset with a CRS" + ): + gdal.Translate(tmp_vsimem / "111xxxxyyyy.h5", src_ds, format="S111") + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + srs = osr.SpatialReference() + srs.ImportFromProj4("+proj=longlat") + src_ds.SetSpatialRef(srs) + with pytest.raises( + Exception, + match="TIME_POINT creation option value must be set, or source dataset must have a timePoint metadata item", + ): + gdal.Translate(tmp_vsimem / "111xxxxyyyy.h5", src_ds, format="S111") + + with pytest.raises( + Exception, + match="TIME_POINT creation option value must be set to a YYYYMMDDTHHMMSSZ datetime value", + ): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=["TIME_POINT=invalid"], + ) + + with pytest.raises(Exception, match="DEPTH_TYPE creation option must be specified"): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + 
creationOptions=["TIME_POINT=20251105T231000Z"], + ) + + with pytest.raises( + Exception, + match="DEPTH_TYPE creation option must be set to heightOrDepth/1 or layerAverage/2", + ): + with gdal.quiet_errors(): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=["TIME_POINT=20251105T231000Z", "DEPTH_TYPE=invalid"], + ) + + with pytest.raises( + Exception, + match="VERTICAL_CS creation option must be specified when DEPTH_TYPE = heightOrDepth", + ): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=["TIME_POINT=20251105T231000Z", "DEPTH_TYPE=1"], + ) + + with pytest.raises( + Exception, + match=r"VERTICAL_CS creation option must be set either to 6498 \(depth/down, metre\), or 6499 \(height/up, metre\)", + ): + with gdal.quiet_errors(): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=invalid", + ], + ) + + with pytest.raises( + Exception, + match="VERTICAL_DATUM creation option must be specified when DEPTH_TYPE = heightOrDepth", + ): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + ], + ) + + with pytest.raises( + Exception, + match="VERTICAL_DATUM value is invalid", + ): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + "VERTICAL_DATUM=invalid", + ], + ) + + with pytest.raises( + Exception, + match="SURFACE_CURRENT_DEPTH creation option must be specified", + ): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + "VERTICAL_DATUM=MLLW", + ], + ) + + with pytest.raises( + Exception, + match="SURFACE_CURRENT_DEPTH creation option value must be a numeric value", + ): + with gdal.quiet_errors(): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + "VERTICAL_DATUM=MLLW", + "SURFACE_CURRENT_DEPTH=invalid", + ], + ) + + with pytest.raises( + Exception, + match="DATA_DYNAMICITY creation option must be specified", + ): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + "VERTICAL_DATUM=MLLW", + "SURFACE_CURRENT_DEPTH=2.0", + ], + ) + + with pytest.raises( + Exception, + match="DATA_DYNAMICITY creation option must be set to observation/1, astronomicalPrediction/2, analysisOrHybrid/3 or hydrodynamicForecast/5", + ): + with gdal.quiet_errors(): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + "VERTICAL_DATUM=MLLW", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=invalid", + ], + ) + + +############################################################################### + + +def test_s111_write_warnings(tmp_vsimem): + + src_ds = gdal.GetDriverByName("MEM").Create("", 1, 1, 2) + src_ds.SetGeoTransform([0, 1, 0, 0, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised( + gdal.CE_Warning, match="S111 dataset filenames should start with '111'" + ): + 
gdal.Translate( + tmp_vsimem / "non_conformant_prefix.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + "VERTICAL_DATUM=MLLW", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + ], + ) + + with gdaltest.error_raised( + gdal.CE_Warning, match="S111 dataset filenames should have a '.H5' extension" + ): + gdal.Translate( + tmp_vsimem / "111xxxxyyyy.oops", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + "VERTICAL_DATUM=MLLW", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + ], + ) + + +############################################################################### + + +def test_s111_write_basic(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 2, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + "VERTICAL_DATUM=MLLW", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + ], + ) + + with gdal.Open(f'S111:"{tmp_path}/111xxxxyyyy.h5":Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert ds.RasterCount == 2 + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert ds.GetMetadata_Dict() == { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "VERTICAL_CS_MEANING": "depth, meters, orientation down", + "VERTICAL_DATUM_ABBREV": "MLLW", + "VERTICAL_DATUM_MEANING": "meanLowerLowWater", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251105T231000Z", + "dateTimeOfLastRecord": "20251105T231000Z", + "depthTypeIndex": "1", + "issueDate": "20251105", + "issueTime": "222950Z", + "maxDatasetCurrentSpeed": "9.5", + "minDatasetCurrentSpeed": "1.5", + "numberOfTimes": "1", + "surfaceCurrentDepth": "2", + "timePoint": "20251105T231000Z", + "uncertaintySurfaceCurrentDirection": "-1.000000", + "uncertaintySurfaceCurrentSpeed": "-1.000000", + "verticalCS": "6498", + } + + validate( + tmp_path / "111xxxxyyyy.h5", + expected_check_count=59, + ) + + +############################################################################### + + +def test_s111_write_with_uncertainty_bands(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 4, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.GetRasterBand(3).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 10.5, 20.5, 30.5, 40.5, 50.5, 60.5, 70.5, 80.5, 90.5), + ) + src_ds.GetRasterBand(4).WriteRaster( + 0, + 0, + 3, + 3, + 
struct.pack( + "f" * 9, 110.5, 120.5, 130.5, 140.5, 150.5, 160.5, 170.5, 180.5, 190.5 + ), + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=1", + "VERTICAL_CS=6498", + "VERTICAL_DATUM=MLLW", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + ], + ) + + with gdal.Open(f'S111:"{tmp_path}/111xxxxyyyy.h5":Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert ds.RasterCount == 4 + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(3).ReadRaster()) == ( + 70.5, + 80.5, + 90.5, + 40.5, + 50.5, + 60.5, + 10.5, + 20.5, + 30.5, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(4).ReadRaster()) == ( + 170.5, + 180.5, + 190.5, + 140.5, + 150.5, + 160.5, + 110.5, + 120.5, + 130.5, + ) + assert ds.GetMetadata_Dict() == { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "VERTICAL_CS_MEANING": "depth, meters, orientation down", + "VERTICAL_DATUM_ABBREV": "MLLW", + "VERTICAL_DATUM_MEANING": "meanLowerLowWater", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251105T231000Z", + "dateTimeOfLastRecord": "20251105T231000Z", + "depthTypeIndex": "1", + "issueDate": "20251105", + "issueTime": "222950Z", + "maxDatasetCurrentSpeed": "9.5", + "minDatasetCurrentSpeed": "1.5", + "numberOfTimes": "1", + "surfaceCurrentDepth": "2", + "timePoint": "20251105T231000Z", + "verticalCS": "6498", + } + + validate( + tmp_path / "111xxxxyyyy.h5", + expected_check_count=60, + ) + + +############################################################################### + + +@pytest.mark.parametrize( + "proj4,out_proj4", + [ + ("+proj=longlat +ellps=GRS80 +pm=paris +no_defs", None), + ( + "+proj=merc +lat_ts=1.5 +lon_0=2.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=merc +lat_0=0 +lon_0=2.5 +k=0.99 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + "+proj=merc +lat_ts=8.13653121977138 +lon_0=2.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + ), + ( + "+proj=tmerc +lat_0=1.5 +lon_0=2.5 +k=0.99 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=omerc +lat_0=1.5 +lonc=2.5 +alpha=3.5 +gamma=4.5 +k=0.99 +x_0=5.5 +y_0=6.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=omerc +no_uoff +lat_0=1.5 +lonc=2.5 +alpha=3.5 +gamma=4.5 +k=0.99 +x_0=5.5 +y_0=6.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=lcc +lat_0=1.5 +lon_0=4.5 +lat_1=2.5 +lat_2=3.5 +x_0=5.5 +y_0=6.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=lcc +lat_1=49 +lat_0=49 +lon_0=4.5 +k_0=0.99 +x_0=5.5 +y_0=6.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=sterea +lat_0=1.5 +lon_0=2.5 +k=0.9 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=stere +lat_0=90 +lon_0=2.5 +k=0.9 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=krovak +axis=swu +lat_0=49.5 +lon_0=42.5 
+alpha=30.2881397527778 +k=0.9999 +x_0=0 +y_0=0 +ellps=bessel +pm=ferro +units=m +no_defs", + None, + ), + ( + "+proj=poly +lat_0=1.5 +lon_0=2.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=aea +lat_0=1.5 +lon_0=4.5 +lat_1=2.5 +lat_2=3.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ( + "+proj=laea +lat_0=1.5 +lon_0=2.5 +x_0=3.5 +y_0=4.5 +datum=WGS84 +units=m +no_defs", + None, + ), + ], +) +def test_s111_write_custom_crs(tmp_path, proj4, out_proj4): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 2, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + if proj4 == "+proj=longlat +ellps=GRS80 +pm=paris +no_defs": + src_ds.SetGeoTransform([2, 1.1, 0, 49, 0, 1.2]) + else: + src_ds.SetGeoTransform([1000, 1.1, 0, 10000, 0, 1.2]) + srs = osr.SpatialReference() + srs.ImportFromProj4(proj4) + src_ds.SetSpatialRef(srs) + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=2", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + ], + ) + + ds = gdal.Open(f'S111:"{tmp_path}/111xxxxyyyy.h5":Group_001') + assert ds.GetSpatialRef().ExportToProj4() == (out_proj4 if out_proj4 else proj4) + + validate( + tmp_path / "111xxxxyyyy.h5", + expected_check_count=59, + ) + + +############################################################################### + + +@pytest.mark.parametrize( + "value,warning_msg,expected_errors,expected_warnings", + [ + ( + -0.5, + "Range of surface current speed in the dataset is [-0.500000, 9.500000] whereas the allowed range is [0.00, 99.00]", + [ + ( + "Error", + "/SurfaceCurrent/SurfaceCurrent.01/Group_001/values : minimum surfaceCurrentSpeed is -0.5, whereas it should be >= 0", + ) + ], + [ + "/SurfaceCurrent: minDatasetCurrentSpeed=-0.5 should be in [0, 99.99] range" + ], + ), + ( + 100.5, + "Range of surface current speed in the dataset is [2.500000, 100.500000] whereas the allowed range is [0.00, 99.00]", + [ + ( + "Error", + "/SurfaceCurrent/SurfaceCurrent.01/Group_001/values : maximum surfaceCurrentSpeed is 100.5, whereas it should be <= 99", + ) + ], + [ + "/SurfaceCurrent: maxDatasetCurrentSpeed=100.5 should be in [0, 99.99] range" + ], + ), + ], +) +def test_s111_write_out_of_range_speed( + tmp_path, value, warning_msg, expected_errors, expected_warnings +): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 2, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, value, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised(gdal.CE_Warning, match=warning_msg): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=2", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + ], + ) + + validate( + tmp_path / "111xxxxyyyy.h5", + expected_errors=expected_errors, + expected_warnings=expected_warnings, + ) + + 
+############################################################################### + + +@pytest.mark.parametrize( + "value,warning_msg,expected_errors,expected_warnings", + [ + ( + -0.5, + "Range of surface current direction in the dataset is [-0.500000, 3.000000] whereas the allowed range is [0.00, 359.90]", + [ + ( + "Error", + "/SurfaceCurrent/SurfaceCurrent.01/Group_001/values : minimum surfaceCurrentDirection is -0.5, whereas it should be >= 0", + ) + ], + [], + ), + ( + 360.0, + "Range of surface current direction in the dataset is [0.000000, 360.000000] whereas the allowed range is [0.00, 359.90]", + [ + ( + "Error", + "/SurfaceCurrent/SurfaceCurrent.01/Group_001/values : maximum surfaceCurrentDirection is 360.0, whereas it should be <= 359.9", + ) + ], + [], + ), + ], +) +def test_s111_write_out_of_range_dir( + tmp_path, value, warning_msg, expected_errors, expected_warnings +): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 2, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, value, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised(gdal.CE_Warning, match=warning_msg): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=2", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + ], + ) + + validate( + tmp_path / "111xxxxyyyy.h5", + expected_errors=expected_errors, + expected_warnings=expected_warnings, + ) + + +############################################################################### + + +def test_s111_write_out_of_range_speed_uncertainty(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 4, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.GetRasterBand(3).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, -10, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.GetRasterBand(4).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised( + gdal.CE_Warning, + match="Negative speed uncertainty value found (-10.000000), which is not allowed (except nodata value -1.0)", + ): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=2", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + ], + ) + + validate( + tmp_path / "111xxxxyyyy.h5", + expected_errors=[ + ( + "Error", + "/SurfaceCurrent/SurfaceCurrent.01/Group_001/values : minimum speedUncertainty is -10.0, whereas it should be >= 0", + ) + ], + ) + + +############################################################################### + + +def test_s111_write_out_of_range_direction_uncertainty(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 3, 3, 4, gdal.GDT_Float32) + src_ds.GetRasterBand(1).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 
9.5) + ) + src_ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.GetRasterBand(3).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.GetRasterBand(4).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, -10, 2, 3, 0, 1, 2, 2, 0, 1) + ) + src_ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + with gdaltest.error_raised( + gdal.CE_Warning, + match="Negative direction uncertainty value found (-10.000000), which is not allowed (except nodata value -1.0)", + ): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=2", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + ], + ) + + validate( + tmp_path / "111xxxxyyyy.h5", + expected_errors=[ + ( + "Error", + "/SurfaceCurrent/SurfaceCurrent.01/Group_001/values : minimum directionUncertainty is -10.0, whereas it should be >= 0", + ) + ], + ) + + +############################################################################### + + +def test_s111_write_large_file(tmp_path): + + src_ds = gdal.GetDriverByName("MEM").Create("", 1200, 1200, 4, gdal.GDT_Float32) + src_ds.SetGeoTransform([500000, 1, 0, 4500000, 0, 1]) + src_ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + + filename = str(tmp_path / "111xxxxyyyy.h5") + with gdaltest.error_raised( + gdal.CE_Warning, + match="file size exceeds 10 MB", + ): + gdal.Translate( + filename, + src_ds, + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=2", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + "COMPRESS=NONE", + ], + ) + + expected_warnings = [ + f"File size of {filename} = 23067944, which exceeds 10 MB", + ] + validate(filename, expected_warnings=expected_warnings) + + +############################################################################### + + +def test_s111_write_multiple_timestamps(tmp_path): + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in1.tif", 3, 3, 2, gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + ds.SetMetadataItem("timePoint", "20251111T120000Z") + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in2.tif", 3, 3, 2, gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + ds.SetMetadataItem("timePoint", "20251111T130000Z") + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in3.tif", 3, 3, 2, gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + 
ds.SetMetadataItem("timePoint", "20251111T140000Z") + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + gdal.Open(tmp_path / "in1.tif"), + format="S111", + creationOptions=[ + "TIME_POINT=20251105T231000Z", + "DEPTH_TYPE=2", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + "UNCERTAINTY_SPEED=1.5", + "UNCERTAINTY_DIRECTION=2.5", + f"DATASETS={tmp_path}/in1.tif,{tmp_path}/in2.tif,{tmp_path}/in3.tif", + ], + ) + + with gdal.Open(f'S111:"{tmp_path}/111xxxxyyyy.h5":Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + expected_md = { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251111T120000Z", + "dateTimeOfLastRecord": "20251111T140000Z", + "depthTypeIndex": "2", + "issueDate": "20251105", + "issueTime": "222950Z", + "maxDatasetCurrentSpeed": "9.5", + "minDatasetCurrentSpeed": "1.5", + "numberOfTimes": "3", + "surfaceCurrentDepth": "2", + "timePoint": "20251111T120000Z", + "timeRecordInterval": "3600", + "uncertaintySurfaceCurrentDirection": "2.500000", + "uncertaintySurfaceCurrentSpeed": "1.500000", + } + assert ds.GetMetadata_Dict() == expected_md + + validate( + tmp_path / "111xxxxyyyy.h5", + expected_check_count=60, + ) + + # Test S111->S111 translation + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "111xxxxyyyybis.h5", + gdal.Open(tmp_path / "111xxxxyyyy.h5"), + format="S111", + ) + + with gdal.Open(f'S111:"{tmp_path}/111xxxxyyyybis.h5":Group_001') as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert ds.GetMetadata_Dict() == expected_md + + validate( + tmp_path / "111xxxxyyyybis.h5", + expected_check_count=60, + ) + + +############################################################################### + + +def test_s111_write_multiple_instances(tmp_path): + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in1.tif", 3, 3, 2, gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + ds.SetMetadataItem("timePoint", "20251111T120000Z") + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + gdal.Open(tmp_path / "in1.tif"), + format="S111", + creationOptions=[ + "DEPTH_TYPE=2", + "SURFACE_CURRENT_DEPTH=2.0", + "DATA_DYNAMICITY=5", + "ISSUE_DATE=20251105", + "ISSUE_TIME=222950Z", + ], + ) + + with gdal.GetDriverByName("GTiff").Create( + tmp_path / "in2.tif", 3, 3, 2, 
gdal.GDT_Float32 + ) as ds: + ds.GetRasterBand(1).WriteRaster( + 0, + 0, + 3, + 3, + struct.pack("f" * 9, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5), + ) + ds.GetRasterBand(2).WriteRaster( + 0, 0, 3, 3, struct.pack("f" * 9, 1, 2, 3, 0, 1, 2, 2, 0, 1) + ) + ds.SetGeoTransform([500000, 1.1, 0, 4500000, 0, 1.2]) + ds.SetSpatialRef(osr.SpatialReference(epsg=32631)) + ds.SetMetadataItem("timePoint", "20251111T120000Z") + + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "111xxxxyyyy.h5", + gdal.Open(tmp_path / "in2.tif"), + format="S111", + creationOptions=[ + "DATA_DYNAMICITY=5", + "APPEND_SUBDATASET=YES", + ], + ) + + with gdal.Open( + f'S111:"{tmp_path}/111xxxxyyyy.h5":SurfaceCurrent.01:Group_001' + ) as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + dict1 = { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251111T120000Z", + "dateTimeOfLastRecord": "20251111T120000Z", + "depthTypeIndex": "2", + "issueDate": "20251105", + "issueTime": "222950Z", + "maxDatasetCurrentSpeed": "9.5", + "minDatasetCurrentSpeed": "1.5", + "numberOfTimes": "1", + "surfaceCurrentDepth": "2", + "timePoint": "20251111T120000Z", + "uncertaintySurfaceCurrentDirection": "-1.000000", + "uncertaintySurfaceCurrentSpeed": "-1.000000", + } + assert ds.GetMetadata_Dict() == dict1 + + with gdal.Open( + f'S111:"{tmp_path}/111xxxxyyyy.h5":SurfaceCurrent.02:Group_001' + ) as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + dict2 = { + "AREA_OR_POINT": "Point", + "DATA_DYNAMICITY_MEANING": "Hydrodynamic model forecast", + "dataDynamicity": "5", + "dateTimeOfFirstRecord": "20251111T120000Z", + "dateTimeOfLastRecord": "20251111T120000Z", + "depthTypeIndex": "2", + "issueDate": "20251105", + "issueTime": "222950Z", + "maxDatasetCurrentSpeed": "9.5", + "minDatasetCurrentSpeed": "1.5", + "numberOfTimes": "1", + "surfaceCurrentDepth": "2", + "timePoint": "20251111T120000Z", + "uncertaintySurfaceCurrentDirection": "-1.000000", + "uncertaintySurfaceCurrentSpeed": "-1.000000", + } + + assert ds.GetMetadata_Dict() == dict2 + + validate( + tmp_path / "111xxxxyyyy.h5", + expected_check_count=59, + ) + + # Test S111->S111 translation + with gdaltest.error_raised(gdal.CE_None): + gdal.Translate( + tmp_path / "111xxxxyyyybis.h5", + gdal.Open(tmp_path / "111xxxxyyyy.h5"), + format="S111", + ) + + with gdal.Open( + f'S111:"{tmp_path}/111xxxxyyyybis.h5":SurfaceCurrent.01:Group_001' + ) as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + 
assert struct.unpack("f" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert ds.GetMetadata_Dict() == dict1 + + with gdal.Open( + f'S111:"{tmp_path}/111xxxxyyyybis.h5":SurfaceCurrent.02:Group_001' + ) as ds: + assert ds.GetSpatialRef().GetAuthorityCode(None) == "32631" + assert ds.GetGeoTransform() == pytest.approx( + (500000, 1.1, 0, 4500000 + 1.2 * 3, 0, -1.2) + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(1).ReadRaster()) == ( + 7.5, + 8.5, + 9.5, + 4.5, + 5.5, + 6.5, + 1.5, + 2.5, + 3.5, + ) + assert struct.unpack("f" * 9, ds.GetRasterBand(2).ReadRaster()) == ( + 2, + 0, + 1, + 0, + 1, + 2, + 1, + 2, + 3, + ) + assert ds.GetMetadata_Dict() == dict2 + + validate( + tmp_path / "111xxxxyyyybis.h5", + expected_check_count=59, + ) diff --git a/doc/source/drivers/raster/s111.rst b/doc/source/drivers/raster/s111.rst index 2ac4ed297e8a..79ef6b372e11 100644 --- a/doc/source/drivers/raster/s111.rst +++ b/doc/source/drivers/raster/s111.rst @@ -10,7 +10,7 @@ S111 -- S-111 Surface Currents Product .. versionadded:: 3.9 -This driver provides read-only support for surface currents in the S-111 format, +This driver provides support for surface currents in the S-111 format, which is a specific product profile in an HDF5 file. S-111 files have at a minimum two image bands representing the following values for each @@ -38,9 +38,13 @@ In that case, each feature instance group and timestamp group is exposed as a GDAL subdataset, whose name is of the form ``S111:"{filename.h5}":SurfaceCurrent.{XX}:Group_{YYY}``. +Write support for S-111 v2.0 has been added in GDAL 3.13 + Driver capabilities ------------------- +.. supports_createcopy:: + .. supports_georeferencing:: .. supports_virtualio:: @@ -63,6 +67,248 @@ The following open options are supported: exposed by the driver by setting this option to NO (in which case the 6th term of the geotransform matrix will be positive) +Write support +------------- + +.. versionadded:: 3.13 + +Creation of a S-111 v2.0 dataset from another existing GDAL supported dataset is +possible using the :cpp:func:`GDALDriver::CreateCopy` function, or utilities +like :ref:`gdal_translate` or :ref:`gdal_raster_convert`. The input dataset +must have two or four bands. The first band must contain the surface current +speed in knot, and the second band must contain the surface current direction, +in degree measured from true north clock-wise, between 0 and 359.9. + +The third and fourth bands when present must contain respectively the uncertainty +of the speed (in knot) and the uncertainty of the direction (in degree) + +If several grids are available at different timestamps, they can be provided with +the :co:`DATASETS` creation option, possibly with the :co:`DATASETS_TIME_POINT` creation option +if the datasets do not have a ``timePoint`` metadata item. + +If several feature instances are needed, the :co:`APPEND_SUBDATASET` creation option +can be set to ``YES`` to add an extra feature instance group ("SurfaceCurrent.XX") +to an existing S-111 dataset. + +The following creation options are available: + +- .. co:: TIME_POINT + :choices: + + Timestamp as YYYYMMDDTHHMMSSZ format (required). + +- .. co:: DEPTH_TYPE + :choices: ``heightOrDepth`` or ``layerAverage`` + + Type of depth (required). When selecting ``heightOrDepth``, the interpretation depends on the VERTICAL_CS value. + +- .. co:: VERTICAL_DATUM + :choices: + + Vertical datum. This is a required creation option when ``DEPTH_TYPE=heightOrDepth``. 
+
+  Possible values are either:
+
+  - an S100 vertical datum, expressed as a numeric code in the 1 to 30
+    range, or one of the values 44, 46, 47, 48 or 49, or its string
+    meaning or abbreviation from the following list:
+
+    * 1: ``meanLowWaterSprings`` / ``MLWS``
+    * 2: ``meanLowerLowWaterSprings``
+    * 3: ``meanSeaLevel`` / ``MSL``
+    * 4: ``lowestLowWater``
+    * 5: ``meanLowWater`` / ``MLW``
+    * 6: ``lowestLowWaterSprings``
+    * 7: ``approximateMeanLowWaterSprings``
+    * 8: ``indianSpringLowWater``
+    * 9: ``lowWaterSprings``
+    * 10: ``approximateLowestAstronomicalTide``
+    * 11: ``nearlyLowestLowWater``
+    * 12: ``meanLowerLowWater`` / ``MLLW``
+    * 13: ``lowWater`` / ``LW``
+    * 14: ``approximateMeanLowWater``
+    * 15: ``approximateMeanLowerLowWater``
+    * 16: ``meanHighWater`` / ``MHW``
+    * 17: ``meanHighWaterSprings`` / ``MHWS``
+    * 18: ``highWater`` / ``HW``
+    * 19: ``approximateMeanSeaLevel``
+    * 20: ``highWaterSprings``
+    * 21: ``meanHigherHighWater`` / ``MHHW``
+    * 22: ``equinoctialSpringLowWater``
+    * 23: ``lowestAstronomicalTide`` / ``LAT``
+    * 24: ``localDatum``
+    * 25: ``internationalGreatLakesDatum1985``
+    * 26: ``meanWaterLevel``
+    * 27: ``lowerLowWaterLargeTide``
+    * 28: ``higherHighWaterLargeTide``
+    * 29: ``nearlyHighestHighWater``
+    * 30: ``highestAstronomicalTide`` / ``HAT``
+    * 44: ``balticSeaChartDatum2000``
+    * 46: ``internationalGreatLakesDatum2020``
+    * 47: ``seaFloor``
+    * 48: ``seaSurface``
+    * 49: ``hydrographicZero``
+
+  - an EPSG vertical datum code
+
+- .. co:: VERTICAL_CS
+     :choices: DEPTH or HEIGHT
+
+     Vertical coordinate system. This is a required creation option when
+     ``DEPTH_TYPE=heightOrDepth``.
+
+     Depth is measured downward relative to the vertical datum surface,
+     in metres. Height is measured upward relative to the vertical datum
+     surface, in metres.
+
+- .. co:: SURFACE_CURRENT_DEPTH
+     :choices:
+
+     Depth/height value or layer thickness (m).
+     This is a required creation option.
+
+- .. co:: DATA_DYNAMICITY
+     :choices: observation, astronomicalPrediction, analysisOrHybrid, hydrodynamicForecast
+
+     Classification of data according to the relationship between the time of
+     its collection, generation, or calculation of generation parameters,
+     in relation to the time of publication of the dataset.
+     This is a required creation option.
+
+- .. co:: DATASETS
+     :choices:
+
+     Comma-separated list of datasets at different timestamps. If each dataset
+     does not have a ``timePoint`` metadata item, the :co:`DATASETS_TIME_POINT`
+     creation option must be specified.
+
+     The source dataset itself may, but does not have to, be included in the
+     :co:`DATASETS` list.
+
+- .. co:: DATASETS_TIME_POINT
+     :choices:
+
+     Comma-separated list of different timestamps. It must have the same number
+     of values as the :co:`DATASETS` creation option.
+     Each time point value must be specified as a ``YYYYMMDDTHHMMSSZ`` timestamp.
+
+- .. co:: ISSUE_DATE
+     :choices:
+
+     Issue date as ``YYYYMMDD``.
+
+     If not specified, defaults to the current date.
+
+- .. co:: ISSUE_TIME
+     :choices:
+
+     Issue time as ``hhmmssZ`` or ``hhmmss±HHMM``.
+
+     If not specified, defaults to the current time (in Z timezone).
+
+- .. co:: DATASET_DELIVERY_INTERVAL
+     :choices:
+
+     Expected time interval between availability of successive datasets for
+     time-varying data.
+     Must be formatted as ``PnYnMnDTnHnMnS`` (ISO-8601 duration),
+     e.g. ``PT6H`` for six-hourly datasets.
+
+- .. co:: TIME_RECORD_INTERVAL
+     :choices:
+
+     Interval in seconds between time records.
+
+- .. co:: COMMON_POINT_RULE
+     :choices: average, low, high, all
+     :default: high
+
+     Procedure used for evaluating the coverage at a position that falls on
+     the boundary or in an area of overlap between geographic objects.
+
+- .. co:: UNCERTAINTY_SPEED
+     :choices:
+
+     Uncertainty of speed values, in knots.
+
+- .. co:: UNCERTAINTY_DIRECTION
+     :choices:
+
+     Uncertainty of direction angles, in degrees.
+
+- .. co:: HORIZONTAL_POSITION_UNCERTAINTY
+     :choices:
+
+     Horizontal position uncertainty, in metres.
+
+- .. co:: VERTICAL_UNCERTAINTY
+     :choices:
+
+     Vertical uncertainty, in metres.
+
+- .. co:: TIME_UNCERTAINTY
+     :choices:
+
+     Time uncertainty, in seconds.
+
+- .. co:: COMPRESS
+     :choices: NONE, DEFLATE
+     :default: DEFLATE
+
+     Compression method for the values grids.
+
+- .. co:: ZLEVEL
+     :choices: 1-9
+     :default: 6
+
+     Deflate compression level.
+
+- .. co:: BLOCK_SIZE
+     :choices:
+
+     Chunking size of the HDF5 arrays. Defaults to 100, or to the maximum
+     dimension of the raster if it is smaller than 100.
+
+- .. co:: APPEND_SUBDATASET
+     :choices: YES, NO
+     :default: NO
+
+     Whether to append the new dataset to an existing S-111 dataset as
+     an extra feature instance group ("SurfaceCurrent.XX").
+
+
+Validation script
+-----------------
+
+.. versionadded:: 3.13
+
+The Python script :source_file:`swig/python/gdal-utils/osgeo_utils/samples/validate_s111.py`
+can be used to validate the conformity of an S-111 v2.0 dataset against the specification.
+It requires the `h5py <https://www.h5py.org/>`__ Python module to be installed
+(typically through "pip install h5py").
+
+Its usage is:
+
+::
+
+    $ python validate_s111.py 111TESTXXXX.h5
+
+
+Note that the GDAL S-111 reader is more tolerant than the validation script and
+can read files with slight non-conformities.
+
+
+Examples
+--------
+
+- Converting a GeoTIFF with surface current speed and direction, with the minimum required metadata items:
+
+  ::
+
+    $ gdal_translate current_speed_and_direction.tif 111TESTXXXX.h5 -of S111 \
+        -co TIME_POINT=20251105T012600Z \
+        -co DEPTH_TYPE=1 \
+        -co VERTICAL_DATUM=MLLW \
+        -co VERTICAL_CS=HEIGHT \
+        -co SURFACE_CURRENT_DEPTH=0.2 \
+        -co DATA_DYNAMICITY=hydrodynamicForecast
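+
+- For illustration, a rough Python-bindings equivalent, using two
+  hypothetical timestamped input grids ``in1.tif`` and ``in2.tif`` (each
+  assumed to carry a ``timePoint`` metadata item; otherwise the
+  :co:`DATASETS_TIME_POINT` creation option would have to be set):
+
+  ::
+
+    from osgeo import gdal
+
+    gdal.UseExceptions()
+
+    # DEPTH_TYPE=layerAverage does not require VERTICAL_CS/VERTICAL_DATUM,
+    # and TIME_POINT is taken from the timePoint metadata of the inputs.
+    gdal.Translate(
+        "111TESTXXXX.h5",
+        "in1.tif",
+        format="S111",
+        creationOptions=[
+            "DEPTH_TYPE=layerAverage",
+            "SURFACE_CURRENT_DEPTH=2.0",
+            "DATA_DYNAMICITY=hydrodynamicForecast",
+            "DATASETS=in1.tif,in2.tif",
+        ],
+    )
+
+
 See Also
 --------
@@ -71,3 +317,11 @@ See Also
 - :ref:`BAG driver <raster.bag>`
 - :ref:`S-102 driver <raster.s102>`
 - :ref:`S-104 driver <raster.s104>`
+
+
+.. below is an allow-list for spelling checker.
+
+.. 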
 See Also
 --------
@@ -71,3 +317,11 @@ See Also
 - :ref:`BAG driver <raster.bag>`
 - :ref:`S-102 driver <raster.s102>`
 - :ref:`S-104 driver <raster.s104>`
+
+
+.. below is an allow-list for spelling checker.
+
+.. spelling:word-list::
+    hhmmssZ
+    hhmmss
+    HHMM
diff --git a/frmts/hdf5/hdf5drivercore.cpp b/frmts/hdf5/hdf5drivercore.cpp
index 8089c47dfb2c..120f3170cdb8 100644
--- a/frmts/hdf5/hdf5drivercore.cpp
+++ b/frmts/hdf5/hdf5drivercore.cpp
@@ -699,6 +699,7 @@ void S111DriverSetCommonMetadata(GDALDriver *poDriver)
     poDriver->SetMetadataItem(GDAL_DCAP_VIRTUALIO, "YES");
     poDriver->SetMetadataItem(GDAL_DMD_EXTENSION, "h5");
     poDriver->SetMetadataItem(GDAL_DMD_SUBDATASETS, "YES");
+    poDriver->SetMetadataItem(GDAL_DCAP_CREATE_SUBDATASETS, "YES");
 
     poDriver->SetMetadataItem(
         GDAL_DMD_OPENOPTIONLIST,
@@ -707,8 +708,86 @@ void S111DriverSetCommonMetadata(GDALDriver *poDriver)
         "<OpenOptionList>"
         "<Option name='NORTH_UP' type='boolean' default='YES' "
         "description='Whether the top line of the dataset should be the "
         "northern-most one'/>"
         "</OpenOptionList>");
+
+    poDriver->SetMetadataItem(
+        GDAL_DMD_CREATIONOPTIONLIST,
+        ""
+        " "
+        " "
+        " "
+        " "
+        " "
+        " ");
+
     poDriver->pfnIdentify = S111DatasetIdentify;
     poDriver->SetMetadataItem(GDAL_DCAP_OPEN, "YES");
+    poDriver->SetMetadataItem(GDAL_DCAP_CREATECOPY, "YES");
 }
 
 /************************************************************************/
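A GDAL creation option list is an XML document of <Option> elements; the
string literals above declare one such element per creation option documented
earlier. For illustration, a single entry could look like this (a sketch, not
the driver's actual list):

    // Sketch of the shape of one creation option entry; the real S111 list
    // declares all of the options documented in the driver page.
    poDriver->SetMetadataItem(
        GDAL_DMD_CREATIONOPTIONLIST,
        "<CreationOptionList>"
        "  <Option name='TIME_POINT' type='string' "
        "description='Date and time of the data, as YYYYMMDDTHHMMSSZ'/>"
        "</CreationOptionList>");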
diff --git a/frmts/hdf5/s100.cpp b/frmts/hdf5/s100.cpp
index 4f4e43a4c473..cb658afd084b 100644
--- a/frmts/hdf5/s100.cpp
+++ b/frmts/hdf5/s100.cpp
@@ -1060,7 +1060,8 @@ bool S100BaseWriter::BaseClose()
 /*                    S100BaseWriter::BaseChecks()                      */
 /************************************************************************/
 
-bool S100BaseWriter::BaseChecks(const char *pszDriverName, bool crsMustBeEPSG)
+bool S100BaseWriter::BaseChecks(const char *pszDriverName, bool crsMustBeEPSG,
+                                bool verticalDatumRequired)
 {
     if (m_poSrcDS->GetRasterXSize() < 1 || m_poSrcDS->GetRasterYSize() < 1)
     {
@@ -1118,33 +1119,39 @@ bool S100BaseWriter::BaseChecks(const char *pszDriverName, bool crsMustBeEPSG)
         m_poSrcDS->GetMetadataItem("VERTICAL_DATUM_EPSG_CODE");
     if (!pszVerticalDatum)
     {
-        CPLError(CE_Failure, CPLE_AppDefined,
-                 "VERTICAL_DATUM creation option must be specified");
-        return false;
-    }
-    m_nVerticalDatum =
-        S100GetVerticalDatumCodeFromCodeMeaningOrAbbrev(pszVerticalDatum);
-    if (m_nVerticalDatum <= 0)
-    {
-        auto pjCtxt = OSRGetProjTLSContext();
-        PJ *vertical_datum =
-            proj_create_from_database(pjCtxt, "EPSG", pszVerticalDatum,
-                                      PJ_CATEGORY_DATUM, false, nullptr);
-        const bool bIsValid =
-            vertical_datum != nullptr &&
-            proj_get_type(vertical_datum) == PJ_TYPE_VERTICAL_REFERENCE_FRAME;
-        proj_destroy(vertical_datum);
-        if (bIsValid)
-        {
-            m_nVerticalDatum = atoi(pszVerticalDatum);
-        }
-        else
+        if (verticalDatumRequired)
         {
             CPLError(CE_Failure, CPLE_AppDefined,
-                     "VERTICAL_DATUM value is invalid");
+                     "VERTICAL_DATUM creation option must be specified");
             return false;
         }
     }
+    else
+    {
+        m_nVerticalDatum =
+            S100GetVerticalDatumCodeFromCodeMeaningOrAbbrev(pszVerticalDatum);
+        if (m_nVerticalDatum <= 0)
+        {
+            auto pjCtxt = OSRGetProjTLSContext();
+            PJ *vertical_datum =
+                proj_create_from_database(pjCtxt, "EPSG", pszVerticalDatum,
+                                          PJ_CATEGORY_DATUM, false, nullptr);
+            const bool bIsValid = vertical_datum != nullptr &&
+                                  proj_get_type(vertical_datum) ==
+                                      PJ_TYPE_VERTICAL_REFERENCE_FRAME;
+            proj_destroy(vertical_datum);
+            if (bIsValid)
+            {
+                m_nVerticalDatum = atoi(pszVerticalDatum);
+            }
+            else
+            {
+                CPLError(CE_Failure, CPLE_AppDefined,
+                         "VERTICAL_DATUM value is invalid");
+                return false;
+            }
+        }
+    }
 
     const std::string osFilename = CPLGetFilename(m_osDestFilename.c_str());
 
     CPLAssert(pszDriverName[0] == 'S');
diff --git a/frmts/hdf5/s100.h b/frmts/hdf5/s100.h
index 6f35e4f7718d..c9eda4e573c1 100644
--- a/frmts/hdf5/s100.h
+++ b/frmts/hdf5/s100.h
@@ -70,7 +70,8 @@ class S100BaseWriter CPL_NON_FINAL
     virtual bool Close() = 0;
     bool BaseClose();
 
-    bool BaseChecks(const char *pszDriverName, bool crsMustBeEPSG);
+    bool BaseChecks(const char *pszDriverName, bool crsMustBeEPSG,
+                    bool verticalDatumRequired);
 
     static bool WriteUInt8Value(hid_t hGroup, const char *pszName, int value);
     static bool WriteUInt16Value(hid_t hGroup, const char *pszName, int value);
diff --git a/frmts/hdf5/s102dataset.cpp b/frmts/hdf5/s102dataset.cpp
index 4d2089766ff8..7f6709f1fe94 100644
--- a/frmts/hdf5/s102dataset.cpp
+++ b/frmts/hdf5/s102dataset.cpp
@@ -847,7 +847,8 @@ bool S102Creator::Create(GDALProgressFunc pfnProgress, void *pProgressData)
         return false;
     }
 
-    if (!BaseChecks("S102", true))
+    if (!BaseChecks("S102", /* crsMustBeEPSG = */ true,
+                    /* verticalDatumRequired = */ true))
         return false;
 
     const bool bAppendSubdataset =
diff --git a/frmts/hdf5/s104dataset.cpp b/frmts/hdf5/s104dataset.cpp
index d284f277d54d..343a226141cc 100644
--- a/frmts/hdf5/s104dataset.cpp
+++ b/frmts/hdf5/s104dataset.cpp
@@ -780,7 +780,8 @@ bool S104Creator::Create(GDALProgressFunc pfnProgress, void *pProgressData)
         return false;
     }
 
-    if (!BaseChecks("S104", /* crsMustBeEPSG = */ false))
+    if (!BaseChecks("S104", /* crsMustBeEPSG = */ false,
+                    /* verticalDatumRequired = */ true))
         return false;
 
     std::map<std::string, std::variant<std::string, GDALDataset *>>
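The S-104 and S-111 writers both key their per-timestamp inputs on a map whose
values are either a filename (to be opened lazily) or the already-open source
dataset. A minimal self-contained sketch of that std::variant pattern (names
are illustrative):

    #include "gdal_priv.h"

    #include <map>
    #include <string>
    #include <variant>

    using TimestampedInput = std::variant<std::string, GDALDataset *>;

    // Sketch: walk the map, opening filename entries on demand and using
    // dataset-pointer entries directly.
    void ConsumeInputs(const std::map<std::string, TimestampedInput> &oMap)
    {
        for (const auto &[osTimestamp, input] : oMap)
        {
            if (std::holds_alternative<std::string>(input))
            {
                const std::string &osFilename = std::get<std::string>(input);
                // e.g. GDALDataset::Open(osFilename.c_str(), GDAL_OF_RASTER)
                (void)osFilename;
            }
            else
            {
                GDALDataset *poDS = std::get<GDALDataset *>(input);
                (void)poDS;  // used directly, no ownership taken
            }
        }
    }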
diff --git a/frmts/hdf5/s111dataset.cpp b/frmts/hdf5/s111dataset.cpp
index 072f4c70c456..11cb0f6ca4f7 100644
--- a/frmts/hdf5/s111dataset.cpp
+++ b/frmts/hdf5/s111dataset.cpp
@@ -1,11 +1,11 @@
 /******************************************************************************
  *
  * Project:  Hierarchical Data Format Release 5 (HDF5)
- * Purpose:  Read S111 datasets.
+ * Purpose:  Read/create S111 datasets.
  * Author:   Even Rouault <even.rouault at spatialys.com>
  *
  ******************************************************************************
- * Copyright (c) 2023, Even Rouault <even.rouault at spatialys.com>
+ * Copyright (c) 2023-2025, Even Rouault <even.rouault at spatialys.com>
  *
  * SPDX-License-Identifier: MIT
  ****************************************************************************/
@@ -22,8 +22,15 @@
 #include "gdal_proxy.h"
 #include "gdal_rat.h"
 
+#include "cpl_time.h"
+
+#include
+#include
+#include
+#include
 #include
 #include
+#include
 
 /************************************************************************/
 /*                             S111Dataset                              */
 /************************************************************************/
 
@@ -40,6 +47,11 @@ class S111Dataset final : public S100BaseDataset
     ~S111Dataset() override;
 
     static GDALDataset *Open(GDALOpenInfo *);
+    static GDALDataset *CreateCopy(const char *pszFilename,
+                                   GDALDataset *poSrcDS, int bStrict,
+                                   char **papszOptions,
+                                   GDALProgressFunc pfnProgress,
+                                   void *pProgressData);
 };
 
 S111Dataset::~S111Dataset() = default;
@@ -697,6 +709,1211 @@ GDALDataset *S111Dataset::Open(GDALOpenInfo *poOpenInfo)
     return poDS.release();
 }
 
+/************************************************************************/
+/*                             S111Creator                              */
+/************************************************************************/
+
+class S111Creator final : public S100BaseWriter
+{
+  public:
+    S111Creator(const char *pszDestFilename, GDALDataset *poSrcDS,
+                CSLConstList papszOptions)
+        : S100BaseWriter(pszDestFilename, poSrcDS, papszOptions)
+    {
+    }
+
+    ~S111Creator() override;
+
+    bool Create(GDALProgressFunc pfnProgress, void *pProgressData);
+
+    static constexpr const char *FEATURE_TYPE = "SurfaceCurrent";
+
+  protected:
+    bool Close() override
+    {
+        return BaseClose();
+    }
+
+  private:
+    bool WriteFeatureGroupAttributes();
+    bool WriteUncertaintyDataset();
+    bool FillFeatureInstanceGroup(
+        const std::map<std::string, std::variant<std::string, GDALDataset *>>
+            &oMapTimestampToDS,
+        GDALProgressFunc pfnProgress, void *pProgressData);
+    bool CopyValues(GDALDataset *poSrcDS, GDALProgressFunc pfnProgress,
+                    void *pProgressData);
+    bool CreateGroupF();
+};
+
+/************************************************************************/
+/*                     S111Creator::~S111Creator()                      */
+/************************************************************************/
+
+S111Creator::~S111Creator()
+{
+    S111Creator::Close();
+}
+
+/************************************************************************/
+/*                        S111Creator::Create()                         */
+/************************************************************************/
+
+bool S111Creator::Create(GDALProgressFunc pfnProgress, void *pProgressData)
+{
+    CPLStringList aosDatasets(
+        CSLTokenizeString2(m_aosOptions.FetchNameValue("DATASETS"), ",", 0));
+    if (m_poSrcDS->GetRasterCount() == 0 && aosDatasets.empty())
+    {
+        // Deal with S111 -> S111 translation.
+        CSLConstList papszSubdatasets = m_poSrcDS->GetMetadata("SUBDATASETS");
+        if (papszSubdatasets)
+        {
+            int iSubDS = 0;
+            std::string osFirstDataset;
+            std::string osDatasets;
+            for (const auto &[pszItem, pszValue] :
+                 cpl::IterateNameValue(papszSubdatasets))
+            {
+                if (STARTS_WITH(pszItem, "SUBDATASET_") &&
+                    cpl::ends_with(std::string_view(pszItem), "_NAME") &&
+                    STARTS_WITH(pszValue, "S111:"))
+                {
+                    if (strstr(pszValue, ":SurfaceCurrent."))
+                    {
+                        auto poTmpDS =
+                            std::unique_ptr<GDALDataset>(GDALDataset::Open(
+                                pszValue,
+                                GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR));
+                        if (!poTmpDS)
+                            return false;
+                        CPLStringList aosOptions(m_aosOptions);
+                        if (iSubDS > 0)
+                            aosOptions.SetNameValue("APPEND_SUBDATASET", "YES");
+                        S111Creator oAuxCreator(m_osDestFilename.c_str(),
+                                                poTmpDS.get(),
+                                                aosOptions.List());
+                        const int nSubDSCount =
+                            ((CSLCount(papszSubdatasets) + 1) / 2);
+                        std::unique_ptr<void,
+                                        decltype(&GDALDestroyScaledProgress)>
+                            pScaledProgressData(
+                                GDALCreateScaledProgress(
+                                    static_cast<double>(iSubDS) / nSubDSCount,
+                                    static_cast<double>(iSubDS + 1) /
+                                        nSubDSCount,
+                                    pfnProgress, pProgressData),
+                                GDALDestroyScaledProgress);
+                        ++iSubDS;
+                        if (!oAuxCreator.Create(GDALScaledProgress,
+                                                pScaledProgressData.get()))
+                            return false;
+                    }
+                    else
+                    {
+                        if (osFirstDataset.empty())
+                            osFirstDataset = pszValue;
+                        if (!osDatasets.empty())
+                            osDatasets += ',';
+                        osDatasets += pszValue;
+                    }
+                }
+            }
+            if (iSubDS > 0)
+            {
+                return true;
+            }
+            else if (!osDatasets.empty())
+            {
+                auto poTmpDS = std::unique_ptr<GDALDataset>(
+                    GDALDataset::Open(osFirstDataset.c_str(),
+                                      GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR));
+                if (!poTmpDS)
+                    return false;
+                CPLStringList aosOptions(m_aosOptions);
+                aosOptions.SetNameValue("DATASETS", osDatasets.c_str());
+                S111Creator oAuxCreator(m_osDestFilename.c_str(), poTmpDS.get(),
+                                        aosOptions.List());
+                return oAuxCreator.Create(pfnProgress, pProgressData);
+            }
+        }
+    }
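The translation path above splits the overall progress range across
subdatasets with GDALCreateScaledProgress(). A minimal self-contained sketch
of that GDAL progress pattern (function name is illustrative):

    #include "cpl_progress.h"

    // Sketch: give each of N sub-tasks an even share of the [0,1] progress
    // range; each sub-task reports through GDALScaledProgress with the
    // returned opaque handle as callback data.
    bool ProcessAll(int nTasks, GDALProgressFunc pfnProgress,
                    void *pProgressData)
    {
        for (int i = 0; i < nTasks; ++i)
        {
            void *pScaled = GDALCreateScaledProgress(
                static_cast<double>(i) / nTasks,
                static_cast<double>(i + 1) / nTasks, pfnProgress,
                pProgressData);
            // A real sub-task would pass (GDALScaledProgress, pScaled) as its
            // progress callback; here we only report its completion.
            const bool bOK = GDALScaledProgress(1.0, "", pScaled) != 0;
            GDALDestroyScaledProgress(pScaled);
            if (!bOK)
                return false;  // user interrupted through the callback
        }
        return true;
    }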
+
+    if (m_poSrcDS->GetRasterCount() != 2 && m_poSrcDS->GetRasterCount() != 4)
+    {
+        CPLError(CE_Failure, CPLE_NotSupported,
+                 "Source dataset %s must have 2 or 4 bands",
+                 m_poSrcDS->GetDescription());
+        return false;
+    }
+
+    if (!BaseChecks("S111", /* crsMustBeEPSG = */ false,
+                    /* verticalDatumRequired = */ false))
+        return false;
+
+    std::map<std::string, std::variant<std::string, GDALDataset *>>
+        oMapTimestampToDS;
+    CPLStringList aosDatasetsTimePoint(CSLTokenizeString2(
+        m_aosOptions.FetchNameValue("DATASETS_TIME_POINT"), ",", 0));
+    if (!aosDatasets.empty())
+    {
+        if (!aosDatasetsTimePoint.empty() &&
+            aosDatasetsTimePoint.size() != aosDatasets.size())
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "DATASETS_TIME_POINT does not have the same number of "
+                     "values as DATASETS");
+            return false;
+        }
+        int i = 0;
+        for (const char *pszDataset : aosDatasets)
+        {
+            auto poDS = std::unique_ptr<GDALDataset>(GDALDataset::Open(
+                pszDataset, GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR));
+            if (!poDS)
+                return false;
+            if (poDS->GetRasterXSize() != m_poSrcDS->GetRasterXSize() ||
+                poDS->GetRasterYSize() != m_poSrcDS->GetRasterYSize())
+            {
+                CPLError(CE_Failure, CPLE_NotSupported,
+                         "Dataset %s does not have the same dimensions as %s",
+                         poDS->GetDescription(), m_poSrcDS->GetDescription());
+                return false;
+            }
+            if (poDS->GetRasterCount() != m_poSrcDS->GetRasterCount())
+            {
+                CPLError(CE_Failure, CPLE_NotSupported,
+                         "Dataset %s must have %d bands",
+                         poDS->GetDescription(), m_poSrcDS->GetRasterCount());
+                return false;
+            }
+            auto poSRS = poDS->GetSpatialRef();
+            if (!poSRS || !poSRS->IsSame(m_poSRS))
+            {
+                CPLError(CE_Failure, CPLE_NotSupported,
+                         "Dataset %s does not have the same CRS as %s",
+                         poDS->GetDescription(), m_poSrcDS->GetDescription());
+                return false;
+            }
+            GDALGeoTransform gt;
+            if (poDS->GetGeoTransform(gt) != CE_None || gt != m_gt)
+            {
+                CPLError(CE_Failure, CPLE_NotSupported,
+                         "Dataset %s does not have the same geotransform as %s",
+                         poDS->GetDescription(), m_poSrcDS->GetDescription());
+                return false;
+            }
+            const char *pszVerticalDatum =
+                poDS->GetMetadataItem("VERTICAL_DATUM");
+            if (pszVerticalDatum)
+            {
+                const int nVerticalDatum =
+                    S100GetVerticalDatumCodeFromCodeMeaningOrAbbrev(
+                        pszVerticalDatum);
+                if (nVerticalDatum != m_nVerticalDatum)
+                {
+                    CPLError(CE_Failure, CPLE_NotSupported,
+                             "Dataset %s does not have the same vertical datum "
+                             "as %s",
+                             poDS->GetDescription(),
+                             m_poSrcDS->GetDescription());
+                    return false;
+                }
+            }
+            const char *pszTimePoint = poDS->GetMetadataItem("timePoint");
+            if (!pszTimePoint && !aosDatasetsTimePoint.empty())
+                pszTimePoint = aosDatasetsTimePoint[i];
+            if (!pszTimePoint)
+            {
+                CPLError(
+                    CE_Failure, CPLE_NotSupported,
+                    "Dataset %s does not have a timePoint metadata item, and "
+                    "the DATASETS_TIME_POINT creation option is not set",
+                    poDS->GetDescription());
+                return false;
+            }
+            if (strlen(pszTimePoint) != strlen("YYYYMMDDTHHMMSSZ") ||
+                pszTimePoint[8] != 'T' || pszTimePoint[15] != 'Z')
+            {
+                CPLError(CE_Failure, CPLE_AppDefined,
+                         "timePoint value for dataset %s is %s, but does not "
+                         "conform to a YYYYMMDDTHHMMSSZ datetime value.",
+                         poDS->GetDescription(), pszTimePoint);
+                return false;
+            }
+            if (cpl::contains(oMapTimestampToDS, pszTimePoint))
+            {
+                CPLError(CE_Failure, CPLE_AppDefined,
+                         "Several datasets are at timePoint %s.", pszTimePoint);
+                return false;
+            }
+            oMapTimestampToDS[pszTimePoint] = pszDataset;
+            ++i;
+        }
+    }
+
+    {
+        const char *pszTimePoint = m_aosOptions.FetchNameValueDef(
+            "TIME_POINT", m_poSrcDS->GetMetadataItem("timePoint"));
+        if (!pszTimePoint)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "TIME_POINT creation option value must "
+                     "be set, or source dataset must have a timePoint metadata "
+                     "item.");
+            return false;
+        }
+        if (strlen(pszTimePoint) != strlen("YYYYMMDDTHHMMSSZ") ||
+            pszTimePoint[8] != 'T' || pszTimePoint[15] != 'Z')
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "TIME_POINT creation option value must "
+                     "be set to a YYYYMMDDTHHMMSSZ datetime value.");
+            return false;
+        }
+
+        if (oMapTimestampToDS.empty())
+        {
+            oMapTimestampToDS[pszTimePoint] = m_poSrcDS;
+        }
+        else
+        {
+            const auto oIter = oMapTimestampToDS.find(pszTimePoint);
+            if (oIter != oMapTimestampToDS.end() &&
+                CPLString(std::get<std::string>(oIter->second))
+                        .replaceAll('\\', '/') !=
+                    CPLString(m_poSrcDS->GetDescription())
+                        .replaceAll('\\', '/'))
+            {
+                CPLError(CE_Failure, CPLE_AppDefined,
+                         "Several datasets are at timePoint %s (%s vs %s).",
+                         pszTimePoint,
+                         std::get<std::string>(oIter->second).c_str(),
+                         m_poSrcDS->GetDescription());
+                return false;
+            }
+        }
+    }
+    if (oMapTimestampToDS.size() > 999)
+    {
+        CPLError(CE_Failure, CPLE_AppDefined,
+                 "Only up to 999 datasets are supported for the same feature "
+                 "instance group");
+        return false;
+    }
+
+    if (m_poSRS->IsVertical() || m_poSRS->IsCompound() || m_poSRS->IsLocal() ||
+        m_poSRS->GetAxesCount() != 2)
+    {
+        CPLError(CE_Failure, CPLE_NotSupported,
+                 "The CRS must be a geographic 2D or projected 2D CRS");
+        return false;
+    }
+
+    const bool bAppendSubdataset =
+        CPLTestBool(m_aosOptions.FetchNameValueDef("APPEND_SUBDATASET", "NO"));
+    if (bAppendSubdataset)
+    {
+        GDALOpenInfo oOpenInfo(m_osDestFilename.c_str(), GA_ReadOnly);
+        auto poOriDS =
+            std::unique_ptr<GDALDataset>(S111Dataset::Open(&oOpenInfo));
+        if (!poOriDS)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "%s is not a valid existing S111 dataset",
+                     m_osDestFilename.c_str());
+            return false;
+        }
+        const auto poOriSRS = poOriDS->GetSpatialRef();
+        if (!poOriSRS)
+        {
+            // shouldn't happen
+            return false;
+        }
+        if (!poOriSRS->IsSame(m_poSRS))
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "CRS of %s is not the same as the one of %s",
+                     m_osDestFilename.c_str(), m_poSrcDS->GetDescription());
+            return false;
+        }
+        poOriDS.reset();
+
+        OGREnvelope sExtent;
+        if (m_poSrcDS->GetExtentWGS84LongLat(&sExtent) != OGRERR_NONE)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Cannot get dataset extent in WGS84 longitude/latitude");
+            return false;
+        }
+
+        bool ret = OpenFileUpdateMode();
+        if (ret)
+        {
+            m_featureGroup.reset(H5_CHECK(H5Gopen(m_hdf5, "SurfaceCurrent")));
+        }
+
+        ret = ret && m_featureGroup;
+        double dfNumInstances = 0;
+        ret = ret && GH5_FetchAttribute(m_featureGroup, "numInstances",
+                                        dfNumInstances, true);
+        if (ret && !(dfNumInstances >= 1 && dfNumInstances <= 99 &&
+                     std::round(dfNumInstances) == dfNumInstances))
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Invalid value for numInstances");
+            ret = false;
+        }
+        else if (ret && dfNumInstances == 99)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "Too many existing feature instances");
+            ret = false;
+        }
+        else
+        {
+            const int newNumInstances = static_cast<int>(dfNumInstances) + 1;
+            ret = ret && GH5_WriteAttribute(m_featureGroup, "numInstances",
+                                            newNumInstances);
+            ret = ret && CreateFeatureInstanceGroup(CPLSPrintf(
+                             "SurfaceCurrent.%02d", newNumInstances));
+            ret = ret && FillFeatureInstanceGroup(oMapTimestampToDS,
+                                                  pfnProgress, pProgressData);
+        }
+
+        return Close() && ret;
+    }
+    else
+    {
+        bool ret = CreateFile();
+        ret = ret && WriteProductSpecification("INT.IHO.S-111.2.0");
+        ret = ret && WriteIssueDate();
+        ret = ret && WriteIssueTime(/* bAutogenerateFromCurrent = */ true);
+        ret = ret && WriteHorizontalCRS();
+        ret = ret && WriteTopLevelBoundingBox();
+
+        const char *pszGeographicIdentifier = m_aosOptions.FetchNameValueDef(
+            "GEOGRAPHIC_IDENTIFIER",
+            m_poSrcDS->GetMetadataItem("geographicIdentifier"));
+        if (pszGeographicIdentifier)
+        {
+            ret =
+                ret && WriteVarLengthStringValue(m_hdf5, "geographicIdentifier",
+                                                 pszGeographicIdentifier);
+        }
+
+        const char *pszDepthTypeIndex = m_aosOptions.FetchNameValueDef(
+            "DEPTH_TYPE", m_poSrcDS->GetMetadataItem("depthTypeIndex"));
+        if (!pszDepthTypeIndex)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "DEPTH_TYPE creation option must be specified.");
+            return false;
+        }
+
+        GH5_HIDTypeHolder hDepthTypeIndexEnumDataType(
+            H5_CHECK(H5Tenum_create(H5T_STD_U8LE)));
+        ret = ret && hDepthTypeIndexEnumDataType;
+
+        uint8_t val;
+        val = 1;
+        ret = ret && H5_CHECK(H5Tenum_insert(hDepthTypeIndexEnumDataType,
+                                             "heightOrDepth", &val)) >= 0;
+        val = 2;
+        ret = ret && H5_CHECK(H5Tenum_insert(hDepthTypeIndexEnumDataType,
+                                             "layerAverage", &val)) >= 0;
+
+        const int nDepthTypeIndex = EQUAL(pszDepthTypeIndex, "heightOrDepth")
+                                        ? 1
+                                    : EQUAL(pszDepthTypeIndex, "layerAverage")
+                                        ? 2
+                                        : atoi(pszDepthTypeIndex);
+        if (nDepthTypeIndex != 1 && nDepthTypeIndex != 2)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "DEPTH_TYPE creation option must "
+                     "be set to heightOrDepth/1 or layerAverage/2.");
+            return false;
+        }
+        ret = ret &&
+              GH5_CreateAttribute(m_hdf5, "depthTypeIndex",
+                                  hDepthTypeIndexEnumDataType) &&
+              GH5_WriteAttribute(m_hdf5, "depthTypeIndex", nDepthTypeIndex);
+
+        const char *pszVerticalCS = m_aosOptions.FetchNameValueDef(
+            "VERTICAL_CS", m_poSrcDS->GetMetadataItem("verticalCS"));
+        if (!pszVerticalCS)
+        {
+            if (nDepthTypeIndex == 1)
+            {
+                CPLError(CE_Failure, CPLE_AppDefined,
+                         "VERTICAL_CS creation option must be specified when "
+                         "DEPTH_TYPE = heightOrDepth");
+                return false;
+            }
+        }
+        else
+        {
+            const int nVerticalCS = EQUAL(pszVerticalCS, "DEPTH")    ? 6498
+                                    : EQUAL(pszVerticalCS, "HEIGHT") ? 6499
+                                                                     : atoi(pszVerticalCS);
+            if (nVerticalCS != 6498 && nVerticalCS != 6499)
+            {
+                CPLError(
+                    CE_Failure, CPLE_NotSupported,
+                    "VERTICAL_CS creation option must be set either to 6498 "
+                    "(depth/down, metre), or 6499 (height/up, metre)");
+                return false;
+            }
+
+            ret = ret && WriteVerticalCS(nVerticalCS);
+        }
+
+        ret = ret && WriteVerticalCoordinateBase(2);  // verticalDatum
+
+        if (m_nVerticalDatum > 0)
+        {
+            // 1=s100VerticalDatum, 2=EPSG
+            ret = ret && WriteVerticalDatumReference(
+                             m_hdf5, m_nVerticalDatum <= 1024 ? 1 : 2);
+            ret = ret &&
+                  WriteVerticalDatum(m_hdf5, H5T_STD_I32LE, m_nVerticalDatum);
+        }
+        else if (nDepthTypeIndex == 1)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "VERTICAL_DATUM creation option must be specified when "
+                     "DEPTH_TYPE = heightOrDepth");
+            return false;
+        }
+
+        const char *pszSurfaceCurrentDepth = m_aosOptions.FetchNameValueDef(
+            "SURFACE_CURRENT_DEPTH",
+            m_poSrcDS->GetMetadataItem("surfaceCurrentDepth"));
+        if (!pszSurfaceCurrentDepth)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "SURFACE_CURRENT_DEPTH creation option must be "
+                     "specified.");
+            return false;
+        }
+        if (CPLGetValueType(pszSurfaceCurrentDepth) == CPL_VALUE_STRING)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "SURFACE_CURRENT_DEPTH creation option value must "
+                     "be a numeric value.");
+            return false;
+        }
+        ret = ret && WriteFloat32Value(m_hdf5, "surfaceCurrentDepth",
+                                       CPLAtof(pszSurfaceCurrentDepth));
+
+        const char *pszDatasetDeliveryInterval = m_aosOptions.FetchNameValueDef(
+            "DATASET_DELIVERY_INTERVAL",
+            m_poSrcDS->GetMetadataItem("datasetDeliveryInterval"));
+        if (pszDatasetDeliveryInterval)
+        {
+            ret = ret &&
+                  WriteVarLengthStringValue(m_hdf5, "datasetDeliveryInterval",
+                                            pszDatasetDeliveryInterval);
+        }
+
+        // SurfaceCurrent
+        ret = ret && CreateFeatureGroup(FEATURE_TYPE);
+        ret = ret && WriteFeatureGroupAttributes();
+        ret = ret && WriteAxisNames(m_featureGroup);
+
+        ret = ret && CreateFeatureInstanceGroup("SurfaceCurrent.01");
+        ret = ret && FillFeatureInstanceGroup(oMapTimestampToDS, pfnProgress,
+                                              pProgressData);
+
+        ret = ret && CreateGroupF();
+
+        return Close() && ret;
+    }
+}
+
+/************************************************************************/
+/*              S111Creator::WriteFeatureGroupAttributes()              */
+/************************************************************************/
+
+bool S111Creator::WriteFeatureGroupAttributes()
+{
+    CPLAssert(m_featureGroup);
+
+    // 3 = high (recommended)
+    const char *pszCommonPointRule = m_aosOptions.FetchNameValueDef(
+        "COMMON_POINT_RULE", m_poSrcDS->GetMetadataItem("commonPointRule"));
+    if (!pszCommonPointRule)
+        pszCommonPointRule = "3";  // high (recommended)
+    const int nCommonPointRule = EQUAL(pszCommonPointRule, "average") ? 1
+                                 : EQUAL(pszCommonPointRule, "low")   ? 2
+                                 : EQUAL(pszCommonPointRule, "high")  ? 3
+                                 : EQUAL(pszCommonPointRule, "all")
+                                     ? 4
+                                     : atoi(pszCommonPointRule);
+    bool ret = WriteCommonPointRule(m_featureGroup, nCommonPointRule);
+    ret = ret && WriteDataCodingFormat(m_featureGroup, 2);  // Regular grid
+    ret = ret && WriteDataOffsetCode(m_featureGroup, 5);    // Center of cell
+    ret = ret && WriteDimension(m_featureGroup, 2);
+    const char *pszHorizontalPositionUncertainty =
+        m_aosOptions.FetchNameValueDef(
+            "HORIZONTAL_POSITION_UNCERTAINTY",
+            m_poSrcDS->GetMetadataItem("horizontalPositionUncertainty"));
+    ret =
+        ret &&
+        WriteHorizontalPositionUncertainty(
+            m_featureGroup,
+            pszHorizontalPositionUncertainty &&
+                    pszHorizontalPositionUncertainty[0]
+                ? static_cast<float>(CPLAtof(pszHorizontalPositionUncertainty))
+                : -1.0f);
+    const char *pszVerticalUncertainty = m_aosOptions.FetchNameValueDef(
+        "VERTICAL_UNCERTAINTY",
+        m_poSrcDS->GetMetadataItem("verticalUncertainty"));
+    ret = ret && WriteVerticalUncertainty(
+                     m_featureGroup,
+                     pszVerticalUncertainty && pszVerticalUncertainty[0]
+                         ? static_cast<float>(CPLAtof(pszVerticalUncertainty))
+                         : -1.0f);
+    const char *pszTimeUncertainty = m_aosOptions.FetchNameValueDef(
+        "TIME_UNCERTAINTY", m_poSrcDS->GetMetadataItem("timeUncertainty"));
+    if (pszTimeUncertainty)
+        ret = ret && WriteFloat32Value(m_featureGroup, "timeUncertainty",
+                                       CPLAtof(pszTimeUncertainty));
+    const char *pszMethodCurrentsProduct = m_aosOptions.FetchNameValueDef(
+        "METHOD_CURRENTS_PRODUCT",
+        m_poSrcDS->GetMetadataItem("methodCurrentsProduct"));
+    if (pszMethodCurrentsProduct)
+        ret = ret && WriteVarLengthStringValue(m_featureGroup,
+                                               "methodCurrentsProduct",
+                                               pszMethodCurrentsProduct);
+    ret = ret && WriteInterpolationType(m_featureGroup, 10);  // discrete
+    ret = ret && WriteNumInstances(m_featureGroup, H5T_STD_U32LE, 1);
+    ret = ret && WriteSequencingRuleScanDirection(m_featureGroup,
+                                                  m_poSRS->IsProjected()
+                                                      ? "Easting, Northing"
+                                                      : "Longitude, Latitude");
+    ret = ret && WriteSequencingRuleType(m_featureGroup, 1);  // Linear
+    return ret;
+}
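The numeric code written by WriteCommonPointRule() follows the S-100
commonPointRule enumeration; as a small reference sketch (values as used by
the COMMON_POINT_RULE creation option):

    // S-100 commonPointRule codes accepted by COMMON_POINT_RULE.
    enum class CommonPointRule : int
    {
        average = 1,  // mean of the overlapping values
        low = 2,      // lowest of the overlapping values
        high = 3,     // highest of the overlapping values (the default here)
        all = 4       // all overlapping values are retained
    };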
"Easting, Northing" + : "Longitude, Latitude"); + ret = ret && WriteSequencingRuleType(m_featureGroup, 1); // Linear + return ret; +} + +/************************************************************************/ +/* S111Creator::WriteUncertaintyDataset() */ +/************************************************************************/ + +bool S111Creator::WriteUncertaintyDataset() +{ + CPLAssert(m_featureInstanceGroup); + + GH5_HIDTypeHolder hDataType( + H5_CHECK(H5Tcreate(H5T_COMPOUND, sizeof(char *) + sizeof(float)))); + GH5_HIDTypeHolder hVarLengthStringDataType(H5_CHECK(H5Tcopy(H5T_C_S1))); + bool bRet = + hVarLengthStringDataType && + H5_CHECK(H5Tset_size(hVarLengthStringDataType, H5T_VARIABLE)) >= 0; + bRet = bRet && hVarLengthStringDataType && + H5_CHECK( + H5Tset_strpad(hVarLengthStringDataType, H5T_STR_NULLTERM)) >= 0; + bRet = bRet && hDataType && + H5_CHECK(H5Tinsert(hDataType, "name", 0, + hVarLengthStringDataType)) >= 0 && + H5_CHECK(H5Tinsert(hDataType, "value", sizeof(char *), + H5T_IEEE_F32LE)) >= 0; + constexpr hsize_t NUM_ROWS = 2; + hsize_t dims[] = {NUM_ROWS}; + GH5_HIDSpaceHolder hDataSpace(H5_CHECK(H5Screate_simple(1, dims, nullptr))); + GH5_HIDDatasetHolder hDatasetID; + GH5_HIDParametersHolder hParams(H5_CHECK(H5Pcreate(H5P_DATASET_CREATE))); + bRet = bRet && hParams; + if (bRet) + { + hDatasetID.reset( + H5_CHECK(H5Dcreate(m_featureInstanceGroup, "uncertainty", hDataType, + hDataSpace, hParams))); + bRet = hDatasetID; + } + + GH5_HIDSpaceHolder hFileSpace; + if (bRet) + { + hFileSpace.reset(H5_CHECK(H5Dget_space(hDatasetID))); + bRet = hFileSpace; + } + GByte abyValues[NUM_ROWS * (sizeof(char *) + sizeof(float))]; + { + const char *pszName = "surfaceCurrentSpeed"; + const char *pszVal = m_aosOptions.FetchNameValueDef( + "UNCERTAINTY_SPEED", + m_poSrcDS->GetMetadataItem("uncertaintySurfaceCurrentSpeed")); + float fVal = pszVal ? static_cast(CPLAtof(pszVal)) : -1.0f; + CPL_LSBPTR32(&fVal); + memcpy(abyValues, &pszName, sizeof(char **)); + memcpy(abyValues + sizeof(char *), &fVal, sizeof(fVal)); + } + { + const char *pszName = "surfaceCurrentDirection"; + const char *pszVal = m_aosOptions.FetchNameValueDef( + "UNCERTAINTY_DIRECTION", + m_poSrcDS->GetMetadataItem("uncertaintySurfaceCurrentDirection")); + float fVal = pszVal ? 
+
+/************************************************************************/
+/*                S111Creator::FillFeatureInstanceGroup()               */
+/************************************************************************/
+
+bool S111Creator::FillFeatureInstanceGroup(
+    const std::map<std::string, std::variant<std::string, GDALDataset *>>
+        &oMapTimestampToDS,
+    GDALProgressFunc pfnProgress, void *pProgressData)
+{
+    bool ret = WriteFIGGridRelatedParameters(m_featureInstanceGroup);
+
+    const int numInstances = static_cast<int>(oMapTimestampToDS.size());
+
+    ret =
+        ret && WriteNumGRP(m_featureInstanceGroup, H5T_STD_U32LE, numInstances);
+    ret = ret && WriteUInt32Value(m_featureInstanceGroup, "numberOfTimes",
+                                  numInstances);
+
+    // Check if value groups are spaced at a regular time interval
+    GIntBig nLastInterval = 0;
+    GIntBig nLastTS = 0;
+    for (const auto &[key, value] : oMapTimestampToDS)
+    {
+        CPL_IGNORE_RET_VAL(value);
+        int nYear, nMonth, nDay, nHour, nMinute, nSecond;
+        if (sscanf(key.c_str(), "%04d%02d%02dT%02d%02d%02dZ", &nYear, &nMonth,
+                   &nDay, &nHour, &nMinute, &nSecond) == 6)
+        {
+            struct tm brokenDown;
+            memset(&brokenDown, 0, sizeof(brokenDown));
+            brokenDown.tm_year = nYear - 1900;
+            brokenDown.tm_mon = nMonth - 1;
+            brokenDown.tm_mday = nDay;
+            brokenDown.tm_hour = nHour;
+            brokenDown.tm_min = nMinute;
+            brokenDown.tm_sec = nSecond;
+            const GIntBig nTS = CPLYMDHMSToUnixTime(&brokenDown);
+            if (nLastTS != 0)
+            {
+                if (nLastInterval == 0)
+                {
+                    nLastInterval = nTS - nLastTS;
+                }
+                else if (nLastInterval != nTS - nLastTS)
+                {
+                    nLastInterval = 0;
+                    break;
+                }
+            }
+            nLastTS = nTS;
+        }
+    }
+
+    const char *pszTimeRecordInterval = m_aosOptions.FetchNameValueDef(
+        "TIME_RECORD_INTERVAL",
+        m_poSrcDS->GetMetadataItem("timeRecordInterval"));
+    if (pszTimeRecordInterval)
+    {
+        ret = ret &&
+              WriteUInt16Value(m_featureInstanceGroup, "timeRecordInterval",
+                               atoi(pszTimeRecordInterval));
+    }
+    else if (nLastInterval > 0 && nLastInterval < 65536)
+    {
+        ret = ret &&
+              WriteUInt16Value(m_featureInstanceGroup, "timeRecordInterval",
+                               static_cast<int>(nLastInterval));
+    }
+
+    ret = ret && WriteVarLengthStringValue(
+                     m_featureInstanceGroup, "dateTimeOfFirstRecord",
+                     oMapTimestampToDS.begin()->first.c_str());
+    ret = ret && WriteVarLengthStringValue(
+                     m_featureInstanceGroup, "dateTimeOfLastRecord",
+                     oMapTimestampToDS.rbegin()->first.c_str());
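The interval detection above converts each YYYYMMDDTHHMMSSZ key to a Unix
timestamp via cpl_time.h. The parsing step, isolated into a hypothetical
helper for clarity:

    #include "cpl_time.h"

    #include <cstdio>
    #include <cstring>
    #include <ctime>

    // Hypothetical helper mirroring the parsing in FillFeatureInstanceGroup():
    // converts "YYYYMMDDTHHMMSSZ" to Unix time, returning false on malformed
    // input.
    static bool ParseTimePoint(const char *pszTimePoint, GIntBig &nUnixTime)
    {
        int nYear, nMonth, nDay, nHour, nMinute, nSecond;
        if (sscanf(pszTimePoint, "%04d%02d%02dT%02d%02d%02dZ", &nYear, &nMonth,
                   &nDay, &nHour, &nMinute, &nSecond) != 6)
            return false;
        struct tm brokenDown;
        memset(&brokenDown, 0, sizeof(brokenDown));
        brokenDown.tm_year = nYear - 1900;
        brokenDown.tm_mon = nMonth - 1;
        brokenDown.tm_mday = nDay;
        brokenDown.tm_hour = nHour;
        brokenDown.tm_min = nMinute;
        brokenDown.tm_sec = nSecond;
        nUnixTime = CPLYMDHMSToUnixTime(&brokenDown);
        return true;
    }

Two consecutive groups one hour apart then yield differences of 3600, which
is what gets written as timeRecordInterval when no explicit value is given.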
+
+    const char *pszDataDynamicity = m_aosOptions.FetchNameValueDef(
+        "DATA_DYNAMICITY", m_poSrcDS->GetMetadataItem("dataDynamicity"));
+    if (!pszDataDynamicity)
+    {
+        CPLError(CE_Failure, CPLE_AppDefined,
+                 "DATA_DYNAMICITY creation option must "
+                 "be specified.");
+        return false;
+    }
+    {
+        GH5_HIDTypeHolder hDataDynamicityEnumDataType(
+            H5_CHECK(H5Tenum_create(H5T_STD_U8LE)));
+        ret = ret && hDataDynamicityEnumDataType;
+
+        uint8_t val;
+        val = 1;
+        ret = ret && H5_CHECK(H5Tenum_insert(hDataDynamicityEnumDataType,
+                                             "observation", &val)) >= 0;
+        val = 2;
+        ret = ret &&
+              H5_CHECK(H5Tenum_insert(hDataDynamicityEnumDataType,
+                                      "astronomicalPrediction", &val)) >= 0;
+        val = 3;
+        ret = ret && H5_CHECK(H5Tenum_insert(hDataDynamicityEnumDataType,
+                                             "analysisOrHybrid", &val)) >= 0;
+        val = 5;
+        ret =
+            ret && H5_CHECK(H5Tenum_insert(hDataDynamicityEnumDataType,
+                                           "hydrodynamicForecast", &val)) >= 0;
+
+        const int nDataDynamicity =
+            EQUAL(pszDataDynamicity, "observation")              ? 1
+            : EQUAL(pszDataDynamicity, "astronomicalPrediction") ? 2
+            : EQUAL(pszDataDynamicity, "analysisOrHybrid")       ? 3
+            : EQUAL(pszDataDynamicity, "hydrodynamicForecast")
+                ? 5
+                : atoi(pszDataDynamicity);
+        if (nDataDynamicity != 1 && nDataDynamicity != 2 &&
+            nDataDynamicity != 3 && nDataDynamicity != 5)
+        {
+            CPLError(CE_Failure, CPLE_AppDefined,
+                     "DATA_DYNAMICITY creation option must "
+                     "be set to observation/1, astronomicalPrediction/2, "
+                     "analysisOrHybrid/3 or hydrodynamicForecast/5.");
+            return false;
+        }
+        ret = ret &&
+              GH5_CreateAttribute(m_featureInstanceGroup, "dataDynamicity",
+                                  hDataDynamicityEnumDataType) &&
+              GH5_WriteAttribute(m_featureInstanceGroup, "dataDynamicity",
+                                 nDataDynamicity);
+    }
+
+    if (m_poSrcDS->GetRasterCount() == 2 ||
+        m_aosOptions.FetchNameValue("UNCERTAINTY_SPEED") ||
+        m_aosOptions.FetchNameValue("UNCERTAINTY_DIRECTION"))
+    {
+        ret = ret && WriteUncertaintyDataset();
+    }
+
+    int iInstance = 0;
+    double dfLastRatio = 0;
+    for (const auto &iter : oMapTimestampToDS)
+    {
+        ++iInstance;
+        ret = ret && CreateValuesGroup(CPLSPrintf("Group_%03d", iInstance));
+
+        ret = ret && WriteVarLengthStringValue(m_valuesGroup, "timePoint",
+                                               iter.first.c_str());
+
+        std::unique_ptr<GDALDataset> poTmpDSHolder;
+        GDALDataset *poSrcDS;
+        if (std::holds_alternative<std::string>(iter.second))
+        {
+            poTmpDSHolder.reset(
+                GDALDataset::Open(std::get<std::string>(iter.second).c_str(),
+                                  GDAL_OF_RASTER | GDAL_OF_VERBOSE_ERROR));
+            if (!poTmpDSHolder)
+            {
+                return false;
+            }
+            poSrcDS = poTmpDSHolder.get();
+        }
+        else
+        {
+            CPLAssert(std::holds_alternative<GDALDataset *>(iter.second));
+            poSrcDS = std::get<GDALDataset *>(iter.second);
+        }
+
+        const double dfNewRatio = static_cast<double>(iInstance) / numInstances;
+        std::unique_ptr<void, decltype(&GDALDestroyScaledProgress)>
+            pScaledProgressData(
+                GDALCreateScaledProgress(dfLastRatio, dfNewRatio, pfnProgress,
+                                         pProgressData),
+                GDALDestroyScaledProgress);
+        ret = ret && CopyValues(poSrcDS, GDALScaledProgress,
+                                pScaledProgressData.get());
+        dfLastRatio = dfNewRatio;
+    }
+
+    return ret;
+}
+
+/************************************************************************/
+/*                      S111Creator::CreateGroupF()                     */
+/************************************************************************/
+
+// Per S-111 v2.0 spec
+
+constexpr float string_to_float(std::string_view str)
+{
+    if (str.empty())
+        return 0.0f;
+
+    size_t i = 0;
+    bool neg = false;
+    if (str[0] == '-')
+    {
+        neg = true;
+        ++i;
+    }
+    else if (str[0] == '+')
+    {
+        ++i;
+    }
+
+    float int_part = 0.0f;
+    for (; i < str.size() && str[i] >= '0' && str[i] <= '9'; ++i)
+    {
+        int_part = int_part * 10.0f + (str[i] - '0');
+    }
+
+    float frac_part = 0.0f;
+    float divisor = 1.0f;
+    if (i < str.size() && str[i] == '.')
+    {
+        for (++i; i < str.size() && str[i] >= '0' && str[i] <= '9'; ++i)
+        {
+            frac_part = frac_part * 10.0f + (str[i] - '0');
+            divisor *= 10.0f;
+        }
+    }
+
+    float result = int_part + frac_part / divisor;
+    return neg ? -result : result;
+}
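Because string_to_float() is constexpr, the consistency of the *_STR strings
and their float counterparts can be checked at compile time; for values with
a zero fractional part the conversion is exact. Illustrative assertions (not
part of the patch):

    static_assert(string_to_float("99.00") == 99.0f,
                  "integral-valued strings convert exactly");
    static_assert(string_to_float("-9999.00") == -9999.0f,
                  "sign and fractional digits are handled");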
+
+constexpr const char *MIN_SPEED_STR = "0.00";
+constexpr float MIN_SPEED = string_to_float(MIN_SPEED_STR);
+constexpr const char *MAX_SPEED_STR = "99.00";
+constexpr float MAX_SPEED = string_to_float(MAX_SPEED_STR);
+constexpr const char *NODATA_SPEED_STR = "-9999.00";
+constexpr float NODATA_SPEED = string_to_float(NODATA_SPEED_STR);
+
+constexpr const char *MIN_DIR_STR = "0.0";
+constexpr float MIN_DIR = string_to_float(MIN_DIR_STR);
+constexpr const char *MAX_DIR_STR = "359.9";
+constexpr float MAX_DIR = string_to_float(MAX_DIR_STR);
+constexpr const char *NODATA_DIR_STR = "-9999.0";
+constexpr float NODATA_DIR = string_to_float(NODATA_DIR_STR);
+
+constexpr const char *NODATA_UNCT_STR = "-1.0";
+constexpr float NODATA_UNCT = string_to_float(NODATA_UNCT_STR);
+
+bool S111Creator::CreateGroupF()
+{
+    bool ret = S100BaseWriter::CreateGroupF();
+
+    CPLStringList aosFeatureCodes;
+    aosFeatureCodes.push_back(FEATURE_TYPE);
+    ret = ret && WriteOneDimensionalVarLengthStringArray(
+                     m_GroupF, "featureCode", aosFeatureCodes.List());
+
+    {
+        std::vector<std::vector<std::string>> rows{
+            {"surfaceCurrentSpeed", "Surface Current Speed", "knot",
+             NODATA_SPEED_STR, "H5T_FLOAT", MIN_SPEED_STR, MAX_SPEED_STR,
+             "geSemiInterval"},
+            {"surfaceCurrentDirection", "Surface Current Direction", "degree",
+             NODATA_DIR_STR, "H5T_FLOAT", MIN_DIR_STR, MAX_DIR_STR,
+             "closedInterval"},
+            {"speedUncertainty", "Speed Uncertainty", "knot", NODATA_UNCT_STR,
+             "H5T_FLOAT", MIN_SPEED_STR, MAX_SPEED_STR, "geSemiInterval"},
+            {"directionUncertainty", "Direction Uncertainty", "degree",
+             NODATA_UNCT_STR, "H5T_FLOAT", MIN_DIR_STR, MAX_DIR_STR,
+             "closedInterval"},
+        };
+        rows.resize(m_poSrcDS->GetRasterCount());
+        ret = ret && WriteGroupFDataset(FEATURE_TYPE, rows);
+    }
+
+    return ret;
+}
+
+/************************************************************************/
+/*                       S111Creator::CopyValues()                      */
+/************************************************************************/
+
+bool S111Creator::CopyValues(GDALDataset *poSrcDS, GDALProgressFunc pfnProgress,
+                             void *pProgressData)
+{
+    CPLAssert(m_valuesGroup.get() >= 0);
+
+    const int nYSize = poSrcDS->GetRasterYSize();
+    const int nXSize = poSrcDS->GetRasterXSize();
+
+    hsize_t dims[] = {static_cast<hsize_t>(nYSize),
+                      static_cast<hsize_t>(nXSize)};
+
+    GH5_HIDSpaceHolder hDataSpace(H5_CHECK(H5Screate_simple(2, dims, nullptr)));
+    bool bRet = hDataSpace;
+
+    const bool bDeflate =
+        EQUAL(m_aosOptions.FetchNameValueDef("COMPRESS", "DEFLATE"), "DEFLATE");
+    const int nCompressionLevel =
+        atoi(m_aosOptions.FetchNameValueDef("ZLEVEL", "6"));
+    const int nBlockSize =
+        std::min(4096, std::max(100, atoi(m_aosOptions.FetchNameValueDef(
+                                        "BLOCK_SIZE", "100"))));
+    const int nBlockXSize = std::min(nXSize, nBlockSize);
+    const int nBlockYSize = std::min(nYSize, nBlockSize);
+    const int nComponents = poSrcDS->GetRasterCount();
+    CPLAssert(nComponents == 2 || nComponents == 4);
+
+    GH5_HIDTypeHolder hDataType(
+        H5_CHECK(H5Tcreate(H5T_COMPOUND, sizeof(float) * nComponents)));
+    bRet = bRet && hDataType &&
+           H5_CHECK(H5Tinsert(hDataType, "surfaceCurrentSpeed", 0,
+                              H5T_IEEE_F32LE)) >= 0 &&
+           H5_CHECK(H5Tinsert(hDataType, "surfaceCurrentDirection",
+                              sizeof(float), H5T_IEEE_F32LE)) >= 0;
+    if (nComponents == 4 && bRet)
+    {
+        bRet = H5_CHECK(H5Tinsert(hDataType, "speedUncertainty",
+                                  2 * sizeof(float), H5T_IEEE_F32LE)) >= 0 &&
+               H5_CHECK(H5Tinsert(hDataType, "directionUncertainty",
+                                  3 * sizeof(float), H5T_IEEE_F32LE)) >= 0;
+    }
+
+    hsize_t chunk_size[] = {static_cast<hsize_t>(nBlockYSize),
+                            static_cast<hsize_t>(nBlockXSize)};
+
+    GH5_HIDParametersHolder hParams(H5_CHECK(H5Pcreate(H5P_DATASET_CREATE)));
+    bRet = bRet && hParams &&
+           H5_CHECK(H5Pset_fill_time(hParams, H5D_FILL_TIME_ALLOC)) >= 0 &&
+           H5_CHECK(H5Pset_layout(hParams, H5D_CHUNKED)) >= 0 &&
+           H5_CHECK(H5Pset_chunk(hParams, 2, chunk_size)) >= 0;
+
+    if (bRet && bDeflate)
+    {
+        bRet = H5_CHECK(H5Pset_deflate(hParams, nCompressionLevel)) >= 0;
+    }
+
+    GH5_HIDDatasetHolder hDatasetID;
+    if (bRet)
+    {
+        hDatasetID.reset(H5_CHECK(H5Dcreate(m_valuesGroup, "values", hDataType,
+                                            hDataSpace, hParams)));
+        bRet = hDatasetID;
+    }
+
+    GH5_HIDSpaceHolder hFileSpace;
+    if (bRet)
+    {
+        hFileSpace.reset(H5_CHECK(H5Dget_space(hDatasetID)));
+        bRet = hFileSpace;
+    }
+
+    const int nYBlocks = static_cast<int>(DIV_ROUND_UP(nYSize, nBlockYSize));
+    const int nXBlocks = static_cast<int>(DIV_ROUND_UP(nXSize, nBlockXSize));
+    std::vector<float> afValues(static_cast<size_t>(nBlockYSize) * nBlockXSize *
+                                nComponents);
+    const bool bReverseY = m_gt[5] < 0;
+
+    constexpr std::array<float, 4> afNoDataValue{NODATA_SPEED, NODATA_DIR,
+                                                 NODATA_UNCT, NODATA_UNCT};
+
+    constexpr float INF = std::numeric_limits<float>::infinity();
+    std::array<float, 4> afMin{INF, INF, INF, INF};
+    std::array<float, 4> afMax{-INF, -INF, -INF, -INF};
+
+    std::array<int, 4> abHasNoDataBand{FALSE, FALSE, FALSE, FALSE};
+    std::array<float, 4> afSrcNoData{0, 0, 0, 0};
+    for (int i = 0; i < nComponents; ++i)
+    {
+        afSrcNoData[i] = static_cast<float>(
+            poSrcDS->GetRasterBand(i + 1)->GetNoDataValue(&abHasNoDataBand[i]));
+    }
+
+    for (int iY = 0; iY < nYBlocks && bRet; iY++)
+    {
+        const int nSrcYOff = bReverseY
+                                 ? std::max(0, nYSize - (iY + 1) * nBlockYSize)
+                                 : iY * nBlockYSize;
+        const int nReqCountY = std::min(nBlockYSize, nYSize - iY * nBlockYSize);
+        for (int iX = 0; iX < nXBlocks && bRet; iX++)
+        {
+            const int nReqCountX =
+                std::min(nBlockXSize, nXSize - iX * nBlockXSize);
+
+            bRet =
+                poSrcDS->RasterIO(
+                    GF_Read, iX * nBlockXSize, nSrcYOff, nReqCountX, nReqCountY,
+                    bReverseY ? afValues.data() +
+                                    (nReqCountY - 1) * nReqCountX * nComponents
+                              : afValues.data(),
+                    nReqCountX, nReqCountY, GDT_Float32, nComponents, nullptr,
+                    static_cast<GSpacing>(sizeof(float)) * nComponents,
+                    bReverseY ? -static_cast<GSpacing>(sizeof(float)) *
+                                    nComponents * nReqCountX
+                              : 0,
+                    sizeof(float), nullptr) == CE_None;
+
+            if (bRet)
+            {
+                for (int i = 0; i < nReqCountY * nReqCountX; i++)
+                {
+                    for (int iC = 0; iC < nComponents; ++iC)
+                    {
+                        float fVal = afValues[i * nComponents + iC];
+                        if ((abHasNoDataBand[iC] && fVal == afSrcNoData[iC]) ||
+                            std::isnan(fVal))
+                        {
+                            afValues[i * nComponents + iC] = afNoDataValue[iC];
+                        }
+                        else
+                        {
+                            afMin[iC] = std::min(afMin[iC], fVal);
+                            afMax[iC] = std::max(afMax[iC], fVal);
+                        }
+                        CPL_LSBPTR32(&afValues[i * nComponents + iC]);
+                    }
+                }
+            }
+
+            H5OFFSET_TYPE offset[] = {
+                static_cast<H5OFFSET_TYPE>(iY) *
+                    static_cast<H5OFFSET_TYPE>(nBlockYSize),
+                static_cast<H5OFFSET_TYPE>(iX) *
+                    static_cast<H5OFFSET_TYPE>(nBlockXSize)};
+            hsize_t count[2] = {static_cast<hsize_t>(nReqCountY),
+                                static_cast<hsize_t>(nReqCountX)};
+            GH5_HIDSpaceHolder hMemSpace(
+                H5_CHECK(H5Screate_simple(2, count, nullptr)));
+            bRet =
+                bRet &&
+                H5_CHECK(H5Sselect_hyperslab(hFileSpace, H5S_SELECT_SET, offset,
+                                             nullptr, count, nullptr)) >= 0 &&
+                hMemSpace &&
+                H5_CHECK(H5Dwrite(hDatasetID, hDataType, hMemSpace, hFileSpace,
+                                  H5P_DEFAULT, afValues.data())) >= 0 &&
+                pfnProgress((static_cast<double>(iY) * nXBlocks + iX + 1) /
+                                (static_cast<double>(nXBlocks) * nYBlocks),
+                            "", pProgressData) != 0;
+        }
+    }
+
+    constexpr int IDX_SPEED = 0;
+    constexpr int IDX_DIR = 1;
+    constexpr int IDX_UNC_SPEED = 2;
+    constexpr int IDX_UNC_DIR = 3;
+
+    if (afMin[IDX_SPEED] > afMax[IDX_SPEED])
+    {
+        afMin[IDX_SPEED] = afMax[IDX_SPEED] = NODATA_SPEED;
+    }
+    else if (!(afMin[IDX_SPEED] >= MIN_SPEED && afMax[IDX_SPEED] <= MAX_SPEED))
+    {
+        CPLError(CE_Warning, CPLE_AppDefined,
+                 "Range of surface current speed in the dataset is [%f, %f] "
+                 "whereas the "
+                 "allowed range is [%.2f, %.2f]",
+                 afMin[IDX_SPEED], afMax[IDX_SPEED], MIN_SPEED, MAX_SPEED);
+    }
+
+    if (afMin[IDX_DIR] > afMax[IDX_DIR])
+    {
+        afMin[IDX_DIR] = afMax[IDX_DIR] = NODATA_DIR;
+    }
+    else if (!(afMin[IDX_DIR] >= MIN_DIR && afMax[IDX_DIR] <= MAX_DIR))
+    {
+        CPLError(
+            CE_Warning, CPLE_AppDefined,
+            "Range of surface current direction in the dataset is [%f, %f] "
+            "whereas the "
+            "allowed range is [%.2f, %.2f]",
+            afMin[IDX_DIR], afMax[IDX_DIR], MIN_DIR, MAX_DIR);
+    }
+
+    if (afMax[IDX_UNC_SPEED] >= afMin[IDX_UNC_SPEED] &&
+        afMin[IDX_UNC_SPEED] < 0)
+    {
+        CPLError(
+            CE_Warning, CPLE_AppDefined,
+            "Negative speed uncertainty value found (%f), which is not allowed "
+            "(except nodata value %s)",
+            afMin[IDX_UNC_SPEED], NODATA_UNCT_STR);
+    }
+
+    if (afMax[IDX_UNC_DIR] >= afMin[IDX_UNC_DIR] && afMin[IDX_UNC_DIR] < 0)
+    {
+        CPLError(CE_Warning, CPLE_AppDefined,
+                 "Negative direction uncertainty value found (%f), which is "
+                 "not allowed (except nodata value %s)",
+                 afMin[IDX_UNC_DIR], NODATA_UNCT_STR);
+    }
+
+    if (bRet)
+    {
+        double prevMinSpeed = 0;
+        double prevMaxSpeed = 0;
+        if (GH5_FetchAttribute(m_featureGroup, "minDatasetCurrentSpeed",
+                               prevMinSpeed) &&
+            GH5_FetchAttribute(m_featureGroup, "maxDatasetCurrentSpeed",
+                               prevMaxSpeed))
+        {
+            if (afMin[IDX_SPEED] != NODATA_SPEED)
+            {
+                prevMinSpeed = std::min<double>(prevMinSpeed, afMin[IDX_SPEED]);
+                prevMaxSpeed = std::max<double>(prevMaxSpeed, afMax[IDX_SPEED]);
+                bRet =
+                    GH5_WriteAttribute(m_featureGroup, "minDatasetCurrentSpeed",
+                                       prevMinSpeed) &&
+                    GH5_WriteAttribute(m_featureGroup, "maxDatasetCurrentSpeed",
+                                       prevMaxSpeed);
+            }
+        }
+        else
+        {
+            bRet = WriteFloat64Value(m_featureGroup, "minDatasetCurrentSpeed",
+                                     afMin[IDX_SPEED]) &&
+                   WriteFloat64Value(m_featureGroup, "maxDatasetCurrentSpeed",
+                                     afMax[IDX_SPEED]);
+        }
+    }
+
+    return bRet;
+}
+
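Since the driver registers pfnCreateCopy (below) but no pfnCreate, S-111
output is produced through CreateCopy, e.g. from gdal_translate or directly
from the C++ API. A minimal usage sketch (option values are illustrative):

    #include "cpl_string.h"
    #include "gdal_priv.h"

    // Sketch: write an S-111 file from an already-open source dataset.
    GDALDataset *WriteS111(GDALDataset *poSrc, const char *pszDstFilename)
    {
        GDALDriver *poDriver =
            GetGDALDriverManager()->GetDriverByName("S111");
        if (!poDriver)
            return nullptr;
        CPLStringList aosOptions;
        aosOptions.SetNameValue("TIME_POINT", "20251105T012600Z");
        aosOptions.SetNameValue("DEPTH_TYPE", "layerAverage");
        aosOptions.SetNameValue("SURFACE_CURRENT_DEPTH", "0.2");
        aosOptions.SetNameValue("DATA_DYNAMICITY", "hydrodynamicForecast");
        return poDriver->CreateCopy(pszDstFilename, poSrc,
                                    /* bStrict = */ false, aosOptions.List(),
                                    GDALDummyProgress, nullptr);
    }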
+/************************************************************************/
+/*                       S111Dataset::CreateCopy()                      */
+/************************************************************************/
+
+/* static */
+GDALDataset *S111Dataset::CreateCopy(const char *pszFilename,
+                                     GDALDataset *poSrcDS, int /* bStrict*/,
+                                     char **papszOptions,
+                                     GDALProgressFunc pfnProgress,
+                                     void *pProgressData)
+{
+    S111Creator creator(pszFilename, poSrcDS, papszOptions);
+    if (!creator.Create(pfnProgress, pProgressData))
+        return nullptr;
+
+    VSIStatBufL sStatBuf;
+    if (VSIStatL(pszFilename, &sStatBuf) == 0 &&
+        sStatBuf.st_size > 10 * 1024 * 1024)
+    {
+        CPLError(CE_Warning, CPLE_AppDefined,
+                 "%s file size exceeds 10 MB, which is the upper limit "
+                 "suggested for wireless transmission to marine vessels",
+                 pszFilename);
+    }
+
+    GDALOpenInfo oOpenInfo(pszFilename, GA_ReadOnly);
+    return Open(&oOpenInfo);
+}
+
 /************************************************************************/
 /*                      S111DatasetDriverUnload()                       */
 /************************************************************************/
@@ -722,6 +1939,7 @@ void GDALRegister_S111()
     S111DriverSetCommonMetadata(poDriver);
 
     poDriver->pfnOpen = S111Dataset::Open;
+    poDriver->pfnCreateCopy = S111Dataset::CreateCopy;
     poDriver->pfnUnloadDriver = S111DatasetDriverUnload;
 
     GetGDALDriverManager()->RegisterDriver(poDriver);